From 1739e75fecf0cb9507dc950bba1716f40be1e609 Mon Sep 17 00:00:00 2001
From: Marcelo Vanzin <vanzin@cloudera.com>
Date: Mon, 8 Aug 2016 10:34:54 -0700
Subject: [PATCH] [SPARK-16586][CORE] Handle JVM errors printed to stdout.

Some very rare JVM errors are printed to stdout instead of stderr, which
confuses the code in spark-class that parses the launcher's output. Add a
check that detects those cases and shows a proper error message to the user.
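
For context, spark-class consumes the launcher's NUL-delimited output roughly
like the sketch below (paraphrased for illustration, not verbatim from the
script):

    # The launcher prints each argument of the final command NUL-separated,
    # and build_command appends the launcher's own exit code as the last entry.
    CMD=()
    while IFS= read -d '' -r ARG; do
      CMD+=("$ARG")
    done < <(build_command "$@")
    # The last array element is therefore expected to be a numeric exit code;
    # anything the JVM writes to stdout gets mixed into this same stream.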

Tested by running spark-submit after setting "ulimit -v 32000".
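
The added check can be exercised on its own; the non-numeric sample value
below is hypothetical, standing in for a JVM error line that leaked into
stdout:

    # Hypothetical values: a normal exit code and a leaked JVM error message.
    for LAUNCHER_EXIT_CODE in 0 "Error: not enough memory"; do
      if ! [[ $LAUNCHER_EXIT_CODE =~ ^[0-9]+$ ]]; then
        echo "not a valid exit code, treating as a JVM error: $LAUNCHER_EXIT_CODE" >&2
      else
        echo "launcher exited with code $LAUNCHER_EXIT_CODE"
      fi
    done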

Closes #14231

Author: Marcelo Vanzin <vanzin@cloudera.com>

Closes #14508 from vanzin/SPARK-16586.
---
 bin/spark-class | 9 +++++++++
 1 file changed, 9 insertions(+)

diff --git a/bin/spark-class b/bin/spark-class
index 658e076bc0..377c8d1add 100755
--- a/bin/spark-class
+++ b/bin/spark-class
@@ -80,6 +80,15 @@ done < <(build_command "$@")
 COUNT=${#CMD[@]}
 LAST=$((COUNT - 1))
 LAUNCHER_EXIT_CODE=${CMD[$LAST]}
+
+# Certain JVM failures result in errors being printed to stdout (instead of stderr), which causes
+# the code that parses the output of the launcher to get confused. In those cases, check if the
+# exit code is an integer, and if it's not, handle it as a special error case.
+if ! [[ $LAUNCHER_EXIT_CODE =~ ^[0-9]+$ ]]; then
+  echo "${CMD[@]}" | head -n-1 1>&2
+  exit 1
+fi
+
 if [ $LAUNCHER_EXIT_CODE != 0 ]; then
   exit $LAUNCHER_EXIT_CODE
 fi
-- 
GitLab