KevinGre@hn0-keving:~/probe$ cat probe.py
import sys
import os
import pyspark

def getEnv(name):
    return name + "=" + (os.environ[name] if name in os.environ else "not set.")

def getState():
    return sys.version + "\n" + getEnv("PYTHONHASHSEED") + "\n" + getEnv("PYSPARK_PYTHON")

sc = pyspark.SparkContext.getOrCreate()
print("driver:")
print(getState())
print("executors:")
print(sc.parallelize([1]).map(lambda x: getState()).collect()[0])
KevinGre@hn0-keving:~/probe$ printenv
XDG_SESSION_ID=1
SPARK_HOME=/usr/hdp/current/spark-client
TERM=xterm
SHELL=/bin/bash
SSH_CLIENT=131.107.160.1 30017 22
OLDPWD=/var/log/livy
SSH_TTY=/dev/pts/0
USER=KevinGre
LS_COLORS=rs=0:di=01;34:ln=01;36:mh=00:pi=40;33:so=01;35:do=01;35:bd=40;33;01:cd=40;33;01:or=40;31;01:su=37;41:sg=30;43:ca=30;41:tw=30;42:ow=34;42:st=37;44:ex=01;32:*.tar=01;31:*.tgz=01;31:*.arj=01;31:*.taz=01;31:*.lzh=01;31:*.lzma=01;31:*.tlz=01;31:*.txz=01;31:*.zip=01;31:*.z=01;31:*.Z=01;31:*.dz=01;31:*.gz=01;31:*.lz=01;31:*.xz=01;31:*.bz2=01;31:*.bz=01;31:*.tbz=01;31:*.tbz2=01;31:*.tz=01;31:*.deb=01;31:*.rpm=01;31:*.jar=01;31:*.war=01;31:*.ear=01;31:*.sar=01;31:*.rar=01;31:*.ace=01;31:*.zoo=01;31:*.cpio=01;31:*.7z=01;31:*.rz=01;31:*.jpg=01;35:*.jpeg=01;35:*.gif=01;35:*.bmp=01;35:*.pbm=01;35:*.pgm=01;35:*.ppm=01;35:*.tga=01;35:*.xbm=01;35:*.xpm=01;35:*.tif=01;35:*.tiff=01;35:*.png=01;35:*.svg=01;35:*.svgz=01;35:*.mng=01;35:*.pcx=01;35:*.mov=01;35:*.mpg=01;35:*.mpeg=01;35:*.m2v=01;35:*.mkv=01;35:*.webm=01;35:*.ogm=01;35:*.mp4=01;35:*.m4v=01;35:*.mp4v=01;35:*.vob=01;35:*.qt=01;35:*.nuv=01;35:*.wmv=01;35:*.asf=01;35:*.rm=01;35:*.rmvb=01;35:*.flc=01;35:*.avi=01;35:*.fli=01;35:*.flv=01;35:*.gl=01;35:*.dl=01;35:*.xcf=01;35:*.xwd=01;35:*.yuv=01;35:*.cgm=01;35:*.emf=01;35:*.axv=01;35:*.anx=01;35:*.ogv=01;35:*.ogx=01;35:*.aac=00;36:*.au=00;36:*.flac=00;36:*.mid=00;36:*.midi=00;36:*.mka=00;36:*.mp3=00;36:*.mpc=00;36:*.ogg=00;36:*.ra=00;36:*.wav=00;36:*.axa=00;36:*.oga=00;36:*.spx=00;36:*.xspf=00;36:
PYSPARK_PYTHON=/usr/bin/anaconda/bin/python
PYSPARK_DRIVER_PYTHON=/usr/bin/anaconda/bin/python
MAIL=/var/mail/KevinGre
PATH=/usr/local/sbin:/usr/local/bin:/usr/sbin:/usr/bin:/sbin:/bin:/usr/games:/usr/local/games
PWD=/home/KevinGre/probe
JAVA_HOME=/usr/lib/jvm/java-7-openjdk-amd64
LANG=en_US.UTF-8
HADOOP_CONF_DIR=/etc/hadoop/conf
SHLVL=1
HOME=/home/KevinGre
PYTHONPATH=/usr/hdp/current/spark-client/python:/usr/hdp/current/spark-client/python/lib/py4j-0.9-src.zip
LOGNAME=KevinGre
SSH_CONNECTION=131.107.160.1 30017 10.0.0.16 22
LESSOPEN=| /usr/bin/lesspipe %s
AZURE_SPARK=1
XDG_RUNTIME_DIR=/run/user/2017
LESSCLOSE=/usr/bin/lesspipe %s %s
_=/usr/bin/printenv
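A note before the submit: PYSPARK_PYTHON and PYSPARK_DRIVER_PYTHON point at the Anaconda interpreter in this login shell, but with --deploy-mode cluster each container only sees what YARN exports in its own launch_container.sh (dumped later in this session). If executors were to come up on a different interpreter, one way to force the issue is to forward the variable explicitly; spark.yarn.appMasterEnv.* and spark.executorEnv.* are standard Spark-on-YARN confs, and the interpreter path is the one from this session. A sketch, not part of the recorded run:

    # Sketch only: forward the desired Python to both the AM and the executors.
    '/usr/hdp/current/spark-client/bin/spark-submit' '--master' 'yarn' '--deploy-mode' 'cluster' \
        '--conf' 'spark.yarn.appMasterEnv.PYSPARK_PYTHON=/usr/bin/anaconda/bin/python' \
        '--conf' 'spark.executorEnv.PYSPARK_PYTHON=/usr/bin/anaconda/bin/python' \
        probe.py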
KevinGre@hn0-keving:~/probe$ '/usr/hdp/current/spark-client/bin/spark-submit' '--master' 'yarn' '--deploy-mode' 'cluster' '--name' 'probeVersion' '--conf' 'spark.yarn.maxAppAttempts=1' '--conf' 'spark.yarn.tags=livy_a17e8ded-2121-4529-95c5-7cc1791311a0' probe.py
SLF4J: Class path contains multiple SLF4J bindings.
SLF4J: Found binding in [jar:file:/usr/hdp/2.4.2.0-258/spark/lib/spark-assembly-1.6.1.2.4.2.0-258-hadoop2.7.1.2.4.2.0-258.jar!/org/slf4j/impl/StaticLoggerBinder.class]
SLF4J: Found binding in [jar:file:/usr/hdp/2.4.2.0-258/spark/lib/spark-examples-1.6.1.2.4.2.0-258-hadoop2.7.1.2.4.2.0-258.jar!/org/slf4j/impl/StaticLoggerBinder.class]
SLF4J: See http://www.slf4j.org/codes.html#multiple_bindings for an explanation.
SLF4J: Actual binding is of type [org.slf4j.impl.Log4jLoggerFactory]
16/06/09 03:36:03 WARN NativeCodeLoader: Unable to load native-hadoop library for your platform... using builtin-java classes where applicable
16/06/09 03:36:03 INFO TimelineClientImpl: Timeline service address: http://hn0-keving.gnw4jk51tgbezpywcdl25ba0xb.dx.internal.cloudapp.net:8188/ws/v1/timeline/
16/06/09 03:36:04 INFO MetricsConfig: loaded properties from hadoop-metrics2-azure-file-system.properties
16/06/09 03:36:04 INFO WasbAzureIaasSink: Init starting.
16/06/09 03:36:04 INFO AzureIaasSink: Init starting. Initializing MdsLogger.
16/06/09 03:36:04 INFO AzureIaasSink: Init completed.
16/06/09 03:36:04 INFO WasbAzureIaasSink: Init completed.
16/06/09 03:36:04 INFO MetricsSinkAdapter: Sink azurefs2 started
16/06/09 03:36:04 INFO MetricsSystemImpl: Scheduled snapshot period at 60 second(s).
16/06/09 03:36:04 INFO MetricsSystemImpl: azure-file-system metrics system started
16/06/09 03:36:04 INFO ConfiguredRMFailoverProxyProvider: Failing over to rm2
16/06/09 03:36:04 INFO Client: Requesting a new application from cluster with 2 NodeManagers
16/06/09 03:36:04 INFO Client: Verifying our application has not requested more than the maximum memory capability of the cluster (25600 MB per container)
16/06/09 03:36:04 INFO Client: Will allocate AM container, with 1408 MB memory including 384 MB overhead
16/06/09 03:36:04 INFO Client: Setting up container launch context for our AM
16/06/09 03:36:04 INFO Client: Setting up the launch environment for our AM container
16/06/09 03:36:04 INFO Client: Preparing resources for our AM container
16/06/09 03:36:05 INFO Client: Uploading resource file:/home/KevinGre/probe/probe.py -> wasb://kevingre@wranglewestus.blob.core.windows.net/user/KevinGre/.sparkStaging/application_1465316720094_0024/probe.py
16/06/09 03:36:05 INFO Client: Uploading resource file:/usr/hdp/2.4.2.0-258/spark/python/lib/pyspark.zip -> wasb://kevingre@wranglewestus.blob.core.windows.net/user/KevinGre/.sparkStaging/application_1465316720094_0024/pyspark.zip
16/06/09 03:36:06 INFO Client: Uploading resource file:/usr/hdp/2.4.2.0-258/spark/python/lib/py4j-0.9-src.zip -> wasb://kevingre@wranglewestus.blob.core.windows.net/user/KevinGre/.sparkStaging/application_1465316720094_0024/py4j-0.9-src.zip
16/06/09 03:36:06 INFO Client: Uploading resource file:/tmp/spark-85176246-5db8-4015-b954-842c2fe2cb67/__spark_conf__1191782910464867142.zip -> wasb://kevingre@wranglewestus.blob.core.windows.net/user/KevinGre/.sparkStaging/application_1465316720094_0024/__spark_conf__1191782910464867142.zip
16/06/09 03:36:07 INFO SecurityManager: Changing view acls to: KevinGre
16/06/09 03:36:07 INFO SecurityManager: Changing modify acls to: KevinGre
16/06/09 03:36:07 INFO SecurityManager: SecurityManager: authentication disabled; ui acls disabled; users with view permissions: Set(KevinGre); users with modify permissions: Set(KevinGre)
16/06/09 03:36:07 INFO Client: Submitting application 24 to ResourceManager
16/06/09 03:36:07 INFO YarnClientImpl: Submitted application application_1465316720094_0024
16/06/09 03:36:08 INFO Client: Application report for application_1465316720094_0024 (state: ACCEPTED)
16/06/09 03:36:08 INFO Client:
     client token: N/A
     diagnostics: N/A
     ApplicationMaster host: N/A
     ApplicationMaster RPC port: -1
     queue: default
     start time: 1465443367190
     final status: UNDEFINED
     tracking URL: http://hn1-keving.gnw4jk51tgbezpywcdl25ba0xb.dx.internal.cloudapp.net:8088/proxy/application_1465316720094_0024/
     user: KevinGre
16/06/09 03:36:09 INFO Client: Application report for application_1465316720094_0024 (state: ACCEPTED)
16/06/09 03:36:10 INFO Client: Application report for application_1465316720094_0024 (state: ACCEPTED)
16/06/09 03:36:11 INFO Client: Application report for application_1465316720094_0024 (state: ACCEPTED)
16/06/09 03:36:12 INFO Client: Application report for application_1465316720094_0024 (state: ACCEPTED)
16/06/09 03:36:13 INFO Client: Application report for application_1465316720094_0024 (state: ACCEPTED)
16/06/09 03:36:14 INFO Client: Application report for application_1465316720094_0024 (state: ACCEPTED)
16/06/09 03:36:15 INFO Client: Application report for application_1465316720094_0024 (state: ACCEPTED)
16/06/09 03:36:16 INFO Client: Application report for application_1465316720094_0024 (state: RUNNING)
16/06/09 03:36:16 INFO Client:
     client token: N/A
     diagnostics: N/A
     ApplicationMaster host: 10.0.0.8
     ApplicationMaster RPC port: 0
     queue: default
     start time: 1465443367190
     final status: UNDEFINED
     tracking URL: http://hn1-keving.gnw4jk51tgbezpywcdl25ba0xb.dx.internal.cloudapp.net:8088/proxy/application_1465316720094_0024/
     user: KevinGre
16/06/09 03:36:17 INFO Client: Application report for application_1465316720094_0024 (state: RUNNING)
16/06/09 03:36:18 INFO Client: Application report for application_1465316720094_0024 (state: RUNNING)
16/06/09 03:36:19 INFO Client: Application report for application_1465316720094_0024 (state: RUNNING)
16/06/09 03:36:20 INFO Client: Application report for application_1465316720094_0024 (state: RUNNING)
16/06/09 03:36:21 INFO Client: Application report for application_1465316720094_0024 (state: RUNNING)
16/06/09 03:36:22 INFO Client: Application report for application_1465316720094_0024 (state: RUNNING)
16/06/09 03:36:23 INFO Client: Application report for application_1465316720094_0024 (state: RUNNING)
16/06/09 03:36:24 INFO Client: Application report for application_1465316720094_0024 (state: RUNNING)
16/06/09 03:36:25 INFO Client: Application report for application_1465316720094_0024 (state: RUNNING)
16/06/09 03:36:26 INFO Client: Application report for application_1465316720094_0024 (state: RUNNING)
16/06/09 03:36:27 INFO Client: Application report for application_1465316720094_0024 (state: RUNNING)
16/06/09 03:36:28 INFO Client: Application report for application_1465316720094_0024 (state: RUNNING)
16/06/09 03:36:29 INFO Client: Application report for application_1465316720094_0024 (state: RUNNING)
16/06/09 03:36:30 INFO Client: Application report for application_1465316720094_0024 (state: RUNNING)
16/06/09 03:36:31 INFO Client: Application report for application_1465316720094_0024 (state: RUNNING)
16/06/09 03:36:32 INFO Client: Application report for application_1465316720094_0024 (state: RUNNING)
16/06/09 03:36:33 INFO Client: Application report for application_1465316720094_0024 (state: RUNNING)
16/06/09 03:36:34 INFO Client: Application report for application_1465316720094_0024 (state: RUNNING)
16/06/09 03:36:35 INFO Client: Application report for application_1465316720094_0024 (state: RUNNING)
16/06/09 03:36:36 INFO Client: Application report for application_1465316720094_0024 (state: RUNNING)
16/06/09 03:36:37 INFO Client: Application report for application_1465316720094_0024 (state: RUNNING)
16/06/09 03:36:38 INFO Client: Application report for application_1465316720094_0024 (state: RUNNING)
16/06/09 03:36:39 INFO Client: Application report for application_1465316720094_0024 (state: RUNNING)
16/06/09 03:36:40 INFO Client: Application report for application_1465316720094_0024 (state: RUNNING)
16/06/09 03:36:41 INFO Client: Application report for application_1465316720094_0024 (state: RUNNING)
16/06/09 03:36:42 INFO Client: Application report for application_1465316720094_0024 (state: RUNNING)
16/06/09 03:36:43 INFO Client: Application report for application_1465316720094_0024 (state: RUNNING)
16/06/09 03:36:44 INFO Client: Application report for application_1465316720094_0024 (state: RUNNING)
16/06/09 03:36:45 INFO Client: Application report for application_1465316720094_0024 (state: RUNNING)
16/06/09 03:36:46 INFO Client: Application report for application_1465316720094_0024 (state: RUNNING)
16/06/09 03:36:47 INFO Client: Application report for application_1465316720094_0024 (state: RUNNING)
16/06/09 03:36:48 INFO Client: Application report for application_1465316720094_0024 (state: FINISHED)
16/06/09 03:36:48 INFO Client:
     client token: N/A
     diagnostics: N/A
     ApplicationMaster host: 10.0.0.8
     ApplicationMaster RPC port: 0
     queue: default
     start time: 1465443367190
     final status: SUCCEEDED
     tracking URL: http://hn1-keving.gnw4jk51tgbezpywcdl25ba0xb.dx.internal.cloudapp.net:8088/proxy/application_1465316720094_0024/
     user: KevinGre
16/06/09 03:36:48 INFO ShutdownHookManager: Shutdown hook called
16/06/09 03:36:48 INFO ShutdownHookManager: Deleting directory /tmp/spark-85176246-5db8-4015-b954-842c2fe2cb67
KevinGre@hn0-keving:~/probe$ yarn logs -applicationId application_1465316720094_0024
16/06/09 03:37:19 INFO impl.TimelineClientImpl: Timeline service address: http://hn0-keving.gnw4jk51tgbezpywcdl25ba0xb.dx.internal.cloudapp.net:8188/ws/v1/timeline/
16/06/09 03:37:20 INFO client.ConfiguredRMFailoverProxyProvider: Failing over to rm2
16/06/09 03:37:20 INFO zlib.ZlibFactory: Successfully loaded & initialized native-zlib library
16/06/09 03:37:20 INFO compress.CodecPool: Got brand-new decompressor [.deflate]

Container: container_e05_1465316720094_0024_01_000003 on 10.0.0.10_30050
==========================================================================
LogType:directory.info
Log Upload Time:Thu Jun 09 03:36:50 +0000 2016
LogLength:4767
Log Contents:
ls -l:
total 28
-rw------- 1 nobody hadoop 103 Jun 9 03:36 container_tokens
-rwx------ 1 nobody hadoop 7144 Jun 9 03:36 launch_container.sh
lrwxrwxrwx 1 nobody hadoop 81 Jun 9 03:36 py4j-0.9-src.zip -> /mnt/resource/hadoop/yarn/local/usercache/KevinGre/filecache/183/py4j-0.9-src.zip
lrwxrwxrwx 1 nobody hadoop 76 Jun 9 03:36 pyspark.zip -> /mnt/resource/hadoop/yarn/local/usercache/KevinGre/filecache/182/pyspark.zip
lrwxrwxrwx 1 nobody hadoop 102 Jun 9 03:36 __spark_conf__ -> /mnt/resource/hadoop/yarn/local/usercache/KevinGre/filecache/184/__spark_conf__1191782910464867142.zip
drwxr-s--- 2 nobody hadoop 4096 Jun 9 03:36 tmp
find -L . -maxdepth 5 -ls:
3538950 4 drwxr-s--- 3 nobody hadoop 4096 Jun 9 03:36 .
3539054 8 -rwx------ 1 nobody hadoop 7144 Jun 9 03:36 ./launch_container.sh
3539055 4 -rw------- 1 nobody hadoop 103 Jun 9 03:36 ./container_tokens
3538962 4 drwx------ 2 nobody nogroup 4096 Jun 9 03:36 ./__spark_conf__
3539049 4 -r-x------ 1 nobody nogroup 1000 Jun 9 03:36 ./__spark_conf__/ssl-server.xml
3539034 4 -r-x------ 1 nobody nogroup 758 Jun 9 03:36 ./__spark_conf__/mapred-site.xml.template
3538965 4 -r-x------ 1 nobody nogroup 3156 Jun 9 03:36 ./__spark_conf__/log4j.properties
3539052 8 -r-x------ 1 nobody nogroup 4113 Jun 9 03:36 ./__spark_conf__/mapred-queues.xml.template
3539051 4 -r-x------ 1 nobody nogroup 945 Jun 9 03:36 ./__spark_conf__/taskcontroller.cfg
3539031 4 -r-x------ 1 nobody nogroup 744 Jun 9 03:36 ./__spark_conf__/ssl-client.xml
3539046 4 -r-x------ 1 nobody nogroup 127 Jun 9 03:36 ./__spark_conf__/slaves
3539050 4 -r-x------ 1 nobody nogroup 2268 Jun 9 03:36 ./__spark_conf__/ssl-server.xml.example
3539039 4 -r-x------ 1 nobody nogroup 1020 Jun 9 03:36 ./__spark_conf__/commons-logging.properties
3539012 8 -r-x------ 1 nobody nogroup 5640 Jun 9 03:36 ./__spark_conf__/hadoop-metrics2.properties
3539027 4 -r-x------ 1 nobody nogroup 265 Jun 9 03:36 ./__spark_conf__/hadoop-metrics2-azure-file-system.properties
3539036 8 -r-x------ 1 nobody nogroup 4277 Jun 9 03:36 ./__spark_conf__/yarn-env.sh
3538963 8 -r-x------ 1 nobody nogroup 7579 Jun 9 03:36 ./__spark_conf__/mapred-site.xml
3539038 4 -r-x------ 1 nobody nogroup 1045 Jun 9 03:36 ./__spark_conf__/container-executor.cfg
3539045 4 -r-x------ 1 nobody nogroup 1308 Jun 9 03:36 ./__spark_conf__/hadoop-policy.xml
3539029 8 -r-x------ 1 nobody nogroup 6136 Jun 9 03:36 ./__spark_conf__/core-site.xml
3539041 4 -r-x------ 1 nobody nogroup 2490 Jun 9 03:36 ./__spark_conf__/hadoop-metrics.properties
3539025 4 -r-x------ 1 nobody nogroup 247 Jun 9 03:36 ./__spark_conf__/hadoop-metrics2-adl-file-system.properties
3539032 4 -r-x------ 1 nobody nogroup 2444 Jun 9 03:36 ./__spark_conf__/capacity-scheduler.xml
3539048 8 -r-x------ 1 nobody nogroup 5142 Jun 9 03:36 ./__spark_conf__/metrics.properties
3539033 4 -r-x------ 1 nobody nogroup 2358 Jun 9 03:36 ./__spark_conf__/topology_script.py
3539044 4 -r-x------ 1 nobody nogroup 2316 Jun 9 03:36 ./__spark_conf__/ssl-client.xml.example
3539053 4 -r-x------ 1 nobody nogroup 2431 Jun 9 03:36 ./__spark_conf__/__spark_conf__.properties
3539035 4 -r-x------ 1 nobody nogroup 1335 Jun 9 03:36 ./__spark_conf__/configuration.xsl
3539042 4 -r-x------ 1 nobody nogroup 757 Jun 9 03:36 ./__spark_conf__/mapred-env.sh
3539040 8 -r-x------ 1 nobody nogroup 4221 Jun 9 03:36 ./__spark_conf__/task-log4j.properties
3539030 4 -r-x------ 1 nobody nogroup 1 Jun 9 03:36 ./__spark_conf__/dfs.exclude
3539037 12 -r-x------ 1 nobody nogroup 8543 Jun 9 03:36 ./__spark_conf__/hdfs-site.xml
3539047 4 -r-x------ 1 nobody nogroup 220 Jun 9 03:36 ./__spark_conf__/topology_mappings.data
3539026 24 -r-x------ 1 nobody nogroup 20890 Jun 9 03:36 ./__spark_conf__/yarn-site.xml
3539043 4 -r-x------ 1 nobody nogroup 1602 Jun 9 03:36 ./__spark_conf__/health_check
3539028 0 -r-x------ 1 nobody nogroup 0 Jun 9 03:36 ./__spark_conf__/yarn.exclude
3538964 8 -r-x------ 1 nobody nogroup 5693 Jun 9 03:36 ./__spark_conf__/hadoop-env.sh
3538957 44 -r-x------ 1 nobody nogroup 44846 Jun 9 03:36 ./py4j-0.9-src.zip
3538961 4 drwxr-s--- 2 nobody hadoop 4096 Jun 9 03:36 ./tmp
3538954 352 -r-x------ 1 nobody nogroup 357163 Jun 9 03:36 ./pyspark.zip
broken symlinks(find -L . -maxdepth 5 -type l -ls):
End of LogType:directory.info

LogType:launch_container.sh
Log Upload Time:Thu Jun 09 03:36:50 +0000 2016
LogLength:7144
Log Contents:
#!/bin/bash

export SPARK_YARN_MODE="true"
export SPARK_YARN_STAGING_DIR=".sparkStaging/application_1465316720094_0024"
export JAVA_HOME="/usr/lib/jvm/java-7-openjdk-amd64"
export SPARK_YARN_CACHE_FILES_VISIBILITIES="PRIVATE,PRIVATE"
export NM_AUX_SERVICE_mapreduce_shuffle="AAA0+gAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAA= "
export SPARK_YARN_CACHE_ARCHIVES="wasb://kevingre@wranglewestus.blob.core.windows.net/user/KevinGre/.sparkStaging/application_1465316720094_0024/__spark_conf__1191782910464867142.zip#__spark_conf__"
export SPARK_LOG_URL_STDERR="http://10.0.0.10:30060/node/containerlogs/container_e05_1465316720094_0024_01_000003/KevinGre/stderr?start=-4096"
export HADOOP_YARN_HOME="/usr/hdp/current/hadoop-yarn-nodemanager"
export NM_HOST="10.0.0.10"
export PYTHONPATH="/usr/hdp/current/spark-client/python:/usr/hdp/current/spark-client/python/lib/py4j-0.9-src.zip:$PWD/pyspark.zip:$PWD/py4j-0.9-src.zip"
export SPARK_YARN_CACHE_ARCHIVES_FILE_SIZES="105560"
export JVM_PID="$$"
export SPARK_HOME="/usr/hdp/current/spark-client"
export SPARK_YARN_CACHE_FILES_TIME_STAMPS="1465443365000,1465443366000"
export SPARK_USER="KevinGre"
export PWD="/mnt/resource/hadoop/yarn/local/usercache/KevinGre/appcache/application_1465316720094_0024/container_e05_1465316720094_0024_01_000003"
export NM_PORT="30050"
export LOGNAME="KevinGre"
export SPARK_LOG_URL_STDOUT="http://10.0.0.10:30060/node/containerlogs/container_e05_1465316720094_0024_01_000003/KevinGre/stdout?start=-4096"
export MALLOC_ARENA_MAX="4"
export LOG_DIRS="/mnt/resource/hadoop/yarn/log/application_1465316720094_0024/container_e05_1465316720094_0024_01_000003"
export SPARK_YARN_CACHE_FILES_FILE_SIZES="357163,44846"
export NM_HTTP_PORT="30060"
export LOCAL_DIRS="/mnt/resource/hadoop/yarn/local/usercache/KevinGre/appcache/application_1465316720094_0024"
export NM_AUX_SERVICE_spark_shuffle=""
export SPARK_YARN_CACHE_FILES="wasb://kevingre@wranglewestus.blob.core.windows.net/user/KevinGre/.sparkStaging/application_1465316720094_0024/pyspark.zip#pyspark.zip,wasb://kevingre@wranglewestus.blob.core.windows.net/user/KevinGre/.sparkStaging/application_1465316720094_0024/py4j-0.9-src.zip#py4j-0.9-src.zip"
export SPARK_YARN_CACHE_ARCHIVES_TIME_STAMPS="1465443366000"
export SPARK_DIST_CLASSPATH=":/usr/hdp/current/spark-client/lib/*:/usr/lib/hdinsight-datalake/*:/usr/hdp/current/spark-client/conf/hive-site.xml"
export CLASSPATH="$PWD:$PWD/__spark_conf__:/usr/hdp/current/spark-client/lib/spark-assembly.jar:$HADOOP_CONF_DIR:/usr/hdp/current/hadoop-client/*:/usr/hdp/current/hadoop-client/lib/*:/usr/hdp/current/hadoop-hdfs-client/*:/usr/hdp/current/hadoop-hdfs-client/lib/*:/usr/hdp/current/hadoop-yarn-client/*:/usr/hdp/current/hadoop-yarn-client/lib/*:$PWD/mr-framework/hadoop/share/hadoop/mapreduce/*:$PWD/mr-framework/hadoop/share/hadoop/mapreduce/lib/*:$PWD/mr-framework/hadoop/share/hadoop/common/*:$PWD/mr-framework/hadoop/share/hadoop/common/lib/*:$PWD/mr-framework/hadoop/share/hadoop/yarn/*:$PWD/mr-framework/hadoop/share/hadoop/yarn/lib/*:$PWD/mr-framework/hadoop/share/hadoop/hdfs/*:$PWD/mr-framework/hadoop/share/hadoop/hdfs/lib/*:$PWD/mr-framework/hadoop/share/hadoop/tools/lib/*:/usr/hdp/2.4.2.0-258/hadoop/lib/hadoop-lzo-0.6.0.2.4.2.0-258.jar:/etc/hadoop/conf/secure::/usr/hdp/current/spark-client/lib/*:/usr/lib/hdinsight-datalake/*:/usr/hdp/current/spark-client/conf/hive-site.xml"
export HADOOP_TOKEN_FILE_LOCATION="/mnt/resource/hadoop/yarn/local/usercache/KevinGre/appcache/application_1465316720094_0024/container_e05_1465316720094_0024_01_000003/container_tokens"
export SPARK_YARN_CACHE_ARCHIVES_VISIBILITIES="PRIVATE"
export USER="KevinGre"
export CONTAINER_ID="container_e05_1465316720094_0024_01_000003"
export HOME="/home/"
export PYTHONHASHSEED="0"
export HADOOP_CONF_DIR="/usr/hdp/current/hadoop-client/conf"
ln -sf "/mnt/resource/hadoop/yarn/local/usercache/KevinGre/filecache/183/py4j-0.9-src.zip" "py4j-0.9-src.zip"
hadoop_shell_errorcode=$?
if [ $hadoop_shell_errorcode -ne 0 ]
then
  exit $hadoop_shell_errorcode
fi
ln -sf "/mnt/resource/hadoop/yarn/local/usercache/KevinGre/filecache/182/pyspark.zip" "pyspark.zip"
hadoop_shell_errorcode=$?
if [ $hadoop_shell_errorcode -ne 0 ]
then
  exit $hadoop_shell_errorcode
fi
ln -sf "/mnt/resource/hadoop/yarn/local/usercache/KevinGre/filecache/184/__spark_conf__1191782910464867142.zip" "__spark_conf__"
hadoop_shell_errorcode=$?
if [ $hadoop_shell_errorcode -ne 0 ]
then
  exit $hadoop_shell_errorcode
fi
# Creating copy of launch script
cp "launch_container.sh" "/mnt/resource/hadoop/yarn/log/application_1465316720094_0024/container_e05_1465316720094_0024_01_000003/launch_container.sh"
chmod 640 "/mnt/resource/hadoop/yarn/log/application_1465316720094_0024/container_e05_1465316720094_0024_01_000003/launch_container.sh"
# Determining directory contents
echo "ls -l:" 1>"/mnt/resource/hadoop/yarn/log/application_1465316720094_0024/container_e05_1465316720094_0024_01_000003/directory.info"
ls -l 1>>"/mnt/resource/hadoop/yarn/log/application_1465316720094_0024/container_e05_1465316720094_0024_01_000003/directory.info"
echo "find -L . -maxdepth 5 -ls:" 1>>"/mnt/resource/hadoop/yarn/log/application_1465316720094_0024/container_e05_1465316720094_0024_01_000003/directory.info"
find -L . -maxdepth 5 -ls 1>>"/mnt/resource/hadoop/yarn/log/application_1465316720094_0024/container_e05_1465316720094_0024_01_000003/directory.info"
echo "broken symlinks(find -L . -maxdepth 5 -type l -ls):" 1>>"/mnt/resource/hadoop/yarn/log/application_1465316720094_0024/container_e05_1465316720094_0024_01_000003/directory.info"
find -L . -maxdepth 5 -type l -ls 1>>"/mnt/resource/hadoop/yarn/log/application_1465316720094_0024/container_e05_1465316720094_0024_01_000003/directory.info"
exec /bin/bash -c "$JAVA_HOME/bin/java -server -XX:OnOutOfMemoryError='kill %p' -Xms6144m -Xmx6144m '-Dhdp.version=' '-Detwlogger.component=sparkexecutor' '-DlogFilter.filename=SparkLogFilters.xml' '-DpatternGroup.filename=SparkPatternGroups.xml' '-Dlog4jspark.root.logger=INFO,console,DRFA,ETW,Anonymizer' '-Dlog4jspark.log.dir=/var/log/sparkapp' '-Dlog4jspark.log.file=sparkexecutor_\${user.name}.log' '-Dlog4j.configuration=file:/usr/hdp/current/spark-client/conf/log4j.properties' '-Djavax.xml.parsers.SAXParserFactory=com.sun.org.apache.xerces.internal.jaxp.SAXParserFactoryImpl' -Djava.io.tmpdir=$PWD/tmp '-Dspark.driver.port=37148' '-Dspark.history.ui.port=18080' '-Dspark.ui.port=0' -Dspark.yarn.app.container.log.dir=/mnt/resource/hadoop/yarn/log/application_1465316720094_0024/container_e05_1465316720094_0024_01_000003 -XX:MaxPermSize=256m org.apache.spark.executor.CoarseGrainedExecutorBackend --driver-url spark://CoarseGrainedScheduler@10.0.0.8:37148 --executor-id 1 --hostname 10.0.0.10 --cores 2 --app-id application_1465316720094_0024 --user-class-path file:$PWD/__app__.jar 1> /mnt/resource/hadoop/yarn/log/application_1465316720094_0024/container_e05_1465316720094_0024_01_000003/stdout 2> /mnt/resource/hadoop/yarn/log/application_1465316720094_0024/container_e05_1465316720094_0024_01_000003/stderr"
hadoop_shell_errorcode=$?
if [ $hadoop_shell_errorcode -ne 0 ]
then
  exit $hadoop_shell_errorcode
fi

End of LogType:launch_container.sh
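One thing jumps out of the launch script above: this executor container exports PYTHONHASHSEED="0" but no PYSPARK_PYTHON, while the AM container's launch script later in this log does export it. A quick way to confirm which containers received the variable (a hypothetical one-liner, not part of the recorded session, using this application's id):

    # Sketch only: list each container header plus any Python-related exports in its launch script.
    yarn logs -applicationId application_1465316720094_0024 \
        | grep -E 'Container: |export (PYSPARK_PYTHON|PYSPARK_DRIVER_PYTHON|PYTHONHASHSEED)'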
LogType:stderr
Log Upload Time:Thu Jun 09 03:36:50 +0000 2016
LogLength:11430
Log Contents:
SLF4J: Class path contains multiple SLF4J bindings.
SLF4J: Found binding in [jar:file:/usr/hdp/2.4.2.0-258/spark/lib/spark-assembly-1.6.1.2.4.2.0-258-hadoop2.7.1.2.4.2.0-258.jar!/org/slf4j/impl/StaticLoggerBinder.class]
SLF4J: Found binding in [jar:file:/usr/hdp/2.4.2.0-258/hadoop/lib/slf4j-log4j12-1.7.10.jar!/org/slf4j/impl/StaticLoggerBinder.class]
SLF4J: Found binding in [jar:file:/usr/hdp/2.4.2.0-258/spark/lib/spark-examples-1.6.1.2.4.2.0-258-hadoop2.7.1.2.4.2.0-258.jar!/org/slf4j/impl/StaticLoggerBinder.class]
SLF4J: See http://www.slf4j.org/codes.html#multiple_bindings for an explanation.
SLF4J: Actual binding is of type [org.slf4j.impl.Log4jLoggerFactory]
16/06/09 03:36:21 INFO CoarseGrainedExecutorBackend: Registered signal handlers for [TERM, HUP, INT]
16/06/09 03:36:22 WARN NativeCodeLoader: Unable to load native-hadoop library for your platform... using builtin-java classes where applicable
16/06/09 03:36:22 INFO SecurityManager: Changing view acls to: nobody,KevinGre
16/06/09 03:36:22 INFO SecurityManager: Changing modify acls to: nobody,KevinGre
16/06/09 03:36:22 INFO SecurityManager: SecurityManager: authentication disabled; ui acls disabled; users with view permissions: Set(nobody, KevinGre); users with modify permissions: Set(nobody, KevinGre)
16/06/09 03:36:23 INFO SecurityManager: Changing view acls to: nobody,KevinGre
16/06/09 03:36:23 INFO SecurityManager: Changing modify acls to: nobody,KevinGre
16/06/09 03:36:23 INFO SecurityManager: SecurityManager: authentication disabled; ui acls disabled; users with view permissions: Set(nobody, KevinGre); users with modify permissions: Set(nobody, KevinGre)
16/06/09 03:36:23 INFO Slf4jLogger: Slf4jLogger started
16/06/09 03:36:23 INFO Remoting: Starting remoting
16/06/09 03:36:23 INFO Remoting: Remoting started; listening on addresses :[akka.tcp://sparkExecutorActorSystem@10.0.0.10:39745]
16/06/09 03:36:23 INFO Utils: Successfully started service 'sparkExecutorActorSystem' on port 39745.
16/06/09 03:36:23 INFO DiskBlockManager: Created local directory at /mnt/resource/hadoop/yarn/local/usercache/KevinGre/appcache/application_1465316720094_0024/blockmgr-32fadf68-147b-42ec-9af5-28bb3e18a12a
16/06/09 03:36:23 INFO MemoryStore: MemoryStore started with capacity 4.1 GB
16/06/09 03:36:24 INFO CoarseGrainedExecutorBackend: Connecting to driver: spark://CoarseGrainedScheduler@10.0.0.8:37148
16/06/09 03:36:24 INFO CoarseGrainedExecutorBackend: Successfully registered with driver
16/06/09 03:36:24 INFO Executor: Starting executor ID 1 on host 10.0.0.10
16/06/09 03:36:24 INFO Utils: Successfully started service 'org.apache.spark.network.netty.NettyBlockTransferService' on port 39335.
16/06/09 03:36:24 INFO NettyBlockTransferService: Server created on 39335
16/06/09 03:36:24 INFO BlockManagerMaster: Trying to register BlockManager
16/06/09 03:36:24 INFO BlockManagerMaster: Registered BlockManager
16/06/09 03:36:45 INFO CoarseGrainedExecutorBackend: Got assigned task 0
16/06/09 03:36:45 INFO CoarseGrainedExecutorBackend: Got assigned task 1
16/06/09 03:36:45 INFO Executor: Running task 0.0 in stage 0.0 (TID 0)
16/06/09 03:36:45 INFO Executor: Running task 1.0 in stage 0.0 (TID 1)
16/06/09 03:36:46 INFO TorrentBroadcast: Started reading broadcast variable 0
16/06/09 03:36:46 INFO MemoryStore: Block broadcast_0_piece0 stored as bytes in memory (estimated size 2.6 KB, free 2.6 KB)
16/06/09 03:36:46 INFO TorrentBroadcast: Reading broadcast variable 0 took 156 ms
16/06/09 03:36:46 INFO MemoryStore: Block broadcast_0 stored as values in memory (estimated size 3.9 KB, free 6.6 KB)
16/06/09 03:36:47 INFO PythonRunner: Times: total = 1116, boot = 1100, init = 16, finish = 0
16/06/09 03:36:47 INFO PythonRunner: Times: total = 1116, boot = 1095, init = 21, finish = 0
16/06/09 03:36:47 INFO Executor: Finished task 1.0 in stage 0.0 (TID 1). 1163 bytes result sent to driver
16/06/09 03:36:47 INFO Executor: Finished task 0.0 in stage 0.0 (TID 0). 963 bytes result sent to driver
16/06/09 03:36:48 INFO CoarseGrainedExecutorBackend: Driver commanded a shutdown
16/06/09 03:36:48 INFO metrics: type=GAUGE, name=application_1465316720094_0024.1.executor.filesystem.file.largeRead_ops, value=0
16/06/09 03:36:48 INFO metrics: type=GAUGE, name=application_1465316720094_0024.1.executor.filesystem.file.read_bytes, value=0
16/06/09 03:36:48 INFO metrics: type=GAUGE, name=application_1465316720094_0024.1.executor.filesystem.file.read_ops, value=0
16/06/09 03:36:48 INFO metrics: type=GAUGE, name=application_1465316720094_0024.1.executor.filesystem.file.write_bytes, value=0
16/06/09 03:36:48 INFO metrics: type=GAUGE, name=application_1465316720094_0024.1.executor.filesystem.file.write_ops, value=0
16/06/09 03:36:48 INFO metrics: type=GAUGE, name=application_1465316720094_0024.1.executor.filesystem.hdfs.largeRead_ops, value=0
16/06/09 03:36:48 INFO metrics: type=GAUGE, name=application_1465316720094_0024.1.executor.filesystem.hdfs.read_bytes, value=0
16/06/09 03:36:48 INFO metrics: type=GAUGE, name=application_1465316720094_0024.1.executor.filesystem.hdfs.read_ops, value=0
16/06/09 03:36:48 INFO metrics: type=GAUGE, name=application_1465316720094_0024.1.executor.filesystem.hdfs.write_bytes, value=0
16/06/09 03:36:48 INFO metrics: type=GAUGE, name=application_1465316720094_0024.1.executor.filesystem.hdfs.write_ops, value=0
16/06/09 03:36:48 INFO metrics: type=GAUGE, name=application_1465316720094_0024.1.executor.threadpool.activeTasks, value=0
16/06/09 03:36:48 INFO metrics: type=GAUGE, name=application_1465316720094_0024.1.executor.threadpool.completeTasks, value=2
16/06/09 03:36:48 INFO metrics: type=GAUGE, name=application_1465316720094_0024.1.executor.threadpool.currentPool_size, value=2
16/06/09 03:36:48 INFO metrics: type=GAUGE, name=application_1465316720094_0024.1.executor.threadpool.maxPool_size, value=2147483647
16/06/09 03:36:48 INFO metrics: type=GAUGE, name=application_1465316720094_0024.1.jvm.PS-MarkSweep.count, value=0
16/06/09 03:36:48 INFO metrics: type=GAUGE, name=application_1465316720094_0024.1.jvm.PS-MarkSweep.time, value=0
16/06/09 03:36:48 INFO metrics: type=GAUGE, name=application_1465316720094_0024.1.jvm.PS-Scavenge.count, value=0
16/06/09 03:36:48 INFO metrics: type=GAUGE, name=application_1465316720094_0024.1.jvm.PS-Scavenge.time, value=0
16/06/09 03:36:48 INFO metrics: type=GAUGE, name=application_1465316720094_0024.1.jvm.heap.committed, value=6174015488
16/06/09 03:36:48 INFO metrics: type=GAUGE, name=application_1465316720094_0024.1.jvm.heap.init, value=6442450944
16/06/09 03:36:48 INFO metrics: type=GAUGE, name=application_1465316720094_0024.1.jvm.heap.max, value=6174015488
16/06/09 03:36:48 INFO metrics: type=GAUGE, name=application_1465316720094_0024.1.jvm.heap.usage, value=0.24525097789971725
16/06/09 03:36:48 INFO metrics: type=GAUGE, name=application_1465316720094_0024.1.jvm.heap.used, value=1514183336
16/06/09 03:36:48 INFO metrics: type=GAUGE, name=application_1465316720094_0024.1.jvm.non-heap.committed, value=45547520
16/06/09 03:36:48 INFO metrics: type=GAUGE, name=application_1465316720094_0024.1.jvm.non-heap.init, value=24576000
16/06/09 03:36:48 INFO metrics: type=GAUGE, name=application_1465316720094_0024.1.jvm.non-heap.max, value=318767104
16/06/09 03:36:48 INFO metrics: type=GAUGE, name=application_1465316720094_0024.1.jvm.non-heap.usage, value=0.13844843914634303
16/06/09 03:36:48 INFO metrics: type=GAUGE, name=application_1465316720094_0024.1.jvm.non-heap.used, value=44132808
16/06/09 03:36:48 INFO metrics: type=GAUGE, name=application_1465316720094_0024.1.jvm.pools.Code-Cache.committed, value=2555904
16/06/09 03:36:48 INFO metrics: type=GAUGE, name=application_1465316720094_0024.1.jvm.pools.Code-Cache.init, value=2555904
16/06/09 03:36:48 INFO metrics: type=GAUGE, name=application_1465316720094_0024.1.jvm.pools.Code-Cache.max, value=50331648
16/06/09 03:36:48 INFO metrics: type=GAUGE, name=application_1465316720094_0024.1.jvm.pools.Code-Cache.usage, value=0.029944101969401043
16/06/09 03:36:48 INFO metrics: type=GAUGE, name=application_1465316720094_0024.1.jvm.pools.Code-Cache.used, value=1507904
16/06/09 03:36:48 INFO metrics: type=GAUGE, name=application_1465316720094_0024.1.jvm.pools.PS-Eden-Space.committed, value=1610612736
16/06/09 03:36:48 INFO metrics: type=GAUGE, name=application_1465316720094_0024.1.jvm.pools.PS-Eden-Space.init, value=1610612736
16/06/09 03:36:48 INFO metrics: type=GAUGE, name=application_1465316720094_0024.1.jvm.pools.PS-Eden-Space.max, value=1610612736
16/06/09 03:36:48 INFO metrics: type=GAUGE, name=application_1465316720094_0024.1.jvm.pools.PS-Eden-Space.usage, value=0.9401287486155828
16/06/09 03:36:48 INFO metrics: type=GAUGE, name=application_1465316720094_0024.1.jvm.pools.PS-Eden-Space.used, value=1514183336
16/06/09 03:36:48 INFO metrics: type=GAUGE, name=application_1465316720094_0024.1.jvm.pools.PS-Old-Gen.committed, value=4294967296
16/06/09 03:36:48 INFO metrics: type=GAUGE, name=application_1465316720094_0024.1.jvm.pools.PS-Old-Gen.init, value=4294967296
16/06/09 03:36:48 INFO metrics: type=GAUGE, name=application_1465316720094_0024.1.jvm.pools.PS-Old-Gen.max, value=4294967296
16/06/09 03:36:48 INFO metrics: type=GAUGE, name=application_1465316720094_0024.1.jvm.pools.PS-Old-Gen.usage, value=0.0
16/06/09 03:36:48 INFO metrics: type=GAUGE, name=application_1465316720094_0024.1.jvm.pools.PS-Old-Gen.used, value=0
16/06/09 03:36:48 INFO metrics: type=GAUGE, name=application_1465316720094_0024.1.jvm.pools.PS-Perm-Gen.committed, value=42991616
16/06/09 03:36:48 INFO metrics: type=GAUGE, name=application_1465316720094_0024.1.jvm.pools.PS-Perm-Gen.init, value=22020096
16/06/09 03:36:48 INFO metrics: type=GAUGE, name=application_1465316720094_0024.1.jvm.pools.PS-Perm-Gen.max, value=268435456
16/06/09 03:36:48 INFO metrics: type=GAUGE, name=application_1465316720094_0024.1.jvm.pools.PS-Perm-Gen.usage, value=0.158796489238739
16/06/09 03:36:48 INFO metrics: type=GAUGE, name=application_1465316720094_0024.1.jvm.pools.PS-Perm-Gen.used, value=42627472
16/06/09 03:36:48 INFO metrics: type=GAUGE, name=application_1465316720094_0024.1.jvm.pools.PS-Survivor-Space.committed, value=268435456
16/06/09 03:36:48 INFO metrics: type=GAUGE, name=application_1465316720094_0024.1.jvm.pools.PS-Survivor-Space.init, value=268435456
16/06/09 03:36:48 INFO metrics: type=GAUGE, name=application_1465316720094_0024.1.jvm.pools.PS-Survivor-Space.max, value=268435456
16/06/09 03:36:48 INFO metrics: type=GAUGE, name=application_1465316720094_0024.1.jvm.pools.PS-Survivor-Space.usage, value=0.0
16/06/09 03:36:48 INFO metrics: type=GAUGE, name=application_1465316720094_0024.1.jvm.pools.PS-Survivor-Space.used, value=0
16/06/09 03:36:48 INFO metrics: type=GAUGE, name=application_1465316720094_0024.1.jvm.total.committed, value=6219563008
16/06/09 03:36:48 INFO metrics: type=GAUGE, name=application_1465316720094_0024.1.jvm.total.init, value=6467026944
16/06/09 03:36:48 INFO metrics: type=GAUGE, name=application_1465316720094_0024.1.jvm.total.max, value=6492782592
16/06/09 03:36:48 INFO metrics: type=GAUGE, name=application_1465316720094_0024.1.jvm.total.used, value=1558323936
16/06/09 03:36:48 INFO MemoryStore: MemoryStore cleared
16/06/09 03:36:48 INFO BlockManager: BlockManager stopped
16/06/09 03:36:48 ERROR CoarseGrainedExecutorBackend: Driver 10.0.0.8:37148 disassociated! Shutting down.
16/06/09 03:36:48 INFO ShutdownHookManager: Shutdown hook called
End of LogType:stderr

LogType:stdout
Log Upload Time:Thu Jun 09 03:36:50 +0000 2016
LogLength:0
Log Contents:
End of LogType:stdout

Container: container_e05_1465316720094_0024_01_000001 on 10.0.0.8_30050
=========================================================================
LogType:directory.info
Log Upload Time:Thu Jun 09 03:36:50 +0000 2016
LogLength:4977
Log Contents:
ls -l:
total 32
-rw------- 1 nobody hadoop 74 Jun 9 03:36 container_tokens
-rwx------ 1 nobody hadoop 7164 Jun 9 03:36 launch_container.sh
lrwxrwxrwx 1 nobody hadoop 73 Jun 9 03:36 probe.py -> /mnt/resource/hadoop/yarn/local/usercache/KevinGre/filecache/174/probe.py
lrwxrwxrwx 1 nobody hadoop 81 Jun 9 03:36 py4j-0.9-src.zip -> /mnt/resource/hadoop/yarn/local/usercache/KevinGre/filecache/172/py4j-0.9-src.zip
lrwxrwxrwx 1 nobody hadoop 76 Jun 9 03:36 pyspark.zip -> /mnt/resource/hadoop/yarn/local/usercache/KevinGre/filecache/171/pyspark.zip
lrwxrwxrwx 1 nobody hadoop 102 Jun 9 03:36 __spark_conf__ -> /mnt/resource/hadoop/yarn/local/usercache/KevinGre/filecache/173/__spark_conf__1191782910464867142.zip
drwxr-s--- 2 nobody hadoop 4096 Jun 9 03:36 tmp
find -L . -maxdepth 5 -ls:
3538957 4 drwxr-s--- 3 nobody hadoop 4096 Jun 9 03:36 .
3539170 352 -r-x------ 1 nobody nogroup 357163 Jun 9 03:36 ./pyspark.zip
3539315 4 drwxr-s--- 2 nobody hadoop 4096 Jun 9 03:36 ./tmp
3539177 4 -r-x------ 1 nobody nogroup 403 Jun 9 03:36 ./probe.py
3539316 8 -rwx------ 1 nobody hadoop 7164 Jun 9 03:36 ./launch_container.sh
3539317 4 -rw------- 1 nobody hadoop 74 Jun 9 03:36 ./container_tokens
3539178 4 drwx------ 2 nobody nogroup 4096 Jun 9 03:36 ./__spark_conf__
3539193 0 -r-x------ 1 nobody nogroup 0 Jun 9 03:36 ./__spark_conf__/yarn.exclude
3539305 4 -r-x------ 1 nobody nogroup 127 Jun 9 03:36 ./__spark_conf__/slaves
3539302 4 -r-x------ 1 nobody nogroup 1602 Jun 9 03:36 ./__spark_conf__/health_check
3539234 4 -r-x------ 1 nobody nogroup 1335 Jun 9 03:36 ./__spark_conf__/configuration.xsl
3539228 8 -r-x------ 1 nobody nogroup 6136 Jun 9 03:36 ./__spark_conf__/core-site.xml
3539231 4 -r-x------ 1 nobody nogroup 2444 Jun 9 03:36 ./__spark_conf__/capacity-scheduler.xml
3539310 4 -r-x------ 1 nobody nogroup 945 Jun 9 03:36 ./__spark_conf__/taskcontroller.cfg
3539236 12 -r-x------ 1 nobody nogroup 8543 Jun 9 03:36 ./__spark_conf__/hdfs-site.xml
3539192 4 -r-x------ 1 nobody nogroup 265 Jun 9 03:36 ./__spark_conf__/hadoop-metrics2-azure-file-system.properties
3539180 8 -r-x------ 1 nobody nogroup 5693 Jun 9 03:36 ./__spark_conf__/hadoop-env.sh
3539181 4 -r-x------ 1 nobody nogroup 3156 Jun 9 03:36 ./__spark_conf__/log4j.properties
3539309 4 -r-x------ 1 nobody nogroup 2268 Jun 9 03:36 ./__spark_conf__/ssl-server.xml.example
3539183 4 -r-x------ 1 nobody nogroup 247 Jun 9 03:36 ./__spark_conf__/hadoop-metrics2-adl-file-system.properties
3539312 4 -r-x------ 1 nobody nogroup 2431 Jun 9 03:36 ./__spark_conf__/__spark_conf__.properties
3539307 8 -r-x------ 1 nobody nogroup 5142 Jun 9 03:36 ./__spark_conf__/metrics.properties
3539306 4 -r-x------ 1 nobody nogroup 220 Jun 9 03:36 ./__spark_conf__/topology_mappings.data
3539179 8 -r-x------ 1 nobody nogroup 7579 Jun 9 03:36 ./__spark_conf__/mapred-site.xml
3539303 4 -r-x------ 1 nobody nogroup 2316 Jun 9 03:36 ./__spark_conf__/ssl-client.xml.example
3539229 4 -r-x------ 1 nobody nogroup 1 Jun 9 03:36 ./__spark_conf__/dfs.exclude
3539301 4 -r-x------ 1 nobody nogroup 757 Jun 9 03:36 ./__spark_conf__/mapred-env.sh
3539233 4 -r-x------ 1 nobody nogroup 758 Jun 9 03:36 ./__spark_conf__/mapred-site.xml.template
3539235 8 -r-x------ 1 nobody nogroup 4277 Jun 9 03:36 ./__spark_conf__/yarn-env.sh
3539232 4 -r-x------ 1 nobody nogroup 2358 Jun 9 03:36 ./__spark_conf__/topology_script.py
3539184 24 -r-x------ 1 nobody nogroup 20890 Jun 9 03:36 ./__spark_conf__/yarn-site.xml
3539182 8 -r-x------ 1 nobody nogroup 5640 Jun 9 03:36 ./__spark_conf__/hadoop-metrics2.properties
3539311 8 -r-x------ 1 nobody nogroup 4113 Jun 9 03:36 ./__spark_conf__/mapred-queues.xml.template
3539300 4 -r-x------ 1 nobody nogroup 2490 Jun 9 03:36 ./__spark_conf__/hadoop-metrics.properties
3539230 4 -r-x------ 1 nobody nogroup 744 Jun 9 03:36 ./__spark_conf__/ssl-client.xml
3539299 8 -r-x------ 1 nobody nogroup 4221 Jun 9 03:36 ./__spark_conf__/task-log4j.properties
3539304 4 -r-x------ 1 nobody nogroup 1308 Jun 9 03:36 ./__spark_conf__/hadoop-policy.xml
3539297 4 -r-x------ 1 nobody nogroup 1045 Jun 9 03:36 ./__spark_conf__/container-executor.cfg
3539308 4 -r-x------ 1 nobody nogroup 1000 Jun 9 03:36 ./__spark_conf__/ssl-server.xml
3539298 4 -r-x------ 1 nobody nogroup 1020 Jun 9 03:36 ./__spark_conf__/commons-logging.properties
3539173 44 -r-x------ 1 nobody nogroup 44846 Jun 9 03:36 ./py4j-0.9-src.zip
broken symlinks(find -L . -maxdepth 5 -type l -ls):
End of LogType:directory.info

LogType:launch_container.sh
Log Upload Time:Thu Jun 09 03:36:50 +0000 2016
LogLength:7164
Log Contents:
#!/bin/bash

export SPARK_YARN_MODE="true"
export SPARK_YARN_STAGING_DIR=".sparkStaging/application_1465316720094_0024"
export JAVA_HOME="/usr/lib/jvm/java-7-openjdk-amd64"
export SPARK_YARN_CACHE_FILES_VISIBILITIES="PRIVATE,PRIVATE"
export NM_AUX_SERVICE_mapreduce_shuffle="AAA0+gAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAA= "
export SPARK_YARN_CACHE_ARCHIVES="wasb://kevingre@wranglewestus.blob.core.windows.net/user/KevinGre/.sparkStaging/application_1465316720094_0024/__spark_conf__1191782910464867142.zip#__spark_conf__"
export HADOOP_YARN_HOME="/usr/hdp/current/hadoop-yarn-nodemanager"
export NM_HOST="10.0.0.8"
export PYTHONPATH="/usr/hdp/current/spark-client/python:/usr/hdp/current/spark-client/python/lib/py4j-0.9-src.zip:$PWD/pyspark.zip:$PWD/py4j-0.9-src.zip"
export APPLICATION_WEB_PROXY_BASE="/proxy/application_1465316720094_0024"
export SPARK_YARN_CACHE_ARCHIVES_FILE_SIZES="105560"
export JVM_PID="$$"
export SPARK_USER="KevinGre"
export SPARK_YARN_CACHE_FILES_TIME_STAMPS="1465443365000,1465443366000"
export PWD="/mnt/resource/hadoop/yarn/local/usercache/KevinGre/appcache/application_1465316720094_0024/container_e05_1465316720094_0024_01_000001"
export NM_PORT="30050"
export LOGNAME="KevinGre"
export APP_SUBMIT_TIME_ENV="1465443367190"
export MAX_APP_ATTEMPTS="1"
export MALLOC_ARENA_MAX="4"
export LOG_DIRS="/mnt/resource/hadoop/yarn/log/application_1465316720094_0024/container_e05_1465316720094_0024_01_000001"
export SPARK_YARN_CACHE_FILES_FILE_SIZES="357163,44846"
export LOCAL_DIRS="/mnt/resource/hadoop/yarn/local/usercache/KevinGre/appcache/application_1465316720094_0024"
export NM_HTTP_PORT="30060"
export NM_AUX_SERVICE_spark_shuffle=""
export SPARK_YARN_CACHE_FILES="wasb://kevingre@wranglewestus.blob.core.windows.net/user/KevinGre/.sparkStaging/application_1465316720094_0024/pyspark.zip#pyspark.zip,wasb://kevingre@wranglewestus.blob.core.windows.net/user/KevinGre/.sparkStaging/application_1465316720094_0024/py4j-0.9-src.zip#py4j-0.9-src.zip"
export SPARK_YARN_CACHE_ARCHIVES_TIME_STAMPS="1465443366000"
export SPARK_DIST_CLASSPATH=":/usr/hdp/current/spark-client/lib/*:/usr/lib/hdinsight-datalake/*:/usr/hdp/current/spark-client/conf/hive-site.xml"
export CLASSPATH="$PWD:$PWD/__spark_conf__:/usr/hdp/current/spark-client/lib/spark-assembly.jar:$HADOOP_CONF_DIR:/usr/hdp/current/hadoop-client/*:/usr/hdp/current/hadoop-client/lib/*:/usr/hdp/current/hadoop-hdfs-client/*:/usr/hdp/current/hadoop-hdfs-client/lib/*:/usr/hdp/current/hadoop-yarn-client/*:/usr/hdp/current/hadoop-yarn-client/lib/*:$PWD/mr-framework/hadoop/share/hadoop/mapreduce/*:$PWD/mr-framework/hadoop/share/hadoop/mapreduce/lib/*:$PWD/mr-framework/hadoop/share/hadoop/common/*:$PWD/mr-framework/hadoop/share/hadoop/common/lib/*:$PWD/mr-framework/hadoop/share/hadoop/yarn/*:$PWD/mr-framework/hadoop/share/hadoop/yarn/lib/*:$PWD/mr-framework/hadoop/share/hadoop/hdfs/*:$PWD/mr-framework/hadoop/share/hadoop/hdfs/lib/*:$PWD/mr-framework/hadoop/share/hadoop/tools/lib/*:/usr/hdp/2.4.2.0-258/hadoop/lib/hadoop-lzo-0.6.0.2.4.2.0-258.jar:/etc/hadoop/conf/secure::/usr/hdp/current/spark-client/lib/*:/usr/lib/hdinsight-datalake/*:/usr/hdp/current/spark-client/conf/hive-site.xml"
export HADOOP_TOKEN_FILE_LOCATION="/mnt/resource/hadoop/yarn/local/usercache/KevinGre/appcache/application_1465316720094_0024/container_e05_1465316720094_0024_01_000001/container_tokens"
export PYSPARK_PYTHON="/usr/bin/anaconda/bin/python"
export SPARK_YARN_CACHE_ARCHIVES_VISIBILITIES="PRIVATE"
export USER="KevinGre"
export PYSPARK_DRIVER_PYTHON="/usr/bin/anaconda/bin/python"
export CONTAINER_ID="container_e05_1465316720094_0024_01_000001"
export HOME="/home/"
export PYTHONHASHSEED="0"
export HADOOP_CONF_DIR="/usr/hdp/current/hadoop-client/conf"
ln -sf "/mnt/resource/hadoop/yarn/local/usercache/KevinGre/filecache/174/probe.py" "probe.py"
hadoop_shell_errorcode=$?
if [ $hadoop_shell_errorcode -ne 0 ]
then
  exit $hadoop_shell_errorcode
fi
ln -sf "/mnt/resource/hadoop/yarn/local/usercache/KevinGre/filecache/173/__spark_conf__1191782910464867142.zip" "__spark_conf__"
hadoop_shell_errorcode=$?
if [ $hadoop_shell_errorcode -ne 0 ]
then
  exit $hadoop_shell_errorcode
fi
ln -sf "/mnt/resource/hadoop/yarn/local/usercache/KevinGre/filecache/171/pyspark.zip" "pyspark.zip"
hadoop_shell_errorcode=$?
if [ $hadoop_shell_errorcode -ne 0 ]
then
  exit $hadoop_shell_errorcode
fi
ln -sf "/mnt/resource/hadoop/yarn/local/usercache/KevinGre/filecache/172/py4j-0.9-src.zip" "py4j-0.9-src.zip"
hadoop_shell_errorcode=$?
if [ $hadoop_shell_errorcode -ne 0 ]
then
  exit $hadoop_shell_errorcode
fi
# Creating copy of launch script
cp "launch_container.sh" "/mnt/resource/hadoop/yarn/log/application_1465316720094_0024/container_e05_1465316720094_0024_01_000001/launch_container.sh"
chmod 640 "/mnt/resource/hadoop/yarn/log/application_1465316720094_0024/container_e05_1465316720094_0024_01_000001/launch_container.sh"
# Determining directory contents
echo "ls -l:" 1>"/mnt/resource/hadoop/yarn/log/application_1465316720094_0024/container_e05_1465316720094_0024_01_000001/directory.info"
ls -l 1>>"/mnt/resource/hadoop/yarn/log/application_1465316720094_0024/container_e05_1465316720094_0024_01_000001/directory.info"
echo "find -L . -maxdepth 5 -ls:" 1>>"/mnt/resource/hadoop/yarn/log/application_1465316720094_0024/container_e05_1465316720094_0024_01_000001/directory.info"
find -L . -maxdepth 5 -ls 1>>"/mnt/resource/hadoop/yarn/log/application_1465316720094_0024/container_e05_1465316720094_0024_01_000001/directory.info"
echo "broken symlinks(find -L . -maxdepth 5 -type l -ls):" 1>>"/mnt/resource/hadoop/yarn/log/application_1465316720094_0024/container_e05_1465316720094_0024_01_000001/directory.info"
find -L . -maxdepth 5 -type l -ls 1>>"/mnt/resource/hadoop/yarn/log/application_1465316720094_0024/container_e05_1465316720094_0024_01_000001/directory.info"
exec /bin/bash -c "$JAVA_HOME/bin/java -server -Xmx1024m -Djava.io.tmpdir=$PWD/tmp -Dhdp.version=2.4.2.0-258 '-Detwlogger.component=sparkdriver' '-DlogFilter.filename=SparkLogFilters.xml' '-DpatternGroup.filename=SparkPatternGroups.xml' '-Dlog4jspark.root.logger=INFO,console,DRFA,ETW,Anonymizer' '-Dlog4jspark.log.dir=/var/log/sparkapp' '-Dlog4jspark.log.file=sparkdriver_\${user.name}.log' '-Djava.io.tmpdir=/var/tmp/spark' '-Dlog4j.configuration=file:/usr/hdp/current/spark-client/conf/log4j.properties' '-Djavax.xml.parsers.SAXParserFactory=com.sun.org.apache.xerces.internal.jaxp.SAXParserFactoryImpl' -Dspark.yarn.app.container.log.dir=/mnt/resource/hadoop/yarn/log/application_1465316720094_0024/container_e05_1465316720094_0024_01_000001 -XX:MaxPermSize=256m org.apache.spark.deploy.yarn.ApplicationMaster --class 'org.apache.spark.deploy.PythonRunner' --primary-py-file probe.py --executor-memory 6144m --executor-cores 2 --properties-file $PWD/__spark_conf__/__spark_conf__.properties 1> /mnt/resource/hadoop/yarn/log/application_1465316720094_0024/container_e05_1465316720094_0024_01_000001/stdout 2> /mnt/resource/hadoop/yarn/log/application_1465316720094_0024/container_e05_1465316720094_0024_01_000001/stderr"
hadoop_shell_errorcode=$?
if [ $hadoop_shell_errorcode -ne 0 ]
then
  exit $hadoop_shell_errorcode
fi

End of LogType:launch_container.sh
LogType:stderr
Log Upload Time:Thu Jun 09 03:36:50 +0000 2016
LogLength:31048
Log Contents:
SLF4J: Class path contains multiple SLF4J bindings.
SLF4J: Found binding in [jar:file:/usr/hdp/2.4.2.0-258/spark/lib/spark-assembly-1.6.1.2.4.2.0-258-hadoop2.7.1.2.4.2.0-258.jar!/org/slf4j/impl/StaticLoggerBinder.class]
SLF4J: Found binding in [jar:file:/usr/hdp/2.4.2.0-258/hadoop/lib/slf4j-log4j12-1.7.10.jar!/org/slf4j/impl/StaticLoggerBinder.class]
SLF4J: Found binding in [jar:file:/usr/hdp/2.4.2.0-258/spark/lib/spark-examples-1.6.1.2.4.2.0-258-hadoop2.7.1.2.4.2.0-258.jar!/org/slf4j/impl/StaticLoggerBinder.class]
SLF4J: See http://www.slf4j.org/codes.html#multiple_bindings for an explanation.
SLF4J: Actual binding is of type [org.slf4j.impl.Log4jLoggerFactory]
16/06/09 03:36:11 INFO ApplicationMaster: Registered signal handlers for [TERM, HUP, INT]
16/06/09 03:36:12 WARN NativeCodeLoader: Unable to load native-hadoop library for your platform... using builtin-java classes where applicable
16/06/09 03:36:12 INFO ApplicationMaster: ApplicationAttemptId: appattempt_1465316720094_0024_000001
16/06/09 03:36:12 INFO MetricsConfig: loaded properties from hadoop-metrics2-azure-file-system.properties
16/06/09 03:36:12 INFO WasbAzureIaasSink: Init starting.
16/06/09 03:36:12 INFO AzureIaasSink: Init starting. Initializing MdsLogger.
16/06/09 03:36:12 INFO AzureIaasSink: Init completed.
16/06/09 03:36:12 INFO WasbAzureIaasSink: Init completed.
16/06/09 03:36:12 INFO MetricsSinkAdapter: Sink azurefs2 started
16/06/09 03:36:12 INFO MetricsSystemImpl: Scheduled snapshot period at 60 second(s).
16/06/09 03:36:12 INFO MetricsSystemImpl: azure-file-system metrics system started
16/06/09 03:36:12 INFO SecurityManager: Changing view acls to: nobody,KevinGre
16/06/09 03:36:12 INFO SecurityManager: Changing modify acls to: nobody,KevinGre
16/06/09 03:36:12 INFO SecurityManager: SecurityManager: authentication disabled; ui acls disabled; users with view permissions: Set(nobody, KevinGre); users with modify permissions: Set(nobody, KevinGre)
16/06/09 03:36:13 INFO ApplicationMaster: Starting the user application in a separate Thread
16/06/09 03:36:13 INFO ApplicationMaster: Waiting for spark context initialization
16/06/09 03:36:13 INFO ApplicationMaster: Waiting for spark context initialization ...
16/06/09 03:36:14 INFO SparkContext: Running Spark version 1.6.1
16/06/09 03:36:14 INFO SecurityManager: Changing view acls to: nobody,KevinGre
16/06/09 03:36:14 INFO SecurityManager: Changing modify acls to: nobody,KevinGre
16/06/09 03:36:14 INFO SecurityManager: SecurityManager: authentication disabled; ui acls disabled; users with view permissions: Set(nobody, KevinGre); users with modify permissions: Set(nobody, KevinGre)
16/06/09 03:36:14 INFO Utils: Successfully started service 'sparkDriver' on port 37148.
16/06/09 03:36:14 INFO Slf4jLogger: Slf4jLogger started
16/06/09 03:36:14 INFO Remoting: Starting remoting
16/06/09 03:36:15 INFO Remoting: Remoting started; listening on addresses :[akka.tcp://sparkDriverActorSystem@10.0.0.8:41592]
16/06/09 03:36:15 INFO Utils: Successfully started service 'sparkDriverActorSystem' on port 41592.
16/06/09 03:36:15 INFO SparkEnv: Registering MapOutputTracker
16/06/09 03:36:15 INFO SparkEnv: Registering BlockManagerMaster
16/06/09 03:36:15 INFO DiskBlockManager: Created local directory at /mnt/resource/hadoop/yarn/local/usercache/KevinGre/appcache/application_1465316720094_0024/blockmgr-e982ccba-0f7b-4b89-be90-0f6ca0b23c61
16/06/09 03:36:15 INFO MemoryStore: MemoryStore started with capacity 457.9 MB
16/06/09 03:36:15 INFO SparkEnv: Registering OutputCommitCoordinator
16/06/09 03:36:15 INFO JettyUtils: Adding filter: org.apache.hadoop.yarn.server.webproxy.amfilter.AmIpFilter
16/06/09 03:36:15 INFO Server: jetty-8.y.z-SNAPSHOT
16/06/09 03:36:15 INFO AbstractConnector: Started SelectChannelConnector@0.0.0.0:33398
16/06/09 03:36:15 INFO Utils: Successfully started service 'SparkUI' on port 33398.
16/06/09 03:36:15 INFO SparkUI: Bound SparkUI to 0.0.0.0, and started at http://10.0.0.8:33398
16/06/09 03:36:15 INFO YarnClusterScheduler: Created YarnClusterScheduler
16/06/09 03:36:15 INFO SchedulerExtensionServices: Starting Yarn extension services with app application_1465316720094_0024 and attemptId Some(appattempt_1465316720094_0024_000001)
16/06/09 03:36:15 INFO Utils: Successfully started service 'org.apache.spark.network.netty.NettyBlockTransferService' on port 46265.
16/06/09 03:36:15 INFO NettyBlockTransferService: Server created on 46265
16/06/09 03:36:15 INFO BlockManagerMaster: Trying to register BlockManager
16/06/09 03:36:15 INFO BlockManagerMasterEndpoint: Registering block manager 10.0.0.8:46265 with 457.9 MB RAM, BlockManagerId(driver, 10.0.0.8, 46265)
16/06/09 03:36:15 INFO BlockManagerMaster: Registered BlockManager
16/06/09 03:36:16 INFO EventLoggingListener: Logging events to wasb:///hdp/spark-events/application_1465316720094_0024_1
16/06/09 03:36:16 INFO YarnSchedulerBackend$YarnSchedulerEndpoint: ApplicationMaster registered as NettyRpcEndpointRef(spark://YarnAM@10.0.0.8:37148)
16/06/09 03:36:16 INFO YarnRMClient: Registering the ApplicationMaster
16/06/09 03:36:16 INFO ConfiguredRMFailoverProxyProvider: Failing over to rm2
16/06/09 03:36:16 INFO YarnAllocator: Will request 2 executor containers, each with 2 cores and 6528 MB memory including 384 MB overhead
16/06/09 03:36:16 INFO YarnAllocator: Container request (host: Any, capability: <memory:6528, vCores:2>)
16/06/09 03:36:16 INFO YarnAllocator: Container request (host: Any, capability: <memory:6528, vCores:2>)
16/06/09 03:36:16 INFO ApplicationMaster: Started progress reporter thread with (heartbeat : 5000, initial allocation : 200) intervals
16/06/09 03:36:17 INFO AMRMClientImpl: Received new token for : 10.0.0.10:30050
16/06/09 03:36:17 INFO YarnAllocator: Launching container container_e05_1465316720094_0024_01_000003 for on host 10.0.0.10
16/06/09 03:36:17 INFO YarnAllocator: Launching ExecutorRunnable. driverUrl: spark://CoarseGrainedScheduler@10.0.0.8:37148, executorHostname: 10.0.0.10
16/06/09 03:36:17 INFO ExecutorRunnable: Starting Executor Container
16/06/09 03:36:17 INFO YarnAllocator: Received 1 containers from YARN, launching executors on 1 of them.
16/06/09 03:36:17 INFO ContainerManagementProtocolProxy: yarn.client.max-cached-nodemanagers-proxies : 0
16/06/09 03:36:17 INFO ExecutorRunnable: Setting up ContainerLaunchContext
16/06/09 03:36:17 INFO ExecutorRunnable: Preparing Local resources
16/06/09 03:36:18 INFO ExecutorRunnable: Prepared Local resources Map(pyspark.zip -> resource { scheme: "wasb" host: "wranglewestus.blob.core.windows.net" port: -1 file: "/user/KevinGre/.sparkStaging/application_1465316720094_0024/pyspark.zip" userInfo: "kevingre" } size: 357163 timestamp: 1465443365000 type: FILE visibility: PRIVATE, py4j-0.9-src.zip -> resource { scheme: "wasb" host: "wranglewestus.blob.core.windows.net" port: -1 file: "/user/KevinGre/.sparkStaging/application_1465316720094_0024/py4j-0.9-src.zip" userInfo: "kevingre" } size: 44846 timestamp: 1465443366000 type: FILE visibility: PRIVATE, __spark_conf__ -> resource { scheme: "wasb" host: "wranglewestus.blob.core.windows.net" port: -1 file: "/user/KevinGre/.sparkStaging/application_1465316720094_0024/__spark_conf__1191782910464867142.zip" userInfo: "kevingre" } size: 105560 timestamp: 1465443366000 type: ARCHIVE visibility: PRIVATE)
16/06/09 03:36:18 INFO ExecutorRunnable:
===============================================================================
YARN executor launch context:
  env:
    CLASSPATH -> {{PWD}}<CPS>{{PWD}}/__spark_conf__<CPS>/usr/hdp/current/spark-client/lib/spark-assembly.jar<CPS>$HADOOP_CONF_DIR<CPS>/usr/hdp/current/hadoop-client/*<CPS>/usr/hdp/current/hadoop-client/lib/*<CPS>/usr/hdp/current/hadoop-hdfs-client/*<CPS>/usr/hdp/current/hadoop-hdfs-client/lib/*<CPS>/usr/hdp/current/hadoop-yarn-client/*<CPS>/usr/hdp/current/hadoop-yarn-client/lib/*<CPS>$PWD/mr-framework/hadoop/share/hadoop/mapreduce/*:$PWD/mr-framework/hadoop/share/hadoop/mapreduce/lib/*:$PWD/mr-framework/hadoop/share/hadoop/common/*:$PWD/mr-framework/hadoop/share/hadoop/common/lib/*:$PWD/mr-framework/hadoop/share/hadoop/yarn/*:$PWD/mr-framework/hadoop/share/hadoop/yarn/lib/*:$PWD/mr-framework/hadoop/share/hadoop/hdfs/*:$PWD/mr-framework/hadoop/share/hadoop/hdfs/lib/*:$PWD/mr-framework/hadoop/share/hadoop/tools/lib/*:/usr/hdp/2.4.2.0-258/hadoop/lib/hadoop-lzo-0.6.0.2.4.2.0-258.jar:/etc/hadoop/conf/secure<CPS>:/usr/hdp/current/spark-client/lib/*:/usr/lib/hdinsight-datalake/*:/usr/hdp/current/spark-client/conf/hive-site.xml
    SPARK_YARN_CACHE_ARCHIVES -> wasb://kevingre@wranglewestus.blob.core.windows.net/user/KevinGre/.sparkStaging/application_1465316720094_0024/__spark_conf__1191782910464867142.zip#__spark_conf__
    SPARK_LOG_URL_STDERR -> http://10.0.0.10:30060/node/containerlogs/container_e05_1465316720094_0024_01_000003/KevinGre/stderr?start=-4096
    SPARK_YARN_CACHE_FILES_FILE_SIZES -> 357163,44846
    SPARK_YARN_STAGING_DIR -> .sparkStaging/application_1465316720094_0024
    SPARK_DIST_CLASSPATH -> :/usr/hdp/current/spark-client/lib/*:/usr/lib/hdinsight-datalake/*:/usr/hdp/current/spark-client/conf/hive-site.xml
    SPARK_YARN_CACHE_FILES_VISIBILITIES -> PRIVATE,PRIVATE
    SPARK_YARN_CACHE_ARCHIVES_FILE_SIZES -> 105560
    SPARK_USER -> KevinGre
    SPARK_YARN_CACHE_ARCHIVES_TIME_STAMPS -> 1465443366000
    SPARK_YARN_MODE -> true
    PYTHONHASHSEED -> 0
    SPARK_YARN_CACHE_FILES_TIME_STAMPS -> 1465443365000,1465443366000
    SPARK_HOME -> /usr/hdp/current/spark-client
    PYTHONPATH -> /usr/hdp/current/spark-client/python:/usr/hdp/current/spark-client/python/lib/py4j-0.9-src.zip<CPS>{{PWD}}/pyspark.zip<CPS>{{PWD}}/py4j-0.9-src.zip
    SPARK_LOG_URL_STDOUT -> http://10.0.0.10:30060/node/containerlogs/container_e05_1465316720094_0024_01_000003/KevinGre/stdout?start=-4096
    SPARK_YARN_CACHE_ARCHIVES_VISIBILITIES -> PRIVATE
    SPARK_YARN_CACHE_FILES -> wasb://kevingre@wranglewestus.blob.core.windows.net/user/KevinGre/.sparkStaging/application_1465316720094_0024/pyspark.zip#pyspark.zip,wasb://kevingre@wranglewestus.blob.core.windows.net/user/KevinGre/.sparkStaging/application_1465316720094_0024/py4j-0.9-src.zip#py4j-0.9-src.zip

  command:
    {{JAVA_HOME}}/bin/java -server -XX:OnOutOfMemoryError='kill %p' -Xms6144m -Xmx6144m '-Dhdp.version=' '-Detwlogger.component=sparkexecutor' '-DlogFilter.filename=SparkLogFilters.xml' '-DpatternGroup.filename=SparkPatternGroups.xml' '-Dlog4jspark.root.logger=INFO,console,DRFA,ETW,Anonymizer' '-Dlog4jspark.log.dir=/var/log/sparkapp' '-Dlog4jspark.log.file=sparkexecutor_\${user.name}.log' '-Dlog4j.configuration=file:/usr/hdp/current/spark-client/conf/log4j.properties' '-Djavax.xml.parsers.SAXParserFactory=com.sun.org.apache.xerces.internal.jaxp.SAXParserFactoryImpl' -Djava.io.tmpdir={{PWD}}/tmp '-Dspark.driver.port=37148' '-Dspark.history.ui.port=18080' '-Dspark.ui.port=0' -Dspark.yarn.app.container.log.dir=<LOG_DIR> -XX:MaxPermSize=256m org.apache.spark.executor.CoarseGrainedExecutorBackend --driver-url spark://CoarseGrainedScheduler@10.0.0.8:37148 --executor-id 1 --hostname 10.0.0.10 --cores 2 --app-id application_1465316720094_0024 --user-class-path file:$PWD/__app__.jar 1> <LOG_DIR>/stdout 2> <LOG_DIR>/stderr
16/06/09 03:36:18 INFO ContainerManagementProtocolProxy: Opening proxy : 10.0.0.10:30050
16/06/09 03:36:24 INFO YarnClusterSchedulerBackend: Registered executor NettyRpcEndpointRef(null) (10.0.0.10:48478) with ID 1
16/06/09 03:36:24 INFO BlockManagerMasterEndpoint: Registering block manager 10.0.0.10:39335 with 4.1 GB RAM, BlockManagerId(1, 10.0.0.10, 39335)
16/06/09 03:36:45 INFO YarnClusterSchedulerBackend: SchedulerBackend is ready for scheduling beginning after waiting maxRegisteredResourcesWaitingTime: 30000(ms)
16/06/09 03:36:45 INFO YarnClusterScheduler: YarnClusterScheduler.postStartHook done
16/06/09 03:36:45 INFO SparkContext: Starting job: collect at probe.py:15
16/06/09 03:36:45 INFO DAGScheduler: Got job 0 (collect at probe.py:15) with 2 output partitions
16/06/09 03:36:45 INFO DAGScheduler: Final stage: ResultStage 0 (collect at probe.py:15)
16/06/09 03:36:45 INFO DAGScheduler: Parents of final stage: List()
16/06/09 03:36:45 INFO DAGScheduler: Missing parents: List()
16/06/09 03:36:45 INFO DAGScheduler: Submitting ResultStage 0 (PythonRDD[1] at collect at probe.py:15), which has no missing parents
16/06/09 03:36:45 INFO MemoryStore: Block broadcast_0 stored as values in memory (estimated size 3.9 KB, free 3.9 KB)
16/06/09 03:36:45 INFO MemoryStore: Block broadcast_0_piece0 stored as bytes in memory (estimated size 2.6 KB, free 6.6 KB)
16/06/09 03:36:45 INFO BlockManagerInfo: Added broadcast_0_piece0 in memory on 10.0.0.8:46265 (size: 2.6 KB, free: 457.9 MB)
16/06/09 03:36:45 INFO SparkContext: Created broadcast 0 from broadcast at DAGScheduler.scala:1006
16/06/09 03:36:45 INFO DAGScheduler: Submitting 2 missing tasks from ResultStage 0 (PythonRDD[1] at collect at probe.py:15)
16/06/09 03:36:45 INFO YarnClusterScheduler: Adding task set 0.0 with 2 tasks
16/06/09 03:36:45 INFO TaskSetManager: Starting task 0.0 in stage 0.0 (TID 0, 10.0.0.10, partition 0,PROCESS_LOCAL, 2064 bytes)
16/06/09 03:36:45 INFO TaskSetManager: Starting task 1.0 in stage 0.0 (TID 1, 10.0.0.10, partition 1,PROCESS_LOCAL, 2083 bytes)
16/06/09 03:36:46 INFO BlockManagerInfo: Added broadcast_0_piece0 in memory on 10.0.0.10:39335 (size: 2.6 KB, free: 4.1 GB)
16/06/09 03:36:47 INFO TaskSetManager: Finished task 0.0 in stage 0.0 (TID 0) in 1613 ms on 10.0.0.10 (1/2)
16/06/09 03:36:47 INFO DAGScheduler: ResultStage 0 (collect at probe.py:15) finished in 1.622 s
16/06/09 03:36:47 INFO DAGScheduler: Job 0 finished: collect at probe.py:15, took 1.751852 s
16/06/09 03:36:47 INFO TaskSetManager: Finished task 1.0 in stage 0.0 (TID 1) in 1604 ms on 10.0.0.10 (2/2)
16/06/09 03:36:47 INFO YarnClusterScheduler: Removed TaskSet 0.0, whose tasks have all completed, from pool
16/06/09 03:36:47 INFO ApplicationMaster: Final app status: SUCCEEDED, exitCode: 0
16/06/09 03:36:47 INFO SparkContext: Invoking stop() from shutdown hook
16/06/09 03:36:47 INFO ContextHandler: stopped o.s.j.s.ServletContextHandler{/metrics/json,null}
16/06/09 03:36:47 INFO ContextHandler: stopped o.s.j.s.ServletContextHandler{/stages/stage/kill,null}
16/06/09 03:36:47 INFO ContextHandler: stopped o.s.j.s.ServletContextHandler{/api,null}
16/06/09 03:36:47 INFO ContextHandler: stopped o.s.j.s.ServletContextHandler{/,null}
16/06/09 03:36:47 INFO ContextHandler: stopped o.s.j.s.ServletContextHandler{/static,null}
16/06/09 03:36:47 INFO ContextHandler: stopped o.s.j.s.ServletContextHandler{/executors/threadDump/json,null}
16/06/09 03:36:47 INFO ContextHandler: stopped o.s.j.s.ServletContextHandler{/executors/threadDump,null}
16/06/09 03:36:47 INFO ContextHandler: stopped o.s.j.s.ServletContextHandler{/executors/json,null}
16/06/09 03:36:47 INFO ContextHandler: stopped o.s.j.s.ServletContextHandler{/executors,null}
16/06/09 03:36:47 INFO ContextHandler: stopped o.s.j.s.ServletContextHandler{/environment/json,null}
16/06/09 03:36:47 INFO ContextHandler: stopped o.s.j.s.ServletContextHandler{/environment,null}
16/06/09 03:36:47 INFO ContextHandler: stopped o.s.j.s.ServletContextHandler{/storage/rdd/json,null}
16/06/09 03:36:47 INFO ContextHandler: stopped o.s.j.s.ServletContextHandler{/storage/rdd,null}
16/06/09 03:36:47 INFO ContextHandler: stopped o.s.j.s.ServletContextHandler{/storage/json,null}
16/06/09 03:36:47 INFO ContextHandler: stopped o.s.j.s.ServletContextHandler{/storage,null}
16/06/09 03:36:47 INFO ContextHandler: stopped o.s.j.s.ServletContextHandler{/stages/pool/json,null}
16/06/09 03:36:47 INFO ContextHandler: stopped o.s.j.s.ServletContextHandler{/stages/pool,null}
16/06/09 03:36:47 INFO ContextHandler: stopped o.s.j.s.ServletContextHandler{/stages/stage/json,null}
16/06/09 03:36:47 INFO ContextHandler: stopped o.s.j.s.ServletContextHandler{/stages/stage,null}
16/06/09 03:36:47 INFO ContextHandler: stopped o.s.j.s.ServletContextHandler{/stages/json,null}
16/06/09 03:36:47 INFO ContextHandler: stopped o.s.j.s.ServletContextHandler{/stages,null}
16/06/09 03:36:47 INFO ContextHandler: stopped o.s.j.s.ServletContextHandler{/jobs/job/json,null}
16/06/09 03:36:47 INFO ContextHandler: stopped o.s.j.s.ServletContextHandler{/jobs/job,null}
16/06/09 03:36:47 INFO ContextHandler: stopped o.s.j.s.ServletContextHandler{/jobs/json,null}
16/06/09 03:36:47 INFO ContextHandler: stopped o.s.j.s.ServletContextHandler{/jobs,null}
16/06/09 03:36:47 INFO SparkUI: Stopped Spark web UI at http://10.0.0.8:33398
16/06/09 03:36:47 INFO metrics: type=GAUGE, name=application_1465316720094_0024.driver.BlockManager.disk.diskSpaceUsed_MB, value=0
16/06/09 03:36:47 INFO metrics: type=GAUGE, name=application_1465316720094_0024.driver.BlockManager.memory.maxMem_MB, value=4648
16/06/09 03:36:47 INFO metrics: type=GAUGE, name=application_1465316720094_0024.driver.BlockManager.memory.memUsed_MB, value=0
16/06/09 03:36:47 INFO metrics: type=GAUGE, name=application_1465316720094_0024.driver.BlockManager.memory.remainingMem_MB, value=4648
16/06/09 03:36:47 INFO metrics: type=GAUGE, name=application_1465316720094_0024.driver.DAGScheduler.job.activeJobs, value=0
16/06/09 03:36:47 INFO metrics: type=GAUGE, name=application_1465316720094_0024.driver.DAGScheduler.job.allJobs, value=1
16/06/09 03:36:47 INFO metrics: type=GAUGE, name=application_1465316720094_0024.driver.DAGScheduler.stage.failedStages, value=0
16/06/09 03:36:47 INFO metrics: type=GAUGE, name=application_1465316720094_0024.driver.DAGScheduler.stage.runningStages, value=0
16/06/09 03:36:47 INFO metrics: type=GAUGE, name=application_1465316720094_0024.driver.DAGScheduler.stage.waitingStages, value=0
16/06/09 03:36:47 INFO metrics: type=GAUGE, name=application_1465316720094_0024.driver.jvm.PS-MarkSweep.count, value=0
16/06/09 03:36:47 INFO metrics: type=GAUGE, name=application_1465316720094_0024.driver.jvm.PS-MarkSweep.time, value=0
16/06/09 03:36:47 INFO metrics: type=GAUGE, name=application_1465316720094_0024.driver.jvm.PS-Scavenge.count, value=4
16/06/09 03:36:47 INFO metrics: type=GAUGE, name=application_1465316720094_0024.driver.jvm.PS-Scavenge.time, value=124
16/06/09 03:36:47 INFO metrics: type=GAUGE, name=application_1465316720094_0024.driver.jvm.heap.committed, value=557842432
16/06/09 03:36:47 INFO metrics: type=GAUGE, name=application_1465316720094_0024.driver.jvm.heap.init, value=460977216
16/06/09 03:36:47 INFO metrics: type=GAUGE, name=application_1465316720094_0024.driver.jvm.heap.max, value=954728448
16/06/09 03:36:47 INFO metrics: type=GAUGE, name=application_1465316720094_0024.driver.jvm.heap.usage, value=0.1016048156972903
16/06/09 03:36:47 INFO metrics: type=GAUGE, name=application_1465316720094_0024.driver.jvm.heap.used, value=97248984
16/06/09 03:36:47 INFO metrics: type=GAUGE, name=application_1465316720094_0024.driver.jvm.non-heap.committed, value=65994752
16/06/09 03:36:47 INFO metrics: type=GAUGE, name=application_1465316720094_0024.driver.jvm.non-heap.init, value=24576000
16/06/09 03:36:47 INFO metrics: type=GAUGE, name=application_1465316720094_0024.driver.jvm.non-heap.max, value=318767104
16/06/09 03:36:47 INFO metrics: type=GAUGE, name=application_1465316720094_0024.driver.jvm.non-heap.usage, value=0.2031079091523823
16/06/09 03:36:47 INFO metrics: type=GAUGE, name=application_1465316720094_0024.driver.jvm.non-heap.used, value=64744120
16/06/09 03:36:47 INFO metrics: type=GAUGE, name=application_1465316720094_0024.driver.jvm.pools.Code-Cache.committed, value=2555904
16/06/09 03:36:47 INFO metrics: type=GAUGE, name=application_1465316720094_0024.driver.jvm.pools.Code-Cache.init, value=2555904
16/06/09 03:36:47 INFO metrics: type=GAUGE, name=application_1465316720094_0024.driver.jvm.pools.Code-Cache.max, value=50331648
16/06/09 03:36:47 INFO metrics: type=GAUGE, name=application_1465316720094_0024.driver.jvm.pools.Code-Cache.usage, value=0.03537114461263021
16/06/09 03:36:47 INFO metrics: type=GAUGE, name=application_1465316720094_0024.driver.jvm.pools.Code-Cache.used, value=1780288
16/06/09 03:36:47 INFO metrics: type=GAUGE, name=application_1465316720094_0024.driver.jvm.pools.PS-Eden-Space.committed, value=231735296
16/06/09 03:36:47 INFO metrics: type=GAUGE, name=application_1465316720094_0024.driver.jvm.pools.PS-Eden-Space.init, value=115867648
16/06/09 03:36:47 INFO metrics: type=GAUGE, name=application_1465316720094_0024.driver.jvm.pools.PS-Eden-Space.max, value=320339968
16/06/09 03:36:47 INFO metrics: type=GAUGE, name=application_1465316720094_0024.driver.jvm.pools.PS-Eden-Space.usage, value=0.11306547923486088
16/06/09 03:36:47 INFO metrics: type=GAUGE, name=application_1465316720094_0024.driver.jvm.pools.PS-Eden-Space.used, value=36219392
16/06/09 03:36:47 INFO metrics: type=GAUGE, name=application_1465316720094_0024.driver.jvm.pools.PS-Old-Gen.committed, value=307232768
16/06/09 03:36:47 INFO metrics: type=GAUGE, name=application_1465316720094_0024.driver.jvm.pools.PS-Old-Gen.init, value=307232768
16/06/09 03:36:47 INFO metrics: type=GAUGE, name=application_1465316720094_0024.driver.jvm.pools.PS-Old-Gen.max, value=715653120
16/06/09 03:36:47 INFO metrics: type=GAUGE, name=application_1465316720094_0024.driver.jvm.pools.PS-Old-Gen.usage, value=0.05961201985677083
16/06/09 03:36:47 INFO metrics: type=GAUGE, name=application_1465316720094_0024.driver.jvm.pools.PS-Old-Gen.used, value=42661528
16/06/09 03:36:47 INFO metrics: type=GAUGE, name=application_1465316720094_0024.driver.jvm.pools.PS-Perm-Gen.committed, value=63438848
16/06/09 03:36:47 INFO metrics: type=GAUGE, name=application_1465316720094_0024.driver.jvm.pools.PS-Perm-Gen.init, value=22020096
16/06/09 03:36:47 INFO metrics: type=GAUGE, name=application_1465316720094_0024.driver.jvm.pools.PS-Perm-Gen.max, value=268435456
16/06/09 03:36:47 INFO metrics: type=GAUGE, name=application_1465316720094_0024.driver.jvm.pools.PS-Perm-Gen.usage, value=0.234576016664505
16/06/09 03:36:47 INFO metrics: type=GAUGE, name=application_1465316720094_0024.driver.jvm.pools.PS-Perm-Gen.used, value=62968520
16/06/09 03:36:47 INFO metrics: type=GAUGE, name=application_1465316720094_0024.driver.jvm.pools.PS-Survivor-Space.committed, value=18874368
16/06/09 03:36:47 INFO metrics: type=GAUGE, name=application_1465316720094_0024.driver.jvm.pools.PS-Survivor-Space.init, value=18874368
16/06/09 03:36:47 INFO metrics: type=GAUGE, name=application_1465316720094_0024.driver.jvm.pools.PS-Survivor-Space.max, value=18874368
16/06/09 03:36:47 INFO metrics: type=GAUGE, name=application_1465316720094_0024.driver.jvm.pools.PS-Survivor-Space.usage, value=0.9990370008680556
16/06/09 03:36:47 INFO metrics: type=GAUGE, name=application_1465316720094_0024.driver.jvm.pools.PS-Survivor-Space.used, value=18856192
16/06/09 03:36:47 INFO metrics: type=GAUGE, name=application_1465316720094_0024.driver.jvm.total.committed, value=623837184
16/06/09 03:36:47 INFO metrics: type=GAUGE, name=application_1465316720094_0024.driver.jvm.total.init, value=485553216
16/06/09 03:36:47 INFO metrics: type=GAUGE, name=application_1465316720094_0024.driver.jvm.total.max, value=1273495552
16/06/09 03:36:47 INFO metrics: type=GAUGE, name=application_1465316720094_0024.driver.jvm.total.used, value=163221496
16/06/09 03:36:47 INFO metrics: type=TIMER, name=application_1465316720094_0024.driver.DAGScheduler.messageProcessingTime, count=6, min=0.070799, max=120.164425, mean=24.128283572325376, stddev=43.98730914597296, median=3.698385, p75=8.893265, p95=120.164425, p98=120.164425, p99=120.164425, p999=120.164425, mean_rate=0.1854192713725955, m1=0.011458136074669095, m5=0.0030925140804588976, m15=0.0010836790736504383, rate_unit=events/second, duration_unit=milliseconds
16/06/09 03:36:48 INFO YarnAllocator: Driver requested a total number of 0 executor(s).
16/06/09 03:36:48 INFO YarnClusterSchedulerBackend: Shutting down all executors
16/06/09 03:36:48 INFO YarnClusterSchedulerBackend: Asking each executor to shut down
16/06/09 03:36:48 INFO SchedulerExtensionServices: Stopping SchedulerExtensionServices (serviceOption=Some(), services=List(), started=false)
16/06/09 03:36:48 INFO MapOutputTrackerMasterEndpoint: MapOutputTrackerMasterEndpoint stopped!
16/06/09 03:36:48 INFO MemoryStore: MemoryStore cleared
16/06/09 03:36:48 INFO BlockManager: BlockManager stopped
16/06/09 03:36:48 INFO BlockManagerMaster: BlockManagerMaster stopped
16/06/09 03:36:48 INFO SparkContext: Successfully stopped SparkContext
16/06/09 03:36:48 INFO OutputCommitCoordinator$OutputCommitCoordinatorEndpoint: OutputCommitCoordinator stopped!
16/06/09 03:36:48 WARN Dispatcher: Message RemoteProcessDisconnected(10.0.0.10:48478) dropped.
java.lang.IllegalStateException: RpcEnv already stopped.
	at org.apache.spark.rpc.netty.Dispatcher.postMessage(Dispatcher.scala:159)
	at org.apache.spark.rpc.netty.Dispatcher.postToAll(Dispatcher.scala:109)
	at org.apache.spark.rpc.netty.NettyRpcHandler.connectionTerminated(NettyRpcEnv.scala:630)
	at org.apache.spark.network.server.TransportRequestHandler.channelUnregistered(TransportRequestHandler.java:94)
	at org.apache.spark.network.server.TransportChannelHandler.channelUnregistered(TransportChannelHandler.java:89)
	at io.netty.channel.AbstractChannelHandlerContext.invokeChannelUnregistered(AbstractChannelHandlerContext.java:158)
	at io.netty.channel.AbstractChannelHandlerContext.fireChannelUnregistered(AbstractChannelHandlerContext.java:144)
	at io.netty.channel.ChannelInboundHandlerAdapter.channelUnregistered(ChannelInboundHandlerAdapter.java:53)
	at io.netty.channel.AbstractChannelHandlerContext.invokeChannelUnregistered(AbstractChannelHandlerContext.java:158)
	at io.netty.channel.AbstractChannelHandlerContext.fireChannelUnregistered(AbstractChannelHandlerContext.java:144)
	at io.netty.channel.ChannelInboundHandlerAdapter.channelUnregistered(ChannelInboundHandlerAdapter.java:53)
	at io.netty.channel.AbstractChannelHandlerContext.invokeChannelUnregistered(AbstractChannelHandlerContext.java:158)
	at io.netty.channel.AbstractChannelHandlerContext.fireChannelUnregistered(AbstractChannelHandlerContext.java:144)
	at io.netty.channel.ChannelInboundHandlerAdapter.channelUnregistered(ChannelInboundHandlerAdapter.java:53)
	at io.netty.channel.AbstractChannelHandlerContext.invokeChannelUnregistered(AbstractChannelHandlerContext.java:158)
	at io.netty.channel.AbstractChannelHandlerContext.fireChannelUnregistered(AbstractChannelHandlerContext.java:144)
	at io.netty.channel.DefaultChannelPipeline.fireChannelUnregistered(DefaultChannelPipeline.java:739)
	at io.netty.channel.AbstractChannel$AbstractUnsafe$8.run(AbstractChannel.java:659)
	at io.netty.util.concurrent.SingleThreadEventExecutor.runAllTasks(SingleThreadEventExecutor.java:328)
	at io.netty.util.concurrent.SingleThreadEventExecutor.confirmShutdown(SingleThreadEventExecutor.java:627)
	at io.netty.channel.nio.NioEventLoop.run(NioEventLoop.java:362)
	at io.netty.util.concurrent.SingleThreadEventExecutor$2.run(SingleThreadEventExecutor.java:111)
	at java.lang.Thread.run(Thread.java:745)
16/06/09 03:36:48 WARN Dispatcher: Message RemoteProcessDisconnected(10.0.0.10:48478) dropped.
java.lang.IllegalStateException: RpcEnv already stopped.
	at org.apache.spark.rpc.netty.Dispatcher.postMessage(Dispatcher.scala:159)
	at org.apache.spark.rpc.netty.Dispatcher.postToAll(Dispatcher.scala:109)
	at org.apache.spark.rpc.netty.NettyRpcHandler.connectionTerminated(NettyRpcEnv.scala:630)
	at org.apache.spark.network.server.TransportRequestHandler.channelUnregistered(TransportRequestHandler.java:94)
	at org.apache.spark.network.server.TransportChannelHandler.channelUnregistered(TransportChannelHandler.java:89)
	at io.netty.channel.AbstractChannelHandlerContext.invokeChannelUnregistered(AbstractChannelHandlerContext.java:158)
	at io.netty.channel.AbstractChannelHandlerContext.fireChannelUnregistered(AbstractChannelHandlerContext.java:144)
	at io.netty.channel.ChannelInboundHandlerAdapter.channelUnregistered(ChannelInboundHandlerAdapter.java:53)
	at io.netty.channel.AbstractChannelHandlerContext.invokeChannelUnregistered(AbstractChannelHandlerContext.java:158)
	at io.netty.channel.AbstractChannelHandlerContext.fireChannelUnregistered(AbstractChannelHandlerContext.java:144)
	at io.netty.channel.ChannelInboundHandlerAdapter.channelUnregistered(ChannelInboundHandlerAdapter.java:53)
	at io.netty.channel.AbstractChannelHandlerContext.invokeChannelUnregistered(AbstractChannelHandlerContext.java:158)
	at io.netty.channel.AbstractChannelHandlerContext.fireChannelUnregistered(AbstractChannelHandlerContext.java:144)
	at io.netty.channel.ChannelInboundHandlerAdapter.channelUnregistered(ChannelInboundHandlerAdapter.java:53)
	at io.netty.channel.AbstractChannelHandlerContext.invokeChannelUnregistered(AbstractChannelHandlerContext.java:158)
	at io.netty.channel.AbstractChannelHandlerContext.fireChannelUnregistered(AbstractChannelHandlerContext.java:144)
	at io.netty.channel.DefaultChannelPipeline.fireChannelUnregistered(DefaultChannelPipeline.java:739)
	at io.netty.channel.AbstractChannel$AbstractUnsafe$8.run(AbstractChannel.java:659)
	at io.netty.util.concurrent.SingleThreadEventExecutor.runAllTasks(SingleThreadEventExecutor.java:328)
	at io.netty.util.concurrent.SingleThreadEventExecutor.confirmShutdown(SingleThreadEventExecutor.java:627)
	at io.netty.channel.nio.NioEventLoop.run(NioEventLoop.java:362)
	at io.netty.util.concurrent.SingleThreadEventExecutor$2.run(SingleThreadEventExecutor.java:111)
	at java.lang.Thread.run(Thread.java:745)
16/06/09 03:36:48 INFO RemoteActorRefProvider$RemotingTerminator: Shutting down remote daemon.
16/06/09 03:36:48 INFO ApplicationMaster: Unregistering ApplicationMaster with SUCCEEDED
16/06/09 03:36:48 INFO RemoteActorRefProvider$RemotingTerminator: Remote daemon shut down; proceeding with flushing remote transports.
16/06/09 03:36:48 INFO AMRMClientImpl: Waiting for application to be successfully unregistered.
16/06/09 03:36:48 INFO RemoteActorRefProvider$RemotingTerminator: Remoting shut down.
16/06/09 03:36:48 INFO ApplicationMaster: Deleting staging directory .sparkStaging/application_1465316720094_0024
16/06/09 03:36:48 INFO ShutdownHookManager: Shutdown hook called
16/06/09 03:36:48 INFO ShutdownHookManager: Deleting directory /mnt/resource/hadoop/yarn/local/usercache/KevinGre/appcache/application_1465316720094_0024/spark-a4b38621-701d-4046-ada2-c11d39728867
16/06/09 03:36:48 INFO ShutdownHookManager: Deleting directory /mnt/resource/hadoop/yarn/local/usercache/KevinGre/appcache/application_1465316720094_0024/spark-a4b38621-701d-4046-ada2-c11d39728867/pyspark-ef8a0eea-5184-4e08-9091-d454e03fb2b3
16/06/09 03:36:48 INFO MetricsSystemImpl: Stopping azure-file-system metrics system...
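The two identical "RpcEnv already stopped" traces above fire while the shutdown hook tears down the RPC layer and the executor's disconnect notification lands a moment too late; the application still unregisters with SUCCEEDED, so they read as shutdown-ordering noise rather than a failure. When scanning output this verbose, pulling out just the warnings and errors first is usually enough (plain grep over the aggregated log):

    # Show only WARN/ERROR entries from the aggregated application log
    yarn logs -applicationId application_1465316720094_0024 | grep -E ' (WARN|ERROR) '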
16/06/09 03:36:48 INFO MetricsSinkAdapter: azurefs2 thread interrupted.
16/06/09 03:36:48 INFO MetricsSystemImpl: azure-file-system metrics system stopped.
16/06/09 03:36:48 INFO MetricsSystemImpl: azure-file-system metrics system shutdown complete.
End of LogType:stderr

LogType:stdout
Log Upload Time:Thu Jun 09 03:36:50 +0000 2016
LogLength:353
Log Contents:
driver:
2.7.11 |Anaconda 2.3.0 (64-bit)| (default, Dec 6 2015, 18:08:32)
[GCC 4.4.7 20120313 (Red Hat 4.4.7-1)]
PYTHONHASHSEED=0
PYSPARK_PYTHON=/usr/bin/anaconda/bin/python
executors:
2.7.11 |Anaconda 2.3.0 (64-bit)| (default, Dec 6 2015, 18:08:32)
[GCC 4.4.7 20120313 (Red Hat 4.4.7-1)]
PYTHONHASHSEED=0
PYSPARK_PYTHON=/usr/bin/anaconda/bin/python
End of LogType:stdout
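So with the stock submission, driver and executors agree: both run Anaconda Python 2.7.11 with PYTHONHASHSEED=0. The next run below tries to switch the interpreter through spark.yarn.appMasterEnv.*, which only reaches the ApplicationMaster container (the driver, in cluster mode); a switch that also covers the executors would need the executor-side analogue as well. A sketch with both knobs (property names per the Spark 1.6 configuration docs; not the exact command used below):

    # Sketch: force python3 on both the driver (AM) and the executors
    /usr/hdp/current/spark-client/bin/spark-submit \
        --master yarn --deploy-mode cluster \
        --conf spark.yarn.appMasterEnv.PYSPARK_PYTHON=python3 \
        --conf spark.executorEnv.PYSPARK_PYTHON=python3 \
        probe.py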
KevinGre@hn0-keving:~/probe$
KevinGre@hn0-keving:~/probe$ '/usr/hdp/current/spark-client/bin/spark-submit' '--master' 'yarn' '--deploy-mode' 'cluster' '--name' 'probeVersion' '--conf' 'spark.yarn.maxAppAttempts=1' '--conf' 'spark.yarn.tags=livy_a17e8ded-2121-4529-95c5-7cc1791311a0' '--conf' 'spark.yarn.appMasterEnv.PYSPARK_DRIVER_PYTHON=python3' '--conf' 'spark.yarn.appMasterEnv.PYSPARK_PYTHON=python3' probe.py
SLF4J: Class path contains multiple SLF4J bindings.
SLF4J: Found binding in [jar:file:/usr/hdp/2.4.2.0-258/spark/lib/spark-assembly-1.6.1.2.4.2.0-258-hadoop2.7.1.2.4.2.0-258.jar!/org/slf4j/impl/StaticLoggerBinder.class]
SLF4J: Found binding in [jar:file:/usr/hdp/2.4.2.0-258/spark/lib/spark-examples-1.6.1.2.4.2.0-258-hadoop2.7.1.2.4.2.0-258.jar!/org/slf4j/impl/StaticLoggerBinder.class]
SLF4J: See http://www.slf4j.org/codes.html#multiple_bindings for an explanation.
SLF4J: Actual binding is of type [org.slf4j.impl.Log4jLoggerFactory]
16/06/09 03:38:53 WARN NativeCodeLoader: Unable to load native-hadoop library for your platform... using builtin-java classes where applicable
16/06/09 03:38:54 INFO TimelineClientImpl: Timeline service address: http://hn0-keving.gnw4jk51tgbezpywcdl25ba0xb.dx.internal.cloudapp.net:8188/ws/v1/timeline/
16/06/09 03:38:54 INFO MetricsConfig: loaded properties from hadoop-metrics2-azure-file-system.properties
16/06/09 03:38:54 INFO WasbAzureIaasSink: Init starting.
16/06/09 03:38:54 INFO AzureIaasSink: Init starting. Initializing MdsLogger.
16/06/09 03:38:54 INFO AzureIaasSink: Init completed.
16/06/09 03:38:54 INFO WasbAzureIaasSink: Init completed.
16/06/09 03:38:54 INFO MetricsSinkAdapter: Sink azurefs2 started
16/06/09 03:38:54 INFO MetricsSystemImpl: Scheduled snapshot period at 60 second(s).
16/06/09 03:38:54 INFO MetricsSystemImpl: azure-file-system metrics system started
16/06/09 03:38:54 INFO ConfiguredRMFailoverProxyProvider: Failing over to rm2
16/06/09 03:38:55 INFO Client: Requesting a new application from cluster with 2 NodeManagers
16/06/09 03:38:55 INFO Client: Verifying our application has not requested more than the maximum memory capability of the cluster (25600 MB per container)
16/06/09 03:38:55 INFO Client: Will allocate AM container, with 1408 MB memory including 384 MB overhead
16/06/09 03:38:55 INFO Client: Setting up container launch context for our AM
16/06/09 03:38:55 INFO Client: Setting up the launch environment for our AM container
16/06/09 03:38:55 INFO Client: Preparing resources for our AM container
16/06/09 03:38:55 INFO Client: Uploading resource file:/home/KevinGre/probe/probe.py -> wasb://kevingre@wranglewestus.blob.core.windows.net/user/KevinGre/.sparkStaging/application_1465316720094_0025/probe.py
16/06/09 03:38:55 INFO Client: Uploading resource file:/usr/hdp/2.4.2.0-258/spark/python/lib/pyspark.zip -> wasb://kevingre@wranglewestus.blob.core.windows.net/user/KevinGre/.sparkStaging/application_1465316720094_0025/pyspark.zip
16/06/09 03:38:55 INFO Client: Uploading resource file:/usr/hdp/2.4.2.0-258/spark/python/lib/py4j-0.9-src.zip -> wasb://kevingre@wranglewestus.blob.core.windows.net/user/KevinGre/.sparkStaging/application_1465316720094_0025/py4j-0.9-src.zip
16/06/09 03:38:56 INFO Client: Uploading resource file:/tmp/spark-7eccba57-25c6-45d3-aadf-c72cddb0abfe/__spark_conf__2196432329335185680.zip -> wasb://kevingre@wranglewestus.blob.core.windows.net/user/KevinGre/.sparkStaging/application_1465316720094_0025/__spark_conf__2196432329335185680.zip
16/06/09 03:38:56 INFO SecurityManager: Changing view acls to: KevinGre
16/06/09 03:38:56 INFO SecurityManager: Changing modify acls to: KevinGre
16/06/09 03:38:56 INFO SecurityManager: SecurityManager: authentication disabled; ui acls disabled; users with view permissions: Set(KevinGre); users with modify permissions: Set(KevinGre)
16/06/09 03:38:56 INFO Client: Submitting application 25 to ResourceManager
16/06/09 03:38:56 INFO YarnClientImpl: Submitted application application_1465316720094_0025
16/06/09 03:38:57 INFO Client: Application report for application_1465316720094_0025 (state: ACCEPTED)
16/06/09 03:38:57 INFO Client:
	 client token: N/A
	 diagnostics: N/A
	 ApplicationMaster host: N/A
	 ApplicationMaster RPC port: -1
	 queue: default
	 start time: 1465443536304
	 final status: UNDEFINED
	 tracking URL: http://hn1-keving.gnw4jk51tgbezpywcdl25ba0xb.dx.internal.cloudapp.net:8088/proxy/application_1465316720094_0025/
	 user: KevinGre
16/06/09 03:38:58 INFO Client: Application report for application_1465316720094_0025 (state: ACCEPTED)
16/06/09 03:38:59 INFO Client: Application report for application_1465316720094_0025 (state: ACCEPTED)
16/06/09 03:39:00 INFO Client: Application report for application_1465316720094_0025 (state: ACCEPTED)
16/06/09 03:39:01 INFO Client: Application report for application_1465316720094_0025 (state: ACCEPTED)
16/06/09 03:39:02 INFO Client: Application report for application_1465316720094_0025 (state: ACCEPTED)
16/06/09 03:39:03 INFO Client: Application report for application_1465316720094_0025 (state: ACCEPTED)
16/06/09 03:39:04 INFO Client: Application report for application_1465316720094_0025 (state: ACCEPTED)
16/06/09 03:39:05 INFO Client: Application report for application_1465316720094_0025 (state: RUNNING)
16/06/09 03:39:05 INFO Client:
	 client token: N/A
	 diagnostics: N/A
	 ApplicationMaster host: 10.0.0.10
	 ApplicationMaster RPC port: 0
	 queue: default
	 start time: 1465443536304
	 final status: UNDEFINED
	 tracking URL: http://hn1-keving.gnw4jk51tgbezpywcdl25ba0xb.dx.internal.cloudapp.net:8088/proxy/application_1465316720094_0025/
	 user: KevinGre
16/06/09 03:39:06 INFO Client: Application report for application_1465316720094_0025 (state: RUNNING)
16/06/09 03:39:07 INFO Client: Application report for application_1465316720094_0025 (state: RUNNING)
16/06/09 03:39:08 INFO Client: Application report for application_1465316720094_0025 (state: RUNNING)
16/06/09 03:39:09 INFO Client: Application report for application_1465316720094_0025 (state: RUNNING)
16/06/09 03:39:10 INFO Client: Application report for application_1465316720094_0025 (state: RUNNING)
16/06/09 03:39:11 INFO Client: Application report for application_1465316720094_0025 (state: RUNNING)
16/06/09 03:39:12 INFO Client: Application report for application_1465316720094_0025 (state: RUNNING)
16/06/09 03:39:13 INFO Client: Application report for application_1465316720094_0025 (state: RUNNING)
16/06/09 03:39:14 INFO Client: Application report for application_1465316720094_0025 (state: RUNNING)
16/06/09 03:39:15 INFO Client: Application report for application_1465316720094_0025 (state: FINISHED)
16/06/09 03:39:15 INFO Client:
	 client token: N/A
	 diagnostics: N/A
	 ApplicationMaster host: 10.0.0.10
	 ApplicationMaster RPC port: 0
	 queue: default
	 start time: 1465443536304
	 final status: SUCCEEDED
	 tracking URL: http://hn1-keving.gnw4jk51tgbezpywcdl25ba0xb.dx.internal.cloudapp.net:8088/proxy/application_1465316720094_0025/
	 user: KevinGre
16/06/09 03:39:15 INFO Client: Deleting staging directory .sparkStaging/application_1465316720094_0025
16/06/09 03:39:15 INFO ShutdownHookManager: Shutdown hook called
16/06/09 03:39:15 INFO ShutdownHookManager: Deleting directory /tmp/spark-7eccba57-25c6-45d3-aadf-c72cddb0abfe
KevinGre@hn0-keving:~/probe$ yarn logs -applicationId application_1465316720094_0025
16/06/09 03:39:36 INFO impl.TimelineClientImpl: Timeline service address: http://hn0-keving.gnw4jk51tgbezpywcdl25ba0xb.dx.internal.cloudapp.net:8188/ws/v1/timeline/
16/06/09 03:39:37 INFO client.ConfiguredRMFailoverProxyProvider: Failing over to rm2
16/06/09 03:39:37 INFO zlib.ZlibFactory: Successfully loaded & initialized native-zlib library
16/06/09 03:39:37 INFO compress.CodecPool: Got brand-new decompressor [.deflate]
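yarn logs concatenates several files per container (directory.info, launch_container.sh, stderr, stdout), so the part that usually matters here, the probe's stdout, is easy to lose in the noise. A quick way to keep only the stdout sections (plain sed over the aggregated output):

    # Print just the stdout sections of every container
    yarn logs -applicationId application_1465316720094_0025 \
        | sed -n '/^LogType:stdout/,/^End of LogType:stdout/p'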
Container: container_e05_1465316720094_0025_01_000002 on 10.0.0.10_30050
==========================================================================
LogType:directory.info
Log Upload Time:Thu Jun 09 03:39:17 +0000 2016
LogLength:4767
Log Contents:
ls -l:
total 28
-rw------- 1 nobody hadoop 103 Jun 9 03:39 container_tokens
-rwx------ 1 nobody hadoop 7145 Jun 9 03:39 launch_container.sh
lrwxrwxrwx 1 nobody hadoop 81 Jun 9 03:39 py4j-0.9-src.zip -> /mnt/resource/hadoop/yarn/local/usercache/KevinGre/filecache/186/py4j-0.9-src.zip
lrwxrwxrwx 1 nobody hadoop 76 Jun 9 03:39 pyspark.zip -> /mnt/resource/hadoop/yarn/local/usercache/KevinGre/filecache/185/pyspark.zip
lrwxrwxrwx 1 nobody hadoop 102 Jun 9 03:39 __spark_conf__ -> /mnt/resource/hadoop/yarn/local/usercache/KevinGre/filecache/187/__spark_conf__2196432329335185680.zip
drwxr-s--- 2 nobody hadoop 4096 Jun 9 03:39 tmp
find -L . -maxdepth 5 -ls:
3539105 4 drwxr-s--- 3 nobody hadoop 4096 Jun 9 03:39 .
3539108 8 -rwx------ 1 nobody hadoop 7145 Jun 9 03:39 ./launch_container.sh
3539109 4 -rw------- 1 nobody hadoop 103 Jun 9 03:39 ./container_tokens
3539017 4 drwx------ 2 nobody nogroup 4096 Jun 9 03:38 ./__spark_conf__
3539084 4 -r-x------ 1 nobody nogroup 1000 Jun 9 03:38 ./__spark_conf__/ssl-server.xml
3539069 4 -r-x------ 1 nobody nogroup 758 Jun 9 03:38 ./__spark_conf__/mapred-site.xml.template
3539020 4 -r-x------ 1 nobody nogroup 3156 Jun 9 03:38 ./__spark_conf__/log4j.properties
3539087 8 -r-x------ 1 nobody nogroup 4113 Jun 9 03:38 ./__spark_conf__/mapred-queues.xml.template
3539086 4 -r-x------ 1 nobody nogroup 945 Jun 9 03:38 ./__spark_conf__/taskcontroller.cfg
3539066 4 -r-x------ 1 nobody nogroup 744 Jun 9 03:38 ./__spark_conf__/ssl-client.xml
3539081 4 -r-x------ 1 nobody nogroup 127 Jun 9 03:38 ./__spark_conf__/slaves
3539085 4 -r-x------ 1 nobody nogroup 2268 Jun 9 03:38 ./__spark_conf__/ssl-server.xml.example
3539074 4 -r-x------ 1 nobody nogroup 1020 Jun 9 03:38 ./__spark_conf__/commons-logging.properties
3539021 8 -r-x------ 1 nobody nogroup 5640 Jun 9 03:38 ./__spark_conf__/hadoop-metrics2.properties
3539024 4 -r-x------ 1 nobody nogroup 265 Jun 9 03:38 ./__spark_conf__/hadoop-metrics2-azure-file-system.properties
3539071 8 -r-x------ 1 nobody nogroup 4277 Jun 9 03:38 ./__spark_conf__/yarn-env.sh
3539018 8 -r-x------ 1 nobody nogroup 7579 Jun 9 03:38 ./__spark_conf__/mapred-site.xml
3539073 4 -r-x------ 1 nobody nogroup 1045 Jun 9 03:38 ./__spark_conf__/container-executor.cfg
3539080 4 -r-x------ 1 nobody nogroup 1308 Jun 9 03:38 ./__spark_conf__/hadoop-policy.xml
3539064 8 -r-x------ 1 nobody nogroup 6136 Jun 9 03:38 ./__spark_conf__/core-site.xml
3539076 4 -r-x------ 1 nobody nogroup 2490 Jun 9 03:38 ./__spark_conf__/hadoop-metrics.properties
3539022 4 -r-x------ 1 nobody nogroup 247 Jun 9 03:38 ./__spark_conf__/hadoop-metrics2-adl-file-system.properties
3539067 4 -r-x------ 1 nobody nogroup 2444 Jun 9 03:38 ./__spark_conf__/capacity-scheduler.xml
3539083 8 -r-x------ 1 nobody nogroup 5142 Jun 9 03:38 ./__spark_conf__/metrics.properties
3539068 4 -r-x------ 1 nobody nogroup 2358 Jun 9 03:38 ./__spark_conf__/topology_script.py
3539079 4 -r-x------ 1 nobody nogroup 2316 Jun 9 03:38 ./__spark_conf__/ssl-client.xml.example
3539088 4 -r-x------ 1 nobody nogroup 2532 Jun 9 03:38 ./__spark_conf__/__spark_conf__.properties
3539070 4 -r-x------ 1 nobody nogroup 1335 Jun 9 03:38 ./__spark_conf__/configuration.xsl
3539077 4 -r-x------ 1 nobody nogroup 757 Jun 9 03:38 ./__spark_conf__/mapred-env.sh
3539075 8 -r-x------ 1 nobody nogroup 4221 Jun 9 03:38 ./__spark_conf__/task-log4j.properties
3539065 4 -r-x------ 1 nobody nogroup 1 Jun 9 03:38 ./__spark_conf__/dfs.exclude
3539072 12 -r-x------ 1 nobody nogroup 8543 Jun 9 03:38 ./__spark_conf__/hdfs-site.xml
3539082 4 -r-x------ 1 nobody nogroup 220 Jun 9 03:38 ./__spark_conf__/topology_mappings.data
3539023 24 -r-x------ 1 nobody nogroup 20890 Jun 9 03:38 ./__spark_conf__/yarn-site.xml
3539078 4 -r-x------ 1 nobody nogroup 1602 Jun 9 03:38 ./__spark_conf__/health_check
3539063 0 -r-x------ 1 nobody nogroup 0 Jun 9 03:38 ./__spark_conf__/yarn.exclude
3539019 8 -r-x------ 1 nobody nogroup 5693 Jun 9 03:38 ./__spark_conf__/hadoop-env.sh
3539011 44 -r-x------ 1 nobody nogroup 44846 Jun 9 03:38 ./py4j-0.9-src.zip
3539107 4 drwxr-s--- 2 nobody hadoop 4096 Jun 9 03:39 ./tmp
3538974 352 -r-x------ 1 nobody nogroup 357163 Jun 9 03:38 ./pyspark.zip
broken symlinks(find -L . -maxdepth 5 -type l -ls):
End of LogType:directory.info

LogType:launch_container.sh
Log Upload Time:Thu Jun 09 03:39:17 +0000 2016
LogLength:7145
Log Contents:
#!/bin/bash

export SPARK_YARN_MODE="true"
export SPARK_YARN_STAGING_DIR=".sparkStaging/application_1465316720094_0025"
export JAVA_HOME="/usr/lib/jvm/java-7-openjdk-amd64"
export SPARK_YARN_CACHE_FILES_VISIBILITIES="PRIVATE,PRIVATE"
export NM_AUX_SERVICE_mapreduce_shuffle="AAA0+gAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAA= "
export SPARK_YARN_CACHE_ARCHIVES="wasb://kevingre@wranglewestus.blob.core.windows.net/user/KevinGre/.sparkStaging/application_1465316720094_0025/__spark_conf__2196432329335185680.zip#__spark_conf__"
export SPARK_LOG_URL_STDERR="http://10.0.0.10:30060/node/containerlogs/container_e05_1465316720094_0025_01_000002/KevinGre/stderr?start=-4096"
export HADOOP_YARN_HOME="/usr/hdp/current/hadoop-yarn-nodemanager"
export NM_HOST="10.0.0.10"
export PYTHONPATH="/usr/hdp/current/spark-client/python:/usr/hdp/current/spark-client/python/lib/py4j-0.9-src.zip:$PWD/pyspark.zip:$PWD/py4j-0.9-src.zip"
export SPARK_YARN_CACHE_ARCHIVES_FILE_SIZES="105661"
export JVM_PID="$$"
export SPARK_HOME="/usr/hdp/current/spark-client"
export SPARK_YARN_CACHE_FILES_TIME_STAMPS="1465443535000,1465443535000"
export SPARK_USER="KevinGre"
export PWD="/mnt/resource/hadoop/yarn/local/usercache/KevinGre/appcache/application_1465316720094_0025/container_e05_1465316720094_0025_01_000002"
export NM_PORT="30050"
export LOGNAME="KevinGre"
export SPARK_LOG_URL_STDOUT="http://10.0.0.10:30060/node/containerlogs/container_e05_1465316720094_0025_01_000002/KevinGre/stdout?start=-4096"
export MALLOC_ARENA_MAX="4"
export LOG_DIRS="/mnt/resource/hadoop/yarn/log/application_1465316720094_0025/container_e05_1465316720094_0025_01_000002"
export SPARK_YARN_CACHE_FILES_FILE_SIZES="357163,44846"
export NM_HTTP_PORT="30060"
export LOCAL_DIRS="/mnt/resource/hadoop/yarn/local/usercache/KevinGre/appcache/application_1465316720094_0025"
export NM_AUX_SERVICE_spark_shuffle=""
export SPARK_YARN_CACHE_FILES="wasb://kevingre@wranglewestus.blob.core.windows.net/user/KevinGre/.sparkStaging/application_1465316720094_0025/pyspark.zip#pyspark.zip,wasb://kevingre@wranglewestus.blob.core.windows.net/user/KevinGre/.sparkStaging/application_1465316720094_0025/py4j-0.9-src.zip#py4j-0.9-src.zip"
export SPARK_YARN_CACHE_ARCHIVES_TIME_STAMPS="1465443536000"
export SPARK_DIST_CLASSPATH=":/usr/hdp/current/spark-client/lib/*:/usr/lib/hdinsight-datalake/*:/usr/hdp/current/spark-client/conf/hive-site.xml"
export CLASSPATH="$PWD:$PWD/__spark_conf__:/usr/hdp/current/spark-client/lib/spark-assembly.jar:$HADOOP_CONF_DIR:/usr/hdp/current/hadoop-client/*:/usr/hdp/current/hadoop-client/lib/*:/usr/hdp/current/hadoop-hdfs-client/*:/usr/hdp/current/hadoop-hdfs-client/lib/*:/usr/hdp/current/hadoop-yarn-client/*:/usr/hdp/current/hadoop-yarn-client/lib/*:$PWD/mr-framework/hadoop/share/hadoop/mapreduce/*:$PWD/mr-framework/hadoop/share/hadoop/mapreduce/lib/*:$PWD/mr-framework/hadoop/share/hadoop/common/*:$PWD/mr-framework/hadoop/share/hadoop/common/lib/*:$PWD/mr-framework/hadoop/share/hadoop/yarn/*:$PWD/mr-framework/hadoop/share/hadoop/yarn/lib/*:$PWD/mr-framework/hadoop/share/hadoop/hdfs/*:$PWD/mr-framework/hadoop/share/hadoop/hdfs/lib/*:$PWD/mr-framework/hadoop/share/hadoop/tools/lib/*:/usr/hdp/2.4.2.0-258/hadoop/lib/hadoop-lzo-0.6.0.2.4.2.0-258.jar:/etc/hadoop/conf/secure::/usr/hdp/current/spark-client/lib/*:/usr/lib/hdinsight-datalake/*:/usr/hdp/current/spark-client/conf/hive-site.xml"
export HADOOP_TOKEN_FILE_LOCATION="/mnt/resource/hadoop/yarn/local/usercache/KevinGre/appcache/application_1465316720094_0025/container_e05_1465316720094_0025_01_000002/container_tokens"
export SPARK_YARN_CACHE_ARCHIVES_VISIBILITIES="PRIVATE"
export USER="KevinGre"
export CONTAINER_ID="container_e05_1465316720094_0025_01_000002"
export HOME="/home/"
export PYTHONHASHSEED="0"
export HADOOP_CONF_DIR="/usr/hdp/current/hadoop-client/conf"
ln -sf "/mnt/resource/hadoop/yarn/local/usercache/KevinGre/filecache/187/__spark_conf__2196432329335185680.zip" "__spark_conf__"
hadoop_shell_errorcode=$?
if [ $hadoop_shell_errorcode -ne 0 ]
then
  exit $hadoop_shell_errorcode
fi
ln -sf "/mnt/resource/hadoop/yarn/local/usercache/KevinGre/filecache/185/pyspark.zip" "pyspark.zip"
hadoop_shell_errorcode=$?
if [ $hadoop_shell_errorcode -ne 0 ]
then
  exit $hadoop_shell_errorcode
fi
ln -sf "/mnt/resource/hadoop/yarn/local/usercache/KevinGre/filecache/186/py4j-0.9-src.zip" "py4j-0.9-src.zip"
hadoop_shell_errorcode=$?
if [ $hadoop_shell_errorcode -ne 0 ]
then
  exit $hadoop_shell_errorcode
fi
# Creating copy of launch script
cp "launch_container.sh" "/mnt/resource/hadoop/yarn/log/application_1465316720094_0025/container_e05_1465316720094_0025_01_000002/launch_container.sh"
chmod 640 "/mnt/resource/hadoop/yarn/log/application_1465316720094_0025/container_e05_1465316720094_0025_01_000002/launch_container.sh"
# Determining directory contents
echo "ls -l:" 1>"/mnt/resource/hadoop/yarn/log/application_1465316720094_0025/container_e05_1465316720094_0025_01_000002/directory.info"
ls -l 1>>"/mnt/resource/hadoop/yarn/log/application_1465316720094_0025/container_e05_1465316720094_0025_01_000002/directory.info"
echo "find -L . -maxdepth 5 -ls:" 1>>"/mnt/resource/hadoop/yarn/log/application_1465316720094_0025/container_e05_1465316720094_0025_01_000002/directory.info"
find -L . -maxdepth 5 -ls 1>>"/mnt/resource/hadoop/yarn/log/application_1465316720094_0025/container_e05_1465316720094_0025_01_000002/directory.info"
echo "broken symlinks(find -L . -maxdepth 5 -type l -ls):" 1>>"/mnt/resource/hadoop/yarn/log/application_1465316720094_0025/container_e05_1465316720094_0025_01_000002/directory.info"
find -L . -maxdepth 5 -type l -ls 1>>"/mnt/resource/hadoop/yarn/log/application_1465316720094_0025/container_e05_1465316720094_0025_01_000002/directory.info"
exec /bin/bash -c "$JAVA_HOME/bin/java -server -XX:OnOutOfMemoryError='kill %p' -Xms6144m -Xmx6144m '-Dhdp.version=' '-Detwlogger.component=sparkexecutor' '-DlogFilter.filename=SparkLogFilters.xml' '-DpatternGroup.filename=SparkPatternGroups.xml' '-Dlog4jspark.root.logger=INFO,console,DRFA,ETW,Anonymizer' '-Dlog4jspark.log.dir=/var/log/sparkapp' '-Dlog4jspark.log.file=sparkexecutor_\${user.name}.log' '-Dlog4j.configuration=file:/usr/hdp/current/spark-client/conf/log4j.properties' '-Djavax.xml.parsers.SAXParserFactory=com.sun.org.apache.xerces.internal.jaxp.SAXParserFactoryImpl' -Djava.io.tmpdir=$PWD/tmp '-Dspark.driver.port=43484' '-Dspark.history.ui.port=18080' '-Dspark.ui.port=0' -Dspark.yarn.app.container.log.dir=/mnt/resource/hadoop/yarn/log/application_1465316720094_0025/container_e05_1465316720094_0025_01_000002 -XX:MaxPermSize=256m org.apache.spark.executor.CoarseGrainedExecutorBackend --driver-url spark://CoarseGrainedScheduler@10.0.0.10:43484 --executor-id 1 --hostname 10.0.0.10 --cores 2 --app-id application_1465316720094_0025 --user-class-path file:$PWD/__app__.jar 1> /mnt/resource/hadoop/yarn/log/application_1465316720094_0025/container_e05_1465316720094_0025_01_000002/stdout 2> /mnt/resource/hadoop/yarn/log/application_1465316720094_0025/container_e05_1465316720094_0025_01_000002/stderr"
hadoop_shell_errorcode=$?
if [ $hadoop_shell_errorcode -ne 0 ]
then
  exit $hadoop_shell_errorcode
fi
End of LogType:launch_container.sh
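The whole-application dump can also be narrowed to a single container; Hadoop 2.7's yarn logs accepts a container id plus the node address shown in the section header above (flags as documented for this Hadoop line; worth double-checking on other versions):

    # Fetch logs for one container only
    yarn logs -applicationId application_1465316720094_0025 \
        -containerId container_e05_1465316720094_0025_01_000002 \
        -nodeAddress 10.0.0.10:30050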
LogType:stderr
Log Upload Time:Thu Jun 09 03:39:17 +0000 2016
LogLength:11660
Log Contents:
SLF4J: Class path contains multiple SLF4J bindings.
SLF4J: Found binding in [jar:file:/usr/hdp/2.4.2.0-258/spark/lib/spark-assembly-1.6.1.2.4.2.0-258-hadoop2.7.1.2.4.2.0-258.jar!/org/slf4j/impl/StaticLoggerBinder.class]
SLF4J: Found binding in [jar:file:/usr/hdp/2.4.2.0-258/hadoop/lib/slf4j-log4j12-1.7.10.jar!/org/slf4j/impl/StaticLoggerBinder.class]
SLF4J: Found binding in [jar:file:/usr/hdp/2.4.2.0-258/spark/lib/spark-examples-1.6.1.2.4.2.0-258-hadoop2.7.1.2.4.2.0-258.jar!/org/slf4j/impl/StaticLoggerBinder.class]
SLF4J: See http://www.slf4j.org/codes.html#multiple_bindings for an explanation.
SLF4J: Actual binding is of type [org.slf4j.impl.Log4jLoggerFactory]
16/06/09 03:39:07 INFO CoarseGrainedExecutorBackend: Registered signal handlers for [TERM, HUP, INT]
16/06/09 03:39:08 WARN NativeCodeLoader: Unable to load native-hadoop library for your platform... using builtin-java classes where applicable
16/06/09 03:39:08 INFO SecurityManager: Changing view acls to: nobody,KevinGre
16/06/09 03:39:08 INFO SecurityManager: Changing modify acls to: nobody,KevinGre
16/06/09 03:39:08 INFO SecurityManager: SecurityManager: authentication disabled; ui acls disabled; users with view permissions: Set(nobody, KevinGre); users with modify permissions: Set(nobody, KevinGre)
16/06/09 03:39:09 INFO SecurityManager: Changing view acls to: nobody,KevinGre
16/06/09 03:39:09 INFO SecurityManager: Changing modify acls to: nobody,KevinGre
16/06/09 03:39:09 INFO SecurityManager: SecurityManager: authentication disabled; ui acls disabled; users with view permissions: Set(nobody, KevinGre); users with modify permissions: Set(nobody, KevinGre)
16/06/09 03:39:09 INFO Slf4jLogger: Slf4jLogger started
16/06/09 03:39:09 INFO Remoting: Starting remoting
16/06/09 03:39:10 INFO Remoting: Remoting started; listening on addresses :[akka.tcp://sparkExecutorActorSystem@10.0.0.10:43940]
16/06/09 03:39:10 INFO Utils: Successfully started service 'sparkExecutorActorSystem' on port 43940.
16/06/09 03:39:10 INFO DiskBlockManager: Created local directory at /mnt/resource/hadoop/yarn/local/usercache/KevinGre/appcache/application_1465316720094_0025/blockmgr-05ae5f2d-b0c7-4376-bb01-6e63d00087af
16/06/09 03:39:10 INFO MemoryStore: MemoryStore started with capacity 4.1 GB
16/06/09 03:39:10 INFO CoarseGrainedExecutorBackend: Connecting to driver: spark://CoarseGrainedScheduler@10.0.0.10:43484
16/06/09 03:39:10 INFO CoarseGrainedExecutorBackend: Successfully registered with driver
16/06/09 03:39:10 INFO Executor: Starting executor ID 1 on host 10.0.0.10
16/06/09 03:39:10 INFO Utils: Successfully started service 'org.apache.spark.network.netty.NettyBlockTransferService' on port 36659.
16/06/09 03:39:10 INFO NettyBlockTransferService: Server created on 36659
16/06/09 03:39:10 INFO BlockManagerMaster: Trying to register BlockManager
16/06/09 03:39:10 INFO BlockManagerMaster: Registered BlockManager
16/06/09 03:39:13 INFO CoarseGrainedExecutorBackend: Got assigned task 1
16/06/09 03:39:13 INFO CoarseGrainedExecutorBackend: Got assigned task 3
16/06/09 03:39:13 INFO Executor: Running task 3.0 in stage 0.0 (TID 3)
16/06/09 03:39:13 INFO Executor: Running task 1.0 in stage 0.0 (TID 1)
16/06/09 03:39:13 INFO TorrentBroadcast: Started reading broadcast variable 0
16/06/09 03:39:13 INFO MemoryStore: Block broadcast_0_piece0 stored as bytes in memory (estimated size 2.6 KB, free 2.6 KB)
16/06/09 03:39:13 INFO TorrentBroadcast: Reading broadcast variable 0 took 111 ms
16/06/09 03:39:13 INFO MemoryStore: Block broadcast_0 stored as values in memory (estimated size 3.9 KB, free 6.6 KB)
16/06/09 03:39:14 INFO PythonRunner: Times: total = 1007, boot = 991, init = 16, finish = 0
16/06/09 03:39:14 INFO PythonRunner: Times: total = 1007, boot = 996, init = 11, finish = 0
16/06/09 03:39:14 INFO Executor: Finished task 1.0 in stage 0.0 (TID 1). 963 bytes result sent to driver
16/06/09 03:39:14 INFO Executor: Finished task 3.0 in stage 0.0 (TID 3). 1163 bytes result sent to driver
16/06/09 03:39:15 INFO CoarseGrainedExecutorBackend: Driver commanded a shutdown
16/06/09 03:39:15 INFO metrics: type=GAUGE, name=application_1465316720094_0025.1.executor.filesystem.file.largeRead_ops, value=0
16/06/09 03:39:15 INFO metrics: type=GAUGE, name=application_1465316720094_0025.1.executor.filesystem.file.read_bytes, value=0
16/06/09 03:39:15 INFO metrics: type=GAUGE, name=application_1465316720094_0025.1.executor.filesystem.file.read_ops, value=0
16/06/09 03:39:15 INFO metrics: type=GAUGE, name=application_1465316720094_0025.1.executor.filesystem.file.write_bytes, value=0
16/06/09 03:39:15 INFO metrics: type=GAUGE, name=application_1465316720094_0025.1.executor.filesystem.file.write_ops, value=0
16/06/09 03:39:15 INFO metrics: type=GAUGE, name=application_1465316720094_0025.1.executor.filesystem.hdfs.largeRead_ops, value=0
16/06/09 03:39:15 INFO metrics: type=GAUGE, name=application_1465316720094_0025.1.executor.filesystem.hdfs.read_bytes, value=0
16/06/09 03:39:15 INFO metrics: type=GAUGE, name=application_1465316720094_0025.1.executor.filesystem.hdfs.read_ops, value=0
16/06/09 03:39:15 INFO metrics: type=GAUGE, name=application_1465316720094_0025.1.executor.filesystem.hdfs.write_bytes, value=0
16/06/09 03:39:15 INFO metrics: type=GAUGE, name=application_1465316720094_0025.1.executor.filesystem.hdfs.write_ops, value=0
16/06/09 03:39:15 INFO metrics: type=GAUGE, name=application_1465316720094_0025.1.executor.threadpool.activeTasks, value=0
16/06/09 03:39:15 INFO metrics: type=GAUGE, name=application_1465316720094_0025.1.executor.threadpool.completeTasks, value=2
16/06/09 03:39:15 INFO metrics: type=GAUGE, name=application_1465316720094_0025.1.executor.threadpool.currentPool_size, value=2
16/06/09 03:39:15 INFO metrics: type=GAUGE, name=application_1465316720094_0025.1.executor.threadpool.maxPool_size, value=2147483647
16/06/09 03:39:15 INFO metrics: type=GAUGE, name=application_1465316720094_0025.1.jvm.PS-MarkSweep.count, value=0
16/06/09 03:39:15 INFO metrics: type=GAUGE, name=application_1465316720094_0025.1.jvm.PS-MarkSweep.time, value=0
16/06/09 03:39:15 INFO metrics: type=GAUGE, name=application_1465316720094_0025.1.jvm.PS-Scavenge.count, value=0
16/06/09 03:39:15 INFO metrics: type=GAUGE, name=application_1465316720094_0025.1.jvm.PS-Scavenge.time, value=0
16/06/09 03:39:15 INFO metrics: type=GAUGE, name=application_1465316720094_0025.1.jvm.heap.committed, value=6174015488
16/06/09 03:39:15 INFO metrics: type=GAUGE, name=application_1465316720094_0025.1.jvm.heap.init, value=6442450944
16/06/09 03:39:15 INFO metrics: type=GAUGE, name=application_1465316720094_0025.1.jvm.heap.max, value=6174015488
16/06/09 03:39:15 INFO metrics: type=GAUGE, name=application_1465316720094_0025.1.jvm.heap.usage, value=0.24526525839515353
16/06/09 03:39:15 INFO metrics: type=GAUGE, name=application_1465316720094_0025.1.jvm.heap.used, value=1514271504
16/06/09 03:39:15 INFO metrics: type=GAUGE, name=application_1465316720094_0025.1.jvm.non-heap.committed, value=45547520
16/06/09 03:39:15 INFO metrics: type=GAUGE, name=application_1465316720094_0025.1.jvm.non-heap.init, value=24576000
16/06/09 03:39:15 INFO metrics: type=GAUGE, name=application_1465316720094_0025.1.jvm.non-heap.max, value=318767104
16/06/09 03:39:15 INFO metrics: type=GAUGE, name=application_1465316720094_0025.1.jvm.non-heap.usage, value=0.13824954785798726
16/06/09 03:39:15 INFO metrics: type=GAUGE, name=application_1465316720094_0025.1.jvm.non-heap.used, value=44069408
16/06/09 03:39:15 INFO metrics: type=GAUGE, name=application_1465316720094_0025.1.jvm.pools.Code-Cache.committed, value=2555904
16/06/09 03:39:15 INFO metrics: type=GAUGE, name=application_1465316720094_0025.1.jvm.pools.Code-Cache.init, value=2555904
16/06/09 03:39:15 INFO metrics: type=GAUGE, name=application_1465316720094_0025.1.jvm.pools.Code-Cache.max, value=50331648
16/06/09 03:39:15 INFO metrics: type=GAUGE, name=application_1465316720094_0025.1.jvm.pools.Code-Cache.usage, value=0.030451456705729168
16/06/09 03:39:15 INFO metrics: type=GAUGE, name=application_1465316720094_0025.1.jvm.pools.Code-Cache.used, value=1532672
16/06/09 03:39:15 INFO metrics: type=GAUGE, name=application_1465316720094_0025.1.jvm.pools.PS-Eden-Space.committed, value=1610612736
16/06/09 03:39:15 INFO metrics: type=GAUGE, name=application_1465316720094_0025.1.jvm.pools.PS-Eden-Space.init, value=1610612736
16/06/09 03:39:15 INFO metrics: type=GAUGE, name=application_1465316720094_0025.1.jvm.pools.PS-Eden-Space.max, value=1610612736
16/06/09 03:39:15 INFO metrics: type=GAUGE, name=application_1465316720094_0025.1.jvm.pools.PS-Eden-Space.usage, value=0.9401834905147552
16/06/09 03:39:15 INFO metrics: type=GAUGE, name=application_1465316720094_0025.1.jvm.pools.PS-Eden-Space.used, value=1514271504
16/06/09 03:39:15 INFO metrics: type=GAUGE, name=application_1465316720094_0025.1.jvm.pools.PS-Old-Gen.committed, value=4294967296
16/06/09 03:39:15 INFO metrics: type=GAUGE, name=application_1465316720094_0025.1.jvm.pools.PS-Old-Gen.init, value=4294967296
16/06/09 03:39:15 INFO metrics: type=GAUGE, name=application_1465316720094_0025.1.jvm.pools.PS-Old-Gen.max, value=4294967296
16/06/09 03:39:15 INFO metrics: type=GAUGE, name=application_1465316720094_0025.1.jvm.pools.PS-Old-Gen.usage, value=0.0
16/06/09 03:39:15 INFO metrics: type=GAUGE, name=application_1465316720094_0025.1.jvm.pools.PS-Old-Gen.used, value=0
16/06/09 03:39:15 INFO metrics: type=GAUGE, name=application_1465316720094_0025.1.jvm.pools.PS-Perm-Gen.committed, value=42991616
16/06/09 03:39:15 INFO metrics: type=GAUGE, name=application_1465316720094_0025.1.jvm.pools.PS-Perm-Gen.init, value=22020096
16/06/09 03:39:15 INFO metrics: type=GAUGE, name=application_1465316720094_0025.1.jvm.pools.PS-Perm-Gen.max, value=268435456
16/06/09 03:39:15 INFO metrics: type=GAUGE, name=application_1465316720094_0025.1.jvm.pools.PS-Perm-Gen.usage, value=0.158465176820755
16/06/09 03:39:15 INFO metrics: type=GAUGE, name=application_1465316720094_0025.1.jvm.pools.PS-Perm-Gen.used, value=42537672
16/06/09 03:39:15 INFO metrics: type=GAUGE, name=application_1465316720094_0025.1.jvm.pools.PS-Survivor-Space.committed, value=268435456
16/06/09 03:39:15 INFO metrics: type=GAUGE, name=application_1465316720094_0025.1.jvm.pools.PS-Survivor-Space.init, value=268435456
16/06/09 03:39:15 INFO metrics: type=GAUGE, name=application_1465316720094_0025.1.jvm.pools.PS-Survivor-Space.max, value=268435456
16/06/09 03:39:15 INFO metrics: type=GAUGE, name=application_1465316720094_0025.1.jvm.pools.PS-Survivor-Space.usage, value=0.0
16/06/09 03:39:15 INFO metrics: type=GAUGE, name=application_1465316720094_0025.1.jvm.pools.PS-Survivor-Space.used, value=0
16/06/09 03:39:15 INFO metrics: type=GAUGE, name=application_1465316720094_0025.1.jvm.total.committed, value=6219563008
16/06/09 03:39:15 INFO metrics: type=GAUGE, name=application_1465316720094_0025.1.jvm.total.init, value=6467026944
16/06/09 03:39:15 INFO metrics: type=GAUGE, name=application_1465316720094_0025.1.jvm.total.max, value=6492782592
16/06/09 03:39:15 INFO metrics: type=GAUGE, name=application_1465316720094_0025.1.jvm.total.used, value=1558346728
16/06/09 03:39:15 INFO MemoryStore: MemoryStore cleared
16/06/09 03:39:15 INFO BlockManager: BlockManager stopped
16/06/09 03:39:15 INFO RemoteActorRefProvider$RemotingTerminator: Shutting down remote daemon.
16/06/09 03:39:15 ERROR CoarseGrainedExecutorBackend: Driver 10.0.0.10:43484 disassociated! Shutting down.
16/06/09 03:39:15 INFO RemoteActorRefProvider$RemotingTerminator: Remote daemon shut down; proceeding with flushing remote transports.
16/06/09 03:39:15 INFO ShutdownHookManager: Shutdown hook called
End of LogType:stderr

LogType:stdout
Log Upload Time:Thu Jun 09 03:39:17 +0000 2016
LogLength:0
Log Contents:
End of LogType:stdout
Container: container_e05_1465316720094_0025_01_000001 on 10.0.0.10_30050
==========================================================================
LogType:directory.info
Log Upload Time:Thu Jun 09 03:39:17 +0000 2016
LogLength:4977
Log Contents:
ls -l:
total 32
-rw------- 1 nobody hadoop 74 Jun 9 03:38 container_tokens
-rwx------ 1 nobody hadoop 7165 Jun 9 03:38 launch_container.sh
lrwxrwxrwx 1 nobody hadoop 73 Jun 9 03:38 probe.py -> /mnt/resource/hadoop/yarn/local/usercache/KevinGre/filecache/188/probe.py
lrwxrwxrwx 1 nobody hadoop 81 Jun 9 03:38 py4j-0.9-src.zip -> /mnt/resource/hadoop/yarn/local/usercache/KevinGre/filecache/186/py4j-0.9-src.zip
lrwxrwxrwx 1 nobody hadoop 76 Jun 9 03:38 pyspark.zip -> /mnt/resource/hadoop/yarn/local/usercache/KevinGre/filecache/185/pyspark.zip
lrwxrwxrwx 1 nobody hadoop 102 Jun 9 03:38 __spark_conf__ -> /mnt/resource/hadoop/yarn/local/usercache/KevinGre/filecache/187/__spark_conf__2196432329335185680.zip
drwxr-s--- 2 nobody hadoop 4096 Jun 9 03:38 tmp
find -L . -maxdepth 5 -ls:
3538947 4 drwxr-s--- 3 nobody hadoop 4096 Jun 9 03:38 .
3539016 4 -r-x------ 1 nobody nogroup 403 Jun 9 03:38 ./probe.py
3539092 8 -rwx------ 1 nobody hadoop 7165 Jun 9 03:38 ./launch_container.sh
3539093 4 -rw------- 1 nobody hadoop 74 Jun 9 03:38 ./container_tokens
3539017 4 drwx------ 2 nobody nogroup 4096 Jun 9 03:38 ./__spark_conf__
3539084 4 -r-x------ 1 nobody nogroup 1000 Jun 9 03:38 ./__spark_conf__/ssl-server.xml
3539069 4 -r-x------ 1 nobody nogroup 758 Jun 9 03:38 ./__spark_conf__/mapred-site.xml.template
3539020 4 -r-x------ 1 nobody nogroup 3156 Jun 9 03:38 ./__spark_conf__/log4j.properties
3539087 8 -r-x------ 1 nobody nogroup 4113 Jun 9 03:38 ./__spark_conf__/mapred-queues.xml.template
3539086 4 -r-x------ 1 nobody nogroup 945 Jun 9 03:38 ./__spark_conf__/taskcontroller.cfg
3539066 4 -r-x------ 1 nobody nogroup 744 Jun 9 03:38 ./__spark_conf__/ssl-client.xml
3539081 4 -r-x------ 1 nobody nogroup 127 Jun 9 03:38 ./__spark_conf__/slaves
3539085 4 -r-x------ 1 nobody nogroup 2268 Jun 9 03:38 ./__spark_conf__/ssl-server.xml.example
3539074 4 -r-x------ 1 nobody nogroup 1020 Jun 9 03:38 ./__spark_conf__/commons-logging.properties
3539021 8 -r-x------ 1 nobody nogroup 5640 Jun 9 03:38 ./__spark_conf__/hadoop-metrics2.properties
3539024 4 -r-x------ 1 nobody nogroup 265 Jun 9 03:38 ./__spark_conf__/hadoop-metrics2-azure-file-system.properties
3539071 8 -r-x------ 1 nobody nogroup 4277 Jun 9 03:38 ./__spark_conf__/yarn-env.sh
3539018 8 -r-x------ 1 nobody nogroup 7579 Jun 9 03:38 ./__spark_conf__/mapred-site.xml
3539073 4 -r-x------ 1 nobody nogroup 1045 Jun 9 03:38 ./__spark_conf__/container-executor.cfg
3539080 4 -r-x------ 1 nobody nogroup 1308 Jun 9 03:38 ./__spark_conf__/hadoop-policy.xml
3539064 8 -r-x------ 1 nobody nogroup 6136 Jun 9 03:38 ./__spark_conf__/core-site.xml
3539076 4 -r-x------ 1 nobody nogroup 2490 Jun 9 03:38 ./__spark_conf__/hadoop-metrics.properties
3539022 4 -r-x------ 1 nobody nogroup 247 Jun 9 03:38 ./__spark_conf__/hadoop-metrics2-adl-file-system.properties
3539067 4 -r-x------ 1 nobody nogroup 2444 Jun 9 03:38 ./__spark_conf__/capacity-scheduler.xml
3539083 8 -r-x------ 1 nobody nogroup 5142 Jun 9 03:38 ./__spark_conf__/metrics.properties
3539068 4 -r-x------ 1 nobody nogroup 2358 Jun 9 03:38 ./__spark_conf__/topology_script.py
3539079 4 -r-x------ 1 nobody nogroup 2316 Jun 9 03:38 ./__spark_conf__/ssl-client.xml.example
3539088 4 -r-x------ 1 nobody nogroup 2532 Jun 9 03:38 ./__spark_conf__/__spark_conf__.properties
3539070 4 -r-x------ 1 nobody nogroup 1335 Jun 9 03:38 ./__spark_conf__/configuration.xsl
3539077 4 -r-x------ 1 nobody nogroup 757 Jun 9 03:38 ./__spark_conf__/mapred-env.sh
3539075 8 -r-x------ 1 nobody nogroup 4221 Jun 9 03:38 ./__spark_conf__/task-log4j.properties
3539065 4 -r-x------ 1 nobody nogroup 1 Jun 9 03:38 ./__spark_conf__/dfs.exclude
3539072 12 -r-x------ 1 nobody nogroup 8543 Jun 9 03:38 ./__spark_conf__/hdfs-site.xml
3539082 4 -r-x------ 1 nobody nogroup 220 Jun 9 03:38 ./__spark_conf__/topology_mappings.data
3539023 24 -r-x------ 1 nobody nogroup 20890 Jun 9 03:38 ./__spark_conf__/yarn-site.xml
3539078 4 -r-x------ 1 nobody nogroup 1602 Jun 9 03:38 ./__spark_conf__/health_check
3539063 0 -r-x------ 1 nobody nogroup 0 Jun 9 03:38 ./__spark_conf__/yarn.exclude
3539019 8 -r-x------ 1 nobody nogroup 5693 Jun 9 03:38 ./__spark_conf__/hadoop-env.sh
3539011 44 -r-x------ 1 nobody nogroup 44846 Jun 9 03:38 ./py4j-0.9-src.zip
3539091 4 drwxr-s--- 2 nobody hadoop 4096 Jun 9 03:38 ./tmp
3538974 352 -r-x------ 1 nobody nogroup 357163 Jun 9 03:38 ./pyspark.zip
broken symlinks(find -L . -maxdepth 5 -type l -ls):
End of LogType:directory.info

LogType:launch_container.sh
Log Upload Time:Thu Jun 09 03:39:17 +0000 2016
LogLength:7165
Log Contents:
#!/bin/bash

export SPARK_YARN_MODE="true"
export SPARK_YARN_STAGING_DIR=".sparkStaging/application_1465316720094_0025"
export JAVA_HOME="/usr/lib/jvm/java-7-openjdk-amd64"
export SPARK_YARN_CACHE_FILES_VISIBILITIES="PRIVATE,PRIVATE"
export NM_AUX_SERVICE_mapreduce_shuffle="AAA0+gAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAA= "
export SPARK_YARN_CACHE_ARCHIVES="wasb://kevingre@wranglewestus.blob.core.windows.net/user/KevinGre/.sparkStaging/application_1465316720094_0025/__spark_conf__2196432329335185680.zip#__spark_conf__"
export HADOOP_YARN_HOME="/usr/hdp/current/hadoop-yarn-nodemanager"
export NM_HOST="10.0.0.10"
export PYTHONPATH="/usr/hdp/current/spark-client/python:/usr/hdp/current/spark-client/python/lib/py4j-0.9-src.zip:$PWD/pyspark.zip:$PWD/py4j-0.9-src.zip"
export APPLICATION_WEB_PROXY_BASE="/proxy/application_1465316720094_0025"
export SPARK_YARN_CACHE_ARCHIVES_FILE_SIZES="105661"
export JVM_PID="$$"
export SPARK_USER="KevinGre"
export SPARK_YARN_CACHE_FILES_TIME_STAMPS="1465443535000,1465443535000"
export PWD="/mnt/resource/hadoop/yarn/local/usercache/KevinGre/appcache/application_1465316720094_0025/container_e05_1465316720094_0025_01_000001"
export NM_PORT="30050"
export LOGNAME="KevinGre"
export APP_SUBMIT_TIME_ENV="1465443536304"
export MAX_APP_ATTEMPTS="1"
export MALLOC_ARENA_MAX="4"
export LOG_DIRS="/mnt/resource/hadoop/yarn/log/application_1465316720094_0025/container_e05_1465316720094_0025_01_000001"
export SPARK_YARN_CACHE_FILES_FILE_SIZES="357163,44846"
export LOCAL_DIRS="/mnt/resource/hadoop/yarn/local/usercache/KevinGre/appcache/application_1465316720094_0025"
export NM_HTTP_PORT="30060"
export NM_AUX_SERVICE_spark_shuffle=""
export SPARK_YARN_CACHE_FILES="wasb://kevingre@wranglewestus.blob.core.windows.net/user/KevinGre/.sparkStaging/application_1465316720094_0025/pyspark.zip#pyspark.zip,wasb://kevingre@wranglewestus.blob.core.windows.net/user/KevinGre/.sparkStaging/application_1465316720094_0025/py4j-0.9-src.zip#py4j-0.9-src.zip"
export SPARK_YARN_CACHE_ARCHIVES_TIME_STAMPS="1465443536000"
export SPARK_DIST_CLASSPATH=":/usr/hdp/current/spark-client/lib/*:/usr/lib/hdinsight-datalake/*:/usr/hdp/current/spark-client/conf/hive-site.xml"
export CLASSPATH="$PWD:$PWD/__spark_conf__:/usr/hdp/current/spark-client/lib/spark-assembly.jar:$HADOOP_CONF_DIR:/usr/hdp/current/hadoop-client/*:/usr/hdp/current/hadoop-client/lib/*:/usr/hdp/current/hadoop-hdfs-client/*:/usr/hdp/current/hadoop-hdfs-client/lib/*:/usr/hdp/current/hadoop-yarn-client/*:/usr/hdp/current/hadoop-yarn-client/lib/*:$PWD/mr-framework/hadoop/share/hadoop/mapreduce/*:$PWD/mr-framework/hadoop/share/hadoop/mapreduce/lib/*:$PWD/mr-framework/hadoop/share/hadoop/common/*:$PWD/mr-framework/hadoop/share/hadoop/common/lib/*:$PWD/mr-framework/hadoop/share/hadoop/yarn/*:$PWD/mr-framework/hadoop/share/hadoop/yarn/lib/*:$PWD/mr-framework/hadoop/share/hadoop/hdfs/*:$PWD/mr-framework/hadoop/share/hadoop/hdfs/lib/*:$PWD/mr-framework/hadoop/share/hadoop/tools/lib/*:/usr/hdp/2.4.2.0-258/hadoop/lib/hadoop-lzo-0.6.0.2.4.2.0-258.jar:/etc/hadoop/conf/secure::/usr/hdp/current/spark-client/lib/*:/usr/lib/hdinsight-datalake/*:/usr/hdp/current/spark-client/conf/hive-site.xml"
export HADOOP_TOKEN_FILE_LOCATION="/mnt/resource/hadoop/yarn/local/usercache/KevinGre/appcache/application_1465316720094_0025/container_e05_1465316720094_0025_01_000001/container_tokens"
export PYSPARK_PYTHON="/usr/bin/anaconda/bin/python"
export SPARK_YARN_CACHE_ARCHIVES_VISIBILITIES="PRIVATE"
export USER="KevinGre"
export PYSPARK_DRIVER_PYTHON="/usr/bin/anaconda/bin/python"
export CONTAINER_ID="container_e05_1465316720094_0025_01_000001"
export HOME="/home/"
export PYTHONHASHSEED="0"
export HADOOP_CONF_DIR="/usr/hdp/current/hadoop-client/conf"
ln -sf "/mnt/resource/hadoop/yarn/local/usercache/KevinGre/filecache/188/probe.py" "probe.py"
hadoop_shell_errorcode=$?
if [ $hadoop_shell_errorcode -ne 0 ]
then
  exit $hadoop_shell_errorcode
fi
ln -sf "/mnt/resource/hadoop/yarn/local/usercache/KevinGre/filecache/187/__spark_conf__2196432329335185680.zip" "__spark_conf__"
hadoop_shell_errorcode=$?
if [ $hadoop_shell_errorcode -ne 0 ]
then
  exit $hadoop_shell_errorcode
fi
ln -sf "/mnt/resource/hadoop/yarn/local/usercache/KevinGre/filecache/185/pyspark.zip" "pyspark.zip"
hadoop_shell_errorcode=$?
if [ $hadoop_shell_errorcode -ne 0 ]
then
  exit $hadoop_shell_errorcode
fi
ln -sf "/mnt/resource/hadoop/yarn/local/usercache/KevinGre/filecache/186/py4j-0.9-src.zip" "py4j-0.9-src.zip"
hadoop_shell_errorcode=$?
if [ $hadoop_shell_errorcode -ne 0 ]
then
  exit $hadoop_shell_errorcode
fi
# Creating copy of launch script
cp "launch_container.sh" "/mnt/resource/hadoop/yarn/log/application_1465316720094_0025/container_e05_1465316720094_0025_01_000001/launch_container.sh"
chmod 640 "/mnt/resource/hadoop/yarn/log/application_1465316720094_0025/container_e05_1465316720094_0025_01_000001/launch_container.sh"
# Determining directory contents
echo "ls -l:" 1>"/mnt/resource/hadoop/yarn/log/application_1465316720094_0025/container_e05_1465316720094_0025_01_000001/directory.info"
ls -l 1>>"/mnt/resource/hadoop/yarn/log/application_1465316720094_0025/container_e05_1465316720094_0025_01_000001/directory.info"
echo "find -L . -maxdepth 5 -ls:" 1>>"/mnt/resource/hadoop/yarn/log/application_1465316720094_0025/container_e05_1465316720094_0025_01_000001/directory.info"
find -L . -maxdepth 5 -ls 1>>"/mnt/resource/hadoop/yarn/log/application_1465316720094_0025/container_e05_1465316720094_0025_01_000001/directory.info"
echo "broken symlinks(find -L . -maxdepth 5 -type l -ls):" 1>>"/mnt/resource/hadoop/yarn/log/application_1465316720094_0025/container_e05_1465316720094_0025_01_000001/directory.info"
find -L . -maxdepth 5 -type l -ls 1>>"/mnt/resource/hadoop/yarn/log/application_1465316720094_0025/container_e05_1465316720094_0025_01_000001/directory.info"
exec /bin/bash -c "$JAVA_HOME/bin/java -server -Xmx1024m -Djava.io.tmpdir=$PWD/tmp -Dhdp.version=2.4.2.0-258 '-Detwlogger.component=sparkdriver' '-DlogFilter.filename=SparkLogFilters.xml' '-DpatternGroup.filename=SparkPatternGroups.xml' '-Dlog4jspark.root.logger=INFO,console,DRFA,ETW,Anonymizer' '-Dlog4jspark.log.dir=/var/log/sparkapp' '-Dlog4jspark.log.file=sparkdriver_\${user.name}.log' '-Djava.io.tmpdir=/var/tmp/spark' '-Dlog4j.configuration=file:/usr/hdp/current/spark-client/conf/log4j.properties' '-Djavax.xml.parsers.SAXParserFactory=com.sun.org.apache.xerces.internal.jaxp.SAXParserFactoryImpl' -Dspark.yarn.app.container.log.dir=/mnt/resource/hadoop/yarn/log/application_1465316720094_0025/container_e05_1465316720094_0025_01_000001 -XX:MaxPermSize=256m org.apache.spark.deploy.yarn.ApplicationMaster --class 'org.apache.spark.deploy.PythonRunner' --primary-py-file probe.py --executor-memory 6144m --executor-cores 2 --properties-file $PWD/__spark_conf__/__spark_conf__.properties 1> /mnt/resource/hadoop/yarn/log/application_1465316720094_0025/container_e05_1465316720094_0025_01_000001/stdout 2> /mnt/resource/hadoop/yarn/log/application_1465316720094_0025/container_e05_1465316720094_0025_01_000001/stderr"
hadoop_shell_errorcode=$?
if [ $hadoop_shell_errorcode -ne 0 ]
then
  exit $hadoop_shell_errorcode
fi
End of LogType:launch_container.sh

LogType:stderr
Log Upload Time:Thu Jun 09 03:39:17 +0000 2016
LogLength:32625
Log Contents:
SLF4J: Class path contains multiple SLF4J bindings.
SLF4J: Found binding in [jar:file:/usr/hdp/2.4.2.0-258/spark/lib/spark-assembly-1.6.1.2.4.2.0-258-hadoop2.7.1.2.4.2.0-258.jar!/org/slf4j/impl/StaticLoggerBinder.class]
SLF4J: Found binding in [jar:file:/usr/hdp/2.4.2.0-258/hadoop/lib/slf4j-log4j12-1.7.10.jar!/org/slf4j/impl/StaticLoggerBinder.class]
SLF4J: Found binding in [jar:file:/usr/hdp/2.4.2.0-258/spark/lib/spark-examples-1.6.1.2.4.2.0-258-hadoop2.7.1.2.4.2.0-258.jar!/org/slf4j/impl/StaticLoggerBinder.class]
SLF4J: See http://www.slf4j.org/codes.html#multiple_bindings for an explanation.
SLF4J: Actual binding is of type [org.slf4j.impl.Log4jLoggerFactory]
16/06/09 03:39:00 INFO ApplicationMaster: Registered signal handlers for [TERM, HUP, INT]
16/06/09 03:39:01 WARN NativeCodeLoader: Unable to load native-hadoop library for your platform... using builtin-java classes where applicable
16/06/09 03:39:01 INFO ApplicationMaster: ApplicationAttemptId: appattempt_1465316720094_0025_000001
16/06/09 03:39:01 INFO MetricsConfig: loaded properties from hadoop-metrics2-azure-file-system.properties
16/06/09 03:39:01 INFO WasbAzureIaasSink: Init starting.
16/06/09 03:39:01 INFO AzureIaasSink: Init starting. Initializing MdsLogger.
16/06/09 03:39:01 INFO AzureIaasSink: Init completed.
16/06/09 03:39:01 INFO WasbAzureIaasSink: Init completed.
16/06/09 03:39:01 INFO MetricsSinkAdapter: Sink azurefs2 started
16/06/09 03:39:01 INFO MetricsSystemImpl: Scheduled snapshot period at 60 second(s).
16/06/09 03:39:01 INFO MetricsSystemImpl: azure-file-system metrics system started 16/06/09 03:39:01 INFO SecurityManager: Changing view acls to: nobody,KevinGre 16/06/09 03:39:01 INFO SecurityManager: Changing modify acls to: nobody,KevinGre 16/06/09 03:39:01 INFO SecurityManager: SecurityManager: authentication disabled; ui acls disabled; users with view permissions: Set(nobody, KevinGre); users with modify permissions: Set(nobody, KevinGre) 16/06/09 03:39:02 INFO ApplicationMaster: Starting the user application in a separate Thread 16/06/09 03:39:02 INFO ApplicationMaster: Waiting for spark context initialization 16/06/09 03:39:02 INFO ApplicationMaster: Waiting for spark context initialization ... 16/06/09 03:39:03 INFO SparkContext: Running Spark version 1.6.1 16/06/09 03:39:03 INFO SecurityManager: Changing view acls to: nobody,KevinGre 16/06/09 03:39:03 INFO SecurityManager: Changing modify acls to: nobody,KevinGre 16/06/09 03:39:03 INFO SecurityManager: SecurityManager: authentication disabled; ui acls disabled; users with view permissions: Set(nobody, KevinGre); users with modify permissions: Set(nobody, KevinGre) 16/06/09 03:39:03 INFO Utils: Successfully started service 'sparkDriver' on port 43484. 16/06/09 03:39:03 INFO Slf4jLogger: Slf4jLogger started 16/06/09 03:39:03 INFO Remoting: Starting remoting 16/06/09 03:39:04 INFO Remoting: Remoting started; listening on addresses :[akka.tcp://sparkDriverActorSystem@10.0.0.10:42688] 16/06/09 03:39:04 INFO Utils: Successfully started service 'sparkDriverActorSystem' on port 42688. 16/06/09 03:39:04 INFO SparkEnv: Registering MapOutputTracker 16/06/09 03:39:04 INFO SparkEnv: Registering BlockManagerMaster 16/06/09 03:39:04 INFO DiskBlockManager: Created local directory at /mnt/resource/hadoop/yarn/local/usercache/KevinGre/appcache/application_1465316720094_0025/blockmgr-84c8205d-dd7a-4f4e-ac59-ce6bd0e1fe27 16/06/09 03:39:04 INFO MemoryStore: MemoryStore started with capacity 457.9 MB 16/06/09 03:39:04 INFO SparkEnv: Registering OutputCommitCoordinator 16/06/09 03:39:04 INFO JettyUtils: Adding filter: org.apache.hadoop.yarn.server.webproxy.amfilter.AmIpFilter 16/06/09 03:39:04 INFO Server: jetty-8.y.z-SNAPSHOT 16/06/09 03:39:04 INFO AbstractConnector: Started SelectChannelConnector@0.0.0.0:38830 16/06/09 03:39:04 INFO Utils: Successfully started service 'SparkUI' on port 38830. 16/06/09 03:39:04 INFO SparkUI: Bound SparkUI to 0.0.0.0, and started at http://10.0.0.10:38830 16/06/09 03:39:04 INFO YarnClusterScheduler: Created YarnClusterScheduler 16/06/09 03:39:04 INFO SchedulerExtensionServices: Starting Yarn extension services with app application_1465316720094_0025 and attemptId Some(appattempt_1465316720094_0025_000001) 16/06/09 03:39:04 INFO Utils: Successfully started service 'org.apache.spark.network.netty.NettyBlockTransferService' on port 43435. 
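Up to this point the stderr makes the yarn-cluster topology concrete: the SparkContext, i.e. the driver, is running inside AM container _000001, so the "driver:" values probe.py prints come from the launch_container.sh exports above (PYTHONHASHSEED=0 and the Anaconda PYSPARK_PYTHON), not from the shell that ran spark-submit. Pinning PYTHONHASHSEED is load-bearing: a Python 3 interpreter picks a fresh random string-hash seed per process unless the variable is set. A minimal local illustration of that effect (a sketch, not part of this session; it needs a Python 3 interpreter to show a difference):

import subprocess
import sys

def child_hash(seed):
    # Spawn a child interpreter with a controlled PYTHONHASHSEED and
    # report what it hashes the string 'probe' to.
    out = subprocess.check_output(
        [sys.executable, "-c", "print(hash('probe'))"],
        env={"PYTHONHASHSEED": seed})
    return out.strip()

print(child_hash("0") == child_hash("0"))            # True: pinned seed, stable hash
print(child_hash("random") == child_hash("random"))  # almost always False on Python 3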
16/06/09 03:39:04 INFO NettyBlockTransferService: Server created on 43435 16/06/09 03:39:04 INFO BlockManagerMaster: Trying to register BlockManager 16/06/09 03:39:04 INFO BlockManagerMasterEndpoint: Registering block manager 10.0.0.10:43435 with 457.9 MB RAM, BlockManagerId(driver, 10.0.0.10, 43435) 16/06/09 03:39:04 INFO BlockManagerMaster: Registered BlockManager 16/06/09 03:39:04 INFO EventLoggingListener: Logging events to wasb:///hdp/spark-events/application_1465316720094_0025_1 16/06/09 03:39:05 INFO YarnSchedulerBackend$YarnSchedulerEndpoint: ApplicationMaster registered as NettyRpcEndpointRef(spark://YarnAM@10.0.0.10:43484) 16/06/09 03:39:05 INFO YarnRMClient: Registering the ApplicationMaster 16/06/09 03:39:05 INFO ConfiguredRMFailoverProxyProvider: Failing over to rm2 16/06/09 03:39:05 INFO YarnAllocator: Will request 2 executor containers, each with 2 cores and 6528 MB memory including 384 MB overhead 16/06/09 03:39:05 INFO YarnAllocator: Container request (host: Any, capability: <memory:6528, vCores:2>) 16/06/09 03:39:05 INFO YarnAllocator: Container request (host: Any, capability: <memory:6528, vCores:2>) 16/06/09 03:39:05 INFO ApplicationMaster: Started progress reporter thread with (heartbeat : 5000, initial allocation : 200) intervals 16/06/09 03:39:06 INFO AMRMClientImpl: Received new token for : 10.0.0.10:30050 16/06/09 03:39:06 INFO AMRMClientImpl: Received new token for : 10.0.0.8:30050 16/06/09 03:39:06 INFO YarnAllocator: Launching container container_e05_1465316720094_0025_01_000002 for on host 10.0.0.10 16/06/09 03:39:06 INFO YarnAllocator: Launching ExecutorRunnable. driverUrl: spark://CoarseGrainedScheduler@10.0.0.10:43484, executorHostname: 10.0.0.10 16/06/09 03:39:06 INFO YarnAllocator: Launching container container_e05_1465316720094_0025_01_000003 for on host 10.0.0.8 16/06/09 03:39:06 INFO ExecutorRunnable: Starting Executor Container 16/06/09 03:39:06 INFO YarnAllocator: Launching ExecutorRunnable. driverUrl: spark://CoarseGrainedScheduler@10.0.0.10:43484, executorHostname: 10.0.0.8 16/06/09 03:39:06 INFO ExecutorRunnable: Starting Executor Container 16/06/09 03:39:06 INFO ContainerManagementProtocolProxy: yarn.client.max-cached-nodemanagers-proxies : 0 16/06/09 03:39:06 INFO YarnAllocator: Received 2 containers from YARN, launching executors on 2 of them.
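Both executor containers get the same localized files and, per the launch contexts printed next, the same PYTHONHASHSEED=0 in their environment. That uniformity is what keyed RDD operations depend on: distinct, reduceByKey and friends route records by the hash of their key, so every executor's interpreter must hash a given string identically. A follow-up probe in the same spirit (a sketch, not part of this session):

import pyspark

sc = pyspark.SparkContext.getOrCreate()

# Hash the same string once per partition; with PYTHONHASHSEED pinned
# cluster-wide, the distinct set collapses to a single value.
hashes = (sc.parallelize(range(8), 8)
            .map(lambda _: hash("probe"))
            .distinct()
            .collect())
print(hashes)  # expect exactly one element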
16/06/09 03:39:06 INFO ExecutorRunnable: Setting up ContainerLaunchContext 16/06/09 03:39:06 INFO ContainerManagementProtocolProxy: yarn.client.max-cached-nodemanagers-proxies : 0 16/06/09 03:39:06 INFO ExecutorRunnable: Setting up ContainerLaunchContext 16/06/09 03:39:06 INFO ExecutorRunnable: Preparing Local resources 16/06/09 03:39:06 INFO ExecutorRunnable: Preparing Local resources 16/06/09 03:39:06 INFO ExecutorRunnable: Prepared Local resources Map(pyspark.zip -> resource { scheme: "wasb" host: "wranglewestus.blob.core.windows.net" port: -1 file: "/user/KevinGre/.sparkStaging/application_1465316720094_0025/pyspark.zip" userInfo: "kevingre" } size: 357163 timestamp: 1465443535000 type: FILE visibility: PRIVATE, py4j-0.9-src.zip -> resource { scheme: "wasb" host: "wranglewestus.blob.core.windows.net" port: -1 file: "/user/KevinGre/.sparkStaging/application_1465316720094_0025/py4j-0.9-src.zip" userInfo: "kevingre" } size: 44846 timestamp: 1465443535000 type: FILE visibility: PRIVATE, __spark_conf__ -> resource { scheme: "wasb" host: "wranglewestus.blob.core.windows.net" port: -1 file: "/user/KevinGre/.sparkStaging/application_1465316720094_0025/__spark_conf__2196432329335185680.zip" userInfo: "kevingre" } size: 105661 timestamp: 1465443536000 type: ARCHIVE visibility: PRIVATE) 16/06/09 03:39:06 INFO ExecutorRunnable: Prepared Local resources Map(pyspark.zip -> resource { scheme: "wasb" host: "wranglewestus.blob.core.windows.net" port: -1 file: "/user/KevinGre/.sparkStaging/application_1465316720094_0025/pyspark.zip" userInfo: "kevingre" } size: 357163 timestamp: 1465443535000 type: FILE visibility: PRIVATE, py4j-0.9-src.zip -> resource { scheme: "wasb" host: "wranglewestus.blob.core.windows.net" port: -1 file: "/user/KevinGre/.sparkStaging/application_1465316720094_0025/py4j-0.9-src.zip" userInfo: "kevingre" } size: 44846 timestamp: 1465443535000 type: FILE visibility: PRIVATE, __spark_conf__ -> resource { scheme: "wasb" host: "wranglewestus.blob.core.windows.net" port: -1 file: "/user/KevinGre/.sparkStaging/application_1465316720094_0025/__spark_conf__2196432329335185680.zip" userInfo: "kevingre" } size: 105661 timestamp: 1465443536000 type: ARCHIVE visibility: PRIVATE) 16/06/09 03:39:06 INFO ExecutorRunnable: =============================================================================== YARN executor launch context: env: CLASSPATH -> {{PWD}}<CPS>{{PWD}}/__spark_conf__<CPS>/usr/hdp/current/spark-client/lib/spark-assembly.jar<CPS>$HADOOP_CONF_DIR<CPS>/usr/hdp/current/hadoop-client/*<CPS>/usr/hdp/current/hadoop-client/lib/*<CPS>/usr/hdp/current/hadoop-hdfs-client/*<CPS>/usr/hdp/current/hadoop-hdfs-client/lib/*<CPS>/usr/hdp/current/hadoop-yarn-client/*<CPS>/usr/hdp/current/hadoop-yarn-client/lib/*<CPS>$PWD/mr-framework/hadoop/share/hadoop/mapreduce/*:$PWD/mr-framework/hadoop/share/hadoop/mapreduce/lib/*:$PWD/mr-framework/hadoop/share/hadoop/common/*:$PWD/mr-framework/hadoop/share/hadoop/common/lib/*:$PWD/mr-framework/hadoop/share/hadoop/yarn/*:$PWD/mr-framework/hadoop/share/hadoop/yarn/lib/*:$PWD/mr-framework/hadoop/share/hadoop/hdfs/*:$PWD/mr-framework/hadoop/share/hadoop/hdfs/lib/*:$PWD/mr-framework/hadoop/share/hadoop/tools/lib/*:/usr/hdp/2.4.2.0-258/hadoop/lib/hadoop-lzo-0.6.0.2.4.2.0-258.jar:/etc/hadoop/conf/secure<CPS>:/usr/hdp/current/spark-client/lib/*:/usr/lib/hdinsight-datalake/*:/usr/hdp/current/spark-client/conf/hive-site.xml SPARK_YARN_CACHE_ARCHIVES -> wasb://kevingre@wranglewestus.blob.core.windows.net/user/KevinGre/.sparkStaging/application_1465316720094_0025/__spark_conf__2196432329335185680.zip#__spark_conf__
SPARK_LOG_URL_STDERR -> http://10.0.0.10:30060/node/containerlogs/container_e05_1465316720094_0025_01_000002/KevinGre/stderr?start=-4096 SPARK_YARN_CACHE_FILES_FILE_SIZES -> 357163,44846 SPARK_YARN_STAGING_DIR -> .sparkStaging/application_1465316720094_0025 SPARK_DIST_CLASSPATH -> :/usr/hdp/current/spark-client/lib/*:/usr/lib/hdinsight-datalake/*:/usr/hdp/current/spark-client/conf/hive-site.xml SPARK_YARN_CACHE_FILES_VISIBILITIES -> PRIVATE,PRIVATE SPARK_YARN_CACHE_ARCHIVES_FILE_SIZES -> 105661 SPARK_USER -> KevinGre SPARK_YARN_CACHE_ARCHIVES_TIME_STAMPS -> 1465443536000 SPARK_YARN_MODE -> true PYTHONHASHSEED -> 0 SPARK_YARN_CACHE_FILES_TIME_STAMPS -> 1465443535000,1465443535000 SPARK_HOME -> /usr/hdp/current/spark-client PYTHONPATH -> /usr/hdp/current/spark-client/python:/usr/hdp/current/spark-client/python/lib/py4j-0.9-src.zip<CPS>{{PWD}}/pyspark.zip<CPS>{{PWD}}/py4j-0.9-src.zip SPARK_LOG_URL_STDOUT -> http://10.0.0.10:30060/node/containerlogs/container_e05_1465316720094_0025_01_000002/KevinGre/stdout?start=-4096 SPARK_YARN_CACHE_ARCHIVES_VISIBILITIES -> PRIVATE SPARK_YARN_CACHE_FILES -> wasb://kevingre@wranglewestus.blob.core.windows.net/user/KevinGre/.sparkStaging/application_1465316720094_0025/pyspark.zip#pyspark.zip,wasb://kevingre@wranglewestus.blob.core.windows.net/user/KevinGre/.sparkStaging/application_1465316720094_0025/py4j-0.9-src.zip#py4j-0.9-src.zip command: {{JAVA_HOME}}/bin/java -server -XX:OnOutOfMemoryError='kill %p' -Xms6144m -Xmx6144m '-Dhdp.version=' '-Detwlogger.component=sparkexecutor' '-DlogFilter.filename=SparkLogFilters.xml' '-DpatternGroup.filename=SparkPatternGroups.xml' '-Dlog4jspark.root.logger=INFO,console,DRFA,ETW,Anonymizer' '-Dlog4jspark.log.dir=/var/log/sparkapp' '-Dlog4jspark.log.file=sparkexecutor_\${user.name}.log' '-Dlog4j.configuration=file:/usr/hdp/current/spark-client/conf/log4j.properties' '-Djavax.xml.parsers.SAXParserFactory=com.sun.org.apache.xerces.internal.jaxp.SAXParserFactoryImpl' -Djava.io.tmpdir={{PWD}}/tmp '-Dspark.driver.port=43484' '-Dspark.history.ui.port=18080' '-Dspark.ui.port=0' -Dspark.yarn.app.container.log.dir=<LOG_DIR> -XX:MaxPermSize=256m org.apache.spark.executor.CoarseGrainedExecutorBackend --driver-url spark://CoarseGrainedScheduler@10.0.0.10:43484 --executor-id 1 --hostname 10.0.0.10 --cores 2 --app-id application_1465316720094_0025 --user-class-path file:$PWD/__app__.jar 1> <LOG_DIR>/stdout 2> <LOG_DIR>/stderr =============================================================================== 16/06/09 03:39:06 INFO ExecutorRunnable: =============================================================================== YARN executor launch context: env: CLASSPATH ->
{{PWD}}<CPS>{{PWD}}/__spark_conf__<CPS>/usr/hdp/current/spark-client/lib/spark-assembly.jar<CPS>$HADOOP_CONF_DIR<CPS>/usr/hdp/current/hadoop-client/*<CPS>/usr/hdp/current/hadoop-client/lib/*<CPS>/usr/hdp/current/hadoop-hdfs-client/*<CPS>/usr/hdp/current/hadoop-hdfs-client/lib/*<CPS>/usr/hdp/current/hadoop-yarn-client/*<CPS>/usr/hdp/current/hadoop-yarn-client/lib/*<CPS>$PWD/mr-framework/hadoop/share/hadoop/mapreduce/*:$PWD/mr-framework/hadoop/share/hadoop/mapreduce/lib/*:$PWD/mr-framework/hadoop/share/hadoop/common/*:$PWD/mr-framework/hadoop/share/hadoop/common/lib/*:$PWD/mr-framework/hadoop/share/hadoop/yarn/*:$PWD/mr-framework/hadoop/share/hadoop/yarn/lib/*:$PWD/mr-framework/hadoop/share/hadoop/hdfs/*:$PWD/mr-framework/hadoop/share/hadoop/hdfs/lib/*:$PWD/mr-framework/hadoop/share/hadoop/tools/lib/*:/usr/hdp/2.4.2.0-258/hadoop/lib/hadoop-lzo-0.6.0.2.4.2.0-258.jar:/etc/hadoop/conf/secure<CPS>:/usr/hdp/current/spark-client/lib/*:/usr/lib/hdinsight-datalake/*:/usr/hdp/current/spark-client/conf/hive-site.xml SPARK_YARN_CACHE_ARCHIVES -> wasb://kevingre@wranglewestus.blob.core.windows.net/user/KevinGre/.sparkStaging/application_1465316720094_0025/__spark_conf__2196432329335185680.zip#__spark_conf__ SPARK_LOG_URL_STDERR -> http://10.0.0.8:30060/node/containerlogs/container_e05_1465316720094_0025_01_000003/KevinGre/stderr?start=-4096 SPARK_YARN_CACHE_FILES_FILE_SIZES -> 357163,44846 SPARK_YARN_STAGING_DIR -> .sparkStaging/application_1465316720094_0025 SPARK_DIST_CLASSPATH -> :/usr/hdp/current/spark-client/lib/*:/usr/lib/hdinsight-datalake/*:/usr/hdp/current/spark-client/conf/hive-site.xml SPARK_YARN_CACHE_FILES_VISIBILITIES -> PRIVATE,PRIVATE SPARK_YARN_CACHE_ARCHIVES_FILE_SIZES -> 105661 SPARK_USER -> KevinGre SPARK_YARN_CACHE_ARCHIVES_TIME_STAMPS -> 1465443536000 SPARK_YARN_MODE -> true PYTHONHASHSEED -> 0 SPARK_YARN_CACHE_FILES_TIME_STAMPS -> 1465443535000,1465443535000 SPARK_HOME -> /usr/hdp/current/spark-client PYTHONPATH -> /usr/hdp/current/spark-client/python:/usr/hdp/current/spark-client/python/lib/py4j-0.9-src.zip<CPS>{{PWD}}/pyspark.zip<CPS>{{PWD}}/py4j-0.9-src.zip SPARK_LOG_URL_STDOUT -> http://10.0.0.8:30060/node/containerlogs/container_e05_1465316720094_0025_01_000003/KevinGre/stdout?start=-4096 SPARK_YARN_CACHE_ARCHIVES_VISIBILITIES -> PRIVATE SPARK_YARN_CACHE_FILES -> wasb://kevingre@wranglewestus.blob.core.windows.net/user/KevinGre/.sparkStaging/application_1465316720094_0025/pyspark.zip#pyspark.zip,wasb://kevingre@wranglewestus.blob.core.windows.net/user/KevinGre/.sparkStaging/application_1465316720094_0025/py4j-0.9-src.zip#py4j-0.9-src.zip command: {{JAVA_HOME}}/bin/java -server -XX:OnOutOfMemoryError='kill %p' -Xms6144m -Xmx6144m '-Dhdp.version=' '-Detwlogger.component=sparkexecutor' '-DlogFilter.filename=SparkLogFilters.xml' '-DpatternGroup.filename=SparkPatternGroups.xml' '-Dlog4jspark.root.logger=INFO,console,DRFA,ETW,Anonymizer' '-Dlog4jspark.log.dir=/var/log/sparkapp' '-Dlog4jspark.log.file=sparkexecutor_\${user.name}.log' '-Dlog4j.configuration=file:/usr/hdp/current/spark-client/conf/log4j.properties' '-Djavax.xml.parsers.SAXParserFactory=com.sun.org.apache.xerces.internal.jaxp.SAXParserFactoryImpl' -Djava.io.tmpdir={{PWD}}/tmp '-Dspark.driver.port=43484' '-Dspark.history.ui.port=18080' '-Dspark.ui.port=0' -Dspark.yarn.app.container.log.dir=<LOG_DIR> -XX:MaxPermSize=256m org.apache.spark.executor.CoarseGrainedExecutorBackend --driver-url spark://CoarseGrainedScheduler@10.0.0.10:43484 --executor-id 2 --hostname 10.0.0.8 --cores 2 --app-id application_1465316720094_0025 --user-class-path file:$PWD/__app__.jar 1> <LOG_DIR>/stdout 2> <LOG_DIR>/stderr
=============================================================================== 16/06/09 03:39:06 INFO ContainerManagementProtocolProxy: Opening proxy : 10.0.0.10:30050 16/06/09 03:39:06 INFO ContainerManagementProtocolProxy: Opening proxy : 10.0.0.8:30050 16/06/09 03:39:10 INFO YarnClusterSchedulerBackend: Registered executor NettyRpcEndpointRef(null) (10.0.0.10:59196) with ID 1 16/06/09 03:39:10 INFO BlockManagerMasterEndpoint: Registering block manager 10.0.0.10:36659 with 4.1 GB RAM, BlockManagerId(1, 10.0.0.10, 36659) 16/06/09 03:39:12 INFO YarnClusterSchedulerBackend: Registered executor NettyRpcEndpointRef(null) (10.0.0.8:60124) with ID 2 16/06/09 03:39:12 INFO YarnClusterSchedulerBackend: SchedulerBackend is ready for scheduling beginning after reached minRegisteredResourcesRatio: 0.8 16/06/09 03:39:12 INFO YarnClusterScheduler: YarnClusterScheduler.postStartHook done 16/06/09 03:39:12 INFO BlockManagerMasterEndpoint: Registering block manager 10.0.0.8:38318 with 4.1 GB RAM, BlockManagerId(2, 10.0.0.8, 38318) 16/06/09 03:39:13 INFO SparkContext: Starting job: collect at probe.py:15 16/06/09 03:39:13 INFO DAGScheduler: Got job 0 (collect at probe.py:15) with 4 output partitions 16/06/09 03:39:13 INFO DAGScheduler: Final stage: ResultStage 0 (collect at probe.py:15) 16/06/09 03:39:13 INFO DAGScheduler: Parents of final stage: List() 16/06/09 03:39:13 INFO DAGScheduler: Missing parents: List() 16/06/09 03:39:13 INFO DAGScheduler: Submitting ResultStage 0 (PythonRDD[1] at collect at probe.py:15), which has no missing parents 16/06/09 03:39:13 INFO MemoryStore: Block broadcast_0 stored as values in memory (estimated size 3.9 KB, free 3.9 KB) 16/06/09 03:39:13 INFO MemoryStore: Block broadcast_0_piece0 stored as bytes in memory (estimated size 2.6 KB, free 6.6 KB) 16/06/09 03:39:13 INFO BlockManagerInfo: Added broadcast_0_piece0 in memory on 10.0.0.10:43435 (size: 2.6 KB, free: 457.9 MB) 16/06/09 03:39:13 INFO SparkContext: Created broadcast 0 from broadcast at DAGScheduler.scala:1006 16/06/09 03:39:13 INFO DAGScheduler: Submitting 4 missing tasks from ResultStage 0 (PythonRDD[1] at collect at probe.py:15) 16/06/09 03:39:13 INFO YarnClusterScheduler: Adding task set 0.0 with 4 tasks 16/06/09 03:39:13 INFO TaskSetManager: Starting task 0.0 in stage 0.0 (TID 0, 10.0.0.8, partition 0,PROCESS_LOCAL, 2064 bytes) 16/06/09 03:39:13 INFO TaskSetManager: Starting task 1.0 in stage 0.0 (TID 1, 10.0.0.10, partition 1,PROCESS_LOCAL, 2064 bytes) 16/06/09 03:39:13 INFO TaskSetManager: Starting task 2.0 in stage 0.0 (TID 2, 10.0.0.8, partition 2,PROCESS_LOCAL, 2064 bytes) 16/06/09 03:39:13 INFO TaskSetManager: Starting task 3.0 in stage 0.0 (TID 3, 10.0.0.10, partition 3,PROCESS_LOCAL, 2083 bytes) 16/06/09 03:39:13 INFO BlockManagerInfo: Added broadcast_0_piece0 in memory on 10.0.0.10:36659 (size: 2.6 KB, free: 4.1 GB) 16/06/09 03:39:13 INFO BlockManagerInfo: Added broadcast_0_piece0 in memory on 10.0.0.8:38318 (size: 2.6 KB, free: 4.1 GB) 16/06/09 03:39:14 INFO TaskSetManager: Finished task 1.0 in stage 0.0 (TID 1) in 1400 ms on 10.0.0.10 (1/4) 16/06/09 03:39:14 INFO TaskSetManager: Finished task 3.0 in stage 0.0 (TID 3) in 1412 ms on 10.0.0.10 (2/4) 16/06/09 03:39:14 INFO TaskSetManager: Finished task 0.0 in stage 0.0 (TID 0) in 1452 ms on 10.0.0.8 (3/4) 16/06/09 03:39:14 INFO DAGScheduler: ResultStage 0 (collect at probe.py:15) finished in 1.456 s 16/06/09 03:39:14 INFO TaskSetManager: Finished task 2.0 in stage 0.0 (TID 2) in 1433 ms on 10.0.0.8 (4/4) 16/06/09 03:39:14 INFO DAGScheduler: Job 0 
finished: collect at probe.py:15, took 1.606616 s 16/06/09 03:39:14 INFO YarnClusterScheduler: Removed TaskSet 0.0, whose tasks have all completed, from pool 16/06/09 03:39:14 INFO ApplicationMaster: Final app status: SUCCEEDED, exitCode: 0 16/06/09 03:39:14 INFO SparkContext: Invoking stop() from shutdown hook 16/06/09 03:39:14 INFO ContextHandler: stopped o.s.j.s.ServletContextHandler{/metrics/json,null} 16/06/09 03:39:14 INFO ContextHandler: stopped o.s.j.s.ServletContextHandler{/stages/stage/kill,null} 16/06/09 03:39:14 INFO ContextHandler: stopped o.s.j.s.ServletContextHandler{/api,null} 16/06/09 03:39:14 INFO ContextHandler: stopped o.s.j.s.ServletContextHandler{/,null} 16/06/09 03:39:14 INFO ContextHandler: stopped o.s.j.s.ServletContextHandler{/static,null} 16/06/09 03:39:14 INFO ContextHandler: stopped o.s.j.s.ServletContextHandler{/executors/threadDump/json,null} 16/06/09 03:39:14 INFO ContextHandler: stopped o.s.j.s.ServletContextHandler{/executors/threadDump,null} 16/06/09 03:39:14 INFO ContextHandler: stopped o.s.j.s.ServletContextHandler{/executors/json,null} 16/06/09 03:39:14 INFO ContextHandler: stopped o.s.j.s.ServletContextHandler{/executors,null} 16/06/09 03:39:14 INFO ContextHandler: stopped o.s.j.s.ServletContextHandler{/environment/json,null} 16/06/09 03:39:14 INFO ContextHandler: stopped o.s.j.s.ServletContextHandler{/environment,null} 16/06/09 03:39:14 INFO ContextHandler: stopped o.s.j.s.ServletContextHandler{/storage/rdd/json,null} 16/06/09 03:39:14 INFO ContextHandler: stopped o.s.j.s.ServletContextHandler{/storage/rdd,null} 16/06/09 03:39:14 INFO ContextHandler: stopped o.s.j.s.ServletContextHandler{/storage/json,null} 16/06/09 03:39:14 INFO ContextHandler: stopped o.s.j.s.ServletContextHandler{/storage,null} 16/06/09 03:39:14 INFO ContextHandler: stopped o.s.j.s.ServletContextHandler{/stages/pool/json,null} 16/06/09 03:39:14 INFO ContextHandler: stopped o.s.j.s.ServletContextHandler{/stages/pool,null} 16/06/09 03:39:14 INFO ContextHandler: stopped o.s.j.s.ServletContextHandler{/stages/stage/json,null} 16/06/09 03:39:14 INFO ContextHandler: stopped o.s.j.s.ServletContextHandler{/stages/stage,null} 16/06/09 03:39:14 INFO ContextHandler: stopped o.s.j.s.ServletContextHandler{/stages/json,null} 16/06/09 03:39:14 INFO ContextHandler: stopped o.s.j.s.ServletContextHandler{/stages,null} 16/06/09 03:39:14 INFO ContextHandler: stopped o.s.j.s.ServletContextHandler{/jobs/job/json,null} 16/06/09 03:39:14 INFO ContextHandler: stopped o.s.j.s.ServletContextHandler{/jobs/job,null} 16/06/09 03:39:14 INFO ContextHandler: stopped o.s.j.s.ServletContextHandler{/jobs/json,null} 16/06/09 03:39:14 INFO ContextHandler: stopped o.s.j.s.ServletContextHandler{/jobs,null} 16/06/09 03:39:15 INFO SparkUI: Stopped Spark web UI at http://10.0.0.10:38830 16/06/09 03:39:15 INFO metrics: type=GAUGE, name=application_1465316720094_0025.driver.BlockManager.disk.diskSpaceUsed_MB, value=0 16/06/09 03:39:15 INFO metrics: type=GAUGE, name=application_1465316720094_0025.driver.BlockManager.memory.maxMem_MB, value=8839 16/06/09 03:39:15 INFO metrics: type=GAUGE, name=application_1465316720094_0025.driver.BlockManager.memory.memUsed_MB, value=0 16/06/09 03:39:15 INFO metrics: type=GAUGE, name=application_1465316720094_0025.driver.BlockManager.memory.remainingMem_MB, value=8839 16/06/09 03:39:15 INFO metrics: type=GAUGE, name=application_1465316720094_0025.driver.DAGScheduler.job.activeJobs, value=0 16/06/09 03:39:15 INFO metrics: type=GAUGE, 
name=application_1465316720094_0025.driver.DAGScheduler.job.allJobs, value=1 16/06/09 03:39:15 INFO metrics: type=GAUGE, name=application_1465316720094_0025.driver.DAGScheduler.stage.failedStages, value=0 16/06/09 03:39:15 INFO metrics: type=GAUGE, name=application_1465316720094_0025.driver.DAGScheduler.stage.runningStages, value=0 16/06/09 03:39:15 INFO metrics: type=GAUGE, name=application_1465316720094_0025.driver.DAGScheduler.stage.waitingStages, value=0 16/06/09 03:39:15 INFO metrics: type=GAUGE, name=application_1465316720094_0025.driver.jvm.PS-MarkSweep.count, value=0 16/06/09 03:39:15 INFO metrics: type=GAUGE, name=application_1465316720094_0025.driver.jvm.PS-MarkSweep.time, value=0 16/06/09 03:39:15 INFO metrics: type=GAUGE, name=application_1465316720094_0025.driver.jvm.PS-Scavenge.count, value=4 16/06/09 03:39:15 INFO metrics: type=GAUGE, name=application_1465316720094_0025.driver.jvm.PS-Scavenge.time, value=90 16/06/09 03:39:15 INFO metrics: type=GAUGE, name=application_1465316720094_0025.driver.jvm.heap.committed, value=441974784 16/06/09 03:39:15 INFO metrics: type=GAUGE, name=application_1465316720094_0025.driver.jvm.heap.init, value=460977216 16/06/09 03:39:15 INFO metrics: type=GAUGE, name=application_1465316720094_0025.driver.jvm.heap.max, value=954728448 16/06/09 03:39:15 INFO metrics: type=GAUGE, name=application_1465316720094_0025.driver.jvm.heap.usage, value=0.18066124703974465 16/06/09 03:39:15 INFO metrics: type=GAUGE, name=application_1465316720094_0025.driver.jvm.heap.used, value=172611224 16/06/09 03:39:15 INFO metrics: type=GAUGE, name=application_1465316720094_0025.driver.jvm.non-heap.committed, value=65470464 16/06/09 03:39:15 INFO metrics: type=GAUGE, name=application_1465316720094_0025.driver.jvm.non-heap.init, value=24576000 16/06/09 03:39:15 INFO metrics: type=GAUGE, name=application_1465316720094_0025.driver.jvm.non-heap.max, value=318767104 16/06/09 03:39:15 INFO metrics: type=GAUGE, name=application_1465316720094_0025.driver.jvm.non-heap.usage, value=0.2029475914804559 16/06/09 03:39:15 INFO metrics: type=GAUGE, name=application_1465316720094_0025.driver.jvm.non-heap.used, value=64693016 16/06/09 03:39:15 INFO metrics: type=GAUGE, name=application_1465316720094_0025.driver.jvm.pools.Code-Cache.committed, value=2555904 16/06/09 03:39:15 INFO metrics: type=GAUGE, name=application_1465316720094_0025.driver.jvm.pools.Code-Cache.init, value=2555904 16/06/09 03:39:15 INFO metrics: type=GAUGE, name=application_1465316720094_0025.driver.jvm.pools.Code-Cache.max, value=50331648 16/06/09 03:39:15 INFO metrics: type=GAUGE, name=application_1465316720094_0025.driver.jvm.pools.Code-Cache.usage, value=0.03596623738606771 16/06/09 03:39:15 INFO metrics: type=GAUGE, name=application_1465316720094_0025.driver.jvm.pools.Code-Cache.used, value=1810240 16/06/09 03:39:15 INFO metrics: type=GAUGE, name=application_1465316720094_0025.driver.jvm.pools.PS-Eden-Space.committed, value=115867648 16/06/09 03:39:15 INFO metrics: type=GAUGE, name=application_1465316720094_0025.driver.jvm.pools.PS-Eden-Space.init, value=115867648 16/06/09 03:39:15 INFO metrics: type=GAUGE, name=application_1465316720094_0025.driver.jvm.pools.PS-Eden-Space.max, value=320339968 16/06/09 03:39:15 INFO metrics: type=GAUGE, name=application_1465316720094_0025.driver.jvm.pools.PS-Eden-Space.usage, value=0.2515238061083904 16/06/09 03:39:15 INFO metrics: type=GAUGE, name=application_1465316720094_0025.driver.jvm.pools.PS-Eden-Space.used, value=80573128 16/06/09 03:39:15 INFO metrics: type=GAUGE, 
name=application_1465316720094_0025.driver.jvm.pools.PS-Old-Gen.committed, value=307232768 16/06/09 03:39:15 INFO metrics: type=GAUGE, name=application_1465316720094_0025.driver.jvm.pools.PS-Old-Gen.init, value=307232768 16/06/09 03:39:15 INFO metrics: type=GAUGE, name=application_1465316720094_0025.driver.jvm.pools.PS-Old-Gen.max, value=715653120 16/06/09 03:39:15 INFO metrics: type=GAUGE, name=application_1465316720094_0025.driver.jvm.pools.PS-Old-Gen.usage, value=0.1029746813651843 16/06/09 03:39:15 INFO metrics: type=GAUGE, name=application_1465316720094_0025.driver.jvm.pools.PS-Old-Gen.used, value=73694152 16/06/09 03:39:15 INFO metrics: type=GAUGE, name=application_1465316720094_0025.driver.jvm.pools.PS-Perm-Gen.committed, value=62914560 16/06/09 03:39:15 INFO metrics: type=GAUGE, name=application_1465316720094_0025.driver.jvm.pools.PS-Perm-Gen.init, value=22020096 16/06/09 03:39:15 INFO metrics: type=GAUGE, name=application_1465316720094_0025.driver.jvm.pools.PS-Perm-Gen.max, value=268435456 16/06/09 03:39:15 INFO metrics: type=GAUGE, name=application_1465316720094_0025.driver.jvm.pools.PS-Perm-Gen.usage, value=0.23427343368530273 16/06/09 03:39:15 INFO metrics: type=GAUGE, name=application_1465316720094_0025.driver.jvm.pools.PS-Perm-Gen.used, value=62888032 16/06/09 03:39:15 INFO metrics: type=GAUGE, name=application_1465316720094_0025.driver.jvm.pools.PS-Survivor-Space.committed, value=18874368 16/06/09 03:39:15 INFO metrics: type=GAUGE, name=application_1465316720094_0025.driver.jvm.pools.PS-Survivor-Space.init, value=18874368 16/06/09 03:39:15 INFO metrics: type=GAUGE, name=application_1465316720094_0025.driver.jvm.pools.PS-Survivor-Space.max, value=18874368 16/06/09 03:39:15 INFO metrics: type=GAUGE, name=application_1465316720094_0025.driver.jvm.pools.PS-Survivor-Space.usage, value=0.9992260403103299 16/06/09 03:39:15 INFO metrics: type=GAUGE, name=application_1465316720094_0025.driver.jvm.pools.PS-Survivor-Space.used, value=18859760 16/06/09 03:39:15 INFO metrics: type=GAUGE, name=application_1465316720094_0025.driver.jvm.total.committed, value=507445248 16/06/09 03:39:15 INFO metrics: type=GAUGE, name=application_1465316720094_0025.driver.jvm.total.init, value=485553216 16/06/09 03:39:15 INFO metrics: type=GAUGE, name=application_1465316720094_0025.driver.jvm.total.max, value=1273495552 16/06/09 03:39:15 INFO metrics: type=GAUGE, name=application_1465316720094_0025.driver.jvm.total.used, value=238601264 16/06/09 03:39:15 INFO metrics: type=TIMER, name=application_1465316720094_0025.driver.DAGScheduler.messageProcessingTime, count=11, min=0.056999999999999995, max=142.924322, mean=14.301413196934773, stddev=40.70974769577879, median=0.274601, p75=4.5757259999999995, p95=142.924322, p98=142.924322, p99=142.924322, p999=142.924322, mean_rate=1.0364645104434316, m1=0.11193781951894739, m5=0.02313996464973551, m15=0.007756212793144578, rate_unit=events/second, duration_unit=milliseconds 16/06/09 03:39:15 INFO YarnAllocator: Driver requested a total number of 0 executor(s). 16/06/09 03:39:15 INFO YarnClusterSchedulerBackend: Shutting down all executors 16/06/09 03:39:15 INFO YarnClusterSchedulerBackend: Asking each executor to shut down 16/06/09 03:39:15 INFO SchedulerExtensionServices: Stopping SchedulerExtensionServices (serviceOption=Some(), services=List(), started=false) 16/06/09 03:39:15 INFO MapOutputTrackerMasterEndpoint: MapOutputTrackerMasterEndpoint stopped! 
16/06/09 03:39:15 INFO MemoryStore: MemoryStore cleared 16/06/09 03:39:15 INFO BlockManager: BlockManager stopped 16/06/09 03:39:15 INFO BlockManagerMaster: BlockManagerMaster stopped 16/06/09 03:39:15 INFO OutputCommitCoordinator$OutputCommitCoordinatorEndpoint: OutputCommitCoordinator stopped! 16/06/09 03:39:15 INFO RemoteActorRefProvider$RemotingTerminator: Shutting down remote daemon. 16/06/09 03:39:15 INFO SparkContext: Successfully stopped SparkContext 16/06/09 03:39:15 INFO RemoteActorRefProvider$RemotingTerminator: Remote daemon shut down; proceeding with flushing remote transports. 16/06/09 03:39:15 INFO ApplicationMaster: Unregistering ApplicationMaster with SUCCEEDED 16/06/09 03:39:15 INFO AMRMClientImpl: Waiting for application to be successfully unregistered. 16/06/09 03:39:15 INFO RemoteActorRefProvider$RemotingTerminator: Remoting shut down. 16/06/09 03:39:15 INFO ApplicationMaster: Deleting staging directory .sparkStaging/application_1465316720094_0025 16/06/09 03:39:15 INFO ShutdownHookManager: Shutdown hook called 16/06/09 03:39:15 INFO ShutdownHookManager: Deleting directory /mnt/resource/hadoop/yarn/local/usercache/KevinGre/appcache/application_1465316720094_0025/spark-2e5f4ea9-f6b9-40bb-a2e0-de3bdb6801d2/pyspark-74179f21-f58d-48ca-ada3-d5993783e9c0 16/06/09 03:39:15 INFO ShutdownHookManager: Deleting directory /mnt/resource/hadoop/yarn/local/usercache/KevinGre/appcache/application_1465316720094_0025/spark-2e5f4ea9-f6b9-40bb-a2e0-de3bdb6801d2 16/06/09 03:39:15 INFO MetricsSystemImpl: Stopping azure-file-system metrics system... 16/06/09 03:39:15 INFO MetricsSinkAdapter: azurefs2 thread interrupted. 16/06/09 03:39:15 INFO MetricsSystemImpl: azure-file-system metrics system stopped. 16/06/09 03:39:15 INFO MetricsSystemImpl: azure-file-system metrics system shutdown complete. End of LogType:stderr LogType:stdout Log Upload Time:Thu Jun 09 03:39:17 +0000 2016 LogLength:353 Log Contents: driver: 2.7.11 |Anaconda 2.3.0 (64-bit)| (default, Dec 6 2015, 18:08:32) [GCC 4.4.7 20120313 (Red Hat 4.4.7-1)] PYTHONHASHSEED=0 PYSPARK_PYTHON=/usr/bin/anaconda/bin/python executors: 2.7.11 |Anaconda 2.3.0 (64-bit)| (default, Dec 6 2015, 18:08:32) [GCC 4.4.7 20120313 (Red Hat 4.4.7-1)] PYTHONHASHSEED=0 PYSPARK_PYTHON=/usr/bin/anaconda/bin/python End of LogType:stdout Container: container_e05_1465316720094_0025_01_000003 on 10.0.0.8_30050 ========================================================================= LogType:directory.info Log Upload Time:Thu Jun 09 03:39:17 +0000 2016 LogLength:4767 Log Contents: ls -l: total 28 -rw------- 1 nobody hadoop 103 Jun 9 03:39 container_tokens -rwx------ 1 nobody hadoop 7141 Jun 9 03:39 launch_container.sh lrwxrwxrwx 1 nobody hadoop 81 Jun 9 03:39 py4j-0.9-src.zip -> /mnt/resource/hadoop/yarn/local/usercache/KevinGre/filecache/176/py4j-0.9-src.zip lrwxrwxrwx 1 nobody hadoop 76 Jun 9 03:39 pyspark.zip -> /mnt/resource/hadoop/yarn/local/usercache/KevinGre/filecache/175/pyspark.zip lrwxrwxrwx 1 nobody hadoop 102 Jun 9 03:39 __spark_conf__ -> /mnt/resource/hadoop/yarn/local/usercache/KevinGre/filecache/177/__spark_conf__2196432329335185680.zip drwxr-s--- 2 nobody hadoop 4096 Jun 9 03:39 tmp find -L . -maxdepth 5 -ls: 3539239 4 drwxr-s--- 3 nobody hadoop 4096 Jun 9 03:39 . 
3539286 352 -r-x------ 1 nobody nogroup 357163 Jun 9 03:39 ./pyspark.zip 3539293 4 drwxr-s--- 2 nobody hadoop 4096 Jun 9 03:39 ./tmp 3539357 8 -rwx------ 1 nobody hadoop 7141 Jun 9 03:39 ./launch_container.sh 3539358 4 -rw------- 1 nobody hadoop 103 Jun 9 03:39 ./container_tokens 3539294 4 drwx------ 2 nobody nogroup 4096 Jun 9 03:39 ./__spark_conf__ 3539331 0 -r-x------ 1 nobody nogroup 0 Jun 9 03:39 ./__spark_conf__/yarn.exclude 3539349 4 -r-x------ 1 nobody nogroup 127 Jun 9 03:39 ./__spark_conf__/slaves 3539346 4 -r-x------ 1 nobody nogroup 1602 Jun 9 03:39 ./__spark_conf__/health_check 3539338 4 -r-x------ 1 nobody nogroup 1335 Jun 9 03:39 ./__spark_conf__/configuration.xsl 3539332 8 -r-x------ 1 nobody nogroup 6136 Jun 9 03:39 ./__spark_conf__/core-site.xml 3539335 4 -r-x------ 1 nobody nogroup 2444 Jun 9 03:39 ./__spark_conf__/capacity-scheduler.xml 3539354 4 -r-x------ 1 nobody nogroup 945 Jun 9 03:39 ./__spark_conf__/taskcontroller.cfg 3539340 12 -r-x------ 1 nobody nogroup 8543 Jun 9 03:39 ./__spark_conf__/hdfs-site.xml 3539330 4 -r-x------ 1 nobody nogroup 265 Jun 9 03:39 ./__spark_conf__/hadoop-metrics2-azure-file-system.properties 3539296 8 -r-x------ 1 nobody nogroup 5693 Jun 9 03:39 ./__spark_conf__/hadoop-env.sh 3539326 4 -r-x------ 1 nobody nogroup 3156 Jun 9 03:39 ./__spark_conf__/log4j.properties 3539353 4 -r-x------ 1 nobody nogroup 2268 Jun 9 03:39 ./__spark_conf__/ssl-server.xml.example 3539328 4 -r-x------ 1 nobody nogroup 247 Jun 9 03:39 ./__spark_conf__/hadoop-metrics2-adl-file-system.properties 3539356 4 -r-x------ 1 nobody nogroup 2532 Jun 9 03:39 ./__spark_conf__/__spark_conf__.properties 3539351 8 -r-x------ 1 nobody nogroup 5142 Jun 9 03:39 ./__spark_conf__/metrics.properties 3539350 4 -r-x------ 1 nobody nogroup 220 Jun 9 03:39 ./__spark_conf__/topology_mappings.data 3539295 8 -r-x------ 1 nobody nogroup 7579 Jun 9 03:39 ./__spark_conf__/mapred-site.xml 3539347 4 -r-x------ 1 nobody nogroup 2316 Jun 9 03:39 ./__spark_conf__/ssl-client.xml.example 3539333 4 -r-x------ 1 nobody nogroup 1 Jun 9 03:39 ./__spark_conf__/dfs.exclude 3539345 4 -r-x------ 1 nobody nogroup 757 Jun 9 03:39 ./__spark_conf__/mapred-env.sh 3539337 4 -r-x------ 1 nobody nogroup 758 Jun 9 03:39 ./__spark_conf__/mapred-site.xml.template 3539339 8 -r-x------ 1 nobody nogroup 4277 Jun 9 03:39 ./__spark_conf__/yarn-env.sh 3539336 4 -r-x------ 1 nobody nogroup 2358 Jun 9 03:39 ./__spark_conf__/topology_script.py 3539329 24 -r-x------ 1 nobody nogroup 20890 Jun 9 03:39 ./__spark_conf__/yarn-site.xml 3539327 8 -r-x------ 1 nobody nogroup 5640 Jun 9 03:39 ./__spark_conf__/hadoop-metrics2.properties 3539355 8 -r-x------ 1 nobody nogroup 4113 Jun 9 03:39 ./__spark_conf__/mapred-queues.xml.template 3539344 4 -r-x------ 1 nobody nogroup 2490 Jun 9 03:39 ./__spark_conf__/hadoop-metrics.properties 3539334 4 -r-x------ 1 nobody nogroup 744 Jun 9 03:39 ./__spark_conf__/ssl-client.xml 3539343 8 -r-x------ 1 nobody nogroup 4221 Jun 9 03:39 ./__spark_conf__/task-log4j.properties 3539348 4 -r-x------ 1 nobody nogroup 1308 Jun 9 03:39 ./__spark_conf__/hadoop-policy.xml 3539341 4 -r-x------ 1 nobody nogroup 1045 Jun 9 03:39 ./__spark_conf__/container-executor.cfg 3539352 4 -r-x------ 1 nobody nogroup 1000 Jun 9 03:39 ./__spark_conf__/ssl-server.xml 3539342 4 -r-x------ 1 nobody nogroup 1020 Jun 9 03:39 ./__spark_conf__/commons-logging.properties 3539289 44 -r-x------ 1 nobody nogroup 44846 Jun 9 03:39 ./py4j-0.9-src.zip broken symlinks(find -L . 
-maxdepth 5 -type l -ls): End of LogType:directory.info LogType:launch_container.sh Log Upload Time:Thu Jun 09 03:39:17 +0000 2016 LogLength:7141 Log Contents: #!/bin/bash export SPARK_YARN_MODE="true" export SPARK_YARN_STAGING_DIR=".sparkStaging/application_1465316720094_0025" export JAVA_HOME="/usr/lib/jvm/java-7-openjdk-amd64" export SPARK_YARN_CACHE_FILES_VISIBILITIES="PRIVATE,PRIVATE" export NM_AUX_SERVICE_mapreduce_shuffle="AAA0+gAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAA= " export SPARK_YARN_CACHE_ARCHIVES="wasb://kevingre@wranglewestus.blob.core.windows.net/user/KevinGre/.sparkStaging/application_1465316720094_0025/__spark_conf__2196432329335185680.zip#__spark_conf__" export SPARK_LOG_URL_STDERR="http://10.0.0.8:30060/node/containerlogs/container_e05_1465316720094_0025_01_000003/KevinGre/stderr?start=-4096" export HADOOP_YARN_HOME="/usr/hdp/current/hadoop-yarn-nodemanager" export NM_HOST="10.0.0.8" export PYTHONPATH="/usr/hdp/current/spark-client/python:/usr/hdp/current/spark-client/python/lib/py4j-0.9-src.zip:$PWD/pyspark.zip:$PWD/py4j-0.9-src.zip" export SPARK_YARN_CACHE_ARCHIVES_FILE_SIZES="105661" export JVM_PID="$$" export SPARK_HOME="/usr/hdp/current/spark-client" export SPARK_YARN_CACHE_FILES_TIME_STAMPS="1465443535000,1465443535000" export SPARK_USER="KevinGre" export PWD="/mnt/resource/hadoop/yarn/local/usercache/KevinGre/appcache/application_1465316720094_0025/container_e05_1465316720094_0025_01_000003" export NM_PORT="30050" export LOGNAME="KevinGre" export SPARK_LOG_URL_STDOUT="http://10.0.0.8:30060/node/containerlogs/container_e05_1465316720094_0025_01_000003/KevinGre/stdout?start=-4096" export MALLOC_ARENA_MAX="4" export LOG_DIRS="/mnt/resource/hadoop/yarn/log/application_1465316720094_0025/container_e05_1465316720094_0025_01_000003" export SPARK_YARN_CACHE_FILES_FILE_SIZES="357163,44846" export NM_HTTP_PORT="30060" export LOCAL_DIRS="/mnt/resource/hadoop/yarn/local/usercache/KevinGre/appcache/application_1465316720094_0025" export NM_AUX_SERVICE_spark_shuffle="" export SPARK_YARN_CACHE_FILES="wasb://kevingre@wranglewestus.blob.core.windows.net/user/KevinGre/.sparkStaging/application_1465316720094_0025/pyspark.zip#pyspark.zip,wasb://kevingre@wranglewestus.blob.core.windows.net/user/KevinGre/.sparkStaging/application_1465316720094_0025/py4j-0.9-src.zip#py4j-0.9-src.zip" export SPARK_YARN_CACHE_ARCHIVES_TIME_STAMPS="1465443536000" export SPARK_DIST_CLASSPATH=":/usr/hdp/current/spark-client/lib/*:/usr/lib/hdinsight-datalake/*:/usr/hdp/current/spark-client/conf/hive-site.xml" export CLASSPATH="$PWD:$PWD/__spark_conf__:/usr/hdp/current/spark-client/lib/spark-assembly.jar:$HADOOP_CONF_DIR:/usr/hdp/current/hadoop-client/*:/usr/hdp/current/hadoop-client/lib/*:/usr/hdp/current/hadoop-hdfs-client/*:/usr/hdp/current/hadoop-hdfs-client/lib/*:/usr/hdp/current/hadoop-yarn-client/*:/usr/hdp/current/hadoop-yarn-client/lib/*:$PWD/mr-framework/hadoop/share/hadoop/mapreduce/*:$PWD/mr-framework/hadoop/share/hadoop/mapreduce/lib/*:$PWD/mr-framework/hadoop/share/hadoop/common/*:$PWD/mr-framework/hadoop/share/hadoop/common/lib/*:$PWD/mr-framework/hadoop/share/hadoop/yarn/*:$PWD/mr-framework/hadoop/share/hadoop/yarn/lib/*:$PWD/mr-framework/hadoop/share/hadoop/hdfs/*:$PWD/mr-framework/hadoop/share/hadoop/hdfs/lib/*:$PWD/mr-framework/hadoop/share/hadoop/tools/lib/*:/usr/hdp/2.4.2.0-258/hadoop/lib/hadoop-lzo-0.6.0.2.4.2.0-258.jar:/etc/hadoop/conf/secure::/usr/hdp/current/spark-client/lib/*:/usr/lib/hdinsight-datalake/*:/usr/hdp/current/spark-client/conf/hive-site.xml" export 
HADOOP_TOKEN_FILE_LOCATION="/mnt/resource/hadoop/yarn/local/usercache/KevinGre/appcache/application_1465316720094_0025/container_e05_1465316720094_0025_01_000003/container_tokens" export SPARK_YARN_CACHE_ARCHIVES_VISIBILITIES="PRIVATE" export USER="KevinGre" export CONTAINER_ID="container_e05_1465316720094_0025_01_000003" export HOME="/home/" export PYTHONHASHSEED="0" export HADOOP_CONF_DIR="/usr/hdp/current/hadoop-client/conf" ln -sf "/mnt/resource/hadoop/yarn/local/usercache/KevinGre/filecache/175/pyspark.zip" "pyspark.zip" hadoop_shell_errorcode=$? if [ $hadoop_shell_errorcode -ne 0 ] then exit $hadoop_shell_errorcode fi ln -sf "/mnt/resource/hadoop/yarn/local/usercache/KevinGre/filecache/177/__spark_conf__2196432329335185680.zip" "__spark_conf__" hadoop_shell_errorcode=$? if [ $hadoop_shell_errorcode -ne 0 ] then exit $hadoop_shell_errorcode fi ln -sf "/mnt/resource/hadoop/yarn/local/usercache/KevinGre/filecache/176/py4j-0.9-src.zip" "py4j-0.9-src.zip" hadoop_shell_errorcode=$? if [ $hadoop_shell_errorcode -ne 0 ] then exit $hadoop_shell_errorcode fi # Creating copy of launch script cp "launch_container.sh" "/mnt/resource/hadoop/yarn/log/application_1465316720094_0025/container_e05_1465316720094_0025_01_000003/launch_container.sh" chmod 640 "/mnt/resource/hadoop/yarn/log/application_1465316720094_0025/container_e05_1465316720094_0025_01_000003/launch_container.sh" # Determining directory contents echo "ls -l:" 1>"/mnt/resource/hadoop/yarn/log/application_1465316720094_0025/container_e05_1465316720094_0025_01_000003/directory.info" ls -l 1>>"/mnt/resource/hadoop/yarn/log/application_1465316720094_0025/container_e05_1465316720094_0025_01_000003/directory.info" echo "find -L . -maxdepth 5 -ls:" 1>>"/mnt/resource/hadoop/yarn/log/application_1465316720094_0025/container_e05_1465316720094_0025_01_000003/directory.info" find -L . -maxdepth 5 -ls 1>>"/mnt/resource/hadoop/yarn/log/application_1465316720094_0025/container_e05_1465316720094_0025_01_000003/directory.info" echo "broken symlinks(find -L . -maxdepth 5 -type l -ls):" 1>>"/mnt/resource/hadoop/yarn/log/application_1465316720094_0025/container_e05_1465316720094_0025_01_000003/directory.info" find -L . 
-maxdepth 5 -type l -ls 1>>"/mnt/resource/hadoop/yarn/log/application_1465316720094_0025/container_e05_1465316720094_0025_01_000003/directory.info" exec /bin/bash -c "$JAVA_HOME/bin/java -server -XX:OnOutOfMemoryError='kill %p' -Xms6144m -Xmx6144m '-Dhdp.version=' '-Detwlogger.component=sparkexecutor' '-DlogFilter.filename=SparkLogFilters.xml' '-DpatternGroup.filename=SparkPatternGroups.xml' '-Dlog4jspark.root.logger=INFO,console,DRFA,ETW,Anonymizer' '-Dlog4jspark.log.dir=/var/log/sparkapp' '-Dlog4jspark.log.file=sparkexecutor_\${user.name}.log' '-Dlog4j.configuration=file:/usr/hdp/current/spark-client/conf/log4j.properties' '-Djavax.xml.parsers.SAXParserFactory=com.sun.org.apache.xerces.internal.jaxp.SAXParserFactoryImpl' -Djava.io.tmpdir=$PWD/tmp '-Dspark.driver.port=43484' '-Dspark.history.ui.port=18080' '-Dspark.ui.port=0' -Dspark.yarn.app.container.log.dir=/mnt/resource/hadoop/yarn/log/application_1465316720094_0025/container_e05_1465316720094_0025_01_000003 -XX:MaxPermSize=256m org.apache.spark.executor.CoarseGrainedExecutorBackend --driver-url spark://CoarseGrainedScheduler@10.0.0.10:43484 --executor-id 2 --hostname 10.0.0.8 --cores 2 --app-id application_1465316720094_0025 --user-class-path file:$PWD/__app__.jar 1> /mnt/resource/hadoop/yarn/log/application_1465316720094_0025/container_e05_1465316720094_0025_01_000003/stdout 2> /mnt/resource/hadoop/yarn/log/application_1465316720094_0025/container_e05_1465316720094_0025_01_000003/stderr" hadoop_shell_errorcode=$? if [ $hadoop_shell_errorcode -ne 0 ] then exit $hadoop_shell_errorcode fi End of LogType:launch_container.sh LogType:stderr Log Upload Time:Thu Jun 09 03:39:17 +0000 2016 LogLength:11660 Log Contents: SLF4J: Class path contains multiple SLF4J bindings. SLF4J: Found binding in [jar:file:/usr/hdp/2.4.2.0-258/spark/lib/spark-assembly-1.6.1.2.4.2.0-258-hadoop2.7.1.2.4.2.0-258.jar!/org/slf4j/impl/StaticLoggerBinder.class] SLF4J: Found binding in [jar:file:/usr/hdp/2.4.2.0-258/hadoop/lib/slf4j-log4j12-1.7.10.jar!/org/slf4j/impl/StaticLoggerBinder.class] SLF4J: Found binding in [jar:file:/usr/hdp/2.4.2.0-258/spark/lib/spark-examples-1.6.1.2.4.2.0-258-hadoop2.7.1.2.4.2.0-258.jar!/org/slf4j/impl/StaticLoggerBinder.class] SLF4J: See http://www.slf4j.org/codes.html#multiple_bindings for an explanation. SLF4J: Actual binding is of type [org.slf4j.impl.Log4jLoggerFactory] 16/06/09 03:39:10 INFO CoarseGrainedExecutorBackend: Registered signal handlers for [TERM, HUP, INT] 16/06/09 03:39:11 WARN NativeCodeLoader: Unable to load native-hadoop library for your platform... 
using builtin-java classes where applicable 16/06/09 03:39:11 INFO SecurityManager: Changing view acls to: nobody,KevinGre 16/06/09 03:39:11 INFO SecurityManager: Changing modify acls to: nobody,KevinGre 16/06/09 03:39:11 INFO SecurityManager: SecurityManager: authentication disabled; ui acls disabled; users with view permissions: Set(nobody, KevinGre); users with modify permissions: Set(nobody, KevinGre) 16/06/09 03:39:12 INFO SecurityManager: Changing view acls to: nobody,KevinGre 16/06/09 03:39:12 INFO SecurityManager: Changing modify acls to: nobody,KevinGre 16/06/09 03:39:12 INFO SecurityManager: SecurityManager: authentication disabled; ui acls disabled; users with view permissions: Set(nobody, KevinGre); users with modify permissions: Set(nobody, KevinGre) 16/06/09 03:39:12 INFO Slf4jLogger: Slf4jLogger started 16/06/09 03:39:12 INFO Remoting: Starting remoting 16/06/09 03:39:12 INFO Remoting: Remoting started; listening on addresses :[akka.tcp://sparkExecutorActorSystem@10.0.0.8:35094] 16/06/09 03:39:12 INFO Utils: Successfully started service 'sparkExecutorActorSystem' on port 35094. 16/06/09 03:39:12 INFO DiskBlockManager: Created local directory at /mnt/resource/hadoop/yarn/local/usercache/KevinGre/appcache/application_1465316720094_0025/blockmgr-d436eac3-6b24-4a48-aa76-5ed1fa9080ed 16/06/09 03:39:12 INFO MemoryStore: MemoryStore started with capacity 4.1 GB 16/06/09 03:39:12 INFO CoarseGrainedExecutorBackend: Connecting to driver: spark://CoarseGrainedScheduler@10.0.0.10:43484 16/06/09 03:39:12 INFO CoarseGrainedExecutorBackend: Successfully registered with driver 16/06/09 03:39:12 INFO Executor: Starting executor ID 2 on host 10.0.0.8 16/06/09 03:39:12 INFO Utils: Successfully started service 'org.apache.spark.network.netty.NettyBlockTransferService' on port 38318. 16/06/09 03:39:12 INFO NettyBlockTransferService: Server created on 38318 16/06/09 03:39:12 INFO BlockManagerMaster: Trying to register BlockManager 16/06/09 03:39:12 INFO BlockManagerMaster: Registered BlockManager 16/06/09 03:39:13 INFO CoarseGrainedExecutorBackend: Got assigned task 0 16/06/09 03:39:13 INFO CoarseGrainedExecutorBackend: Got assigned task 2 16/06/09 03:39:13 INFO Executor: Running task 2.0 in stage 0.0 (TID 2) 16/06/09 03:39:13 INFO Executor: Running task 0.0 in stage 0.0 (TID 0) 16/06/09 03:39:13 INFO TorrentBroadcast: Started reading broadcast variable 0 16/06/09 03:39:13 INFO MemoryStore: Block broadcast_0_piece0 stored as bytes in memory (estimated size 2.6 KB, free 2.6 KB) 16/06/09 03:39:13 INFO TorrentBroadcast: Reading broadcast variable 0 took 109 ms 16/06/09 03:39:13 INFO MemoryStore: Block broadcast_0 stored as values in memory (estimated size 3.9 KB, free 6.6 KB) 16/06/09 03:39:14 INFO PythonRunner: Times: total = 1040, boot = 1022, init = 18, finish = 0 16/06/09 03:39:14 INFO PythonRunner: Times: total = 1045, boot = 1025, init = 20, finish = 0 16/06/09 03:39:14 INFO Executor: Finished task 0.0 in stage 0.0 (TID 0). 962 bytes result sent to driver 16/06/09 03:39:14 INFO Executor: Finished task 2.0 in stage 0.0 (TID 2). 
962 bytes result sent to driver 16/06/09 03:39:15 INFO CoarseGrainedExecutorBackend: Driver commanded a shutdown 16/06/09 03:39:15 INFO metrics: type=GAUGE, name=application_1465316720094_0025.2.executor.filesystem.file.largeRead_ops, value=0 16/06/09 03:39:15 INFO metrics: type=GAUGE, name=application_1465316720094_0025.2.executor.filesystem.file.read_bytes, value=0 16/06/09 03:39:15 INFO metrics: type=GAUGE, name=application_1465316720094_0025.2.executor.filesystem.file.read_ops, value=0 16/06/09 03:39:15 INFO metrics: type=GAUGE, name=application_1465316720094_0025.2.executor.filesystem.file.write_bytes, value=0 16/06/09 03:39:15 INFO metrics: type=GAUGE, name=application_1465316720094_0025.2.executor.filesystem.file.write_ops, value=0 16/06/09 03:39:15 INFO metrics: type=GAUGE, name=application_1465316720094_0025.2.executor.filesystem.hdfs.largeRead_ops, value=0 16/06/09 03:39:15 INFO metrics: type=GAUGE, name=application_1465316720094_0025.2.executor.filesystem.hdfs.read_bytes, value=0 16/06/09 03:39:15 INFO metrics: type=GAUGE, name=application_1465316720094_0025.2.executor.filesystem.hdfs.read_ops, value=0 16/06/09 03:39:15 INFO metrics: type=GAUGE, name=application_1465316720094_0025.2.executor.filesystem.hdfs.write_bytes, value=0 16/06/09 03:39:15 INFO metrics: type=GAUGE, name=application_1465316720094_0025.2.executor.filesystem.hdfs.write_ops, value=0 16/06/09 03:39:15 INFO metrics: type=GAUGE, name=application_1465316720094_0025.2.executor.threadpool.activeTasks, value=0 16/06/09 03:39:15 INFO metrics: type=GAUGE, name=application_1465316720094_0025.2.executor.threadpool.completeTasks, value=2 16/06/09 03:39:15 INFO metrics: type=GAUGE, name=application_1465316720094_0025.2.executor.threadpool.currentPool_size, value=2 16/06/09 03:39:15 INFO metrics: type=GAUGE, name=application_1465316720094_0025.2.executor.threadpool.maxPool_size, value=2147483647 16/06/09 03:39:15 INFO metrics: type=GAUGE, name=application_1465316720094_0025.2.jvm.PS-MarkSweep.count, value=0 16/06/09 03:39:15 INFO metrics: type=GAUGE, name=application_1465316720094_0025.2.jvm.PS-MarkSweep.time, value=0 16/06/09 03:39:15 INFO metrics: type=GAUGE, name=application_1465316720094_0025.2.jvm.PS-Scavenge.count, value=0 16/06/09 03:39:15 INFO metrics: type=GAUGE, name=application_1465316720094_0025.2.jvm.PS-Scavenge.time, value=0 16/06/09 03:39:15 INFO metrics: type=GAUGE, name=application_1465316720094_0025.2.jvm.heap.committed, value=6174015488 16/06/09 03:39:15 INFO metrics: type=GAUGE, name=application_1465316720094_0025.2.jvm.heap.init, value=6442450944 16/06/09 03:39:15 INFO metrics: type=GAUGE, name=application_1465316720094_0025.2.jvm.heap.max, value=6174015488 16/06/09 03:39:15 INFO metrics: type=GAUGE, name=application_1465316720094_0025.2.jvm.heap.usage, value=0.2452633912148683 16/06/09 03:39:15 INFO metrics: type=GAUGE, name=application_1465316720094_0025.2.jvm.heap.used, value=1514259976 16/06/09 03:39:15 INFO metrics: type=GAUGE, name=application_1465316720094_0025.2.jvm.non-heap.committed, value=45547520 16/06/09 03:39:15 INFO metrics: type=GAUGE, name=application_1465316720094_0025.2.jvm.non-heap.init, value=24576000 16/06/09 03:39:15 INFO metrics: type=GAUGE, name=application_1465316720094_0025.2.jvm.non-heap.max, value=318767104 16/06/09 03:39:15 INFO metrics: type=GAUGE, name=application_1465316720094_0025.2.jvm.non-heap.usage, value=0.13819548958226255 16/06/09 03:39:15 INFO metrics: type=GAUGE, name=application_1465316720094_0025.2.jvm.non-heap.used, value=44052176 16/06/09 03:39:15 INFO 
metrics: type=GAUGE, name=application_1465316720094_0025.2.jvm.pools.Code-Cache.committed, value=2555904 16/06/09 03:39:15 INFO metrics: type=GAUGE, name=application_1465316720094_0025.2.jvm.pools.Code-Cache.init, value=2555904 16/06/09 03:39:15 INFO metrics: type=GAUGE, name=application_1465316720094_0025.2.jvm.pools.Code-Cache.max, value=50331648 16/06/09 03:39:15 INFO metrics: type=GAUGE, name=application_1465316720094_0025.2.jvm.pools.Code-Cache.usage, value=0.030101776123046875 16/06/09 03:39:15 INFO metrics: type=GAUGE, name=application_1465316720094_0025.2.jvm.pools.Code-Cache.used, value=1515072 16/06/09 03:39:15 INFO metrics: type=GAUGE, name=application_1465316720094_0025.2.jvm.pools.PS-Eden-Space.committed, value=1610612736 16/06/09 03:39:15 INFO metrics: type=GAUGE, name=application_1465316720094_0025.2.jvm.pools.PS-Eden-Space.init, value=1610612736 16/06/09 03:39:15 INFO metrics: type=GAUGE, name=application_1465316720094_0025.2.jvm.pools.PS-Eden-Space.max, value=1610612736 16/06/09 03:39:15 INFO metrics: type=GAUGE, name=application_1465316720094_0025.2.jvm.pools.PS-Eden-Space.usage, value=0.9401763329903284 16/06/09 03:39:15 INFO metrics: type=GAUGE, name=application_1465316720094_0025.2.jvm.pools.PS-Eden-Space.used, value=1514259976 16/06/09 03:39:15 INFO metrics: type=GAUGE, name=application_1465316720094_0025.2.jvm.pools.PS-Old-Gen.committed, value=4294967296 16/06/09 03:39:15 INFO metrics: type=GAUGE, name=application_1465316720094_0025.2.jvm.pools.PS-Old-Gen.init, value=4294967296 16/06/09 03:39:15 INFO metrics: type=GAUGE, name=application_1465316720094_0025.2.jvm.pools.PS-Old-Gen.max, value=4294967296 16/06/09 03:39:15 INFO metrics: type=GAUGE, name=application_1465316720094_0025.2.jvm.pools.PS-Old-Gen.usage, value=0.0 16/06/09 03:39:15 INFO metrics: type=GAUGE, name=application_1465316720094_0025.2.jvm.pools.PS-Old-Gen.used, value=0 16/06/09 03:39:15 INFO metrics: type=GAUGE, name=application_1465316720094_0025.2.jvm.pools.PS-Perm-Gen.committed, value=42991616 16/06/09 03:39:15 INFO metrics: type=GAUGE, name=application_1465316720094_0025.2.jvm.pools.PS-Perm-Gen.init, value=22020096 16/06/09 03:39:15 INFO metrics: type=GAUGE, name=application_1465316720094_0025.2.jvm.pools.PS-Perm-Gen.max, value=268435456 16/06/09 03:39:15 INFO metrics: type=GAUGE, name=application_1465316720094_0025.2.jvm.pools.PS-Perm-Gen.usage, value=0.15846654772758484 16/06/09 03:39:15 INFO metrics: type=GAUGE, name=application_1465316720094_0025.2.jvm.pools.PS-Perm-Gen.used, value=42538040 16/06/09 03:39:15 INFO metrics: type=GAUGE, name=application_1465316720094_0025.2.jvm.pools.PS-Survivor-Space.committed, value=268435456 16/06/09 03:39:15 INFO metrics: type=GAUGE, name=application_1465316720094_0025.2.jvm.pools.PS-Survivor-Space.init, value=268435456 16/06/09 03:39:15 INFO metrics: type=GAUGE, name=application_1465316720094_0025.2.jvm.pools.PS-Survivor-Space.max, value=268435456 16/06/09 03:39:15 INFO metrics: type=GAUGE, name=application_1465316720094_0025.2.jvm.pools.PS-Survivor-Space.usage, value=0.0 16/06/09 03:39:15 INFO metrics: type=GAUGE, name=application_1465316720094_0025.2.jvm.pools.PS-Survivor-Space.used, value=0 16/06/09 03:39:15 INFO metrics: type=GAUGE, name=application_1465316720094_0025.2.jvm.total.committed, value=6219563008 16/06/09 03:39:15 INFO metrics: type=GAUGE, name=application_1465316720094_0025.2.jvm.total.init, value=6467026944 16/06/09 03:39:15 INFO metrics: type=GAUGE, name=application_1465316720094_0025.2.jvm.total.max, value=6492782592 16/06/09 03:39:15 INFO 
metrics: type=GAUGE, name=application_1465316720094_0025.2.jvm.total.used, value=1558317200
16/06/09 03:39:15 INFO MemoryStore: MemoryStore cleared
16/06/09 03:39:15 INFO BlockManager: BlockManager stopped
16/06/09 03:39:15 ERROR CoarseGrainedExecutorBackend: Driver 10.0.0.10:43484 disassociated! Shutting down.
16/06/09 03:39:15 INFO RemoteActorRefProvider$RemotingTerminator: Shutting down remote daemon.
16/06/09 03:39:15 INFO ShutdownHookManager: Shutdown hook called
16/06/09 03:39:15 INFO RemoteActorRefProvider$RemotingTerminator: Remote daemon shut down; proceeding with flushing remote transports.
End of LogType:stderr

LogType:stdout
Log Upload Time:Thu Jun 09 03:39:17 +0000 2016
LogLength:0
Log Contents:
End of LogType:stdout

KevinGre@hn0-keving:~/probe$
KevinGre@hn0-keving:~/probe$ export PYSPARK_PYTHON="python3"
KevinGre@hn0-keving:~/probe$ export PYSPARK_DRIVER_PYTHON="python3"
KevinGre@hn0-keving:~/probe$ '/usr/hdp/current/spark-client/bin/spark-submit' '--master' 'yarn' '--deploy-mode' 'cluster' '--name' 'probeVersion' '--conf' 'spark.yarn.maxAppAttempts=1' '--conf' 'spark.yarn.tags=livy_a17e8ded-2121-4529-95c5-7cc1791311a0' probe.py
SLF4J: Class path contains multiple SLF4J bindings.
SLF4J: Found binding in [jar:file:/usr/hdp/2.4.2.0-258/spark/lib/spark-assembly-1.6.1.2.4.2.0-258-hadoop2.7.1.2.4.2.0-258.jar!/org/slf4j/impl/StaticLoggerBinder.class]
SLF4J: Found binding in [jar:file:/usr/hdp/2.4.2.0-258/spark/lib/spark-examples-1.6.1.2.4.2.0-258-hadoop2.7.1.2.4.2.0-258.jar!/org/slf4j/impl/StaticLoggerBinder.class]
SLF4J: See http://www.slf4j.org/codes.html#multiple_bindings for an explanation.
SLF4J: Actual binding is of type [org.slf4j.impl.Log4jLoggerFactory]
16/06/09 03:47:41 WARN NativeCodeLoader: Unable to load native-hadoop library for your platform... using builtin-java classes where applicable
16/06/09 03:47:42 INFO TimelineClientImpl: Timeline service address: http://hn0-keving.gnw4jk51tgbezpywcdl25ba0xb.dx.internal.cloudapp.net:8188/ws/v1/timeline/
16/06/09 03:47:43 INFO MetricsConfig: loaded properties from hadoop-metrics2-azure-file-system.properties
16/06/09 03:47:43 INFO WasbAzureIaasSink: Init starting.
16/06/09 03:47:43 INFO AzureIaasSink: Init starting. Initializing MdsLogger.
16/06/09 03:47:43 INFO AzureIaasSink: Init completed.
16/06/09 03:47:43 INFO WasbAzureIaasSink: Init completed.
16/06/09 03:47:43 INFO MetricsSinkAdapter: Sink azurefs2 started
16/06/09 03:47:43 INFO MetricsSystemImpl: Scheduled snapshot period at 60 second(s).
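The two exports above only change the environment of the shell that runs spark-submit. An alternative is to pin the interpreter per container explicitly through Spark's documented env-forwarding settings rather than relying on environment inheritance. A minimal sketch, not part of the original session, assuming python3 is on the PATH of every node:

# Hedged sketch: force the interpreter on both sides explicitly.
# spark.yarn.appMasterEnv.PYSPARK_PYTHON -> env of the driver's AM container (cluster mode)
# spark.executorEnv.PYSPARK_PYTHON      -> env of each executor container
'/usr/hdp/current/spark-client/bin/spark-submit' --master yarn --deploy-mode cluster \
    --name probeVersion \
    --conf spark.yarn.appMasterEnv.PYSPARK_PYTHON=python3 \
    --conf spark.executorEnv.PYSPARK_PYTHON=python3 \
    probe.py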
16/06/09 03:47:43 INFO MetricsSystemImpl: azure-file-system metrics system started 16/06/09 03:47:43 INFO ConfiguredRMFailoverProxyProvider: Failing over to rm2 16/06/09 03:47:43 INFO Client: Requesting a new application from cluster with 2 NodeManagers 16/06/09 03:47:43 INFO Client: Verifying our application has not requested more than the maximum memory capability of the cluster (25600 MB per container) 16/06/09 03:47:43 INFO Client: Will allocate AM container, with 1408 MB memory including 384 MB overhead 16/06/09 03:47:43 INFO Client: Setting up container launch context for our AM 16/06/09 03:47:43 INFO Client: Setting up the launch environment for our AM container 16/06/09 03:47:43 INFO Client: Preparing resources for our AM container 16/06/09 03:47:43 INFO Client: Uploading resource file:/home/KevinGre/probe/probe.py -> wasb://kevingre@wranglewestus.blob.core.windows.net/user/KevinGre/.sparkStaging/application_1465316720094_0027/probe.py 16/06/09 03:47:44 INFO Client: Uploading resource file:/usr/hdp/2.4.2.0-258/spark/python/lib/pyspark.zip -> wasb://kevingre@wranglewestus.blob.core.windows.net/user/KevinGre/.sparkStaging/application_1465316720094_0027/pyspark.zip 16/06/09 03:47:44 INFO Client: Uploading resource file:/usr/hdp/2.4.2.0-258/spark/python/lib/py4j-0.9-src.zip -> wasb://kevingre@wranglewestus.blob.core.windows.net/user/KevinGre/.sparkStaging/application_1465316720094_0027/py4j-0.9-src.zip 16/06/09 03:47:44 INFO Client: Uploading resource file:/tmp/spark-f1866fe7-104d-43c7-9b2a-608ad8f27445/__spark_conf__6294409567093172894.zip -> wasb://kevingre@wranglewestus.blob.core.windows.net/user/KevinGre/.sparkStaging/application_1465316720094_0027/__spark_conf__6294409567093172894.zip 16/06/09 03:47:45 INFO SecurityManager: Changing view acls to: KevinGre 16/06/09 03:47:45 INFO SecurityManager: Changing modify acls to: KevinGre 16/06/09 03:47:45 INFO SecurityManager: SecurityManager: authentication disabled; ui acls disabled; users with view permissions: Set(KevinGre); users with modify permissions: Set(KevinGre) 16/06/09 03:47:45 INFO Client: Submitting application 27 to ResourceManager 16/06/09 03:47:45 INFO YarnClientImpl: Submitted application application_1465316720094_0027 16/06/09 03:47:46 INFO Client: Application report for application_1465316720094_0027 (state: ACCEPTED) 16/06/09 03:47:46 INFO Client: client token: N/A diagnostics: N/A ApplicationMaster host: N/A ApplicationMaster RPC port: -1 queue: default start time: 1465444065224 final status: UNDEFINED tracking URL: http://hn1-keving.gnw4jk51tgbezpywcdl25ba0xb.dx.internal.cloudapp.net:8088/proxy/application_1465316720094_0027/ user: KevinGre 16/06/09 03:47:47 INFO Client: Application report for application_1465316720094_0027 (state: ACCEPTED) 16/06/09 03:47:48 INFO Client: Application report for application_1465316720094_0027 (state: ACCEPTED) 16/06/09 03:47:49 INFO Client: Application report for application_1465316720094_0027 (state: ACCEPTED) 16/06/09 03:47:50 INFO Client: Application report for application_1465316720094_0027 (state: ACCEPTED) 16/06/09 03:47:51 INFO Client: Application report for application_1465316720094_0027 (state: ACCEPTED) 16/06/09 03:47:52 INFO Client: Application report for application_1465316720094_0027 (state: ACCEPTED) 16/06/09 03:47:53 INFO Client: Application report for application_1465316720094_0027 (state: RUNNING) 16/06/09 03:47:53 INFO Client: client token: N/A diagnostics: N/A ApplicationMaster host: 10.0.0.10 ApplicationMaster RPC port: 0 queue: default start time: 1465444065224 
final status: UNDEFINED
	 tracking URL: http://hn1-keving.gnw4jk51tgbezpywcdl25ba0xb.dx.internal.cloudapp.net:8088/proxy/application_1465316720094_0027/
	 user: KevinGre
16/06/09 03:47:54 INFO Client: Application report for application_1465316720094_0027 (state: RUNNING)
16/06/09 03:47:55 INFO Client: Application report for application_1465316720094_0027 (state: RUNNING)
16/06/09 03:47:56 INFO Client: Application report for application_1465316720094_0027 (state: RUNNING)
16/06/09 03:47:57 INFO Client: Application report for application_1465316720094_0027 (state: RUNNING)
16/06/09 03:47:58 INFO Client: Application report for application_1465316720094_0027 (state: RUNNING)
16/06/09 03:47:59 INFO Client: Application report for application_1465316720094_0027 (state: RUNNING)
16/06/09 03:48:00 INFO Client: Application report for application_1465316720094_0027 (state: RUNNING)
16/06/09 03:48:01 INFO Client: Application report for application_1465316720094_0027 (state: RUNNING)
16/06/09 03:48:02 INFO Client: Application report for application_1465316720094_0027 (state: RUNNING)
16/06/09 03:48:03 INFO Client: Application report for application_1465316720094_0027 (state: FINISHED)
16/06/09 03:48:03 INFO Client:
	 client token: N/A
	 diagnostics: N/A
	 ApplicationMaster host: 10.0.0.10
	 ApplicationMaster RPC port: 0
	 queue: default
	 start time: 1465444065224
	 final status: SUCCEEDED
	 tracking URL: http://hn1-keving.gnw4jk51tgbezpywcdl25ba0xb.dx.internal.cloudapp.net:8088/proxy/application_1465316720094_0027/
	 user: KevinGre
16/06/09 03:48:03 INFO ShutdownHookManager: Shutdown hook called
16/06/09 03:48:03 INFO ShutdownHookManager: Deleting directory /tmp/spark-f1866fe7-104d-43c7-9b2a-608ad8f27445
KevinGre@hn0-keving:~/probe$ yarn logs -applicationId application_1465316720094_0027
16/06/09 03:48:17 INFO impl.TimelineClientImpl: Timeline service address: http://hn0-keving.gnw4jk51tgbezpywcdl25ba0xb.dx.internal.cloudapp.net:8188/ws/v1/timeline/
16/06/09 03:48:18 INFO client.ConfiguredRMFailoverProxyProvider: Failing over to rm2
16/06/09 03:48:18 INFO zlib.ZlibFactory: Successfully loaded & initialized native-zlib library
16/06/09 03:48:18 INFO compress.CodecPool: Got brand-new decompressor [.deflate]

Container: container_e05_1465316720094_0027_01_000001 on 10.0.0.10_30050
==========================================================================
LogType:directory.info
Log Upload Time:Thu Jun 09 03:48:05 +0000 2016
LogLength:4977
Log Contents:
ls -l:
total 32
-rw------- 1 nobody hadoop 74 Jun 9 03:47 container_tokens
-rwx------ 1 nobody hadoop 7123 Jun 9 03:47 launch_container.sh
lrwxrwxrwx 1 nobody hadoop 73 Jun 9 03:47 probe.py -> /mnt/resource/hadoop/yarn/local/usercache/KevinGre/filecache/196/probe.py
lrwxrwxrwx 1 nobody hadoop 81 Jun 9 03:47 py4j-0.9-src.zip -> /mnt/resource/hadoop/yarn/local/usercache/KevinGre/filecache/194/py4j-0.9-src.zip
lrwxrwxrwx 1 nobody hadoop 76 Jun 9 03:47 pyspark.zip -> /mnt/resource/hadoop/yarn/local/usercache/KevinGre/filecache/193/pyspark.zip
lrwxrwxrwx 1 nobody hadoop 102 Jun 9 03:47 __spark_conf__ -> /mnt/resource/hadoop/yarn/local/usercache/KevinGre/filecache/195/__spark_conf__6294409567093172894.zip
drwxr-s--- 2 nobody hadoop 4096 Jun 9 03:47 tmp
find -L . -maxdepth 5 -ls:
3538950 4 drwxr-s--- 3 nobody hadoop 4096 Jun 9 03:47 .
3539060 4 -r-x------ 1 nobody nogroup 403 Jun 9 03:47 ./probe.py 3539221 8 -rwx------ 1 nobody hadoop 7123 Jun 9 03:47 ./launch_container.sh 3539222 4 -rw------- 1 nobody hadoop 74 Jun 9 03:47 ./container_tokens 3539061 4 drwx------ 2 nobody nogroup 4096 Jun 9 03:47 ./__spark_conf__ 3539213 4 -r-x------ 1 nobody nogroup 1000 Jun 9 03:47 ./__spark_conf__/ssl-server.xml 3539198 4 -r-x------ 1 nobody nogroup 758 Jun 9 03:47 ./__spark_conf__/mapred-site.xml.template 3539175 4 -r-x------ 1 nobody nogroup 3156 Jun 9 03:47 ./__spark_conf__/log4j.properties 3539216 8 -r-x------ 1 nobody nogroup 4113 Jun 9 03:47 ./__spark_conf__/mapred-queues.xml.template 3539215 4 -r-x------ 1 nobody nogroup 945 Jun 9 03:47 ./__spark_conf__/taskcontroller.cfg 3539195 4 -r-x------ 1 nobody nogroup 744 Jun 9 03:47 ./__spark_conf__/ssl-client.xml 3539210 4 -r-x------ 1 nobody nogroup 127 Jun 9 03:47 ./__spark_conf__/slaves 3539214 4 -r-x------ 1 nobody nogroup 2268 Jun 9 03:47 ./__spark_conf__/ssl-server.xml.example 3539203 4 -r-x------ 1 nobody nogroup 1020 Jun 9 03:47 ./__spark_conf__/commons-logging.properties 3539176 8 -r-x------ 1 nobody nogroup 5640 Jun 9 03:47 ./__spark_conf__/hadoop-metrics2.properties 3539191 4 -r-x------ 1 nobody nogroup 265 Jun 9 03:47 ./__spark_conf__/hadoop-metrics2-azure-file-system.properties 3539200 8 -r-x------ 1 nobody nogroup 4277 Jun 9 03:47 ./__spark_conf__/yarn-env.sh 3539062 8 -r-x------ 1 nobody nogroup 7579 Jun 9 03:47 ./__spark_conf__/mapred-site.xml 3539202 4 -r-x------ 1 nobody nogroup 1045 Jun 9 03:47 ./__spark_conf__/container-executor.cfg 3539209 4 -r-x------ 1 nobody nogroup 1308 Jun 9 03:47 ./__spark_conf__/hadoop-policy.xml 3539193 8 -r-x------ 1 nobody nogroup 6136 Jun 9 03:47 ./__spark_conf__/core-site.xml 3539205 4 -r-x------ 1 nobody nogroup 2490 Jun 9 03:47 ./__spark_conf__/hadoop-metrics.properties 3539189 4 -r-x------ 1 nobody nogroup 247 Jun 9 03:47 ./__spark_conf__/hadoop-metrics2-adl-file-system.properties 3539196 4 -r-x------ 1 nobody nogroup 2444 Jun 9 03:47 ./__spark_conf__/capacity-scheduler.xml 3539212 8 -r-x------ 1 nobody nogroup 5142 Jun 9 03:47 ./__spark_conf__/metrics.properties 3539197 4 -r-x------ 1 nobody nogroup 2358 Jun 9 03:47 ./__spark_conf__/topology_script.py 3539208 4 -r-x------ 1 nobody nogroup 2316 Jun 9 03:47 ./__spark_conf__/ssl-client.xml.example 3539217 4 -r-x------ 1 nobody nogroup 2431 Jun 9 03:47 ./__spark_conf__/__spark_conf__.properties 3539199 4 -r-x------ 1 nobody nogroup 1335 Jun 9 03:47 ./__spark_conf__/configuration.xsl 3539206 4 -r-x------ 1 nobody nogroup 757 Jun 9 03:47 ./__spark_conf__/mapred-env.sh 3539204 8 -r-x------ 1 nobody nogroup 4221 Jun 9 03:47 ./__spark_conf__/task-log4j.properties 3539194 4 -r-x------ 1 nobody nogroup 1 Jun 9 03:47 ./__spark_conf__/dfs.exclude 3539201 12 -r-x------ 1 nobody nogroup 8543 Jun 9 03:47 ./__spark_conf__/hdfs-site.xml 3539211 4 -r-x------ 1 nobody nogroup 220 Jun 9 03:47 ./__spark_conf__/topology_mappings.data 3539190 24 -r-x------ 1 nobody nogroup 20890 Jun 9 03:47 ./__spark_conf__/yarn-site.xml 3539207 4 -r-x------ 1 nobody nogroup 1602 Jun 9 03:47 ./__spark_conf__/health_check 3539192 0 -r-x------ 1 nobody nogroup 0 Jun 9 03:47 ./__spark_conf__/yarn.exclude 3539174 8 -r-x------ 1 nobody nogroup 5693 Jun 9 03:47 ./__spark_conf__/hadoop-env.sh 3539056 44 -r-x------ 1 nobody nogroup 44846 Jun 9 03:47 ./py4j-0.9-src.zip 3539220 4 drwxr-s--- 2 nobody hadoop 4096 Jun 9 03:47 ./tmp 3538961 352 -r-x------ 1 nobody nogroup 357163 Jun 9 03:47 ./pyspark.zip broken symlinks(find -L . 
-maxdepth 5 -type l -ls):
End of LogType:directory.info

LogType:launch_container.sh
Log Upload Time:Thu Jun 09 03:48:05 +0000 2016
LogLength:7123
Log Contents:
#!/bin/bash

export SPARK_YARN_MODE="true"
export SPARK_YARN_STAGING_DIR=".sparkStaging/application_1465316720094_0027"
export JAVA_HOME="/usr/lib/jvm/java-7-openjdk-amd64"
export SPARK_YARN_CACHE_FILES_VISIBILITIES="PRIVATE,PRIVATE"
export NM_AUX_SERVICE_mapreduce_shuffle="AAA0+gAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAA= "
export SPARK_YARN_CACHE_ARCHIVES="wasb://kevingre@wranglewestus.blob.core.windows.net/user/KevinGre/.sparkStaging/application_1465316720094_0027/__spark_conf__6294409567093172894.zip#__spark_conf__"
export HADOOP_YARN_HOME="/usr/hdp/current/hadoop-yarn-nodemanager"
export NM_HOST="10.0.0.10"
export PYTHONPATH="/usr/hdp/current/spark-client/python:/usr/hdp/current/spark-client/python/lib/py4j-0.9-src.zip:$PWD/pyspark.zip:$PWD/py4j-0.9-src.zip"
export APPLICATION_WEB_PROXY_BASE="/proxy/application_1465316720094_0027"
export SPARK_YARN_CACHE_ARCHIVES_FILE_SIZES="105560"
export JVM_PID="$$"
export SPARK_USER="KevinGre"
export SPARK_YARN_CACHE_FILES_TIME_STAMPS="1465444064000,1465444064000"
export PWD="/mnt/resource/hadoop/yarn/local/usercache/KevinGre/appcache/application_1465316720094_0027/container_e05_1465316720094_0027_01_000001"
export NM_PORT="30050"
export LOGNAME="KevinGre"
export APP_SUBMIT_TIME_ENV="1465444065224"
export MAX_APP_ATTEMPTS="1"
export MALLOC_ARENA_MAX="4"
export LOG_DIRS="/mnt/resource/hadoop/yarn/log/application_1465316720094_0027/container_e05_1465316720094_0027_01_000001"
export SPARK_YARN_CACHE_FILES_FILE_SIZES="357163,44846"
export LOCAL_DIRS="/mnt/resource/hadoop/yarn/local/usercache/KevinGre/appcache/application_1465316720094_0027"
export NM_HTTP_PORT="30060"
export NM_AUX_SERVICE_spark_shuffle=""
export SPARK_YARN_CACHE_FILES="wasb://kevingre@wranglewestus.blob.core.windows.net/user/KevinGre/.sparkStaging/application_1465316720094_0027/pyspark.zip#pyspark.zip,wasb://kevingre@wranglewestus.blob.core.windows.net/user/KevinGre/.sparkStaging/application_1465316720094_0027/py4j-0.9-src.zip#py4j-0.9-src.zip"
export SPARK_YARN_CACHE_ARCHIVES_TIME_STAMPS="1465444064000"
export SPARK_DIST_CLASSPATH=":/usr/hdp/current/spark-client/lib/*:/usr/lib/hdinsight-datalake/*:/usr/hdp/current/spark-client/conf/hive-site.xml"
export CLASSPATH="$PWD:$PWD/__spark_conf__:/usr/hdp/current/spark-client/lib/spark-assembly.jar:$HADOOP_CONF_DIR:/usr/hdp/current/hadoop-client/*:/usr/hdp/current/hadoop-client/lib/*:/usr/hdp/current/hadoop-hdfs-client/*:/usr/hdp/current/hadoop-hdfs-client/lib/*:/usr/hdp/current/hadoop-yarn-client/*:/usr/hdp/current/hadoop-yarn-client/lib/*:$PWD/mr-framework/hadoop/share/hadoop/mapreduce/*:$PWD/mr-framework/hadoop/share/hadoop/mapreduce/lib/*:$PWD/mr-framework/hadoop/share/hadoop/common/*:$PWD/mr-framework/hadoop/share/hadoop/common/lib/*:$PWD/mr-framework/hadoop/share/hadoop/yarn/*:$PWD/mr-framework/hadoop/share/hadoop/yarn/lib/*:$PWD/mr-framework/hadoop/share/hadoop/hdfs/*:$PWD/mr-framework/hadoop/share/hadoop/hdfs/lib/*:$PWD/mr-framework/hadoop/share/hadoop/tools/lib/*:/usr/hdp/2.4.2.0-258/hadoop/lib/hadoop-lzo-0.6.0.2.4.2.0-258.jar:/etc/hadoop/conf/secure::/usr/hdp/current/spark-client/lib/*:/usr/lib/hdinsight-datalake/*:/usr/hdp/current/spark-client/conf/hive-site.xml"
export HADOOP_TOKEN_FILE_LOCATION="/mnt/resource/hadoop/yarn/local/usercache/KevinGre/appcache/application_1465316720094_0027/container_e05_1465316720094_0027_01_000001/container_tokens"
export PYSPARK_PYTHON="python3"
export SPARK_YARN_CACHE_ARCHIVES_VISIBILITIES="PRIVATE"
export USER="KevinGre"
export PYSPARK_DRIVER_PYTHON="python3"
export CONTAINER_ID="container_e05_1465316720094_0027_01_000001"
export HOME="/home/"
export PYTHONHASHSEED="0"
export HADOOP_CONF_DIR="/usr/hdp/current/hadoop-client/conf"

ln -sf "/mnt/resource/hadoop/yarn/local/usercache/KevinGre/filecache/196/probe.py" "probe.py"
hadoop_shell_errorcode=$?
if [ $hadoop_shell_errorcode -ne 0 ]
then
  exit $hadoop_shell_errorcode
fi
ln -sf "/mnt/resource/hadoop/yarn/local/usercache/KevinGre/filecache/193/pyspark.zip" "pyspark.zip"
hadoop_shell_errorcode=$?
if [ $hadoop_shell_errorcode -ne 0 ]
then
  exit $hadoop_shell_errorcode
fi
ln -sf "/mnt/resource/hadoop/yarn/local/usercache/KevinGre/filecache/194/py4j-0.9-src.zip" "py4j-0.9-src.zip"
hadoop_shell_errorcode=$?
if [ $hadoop_shell_errorcode -ne 0 ]
then
  exit $hadoop_shell_errorcode
fi
ln -sf "/mnt/resource/hadoop/yarn/local/usercache/KevinGre/filecache/195/__spark_conf__6294409567093172894.zip" "__spark_conf__"
hadoop_shell_errorcode=$?
if [ $hadoop_shell_errorcode -ne 0 ]
then
  exit $hadoop_shell_errorcode
fi
# Creating copy of launch script
cp "launch_container.sh" "/mnt/resource/hadoop/yarn/log/application_1465316720094_0027/container_e05_1465316720094_0027_01_000001/launch_container.sh"
chmod 640 "/mnt/resource/hadoop/yarn/log/application_1465316720094_0027/container_e05_1465316720094_0027_01_000001/launch_container.sh"
# Determining directory contents
echo "ls -l:" 1>"/mnt/resource/hadoop/yarn/log/application_1465316720094_0027/container_e05_1465316720094_0027_01_000001/directory.info"
ls -l 1>>"/mnt/resource/hadoop/yarn/log/application_1465316720094_0027/container_e05_1465316720094_0027_01_000001/directory.info"
echo "find -L . -maxdepth 5 -ls:" 1>>"/mnt/resource/hadoop/yarn/log/application_1465316720094_0027/container_e05_1465316720094_0027_01_000001/directory.info"
find -L . -maxdepth 5 -ls 1>>"/mnt/resource/hadoop/yarn/log/application_1465316720094_0027/container_e05_1465316720094_0027_01_000001/directory.info"
echo "broken symlinks(find -L . -maxdepth 5 -type l -ls):" 1>>"/mnt/resource/hadoop/yarn/log/application_1465316720094_0027/container_e05_1465316720094_0027_01_000001/directory.info"
find -L . -maxdepth 5 -type l -ls 1>>"/mnt/resource/hadoop/yarn/log/application_1465316720094_0027/container_e05_1465316720094_0027_01_000001/directory.info"
exec /bin/bash -c "$JAVA_HOME/bin/java -server -Xmx1024m -Djava.io.tmpdir=$PWD/tmp -Dhdp.version=2.4.2.0-258 '-Detwlogger.component=sparkdriver' '-DlogFilter.filename=SparkLogFilters.xml' '-DpatternGroup.filename=SparkPatternGroups.xml' '-Dlog4jspark.root.logger=INFO,console,DRFA,ETW,Anonymizer' '-Dlog4jspark.log.dir=/var/log/sparkapp' '-Dlog4jspark.log.file=sparkdriver_\${user.name}.log' '-Djava.io.tmpdir=/var/tmp/spark' '-Dlog4j.configuration=file:/usr/hdp/current/spark-client/conf/log4j.properties' '-Djavax.xml.parsers.SAXParserFactory=com.sun.org.apache.xerces.internal.jaxp.SAXParserFactoryImpl' -Dspark.yarn.app.container.log.dir=/mnt/resource/hadoop/yarn/log/application_1465316720094_0027/container_e05_1465316720094_0027_01_000001 -XX:MaxPermSize=256m org.apache.spark.deploy.yarn.ApplicationMaster --class 'org.apache.spark.deploy.PythonRunner' --primary-py-file probe.py --executor-memory 6144m --executor-cores 2 --properties-file $PWD/__spark_conf__/__spark_conf__.properties 1> /mnt/resource/hadoop/yarn/log/application_1465316720094_0027/container_e05_1465316720094_0027_01_000001/stdout 2> /mnt/resource/hadoop/yarn/log/application_1465316720094_0027/container_e05_1465316720094_0027_01_000001/stderr"
hadoop_shell_errorcode=$?
if [ $hadoop_shell_errorcode -ne 0 ]
then
  exit $hadoop_shell_errorcode
fi
End of LogType:launch_container.sh

LogType:stderr
Log Upload Time:Thu Jun 09 03:48:05 +0000 2016
LogLength:32716
Log Contents:
SLF4J: Class path contains multiple SLF4J bindings.
SLF4J: Found binding in [jar:file:/usr/hdp/2.4.2.0-258/spark/lib/spark-assembly-1.6.1.2.4.2.0-258-hadoop2.7.1.2.4.2.0-258.jar!/org/slf4j/impl/StaticLoggerBinder.class]
SLF4J: Found binding in [jar:file:/usr/hdp/2.4.2.0-258/hadoop/lib/slf4j-log4j12-1.7.10.jar!/org/slf4j/impl/StaticLoggerBinder.class]
SLF4J: Found binding in [jar:file:/usr/hdp/2.4.2.0-258/spark/lib/spark-examples-1.6.1.2.4.2.0-258-hadoop2.7.1.2.4.2.0-258.jar!/org/slf4j/impl/StaticLoggerBinder.class]
SLF4J: See http://www.slf4j.org/codes.html#multiple_bindings for an explanation.
SLF4J: Actual binding is of type [org.slf4j.impl.Log4jLoggerFactory]
16/06/09 03:47:49 INFO ApplicationMaster: Registered signal handlers for [TERM, HUP, INT]
16/06/09 03:47:49 WARN NativeCodeLoader: Unable to load native-hadoop library for your platform... using builtin-java classes where applicable
16/06/09 03:47:49 INFO ApplicationMaster: ApplicationAttemptId: appattempt_1465316720094_0027_000001
16/06/09 03:47:50 INFO MetricsConfig: loaded properties from hadoop-metrics2-azure-file-system.properties
16/06/09 03:47:50 INFO WasbAzureIaasSink: Init starting.
16/06/09 03:47:50 INFO AzureIaasSink: Init starting. Initializing MdsLogger.
16/06/09 03:47:50 INFO AzureIaasSink: Init completed.
16/06/09 03:47:50 INFO WasbAzureIaasSink: Init completed.
16/06/09 03:47:50 INFO MetricsSinkAdapter: Sink azurefs2 started
16/06/09 03:47:50 INFO MetricsSystemImpl: Scheduled snapshot period at 60 second(s).
16/06/09 03:47:50 INFO MetricsSystemImpl: azure-file-system metrics system started 16/06/09 03:47:50 INFO SecurityManager: Changing view acls to: nobody,KevinGre 16/06/09 03:47:50 INFO SecurityManager: Changing modify acls to: nobody,KevinGre 16/06/09 03:47:50 INFO SecurityManager: SecurityManager: authentication disabled; ui acls disabled; users with view permissions: Set(nobody, KevinGre); users with modify permissions: Set(nobody, KevinGre) 16/06/09 03:47:50 INFO ApplicationMaster: Starting the user application in a separate Thread 16/06/09 03:47:50 INFO ApplicationMaster: Waiting for spark context initialization 16/06/09 03:47:50 INFO ApplicationMaster: Waiting for spark context initialization ... 16/06/09 03:47:51 INFO SparkContext: Running Spark version 1.6.1 16/06/09 03:47:51 INFO SecurityManager: Changing view acls to: nobody,KevinGre 16/06/09 03:47:51 INFO SecurityManager: Changing modify acls to: nobody,KevinGre 16/06/09 03:47:51 INFO SecurityManager: SecurityManager: authentication disabled; ui acls disabled; users with view permissions: Set(nobody, KevinGre); users with modify permissions: Set(nobody, KevinGre) 16/06/09 03:47:51 INFO Utils: Successfully started service 'sparkDriver' on port 38830. 16/06/09 03:47:51 INFO Slf4jLogger: Slf4jLogger started 16/06/09 03:47:52 INFO Remoting: Starting remoting 16/06/09 03:47:52 INFO Remoting: Remoting started; listening on addresses :[akka.tcp://sparkDriverActorSystem@10.0.0.10:45878] 16/06/09 03:47:52 INFO Utils: Successfully started service 'sparkDriverActorSystem' on port 45878. 16/06/09 03:47:52 INFO SparkEnv: Registering MapOutputTracker 16/06/09 03:47:52 INFO SparkEnv: Registering BlockManagerMaster 16/06/09 03:47:52 INFO DiskBlockManager: Created local directory at /mnt/resource/hadoop/yarn/local/usercache/KevinGre/appcache/application_1465316720094_0027/blockmgr-88f143f3-337f-437f-a31b-ed7e37e438bf 16/06/09 03:47:52 INFO MemoryStore: MemoryStore started with capacity 457.9 MB 16/06/09 03:47:52 INFO SparkEnv: Registering OutputCommitCoordinator 16/06/09 03:47:52 INFO JettyUtils: Adding filter: org.apache.hadoop.yarn.server.webproxy.amfilter.AmIpFilter 16/06/09 03:47:52 INFO Server: jetty-8.y.z-SNAPSHOT 16/06/09 03:47:52 INFO AbstractConnector: Started SelectChannelConnector@0.0.0.0:36407 16/06/09 03:47:52 INFO Utils: Successfully started service 'SparkUI' on port 36407. 16/06/09 03:47:52 INFO SparkUI: Bound SparkUI to 0.0.0.0, and started at http://10.0.0.10:36407 16/06/09 03:47:52 INFO YarnClusterScheduler: Created YarnClusterScheduler 16/06/09 03:47:52 INFO SchedulerExtensionServices: Starting Yarn extension services with app application_1465316720094_0027 and attemptId Some(appattempt_1465316720094_0027_000001) 16/06/09 03:47:52 INFO Utils: Successfully started service 'org.apache.spark.network.netty.NettyBlockTransferService' on port 40007. 
16/06/09 03:47:52 INFO NettyBlockTransferService: Server created on 40007
16/06/09 03:47:52 INFO BlockManagerMaster: Trying to register BlockManager
16/06/09 03:47:52 INFO BlockManagerMasterEndpoint: Registering block manager 10.0.0.10:40007 with 457.9 MB RAM, BlockManagerId(driver, 10.0.0.10, 40007)
16/06/09 03:47:52 INFO BlockManagerMaster: Registered BlockManager
16/06/09 03:47:53 INFO EventLoggingListener: Logging events to wasb:///hdp/spark-events/application_1465316720094_0027_1
16/06/09 03:47:53 INFO YarnSchedulerBackend$YarnSchedulerEndpoint: ApplicationMaster registered as NettyRpcEndpointRef(spark://YarnAM@10.0.0.10:38830)
16/06/09 03:47:53 INFO YarnRMClient: Registering the ApplicationMaster
16/06/09 03:47:53 INFO ConfiguredRMFailoverProxyProvider: Failing over to rm2
16/06/09 03:47:53 INFO YarnAllocator: Will request 2 executor containers, each with 2 cores and 6528 MB memory including 384 MB overhead
16/06/09 03:47:53 INFO YarnAllocator: Container request (host: Any, capability: <memory:6528, vCores:2>)
16/06/09 03:47:53 INFO YarnAllocator: Container request (host: Any, capability: <memory:6528, vCores:2>)
16/06/09 03:47:53 INFO ApplicationMaster: Started progress reporter thread with (heartbeat : 5000, initial allocation : 200) intervals
16/06/09 03:47:53 INFO AMRMClientImpl: Received new token for : 10.0.0.10:30050
16/06/09 03:47:53 INFO YarnAllocator: Launching container container_e05_1465316720094_0027_01_000002 for on host 10.0.0.10
16/06/09 03:47:53 INFO YarnAllocator: Launching ExecutorRunnable. driverUrl: spark://CoarseGrainedScheduler@10.0.0.10:38830, executorHostname: 10.0.0.10
16/06/09 03:47:53 INFO ExecutorRunnable: Starting Executor Container
16/06/09 03:47:53 INFO YarnAllocator: Received 1 containers from YARN, launching executors on 1 of them.
16/06/09 03:47:53 INFO ContainerManagementProtocolProxy: yarn.client.max-cached-nodemanagers-proxies : 0
16/06/09 03:47:53 INFO ExecutorRunnable: Setting up ContainerLaunchContext
16/06/09 03:47:53 INFO ExecutorRunnable: Preparing Local resources
16/06/09 03:47:53 INFO ExecutorRunnable: Prepared Local resources Map(pyspark.zip -> resource { scheme: "wasb" host: "wranglewestus.blob.core.windows.net" port: -1 file: "/user/KevinGre/.sparkStaging/application_1465316720094_0027/pyspark.zip" userInfo: "kevingre" } size: 357163 timestamp: 1465444064000 type: FILE visibility: PRIVATE, py4j-0.9-src.zip -> resource { scheme: "wasb" host: "wranglewestus.blob.core.windows.net" port: -1 file: "/user/KevinGre/.sparkStaging/application_1465316720094_0027/py4j-0.9-src.zip" userInfo: "kevingre" } size: 44846 timestamp: 1465444064000 type: FILE visibility: PRIVATE, __spark_conf__ -> resource { scheme: "wasb" host: "wranglewestus.blob.core.windows.net" port: -1 file: "/user/KevinGre/.sparkStaging/application_1465316720094_0027/__spark_conf__6294409567093172894.zip" userInfo: "kevingre" } size: 105560 timestamp: 1465444064000 type: ARCHIVE visibility: PRIVATE)
16/06/09 03:47:53 INFO ExecutorRunnable:
===============================================================================
YARN executor launch context:
  env:
    CLASSPATH ->
{{PWD}}<CPS>{{PWD}}/__spark_conf__<CPS>/usr/hdp/current/spark-client/lib/spark-assembly.jar<CPS>$HADOOP_CONF_DIR<CPS>/usr/hdp/current/hadoop-client/*<CPS>/usr/hdp/current/hadoop-client/lib/*<CPS>/usr/hdp/current/hadoop-hdfs-client/*<CPS>/usr/hdp/current/hadoop-hdfs-client/lib/*<CPS>/usr/hdp/current/hadoop-yarn-client/*<CPS>/usr/hdp/current/hadoop-yarn-client/lib/*<CPS>$PWD/mr-framework/hadoop/share/hadoop/mapreduce/*:$PWD/mr-framework/hadoop/share/hadoop/mapreduce/lib/*:$PWD/mr-framework/hadoop/share/hadoop/common/*:$PWD/mr-framework/hadoop/share/hadoop/common/lib/*:$PWD/mr-framework/hadoop/share/hadoop/yarn/*:$PWD/mr-framework/hadoop/share/hadoop/yarn/lib/*:$PWD/mr-framework/hadoop/share/hadoop/hdfs/*:$PWD/mr-framework/hadoop/share/hadoop/hdfs/lib/*:$PWD/mr-framework/hadoop/share/hadoop/tools/lib/*:/usr/hdp/2.4.2.0-258/hadoop/lib/hadoop-lzo-0.6.0.2.4.2.0-258.jar:/etc/hadoop/conf/secure<CPS>:/usr/hdp/current/spark-client/lib/*:/usr/lib/hdinsight-datalake/*:/usr/hdp/current/spark-client/conf/hive-site.xml
    SPARK_YARN_CACHE_ARCHIVES -> wasb://kevingre@wranglewestus.blob.core.windows.net/user/KevinGre/.sparkStaging/application_1465316720094_0027/__spark_conf__6294409567093172894.zip#__spark_conf__
    SPARK_LOG_URL_STDERR -> http://10.0.0.10:30060/node/containerlogs/container_e05_1465316720094_0027_01_000002/KevinGre/stderr?start=-4096
    SPARK_YARN_CACHE_FILES_FILE_SIZES -> 357163,44846
    SPARK_YARN_STAGING_DIR -> .sparkStaging/application_1465316720094_0027
    SPARK_DIST_CLASSPATH -> :/usr/hdp/current/spark-client/lib/*:/usr/lib/hdinsight-datalake/*:/usr/hdp/current/spark-client/conf/hive-site.xml
    SPARK_YARN_CACHE_FILES_VISIBILITIES -> PRIVATE,PRIVATE
    SPARK_YARN_CACHE_ARCHIVES_FILE_SIZES -> 105560
    SPARK_USER -> KevinGre
    SPARK_YARN_CACHE_ARCHIVES_TIME_STAMPS -> 1465444064000
    SPARK_YARN_MODE -> true
    PYTHONHASHSEED -> 0
    SPARK_YARN_CACHE_FILES_TIME_STAMPS -> 1465444064000,1465444064000
    SPARK_HOME -> /usr/hdp/current/spark-client
    PYTHONPATH -> /usr/hdp/current/spark-client/python:/usr/hdp/current/spark-client/python/lib/py4j-0.9-src.zip<CPS>{{PWD}}/pyspark.zip<CPS>{{PWD}}/py4j-0.9-src.zip
    SPARK_LOG_URL_STDOUT -> http://10.0.0.10:30060/node/containerlogs/container_e05_1465316720094_0027_01_000002/KevinGre/stdout?start=-4096
    SPARK_YARN_CACHE_ARCHIVES_VISIBILITIES -> PRIVATE
    SPARK_YARN_CACHE_FILES -> wasb://kevingre@wranglewestus.blob.core.windows.net/user/KevinGre/.sparkStaging/application_1465316720094_0027/pyspark.zip#pyspark.zip,wasb://kevingre@wranglewestus.blob.core.windows.net/user/KevinGre/.sparkStaging/application_1465316720094_0027/py4j-0.9-src.zip#py4j-0.9-src.zip

  command:
    {{JAVA_HOME}}/bin/java -server -XX:OnOutOfMemoryError='kill %p' -Xms6144m -Xmx6144m '-Dhdp.version=' '-Detwlogger.component=sparkexecutor' '-DlogFilter.filename=SparkLogFilters.xml' '-DpatternGroup.filename=SparkPatternGroups.xml' '-Dlog4jspark.root.logger=INFO,console,DRFA,ETW,Anonymizer' '-Dlog4jspark.log.dir=/var/log/sparkapp' '-Dlog4jspark.log.file=sparkexecutor_\${user.name}.log' '-Dlog4j.configuration=file:/usr/hdp/current/spark-client/conf/log4j.properties' '-Djavax.xml.parsers.SAXParserFactory=com.sun.org.apache.xerces.internal.jaxp.SAXParserFactoryImpl' -Djava.io.tmpdir={{PWD}}/tmp '-Dspark.driver.port=38830' '-Dspark.history.ui.port=18080' '-Dspark.ui.port=0' -Dspark.yarn.app.container.log.dir=<LOG_DIR> -XX:MaxPermSize=256m org.apache.spark.executor.CoarseGrainedExecutorBackend --driver-url spark://CoarseGrainedScheduler@10.0.0.10:38830 --executor-id 1 --hostname 10.0.0.10 --cores 2 --app-id application_1465316720094_0027 --user-class-path file:$PWD/__app__.jar 1> <LOG_DIR>/stdout 2> <LOG_DIR>/stderr
===============================================================================

16/06/09 03:47:53 INFO ContainerManagementProtocolProxy: Opening proxy : 10.0.0.10:30050
16/06/09 03:47:54 INFO AMRMClientImpl: Received new token for : 10.0.0.8:30050
16/06/09 03:47:54 INFO YarnAllocator: Launching container container_e05_1465316720094_0027_01_000003 for on host 10.0.0.8
16/06/09 03:47:54 INFO YarnAllocator: Launching ExecutorRunnable. driverUrl: spark://CoarseGrainedScheduler@10.0.0.10:38830, executorHostname: 10.0.0.8
16/06/09 03:47:54 INFO YarnAllocator: Received 1 containers from YARN, launching executors on 1 of them.
16/06/09 03:47:54 INFO ExecutorRunnable: Starting Executor Container
16/06/09 03:47:54 INFO ContainerManagementProtocolProxy: yarn.client.max-cached-nodemanagers-proxies : 0
16/06/09 03:47:54 INFO ExecutorRunnable: Setting up ContainerLaunchContext
16/06/09 03:47:54 INFO ExecutorRunnable: Preparing Local resources
16/06/09 03:47:54 INFO ExecutorRunnable: Prepared Local resources Map(pyspark.zip -> resource { scheme: "wasb" host: "wranglewestus.blob.core.windows.net" port: -1 file: "/user/KevinGre/.sparkStaging/application_1465316720094_0027/pyspark.zip" userInfo: "kevingre" } size: 357163 timestamp: 1465444064000 type: FILE visibility: PRIVATE, py4j-0.9-src.zip -> resource { scheme: "wasb" host: "wranglewestus.blob.core.windows.net" port: -1 file: "/user/KevinGre/.sparkStaging/application_1465316720094_0027/py4j-0.9-src.zip" userInfo: "kevingre" } size: 44846 timestamp: 1465444064000 type: FILE visibility: PRIVATE, __spark_conf__ -> resource { scheme: "wasb" host: "wranglewestus.blob.core.windows.net" port: -1 file: "/user/KevinGre/.sparkStaging/application_1465316720094_0027/__spark_conf__6294409567093172894.zip" userInfo: "kevingre" } size: 105560 timestamp: 1465444064000 type: ARCHIVE visibility: PRIVATE)
16/06/09 03:47:54 INFO ExecutorRunnable:
===============================================================================
YARN executor launch context:
  env:
    CLASSPATH -> {{PWD}}<CPS>{{PWD}}/__spark_conf__<CPS>/usr/hdp/current/spark-client/lib/spark-assembly.jar<CPS>$HADOOP_CONF_DIR<CPS>/usr/hdp/current/hadoop-client/*<CPS>/usr/hdp/current/hadoop-client/lib/*<CPS>/usr/hdp/current/hadoop-hdfs-client/*<CPS>/usr/hdp/current/hadoop-hdfs-client/lib/*<CPS>/usr/hdp/current/hadoop-yarn-client/*<CPS>/usr/hdp/current/hadoop-yarn-client/lib/*<CPS>$PWD/mr-framework/hadoop/share/hadoop/mapreduce/*:$PWD/mr-framework/hadoop/share/hadoop/mapreduce/lib/*:$PWD/mr-framework/hadoop/share/hadoop/common/*:$PWD/mr-framework/hadoop/share/hadoop/common/lib/*:$PWD/mr-framework/hadoop/share/hadoop/yarn/*:$PWD/mr-framework/hadoop/share/hadoop/yarn/lib/*:$PWD/mr-framework/hadoop/share/hadoop/hdfs/*:$PWD/mr-framework/hadoop/share/hadoop/hdfs/lib/*:$PWD/mr-framework/hadoop/share/hadoop/tools/lib/*:/usr/hdp/2.4.2.0-258/hadoop/lib/hadoop-lzo-0.6.0.2.4.2.0-258.jar:/etc/hadoop/conf/secure<CPS>:/usr/hdp/current/spark-client/lib/*:/usr/lib/hdinsight-datalake/*:/usr/hdp/current/spark-client/conf/hive-site.xml
    SPARK_YARN_CACHE_ARCHIVES -> wasb://kevingre@wranglewestus.blob.core.windows.net/user/KevinGre/.sparkStaging/application_1465316720094_0027/__spark_conf__6294409567093172894.zip#__spark_conf__
    SPARK_LOG_URL_STDERR -> http://10.0.0.8:30060/node/containerlogs/container_e05_1465316720094_0027_01_000003/KevinGre/stderr?start=-4096
    SPARK_YARN_CACHE_FILES_FILE_SIZES -> 357163,44846
    SPARK_YARN_STAGING_DIR -> .sparkStaging/application_1465316720094_0027
    SPARK_DIST_CLASSPATH ->
:/usr/hdp/current/spark-client/lib/*:/usr/lib/hdinsight-datalake/*:/usr/hdp/current/spark-client/conf/hive-site.xml
    SPARK_YARN_CACHE_FILES_VISIBILITIES -> PRIVATE,PRIVATE
    SPARK_YARN_CACHE_ARCHIVES_FILE_SIZES -> 105560
    SPARK_USER -> KevinGre
    SPARK_YARN_CACHE_ARCHIVES_TIME_STAMPS -> 1465444064000
    SPARK_YARN_MODE -> true
    PYTHONHASHSEED -> 0
    SPARK_YARN_CACHE_FILES_TIME_STAMPS -> 1465444064000,1465444064000
    SPARK_HOME -> /usr/hdp/current/spark-client
    PYTHONPATH -> /usr/hdp/current/spark-client/python:/usr/hdp/current/spark-client/python/lib/py4j-0.9-src.zip<CPS>{{PWD}}/pyspark.zip<CPS>{{PWD}}/py4j-0.9-src.zip
    SPARK_LOG_URL_STDOUT -> http://10.0.0.8:30060/node/containerlogs/container_e05_1465316720094_0027_01_000003/KevinGre/stdout?start=-4096
    SPARK_YARN_CACHE_ARCHIVES_VISIBILITIES -> PRIVATE
    SPARK_YARN_CACHE_FILES -> wasb://kevingre@wranglewestus.blob.core.windows.net/user/KevinGre/.sparkStaging/application_1465316720094_0027/pyspark.zip#pyspark.zip,wasb://kevingre@wranglewestus.blob.core.windows.net/user/KevinGre/.sparkStaging/application_1465316720094_0027/py4j-0.9-src.zip#py4j-0.9-src.zip

  command:
    {{JAVA_HOME}}/bin/java -server -XX:OnOutOfMemoryError='kill %p' -Xms6144m -Xmx6144m '-Dhdp.version=' '-Detwlogger.component=sparkexecutor' '-DlogFilter.filename=SparkLogFilters.xml' '-DpatternGroup.filename=SparkPatternGroups.xml' '-Dlog4jspark.root.logger=INFO,console,DRFA,ETW,Anonymizer' '-Dlog4jspark.log.dir=/var/log/sparkapp' '-Dlog4jspark.log.file=sparkexecutor_\${user.name}.log' '-Dlog4j.configuration=file:/usr/hdp/current/spark-client/conf/log4j.properties' '-Djavax.xml.parsers.SAXParserFactory=com.sun.org.apache.xerces.internal.jaxp.SAXParserFactoryImpl' -Djava.io.tmpdir={{PWD}}/tmp '-Dspark.driver.port=38830' '-Dspark.history.ui.port=18080' '-Dspark.ui.port=0' -Dspark.yarn.app.container.log.dir=<LOG_DIR> -XX:MaxPermSize=256m org.apache.spark.executor.CoarseGrainedExecutorBackend --driver-url spark://CoarseGrainedScheduler@10.0.0.10:38830 --executor-id 2 --hostname 10.0.0.8 --cores 2 --app-id application_1465316720094_0027 --user-class-path file:$PWD/__app__.jar 1> <LOG_DIR>/stdout 2> <LOG_DIR>/stderr
===============================================================================

16/06/09 03:47:54 INFO ContainerManagementProtocolProxy: Opening proxy : 10.0.0.8:30050
16/06/09 03:47:57 INFO YarnClusterSchedulerBackend: Registered executor NettyRpcEndpointRef(null) (10.0.0.10:42944) with ID 1
16/06/09 03:47:57 INFO BlockManagerMasterEndpoint: Registering block manager 10.0.0.10:42372 with 4.1 GB RAM, BlockManagerId(1, 10.0.0.10, 42372)
16/06/09 03:48:01 INFO YarnClusterSchedulerBackend: Registered executor NettyRpcEndpointRef(null) (10.0.0.8:43928) with ID 2
16/06/09 03:48:01 INFO YarnClusterSchedulerBackend: SchedulerBackend is ready for scheduling beginning after reached minRegisteredResourcesRatio: 0.8
16/06/09 03:48:01 INFO YarnClusterScheduler: YarnClusterScheduler.postStartHook done
16/06/09 03:48:01 INFO BlockManagerMasterEndpoint: Registering block manager 10.0.0.8:38225 with 4.1 GB RAM, BlockManagerId(2, 10.0.0.8, 38225)
16/06/09 03:48:01 INFO SparkContext: Starting job: collect at probe.py:15
16/06/09 03:48:01 INFO DAGScheduler: Got job 0 (collect at probe.py:15) with 4 output partitions
16/06/09 03:48:01 INFO DAGScheduler: Final stage: ResultStage 0 (collect at probe.py:15)
16/06/09 03:48:01 INFO DAGScheduler: Parents of final stage: List()
16/06/09 03:48:01 INFO DAGScheduler: Missing parents: List()
16/06/09 03:48:01 INFO DAGScheduler: Submitting ResultStage 0 (PythonRDD[1] at collect at probe.py:15), which has no
missing parents 16/06/09 03:48:01 INFO MemoryStore: Block broadcast_0 stored as values in memory (estimated size 4.1 KB, free 4.1 KB) 16/06/09 03:48:01 INFO MemoryStore: Block broadcast_0_piece0 stored as bytes in memory (estimated size 2.7 KB, free 6.8 KB) 16/06/09 03:48:01 INFO BlockManagerInfo: Added broadcast_0_piece0 in memory on 10.0.0.10:40007 (size: 2.7 KB, free: 457.9 MB) 16/06/09 03:48:01 INFO SparkContext: Created broadcast 0 from broadcast at DAGScheduler.scala:1006 16/06/09 03:48:01 INFO DAGScheduler: Submitting 4 missing tasks from ResultStage 0 (PythonRDD[1] at collect at probe.py:15) 16/06/09 03:48:01 INFO YarnClusterScheduler: Adding task set 0.0 with 4 tasks 16/06/09 03:48:01 INFO TaskSetManager: Starting task 0.0 in stage 0.0 (TID 0, 10.0.0.10, partition 0,PROCESS_LOCAL, 2064 bytes) 16/06/09 03:48:01 INFO TaskSetManager: Starting task 1.0 in stage 0.0 (TID 1, 10.0.0.8, partition 1,PROCESS_LOCAL, 2064 bytes) 16/06/09 03:48:01 INFO TaskSetManager: Starting task 2.0 in stage 0.0 (TID 2, 10.0.0.10, partition 2,PROCESS_LOCAL, 2064 bytes) 16/06/09 03:48:01 INFO TaskSetManager: Starting task 3.0 in stage 0.0 (TID 3, 10.0.0.8, partition 3,PROCESS_LOCAL, 2083 bytes) 16/06/09 03:48:01 INFO BlockManagerInfo: Added broadcast_0_piece0 in memory on 10.0.0.10:42372 (size: 2.7 KB, free: 4.1 GB) 16/06/09 03:48:01 INFO BlockManagerInfo: Added broadcast_0_piece0 in memory on 10.0.0.8:38225 (size: 2.7 KB, free: 4.1 GB) 16/06/09 03:48:02 INFO TaskSetManager: Finished task 0.0 in stage 0.0 (TID 0) in 939 ms on 10.0.0.10 (1/4) 16/06/09 03:48:02 INFO TaskSetManager: Finished task 2.0 in stage 0.0 (TID 2) in 925 ms on 10.0.0.10 (2/4) 16/06/09 03:48:02 INFO TaskSetManager: Finished task 3.0 in stage 0.0 (TID 3) in 958 ms on 10.0.0.8 (3/4) 16/06/09 03:48:02 INFO TaskSetManager: Finished task 1.0 in stage 0.0 (TID 1) in 966 ms on 10.0.0.8 (4/4) 16/06/09 03:48:02 INFO DAGScheduler: ResultStage 0 (collect at probe.py:15) finished in 0.982 s 16/06/09 03:48:02 INFO YarnClusterScheduler: Removed TaskSet 0.0, whose tasks have all completed, from pool 16/06/09 03:48:02 INFO DAGScheduler: Job 0 finished: collect at probe.py:15, took 1.111055 s 16/06/09 03:48:02 INFO ApplicationMaster: Final app status: SUCCEEDED, exitCode: 0 16/06/09 03:48:02 INFO SparkContext: Invoking stop() from shutdown hook 16/06/09 03:48:02 INFO ContextHandler: stopped o.s.j.s.ServletContextHandler{/metrics/json,null} 16/06/09 03:48:02 INFO ContextHandler: stopped o.s.j.s.ServletContextHandler{/stages/stage/kill,null} 16/06/09 03:48:02 INFO ContextHandler: stopped o.s.j.s.ServletContextHandler{/api,null} 16/06/09 03:48:02 INFO ContextHandler: stopped o.s.j.s.ServletContextHandler{/,null} 16/06/09 03:48:02 INFO ContextHandler: stopped o.s.j.s.ServletContextHandler{/static,null} 16/06/09 03:48:02 INFO ContextHandler: stopped o.s.j.s.ServletContextHandler{/executors/threadDump/json,null} 16/06/09 03:48:02 INFO ContextHandler: stopped o.s.j.s.ServletContextHandler{/executors/threadDump,null} 16/06/09 03:48:02 INFO ContextHandler: stopped o.s.j.s.ServletContextHandler{/executors/json,null} 16/06/09 03:48:02 INFO ContextHandler: stopped o.s.j.s.ServletContextHandler{/executors,null} 16/06/09 03:48:02 INFO ContextHandler: stopped o.s.j.s.ServletContextHandler{/environment/json,null} 16/06/09 03:48:02 INFO ContextHandler: stopped o.s.j.s.ServletContextHandler{/environment,null} 16/06/09 03:48:02 INFO ContextHandler: stopped o.s.j.s.ServletContextHandler{/storage/rdd/json,null} 16/06/09 03:48:02 INFO ContextHandler: stopped 
o.s.j.s.ServletContextHandler{/storage/rdd,null} 16/06/09 03:48:02 INFO ContextHandler: stopped o.s.j.s.ServletContextHandler{/storage/json,null} 16/06/09 03:48:02 INFO ContextHandler: stopped o.s.j.s.ServletContextHandler{/storage,null} 16/06/09 03:48:02 INFO ContextHandler: stopped o.s.j.s.ServletContextHandler{/stages/pool/json,null} 16/06/09 03:48:02 INFO ContextHandler: stopped o.s.j.s.ServletContextHandler{/stages/pool,null} 16/06/09 03:48:02 INFO ContextHandler: stopped o.s.j.s.ServletContextHandler{/stages/stage/json,null} 16/06/09 03:48:02 INFO ContextHandler: stopped o.s.j.s.ServletContextHandler{/stages/stage,null} 16/06/09 03:48:02 INFO ContextHandler: stopped o.s.j.s.ServletContextHandler{/stages/json,null} 16/06/09 03:48:02 INFO ContextHandler: stopped o.s.j.s.ServletContextHandler{/stages,null} 16/06/09 03:48:02 INFO ContextHandler: stopped o.s.j.s.ServletContextHandler{/jobs/job/json,null} 16/06/09 03:48:02 INFO ContextHandler: stopped o.s.j.s.ServletContextHandler{/jobs/job,null} 16/06/09 03:48:02 INFO ContextHandler: stopped o.s.j.s.ServletContextHandler{/jobs/json,null} 16/06/09 03:48:02 INFO ContextHandler: stopped o.s.j.s.ServletContextHandler{/jobs,null} 16/06/09 03:48:02 INFO SparkUI: Stopped Spark web UI at http://10.0.0.10:36407 16/06/09 03:48:02 INFO metrics: type=GAUGE, name=application_1465316720094_0027.driver.BlockManager.disk.diskSpaceUsed_MB, value=0 16/06/09 03:48:02 INFO metrics: type=GAUGE, name=application_1465316720094_0027.driver.BlockManager.memory.maxMem_MB, value=8839 16/06/09 03:48:02 INFO metrics: type=GAUGE, name=application_1465316720094_0027.driver.BlockManager.memory.memUsed_MB, value=0 16/06/09 03:48:02 INFO metrics: type=GAUGE, name=application_1465316720094_0027.driver.BlockManager.memory.remainingMem_MB, value=8839 16/06/09 03:48:02 INFO metrics: type=GAUGE, name=application_1465316720094_0027.driver.DAGScheduler.job.activeJobs, value=0 16/06/09 03:48:02 INFO metrics: type=GAUGE, name=application_1465316720094_0027.driver.DAGScheduler.job.allJobs, value=1 16/06/09 03:48:02 INFO metrics: type=GAUGE, name=application_1465316720094_0027.driver.DAGScheduler.stage.failedStages, value=0 16/06/09 03:48:02 INFO metrics: type=GAUGE, name=application_1465316720094_0027.driver.DAGScheduler.stage.runningStages, value=0 16/06/09 03:48:02 INFO metrics: type=GAUGE, name=application_1465316720094_0027.driver.DAGScheduler.stage.waitingStages, value=0 16/06/09 03:48:02 INFO metrics: type=GAUGE, name=application_1465316720094_0027.driver.jvm.PS-MarkSweep.count, value=0 16/06/09 03:48:02 INFO metrics: type=GAUGE, name=application_1465316720094_0027.driver.jvm.PS-MarkSweep.time, value=0 16/06/09 03:48:02 INFO metrics: type=GAUGE, name=application_1465316720094_0027.driver.jvm.PS-Scavenge.count, value=4 16/06/09 03:48:02 INFO metrics: type=GAUGE, name=application_1465316720094_0027.driver.jvm.PS-Scavenge.time, value=137 16/06/09 03:48:02 INFO metrics: type=GAUGE, name=application_1465316720094_0027.driver.jvm.heap.committed, value=557842432 16/06/09 03:48:02 INFO metrics: type=GAUGE, name=application_1465316720094_0027.driver.jvm.heap.init, value=460977216 16/06/09 03:48:02 INFO metrics: type=GAUGE, name=application_1465316720094_0027.driver.jvm.heap.max, value=954728448 16/06/09 03:48:02 INFO metrics: type=GAUGE, name=application_1465316720094_0027.driver.jvm.heap.usage, value=0.18566486457099893 16/06/09 03:48:02 INFO metrics: type=GAUGE, name=application_1465316720094_0027.driver.jvm.heap.used, value=177503632 16/06/09 03:48:02 INFO metrics: type=GAUGE, 
name=application_1465316720094_0027.driver.jvm.non-heap.committed, value=65470464 16/06/09 03:48:02 INFO metrics: type=GAUGE, name=application_1465316720094_0027.driver.jvm.non-heap.init, value=24576000 16/06/09 03:48:02 INFO metrics: type=GAUGE, name=application_1465316720094_0027.driver.jvm.non-heap.max, value=318767104 16/06/09 03:48:02 INFO metrics: type=GAUGE, name=application_1465316720094_0027.driver.jvm.non-heap.usage, value=0.20288407175164475 16/06/09 03:48:02 INFO metrics: type=GAUGE, name=application_1465316720094_0027.driver.jvm.non-heap.used, value=64672768 16/06/09 03:48:02 INFO metrics: type=GAUGE, name=application_1465316720094_0027.driver.jvm.pools.Code-Cache.committed, value=2555904 16/06/09 03:48:02 INFO metrics: type=GAUGE, name=application_1465316720094_0027.driver.jvm.pools.Code-Cache.init, value=2555904 16/06/09 03:48:02 INFO metrics: type=GAUGE, name=application_1465316720094_0027.driver.jvm.pools.Code-Cache.max, value=50331648 16/06/09 03:48:02 INFO metrics: type=GAUGE, name=application_1465316720094_0027.driver.jvm.pools.Code-Cache.usage, value=0.035761515299479164 16/06/09 03:48:02 INFO metrics: type=GAUGE, name=application_1465316720094_0027.driver.jvm.pools.Code-Cache.used, value=1799936 16/06/09 03:48:02 INFO metrics: type=GAUGE, name=application_1465316720094_0027.driver.jvm.pools.PS-Eden-Space.committed, value=231735296 16/06/09 03:48:02 INFO metrics: type=GAUGE, name=application_1465316720094_0027.driver.jvm.pools.PS-Eden-Space.init, value=115867648 16/06/09 03:48:02 INFO metrics: type=GAUGE, name=application_1465316720094_0027.driver.jvm.pools.PS-Eden-Space.max, value=320339968 16/06/09 03:48:02 INFO metrics: type=GAUGE, name=application_1465316720094_0027.driver.jvm.pools.PS-Eden-Space.usage, value=0.26750650109323854 16/06/09 03:48:02 INFO metrics: type=GAUGE, name=application_1465316720094_0027.driver.jvm.pools.PS-Eden-Space.used, value=85693024 16/06/09 03:48:02 INFO metrics: type=GAUGE, name=application_1465316720094_0027.driver.jvm.pools.PS-Old-Gen.committed, value=307232768 16/06/09 03:48:02 INFO metrics: type=GAUGE, name=application_1465316720094_0027.driver.jvm.pools.PS-Old-Gen.init, value=307232768 16/06/09 03:48:02 INFO metrics: type=GAUGE, name=application_1465316720094_0027.driver.jvm.pools.PS-Old-Gen.max, value=715653120 16/06/09 03:48:02 INFO metrics: type=GAUGE, name=application_1465316720094_0027.driver.jvm.pools.PS-Old-Gen.usage, value=0.10264158144101991 16/06/09 03:48:02 INFO metrics: type=GAUGE, name=application_1465316720094_0027.driver.jvm.pools.PS-Old-Gen.used, value=73455768 16/06/09 03:48:02 INFO metrics: type=GAUGE, name=application_1465316720094_0027.driver.jvm.pools.PS-Perm-Gen.committed, value=62914560 16/06/09 03:48:02 INFO metrics: type=GAUGE, name=application_1465316720094_0027.driver.jvm.pools.PS-Perm-Gen.init, value=22020096 16/06/09 03:48:02 INFO metrics: type=GAUGE, name=application_1465316720094_0027.driver.jvm.pools.PS-Perm-Gen.max, value=268435456 16/06/09 03:48:02 INFO metrics: type=GAUGE, name=application_1465316720094_0027.driver.jvm.pools.PS-Perm-Gen.usage, value=0.23423820734024048 16/06/09 03:48:02 INFO metrics: type=GAUGE, name=application_1465316720094_0027.driver.jvm.pools.PS-Perm-Gen.used, value=62880664 16/06/09 03:48:02 INFO metrics: type=GAUGE, name=application_1465316720094_0027.driver.jvm.pools.PS-Survivor-Space.committed, value=18874368 16/06/09 03:48:02 INFO metrics: type=GAUGE, name=application_1465316720094_0027.driver.jvm.pools.PS-Survivor-Space.init, value=18874368 16/06/09 03:48:02 INFO 
metrics: type=GAUGE, name=application_1465316720094_0027.driver.jvm.pools.PS-Survivor-Space.max, value=18874368 16/06/09 03:48:02 INFO metrics: type=GAUGE, name=application_1465316720094_0027.driver.jvm.pools.PS-Survivor-Space.usage, value=0.9983562893337674 16/06/09 03:48:02 INFO metrics: type=GAUGE, name=application_1465316720094_0027.driver.jvm.pools.PS-Survivor-Space.used, value=18843344 16/06/09 03:48:02 INFO metrics: type=GAUGE, name=application_1465316720094_0027.driver.jvm.total.committed, value=623312896 16/06/09 03:48:02 INFO metrics: type=GAUGE, name=application_1465316720094_0027.driver.jvm.total.init, value=485553216 16/06/09 03:48:02 INFO metrics: type=GAUGE, name=application_1465316720094_0027.driver.jvm.total.max, value=1273495552 16/06/09 03:48:02 INFO metrics: type=GAUGE, name=application_1465316720094_0027.driver.jvm.total.used, value=243406880 16/06/09 03:48:02 INFO metrics: type=TIMER, name=application_1465316720094_0027.driver.DAGScheduler.messageProcessingTime, count=11, min=0.0581, max=119.689129, mean=12.538754498570107, stddev=33.95636993652023, median=0.26900199999999996, p75=5.204332, p95=119.689129, p98=119.689129, p99=119.689129, p999=119.689129, mean_rate=1.0617316313818008, m1=0.2799555853706767, m5=0.21652854617838252, m15=0.20554015199510328, rate_unit=events/second, duration_unit=milliseconds 16/06/09 03:48:03 INFO YarnAllocator: Driver requested a total number of 0 executor(s). 16/06/09 03:48:03 INFO YarnClusterSchedulerBackend: Shutting down all executors 16/06/09 03:48:03 INFO YarnClusterSchedulerBackend: Asking each executor to shut down 16/06/09 03:48:03 INFO SchedulerExtensionServices: Stopping SchedulerExtensionServices (serviceOption=Some(), services=List(), started=false) 16/06/09 03:48:03 INFO MapOutputTrackerMasterEndpoint: MapOutputTrackerMasterEndpoint stopped! 16/06/09 03:48:03 INFO MemoryStore: MemoryStore cleared 16/06/09 03:48:03 INFO BlockManager: BlockManager stopped 16/06/09 03:48:03 INFO BlockManagerMaster: BlockManagerMaster stopped 16/06/09 03:48:03 INFO OutputCommitCoordinator$OutputCommitCoordinatorEndpoint: OutputCommitCoordinator stopped! 16/06/09 03:48:03 INFO RemoteActorRefProvider$RemotingTerminator: Shutting down remote daemon. 16/06/09 03:48:03 INFO RemoteActorRefProvider$RemotingTerminator: Remote daemon shut down; proceeding with flushing remote transports. 16/06/09 03:48:03 INFO SparkContext: Successfully stopped SparkContext 16/06/09 03:48:03 INFO ApplicationMaster: Unregistering ApplicationMaster with SUCCEEDED 16/06/09 03:48:03 INFO AMRMClientImpl: Waiting for application to be successfully unregistered. 16/06/09 03:48:03 INFO RemoteActorRefProvider$RemotingTerminator: Remoting shut down. 16/06/09 03:48:03 INFO ApplicationMaster: Deleting staging directory .sparkStaging/application_1465316720094_0027 16/06/09 03:48:03 INFO ShutdownHookManager: Shutdown hook called 16/06/09 03:48:03 INFO ShutdownHookManager: Deleting directory /mnt/resource/hadoop/yarn/local/usercache/KevinGre/appcache/application_1465316720094_0027/spark-3c0998f5-d4fa-41b9-87b9-ca2ddb76ba05 16/06/09 03:48:03 INFO ShutdownHookManager: Deleting directory /mnt/resource/hadoop/yarn/local/usercache/KevinGre/appcache/application_1465316720094_0027/spark-3c0998f5-d4fa-41b9-87b9-ca2ddb76ba05/pyspark-f01b9a42-6b7f-4c66-b02a-466ed36f541d 16/06/09 03:48:03 INFO MetricsSystemImpl: Stopping azure-file-system metrics system... 16/06/09 03:48:03 INFO MetricsSinkAdapter: azurefs2 thread interrupted. 
16/06/09 03:48:03 INFO MetricsSystemImpl: azure-file-system metrics system stopped.
16/06/09 03:48:03 INFO MetricsSystemImpl: azure-file-system metrics system shutdown complete.
End of LogType:stderr

LogType:stdout
Log Upload Time:Thu Jun 09 03:48:05 +0000 2016
LogLength:224
Log Contents:
driver:
3.4.3 (default, Oct 14 2015, 20:28:29) [GCC 4.8.4]
PYTHONHASHSEED=0
PYSPARK_PYTHON=python3
executors:
3.4.3 (default, Oct 14 2015, 20:28:29) [GCC 4.8.4]
PYTHONHASHSEED=0
PYSPARK_PYTHON=/usr/bin/anaconda/bin/python
End of LogType:stdout

Container: container_e05_1465316720094_0027_01_000002 on 10.0.0.10_30050
==========================================================================
LogType:directory.info
Log Upload Time:Thu Jun 09 03:48:05 +0000 2016
LogLength:4767
Log Contents:
ls -l:
total 28
-rw------- 1 nobody hadoop 103 Jun 9 03:47 container_tokens
-rwx------ 1 nobody hadoop 7145 Jun 9 03:47 launch_container.sh
lrwxrwxrwx 1 nobody hadoop 81 Jun 9 03:47 py4j-0.9-src.zip -> /mnt/resource/hadoop/yarn/local/usercache/KevinGre/filecache/194/py4j-0.9-src.zip
lrwxrwxrwx 1 nobody hadoop 76 Jun 9 03:47 pyspark.zip -> /mnt/resource/hadoop/yarn/local/usercache/KevinGre/filecache/193/pyspark.zip
lrwxrwxrwx 1 nobody hadoop 102 Jun 9 03:47 __spark_conf__ -> /mnt/resource/hadoop/yarn/local/usercache/KevinGre/filecache/195/__spark_conf__6294409567093172894.zip
drwxr-s--- 2 nobody hadoop 4096 Jun 9 03:47 tmp
find -L . -maxdepth 5 -ls:
3539234 4 drwxr-s--- 3 nobody hadoop 4096 Jun 9 03:47 .
3539237 8 -rwx------ 1 nobody hadoop 7145 Jun 9 03:47 ./launch_container.sh
3539238 4 -rw------- 1 nobody hadoop 103 Jun 9 03:47 ./container_tokens
3539061 4 drwx------ 2 nobody nogroup 4096 Jun 9 03:47 ./__spark_conf__
3539213 4 -r-x------ 1 nobody nogroup 1000 Jun 9 03:47 ./__spark_conf__/ssl-server.xml
3539198 4 -r-x------ 1 nobody nogroup 758 Jun 9 03:47 ./__spark_conf__/mapred-site.xml.template
3539175 4 -r-x------ 1 nobody nogroup 3156 Jun 9 03:47 ./__spark_conf__/log4j.properties
3539216 8 -r-x------ 1 nobody nogroup 4113 Jun 9 03:47 ./__spark_conf__/mapred-queues.xml.template
3539215 4 -r-x------ 1 nobody nogroup 945 Jun 9 03:47 ./__spark_conf__/taskcontroller.cfg
3539195 4 -r-x------ 1 nobody nogroup 744 Jun 9 03:47 ./__spark_conf__/ssl-client.xml
3539210 4 -r-x------ 1 nobody nogroup 127 Jun 9 03:47 ./__spark_conf__/slaves
3539214 4 -r-x------ 1 nobody nogroup 2268 Jun 9 03:47 ./__spark_conf__/ssl-server.xml.example
3539203 4 -r-x------ 1 nobody nogroup 1020 Jun 9 03:47 ./__spark_conf__/commons-logging.properties
3539176 8 -r-x------ 1 nobody nogroup 5640 Jun 9 03:47 ./__spark_conf__/hadoop-metrics2.properties
3539191 4 -r-x------ 1 nobody nogroup 265 Jun 9 03:47 ./__spark_conf__/hadoop-metrics2-azure-file-system.properties
3539200 8 -r-x------ 1 nobody nogroup 4277 Jun 9 03:47 ./__spark_conf__/yarn-env.sh
3539062 8 -r-x------ 1 nobody nogroup 7579 Jun 9 03:47 ./__spark_conf__/mapred-site.xml
3539202 4 -r-x------ 1 nobody nogroup 1045 Jun 9 03:47 ./__spark_conf__/container-executor.cfg
3539209 4 -r-x------ 1 nobody nogroup 1308 Jun 9 03:47 ./__spark_conf__/hadoop-policy.xml
3539193 8 -r-x------ 1 nobody nogroup 6136 Jun 9 03:47 ./__spark_conf__/core-site.xml
3539205 4 -r-x------ 1 nobody nogroup 2490 Jun 9 03:47 ./__spark_conf__/hadoop-metrics.properties
3539189 4 -r-x------ 1 nobody nogroup 247 Jun 9 03:47 ./__spark_conf__/hadoop-metrics2-adl-file-system.properties
3539196 4 -r-x------ 1 nobody nogroup 2444 Jun 9 03:47 ./__spark_conf__/capacity-scheduler.xml
3539212 8 -r-x------ 1 nobody nogroup 5142 Jun 9 03:47 ./__spark_conf__/metrics.properties
Container: container_e05_1465316720094_0027_01_000002 on 10.0.0.10_30050
==========================================================================

LogType:directory.info
Log Upload Time:Thu Jun 09 03:48:05 +0000 2016
LogLength:4767
Log Contents:
ls -l:
total 28
-rw------- 1 nobody hadoop 103 Jun 9 03:47 container_tokens
-rwx------ 1 nobody hadoop 7145 Jun 9 03:47 launch_container.sh
lrwxrwxrwx 1 nobody hadoop 81 Jun 9 03:47 py4j-0.9-src.zip -> /mnt/resource/hadoop/yarn/local/usercache/KevinGre/filecache/194/py4j-0.9-src.zip
lrwxrwxrwx 1 nobody hadoop 76 Jun 9 03:47 pyspark.zip -> /mnt/resource/hadoop/yarn/local/usercache/KevinGre/filecache/193/pyspark.zip
lrwxrwxrwx 1 nobody hadoop 102 Jun 9 03:47 __spark_conf__ -> /mnt/resource/hadoop/yarn/local/usercache/KevinGre/filecache/195/__spark_conf__6294409567093172894.zip
drwxr-s--- 2 nobody hadoop 4096 Jun 9 03:47 tmp
find -L . -maxdepth 5 -ls:
3539234 4 drwxr-s--- 3 nobody hadoop 4096 Jun 9 03:47 .
3539237 8 -rwx------ 1 nobody hadoop 7145 Jun 9 03:47 ./launch_container.sh
3539238 4 -rw------- 1 nobody hadoop 103 Jun 9 03:47 ./container_tokens
3539061 4 drwx------ 2 nobody nogroup 4096 Jun 9 03:47 ./__spark_conf__
3539213 4 -r-x------ 1 nobody nogroup 1000 Jun 9 03:47 ./__spark_conf__/ssl-server.xml
3539198 4 -r-x------ 1 nobody nogroup 758 Jun 9 03:47 ./__spark_conf__/mapred-site.xml.template
3539175 4 -r-x------ 1 nobody nogroup 3156 Jun 9 03:47 ./__spark_conf__/log4j.properties
3539216 8 -r-x------ 1 nobody nogroup 4113 Jun 9 03:47 ./__spark_conf__/mapred-queues.xml.template
3539215 4 -r-x------ 1 nobody nogroup 945 Jun 9 03:47 ./__spark_conf__/taskcontroller.cfg
3539195 4 -r-x------ 1 nobody nogroup 744 Jun 9 03:47 ./__spark_conf__/ssl-client.xml
3539210 4 -r-x------ 1 nobody nogroup 127 Jun 9 03:47 ./__spark_conf__/slaves
3539214 4 -r-x------ 1 nobody nogroup 2268 Jun 9 03:47 ./__spark_conf__/ssl-server.xml.example
3539203 4 -r-x------ 1 nobody nogroup 1020 Jun 9 03:47 ./__spark_conf__/commons-logging.properties
3539176 8 -r-x------ 1 nobody nogroup 5640 Jun 9 03:47 ./__spark_conf__/hadoop-metrics2.properties
3539191 4 -r-x------ 1 nobody nogroup 265 Jun 9 03:47 ./__spark_conf__/hadoop-metrics2-azure-file-system.properties
3539200 8 -r-x------ 1 nobody nogroup 4277 Jun 9 03:47 ./__spark_conf__/yarn-env.sh
3539062 8 -r-x------ 1 nobody nogroup 7579 Jun 9 03:47 ./__spark_conf__/mapred-site.xml
3539202 4 -r-x------ 1 nobody nogroup 1045 Jun 9 03:47 ./__spark_conf__/container-executor.cfg
3539209 4 -r-x------ 1 nobody nogroup 1308 Jun 9 03:47 ./__spark_conf__/hadoop-policy.xml
3539193 8 -r-x------ 1 nobody nogroup 6136 Jun 9 03:47 ./__spark_conf__/core-site.xml
3539205 4 -r-x------ 1 nobody nogroup 2490 Jun 9 03:47 ./__spark_conf__/hadoop-metrics.properties
3539189 4 -r-x------ 1 nobody nogroup 247 Jun 9 03:47 ./__spark_conf__/hadoop-metrics2-adl-file-system.properties
3539196 4 -r-x------ 1 nobody nogroup 2444 Jun 9 03:47 ./__spark_conf__/capacity-scheduler.xml
3539212 8 -r-x------ 1 nobody nogroup 5142 Jun 9 03:47 ./__spark_conf__/metrics.properties
3539197 4 -r-x------ 1 nobody nogroup 2358 Jun 9 03:47 ./__spark_conf__/topology_script.py
3539208 4 -r-x------ 1 nobody nogroup 2316 Jun 9 03:47 ./__spark_conf__/ssl-client.xml.example
3539217 4 -r-x------ 1 nobody nogroup 2431 Jun 9 03:47 ./__spark_conf__/__spark_conf__.properties
3539199 4 -r-x------ 1 nobody nogroup 1335 Jun 9 03:47 ./__spark_conf__/configuration.xsl
3539206 4 -r-x------ 1 nobody nogroup 757 Jun 9 03:47 ./__spark_conf__/mapred-env.sh
3539204 8 -r-x------ 1 nobody nogroup 4221 Jun 9 03:47 ./__spark_conf__/task-log4j.properties
3539194 4 -r-x------ 1 nobody nogroup 1 Jun 9 03:47 ./__spark_conf__/dfs.exclude
3539201 12 -r-x------ 1 nobody nogroup 8543 Jun 9 03:47 ./__spark_conf__/hdfs-site.xml
3539211 4 -r-x------ 1 nobody nogroup 220 Jun 9 03:47 ./__spark_conf__/topology_mappings.data
3539190 24 -r-x------ 1 nobody nogroup 20890 Jun 9 03:47 ./__spark_conf__/yarn-site.xml
3539207 4 -r-x------ 1 nobody nogroup 1602 Jun 9 03:47 ./__spark_conf__/health_check
3539192 0 -r-x------ 1 nobody nogroup 0 Jun 9 03:47 ./__spark_conf__/yarn.exclude
3539174 8 -r-x------ 1 nobody nogroup 5693 Jun 9 03:47 ./__spark_conf__/hadoop-env.sh
3539056 44 -r-x------ 1 nobody nogroup 44846 Jun 9 03:47 ./py4j-0.9-src.zip
3539236 4 drwxr-s--- 2 nobody hadoop 4096 Jun 9 03:47 ./tmp
3538961 352 -r-x------ 1 nobody nogroup 357163 Jun 9 03:47 ./pyspark.zip
broken symlinks(find -L . -maxdepth 5 -type l -ls):
End of LogType:directory.info

LogType:launch_container.sh
Log Upload Time:Thu Jun 09 03:48:05 +0000 2016
LogLength:7145
Log Contents:
#!/bin/bash
export SPARK_YARN_MODE="true"
export SPARK_YARN_STAGING_DIR=".sparkStaging/application_1465316720094_0027"
export JAVA_HOME="/usr/lib/jvm/java-7-openjdk-amd64"
export SPARK_YARN_CACHE_FILES_VISIBILITIES="PRIVATE,PRIVATE"
export NM_AUX_SERVICE_mapreduce_shuffle="AAA0+gAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAA= "
export SPARK_YARN_CACHE_ARCHIVES="wasb://kevingre@wranglewestus.blob.core.windows.net/user/KevinGre/.sparkStaging/application_1465316720094_0027/__spark_conf__6294409567093172894.zip#__spark_conf__"
export SPARK_LOG_URL_STDERR="http://10.0.0.10:30060/node/containerlogs/container_e05_1465316720094_0027_01_000002/KevinGre/stderr?start=-4096"
export HADOOP_YARN_HOME="/usr/hdp/current/hadoop-yarn-nodemanager"
export NM_HOST="10.0.0.10"
export PYTHONPATH="/usr/hdp/current/spark-client/python:/usr/hdp/current/spark-client/python/lib/py4j-0.9-src.zip:$PWD/pyspark.zip:$PWD/py4j-0.9-src.zip"
export SPARK_YARN_CACHE_ARCHIVES_FILE_SIZES="105560"
export JVM_PID="$$"
export SPARK_HOME="/usr/hdp/current/spark-client"
export SPARK_YARN_CACHE_FILES_TIME_STAMPS="1465444064000,1465444064000"
export SPARK_USER="KevinGre"
export PWD="/mnt/resource/hadoop/yarn/local/usercache/KevinGre/appcache/application_1465316720094_0027/container_e05_1465316720094_0027_01_000002"
export NM_PORT="30050"
export LOGNAME="KevinGre"
export SPARK_LOG_URL_STDOUT="http://10.0.0.10:30060/node/containerlogs/container_e05_1465316720094_0027_01_000002/KevinGre/stdout?start=-4096"
export MALLOC_ARENA_MAX="4"
export LOG_DIRS="/mnt/resource/hadoop/yarn/log/application_1465316720094_0027/container_e05_1465316720094_0027_01_000002"
export SPARK_YARN_CACHE_FILES_FILE_SIZES="357163,44846"
export NM_HTTP_PORT="30060"
export LOCAL_DIRS="/mnt/resource/hadoop/yarn/local/usercache/KevinGre/appcache/application_1465316720094_0027"
export NM_AUX_SERVICE_spark_shuffle=""
export SPARK_YARN_CACHE_FILES="wasb://kevingre@wranglewestus.blob.core.windows.net/user/KevinGre/.sparkStaging/application_1465316720094_0027/pyspark.zip#pyspark.zip,wasb://kevingre@wranglewestus.blob.core.windows.net/user/KevinGre/.sparkStaging/application_1465316720094_0027/py4j-0.9-src.zip#py4j-0.9-src.zip"
export SPARK_YARN_CACHE_ARCHIVES_TIME_STAMPS="1465444064000"
export SPARK_DIST_CLASSPATH=":/usr/hdp/current/spark-client/lib/*:/usr/lib/hdinsight-datalake/*:/usr/hdp/current/spark-client/conf/hive-site.xml"
export CLASSPATH="$PWD:$PWD/__spark_conf__:/usr/hdp/current/spark-client/lib/spark-assembly.jar:$HADOOP_CONF_DIR:/usr/hdp/current/hadoop-client/*:/usr/hdp/current/hadoop-client/lib/*:/usr/hdp/current/hadoop-hdfs-client/*:/usr/hdp/current/hadoop-hdfs-client/lib/*:/usr/hdp/current/hadoop-yarn-client/*:/usr/hdp/current/hadoop-yarn-client/lib/*:$PWD/mr-framework/hadoop/share/hadoop/mapreduce/*:$PWD/mr-framework/hadoop/share/hadoop/mapreduce/lib/*:$PWD/mr-framework/hadoop/share/hadoop/common/*:$PWD/mr-framework/hadoop/share/hadoop/common/lib/*:$PWD/mr-framework/hadoop/share/hadoop/yarn/*:$PWD/mr-framework/hadoop/share/hadoop/yarn/lib/*:$PWD/mr-framework/hadoop/share/hadoop/hdfs/*:$PWD/mr-framework/hadoop/share/hadoop/hdfs/lib/*:$PWD/mr-framework/hadoop/share/hadoop/tools/lib/*:/usr/hdp/2.4.2.0-258/hadoop/lib/hadoop-lzo-0.6.0.2.4.2.0-258.jar:/etc/hadoop/conf/secure::/usr/hdp/current/spark-client/lib/*:/usr/lib/hdinsight-datalake/*:/usr/hdp/current/spark-client/conf/hive-site.xml"
export HADOOP_TOKEN_FILE_LOCATION="/mnt/resource/hadoop/yarn/local/usercache/KevinGre/appcache/application_1465316720094_0027/container_e05_1465316720094_0027_01_000002/container_tokens"
export SPARK_YARN_CACHE_ARCHIVES_VISIBILITIES="PRIVATE"
export USER="KevinGre"
export CONTAINER_ID="container_e05_1465316720094_0027_01_000002"
export HOME="/home/"
export PYTHONHASHSEED="0"
export HADOOP_CONF_DIR="/usr/hdp/current/hadoop-client/conf"
ln -sf "/mnt/resource/hadoop/yarn/local/usercache/KevinGre/filecache/193/pyspark.zip" "pyspark.zip"
hadoop_shell_errorcode=$?
if [ $hadoop_shell_errorcode -ne 0 ]
then
exit $hadoop_shell_errorcode
fi
ln -sf "/mnt/resource/hadoop/yarn/local/usercache/KevinGre/filecache/194/py4j-0.9-src.zip" "py4j-0.9-src.zip"
hadoop_shell_errorcode=$?
if [ $hadoop_shell_errorcode -ne 0 ]
then
exit $hadoop_shell_errorcode
fi
ln -sf "/mnt/resource/hadoop/yarn/local/usercache/KevinGre/filecache/195/__spark_conf__6294409567093172894.zip" "__spark_conf__"
hadoop_shell_errorcode=$?
if [ $hadoop_shell_errorcode -ne 0 ]
then
exit $hadoop_shell_errorcode
fi
# Creating copy of launch script
cp "launch_container.sh" "/mnt/resource/hadoop/yarn/log/application_1465316720094_0027/container_e05_1465316720094_0027_01_000002/launch_container.sh"
chmod 640 "/mnt/resource/hadoop/yarn/log/application_1465316720094_0027/container_e05_1465316720094_0027_01_000002/launch_container.sh"
# Determining directory contents
echo "ls -l:" 1>"/mnt/resource/hadoop/yarn/log/application_1465316720094_0027/container_e05_1465316720094_0027_01_000002/directory.info"
ls -l 1>>"/mnt/resource/hadoop/yarn/log/application_1465316720094_0027/container_e05_1465316720094_0027_01_000002/directory.info"
echo "find -L . -maxdepth 5 -ls:" 1>>"/mnt/resource/hadoop/yarn/log/application_1465316720094_0027/container_e05_1465316720094_0027_01_000002/directory.info"
find -L . -maxdepth 5 -ls 1>>"/mnt/resource/hadoop/yarn/log/application_1465316720094_0027/container_e05_1465316720094_0027_01_000002/directory.info"
echo "broken symlinks(find -L . -maxdepth 5 -type l -ls):" 1>>"/mnt/resource/hadoop/yarn/log/application_1465316720094_0027/container_e05_1465316720094_0027_01_000002/directory.info"
find -L . -maxdepth 5 -type l -ls 1>>"/mnt/resource/hadoop/yarn/log/application_1465316720094_0027/container_e05_1465316720094_0027_01_000002/directory.info"
exec /bin/bash -c "$JAVA_HOME/bin/java -server -XX:OnOutOfMemoryError='kill %p' -Xms6144m -Xmx6144m '-Dhdp.version=' '-Detwlogger.component=sparkexecutor' '-DlogFilter.filename=SparkLogFilters.xml' '-DpatternGroup.filename=SparkPatternGroups.xml' '-Dlog4jspark.root.logger=INFO,console,DRFA,ETW,Anonymizer' '-Dlog4jspark.log.dir=/var/log/sparkapp' '-Dlog4jspark.log.file=sparkexecutor_\${user.name}.log' '-Dlog4j.configuration=file:/usr/hdp/current/spark-client/conf/log4j.properties' '-Djavax.xml.parsers.SAXParserFactory=com.sun.org.apache.xerces.internal.jaxp.SAXParserFactoryImpl' -Djava.io.tmpdir=$PWD/tmp '-Dspark.driver.port=38830' '-Dspark.history.ui.port=18080' '-Dspark.ui.port=0' -Dspark.yarn.app.container.log.dir=/mnt/resource/hadoop/yarn/log/application_1465316720094_0027/container_e05_1465316720094_0027_01_000002 -XX:MaxPermSize=256m org.apache.spark.executor.CoarseGrainedExecutorBackend --driver-url spark://CoarseGrainedScheduler@10.0.0.10:38830 --executor-id 1 --hostname 10.0.0.10 --cores 2 --app-id application_1465316720094_0027 --user-class-path file:$PWD/__app__.jar 1> /mnt/resource/hadoop/yarn/log/application_1465316720094_0027/container_e05_1465316720094_0027_01_000002/stdout 2> /mnt/resource/hadoop/yarn/log/application_1465316720094_0027/container_e05_1465316720094_0027_01_000002/stderr"
hadoop_shell_errorcode=$?
if [ $hadoop_shell_errorcode -ne 0 ]
then
exit $hadoop_shell_errorcode
fi
End of LogType:launch_container.sh
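[Note: the launch script above is where YARN materializes the executor's environment; the export PYTHONHASHSEED="0" near the end of the environment block is how the seed the probe observed reaches every container. Notably, PYSPARK_PYTHON itself is not exported by this script, which is consistent with the executors falling back to node-level environment settings. Pinning the hash seed matters because CPython 3 randomizes string hashing per process by default, and PySpark's hash-based key partitioning needs identical hashes on every node. A quick local illustration (any python3 will do):

python3 -c 'print(hash("spark"))'                    # value changes between runs
PYTHONHASHSEED=0 python3 -c 'print(hash("spark"))'   # stable once the seed is fixed
]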
LogType:stderr
Log Upload Time:Thu Jun 09 03:48:05 +0000 2016
LogLength:11429
Log Contents:
SLF4J: Class path contains multiple SLF4J bindings.
SLF4J: Found binding in [jar:file:/usr/hdp/2.4.2.0-258/spark/lib/spark-assembly-1.6.1.2.4.2.0-258-hadoop2.7.1.2.4.2.0-258.jar!/org/slf4j/impl/StaticLoggerBinder.class]
SLF4J: Found binding in [jar:file:/usr/hdp/2.4.2.0-258/hadoop/lib/slf4j-log4j12-1.7.10.jar!/org/slf4j/impl/StaticLoggerBinder.class]
SLF4J: Found binding in [jar:file:/usr/hdp/2.4.2.0-258/spark/lib/spark-examples-1.6.1.2.4.2.0-258-hadoop2.7.1.2.4.2.0-258.jar!/org/slf4j/impl/StaticLoggerBinder.class]
SLF4J: See http://www.slf4j.org/codes.html#multiple_bindings for an explanation.
SLF4J: Actual binding is of type [org.slf4j.impl.Log4jLoggerFactory]
16/06/09 03:47:54 INFO CoarseGrainedExecutorBackend: Registered signal handlers for [TERM, HUP, INT]
16/06/09 03:47:55 WARN NativeCodeLoader: Unable to load native-hadoop library for your platform... using builtin-java classes where applicable
16/06/09 03:47:55 INFO SecurityManager: Changing view acls to: nobody,KevinGre
16/06/09 03:47:55 INFO SecurityManager: Changing modify acls to: nobody,KevinGre
16/06/09 03:47:55 INFO SecurityManager: SecurityManager: authentication disabled; ui acls disabled; users with view permissions: Set(nobody, KevinGre); users with modify permissions: Set(nobody, KevinGre)
16/06/09 03:47:56 INFO SecurityManager: Changing view acls to: nobody,KevinGre
16/06/09 03:47:56 INFO SecurityManager: Changing modify acls to: nobody,KevinGre
16/06/09 03:47:56 INFO SecurityManager: SecurityManager: authentication disabled; ui acls disabled; users with view permissions: Set(nobody, KevinGre); users with modify permissions: Set(nobody, KevinGre)
16/06/09 03:47:56 INFO Slf4jLogger: Slf4jLogger started
16/06/09 03:47:56 INFO Remoting: Starting remoting
16/06/09 03:47:56 INFO Remoting: Remoting started; listening on addresses :[akka.tcp://sparkExecutorActorSystem@10.0.0.10:42799]
16/06/09 03:47:56 INFO Utils: Successfully started service 'sparkExecutorActorSystem' on port 42799.
16/06/09 03:47:56 INFO DiskBlockManager: Created local directory at /mnt/resource/hadoop/yarn/local/usercache/KevinGre/appcache/application_1465316720094_0027/blockmgr-44a1ec77-0a77-4ca9-ade6-906434fe590d
16/06/09 03:47:56 INFO MemoryStore: MemoryStore started with capacity 4.1 GB
16/06/09 03:47:56 INFO CoarseGrainedExecutorBackend: Connecting to driver: spark://CoarseGrainedScheduler@10.0.0.10:38830
16/06/09 03:47:57 INFO CoarseGrainedExecutorBackend: Successfully registered with driver
16/06/09 03:47:57 INFO Executor: Starting executor ID 1 on host 10.0.0.10
16/06/09 03:47:57 INFO Utils: Successfully started service 'org.apache.spark.network.netty.NettyBlockTransferService' on port 42372.
16/06/09 03:47:57 INFO NettyBlockTransferService: Server created on 42372
16/06/09 03:47:57 INFO BlockManagerMaster: Trying to register BlockManager
16/06/09 03:47:57 INFO BlockManagerMaster: Registered BlockManager
16/06/09 03:48:01 INFO CoarseGrainedExecutorBackend: Got assigned task 0
16/06/09 03:48:01 INFO CoarseGrainedExecutorBackend: Got assigned task 2
16/06/09 03:48:01 INFO Executor: Running task 2.0 in stage 0.0 (TID 2)
16/06/09 03:48:01 INFO Executor: Running task 0.0 in stage 0.0 (TID 0)
16/06/09 03:48:01 INFO TorrentBroadcast: Started reading broadcast variable 0
16/06/09 03:48:01 INFO MemoryStore: Block broadcast_0_piece0 stored as bytes in memory (estimated size 2.7 KB, free 2.7 KB)
16/06/09 03:48:01 INFO TorrentBroadcast: Reading broadcast variable 0 took 134 ms
16/06/09 03:48:02 INFO MemoryStore: Block broadcast_0 stored as values in memory (estimated size 4.1 KB, free 6.8 KB)
16/06/09 03:48:02 INFO PythonRunner: Times: total = 525, boot = 513, init = 12, finish = 0
16/06/09 03:48:02 INFO PythonRunner: Times: total = 525, boot = 510, init = 15, finish = 0
16/06/09 03:48:02 INFO Executor: Finished task 0.0 in stage 0.0 (TID 0). 963 bytes result sent to driver
16/06/09 03:48:02 INFO Executor: Finished task 2.0 in stage 0.0 (TID 2). 963 bytes result sent to driver
16/06/09 03:48:03 INFO CoarseGrainedExecutorBackend: Driver commanded a shutdown
16/06/09 03:48:03 INFO metrics: type=GAUGE, name=application_1465316720094_0027.1.executor.filesystem.file.largeRead_ops, value=0
16/06/09 03:48:03 INFO metrics: type=GAUGE, name=application_1465316720094_0027.1.executor.filesystem.file.read_bytes, value=0
16/06/09 03:48:03 INFO metrics: type=GAUGE, name=application_1465316720094_0027.1.executor.filesystem.file.read_ops, value=0
16/06/09 03:48:03 INFO metrics: type=GAUGE, name=application_1465316720094_0027.1.executor.filesystem.file.write_bytes, value=0
16/06/09 03:48:03 INFO metrics: type=GAUGE, name=application_1465316720094_0027.1.executor.filesystem.file.write_ops, value=0
16/06/09 03:48:03 INFO metrics: type=GAUGE, name=application_1465316720094_0027.1.executor.filesystem.hdfs.largeRead_ops, value=0
16/06/09 03:48:03 INFO metrics: type=GAUGE, name=application_1465316720094_0027.1.executor.filesystem.hdfs.read_bytes, value=0
16/06/09 03:48:03 INFO metrics: type=GAUGE, name=application_1465316720094_0027.1.executor.filesystem.hdfs.read_ops, value=0
16/06/09 03:48:03 INFO metrics: type=GAUGE, name=application_1465316720094_0027.1.executor.filesystem.hdfs.write_bytes, value=0
16/06/09 03:48:03 INFO metrics: type=GAUGE, name=application_1465316720094_0027.1.executor.filesystem.hdfs.write_ops, value=0
16/06/09 03:48:03 INFO metrics: type=GAUGE, name=application_1465316720094_0027.1.executor.threadpool.activeTasks, value=0
16/06/09 03:48:03 INFO metrics: type=GAUGE, name=application_1465316720094_0027.1.executor.threadpool.completeTasks, value=2
16/06/09 03:48:03 INFO metrics: type=GAUGE, name=application_1465316720094_0027.1.executor.threadpool.currentPool_size, value=2
16/06/09 03:48:03 INFO metrics: type=GAUGE, name=application_1465316720094_0027.1.executor.threadpool.maxPool_size, value=2147483647
16/06/09 03:48:03 INFO metrics: type=GAUGE, name=application_1465316720094_0027.1.jvm.PS-MarkSweep.count, value=0
16/06/09 03:48:03 INFO metrics: type=GAUGE, name=application_1465316720094_0027.1.jvm.PS-MarkSweep.time, value=0
16/06/09 03:48:03 INFO metrics: type=GAUGE, name=application_1465316720094_0027.1.jvm.PS-Scavenge.count, value=0
16/06/09 03:48:03 INFO metrics: type=GAUGE, name=application_1465316720094_0027.1.jvm.PS-Scavenge.time, value=0
16/06/09 03:48:03 INFO metrics: type=GAUGE, name=application_1465316720094_0027.1.jvm.heap.committed, value=6174015488
16/06/09 03:48:03 INFO metrics: type=GAUGE, name=application_1465316720094_0027.1.jvm.heap.init, value=6442450944
16/06/09 03:48:03 INFO metrics: type=GAUGE, name=application_1465316720094_0027.1.jvm.heap.max, value=6174015488
16/06/09 03:48:03 INFO metrics: type=GAUGE, name=application_1465316720094_0027.1.jvm.heap.usage, value=0.24525637859883515
16/06/09 03:48:03 INFO metrics: type=GAUGE, name=application_1465316720094_0027.1.jvm.heap.used, value=1514216680
16/06/09 03:48:03 INFO metrics: type=GAUGE, name=application_1465316720094_0027.1.jvm.non-heap.committed, value=45547520
16/06/09 03:48:03 INFO metrics: type=GAUGE, name=application_1465316720094_0027.1.jvm.non-heap.init, value=24576000
16/06/09 03:48:03 INFO metrics: type=GAUGE, name=application_1465316720094_0027.1.jvm.non-heap.max, value=318767104
16/06/09 03:48:03 INFO metrics: type=GAUGE, name=application_1465316720094_0027.1.jvm.non-heap.usage, value=0.13829585125571803
16/06/09 03:48:03 INFO metrics: type=GAUGE, name=application_1465316720094_0027.1.jvm.non-heap.used, value=44084168
16/06/09 03:48:03 INFO metrics: type=GAUGE, name=application_1465316720094_0027.1.jvm.pools.Code-Cache.committed, value=2555904
16/06/09 03:48:03 INFO metrics: type=GAUGE, name=application_1465316720094_0027.1.jvm.pools.Code-Cache.init, value=2555904
16/06/09 03:48:03 INFO metrics: type=GAUGE, name=application_1465316720094_0027.1.jvm.pools.Code-Cache.max, value=50331648
16/06/09 03:48:03 INFO metrics: type=GAUGE, name=application_1465316720094_0027.1.jvm.pools.Code-Cache.usage, value=0.030742645263671875
16/06/09 03:48:03 INFO metrics: type=GAUGE, name=application_1465316720094_0027.1.jvm.pools.Code-Cache.used, value=1547328
16/06/09 03:48:03 INFO metrics: type=GAUGE, name=application_1465316720094_0027.1.jvm.pools.PS-Eden-Space.committed, value=1610612736
16/06/09 03:48:03 INFO metrics: type=GAUGE, name=application_1465316720094_0027.1.jvm.pools.PS-Eden-Space.init, value=1610612736
16/06/09 03:48:03 INFO metrics: type=GAUGE, name=application_1465316720094_0027.1.jvm.pools.PS-Eden-Space.max, value=1610612736
16/06/09 03:48:03 INFO metrics: type=GAUGE, name=application_1465316720094_0027.1.jvm.pools.PS-Eden-Space.usage, value=0.9401494512955347
16/06/09 03:48:03 INFO metrics: type=GAUGE, name=application_1465316720094_0027.1.jvm.pools.PS-Eden-Space.used, value=1514216680
16/06/09 03:48:03 INFO metrics: type=GAUGE, name=application_1465316720094_0027.1.jvm.pools.PS-Old-Gen.committed, value=4294967296
16/06/09 03:48:03 INFO metrics: type=GAUGE, name=application_1465316720094_0027.1.jvm.pools.PS-Old-Gen.init, value=4294967296
16/06/09 03:48:03 INFO metrics: type=GAUGE, name=application_1465316720094_0027.1.jvm.pools.PS-Old-Gen.max, value=4294967296
16/06/09 03:48:03 INFO metrics: type=GAUGE, name=application_1465316720094_0027.1.jvm.pools.PS-Old-Gen.usage, value=0.0
16/06/09 03:48:03 INFO metrics: type=GAUGE, name=application_1465316720094_0027.1.jvm.pools.PS-Old-Gen.used, value=0
16/06/09 03:48:03 INFO metrics: type=GAUGE, name=application_1465316720094_0027.1.jvm.pools.PS-Perm-Gen.committed, value=42991616
16/06/09 03:48:03 INFO metrics: type=GAUGE, name=application_1465316720094_0027.1.jvm.pools.PS-Perm-Gen.init, value=22020096
16/06/09 03:48:03 INFO metrics: type=GAUGE, name=application_1465316720094_0027.1.jvm.pools.PS-Perm-Gen.max, value=268435456
16/06/09 03:48:03 INFO metrics: type=GAUGE, name=application_1465316720094_0027.1.jvm.pools.PS-Perm-Gen.usage, value=0.15846556425094604
16/06/09 03:48:03 INFO metrics: type=GAUGE, name=application_1465316720094_0027.1.jvm.pools.PS-Perm-Gen.used, value=42537776
16/06/09 03:48:03 INFO metrics: type=GAUGE, name=application_1465316720094_0027.1.jvm.pools.PS-Survivor-Space.committed, value=268435456
16/06/09 03:48:03 INFO metrics: type=GAUGE, name=application_1465316720094_0027.1.jvm.pools.PS-Survivor-Space.init, value=268435456
16/06/09 03:48:03 INFO metrics: type=GAUGE, name=application_1465316720094_0027.1.jvm.pools.PS-Survivor-Space.max, value=268435456
16/06/09 03:48:03 INFO metrics: type=GAUGE, name=application_1465316720094_0027.1.jvm.pools.PS-Survivor-Space.usage, value=0.0
16/06/09 03:48:03 INFO metrics: type=GAUGE, name=application_1465316720094_0027.1.jvm.pools.PS-Survivor-Space.used, value=0
16/06/09 03:48:03 INFO metrics: type=GAUGE, name=application_1465316720094_0027.1.jvm.total.committed, value=6219563008
16/06/09 03:48:03 INFO metrics: type=GAUGE, name=application_1465316720094_0027.1.jvm.total.init, value=6467026944
16/06/09 03:48:03 INFO metrics: type=GAUGE, name=application_1465316720094_0027.1.jvm.total.max, value=6492782592
16/06/09 03:48:03 INFO metrics: type=GAUGE, name=application_1465316720094_0027.1.jvm.total.used, value=1558304168
16/06/09 03:48:03 INFO MemoryStore: MemoryStore cleared
16/06/09 03:48:03 INFO BlockManager: BlockManager stopped
16/06/09 03:48:03 ERROR CoarseGrainedExecutorBackend: Driver 10.0.0.10:38830 disassociated! Shutting down.
16/06/09 03:48:03 INFO ShutdownHookManager: Shutdown hook called
End of LogType:stderr

LogType:stdout
Log Upload Time:Thu Jun 09 03:48:05 +0000 2016
LogLength:0
Log Contents:
End of LogType:stdout
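[Note: the block that follows is the same trio of logs (directory.info, launch_container.sh, stderr/stdout) for the second executor, container_e05_1465316720094_0027_01_000003 on 10.0.0.8. Aggregated container logs like these can normally be retrieved after the application finishes with the YARN CLI, assuming log aggregation is enabled on the cluster:

yarn logs -applicationId application_1465316720094_0027
]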
Container: container_e05_1465316720094_0027_01_000003 on 10.0.0.8_30050
=========================================================================

LogType:directory.info
Log Upload Time:Thu Jun 09 03:48:04 +0000 2016
LogLength:4767
Log Contents:
ls -l:
total 28
-rw------- 1 nobody hadoop 103 Jun 9 03:47 container_tokens
-rwx------ 1 nobody hadoop 7141 Jun 9 03:47 launch_container.sh
lrwxrwxrwx 1 nobody hadoop 81 Jun 9 03:47 py4j-0.9-src.zip -> /mnt/resource/hadoop/yarn/local/usercache/KevinGre/filecache/182/py4j-0.9-src.zip
lrwxrwxrwx 1 nobody hadoop 76 Jun 9 03:47 pyspark.zip -> /mnt/resource/hadoop/yarn/local/usercache/KevinGre/filecache/181/pyspark.zip
lrwxrwxrwx 1 nobody hadoop 102 Jun 9 03:47 __spark_conf__ -> /mnt/resource/hadoop/yarn/local/usercache/KevinGre/filecache/183/__spark_conf__6294409567093172894.zip
drwxr-s--- 2 nobody hadoop 4096 Jun 9 03:47 tmp
find -L . -maxdepth 5 -ls:
3539318 4 drwxr-s--- 3 nobody hadoop 4096 Jun 9 03:47 .
3539315 352 -r-x------ 1 nobody nogroup 357163 Jun 9 03:47 ./pyspark.zip
3539322 4 drwxr-s--- 2 nobody hadoop 4096 Jun 9 03:47 ./tmp
3539453 8 -rwx------ 1 nobody hadoop 7141 Jun 9 03:47 ./launch_container.sh
3539454 4 -rw------- 1 nobody hadoop 103 Jun 9 03:47 ./container_tokens
3539323 4 drwx------ 2 nobody nogroup 4096 Jun 9 03:47 ./__spark_conf__
3539427 0 -r-x------ 1 nobody nogroup 0 Jun 9 03:47 ./__spark_conf__/yarn.exclude
3539445 4 -r-x------ 1 nobody nogroup 127 Jun 9 03:47 ./__spark_conf__/slaves
3539442 4 -r-x------ 1 nobody nogroup 1602 Jun 9 03:47 ./__spark_conf__/health_check
3539434 4 -r-x------ 1 nobody nogroup 1335 Jun 9 03:47 ./__spark_conf__/configuration.xsl
3539428 8 -r-x------ 1 nobody nogroup 6136 Jun 9 03:47 ./__spark_conf__/core-site.xml
3539431 4 -r-x------ 1 nobody nogroup 2444 Jun 9 03:47 ./__spark_conf__/capacity-scheduler.xml
3539450 4 -r-x------ 1 nobody nogroup 945 Jun 9 03:47 ./__spark_conf__/taskcontroller.cfg
3539436 12 -r-x------ 1 nobody nogroup 8543 Jun 9 03:47 ./__spark_conf__/hdfs-site.xml
3539426 4 -r-x------ 1 nobody nogroup 265 Jun 9 03:47 ./__spark_conf__/hadoop-metrics2-azure-file-system.properties
3539325 8 -r-x------ 1 nobody nogroup 5693 Jun 9 03:47 ./__spark_conf__/hadoop-env.sh
3539422 4 -r-x------ 1 nobody nogroup 3156 Jun 9 03:47 ./__spark_conf__/log4j.properties
3539449 4 -r-x------ 1 nobody nogroup 2268 Jun 9 03:47 ./__spark_conf__/ssl-server.xml.example
3539424 4 -r-x------ 1 nobody nogroup 247 Jun 9 03:47 ./__spark_conf__/hadoop-metrics2-adl-file-system.properties
3539452 4 -r-x------ 1 nobody nogroup 2431 Jun 9 03:47 ./__spark_conf__/__spark_conf__.properties
3539447 8 -r-x------ 1 nobody nogroup 5142 Jun 9 03:47 ./__spark_conf__/metrics.properties
3539446 4 -r-x------ 1 nobody nogroup 220 Jun 9 03:47 ./__spark_conf__/topology_mappings.data
3539324 8 -r-x------ 1 nobody nogroup 7579 Jun 9 03:47 ./__spark_conf__/mapred-site.xml
3539443 4 -r-x------ 1 nobody nogroup 2316 Jun 9 03:47 ./__spark_conf__/ssl-client.xml.example
3539429 4 -r-x------ 1 nobody nogroup 1 Jun 9 03:47 ./__spark_conf__/dfs.exclude
3539441 4 -r-x------ 1 nobody nogroup 757 Jun 9 03:47 ./__spark_conf__/mapred-env.sh
3539433 4 -r-x------ 1 nobody nogroup 758 Jun 9 03:47 ./__spark_conf__/mapred-site.xml.template
3539435 8 -r-x------ 1 nobody nogroup 4277 Jun 9 03:47 ./__spark_conf__/yarn-env.sh
3539432 4 -r-x------ 1 nobody nogroup 2358 Jun 9 03:47 ./__spark_conf__/topology_script.py
3539425 24 -r-x------ 1 nobody nogroup 20890 Jun 9 03:47 ./__spark_conf__/yarn-site.xml
3539423 8 -r-x------ 1 nobody nogroup 5640 Jun 9 03:47 ./__spark_conf__/hadoop-metrics2.properties
3539451 8 -r-x------ 1 nobody nogroup 4113 Jun 9 03:47 ./__spark_conf__/mapred-queues.xml.template
3539440 4 -r-x------ 1 nobody nogroup 2490 Jun 9 03:47 ./__spark_conf__/hadoop-metrics.properties
3539430 4 -r-x------ 1 nobody nogroup 744 Jun 9 03:47 ./__spark_conf__/ssl-client.xml
3539439 8 -r-x------ 1 nobody nogroup 4221 Jun 9 03:47 ./__spark_conf__/task-log4j.properties
3539444 4 -r-x------ 1 nobody nogroup 1308 Jun 9 03:47 ./__spark_conf__/hadoop-policy.xml
3539437 4 -r-x------ 1 nobody nogroup 1045 Jun 9 03:47 ./__spark_conf__/container-executor.cfg
3539448 4 -r-x------ 1 nobody nogroup 1000 Jun 9 03:47 ./__spark_conf__/ssl-server.xml
3539438 4 -r-x------ 1 nobody nogroup 1020 Jun 9 03:47 ./__spark_conf__/commons-logging.properties
3539319 44 -r-x------ 1 nobody nogroup 44846 Jun 9 03:47 ./py4j-0.9-src.zip
broken symlinks(find -L . -maxdepth 5 -type l -ls):
End of LogType:directory.info

LogType:launch_container.sh
Log Upload Time:Thu Jun 09 03:48:04 +0000 2016
LogLength:7141
Log Contents:
#!/bin/bash
export SPARK_YARN_MODE="true"
export SPARK_YARN_STAGING_DIR=".sparkStaging/application_1465316720094_0027"
export JAVA_HOME="/usr/lib/jvm/java-7-openjdk-amd64"
export SPARK_YARN_CACHE_FILES_VISIBILITIES="PRIVATE,PRIVATE"
export NM_AUX_SERVICE_mapreduce_shuffle="AAA0+gAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAA= "
export SPARK_YARN_CACHE_ARCHIVES="wasb://kevingre@wranglewestus.blob.core.windows.net/user/KevinGre/.sparkStaging/application_1465316720094_0027/__spark_conf__6294409567093172894.zip#__spark_conf__"
export SPARK_LOG_URL_STDERR="http://10.0.0.8:30060/node/containerlogs/container_e05_1465316720094_0027_01_000003/KevinGre/stderr?start=-4096"
export HADOOP_YARN_HOME="/usr/hdp/current/hadoop-yarn-nodemanager"
export NM_HOST="10.0.0.8"
export PYTHONPATH="/usr/hdp/current/spark-client/python:/usr/hdp/current/spark-client/python/lib/py4j-0.9-src.zip:$PWD/pyspark.zip:$PWD/py4j-0.9-src.zip"
export SPARK_YARN_CACHE_ARCHIVES_FILE_SIZES="105560"
export JVM_PID="$$"
export SPARK_HOME="/usr/hdp/current/spark-client"
export SPARK_YARN_CACHE_FILES_TIME_STAMPS="1465444064000,1465444064000"
export SPARK_USER="KevinGre"
export PWD="/mnt/resource/hadoop/yarn/local/usercache/KevinGre/appcache/application_1465316720094_0027/container_e05_1465316720094_0027_01_000003"
export NM_PORT="30050"
export LOGNAME="KevinGre"
export SPARK_LOG_URL_STDOUT="http://10.0.0.8:30060/node/containerlogs/container_e05_1465316720094_0027_01_000003/KevinGre/stdout?start=-4096"
export MALLOC_ARENA_MAX="4"
export LOG_DIRS="/mnt/resource/hadoop/yarn/log/application_1465316720094_0027/container_e05_1465316720094_0027_01_000003"
export SPARK_YARN_CACHE_FILES_FILE_SIZES="357163,44846"
export NM_HTTP_PORT="30060"
export LOCAL_DIRS="/mnt/resource/hadoop/yarn/local/usercache/KevinGre/appcache/application_1465316720094_0027"
export NM_AUX_SERVICE_spark_shuffle=""
export SPARK_YARN_CACHE_FILES="wasb://kevingre@wranglewestus.blob.core.windows.net/user/KevinGre/.sparkStaging/application_1465316720094_0027/pyspark.zip#pyspark.zip,wasb://kevingre@wranglewestus.blob.core.windows.net/user/KevinGre/.sparkStaging/application_1465316720094_0027/py4j-0.9-src.zip#py4j-0.9-src.zip"
export SPARK_YARN_CACHE_ARCHIVES_TIME_STAMPS="1465444064000"
export SPARK_DIST_CLASSPATH=":/usr/hdp/current/spark-client/lib/*:/usr/lib/hdinsight-datalake/*:/usr/hdp/current/spark-client/conf/hive-site.xml"
export CLASSPATH="$PWD:$PWD/__spark_conf__:/usr/hdp/current/spark-client/lib/spark-assembly.jar:$HADOOP_CONF_DIR:/usr/hdp/current/hadoop-client/*:/usr/hdp/current/hadoop-client/lib/*:/usr/hdp/current/hadoop-hdfs-client/*:/usr/hdp/current/hadoop-hdfs-client/lib/*:/usr/hdp/current/hadoop-yarn-client/*:/usr/hdp/current/hadoop-yarn-client/lib/*:$PWD/mr-framework/hadoop/share/hadoop/mapreduce/*:$PWD/mr-framework/hadoop/share/hadoop/mapreduce/lib/*:$PWD/mr-framework/hadoop/share/hadoop/common/*:$PWD/mr-framework/hadoop/share/hadoop/common/lib/*:$PWD/mr-framework/hadoop/share/hadoop/yarn/*:$PWD/mr-framework/hadoop/share/hadoop/yarn/lib/*:$PWD/mr-framework/hadoop/share/hadoop/hdfs/*:$PWD/mr-framework/hadoop/share/hadoop/hdfs/lib/*:$PWD/mr-framework/hadoop/share/hadoop/tools/lib/*:/usr/hdp/2.4.2.0-258/hadoop/lib/hadoop-lzo-0.6.0.2.4.2.0-258.jar:/etc/hadoop/conf/secure::/usr/hdp/current/spark-client/lib/*:/usr/lib/hdinsight-datalake/*:/usr/hdp/current/spark-client/conf/hive-site.xml"
export HADOOP_TOKEN_FILE_LOCATION="/mnt/resource/hadoop/yarn/local/usercache/KevinGre/appcache/application_1465316720094_0027/container_e05_1465316720094_0027_01_000003/container_tokens"
export SPARK_YARN_CACHE_ARCHIVES_VISIBILITIES="PRIVATE"
export USER="KevinGre"
export CONTAINER_ID="container_e05_1465316720094_0027_01_000003"
export HOME="/home/"
export PYTHONHASHSEED="0"
export HADOOP_CONF_DIR="/usr/hdp/current/hadoop-client/conf"
ln -sf "/mnt/resource/hadoop/yarn/local/usercache/KevinGre/filecache/183/__spark_conf__6294409567093172894.zip" "__spark_conf__"
hadoop_shell_errorcode=$?
if [ $hadoop_shell_errorcode -ne 0 ]
then
exit $hadoop_shell_errorcode
fi
ln -sf "/mnt/resource/hadoop/yarn/local/usercache/KevinGre/filecache/182/py4j-0.9-src.zip" "py4j-0.9-src.zip"
hadoop_shell_errorcode=$?
if [ $hadoop_shell_errorcode -ne 0 ]
then
exit $hadoop_shell_errorcode
fi
ln -sf "/mnt/resource/hadoop/yarn/local/usercache/KevinGre/filecache/181/pyspark.zip" "pyspark.zip"
hadoop_shell_errorcode=$?
if [ $hadoop_shell_errorcode -ne 0 ]
then
exit $hadoop_shell_errorcode
fi
# Creating copy of launch script
cp "launch_container.sh" "/mnt/resource/hadoop/yarn/log/application_1465316720094_0027/container_e05_1465316720094_0027_01_000003/launch_container.sh"
chmod 640 "/mnt/resource/hadoop/yarn/log/application_1465316720094_0027/container_e05_1465316720094_0027_01_000003/launch_container.sh"
# Determining directory contents
echo "ls -l:" 1>"/mnt/resource/hadoop/yarn/log/application_1465316720094_0027/container_e05_1465316720094_0027_01_000003/directory.info"
ls -l 1>>"/mnt/resource/hadoop/yarn/log/application_1465316720094_0027/container_e05_1465316720094_0027_01_000003/directory.info"
echo "find -L . -maxdepth 5 -ls:" 1>>"/mnt/resource/hadoop/yarn/log/application_1465316720094_0027/container_e05_1465316720094_0027_01_000003/directory.info"
find -L . -maxdepth 5 -ls 1>>"/mnt/resource/hadoop/yarn/log/application_1465316720094_0027/container_e05_1465316720094_0027_01_000003/directory.info"
echo "broken symlinks(find -L . -maxdepth 5 -type l -ls):" 1>>"/mnt/resource/hadoop/yarn/log/application_1465316720094_0027/container_e05_1465316720094_0027_01_000003/directory.info"
find -L . -maxdepth 5 -type l -ls 1>>"/mnt/resource/hadoop/yarn/log/application_1465316720094_0027/container_e05_1465316720094_0027_01_000003/directory.info"
exec /bin/bash -c "$JAVA_HOME/bin/java -server -XX:OnOutOfMemoryError='kill %p' -Xms6144m -Xmx6144m '-Dhdp.version=' '-Detwlogger.component=sparkexecutor' '-DlogFilter.filename=SparkLogFilters.xml' '-DpatternGroup.filename=SparkPatternGroups.xml' '-Dlog4jspark.root.logger=INFO,console,DRFA,ETW,Anonymizer' '-Dlog4jspark.log.dir=/var/log/sparkapp' '-Dlog4jspark.log.file=sparkexecutor_\${user.name}.log' '-Dlog4j.configuration=file:/usr/hdp/current/spark-client/conf/log4j.properties' '-Djavax.xml.parsers.SAXParserFactory=com.sun.org.apache.xerces.internal.jaxp.SAXParserFactoryImpl' -Djava.io.tmpdir=$PWD/tmp '-Dspark.driver.port=38830' '-Dspark.history.ui.port=18080' '-Dspark.ui.port=0' -Dspark.yarn.app.container.log.dir=/mnt/resource/hadoop/yarn/log/application_1465316720094_0027/container_e05_1465316720094_0027_01_000003 -XX:MaxPermSize=256m org.apache.spark.executor.CoarseGrainedExecutorBackend --driver-url spark://CoarseGrainedScheduler@10.0.0.10:38830 --executor-id 2 --hostname 10.0.0.8 --cores 2 --app-id application_1465316720094_0027 --user-class-path file:$PWD/__app__.jar 1> /mnt/resource/hadoop/yarn/log/application_1465316720094_0027/container_e05_1465316720094_0027_01_000003/stdout 2> /mnt/resource/hadoop/yarn/log/application_1465316720094_0027/container_e05_1465316720094_0027_01_000003/stderr"
hadoop_shell_errorcode=$?
if [ $hadoop_shell_errorcode -ne 0 ]
then
exit $hadoop_shell_errorcode
fi
End of LogType:launch_container.sh

LogType:stderr
Log Upload Time:Thu Jun 09 03:48:04 +0000 2016
LogLength:11658
Log Contents:
SLF4J: Class path contains multiple SLF4J bindings.
SLF4J: Found binding in [jar:file:/usr/hdp/2.4.2.0-258/spark/lib/spark-assembly-1.6.1.2.4.2.0-258-hadoop2.7.1.2.4.2.0-258.jar!/org/slf4j/impl/StaticLoggerBinder.class]
SLF4J: Found binding in [jar:file:/usr/hdp/2.4.2.0-258/hadoop/lib/slf4j-log4j12-1.7.10.jar!/org/slf4j/impl/StaticLoggerBinder.class]
SLF4J: Found binding in [jar:file:/usr/hdp/2.4.2.0-258/spark/lib/spark-examples-1.6.1.2.4.2.0-258-hadoop2.7.1.2.4.2.0-258.jar!/org/slf4j/impl/StaticLoggerBinder.class]
SLF4J: See http://www.slf4j.org/codes.html#multiple_bindings for an explanation.
SLF4J: Actual binding is of type [org.slf4j.impl.Log4jLoggerFactory]
16/06/09 03:47:58 INFO CoarseGrainedExecutorBackend: Registered signal handlers for [TERM, HUP, INT]
16/06/09 03:47:59 WARN NativeCodeLoader: Unable to load native-hadoop library for your platform... using builtin-java classes where applicable
16/06/09 03:47:59 INFO SecurityManager: Changing view acls to: nobody,KevinGre
16/06/09 03:47:59 INFO SecurityManager: Changing modify acls to: nobody,KevinGre
16/06/09 03:47:59 INFO SecurityManager: SecurityManager: authentication disabled; ui acls disabled; users with view permissions: Set(nobody, KevinGre); users with modify permissions: Set(nobody, KevinGre)
16/06/09 03:48:00 INFO SecurityManager: Changing view acls to: nobody,KevinGre
16/06/09 03:48:00 INFO SecurityManager: Changing modify acls to: nobody,KevinGre
16/06/09 03:48:00 INFO SecurityManager: SecurityManager: authentication disabled; ui acls disabled; users with view permissions: Set(nobody, KevinGre); users with modify permissions: Set(nobody, KevinGre)
16/06/09 03:48:00 INFO Slf4jLogger: Slf4jLogger started
16/06/09 03:48:00 INFO Remoting: Starting remoting
16/06/09 03:48:00 INFO Remoting: Remoting started; listening on addresses :[akka.tcp://sparkExecutorActorSystem@10.0.0.8:42947]
16/06/09 03:48:00 INFO Utils: Successfully started service 'sparkExecutorActorSystem' on port 42947.
16/06/09 03:48:00 INFO DiskBlockManager: Created local directory at /mnt/resource/hadoop/yarn/local/usercache/KevinGre/appcache/application_1465316720094_0027/blockmgr-016f9533-39de-4a51-9b76-b0c6f92bc48d
16/06/09 03:48:00 INFO MemoryStore: MemoryStore started with capacity 4.1 GB
16/06/09 03:48:01 INFO CoarseGrainedExecutorBackend: Connecting to driver: spark://CoarseGrainedScheduler@10.0.0.10:38830
16/06/09 03:48:01 INFO CoarseGrainedExecutorBackend: Successfully registered with driver
16/06/09 03:48:01 INFO Executor: Starting executor ID 2 on host 10.0.0.8
16/06/09 03:48:01 INFO Utils: Successfully started service 'org.apache.spark.network.netty.NettyBlockTransferService' on port 38225.
16/06/09 03:48:01 INFO NettyBlockTransferService: Server created on 38225
16/06/09 03:48:01 INFO BlockManagerMaster: Trying to register BlockManager
16/06/09 03:48:01 INFO BlockManagerMaster: Registered BlockManager
16/06/09 03:48:01 INFO CoarseGrainedExecutorBackend: Got assigned task 1
16/06/09 03:48:01 INFO CoarseGrainedExecutorBackend: Got assigned task 3
16/06/09 03:48:01 INFO Executor: Running task 3.0 in stage 0.0 (TID 3)
16/06/09 03:48:01 INFO Executor: Running task 1.0 in stage 0.0 (TID 1)
16/06/09 03:48:01 INFO TorrentBroadcast: Started reading broadcast variable 0
16/06/09 03:48:01 INFO MemoryStore: Block broadcast_0_piece0 stored as bytes in memory (estimated size 2.7 KB, free 2.7 KB)
16/06/09 03:48:01 INFO TorrentBroadcast: Reading broadcast variable 0 took 123 ms
16/06/09 03:48:02 INFO MemoryStore: Block broadcast_0 stored as values in memory (estimated size 4.1 KB, free 6.8 KB)
16/06/09 03:48:02 INFO PythonRunner: Times: total = 557, boot = 537, init = 19, finish = 1
16/06/09 03:48:02 INFO PythonRunner: Times: total = 565, boot = 535, init = 30, finish = 0
16/06/09 03:48:02 INFO Executor: Finished task 3.0 in stage 0.0 (TID 3). 1111 bytes result sent to driver
16/06/09 03:48:02 INFO Executor: Finished task 1.0 in stage 0.0 (TID 1). 962 bytes result sent to driver
16/06/09 03:48:03 INFO CoarseGrainedExecutorBackend: Driver commanded a shutdown
16/06/09 03:48:03 INFO metrics: type=GAUGE, name=application_1465316720094_0027.2.executor.filesystem.file.largeRead_ops, value=0
16/06/09 03:48:03 INFO metrics: type=GAUGE, name=application_1465316720094_0027.2.executor.filesystem.file.read_bytes, value=0
16/06/09 03:48:03 INFO metrics: type=GAUGE, name=application_1465316720094_0027.2.executor.filesystem.file.read_ops, value=0
16/06/09 03:48:03 INFO metrics: type=GAUGE, name=application_1465316720094_0027.2.executor.filesystem.file.write_bytes, value=0
16/06/09 03:48:03 INFO metrics: type=GAUGE, name=application_1465316720094_0027.2.executor.filesystem.file.write_ops, value=0
16/06/09 03:48:03 INFO metrics: type=GAUGE, name=application_1465316720094_0027.2.executor.filesystem.hdfs.largeRead_ops, value=0
16/06/09 03:48:03 INFO metrics: type=GAUGE, name=application_1465316720094_0027.2.executor.filesystem.hdfs.read_bytes, value=0
16/06/09 03:48:03 INFO metrics: type=GAUGE, name=application_1465316720094_0027.2.executor.filesystem.hdfs.read_ops, value=0
16/06/09 03:48:03 INFO metrics: type=GAUGE, name=application_1465316720094_0027.2.executor.filesystem.hdfs.write_bytes, value=0
16/06/09 03:48:03 INFO metrics: type=GAUGE, name=application_1465316720094_0027.2.executor.filesystem.hdfs.write_ops, value=0
16/06/09 03:48:03 INFO metrics: type=GAUGE, name=application_1465316720094_0027.2.executor.threadpool.activeTasks, value=0
16/06/09 03:48:03 INFO metrics: type=GAUGE, name=application_1465316720094_0027.2.executor.threadpool.completeTasks, value=2
16/06/09 03:48:03 INFO metrics: type=GAUGE, name=application_1465316720094_0027.2.executor.threadpool.currentPool_size, value=2
16/06/09 03:48:03 INFO metrics: type=GAUGE, name=application_1465316720094_0027.2.executor.threadpool.maxPool_size, value=2147483647
16/06/09 03:48:03 INFO metrics: type=GAUGE, name=application_1465316720094_0027.2.jvm.PS-MarkSweep.count, value=0
16/06/09 03:48:03 INFO metrics: type=GAUGE, name=application_1465316720094_0027.2.jvm.PS-MarkSweep.time, value=0
16/06/09 03:48:03 INFO metrics: type=GAUGE, name=application_1465316720094_0027.2.jvm.PS-Scavenge.count, value=0
16/06/09 03:48:03 INFO metrics: type=GAUGE, name=application_1465316720094_0027.2.jvm.PS-Scavenge.time, value=0
16/06/09 03:48:03 INFO metrics: type=GAUGE, name=application_1465316720094_0027.2.jvm.heap.committed, value=6174015488
16/06/09 03:48:03 INFO metrics: type=GAUGE, name=application_1465316720094_0027.2.jvm.heap.init, value=6442450944
16/06/09 03:48:03 INFO metrics: type=GAUGE, name=application_1465316720094_0027.2.jvm.heap.max, value=6174015488
16/06/09 03:48:03 INFO metrics: type=GAUGE, name=application_1465316720094_0027.2.jvm.heap.usage, value=0.25570756326551025
16/06/09 03:48:03 INFO metrics: type=GAUGE, name=application_1465316720094_0027.2.jvm.heap.used, value=1578742456
16/06/09 03:48:03 INFO metrics: type=GAUGE, name=application_1465316720094_0027.2.jvm.non-heap.committed, value=45547520
16/06/09 03:48:03 INFO metrics: type=GAUGE, name=application_1465316720094_0027.2.jvm.non-heap.init, value=24576000
16/06/09 03:48:03 INFO metrics: type=GAUGE, name=application_1465316720094_0027.2.jvm.non-heap.max, value=318767104
16/06/09 03:48:03 INFO metrics: type=GAUGE, name=application_1465316720094_0027.2.jvm.non-heap.usage, value=0.13823865589342618
16/06/09 03:48:03 INFO metrics: type=GAUGE, name=application_1465316720094_0027.2.jvm.non-heap.used, value=44065936
16/06/09 03:48:03 INFO metrics: type=GAUGE, name=application_1465316720094_0027.2.jvm.pools.Code-Cache.committed, value=2555904
16/06/09 03:48:03 INFO metrics: type=GAUGE, name=application_1465316720094_0027.2.jvm.pools.Code-Cache.init, value=2555904
16/06/09 03:48:03 INFO metrics: type=GAUGE, name=application_1465316720094_0027.2.jvm.pools.Code-Cache.max, value=50331648
16/06/09 03:48:03 INFO metrics: type=GAUGE, name=application_1465316720094_0027.2.jvm.pools.Code-Cache.usage, value=0.030375162760416668
16/06/09 03:48:03 INFO metrics: type=GAUGE, name=application_1465316720094_0027.2.jvm.pools.Code-Cache.used, value=1528832
16/06/09 03:48:03 INFO metrics: type=GAUGE, name=application_1465316720094_0027.2.jvm.pools.PS-Eden-Space.committed, value=1610612736
16/06/09 03:48:03 INFO metrics: type=GAUGE, name=application_1465316720094_0027.2.jvm.pools.PS-Eden-Space.init, value=1610612736
16/06/09 03:48:03 INFO metrics: type=GAUGE, name=application_1465316720094_0027.2.jvm.pools.PS-Eden-Space.max, value=1610612736
16/06/09 03:48:03 INFO metrics: type=GAUGE, name=application_1465316720094_0027.2.jvm.pools.PS-Eden-Space.usage, value=0.9802123258511225
16/06/09 03:48:03 INFO metrics: type=GAUGE, name=application_1465316720094_0027.2.jvm.pools.PS-Eden-Space.used, value=1578742456
16/06/09 03:48:03 INFO metrics: type=GAUGE, name=application_1465316720094_0027.2.jvm.pools.PS-Old-Gen.committed, value=4294967296
16/06/09 03:48:03 INFO metrics: type=GAUGE, name=application_1465316720094_0027.2.jvm.pools.PS-Old-Gen.init, value=4294967296
16/06/09 03:48:03 INFO metrics: type=GAUGE, name=application_1465316720094_0027.2.jvm.pools.PS-Old-Gen.max, value=4294967296
16/06/09 03:48:03 INFO metrics: type=GAUGE, name=application_1465316720094_0027.2.jvm.pools.PS-Old-Gen.usage, value=0.0
16/06/09 03:48:03 INFO metrics: type=GAUGE, name=application_1465316720094_0027.2.jvm.pools.PS-Old-Gen.used, value=0
16/06/09 03:48:03 INFO metrics: type=GAUGE, name=application_1465316720094_0027.2.jvm.pools.PS-Perm-Gen.committed, value=42991616
16/06/09 03:48:03 INFO metrics: type=GAUGE, name=application_1465316720094_0027.2.jvm.pools.PS-Perm-Gen.init, value=22020096
16/06/09 03:48:03 INFO metrics: type=GAUGE, name=application_1465316720094_0027.2.jvm.pools.PS-Perm-Gen.max, value=268435456
16/06/09 03:48:03 INFO metrics: type=GAUGE, name=application_1465316720094_0027.2.jvm.pools.PS-Perm-Gen.usage, value=0.15846654772758484
16/06/09 03:48:03 INFO metrics: type=GAUGE, name=application_1465316720094_0027.2.jvm.pools.PS-Perm-Gen.used, value=42538040
16/06/09 03:48:03 INFO metrics: type=GAUGE, name=application_1465316720094_0027.2.jvm.pools.PS-Survivor-Space.committed, value=268435456
16/06/09 03:48:03 INFO metrics: type=GAUGE, name=application_1465316720094_0027.2.jvm.pools.PS-Survivor-Space.init, value=268435456
16/06/09 03:48:03 INFO metrics: type=GAUGE, name=application_1465316720094_0027.2.jvm.pools.PS-Survivor-Space.max, value=268435456
16/06/09 03:48:03 INFO metrics: type=GAUGE, name=application_1465316720094_0027.2.jvm.pools.PS-Survivor-Space.usage, value=0.0
16/06/09 03:48:03 INFO metrics: type=GAUGE, name=application_1465316720094_0027.2.jvm.pools.PS-Survivor-Space.used, value=0
16/06/09 03:48:03 INFO metrics: type=GAUGE, name=application_1465316720094_0027.2.jvm.total.committed, value=6219563008
16/06/09 03:48:03 INFO metrics: type=GAUGE, name=application_1465316720094_0027.2.jvm.total.init, value=6467026944
16/06/09 03:48:03 INFO metrics: type=GAUGE, name=application_1465316720094_0027.2.jvm.total.max, value=6492782592
16/06/09 03:48:03 INFO metrics: type=GAUGE, name=application_1465316720094_0027.2.jvm.total.used, value=1622814272
16/06/09 03:48:03 INFO MemoryStore: MemoryStore cleared
16/06/09 03:48:03 INFO BlockManager: BlockManager stopped
16/06/09 03:48:03 INFO RemoteActorRefProvider$RemotingTerminator: Shutting down remote daemon.
16/06/09 03:48:03 INFO RemoteActorRefProvider$RemotingTerminator: Remote daemon shut down; proceeding with flushing remote transports.
16/06/09 03:48:03 ERROR CoarseGrainedExecutorBackend: Driver 10.0.0.10:38830 disassociated! Shutting down.
16/06/09 03:48:03 INFO ShutdownHookManager: Shutdown hook called
End of LogType:stderr

LogType:stdout
Log Upload Time:Thu Jun 09 03:48:04 +0000 2016
LogLength:0
Log Contents:
End of LogType:stdout

KevinGre@hn0-keving:~/probe$