SLF4J: Class path contains multiple SLF4J bindings.
SLF4J: Found binding in [jar:file:/mnt/yarn/usercache/hadoop/filecache/2159/__spark_libs__3027229192936044972.zip/slf4j-log4j12-1.7.16.jar!/org/slf4j/impl/StaticLoggerBinder.class]
SLF4J: Found binding in [jar:file:/usr/lib/hadoop/lib/slf4j-log4j12-1.7.10.jar!/org/slf4j/impl/StaticLoggerBinder.class]
SLF4J: See http://www.slf4j.org/codes.html#multiple_bindings for an explanation.
SLF4J: Actual binding is of type [org.slf4j.impl.Log4jLoggerFactory]
17/02/28 10:47:59 INFO SignalUtils: Registered signal handler for TERM
17/02/28 10:47:59 INFO SignalUtils: Registered signal handler for HUP
17/02/28 10:47:59 INFO SignalUtils: Registered signal handler for INT
17/02/28 10:48:00 INFO ApplicationMaster: Preparing Local resources
17/02/28 10:48:01 INFO ApplicationMaster: Prepared Local resources Map(
  org.joda_joda-convert-1.2.jar -> resource { scheme: "hdfs" host: "ip-10-200-139-129.ec2.internal" port: 8020 file: "/user/hadoop/.sparkStaging/application_1482343367445_0211/org.joda_joda-convert-1.2.jar" } size: 38460 timestamp: 1488278877363 type: FILE visibility: PRIVATE,
  livy-repl_2.11-0.3.1-SNAPSHOT.jar -> resource { scheme: "hdfs" host: "ip-10-200-139-129.ec2.internal" port: 8020 file: "/user/hadoop/.sparkStaging/application_1482343367445_0211/livy-repl_2.11-0.3.1-SNAPSHOT.jar" } size: 944243 timestamp: 1488278876773 type: FILE visibility: PRIVATE,
  org.mongodb_mongo-java-driver-3.2.2.jar -> resource { scheme: "hdfs" host: "ip-10-200-139-129.ec2.internal" port: 8020 file: "/user/hadoop/.sparkStaging/application_1482343367445_0211/org.mongodb_mongo-java-driver-3.2.2.jar" } size: 1484724 timestamp: 1488278877426 type: FILE visibility: PRIVATE,
  __spark_libs__ -> resource { scheme: "hdfs" host: "ip-10-200-139-129.ec2.internal" port: 8020 file: "/user/hadoop/.sparkStaging/application_1482343367445_0211/__spark_libs__3027229192936044972.zip" } size: 194322492 timestamp: 1488278876579 type: ARCHIVE visibility: PRIVATE,
  livy-rsc-0.3.1-SNAPSHOT.jar -> resource { scheme: "hdfs" host: "ip-10-200-139-129.ec2.internal" port: 8020 file: "/user/hadoop/.sparkStaging/application_1482343367445_0211/livy-rsc-0.3.1-SNAPSHOT.jar" } size: 484707 timestamp: 1488278876707 type: FILE visibility: PRIVATE,
  io.netty_netty-all-4.0.33.Final.jar -> resource { scheme: "hdfs" host: "ip-10-200-139-129.ec2.internal" port: 8020 file: "/user/hadoop/.sparkStaging/application_1482343367445_0211/io.netty_netty-all-4.0.33.Final.jar" } size: 2112017 timestamp: 1488278877303 type: FILE visibility: PRIVATE,
  commons-beanutils_commons-beanutils-1.8.0.jar -> resource { scheme: "hdfs" host: "ip-10-200-139-129.ec2.internal" port: 8020 file: "/user/hadoop/.sparkStaging/application_1482343367445_0211/commons-beanutils_commons-beanutils-1.8.0.jar" } size: 231320 timestamp: 1488278877317 type: FILE visibility: PRIVATE,
  livy-api-0.3.1-SNAPSHOT.jar -> resource { scheme: "hdfs" host: "ip-10-200-139-129.ec2.internal" port: 8020 file: "/user/hadoop/.sparkStaging/application_1482343367445_0211/livy-api-0.3.1-SNAPSHOT.jar" } size: 9098 timestamp: 1488278876725 type: FILE visibility: PRIVATE,
  netty-all-4.0.29.Final.jar -> resource { scheme: "hdfs" host: "ip-10-200-139-129.ec2.internal" port: 8020 file: "/user/hadoop/.sparkStaging/application_1482343367445_0211/netty-all-4.0.29.Final.jar" } size: 2054931 timestamp: 1488278876687 type: FILE visibility: PRIVATE,
  org.scala-lang_scala-reflect-2.11.8.jar -> resource { scheme: "hdfs" host: "ip-10-200-139-129.ec2.internal" port: 8020 file: "/user/hadoop/.sparkStaging/application_1482343367445_0211/org.scala-lang_scala-reflect-2.11.8.jar" } size: 4573750 timestamp: 1488278877403 type: FILE visibility: PRIVATE,
  __spark_conf__ -> resource { scheme: "hdfs" host: "ip-10-200-139-129.ec2.internal" port: 8020 file: "/user/hadoop/.sparkStaging/application_1482343367445_0211/__spark_conf__.zip" } size: 78348 timestamp: 1488278877459 type: ARCHIVE visibility: PRIVATE,
  com.datastax.spark_spark-cassandra-connector_2.11-2.0.0-M3.jar -> resource { scheme: "hdfs" host: "ip-10-200-139-129.ec2.internal" port: 8020 file: "/user/hadoop/.sparkStaging/application_1482343367445_0211/com.datastax.spark_spark-cassandra-connector_2.11-2.0.0-M3.jar" } size: 6036068 timestamp: 1488278876841 type: FILE visibility: PRIVATE,
  commons-codec-1.9.jar -> resource { scheme: "hdfs" host: "ip-10-200-139-129.ec2.internal" port: 8020 file: "/user/hadoop/.sparkStaging/application_1482343367445_0211/commons-codec-1.9.jar" } size: 263965 timestamp: 1488278876748 type: FILE visibility: PRIVATE,
  org.mongodb.spark_mongo-spark-connector_2.10-2.0.0.jar -> resource { scheme: "hdfs" host: "ip-10-200-139-129.ec2.internal" port: 8020 file: "/user/hadoop/.sparkStaging/application_1482343367445_0211/org.mongodb.spark_mongo-spark-connector_2.10-2.0.0.jar" } size: 659989 timestamp: 1488278877278 type: FILE visibility: PRIVATE,
  com.twitter_jsr166e-1.1.0.jar -> resource { scheme: "hdfs" host: "ip-10-200-139-129.ec2.internal" port: 8020 file: "/user/hadoop/.sparkStaging/application_1482343367445_0211/com.twitter_jsr166e-1.1.0.jar" } size: 62226 timestamp: 1488278877348 type: FILE visibility: PRIVATE,
  joda-time_joda-time-2.3.jar -> resource { scheme: "hdfs" host: "ip-10-200-139-129.ec2.internal" port: 8020 file: "/user/hadoop/.sparkStaging/application_1482343367445_0211/joda-time_joda-time-2.3.jar" } size: 581571 timestamp: 1488278877336 type: FILE visibility: PRIVATE,
  livy-core_2.11-0.3.1-SNAPSHOT.jar -> resource { scheme: "hdfs" host: "ip-10-200-139-129.ec2.internal" port: 8020 file: "/user/hadoop/.sparkStaging/application_1482343367445_0211/livy-core_2.11-0.3.1-SNAPSHOT.jar" } size: 88646 timestamp: 1488278876792 type: FILE visibility: PRIVATE)
17/02/28 10:48:01 INFO ApplicationMaster: ApplicationAttemptId: appattempt_1482343367445_0211_000001
17/02/28 10:48:01 INFO SecurityManager: Changing view acls to: yarn,hadoop
17/02/28 10:48:01 INFO SecurityManager: Changing modify acls to: yarn,hadoop
17/02/28 10:48:01 INFO SecurityManager: Changing view acls groups to: 
17/02/28 10:48:01 INFO SecurityManager: Changing modify acls groups to: 
17/02/28 10:48:01 INFO SecurityManager: SecurityManager: authentication disabled; ui acls disabled; users with view permissions: Set(yarn, hadoop); groups with view permissions: Set(); users with modify permissions: Set(yarn, hadoop); groups with modify permissions: Set()
17/02/28 10:48:02 INFO ApplicationMaster: Waiting for Spark driver to be reachable.
17/02/28 10:48:02 INFO ApplicationMaster: Driver now available: 10.200.139.129:41666
17/02/28 10:48:02 INFO TransportClientFactory: Successfully created connection to /10.200.139.129:41666 after 80 ms (0 ms spent in bootstraps)
17/02/28 10:48:02 INFO ApplicationMaster$AMEndpoint: Add WebUI Filter. AddWebUIFilter(org.apache.hadoop.yarn.server.webproxy.amfilter.AmIpFilter,Map(PROXY_HOSTS -> ip-10-200-139-129.ec2.internal, PROXY_URI_BASES -> http://ip-10-200-139-129.ec2.internal:20888/proxy/application_1482343367445_0211),/proxy/application_1482343367445_0211)
17/02/28 10:48:02 INFO RMProxy: Connecting to ResourceManager at ip-10-200-139-129.ec2.internal/10.200.139.129:8030
17/02/28 10:48:02 INFO YarnRMClient: Registering the ApplicationMaster
17/02/28 10:48:02 INFO Utils: Using initial executors = 1, max of spark.dynamicAllocation.initialExecutors, spark.dynamicAllocation.minExecutors and spark.executor.instances
17/02/28 10:48:02 INFO YarnAllocator: Will request 1 executor containers, each with 4 cores and 5632 MB memory including 512 MB overhead
17/02/28 10:48:02 INFO YarnAllocator: Canceled 0 container requests (locality no longer needed)
17/02/28 10:48:02 INFO YarnAllocator: Submitted container request (host: Any, capability: <memory:5632, vCores:4>)
17/02/28 10:48:02 INFO ApplicationMaster: Started progress reporter thread with (heartbeat : 3000, initial allocation : 200) intervals
17/02/28 10:48:02 INFO AMRMClientImpl: Received new token for : ip-10-200-139-13.ec2.internal:8041
17/02/28 10:48:02 INFO YarnAllocator: Launching container container_1482343367445_0211_01_000002 for on host ip-10-200-139-13.ec2.internal
17/02/28 10:48:02 INFO YarnAllocator: Launching ExecutorRunnable. driverUrl: spark://CoarseGrainedScheduler@10.200.139.129:41666, executorHostname: ip-10-200-139-13.ec2.internal
17/02/28 10:48:02 INFO YarnAllocator: Received 1 containers from YARN, launching executors on 1 of them.
17/02/28 10:48:02 INFO ExecutorRunnable: Starting Executor Container
17/02/28 10:48:02 INFO ContainerManagementProtocolProxy: yarn.client.max-cached-nodemanagers-proxies : 0
17/02/28 10:48:02 INFO ExecutorRunnable: Setting up ContainerLaunchContext
17/02/28 10:48:02 INFO ExecutorRunnable: 
===============================================================================
YARN executor launch context:
  env:
    CLASSPATH -> /usr/lib/hadoop-lzo/lib/*:/usr/lib/hadoop/hadoop-aws.jar:/usr/share/aws/aws-java-sdk/*:/usr/share/aws/emr/emrfs/conf:/usr/share/aws/emr/emrfs/lib/*:/usr/share/aws/emr/emrfs/auxlib/*:/usr/share/aws/emr/security/conf:/usr/share/aws/emr/security/lib/*<CPS>{{PWD}}<CPS>{{PWD}}/__spark_conf__<CPS>{{PWD}}/__spark_libs__/*<CPS>$HADOOP_CONF_DIR<CPS>$HADOOP_COMMON_HOME/*<CPS>$HADOOP_COMMON_HOME/lib/*<CPS>$HADOOP_HDFS_HOME/*<CPS>$HADOOP_HDFS_HOME/lib/*<CPS>$HADOOP_MAPRED_HOME/*<CPS>$HADOOP_MAPRED_HOME/lib/*<CPS>$HADOOP_YARN_HOME/*<CPS>$HADOOP_YARN_HOME/lib/*<CPS>/usr/lib/hadoop-lzo/lib/*<CPS>/usr/share/aws/emr/emrfs/conf<CPS>/usr/share/aws/emr/emrfs/lib/*<CPS>/usr/share/aws/emr/emrfs/auxlib/*<CPS>/usr/share/aws/emr/lib/*<CPS>/usr/share/aws/emr/ddb/lib/emr-ddb-hadoop.jar<CPS>/usr/share/aws/emr/goodies/lib/emr-hadoop-goodies.jar<CPS>/usr/share/aws/emr/kinesis/lib/emr-kinesis-hadoop.jar<CPS>/usr/lib/spark/yarn/lib/datanucleus-api-jdo.jar<CPS>/usr/lib/spark/yarn/lib/datanucleus-core.jar<CPS>/usr/lib/spark/yarn/lib/datanucleus-rdbms.jar<CPS>/usr/share/aws/emr/cloudwatch-sink/lib/*<CPS>$HADOOP_MAPRED_HOME/share/hadoop/mapreduce/*<CPS>$HADOOP_MAPRED_HOME/share/hadoop/mapreduce/lib/*<CPS>/usr/lib/hadoop-lzo/lib/*<CPS>/usr/share/aws/emr/emrfs/conf<CPS>/usr/share/aws/emr/emrfs/lib/*<CPS>/usr/share/aws/emr/emrfs/auxlib/*<CPS>/usr/share/aws/emr/lib/*<CPS>/usr/share/aws/emr/ddb/lib/emr-ddb-hadoop.jar<CPS>/usr/share/aws/emr/goodies/lib/emr-hadoop-goodies.jar<CPS>/usr/share/aws/emr/kinesis/lib/emr-kinesis-hadoop.jar<CPS>/usr/share/aws/emr/cloudwatch-sink/lib/*
    SPARK_LOG_URL_STDERR -> http://ip-10-200-139-13.ec2.internal:8042/node/containerlogs/container_1482343367445_0211_01_000002/hadoop/stderr?start=-4096
    SPARK_YARN_STAGING_DIR -> hdfs://ip-10-200-139-129.ec2.internal:8020/user/hadoop/.sparkStaging/application_1482343367445_0211
    SPARK_USER -> hadoop
    SPARK_YARN_MODE -> true
    SPARK_LOG_URL_STDOUT -> http://ip-10-200-139-13.ec2.internal:8042/node/containerlogs/container_1482343367445_0211_01_000002/hadoop/stdout?start=-4096

  command:
    LD_LIBRARY_PATH="/usr/lib/hadoop/lib/native:/usr/lib/hadoop-lzo/lib/native:$LD_LIBRARY_PATH" {{JAVA_HOME}}/bin/java -server -Xmx5120m '-verbose:gc' '-XX:+PrintGCDetails' '-XX:+PrintGCDateStamps' '-XX:+UseConcMarkSweepGC' '-XX:CMSInitiatingOccupancyFraction=70' '-XX:MaxHeapFreeRatio=70' '-XX:+CMSClassUnloadingEnabled' '-XX:OnOutOfMemoryError=kill -9 %p' -Djava.io.tmpdir={{PWD}}/tmp '-Dspark.history.ui.port=18080' '-Dspark.driver.port=41666' -Dspark.yarn.app.container.log.dir=<LOG_DIR> org.apache.spark.executor.CoarseGrainedExecutorBackend --driver-url spark://CoarseGrainedScheduler@10.200.139.129:41666 --executor-id 1 --hostname ip-10-200-139-13.ec2.internal --cores 4 --app-id application_1482343367445_0211 --user-class-path file:$PWD/__app__.jar --user-class-path file:$PWD/netty-all-4.0.29.Final.jar --user-class-path file:$PWD/livy-rsc-0.3.1-SNAPSHOT.jar --user-class-path file:$PWD/livy-api-0.3.1-SNAPSHOT.jar --user-class-path file:$PWD/commons-codec-1.9.jar --user-class-path file:$PWD/livy-repl_2.11-0.3.1-SNAPSHOT.jar --user-class-path file:$PWD/livy-core_2.11-0.3.1-SNAPSHOT.jar --user-class-path file:$PWD/com.datastax.spark_spark-cassandra-connector_2.11-2.0.0-M3.jar --user-class-path file:$PWD/org.mongodb.spark_mongo-spark-connector_2.10-2.0.0.jar --user-class-path file:$PWD/io.netty_netty-all-4.0.33.Final.jar --user-class-path file:$PWD/commons-beanutils_commons-beanutils-1.8.0.jar --user-class-path file:$PWD/joda-time_joda-time-2.3.jar --user-class-path file:$PWD/com.twitter_jsr166e-1.1.0.jar --user-class-path file:$PWD/org.joda_joda-convert-1.2.jar --user-class-path file:$PWD/org.scala-lang_scala-reflect-2.11.8.jar --user-class-path file:$PWD/org.mongodb_mongo-java-driver-3.2.2.jar 1> <LOG_DIR>/stdout 2> <LOG_DIR>/stderr
===============================================================================
17/02/28 10:48:02 INFO ContainerManagementProtocolProxy: Opening proxy : ip-10-200-139-13.ec2.internal:8041
17/02/28 10:48:05 INFO YarnAllocator: Driver requested a total number of 0 executor(s).
17/02/28 10:48:05 INFO YarnAllocator: Canceling requests for 0 executor container(s) to have a new desired total 0 executors.
17/02/28 10:48:05 WARN YarnAllocator: Expected to find pending requests, but found none.
17/02/28 10:48:05 INFO ApplicationMaster$AMEndpoint: Driver terminated or disconnected! Shutting down. 10.200.139.129:41666
17/02/28 10:48:05 INFO ApplicationMaster$AMEndpoint: Driver terminated or disconnected! Shutting down. 10.200.139.129:41666
17/02/28 10:48:05 INFO ApplicationMaster: Final app status: SUCCEEDED, exitCode: 0
17/02/28 10:48:05 INFO ApplicationMaster: Unregistering ApplicationMaster with SUCCEEDED
17/02/28 10:48:05 INFO AMRMClientImpl: Waiting for application to be successfully unregistered.
17/02/28 10:48:06 INFO ApplicationMaster: Deleting staging directory hdfs://ip-10-200-139-129.ec2.internal:8020/user/hadoop/.sparkStaging/application_1482343367445_0211
17/02/28 10:48:06 INFO ShutdownHookManager: Shutdown hook called
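The Utils line in the log ("Using initial executors = 1, max of spark.dynamicAllocation.initialExecutors, spark.dynamicAllocation.minExecutors and spark.executor.instances") describes how Spark picks the number of executors it requests from YARN up front when dynamic allocation is enabled: it takes the maximum of those three settings. A minimal sketch of that rule in Scala, using illustrative values rather than anything taken from this cluster's configuration:

import org.apache.spark.SparkConf

// Illustrative values only; the actual settings on this cluster are not shown in the log.
val conf = new SparkConf()
  .set("spark.dynamicAllocation.enabled", "true")
  .set("spark.dynamicAllocation.minExecutors", "1")
  .set("spark.dynamicAllocation.initialExecutors", "1")
  .set("spark.executor.instances", "1")

// Spark starts from the maximum of the three settings, which is the
// "initial executors = 1" that YarnAllocator then requests as a container.
val initialExecutors = Seq(
  conf.getInt("spark.dynamicAllocation.minExecutors", 0),
  conf.getInt("spark.dynamicAllocation.initialExecutors", 0),
  conf.getInt("spark.executor.instances", 0)
).max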