diff --git a/dbscan.ipynb b/dbscan.ipynb new file mode 100644 index 0000000..44adbce --- /dev/null +++ b/dbscan.ipynb @@ -0,0 +1,11362 @@ +{ + "cells": [ + { + "cell_type": "code", + "execution_count": 2, + "id": "42deea39-24b4-4d66-bbe6-7c925ddc360f", + "metadata": {}, + "outputs": [ + { + "data": { + "text/plain": [ + "[('spark.stage.maxConsecutiveAttempts', '10'),\n", + " ('spark.dynamicAllocation.minExecutors', '1'),\n", + " ('spark.eventLog.enabled', 'true'),\n", + " ('spark.submit.pyFiles',\n", + " '/root/.ivy2/jars/com.johnsnowlabs.nlp_spark-nlp_2.12-4.4.0.jar,/root/.ivy2/jars/graphframes_graphframes-0.8.2-spark3.1-s_2.12.jar,/root/.ivy2/jars/com.typesafe_config-1.4.2.jar,/root/.ivy2/jars/org.rocksdb_rocksdbjni-6.29.5.jar,/root/.ivy2/jars/com.amazonaws_aws-java-sdk-bundle-1.11.828.jar,/root/.ivy2/jars/com.github.universal-automata_liblevenshtein-3.0.0.jar,/root/.ivy2/jars/com.google.cloud_google-cloud-storage-2.16.0.jar,/root/.ivy2/jars/com.navigamez_greex-1.0.jar,/root/.ivy2/jars/com.johnsnowlabs.nlp_tensorflow-cpu_2.12-0.4.4.jar,/root/.ivy2/jars/it.unimi.dsi_fastutil-7.0.12.jar,/root/.ivy2/jars/org.projectlombok_lombok-1.16.8.jar,/root/.ivy2/jars/com.google.guava_guava-31.1-jre.jar,/root/.ivy2/jars/com.google.guava_failureaccess-1.0.1.jar,/root/.ivy2/jars/com.google.guava_listenablefuture-9999.0-empty-to-avoid-conflict-with-guava.jar,/root/.ivy2/jars/com.google.errorprone_error_prone_annotations-2.16.jar,/root/.ivy2/jars/com.google.j2objc_j2objc-annotations-1.3.jar,/root/.ivy2/jars/com.google.http-client_google-http-client-1.42.3.jar,/root/.ivy2/jars/io.opencensus_opencensus-contrib-http-util-0.31.1.jar,/root/.ivy2/jars/com.google.http-client_google-http-client-jackson2-1.42.3.jar,/root/.ivy2/jars/com.google.http-client_google-http-client-gson-1.42.3.jar,/root/.ivy2/jars/com.google.api-client_google-api-client-2.1.1.jar,/root/.ivy2/jars/commons-codec_commons-codec-1.15.jar,/root/.ivy2/jars/com.google.oauth-client_google-oauth-client-1.34.1.jar,/root/.ivy2/jars/com.google.http-client_google-http-client-apache-v2-1.42.3.jar,/root/.ivy2/jars/com.google.apis_google-api-services-storage-v1-rev20220705-2.0.0.jar,/root/.ivy2/jars/com.google.code.gson_gson-2.10.jar,/root/.ivy2/jars/com.google.cloud_google-cloud-core-2.9.0.jar,/root/.ivy2/jars/com.google.auto.value_auto-value-annotations-1.10.1.jar,/root/.ivy2/jars/com.google.cloud_google-cloud-core-http-2.9.0.jar,/root/.ivy2/jars/com.google.http-client_google-http-client-appengine-1.42.3.jar,/root/.ivy2/jars/com.google.api_gax-httpjson-0.105.1.jar,/root/.ivy2/jars/com.google.cloud_google-cloud-core-grpc-2.9.0.jar,/root/.ivy2/jars/io.grpc_grpc-core-1.51.0.jar,/root/.ivy2/jars/com.google.api_gax-2.20.1.jar,/root/.ivy2/jars/com.google.api_gax-grpc-2.20.1.jar,/root/.ivy2/jars/io.grpc_grpc-alts-1.51.0.jar,/root/.ivy2/jars/io.grpc_grpc-grpclb-1.51.0.jar,/root/.ivy2/jars/org.conscrypt_conscrypt-openjdk-uber-2.5.2.jar,/root/.ivy2/jars/io.grpc_grpc-protobuf-1.51.0.jar,/root/.ivy2/jars/com.google.auth_google-auth-library-credentials-1.13.0.jar,/root/.ivy2/jars/com.google.auth_google-auth-library-oauth2-http-1.13.0.jar,/root/.ivy2/jars/com.google.api_api-common-2.2.2.jar,/root/.ivy2/jars/javax.annotation_javax.annotation-api-1.3.2.jar,/root/.ivy2/jars/io.opencensus_opencensus-api-0.31.1.jar,/root/.ivy2/jars/io.grpc_grpc-context-1.51.0.jar,/root/.ivy2/jars/com.google.api.grpc_proto-google-iam-v1-1.6.22.jar,/root/.ivy2/jars/com.google.protobuf_protobuf-java-3.21.10.jar,/root/.ivy2/jars/com.google.protobuf_protobuf-java-util-3.21.10.jar,/root/.ivy2/jars
/com.google.api.grpc_proto-google-common-protos-2.11.0.jar,/root/.ivy2/jars/org.threeten_threetenbp-1.6.4.jar,/root/.ivy2/jars/com.google.api.grpc_proto-google-cloud-storage-v2-2.16.0-alpha.jar,/root/.ivy2/jars/com.google.api.grpc_grpc-google-cloud-storage-v2-2.16.0-alpha.jar,/root/.ivy2/jars/com.google.api.grpc_gapic-google-cloud-storage-v2-2.16.0-alpha.jar,/root/.ivy2/jars/com.fasterxml.jackson.core_jackson-core-2.14.1.jar,/root/.ivy2/jars/com.google.code.findbugs_jsr305-3.0.2.jar,/root/.ivy2/jars/io.grpc_grpc-api-1.51.0.jar,/root/.ivy2/jars/io.grpc_grpc-auth-1.51.0.jar,/root/.ivy2/jars/io.grpc_grpc-stub-1.51.0.jar,/root/.ivy2/jars/org.checkerframework_checker-qual-3.28.0.jar,/root/.ivy2/jars/com.google.api.grpc_grpc-google-iam-v1-1.6.22.jar,/root/.ivy2/jars/io.grpc_grpc-protobuf-lite-1.51.0.jar,/root/.ivy2/jars/com.google.android_annotations-4.1.1.4.jar,/root/.ivy2/jars/org.codehaus.mojo_animal-sniffer-annotations-1.22.jar,/root/.ivy2/jars/io.grpc_grpc-netty-shaded-1.51.0.jar,/root/.ivy2/jars/io.perfmark_perfmark-api-0.26.0.jar,/root/.ivy2/jars/io.grpc_grpc-googleapis-1.51.0.jar,/root/.ivy2/jars/io.grpc_grpc-xds-1.51.0.jar,/root/.ivy2/jars/io.opencensus_opencensus-proto-0.2.0.jar,/root/.ivy2/jars/io.grpc_grpc-services-1.51.0.jar,/root/.ivy2/jars/com.google.re2j_re2j-1.6.jar,/root/.ivy2/jars/dk.brics.automaton_automaton-1.11-8.jar,/root/.ivy2/jars/org.slf4j_slf4j-api-1.7.16.jar'),\n", + " ('spark.driver.appUIAddress',\n", + " 'http://hub-msca-bdp-dphub-students-test-ridhi-m.c.msca-bdp-student-ap.internal:34467'),\n", + " ('spark.dataproc.sql.joinConditionReorder.enabled', 'true'),\n", + " ('spark.sql.autoBroadcastJoinThreshold', '191m'),\n", + " ('spark.eventLog.dir',\n", + " 'gs://dataproc-temp-us-central1-635155370842-uzamlpgc/4f3dcfe4-99eb-4d99-bb0e-a5a10f0bc58b/spark-job-history'),\n", + " ('spark.kryoserializer.buffer.max', '2000M'),\n", + " ('spark.serializer', 'org.apache.spark.serializer.KryoSerializer'),\n", + " ('spark.dataproc.sql.local.rank.pushdown.enabled', 'true'),\n", + " ('spark.driver.port', '34173'),\n", + " ('spark.app.id', 'application_1700846724434_0003'),\n", + " ('spark.driver.maxResultSize', '0'),\n", + " ('spark.yarn.unmanagedAM.enabled', 'true'),\n", + " ('spark.ui.filters',\n", + " 'org.apache.hadoop.yarn.server.webproxy.amfilter.AmIpFilter'),\n", + " ('spark.metrics.namespace',\n", + " 'app_name:${spark.app.name}.app_id:${spark.app.id}'),\n", + " ('spark.org.apache.hadoop.yarn.server.webproxy.amfilter.AmIpFilter.param.PROXY_HOSTS',\n", + " 'hub-msca-bdp-dphub-students-test-ridhi-m'),\n", + " ('spark.dataproc.sql.optimizer.leftsemijoin.conversion.enabled', 'true'),\n", + " ('spark.hadoop.hive.execution.engine', 'mr'),\n", + " ('spark.executor.id', 'driver'),\n", + " ('spark.driver.host',\n", + " 'hub-msca-bdp-dphub-students-test-ridhi-m.c.msca-bdp-student-ap.internal'),\n", + " ('spark.hadoop.mapreduce.fileoutputcommitter.algorithm.version', '2'),\n", + " ('spark.dynamicAllocation.maxExecutors', '10000'),\n", + " ('spark.yarn.dist.pyFiles',\n", + " 
'file:///root/.ivy2/jars/com.johnsnowlabs.nlp_spark-nlp_2.12-4.4.0.jar,file:///root/.ivy2/jars/graphframes_graphframes-0.8.2-spark3.1-s_2.12.jar,file:///root/.ivy2/jars/com.typesafe_config-1.4.2.jar,file:///root/.ivy2/jars/org.rocksdb_rocksdbjni-6.29.5.jar,file:///root/.ivy2/jars/com.amazonaws_aws-java-sdk-bundle-1.11.828.jar,file:///root/.ivy2/jars/com.github.universal-automata_liblevenshtein-3.0.0.jar,file:///root/.ivy2/jars/com.google.cloud_google-cloud-storage-2.16.0.jar,file:///root/.ivy2/jars/com.navigamez_greex-1.0.jar,file:///root/.ivy2/jars/com.johnsnowlabs.nlp_tensorflow-cpu_2.12-0.4.4.jar,file:///root/.ivy2/jars/it.unimi.dsi_fastutil-7.0.12.jar,file:///root/.ivy2/jars/org.projectlombok_lombok-1.16.8.jar,file:///root/.ivy2/jars/com.google.guava_guava-31.1-jre.jar,file:///root/.ivy2/jars/com.google.guava_failureaccess-1.0.1.jar,file:///root/.ivy2/jars/com.google.guava_listenablefuture-9999.0-empty-to-avoid-conflict-with-guava.jar,file:///root/.ivy2/jars/com.google.errorprone_error_prone_annotations-2.16.jar,file:///root/.ivy2/jars/com.google.j2objc_j2objc-annotations-1.3.jar,file:///root/.ivy2/jars/com.google.http-client_google-http-client-1.42.3.jar,file:///root/.ivy2/jars/io.opencensus_opencensus-contrib-http-util-0.31.1.jar,file:///root/.ivy2/jars/com.google.http-client_google-http-client-jackson2-1.42.3.jar,file:///root/.ivy2/jars/com.google.http-client_google-http-client-gson-1.42.3.jar,file:///root/.ivy2/jars/com.google.api-client_google-api-client-2.1.1.jar,file:///root/.ivy2/jars/commons-codec_commons-codec-1.15.jar,file:///root/.ivy2/jars/com.google.oauth-client_google-oauth-client-1.34.1.jar,file:///root/.ivy2/jars/com.google.http-client_google-http-client-apache-v2-1.42.3.jar,file:///root/.ivy2/jars/com.google.apis_google-api-services-storage-v1-rev20220705-2.0.0.jar,file:///root/.ivy2/jars/com.google.code.gson_gson-2.10.jar,file:///root/.ivy2/jars/com.google.cloud_google-cloud-core-2.9.0.jar,file:///root/.ivy2/jars/com.google.auto.value_auto-value-annotations-1.10.1.jar,file:///root/.ivy2/jars/com.google.cloud_google-cloud-core-http-2.9.0.jar,file:///root/.ivy2/jars/com.google.http-client_google-http-client-appengine-1.42.3.jar,file:///root/.ivy2/jars/com.google.api_gax-httpjson-0.105.1.jar,file:///root/.ivy2/jars/com.google.cloud_google-cloud-core-grpc-2.9.0.jar,file:///root/.ivy2/jars/io.grpc_grpc-core-1.51.0.jar,file:///root/.ivy2/jars/com.google.api_gax-2.20.1.jar,file:///root/.ivy2/jars/com.google.api_gax-grpc-2.20.1.jar,file:///root/.ivy2/jars/io.grpc_grpc-alts-1.51.0.jar,file:///root/.ivy2/jars/io.grpc_grpc-grpclb-1.51.0.jar,file:///root/.ivy2/jars/org.conscrypt_conscrypt-openjdk-uber-2.5.2.jar,file:///root/.ivy2/jars/io.grpc_grpc-protobuf-1.51.0.jar,file:///root/.ivy2/jars/com.google.auth_google-auth-library-credentials-1.13.0.jar,file:///root/.ivy2/jars/com.google.auth_google-auth-library-oauth2-http-1.13.0.jar,file:///root/.ivy2/jars/com.google.api_api-common-2.2.2.jar,file:///root/.ivy2/jars/javax.annotation_javax.annotation-api-1.3.2.jar,file:///root/.ivy2/jars/io.opencensus_opencensus-api-0.31.1.jar,file:///root/.ivy2/jars/io.grpc_grpc-context-1.51.0.jar,file:///root/.ivy2/jars/com.google.api.grpc_proto-google-iam-v1-1.6.22.jar,file:///root/.ivy2/jars/com.google.protobuf_protobuf-java-3.21.10.jar,file:///root/.ivy2/jars/com.google.protobuf_protobuf-java-util-3.21.10.jar,file:///root/.ivy2/jars/com.google.api.grpc_proto-google-common-protos-2.11.0.jar,file:///root/.ivy2/jars/org.threeten_threetenbp-1.6.4.jar,file:///root/.ivy2/jars/com.google.api.grpc_proto-
google-cloud-storage-v2-2.16.0-alpha.jar,file:///root/.ivy2/jars/com.google.api.grpc_grpc-google-cloud-storage-v2-2.16.0-alpha.jar,file:///root/.ivy2/jars/com.google.api.grpc_gapic-google-cloud-storage-v2-2.16.0-alpha.jar,file:///root/.ivy2/jars/com.fasterxml.jackson.core_jackson-core-2.14.1.jar,file:///root/.ivy2/jars/com.google.code.findbugs_jsr305-3.0.2.jar,file:///root/.ivy2/jars/io.grpc_grpc-api-1.51.0.jar,file:///root/.ivy2/jars/io.grpc_grpc-auth-1.51.0.jar,file:///root/.ivy2/jars/io.grpc_grpc-stub-1.51.0.jar,file:///root/.ivy2/jars/org.checkerframework_checker-qual-3.28.0.jar,file:///root/.ivy2/jars/com.google.api.grpc_grpc-google-iam-v1-1.6.22.jar,file:///root/.ivy2/jars/io.grpc_grpc-protobuf-lite-1.51.0.jar,file:///root/.ivy2/jars/com.google.android_annotations-4.1.1.4.jar,file:///root/.ivy2/jars/org.codehaus.mojo_animal-sniffer-annotations-1.22.jar,file:///root/.ivy2/jars/io.grpc_grpc-netty-shaded-1.51.0.jar,file:///root/.ivy2/jars/io.perfmark_perfmark-api-0.26.0.jar,file:///root/.ivy2/jars/io.grpc_grpc-googleapis-1.51.0.jar,file:///root/.ivy2/jars/io.grpc_grpc-xds-1.51.0.jar,file:///root/.ivy2/jars/io.opencensus_opencensus-proto-0.2.0.jar,file:///root/.ivy2/jars/io.grpc_grpc-services-1.51.0.jar,file:///root/.ivy2/jars/com.google.re2j_re2j-1.6.jar,file:///root/.ivy2/jars/dk.brics.automaton_automaton-1.11-8.jar,file:///root/.ivy2/jars/org.slf4j_slf4j-api-1.7.16.jar'),\n", + " ('spark.app.startTime', '1700858581930'),\n", + " ('spark.ui.proxyBase', '/proxy/application_1700846724434_0003'),\n", + " ('spark.yarn.am.attemptFailuresValidityInterval', '1h'),\n", + " ('spark.app.name', 'Spark Updated Conf'),\n", + " ('spark.sql.catalogImplementation', 'hive'),\n", + " ('spark.cores.max', '8'),\n", + " ('spark.executorEnv.OPENBLAS_NUM_THREADS', '1'),\n", + " ('spark.yarn.secondary.jars',\n", + " 
'com.johnsnowlabs.nlp_spark-nlp_2.12-4.4.0.jar,graphframes_graphframes-0.8.2-spark3.1-s_2.12.jar,com.typesafe_config-1.4.2.jar,org.rocksdb_rocksdbjni-6.29.5.jar,com.amazonaws_aws-java-sdk-bundle-1.11.828.jar,com.github.universal-automata_liblevenshtein-3.0.0.jar,com.google.cloud_google-cloud-storage-2.16.0.jar,com.navigamez_greex-1.0.jar,com.johnsnowlabs.nlp_tensorflow-cpu_2.12-0.4.4.jar,it.unimi.dsi_fastutil-7.0.12.jar,org.projectlombok_lombok-1.16.8.jar,com.google.guava_guava-31.1-jre.jar,com.google.guava_failureaccess-1.0.1.jar,com.google.guava_listenablefuture-9999.0-empty-to-avoid-conflict-with-guava.jar,com.google.errorprone_error_prone_annotations-2.16.jar,com.google.j2objc_j2objc-annotations-1.3.jar,com.google.http-client_google-http-client-1.42.3.jar,io.opencensus_opencensus-contrib-http-util-0.31.1.jar,com.google.http-client_google-http-client-jackson2-1.42.3.jar,com.google.http-client_google-http-client-gson-1.42.3.jar,com.google.api-client_google-api-client-2.1.1.jar,commons-codec_commons-codec-1.15.jar,com.google.oauth-client_google-oauth-client-1.34.1.jar,com.google.http-client_google-http-client-apache-v2-1.42.3.jar,com.google.apis_google-api-services-storage-v1-rev20220705-2.0.0.jar,com.google.code.gson_gson-2.10.jar,com.google.cloud_google-cloud-core-2.9.0.jar,com.google.auto.value_auto-value-annotations-1.10.1.jar,com.google.cloud_google-cloud-core-http-2.9.0.jar,com.google.http-client_google-http-client-appengine-1.42.3.jar,com.google.api_gax-httpjson-0.105.1.jar,com.google.cloud_google-cloud-core-grpc-2.9.0.jar,io.grpc_grpc-core-1.51.0.jar,com.google.api_gax-2.20.1.jar,com.google.api_gax-grpc-2.20.1.jar,io.grpc_grpc-alts-1.51.0.jar,io.grpc_grpc-grpclb-1.51.0.jar,org.conscrypt_conscrypt-openjdk-uber-2.5.2.jar,io.grpc_grpc-protobuf-1.51.0.jar,com.google.auth_google-auth-library-credentials-1.13.0.jar,com.google.auth_google-auth-library-oauth2-http-1.13.0.jar,com.google.api_api-common-2.2.2.jar,javax.annotation_javax.annotation-api-1.3.2.jar,io.opencensus_opencensus-api-0.31.1.jar,io.grpc_grpc-context-1.51.0.jar,com.google.api.grpc_proto-google-iam-v1-1.6.22.jar,com.google.protobuf_protobuf-java-3.21.10.jar,com.google.protobuf_protobuf-java-util-3.21.10.jar,com.google.api.grpc_proto-google-common-protos-2.11.0.jar,org.threeten_threetenbp-1.6.4.jar,com.google.api.grpc_proto-google-cloud-storage-v2-2.16.0-alpha.jar,com.google.api.grpc_grpc-google-cloud-storage-v2-2.16.0-alpha.jar,com.google.api.grpc_gapic-google-cloud-storage-v2-2.16.0-alpha.jar,com.fasterxml.jackson.core_jackson-core-2.14.1.jar,com.google.code.findbugs_jsr305-3.0.2.jar,io.grpc_grpc-api-1.51.0.jar,io.grpc_grpc-auth-1.51.0.jar,io.grpc_grpc-stub-1.51.0.jar,org.checkerframework_checker-qual-3.28.0.jar,com.google.api.grpc_grpc-google-iam-v1-1.6.22.jar,io.grpc_grpc-protobuf-lite-1.51.0.jar,com.google.android_annotations-4.1.1.4.jar,org.codehaus.mojo_animal-sniffer-annotations-1.22.jar,io.grpc_grpc-netty-shaded-1.51.0.jar,io.perfmark_perfmark-api-0.26.0.jar,io.grpc_grpc-googleapis-1.51.0.jar,io.grpc_grpc-xds-1.51.0.jar,io.opencensus_opencensus-proto-0.2.0.jar,io.grpc_grpc-services-1.51.0.jar,com.google.re2j_re2j-1.6.jar,dk.brics.automaton_automaton-1.11-8.jar,org.slf4j_slf4j-api-1.7.16.jar'),\n", + " ('spark.repl.local.jars',\n", + " 
'file:///root/.ivy2/jars/com.johnsnowlabs.nlp_spark-nlp_2.12-4.4.0.jar,file:///root/.ivy2/jars/graphframes_graphframes-0.8.2-spark3.1-s_2.12.jar,file:///root/.ivy2/jars/com.typesafe_config-1.4.2.jar,file:///root/.ivy2/jars/org.rocksdb_rocksdbjni-6.29.5.jar,file:///root/.ivy2/jars/com.amazonaws_aws-java-sdk-bundle-1.11.828.jar,file:///root/.ivy2/jars/com.github.universal-automata_liblevenshtein-3.0.0.jar,file:///root/.ivy2/jars/com.google.cloud_google-cloud-storage-2.16.0.jar,file:///root/.ivy2/jars/com.navigamez_greex-1.0.jar,file:///root/.ivy2/jars/com.johnsnowlabs.nlp_tensorflow-cpu_2.12-0.4.4.jar,file:///root/.ivy2/jars/it.unimi.dsi_fastutil-7.0.12.jar,file:///root/.ivy2/jars/org.projectlombok_lombok-1.16.8.jar,file:///root/.ivy2/jars/com.google.guava_guava-31.1-jre.jar,file:///root/.ivy2/jars/com.google.guava_failureaccess-1.0.1.jar,file:///root/.ivy2/jars/com.google.guava_listenablefuture-9999.0-empty-to-avoid-conflict-with-guava.jar,file:///root/.ivy2/jars/com.google.errorprone_error_prone_annotations-2.16.jar,file:///root/.ivy2/jars/com.google.j2objc_j2objc-annotations-1.3.jar,file:///root/.ivy2/jars/com.google.http-client_google-http-client-1.42.3.jar,file:///root/.ivy2/jars/io.opencensus_opencensus-contrib-http-util-0.31.1.jar,file:///root/.ivy2/jars/com.google.http-client_google-http-client-jackson2-1.42.3.jar,file:///root/.ivy2/jars/com.google.http-client_google-http-client-gson-1.42.3.jar,file:///root/.ivy2/jars/com.google.api-client_google-api-client-2.1.1.jar,file:///root/.ivy2/jars/commons-codec_commons-codec-1.15.jar,file:///root/.ivy2/jars/com.google.oauth-client_google-oauth-client-1.34.1.jar,file:///root/.ivy2/jars/com.google.http-client_google-http-client-apache-v2-1.42.3.jar,file:///root/.ivy2/jars/com.google.apis_google-api-services-storage-v1-rev20220705-2.0.0.jar,file:///root/.ivy2/jars/com.google.code.gson_gson-2.10.jar,file:///root/.ivy2/jars/com.google.cloud_google-cloud-core-2.9.0.jar,file:///root/.ivy2/jars/com.google.auto.value_auto-value-annotations-1.10.1.jar,file:///root/.ivy2/jars/com.google.cloud_google-cloud-core-http-2.9.0.jar,file:///root/.ivy2/jars/com.google.http-client_google-http-client-appengine-1.42.3.jar,file:///root/.ivy2/jars/com.google.api_gax-httpjson-0.105.1.jar,file:///root/.ivy2/jars/com.google.cloud_google-cloud-core-grpc-2.9.0.jar,file:///root/.ivy2/jars/io.grpc_grpc-core-1.51.0.jar,file:///root/.ivy2/jars/com.google.api_gax-2.20.1.jar,file:///root/.ivy2/jars/com.google.api_gax-grpc-2.20.1.jar,file:///root/.ivy2/jars/io.grpc_grpc-alts-1.51.0.jar,file:///root/.ivy2/jars/io.grpc_grpc-grpclb-1.51.0.jar,file:///root/.ivy2/jars/org.conscrypt_conscrypt-openjdk-uber-2.5.2.jar,file:///root/.ivy2/jars/io.grpc_grpc-protobuf-1.51.0.jar,file:///root/.ivy2/jars/com.google.auth_google-auth-library-credentials-1.13.0.jar,file:///root/.ivy2/jars/com.google.auth_google-auth-library-oauth2-http-1.13.0.jar,file:///root/.ivy2/jars/com.google.api_api-common-2.2.2.jar,file:///root/.ivy2/jars/javax.annotation_javax.annotation-api-1.3.2.jar,file:///root/.ivy2/jars/io.opencensus_opencensus-api-0.31.1.jar,file:///root/.ivy2/jars/io.grpc_grpc-context-1.51.0.jar,file:///root/.ivy2/jars/com.google.api.grpc_proto-google-iam-v1-1.6.22.jar,file:///root/.ivy2/jars/com.google.protobuf_protobuf-java-3.21.10.jar,file:///root/.ivy2/jars/com.google.protobuf_protobuf-java-util-3.21.10.jar,file:///root/.ivy2/jars/com.google.api.grpc_proto-google-common-protos-2.11.0.jar,file:///root/.ivy2/jars/org.threeten_threetenbp-1.6.4.jar,file:///root/.ivy2/jars/com.google.api.grpc_proto-
google-cloud-storage-v2-2.16.0-alpha.jar,file:///root/.ivy2/jars/com.google.api.grpc_grpc-google-cloud-storage-v2-2.16.0-alpha.jar,file:///root/.ivy2/jars/com.google.api.grpc_gapic-google-cloud-storage-v2-2.16.0-alpha.jar,file:///root/.ivy2/jars/com.fasterxml.jackson.core_jackson-core-2.14.1.jar,file:///root/.ivy2/jars/com.google.code.findbugs_jsr305-3.0.2.jar,file:///root/.ivy2/jars/io.grpc_grpc-api-1.51.0.jar,file:///root/.ivy2/jars/io.grpc_grpc-auth-1.51.0.jar,file:///root/.ivy2/jars/io.grpc_grpc-stub-1.51.0.jar,file:///root/.ivy2/jars/org.checkerframework_checker-qual-3.28.0.jar,file:///root/.ivy2/jars/com.google.api.grpc_grpc-google-iam-v1-1.6.22.jar,file:///root/.ivy2/jars/io.grpc_grpc-protobuf-lite-1.51.0.jar,file:///root/.ivy2/jars/com.google.android_annotations-4.1.1.4.jar,file:///root/.ivy2/jars/org.codehaus.mojo_animal-sniffer-annotations-1.22.jar,file:///root/.ivy2/jars/io.grpc_grpc-netty-shaded-1.51.0.jar,file:///root/.ivy2/jars/io.perfmark_perfmark-api-0.26.0.jar,file:///root/.ivy2/jars/io.grpc_grpc-googleapis-1.51.0.jar,file:///root/.ivy2/jars/io.grpc_grpc-xds-1.51.0.jar,file:///root/.ivy2/jars/io.opencensus_opencensus-proto-0.2.0.jar,file:///root/.ivy2/jars/io.grpc_grpc-services-1.51.0.jar,file:///root/.ivy2/jars/com.google.re2j_re2j-1.6.jar,file:///root/.ivy2/jars/dk.brics.automaton_automaton-1.11-8.jar,file:///root/.ivy2/jars/org.slf4j_slf4j-api-1.7.16.jar'),\n", + " ('spark.sql.cbo.enabled', 'true'),\n", + " ('spark.executorEnv.PYTHONPATH',\n", + " '/usr/lib/spark/python/lib/py4j-0.10.9-src.zip:/usr/lib/spark/python/:{{PWD}}/pyspark.zip{{PWD}}/py4j-0.10.9-src.zip{{PWD}}/com.johnsnowlabs.nlp_spark-nlp_2.12-4.4.0.jar{{PWD}}/graphframes_graphframes-0.8.2-spark3.1-s_2.12.jar{{PWD}}/com.typesafe_config-1.4.2.jar{{PWD}}/org.rocksdb_rocksdbjni-6.29.5.jar{{PWD}}/com.amazonaws_aws-java-sdk-bundle-1.11.828.jar{{PWD}}/com.github.universal-automata_liblevenshtein-3.0.0.jar{{PWD}}/com.google.cloud_google-cloud-storage-2.16.0.jar{{PWD}}/com.navigamez_greex-1.0.jar{{PWD}}/com.johnsnowlabs.nlp_tensorflow-cpu_2.12-0.4.4.jar{{PWD}}/it.unimi.dsi_fastutil-7.0.12.jar{{PWD}}/org.projectlombok_lombok-1.16.8.jar{{PWD}}/com.google.guava_guava-31.1-jre.jar{{PWD}}/com.google.guava_failureaccess-1.0.1.jar{{PWD}}/com.google.guava_listenablefuture-9999.0-empty-to-avoid-conflict-with-guava.jar{{PWD}}/com.google.errorprone_error_prone_annotations-2.16.jar{{PWD}}/com.google.j2objc_j2objc-annotations-1.3.jar{{PWD}}/com.google.http-client_google-http-client-1.42.3.jar{{PWD}}/io.opencensus_opencensus-contrib-http-util-0.31.1.jar{{PWD}}/com.google.http-client_google-http-client-jackson2-1.42.3.jar{{PWD}}/com.google.http-client_google-http-client-gson-1.42.3.jar{{PWD}}/com.google.api-client_google-api-client-2.1.1.jar{{PWD}}/commons-codec_commons-codec-1.15.jar{{PWD}}/com.google.oauth-client_google-oauth-client-1.34.1.jar{{PWD}}/com.google.http-client_google-http-client-apache-v2-1.42.3.jar{{PWD}}/com.google.apis_google-api-services-storage-v1-rev20220705-2.0.0.jar{{PWD}}/com.google.code.gson_gson-2.10.jar{{PWD}}/com.google.cloud_google-cloud-core-2.9.0.jar{{PWD}}/com.google.auto.value_auto-value-annotations-1.10.1.jar{{PWD}}/com.google.cloud_google-cloud-core-http-2.9.0.jar{{PWD}}/com.google.http-client_google-http-client-appengine-1.42.3.jar{{PWD}}/com.google.api_gax-httpjson-0.105.1.jar{{PWD}}/com.google.cloud_google-cloud-core-grpc-2.9.0.jar{{PWD}}/io.grpc_grpc-core-1.51.0.jar{{PWD}}/com.google.api_gax-2.20.1.jar{{PWD}}/com.google.api_gax-grpc-2.20.1.jar{{PWD}}/io.grpc_grpc-alts-1.51.0.jar{{PWD}}/io.grpc_
grpc-grpclb-1.51.0.jar{{PWD}}/org.conscrypt_conscrypt-openjdk-uber-2.5.2.jar{{PWD}}/io.grpc_grpc-protobuf-1.51.0.jar{{PWD}}/com.google.auth_google-auth-library-credentials-1.13.0.jar{{PWD}}/com.google.auth_google-auth-library-oauth2-http-1.13.0.jar{{PWD}}/com.google.api_api-common-2.2.2.jar{{PWD}}/javax.annotation_javax.annotation-api-1.3.2.jar{{PWD}}/io.opencensus_opencensus-api-0.31.1.jar{{PWD}}/io.grpc_grpc-context-1.51.0.jar{{PWD}}/com.google.api.grpc_proto-google-iam-v1-1.6.22.jar{{PWD}}/com.google.protobuf_protobuf-java-3.21.10.jar{{PWD}}/com.google.protobuf_protobuf-java-util-3.21.10.jar{{PWD}}/com.google.api.grpc_proto-google-common-protos-2.11.0.jar{{PWD}}/org.threeten_threetenbp-1.6.4.jar{{PWD}}/com.google.api.grpc_proto-google-cloud-storage-v2-2.16.0-alpha.jar{{PWD}}/com.google.api.grpc_grpc-google-cloud-storage-v2-2.16.0-alpha.jar{{PWD}}/com.google.api.grpc_gapic-google-cloud-storage-v2-2.16.0-alpha.jar{{PWD}}/com.fasterxml.jackson.core_jackson-core-2.14.1.jar{{PWD}}/com.google.code.findbugs_jsr305-3.0.2.jar{{PWD}}/io.grpc_grpc-api-1.51.0.jar{{PWD}}/io.grpc_grpc-auth-1.51.0.jar{{PWD}}/io.grpc_grpc-stub-1.51.0.jar{{PWD}}/org.checkerframework_checker-qual-3.28.0.jar{{PWD}}/com.google.api.grpc_grpc-google-iam-v1-1.6.22.jar{{PWD}}/io.grpc_grpc-protobuf-lite-1.51.0.jar{{PWD}}/com.google.android_annotations-4.1.1.4.jar{{PWD}}/org.codehaus.mojo_animal-sniffer-annotations-1.22.jar{{PWD}}/io.grpc_grpc-netty-shaded-1.51.0.jar{{PWD}}/io.perfmark_perfmark-api-0.26.0.jar{{PWD}}/io.grpc_grpc-googleapis-1.51.0.jar{{PWD}}/io.grpc_grpc-xds-1.51.0.jar{{PWD}}/io.opencensus_opencensus-proto-0.2.0.jar{{PWD}}/io.grpc_grpc-services-1.51.0.jar{{PWD}}/com.google.re2j_re2j-1.6.jar{{PWD}}/dk.brics.automaton_automaton-1.11-8.jar{{PWD}}/org.slf4j_slf4j-api-1.7.16.jar'),\n", + " ('spark.yarn.dist.jars',\n", + " 
'file:///root/.ivy2/jars/com.johnsnowlabs.nlp_spark-nlp_2.12-4.4.0.jar,file:///root/.ivy2/jars/graphframes_graphframes-0.8.2-spark3.1-s_2.12.jar,file:///root/.ivy2/jars/com.typesafe_config-1.4.2.jar,file:///root/.ivy2/jars/org.rocksdb_rocksdbjni-6.29.5.jar,file:///root/.ivy2/jars/com.amazonaws_aws-java-sdk-bundle-1.11.828.jar,file:///root/.ivy2/jars/com.github.universal-automata_liblevenshtein-3.0.0.jar,file:///root/.ivy2/jars/com.google.cloud_google-cloud-storage-2.16.0.jar,file:///root/.ivy2/jars/com.navigamez_greex-1.0.jar,file:///root/.ivy2/jars/com.johnsnowlabs.nlp_tensorflow-cpu_2.12-0.4.4.jar,file:///root/.ivy2/jars/it.unimi.dsi_fastutil-7.0.12.jar,file:///root/.ivy2/jars/org.projectlombok_lombok-1.16.8.jar,file:///root/.ivy2/jars/com.google.guava_guava-31.1-jre.jar,file:///root/.ivy2/jars/com.google.guava_failureaccess-1.0.1.jar,file:///root/.ivy2/jars/com.google.guava_listenablefuture-9999.0-empty-to-avoid-conflict-with-guava.jar,file:///root/.ivy2/jars/com.google.errorprone_error_prone_annotations-2.16.jar,file:///root/.ivy2/jars/com.google.j2objc_j2objc-annotations-1.3.jar,file:///root/.ivy2/jars/com.google.http-client_google-http-client-1.42.3.jar,file:///root/.ivy2/jars/io.opencensus_opencensus-contrib-http-util-0.31.1.jar,file:///root/.ivy2/jars/com.google.http-client_google-http-client-jackson2-1.42.3.jar,file:///root/.ivy2/jars/com.google.http-client_google-http-client-gson-1.42.3.jar,file:///root/.ivy2/jars/com.google.api-client_google-api-client-2.1.1.jar,file:///root/.ivy2/jars/commons-codec_commons-codec-1.15.jar,file:///root/.ivy2/jars/com.google.oauth-client_google-oauth-client-1.34.1.jar,file:///root/.ivy2/jars/com.google.http-client_google-http-client-apache-v2-1.42.3.jar,file:///root/.ivy2/jars/com.google.apis_google-api-services-storage-v1-rev20220705-2.0.0.jar,file:///root/.ivy2/jars/com.google.code.gson_gson-2.10.jar,file:///root/.ivy2/jars/com.google.cloud_google-cloud-core-2.9.0.jar,file:///root/.ivy2/jars/com.google.auto.value_auto-value-annotations-1.10.1.jar,file:///root/.ivy2/jars/com.google.cloud_google-cloud-core-http-2.9.0.jar,file:///root/.ivy2/jars/com.google.http-client_google-http-client-appengine-1.42.3.jar,file:///root/.ivy2/jars/com.google.api_gax-httpjson-0.105.1.jar,file:///root/.ivy2/jars/com.google.cloud_google-cloud-core-grpc-2.9.0.jar,file:///root/.ivy2/jars/io.grpc_grpc-core-1.51.0.jar,file:///root/.ivy2/jars/com.google.api_gax-2.20.1.jar,file:///root/.ivy2/jars/com.google.api_gax-grpc-2.20.1.jar,file:///root/.ivy2/jars/io.grpc_grpc-alts-1.51.0.jar,file:///root/.ivy2/jars/io.grpc_grpc-grpclb-1.51.0.jar,file:///root/.ivy2/jars/org.conscrypt_conscrypt-openjdk-uber-2.5.2.jar,file:///root/.ivy2/jars/io.grpc_grpc-protobuf-1.51.0.jar,file:///root/.ivy2/jars/com.google.auth_google-auth-library-credentials-1.13.0.jar,file:///root/.ivy2/jars/com.google.auth_google-auth-library-oauth2-http-1.13.0.jar,file:///root/.ivy2/jars/com.google.api_api-common-2.2.2.jar,file:///root/.ivy2/jars/javax.annotation_javax.annotation-api-1.3.2.jar,file:///root/.ivy2/jars/io.opencensus_opencensus-api-0.31.1.jar,file:///root/.ivy2/jars/io.grpc_grpc-context-1.51.0.jar,file:///root/.ivy2/jars/com.google.api.grpc_proto-google-iam-v1-1.6.22.jar,file:///root/.ivy2/jars/com.google.protobuf_protobuf-java-3.21.10.jar,file:///root/.ivy2/jars/com.google.protobuf_protobuf-java-util-3.21.10.jar,file:///root/.ivy2/jars/com.google.api.grpc_proto-google-common-protos-2.11.0.jar,file:///root/.ivy2/jars/org.threeten_threetenbp-1.6.4.jar,file:///root/.ivy2/jars/com.google.api.grpc_proto-
google-cloud-storage-v2-2.16.0-alpha.jar,file:///root/.ivy2/jars/com.google.api.grpc_grpc-google-cloud-storage-v2-2.16.0-alpha.jar,file:///root/.ivy2/jars/com.google.api.grpc_gapic-google-cloud-storage-v2-2.16.0-alpha.jar,file:///root/.ivy2/jars/com.fasterxml.jackson.core_jackson-core-2.14.1.jar,file:///root/.ivy2/jars/com.google.code.findbugs_jsr305-3.0.2.jar,file:///root/.ivy2/jars/io.grpc_grpc-api-1.51.0.jar,file:///root/.ivy2/jars/io.grpc_grpc-auth-1.51.0.jar,file:///root/.ivy2/jars/io.grpc_grpc-stub-1.51.0.jar,file:///root/.ivy2/jars/org.checkerframework_checker-qual-3.28.0.jar,file:///root/.ivy2/jars/com.google.api.grpc_grpc-google-iam-v1-1.6.22.jar,file:///root/.ivy2/jars/io.grpc_grpc-protobuf-lite-1.51.0.jar,file:///root/.ivy2/jars/com.google.android_annotations-4.1.1.4.jar,file:///root/.ivy2/jars/org.codehaus.mojo_animal-sniffer-annotations-1.22.jar,file:///root/.ivy2/jars/io.grpc_grpc-netty-shaded-1.51.0.jar,file:///root/.ivy2/jars/io.perfmark_perfmark-api-0.26.0.jar,file:///root/.ivy2/jars/io.grpc_grpc-googleapis-1.51.0.jar,file:///root/.ivy2/jars/io.grpc_grpc-xds-1.51.0.jar,file:///root/.ivy2/jars/io.opencensus_opencensus-proto-0.2.0.jar,file:///root/.ivy2/jars/io.grpc_grpc-services-1.51.0.jar,file:///root/.ivy2/jars/com.google.re2j_re2j-1.6.jar,file:///root/.ivy2/jars/dk.brics.automaton_automaton-1.11-8.jar,file:///root/.ivy2/jars/org.slf4j_slf4j-api-1.7.16.jar'),\n", + " ('spark.dataproc.sql.parquet.enableFooterCache', 'true'),\n", + " ('spark.sql.warehouse.dir', 'file:/spark-warehouse'),\n", + " ('spark.yarn.executor.failuresValidityInterval', '1h'),\n", + " ('spark.dataproc.metrics.listener.metrics.collector.hostname',\n", + " 'hub-msca-bdp-dphub-students-test-ridhi-m'),\n", + " ('spark.yarn.am.memory', '640m'),\n", + " ('spark.jars.packages',\n", + " 'com.johnsnowlabs.nlp:spark-nlp_2.12:4.4.0,graphframes:graphframes:0.8.2-spark3.1-s_2.12'),\n", + " ('spark.executor.instances', '2'),\n", + " ('spark.dataproc.listeners',\n", + " 'com.google.cloud.spark.performance.DataprocMetricsListener'),\n", + " ('spark.history.fs.logDirectory',\n", + " 'gs://dataproc-temp-us-central1-635155370842-uzamlpgc/4f3dcfe4-99eb-4d99-bb0e-a5a10f0bc58b/spark-job-history'),\n", + " ('spark.driver.memory', '8g'),\n", + " ('spark.serializer.objectStreamReset', '100'),\n", + " ('spark.executor.memory', '8g'),\n", + " ('spark.submit.deployMode', 'client'),\n", + " ('spark.executor.cores', '8'),\n", + " ('spark.sql.cbo.joinReorder.enabled', 'true'),\n", + " ('spark.shuffle.service.enabled', 'true'),\n", + " ('spark.scheduler.mode', 'FAIR'),\n", + " ('spark.sql.adaptive.enabled', 'true'),\n", + " ('spark.yarn.jars', 'local:/usr/lib/spark/jars/*'),\n", + " ('spark.scheduler.minRegisteredResourcesRatio', '0.0'),\n", + " ('spark.org.apache.hadoop.yarn.server.webproxy.amfilter.AmIpFilter.param.PROXY_URI_BASES',\n", + " 'http://hub-msca-bdp-dphub-students-test-ridhi-m:8088/proxy/application_1700846724434_0003'),\n", + " ('spark.master', 'yarn'),\n", + " ('spark.ui.port', '0'),\n", + " ('spark.rpc.message.maxSize', '512'),\n", + " ('spark.rdd.compress', 'True'),\n", + " ('spark.task.maxFailures', '10'),\n", + " ('spark.yarn.isPython', 'true'),\n", + " ('spark.dynamicAllocation.enabled', 'true'),\n", + " ('spark.yarn.historyServer.address',\n", + " 'hub-msca-bdp-dphub-students-test-ridhi-m:18080'),\n", + " ('spark.ui.showConsoleProgress', 'true')]" + ] + }, + "execution_count": 2, + "metadata": {}, + "output_type": "execute_result" + } + ], + "source": [ + "from pyspark.sql import SparkSession\n", + "from 
pyspark.sql import functions as F\n", + "\n", + "import matplotlib.pyplot as plt\n", + "%matplotlib inline\n", + "\n", + "spark = SparkSession.builder.appName('unsupervised').getOrCreate()\n", + "\n", + "#change configuration settings on Spark \n", + "conf = spark.sparkContext._conf.setAll([('spark.executor.memory', '8g'), ('spark.app.name', 'Spark Updated Conf'),\\\n", + " ('spark.executor.cores', '8'), ('spark.cores.max', '8'), ('spark.driver.memory','8g')])\n", + "\n", + "spark.sparkContext.getConf().getAll()" + ] + }, + { + "cell_type": "code", + "execution_count": null, + "id": "a5a2f2b8-c263-4ea9-bacd-34d8a1b909b2", + "metadata": {}, + "outputs": [], + "source": [ + "spark.stop()" + ] + }, + { + "cell_type": "code", + "execution_count": 3, + "id": "b6c5fdcd-e1af-4e2a-b177-7acc4e3dbec6", + "metadata": {}, + "outputs": [ + { + "name": "stderr", + "output_type": "stream", + "text": [ + " \r" + ] + }, + { + "name": "stdout", + "output_type": "stream", + "text": [ + "+--------------------+-------------------+-------------------+-------+-----+------------+-------------+-----------+------------+----+---+-----+-------------+--------------+-------------+--------------+-----+------------+----+---+\n", + "| ID| start_timestamp| end_timestamp|seconds|miles|pickup_tract|dropoff_tract|pickup_area|dropoff_area|Fare|Tip|total| pickup_lat| pickup_lon| dropoff_lat| dropoff_lon|month|day_of_month|hour|day|\n", + "+--------------------+-------------------+-------------------+-------+-----+------------+-------------+-----------+------------+----+---+-----+-------------+--------------+-------------+--------------+-----+------------+----+---+\n", + "|625e77ae6e0ff7191...|2018-11-06 19:00:00|2018-11-06 19:15:00| 1142| 5.8| 17031063400| 17031010400| 6| 1|12.5| 0| 15.0|41.9346591566|-87.6467297286| 42.004764559| -87.659122427| 11| 6| 19| 3|\n", + "|62945fdb2e70957f0...|2018-11-06 19:00:00|2018-11-06 19:00:00| 341| 1.2| 17031081800| 17031833000| 8| 28| 5.0| 0| 7.5|41.8932163595|-87.6378442095|41.8852813201|-87.6572331997| 11| 6| 19| 3|\n", + "|6dc03f91e4480d237...|2018-11-06 19:00:00|2018-11-06 19:00:00| 558| 1.2| 17031070400| 17031061500| 7| 6| 7.5| 0| 10.3|41.9289672664|-87.6561568309|41.9452823311|-87.6615450961| 11| 6| 19| 3|\n", + "|773894079a526afa1...|2018-11-06 19:00:00|2018-11-06 19:30:00| 1047| 2.8| 17031832200| 17031062100| 22| 6|10.0| 2| 14.5|41.9204515116|-87.6799547678|41.9426918444|-87.6517705068| 11| 6| 19| 3|\n", + "|7acf0a7f2edfbe546...|2018-11-06 19:00:00|2018-11-06 19:00:00| 502| 1.3| 17031839100| 17031081700| 32| 8| 2.5| 0| 5.0|41.8809944707|-87.6327464887|41.8920421365|-87.6318639497| 11| 6| 19| 3|\n", + "+--------------------+-------------------+-------------------+-------+-----+------------+-------------+-----------+------------+----+---+-----+-------------+--------------+-------------+--------------+-----+------------+----+---+\n", + "only showing top 5 rows\n", + "\n" + ] + } + ], + "source": [ + "# read in rideshare data for all years, concatenate, create appropriate partitioning\n", + "# we are dropping 2020 because covid will affect the performance of our model\n", + "\n", + "df_2018 = spark.read.csv(\"gs://msca-bdp-student-gcs/bdp-rideshare-project/rideshare/processed_data/rides_2018.csv\", inferSchema=True, header=True)\n", + "df_2019 = spark.read.csv(\"gs://msca-bdp-student-gcs/bdp-rideshare-project/rideshare/processed_data/rides_2019.csv\", inferSchema=True, header=True)\n", + "df_2021 = 
spark.read.csv(\"gs://msca-bdp-student-gcs/bdp-rideshare-project/rideshare/processed_data/rides_2021.csv\", inferSchema=True, header=True)\n", + "df_2022 = spark.read.csv(\"gs://msca-bdp-student-gcs/bdp-rideshare-project/rideshare/processed_data/rides_2022.csv\", inferSchema=True, header=True)\n", + "df_2023 = spark.read.csv(\"gs://msca-bdp-student-gcs/bdp-rideshare-project/rideshare/processed_data/rides_2023.csv\", inferSchema=True, header=True)\n", + "\n", + "# dropping new columns in 2023\n", + "df_2023 = df_2023.drop('Shared Trip Match','Percent Time Chicago','Percent Distance Chicago')\n", + "\n", + "df_all = df_2018.union(df_2019).union(df_2021).union(df_2022).union(df_2023)\n", + "df_all.show(5)" + ] + }, + { + "cell_type": "code", + "execution_count": 5, + "id": "98809f0b-4a5a-4812-9a39-f91406ffecd4", + "metadata": {}, + "outputs": [ + { + "name": "stdout", + "output_type": "stream", + "text": [ + "Partitions: 534\n" + ] + }, + { + "name": "stderr", + "output_type": "stream", + "text": [ + "[Stage 16:=====================================================>(528 + 6) / 534]\r" + ] + }, + { + "name": "stdout", + "output_type": "stream", + "text": [ + "+-----------+------+\n", + "|partitionId| count|\n", + "+-----------+------+\n", + "| 33|152646|\n", + "| 233|328837|\n", + "| 232|328975|\n", + "| 231|329131|\n", + "| 230|329163|\n", + "| 229|329209|\n", + "| 227|329245|\n", + "| 225|329263|\n", + "| 228|329263|\n", + "| 224|329311|\n", + "| 226|329315|\n", + "| 222|329332|\n", + "| 223|329344|\n", + "| 221|329373|\n", + "| 218|329389|\n", + "| 219|329390|\n", + "| 217|329399|\n", + "| 215|329410|\n", + "| 216|329410|\n", + "| 214|329418|\n", + "| 220|329427|\n", + "| 213|329428|\n", + "| 210|329461|\n", + "| 212|329481|\n", + "| 211|329505|\n", + "| 207|329507|\n", + "| 208|329513|\n", + "| 209|329519|\n", + "| 206|329523|\n", + "| 204|329533|\n", + "| 203|329555|\n", + "| 205|329574|\n", + "| 201|329587|\n", + "| 202|329591|\n", + "| 198|329607|\n", + "| 200|329623|\n", + "| 196|329624|\n", + "| 199|329630|\n", + "| 197|329633|\n", + "| 195|329646|\n", + "| 192|329654|\n", + "| 194|329673|\n", + "| 193|329678|\n", + "| 184|329704|\n", + "| 191|329708|\n", + "| 190|329712|\n", + "| 181|329717|\n", + "| 179|329728|\n", + "| 178|329730|\n", + "| 189|329732|\n", + "| 183|329732|\n", + "| 188|329739|\n", + "| 180|329746|\n", + "| 185|329748|\n", + "| 187|329786|\n", + "| 186|329786|\n", + "| 176|329786|\n", + "| 177|329787|\n", + "| 182|329793|\n", + "| 173|329804|\n", + "| 174|329805|\n", + "| 169|329807|\n", + "| 172|329814|\n", + "| 171|329819|\n", + "| 175|329843|\n", + "| 168|329849|\n", + "| 167|329861|\n", + "| 170|329865|\n", + "| 162|329866|\n", + "| 163|329871|\n", + "| 161|329879|\n", + "| 165|329889|\n", + "| 164|329892|\n", + "| 166|329900|\n", + "| 160|329937|\n", + "| 158|329956|\n", + "| 152|329961|\n", + "| 157|329962|\n", + "| 154|329965|\n", + "| 159|329974|\n", + "| 155|329980|\n", + "| 153|329985|\n", + "| 149|329993|\n", + "| 156|330000|\n", + "| 148|330005|\n", + "| 151|330014|\n", + "| 147|330021|\n", + "| 150|330027|\n", + "| 146|330059|\n", + "| 144|330066|\n", + "| 137|330070|\n", + "| 143|330083|\n", + "| 140|330084|\n", + "| 145|330084|\n", + "| 136|330089|\n", + "| 142|330090|\n", + "| 139|330099|\n", + "| 141|330101|\n", + "| 138|330103|\n", + "| 134|330116|\n", + "| 135|330129|\n", + "| 130|330133|\n", + "| 133|330135|\n", + "| 131|330139|\n", + "| 132|330148|\n", + "| 129|330159|\n", + "| 125|330178|\n", + "| 127|330181|\n", + "| 122|330186|\n", + "| 
128|330189|\n", + "| 126|330205|\n", + "| 119|330208|\n", + "| 123|330219|\n", + "| 115|330220|\n", + "| 118|330227|\n", + "| 124|330230|\n", + "| 117|330236|\n", + "| 120|330243|\n", + "| 114|330245|\n", + "| 107|330249|\n", + "| 121|330252|\n", + "| 111|330255|\n", + "| 112|330258|\n", + "| 116|330268|\n", + "| 113|330270|\n", + "| 108|330275|\n", + "| 103|330282|\n", + "| 105|330282|\n", + "| 110|330296|\n", + "| 102|330297|\n", + "| 109|330306|\n", + "| 104|330308|\n", + "| 94|330338|\n", + "| 106|330339|\n", + "| 99|330355|\n", + "| 95|330363|\n", + "| 101|330366|\n", + "| 100|330372|\n", + "| 98|330383|\n", + "| 97|330385|\n", + "| 96|330402|\n", + "| 86|330413|\n", + "| 92|330415|\n", + "| 90|330418|\n", + "| 88|330418|\n", + "| 91|330419|\n", + "| 84|330425|\n", + "| 87|330426|\n", + "| 93|330430|\n", + "| 89|330430|\n", + "| 85|330436|\n", + "| 80|330456|\n", + "| 82|330470|\n", + "| 78|330471|\n", + "| 81|330474|\n", + "| 83|330477|\n", + "| 77|330494|\n", + "| 76|330501|\n", + "| 79|330512|\n", + "| 74|330524|\n", + "| 72|330550|\n", + "| 75|330570|\n", + "| 70|330570|\n", + "| 71|330579|\n", + "| 73|330585|\n", + "| 68|330622|\n", + "| 66|330625|\n", + "| 69|330625|\n", + "| 65|330642|\n", + "| 67|330646|\n", + "| 61|330651|\n", + "| 64|330653|\n", + "| 63|330667|\n", + "| 60|330690|\n", + "| 55|330704|\n", + "| 62|330709|\n", + "| 57|330721|\n", + "| 56|330724|\n", + "| 59|330737|\n", + "| 52|330758|\n", + "| 53|330762|\n", + "| 54|330762|\n", + "| 58|330766|\n", + "| 50|330782|\n", + "| 49|330784|\n", + "| 46|330801|\n", + "| 47|330805|\n", + "| 51|330807|\n", + "| 48|330837|\n", + "| 43|330868|\n", + "| 45|330869|\n", + "| 44|330885|\n", + "| 42|330918|\n", + "| 40|330944|\n", + "| 41|330963|\n", + "| 39|331028|\n", + "| 38|331034|\n", + "| 37|331050|\n", + "| 36|331114|\n", + "| 35|331284|\n", + "| 34|331416|\n", + "| 533|364094|\n", + "| 532|364374|\n", + "| 531|364493|\n", + "| 527|364581|\n", + "| 528|364599|\n", + "| 529|364616|\n", + "| 530|364617|\n", + "| 526|364654|\n", + "| 524|364709|\n", + "| 525|364756|\n", + "| 522|364784|\n", + "| 523|364810|\n", + "| 519|364899|\n", + "| 520|364903|\n", + "| 521|364944|\n", + "| 518|364957|\n", + "| 517|364961|\n", + "| 514|364971|\n", + "| 515|364988|\n", + "| 516|365006|\n", + "| 512|365011|\n", + "| 513|365051|\n", + "| 511|365057|\n", + "| 510|365079|\n", + "| 508|365083|\n", + "| 507|365090|\n", + "| 509|365097|\n", + "| 506|365122|\n", + "| 504|365165|\n", + "| 505|365179|\n", + "| 503|365224|\n", + "| 499|365252|\n", + "| 496|365253|\n", + "| 501|365255|\n", + "| 498|365272|\n", + "| 500|365277|\n", + "| 502|365278|\n", + "| 497|365302|\n", + "| 495|365347|\n", + "| 492|365377|\n", + "| 493|365394|\n", + "| 494|365395|\n", + "| 491|365409|\n", + "| 490|365431|\n", + "| 488|365447|\n", + "| 489|365454|\n", + "| 487|365519|\n", + "| 486|365528|\n", + "| 485|365536|\n", + "| 482|365541|\n", + "| 479|365547|\n", + "| 478|365552|\n", + "| 477|365554|\n", + "| 480|365569|\n", + "| 483|365574|\n", + "| 474|365576|\n", + "| 484|365595|\n", + "| 475|365602|\n", + "| 476|365622|\n", + "| 481|365622|\n", + "| 473|365650|\n", + "| 472|365684|\n", + "| 471|365705|\n", + "| 469|365750|\n", + "| 468|365773|\n", + "| 467|365793|\n", + "| 470|365801|\n", + "| 464|365806|\n", + "| 465|365806|\n", + "| 463|365828|\n", + "| 466|365846|\n", + "| 462|365909|\n", + "| 461|365965|\n", + "| 460|365975|\n", + "| 459|366026|\n", + "| 456|366051|\n", + "| 457|366057|\n", + "| 458|366080|\n", + "| 454|366105|\n", + "| 455|366117|\n", + "| 
452|366150|\n", + "| 453|366160|\n", + "| 448|366193|\n", + "| 451|366200|\n", + "| 450|366214|\n", + "| 449|366217|\n", + "| 446|366297|\n", + "| 447|366320|\n", + "| 445|366371|\n", + "| 444|366383|\n", + "| 443|366422|\n", + "| 442|366461|\n", + "| 441|366589|\n", + "| 440|366617|\n", + "| 439|366758|\n", + "| 438|366799|\n", + "| 437|366883|\n", + "| 436|366901|\n", + "| 435|366940|\n", + "| 434|367122|\n", + "| 21|380513|\n", + "| 20|380565|\n", + "| 19|380749|\n", + "| 18|381028|\n", + "| 17|381069|\n", + "| 16|381243|\n", + "| 15|381263|\n", + "| 14|381438|\n", + "| 13|381470|\n", + "| 12|381544|\n", + "| 11|381646|\n", + "| 10|381711|\n", + "| 8|381721|\n", + "| 9|381753|\n", + "| 7|381759|\n", + "| 6|381763|\n", + "| 5|381783|\n", + "| 4|381827|\n", + "| 3|381971|\n", + "| 1|382022|\n", + "| 2|382029|\n", + "| 0|382095|\n", + "| 332|420259|\n", + "| 333|420346|\n", + "| 331|420485|\n", + "| 330|420525|\n", + "| 329|420707|\n", + "| 326|421031|\n", + "| 327|421040|\n", + "| 328|421052|\n", + "| 324|421107|\n", + "| 325|421142|\n", + "| 323|421374|\n", + "| 320|421440|\n", + "| 322|421479|\n", + "| 321|421531|\n", + "| 317|421574|\n", + "| 318|421603|\n", + "| 319|421610|\n", + "| 316|421612|\n", + "| 312|421670|\n", + "| 310|421675|\n", + "| 315|421679|\n", + "| 314|421681|\n", + "| 313|421687|\n", + "| 309|421687|\n", + "| 311|421699|\n", + "| 308|421751|\n", + "| 305|421832|\n", + "| 300|421867|\n", + "| 306|421897|\n", + "| 302|421903|\n", + "| 307|421911|\n", + "| 304|421918|\n", + "| 303|421920|\n", + "| 301|421950|\n", + "| 299|421972|\n", + "| 297|421988|\n", + "| 298|422019|\n", + "| 295|422072|\n", + "| 293|422083|\n", + "| 296|422091|\n", + "| 294|422095|\n", + "| 292|422097|\n", + "| 288|422103|\n", + "| 290|422114|\n", + "| 291|422116|\n", + "| 285|422134|\n", + "| 286|422155|\n", + "| 289|422155|\n", + "| 280|422185|\n", + "| 287|422193|\n", + "| 284|422194|\n", + "| 282|422207|\n", + "| 281|422218|\n", + "| 283|422236|\n", + "| 278|422238|\n", + "| 276|422255|\n", + "| 279|422265|\n", + "| 277|422266|\n", + "| 275|422305|\n", + "| 273|422307|\n", + "| 274|422346|\n", + "| 272|422350|\n", + "| 271|422354|\n", + "| 270|422372|\n", + "| 269|422415|\n", + "| 268|422498|\n", + "| 267|422501|\n", + "| 266|422508|\n", + "| 265|422549|\n", + "| 264|422557|\n", + "| 263|422591|\n", + "| 262|422625|\n", + "| 260|422634|\n", + "| 259|422671|\n", + "| 258|422673|\n", + "| 261|422692|\n", + "| 257|422694|\n", + "| 255|422761|\n", + "| 252|422777|\n", + "| 250|422788|\n", + "| 253|422795|\n", + "| 256|422803|\n", + "| 254|422807|\n", + "| 248|422838|\n", + "| 249|422839|\n", + "| 251|422841|\n", + "| 247|422852|\n", + "| 246|422891|\n", + "| 242|422904|\n", + "| 245|422925|\n", + "| 244|422986|\n", + "| 243|423003|\n", + "| 240|423197|\n", + "| 241|423202|\n", + "| 238|423231|\n", + "| 239|423262|\n", + "| 236|423376|\n", + "| 237|423402|\n", + "| 235|423403|\n", + "| 234|423762|\n", + "| 433|569570|\n", + "| 432|570154|\n", + "| 431|570301|\n", + "| 430|570372|\n", + "| 429|570572|\n", + "| 428|570655|\n", + "| 426|570763|\n", + "| 427|570781|\n", + "| 424|570870|\n", + "| 425|570872|\n", + "| 423|570953|\n", + "| 422|570979|\n", + "| 421|571069|\n", + "| 419|571096|\n", + "| 420|571097|\n", + "| 418|571127|\n", + "| 417|571153|\n", + "| 416|571185|\n", + "| 415|571201|\n", + "| 414|571286|\n", + "| 413|571425|\n", + "| 412|571449|\n", + "| 407|571506|\n", + "| 410|571528|\n", + "| 411|571532|\n", + "| 409|571553|\n", + "| 408|571585|\n", + "| 406|571595|\n", + "| 404|571645|\n", 
+ "| 405|571657|\n", + "| 403|571742|\n", + "| 402|571766|\n", + "| 401|571796|\n", + "| 399|571842|\n", + "| 400|571847|\n", + "| 397|571874|\n", + "| 398|571913|\n", + "| 396|571925|\n", + "| 395|571966|\n", + "| 394|571983|\n", + "| 392|571993|\n", + "| 393|572020|\n", + "| 391|572123|\n", + "| 387|572181|\n", + "| 390|572182|\n", + "| 389|572183|\n", + "| 388|572189|\n", + "| 386|572212|\n", + "| 385|572244|\n", + "| 383|572249|\n", + "| 384|572276|\n", + "| 382|572302|\n", + "| 381|572344|\n", + "| 380|572361|\n", + "| 379|572382|\n", + "| 378|572394|\n", + "| 377|572428|\n", + "| 376|572438|\n", + "| 375|572493|\n", + "| 373|572545|\n", + "| 374|572565|\n", + "| 372|572569|\n", + "| 371|572600|\n", + "| 370|572604|\n", + "| 369|572634|\n", + "| 368|572647|\n", + "| 365|572742|\n", + "| 366|572742|\n", + "| 367|572755|\n", + "| 364|572798|\n", + "| 362|572800|\n", + "| 363|572816|\n", + "| 361|572868|\n", + "| 360|572895|\n", + "| 359|572907|\n", + "| 358|572924|\n", + "| 357|572957|\n", + "| 356|573022|\n", + "| 354|573102|\n", + "| 355|573104|\n", + "| 352|573132|\n", + "| 353|573145|\n", + "| 351|573173|\n", + "| 350|573187|\n", + "| 348|573262|\n", + "| 349|573270|\n", + "| 347|573334|\n", + "| 346|573372|\n", + "| 345|573425|\n", + "| 344|573556|\n", + "| 343|573584|\n", + "| 342|573658|\n", + "| 341|573676|\n", + "| 340|573781|\n", + "| 339|573977|\n", + "| 337|574013|\n", + "| 338|574040|\n", + "| 336|574185|\n", + "| 335|574318|\n", + "| 334|574727|\n", + "| 32|610310|\n", + "| 31|610736|\n", + "| 30|610951|\n", + "| 29|611294|\n", + "| 28|611761|\n", + "| 27|611933|\n", + "| 26|612048|\n", + "| 25|612117|\n", + "| 24|612529|\n", + "| 23|613100|\n", + "| 22|613957|\n", + "+-----------+------+\n", + "\n" + ] + }, + { + "name": "stderr", + "output_type": "stream", + "text": [ + " \r" + ] + } + ], + "source": [ + "#display number of records by partition\n", + "def displaypartitions(df):\n", + " #number of records by partition\n", + " num = df.rdd.getNumPartitions()\n", + " print(\"Partitions:\", num)\n", + " df.withColumn(\"partitionId\", F.spark_partition_id())\\\n", + " .groupBy(\"partitionId\")\\\n", + " .count()\\\n", + " .orderBy(F.asc(\"count\"))\\\n", + " .show(num)\n", + "\n", + "df_all.rdd.getNumPartitions()\n", + "displaypartitions(df_all)" + ] + }, + { + "cell_type": "code", + "execution_count": 4, + "id": "6cf0065d-4941-4423-b5a3-967e0b960b60", + "metadata": {}, + "outputs": [], + "source": [ + "# repartitioning to 600 partitions\n", + "df_all = df_all.repartition(600).cache()\n", + "#displaypartitions(df_all)" + ] + }, + { + "cell_type": "markdown", + "id": "bc71fd42-d129-4f21-a798-972e664e0daa", + "metadata": {}, + "source": [ + "**Feature Engineering**\n" + ] + }, + { + "cell_type": "code", + "execution_count": 5, + "id": "bd42c068-d06c-4f49-98e8-751942958e03", + "metadata": {}, + "outputs": [ + { + "name": "stderr", + "output_type": "stream", + "text": [ + "[Stage 17:> (0 + 1) / 1]\r" + ] + }, + { + "name": "stdout", + "output_type": "stream", + "text": [ + "+-------------+\n", + "|trip_duration|\n", + "+-------------+\n", + "| 30.0|\n", + "| 30.0|\n", + "| 30.0|\n", + "| 0.0|\n", + "| 30.0|\n", + "| 15.0|\n", + "| 15.0|\n", + "| 15.0|\n", + "| 15.0|\n", + "| 15.0|\n", + "| 15.0|\n", + "| 15.0|\n", + "| 60.0|\n", + "| 15.0|\n", + "| 45.0|\n", + "| 15.0|\n", + "| 0.0|\n", + "| 30.0|\n", + "| 15.0|\n", + "| 45.0|\n", + "+-------------+\n", + "only showing top 20 rows\n", + "\n" + ] + }, + { + "name": "stderr", + "output_type": "stream", + "text": [ + " \r" + ] + } + 
], + "source": [ + "from pyspark.sql import SparkSession\n", + "from pyspark.sql.functions import col, unix_timestamp\n", + "\n", + "df_all = df_all.withColumn(\"start_timestamp\", unix_timestamp(\"start_timestamp\"))\n", + "df_all = df_all.withColumn(\"end_timestamp\", unix_timestamp(\"end_timestamp\"))\n", + "\n", + "# Calculate trip duration in seconds\n", + "df_all = df_all.withColumn(\"trip_duration\", col(\"end_timestamp\") - col(\"start_timestamp\"))\n", + "\n", + "# Convert seconds to minutes\n", + "df_all = df_all.withColumn(\"trip_duration\", col(\"trip_duration\") / 60)\n", + "\n", + "df_all.select('trip_duration').show()\n" + ] + }, + { + "cell_type": "code", + "execution_count": null, + "id": "03b80994-b0de-43e2-ade9-82ed5d8fc6b1", + "metadata": {}, + "outputs": [ + { + "name": "stderr", + "output_type": "stream", + "text": [ + "23/11/24 21:05:28 WARN org.apache.spark.storage.BlockManager: Failed to fetch block after 1 fetch failures. Most recent failure cause:\n", + "org.apache.spark.SparkException: Exception thrown in awaitResult: \n", + "\tat org.apache.spark.util.ThreadUtils$.awaitResult(ThreadUtils.scala:301)\n", + "\tat org.apache.spark.network.BlockTransferService.fetchBlockSync(BlockTransferService.scala:103)\n", + "\tat org.apache.spark.storage.BlockManager.fetchRemoteManagedBuffer(BlockManager.scala:1069)\n", + "\tat org.apache.spark.storage.BlockManager.$anonfun$getRemoteBlock$8(BlockManager.scala:1013)\n", + "\tat scala.Option.orElse(Option.scala:447)\n", + "\tat org.apache.spark.storage.BlockManager.getRemoteBlock(BlockManager.scala:1013)\n", + "\tat org.apache.spark.storage.BlockManager.getRemoteBytes(BlockManager.scala:1151)\n", + "\tat org.apache.spark.scheduler.TaskResultGetter$$anon$3.$anonfun$run$1(TaskResultGetter.scala:88)\n", + "\tat scala.runtime.java8.JFunction0$mcV$sp.apply(JFunction0$mcV$sp.java:23)\n", + "\tat org.apache.spark.util.Utils$.logUncaughtExceptions(Utils.scala:1996)\n", + "\tat org.apache.spark.scheduler.TaskResultGetter$$anon$3.run(TaskResultGetter.scala:63)\n", + "\tat java.util.concurrent.ThreadPoolExecutor.runWorker(ThreadPoolExecutor.java:1149)\n", + "\tat java.util.concurrent.ThreadPoolExecutor$Worker.run(ThreadPoolExecutor.java:624)\n", + "\tat java.lang.Thread.run(Thread.java:750)\n", + "Caused by: java.util.concurrent.ExecutionException: Boxed Error\n", + "\tat scala.concurrent.impl.Promise$.resolver(Promise.scala:87)\n", + "\tat scala.concurrent.impl.Promise$.scala$concurrent$impl$Promise$$resolveTry(Promise.scala:79)\n", + "\tat scala.concurrent.impl.Promise$DefaultPromise.tryComplete(Promise.scala:284)\n", + "\tat scala.concurrent.Promise.complete(Promise.scala:53)\n", + "\tat scala.concurrent.Promise.complete$(Promise.scala:52)\n", + "\tat scala.concurrent.impl.Promise$DefaultPromise.complete(Promise.scala:187)\n", + "\tat scala.concurrent.Promise.failure(Promise.scala:104)\n", + "\tat scala.concurrent.Promise.failure$(Promise.scala:104)\n", + "\tat scala.concurrent.impl.Promise$DefaultPromise.failure(Promise.scala:187)\n", + "\tat org.apache.spark.network.BlockTransferService$$anon$1.onBlockFetchSuccess(BlockTransferService.scala:98)\n", + "\tat org.apache.spark.network.shuffle.RetryingBlockFetcher$RetryingBlockFetchListener.onBlockFetchSuccess(RetryingBlockFetcher.java:216)\n", + "\tat org.apache.spark.network.shuffle.OneForOneBlockFetcher$ChunkCallback.onSuccess(OneForOneBlockFetcher.java:190)\n", + "\tat org.apache.spark.network.client.TransportResponseHandler.handle(TransportResponseHandler.java:171)\n", + "\tat 
org.apache.spark.network.server.TransportChannelHandler.channelRead0(TransportChannelHandler.java:142)\n", + "\tat org.apache.spark.network.server.TransportChannelHandler.channelRead0(TransportChannelHandler.java:53)\n", + "\tat io.netty.channel.SimpleChannelInboundHandler.channelRead(SimpleChannelInboundHandler.java:99)\n", + "\tat io.netty.channel.AbstractChannelHandlerContext.invokeChannelRead(AbstractChannelHandlerContext.java:379)\n", + "\tat io.netty.channel.AbstractChannelHandlerContext.invokeChannelRead(AbstractChannelHandlerContext.java:365)\n", + "\tat io.netty.channel.AbstractChannelHandlerContext.fireChannelRead(AbstractChannelHandlerContext.java:357)\n", + "\tat io.netty.handler.timeout.IdleStateHandler.channelRead(IdleStateHandler.java:286)\n", + "\tat io.netty.channel.AbstractChannelHandlerContext.invokeChannelRead(AbstractChannelHandlerContext.java:379)\n", + "\tat io.netty.channel.AbstractChannelHandlerContext.invokeChannelRead(AbstractChannelHandlerContext.java:365)\n", + "\tat io.netty.channel.AbstractChannelHandlerContext.fireChannelRead(AbstractChannelHandlerContext.java:357)\n", + "\tat io.netty.handler.codec.MessageToMessageDecoder.channelRead(MessageToMessageDecoder.java:103)\n", + "\tat io.netty.channel.AbstractChannelHandlerContext.invokeChannelRead(AbstractChannelHandlerContext.java:379)\n", + "\tat io.netty.channel.AbstractChannelHandlerContext.invokeChannelRead(AbstractChannelHandlerContext.java:365)\n", + "\tat io.netty.channel.AbstractChannelHandlerContext.fireChannelRead(AbstractChannelHandlerContext.java:357)\n", + "\tat org.apache.spark.network.util.TransportFrameDecoder.channelRead(TransportFrameDecoder.java:102)\n", + "\tat io.netty.channel.AbstractChannelHandlerContext.invokeChannelRead(AbstractChannelHandlerContext.java:379)\n", + "\tat io.netty.channel.AbstractChannelHandlerContext.invokeChannelRead(AbstractChannelHandlerContext.java:365)\n", + "\tat io.netty.channel.AbstractChannelHandlerContext.fireChannelRead(AbstractChannelHandlerContext.java:357)\n", + "\tat io.netty.channel.DefaultChannelPipeline$HeadContext.channelRead(DefaultChannelPipeline.java:1410)\n", + "\tat io.netty.channel.AbstractChannelHandlerContext.invokeChannelRead(AbstractChannelHandlerContext.java:379)\n", + "\tat io.netty.channel.AbstractChannelHandlerContext.invokeChannelRead(AbstractChannelHandlerContext.java:365)\n", + "\tat io.netty.channel.DefaultChannelPipeline.fireChannelRead(DefaultChannelPipeline.java:919)\n", + "\tat io.netty.channel.nio.AbstractNioByteChannel$NioByteUnsafe.read(AbstractNioByteChannel.java:163)\n", + "\tat io.netty.channel.nio.NioEventLoop.processSelectedKey(NioEventLoop.java:714)\n", + "\tat io.netty.channel.nio.NioEventLoop.processSelectedKeysOptimized(NioEventLoop.java:650)\n", + "\tat io.netty.channel.nio.NioEventLoop.processSelectedKeys(NioEventLoop.java:576)\n", + "\tat io.netty.channel.nio.NioEventLoop.run(NioEventLoop.java:493)\n", + "\tat io.netty.util.concurrent.SingleThreadEventExecutor$4.run(SingleThreadEventExecutor.java:989)\n", + "\tat io.netty.util.internal.ThreadExecutorMap$2.run(ThreadExecutorMap.java:74)\n", + "\tat io.netty.util.concurrent.FastThreadLocalRunnable.run(FastThreadLocalRunnable.java:30)\n", + "\t... 
1 more\n", + "Caused by: java.lang.OutOfMemoryError: Java heap space\n", + "23/11/24 21:05:28 WARN org.apache.spark.scheduler.TaskSetManager: Lost task 566.0 in stage 19.0 (TID 2294) (hub-msca-bdp-dphub-students-test-ridhi-w-0.c.msca-bdp-student-ap.internal executor 6): TaskResultLost (result lost from block manager)\n", + "23/11/24 21:05:31 WARN org.apache.spark.storage.BlockManager: Failed to fetch block after 1 fetch failures. Most recent failure cause:\n", + "org.apache.spark.SparkException: Exception thrown in awaitResult: \n", + "\tat org.apache.spark.util.ThreadUtils$.awaitResult(ThreadUtils.scala:301)\n", + "\tat org.apache.spark.network.BlockTransferService.fetchBlockSync(BlockTransferService.scala:103)\n", + "\tat org.apache.spark.storage.BlockManager.fetchRemoteManagedBuffer(BlockManager.scala:1069)\n", + "\tat org.apache.spark.storage.BlockManager.$anonfun$getRemoteBlock$8(BlockManager.scala:1013)\n", + "\tat scala.Option.orElse(Option.scala:447)\n", + "\tat org.apache.spark.storage.BlockManager.getRemoteBlock(BlockManager.scala:1013)\n", + "\tat org.apache.spark.storage.BlockManager.getRemoteBytes(BlockManager.scala:1151)\n", + "\tat org.apache.spark.scheduler.TaskResultGetter$$anon$3.$anonfun$run$1(TaskResultGetter.scala:88)\n", + "\tat scala.runtime.java8.JFunction0$mcV$sp.apply(JFunction0$mcV$sp.java:23)\n", + "\tat org.apache.spark.util.Utils$.logUncaughtExceptions(Utils.scala:1996)\n", + "\tat org.apache.spark.scheduler.TaskResultGetter$$anon$3.run(TaskResultGetter.scala:63)\n", + "\tat java.util.concurrent.ThreadPoolExecutor.runWorker(ThreadPoolExecutor.java:1149)\n", + "\tat java.util.concurrent.ThreadPoolExecutor$Worker.run(ThreadPoolExecutor.java:624)\n", + "\tat java.lang.Thread.run(Thread.java:750)\n", + "Caused by: java.util.concurrent.ExecutionException: Boxed Error\n", + "\tat scala.concurrent.impl.Promise$.resolver(Promise.scala:87)\n", + "\tat scala.concurrent.impl.Promise$.scala$concurrent$impl$Promise$$resolveTry(Promise.scala:79)\n", + "\tat scala.concurrent.impl.Promise$DefaultPromise.tryComplete(Promise.scala:284)\n", + "\tat scala.concurrent.Promise.complete(Promise.scala:53)\n", + "\tat scala.concurrent.Promise.complete$(Promise.scala:52)\n", + "\tat scala.concurrent.impl.Promise$DefaultPromise.complete(Promise.scala:187)\n", + "\tat scala.concurrent.Promise.failure(Promise.scala:104)\n", + "\tat scala.concurrent.Promise.failure$(Promise.scala:104)\n", + "\tat scala.concurrent.impl.Promise$DefaultPromise.failure(Promise.scala:187)\n", + "\tat org.apache.spark.network.BlockTransferService$$anon$1.onBlockFetchSuccess(BlockTransferService.scala:98)\n", + "\tat org.apache.spark.network.shuffle.RetryingBlockFetcher$RetryingBlockFetchListener.onBlockFetchSuccess(RetryingBlockFetcher.java:216)\n", + "\tat org.apache.spark.network.shuffle.OneForOneBlockFetcher$ChunkCallback.onSuccess(OneForOneBlockFetcher.java:190)\n", + "\tat org.apache.spark.network.client.TransportResponseHandler.handle(TransportResponseHandler.java:171)\n", + "\tat org.apache.spark.network.server.TransportChannelHandler.channelRead0(TransportChannelHandler.java:142)\n", + "\tat org.apache.spark.network.server.TransportChannelHandler.channelRead0(TransportChannelHandler.java:53)\n", + "\tat io.netty.channel.SimpleChannelInboundHandler.channelRead(SimpleChannelInboundHandler.java:99)\n", + "\tat io.netty.channel.AbstractChannelHandlerContext.invokeChannelRead(AbstractChannelHandlerContext.java:379)\n", + "\tat 
io.netty.channel.AbstractChannelHandlerContext.invokeChannelRead(AbstractChannelHandlerContext.java:365)\n", + "\tat io.netty.channel.AbstractChannelHandlerContext.fireChannelRead(AbstractChannelHandlerContext.java:357)\n", + "\tat io.netty.handler.codec.MessageToMessageDecoder.channelRead(MessageToMessageDecoder.java:103)\n", + "\tat io.netty.channel.AbstractChannelHandlerContext.invokeChannelRead(AbstractChannelHandlerContext.java:379)\n", + "\tat io.netty.channel.AbstractChannelHandlerContext.invokeChannelRead(AbstractChannelHandlerContext.java:365)\n", + "\tat io.netty.channel.AbstractChannelHandlerContext.fireChannelRead(AbstractChannelHandlerContext.java:357)\n", + "\tat org.apache.spark.network.util.TransportFrameDecoder.channelRead(TransportFrameDecoder.java:102)\n", + "\tat io.netty.channel.AbstractChannelHandlerContext.invokeChannelRead(AbstractChannelHandlerContext.java:379)\n", + "\tat io.netty.channel.AbstractChannelHandlerContext.invokeChannelRead(AbstractChannelHandlerContext.java:365)\n", + "\tat io.netty.channel.AbstractChannelHandlerContext.fireChannelRead(AbstractChannelHandlerContext.java:357)\n", + "\tat io.netty.channel.DefaultChannelPipeline$HeadContext.channelRead(DefaultChannelPipeline.java:1410)\n", + "\tat io.netty.channel.AbstractChannelHandlerContext.invokeChannelRead(AbstractChannelHandlerContext.java:379)\n", + "\tat io.netty.channel.AbstractChannelHandlerContext.invokeChannelRead(AbstractChannelHandlerContext.java:365)\n", + "\tat io.netty.channel.DefaultChannelPipeline.fireChannelRead(DefaultChannelPipeline.java:919)\n", + "\tat io.netty.channel.nio.AbstractNioByteChannel$NioByteUnsafe.read(AbstractNioByteChannel.java:163)\n", + "\tat io.netty.channel.nio.NioEventLoop.processSelectedKey(NioEventLoop.java:714)\n", + "\tat io.netty.channel.nio.NioEventLoop.processSelectedKeysOptimized(NioEventLoop.java:650)\n", + "\tat io.netty.channel.nio.NioEventLoop.processSelectedKeys(NioEventLoop.java:576)\n", + "\tat io.netty.channel.nio.NioEventLoop.run(NioEventLoop.java:493)\n", + "\tat io.netty.util.concurrent.SingleThreadEventExecutor$4.run(SingleThreadEventExecutor.java:989)\n", + "\tat io.netty.util.internal.ThreadExecutorMap$2.run(ThreadExecutorMap.java:74)\n", + "\tat io.netty.util.concurrent.FastThreadLocalRunnable.run(FastThreadLocalRunnable.java:30)\n", + "\t... 1 more\n", + "Caused by: java.lang.OutOfMemoryError: Java heap space\n", + "23/11/24 21:05:54 WARN org.apache.spark.scheduler.TaskSetManager: Lost task 579.0 in stage 19.0 (TID 2308) (hub-msca-bdp-dphub-students-test-ridhi-w-0.c.msca-bdp-student-ap.internal executor 3): TaskResultLost (result lost from block manager)\n", + "23/11/24 21:05:57 WARN org.apache.spark.storage.BlockManager: Failed to fetch block after 1 fetch failures. 
Most recent failure cause:\n", + "org.apache.spark.SparkException: Exception thrown in awaitResult: \n", + "\tat org.apache.spark.util.ThreadUtils$.awaitResult(ThreadUtils.scala:301)\n", + "\tat org.apache.spark.network.BlockTransferService.fetchBlockSync(BlockTransferService.scala:103)\n", + "\tat org.apache.spark.storage.BlockManager.fetchRemoteManagedBuffer(BlockManager.scala:1069)\n", + "\tat org.apache.spark.storage.BlockManager.$anonfun$getRemoteBlock$8(BlockManager.scala:1013)\n", + "\tat scala.Option.orElse(Option.scala:447)\n", + "\tat org.apache.spark.storage.BlockManager.getRemoteBlock(BlockManager.scala:1013)\n", + "\tat org.apache.spark.storage.BlockManager.getRemoteBytes(BlockManager.scala:1151)\n", + "\tat org.apache.spark.scheduler.TaskResultGetter$$anon$3.$anonfun$run$1(TaskResultGetter.scala:88)\n", + "\tat scala.runtime.java8.JFunction0$mcV$sp.apply(JFunction0$mcV$sp.java:23)\n", + "\tat org.apache.spark.util.Utils$.logUncaughtExceptions(Utils.scala:1996)\n", + "\tat org.apache.spark.scheduler.TaskResultGetter$$anon$3.run(TaskResultGetter.scala:63)\n", + "\tat java.util.concurrent.ThreadPoolExecutor.runWorker(ThreadPoolExecutor.java:1149)\n", + "\tat java.util.concurrent.ThreadPoolExecutor$Worker.run(ThreadPoolExecutor.java:624)\n", + "\tat java.lang.Thread.run(Thread.java:750)\n", + "Caused by: java.util.concurrent.ExecutionException: Boxed Error\n", + "\tat scala.concurrent.impl.Promise$.resolver(Promise.scala:87)\n", + "\tat scala.concurrent.impl.Promise$.scala$concurrent$impl$Promise$$resolveTry(Promise.scala:79)\n", + "\tat scala.concurrent.impl.Promise$DefaultPromise.tryComplete(Promise.scala:284)\n", + "\tat scala.concurrent.Promise.complete(Promise.scala:53)\n", + "\tat scala.concurrent.Promise.complete$(Promise.scala:52)\n", + "\tat scala.concurrent.impl.Promise$DefaultPromise.complete(Promise.scala:187)\n", + "\tat scala.concurrent.Promise.failure(Promise.scala:104)\n", + "\tat scala.concurrent.Promise.failure$(Promise.scala:104)\n", + "\tat scala.concurrent.impl.Promise$DefaultPromise.failure(Promise.scala:187)\n", + "\tat org.apache.spark.network.BlockTransferService$$anon$1.onBlockFetchSuccess(BlockTransferService.scala:98)\n", + "\tat org.apache.spark.network.shuffle.RetryingBlockFetcher$RetryingBlockFetchListener.onBlockFetchSuccess(RetryingBlockFetcher.java:216)\n", + "\tat org.apache.spark.network.shuffle.OneForOneBlockFetcher$ChunkCallback.onSuccess(OneForOneBlockFetcher.java:190)\n", + "\tat org.apache.spark.network.client.TransportResponseHandler.handle(TransportResponseHandler.java:171)\n", + "\tat org.apache.spark.network.server.TransportChannelHandler.channelRead0(TransportChannelHandler.java:142)\n", + "\tat org.apache.spark.network.server.TransportChannelHandler.channelRead0(TransportChannelHandler.java:53)\n", + "\tat io.netty.channel.SimpleChannelInboundHandler.channelRead(SimpleChannelInboundHandler.java:99)\n", + "\tat io.netty.channel.AbstractChannelHandlerContext.invokeChannelRead(AbstractChannelHandlerContext.java:379)\n", + "\tat io.netty.channel.AbstractChannelHandlerContext.invokeChannelRead(AbstractChannelHandlerContext.java:365)\n", + "\tat io.netty.channel.AbstractChannelHandlerContext.fireChannelRead(AbstractChannelHandlerContext.java:357)\n", + "\tat io.netty.handler.timeout.IdleStateHandler.channelRead(IdleStateHandler.java:286)\n", + "\tat io.netty.channel.AbstractChannelHandlerContext.invokeChannelRead(AbstractChannelHandlerContext.java:379)\n", + "\tat 
io.netty.channel.AbstractChannelHandlerContext.invokeChannelRead(AbstractChannelHandlerContext.java:365)\n", + "\tat io.netty.channel.AbstractChannelHandlerContext.fireChannelRead(AbstractChannelHandlerContext.java:357)\n", + "\tat io.netty.handler.codec.MessageToMessageDecoder.channelRead(MessageToMessageDecoder.java:103)\n", + "\tat io.netty.channel.AbstractChannelHandlerContext.invokeChannelRead(AbstractChannelHandlerContext.java:379)\n", + "\tat io.netty.channel.AbstractChannelHandlerContext.invokeChannelRead(AbstractChannelHandlerContext.java:365)\n", + "\tat io.netty.channel.AbstractChannelHandlerContext.fireChannelRead(AbstractChannelHandlerContext.java:357)\n", + "\tat org.apache.spark.network.util.TransportFrameDecoder.channelRead(TransportFrameDecoder.java:102)\n", + "\tat io.netty.channel.AbstractChannelHandlerContext.invokeChannelRead(AbstractChannelHandlerContext.java:379)\n", + "\tat io.netty.channel.AbstractChannelHandlerContext.invokeChannelRead(AbstractChannelHandlerContext.java:365)\n", + "\tat io.netty.channel.AbstractChannelHandlerContext.fireChannelRead(AbstractChannelHandlerContext.java:357)\n", + "\tat io.netty.channel.DefaultChannelPipeline$HeadContext.channelRead(DefaultChannelPipeline.java:1410)\n", + "\tat io.netty.channel.AbstractChannelHandlerContext.invokeChannelRead(AbstractChannelHandlerContext.java:379)\n", + "\tat io.netty.channel.AbstractChannelHandlerContext.invokeChannelRead(AbstractChannelHandlerContext.java:365)\n", + "\tat io.netty.channel.DefaultChannelPipeline.fireChannelRead(DefaultChannelPipeline.java:919)\n", + "\tat io.netty.channel.nio.AbstractNioByteChannel$NioByteUnsafe.read(AbstractNioByteChannel.java:163)\n", + "\tat io.netty.channel.nio.NioEventLoop.processSelectedKey(NioEventLoop.java:714)\n", + "\tat io.netty.channel.nio.NioEventLoop.processSelectedKeysOptimized(NioEventLoop.java:650)\n", + "\tat io.netty.channel.nio.NioEventLoop.processSelectedKeys(NioEventLoop.java:576)\n", + "\tat io.netty.channel.nio.NioEventLoop.run(NioEventLoop.java:493)\n", + "\tat io.netty.util.concurrent.SingleThreadEventExecutor$4.run(SingleThreadEventExecutor.java:989)\n", + "\tat io.netty.util.internal.ThreadExecutorMap$2.run(ThreadExecutorMap.java:74)\n", + "\tat io.netty.util.concurrent.FastThreadLocalRunnable.run(FastThreadLocalRunnable.java:30)\n", + "\t... 1 more\n", + "Caused by: java.lang.OutOfMemoryError: Java heap space\n", + "23/11/24 21:05:58 WARN org.apache.spark.storage.BlockManager: Failed to fetch block after 1 fetch failures. 
Most recent failure cause:\n", + "org.apache.spark.SparkException: Exception thrown in awaitResult: \n", + "\tat org.apache.spark.util.ThreadUtils$.awaitResult(ThreadUtils.scala:301)\n", + "\tat org.apache.spark.network.BlockTransferService.fetchBlockSync(BlockTransferService.scala:103)\n", + "\tat org.apache.spark.storage.BlockManager.fetchRemoteManagedBuffer(BlockManager.scala:1069)\n", + "\tat org.apache.spark.storage.BlockManager.$anonfun$getRemoteBlock$8(BlockManager.scala:1013)\n", + "\tat scala.Option.orElse(Option.scala:447)\n", + "\tat org.apache.spark.storage.BlockManager.getRemoteBlock(BlockManager.scala:1013)\n", + "\tat org.apache.spark.storage.BlockManager.getRemoteBytes(BlockManager.scala:1151)\n", + "\tat org.apache.spark.scheduler.TaskResultGetter$$anon$3.$anonfun$run$1(TaskResultGetter.scala:88)\n", + "\tat scala.runtime.java8.JFunction0$mcV$sp.apply(JFunction0$mcV$sp.java:23)\n", + "\tat org.apache.spark.util.Utils$.logUncaughtExceptions(Utils.scala:1996)\n", + "\tat org.apache.spark.scheduler.TaskResultGetter$$anon$3.run(TaskResultGetter.scala:63)\n", + "\tat java.util.concurrent.ThreadPoolExecutor.runWorker(ThreadPoolExecutor.java:1149)\n", + "\tat java.util.concurrent.ThreadPoolExecutor$Worker.run(ThreadPoolExecutor.java:624)\n", + "\tat java.lang.Thread.run(Thread.java:750)\n", + "Caused by: java.util.concurrent.ExecutionException: Boxed Error\n", + "\tat scala.concurrent.impl.Promise$.resolver(Promise.scala:87)\n", + "\tat scala.concurrent.impl.Promise$.scala$concurrent$impl$Promise$$resolveTry(Promise.scala:79)\n", + "\tat scala.concurrent.impl.Promise$DefaultPromise.tryComplete(Promise.scala:284)\n", + "\tat scala.concurrent.Promise.complete(Promise.scala:53)\n", + "\tat scala.concurrent.Promise.complete$(Promise.scala:52)\n", + "\tat scala.concurrent.impl.Promise$DefaultPromise.complete(Promise.scala:187)\n", + "\tat scala.concurrent.Promise.failure(Promise.scala:104)\n", + "\tat scala.concurrent.Promise.failure$(Promise.scala:104)\n", + "\tat scala.concurrent.impl.Promise$DefaultPromise.failure(Promise.scala:187)\n", + "\tat org.apache.spark.network.BlockTransferService$$anon$1.onBlockFetchSuccess(BlockTransferService.scala:98)\n", + "\tat org.apache.spark.network.shuffle.RetryingBlockFetcher$RetryingBlockFetchListener.onBlockFetchSuccess(RetryingBlockFetcher.java:216)\n", + "\tat org.apache.spark.network.shuffle.OneForOneBlockFetcher$ChunkCallback.onSuccess(OneForOneBlockFetcher.java:190)\n", + "\tat org.apache.spark.network.client.TransportResponseHandler.handle(TransportResponseHandler.java:171)\n", + "\tat org.apache.spark.network.server.TransportChannelHandler.channelRead0(TransportChannelHandler.java:142)\n", + "\tat org.apache.spark.network.server.TransportChannelHandler.channelRead0(TransportChannelHandler.java:53)\n", + "\tat io.netty.channel.SimpleChannelInboundHandler.channelRead(SimpleChannelInboundHandler.java:99)\n", + "\tat io.netty.channel.AbstractChannelHandlerContext.invokeChannelRead(AbstractChannelHandlerContext.java:379)\n", + "\tat io.netty.channel.AbstractChannelHandlerContext.invokeChannelRead(AbstractChannelHandlerContext.java:365)\n", + "\tat io.netty.channel.AbstractChannelHandlerContext.fireChannelRead(AbstractChannelHandlerContext.java:357)\n", + "\tat io.netty.handler.timeout.IdleStateHandler.channelRead(IdleStateHandler.java:286)\n", + "\tat io.netty.channel.AbstractChannelHandlerContext.invokeChannelRead(AbstractChannelHandlerContext.java:379)\n", + "\tat 
io.netty.channel.AbstractChannelHandlerContext.invokeChannelRead(AbstractChannelHandlerContext.java:365)\n", + "\tat io.netty.channel.AbstractChannelHandlerContext.fireChannelRead(AbstractChannelHandlerContext.java:357)\n", + "\tat io.netty.handler.codec.MessageToMessageDecoder.channelRead(MessageToMessageDecoder.java:103)\n", + "\tat io.netty.channel.AbstractChannelHandlerContext.invokeChannelRead(AbstractChannelHandlerContext.java:379)\n", + "\tat io.netty.channel.AbstractChannelHandlerContext.invokeChannelRead(AbstractChannelHandlerContext.java:365)\n", + "\tat io.netty.channel.AbstractChannelHandlerContext.fireChannelRead(AbstractChannelHandlerContext.java:357)\n", + "\tat org.apache.spark.network.util.TransportFrameDecoder.channelRead(TransportFrameDecoder.java:102)\n", + "\tat io.netty.channel.AbstractChannelHandlerContext.invokeChannelRead(AbstractChannelHandlerContext.java:379)\n", + "\tat io.netty.channel.AbstractChannelHandlerContext.invokeChannelRead(AbstractChannelHandlerContext.java:365)\n", + "\tat io.netty.channel.AbstractChannelHandlerContext.fireChannelRead(AbstractChannelHandlerContext.java:357)\n", + "\tat io.netty.channel.DefaultChannelPipeline$HeadContext.channelRead(DefaultChannelPipeline.java:1410)\n", + "\tat io.netty.channel.AbstractChannelHandlerContext.invokeChannelRead(AbstractChannelHandlerContext.java:379)\n", + "\tat io.netty.channel.AbstractChannelHandlerContext.invokeChannelRead(AbstractChannelHandlerContext.java:365)\n", + "\tat io.netty.channel.DefaultChannelPipeline.fireChannelRead(DefaultChannelPipeline.java:919)\n", + "\tat io.netty.channel.nio.AbstractNioByteChannel$NioByteUnsafe.read(AbstractNioByteChannel.java:163)\n", + "\tat io.netty.channel.nio.NioEventLoop.processSelectedKey(NioEventLoop.java:714)\n", + "\tat io.netty.channel.nio.NioEventLoop.processSelectedKeysOptimized(NioEventLoop.java:650)\n", + "\tat io.netty.channel.nio.NioEventLoop.processSelectedKeys(NioEventLoop.java:576)\n", + "\tat io.netty.channel.nio.NioEventLoop.run(NioEventLoop.java:493)\n", + "\tat io.netty.util.concurrent.SingleThreadEventExecutor$4.run(SingleThreadEventExecutor.java:989)\n", + "\tat io.netty.util.internal.ThreadExecutorMap$2.run(ThreadExecutorMap.java:74)\n", + "\tat io.netty.util.concurrent.FastThreadLocalRunnable.run(FastThreadLocalRunnable.java:30)\n", + "\t... 1 more\n", + "Caused by: java.lang.OutOfMemoryError: Java heap space\n", + "23/11/24 21:05:58 WARN org.apache.spark.scheduler.TaskSetManager: Lost task 427.1 in stage 19.0 (TID 2311) (hub-msca-bdp-dphub-students-test-ridhi-w-0.c.msca-bdp-student-ap.internal executor 6): TaskResultLost (result lost from block manager)\n", + "23/11/24 21:05:58 WARN org.apache.spark.scheduler.TaskSetManager: Lost task 565.0 in stage 19.0 (TID 2293) (hub-msca-bdp-dphub-students-test-ridhi-w-0.c.msca-bdp-student-ap.internal executor 3): TaskResultLost (result lost from block manager)\n", + "23/11/24 21:06:00 WARN org.apache.spark.storage.BlockManager: Failed to fetch block after 1 fetch failures. 
Most recent failure cause:\n", + "org.apache.spark.SparkException: Exception thrown in awaitResult: \n", + "\tat org.apache.spark.util.ThreadUtils$.awaitResult(ThreadUtils.scala:301)\n", + "\tat org.apache.spark.network.BlockTransferService.fetchBlockSync(BlockTransferService.scala:103)\n", + "\tat org.apache.spark.storage.BlockManager.fetchRemoteManagedBuffer(BlockManager.scala:1069)\n", + "\tat org.apache.spark.storage.BlockManager.$anonfun$getRemoteBlock$8(BlockManager.scala:1013)\n", + "\tat scala.Option.orElse(Option.scala:447)\n", + "\tat org.apache.spark.storage.BlockManager.getRemoteBlock(BlockManager.scala:1013)\n", + "\tat org.apache.spark.storage.BlockManager.getRemoteBytes(BlockManager.scala:1151)\n", + "\tat org.apache.spark.scheduler.TaskResultGetter$$anon$3.$anonfun$run$1(TaskResultGetter.scala:88)\n", + "\tat scala.runtime.java8.JFunction0$mcV$sp.apply(JFunction0$mcV$sp.java:23)\n", + "\tat org.apache.spark.util.Utils$.logUncaughtExceptions(Utils.scala:1996)\n", + "\tat org.apache.spark.scheduler.TaskResultGetter$$anon$3.run(TaskResultGetter.scala:63)\n", + "\tat java.util.concurrent.ThreadPoolExecutor.runWorker(ThreadPoolExecutor.java:1149)\n", + "\tat java.util.concurrent.ThreadPoolExecutor$Worker.run(ThreadPoolExecutor.java:624)\n", + "\tat java.lang.Thread.run(Thread.java:750)\n", + "Caused by: java.util.concurrent.ExecutionException: Boxed Error\n", + "\tat scala.concurrent.impl.Promise$.resolver(Promise.scala:87)\n", + "\tat scala.concurrent.impl.Promise$.scala$concurrent$impl$Promise$$resolveTry(Promise.scala:79)\n", + "\tat scala.concurrent.impl.Promise$DefaultPromise.tryComplete(Promise.scala:284)\n", + "\tat scala.concurrent.Promise.complete(Promise.scala:53)\n", + "\tat scala.concurrent.Promise.complete$(Promise.scala:52)\n", + "\tat scala.concurrent.impl.Promise$DefaultPromise.complete(Promise.scala:187)\n", + "\tat scala.concurrent.Promise.failure(Promise.scala:104)\n", + "\tat scala.concurrent.Promise.failure$(Promise.scala:104)\n", + "\tat scala.concurrent.impl.Promise$DefaultPromise.failure(Promise.scala:187)\n", + "\tat org.apache.spark.network.BlockTransferService$$anon$1.onBlockFetchSuccess(BlockTransferService.scala:98)\n", + "\tat org.apache.spark.network.shuffle.RetryingBlockFetcher$RetryingBlockFetchListener.onBlockFetchSuccess(RetryingBlockFetcher.java:216)\n", + "\tat org.apache.spark.network.shuffle.OneForOneBlockFetcher$ChunkCallback.onSuccess(OneForOneBlockFetcher.java:190)\n", + "\tat org.apache.spark.network.client.TransportResponseHandler.handle(TransportResponseHandler.java:171)\n", + "\tat org.apache.spark.network.server.TransportChannelHandler.channelRead0(TransportChannelHandler.java:142)\n", + "\tat org.apache.spark.network.server.TransportChannelHandler.channelRead0(TransportChannelHandler.java:53)\n", + "\tat io.netty.channel.SimpleChannelInboundHandler.channelRead(SimpleChannelInboundHandler.java:99)\n", + "\tat io.netty.channel.AbstractChannelHandlerContext.invokeChannelRead(AbstractChannelHandlerContext.java:379)\n", + "\tat io.netty.channel.AbstractChannelHandlerContext.invokeChannelRead(AbstractChannelHandlerContext.java:365)\n", + "\tat io.netty.channel.AbstractChannelHandlerContext.fireChannelRead(AbstractChannelHandlerContext.java:357)\n", + "\tat io.netty.handler.timeout.IdleStateHandler.channelRead(IdleStateHandler.java:286)\n", + "\tat io.netty.channel.AbstractChannelHandlerContext.invokeChannelRead(AbstractChannelHandlerContext.java:379)\n", + "\tat 
io.netty.channel.AbstractChannelHandlerContext.invokeChannelRead(AbstractChannelHandlerContext.java:365)\n", + "\tat io.netty.channel.AbstractChannelHandlerContext.fireChannelRead(AbstractChannelHandlerContext.java:357)\n", + "\tat io.netty.handler.codec.MessageToMessageDecoder.channelRead(MessageToMessageDecoder.java:103)\n", + "\tat io.netty.channel.AbstractChannelHandlerContext.invokeChannelRead(AbstractChannelHandlerContext.java:379)\n", + "\tat io.netty.channel.AbstractChannelHandlerContext.invokeChannelRead(AbstractChannelHandlerContext.java:365)\n", + "\tat io.netty.channel.AbstractChannelHandlerContext.fireChannelRead(AbstractChannelHandlerContext.java:357)\n", + "\tat org.apache.spark.network.util.TransportFrameDecoder.channelRead(TransportFrameDecoder.java:102)\n", + "\tat io.netty.channel.AbstractChannelHandlerContext.invokeChannelRead(AbstractChannelHandlerContext.java:379)\n", + "\tat io.netty.channel.AbstractChannelHandlerContext.invokeChannelRead(AbstractChannelHandlerContext.java:365)\n", + "\tat io.netty.channel.AbstractChannelHandlerContext.fireChannelRead(AbstractChannelHandlerContext.java:357)\n", + "\tat io.netty.channel.DefaultChannelPipeline$HeadContext.channelRead(DefaultChannelPipeline.java:1410)\n", + "\tat io.netty.channel.AbstractChannelHandlerContext.invokeChannelRead(AbstractChannelHandlerContext.java:379)\n", + "\tat io.netty.channel.AbstractChannelHandlerContext.invokeChannelRead(AbstractChannelHandlerContext.java:365)\n", + "\tat io.netty.channel.DefaultChannelPipeline.fireChannelRead(DefaultChannelPipeline.java:919)\n", + "\tat io.netty.channel.nio.AbstractNioByteChannel$NioByteUnsafe.read(AbstractNioByteChannel.java:163)\n", + "\tat io.netty.channel.nio.NioEventLoop.processSelectedKey(NioEventLoop.java:714)\n", + "\tat io.netty.channel.nio.NioEventLoop.processSelectedKeysOptimized(NioEventLoop.java:650)\n", + "\tat io.netty.channel.nio.NioEventLoop.processSelectedKeys(NioEventLoop.java:576)\n", + "\tat io.netty.channel.nio.NioEventLoop.run(NioEventLoop.java:493)\n", + "\tat io.netty.util.concurrent.SingleThreadEventExecutor$4.run(SingleThreadEventExecutor.java:989)\n", + "\tat io.netty.util.internal.ThreadExecutorMap$2.run(ThreadExecutorMap.java:74)\n", + "\tat io.netty.util.concurrent.FastThreadLocalRunnable.run(FastThreadLocalRunnable.java:30)\n", + "\t... 1 more\n", + "Caused by: java.lang.OutOfMemoryError: Java heap space\n", + "23/11/24 21:06:01 WARN org.apache.spark.storage.BlockManager: Failed to fetch block after 1 fetch failures. 
Most recent failure cause:\n", + "org.apache.spark.SparkException: Exception thrown in awaitResult: \n", + "\tat org.apache.spark.util.ThreadUtils$.awaitResult(ThreadUtils.scala:301)\n", + "\tat org.apache.spark.network.BlockTransferService.fetchBlockSync(BlockTransferService.scala:103)\n", + "\tat org.apache.spark.storage.BlockManager.fetchRemoteManagedBuffer(BlockManager.scala:1069)\n", + "\tat org.apache.spark.storage.BlockManager.$anonfun$getRemoteBlock$8(BlockManager.scala:1013)\n", + "\tat scala.Option.orElse(Option.scala:447)\n", + "\tat org.apache.spark.storage.BlockManager.getRemoteBlock(BlockManager.scala:1013)\n", + "\tat org.apache.spark.storage.BlockManager.getRemoteBytes(BlockManager.scala:1151)\n", + "\tat org.apache.spark.scheduler.TaskResultGetter$$anon$3.$anonfun$run$1(TaskResultGetter.scala:88)\n", + "\tat scala.runtime.java8.JFunction0$mcV$sp.apply(JFunction0$mcV$sp.java:23)\n", + "\tat org.apache.spark.util.Utils$.logUncaughtExceptions(Utils.scala:1996)\n", + "\tat org.apache.spark.scheduler.TaskResultGetter$$anon$3.run(TaskResultGetter.scala:63)\n", + "\tat java.util.concurrent.ThreadPoolExecutor.runWorker(ThreadPoolExecutor.java:1149)\n", + "\tat java.util.concurrent.ThreadPoolExecutor$Worker.run(ThreadPoolExecutor.java:624)\n", + "\tat java.lang.Thread.run(Thread.java:750)\n", + "Caused by: java.util.concurrent.ExecutionException: Boxed Error\n", + "\tat scala.concurrent.impl.Promise$.resolver(Promise.scala:87)\n", + "\tat scala.concurrent.impl.Promise$.scala$concurrent$impl$Promise$$resolveTry(Promise.scala:79)\n", + "\tat scala.concurrent.impl.Promise$DefaultPromise.tryComplete(Promise.scala:284)\n", + "\tat scala.concurrent.Promise.complete(Promise.scala:53)\n", + "\tat scala.concurrent.Promise.complete$(Promise.scala:52)\n", + "\tat scala.concurrent.impl.Promise$DefaultPromise.complete(Promise.scala:187)\n", + "\tat scala.concurrent.Promise.failure(Promise.scala:104)\n", + "\tat scala.concurrent.Promise.failure$(Promise.scala:104)\n", + "\tat scala.concurrent.impl.Promise$DefaultPromise.failure(Promise.scala:187)\n", + "\tat org.apache.spark.network.BlockTransferService$$anon$1.onBlockFetchSuccess(BlockTransferService.scala:98)\n", + "\tat org.apache.spark.network.shuffle.RetryingBlockFetcher$RetryingBlockFetchListener.onBlockFetchSuccess(RetryingBlockFetcher.java:216)\n", + "\tat org.apache.spark.network.shuffle.OneForOneBlockFetcher$ChunkCallback.onSuccess(OneForOneBlockFetcher.java:190)\n", + "\tat org.apache.spark.network.client.TransportResponseHandler.handle(TransportResponseHandler.java:171)\n", + "\tat org.apache.spark.network.server.TransportChannelHandler.channelRead0(TransportChannelHandler.java:142)\n", + "\tat org.apache.spark.network.server.TransportChannelHandler.channelRead0(TransportChannelHandler.java:53)\n", + "\tat io.netty.channel.SimpleChannelInboundHandler.channelRead(SimpleChannelInboundHandler.java:99)\n", + "\tat io.netty.channel.AbstractChannelHandlerContext.invokeChannelRead(AbstractChannelHandlerContext.java:379)\n", + "\tat io.netty.channel.AbstractChannelHandlerContext.invokeChannelRead(AbstractChannelHandlerContext.java:365)\n", + "\tat io.netty.channel.AbstractChannelHandlerContext.fireChannelRead(AbstractChannelHandlerContext.java:357)\n", + "\tat io.netty.handler.timeout.IdleStateHandler.channelRead(IdleStateHandler.java:286)\n", + "\tat io.netty.channel.AbstractChannelHandlerContext.invokeChannelRead(AbstractChannelHandlerContext.java:379)\n", + "\tat 
io.netty.channel.AbstractChannelHandlerContext.invokeChannelRead(AbstractChannelHandlerContext.java:365)\n", + "\tat io.netty.channel.AbstractChannelHandlerContext.fireChannelRead(AbstractChannelHandlerContext.java:357)\n", + "\tat io.netty.handler.codec.MessageToMessageDecoder.channelRead(MessageToMessageDecoder.java:103)\n", + "\tat io.netty.channel.AbstractChannelHandlerContext.invokeChannelRead(AbstractChannelHandlerContext.java:379)\n", + "\tat io.netty.channel.AbstractChannelHandlerContext.invokeChannelRead(AbstractChannelHandlerContext.java:365)\n", + "\tat io.netty.channel.AbstractChannelHandlerContext.fireChannelRead(AbstractChannelHandlerContext.java:357)\n", + "\tat org.apache.spark.network.util.TransportFrameDecoder.channelRead(TransportFrameDecoder.java:102)\n", + "\tat io.netty.channel.AbstractChannelHandlerContext.invokeChannelRead(AbstractChannelHandlerContext.java:379)\n", + "\tat io.netty.channel.AbstractChannelHandlerContext.invokeChannelRead(AbstractChannelHandlerContext.java:365)\n", + "\tat io.netty.channel.AbstractChannelHandlerContext.fireChannelRead(AbstractChannelHandlerContext.java:357)\n", + "\tat io.netty.channel.DefaultChannelPipeline$HeadContext.channelRead(DefaultChannelPipeline.java:1410)\n", + "\tat io.netty.channel.AbstractChannelHandlerContext.invokeChannelRead(AbstractChannelHandlerContext.java:379)\n", + "\tat io.netty.channel.AbstractChannelHandlerContext.invokeChannelRead(AbstractChannelHandlerContext.java:365)\n", + "\tat io.netty.channel.DefaultChannelPipeline.fireChannelRead(DefaultChannelPipeline.java:919)\n", + "\tat io.netty.channel.nio.AbstractNioByteChannel$NioByteUnsafe.read(AbstractNioByteChannel.java:163)\n", + "\tat io.netty.channel.nio.NioEventLoop.processSelectedKey(NioEventLoop.java:714)\n", + "\tat io.netty.channel.nio.NioEventLoop.processSelectedKeysOptimized(NioEventLoop.java:650)\n", + "\tat io.netty.channel.nio.NioEventLoop.processSelectedKeys(NioEventLoop.java:576)\n", + "\tat io.netty.channel.nio.NioEventLoop.run(NioEventLoop.java:493)\n", + "\tat io.netty.util.concurrent.SingleThreadEventExecutor$4.run(SingleThreadEventExecutor.java:989)\n", + "\tat io.netty.util.internal.ThreadExecutorMap$2.run(ThreadExecutorMap.java:74)\n", + "\tat io.netty.util.concurrent.FastThreadLocalRunnable.run(FastThreadLocalRunnable.java:30)\n", + "\t... 1 more\n", + "Caused by: java.lang.OutOfMemoryError: Java heap space\n", + "23/11/24 21:06:01 WARN org.apache.spark.scheduler.TaskSetManager: Lost task 443.1 in stage 19.0 (TID 2312) (hub-msca-bdp-dphub-students-test-ridhi-w-0.c.msca-bdp-student-ap.internal executor 6): TaskResultLost (result lost from block manager)\n", + "23/11/24 21:06:01 WARN org.apache.spark.scheduler.TaskSetManager: Lost task 564.0 in stage 19.0 (TID 2292) (hub-msca-bdp-dphub-students-test-ridhi-w-0.c.msca-bdp-student-ap.internal executor 3): TaskResultLost (result lost from block manager)\n", + "23/11/24 21:06:05 WARN org.apache.spark.storage.BlockManager: Failed to fetch block after 1 fetch failures. 
Most recent failure cause:\n", + "org.apache.spark.SparkException: Exception thrown in awaitResult: \n", + "\tat org.apache.spark.util.ThreadUtils$.awaitResult(ThreadUtils.scala:301)\n", + "\tat org.apache.spark.network.BlockTransferService.fetchBlockSync(BlockTransferService.scala:103)\n", + "\tat org.apache.spark.storage.BlockManager.fetchRemoteManagedBuffer(BlockManager.scala:1069)\n", + "\tat org.apache.spark.storage.BlockManager.$anonfun$getRemoteBlock$8(BlockManager.scala:1013)\n", + "\tat scala.Option.orElse(Option.scala:447)\n", + "\tat org.apache.spark.storage.BlockManager.getRemoteBlock(BlockManager.scala:1013)\n", + "\tat org.apache.spark.storage.BlockManager.getRemoteBytes(BlockManager.scala:1151)\n", + "\tat org.apache.spark.scheduler.TaskResultGetter$$anon$3.$anonfun$run$1(TaskResultGetter.scala:88)\n", + "\tat scala.runtime.java8.JFunction0$mcV$sp.apply(JFunction0$mcV$sp.java:23)\n", + "\tat org.apache.spark.util.Utils$.logUncaughtExceptions(Utils.scala:1996)\n", + "\tat org.apache.spark.scheduler.TaskResultGetter$$anon$3.run(TaskResultGetter.scala:63)\n", + "\tat java.util.concurrent.ThreadPoolExecutor.runWorker(ThreadPoolExecutor.java:1149)\n", + "\tat java.util.concurrent.ThreadPoolExecutor$Worker.run(ThreadPoolExecutor.java:624)\n", + "\tat java.lang.Thread.run(Thread.java:750)\n", + "Caused by: java.util.concurrent.ExecutionException: Boxed Error\n", + "\tat scala.concurrent.impl.Promise$.resolver(Promise.scala:87)\n", + "\tat scala.concurrent.impl.Promise$.scala$concurrent$impl$Promise$$resolveTry(Promise.scala:79)\n", + "\tat scala.concurrent.impl.Promise$DefaultPromise.tryComplete(Promise.scala:284)\n", + "\tat scala.concurrent.Promise.complete(Promise.scala:53)\n", + "\tat scala.concurrent.Promise.complete$(Promise.scala:52)\n", + "\tat scala.concurrent.impl.Promise$DefaultPromise.complete(Promise.scala:187)\n", + "\tat scala.concurrent.Promise.failure(Promise.scala:104)\n", + "\tat scala.concurrent.Promise.failure$(Promise.scala:104)\n", + "\tat scala.concurrent.impl.Promise$DefaultPromise.failure(Promise.scala:187)\n", + "\tat org.apache.spark.network.BlockTransferService$$anon$1.onBlockFetchSuccess(BlockTransferService.scala:98)\n", + "\tat org.apache.spark.network.shuffle.RetryingBlockFetcher$RetryingBlockFetchListener.onBlockFetchSuccess(RetryingBlockFetcher.java:216)\n", + "\tat org.apache.spark.network.shuffle.OneForOneBlockFetcher$ChunkCallback.onSuccess(OneForOneBlockFetcher.java:190)\n", + "\tat org.apache.spark.network.client.TransportResponseHandler.handle(TransportResponseHandler.java:171)\n", + "\tat org.apache.spark.network.server.TransportChannelHandler.channelRead0(TransportChannelHandler.java:142)\n", + "\tat org.apache.spark.network.server.TransportChannelHandler.channelRead0(TransportChannelHandler.java:53)\n", + "\tat io.netty.channel.SimpleChannelInboundHandler.channelRead(SimpleChannelInboundHandler.java:99)\n", + "\tat io.netty.channel.AbstractChannelHandlerContext.invokeChannelRead(AbstractChannelHandlerContext.java:379)\n", + "\tat io.netty.channel.AbstractChannelHandlerContext.invokeChannelRead(AbstractChannelHandlerContext.java:365)\n", + "\tat io.netty.channel.AbstractChannelHandlerContext.fireChannelRead(AbstractChannelHandlerContext.java:357)\n", + "\tat io.netty.handler.timeout.IdleStateHandler.channelRead(IdleStateHandler.java:286)\n", + "\tat io.netty.channel.AbstractChannelHandlerContext.invokeChannelRead(AbstractChannelHandlerContext.java:379)\n", + "\tat 
io.netty.channel.AbstractChannelHandlerContext.invokeChannelRead(AbstractChannelHandlerContext.java:365)\n", + "\tat io.netty.channel.AbstractChannelHandlerContext.fireChannelRead(AbstractChannelHandlerContext.java:357)\n", + "\tat io.netty.handler.codec.MessageToMessageDecoder.channelRead(MessageToMessageDecoder.java:103)\n", + "\tat io.netty.channel.AbstractChannelHandlerContext.invokeChannelRead(AbstractChannelHandlerContext.java:379)\n", + "\tat io.netty.channel.AbstractChannelHandlerContext.invokeChannelRead(AbstractChannelHandlerContext.java:365)\n", + "\tat io.netty.channel.AbstractChannelHandlerContext.fireChannelRead(AbstractChannelHandlerContext.java:357)\n", + "\tat org.apache.spark.network.util.TransportFrameDecoder.channelRead(TransportFrameDecoder.java:102)\n", + "\tat io.netty.channel.AbstractChannelHandlerContext.invokeChannelRead(AbstractChannelHandlerContext.java:379)\n", + "\tat io.netty.channel.AbstractChannelHandlerContext.invokeChannelRead(AbstractChannelHandlerContext.java:365)\n", + "\tat io.netty.channel.AbstractChannelHandlerContext.fireChannelRead(AbstractChannelHandlerContext.java:357)\n", + "\tat io.netty.channel.DefaultChannelPipeline$HeadContext.channelRead(DefaultChannelPipeline.java:1410)\n", + "\tat io.netty.channel.AbstractChannelHandlerContext.invokeChannelRead(AbstractChannelHandlerContext.java:379)\n", + "\tat io.netty.channel.AbstractChannelHandlerContext.invokeChannelRead(AbstractChannelHandlerContext.java:365)\n", + "\tat io.netty.channel.DefaultChannelPipeline.fireChannelRead(DefaultChannelPipeline.java:919)\n", + "\tat io.netty.channel.nio.AbstractNioByteChannel$NioByteUnsafe.read(AbstractNioByteChannel.java:163)\n", + "\tat io.netty.channel.nio.NioEventLoop.processSelectedKey(NioEventLoop.java:714)\n", + "\tat io.netty.channel.nio.NioEventLoop.processSelectedKeysOptimized(NioEventLoop.java:650)\n", + "\tat io.netty.channel.nio.NioEventLoop.processSelectedKeys(NioEventLoop.java:576)\n", + "\tat io.netty.channel.nio.NioEventLoop.run(NioEventLoop.java:493)\n", + "\tat io.netty.util.concurrent.SingleThreadEventExecutor$4.run(SingleThreadEventExecutor.java:989)\n", + "\tat io.netty.util.internal.ThreadExecutorMap$2.run(ThreadExecutorMap.java:74)\n", + "\tat io.netty.util.concurrent.FastThreadLocalRunnable.run(FastThreadLocalRunnable.java:30)\n", + "\t... 1 more\n", + "Caused by: java.lang.OutOfMemoryError: Java heap space\n", + "23/11/24 21:06:06 WARN org.apache.spark.storage.BlockManager: Failed to fetch block after 1 fetch failures. 
Most recent failure cause:\n", + "org.apache.spark.SparkException: Exception thrown in awaitResult: \n", + "\tat org.apache.spark.util.ThreadUtils$.awaitResult(ThreadUtils.scala:301)\n", + "\tat org.apache.spark.network.BlockTransferService.fetchBlockSync(BlockTransferService.scala:103)\n", + "\tat org.apache.spark.storage.BlockManager.fetchRemoteManagedBuffer(BlockManager.scala:1069)\n", + "\tat org.apache.spark.storage.BlockManager.$anonfun$getRemoteBlock$8(BlockManager.scala:1013)\n", + "\tat scala.Option.orElse(Option.scala:447)\n", + "\tat org.apache.spark.storage.BlockManager.getRemoteBlock(BlockManager.scala:1013)\n", + "\tat org.apache.spark.storage.BlockManager.getRemoteBytes(BlockManager.scala:1151)\n", + "\tat org.apache.spark.scheduler.TaskResultGetter$$anon$3.$anonfun$run$1(TaskResultGetter.scala:88)\n", + "\tat scala.runtime.java8.JFunction0$mcV$sp.apply(JFunction0$mcV$sp.java:23)\n", + "\tat org.apache.spark.util.Utils$.logUncaughtExceptions(Utils.scala:1996)\n", + "\tat org.apache.spark.scheduler.TaskResultGetter$$anon$3.run(TaskResultGetter.scala:63)\n", + "\tat java.util.concurrent.ThreadPoolExecutor.runWorker(ThreadPoolExecutor.java:1149)\n", + "\tat java.util.concurrent.ThreadPoolExecutor$Worker.run(ThreadPoolExecutor.java:624)\n", + "\tat java.lang.Thread.run(Thread.java:750)\n", + "Caused by: java.util.concurrent.ExecutionException: Boxed Error\n", + "\tat scala.concurrent.impl.Promise$.resolver(Promise.scala:87)\n", + "\tat scala.concurrent.impl.Promise$.scala$concurrent$impl$Promise$$resolveTry(Promise.scala:79)\n", + "\tat scala.concurrent.impl.Promise$DefaultPromise.tryComplete(Promise.scala:284)\n", + "\tat scala.concurrent.Promise.complete(Promise.scala:53)\n", + "\tat scala.concurrent.Promise.complete$(Promise.scala:52)\n", + "\tat scala.concurrent.impl.Promise$DefaultPromise.complete(Promise.scala:187)\n", + "\tat scala.concurrent.Promise.failure(Promise.scala:104)\n", + "\tat scala.concurrent.Promise.failure$(Promise.scala:104)\n", + "\tat scala.concurrent.impl.Promise$DefaultPromise.failure(Promise.scala:187)\n", + "\tat org.apache.spark.network.BlockTransferService$$anon$1.onBlockFetchSuccess(BlockTransferService.scala:98)\n", + "\tat org.apache.spark.network.shuffle.RetryingBlockFetcher$RetryingBlockFetchListener.onBlockFetchSuccess(RetryingBlockFetcher.java:216)\n", + "\tat org.apache.spark.network.shuffle.OneForOneBlockFetcher$ChunkCallback.onSuccess(OneForOneBlockFetcher.java:190)\n", + "\tat org.apache.spark.network.client.TransportResponseHandler.handle(TransportResponseHandler.java:171)\n", + "\tat org.apache.spark.network.server.TransportChannelHandler.channelRead0(TransportChannelHandler.java:142)\n", + "\tat org.apache.spark.network.server.TransportChannelHandler.channelRead0(TransportChannelHandler.java:53)\n", + "\tat io.netty.channel.SimpleChannelInboundHandler.channelRead(SimpleChannelInboundHandler.java:99)\n", + "\tat io.netty.channel.AbstractChannelHandlerContext.invokeChannelRead(AbstractChannelHandlerContext.java:379)\n", + "\tat io.netty.channel.AbstractChannelHandlerContext.invokeChannelRead(AbstractChannelHandlerContext.java:365)\n", + "\tat io.netty.channel.AbstractChannelHandlerContext.fireChannelRead(AbstractChannelHandlerContext.java:357)\n", + "\tat io.netty.handler.timeout.IdleStateHandler.channelRead(IdleStateHandler.java:286)\n", + "\tat io.netty.channel.AbstractChannelHandlerContext.invokeChannelRead(AbstractChannelHandlerContext.java:379)\n", + "\tat 
io.netty.channel.AbstractChannelHandlerContext.invokeChannelRead(AbstractChannelHandlerContext.java:365)\n", + "\tat io.netty.channel.AbstractChannelHandlerContext.fireChannelRead(AbstractChannelHandlerContext.java:357)\n", + "\tat io.netty.handler.codec.MessageToMessageDecoder.channelRead(MessageToMessageDecoder.java:103)\n", + "\tat io.netty.channel.AbstractChannelHandlerContext.invokeChannelRead(AbstractChannelHandlerContext.java:379)\n", + "\tat io.netty.channel.AbstractChannelHandlerContext.invokeChannelRead(AbstractChannelHandlerContext.java:365)\n", + "\tat io.netty.channel.AbstractChannelHandlerContext.fireChannelRead(AbstractChannelHandlerContext.java:357)\n", + "\tat org.apache.spark.network.util.TransportFrameDecoder.channelRead(TransportFrameDecoder.java:102)\n", + "\tat io.netty.channel.AbstractChannelHandlerContext.invokeChannelRead(AbstractChannelHandlerContext.java:379)\n", + "\tat io.netty.channel.AbstractChannelHandlerContext.invokeChannelRead(AbstractChannelHandlerContext.java:365)\n", + "\tat io.netty.channel.AbstractChannelHandlerContext.fireChannelRead(AbstractChannelHandlerContext.java:357)\n", + "\tat io.netty.channel.DefaultChannelPipeline$HeadContext.channelRead(DefaultChannelPipeline.java:1410)\n", + "\tat io.netty.channel.AbstractChannelHandlerContext.invokeChannelRead(AbstractChannelHandlerContext.java:379)\n", + "\tat io.netty.channel.AbstractChannelHandlerContext.invokeChannelRead(AbstractChannelHandlerContext.java:365)\n", + "\tat io.netty.channel.DefaultChannelPipeline.fireChannelRead(DefaultChannelPipeline.java:919)\n", + "\tat io.netty.channel.nio.AbstractNioByteChannel$NioByteUnsafe.read(AbstractNioByteChannel.java:163)\n", + "\tat io.netty.channel.nio.NioEventLoop.processSelectedKey(NioEventLoop.java:714)\n", + "\tat io.netty.channel.nio.NioEventLoop.processSelectedKeysOptimized(NioEventLoop.java:650)\n", + "\tat io.netty.channel.nio.NioEventLoop.processSelectedKeys(NioEventLoop.java:576)\n", + "\tat io.netty.channel.nio.NioEventLoop.run(NioEventLoop.java:493)\n", + "\tat io.netty.util.concurrent.SingleThreadEventExecutor$4.run(SingleThreadEventExecutor.java:989)\n", + "\tat io.netty.util.internal.ThreadExecutorMap$2.run(ThreadExecutorMap.java:74)\n", + "\tat io.netty.util.concurrent.FastThreadLocalRunnable.run(FastThreadLocalRunnable.java:30)\n", + "\t... 1 more\n", + "Caused by: java.lang.OutOfMemoryError: Java heap space\n", + "23/11/24 21:06:06 WARN org.apache.spark.scheduler.TaskSetManager: Lost task 454.1 in stage 19.0 (TID 2313) (hub-msca-bdp-dphub-students-test-ridhi-w-0.c.msca-bdp-student-ap.internal executor 6): TaskResultLost (result lost from block manager)\n", + "23/11/24 21:06:06 WARN org.apache.spark.scheduler.TaskSetManager: Lost task 580.0 in stage 19.0 (TID 2309) (hub-msca-bdp-dphub-students-test-ridhi-w-0.c.msca-bdp-student-ap.internal executor 3): TaskResultLost (result lost from block manager)\n", + "23/11/24 21:06:08 WARN org.apache.spark.storage.BlockManager: Failed to fetch block after 1 fetch failures. 
Most recent failure cause:\n", + "org.apache.spark.SparkException: Exception thrown in awaitResult: \n", + "\tat org.apache.spark.util.ThreadUtils$.awaitResult(ThreadUtils.scala:301)\n", + "\tat org.apache.spark.network.BlockTransferService.fetchBlockSync(BlockTransferService.scala:103)\n", + "\tat org.apache.spark.storage.BlockManager.fetchRemoteManagedBuffer(BlockManager.scala:1069)\n", + "\tat org.apache.spark.storage.BlockManager.$anonfun$getRemoteBlock$8(BlockManager.scala:1013)\n", + "\tat scala.Option.orElse(Option.scala:447)\n", + "\tat org.apache.spark.storage.BlockManager.getRemoteBlock(BlockManager.scala:1013)\n", + "\tat org.apache.spark.storage.BlockManager.getRemoteBytes(BlockManager.scala:1151)\n", + "\tat org.apache.spark.scheduler.TaskResultGetter$$anon$3.$anonfun$run$1(TaskResultGetter.scala:88)\n", + "\tat scala.runtime.java8.JFunction0$mcV$sp.apply(JFunction0$mcV$sp.java:23)\n", + "\tat org.apache.spark.util.Utils$.logUncaughtExceptions(Utils.scala:1996)\n", + "\tat org.apache.spark.scheduler.TaskResultGetter$$anon$3.run(TaskResultGetter.scala:63)\n", + "\tat java.util.concurrent.ThreadPoolExecutor.runWorker(ThreadPoolExecutor.java:1149)\n", + "\tat java.util.concurrent.ThreadPoolExecutor$Worker.run(ThreadPoolExecutor.java:624)\n", + "\tat java.lang.Thread.run(Thread.java:750)\n", + "Caused by: java.util.concurrent.ExecutionException: Boxed Error\n", + "\tat scala.concurrent.impl.Promise$.resolver(Promise.scala:87)\n", + "\tat scala.concurrent.impl.Promise$.scala$concurrent$impl$Promise$$resolveTry(Promise.scala:79)\n", + "\tat scala.concurrent.impl.Promise$DefaultPromise.tryComplete(Promise.scala:284)\n", + "\tat scala.concurrent.Promise.complete(Promise.scala:53)\n", + "\tat scala.concurrent.Promise.complete$(Promise.scala:52)\n", + "\tat scala.concurrent.impl.Promise$DefaultPromise.complete(Promise.scala:187)\n", + "\tat scala.concurrent.Promise.failure(Promise.scala:104)\n", + "\tat scala.concurrent.Promise.failure$(Promise.scala:104)\n", + "\tat scala.concurrent.impl.Promise$DefaultPromise.failure(Promise.scala:187)\n", + "\tat org.apache.spark.network.BlockTransferService$$anon$1.onBlockFetchSuccess(BlockTransferService.scala:98)\n", + "\tat org.apache.spark.network.shuffle.RetryingBlockFetcher$RetryingBlockFetchListener.onBlockFetchSuccess(RetryingBlockFetcher.java:216)\n", + "\tat org.apache.spark.network.shuffle.OneForOneBlockFetcher$ChunkCallback.onSuccess(OneForOneBlockFetcher.java:190)\n", + "\tat org.apache.spark.network.client.TransportResponseHandler.handle(TransportResponseHandler.java:171)\n", + "\tat org.apache.spark.network.server.TransportChannelHandler.channelRead0(TransportChannelHandler.java:142)\n", + "\tat org.apache.spark.network.server.TransportChannelHandler.channelRead0(TransportChannelHandler.java:53)\n", + "\tat io.netty.channel.SimpleChannelInboundHandler.channelRead(SimpleChannelInboundHandler.java:99)\n", + "\tat io.netty.channel.AbstractChannelHandlerContext.invokeChannelRead(AbstractChannelHandlerContext.java:379)\n", + "\tat io.netty.channel.AbstractChannelHandlerContext.invokeChannelRead(AbstractChannelHandlerContext.java:365)\n", + "\tat io.netty.channel.AbstractChannelHandlerContext.fireChannelRead(AbstractChannelHandlerContext.java:357)\n", + "\tat io.netty.handler.timeout.IdleStateHandler.channelRead(IdleStateHandler.java:286)\n", + "\tat io.netty.channel.AbstractChannelHandlerContext.invokeChannelRead(AbstractChannelHandlerContext.java:379)\n", + "\tat 
io.netty.channel.AbstractChannelHandlerContext.invokeChannelRead(AbstractChannelHandlerContext.java:365)\n", + "\tat io.netty.channel.AbstractChannelHandlerContext.fireChannelRead(AbstractChannelHandlerContext.java:357)\n", + "\tat io.netty.handler.codec.MessageToMessageDecoder.channelRead(MessageToMessageDecoder.java:103)\n", + "\tat io.netty.channel.AbstractChannelHandlerContext.invokeChannelRead(AbstractChannelHandlerContext.java:379)\n", + "\tat io.netty.channel.AbstractChannelHandlerContext.invokeChannelRead(AbstractChannelHandlerContext.java:365)\n", + "\tat io.netty.channel.AbstractChannelHandlerContext.fireChannelRead(AbstractChannelHandlerContext.java:357)\n", + "\tat org.apache.spark.network.util.TransportFrameDecoder.channelRead(TransportFrameDecoder.java:102)\n", + "\tat io.netty.channel.AbstractChannelHandlerContext.invokeChannelRead(AbstractChannelHandlerContext.java:379)\n", + "\tat io.netty.channel.AbstractChannelHandlerContext.invokeChannelRead(AbstractChannelHandlerContext.java:365)\n", + "\tat io.netty.channel.AbstractChannelHandlerContext.fireChannelRead(AbstractChannelHandlerContext.java:357)\n", + "\tat io.netty.channel.DefaultChannelPipeline$HeadContext.channelRead(DefaultChannelPipeline.java:1410)\n", + "\tat io.netty.channel.AbstractChannelHandlerContext.invokeChannelRead(AbstractChannelHandlerContext.java:379)\n", + "\tat io.netty.channel.AbstractChannelHandlerContext.invokeChannelRead(AbstractChannelHandlerContext.java:365)\n", + "\tat io.netty.channel.DefaultChannelPipeline.fireChannelRead(DefaultChannelPipeline.java:919)\n", + "\tat io.netty.channel.nio.AbstractNioByteChannel$NioByteUnsafe.read(AbstractNioByteChannel.java:163)\n", + "\tat io.netty.channel.nio.NioEventLoop.processSelectedKey(NioEventLoop.java:714)\n", + "\tat io.netty.channel.nio.NioEventLoop.processSelectedKeysOptimized(NioEventLoop.java:650)\n", + "\tat io.netty.channel.nio.NioEventLoop.processSelectedKeys(NioEventLoop.java:576)\n", + "\tat io.netty.channel.nio.NioEventLoop.run(NioEventLoop.java:493)\n", + "\tat io.netty.util.concurrent.SingleThreadEventExecutor$4.run(SingleThreadEventExecutor.java:989)\n", + "\tat io.netty.util.internal.ThreadExecutorMap$2.run(ThreadExecutorMap.java:74)\n", + "\tat io.netty.util.concurrent.FastThreadLocalRunnable.run(FastThreadLocalRunnable.java:30)\n", + "\t... 1 more\n", + "Caused by: java.lang.OutOfMemoryError: Java heap space\n", + "23/11/24 21:06:08 WARN org.apache.spark.scheduler.TaskSetManager: Lost task 445.1 in stage 19.0 (TID 2289) (hub-msca-bdp-dphub-students-test-ridhi-w-1.c.msca-bdp-student-ap.internal executor 7): TaskResultLost (result lost from block manager)\n", + "23/11/24 21:06:11 WARN org.apache.spark.storage.BlockManager: Failed to fetch block after 1 fetch failures. 
Most recent failure cause:\n", + "org.apache.spark.SparkException: Exception thrown in awaitResult: \n", + "\tat org.apache.spark.util.ThreadUtils$.awaitResult(ThreadUtils.scala:301)\n", + "\tat org.apache.spark.network.BlockTransferService.fetchBlockSync(BlockTransferService.scala:103)\n", + "\tat org.apache.spark.storage.BlockManager.fetchRemoteManagedBuffer(BlockManager.scala:1069)\n", + "\tat org.apache.spark.storage.BlockManager.$anonfun$getRemoteBlock$8(BlockManager.scala:1013)\n", + "\tat scala.Option.orElse(Option.scala:447)\n", + "\tat org.apache.spark.storage.BlockManager.getRemoteBlock(BlockManager.scala:1013)\n", + "\tat org.apache.spark.storage.BlockManager.getRemoteBytes(BlockManager.scala:1151)\n", + "\tat org.apache.spark.scheduler.TaskResultGetter$$anon$3.$anonfun$run$1(TaskResultGetter.scala:88)\n", + "\tat scala.runtime.java8.JFunction0$mcV$sp.apply(JFunction0$mcV$sp.java:23)\n", + "\tat org.apache.spark.util.Utils$.logUncaughtExceptions(Utils.scala:1996)\n", + "\tat org.apache.spark.scheduler.TaskResultGetter$$anon$3.run(TaskResultGetter.scala:63)\n", + "\tat java.util.concurrent.ThreadPoolExecutor.runWorker(ThreadPoolExecutor.java:1149)\n", + "\tat java.util.concurrent.ThreadPoolExecutor$Worker.run(ThreadPoolExecutor.java:624)\n", + "\tat java.lang.Thread.run(Thread.java:750)\n", + "Caused by: java.util.concurrent.ExecutionException: Boxed Error\n", + "\tat scala.concurrent.impl.Promise$.resolver(Promise.scala:87)\n", + "\tat scala.concurrent.impl.Promise$.scala$concurrent$impl$Promise$$resolveTry(Promise.scala:79)\n", + "\tat scala.concurrent.impl.Promise$DefaultPromise.tryComplete(Promise.scala:284)\n", + "\tat scala.concurrent.Promise.complete(Promise.scala:53)\n", + "\tat scala.concurrent.Promise.complete$(Promise.scala:52)\n", + "\tat scala.concurrent.impl.Promise$DefaultPromise.complete(Promise.scala:187)\n", + "\tat scala.concurrent.Promise.failure(Promise.scala:104)\n", + "\tat scala.concurrent.Promise.failure$(Promise.scala:104)\n", + "\tat scala.concurrent.impl.Promise$DefaultPromise.failure(Promise.scala:187)\n", + "\tat org.apache.spark.network.BlockTransferService$$anon$1.onBlockFetchSuccess(BlockTransferService.scala:98)\n", + "\tat org.apache.spark.network.shuffle.RetryingBlockFetcher$RetryingBlockFetchListener.onBlockFetchSuccess(RetryingBlockFetcher.java:216)\n", + "\tat org.apache.spark.network.shuffle.OneForOneBlockFetcher$ChunkCallback.onSuccess(OneForOneBlockFetcher.java:190)\n", + "\tat org.apache.spark.network.client.TransportResponseHandler.handle(TransportResponseHandler.java:171)\n", + "\tat org.apache.spark.network.server.TransportChannelHandler.channelRead0(TransportChannelHandler.java:142)\n", + "\tat org.apache.spark.network.server.TransportChannelHandler.channelRead0(TransportChannelHandler.java:53)\n", + "\tat io.netty.channel.SimpleChannelInboundHandler.channelRead(SimpleChannelInboundHandler.java:99)\n", + "\tat io.netty.channel.AbstractChannelHandlerContext.invokeChannelRead(AbstractChannelHandlerContext.java:379)\n", + "\tat io.netty.channel.AbstractChannelHandlerContext.invokeChannelRead(AbstractChannelHandlerContext.java:365)\n", + "\tat io.netty.channel.AbstractChannelHandlerContext.fireChannelRead(AbstractChannelHandlerContext.java:357)\n", + "\tat io.netty.handler.timeout.IdleStateHandler.channelRead(IdleStateHandler.java:286)\n", + "\tat io.netty.channel.AbstractChannelHandlerContext.invokeChannelRead(AbstractChannelHandlerContext.java:379)\n", + "\tat 
io.netty.channel.AbstractChannelHandlerContext.invokeChannelRead(AbstractChannelHandlerContext.java:365)\n", + "\tat io.netty.channel.AbstractChannelHandlerContext.fireChannelRead(AbstractChannelHandlerContext.java:357)\n", + "\tat io.netty.handler.codec.MessageToMessageDecoder.channelRead(MessageToMessageDecoder.java:103)\n", + "\tat io.netty.channel.AbstractChannelHandlerContext.invokeChannelRead(AbstractChannelHandlerContext.java:379)\n", + "\tat io.netty.channel.AbstractChannelHandlerContext.invokeChannelRead(AbstractChannelHandlerContext.java:365)\n", + "\tat io.netty.channel.AbstractChannelHandlerContext.fireChannelRead(AbstractChannelHandlerContext.java:357)\n", + "\tat org.apache.spark.network.util.TransportFrameDecoder.channelRead(TransportFrameDecoder.java:102)\n", + "\tat io.netty.channel.AbstractChannelHandlerContext.invokeChannelRead(AbstractChannelHandlerContext.java:379)\n", + "\tat io.netty.channel.AbstractChannelHandlerContext.invokeChannelRead(AbstractChannelHandlerContext.java:365)\n", + "\tat io.netty.channel.AbstractChannelHandlerContext.fireChannelRead(AbstractChannelHandlerContext.java:357)\n", + "\tat io.netty.channel.DefaultChannelPipeline$HeadContext.channelRead(DefaultChannelPipeline.java:1410)\n", + "\tat io.netty.channel.AbstractChannelHandlerContext.invokeChannelRead(AbstractChannelHandlerContext.java:379)\n", + "\tat io.netty.channel.AbstractChannelHandlerContext.invokeChannelRead(AbstractChannelHandlerContext.java:365)\n", + "\tat io.netty.channel.DefaultChannelPipeline.fireChannelRead(DefaultChannelPipeline.java:919)\n", + "\tat io.netty.channel.nio.AbstractNioByteChannel$NioByteUnsafe.read(AbstractNioByteChannel.java:163)\n", + "\tat io.netty.channel.nio.NioEventLoop.processSelectedKey(NioEventLoop.java:714)\n", + "\tat io.netty.channel.nio.NioEventLoop.processSelectedKeysOptimized(NioEventLoop.java:650)\n", + "\tat io.netty.channel.nio.NioEventLoop.processSelectedKeys(NioEventLoop.java:576)\n", + "\tat io.netty.channel.nio.NioEventLoop.run(NioEventLoop.java:493)\n", + "\tat io.netty.util.concurrent.SingleThreadEventExecutor$4.run(SingleThreadEventExecutor.java:989)\n", + "\tat io.netty.util.internal.ThreadExecutorMap$2.run(ThreadExecutorMap.java:74)\n", + "\tat io.netty.util.concurrent.FastThreadLocalRunnable.run(FastThreadLocalRunnable.java:30)\n", + "\t... 1 more\n", + "Caused by: java.lang.OutOfMemoryError: Java heap space\n", + "23/11/24 21:06:11 WARN org.apache.spark.scheduler.TaskSetManager: Lost task 441.1 in stage 19.0 (TID 2326) (hub-msca-bdp-dphub-students-test-ridhi-w-0.c.msca-bdp-student-ap.internal executor 6): TaskResultLost (result lost from block manager)\n", + "23/11/24 21:06:13 WARN org.apache.spark.storage.BlockManager: Failed to fetch block after 1 fetch failures. 
Most recent failure cause:\n", + "org.apache.spark.SparkException: Exception thrown in awaitResult: \n", + "\tat org.apache.spark.util.ThreadUtils$.awaitResult(ThreadUtils.scala:301)\n", + "\tat org.apache.spark.network.BlockTransferService.fetchBlockSync(BlockTransferService.scala:103)\n", + "\tat org.apache.spark.storage.BlockManager.fetchRemoteManagedBuffer(BlockManager.scala:1069)\n", + "\tat org.apache.spark.storage.BlockManager.$anonfun$getRemoteBlock$8(BlockManager.scala:1013)\n", + "\tat scala.Option.orElse(Option.scala:447)\n", + "\tat org.apache.spark.storage.BlockManager.getRemoteBlock(BlockManager.scala:1013)\n", + "\tat org.apache.spark.storage.BlockManager.getRemoteBytes(BlockManager.scala:1151)\n", + "\tat org.apache.spark.scheduler.TaskResultGetter$$anon$3.$anonfun$run$1(TaskResultGetter.scala:88)\n", + "\tat scala.runtime.java8.JFunction0$mcV$sp.apply(JFunction0$mcV$sp.java:23)\n", + "\tat org.apache.spark.util.Utils$.logUncaughtExceptions(Utils.scala:1996)\n", + "\tat org.apache.spark.scheduler.TaskResultGetter$$anon$3.run(TaskResultGetter.scala:63)\n", + "\tat java.util.concurrent.ThreadPoolExecutor.runWorker(ThreadPoolExecutor.java:1149)\n", + "\tat java.util.concurrent.ThreadPoolExecutor$Worker.run(ThreadPoolExecutor.java:624)\n", + "\tat java.lang.Thread.run(Thread.java:750)\n", + "Caused by: java.util.concurrent.ExecutionException: Boxed Error\n", + "\tat scala.concurrent.impl.Promise$.resolver(Promise.scala:87)\n", + "\tat scala.concurrent.impl.Promise$.scala$concurrent$impl$Promise$$resolveTry(Promise.scala:79)\n", + "\tat scala.concurrent.impl.Promise$DefaultPromise.tryComplete(Promise.scala:284)\n", + "\tat scala.concurrent.Promise.complete(Promise.scala:53)\n", + "\tat scala.concurrent.Promise.complete$(Promise.scala:52)\n", + "\tat scala.concurrent.impl.Promise$DefaultPromise.complete(Promise.scala:187)\n", + "\tat scala.concurrent.Promise.failure(Promise.scala:104)\n", + "\tat scala.concurrent.Promise.failure$(Promise.scala:104)\n", + "\tat scala.concurrent.impl.Promise$DefaultPromise.failure(Promise.scala:187)\n", + "\tat org.apache.spark.network.BlockTransferService$$anon$1.onBlockFetchSuccess(BlockTransferService.scala:98)\n", + "\tat org.apache.spark.network.shuffle.RetryingBlockFetcher$RetryingBlockFetchListener.onBlockFetchSuccess(RetryingBlockFetcher.java:216)\n", + "\tat org.apache.spark.network.shuffle.OneForOneBlockFetcher$ChunkCallback.onSuccess(OneForOneBlockFetcher.java:190)\n", + "\tat org.apache.spark.network.client.TransportResponseHandler.handle(TransportResponseHandler.java:171)\n", + "\tat org.apache.spark.network.server.TransportChannelHandler.channelRead0(TransportChannelHandler.java:142)\n", + "\tat org.apache.spark.network.server.TransportChannelHandler.channelRead0(TransportChannelHandler.java:53)\n", + "\tat io.netty.channel.SimpleChannelInboundHandler.channelRead(SimpleChannelInboundHandler.java:99)\n", + "\tat io.netty.channel.AbstractChannelHandlerContext.invokeChannelRead(AbstractChannelHandlerContext.java:379)\n", + "\tat io.netty.channel.AbstractChannelHandlerContext.invokeChannelRead(AbstractChannelHandlerContext.java:365)\n", + "\tat io.netty.channel.AbstractChannelHandlerContext.fireChannelRead(AbstractChannelHandlerContext.java:357)\n", + "\tat io.netty.handler.timeout.IdleStateHandler.channelRead(IdleStateHandler.java:286)\n", + "\tat io.netty.channel.AbstractChannelHandlerContext.invokeChannelRead(AbstractChannelHandlerContext.java:379)\n", + "\tat 
io.netty.channel.AbstractChannelHandlerContext.invokeChannelRead(AbstractChannelHandlerContext.java:365)\n", + "\tat io.netty.channel.AbstractChannelHandlerContext.fireChannelRead(AbstractChannelHandlerContext.java:357)\n", + "\tat io.netty.handler.codec.MessageToMessageDecoder.channelRead(MessageToMessageDecoder.java:103)\n", + "\tat io.netty.channel.AbstractChannelHandlerContext.invokeChannelRead(AbstractChannelHandlerContext.java:379)\n", + "\tat io.netty.channel.AbstractChannelHandlerContext.invokeChannelRead(AbstractChannelHandlerContext.java:365)\n", + "\tat io.netty.channel.AbstractChannelHandlerContext.fireChannelRead(AbstractChannelHandlerContext.java:357)\n", + "\tat org.apache.spark.network.util.TransportFrameDecoder.channelRead(TransportFrameDecoder.java:102)\n", + "\tat io.netty.channel.AbstractChannelHandlerContext.invokeChannelRead(AbstractChannelHandlerContext.java:379)\n", + "\tat io.netty.channel.AbstractChannelHandlerContext.invokeChannelRead(AbstractChannelHandlerContext.java:365)\n", + "\tat io.netty.channel.AbstractChannelHandlerContext.fireChannelRead(AbstractChannelHandlerContext.java:357)\n", + "\tat io.netty.channel.DefaultChannelPipeline$HeadContext.channelRead(DefaultChannelPipeline.java:1410)\n", + "\tat io.netty.channel.AbstractChannelHandlerContext.invokeChannelRead(AbstractChannelHandlerContext.java:379)\n", + "\tat io.netty.channel.AbstractChannelHandlerContext.invokeChannelRead(AbstractChannelHandlerContext.java:365)\n", + "\tat io.netty.channel.DefaultChannelPipeline.fireChannelRead(DefaultChannelPipeline.java:919)\n", + "\tat io.netty.channel.nio.AbstractNioByteChannel$NioByteUnsafe.read(AbstractNioByteChannel.java:163)\n", + "\tat io.netty.channel.nio.NioEventLoop.processSelectedKey(NioEventLoop.java:714)\n", + "\tat io.netty.channel.nio.NioEventLoop.processSelectedKeysOptimized(NioEventLoop.java:650)\n", + "\tat io.netty.channel.nio.NioEventLoop.processSelectedKeys(NioEventLoop.java:576)\n", + "\tat io.netty.channel.nio.NioEventLoop.run(NioEventLoop.java:493)\n", + "\tat io.netty.util.concurrent.SingleThreadEventExecutor$4.run(SingleThreadEventExecutor.java:989)\n", + "\tat io.netty.util.internal.ThreadExecutorMap$2.run(ThreadExecutorMap.java:74)\n", + "\tat io.netty.util.concurrent.FastThreadLocalRunnable.run(FastThreadLocalRunnable.java:30)\n", + "\t... 1 more\n", + "Caused by: java.lang.OutOfMemoryError: Java heap space\n", + "23/11/24 21:06:13 WARN org.apache.spark.scheduler.TaskSetManager: Lost task 570.0 in stage 19.0 (TID 2298) (hub-msca-bdp-dphub-students-test-ridhi-w-1.c.msca-bdp-student-ap.internal executor 7): TaskResultLost (result lost from block manager)\n", + "23/11/24 21:06:16 WARN org.apache.spark.storage.BlockManager: Failed to fetch block after 1 fetch failures. 
Most recent failure cause:\n", + "org.apache.spark.SparkException: Exception thrown in awaitResult: \n", + "\tat org.apache.spark.util.ThreadUtils$.awaitResult(ThreadUtils.scala:301)\n", + "\tat org.apache.spark.network.BlockTransferService.fetchBlockSync(BlockTransferService.scala:103)\n", + "\tat org.apache.spark.storage.BlockManager.fetchRemoteManagedBuffer(BlockManager.scala:1069)\n", + "\tat org.apache.spark.storage.BlockManager.$anonfun$getRemoteBlock$8(BlockManager.scala:1013)\n", + "\tat scala.Option.orElse(Option.scala:447)\n", + "\tat org.apache.spark.storage.BlockManager.getRemoteBlock(BlockManager.scala:1013)\n", + "\tat org.apache.spark.storage.BlockManager.getRemoteBytes(BlockManager.scala:1151)\n", + "\tat org.apache.spark.scheduler.TaskResultGetter$$anon$3.$anonfun$run$1(TaskResultGetter.scala:88)\n", + "\tat scala.runtime.java8.JFunction0$mcV$sp.apply(JFunction0$mcV$sp.java:23)\n", + "\tat org.apache.spark.util.Utils$.logUncaughtExceptions(Utils.scala:1996)\n", + "\tat org.apache.spark.scheduler.TaskResultGetter$$anon$3.run(TaskResultGetter.scala:63)\n", + "\tat java.util.concurrent.ThreadPoolExecutor.runWorker(ThreadPoolExecutor.java:1149)\n", + "\tat java.util.concurrent.ThreadPoolExecutor$Worker.run(ThreadPoolExecutor.java:624)\n", + "\tat java.lang.Thread.run(Thread.java:750)\n", + "Caused by: java.util.concurrent.ExecutionException: Boxed Error\n", + "\tat scala.concurrent.impl.Promise$.resolver(Promise.scala:87)\n", + "\tat scala.concurrent.impl.Promise$.scala$concurrent$impl$Promise$$resolveTry(Promise.scala:79)\n", + "\tat scala.concurrent.impl.Promise$DefaultPromise.tryComplete(Promise.scala:284)\n", + "\tat scala.concurrent.Promise.complete(Promise.scala:53)\n", + "\tat scala.concurrent.Promise.complete$(Promise.scala:52)\n", + "\tat scala.concurrent.impl.Promise$DefaultPromise.complete(Promise.scala:187)\n", + "\tat scala.concurrent.Promise.failure(Promise.scala:104)\n", + "\tat scala.concurrent.Promise.failure$(Promise.scala:104)\n", + "\tat scala.concurrent.impl.Promise$DefaultPromise.failure(Promise.scala:187)\n", + "\tat org.apache.spark.network.BlockTransferService$$anon$1.onBlockFetchSuccess(BlockTransferService.scala:98)\n", + "\tat org.apache.spark.network.shuffle.RetryingBlockFetcher$RetryingBlockFetchListener.onBlockFetchSuccess(RetryingBlockFetcher.java:216)\n", + "\tat org.apache.spark.network.shuffle.OneForOneBlockFetcher$ChunkCallback.onSuccess(OneForOneBlockFetcher.java:190)\n", + "\tat org.apache.spark.network.client.TransportResponseHandler.handle(TransportResponseHandler.java:171)\n", + "\tat org.apache.spark.network.server.TransportChannelHandler.channelRead0(TransportChannelHandler.java:142)\n", + "\tat org.apache.spark.network.server.TransportChannelHandler.channelRead0(TransportChannelHandler.java:53)\n", + "\tat io.netty.channel.SimpleChannelInboundHandler.channelRead(SimpleChannelInboundHandler.java:99)\n", + "\tat io.netty.channel.AbstractChannelHandlerContext.invokeChannelRead(AbstractChannelHandlerContext.java:379)\n", + "\tat io.netty.channel.AbstractChannelHandlerContext.invokeChannelRead(AbstractChannelHandlerContext.java:365)\n", + "\tat io.netty.channel.AbstractChannelHandlerContext.fireChannelRead(AbstractChannelHandlerContext.java:357)\n", + "\tat io.netty.handler.timeout.IdleStateHandler.channelRead(IdleStateHandler.java:286)\n", + "\tat io.netty.channel.AbstractChannelHandlerContext.invokeChannelRead(AbstractChannelHandlerContext.java:379)\n", + "\tat 
io.netty.channel.AbstractChannelHandlerContext.invokeChannelRead(AbstractChannelHandlerContext.java:365)\n", + "\tat io.netty.channel.AbstractChannelHandlerContext.fireChannelRead(AbstractChannelHandlerContext.java:357)\n", + "\tat io.netty.handler.codec.MessageToMessageDecoder.channelRead(MessageToMessageDecoder.java:103)\n", + "\tat io.netty.channel.AbstractChannelHandlerContext.invokeChannelRead(AbstractChannelHandlerContext.java:379)\n", + "\tat io.netty.channel.AbstractChannelHandlerContext.invokeChannelRead(AbstractChannelHandlerContext.java:365)\n", + "\tat io.netty.channel.AbstractChannelHandlerContext.fireChannelRead(AbstractChannelHandlerContext.java:357)\n", + "\tat org.apache.spark.network.util.TransportFrameDecoder.channelRead(TransportFrameDecoder.java:102)\n", + "\tat io.netty.channel.AbstractChannelHandlerContext.invokeChannelRead(AbstractChannelHandlerContext.java:379)\n", + "\tat io.netty.channel.AbstractChannelHandlerContext.invokeChannelRead(AbstractChannelHandlerContext.java:365)\n", + "\tat io.netty.channel.AbstractChannelHandlerContext.fireChannelRead(AbstractChannelHandlerContext.java:357)\n", + "\tat io.netty.channel.DefaultChannelPipeline$HeadContext.channelRead(DefaultChannelPipeline.java:1410)\n", + "\tat io.netty.channel.AbstractChannelHandlerContext.invokeChannelRead(AbstractChannelHandlerContext.java:379)\n", + "\tat io.netty.channel.AbstractChannelHandlerContext.invokeChannelRead(AbstractChannelHandlerContext.java:365)\n", + "\tat io.netty.channel.DefaultChannelPipeline.fireChannelRead(DefaultChannelPipeline.java:919)\n", + "\tat io.netty.channel.nio.AbstractNioByteChannel$NioByteUnsafe.read(AbstractNioByteChannel.java:163)\n", + "\tat io.netty.channel.nio.NioEventLoop.processSelectedKey(NioEventLoop.java:714)\n", + "\tat io.netty.channel.nio.NioEventLoop.processSelectedKeysOptimized(NioEventLoop.java:650)\n", + "\tat io.netty.channel.nio.NioEventLoop.processSelectedKeys(NioEventLoop.java:576)\n", + "\tat io.netty.channel.nio.NioEventLoop.run(NioEventLoop.java:493)\n", + "\tat io.netty.util.concurrent.SingleThreadEventExecutor$4.run(SingleThreadEventExecutor.java:989)\n", + "\tat io.netty.util.internal.ThreadExecutorMap$2.run(ThreadExecutorMap.java:74)\n", + "\tat io.netty.util.concurrent.FastThreadLocalRunnable.run(FastThreadLocalRunnable.java:30)\n", + "\t... 1 more\n", + "Caused by: java.lang.OutOfMemoryError: Java heap space\n", + "23/11/24 21:06:16 WARN org.apache.spark.scheduler.TaskSetManager: Lost task 584.0 in stage 19.0 (TID 2316) (hub-msca-bdp-dphub-students-test-ridhi-w-0.c.msca-bdp-student-ap.internal executor 3): TaskResultLost (result lost from block manager)\n", + "23/11/24 21:06:19 WARN org.apache.spark.storage.BlockManager: Failed to fetch block after 1 fetch failures. 
Most recent failure cause:\n", + "org.apache.spark.SparkException: Exception thrown in awaitResult: \n", + "\tat org.apache.spark.util.ThreadUtils$.awaitResult(ThreadUtils.scala:301)\n", + "\tat org.apache.spark.network.BlockTransferService.fetchBlockSync(BlockTransferService.scala:103)\n", + "\tat org.apache.spark.storage.BlockManager.fetchRemoteManagedBuffer(BlockManager.scala:1069)\n", + "\tat org.apache.spark.storage.BlockManager.$anonfun$getRemoteBlock$8(BlockManager.scala:1013)\n", + "\tat scala.Option.orElse(Option.scala:447)\n", + "\tat org.apache.spark.storage.BlockManager.getRemoteBlock(BlockManager.scala:1013)\n", + "\tat org.apache.spark.storage.BlockManager.getRemoteBytes(BlockManager.scala:1151)\n", + "\tat org.apache.spark.scheduler.TaskResultGetter$$anon$3.$anonfun$run$1(TaskResultGetter.scala:88)\n", + "\tat scala.runtime.java8.JFunction0$mcV$sp.apply(JFunction0$mcV$sp.java:23)\n", + "\tat org.apache.spark.util.Utils$.logUncaughtExceptions(Utils.scala:1996)\n", + "\tat org.apache.spark.scheduler.TaskResultGetter$$anon$3.run(TaskResultGetter.scala:63)\n", + "\tat java.util.concurrent.ThreadPoolExecutor.runWorker(ThreadPoolExecutor.java:1149)\n", + "\tat java.util.concurrent.ThreadPoolExecutor$Worker.run(ThreadPoolExecutor.java:624)\n", + "\tat java.lang.Thread.run(Thread.java:750)\n", + "Caused by: java.util.concurrent.ExecutionException: Boxed Error\n", + "\tat scala.concurrent.impl.Promise$.resolver(Promise.scala:87)\n", + "\tat scala.concurrent.impl.Promise$.scala$concurrent$impl$Promise$$resolveTry(Promise.scala:79)\n", + "\tat scala.concurrent.impl.Promise$DefaultPromise.tryComplete(Promise.scala:284)\n", + "\tat scala.concurrent.Promise.complete(Promise.scala:53)\n", + "\tat scala.concurrent.Promise.complete$(Promise.scala:52)\n", + "\tat scala.concurrent.impl.Promise$DefaultPromise.complete(Promise.scala:187)\n", + "\tat scala.concurrent.Promise.failure(Promise.scala:104)\n", + "\tat scala.concurrent.Promise.failure$(Promise.scala:104)\n", + "\tat scala.concurrent.impl.Promise$DefaultPromise.failure(Promise.scala:187)\n", + "\tat org.apache.spark.network.BlockTransferService$$anon$1.onBlockFetchSuccess(BlockTransferService.scala:98)\n", + "\tat org.apache.spark.network.shuffle.RetryingBlockFetcher$RetryingBlockFetchListener.onBlockFetchSuccess(RetryingBlockFetcher.java:216)\n", + "\tat org.apache.spark.network.shuffle.OneForOneBlockFetcher$ChunkCallback.onSuccess(OneForOneBlockFetcher.java:190)\n", + "\tat org.apache.spark.network.client.TransportResponseHandler.handle(TransportResponseHandler.java:171)\n", + "\tat org.apache.spark.network.server.TransportChannelHandler.channelRead0(TransportChannelHandler.java:142)\n", + "\tat org.apache.spark.network.server.TransportChannelHandler.channelRead0(TransportChannelHandler.java:53)\n", + "\tat io.netty.channel.SimpleChannelInboundHandler.channelRead(SimpleChannelInboundHandler.java:99)\n", + "\tat io.netty.channel.AbstractChannelHandlerContext.invokeChannelRead(AbstractChannelHandlerContext.java:379)\n", + "\tat io.netty.channel.AbstractChannelHandlerContext.invokeChannelRead(AbstractChannelHandlerContext.java:365)\n", + "\tat io.netty.channel.AbstractChannelHandlerContext.fireChannelRead(AbstractChannelHandlerContext.java:357)\n", + "\tat io.netty.handler.timeout.IdleStateHandler.channelRead(IdleStateHandler.java:286)\n", + "\tat io.netty.channel.AbstractChannelHandlerContext.invokeChannelRead(AbstractChannelHandlerContext.java:379)\n", + "\tat 
io.netty.channel.AbstractChannelHandlerContext.invokeChannelRead(AbstractChannelHandlerContext.java:365)\n", + "\tat io.netty.channel.AbstractChannelHandlerContext.fireChannelRead(AbstractChannelHandlerContext.java:357)\n", + "\tat io.netty.handler.codec.MessageToMessageDecoder.channelRead(MessageToMessageDecoder.java:103)\n", + "\tat io.netty.channel.AbstractChannelHandlerContext.invokeChannelRead(AbstractChannelHandlerContext.java:379)\n", + "\tat io.netty.channel.AbstractChannelHandlerContext.invokeChannelRead(AbstractChannelHandlerContext.java:365)\n", + "\tat io.netty.channel.AbstractChannelHandlerContext.fireChannelRead(AbstractChannelHandlerContext.java:357)\n", + "\tat org.apache.spark.network.util.TransportFrameDecoder.channelRead(TransportFrameDecoder.java:102)\n", + "\tat io.netty.channel.AbstractChannelHandlerContext.invokeChannelRead(AbstractChannelHandlerContext.java:379)\n", + "\tat io.netty.channel.AbstractChannelHandlerContext.invokeChannelRead(AbstractChannelHandlerContext.java:365)\n", + "\tat io.netty.channel.AbstractChannelHandlerContext.fireChannelRead(AbstractChannelHandlerContext.java:357)\n", + "\tat io.netty.channel.DefaultChannelPipeline$HeadContext.channelRead(DefaultChannelPipeline.java:1410)\n", + "\tat io.netty.channel.AbstractChannelHandlerContext.invokeChannelRead(AbstractChannelHandlerContext.java:379)\n", + "\tat io.netty.channel.AbstractChannelHandlerContext.invokeChannelRead(AbstractChannelHandlerContext.java:365)\n", + "\tat io.netty.channel.DefaultChannelPipeline.fireChannelRead(DefaultChannelPipeline.java:919)\n", + "\tat io.netty.channel.nio.AbstractNioByteChannel$NioByteUnsafe.read(AbstractNioByteChannel.java:163)\n", + "\tat io.netty.channel.nio.NioEventLoop.processSelectedKey(NioEventLoop.java:714)\n", + "\tat io.netty.channel.nio.NioEventLoop.processSelectedKeysOptimized(NioEventLoop.java:650)\n", + "\tat io.netty.channel.nio.NioEventLoop.processSelectedKeys(NioEventLoop.java:576)\n", + "\tat io.netty.channel.nio.NioEventLoop.run(NioEventLoop.java:493)\n", + "\tat io.netty.util.concurrent.SingleThreadEventExecutor$4.run(SingleThreadEventExecutor.java:989)\n", + "\tat io.netty.util.internal.ThreadExecutorMap$2.run(ThreadExecutorMap.java:74)\n", + "\tat io.netty.util.concurrent.FastThreadLocalRunnable.run(FastThreadLocalRunnable.java:30)\n", + "\t... 1 more\n", + "Caused by: java.lang.OutOfMemoryError: Java heap space\n", + "23/11/24 21:06:19 WARN org.apache.spark.scheduler.TaskSetManager: Lost task 568.0 in stage 19.0 (TID 2296) (hub-msca-bdp-dphub-students-test-ridhi-w-1.c.msca-bdp-student-ap.internal executor 7): TaskResultLost (result lost from block manager)\n", + "23/11/24 21:06:21 WARN org.apache.spark.storage.BlockManager: Failed to fetch block after 1 fetch failures. 
Most recent failure cause:\n", + "org.apache.spark.SparkException: Exception thrown in awaitResult: \n", + "\tat org.apache.spark.util.ThreadUtils$.awaitResult(ThreadUtils.scala:301)\n", + "\tat org.apache.spark.network.BlockTransferService.fetchBlockSync(BlockTransferService.scala:103)\n", + "\tat org.apache.spark.storage.BlockManager.fetchRemoteManagedBuffer(BlockManager.scala:1069)\n", + "\tat org.apache.spark.storage.BlockManager.$anonfun$getRemoteBlock$8(BlockManager.scala:1013)\n", + "\tat scala.Option.orElse(Option.scala:447)\n", + "\tat org.apache.spark.storage.BlockManager.getRemoteBlock(BlockManager.scala:1013)\n", + "\tat org.apache.spark.storage.BlockManager.getRemoteBytes(BlockManager.scala:1151)\n", + "\tat org.apache.spark.scheduler.TaskResultGetter$$anon$3.$anonfun$run$1(TaskResultGetter.scala:88)\n", + "\tat scala.runtime.java8.JFunction0$mcV$sp.apply(JFunction0$mcV$sp.java:23)\n", + "\tat org.apache.spark.util.Utils$.logUncaughtExceptions(Utils.scala:1996)\n", + "\tat org.apache.spark.scheduler.TaskResultGetter$$anon$3.run(TaskResultGetter.scala:63)\n", + "\tat java.util.concurrent.ThreadPoolExecutor.runWorker(ThreadPoolExecutor.java:1149)\n", + "\tat java.util.concurrent.ThreadPoolExecutor$Worker.run(ThreadPoolExecutor.java:624)\n", + "\tat java.lang.Thread.run(Thread.java:750)\n", + "Caused by: java.util.concurrent.ExecutionException: Boxed Error\n", + "\tat scala.concurrent.impl.Promise$.resolver(Promise.scala:87)\n", + "\tat scala.concurrent.impl.Promise$.scala$concurrent$impl$Promise$$resolveTry(Promise.scala:79)\n", + "\tat scala.concurrent.impl.Promise$DefaultPromise.tryComplete(Promise.scala:284)\n", + "\tat scala.concurrent.Promise.complete(Promise.scala:53)\n", + "\tat scala.concurrent.Promise.complete$(Promise.scala:52)\n", + "\tat scala.concurrent.impl.Promise$DefaultPromise.complete(Promise.scala:187)\n", + "\tat scala.concurrent.Promise.failure(Promise.scala:104)\n", + "\tat scala.concurrent.Promise.failure$(Promise.scala:104)\n", + "\tat scala.concurrent.impl.Promise$DefaultPromise.failure(Promise.scala:187)\n", + "\tat org.apache.spark.network.BlockTransferService$$anon$1.onBlockFetchSuccess(BlockTransferService.scala:98)\n", + "\tat org.apache.spark.network.shuffle.RetryingBlockFetcher$RetryingBlockFetchListener.onBlockFetchSuccess(RetryingBlockFetcher.java:216)\n", + "\tat org.apache.spark.network.shuffle.OneForOneBlockFetcher$ChunkCallback.onSuccess(OneForOneBlockFetcher.java:190)\n", + "\tat org.apache.spark.network.client.TransportResponseHandler.handle(TransportResponseHandler.java:171)\n", + "\tat org.apache.spark.network.server.TransportChannelHandler.channelRead0(TransportChannelHandler.java:142)\n", + "\tat org.apache.spark.network.server.TransportChannelHandler.channelRead0(TransportChannelHandler.java:53)\n", + "\tat io.netty.channel.SimpleChannelInboundHandler.channelRead(SimpleChannelInboundHandler.java:99)\n", + "\tat io.netty.channel.AbstractChannelHandlerContext.invokeChannelRead(AbstractChannelHandlerContext.java:379)\n", + "\tat io.netty.channel.AbstractChannelHandlerContext.invokeChannelRead(AbstractChannelHandlerContext.java:365)\n", + "\tat io.netty.channel.AbstractChannelHandlerContext.fireChannelRead(AbstractChannelHandlerContext.java:357)\n", + "\tat io.netty.handler.timeout.IdleStateHandler.channelRead(IdleStateHandler.java:286)\n", + "\tat io.netty.channel.AbstractChannelHandlerContext.invokeChannelRead(AbstractChannelHandlerContext.java:379)\n", + "\tat 
io.netty.channel.AbstractChannelHandlerContext.invokeChannelRead(AbstractChannelHandlerContext.java:365)\n", + "\tat io.netty.channel.AbstractChannelHandlerContext.fireChannelRead(AbstractChannelHandlerContext.java:357)\n", + "\tat io.netty.handler.codec.MessageToMessageDecoder.channelRead(MessageToMessageDecoder.java:103)\n", + "\tat io.netty.channel.AbstractChannelHandlerContext.invokeChannelRead(AbstractChannelHandlerContext.java:379)\n", + "\tat io.netty.channel.AbstractChannelHandlerContext.invokeChannelRead(AbstractChannelHandlerContext.java:365)\n", + "\tat io.netty.channel.AbstractChannelHandlerContext.fireChannelRead(AbstractChannelHandlerContext.java:357)\n", + "\tat org.apache.spark.network.util.TransportFrameDecoder.channelRead(TransportFrameDecoder.java:102)\n", + "\tat io.netty.channel.AbstractChannelHandlerContext.invokeChannelRead(AbstractChannelHandlerContext.java:379)\n", + "\tat io.netty.channel.AbstractChannelHandlerContext.invokeChannelRead(AbstractChannelHandlerContext.java:365)\n", + "\tat io.netty.channel.AbstractChannelHandlerContext.fireChannelRead(AbstractChannelHandlerContext.java:357)\n", + "\tat io.netty.channel.DefaultChannelPipeline$HeadContext.channelRead(DefaultChannelPipeline.java:1410)\n", + "\tat io.netty.channel.AbstractChannelHandlerContext.invokeChannelRead(AbstractChannelHandlerContext.java:379)\n", + "\tat io.netty.channel.AbstractChannelHandlerContext.invokeChannelRead(AbstractChannelHandlerContext.java:365)\n", + "\tat io.netty.channel.DefaultChannelPipeline.fireChannelRead(DefaultChannelPipeline.java:919)\n", + "\tat io.netty.channel.nio.AbstractNioByteChannel$NioByteUnsafe.read(AbstractNioByteChannel.java:163)\n", + "\tat io.netty.channel.nio.NioEventLoop.processSelectedKey(NioEventLoop.java:714)\n", + "\tat io.netty.channel.nio.NioEventLoop.processSelectedKeysOptimized(NioEventLoop.java:650)\n", + "\tat io.netty.channel.nio.NioEventLoop.processSelectedKeys(NioEventLoop.java:576)\n", + "\tat io.netty.channel.nio.NioEventLoop.run(NioEventLoop.java:493)\n", + "\tat io.netty.util.concurrent.SingleThreadEventExecutor$4.run(SingleThreadEventExecutor.java:989)\n", + "\tat io.netty.util.internal.ThreadExecutorMap$2.run(ThreadExecutorMap.java:74)\n", + "\tat io.netty.util.concurrent.FastThreadLocalRunnable.run(FastThreadLocalRunnable.java:30)\n", + "\t... 1 more\n", + "Caused by: java.lang.OutOfMemoryError: Java heap space\n", + "23/11/24 21:06:21 WARN org.apache.spark.scheduler.TaskSetManager: Lost task 582.0 in stage 19.0 (TID 2314) (hub-msca-bdp-dphub-students-test-ridhi-w-0.c.msca-bdp-student-ap.internal executor 6): TaskResultLost (result lost from block manager)\n", + "23/11/24 21:06:24 WARN org.apache.spark.storage.BlockManager: Failed to fetch block after 1 fetch failures. 
Most recent failure cause:\n", + "org.apache.spark.SparkException: Exception thrown in awaitResult: \n", + "\tat org.apache.spark.util.ThreadUtils$.awaitResult(ThreadUtils.scala:301)\n", + "\tat org.apache.spark.network.BlockTransferService.fetchBlockSync(BlockTransferService.scala:103)\n", + "\tat org.apache.spark.storage.BlockManager.fetchRemoteManagedBuffer(BlockManager.scala:1069)\n", + "\tat org.apache.spark.storage.BlockManager.$anonfun$getRemoteBlock$8(BlockManager.scala:1013)\n", + "\tat scala.Option.orElse(Option.scala:447)\n", + "\tat org.apache.spark.storage.BlockManager.getRemoteBlock(BlockManager.scala:1013)\n", + "\tat org.apache.spark.storage.BlockManager.getRemoteBytes(BlockManager.scala:1151)\n", + "\tat org.apache.spark.scheduler.TaskResultGetter$$anon$3.$anonfun$run$1(TaskResultGetter.scala:88)\n", + "\tat scala.runtime.java8.JFunction0$mcV$sp.apply(JFunction0$mcV$sp.java:23)\n", + "\tat org.apache.spark.util.Utils$.logUncaughtExceptions(Utils.scala:1996)\n", + "\tat org.apache.spark.scheduler.TaskResultGetter$$anon$3.run(TaskResultGetter.scala:63)\n", + "\tat java.util.concurrent.ThreadPoolExecutor.runWorker(ThreadPoolExecutor.java:1149)\n", + "\tat java.util.concurrent.ThreadPoolExecutor$Worker.run(ThreadPoolExecutor.java:624)\n", + "\tat java.lang.Thread.run(Thread.java:750)\n", + "Caused by: java.util.concurrent.ExecutionException: Boxed Error\n", + "\tat scala.concurrent.impl.Promise$.resolver(Promise.scala:87)\n", + "\tat scala.concurrent.impl.Promise$.scala$concurrent$impl$Promise$$resolveTry(Promise.scala:79)\n", + "\tat scala.concurrent.impl.Promise$DefaultPromise.tryComplete(Promise.scala:284)\n", + "\tat scala.concurrent.Promise.complete(Promise.scala:53)\n", + "\tat scala.concurrent.Promise.complete$(Promise.scala:52)\n", + "\tat scala.concurrent.impl.Promise$DefaultPromise.complete(Promise.scala:187)\n", + "\tat scala.concurrent.Promise.failure(Promise.scala:104)\n", + "\tat scala.concurrent.Promise.failure$(Promise.scala:104)\n", + "\tat scala.concurrent.impl.Promise$DefaultPromise.failure(Promise.scala:187)\n", + "\tat org.apache.spark.network.BlockTransferService$$anon$1.onBlockFetchSuccess(BlockTransferService.scala:98)\n", + "\tat org.apache.spark.network.shuffle.RetryingBlockFetcher$RetryingBlockFetchListener.onBlockFetchSuccess(RetryingBlockFetcher.java:216)\n", + "\tat org.apache.spark.network.shuffle.OneForOneBlockFetcher$ChunkCallback.onSuccess(OneForOneBlockFetcher.java:190)\n", + "\tat org.apache.spark.network.client.TransportResponseHandler.handle(TransportResponseHandler.java:171)\n", + "\tat org.apache.spark.network.server.TransportChannelHandler.channelRead0(TransportChannelHandler.java:142)\n", + "\tat org.apache.spark.network.server.TransportChannelHandler.channelRead0(TransportChannelHandler.java:53)\n", + "\tat io.netty.channel.SimpleChannelInboundHandler.channelRead(SimpleChannelInboundHandler.java:99)\n", + "\tat io.netty.channel.AbstractChannelHandlerContext.invokeChannelRead(AbstractChannelHandlerContext.java:379)\n", + "\tat io.netty.channel.AbstractChannelHandlerContext.invokeChannelRead(AbstractChannelHandlerContext.java:365)\n", + "\tat io.netty.channel.AbstractChannelHandlerContext.fireChannelRead(AbstractChannelHandlerContext.java:357)\n", + "\tat io.netty.handler.timeout.IdleStateHandler.channelRead(IdleStateHandler.java:286)\n", + "\tat io.netty.channel.AbstractChannelHandlerContext.invokeChannelRead(AbstractChannelHandlerContext.java:379)\n", + "\tat 
io.netty.channel.AbstractChannelHandlerContext.invokeChannelRead(AbstractChannelHandlerContext.java:365)\n", + "\tat io.netty.channel.AbstractChannelHandlerContext.fireChannelRead(AbstractChannelHandlerContext.java:357)\n", + "\tat io.netty.handler.codec.MessageToMessageDecoder.channelRead(MessageToMessageDecoder.java:103)\n", + "\tat io.netty.channel.AbstractChannelHandlerContext.invokeChannelRead(AbstractChannelHandlerContext.java:379)\n", + "\tat io.netty.channel.AbstractChannelHandlerContext.invokeChannelRead(AbstractChannelHandlerContext.java:365)\n", + "\tat io.netty.channel.AbstractChannelHandlerContext.fireChannelRead(AbstractChannelHandlerContext.java:357)\n", + "\tat org.apache.spark.network.util.TransportFrameDecoder.channelRead(TransportFrameDecoder.java:102)\n", + "\tat io.netty.channel.AbstractChannelHandlerContext.invokeChannelRead(AbstractChannelHandlerContext.java:379)\n", + "\tat io.netty.channel.AbstractChannelHandlerContext.invokeChannelRead(AbstractChannelHandlerContext.java:365)\n", + "\tat io.netty.channel.AbstractChannelHandlerContext.fireChannelRead(AbstractChannelHandlerContext.java:357)\n", + "\tat io.netty.channel.DefaultChannelPipeline$HeadContext.channelRead(DefaultChannelPipeline.java:1410)\n", + "\tat io.netty.channel.AbstractChannelHandlerContext.invokeChannelRead(AbstractChannelHandlerContext.java:379)\n", + "\tat io.netty.channel.AbstractChannelHandlerContext.invokeChannelRead(AbstractChannelHandlerContext.java:365)\n", + "\tat io.netty.channel.DefaultChannelPipeline.fireChannelRead(DefaultChannelPipeline.java:919)\n", + "\tat io.netty.channel.nio.AbstractNioByteChannel$NioByteUnsafe.read(AbstractNioByteChannel.java:163)\n", + "\tat io.netty.channel.nio.NioEventLoop.processSelectedKey(NioEventLoop.java:714)\n", + "\tat io.netty.channel.nio.NioEventLoop.processSelectedKeysOptimized(NioEventLoop.java:650)\n", + "\tat io.netty.channel.nio.NioEventLoop.processSelectedKeys(NioEventLoop.java:576)\n", + "\tat io.netty.channel.nio.NioEventLoop.run(NioEventLoop.java:493)\n", + "\tat io.netty.util.concurrent.SingleThreadEventExecutor$4.run(SingleThreadEventExecutor.java:989)\n", + "\tat io.netty.util.internal.ThreadExecutorMap$2.run(ThreadExecutorMap.java:74)\n", + "\tat io.netty.util.concurrent.FastThreadLocalRunnable.run(FastThreadLocalRunnable.java:30)\n", + "\t... 1 more\n", + "Caused by: java.lang.OutOfMemoryError: Java heap space\n", + "23/11/24 21:06:25 WARN org.apache.spark.scheduler.TaskSetManager: Lost task 560.0 in stage 19.0 (TID 2286) (hub-msca-bdp-dphub-students-test-ridhi-w-1.c.msca-bdp-student-ap.internal executor 7): TaskResultLost (result lost from block manager)\n", + "23/11/24 21:06:26 WARN org.apache.spark.storage.BlockManager: Failed to fetch block after 1 fetch failures. 
Most recent failure cause:\n", + "org.apache.spark.SparkException: Exception thrown in awaitResult: \n", + "\tat org.apache.spark.util.ThreadUtils$.awaitResult(ThreadUtils.scala:301)\n", + "\tat org.apache.spark.network.BlockTransferService.fetchBlockSync(BlockTransferService.scala:103)\n", + "\tat org.apache.spark.storage.BlockManager.fetchRemoteManagedBuffer(BlockManager.scala:1069)\n", + "\tat org.apache.spark.storage.BlockManager.$anonfun$getRemoteBlock$8(BlockManager.scala:1013)\n", + "\tat scala.Option.orElse(Option.scala:447)\n", + "\tat org.apache.spark.storage.BlockManager.getRemoteBlock(BlockManager.scala:1013)\n", + "\tat org.apache.spark.storage.BlockManager.getRemoteBytes(BlockManager.scala:1151)\n", + "\tat org.apache.spark.scheduler.TaskResultGetter$$anon$3.$anonfun$run$1(TaskResultGetter.scala:88)\n", + "\tat scala.runtime.java8.JFunction0$mcV$sp.apply(JFunction0$mcV$sp.java:23)\n", + "\tat org.apache.spark.util.Utils$.logUncaughtExceptions(Utils.scala:1996)\n", + "\tat org.apache.spark.scheduler.TaskResultGetter$$anon$3.run(TaskResultGetter.scala:63)\n", + "\tat java.util.concurrent.ThreadPoolExecutor.runWorker(ThreadPoolExecutor.java:1149)\n", + "\tat java.util.concurrent.ThreadPoolExecutor$Worker.run(ThreadPoolExecutor.java:624)\n", + "\tat java.lang.Thread.run(Thread.java:750)\n", + "Caused by: java.util.concurrent.ExecutionException: Boxed Error\n", + "\tat scala.concurrent.impl.Promise$.resolver(Promise.scala:87)\n", + "\tat scala.concurrent.impl.Promise$.scala$concurrent$impl$Promise$$resolveTry(Promise.scala:79)\n", + "\tat scala.concurrent.impl.Promise$DefaultPromise.tryComplete(Promise.scala:284)\n", + "\tat scala.concurrent.Promise.complete(Promise.scala:53)\n", + "\tat scala.concurrent.Promise.complete$(Promise.scala:52)\n", + "\tat scala.concurrent.impl.Promise$DefaultPromise.complete(Promise.scala:187)\n", + "\tat scala.concurrent.Promise.failure(Promise.scala:104)\n", + "\tat scala.concurrent.Promise.failure$(Promise.scala:104)\n", + "\tat scala.concurrent.impl.Promise$DefaultPromise.failure(Promise.scala:187)\n", + "\tat org.apache.spark.network.BlockTransferService$$anon$1.onBlockFetchSuccess(BlockTransferService.scala:98)\n", + "\tat org.apache.spark.network.shuffle.RetryingBlockFetcher$RetryingBlockFetchListener.onBlockFetchSuccess(RetryingBlockFetcher.java:216)\n", + "\tat org.apache.spark.network.shuffle.OneForOneBlockFetcher$ChunkCallback.onSuccess(OneForOneBlockFetcher.java:190)\n", + "\tat org.apache.spark.network.client.TransportResponseHandler.handle(TransportResponseHandler.java:171)\n", + "\tat org.apache.spark.network.server.TransportChannelHandler.channelRead0(TransportChannelHandler.java:142)\n", + "\tat org.apache.spark.network.server.TransportChannelHandler.channelRead0(TransportChannelHandler.java:53)\n", + "\tat io.netty.channel.SimpleChannelInboundHandler.channelRead(SimpleChannelInboundHandler.java:99)\n", + "\tat io.netty.channel.AbstractChannelHandlerContext.invokeChannelRead(AbstractChannelHandlerContext.java:379)\n", + "\tat io.netty.channel.AbstractChannelHandlerContext.invokeChannelRead(AbstractChannelHandlerContext.java:365)\n", + "\tat io.netty.channel.AbstractChannelHandlerContext.fireChannelRead(AbstractChannelHandlerContext.java:357)\n", + "\tat io.netty.handler.timeout.IdleStateHandler.channelRead(IdleStateHandler.java:286)\n", + "\tat io.netty.channel.AbstractChannelHandlerContext.invokeChannelRead(AbstractChannelHandlerContext.java:379)\n", + "\tat 
io.netty.channel.AbstractChannelHandlerContext.invokeChannelRead(AbstractChannelHandlerContext.java:365)\n", + "\tat io.netty.channel.AbstractChannelHandlerContext.fireChannelRead(AbstractChannelHandlerContext.java:357)\n", + "\tat io.netty.handler.codec.MessageToMessageDecoder.channelRead(MessageToMessageDecoder.java:103)\n", + "\tat io.netty.channel.AbstractChannelHandlerContext.invokeChannelRead(AbstractChannelHandlerContext.java:379)\n", + "\tat io.netty.channel.AbstractChannelHandlerContext.invokeChannelRead(AbstractChannelHandlerContext.java:365)\n", + "\tat io.netty.channel.AbstractChannelHandlerContext.fireChannelRead(AbstractChannelHandlerContext.java:357)\n", + "\tat org.apache.spark.network.util.TransportFrameDecoder.channelRead(TransportFrameDecoder.java:102)\n", + "\tat io.netty.channel.AbstractChannelHandlerContext.invokeChannelRead(AbstractChannelHandlerContext.java:379)\n", + "\tat io.netty.channel.AbstractChannelHandlerContext.invokeChannelRead(AbstractChannelHandlerContext.java:365)\n", + "\tat io.netty.channel.AbstractChannelHandlerContext.fireChannelRead(AbstractChannelHandlerContext.java:357)\n", + "\tat io.netty.channel.DefaultChannelPipeline$HeadContext.channelRead(DefaultChannelPipeline.java:1410)\n", + "\tat io.netty.channel.AbstractChannelHandlerContext.invokeChannelRead(AbstractChannelHandlerContext.java:379)\n", + "\tat io.netty.channel.AbstractChannelHandlerContext.invokeChannelRead(AbstractChannelHandlerContext.java:365)\n", + "\tat io.netty.channel.DefaultChannelPipeline.fireChannelRead(DefaultChannelPipeline.java:919)\n", + "\tat io.netty.channel.nio.AbstractNioByteChannel$NioByteUnsafe.read(AbstractNioByteChannel.java:163)\n", + "\tat io.netty.channel.nio.NioEventLoop.processSelectedKey(NioEventLoop.java:714)\n", + "\tat io.netty.channel.nio.NioEventLoop.processSelectedKeysOptimized(NioEventLoop.java:650)\n", + "\tat io.netty.channel.nio.NioEventLoop.processSelectedKeys(NioEventLoop.java:576)\n", + "\tat io.netty.channel.nio.NioEventLoop.run(NioEventLoop.java:493)\n", + "\tat io.netty.util.concurrent.SingleThreadEventExecutor$4.run(SingleThreadEventExecutor.java:989)\n", + "\tat io.netty.util.internal.ThreadExecutorMap$2.run(ThreadExecutorMap.java:74)\n", + "\tat io.netty.util.concurrent.FastThreadLocalRunnable.run(FastThreadLocalRunnable.java:30)\n", + "\t... 1 more\n", + "Caused by: java.lang.OutOfMemoryError: Java heap space\n", + "23/11/24 21:06:26 WARN org.apache.spark.scheduler.TaskSetManager: Lost task 587.0 in stage 19.0 (TID 2319) (hub-msca-bdp-dphub-students-test-ridhi-w-0.c.msca-bdp-student-ap.internal executor 6): TaskResultLost (result lost from block manager)\n", + "23/11/24 21:06:29 WARN org.apache.spark.storage.BlockManager: Failed to fetch block after 1 fetch failures. 
Most recent failure cause:\n", + "org.apache.spark.SparkException: Exception thrown in awaitResult: \n", + "\tat org.apache.spark.util.ThreadUtils$.awaitResult(ThreadUtils.scala:301)\n", + "\tat org.apache.spark.network.BlockTransferService.fetchBlockSync(BlockTransferService.scala:103)\n", + "\tat org.apache.spark.storage.BlockManager.fetchRemoteManagedBuffer(BlockManager.scala:1069)\n", + "\tat org.apache.spark.storage.BlockManager.$anonfun$getRemoteBlock$8(BlockManager.scala:1013)\n", + "\tat scala.Option.orElse(Option.scala:447)\n", + "\tat org.apache.spark.storage.BlockManager.getRemoteBlock(BlockManager.scala:1013)\n", + "\tat org.apache.spark.storage.BlockManager.getRemoteBytes(BlockManager.scala:1151)\n", + "\tat org.apache.spark.scheduler.TaskResultGetter$$anon$3.$anonfun$run$1(TaskResultGetter.scala:88)\n", + "\tat scala.runtime.java8.JFunction0$mcV$sp.apply(JFunction0$mcV$sp.java:23)\n", + "\tat org.apache.spark.util.Utils$.logUncaughtExceptions(Utils.scala:1996)\n", + "\tat org.apache.spark.scheduler.TaskResultGetter$$anon$3.run(TaskResultGetter.scala:63)\n", + "\tat java.util.concurrent.ThreadPoolExecutor.runWorker(ThreadPoolExecutor.java:1149)\n", + "\tat java.util.concurrent.ThreadPoolExecutor$Worker.run(ThreadPoolExecutor.java:624)\n", + "\tat java.lang.Thread.run(Thread.java:750)\n", + "Caused by: java.util.concurrent.ExecutionException: Boxed Error\n", + "\tat scala.concurrent.impl.Promise$.resolver(Promise.scala:87)\n", + "\tat scala.concurrent.impl.Promise$.scala$concurrent$impl$Promise$$resolveTry(Promise.scala:79)\n", + "\tat scala.concurrent.impl.Promise$DefaultPromise.tryComplete(Promise.scala:284)\n", + "\tat scala.concurrent.Promise.complete(Promise.scala:53)\n", + "\tat scala.concurrent.Promise.complete$(Promise.scala:52)\n", + "\tat scala.concurrent.impl.Promise$DefaultPromise.complete(Promise.scala:187)\n", + "\tat scala.concurrent.Promise.failure(Promise.scala:104)\n", + "\tat scala.concurrent.Promise.failure$(Promise.scala:104)\n", + "\tat scala.concurrent.impl.Promise$DefaultPromise.failure(Promise.scala:187)\n", + "\tat org.apache.spark.network.BlockTransferService$$anon$1.onBlockFetchSuccess(BlockTransferService.scala:98)\n", + "\tat org.apache.spark.network.shuffle.RetryingBlockFetcher$RetryingBlockFetchListener.onBlockFetchSuccess(RetryingBlockFetcher.java:216)\n", + "\tat org.apache.spark.network.shuffle.OneForOneBlockFetcher$ChunkCallback.onSuccess(OneForOneBlockFetcher.java:190)\n", + "\tat org.apache.spark.network.client.TransportResponseHandler.handle(TransportResponseHandler.java:171)\n", + "\tat org.apache.spark.network.server.TransportChannelHandler.channelRead0(TransportChannelHandler.java:142)\n", + "\tat org.apache.spark.network.server.TransportChannelHandler.channelRead0(TransportChannelHandler.java:53)\n", + "\tat io.netty.channel.SimpleChannelInboundHandler.channelRead(SimpleChannelInboundHandler.java:99)\n", + "\tat io.netty.channel.AbstractChannelHandlerContext.invokeChannelRead(AbstractChannelHandlerContext.java:379)\n", + "\tat io.netty.channel.AbstractChannelHandlerContext.invokeChannelRead(AbstractChannelHandlerContext.java:365)\n", + "\tat io.netty.channel.AbstractChannelHandlerContext.fireChannelRead(AbstractChannelHandlerContext.java:357)\n", + "\tat io.netty.handler.timeout.IdleStateHandler.channelRead(IdleStateHandler.java:286)\n", + "\tat io.netty.channel.AbstractChannelHandlerContext.invokeChannelRead(AbstractChannelHandlerContext.java:379)\n", + "\tat 
io.netty.channel.AbstractChannelHandlerContext.invokeChannelRead(AbstractChannelHandlerContext.java:365)\n", + "\tat io.netty.channel.AbstractChannelHandlerContext.fireChannelRead(AbstractChannelHandlerContext.java:357)\n", + "\tat io.netty.handler.codec.MessageToMessageDecoder.channelRead(MessageToMessageDecoder.java:103)\n", + "\tat io.netty.channel.AbstractChannelHandlerContext.invokeChannelRead(AbstractChannelHandlerContext.java:379)\n", + "\tat io.netty.channel.AbstractChannelHandlerContext.invokeChannelRead(AbstractChannelHandlerContext.java:365)\n", + "\tat io.netty.channel.AbstractChannelHandlerContext.fireChannelRead(AbstractChannelHandlerContext.java:357)\n", + "\tat org.apache.spark.network.util.TransportFrameDecoder.channelRead(TransportFrameDecoder.java:102)\n", + "\tat io.netty.channel.AbstractChannelHandlerContext.invokeChannelRead(AbstractChannelHandlerContext.java:379)\n", + "\tat io.netty.channel.AbstractChannelHandlerContext.invokeChannelRead(AbstractChannelHandlerContext.java:365)\n", + "\tat io.netty.channel.AbstractChannelHandlerContext.fireChannelRead(AbstractChannelHandlerContext.java:357)\n", + "\tat io.netty.channel.DefaultChannelPipeline$HeadContext.channelRead(DefaultChannelPipeline.java:1410)\n", + "\tat io.netty.channel.AbstractChannelHandlerContext.invokeChannelRead(AbstractChannelHandlerContext.java:379)\n", + "\tat io.netty.channel.AbstractChannelHandlerContext.invokeChannelRead(AbstractChannelHandlerContext.java:365)\n", + "\tat io.netty.channel.DefaultChannelPipeline.fireChannelRead(DefaultChannelPipeline.java:919)\n", + "\tat io.netty.channel.nio.AbstractNioByteChannel$NioByteUnsafe.read(AbstractNioByteChannel.java:163)\n", + "\tat io.netty.channel.nio.NioEventLoop.processSelectedKey(NioEventLoop.java:714)\n", + "\tat io.netty.channel.nio.NioEventLoop.processSelectedKeysOptimized(NioEventLoop.java:650)\n", + "\tat io.netty.channel.nio.NioEventLoop.processSelectedKeys(NioEventLoop.java:576)\n", + "\tat io.netty.channel.nio.NioEventLoop.run(NioEventLoop.java:493)\n", + "\tat io.netty.util.concurrent.SingleThreadEventExecutor$4.run(SingleThreadEventExecutor.java:989)\n", + "\tat io.netty.util.internal.ThreadExecutorMap$2.run(ThreadExecutorMap.java:74)\n", + "\tat io.netty.util.concurrent.FastThreadLocalRunnable.run(FastThreadLocalRunnable.java:30)\n", + "\t... 1 more\n", + "Caused by: java.lang.OutOfMemoryError: Java heap space\n", + "23/11/24 21:06:29 WARN org.apache.spark.scheduler.TaskSetManager: Lost task 586.0 in stage 19.0 (TID 2318) (hub-msca-bdp-dphub-students-test-ridhi-w-0.c.msca-bdp-student-ap.internal executor 6): TaskResultLost (result lost from block manager)\n", + "23/11/24 21:06:32 WARN org.apache.spark.storage.BlockManager: Failed to fetch block after 1 fetch failures. 
Most recent failure cause:\n", + "org.apache.spark.SparkException: Exception thrown in awaitResult: \n", + "\tat org.apache.spark.util.ThreadUtils$.awaitResult(ThreadUtils.scala:301)\n", + "\tat org.apache.spark.network.BlockTransferService.fetchBlockSync(BlockTransferService.scala:103)\n", + "\tat org.apache.spark.storage.BlockManager.fetchRemoteManagedBuffer(BlockManager.scala:1069)\n", + "\tat org.apache.spark.storage.BlockManager.$anonfun$getRemoteBlock$8(BlockManager.scala:1013)\n", + "\tat scala.Option.orElse(Option.scala:447)\n", + "\tat org.apache.spark.storage.BlockManager.getRemoteBlock(BlockManager.scala:1013)\n", + "\tat org.apache.spark.storage.BlockManager.getRemoteBytes(BlockManager.scala:1151)\n", + "\tat org.apache.spark.scheduler.TaskResultGetter$$anon$3.$anonfun$run$1(TaskResultGetter.scala:88)\n", + "\tat scala.runtime.java8.JFunction0$mcV$sp.apply(JFunction0$mcV$sp.java:23)\n", + "\tat org.apache.spark.util.Utils$.logUncaughtExceptions(Utils.scala:1996)\n", + "\tat org.apache.spark.scheduler.TaskResultGetter$$anon$3.run(TaskResultGetter.scala:63)\n", + "\tat java.util.concurrent.ThreadPoolExecutor.runWorker(ThreadPoolExecutor.java:1149)\n", + "\tat java.util.concurrent.ThreadPoolExecutor$Worker.run(ThreadPoolExecutor.java:624)\n", + "\tat java.lang.Thread.run(Thread.java:750)\n", + "Caused by: java.util.concurrent.ExecutionException: Boxed Error\n", + "\tat scala.concurrent.impl.Promise$.resolver(Promise.scala:87)\n", + "\tat scala.concurrent.impl.Promise$.scala$concurrent$impl$Promise$$resolveTry(Promise.scala:79)\n", + "\tat scala.concurrent.impl.Promise$DefaultPromise.tryComplete(Promise.scala:284)\n", + "\tat scala.concurrent.Promise.complete(Promise.scala:53)\n", + "\tat scala.concurrent.Promise.complete$(Promise.scala:52)\n", + "\tat scala.concurrent.impl.Promise$DefaultPromise.complete(Promise.scala:187)\n", + "\tat scala.concurrent.Promise.failure(Promise.scala:104)\n", + "\tat scala.concurrent.Promise.failure$(Promise.scala:104)\n", + "\tat scala.concurrent.impl.Promise$DefaultPromise.failure(Promise.scala:187)\n", + "\tat org.apache.spark.network.BlockTransferService$$anon$1.onBlockFetchSuccess(BlockTransferService.scala:98)\n", + "\tat org.apache.spark.network.shuffle.RetryingBlockFetcher$RetryingBlockFetchListener.onBlockFetchSuccess(RetryingBlockFetcher.java:216)\n", + "\tat org.apache.spark.network.shuffle.OneForOneBlockFetcher$ChunkCallback.onSuccess(OneForOneBlockFetcher.java:190)\n", + "\tat org.apache.spark.network.client.TransportResponseHandler.handle(TransportResponseHandler.java:171)\n", + "\tat org.apache.spark.network.server.TransportChannelHandler.channelRead0(TransportChannelHandler.java:142)\n", + "\tat org.apache.spark.network.server.TransportChannelHandler.channelRead0(TransportChannelHandler.java:53)\n", + "\tat io.netty.channel.SimpleChannelInboundHandler.channelRead(SimpleChannelInboundHandler.java:99)\n", + "\tat io.netty.channel.AbstractChannelHandlerContext.invokeChannelRead(AbstractChannelHandlerContext.java:379)\n", + "\tat io.netty.channel.AbstractChannelHandlerContext.invokeChannelRead(AbstractChannelHandlerContext.java:365)\n", + "\tat io.netty.channel.AbstractChannelHandlerContext.fireChannelRead(AbstractChannelHandlerContext.java:357)\n", + "\tat io.netty.handler.timeout.IdleStateHandler.channelRead(IdleStateHandler.java:286)\n", + "\tat io.netty.channel.AbstractChannelHandlerContext.invokeChannelRead(AbstractChannelHandlerContext.java:379)\n", + "\tat 
io.netty.channel.AbstractChannelHandlerContext.invokeChannelRead(AbstractChannelHandlerContext.java:365)\n", + "\tat io.netty.channel.AbstractChannelHandlerContext.fireChannelRead(AbstractChannelHandlerContext.java:357)\n", + "\tat io.netty.handler.codec.MessageToMessageDecoder.channelRead(MessageToMessageDecoder.java:103)\n", + "\tat io.netty.channel.AbstractChannelHandlerContext.invokeChannelRead(AbstractChannelHandlerContext.java:379)\n", + "\tat io.netty.channel.AbstractChannelHandlerContext.invokeChannelRead(AbstractChannelHandlerContext.java:365)\n", + "\tat io.netty.channel.AbstractChannelHandlerContext.fireChannelRead(AbstractChannelHandlerContext.java:357)\n", + "\tat org.apache.spark.network.util.TransportFrameDecoder.channelRead(TransportFrameDecoder.java:102)\n", + "\tat io.netty.channel.AbstractChannelHandlerContext.invokeChannelRead(AbstractChannelHandlerContext.java:379)\n", + "\tat io.netty.channel.AbstractChannelHandlerContext.invokeChannelRead(AbstractChannelHandlerContext.java:365)\n", + "\tat io.netty.channel.AbstractChannelHandlerContext.fireChannelRead(AbstractChannelHandlerContext.java:357)\n", + "\tat io.netty.channel.DefaultChannelPipeline$HeadContext.channelRead(DefaultChannelPipeline.java:1410)\n", + "\tat io.netty.channel.AbstractChannelHandlerContext.invokeChannelRead(AbstractChannelHandlerContext.java:379)\n", + "\tat io.netty.channel.AbstractChannelHandlerContext.invokeChannelRead(AbstractChannelHandlerContext.java:365)\n", + "\tat io.netty.channel.DefaultChannelPipeline.fireChannelRead(DefaultChannelPipeline.java:919)\n", + "\tat io.netty.channel.nio.AbstractNioByteChannel$NioByteUnsafe.read(AbstractNioByteChannel.java:163)\n", + "\tat io.netty.channel.nio.NioEventLoop.processSelectedKey(NioEventLoop.java:714)\n", + "\tat io.netty.channel.nio.NioEventLoop.processSelectedKeysOptimized(NioEventLoop.java:650)\n", + "\tat io.netty.channel.nio.NioEventLoop.processSelectedKeys(NioEventLoop.java:576)\n", + "\tat io.netty.channel.nio.NioEventLoop.run(NioEventLoop.java:493)\n", + "\tat io.netty.util.concurrent.SingleThreadEventExecutor$4.run(SingleThreadEventExecutor.java:989)\n", + "\tat io.netty.util.internal.ThreadExecutorMap$2.run(ThreadExecutorMap.java:74)\n", + "\tat io.netty.util.concurrent.FastThreadLocalRunnable.run(FastThreadLocalRunnable.java:30)\n", + "\t... 
1 more\n", + "Caused by: java.lang.OutOfMemoryError: Java heap space\n", + "23/11/24 21:06:32 WARN org.apache.spark.scheduler.TaskSetManager: Lost task 583.0 in stage 19.0 (TID 2315) (hub-msca-bdp-dphub-students-test-ridhi-w-0.c.msca-bdp-student-ap.internal executor 6): TaskResultLost (result lost from block manager)\n" + ] + } + ], + "source": [ + "df_2018 = df_2018.withColumn(\"start_timestamp\", unix_timestamp(\"start_timestamp\"))\n", + "df_2018 = df_2018.withColumn(\"end_timestamp\", unix_timestamp(\"end_timestamp\"))\n", + "\n", + "# Calculate trip duration in seconds\n", + "df_2018 = df_2018.withColumn(\"trip_duration\", col(\"end_timestamp\") - col(\"start_timestamp\"))\n", + "\n", + "# Convert seconds to minutes\n", + "df_2018 = df_2018.withColumn(\"trip_duration\", col(\"trip_duration\") / 60)\n", + "\n", + "df_2018.select('trip_duration').show()" + ] + }, + { + "cell_type": "code", + "execution_count": 6, + "id": "4aef9ce8-cb8d-46d9-a192-a76541afec6c", + "metadata": {}, + "outputs": [], + "source": [ + "import numpy as np\n", + "import pandas as pd\n", + "import matplotlib.pyplot as plt\n", + "import matplotlib.cm as cm\n", + "from sklearn.cluster import DBSCAN\n", + "from sklearn import metrics\n", + "from pyspark.sql.functions import col, radians, acos, sin, cos, lit\n", + "import time\n", + "from pyspark.ml.feature import VectorAssembler\n" + ] + }, + { + "cell_type": "code", + "execution_count": 7, + "id": "e4027b7a-39f5-4aa9-a6cc-937dec6a9952", + "metadata": {}, + "outputs": [ + { + "name": "stdout", + "output_type": "stream", + "text": [ + "root\n", + " |-- ID: string (nullable = true)\n", + " |-- start_timestamp: long (nullable = true)\n", + " |-- end_timestamp: long (nullable = true)\n", + " |-- seconds: integer (nullable = true)\n", + " |-- miles: double (nullable = true)\n", + " |-- pickup_tract: long (nullable = true)\n", + " |-- dropoff_tract: long (nullable = true)\n", + " |-- pickup_area: integer (nullable = true)\n", + " |-- dropoff_area: integer (nullable = true)\n", + " |-- Fare: double (nullable = true)\n", + " |-- Tip: integer (nullable = true)\n", + " |-- total: double (nullable = true)\n", + " |-- pickup_lat: double (nullable = true)\n", + " |-- pickup_lon: double (nullable = true)\n", + " |-- dropoff_lat: double (nullable = true)\n", + " |-- dropoff_lon: string (nullable = true)\n", + " |-- month: integer (nullable = true)\n", + " |-- day_of_month: integer (nullable = true)\n", + " |-- hour: integer (nullable = true)\n", + " |-- day: integer (nullable = true)\n", + " |-- trip_duration: double (nullable = true)\n", + "\n" + ] + } + ], + "source": [ + "df_all.printSchema()" + ] + }, + { + "cell_type": "code", + "execution_count": 8, + "id": "34facafd-2864-4ac3-b95f-929cf66e695c", + "metadata": {}, + "outputs": [ + { + "name": "stderr", + "output_type": "stream", + "text": [ + "23/11/24 21:00:17 WARN org.apache.spark.storage.BlockManager: Failed to fetch block after 1 fetch failures. 
Most recent failure cause:\n", + "org.apache.spark.SparkException: Exception thrown in awaitResult: \n", + "[... repeated awaitResult / Netty channelRead frames and 'Caused by: java.lang.OutOfMemoryError: Java heap space' traces omitted, including the java.nio.HeapByteBuffer allocation inside io.netty.buffer.CompositeByteBuf.nioBuffer; TaskSetManager reports lost task 437.0 in stage 19.0 (TID 2157, executor 7) at 21:00:17, and another BlockManager fetch failure at 21:00:24 produces the same trace, whose tail continues below ...]\n", + "\t... 
1 more\n", + "Caused by: java.lang.OutOfMemoryError: Java heap space\n", + "\tat java.nio.HeapByteBuffer.(HeapByteBuffer.java:57)\n", + "\tat java.nio.ByteBuffer.allocate(ByteBuffer.java:335)\n", + "\tat io.netty.buffer.CompositeByteBuf.nioBuffer(CompositeByteBuf.java:1667)\n", + "\tat io.netty.buffer.AbstractDerivedByteBuf.nioBuffer(AbstractDerivedByteBuf.java:118)\n", + "\tat io.netty.buffer.AbstractByteBuf.nioBuffer(AbstractByteBuf.java:1232)\n", + "\tat org.apache.spark.network.buffer.NettyManagedBuffer.nioByteBuffer(NettyManagedBuffer.java:46)\n", + "\tat org.apache.spark.network.BlockTransferService$$anon$1.onBlockFetchSuccess(BlockTransferService.scala:94)\n", + "\tat org.apache.spark.network.shuffle.RetryingBlockFetcher$RetryingBlockFetchListener.onBlockFetchSuccess(RetryingBlockFetcher.java:216)\n", + "\tat org.apache.spark.network.shuffle.OneForOneBlockFetcher$ChunkCallback.onSuccess(OneForOneBlockFetcher.java:190)\n", + "\tat org.apache.spark.network.client.TransportResponseHandler.handle(TransportResponseHandler.java:171)\n", + "\tat org.apache.spark.network.server.TransportChannelHandler.channelRead0(TransportChannelHandler.java:142)\n", + "\tat org.apache.spark.network.server.TransportChannelHandler.channelRead0(TransportChannelHandler.java:53)\n", + "\tat io.netty.channel.SimpleChannelInboundHandler.channelRead(SimpleChannelInboundHandler.java:99)\n", + "\tat io.netty.channel.AbstractChannelHandlerContext.invokeChannelRead(AbstractChannelHandlerContext.java:379)\n", + "\tat io.netty.channel.AbstractChannelHandlerContext.invokeChannelRead(AbstractChannelHandlerContext.java:365)\n", + "\tat io.netty.channel.AbstractChannelHandlerContext.fireChannelRead(AbstractChannelHandlerContext.java:357)\n", + "\tat io.netty.handler.timeout.IdleStateHandler.channelRead(IdleStateHandler.java:286)\n", + "\tat io.netty.channel.AbstractChannelHandlerContext.invokeChannelRead(AbstractChannelHandlerContext.java:379)\n", + "\tat io.netty.channel.AbstractChannelHandlerContext.invokeChannelRead(AbstractChannelHandlerContext.java:365)\n", + "\tat io.netty.channel.AbstractChannelHandlerContext.fireChannelRead(AbstractChannelHandlerContext.java:357)\n", + "\tat io.netty.handler.codec.MessageToMessageDecoder.channelRead(MessageToMessageDecoder.java:103)\n", + "\tat io.netty.channel.AbstractChannelHandlerContext.invokeChannelRead(AbstractChannelHandlerContext.java:379)\n", + "\tat io.netty.channel.AbstractChannelHandlerContext.invokeChannelRead(AbstractChannelHandlerContext.java:365)\n", + "\tat io.netty.channel.AbstractChannelHandlerContext.fireChannelRead(AbstractChannelHandlerContext.java:357)\n", + "\tat org.apache.spark.network.util.TransportFrameDecoder.channelRead(TransportFrameDecoder.java:102)\n", + "\tat io.netty.channel.AbstractChannelHandlerContext.invokeChannelRead(AbstractChannelHandlerContext.java:379)\n", + "\tat io.netty.channel.AbstractChannelHandlerContext.invokeChannelRead(AbstractChannelHandlerContext.java:365)\n", + "\tat io.netty.channel.AbstractChannelHandlerContext.fireChannelRead(AbstractChannelHandlerContext.java:357)\n", + "\tat io.netty.channel.DefaultChannelPipeline$HeadContext.channelRead(DefaultChannelPipeline.java:1410)\n", + "\tat io.netty.channel.AbstractChannelHandlerContext.invokeChannelRead(AbstractChannelHandlerContext.java:379)\n", + "\tat io.netty.channel.AbstractChannelHandlerContext.invokeChannelRead(AbstractChannelHandlerContext.java:365)\n", + "\tat io.netty.channel.DefaultChannelPipeline.fireChannelRead(DefaultChannelPipeline.java:919)\n", + "23/11/24 
21:00:24 WARN org.apache.spark.scheduler.TaskSetManager: Lost task 428.0 in stage 19.0 (TID 2148) (hub-msca-bdp-dphub-students-test-ridhi-w-0.c.msca-bdp-student-ap.internal executor 6): TaskResultLost (result lost from block manager)\n", + "23/11/24 21:00:27 WARN org.apache.spark.storage.BlockManager: Failed to fetch block after 1 fetch failures. Most recent failure cause:\n", + "org.apache.spark.SparkException: Exception thrown in awaitResult: \n", + "\tat org.apache.spark.util.ThreadUtils$.awaitResult(ThreadUtils.scala:301)\n", + "\tat org.apache.spark.network.BlockTransferService.fetchBlockSync(BlockTransferService.scala:103)\n", + "\tat org.apache.spark.storage.BlockManager.fetchRemoteManagedBuffer(BlockManager.scala:1069)\n", + "\tat org.apache.spark.storage.BlockManager.$anonfun$getRemoteBlock$8(BlockManager.scala:1013)\n", + "\tat scala.Option.orElse(Option.scala:447)\n", + "\tat org.apache.spark.storage.BlockManager.getRemoteBlock(BlockManager.scala:1013)\n", + "\tat org.apache.spark.storage.BlockManager.getRemoteBytes(BlockManager.scala:1151)\n", + "\tat org.apache.spark.scheduler.TaskResultGetter$$anon$3.$anonfun$run$1(TaskResultGetter.scala:88)\n", + "\tat scala.runtime.java8.JFunction0$mcV$sp.apply(JFunction0$mcV$sp.java:23)\n", + "\tat org.apache.spark.util.Utils$.logUncaughtExceptions(Utils.scala:1996)\n", + "\tat org.apache.spark.scheduler.TaskResultGetter$$anon$3.run(TaskResultGetter.scala:63)\n", + "\tat java.util.concurrent.ThreadPoolExecutor.runWorker(ThreadPoolExecutor.java:1149)\n", + "\tat java.util.concurrent.ThreadPoolExecutor$Worker.run(ThreadPoolExecutor.java:624)\n", + "\tat java.lang.Thread.run(Thread.java:750)\n", + "Caused by: java.util.concurrent.ExecutionException: Boxed Error\n", + "\tat scala.concurrent.impl.Promise$.resolver(Promise.scala:87)\n", + "\tat scala.concurrent.impl.Promise$.scala$concurrent$impl$Promise$$resolveTry(Promise.scala:79)\n", + "\tat scala.concurrent.impl.Promise$DefaultPromise.tryComplete(Promise.scala:284)\n", + "\tat scala.concurrent.Promise.complete(Promise.scala:53)\n", + "\tat scala.concurrent.Promise.complete$(Promise.scala:52)\n", + "\tat scala.concurrent.impl.Promise$DefaultPromise.complete(Promise.scala:187)\n", + "\tat scala.concurrent.Promise.failure(Promise.scala:104)\n", + "\tat scala.concurrent.Promise.failure$(Promise.scala:104)\n", + "\tat scala.concurrent.impl.Promise$DefaultPromise.failure(Promise.scala:187)\n", + "\tat org.apache.spark.network.BlockTransferService$$anon$1.onBlockFetchSuccess(BlockTransferService.scala:98)\n", + "\tat org.apache.spark.network.shuffle.RetryingBlockFetcher$RetryingBlockFetchListener.onBlockFetchSuccess(RetryingBlockFetcher.java:216)\n", + "\tat org.apache.spark.network.shuffle.OneForOneBlockFetcher$ChunkCallback.onSuccess(OneForOneBlockFetcher.java:190)\n", + "\tat org.apache.spark.network.client.TransportResponseHandler.handle(TransportResponseHandler.java:171)\n", + "\tat org.apache.spark.network.server.TransportChannelHandler.channelRead0(TransportChannelHandler.java:142)\n", + "\tat org.apache.spark.network.server.TransportChannelHandler.channelRead0(TransportChannelHandler.java:53)\n", + "\tat io.netty.channel.SimpleChannelInboundHandler.channelRead(SimpleChannelInboundHandler.java:99)\n", + "\tat io.netty.channel.AbstractChannelHandlerContext.invokeChannelRead(AbstractChannelHandlerContext.java:379)\n", + "\tat io.netty.channel.AbstractChannelHandlerContext.invokeChannelRead(AbstractChannelHandlerContext.java:365)\n", + "\tat 
io.netty.channel.AbstractChannelHandlerContext.fireChannelRead(AbstractChannelHandlerContext.java:357)\n", + "\tat io.netty.handler.timeout.IdleStateHandler.channelRead(IdleStateHandler.java:286)\n", + "\tat io.netty.channel.AbstractChannelHandlerContext.invokeChannelRead(AbstractChannelHandlerContext.java:379)\n", + "\tat io.netty.channel.AbstractChannelHandlerContext.invokeChannelRead(AbstractChannelHandlerContext.java:365)\n", + "\tat io.netty.channel.AbstractChannelHandlerContext.fireChannelRead(AbstractChannelHandlerContext.java:357)\n", + "\tat io.netty.handler.codec.MessageToMessageDecoder.channelRead(MessageToMessageDecoder.java:103)\n", + "\tat io.netty.channel.AbstractChannelHandlerContext.invokeChannelRead(AbstractChannelHandlerContext.java:379)\n", + "\tat io.netty.channel.AbstractChannelHandlerContext.invokeChannelRead(AbstractChannelHandlerContext.java:365)\n", + "\tat io.netty.channel.AbstractChannelHandlerContext.fireChannelRead(AbstractChannelHandlerContext.java:357)\n", + "\tat org.apache.spark.network.util.TransportFrameDecoder.channelRead(TransportFrameDecoder.java:102)\n", + "\tat io.netty.channel.AbstractChannelHandlerContext.invokeChannelRead(AbstractChannelHandlerContext.java:379)\n", + "\tat io.netty.channel.AbstractChannelHandlerContext.invokeChannelRead(AbstractChannelHandlerContext.java:365)\n", + "\tat io.netty.channel.AbstractChannelHandlerContext.fireChannelRead(AbstractChannelHandlerContext.java:357)\n", + "\tat io.netty.channel.DefaultChannelPipeline$HeadContext.channelRead(DefaultChannelPipeline.java:1410)\n", + "\tat io.netty.channel.AbstractChannelHandlerContext.invokeChannelRead(AbstractChannelHandlerContext.java:379)\n", + "\tat io.netty.channel.AbstractChannelHandlerContext.invokeChannelRead(AbstractChannelHandlerContext.java:365)\n", + "\tat io.netty.channel.DefaultChannelPipeline.fireChannelRead(DefaultChannelPipeline.java:919)\n", + "\tat io.netty.channel.nio.AbstractNioByteChannel$NioByteUnsafe.read(AbstractNioByteChannel.java:163)\n", + "\tat io.netty.channel.nio.NioEventLoop.processSelectedKey(NioEventLoop.java:714)\n", + "\tat io.netty.channel.nio.NioEventLoop.processSelectedKeysOptimized(NioEventLoop.java:650)\n", + "\tat io.netty.channel.nio.NioEventLoop.processSelectedKeys(NioEventLoop.java:576)\n", + "\tat io.netty.channel.nio.NioEventLoop.run(NioEventLoop.java:493)\n", + "\tat io.netty.util.concurrent.SingleThreadEventExecutor$4.run(SingleThreadEventExecutor.java:989)\n", + "\tat io.netty.util.internal.ThreadExecutorMap$2.run(ThreadExecutorMap.java:74)\n", + "\tat io.netty.util.concurrent.FastThreadLocalRunnable.run(FastThreadLocalRunnable.java:30)\n", + "\t... 1 more\n", + "Caused by: java.lang.OutOfMemoryError: Java heap space\n", + "23/11/24 21:00:27 WARN org.apache.spark.scheduler.TaskSetManager: Lost task 431.0 in stage 19.0 (TID 2151) (hub-msca-bdp-dphub-students-test-ridhi-w-1.c.msca-bdp-student-ap.internal executor 7): TaskResultLost (result lost from block manager)\n", + "23/11/24 21:00:30 WARN org.apache.spark.storage.BlockManager: Failed to fetch block after 1 fetch failures. 
Most recent failure cause:\n", + "org.apache.spark.SparkException: Exception thrown in awaitResult: \n", + "\tat org.apache.spark.util.ThreadUtils$.awaitResult(ThreadUtils.scala:301)\n", + "\tat org.apache.spark.network.BlockTransferService.fetchBlockSync(BlockTransferService.scala:103)\n", + "\tat org.apache.spark.storage.BlockManager.fetchRemoteManagedBuffer(BlockManager.scala:1069)\n", + "\tat org.apache.spark.storage.BlockManager.$anonfun$getRemoteBlock$8(BlockManager.scala:1013)\n", + "\tat scala.Option.orElse(Option.scala:447)\n", + "\tat org.apache.spark.storage.BlockManager.getRemoteBlock(BlockManager.scala:1013)\n", + "\tat org.apache.spark.storage.BlockManager.getRemoteBytes(BlockManager.scala:1151)\n", + "\tat org.apache.spark.scheduler.TaskResultGetter$$anon$3.$anonfun$run$1(TaskResultGetter.scala:88)\n", + "\tat scala.runtime.java8.JFunction0$mcV$sp.apply(JFunction0$mcV$sp.java:23)\n", + "\tat org.apache.spark.util.Utils$.logUncaughtExceptions(Utils.scala:1996)\n", + "\tat org.apache.spark.scheduler.TaskResultGetter$$anon$3.run(TaskResultGetter.scala:63)\n", + "\tat java.util.concurrent.ThreadPoolExecutor.runWorker(ThreadPoolExecutor.java:1149)\n", + "\tat java.util.concurrent.ThreadPoolExecutor$Worker.run(ThreadPoolExecutor.java:624)\n", + "\tat java.lang.Thread.run(Thread.java:750)\n", + "Caused by: java.util.concurrent.ExecutionException: Boxed Error\n", + "\tat scala.concurrent.impl.Promise$.resolver(Promise.scala:87)\n", + "\tat scala.concurrent.impl.Promise$.scala$concurrent$impl$Promise$$resolveTry(Promise.scala:79)\n", + "\tat scala.concurrent.impl.Promise$DefaultPromise.tryComplete(Promise.scala:284)\n", + "\tat scala.concurrent.Promise.complete(Promise.scala:53)\n", + "\tat scala.concurrent.Promise.complete$(Promise.scala:52)\n", + "\tat scala.concurrent.impl.Promise$DefaultPromise.complete(Promise.scala:187)\n", + "\tat scala.concurrent.Promise.failure(Promise.scala:104)\n", + "\tat scala.concurrent.Promise.failure$(Promise.scala:104)\n", + "\tat scala.concurrent.impl.Promise$DefaultPromise.failure(Promise.scala:187)\n", + "\tat org.apache.spark.network.BlockTransferService$$anon$1.onBlockFetchSuccess(BlockTransferService.scala:98)\n", + "\tat org.apache.spark.network.shuffle.RetryingBlockFetcher$RetryingBlockFetchListener.onBlockFetchSuccess(RetryingBlockFetcher.java:216)\n", + "\tat org.apache.spark.network.shuffle.OneForOneBlockFetcher$ChunkCallback.onSuccess(OneForOneBlockFetcher.java:190)\n", + "\tat org.apache.spark.network.client.TransportResponseHandler.handle(TransportResponseHandler.java:171)\n", + "\tat org.apache.spark.network.server.TransportChannelHandler.channelRead0(TransportChannelHandler.java:142)\n", + "\tat org.apache.spark.network.server.TransportChannelHandler.channelRead0(TransportChannelHandler.java:53)\n", + "\tat io.netty.channel.SimpleChannelInboundHandler.channelRead(SimpleChannelInboundHandler.java:99)\n", + "\tat io.netty.channel.AbstractChannelHandlerContext.invokeChannelRead(AbstractChannelHandlerContext.java:379)\n", + "\tat io.netty.channel.AbstractChannelHandlerContext.invokeChannelRead(AbstractChannelHandlerContext.java:365)\n", + "\tat io.netty.channel.AbstractChannelHandlerContext.fireChannelRead(AbstractChannelHandlerContext.java:357)\n", + "\tat io.netty.handler.timeout.IdleStateHandler.channelRead(IdleStateHandler.java:286)\n", + "\tat io.netty.channel.AbstractChannelHandlerContext.invokeChannelRead(AbstractChannelHandlerContext.java:379)\n", + "\tat 
io.netty.channel.AbstractChannelHandlerContext.invokeChannelRead(AbstractChannelHandlerContext.java:365)\n", + "\tat io.netty.channel.AbstractChannelHandlerContext.fireChannelRead(AbstractChannelHandlerContext.java:357)\n", + "\tat io.netty.handler.codec.MessageToMessageDecoder.channelRead(MessageToMessageDecoder.java:103)\n", + "\tat io.netty.channel.AbstractChannelHandlerContext.invokeChannelRead(AbstractChannelHandlerContext.java:379)\n", + "\tat io.netty.channel.AbstractChannelHandlerContext.invokeChannelRead(AbstractChannelHandlerContext.java:365)\n", + "\tat io.netty.channel.AbstractChannelHandlerContext.fireChannelRead(AbstractChannelHandlerContext.java:357)\n", + "\tat org.apache.spark.network.util.TransportFrameDecoder.channelRead(TransportFrameDecoder.java:102)\n", + "\tat io.netty.channel.AbstractChannelHandlerContext.invokeChannelRead(AbstractChannelHandlerContext.java:379)\n", + "\tat io.netty.channel.AbstractChannelHandlerContext.invokeChannelRead(AbstractChannelHandlerContext.java:365)\n", + "\tat io.netty.channel.AbstractChannelHandlerContext.fireChannelRead(AbstractChannelHandlerContext.java:357)\n", + "\tat io.netty.channel.DefaultChannelPipeline$HeadContext.channelRead(DefaultChannelPipeline.java:1410)\n", + "\tat io.netty.channel.AbstractChannelHandlerContext.invokeChannelRead(AbstractChannelHandlerContext.java:379)\n", + "\tat io.netty.channel.AbstractChannelHandlerContext.invokeChannelRead(AbstractChannelHandlerContext.java:365)\n", + "\tat io.netty.channel.DefaultChannelPipeline.fireChannelRead(DefaultChannelPipeline.java:919)\n", + "\tat io.netty.channel.nio.AbstractNioByteChannel$NioByteUnsafe.read(AbstractNioByteChannel.java:163)\n", + "\tat io.netty.channel.nio.NioEventLoop.processSelectedKey(NioEventLoop.java:714)\n", + "\tat io.netty.channel.nio.NioEventLoop.processSelectedKeysOptimized(NioEventLoop.java:650)\n", + "\tat io.netty.channel.nio.NioEventLoop.processSelectedKeys(NioEventLoop.java:576)\n", + "\tat io.netty.channel.nio.NioEventLoop.run(NioEventLoop.java:493)\n", + "\tat io.netty.util.concurrent.SingleThreadEventExecutor$4.run(SingleThreadEventExecutor.java:989)\n", + "\tat io.netty.util.internal.ThreadExecutorMap$2.run(ThreadExecutorMap.java:74)\n", + "\tat io.netty.util.concurrent.FastThreadLocalRunnable.run(FastThreadLocalRunnable.java:30)\n", + "\t... 1 more\n", + "Caused by: java.lang.OutOfMemoryError: Java heap space\n", + "23/11/24 21:00:30 WARN org.apache.spark.scheduler.TaskSetManager: Lost task 444.0 in stage 19.0 (TID 2164) (hub-msca-bdp-dphub-students-test-ridhi-w-0.c.msca-bdp-student-ap.internal executor 6): TaskResultLost (result lost from block manager)\n", + "23/11/24 21:00:33 WARN org.apache.spark.storage.BlockManager: Failed to fetch block after 1 fetch failures. 
Most recent failure cause:\n", + "org.apache.spark.SparkException: Exception thrown in awaitResult: \n", + "\tat org.apache.spark.util.ThreadUtils$.awaitResult(ThreadUtils.scala:301)\n", + "\tat org.apache.spark.network.BlockTransferService.fetchBlockSync(BlockTransferService.scala:103)\n", + "\tat org.apache.spark.storage.BlockManager.fetchRemoteManagedBuffer(BlockManager.scala:1069)\n", + "\tat org.apache.spark.storage.BlockManager.$anonfun$getRemoteBlock$8(BlockManager.scala:1013)\n", + "\tat scala.Option.orElse(Option.scala:447)\n", + "\tat org.apache.spark.storage.BlockManager.getRemoteBlock(BlockManager.scala:1013)\n", + "\tat org.apache.spark.storage.BlockManager.getRemoteBytes(BlockManager.scala:1151)\n", + "\tat org.apache.spark.scheduler.TaskResultGetter$$anon$3.$anonfun$run$1(TaskResultGetter.scala:88)\n", + "\tat scala.runtime.java8.JFunction0$mcV$sp.apply(JFunction0$mcV$sp.java:23)\n", + "\tat org.apache.spark.util.Utils$.logUncaughtExceptions(Utils.scala:1996)\n", + "\tat org.apache.spark.scheduler.TaskResultGetter$$anon$3.run(TaskResultGetter.scala:63)\n", + "\tat java.util.concurrent.ThreadPoolExecutor.runWorker(ThreadPoolExecutor.java:1149)\n", + "\tat java.util.concurrent.ThreadPoolExecutor$Worker.run(ThreadPoolExecutor.java:624)\n", + "\tat java.lang.Thread.run(Thread.java:750)\n", + "Caused by: java.util.concurrent.ExecutionException: Boxed Error\n", + "\tat scala.concurrent.impl.Promise$.resolver(Promise.scala:87)\n", + "\tat scala.concurrent.impl.Promise$.scala$concurrent$impl$Promise$$resolveTry(Promise.scala:79)\n", + "\tat scala.concurrent.impl.Promise$DefaultPromise.tryComplete(Promise.scala:284)\n", + "\tat scala.concurrent.Promise.complete(Promise.scala:53)\n", + "\tat scala.concurrent.Promise.complete$(Promise.scala:52)\n", + "\tat scala.concurrent.impl.Promise$DefaultPromise.complete(Promise.scala:187)\n", + "\tat scala.concurrent.Promise.failure(Promise.scala:104)\n", + "\tat scala.concurrent.Promise.failure$(Promise.scala:104)\n", + "\tat scala.concurrent.impl.Promise$DefaultPromise.failure(Promise.scala:187)\n", + "\tat org.apache.spark.network.BlockTransferService$$anon$1.onBlockFetchSuccess(BlockTransferService.scala:98)\n", + "\tat org.apache.spark.network.shuffle.RetryingBlockFetcher$RetryingBlockFetchListener.onBlockFetchSuccess(RetryingBlockFetcher.java:216)\n", + "\tat org.apache.spark.network.shuffle.OneForOneBlockFetcher$ChunkCallback.onSuccess(OneForOneBlockFetcher.java:190)\n", + "\tat org.apache.spark.network.client.TransportResponseHandler.handle(TransportResponseHandler.java:171)\n", + "\tat org.apache.spark.network.server.TransportChannelHandler.channelRead0(TransportChannelHandler.java:142)\n", + "\tat org.apache.spark.network.server.TransportChannelHandler.channelRead0(TransportChannelHandler.java:53)\n", + "\tat io.netty.channel.SimpleChannelInboundHandler.channelRead(SimpleChannelInboundHandler.java:99)\n", + "\tat io.netty.channel.AbstractChannelHandlerContext.invokeChannelRead(AbstractChannelHandlerContext.java:379)\n", + "\tat io.netty.channel.AbstractChannelHandlerContext.invokeChannelRead(AbstractChannelHandlerContext.java:365)\n", + "\tat io.netty.channel.AbstractChannelHandlerContext.fireChannelRead(AbstractChannelHandlerContext.java:357)\n", + "\tat io.netty.handler.timeout.IdleStateHandler.channelRead(IdleStateHandler.java:286)\n", + "\tat io.netty.channel.AbstractChannelHandlerContext.invokeChannelRead(AbstractChannelHandlerContext.java:379)\n", + "\tat 
io.netty.channel.AbstractChannelHandlerContext.invokeChannelRead(AbstractChannelHandlerContext.java:365)\n", + "\tat io.netty.channel.AbstractChannelHandlerContext.fireChannelRead(AbstractChannelHandlerContext.java:357)\n", + "\tat io.netty.handler.codec.MessageToMessageDecoder.channelRead(MessageToMessageDecoder.java:103)\n", + "\tat io.netty.channel.AbstractChannelHandlerContext.invokeChannelRead(AbstractChannelHandlerContext.java:379)\n", + "\tat io.netty.channel.AbstractChannelHandlerContext.invokeChannelRead(AbstractChannelHandlerContext.java:365)\n", + "\tat io.netty.channel.AbstractChannelHandlerContext.fireChannelRead(AbstractChannelHandlerContext.java:357)\n", + "\tat org.apache.spark.network.util.TransportFrameDecoder.channelRead(TransportFrameDecoder.java:102)\n", + "\tat io.netty.channel.AbstractChannelHandlerContext.invokeChannelRead(AbstractChannelHandlerContext.java:379)\n", + "\tat io.netty.channel.AbstractChannelHandlerContext.invokeChannelRead(AbstractChannelHandlerContext.java:365)\n", + "\tat io.netty.channel.AbstractChannelHandlerContext.fireChannelRead(AbstractChannelHandlerContext.java:357)\n", + "\tat io.netty.channel.DefaultChannelPipeline$HeadContext.channelRead(DefaultChannelPipeline.java:1410)\n", + "\tat io.netty.channel.AbstractChannelHandlerContext.invokeChannelRead(AbstractChannelHandlerContext.java:379)\n", + "\tat io.netty.channel.AbstractChannelHandlerContext.invokeChannelRead(AbstractChannelHandlerContext.java:365)\n", + "\tat io.netty.channel.DefaultChannelPipeline.fireChannelRead(DefaultChannelPipeline.java:919)\n", + "\tat io.netty.channel.nio.AbstractNioByteChannel$NioByteUnsafe.read(AbstractNioByteChannel.java:163)\n", + "\tat io.netty.channel.nio.NioEventLoop.processSelectedKey(NioEventLoop.java:714)\n", + "\tat io.netty.channel.nio.NioEventLoop.processSelectedKeysOptimized(NioEventLoop.java:650)\n", + "\tat io.netty.channel.nio.NioEventLoop.processSelectedKeys(NioEventLoop.java:576)\n", + "\tat io.netty.channel.nio.NioEventLoop.run(NioEventLoop.java:493)\n", + "\tat io.netty.util.concurrent.SingleThreadEventExecutor$4.run(SingleThreadEventExecutor.java:989)\n", + "\tat io.netty.util.internal.ThreadExecutorMap$2.run(ThreadExecutorMap.java:74)\n", + "\tat io.netty.util.concurrent.FastThreadLocalRunnable.run(FastThreadLocalRunnable.java:30)\n", + "\t... 1 more\n", + "Caused by: java.lang.OutOfMemoryError: Java heap space\n", + "23/11/24 21:00:33 WARN org.apache.spark.scheduler.TaskSetManager: Lost task 433.0 in stage 19.0 (TID 2153) (hub-msca-bdp-dphub-students-test-ridhi-w-1.c.msca-bdp-student-ap.internal executor 7): TaskResultLost (result lost from block manager)\n", + "23/11/24 21:00:36 WARN org.apache.spark.storage.BlockManager: Failed to fetch block after 1 fetch failures. 
Most recent failure cause:\n", + "org.apache.spark.SparkException: Exception thrown in awaitResult: \n", + "\tat org.apache.spark.util.ThreadUtils$.awaitResult(ThreadUtils.scala:301)\n", + "\tat org.apache.spark.network.BlockTransferService.fetchBlockSync(BlockTransferService.scala:103)\n", + "\tat org.apache.spark.storage.BlockManager.fetchRemoteManagedBuffer(BlockManager.scala:1069)\n", + "\tat org.apache.spark.storage.BlockManager.$anonfun$getRemoteBlock$8(BlockManager.scala:1013)\n", + "\tat scala.Option.orElse(Option.scala:447)\n", + "\tat org.apache.spark.storage.BlockManager.getRemoteBlock(BlockManager.scala:1013)\n", + "\tat org.apache.spark.storage.BlockManager.getRemoteBytes(BlockManager.scala:1151)\n", + "\tat org.apache.spark.scheduler.TaskResultGetter$$anon$3.$anonfun$run$1(TaskResultGetter.scala:88)\n", + "\tat scala.runtime.java8.JFunction0$mcV$sp.apply(JFunction0$mcV$sp.java:23)\n", + "\tat org.apache.spark.util.Utils$.logUncaughtExceptions(Utils.scala:1996)\n", + "\tat org.apache.spark.scheduler.TaskResultGetter$$anon$3.run(TaskResultGetter.scala:63)\n", + "\tat java.util.concurrent.ThreadPoolExecutor.runWorker(ThreadPoolExecutor.java:1149)\n", + "\tat java.util.concurrent.ThreadPoolExecutor$Worker.run(ThreadPoolExecutor.java:624)\n", + "\tat java.lang.Thread.run(Thread.java:750)\n", + "Caused by: java.util.concurrent.ExecutionException: Boxed Error\n", + "\tat scala.concurrent.impl.Promise$.resolver(Promise.scala:87)\n", + "\tat scala.concurrent.impl.Promise$.scala$concurrent$impl$Promise$$resolveTry(Promise.scala:79)\n", + "\tat scala.concurrent.impl.Promise$DefaultPromise.tryComplete(Promise.scala:284)\n", + "\tat scala.concurrent.Promise.complete(Promise.scala:53)\n", + "\tat scala.concurrent.Promise.complete$(Promise.scala:52)\n", + "\tat scala.concurrent.impl.Promise$DefaultPromise.complete(Promise.scala:187)\n", + "\tat scala.concurrent.Promise.failure(Promise.scala:104)\n", + "\tat scala.concurrent.Promise.failure$(Promise.scala:104)\n", + "\tat scala.concurrent.impl.Promise$DefaultPromise.failure(Promise.scala:187)\n", + "\tat org.apache.spark.network.BlockTransferService$$anon$1.onBlockFetchSuccess(BlockTransferService.scala:98)\n", + "\tat org.apache.spark.network.shuffle.RetryingBlockFetcher$RetryingBlockFetchListener.onBlockFetchSuccess(RetryingBlockFetcher.java:216)\n", + "\tat org.apache.spark.network.shuffle.OneForOneBlockFetcher$ChunkCallback.onSuccess(OneForOneBlockFetcher.java:190)\n", + "\tat org.apache.spark.network.client.TransportResponseHandler.handle(TransportResponseHandler.java:171)\n", + "\tat org.apache.spark.network.server.TransportChannelHandler.channelRead0(TransportChannelHandler.java:142)\n", + "\tat org.apache.spark.network.server.TransportChannelHandler.channelRead0(TransportChannelHandler.java:53)\n", + "\tat io.netty.channel.SimpleChannelInboundHandler.channelRead(SimpleChannelInboundHandler.java:99)\n", + "\tat io.netty.channel.AbstractChannelHandlerContext.invokeChannelRead(AbstractChannelHandlerContext.java:379)\n", + "\tat io.netty.channel.AbstractChannelHandlerContext.invokeChannelRead(AbstractChannelHandlerContext.java:365)\n", + "\tat io.netty.channel.AbstractChannelHandlerContext.fireChannelRead(AbstractChannelHandlerContext.java:357)\n", + "\tat io.netty.handler.timeout.IdleStateHandler.channelRead(IdleStateHandler.java:286)\n", + "\tat io.netty.channel.AbstractChannelHandlerContext.invokeChannelRead(AbstractChannelHandlerContext.java:379)\n", + "\tat 
io.netty.channel.AbstractChannelHandlerContext.invokeChannelRead(AbstractChannelHandlerContext.java:365)\n", + "\tat io.netty.channel.AbstractChannelHandlerContext.fireChannelRead(AbstractChannelHandlerContext.java:357)\n", + "\tat io.netty.handler.codec.MessageToMessageDecoder.channelRead(MessageToMessageDecoder.java:103)\n", + "\tat io.netty.channel.AbstractChannelHandlerContext.invokeChannelRead(AbstractChannelHandlerContext.java:379)\n", + "\tat io.netty.channel.AbstractChannelHandlerContext.invokeChannelRead(AbstractChannelHandlerContext.java:365)\n", + "\tat io.netty.channel.AbstractChannelHandlerContext.fireChannelRead(AbstractChannelHandlerContext.java:357)\n", + "\tat org.apache.spark.network.util.TransportFrameDecoder.channelRead(TransportFrameDecoder.java:102)\n", + "\tat io.netty.channel.AbstractChannelHandlerContext.invokeChannelRead(AbstractChannelHandlerContext.java:379)\n", + "\tat io.netty.channel.AbstractChannelHandlerContext.invokeChannelRead(AbstractChannelHandlerContext.java:365)\n", + "\tat io.netty.channel.AbstractChannelHandlerContext.fireChannelRead(AbstractChannelHandlerContext.java:357)\n", + "\tat io.netty.channel.DefaultChannelPipeline$HeadContext.channelRead(DefaultChannelPipeline.java:1410)\n", + "\tat io.netty.channel.AbstractChannelHandlerContext.invokeChannelRead(AbstractChannelHandlerContext.java:379)\n", + "\tat io.netty.channel.AbstractChannelHandlerContext.invokeChannelRead(AbstractChannelHandlerContext.java:365)\n", + "\tat io.netty.channel.DefaultChannelPipeline.fireChannelRead(DefaultChannelPipeline.java:919)\n", + "\tat io.netty.channel.nio.AbstractNioByteChannel$NioByteUnsafe.read(AbstractNioByteChannel.java:163)\n", + "\tat io.netty.channel.nio.NioEventLoop.processSelectedKey(NioEventLoop.java:714)\n", + "\tat io.netty.channel.nio.NioEventLoop.processSelectedKeysOptimized(NioEventLoop.java:650)\n", + "\tat io.netty.channel.nio.NioEventLoop.processSelectedKeys(NioEventLoop.java:576)\n", + "\tat io.netty.channel.nio.NioEventLoop.run(NioEventLoop.java:493)\n", + "\tat io.netty.util.concurrent.SingleThreadEventExecutor$4.run(SingleThreadEventExecutor.java:989)\n", + "\tat io.netty.util.internal.ThreadExecutorMap$2.run(ThreadExecutorMap.java:74)\n", + "\tat io.netty.util.concurrent.FastThreadLocalRunnable.run(FastThreadLocalRunnable.java:30)\n", + "\t... 1 more\n", + "Caused by: java.lang.OutOfMemoryError: Java heap space\n", + "23/11/24 21:00:36 WARN org.apache.spark.scheduler.TaskSetManager: Lost task 429.0 in stage 19.0 (TID 2149) (hub-msca-bdp-dphub-students-test-ridhi-w-0.c.msca-bdp-student-ap.internal executor 6): TaskResultLost (result lost from block manager)\n", + "23/11/24 21:00:40 WARN org.apache.spark.storage.BlockManager: Failed to fetch block after 1 fetch failures. 
Most recent failure cause:\n", + "org.apache.spark.SparkException: Exception thrown in awaitResult: \n", + "\tat org.apache.spark.util.ThreadUtils$.awaitResult(ThreadUtils.scala:301)\n", + "\tat org.apache.spark.network.BlockTransferService.fetchBlockSync(BlockTransferService.scala:103)\n", + "\tat org.apache.spark.storage.BlockManager.fetchRemoteManagedBuffer(BlockManager.scala:1069)\n", + "\tat org.apache.spark.storage.BlockManager.$anonfun$getRemoteBlock$8(BlockManager.scala:1013)\n", + "\tat scala.Option.orElse(Option.scala:447)\n", + "\tat org.apache.spark.storage.BlockManager.getRemoteBlock(BlockManager.scala:1013)\n", + "\tat org.apache.spark.storage.BlockManager.getRemoteBytes(BlockManager.scala:1151)\n", + "\tat org.apache.spark.scheduler.TaskResultGetter$$anon$3.$anonfun$run$1(TaskResultGetter.scala:88)\n", + "\tat scala.runtime.java8.JFunction0$mcV$sp.apply(JFunction0$mcV$sp.java:23)\n", + "\tat org.apache.spark.util.Utils$.logUncaughtExceptions(Utils.scala:1996)\n", + "\tat org.apache.spark.scheduler.TaskResultGetter$$anon$3.run(TaskResultGetter.scala:63)\n", + "\tat java.util.concurrent.ThreadPoolExecutor.runWorker(ThreadPoolExecutor.java:1149)\n", + "\tat java.util.concurrent.ThreadPoolExecutor$Worker.run(ThreadPoolExecutor.java:624)\n", + "\tat java.lang.Thread.run(Thread.java:750)\n", + "Caused by: java.util.concurrent.ExecutionException: Boxed Error\n", + "\tat scala.concurrent.impl.Promise$.resolver(Promise.scala:87)\n", + "\tat scala.concurrent.impl.Promise$.scala$concurrent$impl$Promise$$resolveTry(Promise.scala:79)\n", + "\tat scala.concurrent.impl.Promise$DefaultPromise.tryComplete(Promise.scala:284)\n", + "\tat scala.concurrent.Promise.complete(Promise.scala:53)\n", + "\tat scala.concurrent.Promise.complete$(Promise.scala:52)\n", + "\tat scala.concurrent.impl.Promise$DefaultPromise.complete(Promise.scala:187)\n", + "\tat scala.concurrent.Promise.failure(Promise.scala:104)\n", + "\tat scala.concurrent.Promise.failure$(Promise.scala:104)\n", + "\tat scala.concurrent.impl.Promise$DefaultPromise.failure(Promise.scala:187)\n", + "\tat org.apache.spark.network.BlockTransferService$$anon$1.onBlockFetchSuccess(BlockTransferService.scala:98)\n", + "\tat org.apache.spark.network.shuffle.RetryingBlockFetcher$RetryingBlockFetchListener.onBlockFetchSuccess(RetryingBlockFetcher.java:216)\n", + "\tat org.apache.spark.network.shuffle.OneForOneBlockFetcher$ChunkCallback.onSuccess(OneForOneBlockFetcher.java:190)\n", + "\tat org.apache.spark.network.client.TransportResponseHandler.handle(TransportResponseHandler.java:171)\n", + "\tat org.apache.spark.network.server.TransportChannelHandler.channelRead0(TransportChannelHandler.java:142)\n", + "\tat org.apache.spark.network.server.TransportChannelHandler.channelRead0(TransportChannelHandler.java:53)\n", + "\tat io.netty.channel.SimpleChannelInboundHandler.channelRead(SimpleChannelInboundHandler.java:99)\n", + "\tat io.netty.channel.AbstractChannelHandlerContext.invokeChannelRead(AbstractChannelHandlerContext.java:379)\n", + "\tat io.netty.channel.AbstractChannelHandlerContext.invokeChannelRead(AbstractChannelHandlerContext.java:365)\n", + "\tat io.netty.channel.AbstractChannelHandlerContext.fireChannelRead(AbstractChannelHandlerContext.java:357)\n", + "\tat io.netty.handler.timeout.IdleStateHandler.channelRead(IdleStateHandler.java:286)\n", + "\tat io.netty.channel.AbstractChannelHandlerContext.invokeChannelRead(AbstractChannelHandlerContext.java:379)\n", + "\tat 
io.netty.channel.AbstractChannelHandlerContext.invokeChannelRead(AbstractChannelHandlerContext.java:365)\n", + "\tat io.netty.channel.AbstractChannelHandlerContext.fireChannelRead(AbstractChannelHandlerContext.java:357)\n", + "\tat io.netty.handler.codec.MessageToMessageDecoder.channelRead(MessageToMessageDecoder.java:103)\n", + "\tat io.netty.channel.AbstractChannelHandlerContext.invokeChannelRead(AbstractChannelHandlerContext.java:379)\n", + "\tat io.netty.channel.AbstractChannelHandlerContext.invokeChannelRead(AbstractChannelHandlerContext.java:365)\n", + "\tat io.netty.channel.AbstractChannelHandlerContext.fireChannelRead(AbstractChannelHandlerContext.java:357)\n", + "\tat org.apache.spark.network.util.TransportFrameDecoder.channelRead(TransportFrameDecoder.java:102)\n", + "\tat io.netty.channel.AbstractChannelHandlerContext.invokeChannelRead(AbstractChannelHandlerContext.java:379)\n", + "\tat io.netty.channel.AbstractChannelHandlerContext.invokeChannelRead(AbstractChannelHandlerContext.java:365)\n", + "\tat io.netty.channel.AbstractChannelHandlerContext.fireChannelRead(AbstractChannelHandlerContext.java:357)\n", + "\tat io.netty.channel.DefaultChannelPipeline$HeadContext.channelRead(DefaultChannelPipeline.java:1410)\n", + "\tat io.netty.channel.AbstractChannelHandlerContext.invokeChannelRead(AbstractChannelHandlerContext.java:379)\n", + "\tat io.netty.channel.AbstractChannelHandlerContext.invokeChannelRead(AbstractChannelHandlerContext.java:365)\n", + "\tat io.netty.channel.DefaultChannelPipeline.fireChannelRead(DefaultChannelPipeline.java:919)\n", + "\tat io.netty.channel.nio.AbstractNioByteChannel$NioByteUnsafe.read(AbstractNioByteChannel.java:163)\n", + "\tat io.netty.channel.nio.NioEventLoop.processSelectedKey(NioEventLoop.java:714)\n", + "\tat io.netty.channel.nio.NioEventLoop.processSelectedKeysOptimized(NioEventLoop.java:650)\n", + "\tat io.netty.channel.nio.NioEventLoop.processSelectedKeys(NioEventLoop.java:576)\n", + "\tat io.netty.channel.nio.NioEventLoop.run(NioEventLoop.java:493)\n", + "\tat io.netty.util.concurrent.SingleThreadEventExecutor$4.run(SingleThreadEventExecutor.java:989)\n", + "\tat io.netty.util.internal.ThreadExecutorMap$2.run(ThreadExecutorMap.java:74)\n", + "\tat io.netty.util.concurrent.FastThreadLocalRunnable.run(FastThreadLocalRunnable.java:30)\n", + "\t... 1 more\n", + "Caused by: java.lang.OutOfMemoryError: Java heap space\n", + "23/11/24 21:00:40 WARN org.apache.spark.storage.BlockManager: Failed to fetch block after 1 fetch failures. 
Most recent failure cause:\n", + "org.apache.spark.SparkException: Exception thrown in awaitResult: \n", + "\tat org.apache.spark.util.ThreadUtils$.awaitResult(ThreadUtils.scala:301)\n", + "\tat org.apache.spark.network.BlockTransferService.fetchBlockSync(BlockTransferService.scala:103)\n", + "\tat org.apache.spark.storage.BlockManager.fetchRemoteManagedBuffer(BlockManager.scala:1069)\n", + "\tat org.apache.spark.storage.BlockManager.$anonfun$getRemoteBlock$8(BlockManager.scala:1013)\n", + "\tat scala.Option.orElse(Option.scala:447)\n", + "\tat org.apache.spark.storage.BlockManager.getRemoteBlock(BlockManager.scala:1013)\n", + "\tat org.apache.spark.storage.BlockManager.getRemoteBytes(BlockManager.scala:1151)\n", + "\tat org.apache.spark.scheduler.TaskResultGetter$$anon$3.$anonfun$run$1(TaskResultGetter.scala:88)\n", + "\tat scala.runtime.java8.JFunction0$mcV$sp.apply(JFunction0$mcV$sp.java:23)\n", + "\tat org.apache.spark.util.Utils$.logUncaughtExceptions(Utils.scala:1996)\n", + "\tat org.apache.spark.scheduler.TaskResultGetter$$anon$3.run(TaskResultGetter.scala:63)\n", + "\tat java.util.concurrent.ThreadPoolExecutor.runWorker(ThreadPoolExecutor.java:1149)\n", + "\tat java.util.concurrent.ThreadPoolExecutor$Worker.run(ThreadPoolExecutor.java:624)\n", + "\tat java.lang.Thread.run(Thread.java:750)\n", + "Caused by: java.util.concurrent.ExecutionException: Boxed Error\n", + "\tat scala.concurrent.impl.Promise$.resolver(Promise.scala:87)\n", + "\tat scala.concurrent.impl.Promise$.scala$concurrent$impl$Promise$$resolveTry(Promise.scala:79)\n", + "\tat scala.concurrent.impl.Promise$DefaultPromise.tryComplete(Promise.scala:284)\n", + "\tat scala.concurrent.Promise.complete(Promise.scala:53)\n", + "\tat scala.concurrent.Promise.complete$(Promise.scala:52)\n", + "\tat scala.concurrent.impl.Promise$DefaultPromise.complete(Promise.scala:187)\n", + "\tat scala.concurrent.Promise.failure(Promise.scala:104)\n", + "\tat scala.concurrent.Promise.failure$(Promise.scala:104)\n", + "\tat scala.concurrent.impl.Promise$DefaultPromise.failure(Promise.scala:187)\n", + "\tat org.apache.spark.network.BlockTransferService$$anon$1.onBlockFetchSuccess(BlockTransferService.scala:98)\n", + "\tat org.apache.spark.network.shuffle.RetryingBlockFetcher$RetryingBlockFetchListener.onBlockFetchSuccess(RetryingBlockFetcher.java:216)\n", + "\tat org.apache.spark.network.shuffle.OneForOneBlockFetcher$ChunkCallback.onSuccess(OneForOneBlockFetcher.java:190)\n", + "\tat org.apache.spark.network.client.TransportResponseHandler.handle(TransportResponseHandler.java:171)\n", + "\tat org.apache.spark.network.server.TransportChannelHandler.channelRead0(TransportChannelHandler.java:142)\n", + "\tat org.apache.spark.network.server.TransportChannelHandler.channelRead0(TransportChannelHandler.java:53)\n", + "\tat io.netty.channel.SimpleChannelInboundHandler.channelRead(SimpleChannelInboundHandler.java:99)\n", + "\tat io.netty.channel.AbstractChannelHandlerContext.invokeChannelRead(AbstractChannelHandlerContext.java:379)\n", + "\tat io.netty.channel.AbstractChannelHandlerContext.invokeChannelRead(AbstractChannelHandlerContext.java:365)\n", + "\tat io.netty.channel.AbstractChannelHandlerContext.fireChannelRead(AbstractChannelHandlerContext.java:357)\n", + "\tat io.netty.handler.timeout.IdleStateHandler.channelRead(IdleStateHandler.java:286)\n", + "\tat io.netty.channel.AbstractChannelHandlerContext.invokeChannelRead(AbstractChannelHandlerContext.java:379)\n", + "\tat 
io.netty.channel.AbstractChannelHandlerContext.invokeChannelRead(AbstractChannelHandlerContext.java:365)\n", + "\tat io.netty.channel.AbstractChannelHandlerContext.fireChannelRead(AbstractChannelHandlerContext.java:357)\n", + "\tat io.netty.handler.codec.MessageToMessageDecoder.channelRead(MessageToMessageDecoder.java:103)\n", + "\tat io.netty.channel.AbstractChannelHandlerContext.invokeChannelRead(AbstractChannelHandlerContext.java:379)\n", + "\tat io.netty.channel.AbstractChannelHandlerContext.invokeChannelRead(AbstractChannelHandlerContext.java:365)\n", + "\tat io.netty.channel.AbstractChannelHandlerContext.fireChannelRead(AbstractChannelHandlerContext.java:357)\n", + "\tat org.apache.spark.network.util.TransportFrameDecoder.channelRead(TransportFrameDecoder.java:102)\n", + "\tat io.netty.channel.AbstractChannelHandlerContext.invokeChannelRead(AbstractChannelHandlerContext.java:379)\n", + "\tat io.netty.channel.AbstractChannelHandlerContext.invokeChannelRead(AbstractChannelHandlerContext.java:365)\n", + "\tat io.netty.channel.AbstractChannelHandlerContext.fireChannelRead(AbstractChannelHandlerContext.java:357)\n", + "\tat io.netty.channel.DefaultChannelPipeline$HeadContext.channelRead(DefaultChannelPipeline.java:1410)\n", + "\tat io.netty.channel.AbstractChannelHandlerContext.invokeChannelRead(AbstractChannelHandlerContext.java:379)\n", + "\tat io.netty.channel.AbstractChannelHandlerContext.invokeChannelRead(AbstractChannelHandlerContext.java:365)\n", + "\tat io.netty.channel.DefaultChannelPipeline.fireChannelRead(DefaultChannelPipeline.java:919)\n", + "\tat io.netty.channel.nio.AbstractNioByteChannel$NioByteUnsafe.read(AbstractNioByteChannel.java:163)\n", + "\tat io.netty.channel.nio.NioEventLoop.processSelectedKey(NioEventLoop.java:714)\n", + "\tat io.netty.channel.nio.NioEventLoop.processSelectedKeysOptimized(NioEventLoop.java:650)\n", + "\tat io.netty.channel.nio.NioEventLoop.processSelectedKeys(NioEventLoop.java:576)\n", + "\tat io.netty.channel.nio.NioEventLoop.run(NioEventLoop.java:493)\n", + "\tat io.netty.util.concurrent.SingleThreadEventExecutor$4.run(SingleThreadEventExecutor.java:989)\n", + "\tat io.netty.util.internal.ThreadExecutorMap$2.run(ThreadExecutorMap.java:74)\n", + "\tat io.netty.util.concurrent.FastThreadLocalRunnable.run(FastThreadLocalRunnable.java:30)\n", + "\t... 1 more\n", + "Caused by: java.lang.OutOfMemoryError: Java heap space\n", + "23/11/24 21:00:40 WARN org.apache.spark.scheduler.TaskSetManager: Lost task 445.0 in stage 19.0 (TID 2165) (hub-msca-bdp-dphub-students-test-ridhi-w-0.c.msca-bdp-student-ap.internal executor 6): TaskResultLost (result lost from block manager)\n", + "23/11/24 21:00:40 WARN org.apache.spark.scheduler.TaskSetManager: Lost task 440.0 in stage 19.0 (TID 2160) (hub-msca-bdp-dphub-students-test-ridhi-w-0.c.msca-bdp-student-ap.internal executor 3): TaskResultLost (result lost from block manager)\n", + "23/11/24 21:00:45 WARN org.apache.spark.storage.BlockManager: Failed to fetch block after 1 fetch failures. 
Most recent failure cause:\n", + "org.apache.spark.SparkException: Exception thrown in awaitResult: \n", + "\tat org.apache.spark.util.ThreadUtils$.awaitResult(ThreadUtils.scala:301)\n", + "\tat org.apache.spark.network.BlockTransferService.fetchBlockSync(BlockTransferService.scala:103)\n", + "\tat org.apache.spark.storage.BlockManager.fetchRemoteManagedBuffer(BlockManager.scala:1069)\n", + "\tat org.apache.spark.storage.BlockManager.$anonfun$getRemoteBlock$8(BlockManager.scala:1013)\n", + "\tat scala.Option.orElse(Option.scala:447)\n", + "\tat org.apache.spark.storage.BlockManager.getRemoteBlock(BlockManager.scala:1013)\n", + "\tat org.apache.spark.storage.BlockManager.getRemoteBytes(BlockManager.scala:1151)\n", + "\tat org.apache.spark.scheduler.TaskResultGetter$$anon$3.$anonfun$run$1(TaskResultGetter.scala:88)\n", + "\tat scala.runtime.java8.JFunction0$mcV$sp.apply(JFunction0$mcV$sp.java:23)\n", + "\tat org.apache.spark.util.Utils$.logUncaughtExceptions(Utils.scala:1996)\n", + "\tat org.apache.spark.scheduler.TaskResultGetter$$anon$3.run(TaskResultGetter.scala:63)\n", + "\tat java.util.concurrent.ThreadPoolExecutor.runWorker(ThreadPoolExecutor.java:1149)\n", + "\tat java.util.concurrent.ThreadPoolExecutor$Worker.run(ThreadPoolExecutor.java:624)\n", + "\tat java.lang.Thread.run(Thread.java:750)\n", + "Caused by: java.util.concurrent.ExecutionException: Boxed Error\n", + "\tat scala.concurrent.impl.Promise$.resolver(Promise.scala:87)\n", + "\tat scala.concurrent.impl.Promise$.scala$concurrent$impl$Promise$$resolveTry(Promise.scala:79)\n", + "\tat scala.concurrent.impl.Promise$DefaultPromise.tryComplete(Promise.scala:284)\n", + "\tat scala.concurrent.Promise.complete(Promise.scala:53)\n", + "\tat scala.concurrent.Promise.complete$(Promise.scala:52)\n", + "\tat scala.concurrent.impl.Promise$DefaultPromise.complete(Promise.scala:187)\n", + "\tat scala.concurrent.Promise.failure(Promise.scala:104)\n", + "\tat scala.concurrent.Promise.failure$(Promise.scala:104)\n", + "\tat scala.concurrent.impl.Promise$DefaultPromise.failure(Promise.scala:187)\n", + "\tat org.apache.spark.network.BlockTransferService$$anon$1.onBlockFetchSuccess(BlockTransferService.scala:98)\n", + "\tat org.apache.spark.network.shuffle.RetryingBlockFetcher$RetryingBlockFetchListener.onBlockFetchSuccess(RetryingBlockFetcher.java:216)\n", + "\tat org.apache.spark.network.shuffle.OneForOneBlockFetcher$ChunkCallback.onSuccess(OneForOneBlockFetcher.java:190)\n", + "\tat org.apache.spark.network.client.TransportResponseHandler.handle(TransportResponseHandler.java:171)\n", + "\tat org.apache.spark.network.server.TransportChannelHandler.channelRead0(TransportChannelHandler.java:142)\n", + "\tat org.apache.spark.network.server.TransportChannelHandler.channelRead0(TransportChannelHandler.java:53)\n", + "\tat io.netty.channel.SimpleChannelInboundHandler.channelRead(SimpleChannelInboundHandler.java:99)\n", + "\tat io.netty.channel.AbstractChannelHandlerContext.invokeChannelRead(AbstractChannelHandlerContext.java:379)\n", + "\tat io.netty.channel.AbstractChannelHandlerContext.invokeChannelRead(AbstractChannelHandlerContext.java:365)\n", + "\tat io.netty.channel.AbstractChannelHandlerContext.fireChannelRead(AbstractChannelHandlerContext.java:357)\n", + "\tat io.netty.handler.timeout.IdleStateHandler.channelRead(IdleStateHandler.java:286)\n", + "\tat io.netty.channel.AbstractChannelHandlerContext.invokeChannelRead(AbstractChannelHandlerContext.java:379)\n", + "\tat 
io.netty.channel.AbstractChannelHandlerContext.invokeChannelRead(AbstractChannelHandlerContext.java:365)\n", + "\tat io.netty.channel.AbstractChannelHandlerContext.fireChannelRead(AbstractChannelHandlerContext.java:357)\n", + "\tat io.netty.handler.codec.MessageToMessageDecoder.channelRead(MessageToMessageDecoder.java:103)\n", + "\tat io.netty.channel.AbstractChannelHandlerContext.invokeChannelRead(AbstractChannelHandlerContext.java:379)\n", + "\tat io.netty.channel.AbstractChannelHandlerContext.invokeChannelRead(AbstractChannelHandlerContext.java:365)\n", + "\tat io.netty.channel.AbstractChannelHandlerContext.fireChannelRead(AbstractChannelHandlerContext.java:357)\n", + "\tat org.apache.spark.network.util.TransportFrameDecoder.channelRead(TransportFrameDecoder.java:102)\n", + "\tat io.netty.channel.AbstractChannelHandlerContext.invokeChannelRead(AbstractChannelHandlerContext.java:379)\n", + "\tat io.netty.channel.AbstractChannelHandlerContext.invokeChannelRead(AbstractChannelHandlerContext.java:365)\n", + "\tat io.netty.channel.AbstractChannelHandlerContext.fireChannelRead(AbstractChannelHandlerContext.java:357)\n", + "\tat io.netty.channel.DefaultChannelPipeline$HeadContext.channelRead(DefaultChannelPipeline.java:1410)\n", + "\tat io.netty.channel.AbstractChannelHandlerContext.invokeChannelRead(AbstractChannelHandlerContext.java:379)\n", + "\tat io.netty.channel.AbstractChannelHandlerContext.invokeChannelRead(AbstractChannelHandlerContext.java:365)\n", + "\tat io.netty.channel.DefaultChannelPipeline.fireChannelRead(DefaultChannelPipeline.java:919)\n", + "\tat io.netty.channel.nio.AbstractNioByteChannel$NioByteUnsafe.read(AbstractNioByteChannel.java:163)\n", + "\tat io.netty.channel.nio.NioEventLoop.processSelectedKey(NioEventLoop.java:714)\n", + "\tat io.netty.channel.nio.NioEventLoop.processSelectedKeysOptimized(NioEventLoop.java:650)\n", + "\tat io.netty.channel.nio.NioEventLoop.processSelectedKeys(NioEventLoop.java:576)\n", + "\tat io.netty.channel.nio.NioEventLoop.run(NioEventLoop.java:493)\n", + "\tat io.netty.util.concurrent.SingleThreadEventExecutor$4.run(SingleThreadEventExecutor.java:989)\n", + "\tat io.netty.util.internal.ThreadExecutorMap$2.run(ThreadExecutorMap.java:74)\n", + "\tat io.netty.util.concurrent.FastThreadLocalRunnable.run(FastThreadLocalRunnable.java:30)\n", + "\t... 1 more\n", + "Caused by: java.lang.OutOfMemoryError: Java heap space\n", + "23/11/24 21:00:45 WARN org.apache.spark.scheduler.TaskSetManager: Lost task 430.0 in stage 19.0 (TID 2150) (hub-msca-bdp-dphub-students-test-ridhi-w-0.c.msca-bdp-student-ap.internal executor 6): TaskResultLost (result lost from block manager)\n", + "23/11/24 21:00:47 WARN org.apache.spark.storage.BlockManager: Failed to fetch block after 1 fetch failures. 
Most recent failure cause:\n", + "org.apache.spark.SparkException: Exception thrown in awaitResult: \n", + "\tat org.apache.spark.util.ThreadUtils$.awaitResult(ThreadUtils.scala:301)\n", + "\tat org.apache.spark.network.BlockTransferService.fetchBlockSync(BlockTransferService.scala:103)\n", + "\tat org.apache.spark.storage.BlockManager.fetchRemoteManagedBuffer(BlockManager.scala:1069)\n", + "\tat org.apache.spark.storage.BlockManager.$anonfun$getRemoteBlock$8(BlockManager.scala:1013)\n", + "\tat scala.Option.orElse(Option.scala:447)\n", + "\tat org.apache.spark.storage.BlockManager.getRemoteBlock(BlockManager.scala:1013)\n", + "\tat org.apache.spark.storage.BlockManager.getRemoteBytes(BlockManager.scala:1151)\n", + "\tat org.apache.spark.scheduler.TaskResultGetter$$anon$3.$anonfun$run$1(TaskResultGetter.scala:88)\n", + "\tat scala.runtime.java8.JFunction0$mcV$sp.apply(JFunction0$mcV$sp.java:23)\n", + "\tat org.apache.spark.util.Utils$.logUncaughtExceptions(Utils.scala:1996)\n", + "\tat org.apache.spark.scheduler.TaskResultGetter$$anon$3.run(TaskResultGetter.scala:63)\n", + "\tat java.util.concurrent.ThreadPoolExecutor.runWorker(ThreadPoolExecutor.java:1149)\n", + "\tat java.util.concurrent.ThreadPoolExecutor$Worker.run(ThreadPoolExecutor.java:624)\n", + "\tat java.lang.Thread.run(Thread.java:750)\n", + "Caused by: java.util.concurrent.ExecutionException: Boxed Error\n", + "\tat scala.concurrent.impl.Promise$.resolver(Promise.scala:87)\n", + "\tat scala.concurrent.impl.Promise$.scala$concurrent$impl$Promise$$resolveTry(Promise.scala:79)\n", + "\tat scala.concurrent.impl.Promise$DefaultPromise.tryComplete(Promise.scala:284)\n", + "\tat scala.concurrent.Promise.complete(Promise.scala:53)\n", + "\tat scala.concurrent.Promise.complete$(Promise.scala:52)\n", + "\tat scala.concurrent.impl.Promise$DefaultPromise.complete(Promise.scala:187)\n", + "\tat scala.concurrent.Promise.failure(Promise.scala:104)\n", + "\tat scala.concurrent.Promise.failure$(Promise.scala:104)\n", + "\tat scala.concurrent.impl.Promise$DefaultPromise.failure(Promise.scala:187)\n", + "\tat org.apache.spark.network.BlockTransferService$$anon$1.onBlockFetchSuccess(BlockTransferService.scala:98)\n", + "\tat org.apache.spark.network.shuffle.RetryingBlockFetcher$RetryingBlockFetchListener.onBlockFetchSuccess(RetryingBlockFetcher.java:216)\n", + "\tat org.apache.spark.network.shuffle.OneForOneBlockFetcher$ChunkCallback.onSuccess(OneForOneBlockFetcher.java:190)\n", + "\tat org.apache.spark.network.client.TransportResponseHandler.handle(TransportResponseHandler.java:171)\n", + "\tat org.apache.spark.network.server.TransportChannelHandler.channelRead0(TransportChannelHandler.java:142)\n", + "\tat org.apache.spark.network.server.TransportChannelHandler.channelRead0(TransportChannelHandler.java:53)\n", + "\tat io.netty.channel.SimpleChannelInboundHandler.channelRead(SimpleChannelInboundHandler.java:99)\n", + "\tat io.netty.channel.AbstractChannelHandlerContext.invokeChannelRead(AbstractChannelHandlerContext.java:379)\n", + "\tat io.netty.channel.AbstractChannelHandlerContext.invokeChannelRead(AbstractChannelHandlerContext.java:365)\n", + "\tat io.netty.channel.AbstractChannelHandlerContext.fireChannelRead(AbstractChannelHandlerContext.java:357)\n", + "\tat io.netty.handler.timeout.IdleStateHandler.channelRead(IdleStateHandler.java:286)\n", + "\tat io.netty.channel.AbstractChannelHandlerContext.invokeChannelRead(AbstractChannelHandlerContext.java:379)\n", + "\tat 
io.netty.channel.AbstractChannelHandlerContext.invokeChannelRead(AbstractChannelHandlerContext.java:365)\n", + "\tat io.netty.channel.AbstractChannelHandlerContext.fireChannelRead(AbstractChannelHandlerContext.java:357)\n", + "\tat io.netty.handler.codec.MessageToMessageDecoder.channelRead(MessageToMessageDecoder.java:103)\n", + "\tat io.netty.channel.AbstractChannelHandlerContext.invokeChannelRead(AbstractChannelHandlerContext.java:379)\n", + "\tat io.netty.channel.AbstractChannelHandlerContext.invokeChannelRead(AbstractChannelHandlerContext.java:365)\n", + "\tat io.netty.channel.AbstractChannelHandlerContext.fireChannelRead(AbstractChannelHandlerContext.java:357)\n", + "\tat org.apache.spark.network.util.TransportFrameDecoder.channelRead(TransportFrameDecoder.java:102)\n", + "\tat io.netty.channel.AbstractChannelHandlerContext.invokeChannelRead(AbstractChannelHandlerContext.java:379)\n", + "\tat io.netty.channel.AbstractChannelHandlerContext.invokeChannelRead(AbstractChannelHandlerContext.java:365)\n", + "\tat io.netty.channel.AbstractChannelHandlerContext.fireChannelRead(AbstractChannelHandlerContext.java:357)\n", + "\tat io.netty.channel.DefaultChannelPipeline$HeadContext.channelRead(DefaultChannelPipeline.java:1410)\n", + "\tat io.netty.channel.AbstractChannelHandlerContext.invokeChannelRead(AbstractChannelHandlerContext.java:379)\n", + "\tat io.netty.channel.AbstractChannelHandlerContext.invokeChannelRead(AbstractChannelHandlerContext.java:365)\n", + "\tat io.netty.channel.DefaultChannelPipeline.fireChannelRead(DefaultChannelPipeline.java:919)\n", + "\tat io.netty.channel.nio.AbstractNioByteChannel$NioByteUnsafe.read(AbstractNioByteChannel.java:163)\n", + "\tat io.netty.channel.nio.NioEventLoop.processSelectedKey(NioEventLoop.java:714)\n", + "\tat io.netty.channel.nio.NioEventLoop.processSelectedKeysOptimized(NioEventLoop.java:650)\n", + "\tat io.netty.channel.nio.NioEventLoop.processSelectedKeys(NioEventLoop.java:576)\n", + "\tat io.netty.channel.nio.NioEventLoop.run(NioEventLoop.java:493)\n", + "\tat io.netty.util.concurrent.SingleThreadEventExecutor$4.run(SingleThreadEventExecutor.java:989)\n", + "\tat io.netty.util.internal.ThreadExecutorMap$2.run(ThreadExecutorMap.java:74)\n", + "\tat io.netty.util.concurrent.FastThreadLocalRunnable.run(FastThreadLocalRunnable.java:30)\n", + "\t... 1 more\n", + "Caused by: java.lang.OutOfMemoryError: Java heap space\n", + "23/11/24 21:00:47 WARN org.apache.spark.scheduler.TaskSetManager: Lost task 454.0 in stage 19.0 (TID 2174) (hub-msca-bdp-dphub-students-test-ridhi-w-1.c.msca-bdp-student-ap.internal executor 7): TaskResultLost (result lost from block manager)\n", + "23/11/24 21:00:50 WARN org.apache.spark.storage.BlockManager: Failed to fetch block after 1 fetch failures. 
Most recent failure cause:\n", + "org.apache.spark.SparkException: Exception thrown in awaitResult: \n", + "\tat org.apache.spark.util.ThreadUtils$.awaitResult(ThreadUtils.scala:301)\n", + "\tat org.apache.spark.network.BlockTransferService.fetchBlockSync(BlockTransferService.scala:103)\n", + "\tat org.apache.spark.storage.BlockManager.fetchRemoteManagedBuffer(BlockManager.scala:1069)\n", + "\tat org.apache.spark.storage.BlockManager.$anonfun$getRemoteBlock$8(BlockManager.scala:1013)\n", + "\tat scala.Option.orElse(Option.scala:447)\n", + "\tat org.apache.spark.storage.BlockManager.getRemoteBlock(BlockManager.scala:1013)\n", + "\tat org.apache.spark.storage.BlockManager.getRemoteBytes(BlockManager.scala:1151)\n", + "\tat org.apache.spark.scheduler.TaskResultGetter$$anon$3.$anonfun$run$1(TaskResultGetter.scala:88)\n", + "\tat scala.runtime.java8.JFunction0$mcV$sp.apply(JFunction0$mcV$sp.java:23)\n", + "\tat org.apache.spark.util.Utils$.logUncaughtExceptions(Utils.scala:1996)\n", + "\tat org.apache.spark.scheduler.TaskResultGetter$$anon$3.run(TaskResultGetter.scala:63)\n", + "\tat java.util.concurrent.ThreadPoolExecutor.runWorker(ThreadPoolExecutor.java:1149)\n", + "\tat java.util.concurrent.ThreadPoolExecutor$Worker.run(ThreadPoolExecutor.java:624)\n", + "\tat java.lang.Thread.run(Thread.java:750)\n", + "Caused by: java.util.concurrent.ExecutionException: Boxed Error\n", + "\tat scala.concurrent.impl.Promise$.resolver(Promise.scala:87)\n", + "\tat scala.concurrent.impl.Promise$.scala$concurrent$impl$Promise$$resolveTry(Promise.scala:79)\n", + "\tat scala.concurrent.impl.Promise$DefaultPromise.tryComplete(Promise.scala:284)\n", + "\tat scala.concurrent.Promise.complete(Promise.scala:53)\n", + "\tat scala.concurrent.Promise.complete$(Promise.scala:52)\n", + "\tat scala.concurrent.impl.Promise$DefaultPromise.complete(Promise.scala:187)\n", + "\tat scala.concurrent.Promise.failure(Promise.scala:104)\n", + "\tat scala.concurrent.Promise.failure$(Promise.scala:104)\n", + "\tat scala.concurrent.impl.Promise$DefaultPromise.failure(Promise.scala:187)\n", + "\tat org.apache.spark.network.BlockTransferService$$anon$1.onBlockFetchSuccess(BlockTransferService.scala:98)\n", + "\tat org.apache.spark.network.shuffle.RetryingBlockFetcher$RetryingBlockFetchListener.onBlockFetchSuccess(RetryingBlockFetcher.java:216)\n", + "\tat org.apache.spark.network.shuffle.OneForOneBlockFetcher$ChunkCallback.onSuccess(OneForOneBlockFetcher.java:190)\n", + "\tat org.apache.spark.network.client.TransportResponseHandler.handle(TransportResponseHandler.java:171)\n", + "\tat org.apache.spark.network.server.TransportChannelHandler.channelRead0(TransportChannelHandler.java:142)\n", + "\tat org.apache.spark.network.server.TransportChannelHandler.channelRead0(TransportChannelHandler.java:53)\n", + "\tat io.netty.channel.SimpleChannelInboundHandler.channelRead(SimpleChannelInboundHandler.java:99)\n", + "\tat io.netty.channel.AbstractChannelHandlerContext.invokeChannelRead(AbstractChannelHandlerContext.java:379)\n", + "\tat io.netty.channel.AbstractChannelHandlerContext.invokeChannelRead(AbstractChannelHandlerContext.java:365)\n", + "\tat io.netty.channel.AbstractChannelHandlerContext.fireChannelRead(AbstractChannelHandlerContext.java:357)\n", + "\tat io.netty.handler.timeout.IdleStateHandler.channelRead(IdleStateHandler.java:286)\n", + "\tat io.netty.channel.AbstractChannelHandlerContext.invokeChannelRead(AbstractChannelHandlerContext.java:379)\n", + "\tat 
io.netty.channel.AbstractChannelHandlerContext.invokeChannelRead(AbstractChannelHandlerContext.java:365)\n", + "\tat io.netty.channel.AbstractChannelHandlerContext.fireChannelRead(AbstractChannelHandlerContext.java:357)\n", + "\tat io.netty.handler.codec.MessageToMessageDecoder.channelRead(MessageToMessageDecoder.java:103)\n", + "\tat io.netty.channel.AbstractChannelHandlerContext.invokeChannelRead(AbstractChannelHandlerContext.java:379)\n", + "\tat io.netty.channel.AbstractChannelHandlerContext.invokeChannelRead(AbstractChannelHandlerContext.java:365)\n", + "\tat io.netty.channel.AbstractChannelHandlerContext.fireChannelRead(AbstractChannelHandlerContext.java:357)\n", + "\tat org.apache.spark.network.util.TransportFrameDecoder.channelRead(TransportFrameDecoder.java:102)\n", + "\tat io.netty.channel.AbstractChannelHandlerContext.invokeChannelRead(AbstractChannelHandlerContext.java:379)\n", + "\tat io.netty.channel.AbstractChannelHandlerContext.invokeChannelRead(AbstractChannelHandlerContext.java:365)\n", + "\tat io.netty.channel.AbstractChannelHandlerContext.fireChannelRead(AbstractChannelHandlerContext.java:357)\n", + "\tat io.netty.channel.DefaultChannelPipeline$HeadContext.channelRead(DefaultChannelPipeline.java:1410)\n", + "\tat io.netty.channel.AbstractChannelHandlerContext.invokeChannelRead(AbstractChannelHandlerContext.java:379)\n", + "\tat io.netty.channel.AbstractChannelHandlerContext.invokeChannelRead(AbstractChannelHandlerContext.java:365)\n", + "\tat io.netty.channel.DefaultChannelPipeline.fireChannelRead(DefaultChannelPipeline.java:919)\n", + "\tat io.netty.channel.nio.AbstractNioByteChannel$NioByteUnsafe.read(AbstractNioByteChannel.java:163)\n", + "\tat io.netty.channel.nio.NioEventLoop.processSelectedKey(NioEventLoop.java:714)\n", + "\tat io.netty.channel.nio.NioEventLoop.processSelectedKeysOptimized(NioEventLoop.java:650)\n", + "\tat io.netty.channel.nio.NioEventLoop.processSelectedKeys(NioEventLoop.java:576)\n", + "\tat io.netty.channel.nio.NioEventLoop.run(NioEventLoop.java:493)\n", + "\tat io.netty.util.concurrent.SingleThreadEventExecutor$4.run(SingleThreadEventExecutor.java:989)\n", + "\tat io.netty.util.internal.ThreadExecutorMap$2.run(ThreadExecutorMap.java:74)\n", + "\tat io.netty.util.concurrent.FastThreadLocalRunnable.run(FastThreadLocalRunnable.java:30)\n", + "\t... 1 more\n", + "Caused by: java.lang.OutOfMemoryError: Java heap space\n", + "23/11/24 21:00:50 WARN org.apache.spark.scheduler.TaskSetManager: Lost task 427.0 in stage 19.0 (TID 2147) (hub-msca-bdp-dphub-students-test-ridhi-w-0.c.msca-bdp-student-ap.internal executor 6): TaskResultLost (result lost from block manager)\n", + "23/11/24 21:00:53 WARN org.apache.spark.storage.BlockManager: Failed to fetch block after 1 fetch failures. 
Most recent failure cause:\n", + "org.apache.spark.SparkException: Exception thrown in awaitResult: \n", + "\tat org.apache.spark.util.ThreadUtils$.awaitResult(ThreadUtils.scala:301)\n", + "\tat org.apache.spark.network.BlockTransferService.fetchBlockSync(BlockTransferService.scala:103)\n", + "\tat org.apache.spark.storage.BlockManager.fetchRemoteManagedBuffer(BlockManager.scala:1069)\n", + "\tat org.apache.spark.storage.BlockManager.$anonfun$getRemoteBlock$8(BlockManager.scala:1013)\n", + "\tat scala.Option.orElse(Option.scala:447)\n", + "\tat org.apache.spark.storage.BlockManager.getRemoteBlock(BlockManager.scala:1013)\n", + "\tat org.apache.spark.storage.BlockManager.getRemoteBytes(BlockManager.scala:1151)\n", + "\tat org.apache.spark.scheduler.TaskResultGetter$$anon$3.$anonfun$run$1(TaskResultGetter.scala:88)\n", + "\tat scala.runtime.java8.JFunction0$mcV$sp.apply(JFunction0$mcV$sp.java:23)\n", + "\tat org.apache.spark.util.Utils$.logUncaughtExceptions(Utils.scala:1996)\n", + "\tat org.apache.spark.scheduler.TaskResultGetter$$anon$3.run(TaskResultGetter.scala:63)\n", + "\tat java.util.concurrent.ThreadPoolExecutor.runWorker(ThreadPoolExecutor.java:1149)\n", + "\tat java.util.concurrent.ThreadPoolExecutor$Worker.run(ThreadPoolExecutor.java:624)\n", + "\tat java.lang.Thread.run(Thread.java:750)\n", + "Caused by: java.util.concurrent.ExecutionException: Boxed Error\n", + "\tat scala.concurrent.impl.Promise$.resolver(Promise.scala:87)\n", + "\tat scala.concurrent.impl.Promise$.scala$concurrent$impl$Promise$$resolveTry(Promise.scala:79)\n", + "\tat scala.concurrent.impl.Promise$DefaultPromise.tryComplete(Promise.scala:284)\n", + "\tat scala.concurrent.Promise.complete(Promise.scala:53)\n", + "\tat scala.concurrent.Promise.complete$(Promise.scala:52)\n", + "\tat scala.concurrent.impl.Promise$DefaultPromise.complete(Promise.scala:187)\n", + "\tat scala.concurrent.Promise.failure(Promise.scala:104)\n", + "\tat scala.concurrent.Promise.failure$(Promise.scala:104)\n", + "\tat scala.concurrent.impl.Promise$DefaultPromise.failure(Promise.scala:187)\n", + "\tat org.apache.spark.network.BlockTransferService$$anon$1.onBlockFetchSuccess(BlockTransferService.scala:98)\n", + "\tat org.apache.spark.network.shuffle.RetryingBlockFetcher$RetryingBlockFetchListener.onBlockFetchSuccess(RetryingBlockFetcher.java:216)\n", + "\tat org.apache.spark.network.shuffle.OneForOneBlockFetcher$ChunkCallback.onSuccess(OneForOneBlockFetcher.java:190)\n", + "\tat org.apache.spark.network.client.TransportResponseHandler.handle(TransportResponseHandler.java:171)\n", + "\tat org.apache.spark.network.server.TransportChannelHandler.channelRead0(TransportChannelHandler.java:142)\n", + "\tat org.apache.spark.network.server.TransportChannelHandler.channelRead0(TransportChannelHandler.java:53)\n", + "\tat io.netty.channel.SimpleChannelInboundHandler.channelRead(SimpleChannelInboundHandler.java:99)\n", + "\tat io.netty.channel.AbstractChannelHandlerContext.invokeChannelRead(AbstractChannelHandlerContext.java:379)\n", + "\tat io.netty.channel.AbstractChannelHandlerContext.invokeChannelRead(AbstractChannelHandlerContext.java:365)\n", + "\tat io.netty.channel.AbstractChannelHandlerContext.fireChannelRead(AbstractChannelHandlerContext.java:357)\n", + "\tat io.netty.handler.timeout.IdleStateHandler.channelRead(IdleStateHandler.java:286)\n", + "\tat io.netty.channel.AbstractChannelHandlerContext.invokeChannelRead(AbstractChannelHandlerContext.java:379)\n", + "\tat 
io.netty.channel.AbstractChannelHandlerContext.invokeChannelRead(AbstractChannelHandlerContext.java:365)\n", + "\tat io.netty.channel.AbstractChannelHandlerContext.fireChannelRead(AbstractChannelHandlerContext.java:357)\n", + "\tat io.netty.handler.codec.MessageToMessageDecoder.channelRead(MessageToMessageDecoder.java:103)\n", + "\tat io.netty.channel.AbstractChannelHandlerContext.invokeChannelRead(AbstractChannelHandlerContext.java:379)\n", + "\tat io.netty.channel.AbstractChannelHandlerContext.invokeChannelRead(AbstractChannelHandlerContext.java:365)\n", + "\tat io.netty.channel.AbstractChannelHandlerContext.fireChannelRead(AbstractChannelHandlerContext.java:357)\n", + "\tat org.apache.spark.network.util.TransportFrameDecoder.channelRead(TransportFrameDecoder.java:102)\n", + "\tat io.netty.channel.AbstractChannelHandlerContext.invokeChannelRead(AbstractChannelHandlerContext.java:379)\n", + "\tat io.netty.channel.AbstractChannelHandlerContext.invokeChannelRead(AbstractChannelHandlerContext.java:365)\n", + "\tat io.netty.channel.AbstractChannelHandlerContext.fireChannelRead(AbstractChannelHandlerContext.java:357)\n", + "\tat io.netty.channel.DefaultChannelPipeline$HeadContext.channelRead(DefaultChannelPipeline.java:1410)\n", + "\tat io.netty.channel.AbstractChannelHandlerContext.invokeChannelRead(AbstractChannelHandlerContext.java:379)\n", + "\tat io.netty.channel.AbstractChannelHandlerContext.invokeChannelRead(AbstractChannelHandlerContext.java:365)\n", + "\tat io.netty.channel.DefaultChannelPipeline.fireChannelRead(DefaultChannelPipeline.java:919)\n", + "\tat io.netty.channel.nio.AbstractNioByteChannel$NioByteUnsafe.read(AbstractNioByteChannel.java:163)\n", + "\tat io.netty.channel.nio.NioEventLoop.processSelectedKey(NioEventLoop.java:714)\n", + "\tat io.netty.channel.nio.NioEventLoop.processSelectedKeysOptimized(NioEventLoop.java:650)\n", + "\tat io.netty.channel.nio.NioEventLoop.processSelectedKeys(NioEventLoop.java:576)\n", + "\tat io.netty.channel.nio.NioEventLoop.run(NioEventLoop.java:493)\n", + "\tat io.netty.util.concurrent.SingleThreadEventExecutor$4.run(SingleThreadEventExecutor.java:989)\n", + "\tat io.netty.util.internal.ThreadExecutorMap$2.run(ThreadExecutorMap.java:74)\n", + "\tat io.netty.util.concurrent.FastThreadLocalRunnable.run(FastThreadLocalRunnable.java:30)\n", + "\t... 1 more\n", + "Caused by: java.lang.OutOfMemoryError: Java heap space\n", + "23/11/24 21:00:53 WARN org.apache.spark.scheduler.TaskSetManager: Lost task 443.0 in stage 19.0 (TID 2163) (hub-msca-bdp-dphub-students-test-ridhi-w-0.c.msca-bdp-student-ap.internal executor 3): TaskResultLost (result lost from block manager)\n", + "23/11/24 21:00:56 WARN org.apache.spark.storage.BlockManager: Failed to fetch block after 1 fetch failures. 
Most recent failure cause:\n", + "org.apache.spark.SparkException: Exception thrown in awaitResult: \n", + "\tat org.apache.spark.util.ThreadUtils$.awaitResult(ThreadUtils.scala:301)\n", + "\tat org.apache.spark.network.BlockTransferService.fetchBlockSync(BlockTransferService.scala:103)\n", + "\tat org.apache.spark.storage.BlockManager.fetchRemoteManagedBuffer(BlockManager.scala:1069)\n", + "\tat org.apache.spark.storage.BlockManager.$anonfun$getRemoteBlock$8(BlockManager.scala:1013)\n", + "\tat scala.Option.orElse(Option.scala:447)\n", + "\tat org.apache.spark.storage.BlockManager.getRemoteBlock(BlockManager.scala:1013)\n", + "\tat org.apache.spark.storage.BlockManager.getRemoteBytes(BlockManager.scala:1151)\n", + "\tat org.apache.spark.scheduler.TaskResultGetter$$anon$3.$anonfun$run$1(TaskResultGetter.scala:88)\n", + "\tat scala.runtime.java8.JFunction0$mcV$sp.apply(JFunction0$mcV$sp.java:23)\n", + "\tat org.apache.spark.util.Utils$.logUncaughtExceptions(Utils.scala:1996)\n", + "\tat org.apache.spark.scheduler.TaskResultGetter$$anon$3.run(TaskResultGetter.scala:63)\n", + "\tat java.util.concurrent.ThreadPoolExecutor.runWorker(ThreadPoolExecutor.java:1149)\n", + "\tat java.util.concurrent.ThreadPoolExecutor$Worker.run(ThreadPoolExecutor.java:624)\n", + "\tat java.lang.Thread.run(Thread.java:750)\n", + "Caused by: java.util.concurrent.ExecutionException: Boxed Error\n", + "\tat scala.concurrent.impl.Promise$.resolver(Promise.scala:87)\n", + "\tat scala.concurrent.impl.Promise$.scala$concurrent$impl$Promise$$resolveTry(Promise.scala:79)\n", + "\tat scala.concurrent.impl.Promise$DefaultPromise.tryComplete(Promise.scala:284)\n", + "\tat scala.concurrent.Promise.complete(Promise.scala:53)\n", + "\tat scala.concurrent.Promise.complete$(Promise.scala:52)\n", + "\tat scala.concurrent.impl.Promise$DefaultPromise.complete(Promise.scala:187)\n", + "\tat scala.concurrent.Promise.failure(Promise.scala:104)\n", + "\tat scala.concurrent.Promise.failure$(Promise.scala:104)\n", + "\tat scala.concurrent.impl.Promise$DefaultPromise.failure(Promise.scala:187)\n", + "\tat org.apache.spark.network.BlockTransferService$$anon$1.onBlockFetchSuccess(BlockTransferService.scala:98)\n", + "\tat org.apache.spark.network.shuffle.RetryingBlockFetcher$RetryingBlockFetchListener.onBlockFetchSuccess(RetryingBlockFetcher.java:216)\n", + "\tat org.apache.spark.network.shuffle.OneForOneBlockFetcher$ChunkCallback.onSuccess(OneForOneBlockFetcher.java:190)\n", + "\tat org.apache.spark.network.client.TransportResponseHandler.handle(TransportResponseHandler.java:171)\n", + "\tat org.apache.spark.network.server.TransportChannelHandler.channelRead0(TransportChannelHandler.java:142)\n", + "\tat org.apache.spark.network.server.TransportChannelHandler.channelRead0(TransportChannelHandler.java:53)\n", + "\tat io.netty.channel.SimpleChannelInboundHandler.channelRead(SimpleChannelInboundHandler.java:99)\n", + "\tat io.netty.channel.AbstractChannelHandlerContext.invokeChannelRead(AbstractChannelHandlerContext.java:379)\n", + "\tat io.netty.channel.AbstractChannelHandlerContext.invokeChannelRead(AbstractChannelHandlerContext.java:365)\n", + "\tat io.netty.channel.AbstractChannelHandlerContext.fireChannelRead(AbstractChannelHandlerContext.java:357)\n", + "\tat io.netty.handler.timeout.IdleStateHandler.channelRead(IdleStateHandler.java:286)\n", + "\tat io.netty.channel.AbstractChannelHandlerContext.invokeChannelRead(AbstractChannelHandlerContext.java:379)\n", + "\tat 
io.netty.channel.AbstractChannelHandlerContext.invokeChannelRead(AbstractChannelHandlerContext.java:365)\n", + "\tat io.netty.channel.AbstractChannelHandlerContext.fireChannelRead(AbstractChannelHandlerContext.java:357)\n", + "\tat io.netty.handler.codec.MessageToMessageDecoder.channelRead(MessageToMessageDecoder.java:103)\n", + "\tat io.netty.channel.AbstractChannelHandlerContext.invokeChannelRead(AbstractChannelHandlerContext.java:379)\n", + "\tat io.netty.channel.AbstractChannelHandlerContext.invokeChannelRead(AbstractChannelHandlerContext.java:365)\n", + "\tat io.netty.channel.AbstractChannelHandlerContext.fireChannelRead(AbstractChannelHandlerContext.java:357)\n", + "\tat org.apache.spark.network.util.TransportFrameDecoder.channelRead(TransportFrameDecoder.java:102)\n", + "\tat io.netty.channel.AbstractChannelHandlerContext.invokeChannelRead(AbstractChannelHandlerContext.java:379)\n", + "\tat io.netty.channel.AbstractChannelHandlerContext.invokeChannelRead(AbstractChannelHandlerContext.java:365)\n", + "\tat io.netty.channel.AbstractChannelHandlerContext.fireChannelRead(AbstractChannelHandlerContext.java:357)\n", + "\tat io.netty.channel.DefaultChannelPipeline$HeadContext.channelRead(DefaultChannelPipeline.java:1410)\n", + "\tat io.netty.channel.AbstractChannelHandlerContext.invokeChannelRead(AbstractChannelHandlerContext.java:379)\n", + "\tat io.netty.channel.AbstractChannelHandlerContext.invokeChannelRead(AbstractChannelHandlerContext.java:365)\n", + "\tat io.netty.channel.DefaultChannelPipeline.fireChannelRead(DefaultChannelPipeline.java:919)\n", + "\tat io.netty.channel.nio.AbstractNioByteChannel$NioByteUnsafe.read(AbstractNioByteChannel.java:163)\n", + "\tat io.netty.channel.nio.NioEventLoop.processSelectedKey(NioEventLoop.java:714)\n", + "\tat io.netty.channel.nio.NioEventLoop.processSelectedKeysOptimized(NioEventLoop.java:650)\n", + "\tat io.netty.channel.nio.NioEventLoop.processSelectedKeys(NioEventLoop.java:576)\n", + "\tat io.netty.channel.nio.NioEventLoop.run(NioEventLoop.java:493)\n", + "\tat io.netty.util.concurrent.SingleThreadEventExecutor$4.run(SingleThreadEventExecutor.java:989)\n", + "\tat io.netty.util.internal.ThreadExecutorMap$2.run(ThreadExecutorMap.java:74)\n", + "\tat io.netty.util.concurrent.FastThreadLocalRunnable.run(FastThreadLocalRunnable.java:30)\n", + "\t... 1 more\n", + "Caused by: java.lang.OutOfMemoryError: Java heap space\n", + "23/11/24 21:00:56 WARN org.apache.spark.scheduler.TaskSetManager: Lost task 441.0 in stage 19.0 (TID 2161) (hub-msca-bdp-dphub-students-test-ridhi-w-0.c.msca-bdp-student-ap.internal executor 3): TaskResultLost (result lost from block manager)\n", + "23/11/24 21:00:59 WARN org.apache.spark.storage.BlockManager: Failed to fetch block after 1 fetch failures. 
Most recent failure cause:\n", + "org.apache.spark.SparkException: Exception thrown in awaitResult: \n", + "\tat org.apache.spark.util.ThreadUtils$.awaitResult(ThreadUtils.scala:301)\n", + "\tat org.apache.spark.network.BlockTransferService.fetchBlockSync(BlockTransferService.scala:103)\n", + "\tat org.apache.spark.storage.BlockManager.fetchRemoteManagedBuffer(BlockManager.scala:1069)\n", + "\tat org.apache.spark.storage.BlockManager.$anonfun$getRemoteBlock$8(BlockManager.scala:1013)\n", + "\tat scala.Option.orElse(Option.scala:447)\n", + "\tat org.apache.spark.storage.BlockManager.getRemoteBlock(BlockManager.scala:1013)\n", + "\tat org.apache.spark.storage.BlockManager.getRemoteBytes(BlockManager.scala:1151)\n", + "\tat org.apache.spark.scheduler.TaskResultGetter$$anon$3.$anonfun$run$1(TaskResultGetter.scala:88)\n", + "\tat scala.runtime.java8.JFunction0$mcV$sp.apply(JFunction0$mcV$sp.java:23)\n", + "\tat org.apache.spark.util.Utils$.logUncaughtExceptions(Utils.scala:1996)\n", + "\tat org.apache.spark.scheduler.TaskResultGetter$$anon$3.run(TaskResultGetter.scala:63)\n", + "\tat java.util.concurrent.ThreadPoolExecutor.runWorker(ThreadPoolExecutor.java:1149)\n", + "\tat java.util.concurrent.ThreadPoolExecutor$Worker.run(ThreadPoolExecutor.java:624)\n", + "\tat java.lang.Thread.run(Thread.java:750)\n", + "Caused by: java.util.concurrent.ExecutionException: Boxed Error\n", + "\tat scala.concurrent.impl.Promise$.resolver(Promise.scala:87)\n", + "\tat scala.concurrent.impl.Promise$.scala$concurrent$impl$Promise$$resolveTry(Promise.scala:79)\n", + "\tat scala.concurrent.impl.Promise$DefaultPromise.tryComplete(Promise.scala:284)\n", + "\tat scala.concurrent.Promise.complete(Promise.scala:53)\n", + "\tat scala.concurrent.Promise.complete$(Promise.scala:52)\n", + "\tat scala.concurrent.impl.Promise$DefaultPromise.complete(Promise.scala:187)\n", + "\tat scala.concurrent.Promise.failure(Promise.scala:104)\n", + "\tat scala.concurrent.Promise.failure$(Promise.scala:104)\n", + "\tat scala.concurrent.impl.Promise$DefaultPromise.failure(Promise.scala:187)\n", + "\tat org.apache.spark.network.BlockTransferService$$anon$1.onBlockFetchSuccess(BlockTransferService.scala:98)\n", + "\tat org.apache.spark.network.shuffle.RetryingBlockFetcher$RetryingBlockFetchListener.onBlockFetchSuccess(RetryingBlockFetcher.java:216)\n", + "\tat org.apache.spark.network.shuffle.OneForOneBlockFetcher$ChunkCallback.onSuccess(OneForOneBlockFetcher.java:190)\n", + "\tat org.apache.spark.network.client.TransportResponseHandler.handle(TransportResponseHandler.java:171)\n", + "\tat org.apache.spark.network.server.TransportChannelHandler.channelRead0(TransportChannelHandler.java:142)\n", + "\tat org.apache.spark.network.server.TransportChannelHandler.channelRead0(TransportChannelHandler.java:53)\n", + "\tat io.netty.channel.SimpleChannelInboundHandler.channelRead(SimpleChannelInboundHandler.java:99)\n", + "\tat io.netty.channel.AbstractChannelHandlerContext.invokeChannelRead(AbstractChannelHandlerContext.java:379)\n", + "\tat io.netty.channel.AbstractChannelHandlerContext.invokeChannelRead(AbstractChannelHandlerContext.java:365)\n", + "\tat io.netty.channel.AbstractChannelHandlerContext.fireChannelRead(AbstractChannelHandlerContext.java:357)\n", + "\tat io.netty.handler.timeout.IdleStateHandler.channelRead(IdleStateHandler.java:286)\n", + "\tat io.netty.channel.AbstractChannelHandlerContext.invokeChannelRead(AbstractChannelHandlerContext.java:379)\n", + "\tat 
io.netty.channel.AbstractChannelHandlerContext.invokeChannelRead(AbstractChannelHandlerContext.java:365)\n", + "\tat io.netty.channel.AbstractChannelHandlerContext.fireChannelRead(AbstractChannelHandlerContext.java:357)\n", + "\tat io.netty.handler.codec.MessageToMessageDecoder.channelRead(MessageToMessageDecoder.java:103)\n", + "\tat io.netty.channel.AbstractChannelHandlerContext.invokeChannelRead(AbstractChannelHandlerContext.java:379)\n", + "\tat io.netty.channel.AbstractChannelHandlerContext.invokeChannelRead(AbstractChannelHandlerContext.java:365)\n", + "\tat io.netty.channel.AbstractChannelHandlerContext.fireChannelRead(AbstractChannelHandlerContext.java:357)\n", + "\tat org.apache.spark.network.util.TransportFrameDecoder.channelRead(TransportFrameDecoder.java:102)\n", + "\tat io.netty.channel.AbstractChannelHandlerContext.invokeChannelRead(AbstractChannelHandlerContext.java:379)\n", + "\tat io.netty.channel.AbstractChannelHandlerContext.invokeChannelRead(AbstractChannelHandlerContext.java:365)\n", + "\tat io.netty.channel.AbstractChannelHandlerContext.fireChannelRead(AbstractChannelHandlerContext.java:357)\n", + "\tat io.netty.channel.DefaultChannelPipeline$HeadContext.channelRead(DefaultChannelPipeline.java:1410)\n", + "\tat io.netty.channel.AbstractChannelHandlerContext.invokeChannelRead(AbstractChannelHandlerContext.java:379)\n", + "\tat io.netty.channel.AbstractChannelHandlerContext.invokeChannelRead(AbstractChannelHandlerContext.java:365)\n", + "\tat io.netty.channel.DefaultChannelPipeline.fireChannelRead(DefaultChannelPipeline.java:919)\n", + "\tat io.netty.channel.nio.AbstractNioByteChannel$NioByteUnsafe.read(AbstractNioByteChannel.java:163)\n", + "\tat io.netty.channel.nio.NioEventLoop.processSelectedKey(NioEventLoop.java:714)\n", + "\tat io.netty.channel.nio.NioEventLoop.processSelectedKeysOptimized(NioEventLoop.java:650)\n", + "\tat io.netty.channel.nio.NioEventLoop.processSelectedKeys(NioEventLoop.java:576)\n", + "\tat io.netty.channel.nio.NioEventLoop.run(NioEventLoop.java:493)\n", + "\tat io.netty.util.concurrent.SingleThreadEventExecutor$4.run(SingleThreadEventExecutor.java:989)\n", + "\tat io.netty.util.internal.ThreadExecutorMap$2.run(ThreadExecutorMap.java:74)\n", + "\tat io.netty.util.concurrent.FastThreadLocalRunnable.run(FastThreadLocalRunnable.java:30)\n", + "\t... 1 more\n", + "Caused by: java.lang.OutOfMemoryError: Java heap space\n", + "23/11/24 21:00:59 WARN org.apache.spark.scheduler.TaskSetManager: Lost task 453.0 in stage 19.0 (TID 2173) (hub-msca-bdp-dphub-students-test-ridhi-w-1.c.msca-bdp-student-ap.internal executor 7): TaskResultLost (result lost from block manager)\n", + "23/11/24 21:01:02 WARN org.apache.spark.storage.BlockManager: Failed to fetch block after 1 fetch failures. 
Most recent failure cause:\n", + "org.apache.spark.SparkException: Exception thrown in awaitResult: \n", + "\tat org.apache.spark.util.ThreadUtils$.awaitResult(ThreadUtils.scala:301)\n", + "\tat org.apache.spark.network.BlockTransferService.fetchBlockSync(BlockTransferService.scala:103)\n", + "\tat org.apache.spark.storage.BlockManager.fetchRemoteManagedBuffer(BlockManager.scala:1069)\n", + "\tat org.apache.spark.storage.BlockManager.$anonfun$getRemoteBlock$8(BlockManager.scala:1013)\n", + "\tat scala.Option.orElse(Option.scala:447)\n", + "\tat org.apache.spark.storage.BlockManager.getRemoteBlock(BlockManager.scala:1013)\n", + "\tat org.apache.spark.storage.BlockManager.getRemoteBytes(BlockManager.scala:1151)\n", + "\tat org.apache.spark.scheduler.TaskResultGetter$$anon$3.$anonfun$run$1(TaskResultGetter.scala:88)\n", + "\tat scala.runtime.java8.JFunction0$mcV$sp.apply(JFunction0$mcV$sp.java:23)\n", + "\tat org.apache.spark.util.Utils$.logUncaughtExceptions(Utils.scala:1996)\n", + "\tat org.apache.spark.scheduler.TaskResultGetter$$anon$3.run(TaskResultGetter.scala:63)\n", + "\tat java.util.concurrent.ThreadPoolExecutor.runWorker(ThreadPoolExecutor.java:1149)\n", + "\tat java.util.concurrent.ThreadPoolExecutor$Worker.run(ThreadPoolExecutor.java:624)\n", + "\tat java.lang.Thread.run(Thread.java:750)\n", + "Caused by: java.util.concurrent.ExecutionException: Boxed Error\n", + "\tat scala.concurrent.impl.Promise$.resolver(Promise.scala:87)\n", + "\tat scala.concurrent.impl.Promise$.scala$concurrent$impl$Promise$$resolveTry(Promise.scala:79)\n", + "\tat scala.concurrent.impl.Promise$DefaultPromise.tryComplete(Promise.scala:284)\n", + "\tat scala.concurrent.Promise.complete(Promise.scala:53)\n", + "\tat scala.concurrent.Promise.complete$(Promise.scala:52)\n", + "\tat scala.concurrent.impl.Promise$DefaultPromise.complete(Promise.scala:187)\n", + "\tat scala.concurrent.Promise.failure(Promise.scala:104)\n", + "\tat scala.concurrent.Promise.failure$(Promise.scala:104)\n", + "\tat scala.concurrent.impl.Promise$DefaultPromise.failure(Promise.scala:187)\n", + "\tat org.apache.spark.network.BlockTransferService$$anon$1.onBlockFetchSuccess(BlockTransferService.scala:98)\n", + "\tat org.apache.spark.network.shuffle.RetryingBlockFetcher$RetryingBlockFetchListener.onBlockFetchSuccess(RetryingBlockFetcher.java:216)\n", + "\tat org.apache.spark.network.shuffle.OneForOneBlockFetcher$ChunkCallback.onSuccess(OneForOneBlockFetcher.java:190)\n", + "\tat org.apache.spark.network.client.TransportResponseHandler.handle(TransportResponseHandler.java:171)\n", + "\tat org.apache.spark.network.server.TransportChannelHandler.channelRead0(TransportChannelHandler.java:142)\n", + "\tat org.apache.spark.network.server.TransportChannelHandler.channelRead0(TransportChannelHandler.java:53)\n", + "\tat io.netty.channel.SimpleChannelInboundHandler.channelRead(SimpleChannelInboundHandler.java:99)\n", + "\tat io.netty.channel.AbstractChannelHandlerContext.invokeChannelRead(AbstractChannelHandlerContext.java:379)\n", + "\tat io.netty.channel.AbstractChannelHandlerContext.invokeChannelRead(AbstractChannelHandlerContext.java:365)\n", + "\tat io.netty.channel.AbstractChannelHandlerContext.fireChannelRead(AbstractChannelHandlerContext.java:357)\n", + "\tat io.netty.handler.timeout.IdleStateHandler.channelRead(IdleStateHandler.java:286)\n", + "\tat io.netty.channel.AbstractChannelHandlerContext.invokeChannelRead(AbstractChannelHandlerContext.java:379)\n", + "\tat 
io.netty.channel.AbstractChannelHandlerContext.invokeChannelRead(AbstractChannelHandlerContext.java:365)\n", + "\tat io.netty.channel.AbstractChannelHandlerContext.fireChannelRead(AbstractChannelHandlerContext.java:357)\n", + "\tat io.netty.handler.codec.MessageToMessageDecoder.channelRead(MessageToMessageDecoder.java:103)\n", + "\tat io.netty.channel.AbstractChannelHandlerContext.invokeChannelRead(AbstractChannelHandlerContext.java:379)\n", + "\tat io.netty.channel.AbstractChannelHandlerContext.invokeChannelRead(AbstractChannelHandlerContext.java:365)\n", + "\tat io.netty.channel.AbstractChannelHandlerContext.fireChannelRead(AbstractChannelHandlerContext.java:357)\n", + "\tat org.apache.spark.network.util.TransportFrameDecoder.channelRead(TransportFrameDecoder.java:102)\n", + "\tat io.netty.channel.AbstractChannelHandlerContext.invokeChannelRead(AbstractChannelHandlerContext.java:379)\n", + "\tat io.netty.channel.AbstractChannelHandlerContext.invokeChannelRead(AbstractChannelHandlerContext.java:365)\n", + "\tat io.netty.channel.AbstractChannelHandlerContext.fireChannelRead(AbstractChannelHandlerContext.java:357)\n", + "\tat io.netty.channel.DefaultChannelPipeline$HeadContext.channelRead(DefaultChannelPipeline.java:1410)\n", + "\tat io.netty.channel.AbstractChannelHandlerContext.invokeChannelRead(AbstractChannelHandlerContext.java:379)\n", + "\tat io.netty.channel.AbstractChannelHandlerContext.invokeChannelRead(AbstractChannelHandlerContext.java:365)\n", + "\tat io.netty.channel.DefaultChannelPipeline.fireChannelRead(DefaultChannelPipeline.java:919)\n", + "\tat io.netty.channel.nio.AbstractNioByteChannel$NioByteUnsafe.read(AbstractNioByteChannel.java:163)\n", + "\tat io.netty.channel.nio.NioEventLoop.processSelectedKey(NioEventLoop.java:714)\n", + "\tat io.netty.channel.nio.NioEventLoop.processSelectedKeysOptimized(NioEventLoop.java:650)\n", + "\tat io.netty.channel.nio.NioEventLoop.processSelectedKeys(NioEventLoop.java:576)\n", + "\tat io.netty.channel.nio.NioEventLoop.run(NioEventLoop.java:493)\n", + "\tat io.netty.util.concurrent.SingleThreadEventExecutor$4.run(SingleThreadEventExecutor.java:989)\n", + "\tat io.netty.util.internal.ThreadExecutorMap$2.run(ThreadExecutorMap.java:74)\n", + "\tat io.netty.util.concurrent.FastThreadLocalRunnable.run(FastThreadLocalRunnable.java:30)\n", + "\t... 1 more\n", + "Caused by: java.lang.OutOfMemoryError: Java heap space\n", + "23/11/24 21:01:02 WARN org.apache.spark.scheduler.TaskSetManager: Lost task 455.0 in stage 19.0 (TID 2175) (hub-msca-bdp-dphub-students-test-ridhi-w-1.c.msca-bdp-student-ap.internal executor 7): TaskResultLost (result lost from block manager)\n", + "23/11/24 21:01:05 WARN org.apache.spark.storage.BlockManager: Failed to fetch block after 1 fetch failures. 
Most recent failure cause:\n", + "org.apache.spark.SparkException: Exception thrown in awaitResult: \n", + "\tat org.apache.spark.util.ThreadUtils$.awaitResult(ThreadUtils.scala:301)\n", + "\tat org.apache.spark.network.BlockTransferService.fetchBlockSync(BlockTransferService.scala:103)\n", + "\tat org.apache.spark.storage.BlockManager.fetchRemoteManagedBuffer(BlockManager.scala:1069)\n", + "\tat org.apache.spark.storage.BlockManager.$anonfun$getRemoteBlock$8(BlockManager.scala:1013)\n", + "\tat scala.Option.orElse(Option.scala:447)\n", + "\tat org.apache.spark.storage.BlockManager.getRemoteBlock(BlockManager.scala:1013)\n", + "\tat org.apache.spark.storage.BlockManager.getRemoteBytes(BlockManager.scala:1151)\n", + "\tat org.apache.spark.scheduler.TaskResultGetter$$anon$3.$anonfun$run$1(TaskResultGetter.scala:88)\n", + "\tat scala.runtime.java8.JFunction0$mcV$sp.apply(JFunction0$mcV$sp.java:23)\n", + "\tat org.apache.spark.util.Utils$.logUncaughtExceptions(Utils.scala:1996)\n", + "\tat org.apache.spark.scheduler.TaskResultGetter$$anon$3.run(TaskResultGetter.scala:63)\n", + "\tat java.util.concurrent.ThreadPoolExecutor.runWorker(ThreadPoolExecutor.java:1149)\n", + "\tat java.util.concurrent.ThreadPoolExecutor$Worker.run(ThreadPoolExecutor.java:624)\n", + "\tat java.lang.Thread.run(Thread.java:750)\n", + "Caused by: java.util.concurrent.ExecutionException: Boxed Error\n", + "\tat scala.concurrent.impl.Promise$.resolver(Promise.scala:87)\n", + "\tat scala.concurrent.impl.Promise$.scala$concurrent$impl$Promise$$resolveTry(Promise.scala:79)\n", + "\tat scala.concurrent.impl.Promise$DefaultPromise.tryComplete(Promise.scala:284)\n", + "\tat scala.concurrent.Promise.complete(Promise.scala:53)\n", + "\tat scala.concurrent.Promise.complete$(Promise.scala:52)\n", + "\tat scala.concurrent.impl.Promise$DefaultPromise.complete(Promise.scala:187)\n", + "\tat scala.concurrent.Promise.failure(Promise.scala:104)\n", + "\tat scala.concurrent.Promise.failure$(Promise.scala:104)\n", + "\tat scala.concurrent.impl.Promise$DefaultPromise.failure(Promise.scala:187)\n", + "\tat org.apache.spark.network.BlockTransferService$$anon$1.onBlockFetchSuccess(BlockTransferService.scala:98)\n", + "\tat org.apache.spark.network.shuffle.RetryingBlockFetcher$RetryingBlockFetchListener.onBlockFetchSuccess(RetryingBlockFetcher.java:216)\n", + "\tat org.apache.spark.network.shuffle.OneForOneBlockFetcher$ChunkCallback.onSuccess(OneForOneBlockFetcher.java:190)\n", + "\tat org.apache.spark.network.client.TransportResponseHandler.handle(TransportResponseHandler.java:171)\n", + "\tat org.apache.spark.network.server.TransportChannelHandler.channelRead0(TransportChannelHandler.java:142)\n", + "\tat org.apache.spark.network.server.TransportChannelHandler.channelRead0(TransportChannelHandler.java:53)\n", + "\tat io.netty.channel.SimpleChannelInboundHandler.channelRead(SimpleChannelInboundHandler.java:99)\n", + "\tat io.netty.channel.AbstractChannelHandlerContext.invokeChannelRead(AbstractChannelHandlerContext.java:379)\n", + "\tat io.netty.channel.AbstractChannelHandlerContext.invokeChannelRead(AbstractChannelHandlerContext.java:365)\n", + "\tat io.netty.channel.AbstractChannelHandlerContext.fireChannelRead(AbstractChannelHandlerContext.java:357)\n", + "\tat io.netty.handler.timeout.IdleStateHandler.channelRead(IdleStateHandler.java:286)\n", + "\tat io.netty.channel.AbstractChannelHandlerContext.invokeChannelRead(AbstractChannelHandlerContext.java:379)\n", + "\tat 
io.netty.channel.AbstractChannelHandlerContext.invokeChannelRead(AbstractChannelHandlerContext.java:365)\n", + "\tat io.netty.channel.AbstractChannelHandlerContext.fireChannelRead(AbstractChannelHandlerContext.java:357)\n", + "\tat io.netty.handler.codec.MessageToMessageDecoder.channelRead(MessageToMessageDecoder.java:103)\n", + "\tat io.netty.channel.AbstractChannelHandlerContext.invokeChannelRead(AbstractChannelHandlerContext.java:379)\n", + "\tat io.netty.channel.AbstractChannelHandlerContext.invokeChannelRead(AbstractChannelHandlerContext.java:365)\n", + "\tat io.netty.channel.AbstractChannelHandlerContext.fireChannelRead(AbstractChannelHandlerContext.java:357)\n", + "\tat org.apache.spark.network.util.TransportFrameDecoder.channelRead(TransportFrameDecoder.java:102)\n", + "\tat io.netty.channel.AbstractChannelHandlerContext.invokeChannelRead(AbstractChannelHandlerContext.java:379)\n", + "\tat io.netty.channel.AbstractChannelHandlerContext.invokeChannelRead(AbstractChannelHandlerContext.java:365)\n", + "\tat io.netty.channel.AbstractChannelHandlerContext.fireChannelRead(AbstractChannelHandlerContext.java:357)\n", + "\tat io.netty.channel.DefaultChannelPipeline$HeadContext.channelRead(DefaultChannelPipeline.java:1410)\n", + "\tat io.netty.channel.AbstractChannelHandlerContext.invokeChannelRead(AbstractChannelHandlerContext.java:379)\n", + "\tat io.netty.channel.AbstractChannelHandlerContext.invokeChannelRead(AbstractChannelHandlerContext.java:365)\n", + "\tat io.netty.channel.DefaultChannelPipeline.fireChannelRead(DefaultChannelPipeline.java:919)\n", + "\tat io.netty.channel.nio.AbstractNioByteChannel$NioByteUnsafe.read(AbstractNioByteChannel.java:163)\n", + "\tat io.netty.channel.nio.NioEventLoop.processSelectedKey(NioEventLoop.java:714)\n", + "\tat io.netty.channel.nio.NioEventLoop.processSelectedKeysOptimized(NioEventLoop.java:650)\n", + "\tat io.netty.channel.nio.NioEventLoop.processSelectedKeys(NioEventLoop.java:576)\n", + "\tat io.netty.channel.nio.NioEventLoop.run(NioEventLoop.java:493)\n", + "\tat io.netty.util.concurrent.SingleThreadEventExecutor$4.run(SingleThreadEventExecutor.java:989)\n", + "\tat io.netty.util.internal.ThreadExecutorMap$2.run(ThreadExecutorMap.java:74)\n", + "\tat io.netty.util.concurrent.FastThreadLocalRunnable.run(FastThreadLocalRunnable.java:30)\n", + "\t... 1 more\n", + "Caused by: java.lang.OutOfMemoryError: Java heap space\n", + "23/11/24 21:01:05 WARN org.apache.spark.scheduler.TaskSetManager: Lost task 449.0 in stage 19.0 (TID 2169) (hub-msca-bdp-dphub-students-test-ridhi-w-1.c.msca-bdp-student-ap.internal executor 7): TaskResultLost (result lost from block manager)\n", + "23/11/24 21:01:07 WARN org.apache.spark.storage.BlockManager: Failed to fetch block after 1 fetch failures. 
Most recent failure cause:\n", + "org.apache.spark.SparkException: Exception thrown in awaitResult: \n", + "\tat org.apache.spark.util.ThreadUtils$.awaitResult(ThreadUtils.scala:301)\n", + "\tat org.apache.spark.network.BlockTransferService.fetchBlockSync(BlockTransferService.scala:103)\n", + "\tat org.apache.spark.storage.BlockManager.fetchRemoteManagedBuffer(BlockManager.scala:1069)\n", + "\tat org.apache.spark.storage.BlockManager.$anonfun$getRemoteBlock$8(BlockManager.scala:1013)\n", + "\tat scala.Option.orElse(Option.scala:447)\n", + "\tat org.apache.spark.storage.BlockManager.getRemoteBlock(BlockManager.scala:1013)\n", + "\tat org.apache.spark.storage.BlockManager.getRemoteBytes(BlockManager.scala:1151)\n", + "\tat org.apache.spark.scheduler.TaskResultGetter$$anon$3.$anonfun$run$1(TaskResultGetter.scala:88)\n", + "\tat scala.runtime.java8.JFunction0$mcV$sp.apply(JFunction0$mcV$sp.java:23)\n", + "\tat org.apache.spark.util.Utils$.logUncaughtExceptions(Utils.scala:1996)\n", + "\tat org.apache.spark.scheduler.TaskResultGetter$$anon$3.run(TaskResultGetter.scala:63)\n", + "\tat java.util.concurrent.ThreadPoolExecutor.runWorker(ThreadPoolExecutor.java:1149)\n", + "\tat java.util.concurrent.ThreadPoolExecutor$Worker.run(ThreadPoolExecutor.java:624)\n", + "\tat java.lang.Thread.run(Thread.java:750)\n", + "Caused by: java.util.concurrent.ExecutionException: Boxed Error\n", + "\tat scala.concurrent.impl.Promise$.resolver(Promise.scala:87)\n", + "\tat scala.concurrent.impl.Promise$.scala$concurrent$impl$Promise$$resolveTry(Promise.scala:79)\n", + "\tat scala.concurrent.impl.Promise$DefaultPromise.tryComplete(Promise.scala:284)\n", + "\tat scala.concurrent.Promise.complete(Promise.scala:53)\n", + "\tat scala.concurrent.Promise.complete$(Promise.scala:52)\n", + "\tat scala.concurrent.impl.Promise$DefaultPromise.complete(Promise.scala:187)\n", + "\tat scala.concurrent.Promise.failure(Promise.scala:104)\n", + "\tat scala.concurrent.Promise.failure$(Promise.scala:104)\n", + "\tat scala.concurrent.impl.Promise$DefaultPromise.failure(Promise.scala:187)\n", + "\tat org.apache.spark.network.BlockTransferService$$anon$1.onBlockFetchSuccess(BlockTransferService.scala:98)\n", + "\tat org.apache.spark.network.shuffle.RetryingBlockFetcher$RetryingBlockFetchListener.onBlockFetchSuccess(RetryingBlockFetcher.java:216)\n", + "\tat org.apache.spark.network.shuffle.OneForOneBlockFetcher$ChunkCallback.onSuccess(OneForOneBlockFetcher.java:190)\n", + "\tat org.apache.spark.network.client.TransportResponseHandler.handle(TransportResponseHandler.java:171)\n", + "\tat org.apache.spark.network.server.TransportChannelHandler.channelRead0(TransportChannelHandler.java:142)\n", + "\tat org.apache.spark.network.server.TransportChannelHandler.channelRead0(TransportChannelHandler.java:53)\n", + "\tat io.netty.channel.SimpleChannelInboundHandler.channelRead(SimpleChannelInboundHandler.java:99)\n", + "\tat io.netty.channel.AbstractChannelHandlerContext.invokeChannelRead(AbstractChannelHandlerContext.java:379)\n", + "\tat io.netty.channel.AbstractChannelHandlerContext.invokeChannelRead(AbstractChannelHandlerContext.java:365)\n", + "\tat io.netty.channel.AbstractChannelHandlerContext.fireChannelRead(AbstractChannelHandlerContext.java:357)\n", + "\tat io.netty.handler.timeout.IdleStateHandler.channelRead(IdleStateHandler.java:286)\n", + "\tat io.netty.channel.AbstractChannelHandlerContext.invokeChannelRead(AbstractChannelHandlerContext.java:379)\n", + "\tat 
io.netty.channel.AbstractChannelHandlerContext.invokeChannelRead(AbstractChannelHandlerContext.java:365)\n", + "\tat io.netty.channel.AbstractChannelHandlerContext.fireChannelRead(AbstractChannelHandlerContext.java:357)\n", + "\tat io.netty.handler.codec.MessageToMessageDecoder.channelRead(MessageToMessageDecoder.java:103)\n", + "\tat io.netty.channel.AbstractChannelHandlerContext.invokeChannelRead(AbstractChannelHandlerContext.java:379)\n", + "\tat io.netty.channel.AbstractChannelHandlerContext.invokeChannelRead(AbstractChannelHandlerContext.java:365)\n", + "\tat io.netty.channel.AbstractChannelHandlerContext.fireChannelRead(AbstractChannelHandlerContext.java:357)\n", + "\tat org.apache.spark.network.util.TransportFrameDecoder.channelRead(TransportFrameDecoder.java:102)\n", + "\tat io.netty.channel.AbstractChannelHandlerContext.invokeChannelRead(AbstractChannelHandlerContext.java:379)\n", + "\tat io.netty.channel.AbstractChannelHandlerContext.invokeChannelRead(AbstractChannelHandlerContext.java:365)\n", + "\tat io.netty.channel.AbstractChannelHandlerContext.fireChannelRead(AbstractChannelHandlerContext.java:357)\n", + "\tat io.netty.channel.DefaultChannelPipeline$HeadContext.channelRead(DefaultChannelPipeline.java:1410)\n", + "\tat io.netty.channel.AbstractChannelHandlerContext.invokeChannelRead(AbstractChannelHandlerContext.java:379)\n", + "\tat io.netty.channel.AbstractChannelHandlerContext.invokeChannelRead(AbstractChannelHandlerContext.java:365)\n", + "\tat io.netty.channel.DefaultChannelPipeline.fireChannelRead(DefaultChannelPipeline.java:919)\n", + "\tat io.netty.channel.nio.AbstractNioByteChannel$NioByteUnsafe.read(AbstractNioByteChannel.java:163)\n", + "\tat io.netty.channel.nio.NioEventLoop.processSelectedKey(NioEventLoop.java:714)\n", + "\tat io.netty.channel.nio.NioEventLoop.processSelectedKeysOptimized(NioEventLoop.java:650)\n", + "\tat io.netty.channel.nio.NioEventLoop.processSelectedKeys(NioEventLoop.java:576)\n", + "\tat io.netty.channel.nio.NioEventLoop.run(NioEventLoop.java:493)\n", + "\tat io.netty.util.concurrent.SingleThreadEventExecutor$4.run(SingleThreadEventExecutor.java:989)\n", + "\tat io.netty.util.internal.ThreadExecutorMap$2.run(ThreadExecutorMap.java:74)\n", + "\tat io.netty.util.concurrent.FastThreadLocalRunnable.run(FastThreadLocalRunnable.java:30)\n", + "\t... 1 more\n", + "Caused by: java.lang.OutOfMemoryError: Java heap space\n", + "23/11/24 21:01:07 WARN org.apache.spark.scheduler.TaskSetManager: Lost task 458.0 in stage 19.0 (TID 2178) (hub-msca-bdp-dphub-students-test-ridhi-w-1.c.msca-bdp-student-ap.internal executor 7): TaskResultLost (result lost from block manager)\n", + "23/11/24 21:01:12 WARN org.apache.spark.storage.BlockManager: Failed to fetch block after 1 fetch failures. 
Most recent failure cause:\n", + "org.apache.spark.SparkException: Exception thrown in awaitResult: \n", + "\tat org.apache.spark.util.ThreadUtils$.awaitResult(ThreadUtils.scala:301)\n", + "\tat org.apache.spark.network.BlockTransferService.fetchBlockSync(BlockTransferService.scala:103)\n", + "\tat org.apache.spark.storage.BlockManager.fetchRemoteManagedBuffer(BlockManager.scala:1069)\n", + "\tat org.apache.spark.storage.BlockManager.$anonfun$getRemoteBlock$8(BlockManager.scala:1013)\n", + "\tat scala.Option.orElse(Option.scala:447)\n", + "\tat org.apache.spark.storage.BlockManager.getRemoteBlock(BlockManager.scala:1013)\n", + "\tat org.apache.spark.storage.BlockManager.getRemoteBytes(BlockManager.scala:1151)\n", + "\tat org.apache.spark.scheduler.TaskResultGetter$$anon$3.$anonfun$run$1(TaskResultGetter.scala:88)\n", + "\tat scala.runtime.java8.JFunction0$mcV$sp.apply(JFunction0$mcV$sp.java:23)\n", + "\tat org.apache.spark.util.Utils$.logUncaughtExceptions(Utils.scala:1996)\n", + "\tat org.apache.spark.scheduler.TaskResultGetter$$anon$3.run(TaskResultGetter.scala:63)\n", + "\tat java.util.concurrent.ThreadPoolExecutor.runWorker(ThreadPoolExecutor.java:1149)\n", + "\tat java.util.concurrent.ThreadPoolExecutor$Worker.run(ThreadPoolExecutor.java:624)\n", + "\tat java.lang.Thread.run(Thread.java:750)\n", + "Caused by: java.util.concurrent.ExecutionException: Boxed Error\n", + "\tat scala.concurrent.impl.Promise$.resolver(Promise.scala:87)\n", + "\tat scala.concurrent.impl.Promise$.scala$concurrent$impl$Promise$$resolveTry(Promise.scala:79)\n", + "\tat scala.concurrent.impl.Promise$DefaultPromise.tryComplete(Promise.scala:284)\n", + "\tat scala.concurrent.Promise.complete(Promise.scala:53)\n", + "\tat scala.concurrent.Promise.complete$(Promise.scala:52)\n", + "\tat scala.concurrent.impl.Promise$DefaultPromise.complete(Promise.scala:187)\n", + "\tat scala.concurrent.Promise.failure(Promise.scala:104)\n", + "\tat scala.concurrent.Promise.failure$(Promise.scala:104)\n", + "\tat scala.concurrent.impl.Promise$DefaultPromise.failure(Promise.scala:187)\n", + "\tat org.apache.spark.network.BlockTransferService$$anon$1.onBlockFetchSuccess(BlockTransferService.scala:98)\n", + "\tat org.apache.spark.network.shuffle.RetryingBlockFetcher$RetryingBlockFetchListener.onBlockFetchSuccess(RetryingBlockFetcher.java:216)\n", + "\tat org.apache.spark.network.shuffle.OneForOneBlockFetcher$ChunkCallback.onSuccess(OneForOneBlockFetcher.java:190)\n", + "\tat org.apache.spark.network.client.TransportResponseHandler.handle(TransportResponseHandler.java:171)\n", + "\tat org.apache.spark.network.server.TransportChannelHandler.channelRead0(TransportChannelHandler.java:142)\n", + "\tat org.apache.spark.network.server.TransportChannelHandler.channelRead0(TransportChannelHandler.java:53)\n", + "\tat io.netty.channel.SimpleChannelInboundHandler.channelRead(SimpleChannelInboundHandler.java:99)\n", + "\tat io.netty.channel.AbstractChannelHandlerContext.invokeChannelRead(AbstractChannelHandlerContext.java:379)\n", + "\tat io.netty.channel.AbstractChannelHandlerContext.invokeChannelRead(AbstractChannelHandlerContext.java:365)\n", + "\tat io.netty.channel.AbstractChannelHandlerContext.fireChannelRead(AbstractChannelHandlerContext.java:357)\n", + "\tat io.netty.handler.timeout.IdleStateHandler.channelRead(IdleStateHandler.java:286)\n", + "\tat io.netty.channel.AbstractChannelHandlerContext.invokeChannelRead(AbstractChannelHandlerContext.java:379)\n", + "\tat 
io.netty.channel.AbstractChannelHandlerContext.invokeChannelRead(AbstractChannelHandlerContext.java:365)\n", + "\tat io.netty.channel.AbstractChannelHandlerContext.fireChannelRead(AbstractChannelHandlerContext.java:357)\n", + "\tat io.netty.handler.codec.MessageToMessageDecoder.channelRead(MessageToMessageDecoder.java:103)\n", + "\tat io.netty.channel.AbstractChannelHandlerContext.invokeChannelRead(AbstractChannelHandlerContext.java:379)\n", + "\tat io.netty.channel.AbstractChannelHandlerContext.invokeChannelRead(AbstractChannelHandlerContext.java:365)\n", + "\tat io.netty.channel.AbstractChannelHandlerContext.fireChannelRead(AbstractChannelHandlerContext.java:357)\n", + "\tat org.apache.spark.network.util.TransportFrameDecoder.channelRead(TransportFrameDecoder.java:102)\n", + "\tat io.netty.channel.AbstractChannelHandlerContext.invokeChannelRead(AbstractChannelHandlerContext.java:379)\n", + "\tat io.netty.channel.AbstractChannelHandlerContext.invokeChannelRead(AbstractChannelHandlerContext.java:365)\n", + "\tat io.netty.channel.AbstractChannelHandlerContext.fireChannelRead(AbstractChannelHandlerContext.java:357)\n", + "\tat io.netty.channel.DefaultChannelPipeline$HeadContext.channelRead(DefaultChannelPipeline.java:1410)\n", + "\tat io.netty.channel.AbstractChannelHandlerContext.invokeChannelRead(AbstractChannelHandlerContext.java:379)\n", + "\tat io.netty.channel.AbstractChannelHandlerContext.invokeChannelRead(AbstractChannelHandlerContext.java:365)\n", + "\tat io.netty.channel.DefaultChannelPipeline.fireChannelRead(DefaultChannelPipeline.java:919)\n", + "\tat io.netty.channel.nio.AbstractNioByteChannel$NioByteUnsafe.read(AbstractNioByteChannel.java:163)\n", + "\tat io.netty.channel.nio.NioEventLoop.processSelectedKey(NioEventLoop.java:714)\n", + "\tat io.netty.channel.nio.NioEventLoop.processSelectedKeysOptimized(NioEventLoop.java:650)\n", + "\tat io.netty.channel.nio.NioEventLoop.processSelectedKeys(NioEventLoop.java:576)\n", + "\tat io.netty.channel.nio.NioEventLoop.run(NioEventLoop.java:493)\n", + "\tat io.netty.util.concurrent.SingleThreadEventExecutor$4.run(SingleThreadEventExecutor.java:989)\n", + "\tat io.netty.util.internal.ThreadExecutorMap$2.run(ThreadExecutorMap.java:74)\n", + "\tat io.netty.util.concurrent.FastThreadLocalRunnable.run(FastThreadLocalRunnable.java:30)\n", + "\t... 1 more\n", + "Caused by: java.lang.OutOfMemoryError: Java heap space\n", + "23/11/24 21:01:12 WARN org.apache.spark.scheduler.TaskSetManager: Lost task 447.0 in stage 19.0 (TID 2167) (hub-msca-bdp-dphub-students-test-ridhi-w-0.c.msca-bdp-student-ap.internal executor 3): TaskResultLost (result lost from block manager)\n", + "23/11/24 21:01:17 WARN org.apache.spark.storage.BlockManager: Failed to fetch block after 1 fetch failures. 
Most recent failure cause:\n", + "org.apache.spark.SparkException: Exception thrown in awaitResult: \n", + "\tat org.apache.spark.util.ThreadUtils$.awaitResult(ThreadUtils.scala:301)\n", + "\tat org.apache.spark.network.BlockTransferService.fetchBlockSync(BlockTransferService.scala:103)\n", + "\tat org.apache.spark.storage.BlockManager.fetchRemoteManagedBuffer(BlockManager.scala:1069)\n", + "\tat org.apache.spark.storage.BlockManager.$anonfun$getRemoteBlock$8(BlockManager.scala:1013)\n", + "\tat scala.Option.orElse(Option.scala:447)\n", + "\tat org.apache.spark.storage.BlockManager.getRemoteBlock(BlockManager.scala:1013)\n", + "\tat org.apache.spark.storage.BlockManager.getRemoteBytes(BlockManager.scala:1151)\n", + "\tat org.apache.spark.scheduler.TaskResultGetter$$anon$3.$anonfun$run$1(TaskResultGetter.scala:88)\n", + "\tat scala.runtime.java8.JFunction0$mcV$sp.apply(JFunction0$mcV$sp.java:23)\n", + "\tat org.apache.spark.util.Utils$.logUncaughtExceptions(Utils.scala:1996)\n", + "\tat org.apache.spark.scheduler.TaskResultGetter$$anon$3.run(TaskResultGetter.scala:63)\n", + "\tat java.util.concurrent.ThreadPoolExecutor.runWorker(ThreadPoolExecutor.java:1149)\n", + "\tat java.util.concurrent.ThreadPoolExecutor$Worker.run(ThreadPoolExecutor.java:624)\n", + "\tat java.lang.Thread.run(Thread.java:750)\n", + "Caused by: java.util.concurrent.ExecutionException: Boxed Error\n", + "\tat scala.concurrent.impl.Promise$.resolver(Promise.scala:87)\n", + "\tat scala.concurrent.impl.Promise$.scala$concurrent$impl$Promise$$resolveTry(Promise.scala:79)\n", + "\tat scala.concurrent.impl.Promise$DefaultPromise.tryComplete(Promise.scala:284)\n", + "\tat scala.concurrent.Promise.complete(Promise.scala:53)\n", + "\tat scala.concurrent.Promise.complete$(Promise.scala:52)\n", + "\tat scala.concurrent.impl.Promise$DefaultPromise.complete(Promise.scala:187)\n", + "\tat scala.concurrent.Promise.failure(Promise.scala:104)\n", + "\tat scala.concurrent.Promise.failure$(Promise.scala:104)\n", + "\tat scala.concurrent.impl.Promise$DefaultPromise.failure(Promise.scala:187)\n", + "\tat org.apache.spark.network.BlockTransferService$$anon$1.onBlockFetchSuccess(BlockTransferService.scala:98)\n", + "\tat org.apache.spark.network.shuffle.RetryingBlockFetcher$RetryingBlockFetchListener.onBlockFetchSuccess(RetryingBlockFetcher.java:216)\n", + "\tat org.apache.spark.network.shuffle.OneForOneBlockFetcher$ChunkCallback.onSuccess(OneForOneBlockFetcher.java:190)\n", + "\tat org.apache.spark.network.client.TransportResponseHandler.handle(TransportResponseHandler.java:171)\n", + "\tat org.apache.spark.network.server.TransportChannelHandler.channelRead0(TransportChannelHandler.java:142)\n", + "\tat org.apache.spark.network.server.TransportChannelHandler.channelRead0(TransportChannelHandler.java:53)\n", + "\tat io.netty.channel.SimpleChannelInboundHandler.channelRead(SimpleChannelInboundHandler.java:99)\n", + "\tat io.netty.channel.AbstractChannelHandlerContext.invokeChannelRead(AbstractChannelHandlerContext.java:379)\n", + "\tat io.netty.channel.AbstractChannelHandlerContext.invokeChannelRead(AbstractChannelHandlerContext.java:365)\n", + "\tat io.netty.channel.AbstractChannelHandlerContext.fireChannelRead(AbstractChannelHandlerContext.java:357)\n", + "\tat io.netty.handler.timeout.IdleStateHandler.channelRead(IdleStateHandler.java:286)\n", + "\tat io.netty.channel.AbstractChannelHandlerContext.invokeChannelRead(AbstractChannelHandlerContext.java:379)\n", + "\tat 
io.netty.channel.AbstractChannelHandlerContext.invokeChannelRead(AbstractChannelHandlerContext.java:365)\n", + "\tat io.netty.channel.AbstractChannelHandlerContext.fireChannelRead(AbstractChannelHandlerContext.java:357)\n", + "\tat io.netty.handler.codec.MessageToMessageDecoder.channelRead(MessageToMessageDecoder.java:103)\n", + "\tat io.netty.channel.AbstractChannelHandlerContext.invokeChannelRead(AbstractChannelHandlerContext.java:379)\n", + "\tat io.netty.channel.AbstractChannelHandlerContext.invokeChannelRead(AbstractChannelHandlerContext.java:365)\n", + "\tat io.netty.channel.AbstractChannelHandlerContext.fireChannelRead(AbstractChannelHandlerContext.java:357)\n", + "\tat org.apache.spark.network.util.TransportFrameDecoder.channelRead(TransportFrameDecoder.java:102)\n", + "\tat io.netty.channel.AbstractChannelHandlerContext.invokeChannelRead(AbstractChannelHandlerContext.java:379)\n", + "\tat io.netty.channel.AbstractChannelHandlerContext.invokeChannelRead(AbstractChannelHandlerContext.java:365)\n", + "\tat io.netty.channel.AbstractChannelHandlerContext.fireChannelRead(AbstractChannelHandlerContext.java:357)\n", + "\tat io.netty.channel.DefaultChannelPipeline$HeadContext.channelRead(DefaultChannelPipeline.java:1410)\n", + "\tat io.netty.channel.AbstractChannelHandlerContext.invokeChannelRead(AbstractChannelHandlerContext.java:379)\n", + "\tat io.netty.channel.AbstractChannelHandlerContext.invokeChannelRead(AbstractChannelHandlerContext.java:365)\n", + "\tat io.netty.channel.DefaultChannelPipeline.fireChannelRead(DefaultChannelPipeline.java:919)\n", + "\tat io.netty.channel.nio.AbstractNioByteChannel$NioByteUnsafe.read(AbstractNioByteChannel.java:163)\n", + "\tat io.netty.channel.nio.NioEventLoop.processSelectedKey(NioEventLoop.java:714)\n", + "\tat io.netty.channel.nio.NioEventLoop.processSelectedKeysOptimized(NioEventLoop.java:650)\n", + "\tat io.netty.channel.nio.NioEventLoop.processSelectedKeys(NioEventLoop.java:576)\n", + "\tat io.netty.channel.nio.NioEventLoop.run(NioEventLoop.java:493)\n", + "\tat io.netty.util.concurrent.SingleThreadEventExecutor$4.run(SingleThreadEventExecutor.java:989)\n", + "\tat io.netty.util.internal.ThreadExecutorMap$2.run(ThreadExecutorMap.java:74)\n", + "\tat io.netty.util.concurrent.FastThreadLocalRunnable.run(FastThreadLocalRunnable.java:30)\n", + "\t... 1 more\n", + "Caused by: java.lang.OutOfMemoryError: Java heap space\n", + "23/11/24 21:01:19 WARN org.apache.spark.storage.BlockManager: Failed to fetch block after 1 fetch failures. 
Most recent failure cause: [identical org.apache.spark.SparkException: Exception thrown in awaitResult / Caused by: java.util.concurrent.ExecutionException: Boxed Error / Caused by: java.lang.OutOfMemoryError: Java heap space stack trace as above; the same trace was logged after each of the fetch-failure warnings below and is omitted here]\n", + "23/11/24 21:01:19 WARN org.apache.spark.scheduler.TaskSetManager: Lost task 439.0 in stage 19.0 (TID 2159) (hub-msca-bdp-dphub-students-test-ridhi-w-0.c.msca-bdp-student-ap.internal executor 3): TaskResultLost (result lost from block manager)\n", + "23/11/24 21:01:19 WARN org.apache.spark.scheduler.TaskSetManager: Lost task 450.0 in stage 19.0 (TID 2170) (hub-msca-bdp-dphub-students-test-ridhi-w-1.c.msca-bdp-student-ap.internal executor 7): TaskResultLost (result lost from block manager)\n", + "23/11/24 21:01:25 WARN org.apache.spark.storage.BlockManager: Failed to fetch block after 1 fetch failures. [stack trace omitted]\n", + "23/11/24 21:01:25 WARN org.apache.spark.scheduler.TaskSetManager: Lost task 457.0 in stage 19.0 (TID 2177) (hub-msca-bdp-dphub-students-test-ridhi-w-1.c.msca-bdp-student-ap.internal executor 7): TaskResultLost (result lost from block manager)\n", + "23/11/24 21:01:30 WARN org.apache.spark.storage.BlockManager: Failed to fetch block after 1 fetch failures. [stack trace omitted]\n", + "23/11/24 21:01:30 WARN org.apache.spark.scheduler.TaskSetManager: Lost task 446.0 in stage 19.0 (TID 2166) (hub-msca-bdp-dphub-students-test-ridhi-w-0.c.msca-bdp-student-ap.internal executor 3): TaskResultLost (result lost from block manager)\n", + "23/11/24 21:01:34 WARN org.apache.spark.storage.BlockManager: Failed to fetch block after 1 fetch failures. [stack trace omitted]\n", + "23/11/24 21:01:34 WARN org.apache.spark.scheduler.TaskSetManager: Lost task 451.0 in stage 19.0 (TID 2171) (hub-msca-bdp-dphub-students-test-ridhi-w-1.c.msca-bdp-student-ap.internal executor 7): TaskResultLost (result lost from block manager)\n", + "23/11/24 21:01:39 WARN org.apache.spark.storage.BlockManager: Failed to fetch block after 1 fetch failures. [stack trace omitted]\n", + "23/11/24 21:01:40 WARN org.apache.spark.storage.BlockManager: Failed to fetch block after 1 fetch failures. [stack trace omitted]\n", + "23/11/24 21:01:40 WARN org.apache.spark.scheduler.TaskSetManager: Lost task 442.0 in stage 19.0 (TID 2162) (hub-msca-bdp-dphub-students-test-ridhi-w-0.c.msca-bdp-student-ap.internal executor 3): TaskResultLost (result lost from block manager)\n", + "23/11/24 21:01:40 WARN org.apache.spark.scheduler.TaskSetManager: Lost task 452.0 in stage 19.0 (TID 2172) (hub-msca-bdp-dphub-students-test-ridhi-w-0.c.msca-bdp-student-ap.internal executor 6): TaskResultLost (result lost from block manager)\n", + "23/11/24 21:01:42 WARN org.apache.spark.storage.BlockManager: Failed to fetch block after 1 fetch failures. [stack trace omitted]\n", + "23/11/24 21:01:43 WARN org.apache.spark.storage.BlockManager: Failed to fetch block after 1 fetch failures. [stack trace omitted]\n", + "23/11/24 21:01:43 WARN org.apache.spark.scheduler.TaskSetManager: Lost task 448.0 in stage 19.0 (TID 2168) (hub-msca-bdp-dphub-students-test-ridhi-w-0.c.msca-bdp-student-ap.internal executor 3): TaskResultLost (result lost from block manager)\n", + "23/11/24 21:01:43 WARN org.apache.spark.scheduler.TaskSetManager: Lost task 456.0 in stage 19.0 (TID 2176) (hub-msca-bdp-dphub-students-test-ridhi-w-0.c.msca-bdp-student-ap.internal executor 6): TaskResultLost (result lost from block manager)\n", + "23/11/24 21:01:46 WARN org.apache.spark.storage.BlockManager: Failed to fetch block after 1 fetch failures. [stack trace omitted]\n", + "23/11/24 21:01:46 WARN org.apache.spark.scheduler.TaskSetManager: Lost task 463.0 in stage 19.0 (TID 2183) (hub-msca-bdp-dphub-students-test-ridhi-w-0.c.msca-bdp-student-ap.internal executor 6): TaskResultLost (result lost from block manager)\n", + "23/11/24 21:01:48 WARN org.apache.spark.storage.BlockManager: Failed to fetch block after 1 fetch failures. [stack trace omitted]\n", + "23/11/24 21:01:48 WARN org.apache.spark.scheduler.TaskSetManager: Lost task 462.0 in stage 19.0 (TID 2182) (hub-msca-bdp-dphub-students-test-ridhi-w-0.c.msca-bdp-student-ap.internal executor 6): TaskResultLost (result lost from block manager)\n", + "23/11/24 21:01:51 WARN org.apache.spark.storage.BlockManager: Failed to fetch block after 1 fetch failures. 
Most recent failure cause:\n", + "org.apache.spark.SparkException: Exception thrown in awaitResult: \n", + "\tat org.apache.spark.util.ThreadUtils$.awaitResult(ThreadUtils.scala:301)\n", + "\tat org.apache.spark.network.BlockTransferService.fetchBlockSync(BlockTransferService.scala:103)\n", + "\tat org.apache.spark.storage.BlockManager.fetchRemoteManagedBuffer(BlockManager.scala:1069)\n", + "\tat org.apache.spark.storage.BlockManager.$anonfun$getRemoteBlock$8(BlockManager.scala:1013)\n", + "\tat scala.Option.orElse(Option.scala:447)\n", + "\tat org.apache.spark.storage.BlockManager.getRemoteBlock(BlockManager.scala:1013)\n", + "\tat org.apache.spark.storage.BlockManager.getRemoteBytes(BlockManager.scala:1151)\n", + "\tat org.apache.spark.scheduler.TaskResultGetter$$anon$3.$anonfun$run$1(TaskResultGetter.scala:88)\n", + "\tat scala.runtime.java8.JFunction0$mcV$sp.apply(JFunction0$mcV$sp.java:23)\n", + "\tat org.apache.spark.util.Utils$.logUncaughtExceptions(Utils.scala:1996)\n", + "\tat org.apache.spark.scheduler.TaskResultGetter$$anon$3.run(TaskResultGetter.scala:63)\n", + "\tat java.util.concurrent.ThreadPoolExecutor.runWorker(ThreadPoolExecutor.java:1149)\n", + "\tat java.util.concurrent.ThreadPoolExecutor$Worker.run(ThreadPoolExecutor.java:624)\n", + "\tat java.lang.Thread.run(Thread.java:750)\n", + "Caused by: java.util.concurrent.ExecutionException: Boxed Error\n", + "\tat scala.concurrent.impl.Promise$.resolver(Promise.scala:87)\n", + "\tat scala.concurrent.impl.Promise$.scala$concurrent$impl$Promise$$resolveTry(Promise.scala:79)\n", + "\tat scala.concurrent.impl.Promise$DefaultPromise.tryComplete(Promise.scala:284)\n", + "\tat scala.concurrent.Promise.complete(Promise.scala:53)\n", + "\tat scala.concurrent.Promise.complete$(Promise.scala:52)\n", + "\tat scala.concurrent.impl.Promise$DefaultPromise.complete(Promise.scala:187)\n", + "\tat scala.concurrent.Promise.failure(Promise.scala:104)\n", + "\tat scala.concurrent.Promise.failure$(Promise.scala:104)\n", + "\tat scala.concurrent.impl.Promise$DefaultPromise.failure(Promise.scala:187)\n", + "\tat org.apache.spark.network.BlockTransferService$$anon$1.onBlockFetchSuccess(BlockTransferService.scala:98)\n", + "\tat org.apache.spark.network.shuffle.RetryingBlockFetcher$RetryingBlockFetchListener.onBlockFetchSuccess(RetryingBlockFetcher.java:216)\n", + "\tat org.apache.spark.network.shuffle.OneForOneBlockFetcher$ChunkCallback.onSuccess(OneForOneBlockFetcher.java:190)\n", + "\tat org.apache.spark.network.client.TransportResponseHandler.handle(TransportResponseHandler.java:171)\n", + "\tat org.apache.spark.network.server.TransportChannelHandler.channelRead0(TransportChannelHandler.java:142)\n", + "\tat org.apache.spark.network.server.TransportChannelHandler.channelRead0(TransportChannelHandler.java:53)\n", + "\tat io.netty.channel.SimpleChannelInboundHandler.channelRead(SimpleChannelInboundHandler.java:99)\n", + "\tat io.netty.channel.AbstractChannelHandlerContext.invokeChannelRead(AbstractChannelHandlerContext.java:379)\n", + "\tat io.netty.channel.AbstractChannelHandlerContext.invokeChannelRead(AbstractChannelHandlerContext.java:365)\n", + "\tat io.netty.channel.AbstractChannelHandlerContext.fireChannelRead(AbstractChannelHandlerContext.java:357)\n", + "\tat io.netty.handler.timeout.IdleStateHandler.channelRead(IdleStateHandler.java:286)\n", + "\tat io.netty.channel.AbstractChannelHandlerContext.invokeChannelRead(AbstractChannelHandlerContext.java:379)\n", + "\tat 
io.netty.channel.AbstractChannelHandlerContext.invokeChannelRead(AbstractChannelHandlerContext.java:365)\n", + "\tat io.netty.channel.AbstractChannelHandlerContext.fireChannelRead(AbstractChannelHandlerContext.java:357)\n", + "\tat io.netty.handler.codec.MessageToMessageDecoder.channelRead(MessageToMessageDecoder.java:103)\n", + "\tat io.netty.channel.AbstractChannelHandlerContext.invokeChannelRead(AbstractChannelHandlerContext.java:379)\n", + "\tat io.netty.channel.AbstractChannelHandlerContext.invokeChannelRead(AbstractChannelHandlerContext.java:365)\n", + "\tat io.netty.channel.AbstractChannelHandlerContext.fireChannelRead(AbstractChannelHandlerContext.java:357)\n", + "\tat org.apache.spark.network.util.TransportFrameDecoder.channelRead(TransportFrameDecoder.java:102)\n", + "\tat io.netty.channel.AbstractChannelHandlerContext.invokeChannelRead(AbstractChannelHandlerContext.java:379)\n", + "\tat io.netty.channel.AbstractChannelHandlerContext.invokeChannelRead(AbstractChannelHandlerContext.java:365)\n", + "\tat io.netty.channel.AbstractChannelHandlerContext.fireChannelRead(AbstractChannelHandlerContext.java:357)\n", + "\tat io.netty.channel.DefaultChannelPipeline$HeadContext.channelRead(DefaultChannelPipeline.java:1410)\n", + "\tat io.netty.channel.AbstractChannelHandlerContext.invokeChannelRead(AbstractChannelHandlerContext.java:379)\n", + "\tat io.netty.channel.AbstractChannelHandlerContext.invokeChannelRead(AbstractChannelHandlerContext.java:365)\n", + "\tat io.netty.channel.DefaultChannelPipeline.fireChannelRead(DefaultChannelPipeline.java:919)\n", + "\tat io.netty.channel.nio.AbstractNioByteChannel$NioByteUnsafe.read(AbstractNioByteChannel.java:163)\n", + "\tat io.netty.channel.nio.NioEventLoop.processSelectedKey(NioEventLoop.java:714)\n", + "\tat io.netty.channel.nio.NioEventLoop.processSelectedKeysOptimized(NioEventLoop.java:650)\n", + "\tat io.netty.channel.nio.NioEventLoop.processSelectedKeys(NioEventLoop.java:576)\n", + "\tat io.netty.channel.nio.NioEventLoop.run(NioEventLoop.java:493)\n", + "\tat io.netty.util.concurrent.SingleThreadEventExecutor$4.run(SingleThreadEventExecutor.java:989)\n", + "\tat io.netty.util.internal.ThreadExecutorMap$2.run(ThreadExecutorMap.java:74)\n", + "\tat io.netty.util.concurrent.FastThreadLocalRunnable.run(FastThreadLocalRunnable.java:30)\n", + "\t... 1 more\n", + "Caused by: java.lang.OutOfMemoryError: Java heap space\n", + "23/11/24 21:01:51 WARN org.apache.spark.scheduler.TaskSetManager: Lost task 459.0 in stage 19.0 (TID 2179) (hub-msca-bdp-dphub-students-test-ridhi-w-0.c.msca-bdp-student-ap.internal executor 6): TaskResultLost (result lost from block manager)\n", + "23/11/24 21:01:53 WARN org.apache.spark.storage.BlockManager: Failed to fetch block after 1 fetch failures. 
Most recent failure cause:\n", + "org.apache.spark.SparkException: Exception thrown in awaitResult: \n", + "\tat org.apache.spark.util.ThreadUtils$.awaitResult(ThreadUtils.scala:301)\n", + "\tat org.apache.spark.network.BlockTransferService.fetchBlockSync(BlockTransferService.scala:103)\n", + "\tat org.apache.spark.storage.BlockManager.fetchRemoteManagedBuffer(BlockManager.scala:1069)\n", + "\tat org.apache.spark.storage.BlockManager.$anonfun$getRemoteBlock$8(BlockManager.scala:1013)\n", + "\tat scala.Option.orElse(Option.scala:447)\n", + "\tat org.apache.spark.storage.BlockManager.getRemoteBlock(BlockManager.scala:1013)\n", + "\tat org.apache.spark.storage.BlockManager.getRemoteBytes(BlockManager.scala:1151)\n", + "\tat org.apache.spark.scheduler.TaskResultGetter$$anon$3.$anonfun$run$1(TaskResultGetter.scala:88)\n", + "\tat scala.runtime.java8.JFunction0$mcV$sp.apply(JFunction0$mcV$sp.java:23)\n", + "\tat org.apache.spark.util.Utils$.logUncaughtExceptions(Utils.scala:1996)\n", + "\tat org.apache.spark.scheduler.TaskResultGetter$$anon$3.run(TaskResultGetter.scala:63)\n", + "\tat java.util.concurrent.ThreadPoolExecutor.runWorker(ThreadPoolExecutor.java:1149)\n", + "\tat java.util.concurrent.ThreadPoolExecutor$Worker.run(ThreadPoolExecutor.java:624)\n", + "\tat java.lang.Thread.run(Thread.java:750)\n", + "Caused by: java.util.concurrent.ExecutionException: Boxed Error\n", + "\tat scala.concurrent.impl.Promise$.resolver(Promise.scala:87)\n", + "\tat scala.concurrent.impl.Promise$.scala$concurrent$impl$Promise$$resolveTry(Promise.scala:79)\n", + "\tat scala.concurrent.impl.Promise$DefaultPromise.tryComplete(Promise.scala:284)\n", + "\tat scala.concurrent.Promise.complete(Promise.scala:53)\n", + "\tat scala.concurrent.Promise.complete$(Promise.scala:52)\n", + "\tat scala.concurrent.impl.Promise$DefaultPromise.complete(Promise.scala:187)\n", + "\tat scala.concurrent.Promise.failure(Promise.scala:104)\n", + "\tat scala.concurrent.Promise.failure$(Promise.scala:104)\n", + "\tat scala.concurrent.impl.Promise$DefaultPromise.failure(Promise.scala:187)\n", + "\tat org.apache.spark.network.BlockTransferService$$anon$1.onBlockFetchSuccess(BlockTransferService.scala:98)\n", + "\tat org.apache.spark.network.shuffle.RetryingBlockFetcher$RetryingBlockFetchListener.onBlockFetchSuccess(RetryingBlockFetcher.java:216)\n", + "\tat org.apache.spark.network.shuffle.OneForOneBlockFetcher$ChunkCallback.onSuccess(OneForOneBlockFetcher.java:190)\n", + "\tat org.apache.spark.network.client.TransportResponseHandler.handle(TransportResponseHandler.java:171)\n", + "\tat org.apache.spark.network.server.TransportChannelHandler.channelRead0(TransportChannelHandler.java:142)\n", + "\tat org.apache.spark.network.server.TransportChannelHandler.channelRead0(TransportChannelHandler.java:53)\n", + "\tat io.netty.channel.SimpleChannelInboundHandler.channelRead(SimpleChannelInboundHandler.java:99)\n", + "\tat io.netty.channel.AbstractChannelHandlerContext.invokeChannelRead(AbstractChannelHandlerContext.java:379)\n", + "\tat io.netty.channel.AbstractChannelHandlerContext.invokeChannelRead(AbstractChannelHandlerContext.java:365)\n", + "\tat io.netty.channel.AbstractChannelHandlerContext.fireChannelRead(AbstractChannelHandlerContext.java:357)\n", + "\tat io.netty.handler.timeout.IdleStateHandler.channelRead(IdleStateHandler.java:286)\n", + "\tat io.netty.channel.AbstractChannelHandlerContext.invokeChannelRead(AbstractChannelHandlerContext.java:379)\n", + "\tat 
io.netty.channel.AbstractChannelHandlerContext.invokeChannelRead(AbstractChannelHandlerContext.java:365)\n", + "\tat io.netty.channel.AbstractChannelHandlerContext.fireChannelRead(AbstractChannelHandlerContext.java:357)\n", + "\tat io.netty.handler.codec.MessageToMessageDecoder.channelRead(MessageToMessageDecoder.java:103)\n", + "\tat io.netty.channel.AbstractChannelHandlerContext.invokeChannelRead(AbstractChannelHandlerContext.java:379)\n", + "\tat io.netty.channel.AbstractChannelHandlerContext.invokeChannelRead(AbstractChannelHandlerContext.java:365)\n", + "\tat io.netty.channel.AbstractChannelHandlerContext.fireChannelRead(AbstractChannelHandlerContext.java:357)\n", + "\tat org.apache.spark.network.util.TransportFrameDecoder.channelRead(TransportFrameDecoder.java:102)\n", + "\tat io.netty.channel.AbstractChannelHandlerContext.invokeChannelRead(AbstractChannelHandlerContext.java:379)\n", + "\tat io.netty.channel.AbstractChannelHandlerContext.invokeChannelRead(AbstractChannelHandlerContext.java:365)\n", + "\tat io.netty.channel.AbstractChannelHandlerContext.fireChannelRead(AbstractChannelHandlerContext.java:357)\n", + "\tat io.netty.channel.DefaultChannelPipeline$HeadContext.channelRead(DefaultChannelPipeline.java:1410)\n", + "\tat io.netty.channel.AbstractChannelHandlerContext.invokeChannelRead(AbstractChannelHandlerContext.java:379)\n", + "\tat io.netty.channel.AbstractChannelHandlerContext.invokeChannelRead(AbstractChannelHandlerContext.java:365)\n", + "\tat io.netty.channel.DefaultChannelPipeline.fireChannelRead(DefaultChannelPipeline.java:919)\n", + "\tat io.netty.channel.nio.AbstractNioByteChannel$NioByteUnsafe.read(AbstractNioByteChannel.java:163)\n", + "\tat io.netty.channel.nio.NioEventLoop.processSelectedKey(NioEventLoop.java:714)\n", + "\tat io.netty.channel.nio.NioEventLoop.processSelectedKeysOptimized(NioEventLoop.java:650)\n", + "\tat io.netty.channel.nio.NioEventLoop.processSelectedKeys(NioEventLoop.java:576)\n", + "\tat io.netty.channel.nio.NioEventLoop.run(NioEventLoop.java:493)\n", + "\tat io.netty.util.concurrent.SingleThreadEventExecutor$4.run(SingleThreadEventExecutor.java:989)\n", + "\tat io.netty.util.internal.ThreadExecutorMap$2.run(ThreadExecutorMap.java:74)\n", + "\tat io.netty.util.concurrent.FastThreadLocalRunnable.run(FastThreadLocalRunnable.java:30)\n", + "\t... 1 more\n", + "Caused by: java.lang.OutOfMemoryError: Java heap space\n", + "23/11/24 21:01:53 WARN org.apache.spark.scheduler.TaskSetManager: Lost task 464.0 in stage 19.0 (TID 2184) (hub-msca-bdp-dphub-students-test-ridhi-w-0.c.msca-bdp-student-ap.internal executor 6): TaskResultLost (result lost from block manager)\n", + "23/11/24 21:01:56 WARN org.apache.spark.storage.BlockManager: Failed to fetch block after 1 fetch failures. 
Most recent failure cause:\n", + "org.apache.spark.SparkException: Exception thrown in awaitResult: \n", + "\tat org.apache.spark.util.ThreadUtils$.awaitResult(ThreadUtils.scala:301)\n", + "\tat org.apache.spark.network.BlockTransferService.fetchBlockSync(BlockTransferService.scala:103)\n", + "\tat org.apache.spark.storage.BlockManager.fetchRemoteManagedBuffer(BlockManager.scala:1069)\n", + "\tat org.apache.spark.storage.BlockManager.$anonfun$getRemoteBlock$8(BlockManager.scala:1013)\n", + "\tat scala.Option.orElse(Option.scala:447)\n", + "\tat org.apache.spark.storage.BlockManager.getRemoteBlock(BlockManager.scala:1013)\n", + "\tat org.apache.spark.storage.BlockManager.getRemoteBytes(BlockManager.scala:1151)\n", + "\tat org.apache.spark.scheduler.TaskResultGetter$$anon$3.$anonfun$run$1(TaskResultGetter.scala:88)\n", + "\tat scala.runtime.java8.JFunction0$mcV$sp.apply(JFunction0$mcV$sp.java:23)\n", + "\tat org.apache.spark.util.Utils$.logUncaughtExceptions(Utils.scala:1996)\n", + "\tat org.apache.spark.scheduler.TaskResultGetter$$anon$3.run(TaskResultGetter.scala:63)\n", + "\tat java.util.concurrent.ThreadPoolExecutor.runWorker(ThreadPoolExecutor.java:1149)\n", + "\tat java.util.concurrent.ThreadPoolExecutor$Worker.run(ThreadPoolExecutor.java:624)\n", + "\tat java.lang.Thread.run(Thread.java:750)\n", + "Caused by: java.util.concurrent.ExecutionException: Boxed Error\n", + "\tat scala.concurrent.impl.Promise$.resolver(Promise.scala:87)\n", + "\tat scala.concurrent.impl.Promise$.scala$concurrent$impl$Promise$$resolveTry(Promise.scala:79)\n", + "\tat scala.concurrent.impl.Promise$DefaultPromise.tryComplete(Promise.scala:284)\n", + "\tat scala.concurrent.Promise.complete(Promise.scala:53)\n", + "\tat scala.concurrent.Promise.complete$(Promise.scala:52)\n", + "\tat scala.concurrent.impl.Promise$DefaultPromise.complete(Promise.scala:187)\n", + "\tat scala.concurrent.Promise.failure(Promise.scala:104)\n", + "\tat scala.concurrent.Promise.failure$(Promise.scala:104)\n", + "\tat scala.concurrent.impl.Promise$DefaultPromise.failure(Promise.scala:187)\n", + "\tat org.apache.spark.network.BlockTransferService$$anon$1.onBlockFetchSuccess(BlockTransferService.scala:98)\n", + "\tat org.apache.spark.network.shuffle.RetryingBlockFetcher$RetryingBlockFetchListener.onBlockFetchSuccess(RetryingBlockFetcher.java:216)\n", + "\tat org.apache.spark.network.shuffle.OneForOneBlockFetcher$ChunkCallback.onSuccess(OneForOneBlockFetcher.java:190)\n", + "\tat org.apache.spark.network.client.TransportResponseHandler.handle(TransportResponseHandler.java:171)\n", + "\tat org.apache.spark.network.server.TransportChannelHandler.channelRead0(TransportChannelHandler.java:142)\n", + "\tat org.apache.spark.network.server.TransportChannelHandler.channelRead0(TransportChannelHandler.java:53)\n", + "\tat io.netty.channel.SimpleChannelInboundHandler.channelRead(SimpleChannelInboundHandler.java:99)\n", + "\tat io.netty.channel.AbstractChannelHandlerContext.invokeChannelRead(AbstractChannelHandlerContext.java:379)\n", + "\tat io.netty.channel.AbstractChannelHandlerContext.invokeChannelRead(AbstractChannelHandlerContext.java:365)\n", + "\tat io.netty.channel.AbstractChannelHandlerContext.fireChannelRead(AbstractChannelHandlerContext.java:357)\n", + "\tat io.netty.handler.timeout.IdleStateHandler.channelRead(IdleStateHandler.java:286)\n", + "\tat io.netty.channel.AbstractChannelHandlerContext.invokeChannelRead(AbstractChannelHandlerContext.java:379)\n", + "\tat 
io.netty.channel.AbstractChannelHandlerContext.invokeChannelRead(AbstractChannelHandlerContext.java:365)\n", + "\tat io.netty.channel.AbstractChannelHandlerContext.fireChannelRead(AbstractChannelHandlerContext.java:357)\n", + "\tat io.netty.handler.codec.MessageToMessageDecoder.channelRead(MessageToMessageDecoder.java:103)\n", + "\tat io.netty.channel.AbstractChannelHandlerContext.invokeChannelRead(AbstractChannelHandlerContext.java:379)\n", + "\tat io.netty.channel.AbstractChannelHandlerContext.invokeChannelRead(AbstractChannelHandlerContext.java:365)\n", + "\tat io.netty.channel.AbstractChannelHandlerContext.fireChannelRead(AbstractChannelHandlerContext.java:357)\n", + "\tat org.apache.spark.network.util.TransportFrameDecoder.channelRead(TransportFrameDecoder.java:102)\n", + "\tat io.netty.channel.AbstractChannelHandlerContext.invokeChannelRead(AbstractChannelHandlerContext.java:379)\n", + "\tat io.netty.channel.AbstractChannelHandlerContext.invokeChannelRead(AbstractChannelHandlerContext.java:365)\n", + "\tat io.netty.channel.AbstractChannelHandlerContext.fireChannelRead(AbstractChannelHandlerContext.java:357)\n", + "\tat io.netty.channel.DefaultChannelPipeline$HeadContext.channelRead(DefaultChannelPipeline.java:1410)\n", + "\tat io.netty.channel.AbstractChannelHandlerContext.invokeChannelRead(AbstractChannelHandlerContext.java:379)\n", + "\tat io.netty.channel.AbstractChannelHandlerContext.invokeChannelRead(AbstractChannelHandlerContext.java:365)\n", + "\tat io.netty.channel.DefaultChannelPipeline.fireChannelRead(DefaultChannelPipeline.java:919)\n", + "\tat io.netty.channel.nio.AbstractNioByteChannel$NioByteUnsafe.read(AbstractNioByteChannel.java:163)\n", + "\tat io.netty.channel.nio.NioEventLoop.processSelectedKey(NioEventLoop.java:714)\n", + "\tat io.netty.channel.nio.NioEventLoop.processSelectedKeysOptimized(NioEventLoop.java:650)\n", + "\tat io.netty.channel.nio.NioEventLoop.processSelectedKeys(NioEventLoop.java:576)\n", + "\tat io.netty.channel.nio.NioEventLoop.run(NioEventLoop.java:493)\n", + "\tat io.netty.util.concurrent.SingleThreadEventExecutor$4.run(SingleThreadEventExecutor.java:989)\n", + "\tat io.netty.util.internal.ThreadExecutorMap$2.run(ThreadExecutorMap.java:74)\n", + "\tat io.netty.util.concurrent.FastThreadLocalRunnable.run(FastThreadLocalRunnable.java:30)\n", + "\t... 1 more\n", + "Caused by: java.lang.OutOfMemoryError: Java heap space\n", + "23/11/24 21:01:56 WARN org.apache.spark.scheduler.TaskSetManager: Lost task 460.0 in stage 19.0 (TID 2180) (hub-msca-bdp-dphub-students-test-ridhi-w-0.c.msca-bdp-student-ap.internal executor 6): TaskResultLost (result lost from block manager)\n", + "23/11/24 21:01:58 WARN org.apache.spark.storage.BlockManager: Failed to fetch block after 1 fetch failures. 
Most recent failure cause:\n", + "org.apache.spark.SparkException: Exception thrown in awaitResult: \n", + "\tat org.apache.spark.util.ThreadUtils$.awaitResult(ThreadUtils.scala:301)\n", + "\tat org.apache.spark.network.BlockTransferService.fetchBlockSync(BlockTransferService.scala:103)\n", + "\tat org.apache.spark.storage.BlockManager.fetchRemoteManagedBuffer(BlockManager.scala:1069)\n", + "\tat org.apache.spark.storage.BlockManager.$anonfun$getRemoteBlock$8(BlockManager.scala:1013)\n", + "\tat scala.Option.orElse(Option.scala:447)\n", + "\tat org.apache.spark.storage.BlockManager.getRemoteBlock(BlockManager.scala:1013)\n", + "\tat org.apache.spark.storage.BlockManager.getRemoteBytes(BlockManager.scala:1151)\n", + "\tat org.apache.spark.scheduler.TaskResultGetter$$anon$3.$anonfun$run$1(TaskResultGetter.scala:88)\n", + "\tat scala.runtime.java8.JFunction0$mcV$sp.apply(JFunction0$mcV$sp.java:23)\n", + "\tat org.apache.spark.util.Utils$.logUncaughtExceptions(Utils.scala:1996)\n", + "\tat org.apache.spark.scheduler.TaskResultGetter$$anon$3.run(TaskResultGetter.scala:63)\n", + "\tat java.util.concurrent.ThreadPoolExecutor.runWorker(ThreadPoolExecutor.java:1149)\n", + "\tat java.util.concurrent.ThreadPoolExecutor$Worker.run(ThreadPoolExecutor.java:624)\n", + "\tat java.lang.Thread.run(Thread.java:750)\n", + "Caused by: java.util.concurrent.ExecutionException: Boxed Error\n", + "\tat scala.concurrent.impl.Promise$.resolver(Promise.scala:87)\n", + "\tat scala.concurrent.impl.Promise$.scala$concurrent$impl$Promise$$resolveTry(Promise.scala:79)\n", + "\tat scala.concurrent.impl.Promise$DefaultPromise.tryComplete(Promise.scala:284)\n", + "\tat scala.concurrent.Promise.complete(Promise.scala:53)\n", + "\tat scala.concurrent.Promise.complete$(Promise.scala:52)\n", + "\tat scala.concurrent.impl.Promise$DefaultPromise.complete(Promise.scala:187)\n", + "\tat scala.concurrent.Promise.failure(Promise.scala:104)\n", + "\tat scala.concurrent.Promise.failure$(Promise.scala:104)\n", + "\tat scala.concurrent.impl.Promise$DefaultPromise.failure(Promise.scala:187)\n", + "\tat org.apache.spark.network.BlockTransferService$$anon$1.onBlockFetchSuccess(BlockTransferService.scala:98)\n", + "\tat org.apache.spark.network.shuffle.RetryingBlockFetcher$RetryingBlockFetchListener.onBlockFetchSuccess(RetryingBlockFetcher.java:216)\n", + "\tat org.apache.spark.network.shuffle.OneForOneBlockFetcher$ChunkCallback.onSuccess(OneForOneBlockFetcher.java:190)\n", + "\tat org.apache.spark.network.client.TransportResponseHandler.handle(TransportResponseHandler.java:171)\n", + "\tat org.apache.spark.network.server.TransportChannelHandler.channelRead0(TransportChannelHandler.java:142)\n", + "\tat org.apache.spark.network.server.TransportChannelHandler.channelRead0(TransportChannelHandler.java:53)\n", + "\tat io.netty.channel.SimpleChannelInboundHandler.channelRead(SimpleChannelInboundHandler.java:99)\n", + "\tat io.netty.channel.AbstractChannelHandlerContext.invokeChannelRead(AbstractChannelHandlerContext.java:379)\n", + "\tat io.netty.channel.AbstractChannelHandlerContext.invokeChannelRead(AbstractChannelHandlerContext.java:365)\n", + "\tat io.netty.channel.AbstractChannelHandlerContext.fireChannelRead(AbstractChannelHandlerContext.java:357)\n", + "\tat io.netty.handler.timeout.IdleStateHandler.channelRead(IdleStateHandler.java:286)\n", + "\tat io.netty.channel.AbstractChannelHandlerContext.invokeChannelRead(AbstractChannelHandlerContext.java:379)\n", + "\tat 
io.netty.channel.AbstractChannelHandlerContext.invokeChannelRead(AbstractChannelHandlerContext.java:365)\n", + "\tat io.netty.channel.AbstractChannelHandlerContext.fireChannelRead(AbstractChannelHandlerContext.java:357)\n", + "\tat io.netty.handler.codec.MessageToMessageDecoder.channelRead(MessageToMessageDecoder.java:103)\n", + "\tat io.netty.channel.AbstractChannelHandlerContext.invokeChannelRead(AbstractChannelHandlerContext.java:379)\n", + "\tat io.netty.channel.AbstractChannelHandlerContext.invokeChannelRead(AbstractChannelHandlerContext.java:365)\n", + "\tat io.netty.channel.AbstractChannelHandlerContext.fireChannelRead(AbstractChannelHandlerContext.java:357)\n", + "\tat org.apache.spark.network.util.TransportFrameDecoder.channelRead(TransportFrameDecoder.java:102)\n", + "\tat io.netty.channel.AbstractChannelHandlerContext.invokeChannelRead(AbstractChannelHandlerContext.java:379)\n", + "\tat io.netty.channel.AbstractChannelHandlerContext.invokeChannelRead(AbstractChannelHandlerContext.java:365)\n", + "\tat io.netty.channel.AbstractChannelHandlerContext.fireChannelRead(AbstractChannelHandlerContext.java:357)\n", + "\tat io.netty.channel.DefaultChannelPipeline$HeadContext.channelRead(DefaultChannelPipeline.java:1410)\n", + "\tat io.netty.channel.AbstractChannelHandlerContext.invokeChannelRead(AbstractChannelHandlerContext.java:379)\n", + "\tat io.netty.channel.AbstractChannelHandlerContext.invokeChannelRead(AbstractChannelHandlerContext.java:365)\n", + "\tat io.netty.channel.DefaultChannelPipeline.fireChannelRead(DefaultChannelPipeline.java:919)\n", + "\tat io.netty.channel.nio.AbstractNioByteChannel$NioByteUnsafe.read(AbstractNioByteChannel.java:163)\n", + "\tat io.netty.channel.nio.NioEventLoop.processSelectedKey(NioEventLoop.java:714)\n", + "\tat io.netty.channel.nio.NioEventLoop.processSelectedKeysOptimized(NioEventLoop.java:650)\n", + "\tat io.netty.channel.nio.NioEventLoop.processSelectedKeys(NioEventLoop.java:576)\n", + "\tat io.netty.channel.nio.NioEventLoop.run(NioEventLoop.java:493)\n", + "\tat io.netty.util.concurrent.SingleThreadEventExecutor$4.run(SingleThreadEventExecutor.java:989)\n", + "\tat io.netty.util.internal.ThreadExecutorMap$2.run(ThreadExecutorMap.java:74)\n", + "\tat io.netty.util.concurrent.FastThreadLocalRunnable.run(FastThreadLocalRunnable.java:30)\n", + "\t... 1 more\n", + "Caused by: java.lang.OutOfMemoryError: Java heap space\n", + "23/11/24 21:01:58 WARN org.apache.spark.scheduler.TaskSetManager: Lost task 467.0 in stage 19.0 (TID 2187) (hub-msca-bdp-dphub-students-test-ridhi-w-1.c.msca-bdp-student-ap.internal executor 7): TaskResultLost (result lost from block manager)\n", + "23/11/24 21:02:00 WARN org.apache.spark.storage.BlockManager: Failed to fetch block after 1 fetch failures. 
Most recent failure cause:\n", + "org.apache.spark.SparkException: Exception thrown in awaitResult: \n", + "\tat org.apache.spark.util.ThreadUtils$.awaitResult(ThreadUtils.scala:301)\n", + "\tat org.apache.spark.network.BlockTransferService.fetchBlockSync(BlockTransferService.scala:103)\n", + "\tat org.apache.spark.storage.BlockManager.fetchRemoteManagedBuffer(BlockManager.scala:1069)\n", + "\tat org.apache.spark.storage.BlockManager.$anonfun$getRemoteBlock$8(BlockManager.scala:1013)\n", + "\tat scala.Option.orElse(Option.scala:447)\n", + "\tat org.apache.spark.storage.BlockManager.getRemoteBlock(BlockManager.scala:1013)\n", + "\tat org.apache.spark.storage.BlockManager.getRemoteBytes(BlockManager.scala:1151)\n", + "\tat org.apache.spark.scheduler.TaskResultGetter$$anon$3.$anonfun$run$1(TaskResultGetter.scala:88)\n", + "\tat scala.runtime.java8.JFunction0$mcV$sp.apply(JFunction0$mcV$sp.java:23)\n", + "\tat org.apache.spark.util.Utils$.logUncaughtExceptions(Utils.scala:1996)\n", + "\tat org.apache.spark.scheduler.TaskResultGetter$$anon$3.run(TaskResultGetter.scala:63)\n", + "\tat java.util.concurrent.ThreadPoolExecutor.runWorker(ThreadPoolExecutor.java:1149)\n", + "\tat java.util.concurrent.ThreadPoolExecutor$Worker.run(ThreadPoolExecutor.java:624)\n", + "\tat java.lang.Thread.run(Thread.java:750)\n", + "Caused by: java.util.concurrent.ExecutionException: Boxed Error\n", + "\tat scala.concurrent.impl.Promise$.resolver(Promise.scala:87)\n", + "\tat scala.concurrent.impl.Promise$.scala$concurrent$impl$Promise$$resolveTry(Promise.scala:79)\n", + "\tat scala.concurrent.impl.Promise$DefaultPromise.tryComplete(Promise.scala:284)\n", + "\tat scala.concurrent.Promise.complete(Promise.scala:53)\n", + "\tat scala.concurrent.Promise.complete$(Promise.scala:52)\n", + "\tat scala.concurrent.impl.Promise$DefaultPromise.complete(Promise.scala:187)\n", + "\tat scala.concurrent.Promise.failure(Promise.scala:104)\n", + "\tat scala.concurrent.Promise.failure$(Promise.scala:104)\n", + "\tat scala.concurrent.impl.Promise$DefaultPromise.failure(Promise.scala:187)\n", + "\tat org.apache.spark.network.BlockTransferService$$anon$1.onBlockFetchSuccess(BlockTransferService.scala:98)\n", + "\tat org.apache.spark.network.shuffle.RetryingBlockFetcher$RetryingBlockFetchListener.onBlockFetchSuccess(RetryingBlockFetcher.java:216)\n", + "\tat org.apache.spark.network.shuffle.OneForOneBlockFetcher$ChunkCallback.onSuccess(OneForOneBlockFetcher.java:190)\n", + "\tat org.apache.spark.network.client.TransportResponseHandler.handle(TransportResponseHandler.java:171)\n", + "\tat org.apache.spark.network.server.TransportChannelHandler.channelRead0(TransportChannelHandler.java:142)\n", + "\tat org.apache.spark.network.server.TransportChannelHandler.channelRead0(TransportChannelHandler.java:53)\n", + "\tat io.netty.channel.SimpleChannelInboundHandler.channelRead(SimpleChannelInboundHandler.java:99)\n", + "\tat io.netty.channel.AbstractChannelHandlerContext.invokeChannelRead(AbstractChannelHandlerContext.java:379)\n", + "\tat io.netty.channel.AbstractChannelHandlerContext.invokeChannelRead(AbstractChannelHandlerContext.java:365)\n", + "\tat io.netty.channel.AbstractChannelHandlerContext.fireChannelRead(AbstractChannelHandlerContext.java:357)\n", + "\tat io.netty.handler.timeout.IdleStateHandler.channelRead(IdleStateHandler.java:286)\n", + "\tat io.netty.channel.AbstractChannelHandlerContext.invokeChannelRead(AbstractChannelHandlerContext.java:379)\n", + "\tat 
io.netty.channel.AbstractChannelHandlerContext.invokeChannelRead(AbstractChannelHandlerContext.java:365)\n", + "\tat io.netty.channel.AbstractChannelHandlerContext.fireChannelRead(AbstractChannelHandlerContext.java:357)\n", + "\tat io.netty.handler.codec.MessageToMessageDecoder.channelRead(MessageToMessageDecoder.java:103)\n", + "\tat io.netty.channel.AbstractChannelHandlerContext.invokeChannelRead(AbstractChannelHandlerContext.java:379)\n", + "\tat io.netty.channel.AbstractChannelHandlerContext.invokeChannelRead(AbstractChannelHandlerContext.java:365)\n", + "\tat io.netty.channel.AbstractChannelHandlerContext.fireChannelRead(AbstractChannelHandlerContext.java:357)\n", + "\tat org.apache.spark.network.util.TransportFrameDecoder.channelRead(TransportFrameDecoder.java:102)\n", + "\tat io.netty.channel.AbstractChannelHandlerContext.invokeChannelRead(AbstractChannelHandlerContext.java:379)\n", + "\tat io.netty.channel.AbstractChannelHandlerContext.invokeChannelRead(AbstractChannelHandlerContext.java:365)\n", + "\tat io.netty.channel.AbstractChannelHandlerContext.fireChannelRead(AbstractChannelHandlerContext.java:357)\n", + "\tat io.netty.channel.DefaultChannelPipeline$HeadContext.channelRead(DefaultChannelPipeline.java:1410)\n", + "\tat io.netty.channel.AbstractChannelHandlerContext.invokeChannelRead(AbstractChannelHandlerContext.java:379)\n", + "\tat io.netty.channel.AbstractChannelHandlerContext.invokeChannelRead(AbstractChannelHandlerContext.java:365)\n", + "\tat io.netty.channel.DefaultChannelPipeline.fireChannelRead(DefaultChannelPipeline.java:919)\n", + "\tat io.netty.channel.nio.AbstractNioByteChannel$NioByteUnsafe.read(AbstractNioByteChannel.java:163)\n", + "\tat io.netty.channel.nio.NioEventLoop.processSelectedKey(NioEventLoop.java:714)\n", + "\tat io.netty.channel.nio.NioEventLoop.processSelectedKeysOptimized(NioEventLoop.java:650)\n", + "\tat io.netty.channel.nio.NioEventLoop.processSelectedKeys(NioEventLoop.java:576)\n", + "\tat io.netty.channel.nio.NioEventLoop.run(NioEventLoop.java:493)\n", + "\tat io.netty.util.concurrent.SingleThreadEventExecutor$4.run(SingleThreadEventExecutor.java:989)\n", + "\tat io.netty.util.internal.ThreadExecutorMap$2.run(ThreadExecutorMap.java:74)\n", + "\tat io.netty.util.concurrent.FastThreadLocalRunnable.run(FastThreadLocalRunnable.java:30)\n", + "\t... 1 more\n", + "Caused by: java.lang.OutOfMemoryError: Java heap space\n", + "23/11/24 21:02:00 WARN org.apache.spark.scheduler.TaskSetManager: Lost task 461.0 in stage 19.0 (TID 2181) (hub-msca-bdp-dphub-students-test-ridhi-w-0.c.msca-bdp-student-ap.internal executor 6): TaskResultLost (result lost from block manager)\n", + "23/11/24 21:02:03 WARN org.apache.spark.storage.BlockManager: Failed to fetch block after 1 fetch failures. 
Most recent failure cause:\n", + "org.apache.spark.SparkException: Exception thrown in awaitResult: \n", + "\tat org.apache.spark.util.ThreadUtils$.awaitResult(ThreadUtils.scala:301)\n", + "\tat org.apache.spark.network.BlockTransferService.fetchBlockSync(BlockTransferService.scala:103)\n", + "\tat org.apache.spark.storage.BlockManager.fetchRemoteManagedBuffer(BlockManager.scala:1069)\n", + "\tat org.apache.spark.storage.BlockManager.$anonfun$getRemoteBlock$8(BlockManager.scala:1013)\n", + "\tat scala.Option.orElse(Option.scala:447)\n", + "\tat org.apache.spark.storage.BlockManager.getRemoteBlock(BlockManager.scala:1013)\n", + "\tat org.apache.spark.storage.BlockManager.getRemoteBytes(BlockManager.scala:1151)\n", + "\tat org.apache.spark.scheduler.TaskResultGetter$$anon$3.$anonfun$run$1(TaskResultGetter.scala:88)\n", + "\tat scala.runtime.java8.JFunction0$mcV$sp.apply(JFunction0$mcV$sp.java:23)\n", + "\tat org.apache.spark.util.Utils$.logUncaughtExceptions(Utils.scala:1996)\n", + "\tat org.apache.spark.scheduler.TaskResultGetter$$anon$3.run(TaskResultGetter.scala:63)\n", + "\tat java.util.concurrent.ThreadPoolExecutor.runWorker(ThreadPoolExecutor.java:1149)\n", + "\tat java.util.concurrent.ThreadPoolExecutor$Worker.run(ThreadPoolExecutor.java:624)\n", + "\tat java.lang.Thread.run(Thread.java:750)\n", + "Caused by: java.util.concurrent.ExecutionException: Boxed Error\n", + "\tat scala.concurrent.impl.Promise$.resolver(Promise.scala:87)\n", + "\tat scala.concurrent.impl.Promise$.scala$concurrent$impl$Promise$$resolveTry(Promise.scala:79)\n", + "\tat scala.concurrent.impl.Promise$DefaultPromise.tryComplete(Promise.scala:284)\n", + "\tat scala.concurrent.Promise.complete(Promise.scala:53)\n", + "\tat scala.concurrent.Promise.complete$(Promise.scala:52)\n", + "\tat scala.concurrent.impl.Promise$DefaultPromise.complete(Promise.scala:187)\n", + "\tat scala.concurrent.Promise.failure(Promise.scala:104)\n", + "\tat scala.concurrent.Promise.failure$(Promise.scala:104)\n", + "\tat scala.concurrent.impl.Promise$DefaultPromise.failure(Promise.scala:187)\n", + "\tat org.apache.spark.network.BlockTransferService$$anon$1.onBlockFetchSuccess(BlockTransferService.scala:98)\n", + "\tat org.apache.spark.network.shuffle.RetryingBlockFetcher$RetryingBlockFetchListener.onBlockFetchSuccess(RetryingBlockFetcher.java:216)\n", + "\tat org.apache.spark.network.shuffle.OneForOneBlockFetcher$ChunkCallback.onSuccess(OneForOneBlockFetcher.java:190)\n", + "\tat org.apache.spark.network.client.TransportResponseHandler.handle(TransportResponseHandler.java:171)\n", + "\tat org.apache.spark.network.server.TransportChannelHandler.channelRead0(TransportChannelHandler.java:142)\n", + "\tat org.apache.spark.network.server.TransportChannelHandler.channelRead0(TransportChannelHandler.java:53)\n", + "\tat io.netty.channel.SimpleChannelInboundHandler.channelRead(SimpleChannelInboundHandler.java:99)\n", + "\tat io.netty.channel.AbstractChannelHandlerContext.invokeChannelRead(AbstractChannelHandlerContext.java:379)\n", + "\tat io.netty.channel.AbstractChannelHandlerContext.invokeChannelRead(AbstractChannelHandlerContext.java:365)\n", + "\tat io.netty.channel.AbstractChannelHandlerContext.fireChannelRead(AbstractChannelHandlerContext.java:357)\n", + "\tat io.netty.handler.timeout.IdleStateHandler.channelRead(IdleStateHandler.java:286)\n", + "\tat io.netty.channel.AbstractChannelHandlerContext.invokeChannelRead(AbstractChannelHandlerContext.java:379)\n", + "\tat 
io.netty.channel.AbstractChannelHandlerContext.invokeChannelRead(AbstractChannelHandlerContext.java:365)\n", + "\tat io.netty.channel.AbstractChannelHandlerContext.fireChannelRead(AbstractChannelHandlerContext.java:357)\n", + "\tat io.netty.handler.codec.MessageToMessageDecoder.channelRead(MessageToMessageDecoder.java:103)\n", + "\tat io.netty.channel.AbstractChannelHandlerContext.invokeChannelRead(AbstractChannelHandlerContext.java:379)\n", + "\tat io.netty.channel.AbstractChannelHandlerContext.invokeChannelRead(AbstractChannelHandlerContext.java:365)\n", + "\tat io.netty.channel.AbstractChannelHandlerContext.fireChannelRead(AbstractChannelHandlerContext.java:357)\n", + "\tat org.apache.spark.network.util.TransportFrameDecoder.channelRead(TransportFrameDecoder.java:102)\n", + "\tat io.netty.channel.AbstractChannelHandlerContext.invokeChannelRead(AbstractChannelHandlerContext.java:379)\n", + "\tat io.netty.channel.AbstractChannelHandlerContext.invokeChannelRead(AbstractChannelHandlerContext.java:365)\n", + "\tat io.netty.channel.AbstractChannelHandlerContext.fireChannelRead(AbstractChannelHandlerContext.java:357)\n", + "\tat io.netty.channel.DefaultChannelPipeline$HeadContext.channelRead(DefaultChannelPipeline.java:1410)\n", + "\tat io.netty.channel.AbstractChannelHandlerContext.invokeChannelRead(AbstractChannelHandlerContext.java:379)\n", + "\tat io.netty.channel.AbstractChannelHandlerContext.invokeChannelRead(AbstractChannelHandlerContext.java:365)\n", + "\tat io.netty.channel.DefaultChannelPipeline.fireChannelRead(DefaultChannelPipeline.java:919)\n", + "\tat io.netty.channel.nio.AbstractNioByteChannel$NioByteUnsafe.read(AbstractNioByteChannel.java:163)\n", + "\tat io.netty.channel.nio.NioEventLoop.processSelectedKey(NioEventLoop.java:714)\n", + "\tat io.netty.channel.nio.NioEventLoop.processSelectedKeysOptimized(NioEventLoop.java:650)\n", + "\tat io.netty.channel.nio.NioEventLoop.processSelectedKeys(NioEventLoop.java:576)\n", + "\tat io.netty.channel.nio.NioEventLoop.run(NioEventLoop.java:493)\n", + "\tat io.netty.util.concurrent.SingleThreadEventExecutor$4.run(SingleThreadEventExecutor.java:989)\n", + "\tat io.netty.util.internal.ThreadExecutorMap$2.run(ThreadExecutorMap.java:74)\n", + "\tat io.netty.util.concurrent.FastThreadLocalRunnable.run(FastThreadLocalRunnable.java:30)\n", + "\t... 1 more\n", + "Caused by: java.lang.OutOfMemoryError: Java heap space\n", + "23/11/24 21:02:03 WARN org.apache.spark.scheduler.TaskSetManager: Lost task 472.0 in stage 19.0 (TID 2192) (hub-msca-bdp-dphub-students-test-ridhi-w-1.c.msca-bdp-student-ap.internal executor 7): TaskResultLost (result lost from block manager)\n", + "23/11/24 21:02:05 WARN org.apache.spark.storage.BlockManager: Failed to fetch block after 1 fetch failures. 
Most recent failure cause:\n", + "org.apache.spark.SparkException: Exception thrown in awaitResult: \n", + "\tat org.apache.spark.util.ThreadUtils$.awaitResult(ThreadUtils.scala:301)\n", + "\tat org.apache.spark.network.BlockTransferService.fetchBlockSync(BlockTransferService.scala:103)\n", + "\tat org.apache.spark.storage.BlockManager.fetchRemoteManagedBuffer(BlockManager.scala:1069)\n", + "\tat org.apache.spark.storage.BlockManager.$anonfun$getRemoteBlock$8(BlockManager.scala:1013)\n", + "\tat scala.Option.orElse(Option.scala:447)\n", + "\tat org.apache.spark.storage.BlockManager.getRemoteBlock(BlockManager.scala:1013)\n", + "\tat org.apache.spark.storage.BlockManager.getRemoteBytes(BlockManager.scala:1151)\n", + "\tat org.apache.spark.scheduler.TaskResultGetter$$anon$3.$anonfun$run$1(TaskResultGetter.scala:88)\n", + "\tat scala.runtime.java8.JFunction0$mcV$sp.apply(JFunction0$mcV$sp.java:23)\n", + "\tat org.apache.spark.util.Utils$.logUncaughtExceptions(Utils.scala:1996)\n", + "\tat org.apache.spark.scheduler.TaskResultGetter$$anon$3.run(TaskResultGetter.scala:63)\n", + "\tat java.util.concurrent.ThreadPoolExecutor.runWorker(ThreadPoolExecutor.java:1149)\n", + "\tat java.util.concurrent.ThreadPoolExecutor$Worker.run(ThreadPoolExecutor.java:624)\n", + "\tat java.lang.Thread.run(Thread.java:750)\n", + "Caused by: java.util.concurrent.ExecutionException: Boxed Error\n", + "\tat scala.concurrent.impl.Promise$.resolver(Promise.scala:87)\n", + "\tat scala.concurrent.impl.Promise$.scala$concurrent$impl$Promise$$resolveTry(Promise.scala:79)\n", + "\tat scala.concurrent.impl.Promise$DefaultPromise.tryComplete(Promise.scala:284)\n", + "\tat scala.concurrent.Promise.complete(Promise.scala:53)\n", + "\tat scala.concurrent.Promise.complete$(Promise.scala:52)\n", + "\tat scala.concurrent.impl.Promise$DefaultPromise.complete(Promise.scala:187)\n", + "\tat scala.concurrent.Promise.failure(Promise.scala:104)\n", + "\tat scala.concurrent.Promise.failure$(Promise.scala:104)\n", + "\tat scala.concurrent.impl.Promise$DefaultPromise.failure(Promise.scala:187)\n", + "\tat org.apache.spark.network.BlockTransferService$$anon$1.onBlockFetchSuccess(BlockTransferService.scala:98)\n", + "\tat org.apache.spark.network.shuffle.RetryingBlockFetcher$RetryingBlockFetchListener.onBlockFetchSuccess(RetryingBlockFetcher.java:216)\n", + "\tat org.apache.spark.network.shuffle.OneForOneBlockFetcher$ChunkCallback.onSuccess(OneForOneBlockFetcher.java:190)\n", + "\tat org.apache.spark.network.client.TransportResponseHandler.handle(TransportResponseHandler.java:171)\n", + "\tat org.apache.spark.network.server.TransportChannelHandler.channelRead0(TransportChannelHandler.java:142)\n", + "\tat org.apache.spark.network.server.TransportChannelHandler.channelRead0(TransportChannelHandler.java:53)\n", + "\tat io.netty.channel.SimpleChannelInboundHandler.channelRead(SimpleChannelInboundHandler.java:99)\n", + "\tat io.netty.channel.AbstractChannelHandlerContext.invokeChannelRead(AbstractChannelHandlerContext.java:379)\n", + "\tat io.netty.channel.AbstractChannelHandlerContext.invokeChannelRead(AbstractChannelHandlerContext.java:365)\n", + "\tat io.netty.channel.AbstractChannelHandlerContext.fireChannelRead(AbstractChannelHandlerContext.java:357)\n", + "\tat io.netty.handler.timeout.IdleStateHandler.channelRead(IdleStateHandler.java:286)\n", + "\tat io.netty.channel.AbstractChannelHandlerContext.invokeChannelRead(AbstractChannelHandlerContext.java:379)\n", + "\tat 
io.netty.channel.AbstractChannelHandlerContext.invokeChannelRead(AbstractChannelHandlerContext.java:365)\n", + "\tat io.netty.channel.AbstractChannelHandlerContext.fireChannelRead(AbstractChannelHandlerContext.java:357)\n", + "\tat io.netty.handler.codec.MessageToMessageDecoder.channelRead(MessageToMessageDecoder.java:103)\n", + "\tat io.netty.channel.AbstractChannelHandlerContext.invokeChannelRead(AbstractChannelHandlerContext.java:379)\n", + "\tat io.netty.channel.AbstractChannelHandlerContext.invokeChannelRead(AbstractChannelHandlerContext.java:365)\n", + "\tat io.netty.channel.AbstractChannelHandlerContext.fireChannelRead(AbstractChannelHandlerContext.java:357)\n", + "\tat org.apache.spark.network.util.TransportFrameDecoder.channelRead(TransportFrameDecoder.java:102)\n", + "\tat io.netty.channel.AbstractChannelHandlerContext.invokeChannelRead(AbstractChannelHandlerContext.java:379)\n", + "\tat io.netty.channel.AbstractChannelHandlerContext.invokeChannelRead(AbstractChannelHandlerContext.java:365)\n", + "\tat io.netty.channel.AbstractChannelHandlerContext.fireChannelRead(AbstractChannelHandlerContext.java:357)\n", + "\tat io.netty.channel.DefaultChannelPipeline$HeadContext.channelRead(DefaultChannelPipeline.java:1410)\n", + "\tat io.netty.channel.AbstractChannelHandlerContext.invokeChannelRead(AbstractChannelHandlerContext.java:379)\n", + "\tat io.netty.channel.AbstractChannelHandlerContext.invokeChannelRead(AbstractChannelHandlerContext.java:365)\n", + "\tat io.netty.channel.DefaultChannelPipeline.fireChannelRead(DefaultChannelPipeline.java:919)\n", + "\tat io.netty.channel.nio.AbstractNioByteChannel$NioByteUnsafe.read(AbstractNioByteChannel.java:163)\n", + "\tat io.netty.channel.nio.NioEventLoop.processSelectedKey(NioEventLoop.java:714)\n", + "\tat io.netty.channel.nio.NioEventLoop.processSelectedKeysOptimized(NioEventLoop.java:650)\n", + "\tat io.netty.channel.nio.NioEventLoop.processSelectedKeys(NioEventLoop.java:576)\n", + "\tat io.netty.channel.nio.NioEventLoop.run(NioEventLoop.java:493)\n", + "\tat io.netty.util.concurrent.SingleThreadEventExecutor$4.run(SingleThreadEventExecutor.java:989)\n", + "\tat io.netty.util.internal.ThreadExecutorMap$2.run(ThreadExecutorMap.java:74)\n", + "\tat io.netty.util.concurrent.FastThreadLocalRunnable.run(FastThreadLocalRunnable.java:30)\n", + "\t... 1 more\n", + "Caused by: java.lang.OutOfMemoryError: Java heap space\n", + "23/11/24 21:02:05 WARN org.apache.spark.scheduler.TaskSetManager: Lost task 474.0 in stage 19.0 (TID 2194) (hub-msca-bdp-dphub-students-test-ridhi-w-1.c.msca-bdp-student-ap.internal executor 7): TaskResultLost (result lost from block manager)\n", + "23/11/24 21:02:08 WARN org.apache.spark.storage.BlockManager: Failed to fetch block after 1 fetch failures. 
Most recent failure cause:\n", + "org.apache.spark.SparkException: Exception thrown in awaitResult: \n", + "\tat org.apache.spark.util.ThreadUtils$.awaitResult(ThreadUtils.scala:301)\n", + "\tat org.apache.spark.network.BlockTransferService.fetchBlockSync(BlockTransferService.scala:103)\n", + "\tat org.apache.spark.storage.BlockManager.fetchRemoteManagedBuffer(BlockManager.scala:1069)\n", + "\tat org.apache.spark.storage.BlockManager.$anonfun$getRemoteBlock$8(BlockManager.scala:1013)\n", + "\tat scala.Option.orElse(Option.scala:447)\n", + "\tat org.apache.spark.storage.BlockManager.getRemoteBlock(BlockManager.scala:1013)\n", + "\tat org.apache.spark.storage.BlockManager.getRemoteBytes(BlockManager.scala:1151)\n", + "\tat org.apache.spark.scheduler.TaskResultGetter$$anon$3.$anonfun$run$1(TaskResultGetter.scala:88)\n", + "\tat scala.runtime.java8.JFunction0$mcV$sp.apply(JFunction0$mcV$sp.java:23)\n", + "\tat org.apache.spark.util.Utils$.logUncaughtExceptions(Utils.scala:1996)\n", + "\tat org.apache.spark.scheduler.TaskResultGetter$$anon$3.run(TaskResultGetter.scala:63)\n", + "\tat java.util.concurrent.ThreadPoolExecutor.runWorker(ThreadPoolExecutor.java:1149)\n", + "\tat java.util.concurrent.ThreadPoolExecutor$Worker.run(ThreadPoolExecutor.java:624)\n", + "\tat java.lang.Thread.run(Thread.java:750)\n", + "Caused by: java.util.concurrent.ExecutionException: Boxed Error\n", + "\tat scala.concurrent.impl.Promise$.resolver(Promise.scala:87)\n", + "\tat scala.concurrent.impl.Promise$.scala$concurrent$impl$Promise$$resolveTry(Promise.scala:79)\n", + "\tat scala.concurrent.impl.Promise$DefaultPromise.tryComplete(Promise.scala:284)\n", + "\tat scala.concurrent.Promise.complete(Promise.scala:53)\n", + "\tat scala.concurrent.Promise.complete$(Promise.scala:52)\n", + "\tat scala.concurrent.impl.Promise$DefaultPromise.complete(Promise.scala:187)\n", + "\tat scala.concurrent.Promise.failure(Promise.scala:104)\n", + "\tat scala.concurrent.Promise.failure$(Promise.scala:104)\n", + "\tat scala.concurrent.impl.Promise$DefaultPromise.failure(Promise.scala:187)\n", + "\tat org.apache.spark.network.BlockTransferService$$anon$1.onBlockFetchSuccess(BlockTransferService.scala:98)\n", + "\tat org.apache.spark.network.shuffle.RetryingBlockFetcher$RetryingBlockFetchListener.onBlockFetchSuccess(RetryingBlockFetcher.java:216)\n", + "\tat org.apache.spark.network.shuffle.OneForOneBlockFetcher$ChunkCallback.onSuccess(OneForOneBlockFetcher.java:190)\n", + "\tat org.apache.spark.network.client.TransportResponseHandler.handle(TransportResponseHandler.java:171)\n", + "\tat org.apache.spark.network.server.TransportChannelHandler.channelRead0(TransportChannelHandler.java:142)\n", + "\tat org.apache.spark.network.server.TransportChannelHandler.channelRead0(TransportChannelHandler.java:53)\n", + "\tat io.netty.channel.SimpleChannelInboundHandler.channelRead(SimpleChannelInboundHandler.java:99)\n", + "\tat io.netty.channel.AbstractChannelHandlerContext.invokeChannelRead(AbstractChannelHandlerContext.java:379)\n", + "\tat io.netty.channel.AbstractChannelHandlerContext.invokeChannelRead(AbstractChannelHandlerContext.java:365)\n", + "\tat io.netty.channel.AbstractChannelHandlerContext.fireChannelRead(AbstractChannelHandlerContext.java:357)\n", + "\tat io.netty.handler.timeout.IdleStateHandler.channelRead(IdleStateHandler.java:286)\n", + "\tat io.netty.channel.AbstractChannelHandlerContext.invokeChannelRead(AbstractChannelHandlerContext.java:379)\n", + "\tat 
io.netty.channel.AbstractChannelHandlerContext.invokeChannelRead(AbstractChannelHandlerContext.java:365)\n", + "\tat io.netty.channel.AbstractChannelHandlerContext.fireChannelRead(AbstractChannelHandlerContext.java:357)\n", + "\tat io.netty.handler.codec.MessageToMessageDecoder.channelRead(MessageToMessageDecoder.java:103)\n", + "\tat io.netty.channel.AbstractChannelHandlerContext.invokeChannelRead(AbstractChannelHandlerContext.java:379)\n", + "\tat io.netty.channel.AbstractChannelHandlerContext.invokeChannelRead(AbstractChannelHandlerContext.java:365)\n", + "\tat io.netty.channel.AbstractChannelHandlerContext.fireChannelRead(AbstractChannelHandlerContext.java:357)\n", + "\tat org.apache.spark.network.util.TransportFrameDecoder.channelRead(TransportFrameDecoder.java:102)\n", + "\tat io.netty.channel.AbstractChannelHandlerContext.invokeChannelRead(AbstractChannelHandlerContext.java:379)\n", + "\tat io.netty.channel.AbstractChannelHandlerContext.invokeChannelRead(AbstractChannelHandlerContext.java:365)\n", + "\tat io.netty.channel.AbstractChannelHandlerContext.fireChannelRead(AbstractChannelHandlerContext.java:357)\n", + "\tat io.netty.channel.DefaultChannelPipeline$HeadContext.channelRead(DefaultChannelPipeline.java:1410)\n", + "\tat io.netty.channel.AbstractChannelHandlerContext.invokeChannelRead(AbstractChannelHandlerContext.java:379)\n", + "\tat io.netty.channel.AbstractChannelHandlerContext.invokeChannelRead(AbstractChannelHandlerContext.java:365)\n", + "\tat io.netty.channel.DefaultChannelPipeline.fireChannelRead(DefaultChannelPipeline.java:919)\n", + "\tat io.netty.channel.nio.AbstractNioByteChannel$NioByteUnsafe.read(AbstractNioByteChannel.java:163)\n", + "\tat io.netty.channel.nio.NioEventLoop.processSelectedKey(NioEventLoop.java:714)\n", + "\tat io.netty.channel.nio.NioEventLoop.processSelectedKeysOptimized(NioEventLoop.java:650)\n", + "\tat io.netty.channel.nio.NioEventLoop.processSelectedKeys(NioEventLoop.java:576)\n", + "\tat io.netty.channel.nio.NioEventLoop.run(NioEventLoop.java:493)\n", + "\tat io.netty.util.concurrent.SingleThreadEventExecutor$4.run(SingleThreadEventExecutor.java:989)\n", + "\tat io.netty.util.internal.ThreadExecutorMap$2.run(ThreadExecutorMap.java:74)\n", + "\tat io.netty.util.concurrent.FastThreadLocalRunnable.run(FastThreadLocalRunnable.java:30)\n", + "\t... 1 more\n", + "Caused by: java.lang.OutOfMemoryError: Java heap space\n", + "23/11/24 21:02:08 WARN org.apache.spark.scheduler.TaskSetManager: Lost task 476.0 in stage 19.0 (TID 2196) (hub-msca-bdp-dphub-students-test-ridhi-w-1.c.msca-bdp-student-ap.internal executor 7): TaskResultLost (result lost from block manager)\n", + "23/11/24 21:02:11 WARN org.apache.spark.storage.BlockManager: Failed to fetch block after 1 fetch failures. 
Most recent failure cause:\n", + "org.apache.spark.SparkException: Exception thrown in awaitResult: \n", + "\tat org.apache.spark.util.ThreadUtils$.awaitResult(ThreadUtils.scala:301)\n", + "\tat org.apache.spark.network.BlockTransferService.fetchBlockSync(BlockTransferService.scala:103)\n", + "\tat org.apache.spark.storage.BlockManager.fetchRemoteManagedBuffer(BlockManager.scala:1069)\n", + "\tat org.apache.spark.storage.BlockManager.$anonfun$getRemoteBlock$8(BlockManager.scala:1013)\n", + "\tat scala.Option.orElse(Option.scala:447)\n", + "\tat org.apache.spark.storage.BlockManager.getRemoteBlock(BlockManager.scala:1013)\n", + "\tat org.apache.spark.storage.BlockManager.getRemoteBytes(BlockManager.scala:1151)\n", + "\tat org.apache.spark.scheduler.TaskResultGetter$$anon$3.$anonfun$run$1(TaskResultGetter.scala:88)\n", + "\tat scala.runtime.java8.JFunction0$mcV$sp.apply(JFunction0$mcV$sp.java:23)\n", + "\tat org.apache.spark.util.Utils$.logUncaughtExceptions(Utils.scala:1996)\n", + "\tat org.apache.spark.scheduler.TaskResultGetter$$anon$3.run(TaskResultGetter.scala:63)\n", + "\tat java.util.concurrent.ThreadPoolExecutor.runWorker(ThreadPoolExecutor.java:1149)\n", + "\tat java.util.concurrent.ThreadPoolExecutor$Worker.run(ThreadPoolExecutor.java:624)\n", + "\tat java.lang.Thread.run(Thread.java:750)\n", + "Caused by: java.util.concurrent.ExecutionException: Boxed Error\n", + "\tat scala.concurrent.impl.Promise$.resolver(Promise.scala:87)\n", + "\tat scala.concurrent.impl.Promise$.scala$concurrent$impl$Promise$$resolveTry(Promise.scala:79)\n", + "\tat scala.concurrent.impl.Promise$DefaultPromise.tryComplete(Promise.scala:284)\n", + "\tat scala.concurrent.Promise.complete(Promise.scala:53)\n", + "\tat scala.concurrent.Promise.complete$(Promise.scala:52)\n", + "\tat scala.concurrent.impl.Promise$DefaultPromise.complete(Promise.scala:187)\n", + "\tat scala.concurrent.Promise.failure(Promise.scala:104)\n", + "\tat scala.concurrent.Promise.failure$(Promise.scala:104)\n", + "\tat scala.concurrent.impl.Promise$DefaultPromise.failure(Promise.scala:187)\n", + "\tat org.apache.spark.network.BlockTransferService$$anon$1.onBlockFetchSuccess(BlockTransferService.scala:98)\n", + "\tat org.apache.spark.network.shuffle.RetryingBlockFetcher$RetryingBlockFetchListener.onBlockFetchSuccess(RetryingBlockFetcher.java:216)\n", + "\tat org.apache.spark.network.shuffle.OneForOneBlockFetcher$ChunkCallback.onSuccess(OneForOneBlockFetcher.java:190)\n", + "\tat org.apache.spark.network.client.TransportResponseHandler.handle(TransportResponseHandler.java:171)\n", + "\tat org.apache.spark.network.server.TransportChannelHandler.channelRead0(TransportChannelHandler.java:142)\n", + "\tat org.apache.spark.network.server.TransportChannelHandler.channelRead0(TransportChannelHandler.java:53)\n", + "\tat io.netty.channel.SimpleChannelInboundHandler.channelRead(SimpleChannelInboundHandler.java:99)\n", + "\tat io.netty.channel.AbstractChannelHandlerContext.invokeChannelRead(AbstractChannelHandlerContext.java:379)\n", + "\tat io.netty.channel.AbstractChannelHandlerContext.invokeChannelRead(AbstractChannelHandlerContext.java:365)\n", + "\tat io.netty.channel.AbstractChannelHandlerContext.fireChannelRead(AbstractChannelHandlerContext.java:357)\n", + "\tat io.netty.handler.timeout.IdleStateHandler.channelRead(IdleStateHandler.java:286)\n", + "\tat io.netty.channel.AbstractChannelHandlerContext.invokeChannelRead(AbstractChannelHandlerContext.java:379)\n", + "\tat 
io.netty.channel.AbstractChannelHandlerContext.invokeChannelRead(AbstractChannelHandlerContext.java:365)\n", + "\tat io.netty.channel.AbstractChannelHandlerContext.fireChannelRead(AbstractChannelHandlerContext.java:357)\n", + "\tat io.netty.handler.codec.MessageToMessageDecoder.channelRead(MessageToMessageDecoder.java:103)\n", + "\tat io.netty.channel.AbstractChannelHandlerContext.invokeChannelRead(AbstractChannelHandlerContext.java:379)\n", + "\tat io.netty.channel.AbstractChannelHandlerContext.invokeChannelRead(AbstractChannelHandlerContext.java:365)\n", + "\tat io.netty.channel.AbstractChannelHandlerContext.fireChannelRead(AbstractChannelHandlerContext.java:357)\n", + "\tat org.apache.spark.network.util.TransportFrameDecoder.channelRead(TransportFrameDecoder.java:102)\n", + "\tat io.netty.channel.AbstractChannelHandlerContext.invokeChannelRead(AbstractChannelHandlerContext.java:379)\n", + "\tat io.netty.channel.AbstractChannelHandlerContext.invokeChannelRead(AbstractChannelHandlerContext.java:365)\n", + "\tat io.netty.channel.AbstractChannelHandlerContext.fireChannelRead(AbstractChannelHandlerContext.java:357)\n", + "\tat io.netty.channel.DefaultChannelPipeline$HeadContext.channelRead(DefaultChannelPipeline.java:1410)\n", + "\tat io.netty.channel.AbstractChannelHandlerContext.invokeChannelRead(AbstractChannelHandlerContext.java:379)\n", + "\tat io.netty.channel.AbstractChannelHandlerContext.invokeChannelRead(AbstractChannelHandlerContext.java:365)\n", + "\tat io.netty.channel.DefaultChannelPipeline.fireChannelRead(DefaultChannelPipeline.java:919)\n", + "\tat io.netty.channel.nio.AbstractNioByteChannel$NioByteUnsafe.read(AbstractNioByteChannel.java:163)\n", + "\tat io.netty.channel.nio.NioEventLoop.processSelectedKey(NioEventLoop.java:714)\n", + "\tat io.netty.channel.nio.NioEventLoop.processSelectedKeysOptimized(NioEventLoop.java:650)\n", + "\tat io.netty.channel.nio.NioEventLoop.processSelectedKeys(NioEventLoop.java:576)\n", + "\tat io.netty.channel.nio.NioEventLoop.run(NioEventLoop.java:493)\n", + "\tat io.netty.util.concurrent.SingleThreadEventExecutor$4.run(SingleThreadEventExecutor.java:989)\n", + "\tat io.netty.util.internal.ThreadExecutorMap$2.run(ThreadExecutorMap.java:74)\n", + "\tat io.netty.util.concurrent.FastThreadLocalRunnable.run(FastThreadLocalRunnable.java:30)\n", + "\t... 1 more\n", + "Caused by: java.lang.OutOfMemoryError: Java heap space\n", + "23/11/24 21:02:11 WARN org.apache.spark.scheduler.TaskSetManager: Lost task 466.0 in stage 19.0 (TID 2186) (hub-msca-bdp-dphub-students-test-ridhi-w-1.c.msca-bdp-student-ap.internal executor 7): TaskResultLost (result lost from block manager)\n", + "23/11/24 21:02:13 WARN org.apache.spark.storage.BlockManager: Failed to fetch block after 1 fetch failures. 
Most recent failure cause: [same org.apache.spark.SparkException / ExecutionException: Boxed Error / java.lang.OutOfMemoryError: Java heap space stack trace as above; the identical trace accompanies every 'Failed to fetch block after 1 fetch failures' warning logged between 21:02:12 and 21:02:30 and is omitted here]\n", + "23/11/24 21:02:13 WARN org.apache.spark.scheduler.TaskSetManager: Lost task 468.0 in stage 19.0 (TID 2188) (hub-msca-bdp-dphub-students-test-ridhi-w-1.c.msca-bdp-student-ap.internal executor 7): TaskResultLost (result lost from block manager)\n", + "23/11/24 21:02:13 WARN org.apache.spark.scheduler.TaskSetManager: Lost task 482.0 in stage 19.0 (TID 2202) (hub-msca-bdp-dphub-students-test-ridhi-w-0.c.msca-bdp-student-ap.internal executor 6): TaskResultLost (result lost from block manager)\n", + "23/11/24 21:02:15 WARN org.apache.spark.scheduler.TaskSetManager: Lost task 470.0 in stage 19.0 (TID 2190) (hub-msca-bdp-dphub-students-test-ridhi-w-1.c.msca-bdp-student-ap.internal executor 7): TaskResultLost (result lost from block manager)\n", + "23/11/24 21:02:17 WARN org.apache.spark.scheduler.TaskSetManager: Lost task 479.0 in stage 19.0 (TID 2199) (hub-msca-bdp-dphub-students-test-ridhi-w-0.c.msca-bdp-student-ap.internal executor 3): TaskResultLost (result lost from block manager)\n", + "23/11/24 21:02:19 WARN org.apache.spark.scheduler.TaskSetManager: Lost task 473.0 in stage 19.0 (TID 2193) (hub-msca-bdp-dphub-students-test-ridhi-w-1.c.msca-bdp-student-ap.internal executor 7): TaskResultLost (result lost from block manager)\n", + "23/11/24 21:02:22 WARN org.apache.spark.scheduler.TaskSetManager: Lost task 478.0 in stage 19.0 (TID 2198) (hub-msca-bdp-dphub-students-test-ridhi-w-0.c.msca-bdp-student-ap.internal executor 3): TaskResultLost (result lost from block manager)\n", + "23/11/24 21:02:24 WARN org.apache.spark.scheduler.TaskSetManager: Lost task 471.0 in stage 19.0 (TID 2191) (hub-msca-bdp-dphub-students-test-ridhi-w-0.c.msca-bdp-student-ap.internal executor 3): TaskResultLost (result lost from block manager)\n", + "23/11/24 21:02:26 WARN org.apache.spark.scheduler.TaskSetManager: Lost task 469.0 in stage 19.0 (TID 2189) (hub-msca-bdp-dphub-students-test-ridhi-w-0.c.msca-bdp-student-ap.internal executor 3): TaskResultLost (result lost from block manager)\n", + "23/11/24 21:02:28 WARN org.apache.spark.scheduler.TaskSetManager: Lost task 475.0 in stage 19.0 (TID 2195) (hub-msca-bdp-dphub-students-test-ridhi-w-0.c.msca-bdp-student-ap.internal executor 3): TaskResultLost (result lost from block manager)\n", + "23/11/24 21:02:30 WARN org.apache.spark.storage.BlockManager: Failed to fetch block after 1 fetch failures. 
Most recent failure cause:\n", + "org.apache.spark.SparkException: Exception thrown in awaitResult: \n", + "Caused by: java.util.concurrent.ExecutionException: Boxed Error\n", + "Caused by: java.lang.OutOfMemoryError: Java heap space\n",
+ "23/11/24 21:02:30 WARN org.apache.spark.scheduler.TaskSetManager: Lost task 465.0 in stage 19.0 (TID 2185) (hub-msca-bdp-dphub-students-test-ridhi-w-0.c.msca-bdp-student-ap.internal executor 3): TaskResultLost (result lost from block manager)\n",
+ "23/11/24 21:02:30 WARN org.apache.spark.scheduler.TaskSetManager: Lost task 490.0 in stage 19.0 (TID 2210) (hub-msca-bdp-dphub-students-test-ridhi-w-1.c.msca-bdp-student-ap.internal executor 7): TaskResultLost (result lost from block manager)\n",
+ "23/11/24 21:02:33 WARN org.apache.spark.scheduler.TaskSetManager: Lost task 477.0 in stage 19.0 (TID 2197) (hub-msca-bdp-dphub-students-test-ridhi-w-0.c.msca-bdp-student-ap.internal executor 3): TaskResultLost (result lost from block manager)\n",
+ "23/11/24 21:02:35 WARN org.apache.spark.scheduler.TaskSetManager: Lost task 493.0 in stage 19.0 (TID 2213) (hub-msca-bdp-dphub-students-test-ridhi-w-1.c.msca-bdp-student-ap.internal executor 7): TaskResultLost (result lost from block manager)\n",
+ "23/11/24 21:02:38 WARN org.apache.spark.scheduler.TaskSetManager: Lost task 485.0 in stage 19.0 (TID 2205) (hub-msca-bdp-dphub-students-test-ridhi-w-0.c.msca-bdp-student-ap.internal executor 6): TaskResultLost (result lost from block manager)\n",
+ "23/11/24 21:02:41 WARN org.apache.spark.scheduler.TaskSetManager: Lost task 494.0 in stage 19.0 (TID 2214) (hub-msca-bdp-dphub-students-test-ridhi-w-1.c.msca-bdp-student-ap.internal executor 7): TaskResultLost (result lost from block manager)\n",
+ "23/11/24 21:02:44 WARN org.apache.spark.scheduler.TaskSetManager: Lost task 492.0 in stage 19.0 (TID 2212) (hub-msca-bdp-dphub-students-test-ridhi-w-1.c.msca-bdp-student-ap.internal executor 7): TaskResultLost (result lost from block manager)\n",
+ "23/11/24 21:02:46 WARN org.apache.spark.scheduler.TaskSetManager: Lost task 486.0 in stage 19.0 (TID 2206) (hub-msca-bdp-dphub-students-test-ridhi-w-0.c.msca-bdp-student-ap.internal executor 6): TaskResultLost (result lost from block manager)\n",
+ "23/11/24 21:02:47 WARN org.apache.spark.scheduler.TaskSetManager: Lost task 496.0 in stage 19.0 (TID 2216) (hub-msca-bdp-dphub-students-test-ridhi-w-1.c.msca-bdp-student-ap.internal executor 7): TaskResultLost (result lost from block manager)\n",
+ "23/11/24 21:02:50 WARN org.apache.spark.scheduler.TaskSetManager: Lost task 487.0 in stage 19.0 (TID 2207) (hub-msca-bdp-dphub-students-test-ridhi-w-0.c.msca-bdp-student-ap.internal executor 6): TaskResultLost (result lost from block manager)\n",
+ "23/11/24 21:02:53 WARN org.apache.spark.scheduler.TaskSetManager: Lost task 489.0 in stage 19.0 (TID 2209) (hub-msca-bdp-dphub-students-test-ridhi-w-1.c.msca-bdp-student-ap.internal executor 7): TaskResultLost (result lost from block manager)\n",
+ "23/11/24 21:02:55 WARN org.apache.spark.storage.BlockManager: Failed to fetch block after 1 fetch failures. 
Most recent failure cause:\n", + "org.apache.spark.SparkException: Exception thrown in awaitResult: \n", + "\tat org.apache.spark.util.ThreadUtils$.awaitResult(ThreadUtils.scala:301)\n", + "\tat org.apache.spark.network.BlockTransferService.fetchBlockSync(BlockTransferService.scala:103)\n", + "\tat org.apache.spark.storage.BlockManager.fetchRemoteManagedBuffer(BlockManager.scala:1069)\n", + "\tat org.apache.spark.storage.BlockManager.$anonfun$getRemoteBlock$8(BlockManager.scala:1013)\n", + "\tat scala.Option.orElse(Option.scala:447)\n", + "\tat org.apache.spark.storage.BlockManager.getRemoteBlock(BlockManager.scala:1013)\n", + "\tat org.apache.spark.storage.BlockManager.getRemoteBytes(BlockManager.scala:1151)\n", + "\tat org.apache.spark.scheduler.TaskResultGetter$$anon$3.$anonfun$run$1(TaskResultGetter.scala:88)\n", + "\tat scala.runtime.java8.JFunction0$mcV$sp.apply(JFunction0$mcV$sp.java:23)\n", + "\tat org.apache.spark.util.Utils$.logUncaughtExceptions(Utils.scala:1996)\n", + "\tat org.apache.spark.scheduler.TaskResultGetter$$anon$3.run(TaskResultGetter.scala:63)\n", + "\tat java.util.concurrent.ThreadPoolExecutor.runWorker(ThreadPoolExecutor.java:1149)\n", + "\tat java.util.concurrent.ThreadPoolExecutor$Worker.run(ThreadPoolExecutor.java:624)\n", + "\tat java.lang.Thread.run(Thread.java:750)\n", + "Caused by: java.util.concurrent.ExecutionException: Boxed Error\n", + "\tat scala.concurrent.impl.Promise$.resolver(Promise.scala:87)\n", + "\tat scala.concurrent.impl.Promise$.scala$concurrent$impl$Promise$$resolveTry(Promise.scala:79)\n", + "\tat scala.concurrent.impl.Promise$DefaultPromise.tryComplete(Promise.scala:284)\n", + "\tat scala.concurrent.Promise.complete(Promise.scala:53)\n", + "\tat scala.concurrent.Promise.complete$(Promise.scala:52)\n", + "\tat scala.concurrent.impl.Promise$DefaultPromise.complete(Promise.scala:187)\n", + "\tat scala.concurrent.Promise.failure(Promise.scala:104)\n", + "\tat scala.concurrent.Promise.failure$(Promise.scala:104)\n", + "\tat scala.concurrent.impl.Promise$DefaultPromise.failure(Promise.scala:187)\n", + "\tat org.apache.spark.network.BlockTransferService$$anon$1.onBlockFetchSuccess(BlockTransferService.scala:98)\n", + "\tat org.apache.spark.network.shuffle.RetryingBlockFetcher$RetryingBlockFetchListener.onBlockFetchSuccess(RetryingBlockFetcher.java:216)\n", + "\tat org.apache.spark.network.shuffle.OneForOneBlockFetcher$ChunkCallback.onSuccess(OneForOneBlockFetcher.java:190)\n", + "\tat org.apache.spark.network.client.TransportResponseHandler.handle(TransportResponseHandler.java:171)\n", + "\tat org.apache.spark.network.server.TransportChannelHandler.channelRead0(TransportChannelHandler.java:142)\n", + "\tat org.apache.spark.network.server.TransportChannelHandler.channelRead0(TransportChannelHandler.java:53)\n", + "\tat io.netty.channel.SimpleChannelInboundHandler.channelRead(SimpleChannelInboundHandler.java:99)\n", + "\tat io.netty.channel.AbstractChannelHandlerContext.invokeChannelRead(AbstractChannelHandlerContext.java:379)\n", + "\tat io.netty.channel.AbstractChannelHandlerContext.invokeChannelRead(AbstractChannelHandlerContext.java:365)\n", + "\tat io.netty.channel.AbstractChannelHandlerContext.fireChannelRead(AbstractChannelHandlerContext.java:357)\n", + "\tat io.netty.handler.timeout.IdleStateHandler.channelRead(IdleStateHandler.java:286)\n", + "\tat io.netty.channel.AbstractChannelHandlerContext.invokeChannelRead(AbstractChannelHandlerContext.java:379)\n", + "\tat 
io.netty.channel.AbstractChannelHandlerContext.invokeChannelRead(AbstractChannelHandlerContext.java:365)\n", + "\tat io.netty.channel.AbstractChannelHandlerContext.fireChannelRead(AbstractChannelHandlerContext.java:357)\n", + "\tat io.netty.handler.codec.MessageToMessageDecoder.channelRead(MessageToMessageDecoder.java:103)\n", + "\tat io.netty.channel.AbstractChannelHandlerContext.invokeChannelRead(AbstractChannelHandlerContext.java:379)\n", + "\tat io.netty.channel.AbstractChannelHandlerContext.invokeChannelRead(AbstractChannelHandlerContext.java:365)\n", + "\tat io.netty.channel.AbstractChannelHandlerContext.fireChannelRead(AbstractChannelHandlerContext.java:357)\n", + "\tat org.apache.spark.network.util.TransportFrameDecoder.channelRead(TransportFrameDecoder.java:102)\n", + "\tat io.netty.channel.AbstractChannelHandlerContext.invokeChannelRead(AbstractChannelHandlerContext.java:379)\n", + "\tat io.netty.channel.AbstractChannelHandlerContext.invokeChannelRead(AbstractChannelHandlerContext.java:365)\n", + "\tat io.netty.channel.AbstractChannelHandlerContext.fireChannelRead(AbstractChannelHandlerContext.java:357)\n", + "\tat io.netty.channel.DefaultChannelPipeline$HeadContext.channelRead(DefaultChannelPipeline.java:1410)\n", + "\tat io.netty.channel.AbstractChannelHandlerContext.invokeChannelRead(AbstractChannelHandlerContext.java:379)\n", + "\tat io.netty.channel.AbstractChannelHandlerContext.invokeChannelRead(AbstractChannelHandlerContext.java:365)\n", + "\tat io.netty.channel.DefaultChannelPipeline.fireChannelRead(DefaultChannelPipeline.java:919)\n", + "\tat io.netty.channel.nio.AbstractNioByteChannel$NioByteUnsafe.read(AbstractNioByteChannel.java:163)\n", + "\tat io.netty.channel.nio.NioEventLoop.processSelectedKey(NioEventLoop.java:714)\n", + "\tat io.netty.channel.nio.NioEventLoop.processSelectedKeysOptimized(NioEventLoop.java:650)\n", + "\tat io.netty.channel.nio.NioEventLoop.processSelectedKeys(NioEventLoop.java:576)\n", + "\tat io.netty.channel.nio.NioEventLoop.run(NioEventLoop.java:493)\n", + "\tat io.netty.util.concurrent.SingleThreadEventExecutor$4.run(SingleThreadEventExecutor.java:989)\n", + "\tat io.netty.util.internal.ThreadExecutorMap$2.run(ThreadExecutorMap.java:74)\n", + "\tat io.netty.util.concurrent.FastThreadLocalRunnable.run(FastThreadLocalRunnable.java:30)\n", + "\t... 1 more\n", + "Caused by: java.lang.OutOfMemoryError: Java heap space\n", + "23/11/24 21:02:55 WARN org.apache.spark.scheduler.TaskSetManager: Lost task 483.0 in stage 19.0 (TID 2203) (hub-msca-bdp-dphub-students-test-ridhi-w-0.c.msca-bdp-student-ap.internal executor 6): TaskResultLost (result lost from block manager)\n", + "23/11/24 21:02:57 WARN org.apache.spark.storage.BlockManager: Failed to fetch block after 1 fetch failures. 
Most recent failure cause:\n", + "org.apache.spark.SparkException: Exception thrown in awaitResult: \n", + "\tat org.apache.spark.util.ThreadUtils$.awaitResult(ThreadUtils.scala:301)\n", + "\tat org.apache.spark.network.BlockTransferService.fetchBlockSync(BlockTransferService.scala:103)\n", + "\tat org.apache.spark.storage.BlockManager.fetchRemoteManagedBuffer(BlockManager.scala:1069)\n", + "\tat org.apache.spark.storage.BlockManager.$anonfun$getRemoteBlock$8(BlockManager.scala:1013)\n", + "\tat scala.Option.orElse(Option.scala:447)\n", + "\tat org.apache.spark.storage.BlockManager.getRemoteBlock(BlockManager.scala:1013)\n", + "\tat org.apache.spark.storage.BlockManager.getRemoteBytes(BlockManager.scala:1151)\n", + "\tat org.apache.spark.scheduler.TaskResultGetter$$anon$3.$anonfun$run$1(TaskResultGetter.scala:88)\n", + "\tat scala.runtime.java8.JFunction0$mcV$sp.apply(JFunction0$mcV$sp.java:23)\n", + "\tat org.apache.spark.util.Utils$.logUncaughtExceptions(Utils.scala:1996)\n", + "\tat org.apache.spark.scheduler.TaskResultGetter$$anon$3.run(TaskResultGetter.scala:63)\n", + "\tat java.util.concurrent.ThreadPoolExecutor.runWorker(ThreadPoolExecutor.java:1149)\n", + "\tat java.util.concurrent.ThreadPoolExecutor$Worker.run(ThreadPoolExecutor.java:624)\n", + "\tat java.lang.Thread.run(Thread.java:750)\n", + "Caused by: java.util.concurrent.ExecutionException: Boxed Error\n", + "\tat scala.concurrent.impl.Promise$.resolver(Promise.scala:87)\n", + "\tat scala.concurrent.impl.Promise$.scala$concurrent$impl$Promise$$resolveTry(Promise.scala:79)\n", + "\tat scala.concurrent.impl.Promise$DefaultPromise.tryComplete(Promise.scala:284)\n", + "\tat scala.concurrent.Promise.complete(Promise.scala:53)\n", + "\tat scala.concurrent.Promise.complete$(Promise.scala:52)\n", + "\tat scala.concurrent.impl.Promise$DefaultPromise.complete(Promise.scala:187)\n", + "\tat scala.concurrent.Promise.failure(Promise.scala:104)\n", + "\tat scala.concurrent.Promise.failure$(Promise.scala:104)\n", + "\tat scala.concurrent.impl.Promise$DefaultPromise.failure(Promise.scala:187)\n", + "\tat org.apache.spark.network.BlockTransferService$$anon$1.onBlockFetchSuccess(BlockTransferService.scala:98)\n", + "\tat org.apache.spark.network.shuffle.RetryingBlockFetcher$RetryingBlockFetchListener.onBlockFetchSuccess(RetryingBlockFetcher.java:216)\n", + "\tat org.apache.spark.network.shuffle.OneForOneBlockFetcher$ChunkCallback.onSuccess(OneForOneBlockFetcher.java:190)\n", + "\tat org.apache.spark.network.client.TransportResponseHandler.handle(TransportResponseHandler.java:171)\n", + "\tat org.apache.spark.network.server.TransportChannelHandler.channelRead0(TransportChannelHandler.java:142)\n", + "\tat org.apache.spark.network.server.TransportChannelHandler.channelRead0(TransportChannelHandler.java:53)\n", + "\tat io.netty.channel.SimpleChannelInboundHandler.channelRead(SimpleChannelInboundHandler.java:99)\n", + "\tat io.netty.channel.AbstractChannelHandlerContext.invokeChannelRead(AbstractChannelHandlerContext.java:379)\n", + "\tat io.netty.channel.AbstractChannelHandlerContext.invokeChannelRead(AbstractChannelHandlerContext.java:365)\n", + "\tat io.netty.channel.AbstractChannelHandlerContext.fireChannelRead(AbstractChannelHandlerContext.java:357)\n", + "\tat io.netty.handler.timeout.IdleStateHandler.channelRead(IdleStateHandler.java:286)\n", + "\tat io.netty.channel.AbstractChannelHandlerContext.invokeChannelRead(AbstractChannelHandlerContext.java:379)\n", + "\tat 
io.netty.channel.AbstractChannelHandlerContext.invokeChannelRead(AbstractChannelHandlerContext.java:365)\n", + "\tat io.netty.channel.AbstractChannelHandlerContext.fireChannelRead(AbstractChannelHandlerContext.java:357)\n", + "\tat io.netty.handler.codec.MessageToMessageDecoder.channelRead(MessageToMessageDecoder.java:103)\n", + "\tat io.netty.channel.AbstractChannelHandlerContext.invokeChannelRead(AbstractChannelHandlerContext.java:379)\n", + "\tat io.netty.channel.AbstractChannelHandlerContext.invokeChannelRead(AbstractChannelHandlerContext.java:365)\n", + "\tat io.netty.channel.AbstractChannelHandlerContext.fireChannelRead(AbstractChannelHandlerContext.java:357)\n", + "\tat org.apache.spark.network.util.TransportFrameDecoder.channelRead(TransportFrameDecoder.java:102)\n", + "\tat io.netty.channel.AbstractChannelHandlerContext.invokeChannelRead(AbstractChannelHandlerContext.java:379)\n", + "\tat io.netty.channel.AbstractChannelHandlerContext.invokeChannelRead(AbstractChannelHandlerContext.java:365)\n", + "\tat io.netty.channel.AbstractChannelHandlerContext.fireChannelRead(AbstractChannelHandlerContext.java:357)\n", + "\tat io.netty.channel.DefaultChannelPipeline$HeadContext.channelRead(DefaultChannelPipeline.java:1410)\n", + "\tat io.netty.channel.AbstractChannelHandlerContext.invokeChannelRead(AbstractChannelHandlerContext.java:379)\n", + "\tat io.netty.channel.AbstractChannelHandlerContext.invokeChannelRead(AbstractChannelHandlerContext.java:365)\n", + "\tat io.netty.channel.DefaultChannelPipeline.fireChannelRead(DefaultChannelPipeline.java:919)\n", + "\tat io.netty.channel.nio.AbstractNioByteChannel$NioByteUnsafe.read(AbstractNioByteChannel.java:163)\n", + "\tat io.netty.channel.nio.NioEventLoop.processSelectedKey(NioEventLoop.java:714)\n", + "\tat io.netty.channel.nio.NioEventLoop.processSelectedKeysOptimized(NioEventLoop.java:650)\n", + "\tat io.netty.channel.nio.NioEventLoop.processSelectedKeys(NioEventLoop.java:576)\n", + "\tat io.netty.channel.nio.NioEventLoop.run(NioEventLoop.java:493)\n", + "\tat io.netty.util.concurrent.SingleThreadEventExecutor$4.run(SingleThreadEventExecutor.java:989)\n", + "\tat io.netty.util.internal.ThreadExecutorMap$2.run(ThreadExecutorMap.java:74)\n", + "\tat io.netty.util.concurrent.FastThreadLocalRunnable.run(FastThreadLocalRunnable.java:30)\n", + "\t... 1 more\n", + "Caused by: java.lang.OutOfMemoryError: Java heap space\n", + "23/11/24 21:02:57 WARN org.apache.spark.scheduler.TaskSetManager: Lost task 495.0 in stage 19.0 (TID 2215) (hub-msca-bdp-dphub-students-test-ridhi-w-1.c.msca-bdp-student-ap.internal executor 7): TaskResultLost (result lost from block manager)\n", + "23/11/24 21:02:59 WARN org.apache.spark.storage.BlockManager: Failed to fetch block after 1 fetch failures. 
Most recent failure cause:\n", + "org.apache.spark.SparkException: Exception thrown in awaitResult: \n", + "\tat org.apache.spark.util.ThreadUtils$.awaitResult(ThreadUtils.scala:301)\n", + "\tat org.apache.spark.network.BlockTransferService.fetchBlockSync(BlockTransferService.scala:103)\n", + "\tat org.apache.spark.storage.BlockManager.fetchRemoteManagedBuffer(BlockManager.scala:1069)\n", + "\tat org.apache.spark.storage.BlockManager.$anonfun$getRemoteBlock$8(BlockManager.scala:1013)\n", + "\tat scala.Option.orElse(Option.scala:447)\n", + "\tat org.apache.spark.storage.BlockManager.getRemoteBlock(BlockManager.scala:1013)\n", + "\tat org.apache.spark.storage.BlockManager.getRemoteBytes(BlockManager.scala:1151)\n", + "\tat org.apache.spark.scheduler.TaskResultGetter$$anon$3.$anonfun$run$1(TaskResultGetter.scala:88)\n", + "\tat scala.runtime.java8.JFunction0$mcV$sp.apply(JFunction0$mcV$sp.java:23)\n", + "\tat org.apache.spark.util.Utils$.logUncaughtExceptions(Utils.scala:1996)\n", + "\tat org.apache.spark.scheduler.TaskResultGetter$$anon$3.run(TaskResultGetter.scala:63)\n", + "\tat java.util.concurrent.ThreadPoolExecutor.runWorker(ThreadPoolExecutor.java:1149)\n", + "\tat java.util.concurrent.ThreadPoolExecutor$Worker.run(ThreadPoolExecutor.java:624)\n", + "\tat java.lang.Thread.run(Thread.java:750)\n", + "Caused by: java.util.concurrent.ExecutionException: Boxed Error\n", + "\tat scala.concurrent.impl.Promise$.resolver(Promise.scala:87)\n", + "\tat scala.concurrent.impl.Promise$.scala$concurrent$impl$Promise$$resolveTry(Promise.scala:79)\n", + "\tat scala.concurrent.impl.Promise$DefaultPromise.tryComplete(Promise.scala:284)\n", + "\tat scala.concurrent.Promise.complete(Promise.scala:53)\n", + "\tat scala.concurrent.Promise.complete$(Promise.scala:52)\n", + "\tat scala.concurrent.impl.Promise$DefaultPromise.complete(Promise.scala:187)\n", + "\tat scala.concurrent.Promise.failure(Promise.scala:104)\n", + "\tat scala.concurrent.Promise.failure$(Promise.scala:104)\n", + "\tat scala.concurrent.impl.Promise$DefaultPromise.failure(Promise.scala:187)\n", + "\tat org.apache.spark.network.BlockTransferService$$anon$1.onBlockFetchSuccess(BlockTransferService.scala:98)\n", + "\tat org.apache.spark.network.shuffle.RetryingBlockFetcher$RetryingBlockFetchListener.onBlockFetchSuccess(RetryingBlockFetcher.java:216)\n", + "\tat org.apache.spark.network.shuffle.OneForOneBlockFetcher$ChunkCallback.onSuccess(OneForOneBlockFetcher.java:190)\n", + "\tat org.apache.spark.network.client.TransportResponseHandler.handle(TransportResponseHandler.java:171)\n", + "\tat org.apache.spark.network.server.TransportChannelHandler.channelRead0(TransportChannelHandler.java:142)\n", + "\tat org.apache.spark.network.server.TransportChannelHandler.channelRead0(TransportChannelHandler.java:53)\n", + "\tat io.netty.channel.SimpleChannelInboundHandler.channelRead(SimpleChannelInboundHandler.java:99)\n", + "\tat io.netty.channel.AbstractChannelHandlerContext.invokeChannelRead(AbstractChannelHandlerContext.java:379)\n", + "\tat io.netty.channel.AbstractChannelHandlerContext.invokeChannelRead(AbstractChannelHandlerContext.java:365)\n", + "\tat io.netty.channel.AbstractChannelHandlerContext.fireChannelRead(AbstractChannelHandlerContext.java:357)\n", + "\tat io.netty.handler.timeout.IdleStateHandler.channelRead(IdleStateHandler.java:286)\n", + "\tat io.netty.channel.AbstractChannelHandlerContext.invokeChannelRead(AbstractChannelHandlerContext.java:379)\n", + "\tat 
io.netty.channel.AbstractChannelHandlerContext.invokeChannelRead(AbstractChannelHandlerContext.java:365)\n", + "\tat io.netty.channel.AbstractChannelHandlerContext.fireChannelRead(AbstractChannelHandlerContext.java:357)\n", + "\tat io.netty.handler.codec.MessageToMessageDecoder.channelRead(MessageToMessageDecoder.java:103)\n", + "\tat io.netty.channel.AbstractChannelHandlerContext.invokeChannelRead(AbstractChannelHandlerContext.java:379)\n", + "\tat io.netty.channel.AbstractChannelHandlerContext.invokeChannelRead(AbstractChannelHandlerContext.java:365)\n", + "\tat io.netty.channel.AbstractChannelHandlerContext.fireChannelRead(AbstractChannelHandlerContext.java:357)\n", + "\tat org.apache.spark.network.util.TransportFrameDecoder.channelRead(TransportFrameDecoder.java:102)\n", + "\tat io.netty.channel.AbstractChannelHandlerContext.invokeChannelRead(AbstractChannelHandlerContext.java:379)\n", + "\tat io.netty.channel.AbstractChannelHandlerContext.invokeChannelRead(AbstractChannelHandlerContext.java:365)\n", + "\tat io.netty.channel.AbstractChannelHandlerContext.fireChannelRead(AbstractChannelHandlerContext.java:357)\n", + "\tat io.netty.channel.DefaultChannelPipeline$HeadContext.channelRead(DefaultChannelPipeline.java:1410)\n", + "\tat io.netty.channel.AbstractChannelHandlerContext.invokeChannelRead(AbstractChannelHandlerContext.java:379)\n", + "\tat io.netty.channel.AbstractChannelHandlerContext.invokeChannelRead(AbstractChannelHandlerContext.java:365)\n", + "\tat io.netty.channel.DefaultChannelPipeline.fireChannelRead(DefaultChannelPipeline.java:919)\n", + "\tat io.netty.channel.nio.AbstractNioByteChannel$NioByteUnsafe.read(AbstractNioByteChannel.java:163)\n", + "\tat io.netty.channel.nio.NioEventLoop.processSelectedKey(NioEventLoop.java:714)\n", + "\tat io.netty.channel.nio.NioEventLoop.processSelectedKeysOptimized(NioEventLoop.java:650)\n", + "\tat io.netty.channel.nio.NioEventLoop.processSelectedKeys(NioEventLoop.java:576)\n", + "\tat io.netty.channel.nio.NioEventLoop.run(NioEventLoop.java:493)\n", + "\tat io.netty.util.concurrent.SingleThreadEventExecutor$4.run(SingleThreadEventExecutor.java:989)\n", + "\tat io.netty.util.internal.ThreadExecutorMap$2.run(ThreadExecutorMap.java:74)\n", + "\tat io.netty.util.concurrent.FastThreadLocalRunnable.run(FastThreadLocalRunnable.java:30)\n", + "\t... 1 more\n", + "Caused by: java.lang.OutOfMemoryError: Java heap space\n", + "23/11/24 21:02:59 WARN org.apache.spark.scheduler.TaskSetManager: Lost task 484.0 in stage 19.0 (TID 2204) (hub-msca-bdp-dphub-students-test-ridhi-w-0.c.msca-bdp-student-ap.internal executor 6): TaskResultLost (result lost from block manager)\n", + "23/11/24 21:03:01 WARN org.apache.spark.storage.BlockManager: Failed to fetch block after 1 fetch failures. 
Most recent failure cause:\n", + "org.apache.spark.SparkException: Exception thrown in awaitResult: \n", + "\tat org.apache.spark.util.ThreadUtils$.awaitResult(ThreadUtils.scala:301)\n", + "\tat org.apache.spark.network.BlockTransferService.fetchBlockSync(BlockTransferService.scala:103)\n", + "\tat org.apache.spark.storage.BlockManager.fetchRemoteManagedBuffer(BlockManager.scala:1069)\n", + "\tat org.apache.spark.storage.BlockManager.$anonfun$getRemoteBlock$8(BlockManager.scala:1013)\n", + "\tat scala.Option.orElse(Option.scala:447)\n", + "\tat org.apache.spark.storage.BlockManager.getRemoteBlock(BlockManager.scala:1013)\n", + "\tat org.apache.spark.storage.BlockManager.getRemoteBytes(BlockManager.scala:1151)\n", + "\tat org.apache.spark.scheduler.TaskResultGetter$$anon$3.$anonfun$run$1(TaskResultGetter.scala:88)\n", + "\tat scala.runtime.java8.JFunction0$mcV$sp.apply(JFunction0$mcV$sp.java:23)\n", + "\tat org.apache.spark.util.Utils$.logUncaughtExceptions(Utils.scala:1996)\n", + "\tat org.apache.spark.scheduler.TaskResultGetter$$anon$3.run(TaskResultGetter.scala:63)\n", + "\tat java.util.concurrent.ThreadPoolExecutor.runWorker(ThreadPoolExecutor.java:1149)\n", + "\tat java.util.concurrent.ThreadPoolExecutor$Worker.run(ThreadPoolExecutor.java:624)\n", + "\tat java.lang.Thread.run(Thread.java:750)\n", + "Caused by: java.util.concurrent.ExecutionException: Boxed Error\n", + "\tat scala.concurrent.impl.Promise$.resolver(Promise.scala:87)\n", + "\tat scala.concurrent.impl.Promise$.scala$concurrent$impl$Promise$$resolveTry(Promise.scala:79)\n", + "\tat scala.concurrent.impl.Promise$DefaultPromise.tryComplete(Promise.scala:284)\n", + "\tat scala.concurrent.Promise.complete(Promise.scala:53)\n", + "\tat scala.concurrent.Promise.complete$(Promise.scala:52)\n", + "\tat scala.concurrent.impl.Promise$DefaultPromise.complete(Promise.scala:187)\n", + "\tat scala.concurrent.Promise.failure(Promise.scala:104)\n", + "\tat scala.concurrent.Promise.failure$(Promise.scala:104)\n", + "\tat scala.concurrent.impl.Promise$DefaultPromise.failure(Promise.scala:187)\n", + "\tat org.apache.spark.network.BlockTransferService$$anon$1.onBlockFetchSuccess(BlockTransferService.scala:98)\n", + "\tat org.apache.spark.network.shuffle.RetryingBlockFetcher$RetryingBlockFetchListener.onBlockFetchSuccess(RetryingBlockFetcher.java:216)\n", + "\tat org.apache.spark.network.shuffle.OneForOneBlockFetcher$ChunkCallback.onSuccess(OneForOneBlockFetcher.java:190)\n", + "\tat org.apache.spark.network.client.TransportResponseHandler.handle(TransportResponseHandler.java:171)\n", + "\tat org.apache.spark.network.server.TransportChannelHandler.channelRead0(TransportChannelHandler.java:142)\n", + "\tat org.apache.spark.network.server.TransportChannelHandler.channelRead0(TransportChannelHandler.java:53)\n", + "\tat io.netty.channel.SimpleChannelInboundHandler.channelRead(SimpleChannelInboundHandler.java:99)\n", + "\tat io.netty.channel.AbstractChannelHandlerContext.invokeChannelRead(AbstractChannelHandlerContext.java:379)\n", + "\tat io.netty.channel.AbstractChannelHandlerContext.invokeChannelRead(AbstractChannelHandlerContext.java:365)\n", + "\tat io.netty.channel.AbstractChannelHandlerContext.fireChannelRead(AbstractChannelHandlerContext.java:357)\n", + "\tat io.netty.handler.timeout.IdleStateHandler.channelRead(IdleStateHandler.java:286)\n", + "\tat io.netty.channel.AbstractChannelHandlerContext.invokeChannelRead(AbstractChannelHandlerContext.java:379)\n", + "\tat 
io.netty.channel.AbstractChannelHandlerContext.invokeChannelRead(AbstractChannelHandlerContext.java:365)\n", + "\tat io.netty.channel.AbstractChannelHandlerContext.fireChannelRead(AbstractChannelHandlerContext.java:357)\n", + "\tat io.netty.handler.codec.MessageToMessageDecoder.channelRead(MessageToMessageDecoder.java:103)\n", + "\tat io.netty.channel.AbstractChannelHandlerContext.invokeChannelRead(AbstractChannelHandlerContext.java:379)\n", + "\tat io.netty.channel.AbstractChannelHandlerContext.invokeChannelRead(AbstractChannelHandlerContext.java:365)\n", + "\tat io.netty.channel.AbstractChannelHandlerContext.fireChannelRead(AbstractChannelHandlerContext.java:357)\n", + "\tat org.apache.spark.network.util.TransportFrameDecoder.channelRead(TransportFrameDecoder.java:102)\n", + "\tat io.netty.channel.AbstractChannelHandlerContext.invokeChannelRead(AbstractChannelHandlerContext.java:379)\n", + "\tat io.netty.channel.AbstractChannelHandlerContext.invokeChannelRead(AbstractChannelHandlerContext.java:365)\n", + "\tat io.netty.channel.AbstractChannelHandlerContext.fireChannelRead(AbstractChannelHandlerContext.java:357)\n", + "\tat io.netty.channel.DefaultChannelPipeline$HeadContext.channelRead(DefaultChannelPipeline.java:1410)\n", + "\tat io.netty.channel.AbstractChannelHandlerContext.invokeChannelRead(AbstractChannelHandlerContext.java:379)\n", + "\tat io.netty.channel.AbstractChannelHandlerContext.invokeChannelRead(AbstractChannelHandlerContext.java:365)\n", + "\tat io.netty.channel.DefaultChannelPipeline.fireChannelRead(DefaultChannelPipeline.java:919)\n", + "\tat io.netty.channel.nio.AbstractNioByteChannel$NioByteUnsafe.read(AbstractNioByteChannel.java:163)\n", + "\tat io.netty.channel.nio.NioEventLoop.processSelectedKey(NioEventLoop.java:714)\n", + "\tat io.netty.channel.nio.NioEventLoop.processSelectedKeysOptimized(NioEventLoop.java:650)\n", + "\tat io.netty.channel.nio.NioEventLoop.processSelectedKeys(NioEventLoop.java:576)\n", + "\tat io.netty.channel.nio.NioEventLoop.run(NioEventLoop.java:493)\n", + "\tat io.netty.util.concurrent.SingleThreadEventExecutor$4.run(SingleThreadEventExecutor.java:989)\n", + "\tat io.netty.util.internal.ThreadExecutorMap$2.run(ThreadExecutorMap.java:74)\n", + "\tat io.netty.util.concurrent.FastThreadLocalRunnable.run(FastThreadLocalRunnable.java:30)\n", + "\t... 1 more\n", + "Caused by: java.lang.OutOfMemoryError: Java heap space\n", + "23/11/24 21:03:01 WARN org.apache.spark.scheduler.TaskSetManager: Lost task 491.0 in stage 19.0 (TID 2211) (hub-msca-bdp-dphub-students-test-ridhi-w-1.c.msca-bdp-student-ap.internal executor 7): TaskResultLost (result lost from block manager)\n", + "23/11/24 21:03:03 WARN org.apache.spark.storage.BlockManager: Failed to fetch block after 1 fetch failures. 
Most recent failure cause:\n", + "org.apache.spark.SparkException: Exception thrown in awaitResult: \n", + "\tat org.apache.spark.util.ThreadUtils$.awaitResult(ThreadUtils.scala:301)\n", + "\tat org.apache.spark.network.BlockTransferService.fetchBlockSync(BlockTransferService.scala:103)\n", + "\tat org.apache.spark.storage.BlockManager.fetchRemoteManagedBuffer(BlockManager.scala:1069)\n", + "\tat org.apache.spark.storage.BlockManager.$anonfun$getRemoteBlock$8(BlockManager.scala:1013)\n", + "\tat scala.Option.orElse(Option.scala:447)\n", + "\tat org.apache.spark.storage.BlockManager.getRemoteBlock(BlockManager.scala:1013)\n", + "\tat org.apache.spark.storage.BlockManager.getRemoteBytes(BlockManager.scala:1151)\n", + "\tat org.apache.spark.scheduler.TaskResultGetter$$anon$3.$anonfun$run$1(TaskResultGetter.scala:88)\n", + "\tat scala.runtime.java8.JFunction0$mcV$sp.apply(JFunction0$mcV$sp.java:23)\n", + "\tat org.apache.spark.util.Utils$.logUncaughtExceptions(Utils.scala:1996)\n", + "\tat org.apache.spark.scheduler.TaskResultGetter$$anon$3.run(TaskResultGetter.scala:63)\n", + "\tat java.util.concurrent.ThreadPoolExecutor.runWorker(ThreadPoolExecutor.java:1149)\n", + "\tat java.util.concurrent.ThreadPoolExecutor$Worker.run(ThreadPoolExecutor.java:624)\n", + "\tat java.lang.Thread.run(Thread.java:750)\n", + "Caused by: java.util.concurrent.ExecutionException: Boxed Error\n", + "\tat scala.concurrent.impl.Promise$.resolver(Promise.scala:87)\n", + "\tat scala.concurrent.impl.Promise$.scala$concurrent$impl$Promise$$resolveTry(Promise.scala:79)\n", + "\tat scala.concurrent.impl.Promise$DefaultPromise.tryComplete(Promise.scala:284)\n", + "\tat scala.concurrent.Promise.complete(Promise.scala:53)\n", + "\tat scala.concurrent.Promise.complete$(Promise.scala:52)\n", + "\tat scala.concurrent.impl.Promise$DefaultPromise.complete(Promise.scala:187)\n", + "\tat scala.concurrent.Promise.failure(Promise.scala:104)\n", + "\tat scala.concurrent.Promise.failure$(Promise.scala:104)\n", + "\tat scala.concurrent.impl.Promise$DefaultPromise.failure(Promise.scala:187)\n", + "\tat org.apache.spark.network.BlockTransferService$$anon$1.onBlockFetchSuccess(BlockTransferService.scala:98)\n", + "\tat org.apache.spark.network.shuffle.RetryingBlockFetcher$RetryingBlockFetchListener.onBlockFetchSuccess(RetryingBlockFetcher.java:216)\n", + "\tat org.apache.spark.network.shuffle.OneForOneBlockFetcher$ChunkCallback.onSuccess(OneForOneBlockFetcher.java:190)\n", + "\tat org.apache.spark.network.client.TransportResponseHandler.handle(TransportResponseHandler.java:171)\n", + "\tat org.apache.spark.network.server.TransportChannelHandler.channelRead0(TransportChannelHandler.java:142)\n", + "\tat org.apache.spark.network.server.TransportChannelHandler.channelRead0(TransportChannelHandler.java:53)\n", + "\tat io.netty.channel.SimpleChannelInboundHandler.channelRead(SimpleChannelInboundHandler.java:99)\n", + "\tat io.netty.channel.AbstractChannelHandlerContext.invokeChannelRead(AbstractChannelHandlerContext.java:379)\n", + "\tat io.netty.channel.AbstractChannelHandlerContext.invokeChannelRead(AbstractChannelHandlerContext.java:365)\n", + "\tat io.netty.channel.AbstractChannelHandlerContext.fireChannelRead(AbstractChannelHandlerContext.java:357)\n", + "\tat io.netty.handler.timeout.IdleStateHandler.channelRead(IdleStateHandler.java:286)\n", + "\tat io.netty.channel.AbstractChannelHandlerContext.invokeChannelRead(AbstractChannelHandlerContext.java:379)\n", + "\tat 
io.netty.channel.AbstractChannelHandlerContext.invokeChannelRead(AbstractChannelHandlerContext.java:365)\n", + "\tat io.netty.channel.AbstractChannelHandlerContext.fireChannelRead(AbstractChannelHandlerContext.java:357)\n", + "\tat io.netty.handler.codec.MessageToMessageDecoder.channelRead(MessageToMessageDecoder.java:103)\n", + "\tat io.netty.channel.AbstractChannelHandlerContext.invokeChannelRead(AbstractChannelHandlerContext.java:379)\n", + "\tat io.netty.channel.AbstractChannelHandlerContext.invokeChannelRead(AbstractChannelHandlerContext.java:365)\n", + "\tat io.netty.channel.AbstractChannelHandlerContext.fireChannelRead(AbstractChannelHandlerContext.java:357)\n", + "\tat org.apache.spark.network.util.TransportFrameDecoder.channelRead(TransportFrameDecoder.java:102)\n", + "\tat io.netty.channel.AbstractChannelHandlerContext.invokeChannelRead(AbstractChannelHandlerContext.java:379)\n", + "\tat io.netty.channel.AbstractChannelHandlerContext.invokeChannelRead(AbstractChannelHandlerContext.java:365)\n", + "\tat io.netty.channel.AbstractChannelHandlerContext.fireChannelRead(AbstractChannelHandlerContext.java:357)\n", + "\tat io.netty.channel.DefaultChannelPipeline$HeadContext.channelRead(DefaultChannelPipeline.java:1410)\n", + "\tat io.netty.channel.AbstractChannelHandlerContext.invokeChannelRead(AbstractChannelHandlerContext.java:379)\n", + "\tat io.netty.channel.AbstractChannelHandlerContext.invokeChannelRead(AbstractChannelHandlerContext.java:365)\n", + "\tat io.netty.channel.DefaultChannelPipeline.fireChannelRead(DefaultChannelPipeline.java:919)\n", + "\tat io.netty.channel.nio.AbstractNioByteChannel$NioByteUnsafe.read(AbstractNioByteChannel.java:163)\n", + "\tat io.netty.channel.nio.NioEventLoop.processSelectedKey(NioEventLoop.java:714)\n", + "\tat io.netty.channel.nio.NioEventLoop.processSelectedKeysOptimized(NioEventLoop.java:650)\n", + "\tat io.netty.channel.nio.NioEventLoop.processSelectedKeys(NioEventLoop.java:576)\n", + "\tat io.netty.channel.nio.NioEventLoop.run(NioEventLoop.java:493)\n", + "\tat io.netty.util.concurrent.SingleThreadEventExecutor$4.run(SingleThreadEventExecutor.java:989)\n", + "\tat io.netty.util.internal.ThreadExecutorMap$2.run(ThreadExecutorMap.java:74)\n", + "\tat io.netty.util.concurrent.FastThreadLocalRunnable.run(FastThreadLocalRunnable.java:30)\n", + "\t... 1 more\n", + "Caused by: java.lang.OutOfMemoryError: Java heap space\n", + "23/11/24 21:03:03 WARN org.apache.spark.scheduler.TaskSetManager: Lost task 488.0 in stage 19.0 (TID 2208) (hub-msca-bdp-dphub-students-test-ridhi-w-0.c.msca-bdp-student-ap.internal executor 6): TaskResultLost (result lost from block manager)\n", + "23/11/24 21:03:06 WARN org.apache.spark.storage.BlockManager: Failed to fetch block after 1 fetch failures. 
Most recent failure cause:\n", + "org.apache.spark.SparkException: Exception thrown in awaitResult: \n", + "\tat org.apache.spark.util.ThreadUtils$.awaitResult(ThreadUtils.scala:301)\n", + "\tat org.apache.spark.network.BlockTransferService.fetchBlockSync(BlockTransferService.scala:103)\n", + "\tat org.apache.spark.storage.BlockManager.fetchRemoteManagedBuffer(BlockManager.scala:1069)\n", + "\tat org.apache.spark.storage.BlockManager.$anonfun$getRemoteBlock$8(BlockManager.scala:1013)\n", + "\tat scala.Option.orElse(Option.scala:447)\n", + "\tat org.apache.spark.storage.BlockManager.getRemoteBlock(BlockManager.scala:1013)\n", + "\tat org.apache.spark.storage.BlockManager.getRemoteBytes(BlockManager.scala:1151)\n", + "\tat org.apache.spark.scheduler.TaskResultGetter$$anon$3.$anonfun$run$1(TaskResultGetter.scala:88)\n", + "\tat scala.runtime.java8.JFunction0$mcV$sp.apply(JFunction0$mcV$sp.java:23)\n", + "\tat org.apache.spark.util.Utils$.logUncaughtExceptions(Utils.scala:1996)\n", + "\tat org.apache.spark.scheduler.TaskResultGetter$$anon$3.run(TaskResultGetter.scala:63)\n", + "\tat java.util.concurrent.ThreadPoolExecutor.runWorker(ThreadPoolExecutor.java:1149)\n", + "\tat java.util.concurrent.ThreadPoolExecutor$Worker.run(ThreadPoolExecutor.java:624)\n", + "\tat java.lang.Thread.run(Thread.java:750)\n", + "Caused by: java.util.concurrent.ExecutionException: Boxed Error\n", + "\tat scala.concurrent.impl.Promise$.resolver(Promise.scala:87)\n", + "\tat scala.concurrent.impl.Promise$.scala$concurrent$impl$Promise$$resolveTry(Promise.scala:79)\n", + "\tat scala.concurrent.impl.Promise$DefaultPromise.tryComplete(Promise.scala:284)\n", + "\tat scala.concurrent.Promise.complete(Promise.scala:53)\n", + "\tat scala.concurrent.Promise.complete$(Promise.scala:52)\n", + "\tat scala.concurrent.impl.Promise$DefaultPromise.complete(Promise.scala:187)\n", + "\tat scala.concurrent.Promise.failure(Promise.scala:104)\n", + "\tat scala.concurrent.Promise.failure$(Promise.scala:104)\n", + "\tat scala.concurrent.impl.Promise$DefaultPromise.failure(Promise.scala:187)\n", + "\tat org.apache.spark.network.BlockTransferService$$anon$1.onBlockFetchSuccess(BlockTransferService.scala:98)\n", + "\tat org.apache.spark.network.shuffle.RetryingBlockFetcher$RetryingBlockFetchListener.onBlockFetchSuccess(RetryingBlockFetcher.java:216)\n", + "\tat org.apache.spark.network.shuffle.OneForOneBlockFetcher$ChunkCallback.onSuccess(OneForOneBlockFetcher.java:190)\n", + "\tat org.apache.spark.network.client.TransportResponseHandler.handle(TransportResponseHandler.java:171)\n", + "\tat org.apache.spark.network.server.TransportChannelHandler.channelRead0(TransportChannelHandler.java:142)\n", + "\tat org.apache.spark.network.server.TransportChannelHandler.channelRead0(TransportChannelHandler.java:53)\n", + "\tat io.netty.channel.SimpleChannelInboundHandler.channelRead(SimpleChannelInboundHandler.java:99)\n", + "\tat io.netty.channel.AbstractChannelHandlerContext.invokeChannelRead(AbstractChannelHandlerContext.java:379)\n", + "\tat io.netty.channel.AbstractChannelHandlerContext.invokeChannelRead(AbstractChannelHandlerContext.java:365)\n", + "\tat io.netty.channel.AbstractChannelHandlerContext.fireChannelRead(AbstractChannelHandlerContext.java:357)\n", + "\tat io.netty.handler.timeout.IdleStateHandler.channelRead(IdleStateHandler.java:286)\n", + "\tat io.netty.channel.AbstractChannelHandlerContext.invokeChannelRead(AbstractChannelHandlerContext.java:379)\n", + "\tat 
io.netty.channel.AbstractChannelHandlerContext.invokeChannelRead(AbstractChannelHandlerContext.java:365)\n", + "\tat io.netty.channel.AbstractChannelHandlerContext.fireChannelRead(AbstractChannelHandlerContext.java:357)\n", + "\tat io.netty.handler.codec.MessageToMessageDecoder.channelRead(MessageToMessageDecoder.java:103)\n", + "\tat io.netty.channel.AbstractChannelHandlerContext.invokeChannelRead(AbstractChannelHandlerContext.java:379)\n", + "\tat io.netty.channel.AbstractChannelHandlerContext.invokeChannelRead(AbstractChannelHandlerContext.java:365)\n", + "\tat io.netty.channel.AbstractChannelHandlerContext.fireChannelRead(AbstractChannelHandlerContext.java:357)\n", + "\tat org.apache.spark.network.util.TransportFrameDecoder.channelRead(TransportFrameDecoder.java:102)\n", + "\tat io.netty.channel.AbstractChannelHandlerContext.invokeChannelRead(AbstractChannelHandlerContext.java:379)\n", + "\tat io.netty.channel.AbstractChannelHandlerContext.invokeChannelRead(AbstractChannelHandlerContext.java:365)\n", + "\tat io.netty.channel.AbstractChannelHandlerContext.fireChannelRead(AbstractChannelHandlerContext.java:357)\n", + "\tat io.netty.channel.DefaultChannelPipeline$HeadContext.channelRead(DefaultChannelPipeline.java:1410)\n", + "\tat io.netty.channel.AbstractChannelHandlerContext.invokeChannelRead(AbstractChannelHandlerContext.java:379)\n", + "\tat io.netty.channel.AbstractChannelHandlerContext.invokeChannelRead(AbstractChannelHandlerContext.java:365)\n", + "\tat io.netty.channel.DefaultChannelPipeline.fireChannelRead(DefaultChannelPipeline.java:919)\n", + "\tat io.netty.channel.nio.AbstractNioByteChannel$NioByteUnsafe.read(AbstractNioByteChannel.java:163)\n", + "\tat io.netty.channel.nio.NioEventLoop.processSelectedKey(NioEventLoop.java:714)\n", + "\tat io.netty.channel.nio.NioEventLoop.processSelectedKeysOptimized(NioEventLoop.java:650)\n", + "\tat io.netty.channel.nio.NioEventLoop.processSelectedKeys(NioEventLoop.java:576)\n", + "\tat io.netty.channel.nio.NioEventLoop.run(NioEventLoop.java:493)\n", + "\tat io.netty.util.concurrent.SingleThreadEventExecutor$4.run(SingleThreadEventExecutor.java:989)\n", + "\tat io.netty.util.internal.ThreadExecutorMap$2.run(ThreadExecutorMap.java:74)\n", + "\tat io.netty.util.concurrent.FastThreadLocalRunnable.run(FastThreadLocalRunnable.java:30)\n", + "\t... 1 more\n", + "Caused by: java.lang.OutOfMemoryError: Java heap space\n", + "23/11/24 21:03:06 WARN org.apache.spark.scheduler.TaskSetManager: Lost task 508.0 in stage 19.0 (TID 2228) (hub-msca-bdp-dphub-students-test-ridhi-w-1.c.msca-bdp-student-ap.internal executor 7): TaskResultLost (result lost from block manager)\n", + "23/11/24 21:03:07 WARN org.apache.spark.storage.BlockManager: Failed to fetch block after 1 fetch failures. 
Most recent failure cause:\n", + "org.apache.spark.SparkException: Exception thrown in awaitResult: \n", + "\tat org.apache.spark.util.ThreadUtils$.awaitResult(ThreadUtils.scala:301)\n", + "\tat org.apache.spark.network.BlockTransferService.fetchBlockSync(BlockTransferService.scala:103)\n", + "\tat org.apache.spark.storage.BlockManager.fetchRemoteManagedBuffer(BlockManager.scala:1069)\n", + "\tat org.apache.spark.storage.BlockManager.$anonfun$getRemoteBlock$8(BlockManager.scala:1013)\n", + "\tat scala.Option.orElse(Option.scala:447)\n", + "\tat org.apache.spark.storage.BlockManager.getRemoteBlock(BlockManager.scala:1013)\n", + "\tat org.apache.spark.storage.BlockManager.getRemoteBytes(BlockManager.scala:1151)\n", + "\tat org.apache.spark.scheduler.TaskResultGetter$$anon$3.$anonfun$run$1(TaskResultGetter.scala:88)\n", + "\tat scala.runtime.java8.JFunction0$mcV$sp.apply(JFunction0$mcV$sp.java:23)\n", + "\tat org.apache.spark.util.Utils$.logUncaughtExceptions(Utils.scala:1996)\n", + "\tat org.apache.spark.scheduler.TaskResultGetter$$anon$3.run(TaskResultGetter.scala:63)\n", + "\tat java.util.concurrent.ThreadPoolExecutor.runWorker(ThreadPoolExecutor.java:1149)\n", + "\tat java.util.concurrent.ThreadPoolExecutor$Worker.run(ThreadPoolExecutor.java:624)\n", + "\tat java.lang.Thread.run(Thread.java:750)\n", + "Caused by: java.util.concurrent.ExecutionException: Boxed Error\n", + "\tat scala.concurrent.impl.Promise$.resolver(Promise.scala:87)\n", + "\tat scala.concurrent.impl.Promise$.scala$concurrent$impl$Promise$$resolveTry(Promise.scala:79)\n", + "\tat scala.concurrent.impl.Promise$DefaultPromise.tryComplete(Promise.scala:284)\n", + "\tat scala.concurrent.Promise.complete(Promise.scala:53)\n", + "\tat scala.concurrent.Promise.complete$(Promise.scala:52)\n", + "\tat scala.concurrent.impl.Promise$DefaultPromise.complete(Promise.scala:187)\n", + "\tat scala.concurrent.Promise.failure(Promise.scala:104)\n", + "\tat scala.concurrent.Promise.failure$(Promise.scala:104)\n", + "\tat scala.concurrent.impl.Promise$DefaultPromise.failure(Promise.scala:187)\n", + "\tat org.apache.spark.network.BlockTransferService$$anon$1.onBlockFetchSuccess(BlockTransferService.scala:98)\n", + "\tat org.apache.spark.network.shuffle.RetryingBlockFetcher$RetryingBlockFetchListener.onBlockFetchSuccess(RetryingBlockFetcher.java:216)\n", + "\tat org.apache.spark.network.shuffle.OneForOneBlockFetcher$ChunkCallback.onSuccess(OneForOneBlockFetcher.java:190)\n", + "\tat org.apache.spark.network.client.TransportResponseHandler.handle(TransportResponseHandler.java:171)\n", + "\tat org.apache.spark.network.server.TransportChannelHandler.channelRead0(TransportChannelHandler.java:142)\n", + "\tat org.apache.spark.network.server.TransportChannelHandler.channelRead0(TransportChannelHandler.java:53)\n", + "\tat io.netty.channel.SimpleChannelInboundHandler.channelRead(SimpleChannelInboundHandler.java:99)\n", + "\tat io.netty.channel.AbstractChannelHandlerContext.invokeChannelRead(AbstractChannelHandlerContext.java:379)\n", + "\tat io.netty.channel.AbstractChannelHandlerContext.invokeChannelRead(AbstractChannelHandlerContext.java:365)\n", + "\tat io.netty.channel.AbstractChannelHandlerContext.fireChannelRead(AbstractChannelHandlerContext.java:357)\n", + "\tat io.netty.handler.timeout.IdleStateHandler.channelRead(IdleStateHandler.java:286)\n", + "\tat io.netty.channel.AbstractChannelHandlerContext.invokeChannelRead(AbstractChannelHandlerContext.java:379)\n", + "\tat 
io.netty.channel.AbstractChannelHandlerContext.invokeChannelRead(AbstractChannelHandlerContext.java:365)\n", + "\tat io.netty.channel.AbstractChannelHandlerContext.fireChannelRead(AbstractChannelHandlerContext.java:357)\n", + "\tat io.netty.handler.codec.MessageToMessageDecoder.channelRead(MessageToMessageDecoder.java:103)\n", + "\tat io.netty.channel.AbstractChannelHandlerContext.invokeChannelRead(AbstractChannelHandlerContext.java:379)\n", + "\tat io.netty.channel.AbstractChannelHandlerContext.invokeChannelRead(AbstractChannelHandlerContext.java:365)\n", + "\tat io.netty.channel.AbstractChannelHandlerContext.fireChannelRead(AbstractChannelHandlerContext.java:357)\n", + "\tat org.apache.spark.network.util.TransportFrameDecoder.channelRead(TransportFrameDecoder.java:102)\n", + "\tat io.netty.channel.AbstractChannelHandlerContext.invokeChannelRead(AbstractChannelHandlerContext.java:379)\n", + "\tat io.netty.channel.AbstractChannelHandlerContext.invokeChannelRead(AbstractChannelHandlerContext.java:365)\n", + "\tat io.netty.channel.AbstractChannelHandlerContext.fireChannelRead(AbstractChannelHandlerContext.java:357)\n", + "\tat io.netty.channel.DefaultChannelPipeline$HeadContext.channelRead(DefaultChannelPipeline.java:1410)\n", + "\tat io.netty.channel.AbstractChannelHandlerContext.invokeChannelRead(AbstractChannelHandlerContext.java:379)\n", + "\tat io.netty.channel.AbstractChannelHandlerContext.invokeChannelRead(AbstractChannelHandlerContext.java:365)\n", + "\tat io.netty.channel.DefaultChannelPipeline.fireChannelRead(DefaultChannelPipeline.java:919)\n", + "\tat io.netty.channel.nio.AbstractNioByteChannel$NioByteUnsafe.read(AbstractNioByteChannel.java:163)\n", + "\tat io.netty.channel.nio.NioEventLoop.processSelectedKey(NioEventLoop.java:714)\n", + "\tat io.netty.channel.nio.NioEventLoop.processSelectedKeysOptimized(NioEventLoop.java:650)\n", + "\tat io.netty.channel.nio.NioEventLoop.processSelectedKeys(NioEventLoop.java:576)\n", + "\tat io.netty.channel.nio.NioEventLoop.run(NioEventLoop.java:493)\n", + "\tat io.netty.util.concurrent.SingleThreadEventExecutor$4.run(SingleThreadEventExecutor.java:989)\n", + "\tat io.netty.util.internal.ThreadExecutorMap$2.run(ThreadExecutorMap.java:74)\n", + "\tat io.netty.util.concurrent.FastThreadLocalRunnable.run(FastThreadLocalRunnable.java:30)\n", + "\t... 1 more\n", + "Caused by: java.lang.OutOfMemoryError: Java heap space\n", + "23/11/24 21:03:07 WARN org.apache.spark.scheduler.TaskSetManager: Lost task 481.0 in stage 19.0 (TID 2201) (hub-msca-bdp-dphub-students-test-ridhi-w-0.c.msca-bdp-student-ap.internal executor 6): TaskResultLost (result lost from block manager)\n", + "23/11/24 21:03:10 WARN org.apache.spark.storage.BlockManager: Failed to fetch block after 1 fetch failures. 
Most recent failure cause:\n", + "org.apache.spark.SparkException: Exception thrown in awaitResult: \n", + "\tat org.apache.spark.util.ThreadUtils$.awaitResult(ThreadUtils.scala:301)\n", + "\tat org.apache.spark.network.BlockTransferService.fetchBlockSync(BlockTransferService.scala:103)\n", + "\tat org.apache.spark.storage.BlockManager.fetchRemoteManagedBuffer(BlockManager.scala:1069)\n", + "\tat org.apache.spark.storage.BlockManager.$anonfun$getRemoteBlock$8(BlockManager.scala:1013)\n", + "\tat scala.Option.orElse(Option.scala:447)\n", + "\tat org.apache.spark.storage.BlockManager.getRemoteBlock(BlockManager.scala:1013)\n", + "\tat org.apache.spark.storage.BlockManager.getRemoteBytes(BlockManager.scala:1151)\n", + "\tat org.apache.spark.scheduler.TaskResultGetter$$anon$3.$anonfun$run$1(TaskResultGetter.scala:88)\n", + "\tat scala.runtime.java8.JFunction0$mcV$sp.apply(JFunction0$mcV$sp.java:23)\n", + "\tat org.apache.spark.util.Utils$.logUncaughtExceptions(Utils.scala:1996)\n", + "\tat org.apache.spark.scheduler.TaskResultGetter$$anon$3.run(TaskResultGetter.scala:63)\n", + "\tat java.util.concurrent.ThreadPoolExecutor.runWorker(ThreadPoolExecutor.java:1149)\n", + "\tat java.util.concurrent.ThreadPoolExecutor$Worker.run(ThreadPoolExecutor.java:624)\n", + "\tat java.lang.Thread.run(Thread.java:750)\n", + "Caused by: java.util.concurrent.ExecutionException: Boxed Error\n", + "\tat scala.concurrent.impl.Promise$.resolver(Promise.scala:87)\n", + "\tat scala.concurrent.impl.Promise$.scala$concurrent$impl$Promise$$resolveTry(Promise.scala:79)\n", + "\tat scala.concurrent.impl.Promise$DefaultPromise.tryComplete(Promise.scala:284)\n", + "\tat scala.concurrent.Promise.complete(Promise.scala:53)\n", + "\tat scala.concurrent.Promise.complete$(Promise.scala:52)\n", + "\tat scala.concurrent.impl.Promise$DefaultPromise.complete(Promise.scala:187)\n", + "\tat scala.concurrent.Promise.failure(Promise.scala:104)\n", + "\tat scala.concurrent.Promise.failure$(Promise.scala:104)\n", + "\tat scala.concurrent.impl.Promise$DefaultPromise.failure(Promise.scala:187)\n", + "\tat org.apache.spark.network.BlockTransferService$$anon$1.onBlockFetchSuccess(BlockTransferService.scala:98)\n", + "\tat org.apache.spark.network.shuffle.RetryingBlockFetcher$RetryingBlockFetchListener.onBlockFetchSuccess(RetryingBlockFetcher.java:216)\n", + "\tat org.apache.spark.network.shuffle.OneForOneBlockFetcher$ChunkCallback.onSuccess(OneForOneBlockFetcher.java:190)\n", + "\tat org.apache.spark.network.client.TransportResponseHandler.handle(TransportResponseHandler.java:171)\n", + "\tat org.apache.spark.network.server.TransportChannelHandler.channelRead0(TransportChannelHandler.java:142)\n", + "\tat org.apache.spark.network.server.TransportChannelHandler.channelRead0(TransportChannelHandler.java:53)\n", + "\tat io.netty.channel.SimpleChannelInboundHandler.channelRead(SimpleChannelInboundHandler.java:99)\n", + "\tat io.netty.channel.AbstractChannelHandlerContext.invokeChannelRead(AbstractChannelHandlerContext.java:379)\n", + "\tat io.netty.channel.AbstractChannelHandlerContext.invokeChannelRead(AbstractChannelHandlerContext.java:365)\n", + "\tat io.netty.channel.AbstractChannelHandlerContext.fireChannelRead(AbstractChannelHandlerContext.java:357)\n", + "\tat io.netty.handler.timeout.IdleStateHandler.channelRead(IdleStateHandler.java:286)\n", + "\tat io.netty.channel.AbstractChannelHandlerContext.invokeChannelRead(AbstractChannelHandlerContext.java:379)\n", + "\tat 
io.netty.channel.AbstractChannelHandlerContext.invokeChannelRead(AbstractChannelHandlerContext.java:365)\n", + "\tat io.netty.channel.AbstractChannelHandlerContext.fireChannelRead(AbstractChannelHandlerContext.java:357)\n", + "\tat io.netty.handler.codec.MessageToMessageDecoder.channelRead(MessageToMessageDecoder.java:103)\n", + "\tat io.netty.channel.AbstractChannelHandlerContext.invokeChannelRead(AbstractChannelHandlerContext.java:379)\n", + "\tat io.netty.channel.AbstractChannelHandlerContext.invokeChannelRead(AbstractChannelHandlerContext.java:365)\n", + "\tat io.netty.channel.AbstractChannelHandlerContext.fireChannelRead(AbstractChannelHandlerContext.java:357)\n", + "\tat org.apache.spark.network.util.TransportFrameDecoder.channelRead(TransportFrameDecoder.java:102)\n", + "\tat io.netty.channel.AbstractChannelHandlerContext.invokeChannelRead(AbstractChannelHandlerContext.java:379)\n", + "\tat io.netty.channel.AbstractChannelHandlerContext.invokeChannelRead(AbstractChannelHandlerContext.java:365)\n", + "\tat io.netty.channel.AbstractChannelHandlerContext.fireChannelRead(AbstractChannelHandlerContext.java:357)\n", + "\tat io.netty.channel.DefaultChannelPipeline$HeadContext.channelRead(DefaultChannelPipeline.java:1410)\n", + "\tat io.netty.channel.AbstractChannelHandlerContext.invokeChannelRead(AbstractChannelHandlerContext.java:379)\n", + "\tat io.netty.channel.AbstractChannelHandlerContext.invokeChannelRead(AbstractChannelHandlerContext.java:365)\n", + "\tat io.netty.channel.DefaultChannelPipeline.fireChannelRead(DefaultChannelPipeline.java:919)\n", + "\tat io.netty.channel.nio.AbstractNioByteChannel$NioByteUnsafe.read(AbstractNioByteChannel.java:163)\n", + "\tat io.netty.channel.nio.NioEventLoop.processSelectedKey(NioEventLoop.java:714)\n", + "\tat io.netty.channel.nio.NioEventLoop.processSelectedKeysOptimized(NioEventLoop.java:650)\n", + "\tat io.netty.channel.nio.NioEventLoop.processSelectedKeys(NioEventLoop.java:576)\n", + "\tat io.netty.channel.nio.NioEventLoop.run(NioEventLoop.java:493)\n", + "\tat io.netty.util.concurrent.SingleThreadEventExecutor$4.run(SingleThreadEventExecutor.java:989)\n", + "\tat io.netty.util.internal.ThreadExecutorMap$2.run(ThreadExecutorMap.java:74)\n", + "\tat io.netty.util.concurrent.FastThreadLocalRunnable.run(FastThreadLocalRunnable.java:30)\n", + "\t... 1 more\n", + "Caused by: java.lang.OutOfMemoryError: Java heap space\n", + "23/11/24 21:03:10 WARN org.apache.spark.scheduler.TaskSetManager: Lost task 507.0 in stage 19.0 (TID 2227) (hub-msca-bdp-dphub-students-test-ridhi-w-1.c.msca-bdp-student-ap.internal executor 7): TaskResultLost (result lost from block manager)\n", + "23/11/24 21:03:12 WARN org.apache.spark.storage.BlockManager: Failed to fetch block after 1 fetch failures. 
Most recent failure cause:\n", + "org.apache.spark.SparkException: Exception thrown in awaitResult: \n", + "\tat org.apache.spark.util.ThreadUtils$.awaitResult(ThreadUtils.scala:301)\n", + "\tat org.apache.spark.network.BlockTransferService.fetchBlockSync(BlockTransferService.scala:103)\n", + "\tat org.apache.spark.storage.BlockManager.fetchRemoteManagedBuffer(BlockManager.scala:1069)\n", + "\tat org.apache.spark.storage.BlockManager.$anonfun$getRemoteBlock$8(BlockManager.scala:1013)\n", + "\tat scala.Option.orElse(Option.scala:447)\n", + "\tat org.apache.spark.storage.BlockManager.getRemoteBlock(BlockManager.scala:1013)\n", + "\tat org.apache.spark.storage.BlockManager.getRemoteBytes(BlockManager.scala:1151)\n", + "\tat org.apache.spark.scheduler.TaskResultGetter$$anon$3.$anonfun$run$1(TaskResultGetter.scala:88)\n", + "\tat scala.runtime.java8.JFunction0$mcV$sp.apply(JFunction0$mcV$sp.java:23)\n", + "\tat org.apache.spark.util.Utils$.logUncaughtExceptions(Utils.scala:1996)\n", + "\tat org.apache.spark.scheduler.TaskResultGetter$$anon$3.run(TaskResultGetter.scala:63)\n", + "\tat java.util.concurrent.ThreadPoolExecutor.runWorker(ThreadPoolExecutor.java:1149)\n", + "\tat java.util.concurrent.ThreadPoolExecutor$Worker.run(ThreadPoolExecutor.java:624)\n", + "\tat java.lang.Thread.run(Thread.java:750)\n", + "Caused by: java.util.concurrent.ExecutionException: Boxed Error\n", + "\tat scala.concurrent.impl.Promise$.resolver(Promise.scala:87)\n", + "\tat scala.concurrent.impl.Promise$.scala$concurrent$impl$Promise$$resolveTry(Promise.scala:79)\n", + "\tat scala.concurrent.impl.Promise$DefaultPromise.tryComplete(Promise.scala:284)\n", + "\tat scala.concurrent.Promise.complete(Promise.scala:53)\n", + "\tat scala.concurrent.Promise.complete$(Promise.scala:52)\n", + "\tat scala.concurrent.impl.Promise$DefaultPromise.complete(Promise.scala:187)\n", + "\tat scala.concurrent.Promise.failure(Promise.scala:104)\n", + "\tat scala.concurrent.Promise.failure$(Promise.scala:104)\n", + "\tat scala.concurrent.impl.Promise$DefaultPromise.failure(Promise.scala:187)\n", + "\tat org.apache.spark.network.BlockTransferService$$anon$1.onBlockFetchSuccess(BlockTransferService.scala:98)\n", + "\tat org.apache.spark.network.shuffle.RetryingBlockFetcher$RetryingBlockFetchListener.onBlockFetchSuccess(RetryingBlockFetcher.java:216)\n", + "\tat org.apache.spark.network.shuffle.OneForOneBlockFetcher$ChunkCallback.onSuccess(OneForOneBlockFetcher.java:190)\n", + "\tat org.apache.spark.network.client.TransportResponseHandler.handle(TransportResponseHandler.java:171)\n", + "\tat org.apache.spark.network.server.TransportChannelHandler.channelRead0(TransportChannelHandler.java:142)\n", + "\tat org.apache.spark.network.server.TransportChannelHandler.channelRead0(TransportChannelHandler.java:53)\n", + "\tat io.netty.channel.SimpleChannelInboundHandler.channelRead(SimpleChannelInboundHandler.java:99)\n", + "\tat io.netty.channel.AbstractChannelHandlerContext.invokeChannelRead(AbstractChannelHandlerContext.java:379)\n", + "\tat io.netty.channel.AbstractChannelHandlerContext.invokeChannelRead(AbstractChannelHandlerContext.java:365)\n", + "\tat io.netty.channel.AbstractChannelHandlerContext.fireChannelRead(AbstractChannelHandlerContext.java:357)\n", + "\tat io.netty.handler.timeout.IdleStateHandler.channelRead(IdleStateHandler.java:286)\n", + "\tat io.netty.channel.AbstractChannelHandlerContext.invokeChannelRead(AbstractChannelHandlerContext.java:379)\n", + "\tat 
io.netty.channel.AbstractChannelHandlerContext.invokeChannelRead(AbstractChannelHandlerContext.java:365)\n", + "\tat io.netty.channel.AbstractChannelHandlerContext.fireChannelRead(AbstractChannelHandlerContext.java:357)\n", + "\tat io.netty.handler.codec.MessageToMessageDecoder.channelRead(MessageToMessageDecoder.java:103)\n", + "\tat io.netty.channel.AbstractChannelHandlerContext.invokeChannelRead(AbstractChannelHandlerContext.java:379)\n", + "\tat io.netty.channel.AbstractChannelHandlerContext.invokeChannelRead(AbstractChannelHandlerContext.java:365)\n", + "\tat io.netty.channel.AbstractChannelHandlerContext.fireChannelRead(AbstractChannelHandlerContext.java:357)\n", + "\tat org.apache.spark.network.util.TransportFrameDecoder.channelRead(TransportFrameDecoder.java:102)\n", + "\tat io.netty.channel.AbstractChannelHandlerContext.invokeChannelRead(AbstractChannelHandlerContext.java:379)\n", + "\tat io.netty.channel.AbstractChannelHandlerContext.invokeChannelRead(AbstractChannelHandlerContext.java:365)\n", + "\tat io.netty.channel.AbstractChannelHandlerContext.fireChannelRead(AbstractChannelHandlerContext.java:357)\n", + "\tat io.netty.channel.DefaultChannelPipeline$HeadContext.channelRead(DefaultChannelPipeline.java:1410)\n", + "\tat io.netty.channel.AbstractChannelHandlerContext.invokeChannelRead(AbstractChannelHandlerContext.java:379)\n", + "\tat io.netty.channel.AbstractChannelHandlerContext.invokeChannelRead(AbstractChannelHandlerContext.java:365)\n", + "\tat io.netty.channel.DefaultChannelPipeline.fireChannelRead(DefaultChannelPipeline.java:919)\n", + "\tat io.netty.channel.nio.AbstractNioByteChannel$NioByteUnsafe.read(AbstractNioByteChannel.java:163)\n", + "\tat io.netty.channel.nio.NioEventLoop.processSelectedKey(NioEventLoop.java:714)\n", + "\tat io.netty.channel.nio.NioEventLoop.processSelectedKeysOptimized(NioEventLoop.java:650)\n", + "\tat io.netty.channel.nio.NioEventLoop.processSelectedKeys(NioEventLoop.java:576)\n", + "\tat io.netty.channel.nio.NioEventLoop.run(NioEventLoop.java:493)\n", + "\tat io.netty.util.concurrent.SingleThreadEventExecutor$4.run(SingleThreadEventExecutor.java:989)\n", + "\tat io.netty.util.internal.ThreadExecutorMap$2.run(ThreadExecutorMap.java:74)\n", + "\tat io.netty.util.concurrent.FastThreadLocalRunnable.run(FastThreadLocalRunnable.java:30)\n", + "\t... 1 more\n", + "Caused by: java.lang.OutOfMemoryError: Java heap space\n", + "23/11/24 21:03:12 WARN org.apache.spark.scheduler.TaskSetManager: Lost task 480.0 in stage 19.0 (TID 2200) (hub-msca-bdp-dphub-students-test-ridhi-w-0.c.msca-bdp-student-ap.internal executor 3): TaskResultLost (result lost from block manager)\n", + "23/11/24 21:03:15 WARN org.apache.spark.storage.BlockManager: Failed to fetch block after 1 fetch failures. 
Most recent failure cause:\n", + "org.apache.spark.SparkException: Exception thrown in awaitResult: \n", + "Caused by: java.util.concurrent.ExecutionException: Boxed Error\n", + "Caused by: java.lang.OutOfMemoryError: Java heap space\n", + "23/11/24 21:03:15 WARN org.apache.spark.scheduler.TaskSetManager: Lost task 506.0 in stage 19.0 (TID 2226) (hub-msca-bdp-dphub-students-test-ridhi-w-1.c.msca-bdp-student-ap.internal executor 7): TaskResultLost (result lost from block manager)\n", + "23/11/24 21:03:16 WARN org.apache.spark.storage.BlockManager: Failed to fetch block after 1 fetch failures. 
Most recent failure cause:\n", + "org.apache.spark.SparkException: Exception thrown in awaitResult: \n", + "Caused by: java.util.concurrent.ExecutionException: Boxed Error\n", + "Caused by: java.lang.OutOfMemoryError: Java heap space\n", + "23/11/24 21:03:17 WARN org.apache.spark.storage.BlockManager: Failed to fetch block after 1 fetch failures. 
Most recent failure cause:\n", + "org.apache.spark.SparkException: Exception thrown in awaitResult: \n", + "Caused by: java.util.concurrent.ExecutionException: Boxed Error\n", + "Caused by: java.lang.OutOfMemoryError: Java heap space\n", + "23/11/24 21:03:17 WARN org.apache.spark.scheduler.TaskSetManager: Lost task 505.0 in stage 19.0 (TID 2225) (hub-msca-bdp-dphub-students-test-ridhi-w-1.c.msca-bdp-student-ap.internal executor 7): TaskResultLost (result lost from block manager)\n", + "23/11/24 21:03:17 WARN org.apache.spark.scheduler.TaskSetManager: Lost task 504.0 in stage 19.0 (TID 2224) (hub-msca-bdp-dphub-students-test-ridhi-w-0.c.msca-bdp-student-ap.internal executor 3): TaskResultLost (result lost from block manager)\n", + "23/11/24 21:03:19 WARN org.apache.spark.storage.BlockManager: Failed to fetch block after 1 fetch failures. 
Most recent failure cause:\n", + "org.apache.spark.SparkException: Exception thrown in awaitResult: \n", + "Caused by: java.util.concurrent.ExecutionException: Boxed Error\n", + "Caused by: java.lang.OutOfMemoryError: Java heap space\n", + "23/11/24 21:03:20 WARN org.apache.spark.storage.BlockManager: Failed to fetch block after 1 fetch failures. 
Most recent failure cause:\n", + "org.apache.spark.SparkException: Exception thrown in awaitResult: \n", + "Caused by: java.util.concurrent.ExecutionException: Boxed Error\n", + "Caused by: java.lang.OutOfMemoryError: Java heap space\n", + "23/11/24 21:03:20 WARN org.apache.spark.scheduler.TaskSetManager: Lost task 498.0 in stage 19.0 (TID 2218) (hub-msca-bdp-dphub-students-test-ridhi-w-0.c.msca-bdp-student-ap.internal executor 3): TaskResultLost (result lost from block manager)\n", + "23/11/24 21:03:20 WARN org.apache.spark.scheduler.TaskSetManager: Lost task 437.1 in stage 19.0 (TID 2240) (hub-msca-bdp-dphub-students-test-ridhi-w-1.c.msca-bdp-student-ap.internal executor 7): TaskResultLost (result lost from block manager)\n", + "23/11/24 21:03:22 WARN org.apache.spark.storage.BlockManager: Failed to fetch block after 1 fetch failures. 
Most recent failure cause:\n", + "org.apache.spark.SparkException: Exception thrown in awaitResult: \n", + "Caused by: java.util.concurrent.ExecutionException: Boxed Error\n", + "Caused by: java.lang.OutOfMemoryError: Java heap space\n", + "23/11/24 21:03:22 WARN org.apache.spark.scheduler.TaskSetManager: Lost task 499.0 in stage 19.0 (TID 2219) (hub-msca-bdp-dphub-students-test-ridhi-w-0.c.msca-bdp-student-ap.internal executor 3): TaskResultLost (result lost from block manager)\n", + "23/11/24 21:03:24 WARN org.apache.spark.storage.BlockManager: Failed to fetch block after 1 fetch failures. 
Most recent failure cause:\n", + "org.apache.spark.SparkException: Exception thrown in awaitResult: \n", + "Caused by: java.util.concurrent.ExecutionException: Boxed Error\n", + "Caused by: java.lang.OutOfMemoryError: Java heap space\n", + "23/11/24 21:03:24 WARN org.apache.spark.scheduler.TaskSetManager: Lost task 503.0 in stage 19.0 (TID 2223) (hub-msca-bdp-dphub-students-test-ridhi-w-0.c.msca-bdp-student-ap.internal executor 3): TaskResultLost (result lost from block manager)\n", + "23/11/24 21:03:27 WARN org.apache.spark.storage.BlockManager: Failed to fetch block after 1 fetch failures. 
Most recent failure cause:\n", + "org.apache.spark.SparkException: Exception thrown in awaitResult: \n", + "Caused by: java.util.concurrent.ExecutionException: Boxed Error\n", + "Caused by: java.lang.OutOfMemoryError: Java heap space\n", + "23/11/24 21:03:27 WARN org.apache.spark.scheduler.TaskSetManager: Lost task 501.0 in stage 19.0 (TID 2221) (hub-msca-bdp-dphub-students-test-ridhi-w-0.c.msca-bdp-student-ap.internal executor 3): TaskResultLost (result lost from block manager)\n", + "23/11/24 21:03:29 WARN org.apache.spark.storage.BlockManager: Failed to fetch block after 1 fetch failures. 
Most recent failure cause:\n", + "org.apache.spark.SparkException: Exception thrown in awaitResult: \n", + "Caused by: java.util.concurrent.ExecutionException: Boxed Error\n", + "Caused by: java.lang.OutOfMemoryError: Java heap space\n", + "23/11/24 21:03:29 WARN org.apache.spark.scheduler.TaskSetManager: Lost task 497.0 in stage 19.0 (TID 2217) (hub-msca-bdp-dphub-students-test-ridhi-w-0.c.msca-bdp-student-ap.internal executor 6): TaskResultLost (result lost from block manager)\n", + "23/11/24 21:03:31 WARN org.apache.spark.storage.BlockManager: Failed to fetch block after 1 fetch failures. 
Most recent failure cause:\n", + "org.apache.spark.SparkException: Exception thrown in awaitResult: \n", + "Caused by: java.util.concurrent.ExecutionException: Boxed Error\n", + "Caused by: java.lang.OutOfMemoryError: Java heap space\n", + "23/11/24 21:03:31 WARN org.apache.spark.scheduler.TaskSetManager: Lost task 500.0 in stage 19.0 (TID 2220) (hub-msca-bdp-dphub-students-test-ridhi-w-0.c.msca-bdp-student-ap.internal executor 3): TaskResultLost (result lost from block manager)\n", + "23/11/24 21:03:34 WARN org.apache.spark.storage.BlockManager: Failed to fetch block after 1 fetch failures. 
Most recent failure cause:\n", + "org.apache.spark.SparkException: Exception thrown in awaitResult: \n", + "\tat org.apache.spark.util.ThreadUtils$.awaitResult(ThreadUtils.scala:301)\n", + "\tat org.apache.spark.network.BlockTransferService.fetchBlockSync(BlockTransferService.scala:103)\n", + "\tat org.apache.spark.storage.BlockManager.fetchRemoteManagedBuffer(BlockManager.scala:1069)\n", + "\tat org.apache.spark.storage.BlockManager.$anonfun$getRemoteBlock$8(BlockManager.scala:1013)\n", + "\tat scala.Option.orElse(Option.scala:447)\n", + "\tat org.apache.spark.storage.BlockManager.getRemoteBlock(BlockManager.scala:1013)\n", + "\tat org.apache.spark.storage.BlockManager.getRemoteBytes(BlockManager.scala:1151)\n", + "\tat org.apache.spark.scheduler.TaskResultGetter$$anon$3.$anonfun$run$1(TaskResultGetter.scala:88)\n", + "\tat scala.runtime.java8.JFunction0$mcV$sp.apply(JFunction0$mcV$sp.java:23)\n", + "\tat org.apache.spark.util.Utils$.logUncaughtExceptions(Utils.scala:1996)\n", + "\tat org.apache.spark.scheduler.TaskResultGetter$$anon$3.run(TaskResultGetter.scala:63)\n", + "\tat java.util.concurrent.ThreadPoolExecutor.runWorker(ThreadPoolExecutor.java:1149)\n", + "\tat java.util.concurrent.ThreadPoolExecutor$Worker.run(ThreadPoolExecutor.java:624)\n", + "\tat java.lang.Thread.run(Thread.java:750)\n", + "Caused by: java.util.concurrent.ExecutionException: Boxed Error\n", + "\tat scala.concurrent.impl.Promise$.resolver(Promise.scala:87)\n", + "\tat scala.concurrent.impl.Promise$.scala$concurrent$impl$Promise$$resolveTry(Promise.scala:79)\n", + "\tat scala.concurrent.impl.Promise$DefaultPromise.tryComplete(Promise.scala:284)\n", + "\tat scala.concurrent.Promise.complete(Promise.scala:53)\n", + "\tat scala.concurrent.Promise.complete$(Promise.scala:52)\n", + "\tat scala.concurrent.impl.Promise$DefaultPromise.complete(Promise.scala:187)\n", + "\tat scala.concurrent.Promise.failure(Promise.scala:104)\n", + "\tat scala.concurrent.Promise.failure$(Promise.scala:104)\n", + "\tat scala.concurrent.impl.Promise$DefaultPromise.failure(Promise.scala:187)\n", + "\tat org.apache.spark.network.BlockTransferService$$anon$1.onBlockFetchSuccess(BlockTransferService.scala:98)\n", + "\tat org.apache.spark.network.shuffle.RetryingBlockFetcher$RetryingBlockFetchListener.onBlockFetchSuccess(RetryingBlockFetcher.java:216)\n", + "\tat org.apache.spark.network.shuffle.OneForOneBlockFetcher$ChunkCallback.onSuccess(OneForOneBlockFetcher.java:190)\n", + "\tat org.apache.spark.network.client.TransportResponseHandler.handle(TransportResponseHandler.java:171)\n", + "\tat org.apache.spark.network.server.TransportChannelHandler.channelRead0(TransportChannelHandler.java:142)\n", + "\tat org.apache.spark.network.server.TransportChannelHandler.channelRead0(TransportChannelHandler.java:53)\n", + "\tat io.netty.channel.SimpleChannelInboundHandler.channelRead(SimpleChannelInboundHandler.java:99)\n", + "\tat io.netty.channel.AbstractChannelHandlerContext.invokeChannelRead(AbstractChannelHandlerContext.java:379)\n", + "\tat io.netty.channel.AbstractChannelHandlerContext.invokeChannelRead(AbstractChannelHandlerContext.java:365)\n", + "\tat io.netty.channel.AbstractChannelHandlerContext.fireChannelRead(AbstractChannelHandlerContext.java:357)\n", + "\tat io.netty.handler.timeout.IdleStateHandler.channelRead(IdleStateHandler.java:286)\n", + "\tat io.netty.channel.AbstractChannelHandlerContext.invokeChannelRead(AbstractChannelHandlerContext.java:379)\n", + "\tat 
io.netty.channel.AbstractChannelHandlerContext.invokeChannelRead(AbstractChannelHandlerContext.java:365)\n", + "\tat io.netty.channel.AbstractChannelHandlerContext.fireChannelRead(AbstractChannelHandlerContext.java:357)\n", + "\tat io.netty.handler.codec.MessageToMessageDecoder.channelRead(MessageToMessageDecoder.java:103)\n", + "\tat io.netty.channel.AbstractChannelHandlerContext.invokeChannelRead(AbstractChannelHandlerContext.java:379)\n", + "\tat io.netty.channel.AbstractChannelHandlerContext.invokeChannelRead(AbstractChannelHandlerContext.java:365)\n", + "\tat io.netty.channel.AbstractChannelHandlerContext.fireChannelRead(AbstractChannelHandlerContext.java:357)\n", + "\tat org.apache.spark.network.util.TransportFrameDecoder.channelRead(TransportFrameDecoder.java:102)\n", + "\tat io.netty.channel.AbstractChannelHandlerContext.invokeChannelRead(AbstractChannelHandlerContext.java:379)\n", + "\tat io.netty.channel.AbstractChannelHandlerContext.invokeChannelRead(AbstractChannelHandlerContext.java:365)\n", + "\tat io.netty.channel.AbstractChannelHandlerContext.fireChannelRead(AbstractChannelHandlerContext.java:357)\n", + "\tat io.netty.channel.DefaultChannelPipeline$HeadContext.channelRead(DefaultChannelPipeline.java:1410)\n", + "\tat io.netty.channel.AbstractChannelHandlerContext.invokeChannelRead(AbstractChannelHandlerContext.java:379)\n", + "\tat io.netty.channel.AbstractChannelHandlerContext.invokeChannelRead(AbstractChannelHandlerContext.java:365)\n", + "\tat io.netty.channel.DefaultChannelPipeline.fireChannelRead(DefaultChannelPipeline.java:919)\n", + "\tat io.netty.channel.nio.AbstractNioByteChannel$NioByteUnsafe.read(AbstractNioByteChannel.java:163)\n", + "\tat io.netty.channel.nio.NioEventLoop.processSelectedKey(NioEventLoop.java:714)\n", + "\tat io.netty.channel.nio.NioEventLoop.processSelectedKeysOptimized(NioEventLoop.java:650)\n", + "\tat io.netty.channel.nio.NioEventLoop.processSelectedKeys(NioEventLoop.java:576)\n", + "\tat io.netty.channel.nio.NioEventLoop.run(NioEventLoop.java:493)\n", + "\tat io.netty.util.concurrent.SingleThreadEventExecutor$4.run(SingleThreadEventExecutor.java:989)\n", + "\tat io.netty.util.internal.ThreadExecutorMap$2.run(ThreadExecutorMap.java:74)\n", + "\tat io.netty.util.concurrent.FastThreadLocalRunnable.run(FastThreadLocalRunnable.java:30)\n", + "\t... 1 more\n", + "Caused by: java.lang.OutOfMemoryError: Java heap space\n", + "23/11/24 21:03:34 WARN org.apache.spark.storage.BlockManager: Failed to fetch block after 1 fetch failures. 
Most recent failure cause:\n", + "org.apache.spark.SparkException: Exception thrown in awaitResult: \n", + "\tat org.apache.spark.util.ThreadUtils$.awaitResult(ThreadUtils.scala:301)\n", + "\tat org.apache.spark.network.BlockTransferService.fetchBlockSync(BlockTransferService.scala:103)\n", + "\tat org.apache.spark.storage.BlockManager.fetchRemoteManagedBuffer(BlockManager.scala:1069)\n", + "\tat org.apache.spark.storage.BlockManager.$anonfun$getRemoteBlock$8(BlockManager.scala:1013)\n", + "\tat scala.Option.orElse(Option.scala:447)\n", + "\tat org.apache.spark.storage.BlockManager.getRemoteBlock(BlockManager.scala:1013)\n", + "\tat org.apache.spark.storage.BlockManager.getRemoteBytes(BlockManager.scala:1151)\n", + "\tat org.apache.spark.scheduler.TaskResultGetter$$anon$3.$anonfun$run$1(TaskResultGetter.scala:88)\n", + "\tat scala.runtime.java8.JFunction0$mcV$sp.apply(JFunction0$mcV$sp.java:23)\n", + "\tat org.apache.spark.util.Utils$.logUncaughtExceptions(Utils.scala:1996)\n", + "\tat org.apache.spark.scheduler.TaskResultGetter$$anon$3.run(TaskResultGetter.scala:63)\n", + "\tat java.util.concurrent.ThreadPoolExecutor.runWorker(ThreadPoolExecutor.java:1149)\n", + "\tat java.util.concurrent.ThreadPoolExecutor$Worker.run(ThreadPoolExecutor.java:624)\n", + "\tat java.lang.Thread.run(Thread.java:750)\n", + "Caused by: java.util.concurrent.ExecutionException: Boxed Error\n", + "\tat scala.concurrent.impl.Promise$.resolver(Promise.scala:87)\n", + "\tat scala.concurrent.impl.Promise$.scala$concurrent$impl$Promise$$resolveTry(Promise.scala:79)\n", + "\tat scala.concurrent.impl.Promise$DefaultPromise.tryComplete(Promise.scala:284)\n", + "\tat scala.concurrent.Promise.complete(Promise.scala:53)\n", + "\tat scala.concurrent.Promise.complete$(Promise.scala:52)\n", + "\tat scala.concurrent.impl.Promise$DefaultPromise.complete(Promise.scala:187)\n", + "\tat scala.concurrent.Promise.failure(Promise.scala:104)\n", + "\tat scala.concurrent.Promise.failure$(Promise.scala:104)\n", + "\tat scala.concurrent.impl.Promise$DefaultPromise.failure(Promise.scala:187)\n", + "\tat org.apache.spark.network.BlockTransferService$$anon$1.onBlockFetchSuccess(BlockTransferService.scala:98)\n", + "\tat org.apache.spark.network.shuffle.RetryingBlockFetcher$RetryingBlockFetchListener.onBlockFetchSuccess(RetryingBlockFetcher.java:216)\n", + "\tat org.apache.spark.network.shuffle.OneForOneBlockFetcher$ChunkCallback.onSuccess(OneForOneBlockFetcher.java:190)\n", + "\tat org.apache.spark.network.client.TransportResponseHandler.handle(TransportResponseHandler.java:171)\n", + "\tat org.apache.spark.network.server.TransportChannelHandler.channelRead0(TransportChannelHandler.java:142)\n", + "\tat org.apache.spark.network.server.TransportChannelHandler.channelRead0(TransportChannelHandler.java:53)\n", + "\tat io.netty.channel.SimpleChannelInboundHandler.channelRead(SimpleChannelInboundHandler.java:99)\n", + "\tat io.netty.channel.AbstractChannelHandlerContext.invokeChannelRead(AbstractChannelHandlerContext.java:379)\n", + "\tat io.netty.channel.AbstractChannelHandlerContext.invokeChannelRead(AbstractChannelHandlerContext.java:365)\n", + "\tat io.netty.channel.AbstractChannelHandlerContext.fireChannelRead(AbstractChannelHandlerContext.java:357)\n", + "\tat io.netty.handler.timeout.IdleStateHandler.channelRead(IdleStateHandler.java:286)\n", + "\tat io.netty.channel.AbstractChannelHandlerContext.invokeChannelRead(AbstractChannelHandlerContext.java:379)\n", + "\tat 
io.netty.channel.AbstractChannelHandlerContext.invokeChannelRead(AbstractChannelHandlerContext.java:365)\n", + "\tat io.netty.channel.AbstractChannelHandlerContext.fireChannelRead(AbstractChannelHandlerContext.java:357)\n", + "\tat io.netty.handler.codec.MessageToMessageDecoder.channelRead(MessageToMessageDecoder.java:103)\n", + "\tat io.netty.channel.AbstractChannelHandlerContext.invokeChannelRead(AbstractChannelHandlerContext.java:379)\n", + "\tat io.netty.channel.AbstractChannelHandlerContext.invokeChannelRead(AbstractChannelHandlerContext.java:365)\n", + "\tat io.netty.channel.AbstractChannelHandlerContext.fireChannelRead(AbstractChannelHandlerContext.java:357)\n", + "\tat org.apache.spark.network.util.TransportFrameDecoder.channelRead(TransportFrameDecoder.java:102)\n", + "\tat io.netty.channel.AbstractChannelHandlerContext.invokeChannelRead(AbstractChannelHandlerContext.java:379)\n", + "\tat io.netty.channel.AbstractChannelHandlerContext.invokeChannelRead(AbstractChannelHandlerContext.java:365)\n", + "\tat io.netty.channel.AbstractChannelHandlerContext.fireChannelRead(AbstractChannelHandlerContext.java:357)\n", + "\tat io.netty.channel.DefaultChannelPipeline$HeadContext.channelRead(DefaultChannelPipeline.java:1410)\n", + "\tat io.netty.channel.AbstractChannelHandlerContext.invokeChannelRead(AbstractChannelHandlerContext.java:379)\n", + "\tat io.netty.channel.AbstractChannelHandlerContext.invokeChannelRead(AbstractChannelHandlerContext.java:365)\n", + "\tat io.netty.channel.DefaultChannelPipeline.fireChannelRead(DefaultChannelPipeline.java:919)\n", + "\tat io.netty.channel.nio.AbstractNioByteChannel$NioByteUnsafe.read(AbstractNioByteChannel.java:163)\n", + "\tat io.netty.channel.nio.NioEventLoop.processSelectedKey(NioEventLoop.java:714)\n", + "\tat io.netty.channel.nio.NioEventLoop.processSelectedKeysOptimized(NioEventLoop.java:650)\n", + "\tat io.netty.channel.nio.NioEventLoop.processSelectedKeys(NioEventLoop.java:576)\n", + "\tat io.netty.channel.nio.NioEventLoop.run(NioEventLoop.java:493)\n", + "\tat io.netty.util.concurrent.SingleThreadEventExecutor$4.run(SingleThreadEventExecutor.java:989)\n", + "\tat io.netty.util.internal.ThreadExecutorMap$2.run(ThreadExecutorMap.java:74)\n", + "\tat io.netty.util.concurrent.FastThreadLocalRunnable.run(FastThreadLocalRunnable.java:30)\n", + "\t... 1 more\n", + "Caused by: java.lang.OutOfMemoryError: Java heap space\n", + "23/11/24 21:03:34 WARN org.apache.spark.scheduler.TaskSetManager: Lost task 514.0 in stage 19.0 (TID 2234) (hub-msca-bdp-dphub-students-test-ridhi-w-1.c.msca-bdp-student-ap.internal executor 7): TaskResultLost (result lost from block manager)\n", + "23/11/24 21:03:34 WARN org.apache.spark.scheduler.TaskSetManager: Lost task 502.0 in stage 19.0 (TID 2222) (hub-msca-bdp-dphub-students-test-ridhi-w-0.c.msca-bdp-student-ap.internal executor 3): TaskResultLost (result lost from block manager)\n", + "23/11/24 21:03:36 WARN org.apache.spark.storage.BlockManager: Failed to fetch block after 1 fetch failures. 
Most recent failure cause:\n", + "org.apache.spark.SparkException: Exception thrown in awaitResult: \n", + "\tat org.apache.spark.util.ThreadUtils$.awaitResult(ThreadUtils.scala:301)\n", + "\tat org.apache.spark.network.BlockTransferService.fetchBlockSync(BlockTransferService.scala:103)\n", + "\tat org.apache.spark.storage.BlockManager.fetchRemoteManagedBuffer(BlockManager.scala:1069)\n", + "\tat org.apache.spark.storage.BlockManager.$anonfun$getRemoteBlock$8(BlockManager.scala:1013)\n", + "\tat scala.Option.orElse(Option.scala:447)\n", + "\tat org.apache.spark.storage.BlockManager.getRemoteBlock(BlockManager.scala:1013)\n", + "\tat org.apache.spark.storage.BlockManager.getRemoteBytes(BlockManager.scala:1151)\n", + "\tat org.apache.spark.scheduler.TaskResultGetter$$anon$3.$anonfun$run$1(TaskResultGetter.scala:88)\n", + "\tat scala.runtime.java8.JFunction0$mcV$sp.apply(JFunction0$mcV$sp.java:23)\n", + "\tat org.apache.spark.util.Utils$.logUncaughtExceptions(Utils.scala:1996)\n", + "\tat org.apache.spark.scheduler.TaskResultGetter$$anon$3.run(TaskResultGetter.scala:63)\n", + "\tat java.util.concurrent.ThreadPoolExecutor.runWorker(ThreadPoolExecutor.java:1149)\n", + "\tat java.util.concurrent.ThreadPoolExecutor$Worker.run(ThreadPoolExecutor.java:624)\n", + "\tat java.lang.Thread.run(Thread.java:750)\n", + "Caused by: java.util.concurrent.ExecutionException: Boxed Error\n", + "\tat scala.concurrent.impl.Promise$.resolver(Promise.scala:87)\n", + "\tat scala.concurrent.impl.Promise$.scala$concurrent$impl$Promise$$resolveTry(Promise.scala:79)\n", + "\tat scala.concurrent.impl.Promise$DefaultPromise.tryComplete(Promise.scala:284)\n", + "\tat scala.concurrent.Promise.complete(Promise.scala:53)\n", + "\tat scala.concurrent.Promise.complete$(Promise.scala:52)\n", + "\tat scala.concurrent.impl.Promise$DefaultPromise.complete(Promise.scala:187)\n", + "\tat scala.concurrent.Promise.failure(Promise.scala:104)\n", + "\tat scala.concurrent.Promise.failure$(Promise.scala:104)\n", + "\tat scala.concurrent.impl.Promise$DefaultPromise.failure(Promise.scala:187)\n", + "\tat org.apache.spark.network.BlockTransferService$$anon$1.onBlockFetchSuccess(BlockTransferService.scala:98)\n", + "\tat org.apache.spark.network.shuffle.RetryingBlockFetcher$RetryingBlockFetchListener.onBlockFetchSuccess(RetryingBlockFetcher.java:216)\n", + "\tat org.apache.spark.network.shuffle.OneForOneBlockFetcher$ChunkCallback.onSuccess(OneForOneBlockFetcher.java:190)\n", + "\tat org.apache.spark.network.client.TransportResponseHandler.handle(TransportResponseHandler.java:171)\n", + "\tat org.apache.spark.network.server.TransportChannelHandler.channelRead0(TransportChannelHandler.java:142)\n", + "\tat org.apache.spark.network.server.TransportChannelHandler.channelRead0(TransportChannelHandler.java:53)\n", + "\tat io.netty.channel.SimpleChannelInboundHandler.channelRead(SimpleChannelInboundHandler.java:99)\n", + "\tat io.netty.channel.AbstractChannelHandlerContext.invokeChannelRead(AbstractChannelHandlerContext.java:379)\n", + "\tat io.netty.channel.AbstractChannelHandlerContext.invokeChannelRead(AbstractChannelHandlerContext.java:365)\n", + "\tat io.netty.channel.AbstractChannelHandlerContext.fireChannelRead(AbstractChannelHandlerContext.java:357)\n", + "\tat io.netty.handler.timeout.IdleStateHandler.channelRead(IdleStateHandler.java:286)\n", + "\tat io.netty.channel.AbstractChannelHandlerContext.invokeChannelRead(AbstractChannelHandlerContext.java:379)\n", + "\tat 
io.netty.channel.AbstractChannelHandlerContext.invokeChannelRead(AbstractChannelHandlerContext.java:365)\n", + "\tat io.netty.channel.AbstractChannelHandlerContext.fireChannelRead(AbstractChannelHandlerContext.java:357)\n", + "\tat io.netty.handler.codec.MessageToMessageDecoder.channelRead(MessageToMessageDecoder.java:103)\n", + "\tat io.netty.channel.AbstractChannelHandlerContext.invokeChannelRead(AbstractChannelHandlerContext.java:379)\n", + "\tat io.netty.channel.AbstractChannelHandlerContext.invokeChannelRead(AbstractChannelHandlerContext.java:365)\n", + "\tat io.netty.channel.AbstractChannelHandlerContext.fireChannelRead(AbstractChannelHandlerContext.java:357)\n", + "\tat org.apache.spark.network.util.TransportFrameDecoder.channelRead(TransportFrameDecoder.java:102)\n", + "\tat io.netty.channel.AbstractChannelHandlerContext.invokeChannelRead(AbstractChannelHandlerContext.java:379)\n", + "\tat io.netty.channel.AbstractChannelHandlerContext.invokeChannelRead(AbstractChannelHandlerContext.java:365)\n", + "\tat io.netty.channel.AbstractChannelHandlerContext.fireChannelRead(AbstractChannelHandlerContext.java:357)\n", + "\tat io.netty.channel.DefaultChannelPipeline$HeadContext.channelRead(DefaultChannelPipeline.java:1410)\n", + "\tat io.netty.channel.AbstractChannelHandlerContext.invokeChannelRead(AbstractChannelHandlerContext.java:379)\n", + "\tat io.netty.channel.AbstractChannelHandlerContext.invokeChannelRead(AbstractChannelHandlerContext.java:365)\n", + "\tat io.netty.channel.DefaultChannelPipeline.fireChannelRead(DefaultChannelPipeline.java:919)\n", + "\tat io.netty.channel.nio.AbstractNioByteChannel$NioByteUnsafe.read(AbstractNioByteChannel.java:163)\n", + "\tat io.netty.channel.nio.NioEventLoop.processSelectedKey(NioEventLoop.java:714)\n", + "\tat io.netty.channel.nio.NioEventLoop.processSelectedKeysOptimized(NioEventLoop.java:650)\n", + "\tat io.netty.channel.nio.NioEventLoop.processSelectedKeys(NioEventLoop.java:576)\n", + "\tat io.netty.channel.nio.NioEventLoop.run(NioEventLoop.java:493)\n", + "\tat io.netty.util.concurrent.SingleThreadEventExecutor$4.run(SingleThreadEventExecutor.java:989)\n", + "\tat io.netty.util.internal.ThreadExecutorMap$2.run(ThreadExecutorMap.java:74)\n", + "\tat io.netty.util.concurrent.FastThreadLocalRunnable.run(FastThreadLocalRunnable.java:30)\n", + "\t... 1 more\n", + "Caused by: java.lang.OutOfMemoryError: Java heap space\n", + "23/11/24 21:03:36 WARN org.apache.spark.storage.BlockManager: Failed to fetch block after 1 fetch failures. 
Most recent failure cause:\n", + "org.apache.spark.SparkException: Exception thrown in awaitResult: \n", + "\tat org.apache.spark.util.ThreadUtils$.awaitResult(ThreadUtils.scala:301)\n", + "\tat org.apache.spark.network.BlockTransferService.fetchBlockSync(BlockTransferService.scala:103)\n", + "\tat org.apache.spark.storage.BlockManager.fetchRemoteManagedBuffer(BlockManager.scala:1069)\n", + "\tat org.apache.spark.storage.BlockManager.$anonfun$getRemoteBlock$8(BlockManager.scala:1013)\n", + "\tat scala.Option.orElse(Option.scala:447)\n", + "\tat org.apache.spark.storage.BlockManager.getRemoteBlock(BlockManager.scala:1013)\n", + "\tat org.apache.spark.storage.BlockManager.getRemoteBytes(BlockManager.scala:1151)\n", + "\tat org.apache.spark.scheduler.TaskResultGetter$$anon$3.$anonfun$run$1(TaskResultGetter.scala:88)\n", + "\tat scala.runtime.java8.JFunction0$mcV$sp.apply(JFunction0$mcV$sp.java:23)\n", + "\tat org.apache.spark.util.Utils$.logUncaughtExceptions(Utils.scala:1996)\n", + "\tat org.apache.spark.scheduler.TaskResultGetter$$anon$3.run(TaskResultGetter.scala:63)\n", + "\tat java.util.concurrent.ThreadPoolExecutor.runWorker(ThreadPoolExecutor.java:1149)\n", + "\tat java.util.concurrent.ThreadPoolExecutor$Worker.run(ThreadPoolExecutor.java:624)\n", + "\tat java.lang.Thread.run(Thread.java:750)\n", + "Caused by: java.util.concurrent.ExecutionException: Boxed Error\n", + "\tat scala.concurrent.impl.Promise$.resolver(Promise.scala:87)\n", + "\tat scala.concurrent.impl.Promise$.scala$concurrent$impl$Promise$$resolveTry(Promise.scala:79)\n", + "\tat scala.concurrent.impl.Promise$DefaultPromise.tryComplete(Promise.scala:284)\n", + "\tat scala.concurrent.Promise.complete(Promise.scala:53)\n", + "\tat scala.concurrent.Promise.complete$(Promise.scala:52)\n", + "\tat scala.concurrent.impl.Promise$DefaultPromise.complete(Promise.scala:187)\n", + "\tat scala.concurrent.Promise.failure(Promise.scala:104)\n", + "\tat scala.concurrent.Promise.failure$(Promise.scala:104)\n", + "\tat scala.concurrent.impl.Promise$DefaultPromise.failure(Promise.scala:187)\n", + "\tat org.apache.spark.network.BlockTransferService$$anon$1.onBlockFetchSuccess(BlockTransferService.scala:98)\n", + "\tat org.apache.spark.network.shuffle.RetryingBlockFetcher$RetryingBlockFetchListener.onBlockFetchSuccess(RetryingBlockFetcher.java:216)\n", + "\tat org.apache.spark.network.shuffle.OneForOneBlockFetcher$ChunkCallback.onSuccess(OneForOneBlockFetcher.java:190)\n", + "\tat org.apache.spark.network.client.TransportResponseHandler.handle(TransportResponseHandler.java:171)\n", + "\tat org.apache.spark.network.server.TransportChannelHandler.channelRead0(TransportChannelHandler.java:142)\n", + "\tat org.apache.spark.network.server.TransportChannelHandler.channelRead0(TransportChannelHandler.java:53)\n", + "\tat io.netty.channel.SimpleChannelInboundHandler.channelRead(SimpleChannelInboundHandler.java:99)\n", + "\tat io.netty.channel.AbstractChannelHandlerContext.invokeChannelRead(AbstractChannelHandlerContext.java:379)\n", + "\tat io.netty.channel.AbstractChannelHandlerContext.invokeChannelRead(AbstractChannelHandlerContext.java:365)\n", + "\tat io.netty.channel.AbstractChannelHandlerContext.fireChannelRead(AbstractChannelHandlerContext.java:357)\n", + "\tat io.netty.handler.timeout.IdleStateHandler.channelRead(IdleStateHandler.java:286)\n", + "\tat io.netty.channel.AbstractChannelHandlerContext.invokeChannelRead(AbstractChannelHandlerContext.java:379)\n", + "\tat 
io.netty.channel.AbstractChannelHandlerContext.invokeChannelRead(AbstractChannelHandlerContext.java:365)\n", + "\tat io.netty.channel.AbstractChannelHandlerContext.fireChannelRead(AbstractChannelHandlerContext.java:357)\n", + "\tat io.netty.handler.codec.MessageToMessageDecoder.channelRead(MessageToMessageDecoder.java:103)\n", + "\tat io.netty.channel.AbstractChannelHandlerContext.invokeChannelRead(AbstractChannelHandlerContext.java:379)\n", + "\tat io.netty.channel.AbstractChannelHandlerContext.invokeChannelRead(AbstractChannelHandlerContext.java:365)\n", + "\tat io.netty.channel.AbstractChannelHandlerContext.fireChannelRead(AbstractChannelHandlerContext.java:357)\n", + "\tat org.apache.spark.network.util.TransportFrameDecoder.channelRead(TransportFrameDecoder.java:102)\n", + "\tat io.netty.channel.AbstractChannelHandlerContext.invokeChannelRead(AbstractChannelHandlerContext.java:379)\n", + "\tat io.netty.channel.AbstractChannelHandlerContext.invokeChannelRead(AbstractChannelHandlerContext.java:365)\n", + "\tat io.netty.channel.AbstractChannelHandlerContext.fireChannelRead(AbstractChannelHandlerContext.java:357)\n", + "\tat io.netty.channel.DefaultChannelPipeline$HeadContext.channelRead(DefaultChannelPipeline.java:1410)\n", + "\tat io.netty.channel.AbstractChannelHandlerContext.invokeChannelRead(AbstractChannelHandlerContext.java:379)\n", + "\tat io.netty.channel.AbstractChannelHandlerContext.invokeChannelRead(AbstractChannelHandlerContext.java:365)\n", + "\tat io.netty.channel.DefaultChannelPipeline.fireChannelRead(DefaultChannelPipeline.java:919)\n", + "\tat io.netty.channel.nio.AbstractNioByteChannel$NioByteUnsafe.read(AbstractNioByteChannel.java:163)\n", + "\tat io.netty.channel.nio.NioEventLoop.processSelectedKey(NioEventLoop.java:714)\n", + "\tat io.netty.channel.nio.NioEventLoop.processSelectedKeysOptimized(NioEventLoop.java:650)\n", + "\tat io.netty.channel.nio.NioEventLoop.processSelectedKeys(NioEventLoop.java:576)\n", + "\tat io.netty.channel.nio.NioEventLoop.run(NioEventLoop.java:493)\n", + "\tat io.netty.util.concurrent.SingleThreadEventExecutor$4.run(SingleThreadEventExecutor.java:989)\n", + "\tat io.netty.util.internal.ThreadExecutorMap$2.run(ThreadExecutorMap.java:74)\n", + "\tat io.netty.util.concurrent.FastThreadLocalRunnable.run(FastThreadLocalRunnable.java:30)\n", + "\t... 1 more\n", + "Caused by: java.lang.OutOfMemoryError: Java heap space\n", + "23/11/24 21:03:36 WARN org.apache.spark.scheduler.TaskSetManager: Lost task 519.0 in stage 19.0 (TID 2239) (hub-msca-bdp-dphub-students-test-ridhi-w-0.c.msca-bdp-student-ap.internal executor 6): TaskResultLost (result lost from block manager)\n", + "23/11/24 21:03:36 WARN org.apache.spark.scheduler.TaskSetManager: Lost task 516.0 in stage 19.0 (TID 2236) (hub-msca-bdp-dphub-students-test-ridhi-w-1.c.msca-bdp-student-ap.internal executor 7): TaskResultLost (result lost from block manager)\n", + "23/11/24 21:03:39 WARN org.apache.spark.storage.BlockManager: Failed to fetch block after 1 fetch failures. 
Most recent failure cause:\n", + "org.apache.spark.SparkException: Exception thrown in awaitResult: \n", + "\tat org.apache.spark.util.ThreadUtils$.awaitResult(ThreadUtils.scala:301)\n", + "\tat org.apache.spark.network.BlockTransferService.fetchBlockSync(BlockTransferService.scala:103)\n", + "\tat org.apache.spark.storage.BlockManager.fetchRemoteManagedBuffer(BlockManager.scala:1069)\n", + "\tat org.apache.spark.storage.BlockManager.$anonfun$getRemoteBlock$8(BlockManager.scala:1013)\n", + "\tat scala.Option.orElse(Option.scala:447)\n", + "\tat org.apache.spark.storage.BlockManager.getRemoteBlock(BlockManager.scala:1013)\n", + "\tat org.apache.spark.storage.BlockManager.getRemoteBytes(BlockManager.scala:1151)\n", + "\tat org.apache.spark.scheduler.TaskResultGetter$$anon$3.$anonfun$run$1(TaskResultGetter.scala:88)\n", + "\tat scala.runtime.java8.JFunction0$mcV$sp.apply(JFunction0$mcV$sp.java:23)\n", + "\tat org.apache.spark.util.Utils$.logUncaughtExceptions(Utils.scala:1996)\n", + "\tat org.apache.spark.scheduler.TaskResultGetter$$anon$3.run(TaskResultGetter.scala:63)\n", + "\tat java.util.concurrent.ThreadPoolExecutor.runWorker(ThreadPoolExecutor.java:1149)\n", + "\tat java.util.concurrent.ThreadPoolExecutor$Worker.run(ThreadPoolExecutor.java:624)\n", + "\tat java.lang.Thread.run(Thread.java:750)\n", + "Caused by: java.util.concurrent.ExecutionException: Boxed Error\n", + "\tat scala.concurrent.impl.Promise$.resolver(Promise.scala:87)\n", + "\tat scala.concurrent.impl.Promise$.scala$concurrent$impl$Promise$$resolveTry(Promise.scala:79)\n", + "\tat scala.concurrent.impl.Promise$DefaultPromise.tryComplete(Promise.scala:284)\n", + "\tat scala.concurrent.Promise.complete(Promise.scala:53)\n", + "\tat scala.concurrent.Promise.complete$(Promise.scala:52)\n", + "\tat scala.concurrent.impl.Promise$DefaultPromise.complete(Promise.scala:187)\n", + "\tat scala.concurrent.Promise.failure(Promise.scala:104)\n", + "\tat scala.concurrent.Promise.failure$(Promise.scala:104)\n", + "\tat scala.concurrent.impl.Promise$DefaultPromise.failure(Promise.scala:187)\n", + "\tat org.apache.spark.network.BlockTransferService$$anon$1.onBlockFetchSuccess(BlockTransferService.scala:98)\n", + "\tat org.apache.spark.network.shuffle.RetryingBlockFetcher$RetryingBlockFetchListener.onBlockFetchSuccess(RetryingBlockFetcher.java:216)\n", + "\tat org.apache.spark.network.shuffle.OneForOneBlockFetcher$ChunkCallback.onSuccess(OneForOneBlockFetcher.java:190)\n", + "\tat org.apache.spark.network.client.TransportResponseHandler.handle(TransportResponseHandler.java:171)\n", + "\tat org.apache.spark.network.server.TransportChannelHandler.channelRead0(TransportChannelHandler.java:142)\n", + "\tat org.apache.spark.network.server.TransportChannelHandler.channelRead0(TransportChannelHandler.java:53)\n", + "\tat io.netty.channel.SimpleChannelInboundHandler.channelRead(SimpleChannelInboundHandler.java:99)\n", + "\tat io.netty.channel.AbstractChannelHandlerContext.invokeChannelRead(AbstractChannelHandlerContext.java:379)\n", + "\tat io.netty.channel.AbstractChannelHandlerContext.invokeChannelRead(AbstractChannelHandlerContext.java:365)\n", + "\tat io.netty.channel.AbstractChannelHandlerContext.fireChannelRead(AbstractChannelHandlerContext.java:357)\n", + "\tat io.netty.handler.timeout.IdleStateHandler.channelRead(IdleStateHandler.java:286)\n", + "\tat io.netty.channel.AbstractChannelHandlerContext.invokeChannelRead(AbstractChannelHandlerContext.java:379)\n", + "\tat 
io.netty.channel.AbstractChannelHandlerContext.invokeChannelRead(AbstractChannelHandlerContext.java:365)\n", + "\tat io.netty.channel.AbstractChannelHandlerContext.fireChannelRead(AbstractChannelHandlerContext.java:357)\n", + "\tat io.netty.handler.codec.MessageToMessageDecoder.channelRead(MessageToMessageDecoder.java:103)\n", + "\tat io.netty.channel.AbstractChannelHandlerContext.invokeChannelRead(AbstractChannelHandlerContext.java:379)\n", + "\tat io.netty.channel.AbstractChannelHandlerContext.invokeChannelRead(AbstractChannelHandlerContext.java:365)\n", + "\tat io.netty.channel.AbstractChannelHandlerContext.fireChannelRead(AbstractChannelHandlerContext.java:357)\n", + "\tat org.apache.spark.network.util.TransportFrameDecoder.channelRead(TransportFrameDecoder.java:102)\n", + "\tat io.netty.channel.AbstractChannelHandlerContext.invokeChannelRead(AbstractChannelHandlerContext.java:379)\n", + "\tat io.netty.channel.AbstractChannelHandlerContext.invokeChannelRead(AbstractChannelHandlerContext.java:365)\n", + "\tat io.netty.channel.AbstractChannelHandlerContext.fireChannelRead(AbstractChannelHandlerContext.java:357)\n", + "\tat io.netty.channel.DefaultChannelPipeline$HeadContext.channelRead(DefaultChannelPipeline.java:1410)\n", + "\tat io.netty.channel.AbstractChannelHandlerContext.invokeChannelRead(AbstractChannelHandlerContext.java:379)\n", + "\tat io.netty.channel.AbstractChannelHandlerContext.invokeChannelRead(AbstractChannelHandlerContext.java:365)\n", + "\tat io.netty.channel.DefaultChannelPipeline.fireChannelRead(DefaultChannelPipeline.java:919)\n", + "\tat io.netty.channel.nio.AbstractNioByteChannel$NioByteUnsafe.read(AbstractNioByteChannel.java:163)\n", + "\tat io.netty.channel.nio.NioEventLoop.processSelectedKey(NioEventLoop.java:714)\n", + "\tat io.netty.channel.nio.NioEventLoop.processSelectedKeysOptimized(NioEventLoop.java:650)\n", + "\tat io.netty.channel.nio.NioEventLoop.processSelectedKeys(NioEventLoop.java:576)\n", + "\tat io.netty.channel.nio.NioEventLoop.run(NioEventLoop.java:493)\n", + "\tat io.netty.util.concurrent.SingleThreadEventExecutor$4.run(SingleThreadEventExecutor.java:989)\n", + "\tat io.netty.util.internal.ThreadExecutorMap$2.run(ThreadExecutorMap.java:74)\n", + "\tat io.netty.util.concurrent.FastThreadLocalRunnable.run(FastThreadLocalRunnable.java:30)\n", + "\t... 1 more\n", + "Caused by: java.lang.OutOfMemoryError: Java heap space\n", + "23/11/24 21:03:39 WARN org.apache.spark.scheduler.TaskSetManager: Lost task 512.0 in stage 19.0 (TID 2232) (hub-msca-bdp-dphub-students-test-ridhi-w-1.c.msca-bdp-student-ap.internal executor 7): TaskResultLost (result lost from block manager)\n", + "23/11/24 21:03:41 WARN org.apache.spark.storage.BlockManager: Failed to fetch block after 1 fetch failures. 
Most recent failure cause:\n", + "org.apache.spark.SparkException: Exception thrown in awaitResult: \n", + "\tat org.apache.spark.util.ThreadUtils$.awaitResult(ThreadUtils.scala:301)\n", + "\tat org.apache.spark.network.BlockTransferService.fetchBlockSync(BlockTransferService.scala:103)\n", + "\tat org.apache.spark.storage.BlockManager.fetchRemoteManagedBuffer(BlockManager.scala:1069)\n", + "\tat org.apache.spark.storage.BlockManager.$anonfun$getRemoteBlock$8(BlockManager.scala:1013)\n", + "\tat scala.Option.orElse(Option.scala:447)\n", + "\tat org.apache.spark.storage.BlockManager.getRemoteBlock(BlockManager.scala:1013)\n", + "\tat org.apache.spark.storage.BlockManager.getRemoteBytes(BlockManager.scala:1151)\n", + "\tat org.apache.spark.scheduler.TaskResultGetter$$anon$3.$anonfun$run$1(TaskResultGetter.scala:88)\n", + "\tat scala.runtime.java8.JFunction0$mcV$sp.apply(JFunction0$mcV$sp.java:23)\n", + "\tat org.apache.spark.util.Utils$.logUncaughtExceptions(Utils.scala:1996)\n", + "\tat org.apache.spark.scheduler.TaskResultGetter$$anon$3.run(TaskResultGetter.scala:63)\n", + "\tat java.util.concurrent.ThreadPoolExecutor.runWorker(ThreadPoolExecutor.java:1149)\n", + "\tat java.util.concurrent.ThreadPoolExecutor$Worker.run(ThreadPoolExecutor.java:624)\n", + "\tat java.lang.Thread.run(Thread.java:750)\n", + "Caused by: java.util.concurrent.ExecutionException: Boxed Error\n", + "\tat scala.concurrent.impl.Promise$.resolver(Promise.scala:87)\n", + "\tat scala.concurrent.impl.Promise$.scala$concurrent$impl$Promise$$resolveTry(Promise.scala:79)\n", + "\tat scala.concurrent.impl.Promise$DefaultPromise.tryComplete(Promise.scala:284)\n", + "\tat scala.concurrent.Promise.complete(Promise.scala:53)\n", + "\tat scala.concurrent.Promise.complete$(Promise.scala:52)\n", + "\tat scala.concurrent.impl.Promise$DefaultPromise.complete(Promise.scala:187)\n", + "\tat scala.concurrent.Promise.failure(Promise.scala:104)\n", + "\tat scala.concurrent.Promise.failure$(Promise.scala:104)\n", + "\tat scala.concurrent.impl.Promise$DefaultPromise.failure(Promise.scala:187)\n", + "\tat org.apache.spark.network.BlockTransferService$$anon$1.onBlockFetchSuccess(BlockTransferService.scala:98)\n", + "\tat org.apache.spark.network.shuffle.RetryingBlockFetcher$RetryingBlockFetchListener.onBlockFetchSuccess(RetryingBlockFetcher.java:216)\n", + "\tat org.apache.spark.network.shuffle.OneForOneBlockFetcher$ChunkCallback.onSuccess(OneForOneBlockFetcher.java:190)\n", + "\tat org.apache.spark.network.client.TransportResponseHandler.handle(TransportResponseHandler.java:171)\n", + "\tat org.apache.spark.network.server.TransportChannelHandler.channelRead0(TransportChannelHandler.java:142)\n", + "\tat org.apache.spark.network.server.TransportChannelHandler.channelRead0(TransportChannelHandler.java:53)\n", + "\tat io.netty.channel.SimpleChannelInboundHandler.channelRead(SimpleChannelInboundHandler.java:99)\n", + "\tat io.netty.channel.AbstractChannelHandlerContext.invokeChannelRead(AbstractChannelHandlerContext.java:379)\n", + "\tat io.netty.channel.AbstractChannelHandlerContext.invokeChannelRead(AbstractChannelHandlerContext.java:365)\n", + "\tat io.netty.channel.AbstractChannelHandlerContext.fireChannelRead(AbstractChannelHandlerContext.java:357)\n", + "\tat io.netty.handler.timeout.IdleStateHandler.channelRead(IdleStateHandler.java:286)\n", + "\tat io.netty.channel.AbstractChannelHandlerContext.invokeChannelRead(AbstractChannelHandlerContext.java:379)\n", + "\tat 
io.netty.channel.AbstractChannelHandlerContext.invokeChannelRead(AbstractChannelHandlerContext.java:365)\n", + "\tat io.netty.channel.AbstractChannelHandlerContext.fireChannelRead(AbstractChannelHandlerContext.java:357)\n", + "\tat io.netty.handler.codec.MessageToMessageDecoder.channelRead(MessageToMessageDecoder.java:103)\n", + "\tat io.netty.channel.AbstractChannelHandlerContext.invokeChannelRead(AbstractChannelHandlerContext.java:379)\n", + "\tat io.netty.channel.AbstractChannelHandlerContext.invokeChannelRead(AbstractChannelHandlerContext.java:365)\n", + "\tat io.netty.channel.AbstractChannelHandlerContext.fireChannelRead(AbstractChannelHandlerContext.java:357)\n", + "\tat org.apache.spark.network.util.TransportFrameDecoder.channelRead(TransportFrameDecoder.java:102)\n", + "\tat io.netty.channel.AbstractChannelHandlerContext.invokeChannelRead(AbstractChannelHandlerContext.java:379)\n", + "\tat io.netty.channel.AbstractChannelHandlerContext.invokeChannelRead(AbstractChannelHandlerContext.java:365)\n", + "\tat io.netty.channel.AbstractChannelHandlerContext.fireChannelRead(AbstractChannelHandlerContext.java:357)\n", + "\tat io.netty.channel.DefaultChannelPipeline$HeadContext.channelRead(DefaultChannelPipeline.java:1410)\n", + "\tat io.netty.channel.AbstractChannelHandlerContext.invokeChannelRead(AbstractChannelHandlerContext.java:379)\n", + "\tat io.netty.channel.AbstractChannelHandlerContext.invokeChannelRead(AbstractChannelHandlerContext.java:365)\n", + "\tat io.netty.channel.DefaultChannelPipeline.fireChannelRead(DefaultChannelPipeline.java:919)\n", + "\tat io.netty.channel.nio.AbstractNioByteChannel$NioByteUnsafe.read(AbstractNioByteChannel.java:163)\n", + "\tat io.netty.channel.nio.NioEventLoop.processSelectedKey(NioEventLoop.java:714)\n", + "\tat io.netty.channel.nio.NioEventLoop.processSelectedKeysOptimized(NioEventLoop.java:650)\n", + "\tat io.netty.channel.nio.NioEventLoop.processSelectedKeys(NioEventLoop.java:576)\n", + "\tat io.netty.channel.nio.NioEventLoop.run(NioEventLoop.java:493)\n", + "\tat io.netty.util.concurrent.SingleThreadEventExecutor$4.run(SingleThreadEventExecutor.java:989)\n", + "\tat io.netty.util.internal.ThreadExecutorMap$2.run(ThreadExecutorMap.java:74)\n", + "\tat io.netty.util.concurrent.FastThreadLocalRunnable.run(FastThreadLocalRunnable.java:30)\n", + "\t... 1 more\n", + "Caused by: java.lang.OutOfMemoryError: Java heap space\n", + "23/11/24 21:03:41 WARN org.apache.spark.scheduler.TaskSetManager: Lost task 517.0 in stage 19.0 (TID 2237) (hub-msca-bdp-dphub-students-test-ridhi-w-0.c.msca-bdp-student-ap.internal executor 6): TaskResultLost (result lost from block manager)\n", + "23/11/24 21:03:43 WARN org.apache.spark.storage.BlockManager: Failed to fetch block after 1 fetch failures. 
Most recent failure cause:\n", + "org.apache.spark.SparkException: Exception thrown in awaitResult: \n", + "\tat org.apache.spark.util.ThreadUtils$.awaitResult(ThreadUtils.scala:301)\n", + "\tat org.apache.spark.network.BlockTransferService.fetchBlockSync(BlockTransferService.scala:103)\n", + "\tat org.apache.spark.storage.BlockManager.fetchRemoteManagedBuffer(BlockManager.scala:1069)\n", + "\tat org.apache.spark.storage.BlockManager.$anonfun$getRemoteBlock$8(BlockManager.scala:1013)\n", + "\tat scala.Option.orElse(Option.scala:447)\n", + "\tat org.apache.spark.storage.BlockManager.getRemoteBlock(BlockManager.scala:1013)\n", + "\tat org.apache.spark.storage.BlockManager.getRemoteBytes(BlockManager.scala:1151)\n", + "\tat org.apache.spark.scheduler.TaskResultGetter$$anon$3.$anonfun$run$1(TaskResultGetter.scala:88)\n", + "\tat scala.runtime.java8.JFunction0$mcV$sp.apply(JFunction0$mcV$sp.java:23)\n", + "\tat org.apache.spark.util.Utils$.logUncaughtExceptions(Utils.scala:1996)\n", + "\tat org.apache.spark.scheduler.TaskResultGetter$$anon$3.run(TaskResultGetter.scala:63)\n", + "\tat java.util.concurrent.ThreadPoolExecutor.runWorker(ThreadPoolExecutor.java:1149)\n", + "\tat java.util.concurrent.ThreadPoolExecutor$Worker.run(ThreadPoolExecutor.java:624)\n", + "\tat java.lang.Thread.run(Thread.java:750)\n", + "Caused by: java.util.concurrent.ExecutionException: Boxed Error\n", + "\tat scala.concurrent.impl.Promise$.resolver(Promise.scala:87)\n", + "\tat scala.concurrent.impl.Promise$.scala$concurrent$impl$Promise$$resolveTry(Promise.scala:79)\n", + "\tat scala.concurrent.impl.Promise$DefaultPromise.tryComplete(Promise.scala:284)\n", + "\tat scala.concurrent.Promise.complete(Promise.scala:53)\n", + "\tat scala.concurrent.Promise.complete$(Promise.scala:52)\n", + "\tat scala.concurrent.impl.Promise$DefaultPromise.complete(Promise.scala:187)\n", + "\tat scala.concurrent.Promise.failure(Promise.scala:104)\n", + "\tat scala.concurrent.Promise.failure$(Promise.scala:104)\n", + "\tat scala.concurrent.impl.Promise$DefaultPromise.failure(Promise.scala:187)\n", + "\tat org.apache.spark.network.BlockTransferService$$anon$1.onBlockFetchSuccess(BlockTransferService.scala:98)\n", + "\tat org.apache.spark.network.shuffle.RetryingBlockFetcher$RetryingBlockFetchListener.onBlockFetchSuccess(RetryingBlockFetcher.java:216)\n", + "\tat org.apache.spark.network.shuffle.OneForOneBlockFetcher$ChunkCallback.onSuccess(OneForOneBlockFetcher.java:190)\n", + "\tat org.apache.spark.network.client.TransportResponseHandler.handle(TransportResponseHandler.java:171)\n", + "\tat org.apache.spark.network.server.TransportChannelHandler.channelRead0(TransportChannelHandler.java:142)\n", + "\tat org.apache.spark.network.server.TransportChannelHandler.channelRead0(TransportChannelHandler.java:53)\n", + "\tat io.netty.channel.SimpleChannelInboundHandler.channelRead(SimpleChannelInboundHandler.java:99)\n", + "\tat io.netty.channel.AbstractChannelHandlerContext.invokeChannelRead(AbstractChannelHandlerContext.java:379)\n", + "\tat io.netty.channel.AbstractChannelHandlerContext.invokeChannelRead(AbstractChannelHandlerContext.java:365)\n", + "\tat io.netty.channel.AbstractChannelHandlerContext.fireChannelRead(AbstractChannelHandlerContext.java:357)\n", + "\tat io.netty.handler.timeout.IdleStateHandler.channelRead(IdleStateHandler.java:286)\n", + "\tat io.netty.channel.AbstractChannelHandlerContext.invokeChannelRead(AbstractChannelHandlerContext.java:379)\n", + "\tat 
io.netty.channel.AbstractChannelHandlerContext.invokeChannelRead(AbstractChannelHandlerContext.java:365)\n", + "\tat io.netty.channel.AbstractChannelHandlerContext.fireChannelRead(AbstractChannelHandlerContext.java:357)\n", + "\tat io.netty.handler.codec.MessageToMessageDecoder.channelRead(MessageToMessageDecoder.java:103)\n", + "\tat io.netty.channel.AbstractChannelHandlerContext.invokeChannelRead(AbstractChannelHandlerContext.java:379)\n", + "\tat io.netty.channel.AbstractChannelHandlerContext.invokeChannelRead(AbstractChannelHandlerContext.java:365)\n", + "\tat io.netty.channel.AbstractChannelHandlerContext.fireChannelRead(AbstractChannelHandlerContext.java:357)\n", + "\tat org.apache.spark.network.util.TransportFrameDecoder.channelRead(TransportFrameDecoder.java:102)\n", + "\tat io.netty.channel.AbstractChannelHandlerContext.invokeChannelRead(AbstractChannelHandlerContext.java:379)\n", + "\tat io.netty.channel.AbstractChannelHandlerContext.invokeChannelRead(AbstractChannelHandlerContext.java:365)\n", + "\tat io.netty.channel.AbstractChannelHandlerContext.fireChannelRead(AbstractChannelHandlerContext.java:357)\n", + "\tat io.netty.channel.DefaultChannelPipeline$HeadContext.channelRead(DefaultChannelPipeline.java:1410)\n", + "\tat io.netty.channel.AbstractChannelHandlerContext.invokeChannelRead(AbstractChannelHandlerContext.java:379)\n", + "\tat io.netty.channel.AbstractChannelHandlerContext.invokeChannelRead(AbstractChannelHandlerContext.java:365)\n", + "\tat io.netty.channel.DefaultChannelPipeline.fireChannelRead(DefaultChannelPipeline.java:919)\n", + "\tat io.netty.channel.nio.AbstractNioByteChannel$NioByteUnsafe.read(AbstractNioByteChannel.java:163)\n", + "\tat io.netty.channel.nio.NioEventLoop.processSelectedKey(NioEventLoop.java:714)\n", + "\tat io.netty.channel.nio.NioEventLoop.processSelectedKeysOptimized(NioEventLoop.java:650)\n", + "\tat io.netty.channel.nio.NioEventLoop.processSelectedKeys(NioEventLoop.java:576)\n", + "\tat io.netty.channel.nio.NioEventLoop.run(NioEventLoop.java:493)\n", + "\tat io.netty.util.concurrent.SingleThreadEventExecutor$4.run(SingleThreadEventExecutor.java:989)\n", + "\tat io.netty.util.internal.ThreadExecutorMap$2.run(ThreadExecutorMap.java:74)\n", + "\tat io.netty.util.concurrent.FastThreadLocalRunnable.run(FastThreadLocalRunnable.java:30)\n", + "\t... 1 more\n", + "Caused by: java.lang.OutOfMemoryError: Java heap space\n", + "23/11/24 21:03:43 WARN org.apache.spark.storage.BlockManager: Failed to fetch block after 1 fetch failures. 
Most recent failure cause:\n", + "org.apache.spark.SparkException: Exception thrown in awaitResult: \n", + "\tat org.apache.spark.util.ThreadUtils$.awaitResult(ThreadUtils.scala:301)\n", + "\tat org.apache.spark.network.BlockTransferService.fetchBlockSync(BlockTransferService.scala:103)\n", + "\tat org.apache.spark.storage.BlockManager.fetchRemoteManagedBuffer(BlockManager.scala:1069)\n", + "\tat org.apache.spark.storage.BlockManager.$anonfun$getRemoteBlock$8(BlockManager.scala:1013)\n", + "\tat scala.Option.orElse(Option.scala:447)\n", + "\tat org.apache.spark.storage.BlockManager.getRemoteBlock(BlockManager.scala:1013)\n", + "\tat org.apache.spark.storage.BlockManager.getRemoteBytes(BlockManager.scala:1151)\n", + "\tat org.apache.spark.scheduler.TaskResultGetter$$anon$3.$anonfun$run$1(TaskResultGetter.scala:88)\n", + "\tat scala.runtime.java8.JFunction0$mcV$sp.apply(JFunction0$mcV$sp.java:23)\n", + "\tat org.apache.spark.util.Utils$.logUncaughtExceptions(Utils.scala:1996)\n", + "\tat org.apache.spark.scheduler.TaskResultGetter$$anon$3.run(TaskResultGetter.scala:63)\n", + "\tat java.util.concurrent.ThreadPoolExecutor.runWorker(ThreadPoolExecutor.java:1149)\n", + "\tat java.util.concurrent.ThreadPoolExecutor$Worker.run(ThreadPoolExecutor.java:624)\n", + "\tat java.lang.Thread.run(Thread.java:750)\n", + "Caused by: java.util.concurrent.ExecutionException: Boxed Error\n", + "\tat scala.concurrent.impl.Promise$.resolver(Promise.scala:87)\n", + "\tat scala.concurrent.impl.Promise$.scala$concurrent$impl$Promise$$resolveTry(Promise.scala:79)\n", + "\tat scala.concurrent.impl.Promise$DefaultPromise.tryComplete(Promise.scala:284)\n", + "\tat scala.concurrent.Promise.complete(Promise.scala:53)\n", + "\tat scala.concurrent.Promise.complete$(Promise.scala:52)\n", + "\tat scala.concurrent.impl.Promise$DefaultPromise.complete(Promise.scala:187)\n", + "\tat scala.concurrent.Promise.failure(Promise.scala:104)\n", + "\tat scala.concurrent.Promise.failure$(Promise.scala:104)\n", + "\tat scala.concurrent.impl.Promise$DefaultPromise.failure(Promise.scala:187)\n", + "\tat org.apache.spark.network.BlockTransferService$$anon$1.onBlockFetchSuccess(BlockTransferService.scala:98)\n", + "\tat org.apache.spark.network.shuffle.RetryingBlockFetcher$RetryingBlockFetchListener.onBlockFetchSuccess(RetryingBlockFetcher.java:216)\n", + "\tat org.apache.spark.network.shuffle.OneForOneBlockFetcher$ChunkCallback.onSuccess(OneForOneBlockFetcher.java:190)\n", + "\tat org.apache.spark.network.client.TransportResponseHandler.handle(TransportResponseHandler.java:171)\n", + "\tat org.apache.spark.network.server.TransportChannelHandler.channelRead0(TransportChannelHandler.java:142)\n", + "\tat org.apache.spark.network.server.TransportChannelHandler.channelRead0(TransportChannelHandler.java:53)\n", + "\tat io.netty.channel.SimpleChannelInboundHandler.channelRead(SimpleChannelInboundHandler.java:99)\n", + "\tat io.netty.channel.AbstractChannelHandlerContext.invokeChannelRead(AbstractChannelHandlerContext.java:379)\n", + "\tat io.netty.channel.AbstractChannelHandlerContext.invokeChannelRead(AbstractChannelHandlerContext.java:365)\n", + "\tat io.netty.channel.AbstractChannelHandlerContext.fireChannelRead(AbstractChannelHandlerContext.java:357)\n", + "\tat io.netty.handler.timeout.IdleStateHandler.channelRead(IdleStateHandler.java:286)\n", + "\tat io.netty.channel.AbstractChannelHandlerContext.invokeChannelRead(AbstractChannelHandlerContext.java:379)\n", + "\tat 
io.netty.channel.AbstractChannelHandlerContext.invokeChannelRead(AbstractChannelHandlerContext.java:365)\n", + "\tat io.netty.channel.AbstractChannelHandlerContext.fireChannelRead(AbstractChannelHandlerContext.java:357)\n", + "\tat io.netty.handler.codec.MessageToMessageDecoder.channelRead(MessageToMessageDecoder.java:103)\n", + "\tat io.netty.channel.AbstractChannelHandlerContext.invokeChannelRead(AbstractChannelHandlerContext.java:379)\n", + "\tat io.netty.channel.AbstractChannelHandlerContext.invokeChannelRead(AbstractChannelHandlerContext.java:365)\n", + "\tat io.netty.channel.AbstractChannelHandlerContext.fireChannelRead(AbstractChannelHandlerContext.java:357)\n", + "\tat org.apache.spark.network.util.TransportFrameDecoder.channelRead(TransportFrameDecoder.java:102)\n", + "\tat io.netty.channel.AbstractChannelHandlerContext.invokeChannelRead(AbstractChannelHandlerContext.java:379)\n", + "\tat io.netty.channel.AbstractChannelHandlerContext.invokeChannelRead(AbstractChannelHandlerContext.java:365)\n", + "\tat io.netty.channel.AbstractChannelHandlerContext.fireChannelRead(AbstractChannelHandlerContext.java:357)\n", + "\tat io.netty.channel.DefaultChannelPipeline$HeadContext.channelRead(DefaultChannelPipeline.java:1410)\n", + "\tat io.netty.channel.AbstractChannelHandlerContext.invokeChannelRead(AbstractChannelHandlerContext.java:379)\n", + "\tat io.netty.channel.AbstractChannelHandlerContext.invokeChannelRead(AbstractChannelHandlerContext.java:365)\n", + "\tat io.netty.channel.DefaultChannelPipeline.fireChannelRead(DefaultChannelPipeline.java:919)\n", + "\tat io.netty.channel.nio.AbstractNioByteChannel$NioByteUnsafe.read(AbstractNioByteChannel.java:163)\n", + "\tat io.netty.channel.nio.NioEventLoop.processSelectedKey(NioEventLoop.java:714)\n", + "\tat io.netty.channel.nio.NioEventLoop.processSelectedKeysOptimized(NioEventLoop.java:650)\n", + "\tat io.netty.channel.nio.NioEventLoop.processSelectedKeys(NioEventLoop.java:576)\n", + "\tat io.netty.channel.nio.NioEventLoop.run(NioEventLoop.java:493)\n", + "\tat io.netty.util.concurrent.SingleThreadEventExecutor$4.run(SingleThreadEventExecutor.java:989)\n", + "\tat io.netty.util.internal.ThreadExecutorMap$2.run(ThreadExecutorMap.java:74)\n", + "\tat io.netty.util.concurrent.FastThreadLocalRunnable.run(FastThreadLocalRunnable.java:30)\n", + "\t... 1 more\n", + "Caused by: java.lang.OutOfMemoryError: Java heap space\n", + "23/11/24 21:03:43 WARN org.apache.spark.scheduler.TaskSetManager: Lost task 515.0 in stage 19.0 (TID 2235) (hub-msca-bdp-dphub-students-test-ridhi-w-0.c.msca-bdp-student-ap.internal executor 6): TaskResultLost (result lost from block manager)\n", + "23/11/24 21:03:43 WARN org.apache.spark.scheduler.TaskSetManager: Lost task 510.0 in stage 19.0 (TID 2230) (hub-msca-bdp-dphub-students-test-ridhi-w-1.c.msca-bdp-student-ap.internal executor 7): TaskResultLost (result lost from block manager)\n", + "23/11/24 21:03:46 WARN org.apache.spark.storage.BlockManager: Failed to fetch block after 1 fetch failures. 
Most recent failure cause:\n", + "org.apache.spark.SparkException: Exception thrown in awaitResult: \n", + "\tat org.apache.spark.util.ThreadUtils$.awaitResult(ThreadUtils.scala:301)\n", + "\tat org.apache.spark.network.BlockTransferService.fetchBlockSync(BlockTransferService.scala:103)\n", + "\tat org.apache.spark.storage.BlockManager.fetchRemoteManagedBuffer(BlockManager.scala:1069)\n", + "\tat org.apache.spark.storage.BlockManager.$anonfun$getRemoteBlock$8(BlockManager.scala:1013)\n", + "\tat scala.Option.orElse(Option.scala:447)\n", + "\tat org.apache.spark.storage.BlockManager.getRemoteBlock(BlockManager.scala:1013)\n", + "\tat org.apache.spark.storage.BlockManager.getRemoteBytes(BlockManager.scala:1151)\n", + "\tat org.apache.spark.scheduler.TaskResultGetter$$anon$3.$anonfun$run$1(TaskResultGetter.scala:88)\n", + "\tat scala.runtime.java8.JFunction0$mcV$sp.apply(JFunction0$mcV$sp.java:23)\n", + "\tat org.apache.spark.util.Utils$.logUncaughtExceptions(Utils.scala:1996)\n", + "\tat org.apache.spark.scheduler.TaskResultGetter$$anon$3.run(TaskResultGetter.scala:63)\n", + "\tat java.util.concurrent.ThreadPoolExecutor.runWorker(ThreadPoolExecutor.java:1149)\n", + "\tat java.util.concurrent.ThreadPoolExecutor$Worker.run(ThreadPoolExecutor.java:624)\n", + "\tat java.lang.Thread.run(Thread.java:750)\n", + "Caused by: java.util.concurrent.ExecutionException: Boxed Error\n", + "\tat scala.concurrent.impl.Promise$.resolver(Promise.scala:87)\n", + "\tat scala.concurrent.impl.Promise$.scala$concurrent$impl$Promise$$resolveTry(Promise.scala:79)\n", + "\tat scala.concurrent.impl.Promise$DefaultPromise.tryComplete(Promise.scala:284)\n", + "\tat scala.concurrent.Promise.complete(Promise.scala:53)\n", + "\tat scala.concurrent.Promise.complete$(Promise.scala:52)\n", + "\tat scala.concurrent.impl.Promise$DefaultPromise.complete(Promise.scala:187)\n", + "\tat scala.concurrent.Promise.failure(Promise.scala:104)\n", + "\tat scala.concurrent.Promise.failure$(Promise.scala:104)\n", + "\tat scala.concurrent.impl.Promise$DefaultPromise.failure(Promise.scala:187)\n", + "\tat org.apache.spark.network.BlockTransferService$$anon$1.onBlockFetchSuccess(BlockTransferService.scala:98)\n", + "\tat org.apache.spark.network.shuffle.RetryingBlockFetcher$RetryingBlockFetchListener.onBlockFetchSuccess(RetryingBlockFetcher.java:216)\n", + "\tat org.apache.spark.network.shuffle.OneForOneBlockFetcher$ChunkCallback.onSuccess(OneForOneBlockFetcher.java:190)\n", + "\tat org.apache.spark.network.client.TransportResponseHandler.handle(TransportResponseHandler.java:171)\n", + "\tat org.apache.spark.network.server.TransportChannelHandler.channelRead0(TransportChannelHandler.java:142)\n", + "\tat org.apache.spark.network.server.TransportChannelHandler.channelRead0(TransportChannelHandler.java:53)\n", + "\tat io.netty.channel.SimpleChannelInboundHandler.channelRead(SimpleChannelInboundHandler.java:99)\n", + "\tat io.netty.channel.AbstractChannelHandlerContext.invokeChannelRead(AbstractChannelHandlerContext.java:379)\n", + "\tat io.netty.channel.AbstractChannelHandlerContext.invokeChannelRead(AbstractChannelHandlerContext.java:365)\n", + "\tat io.netty.channel.AbstractChannelHandlerContext.fireChannelRead(AbstractChannelHandlerContext.java:357)\n", + "\tat io.netty.handler.timeout.IdleStateHandler.channelRead(IdleStateHandler.java:286)\n", + "\tat io.netty.channel.AbstractChannelHandlerContext.invokeChannelRead(AbstractChannelHandlerContext.java:379)\n", + "\tat 
io.netty.channel.AbstractChannelHandlerContext.invokeChannelRead(AbstractChannelHandlerContext.java:365)\n", + "\tat io.netty.channel.AbstractChannelHandlerContext.fireChannelRead(AbstractChannelHandlerContext.java:357)\n", + "\tat io.netty.handler.codec.MessageToMessageDecoder.channelRead(MessageToMessageDecoder.java:103)\n", + "\tat io.netty.channel.AbstractChannelHandlerContext.invokeChannelRead(AbstractChannelHandlerContext.java:379)\n", + "\tat io.netty.channel.AbstractChannelHandlerContext.invokeChannelRead(AbstractChannelHandlerContext.java:365)\n", + "\tat io.netty.channel.AbstractChannelHandlerContext.fireChannelRead(AbstractChannelHandlerContext.java:357)\n", + "\tat org.apache.spark.network.util.TransportFrameDecoder.channelRead(TransportFrameDecoder.java:102)\n", + "\tat io.netty.channel.AbstractChannelHandlerContext.invokeChannelRead(AbstractChannelHandlerContext.java:379)\n", + "\tat io.netty.channel.AbstractChannelHandlerContext.invokeChannelRead(AbstractChannelHandlerContext.java:365)\n", + "\tat io.netty.channel.AbstractChannelHandlerContext.fireChannelRead(AbstractChannelHandlerContext.java:357)\n", + "\tat io.netty.channel.DefaultChannelPipeline$HeadContext.channelRead(DefaultChannelPipeline.java:1410)\n", + "\tat io.netty.channel.AbstractChannelHandlerContext.invokeChannelRead(AbstractChannelHandlerContext.java:379)\n", + "\tat io.netty.channel.AbstractChannelHandlerContext.invokeChannelRead(AbstractChannelHandlerContext.java:365)\n", + "\tat io.netty.channel.DefaultChannelPipeline.fireChannelRead(DefaultChannelPipeline.java:919)\n", + "\tat io.netty.channel.nio.AbstractNioByteChannel$NioByteUnsafe.read(AbstractNioByteChannel.java:163)\n", + "\tat io.netty.channel.nio.NioEventLoop.processSelectedKey(NioEventLoop.java:714)\n", + "\tat io.netty.channel.nio.NioEventLoop.processSelectedKeysOptimized(NioEventLoop.java:650)\n", + "\tat io.netty.channel.nio.NioEventLoop.processSelectedKeys(NioEventLoop.java:576)\n", + "\tat io.netty.channel.nio.NioEventLoop.run(NioEventLoop.java:493)\n", + "\tat io.netty.util.concurrent.SingleThreadEventExecutor$4.run(SingleThreadEventExecutor.java:989)\n", + "\tat io.netty.util.internal.ThreadExecutorMap$2.run(ThreadExecutorMap.java:74)\n", + "\tat io.netty.util.concurrent.FastThreadLocalRunnable.run(FastThreadLocalRunnable.java:30)\n", + "\t... 1 more\n", + "Caused by: java.lang.OutOfMemoryError: Java heap space\n", + "23/11/24 21:03:46 WARN org.apache.spark.scheduler.TaskSetManager: Lost task 509.0 in stage 19.0 (TID 2229) (hub-msca-bdp-dphub-students-test-ridhi-w-0.c.msca-bdp-student-ap.internal executor 6): TaskResultLost (result lost from block manager)\n", + "23/11/24 21:03:48 WARN org.apache.spark.storage.BlockManager: Failed to fetch block after 1 fetch failures. 
Most recent failure cause:\n", + "org.apache.spark.SparkException: Exception thrown in awaitResult: \n", + "Caused by: java.util.concurrent.ExecutionException: Boxed Error\n", + "Caused by: java.lang.OutOfMemoryError: Java heap space\n", + "[the identical awaitResult / Boxed Error / Java heap space stack trace shown above repeats for each fetch failure; TaskSetManager reported TaskResultLost (result lost from block manager) for tasks 513.0, 511.0, 520.0, 518.0, 523.0, 521.0, 522.0, 524.0, 527.0 and 534.0 in stage 19.0 on executors 3, 6 and 7 between 21:03:48 and 21:04:03]\n", + "23/11/24 21:04:06 WARN org.apache.spark.storage.BlockManager: Failed to fetch block after 1 fetch failures. 
Most recent failure cause:\n", + "org.apache.spark.SparkException: Exception thrown in awaitResult: \n", + "\tat org.apache.spark.util.ThreadUtils$.awaitResult(ThreadUtils.scala:301)\n", + "\tat org.apache.spark.network.BlockTransferService.fetchBlockSync(BlockTransferService.scala:103)\n", + "\tat org.apache.spark.storage.BlockManager.fetchRemoteManagedBuffer(BlockManager.scala:1069)\n", + "\tat org.apache.spark.storage.BlockManager.$anonfun$getRemoteBlock$8(BlockManager.scala:1013)\n", + "\tat scala.Option.orElse(Option.scala:447)\n", + "\tat org.apache.spark.storage.BlockManager.getRemoteBlock(BlockManager.scala:1013)\n", + "\tat org.apache.spark.storage.BlockManager.getRemoteBytes(BlockManager.scala:1151)\n", + "\tat org.apache.spark.scheduler.TaskResultGetter$$anon$3.$anonfun$run$1(TaskResultGetter.scala:88)\n", + "\tat scala.runtime.java8.JFunction0$mcV$sp.apply(JFunction0$mcV$sp.java:23)\n", + "\tat org.apache.spark.util.Utils$.logUncaughtExceptions(Utils.scala:1996)\n", + "\tat org.apache.spark.scheduler.TaskResultGetter$$anon$3.run(TaskResultGetter.scala:63)\n", + "\tat java.util.concurrent.ThreadPoolExecutor.runWorker(ThreadPoolExecutor.java:1149)\n", + "\tat java.util.concurrent.ThreadPoolExecutor$Worker.run(ThreadPoolExecutor.java:624)\n", + "\tat java.lang.Thread.run(Thread.java:750)\n", + "Caused by: java.util.concurrent.ExecutionException: Boxed Error\n", + "\tat scala.concurrent.impl.Promise$.resolver(Promise.scala:87)\n", + "\tat scala.concurrent.impl.Promise$.scala$concurrent$impl$Promise$$resolveTry(Promise.scala:79)\n", + "\tat scala.concurrent.impl.Promise$DefaultPromise.tryComplete(Promise.scala:284)\n", + "\tat scala.concurrent.Promise.complete(Promise.scala:53)\n", + "\tat scala.concurrent.Promise.complete$(Promise.scala:52)\n", + "\tat scala.concurrent.impl.Promise$DefaultPromise.complete(Promise.scala:187)\n", + "\tat scala.concurrent.Promise.failure(Promise.scala:104)\n", + "\tat scala.concurrent.Promise.failure$(Promise.scala:104)\n", + "\tat scala.concurrent.impl.Promise$DefaultPromise.failure(Promise.scala:187)\n", + "\tat org.apache.spark.network.BlockTransferService$$anon$1.onBlockFetchSuccess(BlockTransferService.scala:98)\n", + "\tat org.apache.spark.network.shuffle.RetryingBlockFetcher$RetryingBlockFetchListener.onBlockFetchSuccess(RetryingBlockFetcher.java:216)\n", + "\tat org.apache.spark.network.shuffle.OneForOneBlockFetcher$ChunkCallback.onSuccess(OneForOneBlockFetcher.java:190)\n", + "\tat org.apache.spark.network.client.TransportResponseHandler.handle(TransportResponseHandler.java:171)\n", + "\tat org.apache.spark.network.server.TransportChannelHandler.channelRead0(TransportChannelHandler.java:142)\n", + "\tat org.apache.spark.network.server.TransportChannelHandler.channelRead0(TransportChannelHandler.java:53)\n", + "\tat io.netty.channel.SimpleChannelInboundHandler.channelRead(SimpleChannelInboundHandler.java:99)\n", + "\tat io.netty.channel.AbstractChannelHandlerContext.invokeChannelRead(AbstractChannelHandlerContext.java:379)\n", + "\tat io.netty.channel.AbstractChannelHandlerContext.invokeChannelRead(AbstractChannelHandlerContext.java:365)\n", + "\tat io.netty.channel.AbstractChannelHandlerContext.fireChannelRead(AbstractChannelHandlerContext.java:357)\n", + "\tat io.netty.handler.timeout.IdleStateHandler.channelRead(IdleStateHandler.java:286)\n", + "\tat io.netty.channel.AbstractChannelHandlerContext.invokeChannelRead(AbstractChannelHandlerContext.java:379)\n", + "\tat 
io.netty.channel.AbstractChannelHandlerContext.invokeChannelRead(AbstractChannelHandlerContext.java:365)\n", + "\tat io.netty.channel.AbstractChannelHandlerContext.fireChannelRead(AbstractChannelHandlerContext.java:357)\n", + "\tat io.netty.handler.codec.MessageToMessageDecoder.channelRead(MessageToMessageDecoder.java:103)\n", + "\tat io.netty.channel.AbstractChannelHandlerContext.invokeChannelRead(AbstractChannelHandlerContext.java:379)\n", + "\tat io.netty.channel.AbstractChannelHandlerContext.invokeChannelRead(AbstractChannelHandlerContext.java:365)\n", + "\tat io.netty.channel.AbstractChannelHandlerContext.fireChannelRead(AbstractChannelHandlerContext.java:357)\n", + "\tat org.apache.spark.network.util.TransportFrameDecoder.channelRead(TransportFrameDecoder.java:102)\n", + "\tat io.netty.channel.AbstractChannelHandlerContext.invokeChannelRead(AbstractChannelHandlerContext.java:379)\n", + "\tat io.netty.channel.AbstractChannelHandlerContext.invokeChannelRead(AbstractChannelHandlerContext.java:365)\n", + "\tat io.netty.channel.AbstractChannelHandlerContext.fireChannelRead(AbstractChannelHandlerContext.java:357)\n", + "\tat io.netty.channel.DefaultChannelPipeline$HeadContext.channelRead(DefaultChannelPipeline.java:1410)\n", + "\tat io.netty.channel.AbstractChannelHandlerContext.invokeChannelRead(AbstractChannelHandlerContext.java:379)\n", + "\tat io.netty.channel.AbstractChannelHandlerContext.invokeChannelRead(AbstractChannelHandlerContext.java:365)\n", + "\tat io.netty.channel.DefaultChannelPipeline.fireChannelRead(DefaultChannelPipeline.java:919)\n", + "\tat io.netty.channel.nio.AbstractNioByteChannel$NioByteUnsafe.read(AbstractNioByteChannel.java:163)\n", + "\tat io.netty.channel.nio.NioEventLoop.processSelectedKey(NioEventLoop.java:714)\n", + "\tat io.netty.channel.nio.NioEventLoop.processSelectedKeysOptimized(NioEventLoop.java:650)\n", + "\tat io.netty.channel.nio.NioEventLoop.processSelectedKeys(NioEventLoop.java:576)\n", + "\tat io.netty.channel.nio.NioEventLoop.run(NioEventLoop.java:493)\n", + "\tat io.netty.util.concurrent.SingleThreadEventExecutor$4.run(SingleThreadEventExecutor.java:989)\n", + "\tat io.netty.util.internal.ThreadExecutorMap$2.run(ThreadExecutorMap.java:74)\n", + "\tat io.netty.util.concurrent.FastThreadLocalRunnable.run(FastThreadLocalRunnable.java:30)\n", + "\t... 1 more\n", + "Caused by: java.lang.OutOfMemoryError: Java heap space\n", + "23/11/24 21:04:06 WARN org.apache.spark.scheduler.TaskSetManager: Lost task 533.0 in stage 19.0 (TID 2254) (hub-msca-bdp-dphub-students-test-ridhi-w-1.c.msca-bdp-student-ap.internal executor 7): TaskResultLost (result lost from block manager)\n", + "23/11/24 21:04:08 WARN org.apache.spark.storage.BlockManager: Failed to fetch block after 1 fetch failures. 
Most recent failure cause:\n", + "org.apache.spark.SparkException: Exception thrown in awaitResult: \n", + "\tat org.apache.spark.util.ThreadUtils$.awaitResult(ThreadUtils.scala:301)\n", + "\tat org.apache.spark.network.BlockTransferService.fetchBlockSync(BlockTransferService.scala:103)\n", + "\tat org.apache.spark.storage.BlockManager.fetchRemoteManagedBuffer(BlockManager.scala:1069)\n", + "\tat org.apache.spark.storage.BlockManager.$anonfun$getRemoteBlock$8(BlockManager.scala:1013)\n", + "\tat scala.Option.orElse(Option.scala:447)\n", + "\tat org.apache.spark.storage.BlockManager.getRemoteBlock(BlockManager.scala:1013)\n", + "\tat org.apache.spark.storage.BlockManager.getRemoteBytes(BlockManager.scala:1151)\n", + "\tat org.apache.spark.scheduler.TaskResultGetter$$anon$3.$anonfun$run$1(TaskResultGetter.scala:88)\n", + "\tat scala.runtime.java8.JFunction0$mcV$sp.apply(JFunction0$mcV$sp.java:23)\n", + "\tat org.apache.spark.util.Utils$.logUncaughtExceptions(Utils.scala:1996)\n", + "\tat org.apache.spark.scheduler.TaskResultGetter$$anon$3.run(TaskResultGetter.scala:63)\n", + "\tat java.util.concurrent.ThreadPoolExecutor.runWorker(ThreadPoolExecutor.java:1149)\n", + "\tat java.util.concurrent.ThreadPoolExecutor$Worker.run(ThreadPoolExecutor.java:624)\n", + "\tat java.lang.Thread.run(Thread.java:750)\n", + "Caused by: java.util.concurrent.ExecutionException: Boxed Error\n", + "\tat scala.concurrent.impl.Promise$.resolver(Promise.scala:87)\n", + "\tat scala.concurrent.impl.Promise$.scala$concurrent$impl$Promise$$resolveTry(Promise.scala:79)\n", + "\tat scala.concurrent.impl.Promise$DefaultPromise.tryComplete(Promise.scala:284)\n", + "\tat scala.concurrent.Promise.complete(Promise.scala:53)\n", + "\tat scala.concurrent.Promise.complete$(Promise.scala:52)\n", + "\tat scala.concurrent.impl.Promise$DefaultPromise.complete(Promise.scala:187)\n", + "\tat scala.concurrent.Promise.failure(Promise.scala:104)\n", + "\tat scala.concurrent.Promise.failure$(Promise.scala:104)\n", + "\tat scala.concurrent.impl.Promise$DefaultPromise.failure(Promise.scala:187)\n", + "\tat org.apache.spark.network.BlockTransferService$$anon$1.onBlockFetchSuccess(BlockTransferService.scala:98)\n", + "\tat org.apache.spark.network.shuffle.RetryingBlockFetcher$RetryingBlockFetchListener.onBlockFetchSuccess(RetryingBlockFetcher.java:216)\n", + "\tat org.apache.spark.network.shuffle.OneForOneBlockFetcher$ChunkCallback.onSuccess(OneForOneBlockFetcher.java:190)\n", + "\tat org.apache.spark.network.client.TransportResponseHandler.handle(TransportResponseHandler.java:171)\n", + "\tat org.apache.spark.network.server.TransportChannelHandler.channelRead0(TransportChannelHandler.java:142)\n", + "\tat org.apache.spark.network.server.TransportChannelHandler.channelRead0(TransportChannelHandler.java:53)\n", + "\tat io.netty.channel.SimpleChannelInboundHandler.channelRead(SimpleChannelInboundHandler.java:99)\n", + "\tat io.netty.channel.AbstractChannelHandlerContext.invokeChannelRead(AbstractChannelHandlerContext.java:379)\n", + "\tat io.netty.channel.AbstractChannelHandlerContext.invokeChannelRead(AbstractChannelHandlerContext.java:365)\n", + "\tat io.netty.channel.AbstractChannelHandlerContext.fireChannelRead(AbstractChannelHandlerContext.java:357)\n", + "\tat io.netty.handler.timeout.IdleStateHandler.channelRead(IdleStateHandler.java:286)\n", + "\tat io.netty.channel.AbstractChannelHandlerContext.invokeChannelRead(AbstractChannelHandlerContext.java:379)\n", + "\tat 
io.netty.channel.AbstractChannelHandlerContext.invokeChannelRead(AbstractChannelHandlerContext.java:365)\n", + "\tat io.netty.channel.AbstractChannelHandlerContext.fireChannelRead(AbstractChannelHandlerContext.java:357)\n", + "\tat io.netty.handler.codec.MessageToMessageDecoder.channelRead(MessageToMessageDecoder.java:103)\n", + "\tat io.netty.channel.AbstractChannelHandlerContext.invokeChannelRead(AbstractChannelHandlerContext.java:379)\n", + "\tat io.netty.channel.AbstractChannelHandlerContext.invokeChannelRead(AbstractChannelHandlerContext.java:365)\n", + "\tat io.netty.channel.AbstractChannelHandlerContext.fireChannelRead(AbstractChannelHandlerContext.java:357)\n", + "\tat org.apache.spark.network.util.TransportFrameDecoder.channelRead(TransportFrameDecoder.java:102)\n", + "\tat io.netty.channel.AbstractChannelHandlerContext.invokeChannelRead(AbstractChannelHandlerContext.java:379)\n", + "\tat io.netty.channel.AbstractChannelHandlerContext.invokeChannelRead(AbstractChannelHandlerContext.java:365)\n", + "\tat io.netty.channel.AbstractChannelHandlerContext.fireChannelRead(AbstractChannelHandlerContext.java:357)\n", + "\tat io.netty.channel.DefaultChannelPipeline$HeadContext.channelRead(DefaultChannelPipeline.java:1410)\n", + "\tat io.netty.channel.AbstractChannelHandlerContext.invokeChannelRead(AbstractChannelHandlerContext.java:379)\n", + "\tat io.netty.channel.AbstractChannelHandlerContext.invokeChannelRead(AbstractChannelHandlerContext.java:365)\n", + "\tat io.netty.channel.DefaultChannelPipeline.fireChannelRead(DefaultChannelPipeline.java:919)\n", + "\tat io.netty.channel.nio.AbstractNioByteChannel$NioByteUnsafe.read(AbstractNioByteChannel.java:163)\n", + "\tat io.netty.channel.nio.NioEventLoop.processSelectedKey(NioEventLoop.java:714)\n", + "\tat io.netty.channel.nio.NioEventLoop.processSelectedKeysOptimized(NioEventLoop.java:650)\n", + "\tat io.netty.channel.nio.NioEventLoop.processSelectedKeys(NioEventLoop.java:576)\n", + "\tat io.netty.channel.nio.NioEventLoop.run(NioEventLoop.java:493)\n", + "\tat io.netty.util.concurrent.SingleThreadEventExecutor$4.run(SingleThreadEventExecutor.java:989)\n", + "\tat io.netty.util.internal.ThreadExecutorMap$2.run(ThreadExecutorMap.java:74)\n", + "\tat io.netty.util.concurrent.FastThreadLocalRunnable.run(FastThreadLocalRunnable.java:30)\n", + "\t... 1 more\n", + "Caused by: java.lang.OutOfMemoryError: Java heap space\n", + "23/11/24 21:04:08 WARN org.apache.spark.scheduler.TaskSetManager: Lost task 526.0 in stage 19.0 (TID 2247) (hub-msca-bdp-dphub-students-test-ridhi-w-0.c.msca-bdp-student-ap.internal executor 3): TaskResultLost (result lost from block manager)\n", + "23/11/24 21:04:09 WARN org.apache.spark.storage.BlockManager: Failed to fetch block after 1 fetch failures. 
Most recent failure cause:\n", + "org.apache.spark.SparkException: Exception thrown in awaitResult: \n", + "\tat org.apache.spark.util.ThreadUtils$.awaitResult(ThreadUtils.scala:301)\n", + "\tat org.apache.spark.network.BlockTransferService.fetchBlockSync(BlockTransferService.scala:103)\n", + "\tat org.apache.spark.storage.BlockManager.fetchRemoteManagedBuffer(BlockManager.scala:1069)\n", + "\tat org.apache.spark.storage.BlockManager.$anonfun$getRemoteBlock$8(BlockManager.scala:1013)\n", + "\tat scala.Option.orElse(Option.scala:447)\n", + "\tat org.apache.spark.storage.BlockManager.getRemoteBlock(BlockManager.scala:1013)\n", + "\tat org.apache.spark.storage.BlockManager.getRemoteBytes(BlockManager.scala:1151)\n", + "\tat org.apache.spark.scheduler.TaskResultGetter$$anon$3.$anonfun$run$1(TaskResultGetter.scala:88)\n", + "\tat scala.runtime.java8.JFunction0$mcV$sp.apply(JFunction0$mcV$sp.java:23)\n", + "\tat org.apache.spark.util.Utils$.logUncaughtExceptions(Utils.scala:1996)\n", + "\tat org.apache.spark.scheduler.TaskResultGetter$$anon$3.run(TaskResultGetter.scala:63)\n", + "\tat java.util.concurrent.ThreadPoolExecutor.runWorker(ThreadPoolExecutor.java:1149)\n", + "\tat java.util.concurrent.ThreadPoolExecutor$Worker.run(ThreadPoolExecutor.java:624)\n", + "\tat java.lang.Thread.run(Thread.java:750)\n", + "Caused by: java.util.concurrent.ExecutionException: Boxed Error\n", + "\tat scala.concurrent.impl.Promise$.resolver(Promise.scala:87)\n", + "\tat scala.concurrent.impl.Promise$.scala$concurrent$impl$Promise$$resolveTry(Promise.scala:79)\n", + "\tat scala.concurrent.impl.Promise$DefaultPromise.tryComplete(Promise.scala:284)\n", + "\tat scala.concurrent.Promise.complete(Promise.scala:53)\n", + "\tat scala.concurrent.Promise.complete$(Promise.scala:52)\n", + "\tat scala.concurrent.impl.Promise$DefaultPromise.complete(Promise.scala:187)\n", + "\tat scala.concurrent.Promise.failure(Promise.scala:104)\n", + "\tat scala.concurrent.Promise.failure$(Promise.scala:104)\n", + "\tat scala.concurrent.impl.Promise$DefaultPromise.failure(Promise.scala:187)\n", + "\tat org.apache.spark.network.BlockTransferService$$anon$1.onBlockFetchSuccess(BlockTransferService.scala:98)\n", + "\tat org.apache.spark.network.shuffle.RetryingBlockFetcher$RetryingBlockFetchListener.onBlockFetchSuccess(RetryingBlockFetcher.java:216)\n", + "\tat org.apache.spark.network.shuffle.OneForOneBlockFetcher$ChunkCallback.onSuccess(OneForOneBlockFetcher.java:190)\n", + "\tat org.apache.spark.network.client.TransportResponseHandler.handle(TransportResponseHandler.java:171)\n", + "\tat org.apache.spark.network.server.TransportChannelHandler.channelRead0(TransportChannelHandler.java:142)\n", + "\tat org.apache.spark.network.server.TransportChannelHandler.channelRead0(TransportChannelHandler.java:53)\n", + "\tat io.netty.channel.SimpleChannelInboundHandler.channelRead(SimpleChannelInboundHandler.java:99)\n", + "\tat io.netty.channel.AbstractChannelHandlerContext.invokeChannelRead(AbstractChannelHandlerContext.java:379)\n", + "\tat io.netty.channel.AbstractChannelHandlerContext.invokeChannelRead(AbstractChannelHandlerContext.java:365)\n", + "\tat io.netty.channel.AbstractChannelHandlerContext.fireChannelRead(AbstractChannelHandlerContext.java:357)\n", + "\tat io.netty.handler.timeout.IdleStateHandler.channelRead(IdleStateHandler.java:286)\n", + "\tat io.netty.channel.AbstractChannelHandlerContext.invokeChannelRead(AbstractChannelHandlerContext.java:379)\n", + "\tat 
io.netty.channel.AbstractChannelHandlerContext.invokeChannelRead(AbstractChannelHandlerContext.java:365)\n", + "\tat io.netty.channel.AbstractChannelHandlerContext.fireChannelRead(AbstractChannelHandlerContext.java:357)\n", + "\tat io.netty.handler.codec.MessageToMessageDecoder.channelRead(MessageToMessageDecoder.java:103)\n", + "\tat io.netty.channel.AbstractChannelHandlerContext.invokeChannelRead(AbstractChannelHandlerContext.java:379)\n", + "\tat io.netty.channel.AbstractChannelHandlerContext.invokeChannelRead(AbstractChannelHandlerContext.java:365)\n", + "\tat io.netty.channel.AbstractChannelHandlerContext.fireChannelRead(AbstractChannelHandlerContext.java:357)\n", + "\tat org.apache.spark.network.util.TransportFrameDecoder.channelRead(TransportFrameDecoder.java:102)\n", + "\tat io.netty.channel.AbstractChannelHandlerContext.invokeChannelRead(AbstractChannelHandlerContext.java:379)\n", + "\tat io.netty.channel.AbstractChannelHandlerContext.invokeChannelRead(AbstractChannelHandlerContext.java:365)\n", + "\tat io.netty.channel.AbstractChannelHandlerContext.fireChannelRead(AbstractChannelHandlerContext.java:357)\n", + "\tat io.netty.channel.DefaultChannelPipeline$HeadContext.channelRead(DefaultChannelPipeline.java:1410)\n", + "\tat io.netty.channel.AbstractChannelHandlerContext.invokeChannelRead(AbstractChannelHandlerContext.java:379)\n", + "\tat io.netty.channel.AbstractChannelHandlerContext.invokeChannelRead(AbstractChannelHandlerContext.java:365)\n", + "\tat io.netty.channel.DefaultChannelPipeline.fireChannelRead(DefaultChannelPipeline.java:919)\n", + "\tat io.netty.channel.nio.AbstractNioByteChannel$NioByteUnsafe.read(AbstractNioByteChannel.java:163)\n", + "\tat io.netty.channel.nio.NioEventLoop.processSelectedKey(NioEventLoop.java:714)\n", + "\tat io.netty.channel.nio.NioEventLoop.processSelectedKeysOptimized(NioEventLoop.java:650)\n", + "\tat io.netty.channel.nio.NioEventLoop.processSelectedKeys(NioEventLoop.java:576)\n", + "\tat io.netty.channel.nio.NioEventLoop.run(NioEventLoop.java:493)\n", + "\tat io.netty.util.concurrent.SingleThreadEventExecutor$4.run(SingleThreadEventExecutor.java:989)\n", + "\tat io.netty.util.internal.ThreadExecutorMap$2.run(ThreadExecutorMap.java:74)\n", + "\tat io.netty.util.concurrent.FastThreadLocalRunnable.run(FastThreadLocalRunnable.java:30)\n", + "\t... 1 more\n", + "Caused by: java.lang.OutOfMemoryError: Java heap space\n", + "23/11/24 21:04:09 WARN org.apache.spark.scheduler.TaskSetManager: Lost task 428.1 in stage 19.0 (TID 2256) (hub-msca-bdp-dphub-students-test-ridhi-w-1.c.msca-bdp-student-ap.internal executor 7): TaskResultLost (result lost from block manager)\n", + "23/11/24 21:04:12 WARN org.apache.spark.storage.BlockManager: Failed to fetch block after 1 fetch failures. 
Most recent failure cause:\n", + "org.apache.spark.SparkException: Exception thrown in awaitResult: \n", + "\tat org.apache.spark.util.ThreadUtils$.awaitResult(ThreadUtils.scala:301)\n", + "\tat org.apache.spark.network.BlockTransferService.fetchBlockSync(BlockTransferService.scala:103)\n", + "\tat org.apache.spark.storage.BlockManager.fetchRemoteManagedBuffer(BlockManager.scala:1069)\n", + "\tat org.apache.spark.storage.BlockManager.$anonfun$getRemoteBlock$8(BlockManager.scala:1013)\n", + "\tat scala.Option.orElse(Option.scala:447)\n", + "\tat org.apache.spark.storage.BlockManager.getRemoteBlock(BlockManager.scala:1013)\n", + "\tat org.apache.spark.storage.BlockManager.getRemoteBytes(BlockManager.scala:1151)\n", + "\tat org.apache.spark.scheduler.TaskResultGetter$$anon$3.$anonfun$run$1(TaskResultGetter.scala:88)\n", + "\tat scala.runtime.java8.JFunction0$mcV$sp.apply(JFunction0$mcV$sp.java:23)\n", + "\tat org.apache.spark.util.Utils$.logUncaughtExceptions(Utils.scala:1996)\n", + "\tat org.apache.spark.scheduler.TaskResultGetter$$anon$3.run(TaskResultGetter.scala:63)\n", + "\tat java.util.concurrent.ThreadPoolExecutor.runWorker(ThreadPoolExecutor.java:1149)\n", + "\tat java.util.concurrent.ThreadPoolExecutor$Worker.run(ThreadPoolExecutor.java:624)\n", + "\tat java.lang.Thread.run(Thread.java:750)\n", + "Caused by: java.util.concurrent.ExecutionException: Boxed Error\n", + "\tat scala.concurrent.impl.Promise$.resolver(Promise.scala:87)\n", + "\tat scala.concurrent.impl.Promise$.scala$concurrent$impl$Promise$$resolveTry(Promise.scala:79)\n", + "\tat scala.concurrent.impl.Promise$DefaultPromise.tryComplete(Promise.scala:284)\n", + "\tat scala.concurrent.Promise.complete(Promise.scala:53)\n", + "\tat scala.concurrent.Promise.complete$(Promise.scala:52)\n", + "\tat scala.concurrent.impl.Promise$DefaultPromise.complete(Promise.scala:187)\n", + "\tat scala.concurrent.Promise.failure(Promise.scala:104)\n", + "\tat scala.concurrent.Promise.failure$(Promise.scala:104)\n", + "\tat scala.concurrent.impl.Promise$DefaultPromise.failure(Promise.scala:187)\n", + "\tat org.apache.spark.network.BlockTransferService$$anon$1.onBlockFetchSuccess(BlockTransferService.scala:98)\n", + "\tat org.apache.spark.network.shuffle.RetryingBlockFetcher$RetryingBlockFetchListener.onBlockFetchSuccess(RetryingBlockFetcher.java:216)\n", + "\tat org.apache.spark.network.shuffle.OneForOneBlockFetcher$ChunkCallback.onSuccess(OneForOneBlockFetcher.java:190)\n", + "\tat org.apache.spark.network.client.TransportResponseHandler.handle(TransportResponseHandler.java:171)\n", + "\tat org.apache.spark.network.server.TransportChannelHandler.channelRead0(TransportChannelHandler.java:142)\n", + "\tat org.apache.spark.network.server.TransportChannelHandler.channelRead0(TransportChannelHandler.java:53)\n", + "\tat io.netty.channel.SimpleChannelInboundHandler.channelRead(SimpleChannelInboundHandler.java:99)\n", + "\tat io.netty.channel.AbstractChannelHandlerContext.invokeChannelRead(AbstractChannelHandlerContext.java:379)\n", + "\tat io.netty.channel.AbstractChannelHandlerContext.invokeChannelRead(AbstractChannelHandlerContext.java:365)\n", + "\tat io.netty.channel.AbstractChannelHandlerContext.fireChannelRead(AbstractChannelHandlerContext.java:357)\n", + "\tat io.netty.handler.timeout.IdleStateHandler.channelRead(IdleStateHandler.java:286)\n", + "\tat io.netty.channel.AbstractChannelHandlerContext.invokeChannelRead(AbstractChannelHandlerContext.java:379)\n", + "\tat 
io.netty.channel.AbstractChannelHandlerContext.invokeChannelRead(AbstractChannelHandlerContext.java:365)\n", + "\tat io.netty.channel.AbstractChannelHandlerContext.fireChannelRead(AbstractChannelHandlerContext.java:357)\n", + "\tat io.netty.handler.codec.MessageToMessageDecoder.channelRead(MessageToMessageDecoder.java:103)\n", + "\tat io.netty.channel.AbstractChannelHandlerContext.invokeChannelRead(AbstractChannelHandlerContext.java:379)\n", + "\tat io.netty.channel.AbstractChannelHandlerContext.invokeChannelRead(AbstractChannelHandlerContext.java:365)\n", + "\tat io.netty.channel.AbstractChannelHandlerContext.fireChannelRead(AbstractChannelHandlerContext.java:357)\n", + "\tat org.apache.spark.network.util.TransportFrameDecoder.channelRead(TransportFrameDecoder.java:102)\n", + "\tat io.netty.channel.AbstractChannelHandlerContext.invokeChannelRead(AbstractChannelHandlerContext.java:379)\n", + "\tat io.netty.channel.AbstractChannelHandlerContext.invokeChannelRead(AbstractChannelHandlerContext.java:365)\n", + "\tat io.netty.channel.AbstractChannelHandlerContext.fireChannelRead(AbstractChannelHandlerContext.java:357)\n", + "\tat io.netty.channel.DefaultChannelPipeline$HeadContext.channelRead(DefaultChannelPipeline.java:1410)\n", + "\tat io.netty.channel.AbstractChannelHandlerContext.invokeChannelRead(AbstractChannelHandlerContext.java:379)\n", + "\tat io.netty.channel.AbstractChannelHandlerContext.invokeChannelRead(AbstractChannelHandlerContext.java:365)\n", + "\tat io.netty.channel.DefaultChannelPipeline.fireChannelRead(DefaultChannelPipeline.java:919)\n", + "\tat io.netty.channel.nio.AbstractNioByteChannel$NioByteUnsafe.read(AbstractNioByteChannel.java:163)\n", + "\tat io.netty.channel.nio.NioEventLoop.processSelectedKey(NioEventLoop.java:714)\n", + "\tat io.netty.channel.nio.NioEventLoop.processSelectedKeysOptimized(NioEventLoop.java:650)\n", + "\tat io.netty.channel.nio.NioEventLoop.processSelectedKeys(NioEventLoop.java:576)\n", + "\tat io.netty.channel.nio.NioEventLoop.run(NioEventLoop.java:493)\n", + "\tat io.netty.util.concurrent.SingleThreadEventExecutor$4.run(SingleThreadEventExecutor.java:989)\n", + "\tat io.netty.util.internal.ThreadExecutorMap$2.run(ThreadExecutorMap.java:74)\n", + "\tat io.netty.util.concurrent.FastThreadLocalRunnable.run(FastThreadLocalRunnable.java:30)\n", + "\t... 1 more\n", + "Caused by: java.lang.OutOfMemoryError: Java heap space\n", + "23/11/24 21:04:12 WARN org.apache.spark.scheduler.TaskSetManager: Lost task 535.0 in stage 19.0 (TID 2257) (hub-msca-bdp-dphub-students-test-ridhi-w-1.c.msca-bdp-student-ap.internal executor 7): TaskResultLost (result lost from block manager)\n", + "23/11/24 21:04:13 WARN org.apache.spark.storage.BlockManager: Failed to fetch block after 1 fetch failures. 
Most recent failure cause:\n", + "org.apache.spark.SparkException: Exception thrown in awaitResult: \n", + "\tat org.apache.spark.util.ThreadUtils$.awaitResult(ThreadUtils.scala:301)\n", + "\tat org.apache.spark.network.BlockTransferService.fetchBlockSync(BlockTransferService.scala:103)\n", + "\tat org.apache.spark.storage.BlockManager.fetchRemoteManagedBuffer(BlockManager.scala:1069)\n", + "\tat org.apache.spark.storage.BlockManager.$anonfun$getRemoteBlock$8(BlockManager.scala:1013)\n", + "\tat scala.Option.orElse(Option.scala:447)\n", + "\tat org.apache.spark.storage.BlockManager.getRemoteBlock(BlockManager.scala:1013)\n", + "\tat org.apache.spark.storage.BlockManager.getRemoteBytes(BlockManager.scala:1151)\n", + "\tat org.apache.spark.scheduler.TaskResultGetter$$anon$3.$anonfun$run$1(TaskResultGetter.scala:88)\n", + "\tat scala.runtime.java8.JFunction0$mcV$sp.apply(JFunction0$mcV$sp.java:23)\n", + "\tat org.apache.spark.util.Utils$.logUncaughtExceptions(Utils.scala:1996)\n", + "\tat org.apache.spark.scheduler.TaskResultGetter$$anon$3.run(TaskResultGetter.scala:63)\n", + "\tat java.util.concurrent.ThreadPoolExecutor.runWorker(ThreadPoolExecutor.java:1149)\n", + "\tat java.util.concurrent.ThreadPoolExecutor$Worker.run(ThreadPoolExecutor.java:624)\n", + "\tat java.lang.Thread.run(Thread.java:750)\n", + "Caused by: java.util.concurrent.ExecutionException: Boxed Error\n", + "\tat scala.concurrent.impl.Promise$.resolver(Promise.scala:87)\n", + "\tat scala.concurrent.impl.Promise$.scala$concurrent$impl$Promise$$resolveTry(Promise.scala:79)\n", + "\tat scala.concurrent.impl.Promise$DefaultPromise.tryComplete(Promise.scala:284)\n", + "\tat scala.concurrent.Promise.complete(Promise.scala:53)\n", + "\tat scala.concurrent.Promise.complete$(Promise.scala:52)\n", + "\tat scala.concurrent.impl.Promise$DefaultPromise.complete(Promise.scala:187)\n", + "\tat scala.concurrent.Promise.failure(Promise.scala:104)\n", + "\tat scala.concurrent.Promise.failure$(Promise.scala:104)\n", + "\tat scala.concurrent.impl.Promise$DefaultPromise.failure(Promise.scala:187)\n", + "\tat org.apache.spark.network.BlockTransferService$$anon$1.onBlockFetchSuccess(BlockTransferService.scala:98)\n", + "\tat org.apache.spark.network.shuffle.RetryingBlockFetcher$RetryingBlockFetchListener.onBlockFetchSuccess(RetryingBlockFetcher.java:216)\n", + "\tat org.apache.spark.network.shuffle.OneForOneBlockFetcher$ChunkCallback.onSuccess(OneForOneBlockFetcher.java:190)\n", + "\tat org.apache.spark.network.client.TransportResponseHandler.handle(TransportResponseHandler.java:171)\n", + "\tat org.apache.spark.network.server.TransportChannelHandler.channelRead0(TransportChannelHandler.java:142)\n", + "\tat org.apache.spark.network.server.TransportChannelHandler.channelRead0(TransportChannelHandler.java:53)\n", + "\tat io.netty.channel.SimpleChannelInboundHandler.channelRead(SimpleChannelInboundHandler.java:99)\n", + "\tat io.netty.channel.AbstractChannelHandlerContext.invokeChannelRead(AbstractChannelHandlerContext.java:379)\n", + "\tat io.netty.channel.AbstractChannelHandlerContext.invokeChannelRead(AbstractChannelHandlerContext.java:365)\n", + "\tat io.netty.channel.AbstractChannelHandlerContext.fireChannelRead(AbstractChannelHandlerContext.java:357)\n", + "\tat io.netty.handler.timeout.IdleStateHandler.channelRead(IdleStateHandler.java:286)\n", + "\tat io.netty.channel.AbstractChannelHandlerContext.invokeChannelRead(AbstractChannelHandlerContext.java:379)\n", + "\tat 
io.netty.channel.AbstractChannelHandlerContext.invokeChannelRead(AbstractChannelHandlerContext.java:365)\n", + "\tat io.netty.channel.AbstractChannelHandlerContext.fireChannelRead(AbstractChannelHandlerContext.java:357)\n", + "\tat io.netty.handler.codec.MessageToMessageDecoder.channelRead(MessageToMessageDecoder.java:103)\n", + "\tat io.netty.channel.AbstractChannelHandlerContext.invokeChannelRead(AbstractChannelHandlerContext.java:379)\n", + "\tat io.netty.channel.AbstractChannelHandlerContext.invokeChannelRead(AbstractChannelHandlerContext.java:365)\n", + "\tat io.netty.channel.AbstractChannelHandlerContext.fireChannelRead(AbstractChannelHandlerContext.java:357)\n", + "\tat org.apache.spark.network.util.TransportFrameDecoder.channelRead(TransportFrameDecoder.java:102)\n", + "\tat io.netty.channel.AbstractChannelHandlerContext.invokeChannelRead(AbstractChannelHandlerContext.java:379)\n", + "\tat io.netty.channel.AbstractChannelHandlerContext.invokeChannelRead(AbstractChannelHandlerContext.java:365)\n", + "\tat io.netty.channel.AbstractChannelHandlerContext.fireChannelRead(AbstractChannelHandlerContext.java:357)\n", + "\tat io.netty.channel.DefaultChannelPipeline$HeadContext.channelRead(DefaultChannelPipeline.java:1410)\n", + "\tat io.netty.channel.AbstractChannelHandlerContext.invokeChannelRead(AbstractChannelHandlerContext.java:379)\n", + "\tat io.netty.channel.AbstractChannelHandlerContext.invokeChannelRead(AbstractChannelHandlerContext.java:365)\n", + "\tat io.netty.channel.DefaultChannelPipeline.fireChannelRead(DefaultChannelPipeline.java:919)\n", + "\tat io.netty.channel.nio.AbstractNioByteChannel$NioByteUnsafe.read(AbstractNioByteChannel.java:163)\n", + "\tat io.netty.channel.nio.NioEventLoop.processSelectedKey(NioEventLoop.java:714)\n", + "\tat io.netty.channel.nio.NioEventLoop.processSelectedKeysOptimized(NioEventLoop.java:650)\n", + "\tat io.netty.channel.nio.NioEventLoop.processSelectedKeys(NioEventLoop.java:576)\n", + "\tat io.netty.channel.nio.NioEventLoop.run(NioEventLoop.java:493)\n", + "\tat io.netty.util.concurrent.SingleThreadEventExecutor$4.run(SingleThreadEventExecutor.java:989)\n", + "\tat io.netty.util.internal.ThreadExecutorMap$2.run(ThreadExecutorMap.java:74)\n", + "\tat io.netty.util.concurrent.FastThreadLocalRunnable.run(FastThreadLocalRunnable.java:30)\n", + "\t... 1 more\n", + "Caused by: java.lang.OutOfMemoryError: Java heap space\n", + "23/11/24 21:04:13 WARN org.apache.spark.scheduler.TaskSetManager: Lost task 525.0 in stage 19.0 (TID 2246) (hub-msca-bdp-dphub-students-test-ridhi-w-0.c.msca-bdp-student-ap.internal executor 3): TaskResultLost (result lost from block manager)\n", + "23/11/24 21:04:16 WARN org.apache.spark.storage.BlockManager: Failed to fetch block after 1 fetch failures. 
Most recent failure cause:\n", + "org.apache.spark.SparkException: Exception thrown in awaitResult: \n", + "\tat org.apache.spark.util.ThreadUtils$.awaitResult(ThreadUtils.scala:301)\n", + "\tat org.apache.spark.network.BlockTransferService.fetchBlockSync(BlockTransferService.scala:103)\n", + "\tat org.apache.spark.storage.BlockManager.fetchRemoteManagedBuffer(BlockManager.scala:1069)\n", + "\tat org.apache.spark.storage.BlockManager.$anonfun$getRemoteBlock$8(BlockManager.scala:1013)\n", + "\tat scala.Option.orElse(Option.scala:447)\n", + "\tat org.apache.spark.storage.BlockManager.getRemoteBlock(BlockManager.scala:1013)\n", + "\tat org.apache.spark.storage.BlockManager.getRemoteBytes(BlockManager.scala:1151)\n", + "\tat org.apache.spark.scheduler.TaskResultGetter$$anon$3.$anonfun$run$1(TaskResultGetter.scala:88)\n", + "\tat scala.runtime.java8.JFunction0$mcV$sp.apply(JFunction0$mcV$sp.java:23)\n", + "\tat org.apache.spark.util.Utils$.logUncaughtExceptions(Utils.scala:1996)\n", + "\tat org.apache.spark.scheduler.TaskResultGetter$$anon$3.run(TaskResultGetter.scala:63)\n", + "\tat java.util.concurrent.ThreadPoolExecutor.runWorker(ThreadPoolExecutor.java:1149)\n", + "\tat java.util.concurrent.ThreadPoolExecutor$Worker.run(ThreadPoolExecutor.java:624)\n", + "\tat java.lang.Thread.run(Thread.java:750)\n", + "Caused by: java.util.concurrent.ExecutionException: Boxed Error\n", + "\tat scala.concurrent.impl.Promise$.resolver(Promise.scala:87)\n", + "\tat scala.concurrent.impl.Promise$.scala$concurrent$impl$Promise$$resolveTry(Promise.scala:79)\n", + "\tat scala.concurrent.impl.Promise$DefaultPromise.tryComplete(Promise.scala:284)\n", + "\tat scala.concurrent.Promise.complete(Promise.scala:53)\n", + "\tat scala.concurrent.Promise.complete$(Promise.scala:52)\n", + "\tat scala.concurrent.impl.Promise$DefaultPromise.complete(Promise.scala:187)\n", + "\tat scala.concurrent.Promise.failure(Promise.scala:104)\n", + "\tat scala.concurrent.Promise.failure$(Promise.scala:104)\n", + "\tat scala.concurrent.impl.Promise$DefaultPromise.failure(Promise.scala:187)\n", + "\tat org.apache.spark.network.BlockTransferService$$anon$1.onBlockFetchSuccess(BlockTransferService.scala:98)\n", + "\tat org.apache.spark.network.shuffle.RetryingBlockFetcher$RetryingBlockFetchListener.onBlockFetchSuccess(RetryingBlockFetcher.java:216)\n", + "\tat org.apache.spark.network.shuffle.OneForOneBlockFetcher$ChunkCallback.onSuccess(OneForOneBlockFetcher.java:190)\n", + "\tat org.apache.spark.network.client.TransportResponseHandler.handle(TransportResponseHandler.java:171)\n", + "\tat org.apache.spark.network.server.TransportChannelHandler.channelRead0(TransportChannelHandler.java:142)\n", + "\tat org.apache.spark.network.server.TransportChannelHandler.channelRead0(TransportChannelHandler.java:53)\n", + "\tat io.netty.channel.SimpleChannelInboundHandler.channelRead(SimpleChannelInboundHandler.java:99)\n", + "\tat io.netty.channel.AbstractChannelHandlerContext.invokeChannelRead(AbstractChannelHandlerContext.java:379)\n", + "\tat io.netty.channel.AbstractChannelHandlerContext.invokeChannelRead(AbstractChannelHandlerContext.java:365)\n", + "\tat io.netty.channel.AbstractChannelHandlerContext.fireChannelRead(AbstractChannelHandlerContext.java:357)\n", + "\tat io.netty.handler.timeout.IdleStateHandler.channelRead(IdleStateHandler.java:286)\n", + "\tat io.netty.channel.AbstractChannelHandlerContext.invokeChannelRead(AbstractChannelHandlerContext.java:379)\n", + "\tat 
io.netty.channel.AbstractChannelHandlerContext.invokeChannelRead(AbstractChannelHandlerContext.java:365)\n", + "\tat io.netty.channel.AbstractChannelHandlerContext.fireChannelRead(AbstractChannelHandlerContext.java:357)\n", + "\tat io.netty.handler.codec.MessageToMessageDecoder.channelRead(MessageToMessageDecoder.java:103)\n", + "\tat io.netty.channel.AbstractChannelHandlerContext.invokeChannelRead(AbstractChannelHandlerContext.java:379)\n", + "\tat io.netty.channel.AbstractChannelHandlerContext.invokeChannelRead(AbstractChannelHandlerContext.java:365)\n", + "\tat io.netty.channel.AbstractChannelHandlerContext.fireChannelRead(AbstractChannelHandlerContext.java:357)\n", + "\tat org.apache.spark.network.util.TransportFrameDecoder.channelRead(TransportFrameDecoder.java:102)\n", + "\tat io.netty.channel.AbstractChannelHandlerContext.invokeChannelRead(AbstractChannelHandlerContext.java:379)\n", + "\tat io.netty.channel.AbstractChannelHandlerContext.invokeChannelRead(AbstractChannelHandlerContext.java:365)\n", + "\tat io.netty.channel.AbstractChannelHandlerContext.fireChannelRead(AbstractChannelHandlerContext.java:357)\n", + "\tat io.netty.channel.DefaultChannelPipeline$HeadContext.channelRead(DefaultChannelPipeline.java:1410)\n", + "\tat io.netty.channel.AbstractChannelHandlerContext.invokeChannelRead(AbstractChannelHandlerContext.java:379)\n", + "\tat io.netty.channel.AbstractChannelHandlerContext.invokeChannelRead(AbstractChannelHandlerContext.java:365)\n", + "\tat io.netty.channel.DefaultChannelPipeline.fireChannelRead(DefaultChannelPipeline.java:919)\n", + "\tat io.netty.channel.nio.AbstractNioByteChannel$NioByteUnsafe.read(AbstractNioByteChannel.java:163)\n", + "\tat io.netty.channel.nio.NioEventLoop.processSelectedKey(NioEventLoop.java:714)\n", + "\tat io.netty.channel.nio.NioEventLoop.processSelectedKeysOptimized(NioEventLoop.java:650)\n", + "\tat io.netty.channel.nio.NioEventLoop.processSelectedKeys(NioEventLoop.java:576)\n", + "\tat io.netty.channel.nio.NioEventLoop.run(NioEventLoop.java:493)\n", + "\tat io.netty.util.concurrent.SingleThreadEventExecutor$4.run(SingleThreadEventExecutor.java:989)\n", + "\tat io.netty.util.internal.ThreadExecutorMap$2.run(ThreadExecutorMap.java:74)\n", + "\tat io.netty.util.concurrent.FastThreadLocalRunnable.run(FastThreadLocalRunnable.java:30)\n", + "\t... 1 more\n", + "Caused by: java.lang.OutOfMemoryError: Java heap space\n", + "23/11/24 21:04:16 WARN org.apache.spark.scheduler.TaskSetManager: Lost task 543.0 in stage 19.0 (TID 2265) (hub-msca-bdp-dphub-students-test-ridhi-w-1.c.msca-bdp-student-ap.internal executor 7): TaskResultLost (result lost from block manager)\n", + "23/11/24 21:04:18 WARN org.apache.spark.storage.BlockManager: Failed to fetch block after 1 fetch failures. 
Most recent failure cause:\n", + "org.apache.spark.SparkException: Exception thrown in awaitResult: \n", + "\tat org.apache.spark.util.ThreadUtils$.awaitResult(ThreadUtils.scala:301)\n", + "\tat org.apache.spark.network.BlockTransferService.fetchBlockSync(BlockTransferService.scala:103)\n", + "\tat org.apache.spark.storage.BlockManager.fetchRemoteManagedBuffer(BlockManager.scala:1069)\n", + "\tat org.apache.spark.storage.BlockManager.$anonfun$getRemoteBlock$8(BlockManager.scala:1013)\n", + "\tat scala.Option.orElse(Option.scala:447)\n", + "\tat org.apache.spark.storage.BlockManager.getRemoteBlock(BlockManager.scala:1013)\n", + "\tat org.apache.spark.storage.BlockManager.getRemoteBytes(BlockManager.scala:1151)\n", + "\tat org.apache.spark.scheduler.TaskResultGetter$$anon$3.$anonfun$run$1(TaskResultGetter.scala:88)\n", + "\tat scala.runtime.java8.JFunction0$mcV$sp.apply(JFunction0$mcV$sp.java:23)\n", + "\tat org.apache.spark.util.Utils$.logUncaughtExceptions(Utils.scala:1996)\n", + "\tat org.apache.spark.scheduler.TaskResultGetter$$anon$3.run(TaskResultGetter.scala:63)\n", + "\tat java.util.concurrent.ThreadPoolExecutor.runWorker(ThreadPoolExecutor.java:1149)\n", + "\tat java.util.concurrent.ThreadPoolExecutor$Worker.run(ThreadPoolExecutor.java:624)\n", + "\tat java.lang.Thread.run(Thread.java:750)\n", + "Caused by: java.util.concurrent.ExecutionException: Boxed Error\n", + "\tat scala.concurrent.impl.Promise$.resolver(Promise.scala:87)\n", + "\tat scala.concurrent.impl.Promise$.scala$concurrent$impl$Promise$$resolveTry(Promise.scala:79)\n", + "\tat scala.concurrent.impl.Promise$DefaultPromise.tryComplete(Promise.scala:284)\n", + "\tat scala.concurrent.Promise.complete(Promise.scala:53)\n", + "\tat scala.concurrent.Promise.complete$(Promise.scala:52)\n", + "\tat scala.concurrent.impl.Promise$DefaultPromise.complete(Promise.scala:187)\n", + "\tat scala.concurrent.Promise.failure(Promise.scala:104)\n", + "\tat scala.concurrent.Promise.failure$(Promise.scala:104)\n", + "\tat scala.concurrent.impl.Promise$DefaultPromise.failure(Promise.scala:187)\n", + "\tat org.apache.spark.network.BlockTransferService$$anon$1.onBlockFetchSuccess(BlockTransferService.scala:98)\n", + "\tat org.apache.spark.network.shuffle.RetryingBlockFetcher$RetryingBlockFetchListener.onBlockFetchSuccess(RetryingBlockFetcher.java:216)\n", + "\tat org.apache.spark.network.shuffle.OneForOneBlockFetcher$ChunkCallback.onSuccess(OneForOneBlockFetcher.java:190)\n", + "\tat org.apache.spark.network.client.TransportResponseHandler.handle(TransportResponseHandler.java:171)\n", + "\tat org.apache.spark.network.server.TransportChannelHandler.channelRead0(TransportChannelHandler.java:142)\n", + "\tat org.apache.spark.network.server.TransportChannelHandler.channelRead0(TransportChannelHandler.java:53)\n", + "\tat io.netty.channel.SimpleChannelInboundHandler.channelRead(SimpleChannelInboundHandler.java:99)\n", + "\tat io.netty.channel.AbstractChannelHandlerContext.invokeChannelRead(AbstractChannelHandlerContext.java:379)\n", + "\tat io.netty.channel.AbstractChannelHandlerContext.invokeChannelRead(AbstractChannelHandlerContext.java:365)\n", + "\tat io.netty.channel.AbstractChannelHandlerContext.fireChannelRead(AbstractChannelHandlerContext.java:357)\n", + "\tat io.netty.handler.timeout.IdleStateHandler.channelRead(IdleStateHandler.java:286)\n", + "\tat io.netty.channel.AbstractChannelHandlerContext.invokeChannelRead(AbstractChannelHandlerContext.java:379)\n", + "\tat 
io.netty.channel.AbstractChannelHandlerContext.invokeChannelRead(AbstractChannelHandlerContext.java:365)\n", + "\tat io.netty.channel.AbstractChannelHandlerContext.fireChannelRead(AbstractChannelHandlerContext.java:357)\n", + "\tat io.netty.handler.codec.MessageToMessageDecoder.channelRead(MessageToMessageDecoder.java:103)\n", + "\tat io.netty.channel.AbstractChannelHandlerContext.invokeChannelRead(AbstractChannelHandlerContext.java:379)\n", + "\tat io.netty.channel.AbstractChannelHandlerContext.invokeChannelRead(AbstractChannelHandlerContext.java:365)\n", + "\tat io.netty.channel.AbstractChannelHandlerContext.fireChannelRead(AbstractChannelHandlerContext.java:357)\n", + "\tat org.apache.spark.network.util.TransportFrameDecoder.channelRead(TransportFrameDecoder.java:102)\n", + "\tat io.netty.channel.AbstractChannelHandlerContext.invokeChannelRead(AbstractChannelHandlerContext.java:379)\n", + "\tat io.netty.channel.AbstractChannelHandlerContext.invokeChannelRead(AbstractChannelHandlerContext.java:365)\n", + "\tat io.netty.channel.AbstractChannelHandlerContext.fireChannelRead(AbstractChannelHandlerContext.java:357)\n", + "\tat io.netty.channel.DefaultChannelPipeline$HeadContext.channelRead(DefaultChannelPipeline.java:1410)\n", + "\tat io.netty.channel.AbstractChannelHandlerContext.invokeChannelRead(AbstractChannelHandlerContext.java:379)\n", + "\tat io.netty.channel.AbstractChannelHandlerContext.invokeChannelRead(AbstractChannelHandlerContext.java:365)\n", + "\tat io.netty.channel.DefaultChannelPipeline.fireChannelRead(DefaultChannelPipeline.java:919)\n", + "\tat io.netty.channel.nio.AbstractNioByteChannel$NioByteUnsafe.read(AbstractNioByteChannel.java:163)\n", + "\tat io.netty.channel.nio.NioEventLoop.processSelectedKey(NioEventLoop.java:714)\n", + "\tat io.netty.channel.nio.NioEventLoop.processSelectedKeysOptimized(NioEventLoop.java:650)\n", + "\tat io.netty.channel.nio.NioEventLoop.processSelectedKeys(NioEventLoop.java:576)\n", + "\tat io.netty.channel.nio.NioEventLoop.run(NioEventLoop.java:493)\n", + "\tat io.netty.util.concurrent.SingleThreadEventExecutor$4.run(SingleThreadEventExecutor.java:989)\n", + "\tat io.netty.util.internal.ThreadExecutorMap$2.run(ThreadExecutorMap.java:74)\n", + "\tat io.netty.util.concurrent.FastThreadLocalRunnable.run(FastThreadLocalRunnable.java:30)\n", + "\t... 1 more\n", + "Caused by: java.lang.OutOfMemoryError: Java heap space\n", + "23/11/24 21:04:18 WARN org.apache.spark.scheduler.TaskSetManager: Lost task 530.0 in stage 19.0 (TID 2251) (hub-msca-bdp-dphub-students-test-ridhi-w-0.c.msca-bdp-student-ap.internal executor 3): TaskResultLost (result lost from block manager)\n", + "23/11/24 21:04:21 WARN org.apache.spark.storage.BlockManager: Failed to fetch block after 1 fetch failures. 
Most recent failure cause:\n", + "org.apache.spark.SparkException: Exception thrown in awaitResult: \n", + "\tat org.apache.spark.util.ThreadUtils$.awaitResult(ThreadUtils.scala:301)\n", + "\tat org.apache.spark.network.BlockTransferService.fetchBlockSync(BlockTransferService.scala:103)\n", + "\tat org.apache.spark.storage.BlockManager.fetchRemoteManagedBuffer(BlockManager.scala:1069)\n", + "\tat org.apache.spark.storage.BlockManager.$anonfun$getRemoteBlock$8(BlockManager.scala:1013)\n", + "\tat scala.Option.orElse(Option.scala:447)\n", + "\tat org.apache.spark.storage.BlockManager.getRemoteBlock(BlockManager.scala:1013)\n", + "\tat org.apache.spark.storage.BlockManager.getRemoteBytes(BlockManager.scala:1151)\n", + "\tat org.apache.spark.scheduler.TaskResultGetter$$anon$3.$anonfun$run$1(TaskResultGetter.scala:88)\n", + "\tat scala.runtime.java8.JFunction0$mcV$sp.apply(JFunction0$mcV$sp.java:23)\n", + "\tat org.apache.spark.util.Utils$.logUncaughtExceptions(Utils.scala:1996)\n", + "\tat org.apache.spark.scheduler.TaskResultGetter$$anon$3.run(TaskResultGetter.scala:63)\n", + "\tat java.util.concurrent.ThreadPoolExecutor.runWorker(ThreadPoolExecutor.java:1149)\n", + "\tat java.util.concurrent.ThreadPoolExecutor$Worker.run(ThreadPoolExecutor.java:624)\n", + "\tat java.lang.Thread.run(Thread.java:750)\n", + "Caused by: java.util.concurrent.ExecutionException: Boxed Error\n", + "\tat scala.concurrent.impl.Promise$.resolver(Promise.scala:87)\n", + "\tat scala.concurrent.impl.Promise$.scala$concurrent$impl$Promise$$resolveTry(Promise.scala:79)\n", + "\tat scala.concurrent.impl.Promise$DefaultPromise.tryComplete(Promise.scala:284)\n", + "\tat scala.concurrent.Promise.complete(Promise.scala:53)\n", + "\tat scala.concurrent.Promise.complete$(Promise.scala:52)\n", + "\tat scala.concurrent.impl.Promise$DefaultPromise.complete(Promise.scala:187)\n", + "\tat scala.concurrent.Promise.failure(Promise.scala:104)\n", + "\tat scala.concurrent.Promise.failure$(Promise.scala:104)\n", + "\tat scala.concurrent.impl.Promise$DefaultPromise.failure(Promise.scala:187)\n", + "\tat org.apache.spark.network.BlockTransferService$$anon$1.onBlockFetchSuccess(BlockTransferService.scala:98)\n", + "\tat org.apache.spark.network.shuffle.RetryingBlockFetcher$RetryingBlockFetchListener.onBlockFetchSuccess(RetryingBlockFetcher.java:216)\n", + "\tat org.apache.spark.network.shuffle.OneForOneBlockFetcher$ChunkCallback.onSuccess(OneForOneBlockFetcher.java:190)\n", + "\tat org.apache.spark.network.client.TransportResponseHandler.handle(TransportResponseHandler.java:171)\n", + "\tat org.apache.spark.network.server.TransportChannelHandler.channelRead0(TransportChannelHandler.java:142)\n", + "\tat org.apache.spark.network.server.TransportChannelHandler.channelRead0(TransportChannelHandler.java:53)\n", + "\tat io.netty.channel.SimpleChannelInboundHandler.channelRead(SimpleChannelInboundHandler.java:99)\n", + "\tat io.netty.channel.AbstractChannelHandlerContext.invokeChannelRead(AbstractChannelHandlerContext.java:379)\n", + "\tat io.netty.channel.AbstractChannelHandlerContext.invokeChannelRead(AbstractChannelHandlerContext.java:365)\n", + "\tat io.netty.channel.AbstractChannelHandlerContext.fireChannelRead(AbstractChannelHandlerContext.java:357)\n", + "\tat io.netty.handler.timeout.IdleStateHandler.channelRead(IdleStateHandler.java:286)\n", + "\tat io.netty.channel.AbstractChannelHandlerContext.invokeChannelRead(AbstractChannelHandlerContext.java:379)\n", + "\tat 
io.netty.channel.AbstractChannelHandlerContext.invokeChannelRead(AbstractChannelHandlerContext.java:365)\n", + "\tat io.netty.channel.AbstractChannelHandlerContext.fireChannelRead(AbstractChannelHandlerContext.java:357)\n", + "\tat io.netty.handler.codec.MessageToMessageDecoder.channelRead(MessageToMessageDecoder.java:103)\n", + "\tat io.netty.channel.AbstractChannelHandlerContext.invokeChannelRead(AbstractChannelHandlerContext.java:379)\n", + "\tat io.netty.channel.AbstractChannelHandlerContext.invokeChannelRead(AbstractChannelHandlerContext.java:365)\n", + "\tat io.netty.channel.AbstractChannelHandlerContext.fireChannelRead(AbstractChannelHandlerContext.java:357)\n", + "\tat org.apache.spark.network.util.TransportFrameDecoder.channelRead(TransportFrameDecoder.java:102)\n", + "\tat io.netty.channel.AbstractChannelHandlerContext.invokeChannelRead(AbstractChannelHandlerContext.java:379)\n", + "\tat io.netty.channel.AbstractChannelHandlerContext.invokeChannelRead(AbstractChannelHandlerContext.java:365)\n", + "\tat io.netty.channel.AbstractChannelHandlerContext.fireChannelRead(AbstractChannelHandlerContext.java:357)\n", + "\tat io.netty.channel.DefaultChannelPipeline$HeadContext.channelRead(DefaultChannelPipeline.java:1410)\n", + "\tat io.netty.channel.AbstractChannelHandlerContext.invokeChannelRead(AbstractChannelHandlerContext.java:379)\n", + "\tat io.netty.channel.AbstractChannelHandlerContext.invokeChannelRead(AbstractChannelHandlerContext.java:365)\n", + "\tat io.netty.channel.DefaultChannelPipeline.fireChannelRead(DefaultChannelPipeline.java:919)\n", + "\tat io.netty.channel.nio.AbstractNioByteChannel$NioByteUnsafe.read(AbstractNioByteChannel.java:163)\n", + "\tat io.netty.channel.nio.NioEventLoop.processSelectedKey(NioEventLoop.java:714)\n", + "\tat io.netty.channel.nio.NioEventLoop.processSelectedKeysOptimized(NioEventLoop.java:650)\n", + "\tat io.netty.channel.nio.NioEventLoop.processSelectedKeys(NioEventLoop.java:576)\n", + "\tat io.netty.channel.nio.NioEventLoop.run(NioEventLoop.java:493)\n", + "\tat io.netty.util.concurrent.SingleThreadEventExecutor$4.run(SingleThreadEventExecutor.java:989)\n", + "\tat io.netty.util.internal.ThreadExecutorMap$2.run(ThreadExecutorMap.java:74)\n", + "\tat io.netty.util.concurrent.FastThreadLocalRunnable.run(FastThreadLocalRunnable.java:30)\n", + "\t... 1 more\n", + "Caused by: java.lang.OutOfMemoryError: Java heap space\n", + "23/11/24 21:04:21 WARN org.apache.spark.scheduler.TaskSetManager: Lost task 544.0 in stage 19.0 (TID 2266) (hub-msca-bdp-dphub-students-test-ridhi-w-1.c.msca-bdp-student-ap.internal executor 7): TaskResultLost (result lost from block manager)\n", + "23/11/24 21:04:22 WARN org.apache.spark.storage.BlockManager: Failed to fetch block after 1 fetch failures. 
Most recent failure cause:\n", + "org.apache.spark.SparkException: Exception thrown in awaitResult: \n", + "\tat org.apache.spark.util.ThreadUtils$.awaitResult(ThreadUtils.scala:301)\n", + "\tat org.apache.spark.network.BlockTransferService.fetchBlockSync(BlockTransferService.scala:103)\n", + "\tat org.apache.spark.storage.BlockManager.fetchRemoteManagedBuffer(BlockManager.scala:1069)\n", + "\tat org.apache.spark.storage.BlockManager.$anonfun$getRemoteBlock$8(BlockManager.scala:1013)\n", + "\tat scala.Option.orElse(Option.scala:447)\n", + "\tat org.apache.spark.storage.BlockManager.getRemoteBlock(BlockManager.scala:1013)\n", + "\tat org.apache.spark.storage.BlockManager.getRemoteBytes(BlockManager.scala:1151)\n", + "\tat org.apache.spark.scheduler.TaskResultGetter$$anon$3.$anonfun$run$1(TaskResultGetter.scala:88)\n", + "\tat scala.runtime.java8.JFunction0$mcV$sp.apply(JFunction0$mcV$sp.java:23)\n", + "\tat org.apache.spark.util.Utils$.logUncaughtExceptions(Utils.scala:1996)\n", + "\tat org.apache.spark.scheduler.TaskResultGetter$$anon$3.run(TaskResultGetter.scala:63)\n", + "\tat java.util.concurrent.ThreadPoolExecutor.runWorker(ThreadPoolExecutor.java:1149)\n", + "\tat java.util.concurrent.ThreadPoolExecutor$Worker.run(ThreadPoolExecutor.java:624)\n", + "\tat java.lang.Thread.run(Thread.java:750)\n", + "Caused by: java.util.concurrent.ExecutionException: Boxed Error\n", + "\tat scala.concurrent.impl.Promise$.resolver(Promise.scala:87)\n", + "\tat scala.concurrent.impl.Promise$.scala$concurrent$impl$Promise$$resolveTry(Promise.scala:79)\n", + "\tat scala.concurrent.impl.Promise$DefaultPromise.tryComplete(Promise.scala:284)\n", + "\tat scala.concurrent.Promise.complete(Promise.scala:53)\n", + "\tat scala.concurrent.Promise.complete$(Promise.scala:52)\n", + "\tat scala.concurrent.impl.Promise$DefaultPromise.complete(Promise.scala:187)\n", + "\tat scala.concurrent.Promise.failure(Promise.scala:104)\n", + "\tat scala.concurrent.Promise.failure$(Promise.scala:104)\n", + "\tat scala.concurrent.impl.Promise$DefaultPromise.failure(Promise.scala:187)\n", + "\tat org.apache.spark.network.BlockTransferService$$anon$1.onBlockFetchSuccess(BlockTransferService.scala:98)\n", + "\tat org.apache.spark.network.shuffle.RetryingBlockFetcher$RetryingBlockFetchListener.onBlockFetchSuccess(RetryingBlockFetcher.java:216)\n", + "\tat org.apache.spark.network.shuffle.OneForOneBlockFetcher$ChunkCallback.onSuccess(OneForOneBlockFetcher.java:190)\n", + "\tat org.apache.spark.network.client.TransportResponseHandler.handle(TransportResponseHandler.java:171)\n", + "\tat org.apache.spark.network.server.TransportChannelHandler.channelRead0(TransportChannelHandler.java:142)\n", + "\tat org.apache.spark.network.server.TransportChannelHandler.channelRead0(TransportChannelHandler.java:53)\n", + "\tat io.netty.channel.SimpleChannelInboundHandler.channelRead(SimpleChannelInboundHandler.java:99)\n", + "\tat io.netty.channel.AbstractChannelHandlerContext.invokeChannelRead(AbstractChannelHandlerContext.java:379)\n", + "\tat io.netty.channel.AbstractChannelHandlerContext.invokeChannelRead(AbstractChannelHandlerContext.java:365)\n", + "\tat io.netty.channel.AbstractChannelHandlerContext.fireChannelRead(AbstractChannelHandlerContext.java:357)\n", + "\tat io.netty.handler.timeout.IdleStateHandler.channelRead(IdleStateHandler.java:286)\n", + "\tat io.netty.channel.AbstractChannelHandlerContext.invokeChannelRead(AbstractChannelHandlerContext.java:379)\n", + "\tat 
io.netty.channel.AbstractChannelHandlerContext.invokeChannelRead(AbstractChannelHandlerContext.java:365)\n", + "\tat io.netty.channel.AbstractChannelHandlerContext.fireChannelRead(AbstractChannelHandlerContext.java:357)\n", + "\tat io.netty.handler.codec.MessageToMessageDecoder.channelRead(MessageToMessageDecoder.java:103)\n", + "\tat io.netty.channel.AbstractChannelHandlerContext.invokeChannelRead(AbstractChannelHandlerContext.java:379)\n", + "\tat io.netty.channel.AbstractChannelHandlerContext.invokeChannelRead(AbstractChannelHandlerContext.java:365)\n", + "\tat io.netty.channel.AbstractChannelHandlerContext.fireChannelRead(AbstractChannelHandlerContext.java:357)\n", + "\tat org.apache.spark.network.util.TransportFrameDecoder.channelRead(TransportFrameDecoder.java:102)\n", + "\tat io.netty.channel.AbstractChannelHandlerContext.invokeChannelRead(AbstractChannelHandlerContext.java:379)\n", + "\tat io.netty.channel.AbstractChannelHandlerContext.invokeChannelRead(AbstractChannelHandlerContext.java:365)\n", + "\tat io.netty.channel.AbstractChannelHandlerContext.fireChannelRead(AbstractChannelHandlerContext.java:357)\n", + "\tat io.netty.channel.DefaultChannelPipeline$HeadContext.channelRead(DefaultChannelPipeline.java:1410)\n", + "\tat io.netty.channel.AbstractChannelHandlerContext.invokeChannelRead(AbstractChannelHandlerContext.java:379)\n", + "\tat io.netty.channel.AbstractChannelHandlerContext.invokeChannelRead(AbstractChannelHandlerContext.java:365)\n", + "\tat io.netty.channel.DefaultChannelPipeline.fireChannelRead(DefaultChannelPipeline.java:919)\n", + "\tat io.netty.channel.nio.AbstractNioByteChannel$NioByteUnsafe.read(AbstractNioByteChannel.java:163)\n", + "\tat io.netty.channel.nio.NioEventLoop.processSelectedKey(NioEventLoop.java:714)\n", + "\tat io.netty.channel.nio.NioEventLoop.processSelectedKeysOptimized(NioEventLoop.java:650)\n", + "\tat io.netty.channel.nio.NioEventLoop.processSelectedKeys(NioEventLoop.java:576)\n", + "\tat io.netty.channel.nio.NioEventLoop.run(NioEventLoop.java:493)\n", + "\tat io.netty.util.concurrent.SingleThreadEventExecutor$4.run(SingleThreadEventExecutor.java:989)\n", + "\tat io.netty.util.internal.ThreadExecutorMap$2.run(ThreadExecutorMap.java:74)\n", + "\tat io.netty.util.concurrent.FastThreadLocalRunnable.run(FastThreadLocalRunnable.java:30)\n", + "\t... 1 more\n", + "Caused by: java.lang.OutOfMemoryError: Java heap space\n", + "23/11/24 21:04:22 WARN org.apache.spark.scheduler.TaskSetManager: Lost task 531.0 in stage 19.0 (TID 2252) (hub-msca-bdp-dphub-students-test-ridhi-w-0.c.msca-bdp-student-ap.internal executor 3): TaskResultLost (result lost from block manager)\n", + "23/11/24 21:04:24 WARN org.apache.spark.storage.BlockManager: Failed to fetch block after 1 fetch failures. 
Most recent failure cause:\n", + "org.apache.spark.SparkException: Exception thrown in awaitResult: \n", + "\t[... same java.lang.OutOfMemoryError: Java heap space stack trace as above ...]\n", + "[... identical BlockManager fetch-failure warnings and TaskResultLost messages repeat for tasks 431.1, 529.0, 532.0, 528.0, 545.0, 536.0, 444.1, 538.0, 433.1 and 540.0 of stage 19.0 between 21:04:24 and 21:04:41; duplicate stack traces omitted ...]\n", + "23/11/24 21:04:43 WARN org.apache.spark.storage.BlockManager: Failed to fetch block after 1 fetch failures. 
Most recent failure cause:\n", + "org.apache.spark.SparkException: Exception thrown in awaitResult: \n", + "\tat org.apache.spark.util.ThreadUtils$.awaitResult(ThreadUtils.scala:301)\n", + "\tat org.apache.spark.network.BlockTransferService.fetchBlockSync(BlockTransferService.scala:103)\n", + "\tat org.apache.spark.storage.BlockManager.fetchRemoteManagedBuffer(BlockManager.scala:1069)\n", + "\tat org.apache.spark.storage.BlockManager.$anonfun$getRemoteBlock$8(BlockManager.scala:1013)\n", + "\tat scala.Option.orElse(Option.scala:447)\n", + "\tat org.apache.spark.storage.BlockManager.getRemoteBlock(BlockManager.scala:1013)\n", + "\tat org.apache.spark.storage.BlockManager.getRemoteBytes(BlockManager.scala:1151)\n", + "\tat org.apache.spark.scheduler.TaskResultGetter$$anon$3.$anonfun$run$1(TaskResultGetter.scala:88)\n", + "\tat scala.runtime.java8.JFunction0$mcV$sp.apply(JFunction0$mcV$sp.java:23)\n", + "\tat org.apache.spark.util.Utils$.logUncaughtExceptions(Utils.scala:1996)\n", + "\tat org.apache.spark.scheduler.TaskResultGetter$$anon$3.run(TaskResultGetter.scala:63)\n", + "\tat java.util.concurrent.ThreadPoolExecutor.runWorker(ThreadPoolExecutor.java:1149)\n", + "\tat java.util.concurrent.ThreadPoolExecutor$Worker.run(ThreadPoolExecutor.java:624)\n", + "\tat java.lang.Thread.run(Thread.java:750)\n", + "Caused by: java.util.concurrent.ExecutionException: Boxed Error\n", + "\tat scala.concurrent.impl.Promise$.resolver(Promise.scala:87)\n", + "\tat scala.concurrent.impl.Promise$.scala$concurrent$impl$Promise$$resolveTry(Promise.scala:79)\n", + "\tat scala.concurrent.impl.Promise$DefaultPromise.tryComplete(Promise.scala:284)\n", + "\tat scala.concurrent.Promise.complete(Promise.scala:53)\n", + "\tat scala.concurrent.Promise.complete$(Promise.scala:52)\n", + "\tat scala.concurrent.impl.Promise$DefaultPromise.complete(Promise.scala:187)\n", + "\tat scala.concurrent.Promise.failure(Promise.scala:104)\n", + "\tat scala.concurrent.Promise.failure$(Promise.scala:104)\n", + "\tat scala.concurrent.impl.Promise$DefaultPromise.failure(Promise.scala:187)\n", + "\tat org.apache.spark.network.BlockTransferService$$anon$1.onBlockFetchSuccess(BlockTransferService.scala:98)\n", + "\tat org.apache.spark.network.shuffle.RetryingBlockFetcher$RetryingBlockFetchListener.onBlockFetchSuccess(RetryingBlockFetcher.java:216)\n", + "\tat org.apache.spark.network.shuffle.OneForOneBlockFetcher$ChunkCallback.onSuccess(OneForOneBlockFetcher.java:190)\n", + "\tat org.apache.spark.network.client.TransportResponseHandler.handle(TransportResponseHandler.java:171)\n", + "\tat org.apache.spark.network.server.TransportChannelHandler.channelRead0(TransportChannelHandler.java:142)\n", + "\tat org.apache.spark.network.server.TransportChannelHandler.channelRead0(TransportChannelHandler.java:53)\n", + "\tat io.netty.channel.SimpleChannelInboundHandler.channelRead(SimpleChannelInboundHandler.java:99)\n", + "\tat io.netty.channel.AbstractChannelHandlerContext.invokeChannelRead(AbstractChannelHandlerContext.java:379)\n", + "\tat io.netty.channel.AbstractChannelHandlerContext.invokeChannelRead(AbstractChannelHandlerContext.java:365)\n", + "\tat io.netty.channel.AbstractChannelHandlerContext.fireChannelRead(AbstractChannelHandlerContext.java:357)\n", + "\tat io.netty.handler.timeout.IdleStateHandler.channelRead(IdleStateHandler.java:286)\n", + "\tat io.netty.channel.AbstractChannelHandlerContext.invokeChannelRead(AbstractChannelHandlerContext.java:379)\n", + "\tat 
io.netty.channel.AbstractChannelHandlerContext.invokeChannelRead(AbstractChannelHandlerContext.java:365)\n", + "\tat io.netty.channel.AbstractChannelHandlerContext.fireChannelRead(AbstractChannelHandlerContext.java:357)\n", + "\tat io.netty.handler.codec.MessageToMessageDecoder.channelRead(MessageToMessageDecoder.java:103)\n", + "\tat io.netty.channel.AbstractChannelHandlerContext.invokeChannelRead(AbstractChannelHandlerContext.java:379)\n", + "\tat io.netty.channel.AbstractChannelHandlerContext.invokeChannelRead(AbstractChannelHandlerContext.java:365)\n", + "\tat io.netty.channel.AbstractChannelHandlerContext.fireChannelRead(AbstractChannelHandlerContext.java:357)\n", + "\tat org.apache.spark.network.util.TransportFrameDecoder.channelRead(TransportFrameDecoder.java:102)\n", + "\tat io.netty.channel.AbstractChannelHandlerContext.invokeChannelRead(AbstractChannelHandlerContext.java:379)\n", + "\tat io.netty.channel.AbstractChannelHandlerContext.invokeChannelRead(AbstractChannelHandlerContext.java:365)\n", + "\tat io.netty.channel.AbstractChannelHandlerContext.fireChannelRead(AbstractChannelHandlerContext.java:357)\n", + "\tat io.netty.channel.DefaultChannelPipeline$HeadContext.channelRead(DefaultChannelPipeline.java:1410)\n", + "\tat io.netty.channel.AbstractChannelHandlerContext.invokeChannelRead(AbstractChannelHandlerContext.java:379)\n", + "\tat io.netty.channel.AbstractChannelHandlerContext.invokeChannelRead(AbstractChannelHandlerContext.java:365)\n", + "\tat io.netty.channel.DefaultChannelPipeline.fireChannelRead(DefaultChannelPipeline.java:919)\n", + "\tat io.netty.channel.nio.AbstractNioByteChannel$NioByteUnsafe.read(AbstractNioByteChannel.java:163)\n", + "\tat io.netty.channel.nio.NioEventLoop.processSelectedKey(NioEventLoop.java:714)\n", + "\tat io.netty.channel.nio.NioEventLoop.processSelectedKeysOptimized(NioEventLoop.java:650)\n", + "\tat io.netty.channel.nio.NioEventLoop.processSelectedKeys(NioEventLoop.java:576)\n", + "\tat io.netty.channel.nio.NioEventLoop.run(NioEventLoop.java:493)\n", + "\tat io.netty.util.concurrent.SingleThreadEventExecutor$4.run(SingleThreadEventExecutor.java:989)\n", + "\tat io.netty.util.internal.ThreadExecutorMap$2.run(ThreadExecutorMap.java:74)\n", + "\tat io.netty.util.concurrent.FastThreadLocalRunnable.run(FastThreadLocalRunnable.java:30)\n", + "\t... 1 more\n", + "Caused by: java.lang.OutOfMemoryError: Java heap space\n", + "23/11/24 21:04:43 WARN org.apache.spark.scheduler.TaskSetManager: Lost task 429.1 in stage 19.0 (TID 2273) (hub-msca-bdp-dphub-students-test-ridhi-w-0.c.msca-bdp-student-ap.internal executor 3): TaskResultLost (result lost from block manager)\n", + "23/11/24 21:04:45 WARN org.apache.spark.storage.BlockManager: Failed to fetch block after 1 fetch failures. 
Most recent failure cause:\n", + "org.apache.spark.SparkException: Exception thrown in awaitResult: \n", + "\tat org.apache.spark.util.ThreadUtils$.awaitResult(ThreadUtils.scala:301)\n", + "\tat org.apache.spark.network.BlockTransferService.fetchBlockSync(BlockTransferService.scala:103)\n", + "\tat org.apache.spark.storage.BlockManager.fetchRemoteManagedBuffer(BlockManager.scala:1069)\n", + "\tat org.apache.spark.storage.BlockManager.$anonfun$getRemoteBlock$8(BlockManager.scala:1013)\n", + "\tat scala.Option.orElse(Option.scala:447)\n", + "\tat org.apache.spark.storage.BlockManager.getRemoteBlock(BlockManager.scala:1013)\n", + "\tat org.apache.spark.storage.BlockManager.getRemoteBytes(BlockManager.scala:1151)\n", + "\tat org.apache.spark.scheduler.TaskResultGetter$$anon$3.$anonfun$run$1(TaskResultGetter.scala:88)\n", + "\tat scala.runtime.java8.JFunction0$mcV$sp.apply(JFunction0$mcV$sp.java:23)\n", + "\tat org.apache.spark.util.Utils$.logUncaughtExceptions(Utils.scala:1996)\n", + "\tat org.apache.spark.scheduler.TaskResultGetter$$anon$3.run(TaskResultGetter.scala:63)\n", + "\tat java.util.concurrent.ThreadPoolExecutor.runWorker(ThreadPoolExecutor.java:1149)\n", + "\tat java.util.concurrent.ThreadPoolExecutor$Worker.run(ThreadPoolExecutor.java:624)\n", + "\tat java.lang.Thread.run(Thread.java:750)\n", + "Caused by: java.util.concurrent.ExecutionException: Boxed Error\n", + "\tat scala.concurrent.impl.Promise$.resolver(Promise.scala:87)\n", + "\tat scala.concurrent.impl.Promise$.scala$concurrent$impl$Promise$$resolveTry(Promise.scala:79)\n", + "\tat scala.concurrent.impl.Promise$DefaultPromise.tryComplete(Promise.scala:284)\n", + "\tat scala.concurrent.Promise.complete(Promise.scala:53)\n", + "\tat scala.concurrent.Promise.complete$(Promise.scala:52)\n", + "\tat scala.concurrent.impl.Promise$DefaultPromise.complete(Promise.scala:187)\n", + "\tat scala.concurrent.Promise.failure(Promise.scala:104)\n", + "\tat scala.concurrent.Promise.failure$(Promise.scala:104)\n", + "\tat scala.concurrent.impl.Promise$DefaultPromise.failure(Promise.scala:187)\n", + "\tat org.apache.spark.network.BlockTransferService$$anon$1.onBlockFetchSuccess(BlockTransferService.scala:98)\n", + "\tat org.apache.spark.network.shuffle.RetryingBlockFetcher$RetryingBlockFetchListener.onBlockFetchSuccess(RetryingBlockFetcher.java:216)\n", + "\tat org.apache.spark.network.shuffle.OneForOneBlockFetcher$ChunkCallback.onSuccess(OneForOneBlockFetcher.java:190)\n", + "\tat org.apache.spark.network.client.TransportResponseHandler.handle(TransportResponseHandler.java:171)\n", + "\tat org.apache.spark.network.server.TransportChannelHandler.channelRead0(TransportChannelHandler.java:142)\n", + "\tat org.apache.spark.network.server.TransportChannelHandler.channelRead0(TransportChannelHandler.java:53)\n", + "\tat io.netty.channel.SimpleChannelInboundHandler.channelRead(SimpleChannelInboundHandler.java:99)\n", + "\tat io.netty.channel.AbstractChannelHandlerContext.invokeChannelRead(AbstractChannelHandlerContext.java:379)\n", + "\tat io.netty.channel.AbstractChannelHandlerContext.invokeChannelRead(AbstractChannelHandlerContext.java:365)\n", + "\tat io.netty.channel.AbstractChannelHandlerContext.fireChannelRead(AbstractChannelHandlerContext.java:357)\n", + "\tat io.netty.handler.timeout.IdleStateHandler.channelRead(IdleStateHandler.java:286)\n", + "\tat io.netty.channel.AbstractChannelHandlerContext.invokeChannelRead(AbstractChannelHandlerContext.java:379)\n", + "\tat 
io.netty.channel.AbstractChannelHandlerContext.invokeChannelRead(AbstractChannelHandlerContext.java:365)\n", + "\tat io.netty.channel.AbstractChannelHandlerContext.fireChannelRead(AbstractChannelHandlerContext.java:357)\n", + "\tat io.netty.handler.codec.MessageToMessageDecoder.channelRead(MessageToMessageDecoder.java:103)\n", + "\tat io.netty.channel.AbstractChannelHandlerContext.invokeChannelRead(AbstractChannelHandlerContext.java:379)\n", + "\tat io.netty.channel.AbstractChannelHandlerContext.invokeChannelRead(AbstractChannelHandlerContext.java:365)\n", + "\tat io.netty.channel.AbstractChannelHandlerContext.fireChannelRead(AbstractChannelHandlerContext.java:357)\n", + "\tat org.apache.spark.network.util.TransportFrameDecoder.channelRead(TransportFrameDecoder.java:102)\n", + "\tat io.netty.channel.AbstractChannelHandlerContext.invokeChannelRead(AbstractChannelHandlerContext.java:379)\n", + "\tat io.netty.channel.AbstractChannelHandlerContext.invokeChannelRead(AbstractChannelHandlerContext.java:365)\n", + "\tat io.netty.channel.AbstractChannelHandlerContext.fireChannelRead(AbstractChannelHandlerContext.java:357)\n", + "\tat io.netty.channel.DefaultChannelPipeline$HeadContext.channelRead(DefaultChannelPipeline.java:1410)\n", + "\tat io.netty.channel.AbstractChannelHandlerContext.invokeChannelRead(AbstractChannelHandlerContext.java:379)\n", + "\tat io.netty.channel.AbstractChannelHandlerContext.invokeChannelRead(AbstractChannelHandlerContext.java:365)\n", + "\tat io.netty.channel.DefaultChannelPipeline.fireChannelRead(DefaultChannelPipeline.java:919)\n", + "\tat io.netty.channel.nio.AbstractNioByteChannel$NioByteUnsafe.read(AbstractNioByteChannel.java:163)\n", + "\tat io.netty.channel.nio.NioEventLoop.processSelectedKey(NioEventLoop.java:714)\n", + "\tat io.netty.channel.nio.NioEventLoop.processSelectedKeysOptimized(NioEventLoop.java:650)\n", + "\tat io.netty.channel.nio.NioEventLoop.processSelectedKeys(NioEventLoop.java:576)\n", + "\tat io.netty.channel.nio.NioEventLoop.run(NioEventLoop.java:493)\n", + "\tat io.netty.util.concurrent.SingleThreadEventExecutor$4.run(SingleThreadEventExecutor.java:989)\n", + "\tat io.netty.util.internal.ThreadExecutorMap$2.run(ThreadExecutorMap.java:74)\n", + "\tat io.netty.util.concurrent.FastThreadLocalRunnable.run(FastThreadLocalRunnable.java:30)\n", + "\t... 1 more\n", + "Caused by: java.lang.OutOfMemoryError: Java heap space\n", + "23/11/24 21:04:45 WARN org.apache.spark.scheduler.TaskSetManager: Lost task 537.0 in stage 19.0 (TID 2259) (hub-msca-bdp-dphub-students-test-ridhi-w-0.c.msca-bdp-student-ap.internal executor 6): TaskResultLost (result lost from block manager)\n", + "23/11/24 21:04:48 WARN org.apache.spark.storage.BlockManager: Failed to fetch block after 1 fetch failures. 
Most recent failure cause:\n", + "org.apache.spark.SparkException: Exception thrown in awaitResult: \n", + "\tat org.apache.spark.util.ThreadUtils$.awaitResult(ThreadUtils.scala:301)\n", + "\tat org.apache.spark.network.BlockTransferService.fetchBlockSync(BlockTransferService.scala:103)\n", + "\tat org.apache.spark.storage.BlockManager.fetchRemoteManagedBuffer(BlockManager.scala:1069)\n", + "\tat org.apache.spark.storage.BlockManager.$anonfun$getRemoteBlock$8(BlockManager.scala:1013)\n", + "\tat scala.Option.orElse(Option.scala:447)\n", + "\tat org.apache.spark.storage.BlockManager.getRemoteBlock(BlockManager.scala:1013)\n", + "\tat org.apache.spark.storage.BlockManager.getRemoteBytes(BlockManager.scala:1151)\n", + "\tat org.apache.spark.scheduler.TaskResultGetter$$anon$3.$anonfun$run$1(TaskResultGetter.scala:88)\n", + "\tat scala.runtime.java8.JFunction0$mcV$sp.apply(JFunction0$mcV$sp.java:23)\n", + "\tat org.apache.spark.util.Utils$.logUncaughtExceptions(Utils.scala:1996)\n", + "\tat org.apache.spark.scheduler.TaskResultGetter$$anon$3.run(TaskResultGetter.scala:63)\n", + "\tat java.util.concurrent.ThreadPoolExecutor.runWorker(ThreadPoolExecutor.java:1149)\n", + "\tat java.util.concurrent.ThreadPoolExecutor$Worker.run(ThreadPoolExecutor.java:624)\n", + "\tat java.lang.Thread.run(Thread.java:750)\n", + "Caused by: java.util.concurrent.ExecutionException: Boxed Error\n", + "\tat scala.concurrent.impl.Promise$.resolver(Promise.scala:87)\n", + "\tat scala.concurrent.impl.Promise$.scala$concurrent$impl$Promise$$resolveTry(Promise.scala:79)\n", + "\tat scala.concurrent.impl.Promise$DefaultPromise.tryComplete(Promise.scala:284)\n", + "\tat scala.concurrent.Promise.complete(Promise.scala:53)\n", + "\tat scala.concurrent.Promise.complete$(Promise.scala:52)\n", + "\tat scala.concurrent.impl.Promise$DefaultPromise.complete(Promise.scala:187)\n", + "\tat scala.concurrent.Promise.failure(Promise.scala:104)\n", + "\tat scala.concurrent.Promise.failure$(Promise.scala:104)\n", + "\tat scala.concurrent.impl.Promise$DefaultPromise.failure(Promise.scala:187)\n", + "\tat org.apache.spark.network.BlockTransferService$$anon$1.onBlockFetchSuccess(BlockTransferService.scala:98)\n", + "\tat org.apache.spark.network.shuffle.RetryingBlockFetcher$RetryingBlockFetchListener.onBlockFetchSuccess(RetryingBlockFetcher.java:216)\n", + "\tat org.apache.spark.network.shuffle.OneForOneBlockFetcher$ChunkCallback.onSuccess(OneForOneBlockFetcher.java:190)\n", + "\tat org.apache.spark.network.client.TransportResponseHandler.handle(TransportResponseHandler.java:171)\n", + "\tat org.apache.spark.network.server.TransportChannelHandler.channelRead0(TransportChannelHandler.java:142)\n", + "\tat org.apache.spark.network.server.TransportChannelHandler.channelRead0(TransportChannelHandler.java:53)\n", + "\tat io.netty.channel.SimpleChannelInboundHandler.channelRead(SimpleChannelInboundHandler.java:99)\n", + "\tat io.netty.channel.AbstractChannelHandlerContext.invokeChannelRead(AbstractChannelHandlerContext.java:379)\n", + "\tat io.netty.channel.AbstractChannelHandlerContext.invokeChannelRead(AbstractChannelHandlerContext.java:365)\n", + "\tat io.netty.channel.AbstractChannelHandlerContext.fireChannelRead(AbstractChannelHandlerContext.java:357)\n", + "\tat io.netty.handler.timeout.IdleStateHandler.channelRead(IdleStateHandler.java:286)\n", + "\tat io.netty.channel.AbstractChannelHandlerContext.invokeChannelRead(AbstractChannelHandlerContext.java:379)\n", + "\tat 
io.netty.channel.AbstractChannelHandlerContext.invokeChannelRead(AbstractChannelHandlerContext.java:365)\n", + "\tat io.netty.channel.AbstractChannelHandlerContext.fireChannelRead(AbstractChannelHandlerContext.java:357)\n", + "\tat io.netty.handler.codec.MessageToMessageDecoder.channelRead(MessageToMessageDecoder.java:103)\n", + "\tat io.netty.channel.AbstractChannelHandlerContext.invokeChannelRead(AbstractChannelHandlerContext.java:379)\n", + "\tat io.netty.channel.AbstractChannelHandlerContext.invokeChannelRead(AbstractChannelHandlerContext.java:365)\n", + "\tat io.netty.channel.AbstractChannelHandlerContext.fireChannelRead(AbstractChannelHandlerContext.java:357)\n", + "\tat org.apache.spark.network.util.TransportFrameDecoder.channelRead(TransportFrameDecoder.java:102)\n", + "\tat io.netty.channel.AbstractChannelHandlerContext.invokeChannelRead(AbstractChannelHandlerContext.java:379)\n", + "\tat io.netty.channel.AbstractChannelHandlerContext.invokeChannelRead(AbstractChannelHandlerContext.java:365)\n", + "\tat io.netty.channel.AbstractChannelHandlerContext.fireChannelRead(AbstractChannelHandlerContext.java:357)\n", + "\tat io.netty.channel.DefaultChannelPipeline$HeadContext.channelRead(DefaultChannelPipeline.java:1410)\n", + "\tat io.netty.channel.AbstractChannelHandlerContext.invokeChannelRead(AbstractChannelHandlerContext.java:379)\n", + "\tat io.netty.channel.AbstractChannelHandlerContext.invokeChannelRead(AbstractChannelHandlerContext.java:365)\n", + "\tat io.netty.channel.DefaultChannelPipeline.fireChannelRead(DefaultChannelPipeline.java:919)\n", + "\tat io.netty.channel.nio.AbstractNioByteChannel$NioByteUnsafe.read(AbstractNioByteChannel.java:163)\n", + "\tat io.netty.channel.nio.NioEventLoop.processSelectedKey(NioEventLoop.java:714)\n", + "\tat io.netty.channel.nio.NioEventLoop.processSelectedKeysOptimized(NioEventLoop.java:650)\n", + "\tat io.netty.channel.nio.NioEventLoop.processSelectedKeys(NioEventLoop.java:576)\n", + "\tat io.netty.channel.nio.NioEventLoop.run(NioEventLoop.java:493)\n", + "\tat io.netty.util.concurrent.SingleThreadEventExecutor$4.run(SingleThreadEventExecutor.java:989)\n", + "\tat io.netty.util.internal.ThreadExecutorMap$2.run(ThreadExecutorMap.java:74)\n", + "\tat io.netty.util.concurrent.FastThreadLocalRunnable.run(FastThreadLocalRunnable.java:30)\n", + "\t... 1 more\n", + "Caused by: java.lang.OutOfMemoryError: Java heap space\n", + "23/11/24 21:04:48 WARN org.apache.spark.scheduler.TaskSetManager: Lost task 546.0 in stage 19.0 (TID 2268) (hub-msca-bdp-dphub-students-test-ridhi-w-0.c.msca-bdp-student-ap.internal executor 3): TaskResultLost (result lost from block manager)\n", + "23/11/24 21:04:51 WARN org.apache.spark.storage.BlockManager: Failed to fetch block after 1 fetch failures. 
Most recent failure cause:\n", + "org.apache.spark.SparkException: Exception thrown in awaitResult: \n", + "\tat org.apache.spark.util.ThreadUtils$.awaitResult(ThreadUtils.scala:301)\n", + "\tat org.apache.spark.network.BlockTransferService.fetchBlockSync(BlockTransferService.scala:103)\n", + "\tat org.apache.spark.storage.BlockManager.fetchRemoteManagedBuffer(BlockManager.scala:1069)\n", + "\tat org.apache.spark.storage.BlockManager.$anonfun$getRemoteBlock$8(BlockManager.scala:1013)\n", + "\tat scala.Option.orElse(Option.scala:447)\n", + "\tat org.apache.spark.storage.BlockManager.getRemoteBlock(BlockManager.scala:1013)\n", + "\tat org.apache.spark.storage.BlockManager.getRemoteBytes(BlockManager.scala:1151)\n", + "\tat org.apache.spark.scheduler.TaskResultGetter$$anon$3.$anonfun$run$1(TaskResultGetter.scala:88)\n", + "\tat scala.runtime.java8.JFunction0$mcV$sp.apply(JFunction0$mcV$sp.java:23)\n", + "\tat org.apache.spark.util.Utils$.logUncaughtExceptions(Utils.scala:1996)\n", + "\tat org.apache.spark.scheduler.TaskResultGetter$$anon$3.run(TaskResultGetter.scala:63)\n", + "\tat java.util.concurrent.ThreadPoolExecutor.runWorker(ThreadPoolExecutor.java:1149)\n", + "\tat java.util.concurrent.ThreadPoolExecutor$Worker.run(ThreadPoolExecutor.java:624)\n", + "\tat java.lang.Thread.run(Thread.java:750)\n", + "Caused by: java.util.concurrent.ExecutionException: Boxed Error\n", + "\tat scala.concurrent.impl.Promise$.resolver(Promise.scala:87)\n", + "\tat scala.concurrent.impl.Promise$.scala$concurrent$impl$Promise$$resolveTry(Promise.scala:79)\n", + "\tat scala.concurrent.impl.Promise$DefaultPromise.tryComplete(Promise.scala:284)\n", + "\tat scala.concurrent.Promise.complete(Promise.scala:53)\n", + "\tat scala.concurrent.Promise.complete$(Promise.scala:52)\n", + "\tat scala.concurrent.impl.Promise$DefaultPromise.complete(Promise.scala:187)\n", + "\tat scala.concurrent.Promise.failure(Promise.scala:104)\n", + "\tat scala.concurrent.Promise.failure$(Promise.scala:104)\n", + "\tat scala.concurrent.impl.Promise$DefaultPromise.failure(Promise.scala:187)\n", + "\tat org.apache.spark.network.BlockTransferService$$anon$1.onBlockFetchSuccess(BlockTransferService.scala:98)\n", + "\tat org.apache.spark.network.shuffle.RetryingBlockFetcher$RetryingBlockFetchListener.onBlockFetchSuccess(RetryingBlockFetcher.java:216)\n", + "\tat org.apache.spark.network.shuffle.OneForOneBlockFetcher$ChunkCallback.onSuccess(OneForOneBlockFetcher.java:190)\n", + "\tat org.apache.spark.network.client.TransportResponseHandler.handle(TransportResponseHandler.java:171)\n", + "\tat org.apache.spark.network.server.TransportChannelHandler.channelRead0(TransportChannelHandler.java:142)\n", + "\tat org.apache.spark.network.server.TransportChannelHandler.channelRead0(TransportChannelHandler.java:53)\n", + "\tat io.netty.channel.SimpleChannelInboundHandler.channelRead(SimpleChannelInboundHandler.java:99)\n", + "\tat io.netty.channel.AbstractChannelHandlerContext.invokeChannelRead(AbstractChannelHandlerContext.java:379)\n", + "\tat io.netty.channel.AbstractChannelHandlerContext.invokeChannelRead(AbstractChannelHandlerContext.java:365)\n", + "\tat io.netty.channel.AbstractChannelHandlerContext.fireChannelRead(AbstractChannelHandlerContext.java:357)\n", + "\tat io.netty.handler.timeout.IdleStateHandler.channelRead(IdleStateHandler.java:286)\n", + "\tat io.netty.channel.AbstractChannelHandlerContext.invokeChannelRead(AbstractChannelHandlerContext.java:379)\n", + "\tat 
io.netty.channel.AbstractChannelHandlerContext.invokeChannelRead(AbstractChannelHandlerContext.java:365)\n", + "\tat io.netty.channel.AbstractChannelHandlerContext.fireChannelRead(AbstractChannelHandlerContext.java:357)\n", + "\tat io.netty.handler.codec.MessageToMessageDecoder.channelRead(MessageToMessageDecoder.java:103)\n", + "\tat io.netty.channel.AbstractChannelHandlerContext.invokeChannelRead(AbstractChannelHandlerContext.java:379)\n", + "\tat io.netty.channel.AbstractChannelHandlerContext.invokeChannelRead(AbstractChannelHandlerContext.java:365)\n", + "\tat io.netty.channel.AbstractChannelHandlerContext.fireChannelRead(AbstractChannelHandlerContext.java:357)\n", + "\tat org.apache.spark.network.util.TransportFrameDecoder.channelRead(TransportFrameDecoder.java:102)\n", + "\tat io.netty.channel.AbstractChannelHandlerContext.invokeChannelRead(AbstractChannelHandlerContext.java:379)\n", + "\tat io.netty.channel.AbstractChannelHandlerContext.invokeChannelRead(AbstractChannelHandlerContext.java:365)\n", + "\tat io.netty.channel.AbstractChannelHandlerContext.fireChannelRead(AbstractChannelHandlerContext.java:357)\n", + "\tat io.netty.channel.DefaultChannelPipeline$HeadContext.channelRead(DefaultChannelPipeline.java:1410)\n", + "\tat io.netty.channel.AbstractChannelHandlerContext.invokeChannelRead(AbstractChannelHandlerContext.java:379)\n", + "\tat io.netty.channel.AbstractChannelHandlerContext.invokeChannelRead(AbstractChannelHandlerContext.java:365)\n", + "\tat io.netty.channel.DefaultChannelPipeline.fireChannelRead(DefaultChannelPipeline.java:919)\n", + "\tat io.netty.channel.nio.AbstractNioByteChannel$NioByteUnsafe.read(AbstractNioByteChannel.java:163)\n", + "\tat io.netty.channel.nio.NioEventLoop.processSelectedKey(NioEventLoop.java:714)\n", + "\tat io.netty.channel.nio.NioEventLoop.processSelectedKeysOptimized(NioEventLoop.java:650)\n", + "\tat io.netty.channel.nio.NioEventLoop.processSelectedKeys(NioEventLoop.java:576)\n", + "\tat io.netty.channel.nio.NioEventLoop.run(NioEventLoop.java:493)\n", + "\tat io.netty.util.concurrent.SingleThreadEventExecutor$4.run(SingleThreadEventExecutor.java:989)\n", + "\tat io.netty.util.internal.ThreadExecutorMap$2.run(ThreadExecutorMap.java:74)\n", + "\tat io.netty.util.concurrent.FastThreadLocalRunnable.run(FastThreadLocalRunnable.java:30)\n", + "\t... 1 more\n", + "Caused by: java.lang.OutOfMemoryError: Java heap space\n", + "23/11/24 21:04:51 WARN org.apache.spark.scheduler.TaskSetManager: Lost task 539.0 in stage 19.0 (TID 2261) (hub-msca-bdp-dphub-students-test-ridhi-w-0.c.msca-bdp-student-ap.internal executor 6): TaskResultLost (result lost from block manager)\n", + "23/11/24 21:04:54 WARN org.apache.spark.storage.BlockManager: Failed to fetch block after 1 fetch failures. 
Most recent failure cause:\n", + "org.apache.spark.SparkException: Exception thrown in awaitResult: \n", + "\tat org.apache.spark.util.ThreadUtils$.awaitResult(ThreadUtils.scala:301)\n", + "\tat org.apache.spark.network.BlockTransferService.fetchBlockSync(BlockTransferService.scala:103)\n", + "\tat org.apache.spark.storage.BlockManager.fetchRemoteManagedBuffer(BlockManager.scala:1069)\n", + "\tat org.apache.spark.storage.BlockManager.$anonfun$getRemoteBlock$8(BlockManager.scala:1013)\n", + "\tat scala.Option.orElse(Option.scala:447)\n", + "\tat org.apache.spark.storage.BlockManager.getRemoteBlock(BlockManager.scala:1013)\n", + "\tat org.apache.spark.storage.BlockManager.getRemoteBytes(BlockManager.scala:1151)\n", + "\tat org.apache.spark.scheduler.TaskResultGetter$$anon$3.$anonfun$run$1(TaskResultGetter.scala:88)\n", + "\tat scala.runtime.java8.JFunction0$mcV$sp.apply(JFunction0$mcV$sp.java:23)\n", + "\tat org.apache.spark.util.Utils$.logUncaughtExceptions(Utils.scala:1996)\n", + "\tat org.apache.spark.scheduler.TaskResultGetter$$anon$3.run(TaskResultGetter.scala:63)\n", + "\tat java.util.concurrent.ThreadPoolExecutor.runWorker(ThreadPoolExecutor.java:1149)\n", + "\tat java.util.concurrent.ThreadPoolExecutor$Worker.run(ThreadPoolExecutor.java:624)\n", + "\tat java.lang.Thread.run(Thread.java:750)\n", + "Caused by: java.util.concurrent.ExecutionException: Boxed Error\n", + "\tat scala.concurrent.impl.Promise$.resolver(Promise.scala:87)\n", + "\tat scala.concurrent.impl.Promise$.scala$concurrent$impl$Promise$$resolveTry(Promise.scala:79)\n", + "\tat scala.concurrent.impl.Promise$DefaultPromise.tryComplete(Promise.scala:284)\n", + "\tat scala.concurrent.Promise.complete(Promise.scala:53)\n", + "\tat scala.concurrent.Promise.complete$(Promise.scala:52)\n", + "\tat scala.concurrent.impl.Promise$DefaultPromise.complete(Promise.scala:187)\n", + "\tat scala.concurrent.Promise.failure(Promise.scala:104)\n", + "\tat scala.concurrent.Promise.failure$(Promise.scala:104)\n", + "\tat scala.concurrent.impl.Promise$DefaultPromise.failure(Promise.scala:187)\n", + "\tat org.apache.spark.network.BlockTransferService$$anon$1.onBlockFetchSuccess(BlockTransferService.scala:98)\n", + "\tat org.apache.spark.network.shuffle.RetryingBlockFetcher$RetryingBlockFetchListener.onBlockFetchSuccess(RetryingBlockFetcher.java:216)\n", + "\tat org.apache.spark.network.shuffle.OneForOneBlockFetcher$ChunkCallback.onSuccess(OneForOneBlockFetcher.java:190)\n", + "\tat org.apache.spark.network.client.TransportResponseHandler.handle(TransportResponseHandler.java:171)\n", + "\tat org.apache.spark.network.server.TransportChannelHandler.channelRead0(TransportChannelHandler.java:142)\n", + "\tat org.apache.spark.network.server.TransportChannelHandler.channelRead0(TransportChannelHandler.java:53)\n", + "\tat io.netty.channel.SimpleChannelInboundHandler.channelRead(SimpleChannelInboundHandler.java:99)\n", + "\tat io.netty.channel.AbstractChannelHandlerContext.invokeChannelRead(AbstractChannelHandlerContext.java:379)\n", + "\tat io.netty.channel.AbstractChannelHandlerContext.invokeChannelRead(AbstractChannelHandlerContext.java:365)\n", + "\tat io.netty.channel.AbstractChannelHandlerContext.fireChannelRead(AbstractChannelHandlerContext.java:357)\n", + "\tat io.netty.handler.timeout.IdleStateHandler.channelRead(IdleStateHandler.java:286)\n", + "\tat io.netty.channel.AbstractChannelHandlerContext.invokeChannelRead(AbstractChannelHandlerContext.java:379)\n", + "\tat 
io.netty.channel.AbstractChannelHandlerContext.invokeChannelRead(AbstractChannelHandlerContext.java:365)\n", + "\tat io.netty.channel.AbstractChannelHandlerContext.fireChannelRead(AbstractChannelHandlerContext.java:357)\n", + "\tat io.netty.handler.codec.MessageToMessageDecoder.channelRead(MessageToMessageDecoder.java:103)\n", + "\tat io.netty.channel.AbstractChannelHandlerContext.invokeChannelRead(AbstractChannelHandlerContext.java:379)\n", + "\tat io.netty.channel.AbstractChannelHandlerContext.invokeChannelRead(AbstractChannelHandlerContext.java:365)\n", + "\tat io.netty.channel.AbstractChannelHandlerContext.fireChannelRead(AbstractChannelHandlerContext.java:357)\n", + "\tat org.apache.spark.network.util.TransportFrameDecoder.channelRead(TransportFrameDecoder.java:102)\n", + "\tat io.netty.channel.AbstractChannelHandlerContext.invokeChannelRead(AbstractChannelHandlerContext.java:379)\n", + "\tat io.netty.channel.AbstractChannelHandlerContext.invokeChannelRead(AbstractChannelHandlerContext.java:365)\n", + "\tat io.netty.channel.AbstractChannelHandlerContext.fireChannelRead(AbstractChannelHandlerContext.java:357)\n", + "\tat io.netty.channel.DefaultChannelPipeline$HeadContext.channelRead(DefaultChannelPipeline.java:1410)\n", + "\tat io.netty.channel.AbstractChannelHandlerContext.invokeChannelRead(AbstractChannelHandlerContext.java:379)\n", + "\tat io.netty.channel.AbstractChannelHandlerContext.invokeChannelRead(AbstractChannelHandlerContext.java:365)\n", + "\tat io.netty.channel.DefaultChannelPipeline.fireChannelRead(DefaultChannelPipeline.java:919)\n", + "\tat io.netty.channel.nio.AbstractNioByteChannel$NioByteUnsafe.read(AbstractNioByteChannel.java:163)\n", + "\tat io.netty.channel.nio.NioEventLoop.processSelectedKey(NioEventLoop.java:714)\n", + "\tat io.netty.channel.nio.NioEventLoop.processSelectedKeysOptimized(NioEventLoop.java:650)\n", + "\tat io.netty.channel.nio.NioEventLoop.processSelectedKeys(NioEventLoop.java:576)\n", + "\tat io.netty.channel.nio.NioEventLoop.run(NioEventLoop.java:493)\n", + "\tat io.netty.util.concurrent.SingleThreadEventExecutor$4.run(SingleThreadEventExecutor.java:989)\n", + "\tat io.netty.util.internal.ThreadExecutorMap$2.run(ThreadExecutorMap.java:74)\n", + "\tat io.netty.util.concurrent.FastThreadLocalRunnable.run(FastThreadLocalRunnable.java:30)\n", + "\t... 1 more\n", + "Caused by: java.lang.OutOfMemoryError: Java heap space\n", + "23/11/24 21:04:54 WARN org.apache.spark.scheduler.TaskSetManager: Lost task 548.0 in stage 19.0 (TID 2272) (hub-msca-bdp-dphub-students-test-ridhi-w-1.c.msca-bdp-student-ap.internal executor 7): TaskResultLost (result lost from block manager)\n", + "23/11/24 21:04:55 WARN org.apache.spark.storage.BlockManager: Failed to fetch block after 1 fetch failures. 
Most recent failure cause:\n", + "org.apache.spark.SparkException: Exception thrown in awaitResult: \n", + "\tat org.apache.spark.util.ThreadUtils$.awaitResult(ThreadUtils.scala:301)\n", + "\tat org.apache.spark.network.BlockTransferService.fetchBlockSync(BlockTransferService.scala:103)\n", + "\tat org.apache.spark.storage.BlockManager.fetchRemoteManagedBuffer(BlockManager.scala:1069)\n", + "\tat org.apache.spark.storage.BlockManager.$anonfun$getRemoteBlock$8(BlockManager.scala:1013)\n", + "\tat scala.Option.orElse(Option.scala:447)\n", + "\tat org.apache.spark.storage.BlockManager.getRemoteBlock(BlockManager.scala:1013)\n", + "\tat org.apache.spark.storage.BlockManager.getRemoteBytes(BlockManager.scala:1151)\n", + "\tat org.apache.spark.scheduler.TaskResultGetter$$anon$3.$anonfun$run$1(TaskResultGetter.scala:88)\n", + "\tat scala.runtime.java8.JFunction0$mcV$sp.apply(JFunction0$mcV$sp.java:23)\n", + "\tat org.apache.spark.util.Utils$.logUncaughtExceptions(Utils.scala:1996)\n", + "\tat org.apache.spark.scheduler.TaskResultGetter$$anon$3.run(TaskResultGetter.scala:63)\n", + "\tat java.util.concurrent.ThreadPoolExecutor.runWorker(ThreadPoolExecutor.java:1149)\n", + "\tat java.util.concurrent.ThreadPoolExecutor$Worker.run(ThreadPoolExecutor.java:624)\n", + "\tat java.lang.Thread.run(Thread.java:750)\n", + "Caused by: java.util.concurrent.ExecutionException: Boxed Error\n", + "\tat scala.concurrent.impl.Promise$.resolver(Promise.scala:87)\n", + "\tat scala.concurrent.impl.Promise$.scala$concurrent$impl$Promise$$resolveTry(Promise.scala:79)\n", + "\tat scala.concurrent.impl.Promise$DefaultPromise.tryComplete(Promise.scala:284)\n", + "\tat scala.concurrent.Promise.complete(Promise.scala:53)\n", + "\tat scala.concurrent.Promise.complete$(Promise.scala:52)\n", + "\tat scala.concurrent.impl.Promise$DefaultPromise.complete(Promise.scala:187)\n", + "\tat scala.concurrent.Promise.failure(Promise.scala:104)\n", + "\tat scala.concurrent.Promise.failure$(Promise.scala:104)\n", + "\tat scala.concurrent.impl.Promise$DefaultPromise.failure(Promise.scala:187)\n", + "\tat org.apache.spark.network.BlockTransferService$$anon$1.onBlockFetchSuccess(BlockTransferService.scala:98)\n", + "\tat org.apache.spark.network.shuffle.RetryingBlockFetcher$RetryingBlockFetchListener.onBlockFetchSuccess(RetryingBlockFetcher.java:216)\n", + "\tat org.apache.spark.network.shuffle.OneForOneBlockFetcher$ChunkCallback.onSuccess(OneForOneBlockFetcher.java:190)\n", + "\tat org.apache.spark.network.client.TransportResponseHandler.handle(TransportResponseHandler.java:171)\n", + "\tat org.apache.spark.network.server.TransportChannelHandler.channelRead0(TransportChannelHandler.java:142)\n", + "\tat org.apache.spark.network.server.TransportChannelHandler.channelRead0(TransportChannelHandler.java:53)\n", + "\tat io.netty.channel.SimpleChannelInboundHandler.channelRead(SimpleChannelInboundHandler.java:99)\n", + "\tat io.netty.channel.AbstractChannelHandlerContext.invokeChannelRead(AbstractChannelHandlerContext.java:379)\n", + "\tat io.netty.channel.AbstractChannelHandlerContext.invokeChannelRead(AbstractChannelHandlerContext.java:365)\n", + "\tat io.netty.channel.AbstractChannelHandlerContext.fireChannelRead(AbstractChannelHandlerContext.java:357)\n", + "\tat io.netty.handler.timeout.IdleStateHandler.channelRead(IdleStateHandler.java:286)\n", + "\tat io.netty.channel.AbstractChannelHandlerContext.invokeChannelRead(AbstractChannelHandlerContext.java:379)\n", + "\tat 
io.netty.channel.AbstractChannelHandlerContext.invokeChannelRead(AbstractChannelHandlerContext.java:365)\n", + "\tat io.netty.channel.AbstractChannelHandlerContext.fireChannelRead(AbstractChannelHandlerContext.java:357)\n", + "\tat io.netty.handler.codec.MessageToMessageDecoder.channelRead(MessageToMessageDecoder.java:103)\n", + "\tat io.netty.channel.AbstractChannelHandlerContext.invokeChannelRead(AbstractChannelHandlerContext.java:379)\n", + "\tat io.netty.channel.AbstractChannelHandlerContext.invokeChannelRead(AbstractChannelHandlerContext.java:365)\n", + "\tat io.netty.channel.AbstractChannelHandlerContext.fireChannelRead(AbstractChannelHandlerContext.java:357)\n", + "\tat org.apache.spark.network.util.TransportFrameDecoder.channelRead(TransportFrameDecoder.java:102)\n", + "\tat io.netty.channel.AbstractChannelHandlerContext.invokeChannelRead(AbstractChannelHandlerContext.java:379)\n", + "\tat io.netty.channel.AbstractChannelHandlerContext.invokeChannelRead(AbstractChannelHandlerContext.java:365)\n", + "\tat io.netty.channel.AbstractChannelHandlerContext.fireChannelRead(AbstractChannelHandlerContext.java:357)\n", + "\tat io.netty.channel.DefaultChannelPipeline$HeadContext.channelRead(DefaultChannelPipeline.java:1410)\n", + "\tat io.netty.channel.AbstractChannelHandlerContext.invokeChannelRead(AbstractChannelHandlerContext.java:379)\n", + "\tat io.netty.channel.AbstractChannelHandlerContext.invokeChannelRead(AbstractChannelHandlerContext.java:365)\n", + "\tat io.netty.channel.DefaultChannelPipeline.fireChannelRead(DefaultChannelPipeline.java:919)\n", + "\tat io.netty.channel.nio.AbstractNioByteChannel$NioByteUnsafe.read(AbstractNioByteChannel.java:163)\n", + "\tat io.netty.channel.nio.NioEventLoop.processSelectedKey(NioEventLoop.java:714)\n", + "\tat io.netty.channel.nio.NioEventLoop.processSelectedKeysOptimized(NioEventLoop.java:650)\n", + "\tat io.netty.channel.nio.NioEventLoop.processSelectedKeys(NioEventLoop.java:576)\n", + "\tat io.netty.channel.nio.NioEventLoop.run(NioEventLoop.java:493)\n", + "\tat io.netty.util.concurrent.SingleThreadEventExecutor$4.run(SingleThreadEventExecutor.java:989)\n", + "\tat io.netty.util.internal.ThreadExecutorMap$2.run(ThreadExecutorMap.java:74)\n", + "\tat io.netty.util.concurrent.FastThreadLocalRunnable.run(FastThreadLocalRunnable.java:30)\n", + "\t... 1 more\n", + "Caused by: java.lang.OutOfMemoryError: Java heap space\n", + "23/11/24 21:04:55 WARN org.apache.spark.scheduler.TaskSetManager: Lost task 542.0 in stage 19.0 (TID 2264) (hub-msca-bdp-dphub-students-test-ridhi-w-0.c.msca-bdp-student-ap.internal executor 6): TaskResultLost (result lost from block manager)\n", + "23/11/24 21:04:58 WARN org.apache.spark.storage.BlockManager: Failed to fetch block after 1 fetch failures. 
Most recent failure cause:\n", + "org.apache.spark.SparkException: Exception thrown in awaitResult: \n", + "\tat org.apache.spark.util.ThreadUtils$.awaitResult(ThreadUtils.scala:301)\n", + "\tat org.apache.spark.network.BlockTransferService.fetchBlockSync(BlockTransferService.scala:103)\n", + "\tat org.apache.spark.storage.BlockManager.fetchRemoteManagedBuffer(BlockManager.scala:1069)\n", + "\tat org.apache.spark.storage.BlockManager.$anonfun$getRemoteBlock$8(BlockManager.scala:1013)\n", + "\tat scala.Option.orElse(Option.scala:447)\n", + "\tat org.apache.spark.storage.BlockManager.getRemoteBlock(BlockManager.scala:1013)\n", + "\tat org.apache.spark.storage.BlockManager.getRemoteBytes(BlockManager.scala:1151)\n", + "\tat org.apache.spark.scheduler.TaskResultGetter$$anon$3.$anonfun$run$1(TaskResultGetter.scala:88)\n", + "\tat scala.runtime.java8.JFunction0$mcV$sp.apply(JFunction0$mcV$sp.java:23)\n", + "\tat org.apache.spark.util.Utils$.logUncaughtExceptions(Utils.scala:1996)\n", + "\tat org.apache.spark.scheduler.TaskResultGetter$$anon$3.run(TaskResultGetter.scala:63)\n", + "\tat java.util.concurrent.ThreadPoolExecutor.runWorker(ThreadPoolExecutor.java:1149)\n", + "\tat java.util.concurrent.ThreadPoolExecutor$Worker.run(ThreadPoolExecutor.java:624)\n", + "\tat java.lang.Thread.run(Thread.java:750)\n", + "Caused by: java.util.concurrent.ExecutionException: Boxed Error\n", + "\tat scala.concurrent.impl.Promise$.resolver(Promise.scala:87)\n", + "\tat scala.concurrent.impl.Promise$.scala$concurrent$impl$Promise$$resolveTry(Promise.scala:79)\n", + "\tat scala.concurrent.impl.Promise$DefaultPromise.tryComplete(Promise.scala:284)\n", + "\tat scala.concurrent.Promise.complete(Promise.scala:53)\n", + "\tat scala.concurrent.Promise.complete$(Promise.scala:52)\n", + "\tat scala.concurrent.impl.Promise$DefaultPromise.complete(Promise.scala:187)\n", + "\tat scala.concurrent.Promise.failure(Promise.scala:104)\n", + "\tat scala.concurrent.Promise.failure$(Promise.scala:104)\n", + "\tat scala.concurrent.impl.Promise$DefaultPromise.failure(Promise.scala:187)\n", + "\tat org.apache.spark.network.BlockTransferService$$anon$1.onBlockFetchSuccess(BlockTransferService.scala:98)\n", + "\tat org.apache.spark.network.shuffle.RetryingBlockFetcher$RetryingBlockFetchListener.onBlockFetchSuccess(RetryingBlockFetcher.java:216)\n", + "\tat org.apache.spark.network.shuffle.OneForOneBlockFetcher$ChunkCallback.onSuccess(OneForOneBlockFetcher.java:190)\n", + "\tat org.apache.spark.network.client.TransportResponseHandler.handle(TransportResponseHandler.java:171)\n", + "\tat org.apache.spark.network.server.TransportChannelHandler.channelRead0(TransportChannelHandler.java:142)\n", + "\tat org.apache.spark.network.server.TransportChannelHandler.channelRead0(TransportChannelHandler.java:53)\n", + "\tat io.netty.channel.SimpleChannelInboundHandler.channelRead(SimpleChannelInboundHandler.java:99)\n", + "\tat io.netty.channel.AbstractChannelHandlerContext.invokeChannelRead(AbstractChannelHandlerContext.java:379)\n", + "\tat io.netty.channel.AbstractChannelHandlerContext.invokeChannelRead(AbstractChannelHandlerContext.java:365)\n", + "\tat io.netty.channel.AbstractChannelHandlerContext.fireChannelRead(AbstractChannelHandlerContext.java:357)\n", + "\tat io.netty.handler.timeout.IdleStateHandler.channelRead(IdleStateHandler.java:286)\n", + "\tat io.netty.channel.AbstractChannelHandlerContext.invokeChannelRead(AbstractChannelHandlerContext.java:379)\n", + "\tat 
io.netty.channel.AbstractChannelHandlerContext.invokeChannelRead(AbstractChannelHandlerContext.java:365)\n", + "\tat io.netty.channel.AbstractChannelHandlerContext.fireChannelRead(AbstractChannelHandlerContext.java:357)\n", + "\tat io.netty.handler.codec.MessageToMessageDecoder.channelRead(MessageToMessageDecoder.java:103)\n", + "\tat io.netty.channel.AbstractChannelHandlerContext.invokeChannelRead(AbstractChannelHandlerContext.java:379)\n", + "\tat io.netty.channel.AbstractChannelHandlerContext.invokeChannelRead(AbstractChannelHandlerContext.java:365)\n", + "\tat io.netty.channel.AbstractChannelHandlerContext.fireChannelRead(AbstractChannelHandlerContext.java:357)\n", + "\tat org.apache.spark.network.util.TransportFrameDecoder.channelRead(TransportFrameDecoder.java:102)\n", + "\tat io.netty.channel.AbstractChannelHandlerContext.invokeChannelRead(AbstractChannelHandlerContext.java:379)\n", + "\tat io.netty.channel.AbstractChannelHandlerContext.invokeChannelRead(AbstractChannelHandlerContext.java:365)\n", + "\tat io.netty.channel.AbstractChannelHandlerContext.fireChannelRead(AbstractChannelHandlerContext.java:357)\n", + "\tat io.netty.channel.DefaultChannelPipeline$HeadContext.channelRead(DefaultChannelPipeline.java:1410)\n", + "\tat io.netty.channel.AbstractChannelHandlerContext.invokeChannelRead(AbstractChannelHandlerContext.java:379)\n", + "\tat io.netty.channel.AbstractChannelHandlerContext.invokeChannelRead(AbstractChannelHandlerContext.java:365)\n", + "\tat io.netty.channel.DefaultChannelPipeline.fireChannelRead(DefaultChannelPipeline.java:919)\n", + "\tat io.netty.channel.nio.AbstractNioByteChannel$NioByteUnsafe.read(AbstractNioByteChannel.java:163)\n", + "\tat io.netty.channel.nio.NioEventLoop.processSelectedKey(NioEventLoop.java:714)\n", + "\tat io.netty.channel.nio.NioEventLoop.processSelectedKeysOptimized(NioEventLoop.java:650)\n", + "\tat io.netty.channel.nio.NioEventLoop.processSelectedKeys(NioEventLoop.java:576)\n", + "\tat io.netty.channel.nio.NioEventLoop.run(NioEventLoop.java:493)\n", + "\tat io.netty.util.concurrent.SingleThreadEventExecutor$4.run(SingleThreadEventExecutor.java:989)\n", + "\tat io.netty.util.internal.ThreadExecutorMap$2.run(ThreadExecutorMap.java:74)\n", + "\tat io.netty.util.concurrent.FastThreadLocalRunnable.run(FastThreadLocalRunnable.java:30)\n", + "\t... 1 more\n", + "Caused by: java.lang.OutOfMemoryError: Java heap space\n", + "23/11/24 21:04:58 WARN org.apache.spark.scheduler.TaskSetManager: Lost task 547.0 in stage 19.0 (TID 2271) (hub-msca-bdp-dphub-students-test-ridhi-w-1.c.msca-bdp-student-ap.internal executor 7): TaskResultLost (result lost from block manager)\n", + "23/11/24 21:04:59 WARN org.apache.spark.storage.BlockManager: Failed to fetch block after 1 fetch failures. 
Most recent failure cause:\n", + "org.apache.spark.SparkException: Exception thrown in awaitResult: \n", + "\tat org.apache.spark.util.ThreadUtils$.awaitResult(ThreadUtils.scala:301)\n", + "\tat org.apache.spark.network.BlockTransferService.fetchBlockSync(BlockTransferService.scala:103)\n", + "\tat org.apache.spark.storage.BlockManager.fetchRemoteManagedBuffer(BlockManager.scala:1069)\n", + "\tat org.apache.spark.storage.BlockManager.$anonfun$getRemoteBlock$8(BlockManager.scala:1013)\n", + "\tat scala.Option.orElse(Option.scala:447)\n", + "\tat org.apache.spark.storage.BlockManager.getRemoteBlock(BlockManager.scala:1013)\n", + "\tat org.apache.spark.storage.BlockManager.getRemoteBytes(BlockManager.scala:1151)\n", + "\tat org.apache.spark.scheduler.TaskResultGetter$$anon$3.$anonfun$run$1(TaskResultGetter.scala:88)\n", + "\tat scala.runtime.java8.JFunction0$mcV$sp.apply(JFunction0$mcV$sp.java:23)\n", + "\tat org.apache.spark.util.Utils$.logUncaughtExceptions(Utils.scala:1996)\n", + "\tat org.apache.spark.scheduler.TaskResultGetter$$anon$3.run(TaskResultGetter.scala:63)\n", + "\tat java.util.concurrent.ThreadPoolExecutor.runWorker(ThreadPoolExecutor.java:1149)\n", + "\tat java.util.concurrent.ThreadPoolExecutor$Worker.run(ThreadPoolExecutor.java:624)\n", + "\tat java.lang.Thread.run(Thread.java:750)\n", + "Caused by: java.util.concurrent.ExecutionException: Boxed Error\n", + "\tat scala.concurrent.impl.Promise$.resolver(Promise.scala:87)\n", + "\tat scala.concurrent.impl.Promise$.scala$concurrent$impl$Promise$$resolveTry(Promise.scala:79)\n", + "\tat scala.concurrent.impl.Promise$DefaultPromise.tryComplete(Promise.scala:284)\n", + "\tat scala.concurrent.Promise.complete(Promise.scala:53)\n", + "\tat scala.concurrent.Promise.complete$(Promise.scala:52)\n", + "\tat scala.concurrent.impl.Promise$DefaultPromise.complete(Promise.scala:187)\n", + "\tat scala.concurrent.Promise.failure(Promise.scala:104)\n", + "\tat scala.concurrent.Promise.failure$(Promise.scala:104)\n", + "\tat scala.concurrent.impl.Promise$DefaultPromise.failure(Promise.scala:187)\n", + "\tat org.apache.spark.network.BlockTransferService$$anon$1.onBlockFetchSuccess(BlockTransferService.scala:98)\n", + "\tat org.apache.spark.network.shuffle.RetryingBlockFetcher$RetryingBlockFetchListener.onBlockFetchSuccess(RetryingBlockFetcher.java:216)\n", + "\tat org.apache.spark.network.shuffle.OneForOneBlockFetcher$ChunkCallback.onSuccess(OneForOneBlockFetcher.java:190)\n", + "\tat org.apache.spark.network.client.TransportResponseHandler.handle(TransportResponseHandler.java:171)\n", + "\tat org.apache.spark.network.server.TransportChannelHandler.channelRead0(TransportChannelHandler.java:142)\n", + "\tat org.apache.spark.network.server.TransportChannelHandler.channelRead0(TransportChannelHandler.java:53)\n", + "\tat io.netty.channel.SimpleChannelInboundHandler.channelRead(SimpleChannelInboundHandler.java:99)\n", + "\tat io.netty.channel.AbstractChannelHandlerContext.invokeChannelRead(AbstractChannelHandlerContext.java:379)\n", + "\tat io.netty.channel.AbstractChannelHandlerContext.invokeChannelRead(AbstractChannelHandlerContext.java:365)\n", + "\tat io.netty.channel.AbstractChannelHandlerContext.fireChannelRead(AbstractChannelHandlerContext.java:357)\n", + "\tat io.netty.handler.timeout.IdleStateHandler.channelRead(IdleStateHandler.java:286)\n", + "\tat io.netty.channel.AbstractChannelHandlerContext.invokeChannelRead(AbstractChannelHandlerContext.java:379)\n", + "\tat 
io.netty.channel.AbstractChannelHandlerContext.invokeChannelRead(AbstractChannelHandlerContext.java:365)\n", + "\tat io.netty.channel.AbstractChannelHandlerContext.fireChannelRead(AbstractChannelHandlerContext.java:357)\n", + "\tat io.netty.handler.codec.MessageToMessageDecoder.channelRead(MessageToMessageDecoder.java:103)\n", + "\tat io.netty.channel.AbstractChannelHandlerContext.invokeChannelRead(AbstractChannelHandlerContext.java:379)\n", + "\tat io.netty.channel.AbstractChannelHandlerContext.invokeChannelRead(AbstractChannelHandlerContext.java:365)\n", + "\tat io.netty.channel.AbstractChannelHandlerContext.fireChannelRead(AbstractChannelHandlerContext.java:357)\n", + "\tat org.apache.spark.network.util.TransportFrameDecoder.channelRead(TransportFrameDecoder.java:102)\n", + "\tat io.netty.channel.AbstractChannelHandlerContext.invokeChannelRead(AbstractChannelHandlerContext.java:379)\n", + "\tat io.netty.channel.AbstractChannelHandlerContext.invokeChannelRead(AbstractChannelHandlerContext.java:365)\n", + "\tat io.netty.channel.AbstractChannelHandlerContext.fireChannelRead(AbstractChannelHandlerContext.java:357)\n", + "\tat io.netty.channel.DefaultChannelPipeline$HeadContext.channelRead(DefaultChannelPipeline.java:1410)\n", + "\tat io.netty.channel.AbstractChannelHandlerContext.invokeChannelRead(AbstractChannelHandlerContext.java:379)\n", + "\tat io.netty.channel.AbstractChannelHandlerContext.invokeChannelRead(AbstractChannelHandlerContext.java:365)\n", + "\tat io.netty.channel.DefaultChannelPipeline.fireChannelRead(DefaultChannelPipeline.java:919)\n", + "\tat io.netty.channel.nio.AbstractNioByteChannel$NioByteUnsafe.read(AbstractNioByteChannel.java:163)\n", + "\tat io.netty.channel.nio.NioEventLoop.processSelectedKey(NioEventLoop.java:714)\n", + "\tat io.netty.channel.nio.NioEventLoop.processSelectedKeysOptimized(NioEventLoop.java:650)\n", + "\tat io.netty.channel.nio.NioEventLoop.processSelectedKeys(NioEventLoop.java:576)\n", + "\tat io.netty.channel.nio.NioEventLoop.run(NioEventLoop.java:493)\n", + "\tat io.netty.util.concurrent.SingleThreadEventExecutor$4.run(SingleThreadEventExecutor.java:989)\n", + "\tat io.netty.util.internal.ThreadExecutorMap$2.run(ThreadExecutorMap.java:74)\n", + "\tat io.netty.util.concurrent.FastThreadLocalRunnable.run(FastThreadLocalRunnable.java:30)\n", + "\t... 1 more\n", + "Caused by: java.lang.OutOfMemoryError: Java heap space\n", + "23/11/24 21:04:59 WARN org.apache.spark.scheduler.TaskSetManager: Lost task 541.0 in stage 19.0 (TID 2263) (hub-msca-bdp-dphub-students-test-ridhi-w-0.c.msca-bdp-student-ap.internal executor 6): TaskResultLost (result lost from block manager)\n", + "23/11/24 21:05:02 WARN org.apache.spark.storage.BlockManager: Failed to fetch block after 1 fetch failures. 
Most recent failure cause:\n", + "org.apache.spark.SparkException: Exception thrown in awaitResult: \n", + "\tat org.apache.spark.util.ThreadUtils$.awaitResult(ThreadUtils.scala:301)\n", + "\tat org.apache.spark.network.BlockTransferService.fetchBlockSync(BlockTransferService.scala:103)\n", + "\tat org.apache.spark.storage.BlockManager.fetchRemoteManagedBuffer(BlockManager.scala:1069)\n", + "\tat org.apache.spark.storage.BlockManager.$anonfun$getRemoteBlock$8(BlockManager.scala:1013)\n", + "\tat scala.Option.orElse(Option.scala:447)\n", + "\tat org.apache.spark.storage.BlockManager.getRemoteBlock(BlockManager.scala:1013)\n", + "\tat org.apache.spark.storage.BlockManager.getRemoteBytes(BlockManager.scala:1151)\n", + "\tat org.apache.spark.scheduler.TaskResultGetter$$anon$3.$anonfun$run$1(TaskResultGetter.scala:88)\n", + "\tat scala.runtime.java8.JFunction0$mcV$sp.apply(JFunction0$mcV$sp.java:23)\n", + "\tat org.apache.spark.util.Utils$.logUncaughtExceptions(Utils.scala:1996)\n", + "\tat org.apache.spark.scheduler.TaskResultGetter$$anon$3.run(TaskResultGetter.scala:63)\n", + "\tat java.util.concurrent.ThreadPoolExecutor.runWorker(ThreadPoolExecutor.java:1149)\n", + "\tat java.util.concurrent.ThreadPoolExecutor$Worker.run(ThreadPoolExecutor.java:624)\n", + "\tat java.lang.Thread.run(Thread.java:750)\n", + "Caused by: java.util.concurrent.ExecutionException: Boxed Error\n", + "\tat scala.concurrent.impl.Promise$.resolver(Promise.scala:87)\n", + "\tat scala.concurrent.impl.Promise$.scala$concurrent$impl$Promise$$resolveTry(Promise.scala:79)\n", + "\tat scala.concurrent.impl.Promise$DefaultPromise.tryComplete(Promise.scala:284)\n", + "\tat scala.concurrent.Promise.complete(Promise.scala:53)\n", + "\tat scala.concurrent.Promise.complete$(Promise.scala:52)\n", + "\tat scala.concurrent.impl.Promise$DefaultPromise.complete(Promise.scala:187)\n", + "\tat scala.concurrent.Promise.failure(Promise.scala:104)\n", + "\tat scala.concurrent.Promise.failure$(Promise.scala:104)\n", + "\tat scala.concurrent.impl.Promise$DefaultPromise.failure(Promise.scala:187)\n", + "\tat org.apache.spark.network.BlockTransferService$$anon$1.onBlockFetchSuccess(BlockTransferService.scala:98)\n", + "\tat org.apache.spark.network.shuffle.RetryingBlockFetcher$RetryingBlockFetchListener.onBlockFetchSuccess(RetryingBlockFetcher.java:216)\n", + "\tat org.apache.spark.network.shuffle.OneForOneBlockFetcher$ChunkCallback.onSuccess(OneForOneBlockFetcher.java:190)\n", + "\tat org.apache.spark.network.client.TransportResponseHandler.handle(TransportResponseHandler.java:171)\n", + "\tat org.apache.spark.network.server.TransportChannelHandler.channelRead0(TransportChannelHandler.java:142)\n", + "\tat org.apache.spark.network.server.TransportChannelHandler.channelRead0(TransportChannelHandler.java:53)\n", + "\tat io.netty.channel.SimpleChannelInboundHandler.channelRead(SimpleChannelInboundHandler.java:99)\n", + "\tat io.netty.channel.AbstractChannelHandlerContext.invokeChannelRead(AbstractChannelHandlerContext.java:379)\n", + "\tat io.netty.channel.AbstractChannelHandlerContext.invokeChannelRead(AbstractChannelHandlerContext.java:365)\n", + "\tat io.netty.channel.AbstractChannelHandlerContext.fireChannelRead(AbstractChannelHandlerContext.java:357)\n", + "\tat io.netty.handler.timeout.IdleStateHandler.channelRead(IdleStateHandler.java:286)\n", + "\tat io.netty.channel.AbstractChannelHandlerContext.invokeChannelRead(AbstractChannelHandlerContext.java:379)\n", + "\tat 
io.netty.channel.AbstractChannelHandlerContext.invokeChannelRead(AbstractChannelHandlerContext.java:365)\n", + "\tat io.netty.channel.AbstractChannelHandlerContext.fireChannelRead(AbstractChannelHandlerContext.java:357)\n", + "\tat io.netty.handler.codec.MessageToMessageDecoder.channelRead(MessageToMessageDecoder.java:103)\n", + "\tat io.netty.channel.AbstractChannelHandlerContext.invokeChannelRead(AbstractChannelHandlerContext.java:379)\n", + "\tat io.netty.channel.AbstractChannelHandlerContext.invokeChannelRead(AbstractChannelHandlerContext.java:365)\n", + "\tat io.netty.channel.AbstractChannelHandlerContext.fireChannelRead(AbstractChannelHandlerContext.java:357)\n", + "\tat org.apache.spark.network.util.TransportFrameDecoder.channelRead(TransportFrameDecoder.java:102)\n", + "\tat io.netty.channel.AbstractChannelHandlerContext.invokeChannelRead(AbstractChannelHandlerContext.java:379)\n", + "\tat io.netty.channel.AbstractChannelHandlerContext.invokeChannelRead(AbstractChannelHandlerContext.java:365)\n", + "\tat io.netty.channel.AbstractChannelHandlerContext.fireChannelRead(AbstractChannelHandlerContext.java:357)\n", + "\tat io.netty.channel.DefaultChannelPipeline$HeadContext.channelRead(DefaultChannelPipeline.java:1410)\n", + "\tat io.netty.channel.AbstractChannelHandlerContext.invokeChannelRead(AbstractChannelHandlerContext.java:379)\n", + "\tat io.netty.channel.AbstractChannelHandlerContext.invokeChannelRead(AbstractChannelHandlerContext.java:365)\n", + "\tat io.netty.channel.DefaultChannelPipeline.fireChannelRead(DefaultChannelPipeline.java:919)\n", + "\tat io.netty.channel.nio.AbstractNioByteChannel$NioByteUnsafe.read(AbstractNioByteChannel.java:163)\n", + "\tat io.netty.channel.nio.NioEventLoop.processSelectedKey(NioEventLoop.java:714)\n", + "\tat io.netty.channel.nio.NioEventLoop.processSelectedKeysOptimized(NioEventLoop.java:650)\n", + "\tat io.netty.channel.nio.NioEventLoop.processSelectedKeys(NioEventLoop.java:576)\n", + "\tat io.netty.channel.nio.NioEventLoop.run(NioEventLoop.java:493)\n", + "\tat io.netty.util.concurrent.SingleThreadEventExecutor$4.run(SingleThreadEventExecutor.java:989)\n", + "\tat io.netty.util.internal.ThreadExecutorMap$2.run(ThreadExecutorMap.java:74)\n", + "\tat io.netty.util.concurrent.FastThreadLocalRunnable.run(FastThreadLocalRunnable.java:30)\n", + "\t... 1 more\n", + "Caused by: java.lang.OutOfMemoryError: Java heap space\n", + "23/11/24 21:05:02 WARN org.apache.spark.scheduler.TaskSetManager: Lost task 554.0 in stage 19.0 (TID 2280) (hub-msca-bdp-dphub-students-test-ridhi-w-1.c.msca-bdp-student-ap.internal executor 7): TaskResultLost (result lost from block manager)\n", + "23/11/24 21:05:03 WARN org.apache.spark.storage.BlockManager: Failed to fetch block after 1 fetch failures. 
Most recent failure cause:\n", + "org.apache.spark.SparkException: Exception thrown in awaitResult: \n", + "\tat org.apache.spark.util.ThreadUtils$.awaitResult(ThreadUtils.scala:301)\n", + "\tat org.apache.spark.network.BlockTransferService.fetchBlockSync(BlockTransferService.scala:103)\n", + "\tat org.apache.spark.storage.BlockManager.fetchRemoteManagedBuffer(BlockManager.scala:1069)\n", + "\tat org.apache.spark.storage.BlockManager.$anonfun$getRemoteBlock$8(BlockManager.scala:1013)\n", + "\tat scala.Option.orElse(Option.scala:447)\n", + "\tat org.apache.spark.storage.BlockManager.getRemoteBlock(BlockManager.scala:1013)\n", + "\tat org.apache.spark.storage.BlockManager.getRemoteBytes(BlockManager.scala:1151)\n", + "\tat org.apache.spark.scheduler.TaskResultGetter$$anon$3.$anonfun$run$1(TaskResultGetter.scala:88)\n", + "\tat scala.runtime.java8.JFunction0$mcV$sp.apply(JFunction0$mcV$sp.java:23)\n", + "\tat org.apache.spark.util.Utils$.logUncaughtExceptions(Utils.scala:1996)\n", + "\tat org.apache.spark.scheduler.TaskResultGetter$$anon$3.run(TaskResultGetter.scala:63)\n", + "\tat java.util.concurrent.ThreadPoolExecutor.runWorker(ThreadPoolExecutor.java:1149)\n", + "\tat java.util.concurrent.ThreadPoolExecutor$Worker.run(ThreadPoolExecutor.java:624)\n", + "\tat java.lang.Thread.run(Thread.java:750)\n", + "Caused by: java.util.concurrent.ExecutionException: Boxed Error\n", + "\tat scala.concurrent.impl.Promise$.resolver(Promise.scala:87)\n", + "\tat scala.concurrent.impl.Promise$.scala$concurrent$impl$Promise$$resolveTry(Promise.scala:79)\n", + "\tat scala.concurrent.impl.Promise$DefaultPromise.tryComplete(Promise.scala:284)\n", + "\tat scala.concurrent.Promise.complete(Promise.scala:53)\n", + "\tat scala.concurrent.Promise.complete$(Promise.scala:52)\n", + "\tat scala.concurrent.impl.Promise$DefaultPromise.complete(Promise.scala:187)\n", + "\tat scala.concurrent.Promise.failure(Promise.scala:104)\n", + "\tat scala.concurrent.Promise.failure$(Promise.scala:104)\n", + "\tat scala.concurrent.impl.Promise$DefaultPromise.failure(Promise.scala:187)\n", + "\tat org.apache.spark.network.BlockTransferService$$anon$1.onBlockFetchSuccess(BlockTransferService.scala:98)\n", + "\tat org.apache.spark.network.shuffle.RetryingBlockFetcher$RetryingBlockFetchListener.onBlockFetchSuccess(RetryingBlockFetcher.java:216)\n", + "\tat org.apache.spark.network.shuffle.OneForOneBlockFetcher$ChunkCallback.onSuccess(OneForOneBlockFetcher.java:190)\n", + "\tat org.apache.spark.network.client.TransportResponseHandler.handle(TransportResponseHandler.java:171)\n", + "\tat org.apache.spark.network.server.TransportChannelHandler.channelRead0(TransportChannelHandler.java:142)\n", + "\tat org.apache.spark.network.server.TransportChannelHandler.channelRead0(TransportChannelHandler.java:53)\n", + "\tat io.netty.channel.SimpleChannelInboundHandler.channelRead(SimpleChannelInboundHandler.java:99)\n", + "\tat io.netty.channel.AbstractChannelHandlerContext.invokeChannelRead(AbstractChannelHandlerContext.java:379)\n", + "\tat io.netty.channel.AbstractChannelHandlerContext.invokeChannelRead(AbstractChannelHandlerContext.java:365)\n", + "\tat io.netty.channel.AbstractChannelHandlerContext.fireChannelRead(AbstractChannelHandlerContext.java:357)\n", + "\tat io.netty.handler.timeout.IdleStateHandler.channelRead(IdleStateHandler.java:286)\n", + "\tat io.netty.channel.AbstractChannelHandlerContext.invokeChannelRead(AbstractChannelHandlerContext.java:379)\n", + "\tat 
io.netty.channel.AbstractChannelHandlerContext.invokeChannelRead(AbstractChannelHandlerContext.java:365)\n", + "\tat io.netty.channel.AbstractChannelHandlerContext.fireChannelRead(AbstractChannelHandlerContext.java:357)\n", + "\tat io.netty.handler.codec.MessageToMessageDecoder.channelRead(MessageToMessageDecoder.java:103)\n", + "\tat io.netty.channel.AbstractChannelHandlerContext.invokeChannelRead(AbstractChannelHandlerContext.java:379)\n", + "\tat io.netty.channel.AbstractChannelHandlerContext.invokeChannelRead(AbstractChannelHandlerContext.java:365)\n", + "\tat io.netty.channel.AbstractChannelHandlerContext.fireChannelRead(AbstractChannelHandlerContext.java:357)\n", + "\tat org.apache.spark.network.util.TransportFrameDecoder.channelRead(TransportFrameDecoder.java:102)\n", + "\tat io.netty.channel.AbstractChannelHandlerContext.invokeChannelRead(AbstractChannelHandlerContext.java:379)\n", + "\tat io.netty.channel.AbstractChannelHandlerContext.invokeChannelRead(AbstractChannelHandlerContext.java:365)\n", + "\tat io.netty.channel.AbstractChannelHandlerContext.fireChannelRead(AbstractChannelHandlerContext.java:357)\n", + "\tat io.netty.channel.DefaultChannelPipeline$HeadContext.channelRead(DefaultChannelPipeline.java:1410)\n", + "\tat io.netty.channel.AbstractChannelHandlerContext.invokeChannelRead(AbstractChannelHandlerContext.java:379)\n", + "\tat io.netty.channel.AbstractChannelHandlerContext.invokeChannelRead(AbstractChannelHandlerContext.java:365)\n", + "\tat io.netty.channel.DefaultChannelPipeline.fireChannelRead(DefaultChannelPipeline.java:919)\n", + "\tat io.netty.channel.nio.AbstractNioByteChannel$NioByteUnsafe.read(AbstractNioByteChannel.java:163)\n", + "\tat io.netty.channel.nio.NioEventLoop.processSelectedKey(NioEventLoop.java:714)\n", + "\tat io.netty.channel.nio.NioEventLoop.processSelectedKeysOptimized(NioEventLoop.java:650)\n", + "\tat io.netty.channel.nio.NioEventLoop.processSelectedKeys(NioEventLoop.java:576)\n", + "\tat io.netty.channel.nio.NioEventLoop.run(NioEventLoop.java:493)\n", + "\tat io.netty.util.concurrent.SingleThreadEventExecutor$4.run(SingleThreadEventExecutor.java:989)\n", + "\tat io.netty.util.internal.ThreadExecutorMap$2.run(ThreadExecutorMap.java:74)\n", + "\tat io.netty.util.concurrent.FastThreadLocalRunnable.run(FastThreadLocalRunnable.java:30)\n", + "\t... 1 more\n", + "Caused by: java.lang.OutOfMemoryError: Java heap space\n", + "23/11/24 21:05:03 WARN org.apache.spark.scheduler.TaskSetManager: Lost task 559.0 in stage 19.0 (TID 2285) (hub-msca-bdp-dphub-students-test-ridhi-w-0.c.msca-bdp-student-ap.internal executor 6): TaskResultLost (result lost from block manager)\n", + "23/11/24 21:05:06 WARN org.apache.spark.storage.BlockManager: Failed to fetch block after 1 fetch failures. 
Most recent failure cause:\n", + "org.apache.spark.SparkException: Exception thrown in awaitResult: \n", + "\tat org.apache.spark.util.ThreadUtils$.awaitResult(ThreadUtils.scala:301)\n", + "\tat org.apache.spark.network.BlockTransferService.fetchBlockSync(BlockTransferService.scala:103)\n", + "\tat org.apache.spark.storage.BlockManager.fetchRemoteManagedBuffer(BlockManager.scala:1069)\n", + "\tat org.apache.spark.storage.BlockManager.$anonfun$getRemoteBlock$8(BlockManager.scala:1013)\n", + "\tat scala.Option.orElse(Option.scala:447)\n", + "\tat org.apache.spark.storage.BlockManager.getRemoteBlock(BlockManager.scala:1013)\n", + "\tat org.apache.spark.storage.BlockManager.getRemoteBytes(BlockManager.scala:1151)\n", + "\tat org.apache.spark.scheduler.TaskResultGetter$$anon$3.$anonfun$run$1(TaskResultGetter.scala:88)\n", + "\tat scala.runtime.java8.JFunction0$mcV$sp.apply(JFunction0$mcV$sp.java:23)\n", + "\tat org.apache.spark.util.Utils$.logUncaughtExceptions(Utils.scala:1996)\n", + "\tat org.apache.spark.scheduler.TaskResultGetter$$anon$3.run(TaskResultGetter.scala:63)\n", + "\tat java.util.concurrent.ThreadPoolExecutor.runWorker(ThreadPoolExecutor.java:1149)\n", + "\tat java.util.concurrent.ThreadPoolExecutor$Worker.run(ThreadPoolExecutor.java:624)\n", + "\tat java.lang.Thread.run(Thread.java:750)\n", + "Caused by: java.util.concurrent.ExecutionException: Boxed Error\n", + "\tat scala.concurrent.impl.Promise$.resolver(Promise.scala:87)\n", + "\tat scala.concurrent.impl.Promise$.scala$concurrent$impl$Promise$$resolveTry(Promise.scala:79)\n", + "\tat scala.concurrent.impl.Promise$DefaultPromise.tryComplete(Promise.scala:284)\n", + "\tat scala.concurrent.Promise.complete(Promise.scala:53)\n", + "\tat scala.concurrent.Promise.complete$(Promise.scala:52)\n", + "\tat scala.concurrent.impl.Promise$DefaultPromise.complete(Promise.scala:187)\n", + "\tat scala.concurrent.Promise.failure(Promise.scala:104)\n", + "\tat scala.concurrent.Promise.failure$(Promise.scala:104)\n", + "\tat scala.concurrent.impl.Promise$DefaultPromise.failure(Promise.scala:187)\n", + "\tat org.apache.spark.network.BlockTransferService$$anon$1.onBlockFetchSuccess(BlockTransferService.scala:98)\n", + "\tat org.apache.spark.network.shuffle.RetryingBlockFetcher$RetryingBlockFetchListener.onBlockFetchSuccess(RetryingBlockFetcher.java:216)\n", + "\tat org.apache.spark.network.shuffle.OneForOneBlockFetcher$ChunkCallback.onSuccess(OneForOneBlockFetcher.java:190)\n", + "\tat org.apache.spark.network.client.TransportResponseHandler.handle(TransportResponseHandler.java:171)\n", + "\tat org.apache.spark.network.server.TransportChannelHandler.channelRead0(TransportChannelHandler.java:142)\n", + "\tat org.apache.spark.network.server.TransportChannelHandler.channelRead0(TransportChannelHandler.java:53)\n", + "\tat io.netty.channel.SimpleChannelInboundHandler.channelRead(SimpleChannelInboundHandler.java:99)\n", + "\tat io.netty.channel.AbstractChannelHandlerContext.invokeChannelRead(AbstractChannelHandlerContext.java:379)\n", + "\tat io.netty.channel.AbstractChannelHandlerContext.invokeChannelRead(AbstractChannelHandlerContext.java:365)\n", + "\tat io.netty.channel.AbstractChannelHandlerContext.fireChannelRead(AbstractChannelHandlerContext.java:357)\n", + "\tat io.netty.handler.timeout.IdleStateHandler.channelRead(IdleStateHandler.java:286)\n", + "\tat io.netty.channel.AbstractChannelHandlerContext.invokeChannelRead(AbstractChannelHandlerContext.java:379)\n", + "\tat 
io.netty.channel.AbstractChannelHandlerContext.invokeChannelRead(AbstractChannelHandlerContext.java:365)\n", + "\tat io.netty.channel.AbstractChannelHandlerContext.fireChannelRead(AbstractChannelHandlerContext.java:357)\n", + "\tat io.netty.handler.codec.MessageToMessageDecoder.channelRead(MessageToMessageDecoder.java:103)\n", + "\tat io.netty.channel.AbstractChannelHandlerContext.invokeChannelRead(AbstractChannelHandlerContext.java:379)\n", + "\tat io.netty.channel.AbstractChannelHandlerContext.invokeChannelRead(AbstractChannelHandlerContext.java:365)\n", + "\tat io.netty.channel.AbstractChannelHandlerContext.fireChannelRead(AbstractChannelHandlerContext.java:357)\n", + "\tat org.apache.spark.network.util.TransportFrameDecoder.channelRead(TransportFrameDecoder.java:102)\n", + "\tat io.netty.channel.AbstractChannelHandlerContext.invokeChannelRead(AbstractChannelHandlerContext.java:379)\n", + "\tat io.netty.channel.AbstractChannelHandlerContext.invokeChannelRead(AbstractChannelHandlerContext.java:365)\n", + "\tat io.netty.channel.AbstractChannelHandlerContext.fireChannelRead(AbstractChannelHandlerContext.java:357)\n", + "\tat io.netty.channel.DefaultChannelPipeline$HeadContext.channelRead(DefaultChannelPipeline.java:1410)\n", + "\tat io.netty.channel.AbstractChannelHandlerContext.invokeChannelRead(AbstractChannelHandlerContext.java:379)\n", + "\tat io.netty.channel.AbstractChannelHandlerContext.invokeChannelRead(AbstractChannelHandlerContext.java:365)\n", + "\tat io.netty.channel.DefaultChannelPipeline.fireChannelRead(DefaultChannelPipeline.java:919)\n", + "\tat io.netty.channel.nio.AbstractNioByteChannel$NioByteUnsafe.read(AbstractNioByteChannel.java:163)\n", + "\tat io.netty.channel.nio.NioEventLoop.processSelectedKey(NioEventLoop.java:714)\n", + "\tat io.netty.channel.nio.NioEventLoop.processSelectedKeysOptimized(NioEventLoop.java:650)\n", + "\tat io.netty.channel.nio.NioEventLoop.processSelectedKeys(NioEventLoop.java:576)\n", + "\tat io.netty.channel.nio.NioEventLoop.run(NioEventLoop.java:493)\n", + "\tat io.netty.util.concurrent.SingleThreadEventExecutor$4.run(SingleThreadEventExecutor.java:989)\n", + "\tat io.netty.util.internal.ThreadExecutorMap$2.run(ThreadExecutorMap.java:74)\n", + "\tat io.netty.util.concurrent.FastThreadLocalRunnable.run(FastThreadLocalRunnable.java:30)\n", + "\t... 1 more\n", + "Caused by: java.lang.OutOfMemoryError: Java heap space\n", + "23/11/24 21:05:06 WARN org.apache.spark.scheduler.TaskSetManager: Lost task 558.0 in stage 19.0 (TID 2284) (hub-msca-bdp-dphub-students-test-ridhi-w-1.c.msca-bdp-student-ap.internal executor 7): TaskResultLost (result lost from block manager)\n", + "23/11/24 21:05:08 WARN org.apache.spark.storage.BlockManager: Failed to fetch block after 1 fetch failures. 
Most recent failure cause:\n", + "org.apache.spark.SparkException: Exception thrown in awaitResult: \n", + "\tat org.apache.spark.util.ThreadUtils$.awaitResult(ThreadUtils.scala:301)\n", + "\tat org.apache.spark.network.BlockTransferService.fetchBlockSync(BlockTransferService.scala:103)\n", + "\tat org.apache.spark.storage.BlockManager.fetchRemoteManagedBuffer(BlockManager.scala:1069)\n", + "\tat org.apache.spark.storage.BlockManager.$anonfun$getRemoteBlock$8(BlockManager.scala:1013)\n", + "\tat scala.Option.orElse(Option.scala:447)\n", + "\tat org.apache.spark.storage.BlockManager.getRemoteBlock(BlockManager.scala:1013)\n", + "\tat org.apache.spark.storage.BlockManager.getRemoteBytes(BlockManager.scala:1151)\n", + "\tat org.apache.spark.scheduler.TaskResultGetter$$anon$3.$anonfun$run$1(TaskResultGetter.scala:88)\n", + "\tat scala.runtime.java8.JFunction0$mcV$sp.apply(JFunction0$mcV$sp.java:23)\n", + "\tat org.apache.spark.util.Utils$.logUncaughtExceptions(Utils.scala:1996)\n", + "\tat org.apache.spark.scheduler.TaskResultGetter$$anon$3.run(TaskResultGetter.scala:63)\n", + "\tat java.util.concurrent.ThreadPoolExecutor.runWorker(ThreadPoolExecutor.java:1149)\n", + "\tat java.util.concurrent.ThreadPoolExecutor$Worker.run(ThreadPoolExecutor.java:624)\n", + "\tat java.lang.Thread.run(Thread.java:750)\n", + "Caused by: java.util.concurrent.ExecutionException: Boxed Error\n", + "\tat scala.concurrent.impl.Promise$.resolver(Promise.scala:87)\n", + "\tat scala.concurrent.impl.Promise$.scala$concurrent$impl$Promise$$resolveTry(Promise.scala:79)\n", + "\tat scala.concurrent.impl.Promise$DefaultPromise.tryComplete(Promise.scala:284)\n", + "\tat scala.concurrent.Promise.complete(Promise.scala:53)\n", + "\tat scala.concurrent.Promise.complete$(Promise.scala:52)\n", + "\tat scala.concurrent.impl.Promise$DefaultPromise.complete(Promise.scala:187)\n", + "\tat scala.concurrent.Promise.failure(Promise.scala:104)\n", + "\tat scala.concurrent.Promise.failure$(Promise.scala:104)\n", + "\tat scala.concurrent.impl.Promise$DefaultPromise.failure(Promise.scala:187)\n", + "\tat org.apache.spark.network.BlockTransferService$$anon$1.onBlockFetchSuccess(BlockTransferService.scala:98)\n", + "\tat org.apache.spark.network.shuffle.RetryingBlockFetcher$RetryingBlockFetchListener.onBlockFetchSuccess(RetryingBlockFetcher.java:216)\n", + "\tat org.apache.spark.network.shuffle.OneForOneBlockFetcher$ChunkCallback.onSuccess(OneForOneBlockFetcher.java:190)\n", + "\tat org.apache.spark.network.client.TransportResponseHandler.handle(TransportResponseHandler.java:171)\n", + "\tat org.apache.spark.network.server.TransportChannelHandler.channelRead0(TransportChannelHandler.java:142)\n", + "\tat org.apache.spark.network.server.TransportChannelHandler.channelRead0(TransportChannelHandler.java:53)\n", + "\tat io.netty.channel.SimpleChannelInboundHandler.channelRead(SimpleChannelInboundHandler.java:99)\n", + "\tat io.netty.channel.AbstractChannelHandlerContext.invokeChannelRead(AbstractChannelHandlerContext.java:379)\n", + "\tat io.netty.channel.AbstractChannelHandlerContext.invokeChannelRead(AbstractChannelHandlerContext.java:365)\n", + "\tat io.netty.channel.AbstractChannelHandlerContext.fireChannelRead(AbstractChannelHandlerContext.java:357)\n", + "\tat io.netty.handler.timeout.IdleStateHandler.channelRead(IdleStateHandler.java:286)\n", + "\tat io.netty.channel.AbstractChannelHandlerContext.invokeChannelRead(AbstractChannelHandlerContext.java:379)\n", + "\tat 
io.netty.channel.AbstractChannelHandlerContext.invokeChannelRead(AbstractChannelHandlerContext.java:365)\n", + "\tat io.netty.channel.AbstractChannelHandlerContext.fireChannelRead(AbstractChannelHandlerContext.java:357)\n", + "\tat io.netty.handler.codec.MessageToMessageDecoder.channelRead(MessageToMessageDecoder.java:103)\n", + "\tat io.netty.channel.AbstractChannelHandlerContext.invokeChannelRead(AbstractChannelHandlerContext.java:379)\n", + "\tat io.netty.channel.AbstractChannelHandlerContext.invokeChannelRead(AbstractChannelHandlerContext.java:365)\n", + "\tat io.netty.channel.AbstractChannelHandlerContext.fireChannelRead(AbstractChannelHandlerContext.java:357)\n", + "\tat org.apache.spark.network.util.TransportFrameDecoder.channelRead(TransportFrameDecoder.java:102)\n", + "\tat io.netty.channel.AbstractChannelHandlerContext.invokeChannelRead(AbstractChannelHandlerContext.java:379)\n", + "\tat io.netty.channel.AbstractChannelHandlerContext.invokeChannelRead(AbstractChannelHandlerContext.java:365)\n", + "\tat io.netty.channel.AbstractChannelHandlerContext.fireChannelRead(AbstractChannelHandlerContext.java:357)\n", + "\tat io.netty.channel.DefaultChannelPipeline$HeadContext.channelRead(DefaultChannelPipeline.java:1410)\n", + "\tat io.netty.channel.AbstractChannelHandlerContext.invokeChannelRead(AbstractChannelHandlerContext.java:379)\n", + "\tat io.netty.channel.AbstractChannelHandlerContext.invokeChannelRead(AbstractChannelHandlerContext.java:365)\n", + "\tat io.netty.channel.DefaultChannelPipeline.fireChannelRead(DefaultChannelPipeline.java:919)\n", + "\tat io.netty.channel.nio.AbstractNioByteChannel$NioByteUnsafe.read(AbstractNioByteChannel.java:163)\n", + "\tat io.netty.channel.nio.NioEventLoop.processSelectedKey(NioEventLoop.java:714)\n", + "\tat io.netty.channel.nio.NioEventLoop.processSelectedKeysOptimized(NioEventLoop.java:650)\n", + "\tat io.netty.channel.nio.NioEventLoop.processSelectedKeys(NioEventLoop.java:576)\n", + "\tat io.netty.channel.nio.NioEventLoop.run(NioEventLoop.java:493)\n", + "\tat io.netty.util.concurrent.SingleThreadEventExecutor$4.run(SingleThreadEventExecutor.java:989)\n", + "\tat io.netty.util.internal.ThreadExecutorMap$2.run(ThreadExecutorMap.java:74)\n", + "\tat io.netty.util.concurrent.FastThreadLocalRunnable.run(FastThreadLocalRunnable.java:30)\n", + "\t... 1 more\n", + "Caused by: java.lang.OutOfMemoryError: Java heap space\n", + "23/11/24 21:05:08 WARN org.apache.spark.scheduler.TaskSetManager: Lost task 440.1 in stage 19.0 (TID 2288) (hub-msca-bdp-dphub-students-test-ridhi-w-0.c.msca-bdp-student-ap.internal executor 6): TaskResultLost (result lost from block manager)\n", + "23/11/24 21:05:10 WARN org.apache.spark.storage.BlockManager: Failed to fetch block after 1 fetch failures. 
Most recent failure cause:\n", + "org.apache.spark.SparkException: Exception thrown in awaitResult: \n", + "\tat org.apache.spark.util.ThreadUtils$.awaitResult(ThreadUtils.scala:301)\n", + "\tat org.apache.spark.network.BlockTransferService.fetchBlockSync(BlockTransferService.scala:103)\n", + "\tat org.apache.spark.storage.BlockManager.fetchRemoteManagedBuffer(BlockManager.scala:1069)\n", + "\tat org.apache.spark.storage.BlockManager.$anonfun$getRemoteBlock$8(BlockManager.scala:1013)\n", + "\tat scala.Option.orElse(Option.scala:447)\n", + "\tat org.apache.spark.storage.BlockManager.getRemoteBlock(BlockManager.scala:1013)\n", + "\tat org.apache.spark.storage.BlockManager.getRemoteBytes(BlockManager.scala:1151)\n", + "\tat org.apache.spark.scheduler.TaskResultGetter$$anon$3.$anonfun$run$1(TaskResultGetter.scala:88)\n", + "\tat scala.runtime.java8.JFunction0$mcV$sp.apply(JFunction0$mcV$sp.java:23)\n", + "\tat org.apache.spark.util.Utils$.logUncaughtExceptions(Utils.scala:1996)\n", + "\tat org.apache.spark.scheduler.TaskResultGetter$$anon$3.run(TaskResultGetter.scala:63)\n", + "\tat java.util.concurrent.ThreadPoolExecutor.runWorker(ThreadPoolExecutor.java:1149)\n", + "\tat java.util.concurrent.ThreadPoolExecutor$Worker.run(ThreadPoolExecutor.java:624)\n", + "\tat java.lang.Thread.run(Thread.java:750)\n", + "Caused by: java.util.concurrent.ExecutionException: Boxed Error\n", + "\tat scala.concurrent.impl.Promise$.resolver(Promise.scala:87)\n", + "\tat scala.concurrent.impl.Promise$.scala$concurrent$impl$Promise$$resolveTry(Promise.scala:79)\n", + "\tat scala.concurrent.impl.Promise$DefaultPromise.tryComplete(Promise.scala:284)\n", + "\tat scala.concurrent.Promise.complete(Promise.scala:53)\n", + "\tat scala.concurrent.Promise.complete$(Promise.scala:52)\n", + "\tat scala.concurrent.impl.Promise$DefaultPromise.complete(Promise.scala:187)\n", + "\tat scala.concurrent.Promise.failure(Promise.scala:104)\n", + "\tat scala.concurrent.Promise.failure$(Promise.scala:104)\n", + "\tat scala.concurrent.impl.Promise$DefaultPromise.failure(Promise.scala:187)\n", + "\tat org.apache.spark.network.BlockTransferService$$anon$1.onBlockFetchSuccess(BlockTransferService.scala:98)\n", + "\tat org.apache.spark.network.shuffle.RetryingBlockFetcher$RetryingBlockFetchListener.onBlockFetchSuccess(RetryingBlockFetcher.java:216)\n", + "\tat org.apache.spark.network.shuffle.OneForOneBlockFetcher$ChunkCallback.onSuccess(OneForOneBlockFetcher.java:190)\n", + "\tat org.apache.spark.network.client.TransportResponseHandler.handle(TransportResponseHandler.java:171)\n", + "\tat org.apache.spark.network.server.TransportChannelHandler.channelRead0(TransportChannelHandler.java:142)\n", + "\tat org.apache.spark.network.server.TransportChannelHandler.channelRead0(TransportChannelHandler.java:53)\n", + "\tat io.netty.channel.SimpleChannelInboundHandler.channelRead(SimpleChannelInboundHandler.java:99)\n", + "\tat io.netty.channel.AbstractChannelHandlerContext.invokeChannelRead(AbstractChannelHandlerContext.java:379)\n", + "\tat io.netty.channel.AbstractChannelHandlerContext.invokeChannelRead(AbstractChannelHandlerContext.java:365)\n", + "\tat io.netty.channel.AbstractChannelHandlerContext.fireChannelRead(AbstractChannelHandlerContext.java:357)\n", + "\tat io.netty.handler.timeout.IdleStateHandler.channelRead(IdleStateHandler.java:286)\n", + "\tat io.netty.channel.AbstractChannelHandlerContext.invokeChannelRead(AbstractChannelHandlerContext.java:379)\n", + "\tat 
io.netty.channel.AbstractChannelHandlerContext.invokeChannelRead(AbstractChannelHandlerContext.java:365)\n", + "\tat io.netty.channel.AbstractChannelHandlerContext.fireChannelRead(AbstractChannelHandlerContext.java:357)\n", + "\tat io.netty.handler.codec.MessageToMessageDecoder.channelRead(MessageToMessageDecoder.java:103)\n", + "\tat io.netty.channel.AbstractChannelHandlerContext.invokeChannelRead(AbstractChannelHandlerContext.java:379)\n", + "\tat io.netty.channel.AbstractChannelHandlerContext.invokeChannelRead(AbstractChannelHandlerContext.java:365)\n", + "\tat io.netty.channel.AbstractChannelHandlerContext.fireChannelRead(AbstractChannelHandlerContext.java:357)\n", + "\tat org.apache.spark.network.util.TransportFrameDecoder.channelRead(TransportFrameDecoder.java:102)\n", + "\tat io.netty.channel.AbstractChannelHandlerContext.invokeChannelRead(AbstractChannelHandlerContext.java:379)\n", + "\tat io.netty.channel.AbstractChannelHandlerContext.invokeChannelRead(AbstractChannelHandlerContext.java:365)\n", + "\tat io.netty.channel.AbstractChannelHandlerContext.fireChannelRead(AbstractChannelHandlerContext.java:357)\n", + "\tat io.netty.channel.DefaultChannelPipeline$HeadContext.channelRead(DefaultChannelPipeline.java:1410)\n", + "\tat io.netty.channel.AbstractChannelHandlerContext.invokeChannelRead(AbstractChannelHandlerContext.java:379)\n", + "\tat io.netty.channel.AbstractChannelHandlerContext.invokeChannelRead(AbstractChannelHandlerContext.java:365)\n", + "\tat io.netty.channel.DefaultChannelPipeline.fireChannelRead(DefaultChannelPipeline.java:919)\n", + "\tat io.netty.channel.nio.AbstractNioByteChannel$NioByteUnsafe.read(AbstractNioByteChannel.java:163)\n", + "\tat io.netty.channel.nio.NioEventLoop.processSelectedKey(NioEventLoop.java:714)\n", + "\tat io.netty.channel.nio.NioEventLoop.processSelectedKeysOptimized(NioEventLoop.java:650)\n", + "\tat io.netty.channel.nio.NioEventLoop.processSelectedKeys(NioEventLoop.java:576)\n", + "\tat io.netty.channel.nio.NioEventLoop.run(NioEventLoop.java:493)\n", + "\tat io.netty.util.concurrent.SingleThreadEventExecutor$4.run(SingleThreadEventExecutor.java:989)\n", + "\tat io.netty.util.internal.ThreadExecutorMap$2.run(ThreadExecutorMap.java:74)\n", + "\tat io.netty.util.concurrent.FastThreadLocalRunnable.run(FastThreadLocalRunnable.java:30)\n", + "\t... 1 more\n", + "Caused by: java.lang.OutOfMemoryError: Java heap space\n", + "23/11/24 21:05:10 WARN org.apache.spark.storage.BlockManager: Failed to fetch block after 1 fetch failures. 
Most recent failure cause:\n", + "org.apache.spark.SparkException: Exception thrown in awaitResult: \n", + "\tat org.apache.spark.util.ThreadUtils$.awaitResult(ThreadUtils.scala:301)\n", + "\tat org.apache.spark.network.BlockTransferService.fetchBlockSync(BlockTransferService.scala:103)\n", + "\tat org.apache.spark.storage.BlockManager.fetchRemoteManagedBuffer(BlockManager.scala:1069)\n", + "\tat org.apache.spark.storage.BlockManager.$anonfun$getRemoteBlock$8(BlockManager.scala:1013)\n", + "\tat scala.Option.orElse(Option.scala:447)\n", + "\tat org.apache.spark.storage.BlockManager.getRemoteBlock(BlockManager.scala:1013)\n", + "\tat org.apache.spark.storage.BlockManager.getRemoteBytes(BlockManager.scala:1151)\n", + "\tat org.apache.spark.scheduler.TaskResultGetter$$anon$3.$anonfun$run$1(TaskResultGetter.scala:88)\n", + "\tat scala.runtime.java8.JFunction0$mcV$sp.apply(JFunction0$mcV$sp.java:23)\n", + "\tat org.apache.spark.util.Utils$.logUncaughtExceptions(Utils.scala:1996)\n", + "\tat org.apache.spark.scheduler.TaskResultGetter$$anon$3.run(TaskResultGetter.scala:63)\n", + "\tat java.util.concurrent.ThreadPoolExecutor.runWorker(ThreadPoolExecutor.java:1149)\n", + "\tat java.util.concurrent.ThreadPoolExecutor$Worker.run(ThreadPoolExecutor.java:624)\n", + "\tat java.lang.Thread.run(Thread.java:750)\n", + "Caused by: java.util.concurrent.ExecutionException: Boxed Error\n", + "\tat scala.concurrent.impl.Promise$.resolver(Promise.scala:87)\n", + "\tat scala.concurrent.impl.Promise$.scala$concurrent$impl$Promise$$resolveTry(Promise.scala:79)\n", + "\tat scala.concurrent.impl.Promise$DefaultPromise.tryComplete(Promise.scala:284)\n", + "\tat scala.concurrent.Promise.complete(Promise.scala:53)\n", + "\tat scala.concurrent.Promise.complete$(Promise.scala:52)\n", + "\tat scala.concurrent.impl.Promise$DefaultPromise.complete(Promise.scala:187)\n", + "\tat scala.concurrent.Promise.failure(Promise.scala:104)\n", + "\tat scala.concurrent.Promise.failure$(Promise.scala:104)\n", + "\tat scala.concurrent.impl.Promise$DefaultPromise.failure(Promise.scala:187)\n", + "\tat org.apache.spark.network.BlockTransferService$$anon$1.onBlockFetchSuccess(BlockTransferService.scala:98)\n", + "\tat org.apache.spark.network.shuffle.RetryingBlockFetcher$RetryingBlockFetchListener.onBlockFetchSuccess(RetryingBlockFetcher.java:216)\n", + "\tat org.apache.spark.network.shuffle.OneForOneBlockFetcher$ChunkCallback.onSuccess(OneForOneBlockFetcher.java:190)\n", + "\tat org.apache.spark.network.client.TransportResponseHandler.handle(TransportResponseHandler.java:171)\n", + "\tat org.apache.spark.network.server.TransportChannelHandler.channelRead0(TransportChannelHandler.java:142)\n", + "\tat org.apache.spark.network.server.TransportChannelHandler.channelRead0(TransportChannelHandler.java:53)\n", + "\tat io.netty.channel.SimpleChannelInboundHandler.channelRead(SimpleChannelInboundHandler.java:99)\n", + "\tat io.netty.channel.AbstractChannelHandlerContext.invokeChannelRead(AbstractChannelHandlerContext.java:379)\n", + "\tat io.netty.channel.AbstractChannelHandlerContext.invokeChannelRead(AbstractChannelHandlerContext.java:365)\n", + "\tat io.netty.channel.AbstractChannelHandlerContext.fireChannelRead(AbstractChannelHandlerContext.java:357)\n", + "\tat io.netty.handler.timeout.IdleStateHandler.channelRead(IdleStateHandler.java:286)\n", + "\tat io.netty.channel.AbstractChannelHandlerContext.invokeChannelRead(AbstractChannelHandlerContext.java:379)\n", + "\tat 
io.netty.channel.AbstractChannelHandlerContext.invokeChannelRead(AbstractChannelHandlerContext.java:365)\n", + "\tat io.netty.channel.AbstractChannelHandlerContext.fireChannelRead(AbstractChannelHandlerContext.java:357)\n", + "\tat io.netty.handler.codec.MessageToMessageDecoder.channelRead(MessageToMessageDecoder.java:103)\n", + "\tat io.netty.channel.AbstractChannelHandlerContext.invokeChannelRead(AbstractChannelHandlerContext.java:379)\n", + "\tat io.netty.channel.AbstractChannelHandlerContext.invokeChannelRead(AbstractChannelHandlerContext.java:365)\n", + "\tat io.netty.channel.AbstractChannelHandlerContext.fireChannelRead(AbstractChannelHandlerContext.java:357)\n", + "\tat org.apache.spark.network.util.TransportFrameDecoder.channelRead(TransportFrameDecoder.java:102)\n", + "\tat io.netty.channel.AbstractChannelHandlerContext.invokeChannelRead(AbstractChannelHandlerContext.java:379)\n", + "\tat io.netty.channel.AbstractChannelHandlerContext.invokeChannelRead(AbstractChannelHandlerContext.java:365)\n", + "\tat io.netty.channel.AbstractChannelHandlerContext.fireChannelRead(AbstractChannelHandlerContext.java:357)\n", + "\tat io.netty.channel.DefaultChannelPipeline$HeadContext.channelRead(DefaultChannelPipeline.java:1410)\n", + "\tat io.netty.channel.AbstractChannelHandlerContext.invokeChannelRead(AbstractChannelHandlerContext.java:379)\n", + "\tat io.netty.channel.AbstractChannelHandlerContext.invokeChannelRead(AbstractChannelHandlerContext.java:365)\n", + "\tat io.netty.channel.DefaultChannelPipeline.fireChannelRead(DefaultChannelPipeline.java:919)\n", + "\tat io.netty.channel.nio.AbstractNioByteChannel$NioByteUnsafe.read(AbstractNioByteChannel.java:163)\n", + "\tat io.netty.channel.nio.NioEventLoop.processSelectedKey(NioEventLoop.java:714)\n", + "\tat io.netty.channel.nio.NioEventLoop.processSelectedKeysOptimized(NioEventLoop.java:650)\n", + "\tat io.netty.channel.nio.NioEventLoop.processSelectedKeys(NioEventLoop.java:576)\n", + "\tat io.netty.channel.nio.NioEventLoop.run(NioEventLoop.java:493)\n", + "\tat io.netty.util.concurrent.SingleThreadEventExecutor$4.run(SingleThreadEventExecutor.java:989)\n", + "\tat io.netty.util.internal.ThreadExecutorMap$2.run(ThreadExecutorMap.java:74)\n", + "\tat io.netty.util.concurrent.FastThreadLocalRunnable.run(FastThreadLocalRunnable.java:30)\n", + "\t... 1 more\n", + "Caused by: java.lang.OutOfMemoryError: Java heap space\n", + "23/11/24 21:05:10 WARN org.apache.spark.scheduler.TaskSetManager: Lost task 550.0 in stage 19.0 (TID 2276) (hub-msca-bdp-dphub-students-test-ridhi-w-1.c.msca-bdp-student-ap.internal executor 7): TaskResultLost (result lost from block manager)\n", + "23/11/24 21:05:10 WARN org.apache.spark.scheduler.TaskSetManager: Lost task 556.0 in stage 19.0 (TID 2282) (hub-msca-bdp-dphub-students-test-ridhi-w-0.c.msca-bdp-student-ap.internal executor 3): TaskResultLost (result lost from block manager)\n", + "23/11/24 21:05:13 WARN org.apache.spark.storage.BlockManager: Failed to fetch block after 1 fetch failures. 
Most recent failure cause:\n", + "org.apache.spark.SparkException: Exception thrown in awaitResult: \n", + "\tat org.apache.spark.util.ThreadUtils$.awaitResult(ThreadUtils.scala:301)\n", + "\tat org.apache.spark.network.BlockTransferService.fetchBlockSync(BlockTransferService.scala:103)\n", + "\tat org.apache.spark.storage.BlockManager.fetchRemoteManagedBuffer(BlockManager.scala:1069)\n", + "\tat org.apache.spark.storage.BlockManager.$anonfun$getRemoteBlock$8(BlockManager.scala:1013)\n", + "\tat scala.Option.orElse(Option.scala:447)\n", + "\tat org.apache.spark.storage.BlockManager.getRemoteBlock(BlockManager.scala:1013)\n", + "\tat org.apache.spark.storage.BlockManager.getRemoteBytes(BlockManager.scala:1151)\n", + "\tat org.apache.spark.scheduler.TaskResultGetter$$anon$3.$anonfun$run$1(TaskResultGetter.scala:88)\n", + "\tat scala.runtime.java8.JFunction0$mcV$sp.apply(JFunction0$mcV$sp.java:23)\n", + "\tat org.apache.spark.util.Utils$.logUncaughtExceptions(Utils.scala:1996)\n", + "\tat org.apache.spark.scheduler.TaskResultGetter$$anon$3.run(TaskResultGetter.scala:63)\n", + "\tat java.util.concurrent.ThreadPoolExecutor.runWorker(ThreadPoolExecutor.java:1149)\n", + "\tat java.util.concurrent.ThreadPoolExecutor$Worker.run(ThreadPoolExecutor.java:624)\n", + "\tat java.lang.Thread.run(Thread.java:750)\n", + "Caused by: java.util.concurrent.ExecutionException: Boxed Error\n", + "\tat scala.concurrent.impl.Promise$.resolver(Promise.scala:87)\n", + "\tat scala.concurrent.impl.Promise$.scala$concurrent$impl$Promise$$resolveTry(Promise.scala:79)\n", + "\tat scala.concurrent.impl.Promise$DefaultPromise.tryComplete(Promise.scala:284)\n", + "\tat scala.concurrent.Promise.complete(Promise.scala:53)\n", + "\tat scala.concurrent.Promise.complete$(Promise.scala:52)\n", + "\tat scala.concurrent.impl.Promise$DefaultPromise.complete(Promise.scala:187)\n", + "\tat scala.concurrent.Promise.failure(Promise.scala:104)\n", + "\tat scala.concurrent.Promise.failure$(Promise.scala:104)\n", + "\tat scala.concurrent.impl.Promise$DefaultPromise.failure(Promise.scala:187)\n", + "\tat org.apache.spark.network.BlockTransferService$$anon$1.onBlockFetchSuccess(BlockTransferService.scala:98)\n", + "\tat org.apache.spark.network.shuffle.RetryingBlockFetcher$RetryingBlockFetchListener.onBlockFetchSuccess(RetryingBlockFetcher.java:216)\n", + "\tat org.apache.spark.network.shuffle.OneForOneBlockFetcher$ChunkCallback.onSuccess(OneForOneBlockFetcher.java:190)\n", + "\tat org.apache.spark.network.client.TransportResponseHandler.handle(TransportResponseHandler.java:171)\n", + "\tat org.apache.spark.network.server.TransportChannelHandler.channelRead0(TransportChannelHandler.java:142)\n", + "\tat org.apache.spark.network.server.TransportChannelHandler.channelRead0(TransportChannelHandler.java:53)\n", + "\tat io.netty.channel.SimpleChannelInboundHandler.channelRead(SimpleChannelInboundHandler.java:99)\n", + "\tat io.netty.channel.AbstractChannelHandlerContext.invokeChannelRead(AbstractChannelHandlerContext.java:379)\n", + "\tat io.netty.channel.AbstractChannelHandlerContext.invokeChannelRead(AbstractChannelHandlerContext.java:365)\n", + "\tat io.netty.channel.AbstractChannelHandlerContext.fireChannelRead(AbstractChannelHandlerContext.java:357)\n", + "\tat io.netty.handler.timeout.IdleStateHandler.channelRead(IdleStateHandler.java:286)\n", + "\tat io.netty.channel.AbstractChannelHandlerContext.invokeChannelRead(AbstractChannelHandlerContext.java:379)\n", + "\tat 
io.netty.channel.AbstractChannelHandlerContext.invokeChannelRead(AbstractChannelHandlerContext.java:365)\n", + "\tat io.netty.channel.AbstractChannelHandlerContext.fireChannelRead(AbstractChannelHandlerContext.java:357)\n", + "\tat io.netty.handler.codec.MessageToMessageDecoder.channelRead(MessageToMessageDecoder.java:103)\n", + "\tat io.netty.channel.AbstractChannelHandlerContext.invokeChannelRead(AbstractChannelHandlerContext.java:379)\n", + "\tat io.netty.channel.AbstractChannelHandlerContext.invokeChannelRead(AbstractChannelHandlerContext.java:365)\n", + "\tat io.netty.channel.AbstractChannelHandlerContext.fireChannelRead(AbstractChannelHandlerContext.java:357)\n", + "\tat org.apache.spark.network.util.TransportFrameDecoder.channelRead(TransportFrameDecoder.java:102)\n", + "\tat io.netty.channel.AbstractChannelHandlerContext.invokeChannelRead(AbstractChannelHandlerContext.java:379)\n", + "\tat io.netty.channel.AbstractChannelHandlerContext.invokeChannelRead(AbstractChannelHandlerContext.java:365)\n", + "\tat io.netty.channel.AbstractChannelHandlerContext.fireChannelRead(AbstractChannelHandlerContext.java:357)\n", + "\tat io.netty.channel.DefaultChannelPipeline$HeadContext.channelRead(DefaultChannelPipeline.java:1410)\n", + "\tat io.netty.channel.AbstractChannelHandlerContext.invokeChannelRead(AbstractChannelHandlerContext.java:379)\n", + "\tat io.netty.channel.AbstractChannelHandlerContext.invokeChannelRead(AbstractChannelHandlerContext.java:365)\n", + "\tat io.netty.channel.DefaultChannelPipeline.fireChannelRead(DefaultChannelPipeline.java:919)\n", + "\tat io.netty.channel.nio.AbstractNioByteChannel$NioByteUnsafe.read(AbstractNioByteChannel.java:163)\n", + "\tat io.netty.channel.nio.NioEventLoop.processSelectedKey(NioEventLoop.java:714)\n", + "\tat io.netty.channel.nio.NioEventLoop.processSelectedKeysOptimized(NioEventLoop.java:650)\n", + "\tat io.netty.channel.nio.NioEventLoop.processSelectedKeys(NioEventLoop.java:576)\n", + "\tat io.netty.channel.nio.NioEventLoop.run(NioEventLoop.java:493)\n", + "\tat io.netty.util.concurrent.SingleThreadEventExecutor$4.run(SingleThreadEventExecutor.java:989)\n", + "\tat io.netty.util.internal.ThreadExecutorMap$2.run(ThreadExecutorMap.java:74)\n", + "\tat io.netty.util.concurrent.FastThreadLocalRunnable.run(FastThreadLocalRunnable.java:30)\n", + "\t... 1 more\n", + "Caused by: java.lang.OutOfMemoryError: Java heap space\n", + "23/11/24 21:05:13 WARN org.apache.spark.scheduler.TaskSetManager: Lost task 555.0 in stage 19.0 (TID 2281) (hub-msca-bdp-dphub-students-test-ridhi-w-0.c.msca-bdp-student-ap.internal executor 3): TaskResultLost (result lost from block manager)\n", + "23/11/24 21:05:15 WARN org.apache.spark.storage.BlockManager: Failed to fetch block after 1 fetch failures. 
Most recent failure cause:\n", + "org.apache.spark.SparkException: Exception thrown in awaitResult: \n", + "\tat org.apache.spark.util.ThreadUtils$.awaitResult(ThreadUtils.scala:301)\n", + "\tat org.apache.spark.network.BlockTransferService.fetchBlockSync(BlockTransferService.scala:103)\n", + "\tat org.apache.spark.storage.BlockManager.fetchRemoteManagedBuffer(BlockManager.scala:1069)\n", + "\tat org.apache.spark.storage.BlockManager.$anonfun$getRemoteBlock$8(BlockManager.scala:1013)\n", + "\tat scala.Option.orElse(Option.scala:447)\n", + "\tat org.apache.spark.storage.BlockManager.getRemoteBlock(BlockManager.scala:1013)\n", + "\tat org.apache.spark.storage.BlockManager.getRemoteBytes(BlockManager.scala:1151)\n", + "\tat org.apache.spark.scheduler.TaskResultGetter$$anon$3.$anonfun$run$1(TaskResultGetter.scala:88)\n", + "\tat scala.runtime.java8.JFunction0$mcV$sp.apply(JFunction0$mcV$sp.java:23)\n", + "\tat org.apache.spark.util.Utils$.logUncaughtExceptions(Utils.scala:1996)\n", + "\tat org.apache.spark.scheduler.TaskResultGetter$$anon$3.run(TaskResultGetter.scala:63)\n", + "\tat java.util.concurrent.ThreadPoolExecutor.runWorker(ThreadPoolExecutor.java:1149)\n", + "\tat java.util.concurrent.ThreadPoolExecutor$Worker.run(ThreadPoolExecutor.java:624)\n", + "\tat java.lang.Thread.run(Thread.java:750)\n", + "Caused by: java.util.concurrent.ExecutionException: Boxed Error\n", + "\tat scala.concurrent.impl.Promise$.resolver(Promise.scala:87)\n", + "\tat scala.concurrent.impl.Promise$.scala$concurrent$impl$Promise$$resolveTry(Promise.scala:79)\n", + "\tat scala.concurrent.impl.Promise$DefaultPromise.tryComplete(Promise.scala:284)\n", + "\tat scala.concurrent.Promise.complete(Promise.scala:53)\n", + "\tat scala.concurrent.Promise.complete$(Promise.scala:52)\n", + "\tat scala.concurrent.impl.Promise$DefaultPromise.complete(Promise.scala:187)\n", + "\tat scala.concurrent.Promise.failure(Promise.scala:104)\n", + "\tat scala.concurrent.Promise.failure$(Promise.scala:104)\n", + "\tat scala.concurrent.impl.Promise$DefaultPromise.failure(Promise.scala:187)\n", + "\tat org.apache.spark.network.BlockTransferService$$anon$1.onBlockFetchSuccess(BlockTransferService.scala:98)\n", + "\tat org.apache.spark.network.shuffle.RetryingBlockFetcher$RetryingBlockFetchListener.onBlockFetchSuccess(RetryingBlockFetcher.java:216)\n", + "\tat org.apache.spark.network.shuffle.OneForOneBlockFetcher$ChunkCallback.onSuccess(OneForOneBlockFetcher.java:190)\n", + "\tat org.apache.spark.network.client.TransportResponseHandler.handle(TransportResponseHandler.java:171)\n", + "\tat org.apache.spark.network.server.TransportChannelHandler.channelRead0(TransportChannelHandler.java:142)\n", + "\tat org.apache.spark.network.server.TransportChannelHandler.channelRead0(TransportChannelHandler.java:53)\n", + "\tat io.netty.channel.SimpleChannelInboundHandler.channelRead(SimpleChannelInboundHandler.java:99)\n", + "\tat io.netty.channel.AbstractChannelHandlerContext.invokeChannelRead(AbstractChannelHandlerContext.java:379)\n", + "\tat io.netty.channel.AbstractChannelHandlerContext.invokeChannelRead(AbstractChannelHandlerContext.java:365)\n", + "\tat io.netty.channel.AbstractChannelHandlerContext.fireChannelRead(AbstractChannelHandlerContext.java:357)\n", + "\tat io.netty.handler.timeout.IdleStateHandler.channelRead(IdleStateHandler.java:286)\n", + "\tat io.netty.channel.AbstractChannelHandlerContext.invokeChannelRead(AbstractChannelHandlerContext.java:379)\n", + "\tat 
io.netty.channel.AbstractChannelHandlerContext.invokeChannelRead(AbstractChannelHandlerContext.java:365)\n", + "\tat io.netty.channel.AbstractChannelHandlerContext.fireChannelRead(AbstractChannelHandlerContext.java:357)\n", + "\tat io.netty.handler.codec.MessageToMessageDecoder.channelRead(MessageToMessageDecoder.java:103)\n", + "\tat io.netty.channel.AbstractChannelHandlerContext.invokeChannelRead(AbstractChannelHandlerContext.java:379)\n", + "\tat io.netty.channel.AbstractChannelHandlerContext.invokeChannelRead(AbstractChannelHandlerContext.java:365)\n", + "\tat io.netty.channel.AbstractChannelHandlerContext.fireChannelRead(AbstractChannelHandlerContext.java:357)\n", + "\tat org.apache.spark.network.util.TransportFrameDecoder.channelRead(TransportFrameDecoder.java:102)\n", + "\tat io.netty.channel.AbstractChannelHandlerContext.invokeChannelRead(AbstractChannelHandlerContext.java:379)\n", + "\tat io.netty.channel.AbstractChannelHandlerContext.invokeChannelRead(AbstractChannelHandlerContext.java:365)\n", + "\tat io.netty.channel.AbstractChannelHandlerContext.fireChannelRead(AbstractChannelHandlerContext.java:357)\n", + "\tat io.netty.channel.DefaultChannelPipeline$HeadContext.channelRead(DefaultChannelPipeline.java:1410)\n", + "\tat io.netty.channel.AbstractChannelHandlerContext.invokeChannelRead(AbstractChannelHandlerContext.java:379)\n", + "\tat io.netty.channel.AbstractChannelHandlerContext.invokeChannelRead(AbstractChannelHandlerContext.java:365)\n", + "\tat io.netty.channel.DefaultChannelPipeline.fireChannelRead(DefaultChannelPipeline.java:919)\n", + "\tat io.netty.channel.nio.AbstractNioByteChannel$NioByteUnsafe.read(AbstractNioByteChannel.java:163)\n", + "\tat io.netty.channel.nio.NioEventLoop.processSelectedKey(NioEventLoop.java:714)\n", + "\tat io.netty.channel.nio.NioEventLoop.processSelectedKeysOptimized(NioEventLoop.java:650)\n", + "\tat io.netty.channel.nio.NioEventLoop.processSelectedKeys(NioEventLoop.java:576)\n", + "\tat io.netty.channel.nio.NioEventLoop.run(NioEventLoop.java:493)\n", + "\tat io.netty.util.concurrent.SingleThreadEventExecutor$4.run(SingleThreadEventExecutor.java:989)\n", + "\tat io.netty.util.internal.ThreadExecutorMap$2.run(ThreadExecutorMap.java:74)\n", + "\tat io.netty.util.concurrent.FastThreadLocalRunnable.run(FastThreadLocalRunnable.java:30)\n", + "\t... 1 more\n", + "Caused by: java.lang.OutOfMemoryError: Java heap space\n", + "23/11/24 21:05:15 WARN org.apache.spark.scheduler.TaskSetManager: Lost task 552.0 in stage 19.0 (TID 2278) (hub-msca-bdp-dphub-students-test-ridhi-w-0.c.msca-bdp-student-ap.internal executor 3): TaskResultLost (result lost from block manager)\n", + "23/11/24 21:05:17 WARN org.apache.spark.storage.BlockManager: Failed to fetch block after 1 fetch failures. 
Most recent failure cause:\n", + "org.apache.spark.SparkException: Exception thrown in awaitResult: \n", + "\tat org.apache.spark.util.ThreadUtils$.awaitResult(ThreadUtils.scala:301)\n", + "\tat org.apache.spark.network.BlockTransferService.fetchBlockSync(BlockTransferService.scala:103)\n", + "\tat org.apache.spark.storage.BlockManager.fetchRemoteManagedBuffer(BlockManager.scala:1069)\n", + "\tat org.apache.spark.storage.BlockManager.$anonfun$getRemoteBlock$8(BlockManager.scala:1013)\n", + "\tat scala.Option.orElse(Option.scala:447)\n", + "\tat org.apache.spark.storage.BlockManager.getRemoteBlock(BlockManager.scala:1013)\n", + "\tat org.apache.spark.storage.BlockManager.getRemoteBytes(BlockManager.scala:1151)\n", + "\tat org.apache.spark.scheduler.TaskResultGetter$$anon$3.$anonfun$run$1(TaskResultGetter.scala:88)\n", + "\tat scala.runtime.java8.JFunction0$mcV$sp.apply(JFunction0$mcV$sp.java:23)\n", + "\tat org.apache.spark.util.Utils$.logUncaughtExceptions(Utils.scala:1996)\n", + "\tat org.apache.spark.scheduler.TaskResultGetter$$anon$3.run(TaskResultGetter.scala:63)\n", + "\tat java.util.concurrent.ThreadPoolExecutor.runWorker(ThreadPoolExecutor.java:1149)\n", + "\tat java.util.concurrent.ThreadPoolExecutor$Worker.run(ThreadPoolExecutor.java:624)\n", + "\tat java.lang.Thread.run(Thread.java:750)\n", + "Caused by: java.util.concurrent.ExecutionException: Boxed Error\n", + "\tat scala.concurrent.impl.Promise$.resolver(Promise.scala:87)\n", + "\tat scala.concurrent.impl.Promise$.scala$concurrent$impl$Promise$$resolveTry(Promise.scala:79)\n", + "\tat scala.concurrent.impl.Promise$DefaultPromise.tryComplete(Promise.scala:284)\n", + "\tat scala.concurrent.Promise.complete(Promise.scala:53)\n", + "\tat scala.concurrent.Promise.complete$(Promise.scala:52)\n", + "\tat scala.concurrent.impl.Promise$DefaultPromise.complete(Promise.scala:187)\n", + "\tat scala.concurrent.Promise.failure(Promise.scala:104)\n", + "\tat scala.concurrent.Promise.failure$(Promise.scala:104)\n", + "\tat scala.concurrent.impl.Promise$DefaultPromise.failure(Promise.scala:187)\n", + "\tat org.apache.spark.network.BlockTransferService$$anon$1.onBlockFetchSuccess(BlockTransferService.scala:98)\n", + "\tat org.apache.spark.network.shuffle.RetryingBlockFetcher$RetryingBlockFetchListener.onBlockFetchSuccess(RetryingBlockFetcher.java:216)\n", + "\tat org.apache.spark.network.shuffle.OneForOneBlockFetcher$ChunkCallback.onSuccess(OneForOneBlockFetcher.java:190)\n", + "\tat org.apache.spark.network.client.TransportResponseHandler.handle(TransportResponseHandler.java:171)\n", + "\tat org.apache.spark.network.server.TransportChannelHandler.channelRead0(TransportChannelHandler.java:142)\n", + "\tat org.apache.spark.network.server.TransportChannelHandler.channelRead0(TransportChannelHandler.java:53)\n", + "\tat io.netty.channel.SimpleChannelInboundHandler.channelRead(SimpleChannelInboundHandler.java:99)\n", + "\tat io.netty.channel.AbstractChannelHandlerContext.invokeChannelRead(AbstractChannelHandlerContext.java:379)\n", + "\tat io.netty.channel.AbstractChannelHandlerContext.invokeChannelRead(AbstractChannelHandlerContext.java:365)\n", + "\tat io.netty.channel.AbstractChannelHandlerContext.fireChannelRead(AbstractChannelHandlerContext.java:357)\n", + "\tat io.netty.handler.timeout.IdleStateHandler.channelRead(IdleStateHandler.java:286)\n", + "\tat io.netty.channel.AbstractChannelHandlerContext.invokeChannelRead(AbstractChannelHandlerContext.java:379)\n", + "\tat 
io.netty.channel.AbstractChannelHandlerContext.invokeChannelRead(AbstractChannelHandlerContext.java:365)\n", + "\tat io.netty.channel.AbstractChannelHandlerContext.fireChannelRead(AbstractChannelHandlerContext.java:357)\n", + "\tat io.netty.handler.codec.MessageToMessageDecoder.channelRead(MessageToMessageDecoder.java:103)\n", + "\tat io.netty.channel.AbstractChannelHandlerContext.invokeChannelRead(AbstractChannelHandlerContext.java:379)\n", + "\tat io.netty.channel.AbstractChannelHandlerContext.invokeChannelRead(AbstractChannelHandlerContext.java:365)\n", + "\tat io.netty.channel.AbstractChannelHandlerContext.fireChannelRead(AbstractChannelHandlerContext.java:357)\n", + "\tat org.apache.spark.network.util.TransportFrameDecoder.channelRead(TransportFrameDecoder.java:102)\n", + "\tat io.netty.channel.AbstractChannelHandlerContext.invokeChannelRead(AbstractChannelHandlerContext.java:379)\n", + "\tat io.netty.channel.AbstractChannelHandlerContext.invokeChannelRead(AbstractChannelHandlerContext.java:365)\n", + "\tat io.netty.channel.AbstractChannelHandlerContext.fireChannelRead(AbstractChannelHandlerContext.java:357)\n", + "\tat io.netty.channel.DefaultChannelPipeline$HeadContext.channelRead(DefaultChannelPipeline.java:1410)\n", + "\tat io.netty.channel.AbstractChannelHandlerContext.invokeChannelRead(AbstractChannelHandlerContext.java:379)\n", + "\tat io.netty.channel.AbstractChannelHandlerContext.invokeChannelRead(AbstractChannelHandlerContext.java:365)\n", + "\tat io.netty.channel.DefaultChannelPipeline.fireChannelRead(DefaultChannelPipeline.java:919)\n", + "\tat io.netty.channel.nio.AbstractNioByteChannel$NioByteUnsafe.read(AbstractNioByteChannel.java:163)\n", + "\tat io.netty.channel.nio.NioEventLoop.processSelectedKey(NioEventLoop.java:714)\n", + "\tat io.netty.channel.nio.NioEventLoop.processSelectedKeysOptimized(NioEventLoop.java:650)\n", + "\tat io.netty.channel.nio.NioEventLoop.processSelectedKeys(NioEventLoop.java:576)\n", + "\tat io.netty.channel.nio.NioEventLoop.run(NioEventLoop.java:493)\n", + "\tat io.netty.util.concurrent.SingleThreadEventExecutor$4.run(SingleThreadEventExecutor.java:989)\n", + "\tat io.netty.util.internal.ThreadExecutorMap$2.run(ThreadExecutorMap.java:74)\n", + "\tat io.netty.util.concurrent.FastThreadLocalRunnable.run(FastThreadLocalRunnable.java:30)\n", + "\t... 1 more\n", + "Caused by: java.lang.OutOfMemoryError: Java heap space\n", + "23/11/24 21:05:17 WARN org.apache.spark.scheduler.TaskSetManager: Lost task 557.0 in stage 19.0 (TID 2283) (hub-msca-bdp-dphub-students-test-ridhi-w-0.c.msca-bdp-student-ap.internal executor 3): TaskResultLost (result lost from block manager)\n", + "23/11/24 21:05:19 WARN org.apache.spark.storage.BlockManager: Failed to fetch block after 1 fetch failures. 
Most recent failure cause:\n", + "org.apache.spark.SparkException: Exception thrown in awaitResult: \n", + "\tat org.apache.spark.util.ThreadUtils$.awaitResult(ThreadUtils.scala:301)\n", + "\tat org.apache.spark.network.BlockTransferService.fetchBlockSync(BlockTransferService.scala:103)\n", + "\tat org.apache.spark.storage.BlockManager.fetchRemoteManagedBuffer(BlockManager.scala:1069)\n", + "\tat org.apache.spark.storage.BlockManager.$anonfun$getRemoteBlock$8(BlockManager.scala:1013)\n", + "\tat scala.Option.orElse(Option.scala:447)\n", + "\tat org.apache.spark.storage.BlockManager.getRemoteBlock(BlockManager.scala:1013)\n", + "\tat org.apache.spark.storage.BlockManager.getRemoteBytes(BlockManager.scala:1151)\n", + "\tat org.apache.spark.scheduler.TaskResultGetter$$anon$3.$anonfun$run$1(TaskResultGetter.scala:88)\n", + "\tat scala.runtime.java8.JFunction0$mcV$sp.apply(JFunction0$mcV$sp.java:23)\n", + "\tat org.apache.spark.util.Utils$.logUncaughtExceptions(Utils.scala:1996)\n", + "\tat org.apache.spark.scheduler.TaskResultGetter$$anon$3.run(TaskResultGetter.scala:63)\n", + "\tat java.util.concurrent.ThreadPoolExecutor.runWorker(ThreadPoolExecutor.java:1149)\n", + "\tat java.util.concurrent.ThreadPoolExecutor$Worker.run(ThreadPoolExecutor.java:624)\n", + "\tat java.lang.Thread.run(Thread.java:750)\n", + "Caused by: java.util.concurrent.ExecutionException: Boxed Error\n", + "\tat scala.concurrent.impl.Promise$.resolver(Promise.scala:87)\n", + "\tat scala.concurrent.impl.Promise$.scala$concurrent$impl$Promise$$resolveTry(Promise.scala:79)\n", + "\tat scala.concurrent.impl.Promise$DefaultPromise.tryComplete(Promise.scala:284)\n", + "\tat scala.concurrent.Promise.complete(Promise.scala:53)\n", + "\tat scala.concurrent.Promise.complete$(Promise.scala:52)\n", + "\tat scala.concurrent.impl.Promise$DefaultPromise.complete(Promise.scala:187)\n", + "\tat scala.concurrent.Promise.failure(Promise.scala:104)\n", + "\tat scala.concurrent.Promise.failure$(Promise.scala:104)\n", + "\tat scala.concurrent.impl.Promise$DefaultPromise.failure(Promise.scala:187)\n", + "\tat org.apache.spark.network.BlockTransferService$$anon$1.onBlockFetchSuccess(BlockTransferService.scala:98)\n", + "\tat org.apache.spark.network.shuffle.RetryingBlockFetcher$RetryingBlockFetchListener.onBlockFetchSuccess(RetryingBlockFetcher.java:216)\n", + "\tat org.apache.spark.network.shuffle.OneForOneBlockFetcher$ChunkCallback.onSuccess(OneForOneBlockFetcher.java:190)\n", + "\tat org.apache.spark.network.client.TransportResponseHandler.handle(TransportResponseHandler.java:171)\n", + "\tat org.apache.spark.network.server.TransportChannelHandler.channelRead0(TransportChannelHandler.java:142)\n", + "\tat org.apache.spark.network.server.TransportChannelHandler.channelRead0(TransportChannelHandler.java:53)\n", + "\tat io.netty.channel.SimpleChannelInboundHandler.channelRead(SimpleChannelInboundHandler.java:99)\n", + "\tat io.netty.channel.AbstractChannelHandlerContext.invokeChannelRead(AbstractChannelHandlerContext.java:379)\n", + "\tat io.netty.channel.AbstractChannelHandlerContext.invokeChannelRead(AbstractChannelHandlerContext.java:365)\n", + "\tat io.netty.channel.AbstractChannelHandlerContext.fireChannelRead(AbstractChannelHandlerContext.java:357)\n", + "\tat io.netty.handler.timeout.IdleStateHandler.channelRead(IdleStateHandler.java:286)\n", + "\tat io.netty.channel.AbstractChannelHandlerContext.invokeChannelRead(AbstractChannelHandlerContext.java:379)\n", + "\tat 
io.netty.channel.AbstractChannelHandlerContext.invokeChannelRead(AbstractChannelHandlerContext.java:365)\n", + "\tat io.netty.channel.AbstractChannelHandlerContext.fireChannelRead(AbstractChannelHandlerContext.java:357)\n", + "\tat io.netty.handler.codec.MessageToMessageDecoder.channelRead(MessageToMessageDecoder.java:103)\n", + "\tat io.netty.channel.AbstractChannelHandlerContext.invokeChannelRead(AbstractChannelHandlerContext.java:379)\n", + "\tat io.netty.channel.AbstractChannelHandlerContext.invokeChannelRead(AbstractChannelHandlerContext.java:365)\n", + "\tat io.netty.channel.AbstractChannelHandlerContext.fireChannelRead(AbstractChannelHandlerContext.java:357)\n", + "\tat org.apache.spark.network.util.TransportFrameDecoder.channelRead(TransportFrameDecoder.java:102)\n", + "\tat io.netty.channel.AbstractChannelHandlerContext.invokeChannelRead(AbstractChannelHandlerContext.java:379)\n", + "\tat io.netty.channel.AbstractChannelHandlerContext.invokeChannelRead(AbstractChannelHandlerContext.java:365)\n", + "\tat io.netty.channel.AbstractChannelHandlerContext.fireChannelRead(AbstractChannelHandlerContext.java:357)\n", + "\tat io.netty.channel.DefaultChannelPipeline$HeadContext.channelRead(DefaultChannelPipeline.java:1410)\n", + "\tat io.netty.channel.AbstractChannelHandlerContext.invokeChannelRead(AbstractChannelHandlerContext.java:379)\n", + "\tat io.netty.channel.AbstractChannelHandlerContext.invokeChannelRead(AbstractChannelHandlerContext.java:365)\n", + "\tat io.netty.channel.DefaultChannelPipeline.fireChannelRead(DefaultChannelPipeline.java:919)\n", + "\tat io.netty.channel.nio.AbstractNioByteChannel$NioByteUnsafe.read(AbstractNioByteChannel.java:163)\n", + "\tat io.netty.channel.nio.NioEventLoop.processSelectedKey(NioEventLoop.java:714)\n", + "\tat io.netty.channel.nio.NioEventLoop.processSelectedKeysOptimized(NioEventLoop.java:650)\n", + "\tat io.netty.channel.nio.NioEventLoop.processSelectedKeys(NioEventLoop.java:576)\n", + "\tat io.netty.channel.nio.NioEventLoop.run(NioEventLoop.java:493)\n", + "\tat io.netty.util.concurrent.SingleThreadEventExecutor$4.run(SingleThreadEventExecutor.java:989)\n", + "\tat io.netty.util.internal.ThreadExecutorMap$2.run(ThreadExecutorMap.java:74)\n", + "\tat io.netty.util.concurrent.FastThreadLocalRunnable.run(FastThreadLocalRunnable.java:30)\n", + "\t... 1 more\n", + "Caused by: java.lang.OutOfMemoryError: Java heap space\n", + "23/11/24 21:05:20 WARN org.apache.spark.storage.BlockManager: Failed to fetch block after 1 fetch failures. 
Most recent failure cause:\n", + "org.apache.spark.SparkException: Exception thrown in awaitResult: \n", + "\t... 
1 more\n", + "Caused by: java.lang.OutOfMemoryError: Java heap space\n", + "23/11/24 21:05:20 WARN org.apache.spark.scheduler.TaskSetManager: Lost task 549.0 in stage 19.0 (TID 2275) (hub-msca-bdp-dphub-students-test-ridhi-w-0.c.msca-bdp-student-ap.internal executor 3): TaskResultLost (result lost from block manager)\n", + "23/11/24 21:05:20 WARN org.apache.spark.scheduler.TaskSetManager: Lost task 551.0 in stage 19.0 (TID 2277) (hub-msca-bdp-dphub-students-test-ridhi-w-1.c.msca-bdp-student-ap.internal executor 7): TaskResultLost (result lost from block manager)\n" + ] + }, + { + "ename": "KeyboardInterrupt", + "evalue": "", + "output_type": "error", + "traceback": [ + "\u001b[0;31m---------------------------------------------------------------------------\u001b[0m", + "\u001b[0;31mKeyboardInterrupt\u001b[0m Traceback (most recent call last)", + "Cell \u001b[0;32mIn[8], line 1\u001b[0m\n\u001b[0;32m----> 1\u001b[0m pdf_all \u001b[38;5;241m=\u001b[39m \u001b[43mdf_all\u001b[49m\u001b[38;5;241;43m.\u001b[39;49m\u001b[43mselect\u001b[49m\u001b[43m(\u001b[49m\u001b[38;5;124;43m'\u001b[39;49m\u001b[38;5;124;43mID\u001b[39;49m\u001b[38;5;124;43m'\u001b[39;49m\u001b[43m,\u001b[49m\u001b[43m \u001b[49m\u001b[38;5;124;43m'\u001b[39;49m\u001b[38;5;124;43mpickup_lat\u001b[39;49m\u001b[38;5;124;43m'\u001b[39;49m\u001b[43m,\u001b[49m\u001b[38;5;124;43m'\u001b[39;49m\u001b[38;5;124;43mpickup_lon\u001b[39;49m\u001b[38;5;124;43m'\u001b[39;49m\u001b[43m,\u001b[49m\u001b[43m \u001b[49m\u001b[38;5;124;43m'\u001b[39;49m\u001b[38;5;124;43mdropoff_lat\u001b[39;49m\u001b[38;5;124;43m'\u001b[39;49m\u001b[43m,\u001b[49m\u001b[43m \u001b[49m\u001b[38;5;124;43m'\u001b[39;49m\u001b[38;5;124;43mdropoff_lon\u001b[39;49m\u001b[38;5;124;43m'\u001b[39;49m\u001b[43m,\u001b[49m\u001b[43m \u001b[49m\u001b[38;5;124;43m'\u001b[39;49m\u001b[38;5;124;43mmonth\u001b[39;49m\u001b[38;5;124;43m'\u001b[39;49m\u001b[43m,\u001b[49m\u001b[38;5;124;43m'\u001b[39;49m\u001b[38;5;124;43mday_of_month\u001b[39;49m\u001b[38;5;124;43m'\u001b[39;49m\u001b[43m,\u001b[49m\u001b[43m \u001b[49m\u001b[38;5;124;43m'\u001b[39;49m\u001b[38;5;124;43mday\u001b[39;49m\u001b[38;5;124;43m'\u001b[39;49m\u001b[43m,\u001b[49m\u001b[43m \u001b[49m\u001b[38;5;124;43m'\u001b[39;49m\u001b[38;5;124;43mtrip_duration\u001b[39;49m\u001b[38;5;124;43m'\u001b[39;49m\u001b[43m)\u001b[49m\u001b[38;5;241;43m.\u001b[39;49m\u001b[43mtoPandas\u001b[49m\u001b[43m(\u001b[49m\u001b[43m)\u001b[49m\n", + "File \u001b[0;32m/usr/lib/spark/python/pyspark/sql/pandas/conversion.py:141\u001b[0m, in \u001b[0;36mPandasConversionMixin.toPandas\u001b[0;34m(self)\u001b[0m\n\u001b[1;32m 138\u001b[0m \u001b[38;5;28;01mraise\u001b[39;00m\n\u001b[1;32m 140\u001b[0m \u001b[38;5;66;03m# Below is toPandas without Arrow optimization.\u001b[39;00m\n\u001b[0;32m--> 141\u001b[0m pdf \u001b[38;5;241m=\u001b[39m pd\u001b[38;5;241m.\u001b[39mDataFrame\u001b[38;5;241m.\u001b[39mfrom_records(\u001b[38;5;28;43mself\u001b[39;49m\u001b[38;5;241;43m.\u001b[39;49m\u001b[43mcollect\u001b[49m\u001b[43m(\u001b[49m\u001b[43m)\u001b[49m, columns\u001b[38;5;241m=\u001b[39m\u001b[38;5;28mself\u001b[39m\u001b[38;5;241m.\u001b[39mcolumns)\n\u001b[1;32m 142\u001b[0m column_counter \u001b[38;5;241m=\u001b[39m Counter(\u001b[38;5;28mself\u001b[39m\u001b[38;5;241m.\u001b[39mcolumns)\n\u001b[1;32m 144\u001b[0m dtype \u001b[38;5;241m=\u001b[39m [\u001b[38;5;28;01mNone\u001b[39;00m] \u001b[38;5;241m*\u001b[39m \u001b[38;5;28mlen\u001b[39m(\u001b[38;5;28mself\u001b[39m\u001b[38;5;241m.\u001b[39mschema)\n", + "File 
\u001b[0;32m/usr/lib/spark/python/pyspark/sql/dataframe.py:677\u001b[0m, in \u001b[0;36mDataFrame.collect\u001b[0;34m(self)\u001b[0m\n\u001b[1;32m 667\u001b[0m \u001b[38;5;250m\u001b[39m\u001b[38;5;124;03m\"\"\"Returns all the records as a list of :class:`Row`.\u001b[39;00m\n\u001b[1;32m 668\u001b[0m \n\u001b[1;32m 669\u001b[0m \u001b[38;5;124;03m.. versionadded:: 1.3.0\u001b[39;00m\n\u001b[0;32m (...)\u001b[0m\n\u001b[1;32m 674\u001b[0m \u001b[38;5;124;03m[Row(age=2, name='Alice'), Row(age=5, name='Bob')]\u001b[39;00m\n\u001b[1;32m 675\u001b[0m \u001b[38;5;124;03m\"\"\"\u001b[39;00m\n\u001b[1;32m 676\u001b[0m \u001b[38;5;28;01mwith\u001b[39;00m SCCallSiteSync(\u001b[38;5;28mself\u001b[39m\u001b[38;5;241m.\u001b[39m_sc) \u001b[38;5;28;01mas\u001b[39;00m css:\n\u001b[0;32m--> 677\u001b[0m sock_info \u001b[38;5;241m=\u001b[39m \u001b[38;5;28;43mself\u001b[39;49m\u001b[38;5;241;43m.\u001b[39;49m\u001b[43m_jdf\u001b[49m\u001b[38;5;241;43m.\u001b[39;49m\u001b[43mcollectToPython\u001b[49m\u001b[43m(\u001b[49m\u001b[43m)\u001b[49m\n\u001b[1;32m 678\u001b[0m \u001b[38;5;28;01mreturn\u001b[39;00m \u001b[38;5;28mlist\u001b[39m(_load_from_socket(sock_info, BatchedSerializer(PickleSerializer())))\n", + "File \u001b[0;32m/usr/lib/spark/python/lib/py4j-0.10.9-src.zip/py4j/java_gateway.py:1303\u001b[0m, in \u001b[0;36mJavaMember.__call__\u001b[0;34m(self, *args)\u001b[0m\n\u001b[1;32m 1296\u001b[0m args_command, temp_args \u001b[38;5;241m=\u001b[39m \u001b[38;5;28mself\u001b[39m\u001b[38;5;241m.\u001b[39m_build_args(\u001b[38;5;241m*\u001b[39margs)\n\u001b[1;32m 1298\u001b[0m command \u001b[38;5;241m=\u001b[39m proto\u001b[38;5;241m.\u001b[39mCALL_COMMAND_NAME \u001b[38;5;241m+\u001b[39m\\\n\u001b[1;32m 1299\u001b[0m \u001b[38;5;28mself\u001b[39m\u001b[38;5;241m.\u001b[39mcommand_header \u001b[38;5;241m+\u001b[39m\\\n\u001b[1;32m 1300\u001b[0m args_command \u001b[38;5;241m+\u001b[39m\\\n\u001b[1;32m 1301\u001b[0m proto\u001b[38;5;241m.\u001b[39mEND_COMMAND_PART\n\u001b[0;32m-> 1303\u001b[0m answer \u001b[38;5;241m=\u001b[39m \u001b[38;5;28;43mself\u001b[39;49m\u001b[38;5;241;43m.\u001b[39;49m\u001b[43mgateway_client\u001b[49m\u001b[38;5;241;43m.\u001b[39;49m\u001b[43msend_command\u001b[49m\u001b[43m(\u001b[49m\u001b[43mcommand\u001b[49m\u001b[43m)\u001b[49m\n\u001b[1;32m 1304\u001b[0m return_value \u001b[38;5;241m=\u001b[39m get_return_value(\n\u001b[1;32m 1305\u001b[0m answer, \u001b[38;5;28mself\u001b[39m\u001b[38;5;241m.\u001b[39mgateway_client, \u001b[38;5;28mself\u001b[39m\u001b[38;5;241m.\u001b[39mtarget_id, \u001b[38;5;28mself\u001b[39m\u001b[38;5;241m.\u001b[39mname)\n\u001b[1;32m 1307\u001b[0m \u001b[38;5;28;01mfor\u001b[39;00m temp_arg \u001b[38;5;129;01min\u001b[39;00m temp_args:\n", + "File \u001b[0;32m/usr/lib/spark/python/lib/py4j-0.10.9-src.zip/py4j/java_gateway.py:1033\u001b[0m, in \u001b[0;36mGatewayClient.send_command\u001b[0;34m(self, command, retry, binary)\u001b[0m\n\u001b[1;32m 1031\u001b[0m connection \u001b[38;5;241m=\u001b[39m \u001b[38;5;28mself\u001b[39m\u001b[38;5;241m.\u001b[39m_get_connection()\n\u001b[1;32m 1032\u001b[0m \u001b[38;5;28;01mtry\u001b[39;00m:\n\u001b[0;32m-> 1033\u001b[0m response \u001b[38;5;241m=\u001b[39m \u001b[43mconnection\u001b[49m\u001b[38;5;241;43m.\u001b[39;49m\u001b[43msend_command\u001b[49m\u001b[43m(\u001b[49m\u001b[43mcommand\u001b[49m\u001b[43m)\u001b[49m\n\u001b[1;32m 1034\u001b[0m \u001b[38;5;28;01mif\u001b[39;00m binary:\n\u001b[1;32m 1035\u001b[0m \u001b[38;5;28;01mreturn\u001b[39;00m response, 
\u001b[38;5;28mself\u001b[39m\u001b[38;5;241m.\u001b[39m_create_connection_guard(connection)\n", + "File \u001b[0;32m/usr/lib/spark/python/lib/py4j-0.10.9-src.zip/py4j/java_gateway.py:1200\u001b[0m, in \u001b[0;36mGatewayConnection.send_command\u001b[0;34m(self, command)\u001b[0m\n\u001b[1;32m 1196\u001b[0m \u001b[38;5;28;01mraise\u001b[39;00m Py4JNetworkError(\n\u001b[1;32m 1197\u001b[0m \u001b[38;5;124m\"\u001b[39m\u001b[38;5;124mError while sending\u001b[39m\u001b[38;5;124m\"\u001b[39m, e, proto\u001b[38;5;241m.\u001b[39mERROR_ON_SEND)\n\u001b[1;32m 1199\u001b[0m \u001b[38;5;28;01mtry\u001b[39;00m:\n\u001b[0;32m-> 1200\u001b[0m answer \u001b[38;5;241m=\u001b[39m smart_decode(\u001b[38;5;28;43mself\u001b[39;49m\u001b[38;5;241;43m.\u001b[39;49m\u001b[43mstream\u001b[49m\u001b[38;5;241;43m.\u001b[39;49m\u001b[43mreadline\u001b[49m\u001b[43m(\u001b[49m\u001b[43m)\u001b[49m[:\u001b[38;5;241m-\u001b[39m\u001b[38;5;241m1\u001b[39m])\n\u001b[1;32m 1201\u001b[0m logger\u001b[38;5;241m.\u001b[39mdebug(\u001b[38;5;124m\"\u001b[39m\u001b[38;5;124mAnswer received: \u001b[39m\u001b[38;5;132;01m{0}\u001b[39;00m\u001b[38;5;124m\"\u001b[39m\u001b[38;5;241m.\u001b[39mformat(answer))\n\u001b[1;32m 1202\u001b[0m \u001b[38;5;28;01mif\u001b[39;00m answer\u001b[38;5;241m.\u001b[39mstartswith(proto\u001b[38;5;241m.\u001b[39mRETURN_MESSAGE):\n", + "File \u001b[0;32m/opt/conda/miniconda3/lib/python3.8/socket.py:669\u001b[0m, in \u001b[0;36mSocketIO.readinto\u001b[0;34m(self, b)\u001b[0m\n\u001b[1;32m 667\u001b[0m \u001b[38;5;28;01mwhile\u001b[39;00m \u001b[38;5;28;01mTrue\u001b[39;00m:\n\u001b[1;32m 668\u001b[0m \u001b[38;5;28;01mtry\u001b[39;00m:\n\u001b[0;32m--> 669\u001b[0m \u001b[38;5;28;01mreturn\u001b[39;00m \u001b[38;5;28;43mself\u001b[39;49m\u001b[38;5;241;43m.\u001b[39;49m\u001b[43m_sock\u001b[49m\u001b[38;5;241;43m.\u001b[39;49m\u001b[43mrecv_into\u001b[49m\u001b[43m(\u001b[49m\u001b[43mb\u001b[49m\u001b[43m)\u001b[49m\n\u001b[1;32m 670\u001b[0m \u001b[38;5;28;01mexcept\u001b[39;00m timeout:\n\u001b[1;32m 671\u001b[0m \u001b[38;5;28mself\u001b[39m\u001b[38;5;241m.\u001b[39m_timeout_occurred \u001b[38;5;241m=\u001b[39m \u001b[38;5;28;01mTrue\u001b[39;00m\n", + "\u001b[0;31mKeyboardInterrupt\u001b[0m: " + ] + } + ], + "source": [ + "pdf_all = df_2018.select('ID', 'pickup_lat','pickup_lon', 'dropoff_lat', 'dropoff_lon', 'month','day_of_month', 'day', 'trip_duration').toPandas()\n" + ] + }, + { + "cell_type": "code", + "execution_count": 25, + "id": "8eefb77b-6fd3-46e3-b20b-7c105168ab84", + "metadata": {}, + "outputs": [ + { + "ename": "TypeError", + "evalue": "__init__() got an unexpected keyword argument 'minPoints'", + "output_type": "error", + "traceback": [ + "\u001b[0;31m---------------------------------------------------------------------------\u001b[0m", + "\u001b[0;31mTypeError\u001b[0m Traceback (most recent call last)", + "Cell \u001b[0;32mIn[25], line 6\u001b[0m\n\u001b[1;32m 1\u001b[0m \u001b[38;5;66;03m#assembler = VectorAssembler(inputCols=[\"pickup_lon\", \"pickup_lat\"], outputCol=\"pickup_features\")\u001b[39;00m\n\u001b[1;32m 2\u001b[0m \u001b[38;5;66;03m#df_all = assembler.transform(df_all)\u001b[39;00m\n\u001b[1;32m 3\u001b[0m \n\u001b[1;32m 4\u001b[0m \u001b[38;5;66;03m# Fit DBSCAN model for pickup location\u001b[39;00m\n\u001b[1;32m 5\u001b[0m epsilon \u001b[38;5;241m=\u001b[39m \u001b[38;5;241m1.5\u001b[39m \u001b[38;5;241m/\u001b[39m \u001b[38;5;241m6371\u001b[39m\n\u001b[0;32m----> 6\u001b[0m dbscan \u001b[38;5;241m=\u001b[39m 
\u001b[43mDBSCAN\u001b[49m\u001b[43m(\u001b[49m\u001b[43meps\u001b[49m\u001b[38;5;241;43m=\u001b[39;49m\u001b[43mepsilon\u001b[49m\u001b[43m,\u001b[49m\u001b[43m \u001b[49m\u001b[43mminPoints\u001b[49m\u001b[38;5;241;43m=\u001b[39;49m\u001b[38;5;241;43m1\u001b[39;49m\u001b[43m,\u001b[49m\u001b[43m \u001b[49m\u001b[43minputCol\u001b[49m\u001b[38;5;241;43m=\u001b[39;49m\u001b[38;5;124;43m\"\u001b[39;49m\u001b[38;5;124;43mpickup_features\u001b[39;49m\u001b[38;5;124;43m\"\u001b[39;49m\u001b[43m,\u001b[49m\u001b[43m \u001b[49m\u001b[43moutputCol\u001b[49m\u001b[38;5;241;43m=\u001b[39;49m\u001b[38;5;124;43m\"\u001b[39;49m\u001b[38;5;124;43mpickup_prediction\u001b[39;49m\u001b[38;5;124;43m\"\u001b[39;49m\u001b[43m)\u001b[49m\n\u001b[1;32m 7\u001b[0m model_pickup \u001b[38;5;241m=\u001b[39m dbscan\u001b[38;5;241m.\u001b[39mfit(df_all)\n\u001b[1;32m 8\u001b[0m df_all \u001b[38;5;241m=\u001b[39m model_pickup\u001b[38;5;241m.\u001b[39mtransform(df_all)\n", + "File \u001b[0;32m/opt/conda/miniconda3/lib/python3.8/site-packages/sklearn/utils/validation.py:63\u001b[0m, in \u001b[0;36m_deprecate_positional_args.._inner_deprecate_positional_args..inner_f\u001b[0;34m(*args, **kwargs)\u001b[0m\n\u001b[1;32m 61\u001b[0m extra_args \u001b[38;5;241m=\u001b[39m \u001b[38;5;28mlen\u001b[39m(args) \u001b[38;5;241m-\u001b[39m \u001b[38;5;28mlen\u001b[39m(all_args)\n\u001b[1;32m 62\u001b[0m \u001b[38;5;28;01mif\u001b[39;00m extra_args \u001b[38;5;241m<\u001b[39m\u001b[38;5;241m=\u001b[39m \u001b[38;5;241m0\u001b[39m:\n\u001b[0;32m---> 63\u001b[0m \u001b[38;5;28;01mreturn\u001b[39;00m \u001b[43mf\u001b[49m\u001b[43m(\u001b[49m\u001b[38;5;241;43m*\u001b[39;49m\u001b[43margs\u001b[49m\u001b[43m,\u001b[49m\u001b[43m \u001b[49m\u001b[38;5;241;43m*\u001b[39;49m\u001b[38;5;241;43m*\u001b[39;49m\u001b[43mkwargs\u001b[49m\u001b[43m)\u001b[49m\n\u001b[1;32m 65\u001b[0m \u001b[38;5;66;03m# extra_args > 0\u001b[39;00m\n\u001b[1;32m 66\u001b[0m args_msg \u001b[38;5;241m=\u001b[39m [\u001b[38;5;124m'\u001b[39m\u001b[38;5;132;01m{}\u001b[39;00m\u001b[38;5;124m=\u001b[39m\u001b[38;5;132;01m{}\u001b[39;00m\u001b[38;5;124m'\u001b[39m\u001b[38;5;241m.\u001b[39mformat(name, arg)\n\u001b[1;32m 67\u001b[0m \u001b[38;5;28;01mfor\u001b[39;00m name, arg \u001b[38;5;129;01min\u001b[39;00m \u001b[38;5;28mzip\u001b[39m(kwonly_args[:extra_args],\n\u001b[1;32m 68\u001b[0m args[\u001b[38;5;241m-\u001b[39mextra_args:])]\n", + "\u001b[0;31mTypeError\u001b[0m: __init__() got an unexpected keyword argument 'minPoints'" + ] + } + ], + "source": [ + "# Fit DBSCAN model for pickup location using scikit-learn\n", + "epsilon_pickup = 1.5 / 6371 # Earth's radius in kilometers\n", + "dbscan_pickup = DBSCAN(eps=epsilon_pickup, min_samples=5)\n", + "pickup_features = np.array(pdf_all[[\"pickup_lon\", \"pickup_lat\"]])\n", + "pdf_all[\"pickup_prediction\"] = dbscan_pickup.fit_predict(pickup_features)\n", + "\n", + "# Filter out noise points for pickup location (prediction == -1)\n", + "pdf_all = pdf_all[pdf_all[\"pickup_prediction\"] != -1]\n" + ] + }, + { + "cell_type": "code", + "execution_count": null, + "id": "fb2f2533-f232-40f2-854b-cecfc222c172", + "metadata": {}, + "outputs": [], + "source": [ + "assembler = VectorAssembler(inputCols=[\"pickup_lon\", \"pickup_lat\"], outputCol=\"pickup_features\")\n", + "df_all = assembler.transform(df_all)\n", + "\n", + "# Fit DBSCAN model for pickup location\n", + "epsilon = 1.5 / kms_per_rad\n", + "dbscan = DBSCAN(eps=epsilon, minPoints=1, inputCol=\"pickup_features\", outputCol=\"pickup_prediction\")\n", + 
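"# NOTE (editor's sketch, not the author's code): scikit-learn's DBSCAN has no minPoints, inputCol,\n",
+ "# or outputCol parameters and no Spark-style fit/transform API, which is why this constructor\n",
+ "# raises the TypeError shown in an earlier cell. A hedged sklearn equivalent on the collected\n",
+ "# pandas frame (assuming pdf_all from the toPandas cell and the epsilon defined above) would be:\n",
+ "#   coords = np.radians(pdf_all[[\"pickup_lat\", \"pickup_lon\"]].to_numpy())\n",
+ "#   pdf_all[\"pickup_prediction\"] = DBSCAN(eps=epsilon, min_samples=1, metric=\"haversine\", algorithm=\"ball_tree\").fit_predict(coords)\n",
+ "# With metric=\"haversine\" the coordinates must be in radians, so eps = 1.5 / 6371 corresponds to roughly 1.5 km.\n",
+ "# A second DBSCAN configured for the dropoff columns would be needed to produce dropoff_prediction below.\n",
+ 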
"model_pickup = dbscan.fit(df_all)\n", + "df_all = model_pickup.transform(df_all)\n", + "\n", + "# Filter out noise points for pickup location (prediction == -1)\n", + "df_all = df_all.filter(col(\"pickup_prediction\") != -1)\n", + "\n", + "# Assemble features into a vector for dropoff location\n", + "assembler = VectorAssembler(inputCols=[\"dropoff_lon\", \"dropoff_lat\"], outputCol=\"dropoff_features\")\n", + "df_all = assembler.transform(df_all)\n", + "\n", + "# Fit DBSCAN model for dropoff location\n", + "model_dropoff = dbscan.fit(df_all)\n", + "df_all = model_dropoff.transform(df_all)\n", + "\n", + "# Filter out noise points for dropoff location (prediction == -1)\n", + "df_all = df_all.filter(col(\"dropoff_prediction\") != -1)\n", + "\n", + "# Collect cluster labels and centroids for pickup location\n", + "cluster_labels_pickup = df_all.select(\"pickup_prediction\").distinct().rdd.flatMap(lambda x: x).collect()\n", + "clusters_pickup = [df_all.filter(col(\"pickup_prediction\") == label).select(\"pickup_lon\", \"pickup_lat\").collect() for label in cluster_labels_pickup]\n", + "\n", + "# Collect cluster labels and centroids for dropoff location\n", + "cluster_labels_dropoff = df_all.select(\"dropoff_prediction\").distinct().rdd.flatMap(lambda x: x).collect()\n", + "clusters_dropoff = [df_all.filter(col(\"dropoff_prediction\") == label).select(\"dropoff_lon\", \"dropoff_lat\").collect() for label in cluster_labels_dropoff]\n", + "\n", + "# Find the point in each cluster that is closest to its centroid for pickup location\n", + "centermost_points_pickup = [get_centermost_point(cluster) for cluster in clusters_pickup]\n", + "\n", + "# Find the point in each cluster that is closest to its centroid for dropoff location\n", + "centermost_points_dropoff = [get_centermost_point(cluster) for cluster in clusters_dropoff]\n", + "\n", + "# Unzip the list of centermost points (lat, lon) tuples into separate lat and lon lists for pickup location\n", + "lats_pickup, lons_pickup = zip(*centermost_points_pickup)\n", + "rep_points_pickup = spark.createDataFrame(list(zip(lons_pickup, lats_pickup)), [\"pickup_lon\", \"pickup_lat\"])\n", + "\n", + "# Unzip the list of centermost points (lat, lon) tuples into separate lat and lon lists for dropoff location\n", + "lats_dropoff, lons_dropoff = zip(*centermost_points_dropoff)\n", + "rep_points_dropoff = spark.createDataFrame(list(zip(lons_dropoff, lats_dropoff)), [\"dropoff_lon\", \"dropoff_lat\"])\n", + "\n", + "# Pull rows from the original data set where lat/lon match the lat/lon of each row of representative points for pickup location\n", + "rs_pickup = rep_points_pickup.join(df_all, [\"pickup_lon\", \"pickup_lat\"])\n", + "\n", + "# Pull rows from the original data set where lat/lon match the lat/lon of each row of representative points for dropoff location\n", + "rs_dropoff = rep_points_dropoff.join(df_all, [\"dropoff_lon\", \"dropoff_lat\"])\n", + "\n", + "# All done, print outcome\n", + "message_pickup = 'Clustered {:,} pickup points down to {:,} points, for {:.2f}% compression in {:,.2f} seconds.'\n", + "message_dropoff = 'Clustered {:,} dropoff points down to {:,} points, for {:.2f}% compression in {:,.2f} seconds.'\n", + "\n", + "print(message_pickup.format(df_all.count(), rs_pickup.count(), 100*(1 - float(rs_pickup.count()) / df_all.count()), time.time()-start_time))\n", + "print(message_dropoff.format(df_all.count(), rs_dropoff.count(), 100*(1 - float(rs_dropoff.count()) / df_all.count()), time.time()-start_time))" + ] + } + ], + "metadata": { + 
"kernelspec": { + "display_name": "PySpark", + "language": "python", + "name": "pyspark" + }, + "language_info": { + "codemirror_mode": { + "name": "ipython", + "version": 3 + }, + "file_extension": ".py", + "mimetype": "text/x-python", + "name": "python", + "nbconvert_exporter": "python", + "pygments_lexer": "ipython3", + "version": "3.8.15" + } + }, + "nbformat": 4, + "nbformat_minor": 5 +}