
Commit

logging confs of spark context and session
lmassaoy committed Feb 10, 2025
1 parent 9998c4a commit 8cf09cf
Showing 2 changed files with 10 additions and 4 deletions.
@@ -28,8 +28,7 @@ public class NuEventEmitter {
  private static final Set<String> WANTED_EVENT_NAME_SUBSTRINGS = new HashSet<>(
      Arrays.asList(
          ".execute_insert_into_hadoop_fs_relation_command.",
-         ".adaptive_spark_plan.",
-         "."
+         ".adaptive_spark_plan."
      )
  );
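
For context on this change: dropping the catch-all "." entry narrows the allow-list, since under substring matching a lone "." would match any dot-delimited event name. Below is a minimal, hypothetical sketch of how such an allow-list is typically applied; the isWanted helper and the sample event names are assumptions, as NuEventEmitter's actual matching logic is not part of this diff.

import java.util.Arrays;
import java.util.HashSet;
import java.util.Set;

public class EventNameFilterSketch {
    private static final Set<String> WANTED_EVENT_NAME_SUBSTRINGS = new HashSet<>(
        Arrays.asList(
            ".execute_insert_into_hadoop_fs_relation_command.",
            ".adaptive_spark_plan."
        )
    );

    // Hypothetical check: keep an event only when its name contains one of the
    // wanted substrings.
    static boolean isWanted(String eventName) {
        return WANTED_EVENT_NAME_SUBSTRINGS.stream().anyMatch(eventName::contains);
    }

    public static void main(String[] args) {
        System.out.println(isWanted(".adaptive_spark_plan.collect_limit.")); // true
        System.out.println(isWanted(".drop_table_command."));                // false
    }
}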

@@ -25,12 +25,16 @@

import java.time.ZoneOffset;
import java.time.ZonedDateTime;
import java.util.HashMap;
import java.util.List;
import java.util.Map;
import java.util.Optional;
import java.util.Stack;
import java.util.concurrent.atomic.AtomicBoolean;

import lombok.extern.slf4j.Slf4j;
import scala.Tuple2;

import org.apache.spark.scheduler.ActiveJob;
import org.apache.spark.scheduler.JobFailed;
import org.apache.spark.scheduler.SparkListenerApplicationEnd;
@@ -80,8 +84,12 @@ public SparkSQLExecutionContext(
@Override
public void start(SparkListenerSQLExecutionStart startEvent) {
log.info("SparkListenerSQLExecutionStart - executionId: {}", startEvent.executionId());
Tuple2<String,String>[] allConfs = olContext.getSparkContext().get().getConf().getAll();
for (Tuple2<String,String> conf : allConfs) {
log.info("SparkListenerSQLExecutionStart - conf: {}", conf.toString());
}
log.info("SparkListenerSQLExecutionStart - mapOfConfsFromContext: {}", olContext.getSparkSession().get().conf().getAll().toString());
log.info("SparkListenerSQLExecutionStart - event: {}", startEvent.toString());
log.info("SparkListenerSQLExecutionStart - event.sparkPlanInfo: {}", startEvent.sparkPlanInfo());
if (log.isDebugEnabled()) {
log.debug("SparkListenerSQLExecutionStart - executionId: {}", startEvent.executionId());
}
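
The new logging in start() reads configuration from two different views: SparkContext.getConf().getAll(), which returns an array of scala.Tuple2 key/value pairs, and SparkSession.conf().getAll(), which returns a Scala map from the session's RuntimeConfig. The following is a standalone sketch of those same calls, assuming a local Spark installation; the appName and master values are illustrative only.

import org.apache.spark.SparkContext;
import org.apache.spark.sql.SparkSession;
import scala.Tuple2;

public class ConfDumpSketch {
    public static void main(String[] args) {
        SparkSession spark = SparkSession.builder()
            .appName("conf-dump-sketch")   // illustrative
            .master("local[*]")            // illustrative
            .getOrCreate();

        // SparkContext-level conf: an array of scala.Tuple2 pairs, mirroring
        // olContext.getSparkContext().get().getConf().getAll() in the diff.
        SparkContext sc = spark.sparkContext();
        for (Tuple2<String, String> conf : sc.getConf().getAll()) {
            System.out.println("context conf: " + conf.toString());
        }

        // Session-level RuntimeConfig: a Scala immutable Map, which is what
        // sparkSession.conf().getAll().toString() prints as a single line in the diff.
        System.out.println("session confs: " + spark.conf().getAll().toString());

        spark.stop();
    }
}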
@@ -120,7 +128,6 @@ public void start(SparkListenerSQLExecutionStart startEvent) {
@Override
public void end(SparkListenerSQLExecutionEnd endEvent) {
log.info("SparkListenerSQLExecutionEnd - executionId: {}", endEvent.executionId());
log.info("SparkListenerSQLExecutionEnd - event: {}", endEvent.toString());
if (log.isDebugEnabled()) {
log.debug("SparkListenerSQLExecutionEnd - executionId: {}", endEvent.executionId());
}
