Skip to content

Commit 16c56bf

Browse files
Add logs for creation/completion of datadog spark listener (#6943)
Makes troubleshooting easier by confirming that the listener is created and running as expected
1 parent f015cfc commit 16c56bf

1 file changed

Lines changed: 11 additions & 0 deletions

File tree

dd-java-agent/instrumentation/spark/src/main/java/datadog/trace/instrumentation/spark/AbstractDatadogSparkListener.java

Lines changed: 11 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -41,6 +41,8 @@
4141
import org.apache.spark.sql.streaming.StateOperatorProgress;
4242
import org.apache.spark.sql.streaming.StreamingQueryListener;
4343
import org.apache.spark.sql.streaming.StreamingQueryProgress;
44+
import org.slf4j.Logger;
45+
import org.slf4j.LoggerFactory;
4446
import scala.Tuple2;
4547
import scala.collection.JavaConverters;
4648

@@ -53,6 +55,7 @@
5355
* still needed
5456
*/
5557
public abstract class AbstractDatadogSparkListener extends SparkListener {
58+
private static final Logger log = LoggerFactory.getLogger(AbstractDatadogSparkListener.class);
5659
public static volatile AbstractDatadogSparkListener listener = null;
5760
public static volatile boolean finishTraceOnApplicationEnd = true;
5861
public static volatile boolean isPysparkShell = false;
@@ -121,6 +124,8 @@ public AbstractDatadogSparkListener(SparkConf sparkConf, String appId, String sp
121124
databricksClusterName = sparkConf.get("spark.databricks.clusterUsageTags.clusterName", null);
122125
databricksServiceName = getDatabricksServiceName(sparkConf, databricksClusterName);
123126
sparkServiceName = getSparkServiceName(sparkConf, isRunningOnDatabricks);
127+
128+
log.info("Created datadog spark listener: {}", this.getClass().getSimpleName());
124129
}
125130

126131
/** Resource name of the spark job. Provide an implementation based on a specific scala version */
@@ -173,13 +178,19 @@ private void initApplicationSpanIfNotInitialized() {
173178

174179
@Override
175180
public void onApplicationEnd(SparkListenerApplicationEnd applicationEnd) {
181+
log.info(
182+
"Received spark application end event, finish trace on this event: {}",
183+
finishTraceOnApplicationEnd);
184+
176185
if (finishTraceOnApplicationEnd) {
177186
finishApplication(applicationEnd.time(), null, 0, null);
178187
}
179188
}
180189

181190
public synchronized void finishApplication(
182191
long time, Throwable throwable, int exitCode, String msg) {
192+
log.info("Finishing spark application trace");
193+
183194
if (applicationEnded) {
184195
return;
185196
}

0 commit comments

Comments
 (0)