Merged (changes from 15 commits)
@@ -10,6 +10,7 @@ configurations.configureEach {
dependencies {
compileOnly group: 'org.apache.spark', name: 'spark-core_2.12', version: '2.4.0'
compileOnly group: 'org.apache.spark', name: 'spark-sql_2.12', version: '2.4.0'
compileOnly group: 'org.apache.spark', name: 'spark-launcher_2.12', version: '2.4.0'

testFixturesImplementation group: 'com.datadoghq', name: 'sketches-java', version: '0.8.2'
testFixturesImplementation group: 'com.google.protobuf', name: 'protobuf-java', version: '3.14.0'
@@ -21,7 +22,12 @@ dependencies {
testFixturesCompileOnly group: 'org.apache.spark', name: 'spark-core_2.12', version: '2.4.0'
testFixturesCompileOnly group: 'org.apache.spark', name: 'spark-sql_2.12', version: '2.4.0'
testFixturesCompileOnly group: 'org.apache.spark', name: 'spark-yarn_2.12', version: '2.4.0'
testFixturesCompileOnly group: 'org.apache.spark', name: 'spark-launcher_2.12', version: '2.4.0'

testFixturesCompileOnly(libs.bundles.groovy)
testFixturesCompileOnly(libs.bundles.spock)

testImplementation project(':dd-java-agent:instrumentation-testing')
testImplementation group: 'org.apache.spark', name: 'spark-launcher_2.12', version: '2.4.0'
}

@@ -17,6 +17,12 @@
* @see <a href="https://spark.apache.org/docs/latest/configuration.html">Spark Configuration</a>
*/
class SparkConfAllowList {
// Using values from
// https://github.com/apache/spark/blob/v3.5.1/core/src/main/scala/org/apache/spark/internal/config/package.scala#L1150-L1158
static final String DEFAULT_REDACTION_REGEX = "(?i)secret|password|token|access.key|api.key";

private static final Pattern DEFAULT_REDACTION_PATTERN = Pattern.compile(DEFAULT_REDACTION_REGEX);

/**
* Job-specific parameters that can be used to control job execution or provide metadata about the
* job being executed
@@ -80,11 +86,17 @@ public static boolean canCaptureJobParameter(String parameterName) {
return allowedJobParams.contains(parameterName);
}

/** Redact a value if the key or value matches the default redaction pattern. */
public static String redactValue(String key, String value) {
if (DEFAULT_REDACTION_PATTERN.matcher(key).find()
|| DEFAULT_REDACTION_PATTERN.matcher(value).find()) {
return "[redacted]";
}
return value;
}

public static List<Map.Entry<String, String>> getRedactedSparkConf(SparkConf conf) {
- // Using values from
- // https://github.com/apache/spark/blob/v3.5.1/core/src/main/scala/org/apache/spark/internal/config/package.scala#L1150-L1158
- String redactionPattern =
-     conf.get("spark.redaction.regex", "(?i)secret|password|token|access.key|api.key");
+ String redactionPattern = conf.get("spark.redaction.regex", DEFAULT_REDACTION_REGEX);
List<Map.Entry<String, String>> redacted = new ArrayList<>();
Pattern pattern = Pattern.compile(redactionPattern);

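Not part of the diff: a minimal, self-contained sketch of the redaction behavior introduced above. The RedactValueSketch class and the sample key/value strings are illustrative; only the default regex and the redact-when-key-or-value-matches logic come from the change itself.

```java
import java.util.regex.Pattern;

// Stand-alone sketch mirroring SparkConfAllowList.redactValue (assumed default pattern).
public class RedactValueSketch {
  static final String DEFAULT_REDACTION_REGEX = "(?i)secret|password|token|access.key|api.key";
  static final Pattern DEFAULT_REDACTION_PATTERN = Pattern.compile(DEFAULT_REDACTION_REGEX);

  // Redact when either the key or the value matches the pattern.
  static String redactValue(String key, String value) {
    if (DEFAULT_REDACTION_PATTERN.matcher(key).find()
        || DEFAULT_REDACTION_PATTERN.matcher(value).find()) {
      return "[redacted]";
    }
    return value;
  }

  public static void main(String[] args) {
    System.out.println(redactValue("spark.hadoop.fs.s3a.access.key", "AKIAEXAMPLE")); // [redacted]
    System.out.println(redactValue("spark.executor.memory", "4g"));                   // 4g
  }
}
```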
@@ -0,0 +1,199 @@
package datadog.trace.instrumentation.spark;

import datadog.trace.api.DDTags;
import datadog.trace.api.sampling.PrioritySampling;
import datadog.trace.api.sampling.SamplingMechanism;
import datadog.trace.bootstrap.instrumentation.api.AgentSpan;
import datadog.trace.bootstrap.instrumentation.api.AgentTracer;
import java.lang.reflect.Field;
import java.util.Map;
import net.bytebuddy.asm.Advice;
import org.apache.spark.launcher.SparkAppHandle;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;

public class SparkLauncherAdvice {

private static final Logger log = LoggerFactory.getLogger(SparkLauncherAdvice.class);

static volatile AgentSpan launcherSpan;

private static volatile boolean shutdownHookRegistered = false;

/** Extract SparkLauncher configuration via reflection and set as span tags. */
private static void setLauncherConfigTags(AgentSpan span, Object launcher) {
try {
// SparkLauncher extends AbstractLauncher which has a 'builder' field
Field builderField = launcher.getClass().getSuperclass().getDeclaredField("builder");
builderField.setAccessible(true);
Object builder = builderField.get(launcher);
if (builder == null) {
return;
}

Class<?> builderClass = builder.getClass();
// Fields are on AbstractCommandBuilder (parent of SparkSubmitCommandBuilder)
Class<?> abstractBuilderClass = builderClass.getSuperclass();

setStringFieldAsTag(span, builder, abstractBuilderClass, "master", "master");
setStringFieldAsTag(span, builder, abstractBuilderClass, "deployMode", "deploy_mode");
setStringFieldAsTag(span, builder, abstractBuilderClass, "appName", "application_name");
setStringFieldAsTag(span, builder, abstractBuilderClass, "mainClass", "main_class");
setStringFieldAsTag(span, builder, abstractBuilderClass, "appResource", "app_resource");

// Extract spark conf entries and redact sensitive values
try {
Field confField = abstractBuilderClass.getDeclaredField("conf");
confField.setAccessible(true);
@SuppressWarnings("unchecked")
Map<String, String> conf = (Map<String, String>) confField.get(builder);
if (conf != null) {
for (Map.Entry<String, String> entry : conf.entrySet()) {
if (SparkConfAllowList.canCaptureJobParameter(entry.getKey())) {
Review comment (Contributor): Can't we use datadog.trace.instrumentation.spark.SparkConfAllowList#getRedactedSparkConf here the same way datadog.trace.instrumentation.spark.AbstractDatadogSparkListener#captureJobParameters does?
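// [Illustration, not part of the PR] The reviewer's suggestion, sketched with a hypothetical
// Map-based overload of getRedactedSparkConf (the existing method takes a SparkConf, while the
// launcher builder only exposes a Map<String, String>):
//   for (Map.Entry<String, String> e : SparkConfAllowList.getRedactedSparkConf(conf)) {
//     span.setTag("config." + e.getKey().replace('.', '_'), e.getValue());
//   }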
String value = SparkConfAllowList.redactValue(entry.getKey(), entry.getValue());
span.setTag("config." + entry.getKey().replace('.', '_'), value);
}
}
}
} catch (NoSuchFieldException e) {
log.debug("Could not find conf field on builder", e);
}
} catch (Exception e) {
log.debug("Failed to extract SparkLauncher configuration", e);
}
}

private static void setStringFieldAsTag(
AgentSpan span, Object obj, Class<?> clazz, String fieldName, String tagName) {
try {
Field field = clazz.getDeclaredField(fieldName);
field.setAccessible(true);
Object value = field.get(obj);
if (value != null) {
span.setTag(tagName, value.toString());
}
} catch (Exception e) {
log.debug("Could not read field {} from builder", fieldName, e);
}
}

public static synchronized void createLauncherSpan(Object launcher) {
if (launcherSpan != null) {
return;
}

AgentTracer.TracerAPI tracer = AgentTracer.get();
AgentSpan span =
tracer
.buildSpan("spark.launcher.launch")
.withSpanType("spark")
.withResourceName("SparkLauncher.startApplication")
.start();
span.setSamplingPriority(PrioritySampling.USER_KEEP, SamplingMechanism.DATA_JOBS);
setLauncherConfigTags(span, launcher);
launcherSpan = span;

if (!shutdownHookRegistered) {
shutdownHookRegistered = true;
Runtime.getRuntime()
.addShutdownHook(
new Thread(
() -> {
synchronized (SparkLauncherAdvice.class) {
AgentSpan s = launcherSpan;
if (s != null) {
log.info("Finishing spark.launcher span from shutdown hook");
s.finish();
launcherSpan = null;
}
}
}));
}
}

public static synchronized void finishSpan(boolean isError, String errorMessage) {
AgentSpan span = launcherSpan;
if (span == null) {
return;
}
if (isError) {
span.setError(true);
span.setTag(DDTags.ERROR_TYPE, "Spark Launcher Failed");
span.setTag(DDTags.ERROR_MSG, errorMessage);
}
span.finish();
launcherSpan = null;
}

public static synchronized void finishSpanWithThrowable(Throwable throwable) {
AgentSpan span = launcherSpan;
if (span == null) {
return;
}
if (throwable != null) {
span.addThrowable(throwable);
}
span.finish();
launcherSpan = null;
}

public static class StartApplicationAdvice {
@Advice.OnMethodExit(suppress = Throwable.class, onThrowable = Throwable.class)
public static void exit(
@Advice.This Object launcher,
@Advice.Return SparkAppHandle handle,
@Advice.Thrown Throwable throwable) {
createLauncherSpan(launcher);

if (throwable != null) {
finishSpanWithThrowable(throwable);
return;
}

if (handle != null) {
try {
handle.addListener(new AppHandleListener());
} catch (Exception e) {
log.debug("Failed to register SparkAppHandle listener", e);
}
}
}
}

public static class AppHandleListener implements SparkAppHandle.Listener {
@Override
public void stateChanged(SparkAppHandle handle) {
SparkAppHandle.State state = handle.getState();
AgentSpan span = launcherSpan;
if (span != null) {
span.setTag("spark.launcher.app_state", state.toString());

String appId = handle.getAppId();
if (appId != null) {
span.setTag("spark.app_id", appId);
}

if (state.isFinal()) {
if (state == SparkAppHandle.State.FAILED
|| state == SparkAppHandle.State.KILLED
|| state == SparkAppHandle.State.LOST) {
finishSpan(true, "Application " + state);
} else {
finishSpan(false, null);
}
}
}
}

@Override
public void infoChanged(SparkAppHandle handle) {
AgentSpan span = launcherSpan;
if (span != null) {
String appId = handle.getAppId();
if (appId != null) {
span.setTag("spark.app_id", appId);
}
}
}
}
}
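For orientation, not part of the diff: the advice above hooks org.apache.spark.launcher.SparkLauncher#startApplication. A typical caller looks roughly like the sketch below; the resource path, class names, and config values are placeholders.

```java
import org.apache.spark.launcher.SparkAppHandle;
import org.apache.spark.launcher.SparkLauncher;

public class LauncherUsageSketch {
  public static void main(String[] args) throws Exception {
    // startApplication(...) is the instrumented entry point: the advice creates the
    // spark.launcher.launch span here and registers its own SparkAppHandle.Listener.
    SparkAppHandle handle =
        new SparkLauncher()
            .setMaster("local[*]")                  // placeholder master
            .setAppResource("/path/to/app.jar")     // placeholder application jar
            .setMainClass("com.example.MySparkJob") // placeholder main class
            .setAppName("example-job")
            .setConf("spark.executor.memory", "2g")
            .startApplication();

    // The instrumentation's AppHandleListener observes the same state transitions
    // to tag spark.app_id and finish the span once a final state is reached.
    while (!handle.getState().isFinal()) {
      Thread.sleep(1000);
    }
  }
}
```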
@@ -0,0 +1,47 @@
package datadog.trace.instrumentation.spark;

import static datadog.trace.agent.tooling.bytebuddy.matcher.NameMatchers.named;
import static net.bytebuddy.matcher.ElementMatchers.isDeclaredBy;
import static net.bytebuddy.matcher.ElementMatchers.isMethod;

import com.google.auto.service.AutoService;
import datadog.trace.agent.tooling.Instrumenter;
import datadog.trace.agent.tooling.InstrumenterModule;
import datadog.trace.api.InstrumenterConfig;

@AutoService(InstrumenterModule.class)
public class SparkLauncherInstrumentation extends InstrumenterModule.Tracing
implements Instrumenter.ForSingleType, Instrumenter.HasMethodAdvice {

public SparkLauncherInstrumentation() {
super("spark-launcher");
}

@Override
protected boolean defaultEnabled() {
return InstrumenterConfig.get().isDataJobsEnabled();
}

@Override
public String instrumentedType() {
return "org.apache.spark.launcher.SparkLauncher";
}

@Override
public String[] helperClassNames() {
return new String[] {
packageName + ".SparkConfAllowList",
packageName + ".SparkLauncherAdvice",
packageName + ".SparkLauncherAdvice$AppHandleListener",
};
}

@Override
public void methodAdvice(MethodTransformer transformer) {
transformer.applyAdvice(
isMethod()
.and(named("startApplication"))
.and(isDeclaredBy(named("org.apache.spark.launcher.SparkLauncher"))),
packageName + ".SparkLauncherAdvice$StartApplicationAdvice");
}
}