-
-
Notifications
You must be signed in to change notification settings - Fork 468
ref(kafka): [Queue Instrumentation 14] Extract sentry-kafka module #5288
New issue
Have a question about this project? Sign up for a free GitHub account to open an issue and contact its maintainers and the community.
By clicking “Sign up for GitHub”, you agree to our terms of service and privacy statement. We’ll occasionally send you account related emails.
Already on GitHub? Sign in to your account
base: fix/queue-instrumentation-enqueued-time-seconds
Are you sure you want to change the base?
Changes from all commits
File filter
Filter by extension
Conversations
Jump to
Diff view
Diff view
There are no files selected for viewing
| Original file line number | Diff line number | Diff line change |
|---|---|---|
| @@ -0,0 +1,5 @@ | ||
| # sentry-kafka | ||
|
|
||
| This module provides Kafka-native queue instrumentation for applications using `kafka-clients` directly. | ||
|
|
||
| Spring users should use `sentry-spring-boot-jakarta` / `sentry-spring-jakarta`, which provide higher-fidelity consumer instrumentation via Spring Kafka hooks. |
| Original file line number | Diff line number | Diff line change |
|---|---|---|
| @@ -0,0 +1,25 @@ | ||
| public final class io/sentry/kafka/BuildConfig { | ||
| public static final field SENTRY_KAFKA_SDK_NAME Ljava/lang/String; | ||
| public static final field VERSION_NAME Ljava/lang/String; | ||
| } | ||
|
|
||
| public final class io/sentry/kafka/SentryKafkaConsumerInterceptor : org/apache/kafka/clients/consumer/ConsumerInterceptor { | ||
| public static final field TRACE_ORIGIN Ljava/lang/String; | ||
| public fun <init> (Lio/sentry/IScopes;)V | ||
| public fun close ()V | ||
| public fun configure (Ljava/util/Map;)V | ||
| public fun onCommit (Ljava/util/Map;)V | ||
| public fun onConsume (Lorg/apache/kafka/clients/consumer/ConsumerRecords;)Lorg/apache/kafka/clients/consumer/ConsumerRecords; | ||
| } | ||
|
|
||
| public final class io/sentry/kafka/SentryKafkaProducerInterceptor : org/apache/kafka/clients/producer/ProducerInterceptor { | ||
| public static final field SENTRY_ENQUEUED_TIME_HEADER Ljava/lang/String; | ||
| public static final field TRACE_ORIGIN Ljava/lang/String; | ||
| public fun <init> (Lio/sentry/IScopes;)V | ||
| public fun <init> (Lio/sentry/IScopes;Ljava/lang/String;)V | ||
| public fun close ()V | ||
| public fun configure (Ljava/util/Map;)V | ||
| public fun onAcknowledgement (Lorg/apache/kafka/clients/producer/RecordMetadata;Ljava/lang/Exception;)V | ||
| public fun onSend (Lorg/apache/kafka/clients/producer/ProducerRecord;)Lorg/apache/kafka/clients/producer/ProducerRecord; | ||
| } | ||
|
|
| Original file line number | Diff line number | Diff line change |
|---|---|---|
| @@ -0,0 +1,83 @@ | ||
// Build script for the sentry-kafka module: a plain java-library (Kotlin is used for tests only).
import net.ltgt.gradle.errorprone.errorprone
import org.jetbrains.kotlin.gradle.tasks.KotlinCompile

plugins {
  `java-library`
  id("io.sentry.javadoc")
  alias(libs.plugins.kotlin.jvm)
  jacoco
  alias(libs.plugins.errorprone)
  alias(libs.plugins.gradle.versions)
  alias(libs.plugins.buildconfig)
}

// Keep Kotlin (test) bytecode at Java 8 to match the module's Java compatibility target.
tasks.withType<KotlinCompile>().configureEach {
  compilerOptions.jvmTarget = org.jetbrains.kotlin.gradle.dsl.JvmTarget.JVM_1_8
}

dependencies {
  api(projects.sentry)
  // kafka-clients is compileOnly: users provide their own Kafka client version at runtime.
  compileOnly(libs.kafka.clients)
  compileOnly(libs.jetbrains.annotations)
  compileOnly(libs.nopen.annotations)

  // Static-analysis checkers wired through the errorprone plugin.
  errorprone(libs.errorprone.core)
  errorprone(libs.nopen.checker)
  errorprone(libs.nullaway)

  // tests
  testImplementation(projects.sentryTestSupport)
  testImplementation(kotlin(Config.kotlinStdLib))
  testImplementation(libs.kotlin.test.junit)
  testImplementation(libs.mockito.kotlin)
  testImplementation(libs.mockito.inline)
  // Tests need kafka-clients on the classpath since the main configuration is compileOnly.
  testImplementation(libs.kafka.clients)
}

configure<SourceSetContainer> { test { java.srcDir("src/test/java") } }

jacoco { toolVersion = libs.versions.jacoco.get() }

// XML report only — consumed by CI coverage tooling; HTML disabled to keep builds lean.
tasks.jacocoTestReport {
  reports {
    xml.required.set(true)
    html.required.set(false)
  }
}

// Enforce the shared minimum-coverage threshold and hook coverage tasks into `check`.
tasks {
  jacocoTestCoverageVerification {
    violationRules { rule { limit { minimum = Config.QualityPlugins.Jacoco.minimumCoverage } } }
  }
  check {
    dependsOn(jacocoTestCoverageVerification)
    dependsOn(jacocoTestReport)
  }
}

// NullAway: treat nullability violations in io.sentry packages as compile errors.
tasks.withType<JavaCompile>().configureEach {
  options.errorprone {
    check("NullAway", net.ltgt.gradle.errorprone.CheckSeverity.ERROR)
    option("NullAway:AnnotatedPackages", "io.sentry")
  }
}

// Generates io.sentry.kafka.BuildConfig exposing the SDK name/version constants used at runtime.
buildConfig {
  useJavaOutput()
  packageName("io.sentry.kafka")
  buildConfigField("String", "SENTRY_KAFKA_SDK_NAME", "\"${Config.Sentry.SENTRY_KAFKA_SDK_NAME}\"")
  buildConfigField("String", "VERSION_NAME", "\"${project.version}\"")
}

// Manifest attributes identifying the artifact to Sentry tooling and standard JAR consumers.
tasks.jar {
  manifest {
    attributes(
      "Sentry-Version-Name" to project.version,
      "Sentry-SDK-Name" to Config.Sentry.SENTRY_KAFKA_SDK_NAME,
      "Sentry-SDK-Package-Name" to "maven:io.sentry:sentry-kafka",
      "Implementation-Vendor" to "Sentry",
      "Implementation-Title" to project.name,
      "Implementation-Version" to project.version,
    )
  }
}
| Original file line number | Diff line number | Diff line change |
|---|---|---|
| @@ -0,0 +1,95 @@ | ||
| package io.sentry.kafka; | ||
|
|
||
| import io.sentry.BaggageHeader; | ||
| import io.sentry.IScopes; | ||
| import io.sentry.ITransaction; | ||
| import io.sentry.SentryTraceHeader; | ||
| import io.sentry.SpanDataConvention; | ||
| import io.sentry.SpanStatus; | ||
| import io.sentry.TransactionContext; | ||
| import io.sentry.TransactionOptions; | ||
| import java.nio.charset.StandardCharsets; | ||
| import java.util.Collections; | ||
| import java.util.List; | ||
| import java.util.Map; | ||
| import org.apache.kafka.clients.consumer.ConsumerInterceptor; | ||
| import org.apache.kafka.clients.consumer.ConsumerRecord; | ||
| import org.apache.kafka.clients.consumer.ConsumerRecords; | ||
| import org.apache.kafka.clients.consumer.OffsetAndMetadata; | ||
| import org.apache.kafka.common.TopicPartition; | ||
| import org.apache.kafka.common.header.Header; | ||
| import org.jetbrains.annotations.ApiStatus; | ||
| import org.jetbrains.annotations.NotNull; | ||
| import org.jetbrains.annotations.Nullable; | ||
|
|
||
| @ApiStatus.Internal | ||
| public final class SentryKafkaConsumerInterceptor<K, V> implements ConsumerInterceptor<K, V> { | ||
|
|
||
| public static final @NotNull String TRACE_ORIGIN = "auto.queue.kafka.consumer"; | ||
|
|
||
| private final @NotNull IScopes scopes; | ||
|
|
||
| public SentryKafkaConsumerInterceptor(final @NotNull IScopes scopes) { | ||
| this.scopes = scopes; | ||
| } | ||
|
|
||
| @Override | ||
| public @NotNull ConsumerRecords<K, V> onConsume(final @NotNull ConsumerRecords<K, V> records) { | ||
| if (!scopes.getOptions().isEnableQueueTracing() || records.isEmpty()) { | ||
| return records; | ||
| } | ||
|
|
||
| final @NotNull ConsumerRecord<K, V> firstRecord = records.iterator().next(); | ||
|
|
||
| try { | ||
| final @Nullable TransactionContext continued = continueTrace(firstRecord); | ||
| final @NotNull TransactionContext txContext = | ||
| continued != null ? continued : new TransactionContext("queue.receive", "queue.receive"); | ||
| txContext.setName("queue.receive"); | ||
| txContext.setOperation("queue.receive"); | ||
|
|
||
| final @NotNull TransactionOptions txOptions = new TransactionOptions(); | ||
| txOptions.setOrigin(TRACE_ORIGIN); | ||
| txOptions.setBindToScope(false); | ||
|
|
||
| final @NotNull ITransaction transaction = scopes.startTransaction(txContext, txOptions); | ||
| if (!transaction.isNoOp()) { | ||
| transaction.setData(SpanDataConvention.MESSAGING_SYSTEM, "kafka"); | ||
| transaction.setData(SpanDataConvention.MESSAGING_DESTINATION_NAME, firstRecord.topic()); | ||
| transaction.setData("messaging.batch.message.count", records.count()); | ||
| transaction.setStatus(SpanStatus.OK); | ||
| transaction.finish(); | ||
|
Comment on lines
+59
to
+61
There was a problem hiding this comment. Choose a reason for hiding this commentThe reason will be displayed to describe this comment to others. Learn more. Bug: The transaction in Suggested FixInstead of creating and finishing a transaction, consider starting a transaction and binding it to the scope. This would allow user code to retrieve the active span and finish it after message processing is complete, accurately capturing its duration and status. Document this pattern for non-Spring users. Prompt for AI AgentDid we get this right? 👍 / 👎 to inform future reviews. |
||
| } | ||
| } catch (Throwable ignored) { | ||
| // Instrumentation must never break the customer's Kafka poll loop. | ||
| } | ||
|
|
||
| return records; | ||
| } | ||
|
|
||
| @Override | ||
| public void onCommit(final @NotNull Map<TopicPartition, OffsetAndMetadata> offsets) {} | ||
|
|
||
| @Override | ||
| public void close() {} | ||
|
|
||
| @Override | ||
| public void configure(final @Nullable Map<String, ?> configs) {} | ||
|
|
||
| private @Nullable TransactionContext continueTrace(final @NotNull ConsumerRecord<K, V> record) { | ||
| final @Nullable String sentryTrace = headerValue(record, SentryTraceHeader.SENTRY_TRACE_HEADER); | ||
| final @Nullable String baggage = headerValue(record, BaggageHeader.BAGGAGE_HEADER); | ||
| final @Nullable List<String> baggageHeaders = | ||
| baggage != null ? Collections.singletonList(baggage) : null; | ||
| return scopes.continueTrace(sentryTrace, baggageHeaders); | ||
| } | ||
|
|
||
| private @Nullable String headerValue( | ||
| final @NotNull ConsumerRecord<K, V> record, final @NotNull String headerName) { | ||
| final @Nullable Header header = record.headers().lastHeader(headerName); | ||
| if (header == null || header.value() == null) { | ||
| return null; | ||
| } | ||
| return new String(header.value(), StandardCharsets.UTF_8); | ||
| } | ||
| } | ||
| Original file line number | Diff line number | Diff line change |
|---|---|---|
| @@ -0,0 +1,72 @@ | ||
package io.sentry.kafka

import io.sentry.IScopes
import io.sentry.ITransaction
import io.sentry.SentryOptions
import io.sentry.TransactionContext
import io.sentry.TransactionOptions
import kotlin.test.Test
import kotlin.test.assertSame
import org.apache.kafka.clients.consumer.ConsumerRecord
import org.apache.kafka.clients.consumer.ConsumerRecords
import org.apache.kafka.clients.consumer.OffsetAndMetadata
import org.apache.kafka.common.TopicPartition
import org.mockito.kotlin.any
import org.mockito.kotlin.mock
import org.mockito.kotlin.never
import org.mockito.kotlin.verify
import org.mockito.kotlin.whenever

/** Unit tests for [SentryKafkaConsumerInterceptor]. */
class SentryKafkaConsumerInterceptorTest {

  /** Shared mocks plus a factory producing the interceptor under test. */
  private class Fixture {
    val scopes = mock<IScopes>()
    val transaction = mock<ITransaction>()

    fun getSut(queueTracingEnabled: Boolean): SentryKafkaConsumerInterceptor<String, String> {
      val options = SentryOptions().apply { isEnableQueueTracing = queueTracingEnabled }
      whenever(scopes.options).thenReturn(options)
      return SentryKafkaConsumerInterceptor(scopes)
    }
  }

  private val fixture = Fixture()

  @Test
  fun `does nothing when queue tracing is disabled`() {
    val sut = fixture.getSut(queueTracingEnabled = false)
    val batch = batchOf("my-topic")

    val returned = sut.onConsume(batch)

    // The batch must be passed through untouched and no transaction may be started.
    assertSame(batch, returned)
    verify(fixture.scopes, never())
      .startTransaction(any<TransactionContext>(), any<TransactionOptions>())
  }

  @Test
  fun `starts and finishes queue receive transaction for consumed batch`() {
    val sut = fixture.getSut(queueTracingEnabled = true)
    whenever(fixture.scopes.continueTrace(any(), any())).thenReturn(null)
    whenever(fixture.scopes.startTransaction(any<TransactionContext>(), any<TransactionOptions>()))
      .thenReturn(fixture.transaction)
    whenever(fixture.transaction.isNoOp).thenReturn(false)

    sut.onConsume(batchOf("my-topic"))

    verify(fixture.scopes).startTransaction(any<TransactionContext>(), any<TransactionOptions>())
    verify(fixture.transaction).setData("messaging.system", "kafka")
    verify(fixture.transaction).setData("messaging.destination.name", "my-topic")
    verify(fixture.transaction).setData("messaging.batch.message.count", 1)
    verify(fixture.transaction).finish()
  }

  @Test
  fun `commit callback is no-op`() {
    val sut = SentryKafkaConsumerInterceptor<String, String>(mock())

    // Must not throw or touch scopes.
    sut.onCommit(mapOf(TopicPartition("my-topic", 0) to OffsetAndMetadata(1)))
  }

  /** Builds a one-record batch for the given topic on partition 0. */
  private fun batchOf(topic: String): ConsumerRecords<String, String> {
    val partition = TopicPartition(topic, 0)
    val record = ConsumerRecord(topic, 0, 0L, "key", "value")
    return ConsumerRecords(mapOf(partition to listOf(record)))
  }
}
There was a problem hiding this comment.
Choose a reason for hiding this comment
The reason will be displayed to describe this comment to others. Learn more.
Bug:
`SentryKafkaConsumerInterceptor` and `SentryKafkaProducerInterceptor` lack public no-arg constructors, preventing instantiation by Kafka when configured via class name. Severity: HIGH
Suggested Fix
Add a public, no-argument constructor to both
`SentryKafkaConsumerInterceptor` and `SentryKafkaProducerInterceptor`. The required `IScopes` dependency could then be injected via the `configure()` method, which Kafka calls after instantiation. Prompt for AI Agent
Did we get this right? 👍 / 👎 to inform future reviews.