Commit (#198): Bounded queue-size in front of threadpool, with custom rejectPolicy that will log the event-record when it is discarded. Fixes GH-190
Showing 10 changed files with 222 additions and 34 deletions.
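Only three of the ten changed files are rendered below, and the construction of the executor itself is not among them. The following is a minimal sketch, not code from the commit, of how a bounded queue and the new rejection handler are typically combined in front of a ThreadPoolExecutor: the pool size corresponds to the threadPoolSize setting seen in the test's EventLoggingConfig, while the factory name, queue capacity and keep-alive values are assumptions for illustration.

package no.digdir.logging.event;

import java.util.concurrent.ExecutorService;
import java.util.concurrent.LinkedBlockingQueue;
import java.util.concurrent.ThreadPoolExecutor;
import java.util.concurrent.TimeUnit;

class BoundedExecutorSketch {
    // Hypothetical factory; the real wiring lives in a file not rendered in this excerpt.
    static ExecutorService create(int threadPoolSize, int queueCapacity) {
        return new ThreadPoolExecutor(
                threadPoolSize,                           // core pool size
                threadPoolSize,                           // maximum pool size
                0L, TimeUnit.MILLISECONDS,                // no keep-alive for surplus threads
                new LinkedBlockingQueue<>(queueCapacity), // the bounded queue in front of the pool
                new DiscardAndLogOldestPolicy());         // log-and-discard when the queue is full
    }
}

With a capacity-bounded LinkedBlockingQueue, a task submitted while both the pool and the queue are full is routed to the rejection handler instead of growing the queue without bound, which is what lets DiscardAndLogOldestPolicy decide what to drop and log.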
log-event/src/main/java/no/digdir/logging/event/DiscardAndLogOldestPolicy.java (new file: 40 additions & 0 deletions)
package no.digdir.logging.event;

import org.slf4j.Logger;
import org.slf4j.LoggerFactory;

import java.util.concurrent.RejectedExecutionHandler;
import java.util.concurrent.ThreadPoolExecutor;

/**
 * Based upon DiscardOldestPolicy, modified for our needs.
 *
 * @see java.util.concurrent.ThreadPoolExecutor.DiscardOldestPolicy
 */
class DiscardAndLogOldestPolicy implements RejectedExecutionHandler {
    private static final Logger log = LoggerFactory.getLogger(DiscardAndLogOldestPolicy.class);

    /**
     * Obtains, logs and ignores the next task that the executor
     * would otherwise execute, if one is immediately available,
     * and then retries execution of task r, unless the executor
     * is shut down, in which case task r is instead discarded.
     *
     * @param r the runnable task requested to be executed
     * @param e the executor attempting to execute this task
     */
    public void rejectedExecution(Runnable r, ThreadPoolExecutor e) {
        if (!e.isShutdown()) {
            Runnable runnableToBeDiscarded = e.getQueue().poll();
            if (runnableToBeDiscarded instanceof KafkaTask) {
                log.warn("Queue is full, discarding event: {}", ((KafkaTask) runnableToBeDiscarded).getProducerRecord()
                        .value());
            } else {
                if (runnableToBeDiscarded != null) {
                    log.warn("Discarded runnable of unknown type. It was: " + runnableToBeDiscarded.getClass());
                }
            }
            e.execute(r);
        }
    }
}
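Compared with the stock ThreadPoolExecutor.DiscardOldestPolicy, which removes the oldest queued task silently, this handler surfaces what was lost. A small stand-alone sketch of that behaviour follows; the single-thread pool and one-slot queue are made up for the demonstration, and because the submitted lambdas are not KafkaTask instances, the rejection lands in the "unknown type" warning branch rather than logging an event record.

package no.digdir.logging.event;

import java.util.concurrent.LinkedBlockingQueue;
import java.util.concurrent.ThreadPoolExecutor;
import java.util.concurrent.TimeUnit;

class DiscardAndLogOldestPolicyDemo {
    public static void main(String[] args) throws InterruptedException {
        ThreadPoolExecutor executor = new ThreadPoolExecutor(
                1, 1, 0L, TimeUnit.MILLISECONDS,
                new LinkedBlockingQueue<>(1),     // room for exactly one waiting task
                new DiscardAndLogOldestPolicy());

        // Task 0 occupies the single worker, task 1 fills the queue, and task 2
        // triggers rejectedExecution: task 1 is polled, logged and dropped.
        for (int i = 0; i < 3; i++) {
            int taskNo = i;
            executor.execute(() -> {
                try {
                    Thread.sleep(100);            // keep the worker busy long enough
                } catch (InterruptedException e) {
                    Thread.currentThread().interrupt();
                }
                System.out.println("Ran task " + taskNo);
            });
        }

        executor.shutdown();
        executor.awaitTermination(1, TimeUnit.SECONDS);
    }
}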
log-event/src/main/java/no/digdir/logging/event/KafkaTask.java (new file: 45 additions & 0 deletions)
package no.digdir.logging.event;

import org.apache.avro.specific.SpecificRecordBase;
import org.apache.kafka.clients.producer.Producer;
import org.apache.kafka.clients.producer.ProducerRecord;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;

/**
 * This class provides a convenient way of getting the ProducerRecord out of a runnable task
 * submitted to the executor service. Used in combination with the DiscardAndLogOldestPolicy.
 *
 * @see DiscardAndLogOldestPolicy
 */
class KafkaTask implements Runnable {
    private static final Logger log = LoggerFactory.getLogger(KafkaTask.class);
    private final ProducerRecord<String, SpecificRecordBase> producerRecord;
    private final Producer<String, SpecificRecordBase> producer;

    public KafkaTask(
            ProducerRecord<String, SpecificRecordBase> producerRecord,
            Producer<String, SpecificRecordBase> producer) {
        this.producerRecord = producerRecord;
        this.producer = producer;
    }

    @Override
    public void run() {
        try {
            producer.send(producerRecord, (recordMetadata, e) -> {
                if (e != null) {
                    log.warn("Failed to publish event {}", producerRecord.value(), e);
                } else if (log.isTraceEnabled() && recordMetadata != null) {
                    log.trace("Sent record {} with offset {}", producerRecord, recordMetadata.offset());
                }
            });
        } catch (Exception e) {
            log.warn("Failed to publish event {}", producerRecord.value(), e);
        }
    }

    ProducerRecord<String, SpecificRecordBase> getProducerRecord() {
        return producerRecord;
    }
}
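KafkaTask is what the event-logging side submits to the executor, but the class that does the submitting is among the files not rendered here. The call site might look roughly like the sketch below: the PublisherSketch class, its fields and its log method are illustrative names, while ActivityRecord.toProducerRecord(config) and the KafkaTask constructor are taken from the code shown in this commit.

package no.digdir.logging.event;

import org.apache.avro.specific.SpecificRecordBase;
import org.apache.kafka.clients.producer.Producer;
import org.apache.kafka.clients.producer.ProducerRecord;

import java.util.concurrent.ExecutorService;

// Illustrative call site; the real publisher class in the commit is not rendered above.
class PublisherSketch {
    private final ExecutorService pool;                          // the bounded executor sketched earlier
    private final Producer<String, SpecificRecordBase> producer; // shared Kafka producer
    private final EventLoggingConfig config;

    PublisherSketch(ExecutorService pool, Producer<String, SpecificRecordBase> producer, EventLoggingConfig config) {
        this.pool = pool;
        this.producer = producer;
        this.config = config;
    }

    void log(ActivityRecord record) {
        ProducerRecord<String, SpecificRecordBase> producerRecord = record.toProducerRecord(config);
        // Wrapping the send in a KafkaTask lets DiscardAndLogOldestPolicy recover and log
        // the event value if this task is later pushed out of a full queue.
        pool.execute(new KafkaTask(producerRecord, producer));
    }
}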
log-event/src/test/java/no/digdir/logging/event/DiscardAndLogOldestPolicyTest.java (new file: 57 additions & 0 deletions)
package no.digdir.logging.event;

import org.apache.avro.specific.SpecificRecordBase;
import org.apache.kafka.clients.producer.KafkaProducer;
import org.apache.kafka.clients.producer.Producer;
import org.junit.jupiter.api.Test;

import java.util.UUID;
import java.util.concurrent.BlockingQueue;
import java.util.concurrent.ThreadPoolExecutor;

import static org.mockito.ArgumentMatchers.eq;
import static org.mockito.Mockito.mock;
import static org.mockito.Mockito.verify;
import static org.mockito.Mockito.when;

class DiscardAndLogOldestPolicyTest {

    private final ThreadPoolExecutor executor = mock(ThreadPoolExecutor.class);
    private final BlockingQueue<Runnable> queue = mock(BlockingQueue.class);
    private final DiscardAndLogOldestPolicy discardAndLogOldestPolicy = new DiscardAndLogOldestPolicy();
    private final EventLoggingConfig config = EventLoggingConfig.builder()
            .applicationName("app")
            .environmentName("env")
            .bootstrapServers("localhost:443")
            .schemaRegistryUrl("localhost:433")
            .kafkaUsername("user")
            .threadPoolSize(1)
            .build();
    private final Producer<String, SpecificRecordBase> kafkaProducer = new KafkaProducer<>(config.getProducerConfig());

    @Test
    void testOldestIsDescheduled() {
        KafkaTask oldKafkaTask = createKafkaTask("OldEvent");
        when(executor.getQueue()).thenReturn(queue);
        when(queue.poll()).thenReturn(oldKafkaTask);

        KafkaTask newKafkaTask = createKafkaTask("NewEvent");

        discardAndLogOldestPolicy.rejectedExecution(newKafkaTask, executor);
        verify(executor).execute(eq(newKafkaTask));
        verify(queue).poll();
    }

    private KafkaTask createKafkaTask(String eventName) {
        return new KafkaTask(ActivityRecord.builder()
                .eventName(eventName)
                .eventDescription("Brukeren har logget inn")
                .eventSubjectPid("123")
                .correlationId(UUID.randomUUID().toString())
                .serviceProviderId("McDuck IT")
                .serviceOwnerId("Andeby kommune")
                .authEid("MinID")
                .authMethod("OTC")
                .build().toProducerRecord(config), kafkaProducer);
    }
}
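The committed test covers the path where the executor is still running. A complementary test for the shutdown branch, in which rejectedExecution drops the new task without touching the queue, might look like the sketch below; it is not part of the commit, reuses the fixture of the class above, and assumes additional static imports of org.mockito.Mockito.never and org.mockito.ArgumentMatchers.any.

// Hypothetical addition to DiscardAndLogOldestPolicyTest, reusing its executor,
// discardAndLogOldestPolicy and createKafkaTask fixture members.
@Test
void testNewTaskIsDroppedWhenExecutorIsShutDown() {
    when(executor.isShutdown()).thenReturn(true);

    discardAndLogOldestPolicy.rejectedExecution(createKafkaTask("NewEvent"), executor);

    // A shut-down executor means the policy neither polls the queue nor re-submits the task.
    verify(executor, never()).getQueue();
    verify(executor, never()).execute(any());
}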