How to retry with spring-kafka version 2.2

I'm trying to find a simple example with spring-kafka 2.2 that works with a @KafkaListener and retries the last failed message. If a message fails, it should be redirected to another topic where the retry attempts will be made.
We will have 4 topics:
topic, retryTopic, successTopic and errorTopic
If processing on topic fails, the message should be redirected to retryTopic, where the 3 retry attempts will be made. If those attempts fail, it must be redirected to errorTopic. On success on either topic or retryTopic, it should be redirected to successTopic.

It's a little simpler with Spring Boot 2.2.4 and Spring for Apache Kafka 2.3.5 (the 2.2.x version is shown further below):
@SpringBootApplication
public class So60172304Application {

    public static void main(String[] args) {
        SpringApplication.run(So60172304Application.class, args);
    }

    @Bean
    public NewTopic topic() {
        return TopicBuilder.name("topic").partitions(1).replicas(1).build();
    }

    @Bean
    public NewTopic retryTopic() {
        return TopicBuilder.name("retryTopic").partitions(1).replicas(1).build();
    }

    @Bean
    public NewTopic successTopic() {
        return TopicBuilder.name("successTopic").partitions(1).replicas(1).build();
    }

    @Bean
    public NewTopic errorTopic() {
        return TopicBuilder.name("errorTopic").partitions(1).replicas(1).build();
    }

    @Bean
    public ApplicationRunner runner(KafkaTemplate<String, String> template) {
        return args -> {
            template.send("topic", "failAlways");
            template.send("topic", "onlyFailFirst");
            template.send("topic", "good");
        };
    }

    /*
     * A custom container factory is needed until 2.3.6 is released because the
     * container customizer was not applied before then.
     */
    @Bean
    ConcurrentKafkaListenerContainerFactory<?, ?> kafkaListenerContainerFactory(
            ConcurrentKafkaListenerContainerFactoryConfigurer configurer,
            ConsumerFactory<Object, Object> kafkaConsumerFactory,
            KafkaTemplate<Object, Object> template) {

        ConcurrentKafkaListenerContainerFactory<Object, Object> factory =
                new ConcurrentKafkaListenerContainerFactory<Object, Object>() {

            @Override
            protected void initializeContainer(ConcurrentMessageListenerContainer<Object, Object> instance,
                    KafkaListenerEndpoint endpoint) {
                super.initializeContainer(instance, endpoint);
                customizer(template).configure(instance);
            }

        };
        configurer.configure(factory, kafkaConsumerFactory);
        // factory.setContainerCustomizer(customizer(template)); // after 2.3.6
        return factory;
    }

    private ContainerCustomizer<Object, Object, ConcurrentMessageListenerContainer<Object, Object>>
            customizer(KafkaTemplate<Object, Object> template) {

        return container -> {
            if (container.getContainerProperties().getTopics()[0].equals("topic")) {
                // no in-place retries on the main topic; failures go straight to retryTopic
                container.setErrorHandler(new SeekToCurrentErrorHandler(
                        new DeadLetterPublishingRecoverer(template,
                                (cr, ex) -> new TopicPartition("retryTopic", cr.partition())),
                        new FixedBackOff(0L, 0L)));
            }
            else if (container.getContainerProperties().getTopics()[0].equals("retryTopic")) {
                // 2 retries 5 seconds apart (3 attempts in all), then publish to errorTopic
                container.setErrorHandler(new SeekToCurrentErrorHandler(
                        new DeadLetterPublishingRecoverer(template,
                                (cr, ex) -> new TopicPartition("errorTopic", cr.partition())),
                        new FixedBackOff(5000L, 2L)));
            }
        };
    }

}

@Component
class Listener {

    private final KafkaTemplate<String, String> template;

    public Listener(KafkaTemplate<String, String> template) {
        this.template = template;
    }

    @KafkaListener(id = "so60172304.1", topics = "topic")
    public void listen1(String in) {
        System.out.println("topic: " + in);
        if (in.toLowerCase().contains("fail")) {
            throw new RuntimeException(in);
        }
        this.template.send("successTopic", in);
    }

    @KafkaListener(id = "so60172304.2", topics = "retryTopic")
    public void listen2(String in) {
        System.out.println("retryTopic: " + in);
        if (in.startsWith("fail")) {
            throw new RuntimeException(in);
        }
        this.template.send("successTopic", in);
    }

    @KafkaListener(id = "so60172304.3", topics = "successTopic")
    public void listen3(String in) {
        System.out.println("successTopic: " + in);
    }

    @KafkaListener(id = "so60172304.4", topics = "errorTopic")
    public void listen4(String in) {
        System.out.println("errorTopic: " + in);
    }

}
spring.kafka.consumer.auto-offset-reset=earliest
result:
topic: failAlways
retryTopic: failAlways
topic: onlyFailFirst
topic: good
successTopic: good
retryTopic: failAlways
retryTopic: failAlways
retryTopic: onlyFailFirst
errorTopic: failAlways
successTopic: onlyFailFirst
With Spring Boot 2.1.12 and Spring for Apache Kafka 2.2.12:
@SpringBootApplication
public class So601723041Application {

    public static void main(String[] args) {
        SpringApplication.run(So601723041Application.class, args);
    }

    @Bean
    public NewTopic topic() {
        return new NewTopic("topic", 1, (short) 1);
    }

    @Bean
    public NewTopic retryTopic() {
        return new NewTopic("retryTopic", 1, (short) 1);
    }

    @Bean
    public NewTopic successTopic() {
        return new NewTopic("successTopic", 1, (short) 1);
    }

    @Bean
    public NewTopic errorTopic() {
        return new NewTopic("errorTopic", 1, (short) 1);
    }

    @Bean
    public ApplicationRunner runner(KafkaTemplate<String, String> template) {
        return args -> {
            template.send("topic", "failAlways");
            template.send("topic", "onlyFailFirst");
            template.send("topic", "good");
        };
    }

    @Bean
    ConcurrentKafkaListenerContainerFactory<?, ?> kafkaListenerContainerFactory(
            ConcurrentKafkaListenerContainerFactoryConfigurer configurer,
            ConsumerFactory<Object, Object> kafkaConsumerFactory,
            KafkaTemplate<Object, Object> template) {

        ConcurrentKafkaListenerContainerFactory<Object, Object> factory =
                new ConcurrentKafkaListenerContainerFactory<Object, Object>() {

            @Override
            protected void initializeContainer(ConcurrentMessageListenerContainer<Object, Object> instance,
                    KafkaListenerEndpoint endpoint) {
                super.initializeContainer(instance, endpoint);
                customize(instance, template);
            }

        };
        configurer.configure(factory, kafkaConsumerFactory);
        return factory;
    }

    @Bean
    ConcurrentKafkaListenerContainerFactory<?, ?> retryKafkaListenerContainerFactory(
            ConcurrentKafkaListenerContainerFactoryConfigurer configurer,
            ConsumerFactory<Object, Object> kafkaConsumerFactory,
            KafkaTemplate<Object, Object> template) {

        ConcurrentKafkaListenerContainerFactory<Object, Object> factory =
                new ConcurrentKafkaListenerContainerFactory<Object, Object>() {

            @Override
            protected void initializeContainer(ConcurrentMessageListenerContainer<Object, Object> instance,
                    KafkaListenerEndpoint endpoint) {
                super.initializeContainer(instance, endpoint);
                customize(instance, template);
            }

        };
        configurer.configure(factory, kafkaConsumerFactory);
        // 3 delivery attempts, 5 seconds apart, performed in memory by the retry template
        RetryTemplate retryTemplate = new RetryTemplate();
        retryTemplate.setRetryPolicy(new SimpleRetryPolicy(3));
        FixedBackOffPolicy backOffPolicy = new FixedBackOffPolicy();
        backOffPolicy.setBackOffPeriod(5000L);
        retryTemplate.setBackOffPolicy(backOffPolicy);
        factory.setRetryTemplate(retryTemplate);
        return factory;
    }

    private void customize(ConcurrentMessageListenerContainer<Object, Object> container,
            KafkaTemplate<Object, Object> template) {

        if (container.getContainerProperties().getTopics()[0].equals("topic")) {
            container.setErrorHandler(new SeekToCurrentErrorHandler(
                    new DeadLetterPublishingRecoverer(template,
                            (cr, ex) -> new TopicPartition("retryTopic", cr.partition())),
                    0));
        }
        else if (container.getContainerProperties().getTopics()[0].equals("retryTopic")) {
            container.setErrorHandler(new SeekToCurrentErrorHandler(
                    new DeadLetterPublishingRecoverer(template,
                            (cr, ex) -> new TopicPartition("errorTopic", cr.partition())),
                    0)); // no retries here - retry template instead
        }
    }

}

@Component
class Listener {

    private final KafkaTemplate<String, String> template;

    public Listener(KafkaTemplate<String, String> template) {
        this.template = template;
    }

    @KafkaListener(id = "so60172304.1", topics = "topic")
    public void listen1(String in) {
        System.out.println("topic: " + in);
        if (in.toLowerCase().contains("fail")) {
            throw new RuntimeException(in);
        }
        this.template.send("successTopic", in);
    }

    @KafkaListener(id = "so60172304.2", topics = "retryTopic", containerFactory = "retryKafkaListenerContainerFactory")
    public void listen2(String in) {
        System.out.println("retryTopic: " + in);
        if (in.startsWith("fail")) {
            throw new RuntimeException(in);
        }
        this.template.send("successTopic", in);
    }

    @KafkaListener(id = "so60172304.3", topics = "successTopic")
    public void listen3(String in) {
        System.out.println("successTopic: " + in);
    }

    @KafkaListener(id = "so60172304.4", topics = "errorTopic")
    public void listen4(String in) {
        System.out.println("errorTopic: " + in);
    }

}
EDIT
To change the payload in the published record, you could use something like this (call MyRepublisher.setNewValue("new value") before throwing; see the usage sketch after the class).
public class MyRepublisher extends DeadLetterPublishingRecoverer {

    private static final ThreadLocal<String> newValue = new ThreadLocal<>();

    public MyRepublisher(KafkaTemplate<Object, Object> template,
            BiFunction<ConsumerRecord<?, ?>, Exception, TopicPartition> destinationResolver) {
        super(template, destinationResolver);
    }

    @Override
    protected ProducerRecord<Object, Object> createProducerRecord(ConsumerRecord<?, ?> record,
            TopicPartition topicPartition, RecordHeaders headers) {

        ProducerRecord<Object, Object> producerRecord = new ProducerRecord<>(topicPartition.topic(),
                topicPartition.partition() < 0 ? null : topicPartition.partition(),
                record.key(), newValue.get(), headers);
        newValue.remove();
        return producerRecord;
    }

    public static void setNewValue(String value) {
        newValue.set(value);
    }

}
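For example, the failing listener could set the replacement value just before throwing (a minimal usage sketch based on the listener above; the replacement value is illustrative):
@KafkaListener(id = "so60172304.1", topics = "topic")
public void listen1(String in) {
    System.out.println("topic: " + in);
    if (in.toLowerCase().contains("fail")) {
        // this value replaces the original payload when the recoverer republishes
        MyRepublisher.setNewValue("new value");
        throw new RuntimeException(in);
    }
    this.template.send("successTopic", in);
}
The ThreadLocal works because the error handler and the recoverer run on the same consumer thread as the listener, so the value set before the throw is still visible when createProducerRecord() is called.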

Related

How to fix apps containing an unsafe implementation of TrustManager and HostnameVerifier

After uploading my app to the Google Play Store, I received an email from Google saying the app uses an unsafe implementation of TrustManager and HostnameVerifier. How can I fix this problem? Can someone point me in the right direction?
public class HttpsTrustManager implements X509TrustManager {

    private static TrustManager[] trustManagers;
    private static final X509Certificate[] _AcceptedIssuers = new X509Certificate[]{};

    @Override
    public void checkClientTrusted(
            java.security.cert.X509Certificate[] x509Certificates, String s)
            throws java.security.cert.CertificateException {
    }

    @Override
    public void checkServerTrusted(
            java.security.cert.X509Certificate[] x509Certificates, String s)
            throws java.security.cert.CertificateException {
    }

    public boolean isClientTrusted(X509Certificate[] chain) {
        return true;
    }

    public boolean isServerTrusted(X509Certificate[] chain) {
        return true;
    }

    @Override
    public X509Certificate[] getAcceptedIssuers() {
        return _AcceptedIssuers;
    }

    public static void allowAllSSL() {
        HttpsURLConnection.setDefaultHostnameVerifier(new HostnameVerifier() {
            @Override
            public boolean verify(String arg0, SSLSession arg1) {
                return true;
            }
        });
        SSLContext context = null;
        if (trustManagers == null) {
            trustManagers = new TrustManager[]{new HttpsTrustManager()};
        }
        try {
            context = SSLContext.getInstance("TLS");
            context.init(null, trustManagers, new SecureRandom());
        } catch (NoSuchAlgorithmException e) {
            e.printStackTrace();
        } catch (KeyManagementException e) {
            e.printStackTrace();
        }
        HttpsURLConnection.setDefaultSSLSocketFactory(context.getSocketFactory());
    }

}
I have tried the above code.

How can I capture the record key and value when there is a DeserializationException while consuming a message from a Kafka topic?

I'm using Spring Boot 2.1.7.RELEASE and spring-kafka 2.2.8.RELEASE. I'm using the @KafkaListener annotation to create a consumer with all the default consumer settings, and the below configuration as specified in the Spring for Apache Kafka documentation.
// other props
props.put(ConsumerConfig.VALUE_DESERIALIZER_CLASS_CONFIG, ErrorHandlingDeserializer2.class);
props.put(ConsumerConfig.KEY_DESERIALIZER_CLASS_CONFIG, ErrorHandlingDeserializer2.class);
props.put(ErrorHandlingDeserializer.KEY_DESERIALIZER_CLASS, StringDeserializer.class);
props.put(ErrorHandlingDeserializer.VALUE_DESERIALIZER_CLASS, AvroDeserializer.class.getName());
return new DefaultKafkaConsumerFactory<>(props);
Now, I've implemented my custom error handler by extending SeekToCurrentErrorHandler as per the below thread, but the record value comes through as null and the record key is not in a readable format. Please suggest how I can get the record key and value.
How to capture the exception and message key when using ErrorHandlingDeserializer2 to handle exceptions during deserialization
Here is my custom SeekToCurrentErrorHandler code
@Component
public class MySeekToCurrentErrorHandler extends SeekToCurrentErrorHandler {

    private final MyDeadLetterRecoverer deadLetterRecoverer;

    @Autowired
    public MySeekToCurrentErrorHandler(MyDeadLetterRecoverer deadLetterRecoverer) {
        super(-1);
        this.deadLetterRecoverer = deadLetterRecoverer;
    }

    @Override
    public void handle(Exception thrownException, List<ConsumerRecord<?, ?>> data, Consumer<?, ?> consumer, MessageListenerContainer container) {
        if (thrownException instanceof DeserializationException) {
            // Improve to support multiple records
            DeserializationException deserializationException = (DeserializationException) thrownException;
            deadLetterRecoverer.accept(data.get(0), deserializationException);
            ConsumerRecord<?, ?> consumerRecord = data.get(0);
            System.out.println(consumerRecord.key());
            System.out.println(consumerRecord.value());
        } else {
            // Call the super method to let the SeekToCurrentErrorHandler do what it is actually designed for
            super.handle(thrownException, data, consumer, container);
        }
    }

}
If the key fails deserialization, the original byte[] can be obtained by calling getData() on the exception; similarly, if the value fails deserialization, getData() returns the original value data. The DeadLetterPublishingRecoverer does this (since 2.3). You can tell whether the key or the value failed by calling isKey() on the exception.
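For example, inside a custom error handler like the one above, the raw payload could be recovered like this (a minimal sketch; the surrounding handler code is assumed):
if (thrownException instanceof DeserializationException) {
    DeserializationException dex = (DeserializationException) thrownException;
    byte[] raw = dex.getData();      // the raw bytes that failed deserialization
    boolean keyFailed = dex.isKey(); // true if the key failed, false if the value failed
    System.out.println((keyFailed ? "key" : "value") + " failed: " + new String(raw));
}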
EDIT
I was wrong; the key and value are both available, whether it was the value or the key that failed.
This is written with Boot 2.3.4:
@SpringBootApplication
public class Application {

    public static void main(String[] args) {
        SpringApplication.run(Application.class, args);
    }

    @Bean
    SeekToCurrentErrorHandler errorHandler(ProducerFactory<String, String> pf) {
        Map<String, Object> configs = new HashMap<>(pf.getConfigurationProperties());
        configs.put(ProducerConfig.KEY_SERIALIZER_CLASS_CONFIG, ByteArraySerializer.class);
        configs.put(ProducerConfig.VALUE_SERIALIZER_CLASS_CONFIG, ByteArraySerializer.class);
        ProducerFactory<byte[], byte[]> bytesPF = new DefaultKafkaProducerFactory<>(configs);
        KafkaOperations<byte[], byte[]> template = new KafkaTemplate<>(bytesPF);
        return new SeekToCurrentErrorHandler(new DeadLetterPublishingRecoverer(template),
                new FixedBackOff(1000, 5));
    }

    @KafkaListener(id = "so64597061", topics = "so64597061",
            properties = {
                    ConsumerConfig.KEY_DESERIALIZER_CLASS_CONFIG
                            + ":org.springframework.kafka.support.serializer.ErrorHandlingDeserializer",
                    ConsumerConfig.VALUE_DESERIALIZER_CLASS_CONFIG
                            + ":org.springframework.kafka.support.serializer.ErrorHandlingDeserializer",
                    ErrorHandlingDeserializer.KEY_DESERIALIZER_CLASS
                            + ":com.example.demo.Application$FailSometimesDeserializer",
                    ErrorHandlingDeserializer.VALUE_DESERIALIZER_CLASS
                            + ":com.example.demo.Application$FailSometimesDeserializer"
            })
    public void listen(String val, @Header(name = KafkaHeaders.RECEIVED_MESSAGE_KEY) String key) {
        System.out.println(key + ":" + val);
    }

    @KafkaListener(id = "so64597061.dlt", topics = "so64597061.DLT",
            properties = {
                    ConsumerConfig.KEY_DESERIALIZER_CLASS_CONFIG
                            + ":org.apache.kafka.common.serialization.ByteArrayDeserializer",
                    ConsumerConfig.VALUE_DESERIALIZER_CLASS_CONFIG
                            + ":org.apache.kafka.common.serialization.ByteArrayDeserializer"
            })
    public void dltListen(byte[] val, @Header(name = KafkaHeaders.RECEIVED_MESSAGE_KEY, required = false) byte[] key) {
        String keyStr = key != null ? new String(key) : null;
        String valStr = val != null ? new String(val) : null;
        System.out.println("DLT:" + keyStr + ":" + valStr);
    }

    @Bean
    public ApplicationRunner runner(KafkaTemplate<String, String> template) {
        return args -> {
            template.send("so64597061", "foo", "bar");
            template.send("so64597061", "fail", "keyFailed");
            template.send("so64597061", "valueFailed", "fail");
        };
    }

    @Bean
    public NewTopic topic() {
        return TopicBuilder.name("so64597061").partitions(1).replicas(1).build();
    }

    @Bean
    public NewTopic dlt() {
        return TopicBuilder.name("so64597061.DLT").partitions(1).replicas(1).build();
    }

    public static class FailSometimesDeserializer implements Deserializer<byte[]> {

        @Override
        public void configure(Map<String, ?> configs, boolean isKey) {
        }

        @Override
        public byte[] deserialize(String topic, byte[] data) {
            return data;
        }

        @Override
        public void close() {
        }

        @Override
        public byte[] deserialize(String topic, Headers headers, byte[] data) {
            String string = new String(data);
            if ("fail".equals(string)) {
                throw new RuntimeException("fail");
            }
            return data;
        }

    }

}
spring.kafka.consumer.auto-offset-reset=earliest
foo:bar
DLT:fail:keyFailed
DLT:valueFailed:fail

SeekToCurrentErrorHandler: DeadLetterPublishingRecoverer is not handling deserialization errors

I am trying to write a Kafka consumer using the spring-kafka 2.3.0.M2 library.
To handle runtime errors I am using SeekToCurrentErrorHandler with DeadLetterPublishingRecoverer as my recoverer. This works fine when my consumer code throws an exception, but fails when the message cannot be deserialized.
I tried implementing ErrorHandler myself and was successful, but with that approach I end up writing my own DLT code to handle error messages, which I do not want to do.
Below are my Kafka properties:
spring:
  kafka:
    consumer:
      bootstrap-servers: localhost:9092
      group-id: group_id
      auto-offset-reset: latest
      key-deserializer: org.springframework.kafka.support.serializer.ErrorHandlingDeserializer2
      value-deserializer: org.springframework.kafka.support.serializer.ErrorHandlingDeserializer2
      properties:
        spring.json.trusted.packages: com.mypackage
        spring.deserializer.key.delegate.class: org.apache.kafka.common.serialization.StringDeserializer
        spring.deserializer.value.delegate.class: org.apache.kafka.common.serialization.StringDeserializer
public ConcurrentKafkaListenerContainerFactory kafkaListenerContainerFactory(
        ConcurrentKafkaListenerContainerFactoryConfigurer configurer,
        ConsumerFactory<Object, Object> kafkaConsumerFactory,
        KafkaTemplate<Object, Object> template) {
    ConcurrentKafkaListenerContainerFactory<Object, Object> factory = new ConcurrentKafkaListenerContainerFactory<>();
    configurer.configure(factory, kafkaConsumerFactory);
    factory.setErrorHandler(new SeekToCurrentErrorHandler(new DeadLetterPublishingRecoverer(template), maxFailures));
    return factory;
}
It works fine for me (note that Boot will auto-configure the error handler)...
@SpringBootApplication
public class So56728833Application {

    public static void main(String[] args) {
        SpringApplication.run(So56728833Application.class, args);
    }

    @Bean
    public SeekToCurrentErrorHandler errorHandler(KafkaTemplate<String, String> template) {
        SeekToCurrentErrorHandler eh = new SeekToCurrentErrorHandler(new DeadLetterPublishingRecoverer(template), 3);
        eh.setClassifier( // retry for all except deserialization exceptions
                new BinaryExceptionClassifier(Collections.singletonList(DeserializationException.class), false));
        return eh;
    }

    @KafkaListener(id = "so56728833", topics = "so56728833")
    public void listen(Foo in) {
        System.out.println(in);
        if (in.getBar().equals("baz")) {
            throw new IllegalStateException("Test retries");
        }
    }

    @KafkaListener(id = "so56728833dlt", topics = "so56728833.DLT")
    public void listenDLT(Object in) {
        System.out.println("Received from DLT: " + (in instanceof byte[] ? new String((byte[]) in) : in));
    }

    @Bean
    public NewTopic topic() {
        return TopicBuilder.name("so56728833").partitions(1).replicas(1).build();
    }

    @Bean
    public NewTopic dlt() {
        return TopicBuilder.name("so56728833.DLT").partitions(1).replicas(1).build();
    }

    public static class Foo {

        private String bar;

        public Foo() {
            super();
        }

        public Foo(String bar) {
            this.bar = bar;
        }

        public String getBar() {
            return this.bar;
        }

        public void setBar(String bar) {
            this.bar = bar;
        }

        @Override
        public String toString() {
            return "Foo [bar=" + this.bar + "]";
        }

    }

}
spring:
  kafka:
    consumer:
      auto-offset-reset: earliest
      enable-auto-commit: false
      key-deserializer: org.springframework.kafka.support.serializer.ErrorHandlingDeserializer2
      value-deserializer: org.springframework.kafka.support.serializer.ErrorHandlingDeserializer2
      properties:
        spring.json.trusted.packages: com.example
        spring.deserializer.key.delegate.class: org.springframework.kafka.support.serializer.JsonDeserializer
        spring.deserializer.value.delegate.class: org.springframework.kafka.support.serializer.JsonDeserializer
        spring.json.value.default.type: com.example.So56728833Application$Foo
    producer:
      key-serializer: org.springframework.kafka.support.serializer.JsonSerializer
      value-serializer: org.springframework.kafka.support.serializer.JsonSerializer
logging:
  level:
    org.springframework.kafka: trace
I have 3 records in the topic:
"badJSON"
"{\"bar\":\"baz\"}"
"{\"bar\":\"qux\"}"
I see the first one going directly to the DLT, and the second one goes there after 3 attempts.

Kafka Spring: How to write unit tests for ConcurrentKafkaListenerContainerFactory and ConcurrentMessageListenerContainer?

I have two classes: one for the factories and the other for the listener containers:
public class ConsumerFactories {

    @Bean
    public ConcurrentKafkaListenerContainerFactory<String, Byte[]> adeKafkaListenerContainerFactory() {
        ConcurrentKafkaListenerContainerFactory<String, Byte[]> factory =
                new ConcurrentKafkaListenerContainerFactory<String, Byte[]>();
        factory.setConsumerFactory(consumerFactory1());
        factory.setConsumerFactory(consumerFactory2());
        factory.getContainerProperties().setPollTimeout(3000);
        return factory;
    }

}
And my listener class has multiple containers:
@Bean
public ConcurrentMessageListenerContainer<String, byte[]> adeListenerContainer() throws BeansException, ClassNotFoundException {
    final ContainerProperties containerProperties = new ContainerProperties("topic1");
    containerProperties.setMessageListener(new MessageListener<String, byte[]>() {

        @Override
        public void onMessage(ConsumerRecord<String, byte[]> record) {
            System.out.println("Thread is: " + Thread.currentThread().getName());
        }

    });
    ConcurrentMessageListenerContainer<String, byte[]> container =
            new ConcurrentMessageListenerContainer<>(consumerFactory1, containerProperties);
    container.setBeanName("bean1");
    container.setConcurrency(60);
    container.start();
    return container;
}

@Bean
public ConcurrentMessageListenerContainer<String, byte[]> adeListenerContainer2() throws BeansException, ClassNotFoundException {
    final ContainerProperties containerProperties = new ContainerProperties("topic1");
    containerProperties.setMessageListener(new MessageListener<String, byte[]>() {

        @Override
        public void onMessage(ConsumerRecord<String, byte[]> record) {
            System.out.println("Thread is: " + Thread.currentThread().getName());
        }

    });
    ConcurrentMessageListenerContainer<String, byte[]> container =
            new ConcurrentMessageListenerContainer<>(consumerFactory2, containerProperties);
    container.setBeanName("bean2");
    container.setConcurrency(60);
    container.start();
    return container;
}
1) How can I write unit tests for these two classes and methods?
2) Since all my listener containers do the same processing work, just for different sets of topics, can I pass the topics in when setting the consumer factory, or in some other way?
1.
container.start();
Never call start() on components in bean definitions; the application context is not ready yet. The container lifecycle management will start the containers at the right time, as long as autoStartup is true (the default).
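For example, a minimal sketch of a corrected bean definition (field names assumed from your code):
@Bean
public ConcurrentMessageListenerContainer<String, byte[]> adeListenerContainer() {
    ContainerProperties containerProperties = new ContainerProperties("topic1");
    containerProperties.setMessageListener((MessageListener<String, byte[]>) record ->
            System.out.println("Thread is: " + Thread.currentThread().getName()));
    // consumerFactory1 is assumed to be an existing ConsumerFactory field/bean
    ConcurrentMessageListenerContainer<String, byte[]> container =
            new ConcurrentMessageListenerContainer<>(consumerFactory1, containerProperties);
    container.setBeanName("bean1");
    container.setConcurrency(60);
    container.setAutoStartup(true); // the default; the context starts the container when it is ready
    return container; // no container.start() here
}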
Why do you need a container factory if you are creating the containers yourself?
It's not clear what you want to test.
EDIT
Here's an example of programmatically registering containers, using Spring Boot's auto-configured container factory (2.2 and above)...
@SpringBootApplication
public class So53752783Application {

    public static void main(String[] args) {
        SpringApplication.run(So53752783Application.class, args);
    }

    @SuppressWarnings("unchecked")
    @Bean
    public SmartInitializingSingleton creator(ConfigurableListableBeanFactory beanFactory,
            ConcurrentKafkaListenerContainerFactory<String, String> factory) {

        return () -> Stream.of("foo", "bar", "baz").forEach(topic -> {
            ConcurrentMessageListenerContainer<String, String> container = factory.createContainer(topic);
            container.getContainerProperties().setMessageListener((MessageListener<String, String>) record -> {
                System.out.println("Received " + record);
            });
            container.getContainerProperties().setGroupId(topic + ".group");
            container = (ConcurrentMessageListenerContainer<String, String>)
                    beanFactory.initializeBean(container, topic + ".container");
            beanFactory.registerSingleton(topic + ".container", container);
            container.start();
        });
    }

}
To unit test your listener, call container.getContainerProperties().getMessageListener(), cast it, and invoke onMessage(); then verify it did what you expected.
EDIT 2: Unit testing the listener
@SpringBootApplication
public class So53752783Application {

    public static void main(String[] args) {
        SpringApplication.run(So53752783Application.class, args);
    }

    @SuppressWarnings("unchecked")
    @Bean
    public SmartInitializingSingleton creator(ConfigurableListableBeanFactory beanFactory,
            ConcurrentKafkaListenerContainerFactory<String, String> factory,
            MyListener listener) {

        return () -> Stream.of("foo", "bar", "baz").forEach(topic -> {
            ConcurrentMessageListenerContainer<String, String> container = factory.createContainer(topic);
            container.getContainerProperties().setMessageListener(listener);
            container.getContainerProperties().setGroupId(topic + ".group");
            container = (ConcurrentMessageListenerContainer<String, String>)
                    beanFactory.initializeBean(container, topic + ".container");
            beanFactory.registerSingleton(topic + ".container", container);
            container.start();
        });
    }

    @Bean
    public MyListener listener() {
        return new MyListener();
    }

    public static class MyListener implements MessageListener<String, String> {

        @Autowired
        private Service service;

        public void setService(Service service) {
            this.service = service;
        }

        @Override
        public void onMessage(ConsumerRecord<String, String> data) {
            this.service.callSomeService(data.value().toUpperCase());
        }

    }

    public interface Service {

        void callSomeService(String in);

    }

    @Component
    public static class DefaultService implements Service {

        @Override
        public void callSomeService(String in) {
            // ...
        }

    }

}
and
@RunWith(SpringRunner.class)
@SpringBootTest
public class So53752783ApplicationTests {

    @Autowired
    private ApplicationContext context;

    @Test
    public void test() {
        ConcurrentMessageListenerContainer<?, ?> container = context.getBean("foo.container",
                ConcurrentMessageListenerContainer.class);
        MyListener messageListener = (MyListener) container.getContainerProperties().getMessageListener();
        Service service = mock(Service.class);
        messageListener.setService(service);
        messageListener.onMessage(new ConsumerRecord<>("foo", 0, 0L, "key", "foo"));
        verify(service).callSomeService("FOO");
    }

}

Onion Architecture Unit Of Work Transaction Not getting Connection String

I am using Onion Architecture with Autofac. In my Dependency Injection Code, I am using:
[assembly: WebActivatorEx.PostApplicationStartMethod(typeof(IocConfig), "RegisterDependencies")]
namespace AppMVC.Infrastructure.Bootstrapper
{
    public class IocConfig
    {
        public static void RegisterDependencies()
        {
            var builder = new ContainerBuilder();
            builder.RegisterType(typeof(UnitOfWork)).As(typeof(IUnitOfWork)).InstancePerHttpRequest();
            builder.Register<IEntitiesContext>(b =>
            {
                var context = new MyContext("My Connection String");
                return context;
            }).InstancePerHttpRequest();
        }
    }
}
Unit Of Work Code:
public class UnitOfWork : IUnitOfWork
{
    private readonly IEntitiesContext _context;
    private bool _disposed;
    private Hashtable _repositories;

    public UnitOfWork(IEntitiesContext context)
    {
        _context = context;
    }

    public int SaveChanges()
    {
        return _context.SaveChanges();
    }

    public IRepository<TEntity> Repository<TEntity>() where TEntity : BaseEntity
    {
        if (_repositories == null)
        {
            _repositories = new Hashtable();
        }
        var type = typeof(TEntity).Name;
        if (_repositories.ContainsKey(type))
        {
            return (IRepository<TEntity>)_repositories[type];
        }
        var repositoryType = typeof(EntityRepository<>);
        _repositories.Add(type, Activator.CreateInstance(repositoryType.MakeGenericType(typeof(TEntity)), _context));
        return (IRepository<TEntity>)_repositories[type];
    }

    public void BeginTransaction()
    {
        _context.BeginTransaction();
    }

    public int Commit()
    {
        return _context.Commit();
    }

    public void Rollback()
    {
        _context.Rollback();
    }

    public void Dispose()
    {
        Dispose(true);
        GC.SuppressFinalize(this);
    }

    public virtual void Dispose(bool disposing)
    {
        if (!_disposed && disposing)
        {
            _context.Dispose();
            foreach (IDisposable repository in _repositories.Values)
            {
                repository.Dispose(); // dispose all repositories
            }
        }
        _disposed = true;
    }
}
MyContext Code:
public class MyContext : DbContext, IEntitiesContext
{
private ObjectContext _objectContext;
private DbTransaction _transaction;
public MyContext(string nameOrConnectionString)
: base(nameOrConnectionString)
{
}
public void BeginTransaction()
{
_objectContext = ((IObjectContextAdapter)this).ObjectContext;
if (_objectContext.Connection.State == ConnectionState.Open)
{
if (_transaction == null)
{
_transaction = _objectContext.Connection.BeginTransaction();
}
return;
}
_objectContext.Connection.Open(); // At this Line, I am getting Exception
if (_transaction == null)
{
_transaction = _objectContext.Connection.BeginTransaction();
}
}
public int Commit()
{
var saveChanges = SaveChanges();
_transaction.Commit();
return saveChanges;
}
public void Rollback()
{
_transaction.Rollback();
}
}
My problem is that on _objectContext.Connection.Open(); I am getting a "connection string missing" error.
