Spring Kafka unit test triggers the listener, but the test cannot get the message using consumer.poll() - spring-kafka

We are using spring-kafka-test-2.2.8-RELEASE.
When I use the template to send the message, it triggers the listener correctly, but I can't get the message content in consumer.poll(). If I instantiate the KafkaTemplate from a producer factory instead of autowiring it as a class attribute, it sends the message but does not trigger the @KafkaListener; it only works if I set up a MessageListener inside the @Test method. I need to trigger the Kafka listener and verify which topic will be called next (the "success" topic when it executes without errors, the "errorTopic" when the listener throws an exception), as well as the message content.
@RunWith(SpringRunner.class)
@SpringBootTest
@EmbeddedKafka(partitions = 1, topics = { "tp-in-gco-mao-notasfiscais" })
public class InvoicingServiceTest {

    @Autowired
    private NFKafkaListener nfKafkaListener;

    @ClassRule
    public static EmbeddedKafkaRule broker = new EmbeddedKafkaRule(1, false, "tp-in-gco-mao-notasfiscais");

    @Value("${" + EmbeddedKafkaBroker.SPRING_EMBEDDED_KAFKA_BROKERS + "}")
    private String brokerAddresses;

    @Autowired
    private KafkaTemplate<Object, Object> template;

    @BeforeClass
    public static void setup() {
        System.setProperty(EmbeddedKafkaBroker.BROKER_LIST_PROPERTY, "spring.kafka.bootstrap-servers");
    }

    @Test
    public void testTemplate() throws Exception {
        NFServiceTest nfServiceTest = spy(new NFServiceTest());
        nfKafkaListener.setNfServiceClient(nfServiceTest);
        Map<String, Object> consumerProps = KafkaTestUtils.consumerProps("teste9", "false", broker.getEmbeddedKafka());
        consumerProps.put(ConsumerConfig.KEY_DESERIALIZER_CLASS_CONFIG, StringDeserializer.class);
        consumerProps.put(ConsumerConfig.AUTO_OFFSET_RESET_CONFIG, "earliest");
        consumerProps.put(ConsumerConfig.VALUE_DESERIALIZER_CLASS_CONFIG, InvoiceDeserializer.class);
        consumerProps.put(ConsumerConfig.ENABLE_AUTO_COMMIT_CONFIG, "false");
        DefaultKafkaConsumerFactory<Integer, Object> cf = new DefaultKafkaConsumerFactory<>(consumerProps);
        Consumer<Integer, Object> consumer = cf.createConsumer();
        broker.getEmbeddedKafka().consumeFromAnEmbeddedTopic(consumer, "tp-in-gco-mao-notasfiscais");
        ZfifNfMao zf = new ZfifNfMao();
        zf.setItItensnf(new Zfietb011());
        Zfietb011 zfietb011 = new Zfietb011();
        Zfie011 zfie011 = new Zfie011();
        zfie011.setMatkl("TESTE");
        zfietb011.getItem().add(zfie011);
        zf.setItItensnf(zfietb011);
        template.send("tp-in-gco-mao-notasfiscais", zf);
        List<ConsumerRecord<Integer, Object>> received = new ArrayList<>();
        int n = 0;
        while (received.size() < 1 && n++ < 10) {
            ConsumerRecords<Integer, Object> records1 = consumer.poll(Duration.ofSeconds(10));
            // records1 is always empty
            if (!records1.isEmpty()) {
                records1.forEach(rec -> received.add(rec));
            }
        }
        assertThat(received).extracting(rec -> {
            ZfifNfMao zfifNfMaoResponse = (ZfifNfMao) rec.value();
            return zfifNfMaoResponse.getItItensnf().getItem().get(0).getMatkl();
        }).contains("TESTE");
        broker.getEmbeddedKafka().getKafkaServers().forEach(b -> b.shutdown());
        broker.getEmbeddedKafka().getKafkaServers().forEach(b -> b.awaitShutdown());
        consumer.close();
    }

    public static class NFServiceTest implements INFServiceClient {

        CountDownLatch latch = new CountDownLatch(1);

        @Override
        public ZfifNfMaoResponse enviarSap(ZfifNfMao zfifNfMao) {
            ZfifNfMaoResponse zfifNfMaoResponse = new ZfifNfMaoResponse();
            zfifNfMaoResponse.setItItensnf(new Zfietb011());
            Zfietb011 zfietb011 = new Zfietb011();
            Zfie011 zfie011 = new Zfie011();
            zfie011.setMatkl("TESTE");
            zfietb011.getItem().add(zfie011);
            zfifNfMaoResponse.setItItensnf(zfietb011);
            return zfifNfMaoResponse;
        }
    }
}

You have two brokers: one created by @EmbeddedKafka and one created by the @ClassRule.
Use one or the other; preferably the @EmbeddedKafka, and simply @Autowired the broker instance.
I am guessing the consumers are listening to different brokers; you can confirm that by looking at the INFO logs put out by the consumer config.
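For illustration, a minimal sketch of that single-broker setup (untested; it reuses the topic and deserializer names from the question, while the test class and group id are made up). KafkaTestUtils.getSingleRecord is just a convenient alternative to a manual poll loop:
@RunWith(SpringRunner.class)
@SpringBootTest
@EmbeddedKafka(partitions = 1, topics = { "tp-in-gco-mao-notasfiscais" })
public class SingleBrokerSketchTest {

    @Autowired
    private EmbeddedKafkaBroker broker; // the one broker, created by @EmbeddedKafka

    @Autowired
    private KafkaTemplate<Object, Object> template;

    @Test
    public void roundTrip() {
        Map<String, Object> props = KafkaTestUtils.consumerProps("sketchGroup", "false", broker);
        props.put(ConsumerConfig.VALUE_DESERIALIZER_CLASS_CONFIG, InvoiceDeserializer.class);
        Consumer<Integer, Object> consumer = new DefaultKafkaConsumerFactory<Integer, Object>(props).createConsumer();
        broker.consumeFromAnEmbeddedTopic(consumer, "tp-in-gco-mao-notasfiscais");
        template.send("tp-in-gco-mao-notasfiscais", new ZfifNfMao());
        // polls for up to the default timeout and fails the test if nothing arrives
        ConsumerRecord<Integer, Object> record = KafkaTestUtils.getSingleRecord(consumer, "tp-in-gco-mao-notasfiscais");
        assertThat(record.value()).isNotNull();
        consumer.close();
    }
}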

I've followed your advice, but it still triggers the listener while consumer.poll() does not capture the topic content.
@RunWith(SpringRunner.class)
@SpringBootTest
@EmbeddedKafka(partitions = 1, topics = { "tp-in-gco-mao-notasfiscais" })
public class InvoicingServiceTest {

    @Autowired
    private NFKafkaListener nfKafkaListener;

    @Autowired
    public EmbeddedKafkaBroker broker;

    @Autowired
    private KafkaTemplate<Object, Object> template;

    @BeforeClass
    public static void setup() {
        System.setProperty(EmbeddedKafkaBroker.BROKER_LIST_PROPERTY, "spring.kafka.bootstrap-servers");
    }

    @Test
    public void testTemplate() throws Exception {
        NFServiceTest nfServiceTest = spy(new NFServiceTest());
        nfKafkaListener.setNfServiceClient(nfServiceTest);
        Map<String, Object> consumerProps = KafkaTestUtils.consumerProps("teste9", "false", broker);
        consumerProps.put(ConsumerConfig.KEY_DESERIALIZER_CLASS_CONFIG, StringDeserializer.class);
        consumerProps.put(ConsumerConfig.AUTO_OFFSET_RESET_CONFIG, "earliest");
        consumerProps.put(ConsumerConfig.VALUE_DESERIALIZER_CLASS_CONFIG, InvoiceDeserializer.class);
        DefaultKafkaConsumerFactory<Integer, Object> cf = new DefaultKafkaConsumerFactory<>(consumerProps);
        Consumer<Integer, Object> consumer = cf.createConsumer();
        broker.consumeFromAnEmbeddedTopic(consumer, "tp-in-gco-mao-notasfiscais");
        ZfifNfMao zf = new ZfifNfMao();
        zf.setItItensnf(new Zfietb011());
        Zfietb011 zfietb011 = new Zfietb011();
        Zfie011 zfie011 = new Zfie011();
        zfie011.setMatkl("TESTE");
        zfietb011.getItem().add(zfie011);
        zf.setItItensnf(zfietb011);
        template.send("tp-in-gco-mao-notasfiscais", zf);
        List<ConsumerRecord<Integer, Object>> received = new ArrayList<>();
        int n = 0;
        while (received.size() < 1 && n++ < 10) {
            ConsumerRecords<Integer, Object> records1 = consumer.poll(Duration.ofSeconds(10));
            // records1 is always empty
            if (!records1.isEmpty()) {
                records1.forEach(rec -> received.add(rec));
            }
        }
        assertThat(received).extracting(rec -> {
            ZfifNfMao zfifNfMaoResponse = (ZfifNfMao) rec.value();
            return zfifNfMaoResponse.getItItensnf().getItem().get(0).getMatkl();
        }).contains("TESTE");
        broker.getKafkaServers().forEach(b -> b.shutdown());
        broker.getKafkaServers().forEach(b -> b.awaitShutdown());
        consumer.close();
    }

    public static class NFServiceTest implements INFServiceClient {

        CountDownLatch latch = new CountDownLatch(1);

        @Override
        public ZfifNfMaoResponse enviarSap(ZfifNfMao zfifNfMao) {
            ZfifNfMaoResponse zfifNfMaoResponse = new ZfifNfMaoResponse();
            zfifNfMaoResponse.setItItensnf(new Zfietb011());
            Zfietb011 zfietb011 = new Zfietb011();
            Zfie011 zfie011 = new Zfie011();
            zfie011.setMatkl("TESTE");
            zfietb011.getItem().add(zfie011);
            zfifNfMaoResponse.setItItensnf(zfietb011);
            return zfifNfMaoResponse;
        }
    }
}

Related

spring-kafka delivery attempt is not increased when the message is retried

I am using spring-kafka 2.8.6 with a RetryTopicConfiguration for retries.
@KafkaListener(
        topics = "...",
        groupId = "...",
        containerFactory = "kafkaListenerContainerFactory")
public void listenWithHeaders(final @Valid @Payload Event event,
        @Header(KafkaHeaders.DELIVERY_ATTEMPT) final int deliveryAttempt) {
}
I have set up a common error handler and also enabled the delivery attempt header.
@Bean
public ConcurrentKafkaListenerContainerFactory<String, Event> kafkaListenerContainerFactory(
        @Qualifier("ConsumerFactory") final ConsumerFactory<String, Event> consumerFactory) {
    final ConcurrentKafkaListenerContainerFactory<String, Event> factory =
            new ConcurrentKafkaListenerContainerFactory<>();
    factory.setConsumerFactory(consumerFactory);
    factory.setCommonErrorHandler(new DefaultErrorHandler(new ExponentialBackOff(
            kafkaProperties.getExponentialBackoffInitialInterval(),
            kafkaProperties.getExponentialBackoffMultiplier())));
    LOGGER.info("setup ConcurrentKafkaListenerContainerFactory");
    factory.getContainerProperties().setDeliveryAttemptHeader(true);
    return factory;
}
But when a retry is triggered, the delivery attempt in the message header is always 1; it never increases.
Do I miss any other part? Thanks!
--- I am using a retry topic configuration.
@Bean
public RetryTopicConfiguration retryableTopicKafkaTemplate(@Qualifier("kafkaTemplate") KafkaTemplate<String, Event> kafkaTemplate) {
    return RetryTopicConfigurationBuilder
            .newInstance()
            .exponentialBackoff(
                    properties.getExponentialBackoffInitialInterval(),
                    properties.getExponentialBackoffMultiplier(),
                    properties.getExponentialBackoffMaxInterval())
            .autoCreateTopics(properties.isRetryTopicAutoCreateTopics(), properties.getRetryTopicAutoCreateNumPartitions(), properties.getRetryTopicAutoCreateReplicationFactor())
            .maxAttempts(properties.getMaxAttempts())
            .notRetryOn(...)
            .retryTopicSuffix(properties.getRetryTopicSuffix())
            .dltSuffix(properties.getDltSuffix())
            .create(kafkaTemplate);
}
---- Following Gary's suggestion, I have it fully working now with my listener.
@KafkaListener(
        topics = "...",
        groupId = "...",
        containerFactory = "kafkaListenerContainerFactory")
public void listenWithHeaders(final @Valid @Payload Event event,
        @Header(value = RetryTopicHeaders.DEFAULT_HEADER_ATTEMPTS, required = false) final Integer deliveryAttempt) {
    ...
}
It works fine for me with this:
@SpringBootApplication
public class So72871495Application {

    public static void main(String[] args) {
        SpringApplication.run(So72871495Application.class, args);
    }

    @KafkaListener(id = "so72871495", topics = "so72871495")
    void listen(String in, @Header(KafkaHeaders.DELIVERY_ATTEMPT) int delivery) {
        System.out.println(in + " " + delivery);
        throw new RuntimeException("test");
    }

    @Bean
    public NewTopic topic() {
        return TopicBuilder.name("so72871495").partitions(1).replicas(1).build();
    }

    @Bean
    ApplicationRunner runner(KafkaTemplate<String, String> template,
            AbstractKafkaListenerContainerFactory<?, ?, ?> factory) {
        factory.getContainerProperties().setDeliveryAttemptHeader(true);
        factory.setCommonErrorHandler(new DefaultErrorHandler(new FixedBackOff(5000L, 3)));
        return args -> {
            template.send("so72871495", "foo");
        };
    }
}
foo 1
foo 2
foo 3
foo 4
If you can't figure out what's different for you, please provide an MCRE so I can see what's wrong.
EDIT
With @RetryableTopic, that header is always 1 because each delivery is the first attempt from a different topic.
Use this instead:
void listen(String in, @Header(name = RetryTopicHeaders.DEFAULT_HEADER_ATTEMPTS, required = false) Integer attempts) {
Integer, not int: it will be null on the first attempt and 2, 3, etc. on the retries.
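Put together, a minimal sketch of such a retry-topic-aware listener (it mirrors the working version the asker posted above; the id and topic reuse the example names from this answer):
@KafkaListener(id = "so72871495", topics = "so72871495")
void listen(String in,
        @Header(name = RetryTopicHeaders.DEFAULT_HEADER_ATTEMPTS, required = false) Integer attempts) {
    // null on the initial delivery; 2, 3, ... on deliveries arriving from the retry topics
    System.out.println(in + " attempt " + (attempts == null ? 1 : attempts));
    throw new RuntimeException("test");
}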

How does spring-data-redis get an ObjectRecord with a generic type

I want to implement a message queue using Redis Streams, but when the entity type passed is generic, my code does not correctly deserialize the consumed message back into the entity.
The critical producer code:
@Bean
RedisTemplate<String, Object> redisTemplate(RedisConnectionFactory rf) {
    RedisTemplate<String, Object> rt = new RedisTemplate<>();
    rt.setConnectionFactory(rf);
    rt.setKeySerializer(RedisSerializer.string());
    rt.setValueSerializer(RedisSerializer.json());
    rt.setHashKeySerializer(RedisSerializer.string());
    rt.setHashValueSerializer(RedisSerializer.json());
    return rt;
}

@Bean
ApplicationRunner runner(RedisTemplate<String, Object> rt) {
    return arg -> {
        MyMessage<User> userMyMessage = new MyMessage<User>().setReceiver(1)
                .setClientType("app")
                .setPayload(new User().setName("testName")
                        .setAge(10)
                        .setId(1));
        Jackson2HashMapper hashMapper = new Jackson2HashMapper(false);
        rt.opsForStream()
                .add(StreamRecords.newRecord()
                        .in("my-stream")
                        .ofMap(hashMapper.toHash(userMyMessage)));
    };
}
the consumer:
@Bean
public RedisTemplate<String, Object> redisTemplate(RedisConnectionFactory rcf) {
    RedisTemplate<String, Object> rt = new RedisTemplate<>();
    rt.setConnectionFactory(rcf);
    rt.setKeySerializer(RedisSerializer.string());
    rt.setValueSerializer(RedisSerializer.json());
    rt.setHashKeySerializer(RedisSerializer.string());
    rt.setHashValueSerializer(RedisSerializer.json());
    return rt;
}

@Bean
StreamMessageListenerContainer.StreamMessageListenerContainerOptions<String, ObjectRecord<String, Object>> hashContainerOptions() {
    return StreamMessageListenerContainer.StreamMessageListenerContainerOptions.builder()
            .pollTimeout(Duration.ofSeconds(1))
            .objectMapper(new Jackson2HashMapper(false))
            .build();
}

@Bean
StreamMessageListenerContainer<String, ObjectRecord<String, Object>> hashContainer(
        StreamMessageListenerContainer.StreamMessageListenerContainerOptions<String, ObjectRecord<String, Object>> options,
        RedisConnectionFactory redisConnectionFactory,
        RedisTemplate<String, Object> redisTemplate) {
    var container = StreamMessageListenerContainer.create(redisConnectionFactory, options);
    container.start();
    String group = "default-group";
    String key = "my-stream";
    try {
        redisTemplate.opsForStream().createGroup(key, group);
    } catch (Exception ignore) {
    }
    container.receiveAutoAck(
            Consumer.from(group, "default-consumer"),
            StreamOffset.create(key, ReadOffset.lastConsumed()),
            message -> {
                log.info("receive message stream:{}, id:{} value:{}", message.getStream(), message.getId(), message.getValue());
            }
    );
    return container;
}
When the producer starts, the object is stored correctly in Redis, but the consumer does not read the object back. The following is the log:
receive message stream:my-stream, id:1655890663894-0 value:"com.commons.MyMessage"
The entity's attributes are gone. How do I make it work? And if I want to listen to two different streams (whose message object types are different), what should I do: configure two StreamMessageListenerContainers, or can both be configured in one container?
thanks!
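No answer is recorded here, but one direction worth trying (an untested sketch) is the targetType option on the options builder, which asks the container to map each incoming hash back into a concrete class through the configured hash mapper. Note that the raw MyMessage.class cannot carry the generic User parameter, so the payload may still need explicit type information from the mapper:
@Bean
StreamMessageListenerContainer.StreamMessageListenerContainerOptions<String, ObjectRecord<String, MyMessage>> typedOptions() {
    return StreamMessageListenerContainer.StreamMessageListenerContainerOptions.builder()
            .pollTimeout(Duration.ofSeconds(1))
            .objectMapper(new Jackson2HashMapper(false))
            // map each record's hash back into MyMessage instead of leaving it as Object
            .targetType(MyMessage.class)
            .build();
}
As for the second question: a single StreamMessageListenerContainer can register several subscriptions (each receive/receiveAutoAck call returns its own Subscription), but the record type is fixed per container, so two streams with different payload types are simpler to handle with two containers.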

Unit test for a POST Sling servlet - AEM 6.5

I have the following POST servlet that adds a new node under a certain resource, with parameters (name and last name) taken from the request:
@Component(
        service = Servlet.class,
        property = {
                "sling.servlet.paths=/bin/createuser",
                "sling.servlet.methods=" + HttpConstants.METHOD_POST
        })
public class CreateNodeServlet extends SlingAllMethodsServlet {

    /**
     * Logger
     */
    private static final Logger log = LoggerFactory.getLogger(CreateNodeServlet.class);

    @Override
    protected void doPost(final SlingHttpServletRequest req, final SlingHttpServletResponse resp) throws IOException {
        log.info("Inside CreateNodeServlet");
        ResourceResolver resourceResolver = req.getResourceResolver();
        final Resource resource = resourceResolver.getResource("/content/test/us/en");
        String name = req.getParameter("name");
        String lastname = req.getParameter("lastname");
        log.info("name :{}", name);
        log.info("lastname :{}", lastname);
        Node node = resource.adaptTo(Node.class);
        try {
            log.info("Node {}", node.getName());
            Node newNode = node.addNode(name + lastname, "nt:unstructured");
            newNode.setProperty("name", name);
            newNode.setProperty("lastname", lastname);
            resourceResolver.commit();
        } catch (RepositoryException e) {
            e.printStackTrace();
        } catch (PersistenceException e) {
            e.printStackTrace();
        }
        resp.setStatus(200);
        resp.getWriter().write("Simple Post Test");
    }
}
I tried creating a unit test for this; this is what I have so far:
@ExtendWith(AemContextExtension.class)
class CreateNodeServletTest {

    private final AemContext context = new AemContext();

    private CreateNodeServlet createNodeServlet = new CreateNodeServlet();

    @Test
    void doPost() throws IOException, JSONException {
        context.currentPage(context.pageManager().getPage("/bin/createuser"));
        context.currentResource(context.resourceResolver().getResource("/bin/createuser"));
        context.requestPathInfo().setResourcePath("/bin/createuser");
        MockSlingHttpServletRequest request = context.request();
        MockSlingHttpServletResponse response = context.response();
        createNodeServlet.doPost(request, response);
        JSONArray output = new JSONArray(context.response().getOutputAsString());
        assertEquals("Simple Post Test", output);
    }
}
However, this is not working; I am getting a NullPointerException on this line:
Node node = resource.adaptTo(Node.class);
Can someone help with what I am missing? Some tips would be a great help, as I am new to AEM and there are not many resources about unit testing Sling servlets.
I think you need to register JCR_MOCK as the resource resolver type:
new AemContext(ResourceResolverType.JCR_MOCK);
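For example, a sketch of how the test might look with that change (untested; the resource path and parameter names mirror the servlet above, and the test creates the resource the servlet expects so that adaptTo(Node.class) has a JCR node to return):
@ExtendWith(AemContextExtension.class)
class CreateNodeServletTest {

    private final AemContext context = new AemContext(ResourceResolverType.JCR_MOCK);
    private final CreateNodeServlet createNodeServlet = new CreateNodeServlet();

    @Test
    void doPost() throws IOException {
        // create the resource the servlet resolves
        context.create().resource("/content/test/us/en");
        Map<String, Object> params = new HashMap<>();
        params.put("name", "John");
        params.put("lastname", "Doe");
        context.request().setParameterMap(params);
        createNodeServlet.doPost(context.request(), context.response());
        assertEquals("Simple Post Test", context.response().getOutputAsString());
    }
}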

The metrics() method of MessageListenerContainer is not capturing the right values

I am using spring-kafka 2.2.8 to create a batch consumer, and I am trying to capture my container metrics to understand the performance details of the batch consumer.
@Bean
public ConsumerFactory consumerFactory() {
    return new DefaultKafkaConsumerFactory(consumerConfigs(), stringKeyDeserializer(), avroValueDeserializer());
}

@Bean
public FixedBackOffPolicy getBackOffPolicy() {
    FixedBackOffPolicy backOffPolicy = new FixedBackOffPolicy();
    backOffPolicy.setBackOffPeriod(100);
    return backOffPolicy;
}

@Bean
public ConcurrentKafkaListenerContainerFactory kafkaBatchListenerContainerFactory() {
    ConcurrentKafkaListenerContainerFactory factory = new ConcurrentKafkaListenerContainerFactory();
    factory.setConsumerFactory(consumerFactory());
    factory.setBatchListener(true);
    factory.setStatefulRetry(true);
    return factory;
}

public Map<String, Object> consumerConfigs() {
    Map<String, Object> configs = new HashMap<>();
    batchConsumerConfigProperties.setKeyDeserializerClassConfig();
    batchConsumerConfigProperties.setValueDeserializerClassConfig();
    batchConsumerConfigProperties.setKeyDeserializerClass(StringDeserializer.class);
    batchConsumerConfigProperties.setValueDeserializerClass(KafkaAvroDeserializer.class);
    batchConsumerConfigProperties.setSpecificAvroReader("true");
    batchConsumerConfigProperties.setAutoOffsetResetConfig(environment.getProperty("sapphire.kes.consumer.auto.offset.reset", "earliest"));
    batchConsumerConfigProperties.setEnableAutoCommitConfig(environment.getProperty("sapphire.kes.consumer.enable.auto.commit", "false"));
    batchConsumerConfigProperties.setMaxPollIntervalMs(environment.getProperty(MAX_POLL_INTERVAL_MS_CONFIG, "300000"));
    batchConsumerConfigProperties.setMaxPollRecords(environment.getProperty(MAX_POLL_RECORDS_CONFIG, "50000"));
    batchConsumerConfigProperties.setSessionTimeoutms(environment.getProperty(SESSION_TIMEOUT_MS_CONFIG, "10000"));
    batchConsumerConfigProperties.setRequestTimeOut(environment.getProperty(REQUEST_TIMEOUT_MS_CONFIG, "30000"));
    batchConsumerConfigProperties.setHeartBeatIntervalMs(environment.getProperty(HEARTBEAT_INTERVAL_MS_CONFIG, "3000"));
    batchConsumerConfigProperties.setFetchMinBytes(environment.getProperty(FETCH_MIN_BYTES_CONFIG, "1"));
    batchConsumerConfigProperties.setFetchMaxBytes(environment.getProperty(FETCH_MAX_BYTES_CONFIG, "52428800"));
    batchConsumerConfigProperties.setFetchMaxWaitMS(environment.getProperty(FETCH_MAX_WAIT_MS_CONFIG, "500"));
    batchConsumerConfigProperties.setMaxPartitionFetchBytes(environment.getProperty(MAX_PARTITION_FETCH_BYTES_CONFIG, "1048576"));
    batchConsumerConfigProperties.setConnectionsMaxIdleMs(environment.getProperty(CONNECTIONS_MAX_IDLE_MS_CONFIG, "540000"));
    batchConsumerConfigProperties.setAutoCommitIntervalMS(environment.getProperty(AUTO_COMMIT_INTERVAL_MS_CONFIG, "5000"));
    batchConsumerConfigProperties.setReceiveBufferBytes(environment.getProperty(RECEIVE_BUFFER_CONFIG, "65536"));
    batchConsumerConfigProperties.setSendBufferBytes(environment.getProperty(SEND_BUFFER_CONFIG, "131072"));
    return configs;
}
Here is my consumer code, where I'm trying to capture the container metrics:
@Component
public class MyBatchConsumer {

    private final KafkaListenerEndpointRegistry registry;

    @Autowired
    public MyBatchConsumer(KafkaListenerEndpointRegistry registry) {
        this.registry = registry;
    }

    @KafkaListener(topics = "myTopic", containerFactory = "kafkaBatchListenerContainerFactory", id = "myBatchConsumer")
    public void consumeRecords(List<ConsumerRecord> messages) {
        System.out.println("messages size - " + messages.size());
        if (mybatchconsumerMessageCount == 0) {
            ConsumerPerfTestingConstants.batchConsumerStartTime = System.currentTimeMillis();
            ConsumerPerfTestingConstants.batchConsumerStartDateTime = LocalDateTime.now().format(DateTimeFormatter.ofPattern("MM/dd/yyyy HH:mm:ss"));
        }
        mybatchconsumerMessageCount = mybatchconsumerMessageCount + messages.size();
        System.out.println("\n\n\n batchConsumerConsumedMessages " + mybatchconsumerMessageCount);
        if (mybatchconsumerMessageCount == targetMessageCount) {
            System.out.println("ATTENTION! ATTENTION! ATTENTION! Consumer Finished processing " + messageCount + " messages");
            registry.getListenerContainerIds().forEach(
                    listenerId -> System.out.println(" kes batch consumer listenerId is " + listenerId)
            );
            String listenerID = registry.getListenerContainerIds().stream().filter(listenerId -> listenerId.startsWith("myBatchConsumer")).findFirst().get();
            System.out.println(" kes batch consumer listenerID is " + listenerID);
            Map<String, Map<MetricName, ? extends Metric>> metrics = registry.getListenerContainer(listenerID).metrics();
            registry.getListenerContainer(listenerID).stop();
            System.out.println("metrics - " + metrics);
        }
    }
}
Now, I'm trying to consume 10 records to see what the metrics look like, and I see the values below and am not sure why. Can someone help me understand what I am missing here?
records-consumed-total = 0
records-consumed-rate = 0
This works fine for me; I am using 2.6.2, but the container simply delegates to the consumer when calling metrics.
@SpringBootApplication
public class So64878927Application {

    public static void main(String[] args) {
        SpringApplication.run(So64878927Application.class, args);
    }

    @Autowired
    KafkaListenerEndpointRegistry registry;

    @KafkaListener(id = "so64878927", topics = "so64878927")
    void listen(List<String> in) {
        System.out.println(in);
        Map<String, Map<MetricName, ? extends Metric>> metrics = registry.getListenerContainer("so64878927").metrics();
        System.out.println("L: " + metrics.get("consumer-so64878927-1").entrySet().stream()
                .filter(entry -> entry.getKey().name().startsWith("records-consumed"))
                .map(entry -> entry.getValue().metricName().name() + " = " + entry.getValue().metricValue())
                .collect(Collectors.toList()));
        registry.getListenerContainer("so64878927").stop(() -> System.out.println("Stopped"));
    }

    @Bean
    NewTopic topic() {
        return TopicBuilder.name("so64878927").build();
    }

    @EventListener
    void idleEvent(ListenerContainerIdleEvent event) {
        Map<String, Map<MetricName, ? extends Metric>> metrics = registry.getListenerContainer("so64878927").metrics();
        System.out.println("I: " + metrics.get("consumer-so64878927-1").entrySet().stream()
                .filter(entry -> entry.getKey().name().startsWith("records-consumed"))
                .map(entry -> entry.getValue().metricName().name() + " = " + entry.getValue().metricValue())
                .collect(Collectors.toList()));
    }
}
spring.kafka.listener.type=batch
spring.kafka.listener.idle-event-interval=6000
[foo, bar, baz, foo, bar, baz]
L: [records-consumed-total = 6.0, records-consumed-rate = 0.1996472897880411, records-consumed-total = 6.0, records-consumed-rate = 0.1996539331824837]
I am not sure why the metrics are duplicated but, as I said, all we do is call the consumer's metrics method.
By the way, if you want to stop the container from the listener, you should use the async stop - see my example.

I am trying to implement NotificationChannel and WorkManager, but somehow it's not working and I am not seeing anything wrong

I am trying to implement a feature where you choose a date and time and a notification pops up on your phone at that moment. After writing some code it is still not working, but everything seems fine.
Activity code
    FloatingActionButton fab = findViewById(R.id.fab);
    fab.setOnClickListener(new View.OnClickListener() {
        @RequiresApi(api = Build.VERSION_CODES.M)
        @Override
        public void onClick(View view) {
            Calendar customCalendar = GregorianCalendar.getInstance();
            DatePicker dp = findViewById(R.id.date_picker);
            TimePicker picker = findViewById(R.id.time_picker);
            customCalendar.set(
                    dp.getYear(), dp.getMonth(), dp.getDayOfMonth(), picker.getHour(), picker.getMinute(), 0);
            long customTime = customCalendar.getTimeInMillis();
            SimpleDateFormat sdf = new SimpleDateFormat(getString(R.string.notification_schedule_pattern), Locale.getDefault());
            long currentTime = System.currentTimeMillis();
            Log.d("time", "customTime " + customTime);
            Log.d("time", "currentTime " + currentTime);
            if (customTime > currentTime) {
                Data data = new Data.Builder().putInt(NOTIFICATION_ID, 0).build();
                int delay = (int) (customTime - currentTime);
                scheduleNotification(delay, data);
                String titleNotificationSchedule = getString(R.string.notification_schedule_title);
                Snackbar.make(
                        view,
                        titleNotificationSchedule + sdf.format(customCalendar.getTime()),
                        LENGTH_LONG).show();
                // Snackbar.make(coordinatorLayout, "Reminder set", LENGTH_LONG)
                //         .setAction("Action", null).show();
            } else {
                String errorNotificationSchedule = "Error occurred";
                Snackbar.make(coordinatorLayout, errorNotificationSchedule, LENGTH_LONG).show();
            }
        }
    });
}

private void scheduleNotification(long delay, Data data) {
    OneTimeWorkRequest notificationWork = new OneTimeWorkRequest.Builder(NotifyWork.class)
            .setInitialDelay(delay, MILLISECONDS).setInputData(data).build();
    WorkManager instanceWorkManager = WorkManager.getInstance(getApplicationContext());
    instanceWorkManager.beginUniqueWork(NOTIFICATION_WORK, REPLACE, notificationWork).enqueue();
}
Worker class
public class NotifyWork extends Worker {

    public static final String NOTIFICATION_ID = "notification_id";
    public static final String NOTIFICATION_NAME = "Remember";
    public static final String NOTIFICATION_CHANNEL = "Reminder_Channel";
    public static final String NOTIFICATION_WORK = "Notification_Work";

    public NotifyWork(@NonNull Context context, @NonNull WorkerParameters workerParams) {
        super(context, workerParams);
    }

    @NonNull
    @Override
    public Result doWork() {
        int id = getInputData().getInt(NOTIFICATION_ID, 0);
        sendNotification(id);
        return Result.success();
    }

    private void sendNotification(int id) {
        NotificationManager notificationManager = (NotificationManager) getApplicationContext()
                .getSystemService(Context.NOTIFICATION_SERVICE);
        Bitmap bitmap = BitmapFactory.decodeResource(getApplicationContext().getResources(), R.drawable.ic_done_white_24dp);
        Intent intent = new Intent(getApplicationContext(), MainActivity.class);
        intent.addFlags(Intent.FLAG_ACTIVITY_NEW_TASK | Intent.FLAG_ACTIVITY_CLEAR_TASK);
        intent.putExtra(NOTIFICATION_ID, id);
        String titleNotification = "Reminder";
        String subtitleNotification = "Time To WakeUp";
        PendingIntent pendingIntent = PendingIntent.getActivity(getApplicationContext(), 0, intent, PendingIntent.FLAG_UPDATE_CURRENT);
        NotificationCompat.Builder notification = new NotificationCompat.Builder(getApplicationContext(), NOTIFICATION_CHANNEL)
                .setLargeIcon(bitmap).setContentTitle(titleNotification)
                .setContentText(subtitleNotification).setDefaults(IMPORTANCE_DEFAULT).setSound(getDefaultUri(TYPE_NOTIFICATION))
                .setContentIntent(pendingIntent).setAutoCancel(true);
        notification.setPriority(IMPORTANCE_MAX);
        notificationManager.notify(id, notification.build());
        if (android.os.Build.VERSION.SDK_INT >= android.os.Build.VERSION_CODES.O) {
            Uri ringtoneManager = getDefaultUri(TYPE_NOTIFICATION);
            AudioAttributes audioAttributes = new AudioAttributes.Builder().setUsage(USAGE_NOTIFICATION_RINGTONE)
                    .setContentType(CONTENT_TYPE_SONIFICATION).build();
            NotificationChannel channel = new NotificationChannel(NOTIFICATION_CHANNEL, NOTIFICATION_NAME, NotificationManager.IMPORTANCE_DEFAULT);
            channel.enableLights(true);
            channel.setLightColor(RED);
            channel.enableVibration(true);
            channel.setSound(ringtoneManager, audioAttributes);
            notificationManager.createNotificationChannel(channel);
        }
    }
}
I have a DatePicker and a TimePicker; when you select a date and time and click the FAB, you should get notified at that particular time.
Somehow, changing .setLargeIcon to .setSmallIcon and referencing the image directly without converting it to a bitmap, e.g. .setSmallIcon(R.drawable.ic_done_white_24dp), solved the issue.
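In other words, a sketch of the changed builder call (a notification must have a small icon to be displayed, which is consistent with this fix):
NotificationCompat.Builder notification =
        new NotificationCompat.Builder(getApplicationContext(), NOTIFICATION_CHANNEL)
                // reference the drawable directly; a small icon is required for every notification
                .setSmallIcon(R.drawable.ic_done_white_24dp)
                .setContentTitle(titleNotification)
                .setContentText(subtitleNotification)
                .setContentIntent(pendingIntent)
                .setAutoCancel(true);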
