23 | 23 | import static org.springframework.kafka.test.assertj.KafkaConditions.timestamp;
24 | 24 | import static org.springframework.kafka.test.assertj.KafkaConditions.value;
25 | 25 |
   | 26 | +import java.util.ArrayList;
26 | 27 | import java.util.Iterator;
27 | 28 | import java.util.List;
28 | 29 | import java.util.Map;
29 | 30 | import java.util.concurrent.CountDownLatch;
30 | 31 | import java.util.concurrent.TimeUnit;
   | 32 | +import java.util.concurrent.atomic.AtomicInteger;
31 | 33 | import java.util.concurrent.atomic.AtomicReference;
32 | 34 |
33 | 35 | import org.apache.kafka.clients.consumer.Consumer;

49 | 51 | import org.junit.Test;
50 | 52 |
51 | 53 | import org.springframework.kafka.support.Acknowledgment;
   | 54 | +import org.springframework.kafka.support.CompositeProducerListener;
52 | 55 | import org.springframework.kafka.support.DefaultKafkaHeaderMapper;
53 | 56 | import org.springframework.kafka.support.KafkaHeaders;
54 | 57 | import org.springframework.kafka.support.ProducerListener;

@@ -242,23 +245,44 @@ public void withListener() throws Exception {
242 | 245 |         DefaultKafkaProducerFactory<Integer, String> pf = new DefaultKafkaProducerFactory<>(senderProps);
243 | 246 |         KafkaTemplate<Integer, String> template = new KafkaTemplate<>(pf);
244 | 247 |         template.setDefaultTopic(INT_KEY_TOPIC);
245 |     | -        final CountDownLatch latch = new CountDownLatch(1);
246 |     | -        template.setProducerListener(new ProducerListener<Integer, String>() {
    | 248 | +        final CountDownLatch latch = new CountDownLatch(2);
    | 249 | +        final List<ProducerRecord<Integer, String>> records = new ArrayList<>();
    | 250 | +        final List<RecordMetadata> meta = new ArrayList<>();
    | 251 | +        final AtomicInteger onErrorDelegateCalls = new AtomicInteger();
    | 252 | +        class PL implements ProducerListener<Integer, String> {
247 | 253 |
248 | 254 |             @Override
249 |     | -            public void onSuccess(String topic, Integer partition, Integer key, String value,
250 |     | -                    RecordMetadata recordMetadata) {
    | 255 | +            public void onSuccess(ProducerRecord<Integer, String> record, RecordMetadata recordMetadata) {
    | 256 | +                records.add(record);
    | 257 | +                meta.add(recordMetadata);
251 | 258 |                 latch.countDown();
252 | 259 |             }
253 | 260 |
254 |     | -        });
    | 261 | +            @Override
    | 262 | +            public void onError(ProducerRecord<Integer, String> producerRecord, Exception exception) {
    | 263 | +                assertThat(producerRecord).isNotNull();
    | 264 | +                assertThat(exception).isNotNull();
    | 265 | +                onErrorDelegateCalls.incrementAndGet();
    | 266 | +            }
    | 267 | +
    | 268 | +        }
    | 269 | +        PL pl1 = new PL();
    | 270 | +        PL pl2 = new PL();
    | 271 | +        CompositeProducerListener<Integer, String> cpl = new CompositeProducerListener<>(pl1, pl2);
    | 272 | +        template.setProducerListener(cpl);
255 | 273 |         template.sendDefault("foo");
256 | 274 |         template.flush();
257 | 275 |         assertThat(latch.await(10, TimeUnit.SECONDS)).isTrue();
    | 276 | +        assertThat(records.get(0).value()).isEqualTo("foo");
    | 277 | +        assertThat(records.get(1).value()).isEqualTo("foo");
    | 278 | +        assertThat(meta.get(0).topic()).isEqualTo(INT_KEY_TOPIC);
    | 279 | +        assertThat(meta.get(1).topic()).isEqualTo(INT_KEY_TOPIC);
258 | 280 |
259 | 281 |         //Drain the topic
260 | 282 |         KafkaTestUtils.getSingleRecord(consumer, INT_KEY_TOPIC);
261 | 283 |         pf.destroy();
    | 284 | +        cpl.onError(records.get(0), new RuntimeException("x"));
    | 285 | +        assertThat(onErrorDelegateCalls.get()).isEqualTo(2);
262 | 286 |     }
263 | 287 |
264 | 288 |     @Test
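
For readers skimming the change: the hunk above replaces the old topic/partition/key/value ProducerListener callbacks with the ProducerRecord-based onSuccess/onError signatures, and registers two delegates at once through the newly imported CompositeProducerListener. A minimal, self-contained sketch of that wiring outside the test harness might look like the code below. The class name, broker address ("localhost:9092"), and topic name ("some.topic") are illustrative assumptions; the template calls (setProducerListener, sendDefault, flush, destroy) are the same ones exercised in the test.

// Sketch only: registering two ProducerListeners on one KafkaTemplate via
// CompositeProducerListener, mirroring the pattern used in the test above.
// Broker address, topic name, and class names are assumed for illustration.
import java.util.HashMap;
import java.util.Map;

import org.apache.kafka.clients.producer.ProducerConfig;
import org.apache.kafka.clients.producer.ProducerRecord;
import org.apache.kafka.clients.producer.RecordMetadata;
import org.apache.kafka.common.serialization.IntegerSerializer;
import org.apache.kafka.common.serialization.StringSerializer;

import org.springframework.kafka.core.DefaultKafkaProducerFactory;
import org.springframework.kafka.core.KafkaTemplate;
import org.springframework.kafka.support.CompositeProducerListener;
import org.springframework.kafka.support.ProducerListener;

public class CompositeProducerListenerSketch {

    public static void main(String[] args) {
        Map<String, Object> props = new HashMap<>();
        props.put(ProducerConfig.BOOTSTRAP_SERVERS_CONFIG, "localhost:9092"); // assumed broker
        props.put(ProducerConfig.KEY_SERIALIZER_CLASS_CONFIG, IntegerSerializer.class);
        props.put(ProducerConfig.VALUE_SERIALIZER_CLASS_CONFIG, StringSerializer.class);

        DefaultKafkaProducerFactory<Integer, String> pf = new DefaultKafkaProducerFactory<>(props);
        KafkaTemplate<Integer, String> template = new KafkaTemplate<>(pf);
        template.setDefaultTopic("some.topic"); // assumed topic

        // First delegate: log every outcome.
        ProducerListener<Integer, String> logging = new ProducerListener<Integer, String>() {

            @Override
            public void onSuccess(ProducerRecord<Integer, String> record, RecordMetadata metadata) {
                System.out.println("sent '" + record.value() + "' to " + metadata.topic());
            }

            @Override
            public void onError(ProducerRecord<Integer, String> record, Exception exception) {
                System.err.println("failed to send '" + record.value() + "': " + exception.getMessage());
            }

        };

        // Second delegate: stands in for metrics, alerting, etc.
        ProducerListener<Integer, String> counting = new ProducerListener<Integer, String>() {

            @Override
            public void onSuccess(ProducerRecord<Integer, String> record, RecordMetadata metadata) {
                // e.g. increment a success counter here
            }

            @Override
            public void onError(ProducerRecord<Integer, String> record, Exception exception) {
                // e.g. increment a failure counter here
            }

        };

        // The composite forwards each callback to every delegate it wraps.
        template.setProducerListener(new CompositeProducerListener<>(logging, counting));
        template.sendDefault("foo");
        template.flush();
        pf.destroy();
    }

}

The composite simply fans each onSuccess and onError callback out to all of its delegates, which is what the final two assertions in the test verify by invoking cpl.onError once and expecting both delegates to record the call.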