Tags: spring, spring-boot, junit, mockito, spring-kafka

Unit Testing Spring Cloud Stream Kafka


I am trying to unit test Kafka usage with Spring Boot but am having trouble with the input channel. Below is an extract of what I am doing.

public interface MyCustomStreamBinding {
   @Input
   SubscribableChannel consumeChannel();

   @Output
   MessageChannel produceChannel();
}

@EnableBinding(value = { Source.class, MyCustomStreamBinding.class })
public class StreamConfiguration {
...
}

@Service
public class MyService {

  private final MyCustomStreamBinding streamBinding;
  public MyService(MyCustomStreamBinding streamBinding) {
    this.streamBinding = streamBinding;
  }

  public void sendMessage() {
    streamBinding.produceChannel().send(MessageBuilder.withPayload(new SomeObject()).build());
  }

  @StreamListener("consumeChannel")
  public void consumeChannel(SomeObject payload){
    // do processing of payload
  }
}

Then in my test cases I have

@SpringBootTest(classes = {MyApp.class})
class MyServiceTest {
  private MyService myService;

  @Autowired
  private MyCustomStreamBinding streamBinding;
  @Autowired
  private MessageCollector messageCollector;

  @BeforeEach
  public void setup(){
    myService = new MyService(streamBinding);
  }

  @Test
  public void testMessaging() {
   myService.sendMessage();

   Message<?> m = messageCollector.forChannel(streamBinding.produceChannel()).poll();
   assertThat(m.getPayload(), equalTo(new SomeObject()));
  }
}

How do I test the consumeChannel and that it actually performed the processing as expected?


Solution

  • Here is an example composed of two bindings, one for consuming data and one for producing data. Together with @SpringBootTest you can disable the web environment using @SpringBootTest(webEnvironment = SpringBootTest.WebEnvironment.NONE, properties = {"server.port=0"}). Then use JUnit 5's @ExtendWith(SpringExtension.class) and the embedded Kafka cluster via @EmbeddedKafka(topics = {"output-topic"}, partitions = 1).

    Take this simple service, which receives data on the input binding process-in-0, transforms it to upper case, and emits the new data on the output binding process-out-0.

    public interface KafkaListenerBinding {
        @Input("process-in-0")
        KStream<String, String> inputStream();
    
        @Output("process-out-0")
        KStream<String, String> outStream();
    }
    
    @Slf4j // Lombok; provides the log field used below
    @Service
    @EnableBinding(KafkaListenerBinding.class)
    public class KafkaListenerService {
    
        @StreamListener("process-in-0")
        @SendTo("process-out-0")
        public KStream<String, String> transformToUpperCase(KStream<String, String> input) {
            return input
                    .peek((k, v) -> log.info("Received Input: {}", v))
                    .mapValues(v -> v.toUpperCase());
        }
    }
    

    To test it, use the embedded Kafka cluster; a real Kafka cluster does not have to be available. You can then point the binder at the embedded broker with the property brokers: ${spring.embedded.kafka.brokers}.

    @SpringBootTest(webEnvironment = SpringBootTest.WebEnvironment.NONE, properties = {"server.port=0"})
    @ExtendWith(SpringExtension.class)
    @EmbeddedKafka(topics = {"output-topic"}, partitions = 1)
    @TestPropertySource(properties = {
            "spring.kafka.producer.bootstrap-servers=${spring.embedded.kafka.brokers}",
            "spring.kafka.admin.properties.bootstrap.servers=${spring.embedded.kafka.brokers}"
    })
    public class KafkaListenerServiceTest {
    
        @Autowired
        EmbeddedKafkaBroker embeddedKafkaBroker;
        @SpyBean
        KafkaListenerService kafkaListenerServiceSpy;
        private Consumer<String, String> consumer;
    
        @BeforeEach
        public void setUp() {
            Map<String, Object> configs = new HashMap<>(KafkaTestUtils.consumerProps("group1", "true", embeddedKafkaBroker));
            consumer = new DefaultKafkaConsumerFactory<>(configs, new StringDeserializer(), new StringDeserializer()).createConsumer();
            embeddedKafkaBroker.consumeFromAllEmbeddedTopics(consumer);
        }
    
        @AfterEach
        public void tearDown() {
            consumer.close();
        }
    
        @Test
        public void simpleProcessorApplicationTest() throws ExecutionException, InterruptedException {
            Set<String> actualResultSet = new HashSet<>();
            Set<String> expectedResultSet = new HashSet<>();
            expectedResultSet.add("HELLO1");
            expectedResultSet.add("HELLO2");
    
            Map<String, Object> senderProps = producerProps(embeddedKafkaBroker);
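            // producerProps(...) above and getRecords(...) below are static
            // imports from org.springframework.kafka.test.utils.KafkaTestUtils.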
            DefaultKafkaProducerFactory<Integer, String> pf = new DefaultKafkaProducerFactory<>(senderProps);
            try {
                KafkaTemplate<Integer, String> template = new KafkaTemplate<>(pf, true);
                template.setDefaultTopic("input-topic");
    
                template.sendDefault("hello1").get();
                verify(kafkaListenerServiceSpy, times(1)).transformToUpperCase(isA(KStream.class));
    
                template.sendDefault("hello2").get();
                verify(kafkaListenerServiceSpy, times(1)).transformToUpperCase(isA(KStream.class));
    
                int receivedAll = 0;
                while (receivedAll < 2) {
                    ConsumerRecords<String, String> cr = getRecords(consumer);
                    receivedAll = receivedAll + cr.count();
                    cr.iterator().forEachRemaining(r -> {
                        System.out.println("result: " + r.value());
                        actualResultSet.add(r.value());
                    });
                }
    
                assertThat(actualResultSet).isEqualTo(expectedResultSet);
            } finally {
                pf.destroy();
            }
        }
    }
    

    Finally, configure your application.yml like this, making sure the schema registry is not enabled by setting schema.registry.url: not-used:

    spring:
      kafka:
        consumer:
          group-id: group-01
      cloud:
        stream:
          bindings:
            process-in-0:
              destination: input-topic
            process-out-0:
              destination: output-topic
          kafka:
            streams:
              binder:
                brokers: ${spring.embedded.kafka.brokers}
                configuration:
                  schema.registry.url: not-used
                  commit.interval.ms: 100
                  default.key.serde: org.apache.kafka.common.serialization.Serdes$StringSerde
                  default.value.serde: org.apache.kafka.common.serialization.Serdes$StringSerde
              bindings:
                process-in-0:
                  consumer:
                    valueSerde: org.apache.kafka.common.serialization.Serdes$StringSerde
                process-out-0:
                  producer:
                    valueSerde: org.apache.kafka.common.serialization.Serdes$StringSerde
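
    If you want to stay with the channel-based binding from the question rather than Kafka Streams, the test binder (spring-cloud-stream-test-support) lets you drive the input channel directly. Below is a minimal sketch reusing MyApp, MyCustomStreamBinding, MyService and SomeObject from the question; verifying through a @SpyBean is an assumption on my part, not the only way to assert the processing:

    @SpringBootTest(classes = MyApp.class)
    class MyServiceChannelTest {

        @Autowired
        private MyCustomStreamBinding streamBinding;

        // Spy on the real service bean so the @StreamListener wiring stays intact.
        @SpyBean
        private MyService myService;

        @Test
        void consumeChannelProcessesPayload() {
            // The test binder dispatches the message to the @StreamListener
            // without any Kafka broker being available.
            streamBinding.consumeChannel()
                    .send(MessageBuilder.withPayload(new SomeObject()).build());

            // Assert that the listener method actually processed the payload.
            verify(myService, times(1)).consumeChannel(any(SomeObject.class));
        }
    }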