<dependency>
<groupId>org.springframework.kafka</groupId>
<artifactId>spring-kafka</artifactId>
</dependency>
The spring-kafka version is inherited from the Spring Boot dependency management (spring-boot-starter-parent / spring-boot-dependencies), so it does not need to be declared explicitly.
Enabling spring-kafka
Add the following annotation to a Spring Boot configuration class:
@EnableKafka
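A minimal sketch of such a configuration class (the class name KafkaConfig is illustrative). @EnableScheduling is added here as well because the usage example further below sends test messages with @Scheduled:

package com.htsc.thfx.kafka;

import org.springframework.context.annotation.Configuration;
import org.springframework.kafka.annotation.EnableKafka;
import org.springframework.scheduling.annotation.EnableScheduling;

// Illustrative configuration class; the class name is an assumption.
@Configuration
@EnableKafka        // enables detection of @KafkaListener-annotated methods
@EnableScheduling   // required because the example below sends messages via @Scheduled
public class KafkaConfig {
}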
Configuring Kafka
Add the following configuration to application.properties.
Producer
#Kafka producer
spring.kafka.producer.bootstrap-servers=168.61.2.47:9092,168.61.2.48:9092,168.61.2.49:9092
spring.kafka.producer.key-serializer=org.apache.kafka.common.serialization.StringSerializer
spring.kafka.producer.value-serializer=org.apache.kafka.common.serialization.ByteArraySerializer
spring.kafka.producer.batch-size=65536
spring.kafka.producer.buffer-memory=524288
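Spring Boot's auto-configuration turns these properties into a ProducerFactory and a KafkaTemplate, so no extra code is needed. For reference, an equivalent manual bean definition would look roughly like the sketch below (the class name ProducerBeansConfig is illustrative; all values are copied from the properties above):

import java.util.HashMap;
import java.util.Map;

import org.apache.kafka.clients.producer.ProducerConfig;
import org.apache.kafka.common.serialization.ByteArraySerializer;
import org.apache.kafka.common.serialization.StringSerializer;
import org.springframework.context.annotation.Bean;
import org.springframework.context.annotation.Configuration;
import org.springframework.kafka.core.DefaultKafkaProducerFactory;
import org.springframework.kafka.core.KafkaTemplate;
import org.springframework.kafka.core.ProducerFactory;

// Illustrative equivalent of the producer properties above; normally Spring Boot builds these beans itself.
@Configuration
public class ProducerBeansConfig {

    @Bean
    public ProducerFactory<String, byte[]> producerFactory() {
        Map<String, Object> props = new HashMap<>();
        props.put(ProducerConfig.BOOTSTRAP_SERVERS_CONFIG,
                "168.61.2.47:9092,168.61.2.48:9092,168.61.2.49:9092");
        props.put(ProducerConfig.KEY_SERIALIZER_CLASS_CONFIG, StringSerializer.class);
        props.put(ProducerConfig.VALUE_SERIALIZER_CLASS_CONFIG, ByteArraySerializer.class);
        props.put(ProducerConfig.BATCH_SIZE_CONFIG, 65536);
        props.put(ProducerConfig.BUFFER_MEMORY_CONFIG, 524288);
        return new DefaultKafkaProducerFactory<>(props);
    }

    @Bean
    public KafkaTemplate<String, byte[]> kafkaTemplate() {
        return new KafkaTemplate<>(producerFactory());
    }
}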
Consumer
#Kafka consumer
spring.kafka.consumer.group-id=thfx00
spring.kafka.consumer.auto-offset-reset=earliest
spring.kafka.consumer.bootstrap-servers=168.61.2.47:9092,168.61.2.48:9092,168.61.2.49:9092
spring.kafka.consumer.key-deserializer=org.apache.kafka.common.serialization.StringDeserializer
spring.kafka.consumer.value-deserializer=org.apache.kafka.common.serialization.ByteArrayDeserializer
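Likewise, the consumer properties are turned into a ConsumerFactory and the default kafkaListenerContainerFactory by auto-configuration. A rough manual equivalent, as a sketch (the class name ConsumerBeansConfig is illustrative):

import java.util.HashMap;
import java.util.Map;

import org.apache.kafka.clients.consumer.ConsumerConfig;
import org.apache.kafka.common.serialization.ByteArrayDeserializer;
import org.apache.kafka.common.serialization.StringDeserializer;
import org.springframework.context.annotation.Bean;
import org.springframework.context.annotation.Configuration;
import org.springframework.kafka.config.ConcurrentKafkaListenerContainerFactory;
import org.springframework.kafka.core.ConsumerFactory;
import org.springframework.kafka.core.DefaultKafkaConsumerFactory;

// Illustrative equivalent of the consumer properties above; normally Spring Boot builds these beans itself.
@Configuration
public class ConsumerBeansConfig {

    @Bean
    public ConsumerFactory<String, byte[]> consumerFactory() {
        Map<String, Object> props = new HashMap<>();
        props.put(ConsumerConfig.BOOTSTRAP_SERVERS_CONFIG,
                "168.61.2.47:9092,168.61.2.48:9092,168.61.2.49:9092");
        props.put(ConsumerConfig.GROUP_ID_CONFIG, "thfx00");
        props.put(ConsumerConfig.AUTO_OFFSET_RESET_CONFIG, "earliest");
        props.put(ConsumerConfig.KEY_DESERIALIZER_CLASS_CONFIG, StringDeserializer.class);
        props.put(ConsumerConfig.VALUE_DESERIALIZER_CLASS_CONFIG, ByteArrayDeserializer.class);
        return new DefaultKafkaConsumerFactory<>(props);
    }

    // @KafkaListener methods use the bean named kafkaListenerContainerFactory by default
    @Bean
    public ConcurrentKafkaListenerContainerFactory<String, byte[]> kafkaListenerContainerFactory() {
        ConcurrentKafkaListenerContainerFactory<String, byte[]> factory =
                new ConcurrentKafkaListenerContainerFactory<>();
        factory.setConsumerFactory(consumerFactory());
        return factory;
    }
}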
Usage in the project
package com.htsc.thfx.kafka;

import com.google.protobuf.InvalidProtocolBufferException;
import com.htsc.mdc.model.MDSecurityRecordProtos;
import org.apache.kafka.clients.consumer.ConsumerRecord;
import org.springframework.kafka.annotation.KafkaListener;
import org.springframework.kafka.core.KafkaTemplate;
import org.springframework.scheduling.annotation.Scheduled;
import org.springframework.stereotype.Component;

import javax.annotation.Resource;
import java.nio.charset.StandardCharsets;
@Component
public class KafkaTest {

    // The template's generic types must match the producer key/value serializer configuration
    @Resource
    private KafkaTemplate<String, byte[]> template;

    // Requires @EnableScheduling on a configuration class; sends test messages every 10 seconds
    @Scheduled(fixedRate = 1000 * 10)
    public void send() {
        template.send("thfx-test00", "2", "你好00".getBytes(StandardCharsets.UTF_8));
        template.send("thfx-test01", "2", "你好01".getBytes(StandardCharsets.UTF_8));
    }

    // The listener's generic types must match the consumer key/value deserializer configuration
    @KafkaListener(topics = {"thfx-test00", "thfx-test01"})
    public void consumerRecord00(ConsumerRecord<String, byte[]> record) {
        String s = new String(record.value(), StandardCharsets.UTF_8);
        System.out.println(s);
    }

    // Deserializes protobuf-encoded market data records from the PT-MDC-XSHG-INDEXTYPE topic
    @KafkaListener(topics = {"PT-MDC-XSHG-INDEXTYPE"})
    public void consumerRecord01(ConsumerRecord<String, byte[]> record) throws InvalidProtocolBufferException {
        MDSecurityRecordProtos.MDSecurityRecord mdSecurityRecord =
                MDSecurityRecordProtos.MDSecurityRecord.parseFrom(record.value());
        String s = mdSecurityRecord.toString();
        System.out.println("PT-MDC-XSHG-INDEXTYPE : " + s);
    }
}
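KafkaTemplate.send() is asynchronous, so the calls above return before the broker acknowledges the message. If you need to confirm delivery, a callback can be attached to the returned future. A sketch, assuming spring-kafka 2.x (where send() returns a ListenableFuture; in spring-kafka 3.x it returns a CompletableFuture and whenComplete(...) would be used instead) and an illustrative class name:

package com.htsc.thfx.kafka;

import java.nio.charset.StandardCharsets;

import javax.annotation.Resource;

import org.springframework.kafka.core.KafkaTemplate;
import org.springframework.stereotype.Component;

// Illustrative sender with a delivery callback; class and method names are assumptions.
@Component
public class KafkaCallbackSender {

    @Resource
    private KafkaTemplate<String, byte[]> template;

    public void sendWithCallback() {
        template.send("thfx-test00", "2", "你好00".getBytes(StandardCharsets.UTF_8))
                .addCallback(
                        result -> System.out.println("sent to "
                                + result.getRecordMetadata().topic() + "-"
                                + result.getRecordMetadata().partition()),
                        ex -> System.err.println("send failed: " + ex.getMessage()));
    }
}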