kafka java 生产者消费者demo
2017-10-27 14:14
323 查看
生产者:
import java.io.File;import java.io.FileInputStream;
import java.util.Properties;
import org.apache.kafka.clients.producer.Callback;
import org.apache.kafka.clients.producer.KafkaProducer;
import org.apache.kafka.clients.producer.ProducerRecord;
import org.apache.kafka.clients.producer.RecordMetadata;
import com.alibaba.fastjson.JSON;
public class KafkaProduce {
    // Producer configuration, loaded once from kafka.properties at the classpath root.
    private static Properties properties;

    static {
        properties = new Properties();
        String path = KafkaProduce.class.getResource("/").getFile().toString() + "kafka.properties";
        // try-with-resources: the original leaked the FileInputStream when load() threw
        try (FileInputStream fis = new FileInputStream(new File(path))) {
            properties.load(fis);
        } catch (Exception e) {
            e.printStackTrace();
        }
    }

    /**
     * Sends one message to Kafka; the broker's acknowledgement (or failure)
     * is reported asynchronously through the send callback.
     *
     * @param topic topic to publish to
     * @param key   message key (used for partitioning)
     * @param value message payload
     */
    public void sendMsg(String topic, String key, String value) {
        System.err.println("properties:" + JSON.toJSONString(properties));
        // NOTE(review): creating (and closing) a producer per message is very
        // expensive; production code should reuse a single long-lived instance.
        KafkaProducer<String, String> kp = new KafkaProducer<String, String>(properties);
        ProducerRecord<String, String> pr = new ProducerRecord<String, String>(topic, key, value);
        try {
            kp.send(pr, new Callback() {
                // Invoked once the broker acks the record or the send fails.
                @Override
                public void onCompletion(RecordMetadata metadata, Exception exception) {
                    // BUGFIX: the original printed metadata.offset() inside the
                    // exception != null branch — but metadata is null on failure
                    // (NPE) and the offset is only meaningful on success.
                    if (exception != null) {
                        System.out.println("发送失败:" + exception.getMessage());
                        exception.printStackTrace();
                    } else {
                        System.out.println("记录的offset在:" + metadata.offset());
                    }
                }
            });
        } finally {
            // close() flushes pending records before releasing resources;
            // the finally block guarantees it runs even if send() throws.
            kp.close();
        }
    }
}
消费者:
import java.io.File;import java.io.FileInputStream;
import java.util.HashMap;
import java.util.List;
import java.util.Map;
import java.util.Properties;
import kafka.consumer.ConsumerConfig;
import kafka.consumer.ConsumerIterator;
import kafka.consumer.KafkaStream;
import kafka.javaapi.consumer.ConsumerConnector;
import kafka.message.MessageAndMetadata;
import kafka.serializer.StringDecoder;
import kafka.utils.VerifiableProperties;
public class KafkaConsume {
    // Topic consumed by this demo.
    private final static String TOPIC = "test";
    // Consumer configuration, loaded once from kafka.properties at the classpath root.
    private static Properties properties;

    static {
        properties = new Properties();
        String path = KafkaConsume.class.getResource("/").getFile().toString() + "kafka.properties";
        System.err.println("path:" + path);
        // try-with-resources: the original leaked the FileInputStream when load() threw
        try (FileInputStream fis = new FileInputStream(new File(path))) {
            properties.load(fis);
        } catch (Exception e) {
            e.printStackTrace();
        }
    }

    /**
     * Consumes key/value string messages from {@link #TOPIC} using the old
     * high-level consumer API, printing each record. Blocks forever: the
     * stream iterator's hasNext() waits for new messages.
     *
     * @throws Exception if the consumer cannot be created or reading fails
     */
    public static void getMsg() throws Exception {
        ConsumerConfig config = new ConsumerConfig(properties);
        // One stream (thread) for the topic.
        Map<String, Integer> topicCountMap = new HashMap<String, Integer>();
        topicCountMap.put(TOPIC, Integer.valueOf(1)); // valueOf: avoid deprecated new Integer(...)
        StringDecoder keyDecoder = new StringDecoder(new VerifiableProperties());
        StringDecoder valueDecoder = new StringDecoder(new VerifiableProperties());
        ConsumerConnector consumer = kafka.consumer.Consumer.createJavaConsumerConnector(config);
        Map<String, List<KafkaStream<String, String>>> consumerMap = consumer.createMessageStreams(topicCountMap, keyDecoder, valueDecoder);
        KafkaStream<String, String> stream = consumerMap.get(TOPIC).get(0);
        ConsumerIterator<String, String> it = stream.iterator();
        while (it.hasNext()) {
            MessageAndMetadata<String, String> keyValue = it.next();
            String key = keyValue.key();
            String value = keyValue.message();
            System.err.println("key:" + key + " ; value:" + value);
        }
    }
}
kafka.properties配置信息:
##produce
bootstrap.servers=10.20.135.32:9092
producer.type=sync
request.required.acks=1
serializer.class=kafka.serializer.DefaultEncoder
key.serializer=org.apache.kafka.common.serialization.StringSerializer
value.serializer=org.apache.kafka.common.serialization.StringSerializer
bak.partitioner.class=kafka.producer.DefaultPartitioner
bak.key.serializer=org.apache.kafka.common.serialization.StringSerializer
bak.value.serializer=org.apache.kafka.common.serialization.StringSerializer
##consume
zookeeper.connect=10.20.135.32:2181
group.id=test-consumer-group
zookeeper.session.timeout.ms=4000
zookeeper.sync.time.ms=200
enable.auto.commit=false
auto.commit.interval.ms=1000
auto.offset.reset=smallest
# NOTE: serializer.class is also set to kafka.serializer.DefaultEncoder in the
# producer section above; Properties.load keeps only the last value for a
# duplicate key, so this StringEncoder entry silently overrides the earlier one.
serializer.class=kafka.serializer.StringEncoder
相关文章推荐
- kafka-3python生产者和消费者实用demo
- Go语言模拟一个生产者消费者的Demo
- 在Windows环境中安装并使用kafka以及生产者消费者Demo
- Poco::Thread 生产者消费者Demo
- 图文并茂的生产者消费者应用实例demo
- RabbitMQ消息队列之二:消费者和生产者 Demo
- Java多线程之生产者消费者demo
- Java 写一个生产者和消费者的多线程Demo
- Napajs demo-单个生产者/消费者
- 生产者与消费者--demo1---bai
- 生产者与消费者---demo2---boke
- Napajs demo-多个生产者/消费者
- Disruptor多个消费者独立处理生产者消息的简单demo
- RocketMQ生产者消费者DEMO
- 多线程学习Demo注解(3)——生产者和消费者
- Disruptor多个消费者不重复处理生产者发送的消息的demo
- ActiveMQ 生产者和消费者demo
- 多线程生产者消费者Demo2_ Lock操作
- rabbitMQ 生产者、消费者、交换机 整合spring监听 DEMO
- [java线程同步]生产者消费者问题demo