java写kafka的生产者与消费者代码
2017-08-15 18:37
627 查看
1、环境说明:
OS:redhat6.5 ,cloudera-manager5.7 ,zookeeper-3.4.5 ,kafka-0.9.0
2、kafka的配置:
![](https://img-blog.csdn.net/20170815183912630?watermark/2/text/aHR0cDovL2Jsb2cuY3Nkbi5uZXQvbmFubmFuMTIzMg==/font/5a6L5L2T/fontsize/400/fill/I0JBQkFCMA==/dissolve/70/gravity/Center)
zookeeper和kafka都是默认配置
3、引用的包:
![](https://img-blog.csdn.net/20170815183938799)
4、producer程序:
package com.kafka;
import java.text.SimpleDateFormat;
import java.util.Date;
import java.util.Properties;
import java.util.concurrent.TimeUnit;
import kafka.javaapi.producer.Producer;
import kafka.producer.KeyedMessage;
import kafka.producer.ProducerConfig;
import kafka.serializer.StringEncoder;
/**
 * Sample producer for the legacy (0.8-style) Kafka Scala client API.
 * Sends one timestamped counter message per second to a fixed topic, forever.
 */
public class KafkaProducer extends Thread{
/** Name of the Kafka topic this producer publishes to. */
private String topic;
/**
 * Creates a producer thread for the given topic.
 *
 * @param topic name of a topic that already exists on the Kafka cluster
 */
public KafkaProducer(String topic){
super();
this.topic = topic;
}
/**
 * Sends a timestamped message every second until interrupted.
 */
@Override
public void run() {
Producer<Integer, String> producer = createProducer();
int i=0;
SimpleDateFormat df = new SimpleDateFormat("yyyy-MM-dd HH:mm:ss");// date format used in the payload
while(true){
// Build the payload once so the log line matches exactly what was sent.
// (The original code printed i after the post-increment, so the logged
// counter was always one ahead of the message actually delivered.)
String message = "时间:"+ df.format(new Date())+",message: " + i;
producer.send(new KeyedMessage<Integer, String>(topic, message));
System.out.println("发送时间:"+ df.format(new Date())+",message: " + i);
i++;
try {
TimeUnit.SECONDS.sleep(1);
} catch (InterruptedException e) {
// Restore the interrupt flag and exit cleanly instead of swallowing it
// and spinning on in an interrupted loop.
Thread.currentThread().interrupt();
return;
}
}
}
/**
 * Builds a legacy producer wired to the test cluster.
 *
 * @return a configured {@code Producer} instance
 */
private Producer<Integer, String> createProducer() {
Properties properties = new Properties();
properties.put("zookeeper.connect", "10.2.46.129:2181,10.2.46.130:2181,10.2.46.131:2181");// ZooKeeper ensemble addresses
properties.put("serializer.class", StringEncoder.class.getName());
properties.put("metadata.broker.list", "10.2.46.131:9092");// Kafka broker address
return new Producer<Integer, String>(new ProducerConfig(properties));
}
public static void main(String[] args) {
new KafkaProducer("333").start();// topic "333" must already exist on the cluster
}
}
5、consumer程序:
package com.kafka;
import java.text.SimpleDateFormat;
import java.util.Arrays;
import java.util.Date;
import java.util.HashMap;
import java.util.List;
import java.util.Map;
import java.util.Properties;
import org.apache.kafka.clients.consumer.ConsumerRecord;
import org.apache.kafka.clients.consumer.ConsumerRecords;
import org.apache.kafka.clients.consumer.KafkaConsumer;
import org.apache.kafka.common.TopicPartition;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import kafka.consumer.Consumer;
import kafka.consumer.ConsumerConfig;
import kafka.consumer.ConsumerIterator;
import kafka.consumer.KafkaStream;
import kafka.javaapi.consumer.ConsumerConnector;
/**
 * Sample consumer using the legacy (0.8-style) high-level Kafka consumer API.
 * Opens one stream on a fixed topic and prints every message as it arrives.
 */
public class KafkaConsumerEx {
private static final Logger logger = LoggerFactory.getLogger(KafkaConsumerEx.class);
public static void main(String[] args) {
KafkaConsumerEx kc =new KafkaConsumerEx();
kc.testConsumer();
}
/** Consumes from topic "333" — the topic the companion producer writes to. */
public void testConsumer()
{
consume("333");
}
/**
 * Opens a single consumer stream on the topic and prints each message.
 * Blocks forever: the high-level consumer iterator has no timeout configured.
 *
 * @param topic name of the topic to consume from
 */
private void consume(String topic)
{
ConsumerConnector consumer = createConsumer();
Map<String, Integer> topicCountMap = new HashMap<String, Integer>();
topicCountMap.put(topic, 1); // one consumer stream (thread) for this topic
Map<String, List<KafkaStream<byte[], byte[]>>>messageStreams = consumer.createMessageStreams(topicCountMap);
KafkaStream<byte[], byte[]> stream = messageStreams.get(topic).get(0);// the single stream requested above
ConsumerIterator<byte[], byte[]> iterator = stream.iterator();
SimpleDateFormat df = new SimpleDateFormat("yyyy-MM-dd HH:mm:ss");// date format for the log line
while(iterator.hasNext()){
String message = new String(iterator.next().message());
System.out.println("接收到时间:"+ df.format(new Date())+",message: " + message);
}
}
/**
 * Builds a high-level consumer connector for the test cluster.
 *
 * NOTE(review): this connect string uses a {@code /kafka} chroot while the
 * producer's zookeeper.connect does not — confirm which path the brokers
 * actually register under, otherwise producer and consumer see different clusters.
 *
 * @return a configured {@code ConsumerConnector}
 */
private ConsumerConnector createConsumer() {
Properties props = new Properties();
props.put("zookeeper.connect", "10.2.46.129:2181,10.2.46.130:2181,10.2.46.131:2181/kafka");// ZooKeeper ensemble (with chroot)
props.put("group.id", "555");
return Consumer.createJavaConsumerConnector(new ConsumerConfig(props));
}
}
OS:redhat6.5 ,cloudera-manager5.7 ,zookeeper-3.4.5 ,kafka-0.9.0
2、kafka的配置:
zookeeper和kafka都是默认配置
3、引用的包:
4、producer程序:
package com.kafka;
import java.text.SimpleDateFormat;
import java.util.Date;
import java.util.Properties;
import java.util.concurrent.TimeUnit;
import kafka.javaapi.producer.Producer;
import kafka.producer.KeyedMessage;
import kafka.producer.ProducerConfig;
import kafka.serializer.StringEncoder;
/**
 * Sample producer for the legacy (0.8-style) Kafka Scala client API.
 * Sends one timestamped counter message per second to a fixed topic, forever.
 */
public class KafkaProducer extends Thread{
/** Name of the Kafka topic this producer publishes to. */
private String topic;
/**
 * Creates a producer thread for the given topic.
 *
 * @param topic name of a topic that already exists on the Kafka cluster
 */
public KafkaProducer(String topic){
super();
this.topic = topic;
}
/**
 * Sends a timestamped message every second until interrupted.
 */
@Override
public void run() {
Producer<Integer, String> producer = createProducer();
int i=0;
SimpleDateFormat df = new SimpleDateFormat("yyyy-MM-dd HH:mm:ss");// date format used in the payload
while(true){
// Build the payload once so the log line matches exactly what was sent.
// (The original code printed i after the post-increment, so the logged
// counter was always one ahead of the message actually delivered.)
String message = "时间:"+ df.format(new Date())+",message: " + i;
producer.send(new KeyedMessage<Integer, String>(topic, message));
System.out.println("发送时间:"+ df.format(new Date())+",message: " + i);
i++;
try {
TimeUnit.SECONDS.sleep(1);
} catch (InterruptedException e) {
// Restore the interrupt flag and exit cleanly instead of swallowing it
// and spinning on in an interrupted loop.
Thread.currentThread().interrupt();
return;
}
}
}
/**
 * Builds a legacy producer wired to the test cluster.
 *
 * @return a configured {@code Producer} instance
 */
private Producer<Integer, String> createProducer() {
Properties properties = new Properties();
properties.put("zookeeper.connect", "10.2.46.129:2181,10.2.46.130:2181,10.2.46.131:2181");// ZooKeeper ensemble addresses
properties.put("serializer.class", StringEncoder.class.getName());
properties.put("metadata.broker.list", "10.2.46.131:9092");// Kafka broker address
return new Producer<Integer, String>(new ProducerConfig(properties));
}
public static void main(String[] args) {
new KafkaProducer("333").start();// topic "333" must already exist on the cluster
}
}
5、consumer程序:
package com.kafka;
import java.text.SimpleDateFormat;
import java.util.Arrays;
import java.util.Date;
import java.util.HashMap;
import java.util.List;
import java.util.Map;
import java.util.Properties;
import org.apache.kafka.clients.consumer.ConsumerRecord;
import org.apache.kafka.clients.consumer.ConsumerRecords;
import org.apache.kafka.clients.consumer.KafkaConsumer;
import org.apache.kafka.common.TopicPartition;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import kafka.consumer.Consumer;
import kafka.consumer.ConsumerConfig;
import kafka.consumer.ConsumerIterator;
import kafka.consumer.KafkaStream;
import kafka.javaapi.consumer.ConsumerConnector;
/**
 * Sample consumer using the legacy (0.8-style) high-level Kafka consumer API.
 * Opens one stream on a fixed topic and prints every message as it arrives.
 */
public class KafkaConsumerEx {
private static final Logger logger = LoggerFactory.getLogger(KafkaConsumerEx.class);
public static void main(String[] args) {
KafkaConsumerEx kc =new KafkaConsumerEx();
kc.testConsumer();
}
/** Consumes from topic "333" — the topic the companion producer writes to. */
public void testConsumer()
{
consume("333");
}
/**
 * Opens a single consumer stream on the topic and prints each message.
 * Blocks forever: the high-level consumer iterator has no timeout configured.
 *
 * @param topic name of the topic to consume from
 */
private void consume(String topic)
{
ConsumerConnector consumer = createConsumer();
Map<String, Integer> topicCountMap = new HashMap<String, Integer>();
topicCountMap.put(topic, 1); // one consumer stream (thread) for this topic
Map<String, List<KafkaStream<byte[], byte[]>>>messageStreams = consumer.createMessageStreams(topicCountMap);
KafkaStream<byte[], byte[]> stream = messageStreams.get(topic).get(0);// the single stream requested above
ConsumerIterator<byte[], byte[]> iterator = stream.iterator();
SimpleDateFormat df = new SimpleDateFormat("yyyy-MM-dd HH:mm:ss");// date format for the log line
while(iterator.hasNext()){
String message = new String(iterator.next().message());
System.out.println("接收到时间:"+ df.format(new Date())+",message: " + message);
}
}
/**
 * Builds a high-level consumer connector for the test cluster.
 *
 * NOTE(review): this connect string uses a {@code /kafka} chroot while the
 * producer's zookeeper.connect does not — confirm which path the brokers
 * actually register under, otherwise producer and consumer see different clusters.
 *
 * @return a configured {@code ConsumerConnector}
 */
private ConsumerConnector createConsumer() {
Properties props = new Properties();
props.put("zookeeper.connect", "10.2.46.129:2181,10.2.46.130:2181,10.2.46.131:2181/kafka");// ZooKeeper ensemble (with chroot)
props.put("group.id", "555");
return Consumer.createJavaConsumerConnector(new ConsumerConfig(props));
}
}
相关文章推荐
- Kafka java api-消费者代码与消费分析、生产者消费者配置文件详解
- kafka生产者与消费者java代码示例
- kafka生产者和消费者的javaAPI的示例代码
- kafka集群搭建和使用Java写kafka生产者消费者
- kafka生产者、消费者代码示例
- Kafka生产者消费者java示例(包含Avro序列化)
- kafka生产者和消费者的javaAPI demo
- 【Java并发编程】之十三:生产者—消费者模型(含代码)
- 手撕代码(三)Java多线程模拟生产者消费者方法
- 生产者——消费者模型的java代码实现
- kafka集群搭建和使用Java写kafka生产者消费者
- java挑战高并发 之(11):生产者—消费者模型(含代码)
- kafka 0.10.1.0 生产者消费者代码
- kafka集群搭建和使用Java写kafka生产者消费者
- Java线程同步:生产者-消费者 模型(代码示例)
- kafka集群搭建和使用Java写kafka生产者消费者
- kafka集群搭建和使用Java写kafka生产者消费者
- kafka集群搭建和使用Java写kafka生产者消费者
- 使用java创建kafka的生产者和消费者
- Java 多线程 生产者与消费者问题测试代码