A Tour of Kafka in Practice: Setting Up the Kafka Development Environment
In the previous article we set up the Kafka server and used Kafka's command-line tools to create a topic and send and receive messages. Now let's set up a development environment.

Add the dependency

The development environment needs Kafka's jar; with Maven, add the following to the pom.xml:

    <dependency>
        <groupId>org.apache.kafka</groupId>
        <artifactId>kafka_2.10</artifactId>
        <version>0.8.0</version>
    </dependency>
First, collect the connection settings that the examples share in one place:

    package com.sohu.kafkademon;

    public interface KafkaProperties
    {
        final static String zkConnect = "10.22.10.139:2181";
        final static String groupId = "group1";
        final static String topic = "topic1";
        // The remaining constants are not used by the examples in this article.
        final static String kafkaServerURL = "10.22.10.139";
        final static int kafkaServerPort = 9092;
        final static int kafkaProducerBufferSize = 64 * 1024;
        final static int connectionTimeOut = 20000;
        final static int reconnectInterval = 10000;
        final static String topic2 = "topic2";
        final static String topic3 = "topic3";
        final static String clientId = "SimpleConsumerDemoClient";
    }
Next, the producer: a thread that publishes a new message to the topic every three seconds.

    package com.sohu.kafkademon;

    import java.util.Properties;

    import kafka.producer.KeyedMessage;
    import kafka.producer.ProducerConfig;

    /**
     * @author leicui bourne_cui@163.com
     */
    public class KafkaProducer extends Thread
    {
        private final kafka.javaapi.producer.Producer<Integer, String> producer;
        private final String topic;
        private final Properties props = new Properties();

        public KafkaProducer(String topic)
        {
            // Values are encoded as strings and sent to the broker listed in metadata.broker.list.
            props.put("serializer.class", "kafka.serializer.StringEncoder");
            props.put("metadata.broker.list", "10.22.10.139:9092");
            producer = new kafka.javaapi.producer.Producer<Integer, String>(new ProducerConfig(props));
            this.topic = topic;
        }

        @Override
        public void run()
        {
            int messageNo = 1;
            while (true)
            {
                String messageStr = "Message_" + messageNo;
                System.out.println("Send:" + messageStr);
                // No key is supplied, so the producer picks the partition itself.
                producer.send(new KeyedMessage<Integer, String>(topic, messageStr));
                messageNo++;
                try {
                    sleep(3000);
                } catch (InterruptedException e) {
                    e.printStackTrace();
                }
            }
        }
    }
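The demo producer loops forever and never releases its sockets. As a minimal sketch of a bounded variant (the class name, the message count of 10, and the added request.required.acks setting are illustrative assumptions, not part of the original demo), the same 0.8 "old" producer API can send a fixed number of messages and then call close():

    package com.sohu.kafkademon;

    import java.util.Properties;

    import kafka.javaapi.producer.Producer;
    import kafka.producer.KeyedMessage;
    import kafka.producer.ProducerConfig;

    // Hypothetical variant, not part of the original demo: send a fixed number of
    // messages and then close the producer so its network resources are released.
    public class BoundedProducerDemo
    {
        public static void main(String[] args)
        {
            Properties props = new Properties();
            props.put("serializer.class", "kafka.serializer.StringEncoder");
            props.put("metadata.broker.list", "10.22.10.139:9092");
            // Wait for the partition leader to acknowledge each send instead of fire-and-forget.
            props.put("request.required.acks", "1");
            Producer<Integer, String> producer =
                    new Producer<Integer, String>(new ProducerConfig(props));
            for (int i = 1; i <= 10; i++) {
                producer.send(new KeyedMessage<Integer, String>(KafkaProperties.topic, "Message_" + i));
            }
            producer.close();
        }
    }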
The consumer uses the high-level consumer API: it connects through ZooKeeper, subscribes to the topic, and prints every message it receives.

    package com.sohu.kafkademon;

    import java.util.HashMap;
    import java.util.List;
    import java.util.Map;
    import java.util.Properties;

    import kafka.consumer.ConsumerConfig;
    import kafka.consumer.ConsumerIterator;
    import kafka.consumer.KafkaStream;
    import kafka.javaapi.consumer.ConsumerConnector;

    /**
     * @author leicui bourne_cui@163.com
     */
    public class KafkaConsumer extends Thread
    {
        private final ConsumerConnector consumer;
        private final String topic;

        public KafkaConsumer(String topic)
        {
            consumer = kafka.consumer.Consumer.createJavaConsumerConnector(
                    createConsumerConfig());
            this.topic = topic;
        }

        private static ConsumerConfig createConsumerConfig()
        {
            Properties props = new Properties();
            props.put("zookeeper.connect", KafkaProperties.zkConnect);
            props.put("group.id", KafkaProperties.groupId);
            props.put("zookeeper.session.timeout.ms", "40000");
            props.put("zookeeper.sync.time.ms", "200");
            props.put("auto.commit.interval.ms", "1000");
            return new ConsumerConfig(props);
        }

        @Override
        public void run()
        {
            // Ask for one stream (one consumer thread) for the topic.
            Map<String, Integer> topicCountMap = new HashMap<String, Integer>();
            topicCountMap.put(topic, new Integer(1));
            Map<String, List<KafkaStream<byte[], byte[]>>> consumerMap = consumer.createMessageStreams(topicCountMap);
            KafkaStream<byte[], byte[]> stream = consumerMap.get(topic).get(0);
            ConsumerIterator<byte[], byte[]> it = stream.iterator();
            // hasNext() blocks until the next message is available.
            while (it.hasNext()) {
                System.out.println("receive:" + new String(it.next().message()));
                try {
                    sleep(3000);
                } catch (InterruptedException e) {
                    e.printStackTrace();
                }
            }
        }
    }
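The consumer above also runs forever. As a rough sketch (the class name and the count of five messages are illustrative assumptions), the same high-level API can be driven from a plain main method that reads a few messages and then calls shutdown(), so the connector deregisters from ZooKeeper and stops its fetcher threads cleanly:

    package com.sohu.kafkademon;

    import java.util.HashMap;
    import java.util.List;
    import java.util.Map;
    import java.util.Properties;

    import kafka.consumer.ConsumerConfig;
    import kafka.consumer.ConsumerIterator;
    import kafka.consumer.KafkaStream;
    import kafka.javaapi.consumer.ConsumerConnector;

    // Hypothetical variant, not part of the original demo: read a handful of
    // messages and then shut the connector down instead of looping forever.
    public class KafkaConsumerShutdownDemo
    {
        public static void main(String[] args)
        {
            Properties props = new Properties();
            props.put("zookeeper.connect", KafkaProperties.zkConnect);
            props.put("group.id", KafkaProperties.groupId);
            props.put("zookeeper.session.timeout.ms", "40000");
            props.put("zookeeper.sync.time.ms", "200");
            props.put("auto.commit.interval.ms", "1000");
            ConsumerConnector consumer =
                    kafka.consumer.Consumer.createJavaConsumerConnector(new ConsumerConfig(props));

            Map<String, Integer> topicCountMap = new HashMap<String, Integer>();
            topicCountMap.put(KafkaProperties.topic, 1);
            Map<String, List<KafkaStream<byte[], byte[]>>> streams =
                    consumer.createMessageStreams(topicCountMap);
            ConsumerIterator<byte[], byte[]> it =
                    streams.get(KafkaProperties.topic).get(0).iterator();

            // hasNext() blocks until a message arrives; stop after five messages.
            for (int i = 0; i < 5 && it.hasNext(); i++) {
                System.out.println("receive:" + new String(it.next().message()));
            }
            consumer.shutdown();
        }
    }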
Finally, a small driver that starts the producer and consumer threads together.

    package com.sohu.kafkademon;

    /**
     * @author leicui bourne_cui@163.com
     */
    public class KafkaConsumerProducerDemo
    {
        public static void main(String[] args)
        {
            KafkaProducer producerThread = new KafkaProducer(KafkaProperties.topic);
            producerThread.start();
            KafkaConsumer consumerThread = new KafkaConsumer(KafkaProperties.topic);
            consumerThread.start();
        }
    }
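To try the demo, make sure the ZooKeeper and Kafka broker set up in the previous article are running, and that topic1 exists (if the broker's auto.create.topics.enable setting allows it, the topic will be created on first use). Once main() starts, the console should interleave the producer's "Send:Message_N" lines with the consumer's "receive:Message_N" lines, roughly one pair every three seconds; the consumer may miss the first message or two sent before it finishes registering with ZooKeeper, since by default it starts reading from the latest offset.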