1. pom.xml configuration

<dependencies>
    <dependency>
        <groupId>org.springframework.integration</groupId>
        <artifactId>spring-integration-kafka</artifactId>
        <version>1.1.1.RELEASE</version>
    </dependency>
</dependencies>
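
This single artifact should pull in Spring Integration core and the Kafka client libraries (the 0.8.x Scala client for this 1.1.x line) transitively. If you need to see exactly which versions end up on the classpath, for example to rule out a clash with an existing Kafka dependency, the standard Maven dependency report can be used; the group-id filter below is only an illustration:

    mvn dependency:tree -Dincludes=org.apache.kafka,org.springframework.integration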

2. producer.xml configuration

<?xml version="1.0" encoding="UTF-8"?>
<beans xmlns="http://www.springframework.org/schema/beans"
    xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance"
    xmlns:int="http://www.springframework.org/schema/integration"
    xmlns:int-kafka="http://www.springframework.org/schema/integration/kafka"
    xmlns:task="http://www.springframework.org/schema/task"
    xsi:schemaLocation="http://www.springframework.org/schema/integration/kafka http://www.springframework.org/schema/integration/kafka/spring-integration-kafka.xsd
        http://www.springframework.org/schema/integration http://www.springframework.org/schema/integration/spring-integration.xsd
        http://www.springframework.org/schema/beans http://www.springframework.org/schema/beans/spring-beans.xsd
        http://www.springframework.org/schema/task http://www.springframework.org/schema/task/spring-task.xsd">

    <!-- Queue channel the application writes to; the outbound adapter drains it. -->
    <int:channel id="inputToKafka">
        <int:queue />
    </int:channel>

    <!-- Outbound adapter: polls inputToKafka and sends messages through the producer context. -->
    <int-kafka:outbound-channel-adapter
        id="kafkaOutboundChannelAdapter"
        kafka-producer-context-ref="kafkaProducerContext"
        auto-startup="false" channel="inputToKafka" order="3">
        <int:poller fixed-delay="1000" time-unit="MILLISECONDS"
            receive-timeout="0" task-executor="taskExecutor" />
    </int-kafka:outbound-channel-adapter>

    <task:executor id="taskExecutor" pool-size="5"
        keep-alive="120" queue-capacity="500" />

    <!-- Native Kafka producer properties. -->
    <bean id="producerProperties"
        class="org.springframework.beans.factory.config.PropertiesFactoryBean">
        <property name="properties">
            <props>
                <prop key="topic.metadata.refresh.interval.ms">3600000</prop>
                <prop key="message.send.max.retries">5</prop>
                <prop key="serializer.class">kafka.serializer.StringEncoder</prop>
                <prop key="request.required.acks">1</prop>
            </props>
        </property>
    </bean>

    <int-kafka:producer-context id="kafkaProducerContext"
        producer-properties="producerProperties">
        <int-kafka:producer-configurations>
            <int-kafka:producer-configuration
                broker-list="10.20.0.248:9092" topic="test" compression-codec="default" />
        </int-kafka:producer-configurations>
    </int-kafka:producer-context>
</beans>
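
With this configuration, anything sent to the inputToKafka channel is buffered in the queue and flushed to the broker by the poller roughly once a second, using threads from taskExecutor. A quick way to confirm that messages actually reach the test topic is the console consumer shipped with the Kafka distribution; the script below is the old ZooKeeper-based consumer from Kafka 0.8.x, and the addresses are the ones used in this example:

    bin/kafka-console-consumer.sh --zookeeper 10.20.0.248:2181 --topic test --from-beginning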

3. consumer.xml configuration

<?xml version="1.0" encoding="UTF-8"?>
<beans xmlns="http://www.springframework.org/schema/beans"
    xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance"
    xmlns:int="http://www.springframework.org/schema/integration"
    xmlns:int-kafka="http://www.springframework.org/schema/integration/kafka"
    xmlns:task="http://www.springframework.org/schema/task"
    xsi:schemaLocation="http://www.springframework.org/schema/integration/kafka http://www.springframework.org/schema/integration/kafka/spring-integration-kafka.xsd
        http://www.springframework.org/schema/integration http://www.springframework.org/schema/integration/spring-integration.xsd
        http://www.springframework.org/schema/beans http://www.springframework.org/schema/beans/spring-beans.xsd
        http://www.springframework.org/schema/task http://www.springframework.org/schema/task/spring-task.xsd">

    <!-- Queue channel the inbound adapter fills and the application reads from. -->
    <int:channel id="inputFromKafka">
        <int:queue />
    </int:channel>

    <!-- Inbound adapter: polls the consumer context and puts fetched batches on inputFromKafka. -->
    <int-kafka:inbound-channel-adapter
        id="kafkaInboundChannelAdapter"
        kafka-consumer-context-ref="consumerContext"
        auto-startup="false" channel="inputFromKafka">
        <int:poller fixed-delay="10" time-unit="MILLISECONDS"
            max-messages-per-poll="5" />
    </int-kafka:inbound-channel-adapter>

    <!-- Native Kafka high-level consumer properties. -->
    <bean id="consumerProperties"
        class="org.springframework.beans.factory.config.PropertiesFactoryBean">
        <property name="properties">
            <props>
                <prop key="auto.offset.reset">smallest</prop>
                <prop key="socket.receive.buffer.bytes">10485760</prop> <!-- 10 MB -->
                <prop key="fetch.message.max.bytes">5242880</prop> <!-- 5 MB -->
                <prop key="auto.commit.interval.ms">1000</prop>
            </props>
        </property>
    </bean>

    <int-kafka:consumer-context id="consumerContext"
        consumer-timeout="4000" zookeeper-connect="zookeeperConnect"
        consumer-properties="consumerProperties">
        <int-kafka:consumer-configurations>
            <int-kafka:consumer-configuration group-id="mygroup" max-messages="5000">
                <int-kafka:topic id="test" streams="4" />
            </int-kafka:consumer-configuration>
            <!--
            <int-kafka:consumer-configuration group-id="default3"
                value-decoder="kafkaSpecificDecoder" key-decoder="kafkaReflectionDecoder"
                max-messages="10">
                <int-kafka:topic-filter pattern="regextopic.*" streams="4" exclude="false" />
            </int-kafka:consumer-configuration>
            -->
        </int-kafka:consumer-configurations>
    </int-kafka:consumer-context>

    <int-kafka:zookeeper-connect id="zookeeperConnect"
        zk-connect="10.20.0.248:2181" zk-connection-timeout="40000"
        zk-session-timeout="40000" zk-sync-time="200" />
</beans>
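
The consumer reads the test topic with four streams, so the topic should have at least four partitions for all streams to receive data. If the topic does not exist yet, it can be created with the kafka-topics.sh script from a Kafka 0.8.1+ distribution; the partition and replication counts below are illustrative, not requirements:

    bin/kafka-topics.sh --create --zookeeper 10.20.0.248:2181 --replication-factor 1 --partitions 4 --topic test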

4. Producer.java example

package kafka.spring_kafka;

import java.util.Random;

import org.springframework.context.support.ClassPathXmlApplicationContext;
import org.springframework.integration.support.MessageBuilder;
import org.springframework.messaging.MessageChannel;

public class Producer {

    // Classpath location of the producer configuration (section 2); the name here
    // must match the actual resource name.
    private static final String CONFIG = "/context.xml";
    private static final Random rand = new Random();

    public static void main(String[] args) {
        final ClassPathXmlApplicationContext ctx =
                new ClassPathXmlApplicationContext(CONFIG, Producer.class);
        // auto-startup is false in the XML, so start the adapters explicitly.
        ctx.start();

        final MessageChannel channel = ctx.getBean("inputToKafka", MessageChannel.class);
        for (int i = 0; i < 10; i++) {
            // Default: let the producer context use its configured topic.
            // boolean flag = channel.send(MessageBuilder.withPayload("message" + rand.nextInt()).build());
            // System.out.println(flag);

            // Set the message key and target topic through headers.
            boolean flag = channel.send(MessageBuilder
                    .withPayload("message" + rand.nextInt())
                    .setHeader("messageKey", String.valueOf(i))
                    .setHeader("topic", "test")
                    .build());
            System.out.println(flag);
        }
        ctx.close();
    }
}
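
Assuming the class and the XML file are on the classpath, the producer can be started straight from Maven with the exec plugin; the plugin is not declared in the pom.xml above, so this is just one convenient way to run it:

    mvn compile exec:java -Dexec.mainClass=kafka.spring_kafka.Producer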

5. Consumer.java example

package kafka.spring_kafka;

import java.util.Collection;
import java.util.HashMap;
import java.util.List;
import java.util.Map;
import java.util.Set;
import java.util.concurrent.ConcurrentHashMap;

import org.springframework.context.support.ClassPathXmlApplicationContext;
import org.springframework.integration.channel.QueueChannel;
import org.springframework.messaging.Message;

public class Consumer {

    // Classpath location of the consumer configuration (section 3); the name here
    // must match the actual resource name.
    private static final String CONFIG = "/consumer_context.xml";

    @SuppressWarnings({ "unchecked", "rawtypes" })
    public static void main(String[] args) {
        final ClassPathXmlApplicationContext ctx =
                new ClassPathXmlApplicationContext(CONFIG, Consumer.class);
        // auto-startup is false in the XML, so start the adapter explicitly.
        ctx.start();

        final QueueChannel channel = ctx.getBean("inputFromKafka", QueueChannel.class);
        Message msg;
        // Each payload is a map: topic -> (partition -> list of raw message bytes).
        // receive() without a timeout would block forever; with a timeout the loop
        // ends once no batch arrives within 10 seconds.
        while ((msg = channel.receive(10000)) != null) {
            HashMap map = (HashMap) msg.getPayload();
            Set<Map.Entry> set = map.entrySet();
            for (Map.Entry entry : set) {
                String topic = (String) entry.getKey();
                System.out.println("Topic:" + topic);
                ConcurrentHashMap<Integer, List<byte[]>> messages =
                        (ConcurrentHashMap<Integer, List<byte[]>>) entry.getValue();
                Collection<List<byte[]>> values = messages.values();
                for (List<byte[]> list : values) {
                    for (byte[] bytes : list) {
                        System.out.println("\tMessage: " + new String(bytes));
                    }
                }
            }
        }
        ctx.close();
    }
}
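
The consumer only prints something once the inbound adapter delivers batches, so there must be data on the topic. Besides running the Producer class above, test messages can be pushed in by hand with the console producer from the Kafka 0.8.x distribution, again using the broker address from this example:

    bin/kafka-console-producer.sh --broker-list 10.20.0.248:9092 --topic test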

References: https://www.oschina.net/code/snippet_1866237_52084 and http://colobu.com/2014/11/19/kafka-spring-integration-in-practice/
