flume与Mosquitto的集成
文章来自:http://www.cnblogs.com/hark0623/p/4173714.html 转发请注明
因业务需求,需要flume收集MQTT(Mosquitto)的数据。 方法就是flume自定义source,source中来订阅(subscribe)MQTT
flume source的java代码如下:
package com.yhx.sensor.flume.source;

import java.util.HashMap;
import java.util.Map;

import org.apache.flume.Context;
import org.apache.flume.Event;
import org.apache.flume.EventDrivenSource;
import org.apache.flume.conf.Configurable;
import org.apache.flume.event.EventBuilder;
import org.apache.flume.source.AbstractSource;
import org.eclipse.paho.client.mqttv3.IMqttDeliveryToken;
import org.eclipse.paho.client.mqttv3.MqttCallback;
import org.eclipse.paho.client.mqttv3.MqttClient;
import org.eclipse.paho.client.mqttv3.MqttConnectOptions;
import org.eclipse.paho.client.mqttv3.MqttDeliveryToken;
import org.eclipse.paho.client.mqttv3.MqttException;
import org.eclipse.paho.client.mqttv3.MqttMessage;
import org.eclipse.paho.client.mqttv3.MqttTopic;

/**
 * A Flume {@link EventDrivenSource} that subscribes to an MQTT (Mosquitto)
 * topic and forwards every received message payload to the configured Flume
 * channel as an event body.
 *
 * <p>Broker URL, topic and client id are configurable from the agent
 * configuration ({@code brokerUrl}, {@code topic}, {@code clientId}); the
 * defaults preserve the values that were previously hard-coded, so existing
 * agent configurations keep working unchanged.
 */
public class MQTTSource extends AbstractSource implements EventDrivenSource,
        Configurable {

    // Defaults match the original hard-coded values.
    private static final String DEFAULT_BROKER_URL = "tcp://192.168.116.128:1883";
    private static final String DEFAULT_TOPIC = "192.168.116.128/yhx/yhx_flume";
    private static final String DEFAULT_CLIENT_ID = "yhx_flume";

    private String brokerUrl = DEFAULT_BROKER_URL;
    private String topicName = DEFAULT_TOPIC;
    private String clientId = DEFAULT_CLIENT_ID;

    SimpleMqttClient client = null;

    /**
     * Reads optional settings from the Flume agent configuration. All keys
     * are optional; a missing key falls back to the original default value.
     */
    @Override
    public void configure(Context context) {
        brokerUrl = context.getString("brokerUrl", DEFAULT_BROKER_URL);
        topicName = context.getString("topic", DEFAULT_TOPIC);
        clientId = context.getString("clientId", DEFAULT_CLIENT_ID);
    }

    /**
     * Connects to the broker and subscribes, then returns immediately:
     * delivery is callback driven (see
     * {@link SimpleMqttClient#messageArrived}), so this method must not
     * block Flume's lifecycle thread.
     */
    @Override
    public void start() {
        client = new SimpleMqttClient();
        client.runClient();
        // Update the Flume lifecycle state (the original left this
        // commented out, so the source never reported itself as started).
        super.start();
    }

    /** Disconnects from the broker and marks the source stopped. */
    @Override
    public void stop() {
        if (client != null) {
            client.closeConn();
            client = null;
        }
        super.stop();
    }

    /**
     * Thin wrapper around the Paho client: connects, subscribes, and turns
     * every arriving MQTT message into a Flume event.
     */
    public class SimpleMqttClient implements MqttCallback {
        MqttClient myClient;
        MqttConnectOptions connOpt;

        // This source only subscribes; the publish branch is kept from the
        // original as a self-test aid but is disabled by default.
        boolean subscriber = true;
        boolean publisher = false;

        /**
         * Invoked by Paho when the broker connection drops. Reconnect logic
         * could be added here; currently the loss is only reported.
         */
        @Override
        public void connectionLost(Throwable t) {
            System.out.println("Connection lost: " + t);
        }

        /** Disconnects from the broker if currently connected. */
        public void closeConn() {
            if (myClient != null && myClient.isConnected()) {
                try {
                    myClient.disconnect();
                } catch (MqttException e) {
                    // Best effort during shutdown: report and continue.
                    e.printStackTrace();
                }
            }
        }

        /** Invoked when a message published by this client reached the broker. */
        @Override
        public void deliveryComplete(IMqttDeliveryToken token) {
            // Nothing to do; this source does not normally publish.
        }

        /**
         * Invoked for every message on the subscribed topic: wraps the raw
         * payload into a Flume event and hands it to the channel processor.
         */
        @Override
        public void messageArrived(String topic, MqttMessage message)
                throws Exception {
            Map<String, String> headers = new HashMap<String, String>();
            // Record the originating topic so downstream sinks can route on it.
            headers.put("topic", topic);
            Event flumeEvent = EventBuilder.withBody(message.getPayload(),
                    headers);
            try {
                getChannelProcessor().processEvent(flumeEvent);
            } catch (Exception e) {
                // Channel full or transient failure: the message is dropped.
                // Original best-effort behaviour preserved (no rethrow).
                e.printStackTrace();
            }
        }

        /**
         * Creates the MQTT client, connects to the broker and subscribes to
         * the configured topic.
         *
         * <p>Two bug fixes versus the original: (1) this method now RETURNS
         * once the subscription is established instead of sleeping forever
         * in a while(true) loop, which hung Flume's lifecycle thread; and
         * (2) a connection failure raises an exception instead of calling
         * System.exit(-1), which would have killed the whole Flume agent.
         */
        public void runClient() {
            connOpt = new MqttConnectOptions();
            connOpt.setCleanSession(true);
            // setKeepAliveInterval() takes SECONDS; the original passed 3000
            // (i.e. 50 minutes), most likely intending milliseconds.
            connOpt.setKeepAliveInterval(30);

            try {
                myClient = new MqttClient(brokerUrl, clientId);
                myClient.setCallback(this);
                myClient.connect(connOpt);
            } catch (MqttException e) {
                throw new RuntimeException("Unable to connect to MQTT broker "
                        + brokerUrl, e);
            }
            System.out.println("Connected to " + brokerUrl);

            if (subscriber) {
                try {
                    int subQoS = 0;
                    myClient.subscribe(topicName, subQoS);
                } catch (MqttException e) {
                    throw new RuntimeException("Unable to subscribe to topic "
                            + topicName, e);
                }
            }

            // Optional self-test: publish a few demo messages. Disabled by
            // default (publisher == false).
            if (publisher) {
                MqttTopic topic = myClient.getTopic(topicName);
                for (int i = 1; i <= 10; i++) {
                    String pubMsg = "{\"pubmsg\":" + i + "}";
                    int pubQoS = 0;
                    MqttMessage message = new MqttMessage(pubMsg.getBytes());
                    message.setQos(pubQoS);
                    message.setRetained(false);
                    System.out.println("Publishing to topic \"" + topicName
                            + "\" qos " + pubQoS);
                    try {
                        MqttDeliveryToken token = topic.publish(message);
                        // Wait until the broker has acknowledged the message.
                        token.waitForCompletion();
                        Thread.sleep(100);
                    } catch (Exception e) {
                        e.printStackTrace();
                    }
                }
            }
        }
    }
}
打JAR包注意要把Class-Path写上,如下:
Manifest-Version: 1.0
Class-Path: flume-ng-configuration-1.5.2.jar flume-ng-core-1.5.2.jar flume-ng-node-1.5.2.jar flume-ng-sdk-1.5.2.jar org.eclipse.paho.client.mqttv3-1.0.0.jar
注意:MANIFEST.MF 规范要求每行不超过 72 个字节,过长的 Class-Path 必须换行,续行要以一个空格开头,否则打出的 JAR 无法正确解析 Class-Path。
将打好的JAR包放到flume的lib目录(注意,Class-Path 中列出的 jar 包在 lib 目录下一定要有。如果没有,则放上去)
接着修改一下flume的配置文件,如下(主要是sourceMqtt ,看这个。 因为我这块同时还监听了UDP):
a1.sources = sourceMqtt sourceUdp
a1.sinks = sinkMqtt sinkUdp
a1.channels = channelMqtt channelUdp

# Describe/configure the source
a1.sources.sourceMqtt.type = com.yhx.sensor.flume.source.MQTTSource

# Describe the sink
a1.sinks.sinkMqtt.type = logger

# Use a channel which buffers events in memory
a1.channels.channelMqtt.type = memory
a1.channels.channelMqtt.capacity = 1000
a1.channels.channelMqtt.transactionCapacity = 100

# Bind the source and sink to the channel
a1.sources.sourceMqtt.channels = channelMqtt
a1.sinks.sinkMqtt.channel = channelMqtt

# a2.sources = sourceUdp
# a2.sinks = sinkUdp
# a2.channels = channelUdp

# Describe/configure the source
a1.sources.sourceUdp.type = syslogudp
a1.sources.sourceUdp.host = 0.0.0.0
a1.sources.sourceUdp.port = 12459
a1.sources.sourceUdp.interceptors = interceptorUdp
a1.sources.sourceUdp.interceptors.interceptorUdp.type = com.yhx.sensor.flume.intercepter.UDPIntercepter$Builder

# Describe the sink
a1.sinks.sinkUdp.type = logger

# Use a channel which buffers events in memory
a1.channels.channelUdp.type = memory
a1.channels.channelUdp.capacity = 1000
a1.channels.channelUdp.transactionCapacity = 100

# Bind the source and sink to the channel
a1.sources.sourceUdp.channels = channelUdp
a1.sinks.sinkUdp.channel = channelUdp
配置文件保存至flume目录下的conf,叫flume.conf
然后flume启动命令如下
bin/flume-ng agent --conf conf --conf-file conf/flume.conf --name a1
flume与Mosquitto的集成的更多相关文章
- 简单测试flume+kafka+storm的集成
集成 Flume/kafka/storm 是为了收集日志文件而引入的方法,最终将日志转到storm中进行分析.storm的分析方法见后面文章,这里只讨论集成方法. 以下为具体步骤及测试方法: 1.分别 ...
- 《Kafka笔记》4、Kafka架构,与其他组件集成
目录 1 kafka架构进阶 1.1 Kafka底层数据的同步机制(面试常问) 1.1.1 高水位截断的同步方式可能带来数据丢失(Kafka 0.11版本前的问题) 1.1.2 解决高水位截断数据丢失 ...
- why big data
很多人都知道大数据很火,就业很好,薪资很高,想往大数据方向发展.但该学哪些技术,学习路线是什么样的呢?用不用参加大数据培训呢?如果自己很迷茫,为了这些原因想往大数据方向发展,也可以,那么大讲台老师就想 ...
- SparkStreaming基础
* SparkStreaming基础 打开之前构建好的Maven工程,如何构建?请参看SparkCore基础(二)的最后部分. 在SparkCore中,我们操作的数据都在RDD中,是Spark的一个抽 ...
- cdh环境下,spark streaming与flume的集成问题总结
文章发自:http://www.cnblogs.com/hark0623/p/4170156.html 转发请注明 如何做集成,其实特别简单,网上其实就是教程. http://blog.csdn.n ...
- spark streaming集成flume
1. 安装flume flume安装,解压后修改flume_env.sh配置文件,指定java_home即可. cp hdfs jar包到flume lib目录下(否则无法抽取数据到hdfs上): $ ...
- 新闻实时分析系统-Flume+HBase+Kafka集成与开发
1.下载Flume源码并导入Idea开发工具 1)将apache-flume-1.7.0-src.tar.gz源码下载到本地解压 2)通过idea导入flume源码 打开idea开发工具,选择File ...
- 大数据笔记(三十二)——SparkStreaming集成Kafka与Flume
三.集成:数据源 1.Apache Kafka:一种高吞吐量的分布式发布订阅消息系统 (1) (*)消息的类型 Topic:主题(相当于:广播) Queue:队列(相当于:点对点) (*)常见的消息系 ...
- 新闻网大数据实时分析可视化系统项目——9、Flume+HBase+Kafka集成与开发
1.下载Flume源码并导入Idea开发工具 1)将apache-flume-1.7.0-src.tar.gz源码下载到本地解压 2)通过idea导入flume源码 打开idea开发工具,选择File ...
随机推荐
- linux下查看当前用户的 三个命令
linux下查看当前用户的 三个命令 1,whoami; 2,id -un; 3,who -H 可以列出当前所有的 NAME (用户名) LINE (窗口列表) TIME(开启时间 ...
- 【原创】express3.4.8源码解析之路由中间件
前言 注意:旧文章转成markdown格式. 跟大家聊一个中间件,叫做路由中间件,它并非是connect中内置的中间件,而是在express中集成进去的. 显而易见,该中间件的用途就是 ------ ...
- 错误 1 未知的服务器标记“asp:ScriptManager”。
如题 ... 解决方案 :将 <%@ Register Assembly="System.Web.Extensions, Version=1.0.61025.0, Culture=ne ...
- 修改Flume-NG的hdfs sink解析时间戳源码大幅提高写入性能
Flume-NG中的hdfs sink的路径名(对应参数"hdfs.path",不允许为空)以及文件前缀(对应参数"hdfs.filePrefix")支持正则解 ...
- JavaScript中的Function(函数)对象详解
JavaScript中的Function对象是函数,函数的用途分为3类: 作为普通逻辑代码容器: 作为对象方法: 作为构造函数. 1.作为普通逻辑代码容器 function multiply(x, y ...
- 入门必看--JavaScript基础
JavaScript他是一种描述性语言,其实他并不难学,只要用心学,一定会学好,我相信大家在看这篇文章的时候,一定也学过HTML吧,使用JavaScript就是为了能和网页有更好的交互,下面切入主题. ...
- 菜鸟带你飞______DP基础26道水题
DP 158:11:22 1205:00:00 Overview Problem Status Rank (56) Discuss Current Time: 2015-11-26 19:11:2 ...
- Mysql_以案例为基准之查询
查询数据操作
- ASP.NET 画图与图像处理-如何直接输出到页面
有时候我们生成的图片并不需要保存到磁盘中,而是直接输出到页面,比如验证码.实时报表等,如何做呢?请参考如下: protected void Page_Load(object sender, E ...
- MSSQL 2008错误提示:更改对于登录sa失败
MSSQL 2008错误提示:更改对于登录sa失败: 使用Windows方式登录数据库后,执行以下命令: EXEC sp_password null,"123456"," ...