自定义http source

config

a1.sources.r1.type=http
a1.sources.r1.bind=localhost
a1.sources.r1.port=5140
a1.sources.r1.channels=c1
#自定义source Handler
a1.sources.r1.handler = org.apache.flume.sw.source.http.JSONHandler
a1.sources.r1.handler.configHome = /home/www/logs/datareport

handler

public class JSONHandler implements HTTPSourceHandler {

  private static final Logger LOG = LoggerFactory.getLogger(JSONHandler.class);

  /** Header key for the (currently empty) request signature. */
  public static final String PARA_SIGN = "sign";
  /** JSON body key identifying the reporting project; copied into the event headers. */
  public static final String PARA_PROJECT_ID = "projectId";
  /** JSON body key whose value becomes the event body. */
  public static final String PARA_REPORT_MSG = "reportMsg";

  private final Type mapType = new TypeToken<LinkedHashMap<String, Object>>() {}.getType();
  private final Gson gson;
  // Flume source context, injected via configure(); carries external
  // properties such as "configHome" (a1.sources.r1.handler.configHome).
  private Context context = null;

  public JSONHandler() {
    gson = new GsonBuilder().disableHtmlEscaping().create();
  }

  /**
   * Parses the HTTP request body as a JSON object and converts it into a
   * single Flume event: {@code reportMsg} becomes the event body,
   * {@code projectId} (and an empty {@code sign}) become headers.
   *
   * @param request the incoming HTTP request; body must be a JSON object
   * @return a one-element list with the constructed event
   * @throws HTTPBadRequestException if the body is not valid JSON, is empty,
   *         or lacks the {@code projectId}/{@code reportMsg} keys
   * @throws UnsupportedCharsetException if the request charset is not
   *         UTF-8/UTF-16/UTF-32
   */
  @Override
  public List<Event> getEvents(HttpServletRequest request) throws Exception {
    BufferedReader reader = request.getReader();
    String charset = request.getCharacterEncoding();
    // UTF-8 is default for JSON. If no charset is specified, UTF-8 is to
    // be assumed.
    if (charset == null) {
      LOG.debug("Charset is null, default charset of UTF-8 will be used.");
      charset = "UTF-8";
    } else if (!(charset.equalsIgnoreCase("utf-8")
        || charset.equalsIgnoreCase("utf-16")
        || charset.equalsIgnoreCase("utf-32"))) {
      LOG.error("Unsupported character set in request {}. "
          + "JSON handler supports UTF-8, "
          + "UTF-16 and UTF-32 only.", charset);
      throw new UnsupportedCharsetException("JSON handler supports UTF-8, "
          + "UTF-16 and UTF-32 only.");
    }

    /*
     * Gson throws Exception if the data is not parseable to JSON.
     * Need not catch it since the source will catch it and return error.
     */
    LinkedHashMap<String, Object> map;
    try {
      map = gson.fromJson(reader, mapType);
    } catch (JsonSyntaxException ex) {
      throw new HTTPBadRequestException("Request has invalid JSON Syntax.", ex);
    }

    // Bug fix: an empty body makes gson.fromJson return null, and missing
    // keys made the original .toString() calls throw NPE. Report a proper
    // 400 instead, consistent with the invalid-JSON path above.
    if (map == null
        || map.get(PARA_PROJECT_ID) == null
        || map.get(PARA_REPORT_MSG) == null) {
      throw new HTTPBadRequestException("Request body must be a JSON object "
          + "containing '" + PARA_PROJECT_ID + "' and '" + PARA_REPORT_MSG + "'.");
    }

    // Guard against configure() not having been invoked yet.
    String configHome = (context != null) ? context.getString("configHome") : null;
    LOG.info("configHome = {}", configHome);

    String projectId = map.get(PARA_PROJECT_ID).toString();
    String reportMsg = map.get(PARA_REPORT_MSG).toString();

    Map<String, String> headers = new HashMap<String, String>();
    headers.put(PARA_PROJECT_ID, projectId);
    headers.put(PARA_SIGN, "");

    JSONEvent jsonEvent = new JSONEvent();
    jsonEvent.setHeaders(headers);
    // Bug fix: encode the body with the validated request charset; the
    // original getBytes() silently used the platform default charset.
    jsonEvent.setBody(reportMsg.getBytes(charset));
    return getSimpleEvents(jsonEvent);
  }

  @Override
  public void configure(Context context) {
    this.context = context;
  }

  // Re-wraps the event via EventBuilder so downstream sees a plain event
  // (body + headers) rather than the JSONEvent subtype.
  private List<Event> getSimpleEvents(Event e) {
    List<Event> newEvents = new ArrayList<Event>(1);
    newEvents.add(EventBuilder.withBody(e.getBody(), e.getHeaders()));
    return newEvents;
  }
}

自定义Sink

config

#自定义Sink
a1.sinks.k1.type = org.apache.flume.sw.sink.RollingFileSink
a1.sinks.k1.channel = c1
a1.sinks.k1.sink.rollInterval = 15
a1.sinks.k1.sink.directory = D:/var/log/flume
#自定义pathManager类型
a1.sinks.k1.sink.pathManager = CUSTOM
#文件创建频率 (null or yyyyMMddHHmmss), 默认值null->不创建
a1.sinks.k1.sink.pathManager.dirNameFormatter = yyyyMMdd
a1.sinks.k1.sink.pathManager.prefix = log_
a1.sinks.k1.sink.pathManager.extension = txt

自定义RollingFileSink

    // Selects the PathManager implementation for this sink based on the
    // configured "sink.pathManager" value.
    if(pathManagerType.equals("CUSTOM")) {
// If the externally configured PathManager is CUSTOM, instantiate our custom SimplePathManager directly
pathController = new SimplePathManager(pathManagerContext);
} else {
// Otherwise delegate to Flume's factory lookup (DEFAULT / ROLLTIME, etc.)
pathController = PathManagerFactory.getInstance(pathManagerType, pathManagerContext);
}

自定义pathManager类型

/**
 * A {@link DefaultPathManager} variant that names rolled files as
 * {@code <prefix><yyyyMMddHHmmss>-<index>[.<extension>]} and, when the
 * "dirNameFormatter" context property is set (and not the literal "null"),
 * places them inside a per-period sub-directory of the base directory.
 */
public class SimplePathManager extends DefaultPathManager {

  private static final Logger logger = LoggerFactory
      .getLogger(SimplePathManager.class);

  // Second-granularity timestamp embedded in every file name.
  private final DateTimeFormatter formatter = DateTimeFormat.forPattern("yyyyMMddHHmmss");
  // Optional sub-directory pattern; null means "write directly into the base directory".
  private DateTimeFormatter dirNameFormatter = null;
  // Timestamp string of the most recent roll; used to restart the file index.
  private String lastRoll;

  public SimplePathManager(Context context) {
    super(context);
    String dirNameFormatterStr = context.getString("dirNameFormatter");
    boolean disabled = (dirNameFormatterStr == null) || "null".equals(dirNameFormatterStr);
    if (disabled) {
      dirNameFormatter = null;
    } else {
      dirNameFormatter = DateTimeFormat.forPattern(dirNameFormatterStr);
    }
  }

  /**
   * Builds the next output file. The counter resets whenever the
   * second-resolution timestamp changes; on directory-creation failure the
   * file falls back to the base directory.
   */
  @Override
  public File nextFile() {
    LocalDateTime now = LocalDateTime.now();
    String stamp = formatter.print(now);

    // New second => start numbering from 1 again.
    if (!stamp.equals(lastRoll)) {
      getFileIndex().set(0);
      lastRoll = stamp;
    }

    String fileName = getPrefix() + stamp + "-" + getFileIndex().incrementAndGet();
    String ext = getExtension();
    if (ext.length() > 0) {
      fileName = fileName + "." + ext;
    }

    File targetDir;
    if (dirNameFormatter == null) {
      targetDir = getBaseDirectory();
    } else {
      targetDir = new File(getBaseDirectory(), dirNameFormatter.print(now));
    }

    try {
      FileUtils.forceMkdir(targetDir);
      currentFile = new File(targetDir, fileName);
    } catch (IOException e) {
      // Could not create the sub-directory: degrade to the base directory.
      currentFile = new File(getBaseDirectory(), fileName);
      logger.error(e.toString(), e);
    }
    return currentFile;
  }

  /** Factory hook used by Flume's PathManager lookup. */
  public static class Builder implements PathManager.Builder {
    @Override
    public PathManager build(Context context) {
      return new SimplePathManager(context);
    }
  }
}

Apache Flume 1.7.0 自定义输入输出的更多相关文章

  1. Apache Flume 1.7.0 发布,日志服务器

    Apache Flume 1.7.0 发布了,Flume 是一个分布式.可靠和高可用的服务,用于收集.聚合以及移动大量日志数据,使用一个简单灵活的架构,就流数据模型.这是一个可靠.容错的服务. 本次更 ...

  2. Apache Flume 1.7.0 源码编译 导入Eclipse

    前言 最近看了看Apache Flume,在虚拟机里跑了一下flume + kafka + storm + mysql架构的demo,功能很简单,主要是用flume收集数据源(http上报信息),放入 ...

  3. Apache Flume 1.6.0 发布,日志服务器

    Apache Flume 1.6.0 发布,此版本现已提供下载: http://flume.apache.org/download.html 更新日志和文档: http://flume.apache. ...

  4. Apache Flume 1.7.0 各个模块简介

    Flume简介 Apache Flume是一个分布式.可靠.高可用的日志收集系统,支持各种各样的数据来源,如http,log文件,jms,监听端口数据等等,能将这些数据源的海量日志数据进行高效收集.聚 ...

  5. Flume 1.5.0简单部署试用

    ================================================================================ 一.Flume简介 ========= ...

  6. Flume官方文档翻译——Flume 1.7.0 User Guide (unreleased version)中一些知识点

    Flume官方文档翻译--Flume 1.7.0 User Guide (unreleased version)(一) Flume官方文档翻译--Flume 1.7.0 User Guide (unr ...

  7. Flume官方文档翻译——Flume 1.7.0 User Guide (unreleased version)(二)

    Flume官方文档翻译--Flume 1.7.0 User Guide (unreleased version)(一) Logging raw data(记录原始数据) Logging the raw ...

  8. Flume官方文档翻译——Flume 1.7.0 User Guide (unreleased version)(一)

    Flume 1.7.0 User Guide Introduction(简介) Overview(综述) System Requirements(系统需求) Architecture(架构) Data ...

  9. Apache Spark 2.2.0 中文文档 - Spark Streaming 编程指南 | ApacheCN

    Spark Streaming 编程指南 概述 一个入门示例 基础概念 依赖 初始化 StreamingContext Discretized Streams (DStreams)(离散化流) Inp ...

随机推荐

  1. arm 执行 交叉编译完成的可执行文件时 出现premission denied 问题

    我用的tftp传过去的文件 然后传完 执行的时候 出现了premission denied (权限不够)得问题 解决方法 就是添加权限  chmod 777 filename

  2. mysql 数据库链接 过期产品

    注意下面的"@" 必须要加的!否则会报错! $link = @mysql_connect('localhost','root','root') or die("link ...

  3. 对excel文件的读取

    poi上传文件,准备 <dependencies><dependency> <groupId>org.apache.poi</groupId> < ...

  4. jQuery源码框架fn解读

    (function( window, undefined ){ var jQuery = (function(){ var jQuery = function( selector, context ) ...

  5. OpenGL ES中MRT应用

    Demo涵盖了OpenGL ES 3.0 的一系列新特性: 1.VAO和VBO 2.帧缓冲对象 3.MRT 效果: 代码: //yf's version #define STB_IMAGE_IMPLE ...

  6. html4

    一.span标签:能让某几个文字或者某个词语凸显出来 <p> 今天是11月份的<span>第一天</span>,地铁卡不打折了 </p> 二.字体风格 ...

  7. 使用 vs code 搭建vue项目(一)

    1. 配置环境 1.1. 安装nodejs,下载地址:https://nodejs.org/en/download/. 步骤-..安装完成后,输入node-v,提示如下,则安装完成. 1.2. 安装v ...

  8. ajax轮询与长轮询

      刚刚网了关于轮询的知识,必须拿到自己这里来做个备份了! 其实以前用ajax轮询做个及时数据更新的,只是当时做了不知道那个就是轮询. 首先我们什么时候会想到用轮询技术呢? 一般而言,最多的是及时信息 ...

  9. 团队-Forward团队-团队一阶段互评

    团队名称:Forward团队 学号:2015035107105 得分:7 原因:知识欠缺,能够一边学一边做 学号:2015035107109 得分:6 原因:对我有很多帮助 学号:2015035107 ...

  10. 基于jmeter的性能测试平台(二) 一个构想

    之前基于jmeter搭好了分布式测试平台,但是感觉还是很粗糙,打算给它穿点衣服. 整个架构差不多就像下面这个图. (1)基于python django做一个web页面,友好地管理测试过程 (2)con ...