Apache Flume 1.7.0: Custom Source and Sink
Custom HTTP source
config
a1.sources.r1.type=http
a1.sources.r1.bind=localhost
a1.sources.r1.port=
a1.sources.r1.channels=c1
# custom source handler
a1.sources.r1.handler = org.apache.flume.sw.source.http.JSONHandler
a1.sources.r1.handler.configHome = /home/www/logs/datareport
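With this source running, a client simply POSTs a JSON object to the configured port. Below is a minimal client sketch; the port 5140 and the field values are made up for illustration (the port line above is left blank in the original), but the projectId/reportMsg keys match what the handler below expects:

import java.io.OutputStream;
import java.net.HttpURLConnection;
import java.net.URL;
import java.nio.charset.StandardCharsets;

public class ReportClient {
    public static void main(String[] args) throws Exception {
        // 5140 is only a placeholder -- use whatever a1.sources.r1.port is actually set to
        URL url = new URL("http://localhost:5140");
        HttpURLConnection conn = (HttpURLConnection) url.openConnection();
        conn.setRequestMethod("POST");
        conn.setDoOutput(true);
        conn.setRequestProperty("Content-Type", "application/json; charset=utf-8");

        // one JSON object per request, carrying the projectId and reportMsg keys read by JSONHandler
        String body = "{\"projectId\":\"1001\",\"reportMsg\":\"{\\\"uid\\\":123}\"}";
        try (OutputStream out = conn.getOutputStream()) {
            out.write(body.getBytes(StandardCharsets.UTF_8));
        }
        System.out.println("HTTP " + conn.getResponseCode());
        conn.disconnect();
    }
}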
handler
package org.apache.flume.sw.source.http;

import java.io.BufferedReader;
import java.lang.reflect.Type;
import java.nio.charset.UnsupportedCharsetException;
import java.util.ArrayList;
import java.util.HashMap;
import java.util.LinkedHashMap;
import java.util.List;
import java.util.Map;

import javax.servlet.http.HttpServletRequest;

import org.apache.flume.Context;
import org.apache.flume.Event;
import org.apache.flume.event.EventBuilder;
import org.apache.flume.event.JSONEvent;
import org.apache.flume.source.http.HTTPBadRequestException;
import org.apache.flume.source.http.HTTPSourceHandler;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;

import com.google.gson.Gson;
import com.google.gson.GsonBuilder;
import com.google.gson.JsonSyntaxException;
import com.google.gson.reflect.TypeToken;

public class JSONHandler implements HTTPSourceHandler {

  private static final Logger LOG = LoggerFactory.getLogger(JSONHandler.class);

  public static final String PARA_SIGN = "sign";
  public static final String PARA_PROJECT_ID = "projectId";
  public static final String PARA_REPORT_MSG = "reportMsg";

  private final Type mapType = new TypeToken<LinkedHashMap<String, Object>>() {}.getType();
  private final Gson gson;
  // keeps the source configuration so external parameters (e.g. configHome) can be read
  private Context context = null;

  public JSONHandler() {
    gson = new GsonBuilder().disableHtmlEscaping().create();
  }

  /**
   * {@inheritDoc}
   */
  @Override
  public List<Event> getEvents(HttpServletRequest request) throws Exception {
    BufferedReader reader = request.getReader();
    String charset = request.getCharacterEncoding();
    // UTF-8 is default for JSON. If no charset is specified, UTF-8 is to be assumed.
    if (charset == null) {
      LOG.debug("Charset is null, default charset of UTF-8 will be used.");
      charset = "UTF-8";
    } else if (!(charset.equalsIgnoreCase("utf-8")
        || charset.equalsIgnoreCase("utf-16")
        || charset.equalsIgnoreCase("utf-32"))) {
      LOG.error("Unsupported character set in request {}. "
          + "JSON handler supports UTF-8, "
          + "UTF-16 and UTF-32 only.", charset);
      throw new UnsupportedCharsetException("JSON handler supports UTF-8, "
          + "UTF-16 and UTF-32 only.");
    }

    /*
     * Gson throws Exception if the data is not parseable to JSON.
     * Need not catch it since the source will catch it and return error.
     */
    LinkedHashMap<String, Object> map = new LinkedHashMap<String, Object>();
    try {
      map = gson.fromJson(reader, mapType);
    } catch (JsonSyntaxException ex) {
      throw new HTTPBadRequestException("Request has invalid JSON Syntax.", ex);
    }

    // example of reading an external parameter configured as a1.sources.r1.handler.configHome
    String configHome = this.context.getString("configHome");
    LOG.info(configHome);

    String projectId = map.get(PARA_PROJECT_ID).toString();
    String reportMsg = map.get(PARA_REPORT_MSG).toString();

    Map<String, String> headers = new HashMap<String, String>();
    headers.put(PARA_PROJECT_ID, projectId);
    headers.put(PARA_SIGN, "");

    JSONEvent jsonEvent = new JSONEvent();
    jsonEvent.setHeaders(headers);
    jsonEvent.setBody(reportMsg.getBytes());
    return getSimpleEvents(jsonEvent);
  }

  @Override
  public void configure(Context context) {
    this.context = context;
  }

  private List<Event> getSimpleEvents(Event e) {
    List<Event> newEvents = new ArrayList<Event>(1);
    newEvents.add(EventBuilder.withBody(e.getBody(), e.getHeaders()));
    return newEvents;
  }
}
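For the request body shown in the client sketch above, the handler emits a single event whose headers are {projectId=1001, sign=} and whose body is the raw reportMsg string; the sign header is left empty here, presumably as a placeholder for later request signing.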
Custom Sink
config
# custom sink
a1.sinks.k1.type = org.apache.flume.sw.sink.RollingFileSink
a1.sinks.k1.channel = c1
a1.sinks.k1.sink.rollInterval = 15
a1.sinks.k1.sink.directory = D:/var/log/flume
# custom pathManager type
a1.sinks.k1.sink.pathManager = CUSTOM
# dated sub-directory pattern (null or a format such as yyyyMMddHHmmss); default is null -> no dated sub-directory is created
a1.sinks.k1.sink.pathManager.dirNameFormatter = yyyyMMdd
a1.sinks.k1.sink.pathManager.prefix = log_
a1.sinks.k1.sink.pathManager.extension = txt
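Given these settings, the custom path manager described below produces a layout roughly like the following (dates are illustrative; a new file is rolled every 15 seconds, and the counter restarts at 1 whenever the timestamp part of the name changes):

D:/var/log/flume/20170101/log_20170101093000-1.txt
D:/var/log/flume/20170101/log_20170101093015-1.txt
D:/var/log/flume/20170102/log_20170102000000-1.txt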
Custom RollingFileSink
if (pathManagerType.equals("CUSTOM")) {
    // if the configured PathManager type is CUSTOM, instantiate our SimplePathManager directly
    pathController = new SimplePathManager(pathManagerContext);
} else {
    pathController = PathManagerFactory.getInstance(pathManagerType, pathManagerContext);
}
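For orientation, here is a hedged sketch of how this branch can sit inside the copied sink's configure() method. The option name follows the config above, and pathManagerType, pathManagerContext and pathController follow the snippet; the surrounding lines are a reconstruction, not the verbatim Flume 1.7.0 source:

@Override
public void configure(Context context) {
    // sink.pathManager holds either a built-in type (e.g. DEFAULT) or our CUSTOM marker
    String pathManagerType = context.getString("sink.pathManager", "DEFAULT");
    // sub-keys such as sink.pathManager.prefix are handed to the PathManager as its own Context
    Context pathManagerContext = new Context(context.getSubProperties("sink.pathManager."));

    if (pathManagerType.equals("CUSTOM")) {
        pathController = new SimplePathManager(pathManagerContext);
    } else {
        pathController = PathManagerFactory.getInstance(pathManagerType, pathManagerContext);
    }

    // the rest of the original configure() (sink.directory, sink.rollInterval, serializer, ...) stays unchanged
}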
Custom PathManager type
import java.io.File;
import java.io.IOException;

import org.apache.commons.io.FileUtils;
import org.apache.flume.Context;
import org.apache.flume.sink.DefaultPathManager;
import org.apache.flume.sink.PathManager;
import org.joda.time.LocalDateTime;
import org.joda.time.format.DateTimeFormat;
import org.joda.time.format.DateTimeFormatter;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;

public class SimplePathManager extends DefaultPathManager {

  private static final Logger logger = LoggerFactory.getLogger(SimplePathManager.class);

  // timestamp that goes into the file name, e.g. log_20170101093000-1.txt
  private final DateTimeFormatter formatter = DateTimeFormat.forPattern("yyyyMMddHHmmss");
  // optional pattern for a dated sub-directory; null means "write directly into sink.directory"
  private DateTimeFormatter dirNameFormatter = null;
  private String lastRoll;

  public SimplePathManager(Context context) {
    super(context);
    String dirNameFormatterStr = context.getString("dirNameFormatter");
    if (dirNameFormatterStr == null || "null".equals(dirNameFormatterStr)) {
      dirNameFormatter = null;
    } else {
      dirNameFormatter = DateTimeFormat.forPattern(dirNameFormatterStr);
    }
  }

  @Override
  public File nextFile() {
    LocalDateTime now = LocalDateTime.now();
    StringBuilder sb = new StringBuilder();
    String date = formatter.print(now);
    // reset the counter whenever the timestamp part of the name changes
    if (!date.equals(lastRoll)) {
      getFileIndex().set(0);
      lastRoll = date;
    }
    sb.append(getPrefix()).append(date).append("-");
    sb.append(getFileIndex().incrementAndGet());
    if (getExtension().length() > 0) {
      sb.append(".").append(getExtension());
    }

    File dir = dirNameFormatter != null
        ? new File(getBaseDirectory(), dirNameFormatter.print(now))
        : getBaseDirectory();
    try {
      FileUtils.forceMkdir(dir);
      currentFile = new File(dir, sb.toString());
    } catch (IOException e) {
      // fall back to the base directory if the dated sub-directory cannot be created
      currentFile = new File(getBaseDirectory(), sb.toString());
      logger.error(e.toString(), e);
    }
    return currentFile;
  }

  public static class Builder implements PathManager.Builder {
    @Override
    public PathManager build(Context context) {
      return new SimplePathManager(context);
    }
  }
}
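A quick way to sanity-check the naming logic outside a running agent is a throwaway main() like the one below. The demo class, base directory and pattern are assumptions for illustration; it only needs flume-ng-core, commons-io and joda-time on the classpath and SimplePathManager in the same package:

import java.io.File;
import org.apache.flume.Context;

public class SimplePathManagerDemo {
    public static void main(String[] args) {
        Context ctx = new Context();
        ctx.put("prefix", "log_");
        ctx.put("extension", "txt");
        ctx.put("dirNameFormatter", "yyyyMMdd");

        SimplePathManager manager = new SimplePathManager(ctx);
        manager.setBaseDirectory(new File("/tmp/flume-demo"));

        // prints something like /tmp/flume-demo/20170101/log_20170101093000-1.txt
        System.out.println(manager.nextFile());
        // called again within the same second, the index increments: ...-2.txt
        System.out.println(manager.nextFile());
    }
}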