final StreamExecutionEnvironment streamExecutionEnvironment = StreamExecutionEnvironment.getExecutionEnvironment(); 

/*
* Filter
*/
DataStream<Long> input = streamExecutionEnvironment.generateSequence(-5, 5);
input.filter(new FilterFunction<Long>() {
    @Override
    public boolean filter(Long value) throws Exception {
        // Keep only the non-negative values.
        return value >= 0;
    }
}).print();
streamExecutionEnvironment.execute();
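
FilterFunction is a single-method interface, so the same filter can be written as a lambda on Java 8+; a minimal sketch with identical behavior:

// Keep only the non-negative values, written as a lambda.
input.filter(value -> value >= 0).print();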

/*
* Connect
*/
DataStream<Long> someStream = streamExecutionEnvironment.generateSequence(0, 10);
DataStream<String> otherStream = streamExecutionEnvironment.fromElements(WordCountData.WORDS);
ConnectedStreams<Long, String> connectedStreams = someStream.connect(otherStream);
DataStream<String> result = connectedStreams.flatMap(new CoFlatMapFunction<Long, String, String>() {
    @Override
    public void flatMap1(Long value, Collector<String> out) throws Exception {
        // First input: emit each number as a string.
        out.collect(value.toString());
    }

    @Override
    public void flatMap2(String value, Collector<String> out) throws Exception {
        // Second input: split each line into words and emit them in order.
        Arrays.stream(value.split("\\W+")).forEachOrdered(out::collect);
    }
});
result.print();
streamExecutionEnvironment.execute();
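
ConnectedStreams also supports map with a CoMapFunction when each input element should produce exactly one output element; a minimal sketch over the same two streams (the toUpperCase transformation is just an illustration):

DataStream<String> mapped = connectedStreams.map(new CoMapFunction<Long, String, String>() {
    @Override
    public String map1(Long value) throws Exception {
        // One output per element of the first (Long) stream.
        return value.toString();
    }

    @Override
    public String map2(String value) throws Exception {
        // One output per element of the second (String) stream.
        return value.toUpperCase();
    }
});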

/*
* KeyBy
*/
DataStream<Tuple4<String, String, String, Integer>> input = streamExecutionEnvironment.fromElements(TRANSCRIPT);
KeyedStream<Tuple4<String, String, String, Integer>, Tuple> keyedStream = input.keyBy("f0");
keyedStream.print();
// maxBy keeps, per key, the record with the largest value in field f3 (the score).
keyedStream.maxBy("f3").print();
streamExecutionEnvironment.execute();

public static final Tuple4[] TRANSCRIPT = new Tuple4[] {
    Tuple4.of("class1", "张三", "语文", 100),
    Tuple4.of("class1", "李四", "语文", 78),
    Tuple4.of("class1", "王五", "语文", 99),
    Tuple4.of("class2", "赵六", "语文", 81),
    Tuple4.of("class2", "钱七", "语文", 59),
    Tuple4.of("class2", "马二", "语文", 97)
};
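
The string key "f0" is resolved by reflection at runtime; a KeySelector expresses the same key in a type-safe way. A minimal sketch, equivalent to keyBy("f0"):

KeyedStream<Tuple4<String, String, String, Integer>, String> keyedByClass =
        input.keyBy(new KeySelector<Tuple4<String, String, String, Integer>, String>() {
            @Override
            public String getKey(Tuple4<String, String, String, Integer> value) {
                // Key each record by its class name (field f0).
                return value.f0;
            }
        });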

/*
* Map
*/
DataStream<Long> input = streamExecutionEnvironment.generateSequence(0, 10);
DataStream<Long> plusOne = input.map(new MapFunction<Long, Long>() {
    @Override
    public Long map(Long value) throws Exception {
        // Add one to every element.
        return value + 1;
    }
});
plusOne.print();
streamExecutionEnvironment.execute();

/*
* Fold
*/
DataStream<Tuple4<String, String, String, Integer>> input = streamExecutionEnvironment.fromElements(TRANSCRIPT);
DataStream<String> result = input.keyBy(0).fold("Start", new FoldFunction<Tuple4<String, String, String, Integer>, String>() {
    @Override
    public String fold(String str, Tuple4<String, String, String, Integer> value) throws Exception {
        // Append each student's name to the running string for that class.
        return str + " = " + value.f1;
    }
});
result.print();
streamExecutionEnvironment.execute();

public static final Tuple4[] TRANSCRIPT = new Tuple4[] {
    Tuple4.of("class1", "张三", "语文", 100),
    Tuple4.of("class1", "李四", "语文", 78),
    Tuple4.of("class1", "王五", "语文", 99),
    Tuple4.of("class2", "赵六", "语文", 81),
    Tuple4.of("class2", "钱七", "语文", 59),
    Tuple4.of("class2", "马二", "语文", 97)
};

/**
1> Start = 赵六
1> Start = 赵六 = 钱七
1> Start = 赵六 = 钱七 = 马二
2> Start = 张三
2> Start = 张三 = 李四
2> Start = 张三 = 李四 = 王五
*/
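
Note that fold on a KeyedStream is deprecated in later Flink releases. A sketch of the same running concatenation using keyed ValueState in a RichFlatMapFunction instead (an assumption-laden alternative, not the original author's code; imports come from org.apache.flink.api.common.state and org.apache.flink.configuration):

DataStream<String> folded = input.keyBy(0)
        .flatMap(new RichFlatMapFunction<Tuple4<String, String, String, Integer>, String>() {
            private transient ValueState<String> acc;

            @Override
            public void open(Configuration parameters) {
                // One state entry per key, holding the running string.
                acc = getRuntimeContext().getState(new ValueStateDescriptor<>("acc", String.class));
            }

            @Override
            public void flatMap(Tuple4<String, String, String, Integer> value, Collector<String> out) throws Exception {
                String current = (acc.value() == null) ? "Start" : acc.value();
                current = current + " = " + value.f1;
                acc.update(current);
                out.collect(current);
            }
        });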

/*
* Reduce
*/
DataStream<Tuple4<String, String, String, Integer>> input = streamExecutionEnvironment.fromElements(TRANSCRIPT);
KeyedStream<Tuple4<String, String, String, Integer>, Tuple> keyedStream = input.keyBy(0);
keyedStream.reduce(new ReduceFunction<Tuple4<String, String, String, Integer>>() {
    @Override
    public Tuple4<String, String, String, Integer> reduce(Tuple4<String, String, String, Integer> value1,
            Tuple4<String, String, String, Integer> value2) throws Exception {
        // Sum the scores per class; the other fields keep the first record's values.
        value1.f3 += value2.f3;
        return value1;
    }
}).print();
streamExecutionEnvironment.execute();

/**
2> (class1,张三,语文,100)
2> (class1,张三,语文,178)
2> (class1,张三,语文,277)
1> (class2,赵六,语文,81)
1> (class2,赵六,语文,140)
1> (class2,赵六,语文,237)
*/
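
Mutating value1 in place works in this example, but a reduce function should generally not modify its inputs, since Flink may reuse objects. A minimal non-mutating sketch:

// Build a fresh tuple per reduction instead of mutating value1.
keyedStream.reduce((value1, value2) ->
        Tuple4.of(value1.f0, value1.f1, value1.f2, value1.f3 + value2.f3)).print();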

/*
* Project
*/
DataStream<Tuple4<String, String, String, Integer>> input = streamExecutionEnvironment.fromElements(TRANSCRIPT);
// Keep only the name (field 1) and score (field 3) of each record.
DataStream<Tuple2<String, Integer>> output = input.project(1, 3);
output.print();
streamExecutionEnvironment.execute();

/**
4> (张三,100)
4> (钱七,59)
2> (王五,99)
3> (赵六,81)
1> (李四,78)
1> (马二,97)
*/
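
project is only available on tuple streams; the same projection can always be written as a map, which also states the output type explicitly. A minimal equivalent sketch:

DataStream<Tuple2<String, Integer>> projected = input.map(
        new MapFunction<Tuple4<String, String, String, Integer>, Tuple2<String, Integer>>() {
            @Override
            public Tuple2<String, Integer> map(Tuple4<String, String, String, Integer> value) throws Exception {
                // Keep the name (f1) and score (f3), dropping the other fields.
                return Tuple2.of(value.f1, value.f3);
            }
        });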

/*
* SplitAndSelect
*/
// Output names for the split (constants assumed defined on the enclosing class).
public static final String EVEN = "even";
public static final String ODD = "odd";

DataStream<Long> input = streamExecutionEnvironment.generateSequence(0, 10);
SplitStream<Long> splitStream = input.split(new OutputSelector<Long>() {
    @Override
    public Iterable<String> select(Long value) {
        // Route each element to the "even" or "odd" named output.
        List<String> output = new ArrayList<>();
        if (value % 2 == 0) {
            output.add(EVEN);
        } else {
            output.add(ODD);
        }
        return output;
    }
});
// splitStream.print();
DataStream<Long> even = splitStream.select(EVEN);
DataStream<Long> odd = splitStream.select(ODD);
DataStream<Long> all = splitStream.select(EVEN, ODD);
odd.print();
streamExecutionEnvironment.execute();
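
split/select is deprecated in newer Flink releases in favor of side outputs. A sketch of the same even/odd routing with a ProcessFunction and an OutputTag (assumes a Flink version where side outputs are available, 1.3+):

final OutputTag<Long> oddTag = new OutputTag<Long>("odd") {};
SingleOutputStreamOperator<Long> evenStream = input.process(new ProcessFunction<Long, Long>() {
    @Override
    public void processElement(Long value, Context ctx, Collector<Long> out) throws Exception {
        if (value % 2 == 0) {
            out.collect(value);        // main output: even numbers
        } else {
            ctx.output(oddTag, value); // side output: odd numbers
        }
    }
});
DataStream<Long> oddStream = evenStream.getSideOutput(oddTag);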

/*
* FlatMap
*/
DataStream<String> input = streamExecutionEnvironment.fromElements(WordCountData.WORDS);
DataStream<String> wordStream = input.flatMap(new FlatMapFunction<String, String>() {
    @Override
    public void flatMap(String value, Collector<String> out) throws Exception {
        // Lower-case each line, split it into words, and emit the non-empty ones.
        Arrays.stream(value.toLowerCase().split("\\W+"))
              .filter(str -> str.length() > 0)
              .forEach(out::collect);
    }
});
wordStream.print();
streamExecutionEnvironment.execute();
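
The same flatMap can be written as a lambda, but because the Collector's type parameter is erased, Flink needs an explicit type hint via returns; a minimal sketch (Types is org.apache.flink.api.common.typeinfo.Types):

input.flatMap((String value, Collector<String> out) ->
            Arrays.stream(value.toLowerCase().split("\\W+"))
                  .filter(str -> str.length() > 0)
                  .forEach(out::collect))
     .returns(Types.STRING)
     .print();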
