1. Create the Maven project:

pom.xml:

<project xmlns="http://maven.apache.org/POM/4.0.0"
         xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance"
         xsi:schemaLocation="http://maven.apache.org/POM/4.0.0
         http://maven.apache.org/xsd/maven-4.0.0.xsd">

  <modelVersion>4.0.0</modelVersion>
  <groupId>storm.book</groupId>
  <artifactId>Getting-Started</artifactId>
  <version>0.0.1-SNAPSHOT</version>

  <build>
   <plugins>
      <plugin>
        <groupId>org.apache.maven.plugins</groupId>
        <artifactId>maven-compiler-plugin</artifactId>
        <version>2.3.2</version>
        <configuration>
          <source>1.6</source>
          <target>1.6</target>
          <compilerVersion>1.6</compilerVersion>
        </configuration>
      </plugin>
     </plugins>
  </build>

  <repositories>

        <!-- Repository where we can find the Storm dependencies -->
        <repository>
            <id>clojars.org</id>
            <url>http://clojars.org/repo</url>
        </repository>

  </repositories>

  <dependencies>

        <!-- Storm Dependency -->
        <dependency>
          <groupId>storm</groupId>
          <artifactId>storm</artifactId>
          <version>0.8.1</version> <!-- adjust to the Storm release you use -->
       </dependency>

  </dependencies>

</project>

Related classes:

WordReader:

package spouts;

import java.io.BufferedReader;
import java.io.FileNotFoundException;
import java.io.FileReader;
import java.util.Map;
import backtype.storm.spout.SpoutOutputCollector;
import backtype.storm.task.TopologyContext;
import backtype.storm.topology.OutputFieldsDeclarer;
import backtype.storm.topology.base.BaseRichSpout;
import backtype.storm.tuple.Fields;
import backtype.storm.tuple.Values;

public class WordReader extends BaseRichSpout {

    private SpoutOutputCollector collector;
    private FileReader fileReader;
    private boolean completed = false;
    public void ack(Object msgId) {
        System.out.println("OK:"+msgId);
    }
    public void close() {}
    public void fail(Object msgId) {
        System.out.println("FAIL:"+msgId);
    }

    /**
     * The only thing this method does is emit each
     * line of the file
     */
    public void nextTuple() {
        /**
         * nextTuple() is called in a loop forever, so once the file has been read
         * we just wait and then return
         */
        if(completed){
            try {
                Thread.sleep(2000);
            } catch (InterruptedException e) {
                //Do nothing
            }
            return;
        }
        String str;
        //Open the reader
        BufferedReader reader = new BufferedReader(fileReader);
        try{
            //Read all lines
            while((str = reader.readLine()) != null){
                /**
                 * For each line, emit a new tuple with the line as its value
                 */
                this.collector.emit(new Values(str),str);
            }
        }catch(Exception e){
            throw new RuntimeException("Error reading tuple",e);
        }finally{
            completed = true;
        }
    }

    /**
     * We will open the file and get the collector object
     */
    public void open(Map conf, TopologyContext context,
            SpoutOutputCollector collector) {
        try {
            this.fileReader = new FileReader(conf.get("wordsFile").toString());
        } catch (FileNotFoundException e) {
            throw new RuntimeException("Error reading file ["+conf.get("wordFile")+"]");
        }
        this.collector = collector;
    }

    /**
     * Declare the output field "line"
     */
    public void declareOutputFields(OutputFieldsDeclarer declarer) {
        declarer.declare(new Fields("line"));
    }
}
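
The path of the file read by WordReader comes from the "wordsFile" configuration entry (set in TopologyMain below). As a purely illustrative example, a words.txt input file could contain a few short lines such as:

storm test are great
is an storm simple application
but very powerful
really Storm is great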

WordCounter:

package bolts;

import java.util.HashMap;
import java.util.Map;

import backtype.storm.task.TopologyContext;
import backtype.storm.topology.BasicOutputCollector;
import backtype.storm.topology.OutputFieldsDeclarer;
import backtype.storm.topology.base.BaseBasicBolt;
import backtype.storm.tuple.Tuple;

public class WordCounter extends BaseBasicBolt {

    Integer id;
    String name;
    Map<String, Integer> counters;

    /**
     * At the end (when the cluster is shut down)
     * we will show the word counters
     */
    @Override
    public void cleanup() {
        System.out.println("-- Word Counter ["+name+"-"+id+"] --");
        for(Map.Entry<String, Integer> entry : counters.entrySet()){
            System.out.println(entry.getKey()+": "+entry.getValue());
        }
    }

    /**
     * On create
     */
    @Override
    public void prepare(Map stormConf, TopologyContext context) {
        this.counters = new HashMap<String, Integer>();
        this.name = context.getThisComponentId();
        this.id = context.getThisTaskId();
    }

    @Override
    public void declareOutputFields(OutputFieldsDeclarer declarer) {}

    @Override
    public void execute(Tuple input, BasicOutputCollector collector) {
        String str = input.getString(0);
        /**
         * If the word doesn't exist in the map we create an entry
         * for it, otherwise we add 1 to its count
         */
        if(!counters.containsKey(str)){
            counters.put(str, 1);
        }else{
            Integer c = counters.get(str) + 1;
            counters.put(str, c);
        }
    }
}

WordNormalizer:

package bolts;

import backtype.storm.topology.BasicOutputCollector;
import backtype.storm.topology.OutputFieldsDeclarer;
import backtype.storm.topology.base.BaseBasicBolt;
import backtype.storm.tuple.Fields;
import backtype.storm.tuple.Tuple;
import backtype.storm.tuple.Values;

public class WordNormalizer extends BaseBasicBolt {

    public void cleanup() {}

    /**
     * The bolt receives a line from the
     * words file and normalizes it.
     *
     * Normalizing means splitting the line into words
     * and converting each word to lower case
     */
    public void execute(Tuple input, BasicOutputCollector collector) {
        String sentence = input.getString(0);
        String[] words = sentence.split(" ");
        for(String word : words){
            word = word.trim();
            if(!word.isEmpty()){
                word = word.toLowerCase();
                collector.emit(new Values(word));
            }
        }
    }

    /**
     * The bolt will only emit the field "word"
     */
    public void declareOutputFields(OutputFieldsDeclarer declarer) {
        declarer.declare(new Fields("word"));
    }
}

TopologyMain:

import spouts.WordReader;
import backtype.storm.Config;
import backtype.storm.LocalCluster;
import backtype.storm.topology.TopologyBuilder;
import backtype.storm.tuple.Fields;
import bolts.WordCounter;
import bolts.WordNormalizer;

public class TopologyMain {
    public static void main(String[] args) throws InterruptedException {

        //Topology definition
        TopologyBuilder builder = new TopologyBuilder();
        builder.setSpout("word-reader",new WordReader());
        builder.setBolt("word-normalizer", new WordNormalizer())
            .shuffleGrouping("word-reader");
        builder.setBolt("word-counter", new WordCounter(), 2)
            .fieldsGrouping("word-normalizer", new Fields("word"));

        //Configuration
        Config conf = new Config();
        conf.put("wordsFile", args[0]);
        conf.setDebug(false);
        //Topology run
        conf.put(Config.TOPOLOGY_MAX_SPOUT_PENDING, 1);
        LocalCluster cluster = new LocalCluster();
        cluster.submitTopology("Getting-Started-Toplogie", conf, builder.createTopology());
        Thread.sleep(1000);
        cluster.shutdown();
    }
}

Project structure:
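
The layout below is the usual Maven structure assumed for this example (TopologyMain sits in the default package, the spout and bolts in their own packages, and the input file name words.txt is just an example):

src/main/java/TopologyMain.java
src/main/java/spouts/WordReader.java
src/main/java/bolts/WordNormalizer.java
src/main/java/bolts/WordCounter.java
src/main/resources/words.txt
pom.xml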

Run:
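
One simple way to run the topology in local mode is through the exec-maven-plugin, passing the words file as the only program argument (the file path here is an assumption matching the layout above):

mvn compile exec:java -Dexec.mainClass="TopologyMain" -Dexec.args="src/main/resources/words.txt"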

Result: the word counts are printed to the console by WordCounter.cleanup() when the local cluster shuts down.

Powerful stream processing!

github:https://github.com/super-d2/strom_demo

