1. Create the Maven project:

pom.xml:

<project xmlns="http://maven.apache.org/POM/4.0.0"
         xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance"
         xsi:schemaLocation="http://maven.apache.org/POM/4.0.0
         http://maven.apache.org/xsd/maven-4.0.0.xsd">

  <modelVersion>4.0.0</modelVersion>
  <groupId>storm.book</groupId>
  <artifactId>Getting-Started</artifactId>
  <version>0.0.1-SNAPSHOT</version>

  <build>
   <plugins>
      <plugin>
        <groupId>org.apache.maven.plugins</groupId>
        <artifactId>maven-compiler-plugin</artifactId>
        <version>2.3.2</version>
        <configuration>
          <source>1.6</source>
          <target>1.6</target>
          <compilerVersion>1.6</compilerVersion>
        </configuration>
      </plugin>
     </plugins>
  </build>

  <repositories>

        <!-- Repository where the Storm dependencies can be found -->
        <repository>
            <id>clojars.org</id>
            <url>http://clojars.org/repo</url>
        </repository>

  </repositories>

  <dependencies>

        <!-- Storm Dependency -->
        <dependency>
          <groupId>storm</groupId>
          <artifactId>storm</artifactId>
          <version>0.7.1</version>
       </dependency>

  </dependencies>

</project>

Related classes:

WordReader:

package spouts;

import java.io.BufferedReader;
import java.io.FileNotFoundException;
import java.io.FileReader;
import java.util.Map;
import backtype.storm.spout.SpoutOutputCollector;
import backtype.storm.task.TopologyContext;
import backtype.storm.topology.OutputFieldsDeclarer;
import backtype.storm.topology.base.BaseRichSpout;
import backtype.storm.tuple.Fields;
import backtype.storm.tuple.Values;

public class WordReader extends BaseRichSpout {

    private SpoutOutputCollector collector;
    private FileReader fileReader;
    private boolean completed = false;
    public void ack(Object msgId) {
        System.out.println("OK:"+msgId);
    }
    public void close() {}
    public void fail(Object msgId) {
        System.out.println("FAIL:"+msgId);
    }

    /**
     * The only thing this method does is emit each line
     * of the file
     */
    public void nextTuple() {
        /**
         * nextTuple() is called in an infinite loop, so once the file
         * has been read we just wait for a while and then return
         */
        if(completed){
            try {
                Thread.sleep(1000);
            } catch (InterruptedException e) {
                //Do nothing
            }
            return;
        }
        String str;
        //Open the reader
        BufferedReader reader = new BufferedReader(fileReader);
        try{
            //Read all lines
            while((str = reader.readLine()) != null){
                /**
                 * Emit a new tuple for each line, using the line itself
                 * as the message id
                 */
                this.collector.emit(new Values(str),str);
            }
        }catch(Exception e){
            throw new RuntimeException("Error reading tuple",e);
        }finally{
            completed = true;
        }
    }

    /**
     * We will create the file and get the collector object
     */
    public void open(Map conf, TopologyContext context,
            SpoutOutputCollector collector) {
        try {
            this.fileReader = new FileReader(conf.get("wordsFile").toString());
        } catch (FileNotFoundException e) {
            throw new RuntimeException("Error reading file ["+conf.get("wordFile")+"]");
        }
        this.collector = collector;
    }

    /**
     * Declare the output field "line"
     */
    public void declareOutputFields(OutputFieldsDeclarer declarer) {
        declarer.declare(new Fields("line"));
    }
}

WordCounter:

package bolts;

import java.util.HashMap;
import java.util.Map;

import backtype.storm.task.TopologyContext;
import backtype.storm.topology.BasicOutputCollector;
import backtype.storm.topology.OutputFieldsDeclarer;
import backtype.storm.topology.base.BaseBasicBolt;
import backtype.storm.tuple.Tuple;

public class WordCounter extends BaseBasicBolt {

    Integer id;
    String name;
    Map<String, Integer> counters;

    /**
     * At the end of the topology (when the cluster is shut down)
     * we print the word counters
     */
    @Override
    public void cleanup() {
        System.out.println("-- Word Counter ["+name+"-"+id+"] --");
        for(Map.Entry<String, Integer> entry : counters.entrySet()){
            System.out.println(entry.getKey()+": "+entry.getValue());
        }
    }

    /**
     * On create
     */
    @Override
    public void prepare(Map stormConf, TopologyContext context) {
        this.counters = new HashMap<String, Integer>();
        this.name = context.getThisComponentId();
        this.id = context.getThisTaskId();
    }

    @Override
    public void declareOutputFields(OutputFieldsDeclarer declarer) {}

    @Override
    public void execute(Tuple input, BasicOutputCollector collector) {
        String str = input.getString(0);
        /**
         * If the word doesn't exist in the map we create an entry for it,
         * otherwise we add 1 to its count
         */
        if(!counters.containsKey(str)){
            counters.put(str, 1);
        }else{
            Integer c = counters.get(str) + 1;
            counters.put(str, c);
        }
    }
}

WordNormalizer:

package bolts;

import backtype.storm.topology.BasicOutputCollector;
import backtype.storm.topology.OutputFieldsDeclarer;
import backtype.storm.topology.base.BaseBasicBolt;
import backtype.storm.tuple.Fields;
import backtype.storm.tuple.Tuple;
import backtype.storm.tuple.Values;

public class WordNormalizer extends BaseBasicBolt {

    public void cleanup() {}

    /**
     * The bolt receives a line from the words file and
     * processes it to normalize the line.
     *
     * Normalizing means putting every word in lower case
     * and splitting the line to get all the words in it.
     */
    public void execute(Tuple input, BasicOutputCollector collector) {
        String sentence = input.getString(0);
        String[] words = sentence.split(" ");
        for(String word : words){
            word = word.trim();
            if(!word.isEmpty()){
                word = word.toLowerCase();
                collector.emit(new Values(word));
            }
        }
    }

    /**
     * The bolt will only emit the field "word"
     */
    public void declareOutputFields(OutputFieldsDeclarer declarer) {
        declarer.declare(new Fields("word"));
    }
}

TopologyMain:

import spouts.WordReader;
import backtype.storm.Config;
import backtype.storm.LocalCluster;
import backtype.storm.topology.TopologyBuilder;
import backtype.storm.tuple.Fields;
import bolts.WordCounter;
import bolts.WordNormalizer;

public class TopologyMain {
    public static void main(String[] args) throws InterruptedException {

        //Topology definition
        TopologyBuilder builder = new TopologyBuilder();
        builder.setSpout("word-reader",new WordReader());
        builder.setBolt("word-normalizer", new WordNormalizer())
            .shuffleGrouping("word-reader");
        builder.setBolt("word-counter", new WordCounter())
            .fieldsGrouping("word-normalizer", new Fields("word"));

        //Configuration
        Config conf = new Config();
        conf.put("wordsFile", args[0]);
        conf.setDebug(false);
        //Topology run
        conf.put(Config.TOPOLOGY_MAX_SPOUT_PENDING, 1);
        LocalCluster cluster = new LocalCluster();
        cluster.submitTopology("Getting-Started-Toplogie", conf, builder.createTopology());
        Thread.sleep(1000);
        cluster.shutdown();
    }
}
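
A sample input file: the spout reads whatever file is passed as the first program argument and stored under the "wordsFile" key. The path src/main/resources/words.txt and the words themselves are only an illustration:

storm test are great
is an storm simple application
but very powerful
really StOrm is great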

Project structure:
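
Going by the package declarations above, the sources follow the standard Maven layout (TopologyMain lives in the default package; the location of words.txt is just an assumption):

Getting-Started/
├── pom.xml
└── src
    └── main
        ├── java
        │   ├── TopologyMain.java
        │   ├── bolts
        │   │   ├── WordCounter.java
        │   │   └── WordNormalizer.java
        │   └── spouts
        │       └── WordReader.java
        └── resources
            └── words.txt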

Run:
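
One way to run the topology locally is through the exec-maven-plugin; this is a sketch that assumes the words file path shown above:

mvn compile exec:java -Dexec.mainClass=TopologyMain -Dexec.args="src/main/resources/words.txt"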

Result:
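
When the local cluster shuts down, WordCounter.cleanup() prints the accumulated counts. With the sample file above the output looks roughly like this (the task id and the order of the entries will vary):

-- Word Counter [word-counter-2] --
application: 1
is: 2
great: 2
are: 1
really: 1
but: 1
...
storm: 3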

Powerful stream processing!

github:https://github.com/super-d2/strom_demo
