1. Spark on YARN

import org.apache.spark.SparkContext
import org.apache.spark.SparkContext._

/**
 * Created by zzy on 8/27/15.
 */
object SparkOnYarn {
  def main(args: Array[String]) {
    if (args.length != 2) {
      println("args length != 2")
      System.exit(0)
    }

    // On YARN the master and other settings come from spark-submit,
    // so no explicit SparkConf is built here.
    val sc = new SparkContext

    // Word count: read args(0), split on spaces, count, write results to args(1).
    val file = sc.textFile(args(0))
    file.cache
    val tmp = file.flatMap(_.split(" ")).map(line => (line, 1)).reduceByKey(_ + _)
    tmp.saveAsTextFile(args(1))
  }
}
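For reference, here is a variant of the same word count that builds an explicit SparkConf and runs in yarn-client mode. This is a sketch added here, not part of the original post: the object name and app name are illustrative, and it assumes HADOOP_CONF_DIR/YARN_CONF_DIR point at the cluster configuration. In yarn-cluster mode the master must instead be supplied via spark-submit.

import org.apache.spark.{SparkConf, SparkContext}

// Sketch: same word count, but with an explicit SparkConf targeting yarn-client mode (Spark 1.x).
object SparkOnYarnClientSketch {
  def main(args: Array[String]): Unit = {
    val conf = new SparkConf()
      .setMaster("yarn-client")              // illustrative; yarn-cluster gets its master from spark-submit
      .setAppName("SparkOnYarnClientSketch") // hypothetical app name
    val sc = new SparkContext(conf)

    val counts = sc.textFile(args(0))
      .flatMap(_.split(" "))
      .map(word => (word, 1))
      .reduceByKey(_ + _)
    counts.saveAsTextFile(args(1))

    sc.stop()
  }
}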

2. Spark on standalone

import org.apache.spark.SparkConf
import org.apache.spark.SparkContext
import org.apache.spark.SparkContext._

/**
 * Created by zzy on 8/27/15.
 */
object SsdTest {
  def main(args: Array[String]) {
    if (args.length != 2) {
      println("args length != 2")
      System.exit(0)
    }

    // Point the application at the standalone master explicitly.
    val conf = new SparkConf()
    conf.setMaster("spark://192.168.122.213:7077")
    conf.setSparkHome("/usr/local/spark/spark-1.4.1-bin-hadoop2.6")
    conf.setAppName("StandaloneSparktest")
    conf.set("SPARK_EXECUTOR_MEMORY", "1g") // note: the standard conf key is "spark.executor.memory"
    val sc = new SparkContext(conf)

    // Word count: read args(0), split on spaces, count, write results to args(1).
    val file = sc.textFile(args(0))
    file.cache
    val tmp = file.flatMap(_.split(" ")).map(line => (line, 1)).reduceByKey(_ + _)
    tmp.saveAsTextFile(args(1))
  }
}

3. pom file configuration

<repositories>
<repository>
<id>Akka repository</id>
<url>http://repo.akka.io/releases</url>
</repository>
<repository>
<id>cloudera</id>
<url>https://repository.cloudera.com/artifactory/cloudera-repos/.</url>
</repository>
<repository>
<id>jboss</id>
<url>http://repository.jboss.org/nexus/content/groups/public-jboss</url>
</repository>
<repository>
<id>Sonatype snapshots</id>
<url>http://oss.sonatype.org/content/repositories/snapshots/</url>
</repository>
</repositories>
<build>
<sourceDirectory>src/</sourceDirectory>
<testSourceDirectory>src/</testSourceDirectory>
<plugins>
<plugin>
<groupId>org.scala-tools</groupId>
<artifactId>maven-scala-plugin</artifactId>
<executions>
<execution>
<goals>
<goal>compile</goal>
<goal>testCompile</goal>
</goals>
</execution>
</executions>
<configuration>
<scalaVersion>2.10.3</scalaVersion>
</configuration>
</plugin>
<plugin>
<groupId>org.apache.maven.plugins</groupId>
<artifactId>maven-shade-plugin</artifactId>
<version>2.2</version>
<executions>
<execution>
<phase>package</phase>
<goals>
<goal>shade</goal>
</goals>
<configuration>
<filters>
<filter>
<artifact>*:*</artifact>
<excludes>
<exclude>META-INF/*.SF</exclude>
<exclude>META-INF/*.DSA</exclude>
<exclude>META-INF/*.RSA</exclude>
</excludes>
</filter>
</filters>
<transformers>
<transformer implementation="org.apache.maven.plugins.shade.resource.AppendingTransformer">
<resource>reference.conf</resource>
</transformer>
<transformer implementation="org.apache.maven.plugins.shade.resource.ManifestResourceTransformer">
</transformer>
</transformers>
</configuration>
</execution>
</executions>
</plugin>
</plugins>
</build>
<dependencies>
<!--spark-->
<dependency>
<groupId>org.apache.spark</groupId>
<artifactId>spark-core_2.10</artifactId>
<version>1.2.0-cdh5.3.2</version>
</dependency>
<dependency>
<groupId>org.apache.hadoop</groupId>
<artifactId>hadoop-client</artifactId>
<version>2.5.0-cdh5.3.0</version>
</dependency>
<dependency>
<groupId>org.apache.spark</groupId>
<artifactId>spark-tools_2.10</artifactId>
<version>1.1.0-cdh5.2.0-SNAPSHOT</version>
</dependency>
<dependency>
<groupId>org.apache.spark</groupId>
<artifactId>spark-assembly_2.10</artifactId>
<version>1.2.0-cdh5.3.3</version>
</dependency>
<dependency>
<groupId>org.apache.spark</groupId>
<artifactId>spark-repl_2.10</artifactId>
<version>1.2.0-cdh5.3.3</version>
</dependency>
<dependency>
<groupId>org.apache.spark</groupId>
<artifactId>spark-catalyst_2.10</artifactId>
<version>1.2.0-cdh5.3.2</version>
</dependency>
<dependency>
<groupId>org.apache.spark</groupId>
<artifactId>spark-network-common_2.10</artifactId>
<version>1.2.0-cdh5.3.2</version>
</dependency>
<!--spark on yarn-->
<dependency>
<groupId>org.apache.spark</groupId>
<artifactId>spark-yarn_2.10</artifactId>
<version>1.2.0-cdh5.3.3</version>
</dependency>
<dependency>
<groupId>org.apache.spark</groupId>
<artifactId>spark-network-yarn_2.10</artifactId>
<version>1.2.0-cdh5.3.2</version>
</dependency>
<!--spark-sql-->
<dependency>
<groupId>org.apache.spark</groupId>
<artifactId>spark-sql_2.10</artifactId>
<version>1.2.0-cdh5.3.2</version>
</dependency>
<dependency>
<groupId>org.apache.spark</groupId>
<artifactId>spark-hive_2.10</artifactId>
<version>1.2.0-cdh5.3.2</version>
</dependency>
<dependency>
<groupId>org.apache.spark</groupId>
<artifactId>spark-hive-thriftserver_2.10</artifactId>
<version>1.2.0-cdh5.3.3</version>
</dependency>
<!--spark-streaming-->
<dependency>
<groupId>org.apache.spark</groupId>
<artifactId>spark-streaming_2.10</artifactId>
<version>1.2.0-cdh5.3.2</version>
</dependency>
<dependency>
<groupId>org.apache.spark</groupId>
<artifactId>spark-streaming-flume_2.10</artifactId>
<version>1.2.0-cdh5.3.2</version>
</dependency>
<dependency>
<groupId>org.apache.spark</groupId>
<artifactId>spark-streaming-flume-sink_2.10</artifactId>
<version>1.2.0-cdh5.3.2</version>
</dependency>
<dependency>
<groupId>org.apache.spark</groupId>
<artifactId>spark-streaming-kafka_2.10</artifactId>
<version>1.2.0-cdh5.3.2</version>
</dependency>
<dependency>
<groupId>org.apache.spark</groupId>
<artifactId>spark-streaming-kafka-assembly_2.10</artifactId>
<version>1.3.0-cdh5.4.0</version>
</dependency>
</dependencies>
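The shade plugin above appends every reference.conf it finds into a single file in the fat jar; Spark's Akka dependency reads that file at runtime, so without the AppendingTransformer the driver typically fails at startup with a missing akka.version setting. A small hedged check (my addition, not from the original post) that the merged config survived packaging — ConfigFactory comes from the com.typesafe:config artifact that spark-core pulls in transitively:

import com.typesafe.config.ConfigFactory

// Sketch: load the classpath configuration and confirm Akka's reference.conf
// entries are visible, i.e. the shaded jar kept the merged reference.conf.
object ShadedConfigCheck {
  def main(args: Array[String]): Unit = {
    val config = ConfigFactory.load()
    println(s"akka.version present: ${config.hasPath("akka.version")}")
  }
}

Running this from the shaded jar should print true; if the transformer were removed, only one module's reference.conf would survive and Akka-backed code could fail at startup.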

Dedicated to the hardworking you and me!

