modify
This commit is contained in:
parent
beb52978b7
commit
a94cac32e6
22
README.md
22
README.md
@@ -70,10 +70,28 @@
|
|||||||
|
|
||||||
## 三、Spark
|
## 三、Spark
|
||||||
|
|
||||||
|
**Spark Core :**
|
||||||
|
|
||||||
1. Spark简介
|
1. Spark简介
|
||||||
2. [Spark单机版本环境搭建](https://github.com/heibaiying/BigData-Notes/blob/master/notes/installation/Spark%E5%8D%95%E6%9C%BA%E7%89%88%E6%9C%AC%E7%8E%AF%E5%A2%83%E6%90%AD%E5%BB%BA.md)
|
2. [Spark单机版本环境搭建](https://github.com/heibaiying/BigData-Notes/blob/master/notes/installation/Spark%E5%8D%95%E6%9C%BA%E7%89%88%E6%9C%AC%E7%8E%AF%E5%A2%83%E6%90%AD%E5%BB%BA.md)
|
||||||
3. RDD详解
|
3. 弹性式数据集RDD
|
||||||
4. Spark Transformation 和 Action
|
4. Spark命令行的基本使用
|
||||||
|
5. RDD常用算子之——Transformation Action
|
||||||
|
6. RDD常用算子之——Action
|
||||||
|
7. Spark广播变量与累加器
|
||||||
|
|
||||||
|
**Spark SQL :**
|
||||||
|
|
||||||
|
1. DataFrame和DateSet
|
||||||
|
2. Spark SQL之常用SQL语句
|
||||||
|
3. External Data Source
|
||||||
|
|
||||||
|
**Spark Streaming :**
|
||||||
|
|
||||||
|
1. Spark Streaming简介
|
||||||
|
2. DStream常用函数
|
||||||
|
3. Spark Streaming 整合 flume
|
||||||
|
4. Spark Streaming 整合 kafka
|
||||||
|
|
||||||
## 四、Flink
|
## 四、Flink
|
||||||
|
|
||||||
|
Binary file not shown.
Binary file not shown.
Binary file not shown.
@ -1 +0,0 @@
|
|||||||
(mapreduce,1)
|
|
@ -1,2 +0,0 @@
|
|||||||
(spark,2)
|
|
||||||
(hadoop,2)
|
|
@@ -3,14 +3,12 @@ package com.heibaiying.spark.rdd
|
|||||||
import org.apache.spark.{SparkConf, SparkContext}
|
import org.apache.spark.{SparkConf, SparkContext}
|
||||||
|
|
||||||
|
|
||||||
/**
 * Classic Spark word count: reads comma-separated words from `input/wc.txt`,
 * counts occurrences per word, prints each `(word, count)` pair and writes
 * the result to `output/`.
 *
 * Uses an explicit `main` rather than `extends App`: the `App` trait relies on
 * delayed initialization, which has well-known pitfalls (fields may be null when
 * accessed from closures shipped to executors) and is discouraged for entry points.
 */
object WordCount {

  def main(args: Array[String]): Unit = {
    // local[2]: run locally with 2 worker threads — suitable for this demo only.
    val conf = new SparkConf().setAppName("sparkBase").setMaster("local[2]")
    val sc = new SparkContext(conf)
    try {
      val counts = sc.textFile("input/wc.txt")
        .flatMap(_.split(","))
        .map((_, 1))
        .reduceByKey(_ + _)

      // NOTE(review): foreach(println) prints on executors; fine for local mode,
      // but output is not collected to the driver in a cluster deployment.
      counts.foreach(println)

      // Fails if `output/` already exists — Hadoop output-path semantics.
      counts.saveAsTextFile("output/")
    } finally {
      // Always release the SparkContext, even if the job throws,
      // so the local cluster threads and UI port are freed.
      sc.stop()
    }
  }
}
|
2
notes/Spark-RDD.md
Normal file
2
notes/Spark-RDD.md
Normal file
@@ -0,0 +1,2 @@
|
|||||||
|
弹性式数据集RDD
|
||||||
|
|
Loading…
x
Reference in New Issue
Block a user