spring/spring boot+mybatis+phoenix
This commit is contained in:
2
code/spark/spark-base/input/wc.txt
Normal file
2
code/spark/spark-base/input/wc.txt
Normal file
@@ -0,0 +1,2 @@
hadoop,mapreduce,hadoop
spark,spark
BIN
code/spark/spark-base/output/wcResult/._SUCCESS.crc
Normal file
BIN
code/spark/spark-base/output/wcResult/._SUCCESS.crc
Normal file
Binary file not shown.
BIN
code/spark/spark-base/output/wcResult/.part-00000.crc
Normal file
BIN
code/spark/spark-base/output/wcResult/.part-00000.crc
Normal file
Binary file not shown.
BIN
code/spark/spark-base/output/wcResult/.part-00001.crc
Normal file
BIN
code/spark/spark-base/output/wcResult/.part-00001.crc
Normal file
Binary file not shown.
0
code/spark/spark-base/output/wcResult/_SUCCESS
Normal file
0
code/spark/spark-base/output/wcResult/_SUCCESS
Normal file
1
code/spark/spark-base/output/wcResult/part-00000
Normal file
1
code/spark/spark-base/output/wcResult/part-00000
Normal file
@@ -0,0 +1 @@
(mapreduce,1)
2
code/spark/spark-base/output/wcResult/part-00001
Normal file
2
code/spark/spark-base/output/wcResult/part-00001
Normal file
@@ -0,0 +1,2 @@
(spark,2)
(hadoop,2)
46
code/spark/spark-base/pom.xml
Normal file
46
code/spark/spark-base/pom.xml
Normal file
@@ -0,0 +1,46 @@
<?xml version="1.0" encoding="UTF-8"?>
<project xmlns="http://maven.apache.org/POM/4.0.0"
         xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance"
         xsi:schemaLocation="http://maven.apache.org/POM/4.0.0 http://maven.apache.org/xsd/maven-4.0.0.xsd">
    <modelVersion>4.0.0</modelVersion>

    <groupId>com.heibaiying</groupId>
    <artifactId>spark-base</artifactId>
    <version>1.0-SNAPSHOT</version>

    <properties>
        <!-- Scala minor version must line up with the _2.12 suffix of spark-core below -->
        <scala.version>2.12.8</scala.version>
    </properties>

    <build>
        <plugins>
            <!-- Compile Java sources at language level 8 -->
            <plugin>
                <groupId>org.apache.maven.plugins</groupId>
                <artifactId>maven-compiler-plugin</artifactId>
                <configuration>
                    <source>8</source>
                    <target>8</target>
                </configuration>
            </plugin>
        </plugins>
    </build>

    <dependencies>
        <dependency>
            <groupId>org.scala-lang</groupId>
            <artifactId>scala-library</artifactId>
            <version>${scala.version}</version>
        </dependency>
        <dependency>
            <groupId>org.apache.spark</groupId>
            <artifactId>spark-core_2.12</artifactId>
            <version>2.4.0</version>
        </dependency>
        <dependency>
            <groupId>com.thoughtworks.paranamer</groupId>
            <artifactId>paranamer</artifactId>
            <version>2.8</version>
        </dependency>
    </dependencies>

</project>
|
@@ -0,0 +1,16 @@
package com.heibaiying.spark.rdd

import org.apache.spark.{SparkConf, SparkContext}

/**
 * Classic Spark word-count example.
 *
 * Reads `input/wc.txt`, splits each line on commas, counts occurrences of
 * each word, prints the `(word, count)` pairs, and writes them as text files.
 */
object WordCount {

  def main(args: Array[String]): Unit = {
    // local[2]: run in-process with two worker threads (no cluster needed)
    val conf = new SparkConf().setAppName("sparkBase").setMaster("local[2]")
    val sc = new SparkContext(conf)
    try {
      val counts = sc.textFile("input/wc.txt")
        .flatMap(_.split(","))
        .map((_, 1))
        .reduceByKey(_ + _)
      counts.foreach(println)
      // NOTE(review): the committed result files live under output/wcResult/,
      // but this path writes to output/ — confirm which is intended.
      counts.saveAsTextFile("output/")
    } finally {
      // Fix: stop the SparkContext even if the job throws, so local
      // executor threads and temp resources are released.
      sc.stop()
    }
  }
}
|
Reference in New Issue
Block a user