modify
parent e0878377f1
commit 458f57759a
pom.xml
@@ -43,6 +43,12 @@
             <artifactId>junit</artifactId>
             <version>4.12</version>
         </dependency>
+        <!--Exception in thread "main" java.lang.ArrayIndexOutOfBoundsException: 10582-->
+        <dependency>
+            <groupId>com.thoughtworks.paranamer</groupId>
+            <artifactId>paranamer</artifactId>
+            <version>2.8</version>
+        </dependency>
     </dependencies>
 
 
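Why this dependency helps, as far as I can tell: Spark's transitive dependency tree can resolve com.thoughtworks.paranamer to a pre-2.8 version (Avro, for one, drags in 2.3), and those older versions fail while reading class files compiled for Java 8, which surfaces exactly as the ArrayIndexOutOfBoundsException quoted in the comment above. Declaring paranamer 2.8 directly forces the newer bytecode reader onto the classpath. An alternative sketch of the same fix, assuming this same pom.xml, is to pin the version for all resolvers via dependencyManagement:

<dependencyManagement>
    <dependencies>
        <!-- Hypothetical pin: keeps transitive resolution from falling back
             to a pre-2.8 paranamer that cannot parse Java 8 bytecode -->
        <dependency>
            <groupId>com.thoughtworks.paranamer</groupId>
            <artifactId>paranamer</artifactId>
            <version>2.8</version>
        </dependency>
    </dependencies>
</dependencyManagement>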
TransformationTest.java
@@ -12,19 +12,27 @@ import java.util.List;
 
 public class TransformationTest {
 
     private static JavaSparkContext sc = null;
 
     @Before
     public void prepare() {
-        SparkConf conf = new SparkConf().setMaster("local[2]").setAppName("TransformationTest");
+        SparkConf conf = new SparkConf().setAppName("TransformationTest").setMaster("local[2]");
         sc = new JavaSparkContext(conf);
     }
 
     @Test
     public void map() {
         List<Integer> list = Arrays.asList(3, 6, 9, 10, 12, 21);
-        sc.parallelize(list).map(x -> x * 10).foreach(System.out::println);
+        /*
+         * Do not use the method reference form System.out::println here, or the following exception is thrown:
+         * org.apache.spark.SparkException: Task not serializable
+         * Caused by: java.io.NotSerializableException: java.io.PrintStream
+         * When an operator such as map or foreach in a Spark program references a class member function or variable, every member of that class must be serializable;
+         * if some member variable is not serializable, the exception above is thrown.
+         */
+        sc.parallelize(list).map(x -> x * 10).foreach(x -> System.out.println(x));
     }
 
 
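To make the comment above concrete: System.out::println is a bound method reference, so the function object Spark must ship to executors captures the java.io.PrintStream instance System.out, and PrintStream is not serializable. The lambda x -> System.out.println(x) captures nothing; it reads the static field System.out on the executor at call time. A minimal, self-contained sketch of the same driver (the class name ClosureSerializationDemo is made up for illustration):

import java.util.Arrays;
import java.util.List;

import org.apache.spark.SparkConf;
import org.apache.spark.api.java.JavaSparkContext;

public class ClosureSerializationDemo {
    public static void main(String[] args) {
        SparkConf conf = new SparkConf().setAppName("ClosureSerializationDemo").setMaster("local[2]");
        // JavaSparkContext is Closeable, so try-with-resources stops it on exit
        try (JavaSparkContext sc = new JavaSparkContext(conf)) {
            List<Integer> list = Arrays.asList(3, 6, 9, 10, 12, 21);

            // Throws SparkException: Task not serializable (caused by
            // NotSerializableException: java.io.PrintStream), because the bound
            // method reference captures the System.out instance:
            // sc.parallelize(list).map(x -> x * 10).foreach(System.out::println);

            // Works: the lambda re-reads System.out on the executor at call time,
            // so the shipped closure contains nothing unserializable:
            sc.parallelize(list).map(x -> x * 10).foreach(x -> System.out.println(x));
        }
    }
}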
Test.scala (deleted)
@@ -1,14 +0,0 @@
-package rdd.scala
-
-import org.apache.spark.{SparkConf, SparkContext}
-
-object Test extends App {
-
-  val conf = new SparkConf().setAppName("TransformationTest123").setMaster("local[2]")
-  val sc = new SparkContext(conf)
-
-  val list = List(3, 6, 9, 10, 12, 21)
-  sc.parallelize(list).map(_ * 10).foreach(println)
-
-}
-
TransformationTest.scala
@@ -1,17 +1,13 @@
 package rdd.scala
 
 import org.apache.spark.{SparkConf, SparkContext}
-import org.junit.{Before, Test}
+import org.junit.{After, Test}
 
-class TransformationTest extends {
+class TransformationTest {
 
-  var sc: SparkContext = _
-
-  @Before
-  def prepare(): Unit = {
-    val conf = new SparkConf().setAppName("TransformationTest").setMaster("local[2]")
-    sc = new SparkContext(conf)
-  }
+  val conf: SparkConf = new SparkConf().setAppName("TransformationTest").setMaster("local[2]")
+  val sc = new SparkContext(conf)
+
 
   @Test
   def map(): Unit = {
@@ -19,5 +15,10 @@ class TransformationTest extends {
     sc.parallelize(list).map(_ * 10).foreach(println)
   }
 
+  @After
+  def destroy(): Unit = {
+    sc.stop()
+  }
+
 
 }
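A note on the Scala change above: JUnit 4 instantiates the test class once per test method, so moving SparkContext creation from @Before into the class body still yields one context per test; what actually changes behavior is the new @After hook, since by default only one SparkContext may be running per JVM, and a suite that never calls sc.stop() fails as soon as a second test tries to create one. A Java counterpart of the same lifecycle, as a sketch (the class name LifecycleSketchTest is made up; the Java test in this commit still has no @After):

import java.util.Arrays;

import org.apache.spark.SparkConf;
import org.apache.spark.api.java.JavaSparkContext;
import org.junit.After;
import org.junit.Before;
import org.junit.Test;

public class LifecycleSketchTest {

    private JavaSparkContext sc;

    @Before
    public void prepare() {
        SparkConf conf = new SparkConf().setAppName("LifecycleSketchTest").setMaster("local[2]");
        sc = new JavaSparkContext(conf);
    }

    @Test
    public void map() {
        sc.parallelize(Arrays.asList(3, 6, 9, 10, 12, 21))
          .map(x -> x * 10)
          .foreach(x -> System.out.println(x));
    }

    @After
    public void destroy() {
        // Releases the context; otherwise the next test's new JavaSparkContext
        // fails, since one JVM allows only one running context by default
        sc.stop();
    }
}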