flink datasource
@@ -1,54 +0,0 @@
/*
 * Licensed to the Apache Software Foundation (ASF) under one
 * or more contributor license agreements.  See the NOTICE file
 * distributed with this work for additional information
 * regarding copyright ownership.  The ASF licenses this file
 * to you under the Apache License, Version 2.0 (the
 * "License"); you may not use this file except in compliance
 * with the License.  You may obtain a copy of the License at
 *
 *     http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

package com.heibaiying;

import org.apache.flink.api.common.functions.FlatMapFunction;
import org.apache.flink.api.common.functions.MapFunction;
import org.apache.flink.api.common.functions.ReduceFunction;
import org.apache.flink.api.java.ExecutionEnvironment;
import org.apache.flink.api.java.operators.DataSource;
import org.apache.flink.api.java.operators.FlatMapOperator;
import org.apache.flink.api.java.tuple.Tuple2;
import org.apache.flink.streaming.api.datastream.DataStream;
import org.apache.flink.streaming.api.environment.StreamExecutionEnvironment;
import org.apache.flink.util.Collector;

import java.util.Arrays;
import java.util.List;

/**
 * Skeleton for a Flink Batch Job.
 *
 * <p>For a tutorial how to write a Flink batch application, check the
 * tutorials and examples on the <a href="http://flink.apache.org/docs/stable/">Flink Website</a>.
 *
 * <p>To package your application into a JAR file for execution,
 * change the main class in the POM.xml file to this class (simply search for 'mainClass')
 * and run 'mvn clean package' on the command line.
 */
public class BatchJob {

    public static void main(String[] args) throws Exception {

        // set up the batch execution environment
        final ExecutionEnvironment env = ExecutionEnvironment.getExecutionEnvironment();

        env.execute("Flink Batch Java API Skeleton");
    }
}
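The deleted BatchJob.java above is the unmodified Maven-archetype skeleton: its main method only creates an ExecutionEnvironment and calls execute, and none of the imported operators are used. Below is a minimal sketch of how such a skeleton is usually filled in, using only classes the deleted file already imported (ExecutionEnvironment, DataSource, FlatMapFunction, Tuple2, Collector); the class name and the in-memory sample lines are illustrative assumptions, not part of this commit.

import org.apache.flink.api.common.functions.FlatMapFunction;
import org.apache.flink.api.java.ExecutionEnvironment;
import org.apache.flink.api.java.operators.DataSource;
import org.apache.flink.api.java.tuple.Tuple2;
import org.apache.flink.util.Collector;

// Hypothetical sketch: fills in the archetype skeleton with a classic word count
// over an in-memory batch data source.
public class BatchWordCountSketch {

    public static void main(String[] args) throws Exception {
        final ExecutionEnvironment env = ExecutionEnvironment.getExecutionEnvironment();

        // Built-in batch data source: an in-memory collection of lines (hypothetical sample data).
        DataSource<String> lines = env.fromElements("flink datasource", "flink batch job");

        lines.flatMap(new FlatMapFunction<String, Tuple2<String, Integer>>() {
                    @Override
                    public void flatMap(String line, Collector<Tuple2<String, Integer>> out) {
                        for (String word : line.split(" ")) {
                            out.collect(new Tuple2<>(word, 1));
                        }
                    }
                })
                .groupBy(0)   // group by the word field of the Tuple2
                .sum(1)       // sum the count field
                .print();     // print() triggers execution in the DataSet API, so env.execute() is not needed here
    }
}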
@@ -1,56 +1,18 @@
/*
 * Licensed to the Apache Software Foundation (ASF) under one
 * or more contributor license agreements.  See the NOTICE file
 * distributed with this work for additional information
 * regarding copyright ownership.  The ASF licenses this file
 * to you under the Apache License, Version 2.0 (the
 * "License"); you may not use this file except in compliance
 * with the License.  You may obtain a copy of the License at
 *
 *     http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

package com.heibaiying;

import org.apache.flink.api.common.functions.*;
import org.apache.flink.api.java.operators.DataSource;
import org.apache.flink.api.java.tuple.Tuple;
import org.apache.flink.api.java.tuple.Tuple2;
import org.apache.flink.api.java.tuple.Tuple3;
import org.apache.flink.streaming.api.collector.selector.OutputSelector;
import org.apache.flink.streaming.api.datastream.*;
import org.apache.flink.streaming.api.datastream.DataStreamSource;
import org.apache.flink.streaming.api.environment.StreamExecutionEnvironment;
import org.apache.flink.streaming.api.functions.co.CoMapFunction;
import org.apache.flink.util.Collector;

import java.util.ArrayList;
import java.util.Arrays;
import java.util.List;

/**
 * Skeleton for a Flink Streaming Job.
 *
 * <p>For a tutorial how to write a Flink streaming application, check the
 * tutorials and examples on the <a href="http://flink.apache.org/docs/stable/">Flink Website</a>.
 *
 * <p>To package your application into a JAR file for execution, run
 * 'mvn clean package' on the command line.
 *
 * <p>If you change the name of the main class (with the public static void main(String[] args))
 * method, change the respective entry in the POM.xml file (simply search for 'mainClass').
 */
public class StreamingJob {

    public static void main(String[] args) throws Exception {
        // set up the streaming execution environment
        final StreamExecutionEnvironment env = StreamExecutionEnvironment.getExecutionEnvironment();
    private static final String ROOT_PATH = "D:\\BigData-Notes\\code\\Flink\\flink-basis-java\\src\\main\\resources\\";

    public static void main(String[] args) throws Exception {

        final StreamExecutionEnvironment env = StreamExecutionEnvironment.getExecutionEnvironment();
        DataStreamSource<String> streamSource = env.readTextFile(ROOT_PATH + "log4j.properties");
        streamSource.writeAsText(ROOT_PATH + "out").setParallelism(1);
        env.execute();

    }
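The net effect of the diff above is that StreamingJob is no longer the archetype skeleton: it now builds a DataStream from a local text file with env.readTextFile(ROOT_PATH + "log4j.properties") and writes it back out with writeAsText at parallelism 1. A self-contained sketch of the same file-source pattern follows; the relative paths, the job name, and the alternative sources mentioned in the comments are illustrative assumptions rather than part of the commit.

import org.apache.flink.streaming.api.datastream.DataStreamSource;
import org.apache.flink.streaming.api.environment.StreamExecutionEnvironment;

// Hypothetical sketch of the file-source pattern used in the commit, with
// relative paths instead of the absolute Windows ROOT_PATH.
public class FileSourceSketch {

    public static void main(String[] args) throws Exception {
        final StreamExecutionEnvironment env = StreamExecutionEnvironment.getExecutionEnvironment();

        // Built-in file source: every line of the file becomes one String element of the stream.
        DataStreamSource<String> streamSource = env.readTextFile("src/main/resources/log4j.properties");

        // Text-file sink; parallelism 1 writes a single output file instead of one file per subtask.
        streamSource.writeAsText("src/main/resources/out").setParallelism(1);

        // Other built-in sources follow the same shape, for example:
        //   env.fromElements("a", "b", "c");
        //   env.socketTextStream("localhost", 9999);

        env.execute("flink datasource sketch");
    }
}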