/*
 * Licensed to the Apache Software Foundation (ASF) under one
 * or more contributor license agreements.  See the NOTICE file
 * distributed with this work for additional information
 * regarding copyright ownership.  The ASF licenses this file
 * to you under the Apache License, Version 2.0 (the
 * "License"); you may not use this file except in compliance
 * with the License.  You may obtain a copy of the License at
 *
 *     http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

package com.heibaiying

import org.apache.flink.api.scala._

/**
 * Skeleton for a Flink Batch Job.
 *
 * For a tutorial on how to write a Flink batch application, check the
 * tutorials and examples on the Flink website.
 *
 * To package your application into a JAR file for execution,
 * change the main class in the POM.xml file to this class (simply search for 'mainClass')
 * and run 'mvn clean package' on the command line.
 */
object BatchJob {

  def main(args: Array[String]): Unit = {
    // set up the batch execution environment
    val env = ExecutionEnvironment.getExecutionEnvironment

    /*
     * Here, you can start creating your execution plan for Flink.
     *
     * Start with getting some data from the environment, like
     *   env.readTextFile(textPath)
     *
     * then, transform the resulting DataSet[String] using operations
     * like
     *   .filter()
     *   .flatMap()
     *   .join()
     *   .group()
     *
     * and many more.
     * Have a look at the programming guide:
     *
     * http://flink.apache.org/docs/latest/apis/batch/index.html
     *
     * and the examples
     *
     * http://flink.apache.org/docs/latest/apis/batch/examples.html
     */

    // execute program (env.execute() will fail if no sinks have been defined above)
    env.execute("Flink Batch Scala API Skeleton")
  }
}
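
/*
 * A minimal, hypothetical sketch of the execution plan described in the
 * comments above: a classic word count over a text file. It reuses the
 * org.apache.flink.api.scala._ import at the top of this file. The object
 * name and the input path (taken here from args(0)) are assumptions for
 * illustration, not part of the original skeleton. Note that print() acts
 * as a sink and triggers execution itself, so no env.execute() is needed.
 */
object WordCountSketch {

  def main(args: Array[String]): Unit = {
    val env = ExecutionEnvironment.getExecutionEnvironment

    // read the input file into a DataSet[String], one element per line
    val text = env.readTextFile(args(0))

    val counts = text
      // split each line into lowercase words, dropping empty tokens
      .flatMap(_.toLowerCase.split("\\W+").filter(_.nonEmpty))
      // pair each word with an initial count of 1
      .map((_, 1))
      // group by the word (tuple field 0) and sum the counts (field 1)
      .groupBy(0)
      .sum(1)

    // print() is a sink; it executes the plan and writes the result to stdout
    counts.print()
  }
}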