Posted to commits@flink.apache.org by se...@apache.org on 2014/07/12 14:48:36 UTC

[57/73] [abbrv] prefix all projects in addons and quickstarts with flink-

http://git-wip-us.apache.org/repos/asf/incubator-flink/blob/4771efc2/flink-quickstart/quickstart-scala/src/main/resources/archetype-resources/pom.xml
----------------------------------------------------------------------
diff --git a/flink-quickstart/quickstart-scala/src/main/resources/archetype-resources/pom.xml b/flink-quickstart/quickstart-scala/src/main/resources/archetype-resources/pom.xml
deleted file mode 100644
index 0a2589b..0000000
--- a/flink-quickstart/quickstart-scala/src/main/resources/archetype-resources/pom.xml
+++ /dev/null
@@ -1,140 +0,0 @@
-<project xmlns="http://maven.apache.org/POM/4.0.0" xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance"
-  xsi:schemaLocation="http://maven.apache.org/POM/4.0.0 http://maven.apache.org/xsd/maven-4.0.0.xsd">
-  <modelVersion>4.0.0</modelVersion>
-
-  <groupId>${groupId}</groupId>
-  <artifactId>${artifactId}</artifactId>
-  <version>${version}</version>
-  <packaging>jar</packaging>
-
-  <name>Your Job's Name</name>
-  <url>http://www.myorganization.org</url>
-
-
-  <properties>
-    <project.build.sourceEncoding>UTF-8</project.build.sourceEncoding>
-  </properties>
-
-  <!--  These two dependencies are the minimum needed to use and develop Flink
-        with the Scala API. Add further Flink modules here as your job needs them.
-  -->
-  <dependencies>
-    <dependency>
-      <groupId>org.apache.flink</groupId>
-      <artifactId>flink-scala</artifactId>
-      <version>0.6-incubating-SNAPSHOT</version>
-    </dependency>
-    <dependency>
-      <groupId>org.apache.flink</groupId>
-      <artifactId>flink-clients</artifactId>
-      <version>0.6-incubating-SNAPSHOT</version>
-    </dependency>
-  </dependencies>
-
-  <!--  We use the maven-jar-plugin to generate a runnable jar that you can
-        submit to your Flink cluster.
-  -->
-  <build>
-    <plugins>
-      <plugin>
-        <groupId>org.apache.maven.plugins</groupId>
-        <artifactId>maven-jar-plugin</artifactId>
-        <version>2.4</version>
-        <configuration>
-          <archive>
-            <manifestEntries>
-              <program-class>${package}.Job</program-class>
-            </manifestEntries>
-          </archive>
-        </configuration>
-      </plugin>
-
-      <plugin>
-        <groupId>org.apache.maven.plugins</groupId>
-        <artifactId>maven-compiler-plugin</artifactId>
-        <version>3.1</version>
-        <configuration>
-          <source>1.6</source>
-          <target>1.6</target>
-        </configuration>
-      </plugin>
-      <plugin>
-        <groupId>net.alchim31.maven</groupId>
-        <artifactId>scala-maven-plugin</artifactId>
-        <version>3.1.4</version>
-        <executions>
-          <execution>
-            <goals>
-              <goal>compile</goal>
-              <goal>testCompile</goal>
-            </goals>
-          </execution>
-        </executions>
-      </plugin>
-
-      <!-- Eclipse Integration -->
-      <plugin>
-        <groupId>org.apache.maven.plugins</groupId>
-        <artifactId>maven-eclipse-plugin</artifactId>
-        <version>2.8</version>
-        <configuration>
-          <downloadSources>true</downloadSources>
-          <projectnatures>
-            <projectnature>org.scala-ide.sdt.core.scalanature</projectnature>
-            <projectnature>org.eclipse.jdt.core.javanature</projectnature>
-          </projectnatures>
-          <buildcommands>
-            <buildcommand>org.scala-ide.sdt.core.scalabuilder</buildcommand>
-          </buildcommands>
-          <classpathContainers>
-            <classpathContainer>org.scala-ide.sdt.launching.SCALA_CONTAINER</classpathContainer>
-            <classpathContainer>org.eclipse.jdt.launching.JRE_CONTAINER</classpathContainer>
-          </classpathContainers>
-          <excludes>
-            <exclude>org.scala-lang:scala-library</exclude>
-            <exclude>org.scala-lang:scala-compiler</exclude>
-          </excludes>
-          <sourceIncludes>
-            <sourceInclude>**/*.scala</sourceInclude>
-            <sourceInclude>**/*.java</sourceInclude>
-          </sourceIncludes>
-        </configuration>
-      </plugin>
-
-      <!-- Adding scala source directories to build path -->
-      <plugin>
-        <groupId>org.codehaus.mojo</groupId>
-        <artifactId>build-helper-maven-plugin</artifactId>
-        <version>1.7</version>
-        <executions>
-          <!-- Add src/main/scala to eclipse build path -->
-          <execution>
-            <id>add-source</id>
-            <phase>generate-sources</phase>
-            <goals>
-              <goal>add-source</goal>
-            </goals>
-            <configuration>
-              <sources>
-                <source>src/main/scala</source>
-              </sources>
-            </configuration>
-          </execution>
-          <!-- Add src/test/scala to eclipse build path -->
-          <execution>
-            <id>add-test-source</id>
-            <phase>generate-test-sources</phase>
-            <goals>
-              <goal>add-test-source</goal>
-            </goals>
-            <configuration>
-              <sources>
-                <source>src/test/scala</source>
-              </sources>
-            </configuration>
-          </execution>
-        </executions>
-      </plugin>
-    </plugins>
-  </build>
-</project>
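
Note: this pom.xml lives under archetype-resources, so it is never built directly; Maven instantiates it when a user generates a quickstart project from the archetype. A minimal sketch of that step, assuming the archetype keeps the renamed coordinates org.apache.flink:flink-quickstart-scala introduced by this commit and is available from your local or snapshot repository:

  mvn archetype:generate                            \
    -DarchetypeGroupId=org.apache.flink             \
    -DarchetypeArtifactId=flink-quickstart-scala    \
    -DarchetypeVersion=0.6-incubating-SNAPSHOT

Maven then prompts for groupId, artifactId, version and package and substitutes them for the ${groupId}, ${artifactId}, ${version} and ${package} placeholders seen above.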

http://git-wip-us.apache.org/repos/asf/incubator-flink/blob/4771efc2/flink-quickstart/quickstart-scala/src/main/resources/archetype-resources/src/main/scala/Job.scala
----------------------------------------------------------------------
diff --git a/flink-quickstart/quickstart-scala/src/main/resources/archetype-resources/src/main/scala/Job.scala b/flink-quickstart/quickstart-scala/src/main/resources/archetype-resources/src/main/scala/Job.scala
deleted file mode 100644
index b179243..0000000
--- a/flink-quickstart/quickstart-scala/src/main/resources/archetype-resources/src/main/scala/Job.scala
+++ /dev/null
@@ -1,91 +0,0 @@
-package ${package};
-
-
-import org.apache.flink.api.common.Program
-import org.apache.flink.api.common.ProgramDescription
-import org.apache.flink.client.LocalExecutor
-import org.apache.flink.api.scala.TextFile
-import org.apache.flink.api.scala.ScalaPlan
-import org.apache.flink.api.scala._
-import org.apache.flink.api.scala.operators._
-import org.apache.flink.client.RemoteExecutor
-
-// You can run this locally using:
-// mvn exec:exec -Dexec.executable="java" -Dexec.args="-cp %classpath ${package}.RunJobLocal 2 file:///some/path file:///some/other/path"
-object RunJobLocal {
-  def main(args: Array[String]) {
-    val job = new Job
-    if (args.size < 3) {
-      println(job.getDescription)
-      return
-    }
-    val plan = job.getScalaPlan(args(0).toInt, args(1), args(2))
-    LocalExecutor.execute(plan)
-    System.exit(0)
-  }
-}
-
-// You can run this on a cluster using:
-// mvn exec:exec -Dexec.executable="java" -Dexec.args="-cp %classpath ${package}.RunJobRemote 2 file:///some/path file:///some/other/path"
-object RunJobRemote {
-  def main(args: Array[String]) {
-    val job = new Job
-    if (args.size < 3) {
-      println(job.getDescription)
-      return
-    }
-    val plan = job.getScalaPlan(args(0).toInt, args(1), args(2))
-    // This will create an executor to run the plan on a cluster. We assume
-    // that the JobManager is running on the local machine on the default
-    // port. Change this according to your configuration.
-    // You will also need to change the name of the jar if you change the
-    // project name and/or version. Before running this you also need
-    // to run "mvn package" to create the jar.
-    val ex = new RemoteExecutor("localhost", 6123, "target/flink-project-0.1-SNAPSHOT.jar")
-    ex.executePlan(plan)
-  }
-}
-
-
-/**
- * This is an outline for a Flink Scala job. It is actually the WordCount
- * example from the Flink distribution.
- *
- * You can run it out of your IDE using the main() method of RunJobLocal.
- * This will use the LocalExecutor to start a small Flink instance
- * from within your IDE.
- *
- * You can also generate a .jar file that you can submit on your Flink
- * cluster.
- * Just type
- *      mvn clean package
- * in the project's root directory.
- * You will find the jar in
- *      target/flink-quickstart-0.1-SNAPSHOT-Sample.jar
- *
- */
-class Job extends Program with ProgramDescription with Serializable {
-  override def getDescription() = {
-    "Parameters: [numSubStasks] [input] [output]"
-  }
-  override def getPlan(args: String*) = {
-    getScalaPlan(args(0).toInt, args(1), args(2))
-  }
-
-  def formatOutput = (word: String, count: Int) => "%s %d".format(word, count)
-
-  def getScalaPlan(numSubTasks: Int, textInput: String, wordsOutput: String) = {
-    val input = TextFile(textInput)
-
-    val words = input flatMap { _.toLowerCase().split("""\W+""") filter { _ != "" } map { (_, 1) } }
-    val counts = words groupBy { case (word, _) => word } reduce { (w1, w2) => (w1._1, w1._2 + w2._2) }
-
-    counts neglects { case (word, _) => word }
-    counts preserves({ case (word, _) => word }, { case (word, _) => word })
-    val output = counts.write(wordsOutput, DelimitedOutputFormat(formatOutput.tupled))
-
-    val plan = new ScalaPlan(Seq(output), "Word Count (immutable)")
-    plan.setDefaultParallelism(numSubTasks)
-    plan
-  }
-}
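
Note: as the comments above describe, the maven-jar-plugin section of the generated pom writes a program-class entry (${package}.Job) into the jar manifest, so the packaged jar can be handed to a Flink cluster without naming the entry class on the command line. A rough sketch of that workflow, with an illustrative jar name (the real name follows the artifactId and version chosen when generating the project) and assuming the bin/flink CLI script of a 0.6-incubating distribution:

  mvn clean package
  ./bin/flink run target/your-job-0.1-SNAPSHOT.jar 2 file:///some/input file:///some/output

The three trailing arguments match Job.getDescription above: the parallelism, the input path, and the output path.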