You are viewing a plain text version of this content. The canonical link for it is here.
Posted to commits@beam.apache.org by ta...@apache.org on 2017/08/15 18:41:56 UTC
[1/5] beam git commit: [BEAM-2740] Hide BeamSqlEnv.
Repository: beam
Updated Branches:
refs/heads/DSL_SQL 9eec6a030 -> a1cc5518e
http://git-wip-us.apache.org/repos/asf/beam/blob/49aad927/sdks/java/extensions/sql/src/test/java/org/apache/beam/sdk/extensions/sql/schema/BeamSqlRowCoderTest.java
----------------------------------------------------------------------
diff --git a/sdks/java/extensions/sql/src/test/java/org/apache/beam/sdk/extensions/sql/schema/BeamSqlRowCoderTest.java b/sdks/java/extensions/sql/src/test/java/org/apache/beam/sdk/extensions/sql/schema/BeamSqlRowCoderTest.java
deleted file mode 100644
index 8751bbb..0000000
--- a/sdks/java/extensions/sql/src/test/java/org/apache/beam/sdk/extensions/sql/schema/BeamSqlRowCoderTest.java
+++ /dev/null
@@ -1,76 +0,0 @@
-/*
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements. See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership. The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License. You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-package org.apache.beam.sdk.extensions.sql.schema;
-
-import java.math.BigDecimal;
-import java.util.Date;
-import java.util.GregorianCalendar;
-import org.apache.beam.sdk.coders.BeamRecordCoder;
-import org.apache.beam.sdk.extensions.sql.impl.utils.CalciteUtils;
-import org.apache.beam.sdk.testing.CoderProperties;
-import org.apache.beam.sdk.values.BeamRecord;
-import org.apache.calcite.jdbc.JavaTypeFactoryImpl;
-import org.apache.calcite.rel.type.RelDataType;
-import org.apache.calcite.rel.type.RelDataTypeFactory;
-import org.apache.calcite.rel.type.RelDataTypeSystem;
-import org.apache.calcite.rel.type.RelProtoDataType;
-import org.apache.calcite.sql.type.SqlTypeName;
-import org.junit.Test;
-
-/**
- * Tests for BeamSqlRowCoder.
- */
-public class BeamSqlRowCoderTest {
-
- @Test
- public void encodeAndDecode() throws Exception {
- final RelProtoDataType protoRowType = new RelProtoDataType() {
- @Override
- public RelDataType apply(RelDataTypeFactory a0) {
- return a0.builder()
- .add("col_tinyint", SqlTypeName.TINYINT)
- .add("col_smallint", SqlTypeName.SMALLINT)
- .add("col_integer", SqlTypeName.INTEGER)
- .add("col_bigint", SqlTypeName.BIGINT)
- .add("col_float", SqlTypeName.FLOAT)
- .add("col_double", SqlTypeName.DOUBLE)
- .add("col_decimal", SqlTypeName.DECIMAL)
- .add("col_string_varchar", SqlTypeName.VARCHAR)
- .add("col_time", SqlTypeName.TIME)
- .add("col_timestamp", SqlTypeName.TIMESTAMP)
- .add("col_boolean", SqlTypeName.BOOLEAN)
- .build();
- }
- };
-
- BeamRecordSqlType beamSQLRowType = CalciteUtils.toBeamRowType(
- protoRowType.apply(new JavaTypeFactoryImpl(
- RelDataTypeSystem.DEFAULT)));
-
- GregorianCalendar calendar = new GregorianCalendar();
- calendar.setTime(new Date());
- BeamRecord row = new BeamRecord(beamSQLRowType
- , Byte.valueOf("1"), Short.valueOf("1"), 1, 1L, 1.1F, 1.1
- , BigDecimal.ZERO, "hello", calendar, new Date(), true);
-
-
- BeamRecordCoder coder = beamSQLRowType.getRecordCoder();
- CoderProperties.coderDecodeEncodeEqual(coder, row);
- }
-}
http://git-wip-us.apache.org/repos/asf/beam/blob/49aad927/sdks/java/extensions/sql/src/test/java/org/apache/beam/sdk/extensions/sql/schema/kafka/BeamKafkaCSVTableTest.java
----------------------------------------------------------------------
diff --git a/sdks/java/extensions/sql/src/test/java/org/apache/beam/sdk/extensions/sql/schema/kafka/BeamKafkaCSVTableTest.java b/sdks/java/extensions/sql/src/test/java/org/apache/beam/sdk/extensions/sql/schema/kafka/BeamKafkaCSVTableTest.java
deleted file mode 100644
index e5d81fa..0000000
--- a/sdks/java/extensions/sql/src/test/java/org/apache/beam/sdk/extensions/sql/schema/kafka/BeamKafkaCSVTableTest.java
+++ /dev/null
@@ -1,107 +0,0 @@
-/*
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements. See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership. The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License. You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-package org.apache.beam.sdk.extensions.sql.schema.kafka;
-
-import java.io.Serializable;
-import org.apache.beam.sdk.extensions.sql.impl.planner.BeamQueryPlanner;
-import org.apache.beam.sdk.extensions.sql.impl.utils.CalciteUtils;
-import org.apache.beam.sdk.extensions.sql.schema.BeamRecordSqlType;
-import org.apache.beam.sdk.testing.PAssert;
-import org.apache.beam.sdk.testing.TestPipeline;
-import org.apache.beam.sdk.transforms.Create;
-import org.apache.beam.sdk.transforms.DoFn;
-import org.apache.beam.sdk.transforms.ParDo;
-import org.apache.beam.sdk.values.BeamRecord;
-import org.apache.beam.sdk.values.KV;
-import org.apache.beam.sdk.values.PCollection;
-import org.apache.calcite.rel.type.RelDataType;
-import org.apache.calcite.rel.type.RelDataTypeFactory;
-import org.apache.calcite.rel.type.RelProtoDataType;
-import org.apache.calcite.sql.type.SqlTypeName;
-import org.apache.commons.csv.CSVFormat;
-import org.junit.BeforeClass;
-import org.junit.Rule;
-import org.junit.Test;
-
-/**
- * Test for BeamKafkaCSVTable.
- */
-public class BeamKafkaCSVTableTest {
- @Rule
- public TestPipeline pipeline = TestPipeline.create();
- public static BeamRecord row1;
- public static BeamRecord row2;
-
- @BeforeClass
- public static void setUp() {
- row1 = new BeamRecord(genRowType(), 1L, 1, 1.0);
-
- row2 = new BeamRecord(genRowType(), 2L, 2, 2.0);
- }
-
- @Test public void testCsvRecorderDecoder() throws Exception {
- PCollection<BeamRecord> result = pipeline
- .apply(
- Create.of("1,\"1\",1.0", "2,2,2.0")
- )
- .apply(ParDo.of(new String2KvBytes()))
- .apply(
- new BeamKafkaCSVTable.CsvRecorderDecoder(genRowType(), CSVFormat.DEFAULT)
- );
-
- PAssert.that(result).containsInAnyOrder(row1, row2);
-
- pipeline.run();
- }
-
- @Test public void testCsvRecorderEncoder() throws Exception {
- PCollection<BeamRecord> result = pipeline
- .apply(
- Create.of(row1, row2)
- )
- .apply(
- new BeamKafkaCSVTable.CsvRecorderEncoder(genRowType(), CSVFormat.DEFAULT)
- ).apply(
- new BeamKafkaCSVTable.CsvRecorderDecoder(genRowType(), CSVFormat.DEFAULT)
- );
-
- PAssert.that(result).containsInAnyOrder(row1, row2);
-
- pipeline.run();
- }
-
- private static BeamRecordSqlType genRowType() {
- return CalciteUtils.toBeamRowType(new RelProtoDataType() {
-
- @Override public RelDataType apply(RelDataTypeFactory a0) {
- return a0.builder().add("order_id", SqlTypeName.BIGINT)
- .add("site_id", SqlTypeName.INTEGER)
- .add("price", SqlTypeName.DOUBLE).build();
- }
- }.apply(BeamQueryPlanner.TYPE_FACTORY));
- }
-
- private static class String2KvBytes extends DoFn<String, KV<byte[], byte[]>>
- implements Serializable {
- @ProcessElement
- public void processElement(ProcessContext ctx) {
- ctx.output(KV.of(new byte[] {}, ctx.element().getBytes()));
- }
- }
-}
http://git-wip-us.apache.org/repos/asf/beam/blob/49aad927/sdks/java/extensions/sql/src/test/java/org/apache/beam/sdk/extensions/sql/schema/text/BeamTextCSVTableTest.java
----------------------------------------------------------------------
diff --git a/sdks/java/extensions/sql/src/test/java/org/apache/beam/sdk/extensions/sql/schema/text/BeamTextCSVTableTest.java b/sdks/java/extensions/sql/src/test/java/org/apache/beam/sdk/extensions/sql/schema/text/BeamTextCSVTableTest.java
deleted file mode 100644
index 8935c46..0000000
--- a/sdks/java/extensions/sql/src/test/java/org/apache/beam/sdk/extensions/sql/schema/text/BeamTextCSVTableTest.java
+++ /dev/null
@@ -1,176 +0,0 @@
-/*
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements. See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership. The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License. You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-package org.apache.beam.sdk.extensions.sql.schema.text;
-
-import java.io.File;
-import java.io.FileOutputStream;
-import java.io.IOException;
-import java.io.OutputStream;
-import java.io.PrintStream;
-import java.nio.file.FileVisitResult;
-import java.nio.file.Files;
-import java.nio.file.Path;
-import java.nio.file.SimpleFileVisitor;
-import java.nio.file.attribute.BasicFileAttributes;
-import java.util.ArrayList;
-import java.util.Arrays;
-import java.util.List;
-import org.apache.beam.sdk.extensions.sql.impl.planner.BeamQueryPlanner;
-import org.apache.beam.sdk.extensions.sql.impl.utils.CalciteUtils;
-import org.apache.beam.sdk.extensions.sql.schema.BeamRecordSqlType;
-import org.apache.beam.sdk.testing.PAssert;
-import org.apache.beam.sdk.testing.TestPipeline;
-import org.apache.beam.sdk.values.BeamRecord;
-import org.apache.beam.sdk.values.PCollection;
-import org.apache.calcite.rel.type.RelDataType;
-import org.apache.calcite.rel.type.RelDataTypeFactory;
-import org.apache.calcite.rel.type.RelProtoDataType;
-import org.apache.calcite.sql.type.SqlTypeName;
-import org.apache.commons.csv.CSVFormat;
-import org.apache.commons.csv.CSVPrinter;
-import org.junit.AfterClass;
-import org.junit.BeforeClass;
-import org.junit.Rule;
-import org.junit.Test;
-
-/**
- * Tests for {@code BeamTextCSVTable}.
- */
-public class BeamTextCSVTableTest {
-
- @Rule public TestPipeline pipeline = TestPipeline.create();
- @Rule public TestPipeline pipeline2 = TestPipeline.create();
-
- /**
- * testData.
- *
- * <p>
- * The types of the csv fields are:
- * integer,bigint,float,double,string
- * </p>
- */
- private static Object[] data1 = new Object[] { 1, 1L, 1.1F, 1.1, "james" };
- private static Object[] data2 = new Object[] { 2, 2L, 2.2F, 2.2, "bond" };
-
- private static List<Object[]> testData = Arrays.asList(data1, data2);
- private static List<BeamRecord> testDataRows = new ArrayList<BeamRecord>() {{
- for (Object[] data : testData) {
- add(buildRow(data));
- }
- }};
-
- private static Path tempFolder;
- private static File readerSourceFile;
- private static File writerTargetFile;
-
- @Test public void testBuildIOReader() {
- PCollection<BeamRecord> rows = new BeamTextCSVTable(buildBeamSqlRowType(),
- readerSourceFile.getAbsolutePath()).buildIOReader(pipeline);
- PAssert.that(rows).containsInAnyOrder(testDataRows);
- pipeline.run();
- }
-
- @Test public void testBuildIOWriter() {
- new BeamTextCSVTable(buildBeamSqlRowType(),
- readerSourceFile.getAbsolutePath()).buildIOReader(pipeline)
- .apply(new BeamTextCSVTable(buildBeamSqlRowType(), writerTargetFile.getAbsolutePath())
- .buildIOWriter());
- pipeline.run();
-
- PCollection<BeamRecord> rows = new BeamTextCSVTable(buildBeamSqlRowType(),
- writerTargetFile.getAbsolutePath()).buildIOReader(pipeline2);
-
- // confirm the two reads match
- PAssert.that(rows).containsInAnyOrder(testDataRows);
- pipeline2.run();
- }
-
- @BeforeClass public static void setUp() throws IOException {
- tempFolder = Files.createTempDirectory("BeamTextTableTest");
- readerSourceFile = writeToFile(testData, "readerSourceFile.txt");
- writerTargetFile = writeToFile(testData, "writerTargetFile.txt");
- }
-
- @AfterClass public static void teardownClass() throws IOException {
- Files.walkFileTree(tempFolder, new SimpleFileVisitor<Path>() {
-
- @Override public FileVisitResult visitFile(Path file, BasicFileAttributes attrs)
- throws IOException {
- Files.delete(file);
- return FileVisitResult.CONTINUE;
- }
-
- @Override public FileVisitResult postVisitDirectory(Path dir, IOException exc)
- throws IOException {
- Files.delete(dir);
- return FileVisitResult.CONTINUE;
- }
- });
- }
-
- private static File writeToFile(List<Object[]> rows, String filename) throws IOException {
- File file = tempFolder.resolve(filename).toFile();
- OutputStream output = new FileOutputStream(file);
- writeToStreamAndClose(rows, output);
- return file;
- }
-
- /**
- * Helper that writes the given lines (adding a newline in between) to a stream, then closes the
- * stream.
- */
- private static void writeToStreamAndClose(List<Object[]> rows, OutputStream outputStream) {
- try (PrintStream writer = new PrintStream(outputStream)) {
- CSVPrinter printer = CSVFormat.DEFAULT.print(writer);
- for (Object[] row : rows) {
- for (Object field : row) {
- printer.print(field);
- }
- printer.println();
- }
- } catch (IOException e) {
- e.printStackTrace();
- }
- }
-
- private RelProtoDataType buildRowType() {
- return new RelProtoDataType() {
-
- @Override public RelDataType apply(RelDataTypeFactory a0) {
- return a0.builder().add("id", SqlTypeName.INTEGER).add("order_id", SqlTypeName.BIGINT)
- .add("price", SqlTypeName.FLOAT).add("amount", SqlTypeName.DOUBLE)
- .add("user_name", SqlTypeName.VARCHAR).build();
- }
- };
- }
-
- private static RelDataType buildRelDataType() {
- return BeamQueryPlanner.TYPE_FACTORY.builder().add("id", SqlTypeName.INTEGER)
- .add("order_id", SqlTypeName.BIGINT).add("price", SqlTypeName.FLOAT)
- .add("amount", SqlTypeName.DOUBLE).add("user_name", SqlTypeName.VARCHAR).build();
- }
-
- private static BeamRecordSqlType buildBeamSqlRowType() {
- return CalciteUtils.toBeamRowType(buildRelDataType());
- }
-
- private static BeamRecord buildRow(Object[] data) {
- return new BeamRecord(buildBeamSqlRowType(), Arrays.asList(data));
- }
-}
http://git-wip-us.apache.org/repos/asf/beam/blob/49aad927/sdks/java/extensions/sql/src/test/java/org/apache/beam/sdk/extensions/sql/schema/transform/BeamAggregationTransformTest.java
----------------------------------------------------------------------
diff --git a/sdks/java/extensions/sql/src/test/java/org/apache/beam/sdk/extensions/sql/schema/transform/BeamAggregationTransformTest.java b/sdks/java/extensions/sql/src/test/java/org/apache/beam/sdk/extensions/sql/schema/transform/BeamAggregationTransformTest.java
deleted file mode 100644
index 64f2ccd..0000000
--- a/sdks/java/extensions/sql/src/test/java/org/apache/beam/sdk/extensions/sql/schema/transform/BeamAggregationTransformTest.java
+++ /dev/null
@@ -1,453 +0,0 @@
-/**
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements. See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership. The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License. You may obtain a copy of the License at
- * <p>
- * http://www.apache.org/licenses/LICENSE-2.0
- * <p>
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-package org.apache.beam.sdk.extensions.sql.schema.transform;
-
-import java.text.ParseException;
-import java.util.ArrayList;
-import java.util.Arrays;
-import java.util.List;
-import org.apache.beam.sdk.coders.BeamRecordCoder;
-import org.apache.beam.sdk.coders.IterableCoder;
-import org.apache.beam.sdk.coders.KvCoder;
-import org.apache.beam.sdk.extensions.sql.impl.planner.BeamQueryPlanner;
-import org.apache.beam.sdk.extensions.sql.impl.transform.BeamAggregationTransforms;
-import org.apache.beam.sdk.extensions.sql.impl.utils.CalciteUtils;
-import org.apache.beam.sdk.extensions.sql.schema.BeamRecordSqlType;
-import org.apache.beam.sdk.testing.PAssert;
-import org.apache.beam.sdk.testing.TestPipeline;
-import org.apache.beam.sdk.transforms.Combine;
-import org.apache.beam.sdk.transforms.Create;
-import org.apache.beam.sdk.transforms.GroupByKey;
-import org.apache.beam.sdk.transforms.ParDo;
-import org.apache.beam.sdk.transforms.WithKeys;
-import org.apache.beam.sdk.values.BeamRecord;
-import org.apache.beam.sdk.values.KV;
-import org.apache.beam.sdk.values.PCollection;
-import org.apache.calcite.rel.core.AggregateCall;
-import org.apache.calcite.rel.type.RelDataTypeFactory.FieldInfoBuilder;
-import org.apache.calcite.rel.type.RelDataTypeSystem;
-import org.apache.calcite.sql.SqlKind;
-import org.apache.calcite.sql.fun.SqlAvgAggFunction;
-import org.apache.calcite.sql.fun.SqlCountAggFunction;
-import org.apache.calcite.sql.fun.SqlMinMaxAggFunction;
-import org.apache.calcite.sql.fun.SqlSumAggFunction;
-import org.apache.calcite.sql.type.BasicSqlType;
-import org.apache.calcite.sql.type.SqlTypeName;
-import org.apache.calcite.util.ImmutableBitSet;
-import org.junit.Rule;
-import org.junit.Test;
-
-/**
- * Unit tests for {@link BeamAggregationTransforms}.
- *
- */
-public class BeamAggregationTransformTest extends BeamTransformBaseTest{
-
- @Rule
- public TestPipeline p = TestPipeline.create();
-
- private List<AggregateCall> aggCalls;
-
- private BeamRecordSqlType keyType;
- private BeamRecordSqlType aggPartType;
- private BeamRecordSqlType outputType;
-
- private BeamRecordCoder inRecordCoder;
- private BeamRecordCoder keyCoder;
- private BeamRecordCoder aggCoder;
- private BeamRecordCoder outRecordCoder;
-
- /**
- * This step equals to below query.
- * <pre>
- * SELECT `f_int`
- * , COUNT(*) AS `size`
- * , SUM(`f_long`) AS `sum1`, AVG(`f_long`) AS `avg1`
- * , MAX(`f_long`) AS `max1`, MIN(`f_long`) AS `min1`
- * , SUM(`f_short`) AS `sum2`, AVG(`f_short`) AS `avg2`
- * , MAX(`f_short`) AS `max2`, MIN(`f_short`) AS `min2`
- * , SUM(`f_byte`) AS `sum3`, AVG(`f_byte`) AS `avg3`
- * , MAX(`f_byte`) AS `max3`, MIN(`f_byte`) AS `min3`
- * , SUM(`f_float`) AS `sum4`, AVG(`f_float`) AS `avg4`
- * , MAX(`f_float`) AS `max4`, MIN(`f_float`) AS `min4`
- * , SUM(`f_double`) AS `sum5`, AVG(`f_double`) AS `avg5`
- * , MAX(`f_double`) AS `max5`, MIN(`f_double`) AS `min5`
- * , MAX(`f_timestamp`) AS `max7`, MIN(`f_timestamp`) AS `min7`
- * ,SUM(`f_int2`) AS `sum8`, AVG(`f_int2`) AS `avg8`
- * , MAX(`f_int2`) AS `max8`, MIN(`f_int2`) AS `min8`
- * FROM TABLE_NAME
- * GROUP BY `f_int`
- * </pre>
- * @throws ParseException
- */
- @Test
- public void testCountPerElementBasic() throws ParseException {
- setupEnvironment();
-
- PCollection<BeamRecord> input = p.apply(Create.of(inputRows));
-
- //1. extract fields in group-by key part
- PCollection<KV<BeamRecord, BeamRecord>> exGroupByStream = input.apply("exGroupBy",
- WithKeys
- .of(new BeamAggregationTransforms.AggregationGroupByKeyFn(-1, ImmutableBitSet.of(0))))
- .setCoder(KvCoder.<BeamRecord, BeamRecord>of(keyCoder, inRecordCoder));
-
- //2. apply a GroupByKey.
- PCollection<KV<BeamRecord, Iterable<BeamRecord>>> groupedStream = exGroupByStream
- .apply("groupBy", GroupByKey.<BeamRecord, BeamRecord>create())
- .setCoder(KvCoder.<BeamRecord, Iterable<BeamRecord>>of(keyCoder,
- IterableCoder.<BeamRecord>of(inRecordCoder)));
-
- //3. run aggregation functions
- PCollection<KV<BeamRecord, BeamRecord>> aggregatedStream = groupedStream.apply("aggregation",
- Combine.<BeamRecord, BeamRecord, BeamRecord>groupedValues(
- new BeamAggregationTransforms.AggregationAdaptor(aggCalls, inputRowType)))
- .setCoder(KvCoder.<BeamRecord, BeamRecord>of(keyCoder, aggCoder));
-
- //4. flat KV to a single record
- PCollection<BeamRecord> mergedStream = aggregatedStream.apply("mergeRecord",
- ParDo.of(new BeamAggregationTransforms.MergeAggregationRecord(outputType, aggCalls, -1)));
- mergedStream.setCoder(outRecordCoder);
-
- //assert function BeamAggregationTransform.AggregationGroupByKeyFn
- PAssert.that(exGroupByStream).containsInAnyOrder(prepareResultOfAggregationGroupByKeyFn());
-
- //assert BeamAggregationTransform.AggregationCombineFn
- PAssert.that(aggregatedStream).containsInAnyOrder(prepareResultOfAggregationCombineFn());
-
- //assert BeamAggregationTransform.MergeAggregationRecord
- PAssert.that(mergedStream).containsInAnyOrder(prepareResultOfMergeAggregationRecord());
-
- p.run();
-}
-
- private void setupEnvironment() {
- prepareAggregationCalls();
- prepareTypeAndCoder();
- }
-
- /**
- * create list of all {@link AggregateCall}.
- */
- @SuppressWarnings("deprecation")
- private void prepareAggregationCalls() {
- //aggregations for all data type
- aggCalls = new ArrayList<>();
- aggCalls.add(
- new AggregateCall(new SqlCountAggFunction(), false,
- Arrays.<Integer>asList(),
- new BasicSqlType(RelDataTypeSystem.DEFAULT, SqlTypeName.BIGINT),
- "count")
- );
- aggCalls.add(
- new AggregateCall(new SqlSumAggFunction(
- new BasicSqlType(RelDataTypeSystem.DEFAULT, SqlTypeName.BIGINT)), false,
- Arrays.<Integer>asList(1),
- new BasicSqlType(RelDataTypeSystem.DEFAULT, SqlTypeName.BIGINT),
- "sum1")
- );
- aggCalls.add(
- new AggregateCall(new SqlAvgAggFunction(SqlKind.AVG), false,
- Arrays.<Integer>asList(1),
- new BasicSqlType(RelDataTypeSystem.DEFAULT, SqlTypeName.BIGINT),
- "avg1")
- );
- aggCalls.add(
- new AggregateCall(new SqlMinMaxAggFunction(SqlKind.MAX), false,
- Arrays.<Integer>asList(1),
- new BasicSqlType(RelDataTypeSystem.DEFAULT, SqlTypeName.BIGINT),
- "max1")
- );
- aggCalls.add(
- new AggregateCall(new SqlMinMaxAggFunction(SqlKind.MIN), false,
- Arrays.<Integer>asList(1),
- new BasicSqlType(RelDataTypeSystem.DEFAULT, SqlTypeName.BIGINT),
- "min1")
- );
-
- aggCalls.add(
- new AggregateCall(new SqlSumAggFunction(
- new BasicSqlType(RelDataTypeSystem.DEFAULT, SqlTypeName.SMALLINT)), false,
- Arrays.<Integer>asList(2),
- new BasicSqlType(RelDataTypeSystem.DEFAULT, SqlTypeName.SMALLINT),
- "sum2")
- );
- aggCalls.add(
- new AggregateCall(new SqlAvgAggFunction(SqlKind.AVG), false,
- Arrays.<Integer>asList(2),
- new BasicSqlType(RelDataTypeSystem.DEFAULT, SqlTypeName.SMALLINT),
- "avg2")
- );
- aggCalls.add(
- new AggregateCall(new SqlMinMaxAggFunction(SqlKind.MAX), false,
- Arrays.<Integer>asList(2),
- new BasicSqlType(RelDataTypeSystem.DEFAULT, SqlTypeName.SMALLINT),
- "max2")
- );
- aggCalls.add(
- new AggregateCall(new SqlMinMaxAggFunction(SqlKind.MIN), false,
- Arrays.<Integer>asList(2),
- new BasicSqlType(RelDataTypeSystem.DEFAULT, SqlTypeName.SMALLINT),
- "min2")
- );
-
- aggCalls.add(
- new AggregateCall(
- new SqlSumAggFunction(new BasicSqlType(RelDataTypeSystem.DEFAULT, SqlTypeName.TINYINT)),
- false,
- Arrays.<Integer>asList(3),
- new BasicSqlType(RelDataTypeSystem.DEFAULT, SqlTypeName.TINYINT),
- "sum3")
- );
- aggCalls.add(
- new AggregateCall(new SqlAvgAggFunction(SqlKind.AVG), false,
- Arrays.<Integer>asList(3),
- new BasicSqlType(RelDataTypeSystem.DEFAULT, SqlTypeName.TINYINT),
- "avg3")
- );
- aggCalls.add(
- new AggregateCall(new SqlMinMaxAggFunction(SqlKind.MAX), false,
- Arrays.<Integer>asList(3),
- new BasicSqlType(RelDataTypeSystem.DEFAULT, SqlTypeName.TINYINT),
- "max3")
- );
- aggCalls.add(
- new AggregateCall(new SqlMinMaxAggFunction(SqlKind.MIN), false,
- Arrays.<Integer>asList(3),
- new BasicSqlType(RelDataTypeSystem.DEFAULT, SqlTypeName.TINYINT),
- "min3")
- );
-
- aggCalls.add(
- new AggregateCall(
- new SqlSumAggFunction(new BasicSqlType(RelDataTypeSystem.DEFAULT, SqlTypeName.FLOAT)),
- false,
- Arrays.<Integer>asList(4),
- new BasicSqlType(RelDataTypeSystem.DEFAULT, SqlTypeName.FLOAT),
- "sum4")
- );
- aggCalls.add(
- new AggregateCall(new SqlAvgAggFunction(SqlKind.AVG), false,
- Arrays.<Integer>asList(4),
- new BasicSqlType(RelDataTypeSystem.DEFAULT, SqlTypeName.FLOAT),
- "avg4")
- );
- aggCalls.add(
- new AggregateCall(new SqlMinMaxAggFunction(SqlKind.MAX), false,
- Arrays.<Integer>asList(4),
- new BasicSqlType(RelDataTypeSystem.DEFAULT, SqlTypeName.FLOAT),
- "max4")
- );
- aggCalls.add(
- new AggregateCall(new SqlMinMaxAggFunction(SqlKind.MIN), false,
- Arrays.<Integer>asList(4),
- new BasicSqlType(RelDataTypeSystem.DEFAULT, SqlTypeName.FLOAT),
- "min4")
- );
-
- aggCalls.add(
- new AggregateCall(
- new SqlSumAggFunction(new BasicSqlType(RelDataTypeSystem.DEFAULT, SqlTypeName.DOUBLE)),
- false,
- Arrays.<Integer>asList(5),
- new BasicSqlType(RelDataTypeSystem.DEFAULT, SqlTypeName.DOUBLE),
- "sum5")
- );
- aggCalls.add(
- new AggregateCall(new SqlAvgAggFunction(SqlKind.AVG), false,
- Arrays.<Integer>asList(5),
- new BasicSqlType(RelDataTypeSystem.DEFAULT, SqlTypeName.DOUBLE),
- "avg5")
- );
- aggCalls.add(
- new AggregateCall(new SqlMinMaxAggFunction(SqlKind.MAX), false,
- Arrays.<Integer>asList(5),
- new BasicSqlType(RelDataTypeSystem.DEFAULT, SqlTypeName.DOUBLE),
- "max5")
- );
- aggCalls.add(
- new AggregateCall(new SqlMinMaxAggFunction(SqlKind.MIN), false,
- Arrays.<Integer>asList(5),
- new BasicSqlType(RelDataTypeSystem.DEFAULT, SqlTypeName.DOUBLE),
- "min5")
- );
-
- aggCalls.add(
- new AggregateCall(new SqlMinMaxAggFunction(SqlKind.MAX), false,
- Arrays.<Integer>asList(7),
- new BasicSqlType(RelDataTypeSystem.DEFAULT, SqlTypeName.TIMESTAMP),
- "max7")
- );
- aggCalls.add(
- new AggregateCall(new SqlMinMaxAggFunction(SqlKind.MIN), false,
- Arrays.<Integer>asList(7),
- new BasicSqlType(RelDataTypeSystem.DEFAULT, SqlTypeName.TIMESTAMP),
- "min7")
- );
-
- aggCalls.add(
- new AggregateCall(
- new SqlSumAggFunction(new BasicSqlType(RelDataTypeSystem.DEFAULT, SqlTypeName.INTEGER)),
- false,
- Arrays.<Integer>asList(8),
- new BasicSqlType(RelDataTypeSystem.DEFAULT, SqlTypeName.INTEGER),
- "sum8")
- );
- aggCalls.add(
- new AggregateCall(new SqlAvgAggFunction(SqlKind.AVG), false,
- Arrays.<Integer>asList(8),
- new BasicSqlType(RelDataTypeSystem.DEFAULT, SqlTypeName.INTEGER),
- "avg8")
- );
- aggCalls.add(
- new AggregateCall(new SqlMinMaxAggFunction(SqlKind.MAX), false,
- Arrays.<Integer>asList(8),
- new BasicSqlType(RelDataTypeSystem.DEFAULT, SqlTypeName.INTEGER),
- "max8")
- );
- aggCalls.add(
- new AggregateCall(new SqlMinMaxAggFunction(SqlKind.MIN), false,
- Arrays.<Integer>asList(8),
- new BasicSqlType(RelDataTypeSystem.DEFAULT, SqlTypeName.INTEGER),
- "min8")
- );
- }
-
- /**
- * Coders used in aggregation steps.
- */
- private void prepareTypeAndCoder() {
- inRecordCoder = inputRowType.getRecordCoder();
-
- keyType = initTypeOfSqlRow(Arrays.asList(KV.of("f_int", SqlTypeName.INTEGER)));
- keyCoder = keyType.getRecordCoder();
-
- aggPartType = initTypeOfSqlRow(
- Arrays.asList(KV.of("count", SqlTypeName.BIGINT),
-
- KV.of("sum1", SqlTypeName.BIGINT), KV.of("avg1", SqlTypeName.BIGINT),
- KV.of("max1", SqlTypeName.BIGINT), KV.of("min1", SqlTypeName.BIGINT),
-
- KV.of("sum2", SqlTypeName.SMALLINT), KV.of("avg2", SqlTypeName.SMALLINT),
- KV.of("max2", SqlTypeName.SMALLINT), KV.of("min2", SqlTypeName.SMALLINT),
-
- KV.of("sum3", SqlTypeName.TINYINT), KV.of("avg3", SqlTypeName.TINYINT),
- KV.of("max3", SqlTypeName.TINYINT), KV.of("min3", SqlTypeName.TINYINT),
-
- KV.of("sum4", SqlTypeName.FLOAT), KV.of("avg4", SqlTypeName.FLOAT),
- KV.of("max4", SqlTypeName.FLOAT), KV.of("min4", SqlTypeName.FLOAT),
-
- KV.of("sum5", SqlTypeName.DOUBLE), KV.of("avg5", SqlTypeName.DOUBLE),
- KV.of("max5", SqlTypeName.DOUBLE), KV.of("min5", SqlTypeName.DOUBLE),
-
- KV.of("max7", SqlTypeName.TIMESTAMP), KV.of("min7", SqlTypeName.TIMESTAMP),
-
- KV.of("sum8", SqlTypeName.INTEGER), KV.of("avg8", SqlTypeName.INTEGER),
- KV.of("max8", SqlTypeName.INTEGER), KV.of("min8", SqlTypeName.INTEGER)
- ));
- aggCoder = aggPartType.getRecordCoder();
-
- outputType = prepareFinalRowType();
- outRecordCoder = outputType.getRecordCoder();
- }
-
- /**
- * expected results after {@link BeamAggregationTransforms.AggregationGroupByKeyFn}.
- */
- private List<KV<BeamRecord, BeamRecord>> prepareResultOfAggregationGroupByKeyFn() {
- return Arrays.asList(
- KV.of(new BeamRecord(keyType, Arrays.<Object>asList(inputRows.get(0).getInteger(0))),
- inputRows.get(0)),
- KV.of(new BeamRecord(keyType, Arrays.<Object>asList(inputRows.get(1).getInteger(0))),
- inputRows.get(1)),
- KV.of(new BeamRecord(keyType, Arrays.<Object>asList(inputRows.get(2).getInteger(0))),
- inputRows.get(2)),
- KV.of(new BeamRecord(keyType, Arrays.<Object>asList(inputRows.get(3).getInteger(0))),
- inputRows.get(3)));
- }
-
- /**
- * expected results after {@link BeamAggregationTransforms.AggregationCombineFn}.
- */
- private List<KV<BeamRecord, BeamRecord>> prepareResultOfAggregationCombineFn()
- throws ParseException {
- return Arrays.asList(
- KV.of(new BeamRecord(keyType, Arrays.<Object>asList(inputRows.get(0).getInteger(0))),
- new BeamRecord(aggPartType, Arrays.<Object>asList(
- 4L,
- 10000L, 2500L, 4000L, 1000L,
- (short) 10, (short) 2, (short) 4, (short) 1,
- (byte) 10, (byte) 2, (byte) 4, (byte) 1,
- 10.0F, 2.5F, 4.0F, 1.0F,
- 10.0, 2.5, 4.0, 1.0,
- format.parse("2017-01-01 02:04:03"), format.parse("2017-01-01 01:01:03"),
- 10, 2, 4, 1
- )))
- );
- }
-
- /**
- * Row type of final output row.
- */
- private BeamRecordSqlType prepareFinalRowType() {
- FieldInfoBuilder builder = BeamQueryPlanner.TYPE_FACTORY.builder();
- List<KV<String, SqlTypeName>> columnMetadata =
- Arrays.asList(KV.of("f_int", SqlTypeName.INTEGER), KV.of("count", SqlTypeName.BIGINT),
-
- KV.of("sum1", SqlTypeName.BIGINT), KV.of("avg1", SqlTypeName.BIGINT),
- KV.of("max1", SqlTypeName.BIGINT), KV.of("min1", SqlTypeName.BIGINT),
-
- KV.of("sum2", SqlTypeName.SMALLINT), KV.of("avg2", SqlTypeName.SMALLINT),
- KV.of("max2", SqlTypeName.SMALLINT), KV.of("min2", SqlTypeName.SMALLINT),
-
- KV.of("sum3", SqlTypeName.TINYINT), KV.of("avg3", SqlTypeName.TINYINT),
- KV.of("max3", SqlTypeName.TINYINT), KV.of("min3", SqlTypeName.TINYINT),
-
- KV.of("sum4", SqlTypeName.FLOAT), KV.of("avg4", SqlTypeName.FLOAT),
- KV.of("max4", SqlTypeName.FLOAT), KV.of("min4", SqlTypeName.FLOAT),
-
- KV.of("sum5", SqlTypeName.DOUBLE), KV.of("avg5", SqlTypeName.DOUBLE),
- KV.of("max5", SqlTypeName.DOUBLE), KV.of("min5", SqlTypeName.DOUBLE),
-
- KV.of("max7", SqlTypeName.TIMESTAMP), KV.of("min7", SqlTypeName.TIMESTAMP),
-
- KV.of("sum8", SqlTypeName.INTEGER), KV.of("avg8", SqlTypeName.INTEGER),
- KV.of("max8", SqlTypeName.INTEGER), KV.of("min8", SqlTypeName.INTEGER)
- );
- for (KV<String, SqlTypeName> cm : columnMetadata) {
- builder.add(cm.getKey(), cm.getValue());
- }
- return CalciteUtils.toBeamRowType(builder.build());
- }
-
- /**
- * expected results after {@link BeamAggregationTransforms.MergeAggregationRecord}.
- */
- private BeamRecord prepareResultOfMergeAggregationRecord() throws ParseException {
- return new BeamRecord(outputType, Arrays.<Object>asList(
- 1, 4L,
- 10000L, 2500L, 4000L, 1000L,
- (short) 10, (short) 2, (short) 4, (short) 1,
- (byte) 10, (byte) 2, (byte) 4, (byte) 1,
- 10.0F, 2.5F, 4.0F, 1.0F,
- 10.0, 2.5, 4.0, 1.0,
- format.parse("2017-01-01 02:04:03"), format.parse("2017-01-01 01:01:03"),
- 10, 2, 4, 1
- ));
- }
-}
http://git-wip-us.apache.org/repos/asf/beam/blob/49aad927/sdks/java/extensions/sql/src/test/java/org/apache/beam/sdk/extensions/sql/schema/transform/BeamTransformBaseTest.java
----------------------------------------------------------------------
diff --git a/sdks/java/extensions/sql/src/test/java/org/apache/beam/sdk/extensions/sql/schema/transform/BeamTransformBaseTest.java b/sdks/java/extensions/sql/src/test/java/org/apache/beam/sdk/extensions/sql/schema/transform/BeamTransformBaseTest.java
deleted file mode 100644
index da6e95b..0000000
--- a/sdks/java/extensions/sql/src/test/java/org/apache/beam/sdk/extensions/sql/schema/transform/BeamTransformBaseTest.java
+++ /dev/null
@@ -1,97 +0,0 @@
-/**
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements. See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership. The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License. You may obtain a copy of the License at
- * <p>
- * http://www.apache.org/licenses/LICENSE-2.0
- * <p>
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-package org.apache.beam.sdk.extensions.sql.schema.transform;
-
-import java.text.DateFormat;
-import java.text.ParseException;
-import java.text.SimpleDateFormat;
-import java.util.Arrays;
-import java.util.List;
-import org.apache.beam.sdk.extensions.sql.impl.planner.BeamQueryPlanner;
-import org.apache.beam.sdk.extensions.sql.impl.utils.CalciteUtils;
-import org.apache.beam.sdk.extensions.sql.schema.BeamRecordSqlType;
-import org.apache.beam.sdk.values.BeamRecord;
-import org.apache.beam.sdk.values.KV;
-import org.apache.calcite.rel.type.RelDataTypeFactory.FieldInfoBuilder;
-import org.apache.calcite.sql.type.SqlTypeName;
-import org.junit.BeforeClass;
-
-/**
- * Shared methods to test PTransforms which execute Beam SQL steps.
- *
- */
-public class BeamTransformBaseTest {
- public static DateFormat format = new SimpleDateFormat("yyyy-MM-dd HH:mm:ss");
-
- public static BeamRecordSqlType inputRowType;
- public static List<BeamRecord> inputRows;
-
- @BeforeClass
- public static void prepareInput() throws NumberFormatException, ParseException{
- List<KV<String, SqlTypeName>> columnMetadata = Arrays.asList(
- KV.of("f_int", SqlTypeName.INTEGER), KV.of("f_long", SqlTypeName.BIGINT),
- KV.of("f_short", SqlTypeName.SMALLINT), KV.of("f_byte", SqlTypeName.TINYINT),
- KV.of("f_float", SqlTypeName.FLOAT), KV.of("f_double", SqlTypeName.DOUBLE),
- KV.of("f_string", SqlTypeName.VARCHAR), KV.of("f_timestamp", SqlTypeName.TIMESTAMP),
- KV.of("f_int2", SqlTypeName.INTEGER)
- );
- inputRowType = initTypeOfSqlRow(columnMetadata);
- inputRows = Arrays.asList(
- initBeamSqlRow(columnMetadata,
- Arrays.<Object>asList(1, 1000L, Short.valueOf("1"), Byte.valueOf("1"), 1.0F, 1.0,
- "string_row1", format.parse("2017-01-01 01:01:03"), 1)),
- initBeamSqlRow(columnMetadata,
- Arrays.<Object>asList(1, 2000L, Short.valueOf("2"), Byte.valueOf("2"), 2.0F, 2.0,
- "string_row2", format.parse("2017-01-01 01:02:03"), 2)),
- initBeamSqlRow(columnMetadata,
- Arrays.<Object>asList(1, 3000L, Short.valueOf("3"), Byte.valueOf("3"), 3.0F, 3.0,
- "string_row3", format.parse("2017-01-01 01:03:03"), 3)),
- initBeamSqlRow(columnMetadata, Arrays.<Object>asList(1, 4000L, Short.valueOf("4"),
- Byte.valueOf("4"), 4.0F, 4.0, "string_row4", format.parse("2017-01-01 02:04:03"), 4)));
- }
-
- /**
- * create a {@code BeamSqlRowType} for given column metadata.
- */
- public static BeamRecordSqlType initTypeOfSqlRow(List<KV<String, SqlTypeName>> columnMetadata){
- FieldInfoBuilder builder = BeamQueryPlanner.TYPE_FACTORY.builder();
- for (KV<String, SqlTypeName> cm : columnMetadata) {
- builder.add(cm.getKey(), cm.getValue());
- }
- return CalciteUtils.toBeamRowType(builder.build());
- }
-
- /**
- * Create an empty row with given column metadata.
- */
- public static BeamRecord initBeamSqlRow(List<KV<String, SqlTypeName>> columnMetadata) {
- return initBeamSqlRow(columnMetadata, Arrays.asList());
- }
-
- /**
- * Create a row with given column metadata, and values for each column.
- *
- */
- public static BeamRecord initBeamSqlRow(List<KV<String, SqlTypeName>> columnMetadata,
- List<Object> rowValues){
- BeamRecordSqlType rowType = initTypeOfSqlRow(columnMetadata);
-
- return new BeamRecord(rowType, rowValues);
- }
-
-}
[2/5] beam git commit: [BEAM-2740] Hide BeamSqlEnv.
Posted by ta...@apache.org.
http://git-wip-us.apache.org/repos/asf/beam/blob/49aad927/sdks/java/extensions/sql/src/test/java/org/apache/beam/sdk/extensions/sql/BeamSqlApiSurfaceTest.java
----------------------------------------------------------------------
diff --git a/sdks/java/extensions/sql/src/test/java/org/apache/beam/sdk/extensions/sql/BeamSqlApiSurfaceTest.java b/sdks/java/extensions/sql/src/test/java/org/apache/beam/sdk/extensions/sql/BeamSqlApiSurfaceTest.java
index 08678d1..456662f 100644
--- a/sdks/java/extensions/sql/src/test/java/org/apache/beam/sdk/extensions/sql/BeamSqlApiSurfaceTest.java
+++ b/sdks/java/extensions/sql/src/test/java/org/apache/beam/sdk/extensions/sql/BeamSqlApiSurfaceTest.java
@@ -40,15 +40,13 @@ public class BeamSqlApiSurfaceTest {
final Set<String> allowed =
ImmutableSet.of(
"org.apache.beam",
- "org.joda.time",
- "org.apache.commons.csv");
+ "org.joda.time");
ApiSurface surface = ApiSurface
- .ofClass(BeamSqlCli.class)
- .includingClass(BeamSql.class)
- .includingClass(BeamSqlEnv.class)
- .includingPackage("org.apache.beam.sdk.extensions.sql.schema",
- getClass().getClassLoader())
+ .ofClass(BeamSql.class)
+ .includingClass(BeamSqlUdf.class)
+ .includingClass(BeamRecordSqlType.class)
+ .includingClass(BeamSqlRecordHelper.class)
.pruningPrefix("java")
.pruningPattern("org[.]apache[.]beam[.]sdk[.]extensions[.]sql[.].*Test")
.pruningPattern("org[.]apache[.]beam[.]sdk[.]extensions[.]sql[.].*TestBase");
http://git-wip-us.apache.org/repos/asf/beam/blob/49aad927/sdks/java/extensions/sql/src/test/java/org/apache/beam/sdk/extensions/sql/BeamSqlDslAggregationTest.java
----------------------------------------------------------------------
diff --git a/sdks/java/extensions/sql/src/test/java/org/apache/beam/sdk/extensions/sql/BeamSqlDslAggregationTest.java b/sdks/java/extensions/sql/src/test/java/org/apache/beam/sdk/extensions/sql/BeamSqlDslAggregationTest.java
index db562da..d99ec20 100644
--- a/sdks/java/extensions/sql/src/test/java/org/apache/beam/sdk/extensions/sql/BeamSqlDslAggregationTest.java
+++ b/sdks/java/extensions/sql/src/test/java/org/apache/beam/sdk/extensions/sql/BeamSqlDslAggregationTest.java
@@ -19,7 +19,6 @@ package org.apache.beam.sdk.extensions.sql;
import java.sql.Types;
import java.util.Arrays;
-import org.apache.beam.sdk.extensions.sql.schema.BeamRecordSqlType;
import org.apache.beam.sdk.testing.PAssert;
import org.apache.beam.sdk.values.BeamRecord;
import org.apache.beam.sdk.values.PCollection;
http://git-wip-us.apache.org/repos/asf/beam/blob/49aad927/sdks/java/extensions/sql/src/test/java/org/apache/beam/sdk/extensions/sql/BeamSqlDslBase.java
----------------------------------------------------------------------
diff --git a/sdks/java/extensions/sql/src/test/java/org/apache/beam/sdk/extensions/sql/BeamSqlDslBase.java b/sdks/java/extensions/sql/src/test/java/org/apache/beam/sdk/extensions/sql/BeamSqlDslBase.java
index ef75ee2..b27435c 100644
--- a/sdks/java/extensions/sql/src/test/java/org/apache/beam/sdk/extensions/sql/BeamSqlDslBase.java
+++ b/sdks/java/extensions/sql/src/test/java/org/apache/beam/sdk/extensions/sql/BeamSqlDslBase.java
@@ -25,7 +25,6 @@ import java.text.SimpleDateFormat;
import java.util.ArrayList;
import java.util.Arrays;
import java.util.List;
-import org.apache.beam.sdk.extensions.sql.schema.BeamRecordSqlType;
import org.apache.beam.sdk.testing.TestPipeline;
import org.apache.beam.sdk.testing.TestStream;
import org.apache.beam.sdk.transforms.Create;
http://git-wip-us.apache.org/repos/asf/beam/blob/49aad927/sdks/java/extensions/sql/src/test/java/org/apache/beam/sdk/extensions/sql/BeamSqlDslJoinTest.java
----------------------------------------------------------------------
diff --git a/sdks/java/extensions/sql/src/test/java/org/apache/beam/sdk/extensions/sql/BeamSqlDslJoinTest.java b/sdks/java/extensions/sql/src/test/java/org/apache/beam/sdk/extensions/sql/BeamSqlDslJoinTest.java
index 0876dd9..47109e0 100644
--- a/sdks/java/extensions/sql/src/test/java/org/apache/beam/sdk/extensions/sql/BeamSqlDslJoinTest.java
+++ b/sdks/java/extensions/sql/src/test/java/org/apache/beam/sdk/extensions/sql/BeamSqlDslJoinTest.java
@@ -24,7 +24,6 @@ import static org.apache.beam.sdk.extensions.sql.impl.rel.BeamJoinRelBoundedVsBo
import java.sql.Types;
import java.util.Arrays;
import org.apache.beam.sdk.coders.BeamRecordCoder;
-import org.apache.beam.sdk.extensions.sql.schema.BeamRecordSqlType;
import org.apache.beam.sdk.testing.PAssert;
import org.apache.beam.sdk.testing.TestPipeline;
import org.apache.beam.sdk.values.BeamRecord;
http://git-wip-us.apache.org/repos/asf/beam/blob/49aad927/sdks/java/extensions/sql/src/test/java/org/apache/beam/sdk/extensions/sql/BeamSqlDslProjectTest.java
----------------------------------------------------------------------
diff --git a/sdks/java/extensions/sql/src/test/java/org/apache/beam/sdk/extensions/sql/BeamSqlDslProjectTest.java b/sdks/java/extensions/sql/src/test/java/org/apache/beam/sdk/extensions/sql/BeamSqlDslProjectTest.java
index 46aea99..e36eb2b 100644
--- a/sdks/java/extensions/sql/src/test/java/org/apache/beam/sdk/extensions/sql/BeamSqlDslProjectTest.java
+++ b/sdks/java/extensions/sql/src/test/java/org/apache/beam/sdk/extensions/sql/BeamSqlDslProjectTest.java
@@ -19,7 +19,6 @@ package org.apache.beam.sdk.extensions.sql;
import java.sql.Types;
import java.util.Arrays;
-import org.apache.beam.sdk.extensions.sql.schema.BeamRecordSqlType;
import org.apache.beam.sdk.testing.PAssert;
import org.apache.beam.sdk.values.BeamRecord;
import org.apache.beam.sdk.values.PCollection;
http://git-wip-us.apache.org/repos/asf/beam/blob/49aad927/sdks/java/extensions/sql/src/test/java/org/apache/beam/sdk/extensions/sql/BeamSqlDslUdfUdafTest.java
----------------------------------------------------------------------
diff --git a/sdks/java/extensions/sql/src/test/java/org/apache/beam/sdk/extensions/sql/BeamSqlDslUdfUdafTest.java b/sdks/java/extensions/sql/src/test/java/org/apache/beam/sdk/extensions/sql/BeamSqlDslUdfUdafTest.java
index 1541123..8db9d7a 100644
--- a/sdks/java/extensions/sql/src/test/java/org/apache/beam/sdk/extensions/sql/BeamSqlDslUdfUdafTest.java
+++ b/sdks/java/extensions/sql/src/test/java/org/apache/beam/sdk/extensions/sql/BeamSqlDslUdfUdafTest.java
@@ -20,8 +20,6 @@ package org.apache.beam.sdk.extensions.sql;
import java.sql.Types;
import java.util.Arrays;
import java.util.Iterator;
-import org.apache.beam.sdk.extensions.sql.schema.BeamRecordSqlType;
-import org.apache.beam.sdk.extensions.sql.schema.BeamSqlUdf;
import org.apache.beam.sdk.testing.PAssert;
import org.apache.beam.sdk.transforms.Combine.CombineFn;
import org.apache.beam.sdk.transforms.SerializableFunction;
http://git-wip-us.apache.org/repos/asf/beam/blob/49aad927/sdks/java/extensions/sql/src/test/java/org/apache/beam/sdk/extensions/sql/TestUtils.java
----------------------------------------------------------------------
diff --git a/sdks/java/extensions/sql/src/test/java/org/apache/beam/sdk/extensions/sql/TestUtils.java b/sdks/java/extensions/sql/src/test/java/org/apache/beam/sdk/extensions/sql/TestUtils.java
index 373deb7..4a1f8a0 100644
--- a/sdks/java/extensions/sql/src/test/java/org/apache/beam/sdk/extensions/sql/TestUtils.java
+++ b/sdks/java/extensions/sql/src/test/java/org/apache/beam/sdk/extensions/sql/TestUtils.java
@@ -21,7 +21,6 @@ package org.apache.beam.sdk.extensions.sql;
import java.util.ArrayList;
import java.util.Arrays;
import java.util.List;
-import org.apache.beam.sdk.extensions.sql.schema.BeamRecordSqlType;
import org.apache.beam.sdk.transforms.DoFn;
import org.apache.beam.sdk.values.BeamRecord;
http://git-wip-us.apache.org/repos/asf/beam/blob/49aad927/sdks/java/extensions/sql/src/test/java/org/apache/beam/sdk/extensions/sql/impl/interpreter/BeamSqlFnExecutorTestBase.java
----------------------------------------------------------------------
diff --git a/sdks/java/extensions/sql/src/test/java/org/apache/beam/sdk/extensions/sql/impl/interpreter/BeamSqlFnExecutorTestBase.java b/sdks/java/extensions/sql/src/test/java/org/apache/beam/sdk/extensions/sql/impl/interpreter/BeamSqlFnExecutorTestBase.java
index 97905c5..9d12126 100644
--- a/sdks/java/extensions/sql/src/test/java/org/apache/beam/sdk/extensions/sql/impl/interpreter/BeamSqlFnExecutorTestBase.java
+++ b/sdks/java/extensions/sql/src/test/java/org/apache/beam/sdk/extensions/sql/impl/interpreter/BeamSqlFnExecutorTestBase.java
@@ -19,12 +19,12 @@ package org.apache.beam.sdk.extensions.sql.impl.interpreter;
import java.util.ArrayList;
import java.util.List;
+import org.apache.beam.sdk.extensions.sql.BeamRecordSqlType;
import org.apache.beam.sdk.extensions.sql.impl.interpreter.operator.BeamSqlExpression;
import org.apache.beam.sdk.extensions.sql.impl.planner.BeamQueryPlanner;
import org.apache.beam.sdk.extensions.sql.impl.planner.BeamRelDataTypeSystem;
import org.apache.beam.sdk.extensions.sql.impl.planner.BeamRuleSets;
import org.apache.beam.sdk.extensions.sql.impl.utils.CalciteUtils;
-import org.apache.beam.sdk.extensions.sql.schema.BeamRecordSqlType;
import org.apache.beam.sdk.values.BeamRecord;
import org.apache.calcite.adapter.java.JavaTypeFactory;
import org.apache.calcite.config.Lex;
http://git-wip-us.apache.org/repos/asf/beam/blob/49aad927/sdks/java/extensions/sql/src/test/java/org/apache/beam/sdk/extensions/sql/impl/rel/BaseRelTest.java
----------------------------------------------------------------------
diff --git a/sdks/java/extensions/sql/src/test/java/org/apache/beam/sdk/extensions/sql/impl/rel/BaseRelTest.java b/sdks/java/extensions/sql/src/test/java/org/apache/beam/sdk/extensions/sql/impl/rel/BaseRelTest.java
new file mode 100644
index 0000000..906ccfd
--- /dev/null
+++ b/sdks/java/extensions/sql/src/test/java/org/apache/beam/sdk/extensions/sql/impl/rel/BaseRelTest.java
@@ -0,0 +1,34 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements. See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership. The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License. You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.beam.sdk.extensions.sql.impl.rel;
+
+import org.apache.beam.sdk.Pipeline;
+import org.apache.beam.sdk.extensions.sql.impl.BeamSqlEnv;
+import org.apache.beam.sdk.values.BeamRecord;
+import org.apache.beam.sdk.values.PCollection;
+
+/**
+ * Base class for rel test.
+ */
+public class BaseRelTest {
+ public PCollection<BeamRecord> compilePipeline (
+ String sql, Pipeline pipeline, BeamSqlEnv sqlEnv) throws Exception {
+ return sqlEnv.getPlanner().compileBeamPipeline(sql, pipeline, sqlEnv);
+ }
+}
http://git-wip-us.apache.org/repos/asf/beam/blob/49aad927/sdks/java/extensions/sql/src/test/java/org/apache/beam/sdk/extensions/sql/impl/rel/BeamIntersectRelTest.java
----------------------------------------------------------------------
diff --git a/sdks/java/extensions/sql/src/test/java/org/apache/beam/sdk/extensions/sql/impl/rel/BeamIntersectRelTest.java b/sdks/java/extensions/sql/src/test/java/org/apache/beam/sdk/extensions/sql/impl/rel/BeamIntersectRelTest.java
index a51cc30..8e41d0a 100644
--- a/sdks/java/extensions/sql/src/test/java/org/apache/beam/sdk/extensions/sql/impl/rel/BeamIntersectRelTest.java
+++ b/sdks/java/extensions/sql/src/test/java/org/apache/beam/sdk/extensions/sql/impl/rel/BeamIntersectRelTest.java
@@ -19,9 +19,8 @@
package org.apache.beam.sdk.extensions.sql.impl.rel;
import java.sql.Types;
-import org.apache.beam.sdk.extensions.sql.BeamSqlCli;
-import org.apache.beam.sdk.extensions.sql.BeamSqlEnv;
import org.apache.beam.sdk.extensions.sql.TestUtils;
+import org.apache.beam.sdk.extensions.sql.impl.BeamSqlEnv;
import org.apache.beam.sdk.extensions.sql.mock.MockedBoundedTable;
import org.apache.beam.sdk.testing.PAssert;
import org.apache.beam.sdk.testing.TestPipeline;
@@ -34,7 +33,7 @@ import org.junit.Test;
/**
* Test for {@code BeamIntersectRel}.
*/
-public class BeamIntersectRelTest {
+public class BeamIntersectRelTest extends BaseRelTest {
static BeamSqlEnv sqlEnv = new BeamSqlEnv();
@Rule
@@ -77,7 +76,7 @@ public class BeamIntersectRelTest {
+ "SELECT order_id, site_id, price "
+ "FROM ORDER_DETAILS2 ";
- PCollection<BeamRecord> rows = BeamSqlCli.compilePipeline(sql, pipeline, sqlEnv);
+ PCollection<BeamRecord> rows = compilePipeline(sql, pipeline, sqlEnv);
PAssert.that(rows).containsInAnyOrder(
TestUtils.RowsBuilder.of(
Types.BIGINT, "order_id",
@@ -100,7 +99,7 @@ public class BeamIntersectRelTest {
+ "SELECT order_id, site_id, price "
+ "FROM ORDER_DETAILS2 ";
- PCollection<BeamRecord> rows = BeamSqlCli.compilePipeline(sql, pipeline, sqlEnv);
+ PCollection<BeamRecord> rows = compilePipeline(sql, pipeline, sqlEnv);
PAssert.that(rows).satisfies(new CheckSize(3));
PAssert.that(rows).containsInAnyOrder(
http://git-wip-us.apache.org/repos/asf/beam/blob/49aad927/sdks/java/extensions/sql/src/test/java/org/apache/beam/sdk/extensions/sql/impl/rel/BeamJoinRelBoundedVsBoundedTest.java
----------------------------------------------------------------------
diff --git a/sdks/java/extensions/sql/src/test/java/org/apache/beam/sdk/extensions/sql/impl/rel/BeamJoinRelBoundedVsBoundedTest.java b/sdks/java/extensions/sql/src/test/java/org/apache/beam/sdk/extensions/sql/impl/rel/BeamJoinRelBoundedVsBoundedTest.java
index dde1540..e0d691b 100644
--- a/sdks/java/extensions/sql/src/test/java/org/apache/beam/sdk/extensions/sql/impl/rel/BeamJoinRelBoundedVsBoundedTest.java
+++ b/sdks/java/extensions/sql/src/test/java/org/apache/beam/sdk/extensions/sql/impl/rel/BeamJoinRelBoundedVsBoundedTest.java
@@ -19,9 +19,8 @@
package org.apache.beam.sdk.extensions.sql.impl.rel;
import java.sql.Types;
-import org.apache.beam.sdk.extensions.sql.BeamSqlCli;
-import org.apache.beam.sdk.extensions.sql.BeamSqlEnv;
import org.apache.beam.sdk.extensions.sql.TestUtils;
+import org.apache.beam.sdk.extensions.sql.impl.BeamSqlEnv;
import org.apache.beam.sdk.extensions.sql.mock.MockedBoundedTable;
import org.apache.beam.sdk.testing.PAssert;
import org.apache.beam.sdk.testing.TestPipeline;
@@ -34,10 +33,10 @@ import org.junit.Test;
/**
* Bounded + Bounded Test for {@code BeamJoinRel}.
*/
-public class BeamJoinRelBoundedVsBoundedTest {
+public class BeamJoinRelBoundedVsBoundedTest extends BaseRelTest {
@Rule
public final TestPipeline pipeline = TestPipeline.create();
- private static final BeamSqlEnv beamSqlEnv = new BeamSqlEnv();
+ private static final BeamSqlEnv BEAM_SQL_ENV = new BeamSqlEnv();
public static final MockedBoundedTable ORDER_DETAILS1 =
MockedBoundedTable.of(
@@ -63,8 +62,8 @@ public class BeamJoinRelBoundedVsBoundedTest {
@BeforeClass
public static void prepare() {
- beamSqlEnv.registerTable("ORDER_DETAILS1", ORDER_DETAILS1);
- beamSqlEnv.registerTable("ORDER_DETAILS2", ORDER_DETAILS2);
+ BEAM_SQL_ENV.registerTable("ORDER_DETAILS1", ORDER_DETAILS1);
+ BEAM_SQL_ENV.registerTable("ORDER_DETAILS2", ORDER_DETAILS2);
}
@Test
@@ -77,7 +76,7 @@ public class BeamJoinRelBoundedVsBoundedTest {
+ " o1.order_id=o2.site_id AND o2.price=o1.site_id"
;
- PCollection<BeamRecord> rows = BeamSqlCli.compilePipeline(sql, pipeline, beamSqlEnv);
+ PCollection<BeamRecord> rows = compilePipeline(sql, pipeline, BEAM_SQL_ENV);
PAssert.that(rows).containsInAnyOrder(
TestUtils.RowsBuilder.of(
Types.INTEGER, "order_id",
@@ -102,7 +101,7 @@ public class BeamJoinRelBoundedVsBoundedTest {
+ " o1.order_id=o2.site_id AND o2.price=o1.site_id"
;
- PCollection<BeamRecord> rows = BeamSqlCli.compilePipeline(sql, pipeline, beamSqlEnv);
+ PCollection<BeamRecord> rows = compilePipeline(sql, pipeline, BEAM_SQL_ENV);
pipeline.enableAbandonedNodeEnforcement(false);
PAssert.that(rows).containsInAnyOrder(
TestUtils.RowsBuilder.of(
@@ -130,7 +129,7 @@ public class BeamJoinRelBoundedVsBoundedTest {
+ " o1.order_id=o2.site_id AND o2.price=o1.site_id"
;
- PCollection<BeamRecord> rows = BeamSqlCli.compilePipeline(sql, pipeline, beamSqlEnv);
+ PCollection<BeamRecord> rows = compilePipeline(sql, pipeline, BEAM_SQL_ENV);
PAssert.that(rows).containsInAnyOrder(
TestUtils.RowsBuilder.of(
Types.INTEGER, "order_id",
@@ -157,7 +156,7 @@ public class BeamJoinRelBoundedVsBoundedTest {
+ " o1.order_id=o2.site_id AND o2.price=o1.site_id"
;
- PCollection<BeamRecord> rows = BeamSqlCli.compilePipeline(sql, pipeline, beamSqlEnv);
+ PCollection<BeamRecord> rows = compilePipeline(sql, pipeline, BEAM_SQL_ENV);
PAssert.that(rows).containsInAnyOrder(
TestUtils.RowsBuilder.of(
Types.INTEGER, "order_id",
@@ -187,7 +186,7 @@ public class BeamJoinRelBoundedVsBoundedTest {
;
pipeline.enableAbandonedNodeEnforcement(false);
- BeamSqlCli.compilePipeline(sql, pipeline, beamSqlEnv);
+ compilePipeline(sql, pipeline, BEAM_SQL_ENV);
pipeline.run();
}
@@ -198,7 +197,7 @@ public class BeamJoinRelBoundedVsBoundedTest {
+ "FROM ORDER_DETAILS1 o1, ORDER_DETAILS2 o2";
pipeline.enableAbandonedNodeEnforcement(false);
- BeamSqlCli.compilePipeline(sql, pipeline, beamSqlEnv);
+ compilePipeline(sql, pipeline, BEAM_SQL_ENV);
pipeline.run();
}
}
http://git-wip-us.apache.org/repos/asf/beam/blob/49aad927/sdks/java/extensions/sql/src/test/java/org/apache/beam/sdk/extensions/sql/impl/rel/BeamJoinRelUnboundedVsBoundedTest.java
----------------------------------------------------------------------
diff --git a/sdks/java/extensions/sql/src/test/java/org/apache/beam/sdk/extensions/sql/impl/rel/BeamJoinRelUnboundedVsBoundedTest.java b/sdks/java/extensions/sql/src/test/java/org/apache/beam/sdk/extensions/sql/impl/rel/BeamJoinRelUnboundedVsBoundedTest.java
index 28ad99c..c5145ec 100644
--- a/sdks/java/extensions/sql/src/test/java/org/apache/beam/sdk/extensions/sql/impl/rel/BeamJoinRelUnboundedVsBoundedTest.java
+++ b/sdks/java/extensions/sql/src/test/java/org/apache/beam/sdk/extensions/sql/impl/rel/BeamJoinRelUnboundedVsBoundedTest.java
@@ -20,9 +20,8 @@ package org.apache.beam.sdk.extensions.sql.impl.rel;
import java.sql.Types;
import java.util.Date;
-import org.apache.beam.sdk.extensions.sql.BeamSqlCli;
-import org.apache.beam.sdk.extensions.sql.BeamSqlEnv;
import org.apache.beam.sdk.extensions.sql.TestUtils;
+import org.apache.beam.sdk.extensions.sql.impl.BeamSqlEnv;
import org.apache.beam.sdk.extensions.sql.impl.transform.BeamSqlOutputToConsoleFn;
import org.apache.beam.sdk.extensions.sql.mock.MockedBoundedTable;
import org.apache.beam.sdk.extensions.sql.mock.MockedUnboundedTable;
@@ -39,10 +38,10 @@ import org.junit.Test;
/**
 * Unbounded + Bounded Test for {@code BeamJoinRel}.
*/
-public class BeamJoinRelUnboundedVsBoundedTest {
+public class BeamJoinRelUnboundedVsBoundedTest extends BaseRelTest {
@Rule
public final TestPipeline pipeline = TestPipeline.create();
- private static final BeamSqlEnv beamSqlEnv = new BeamSqlEnv();
+ private static final BeamSqlEnv BEAM_SQL_ENV = new BeamSqlEnv();
public static final Date FIRST_DATE = new Date(1);
public static final Date SECOND_DATE = new Date(1 + 3600 * 1000);
public static final Date THIRD_DATE = new Date(1 + 3600 * 1000 + 3600 * 1000 + 1);
@@ -50,7 +49,7 @@ public class BeamJoinRelUnboundedVsBoundedTest {
@BeforeClass
public static void prepare() {
- beamSqlEnv.registerTable("ORDER_DETAILS", MockedUnboundedTable
+ BEAM_SQL_ENV.registerTable("ORDER_DETAILS", MockedUnboundedTable
.of(
Types.INTEGER, "order_id",
Types.INTEGER, "site_id",
@@ -78,7 +77,7 @@ public class BeamJoinRelUnboundedVsBoundedTest {
)
);
- beamSqlEnv.registerTable("ORDER_DETAILS1", MockedBoundedTable
+ BEAM_SQL_ENV.registerTable("ORDER_DETAILS1", MockedBoundedTable
.of(Types.INTEGER, "order_id",
Types.VARCHAR, "buyer"
).addRows(
@@ -98,7 +97,7 @@ public class BeamJoinRelUnboundedVsBoundedTest {
+ " o1.order_id=o2.order_id"
;
- PCollection<BeamRecord> rows = BeamSqlCli.compilePipeline(sql, pipeline, beamSqlEnv);
+ PCollection<BeamRecord> rows = compilePipeline(sql, pipeline, BEAM_SQL_ENV);
PAssert.that(rows.apply(ParDo.of(new TestUtils.BeamSqlRow2StringDoFn())))
.containsInAnyOrder(
TestUtils.RowsBuilder.of(
@@ -124,7 +123,7 @@ public class BeamJoinRelUnboundedVsBoundedTest {
+ " o1.order_id=o2.order_id"
;
- PCollection<BeamRecord> rows = BeamSqlCli.compilePipeline(sql, pipeline, beamSqlEnv);
+ PCollection<BeamRecord> rows = compilePipeline(sql, pipeline, BEAM_SQL_ENV);
PAssert.that(rows.apply(ParDo.of(new TestUtils.BeamSqlRow2StringDoFn())))
.containsInAnyOrder(
TestUtils.RowsBuilder.of(
@@ -150,7 +149,7 @@ public class BeamJoinRelUnboundedVsBoundedTest {
+ " o1.order_id=o2.order_id"
;
- PCollection<BeamRecord> rows = BeamSqlCli.compilePipeline(sql, pipeline, beamSqlEnv);
+ PCollection<BeamRecord> rows = compilePipeline(sql, pipeline, BEAM_SQL_ENV);
rows.apply(ParDo.of(new BeamSqlOutputToConsoleFn("helloworld")));
PAssert.that(rows.apply(ParDo.of(new TestUtils.BeamSqlRow2StringDoFn())))
.containsInAnyOrder(
@@ -178,7 +177,7 @@ public class BeamJoinRelUnboundedVsBoundedTest {
+ " o1.order_id=o2.order_id"
;
pipeline.enableAbandonedNodeEnforcement(false);
- BeamSqlCli.compilePipeline(sql, pipeline, beamSqlEnv);
+ compilePipeline(sql, pipeline, BEAM_SQL_ENV);
pipeline.run();
}
@@ -192,7 +191,7 @@ public class BeamJoinRelUnboundedVsBoundedTest {
+ " on "
+ " o1.order_id=o2.order_id"
;
- PCollection<BeamRecord> rows = BeamSqlCli.compilePipeline(sql, pipeline, beamSqlEnv);
+ PCollection<BeamRecord> rows = compilePipeline(sql, pipeline, BEAM_SQL_ENV);
PAssert.that(rows.apply(ParDo.of(new TestUtils.BeamSqlRow2StringDoFn())))
.containsInAnyOrder(
TestUtils.RowsBuilder.of(
@@ -220,7 +219,7 @@ public class BeamJoinRelUnboundedVsBoundedTest {
;
pipeline.enableAbandonedNodeEnforcement(false);
- BeamSqlCli.compilePipeline(sql, pipeline, beamSqlEnv);
+ compilePipeline(sql, pipeline, BEAM_SQL_ENV);
pipeline.run();
}
@@ -235,7 +234,7 @@ public class BeamJoinRelUnboundedVsBoundedTest {
+ " o1.order_id=o2.order_id"
;
pipeline.enableAbandonedNodeEnforcement(false);
- BeamSqlCli.compilePipeline(sql, pipeline, beamSqlEnv);
+ compilePipeline(sql, pipeline, BEAM_SQL_ENV);
pipeline.run();
}
}
http://git-wip-us.apache.org/repos/asf/beam/blob/49aad927/sdks/java/extensions/sql/src/test/java/org/apache/beam/sdk/extensions/sql/impl/rel/BeamJoinRelUnboundedVsUnboundedTest.java
----------------------------------------------------------------------
diff --git a/sdks/java/extensions/sql/src/test/java/org/apache/beam/sdk/extensions/sql/impl/rel/BeamJoinRelUnboundedVsUnboundedTest.java b/sdks/java/extensions/sql/src/test/java/org/apache/beam/sdk/extensions/sql/impl/rel/BeamJoinRelUnboundedVsUnboundedTest.java
index a5a2e85..e5470ca 100644
--- a/sdks/java/extensions/sql/src/test/java/org/apache/beam/sdk/extensions/sql/impl/rel/BeamJoinRelUnboundedVsUnboundedTest.java
+++ b/sdks/java/extensions/sql/src/test/java/org/apache/beam/sdk/extensions/sql/impl/rel/BeamJoinRelUnboundedVsUnboundedTest.java
@@ -20,9 +20,8 @@ package org.apache.beam.sdk.extensions.sql.impl.rel;
import java.sql.Types;
import java.util.Date;
-import org.apache.beam.sdk.extensions.sql.BeamSqlCli;
-import org.apache.beam.sdk.extensions.sql.BeamSqlEnv;
import org.apache.beam.sdk.extensions.sql.TestUtils;
+import org.apache.beam.sdk.extensions.sql.impl.BeamSqlEnv;
import org.apache.beam.sdk.extensions.sql.impl.transform.BeamSqlOutputToConsoleFn;
import org.apache.beam.sdk.extensions.sql.mock.MockedUnboundedTable;
import org.apache.beam.sdk.testing.PAssert;
@@ -38,10 +37,10 @@ import org.junit.Test;
/**
* Unbounded + Unbounded Test for {@code BeamJoinRel}.
*/
-public class BeamJoinRelUnboundedVsUnboundedTest {
+public class BeamJoinRelUnboundedVsUnboundedTest extends BaseRelTest {
@Rule
public final TestPipeline pipeline = TestPipeline.create();
- private static final BeamSqlEnv beamSqlEnv = new BeamSqlEnv();
+ private static final BeamSqlEnv BEAM_SQL_ENV = new BeamSqlEnv();
public static final Date FIRST_DATE = new Date(1);
public static final Date SECOND_DATE = new Date(1 + 3600 * 1000);
@@ -49,7 +48,7 @@ public class BeamJoinRelUnboundedVsUnboundedTest {
@BeforeClass
public static void prepare() {
- beamSqlEnv.registerTable("ORDER_DETAILS", MockedUnboundedTable
+ BEAM_SQL_ENV.registerTable("ORDER_DETAILS", MockedUnboundedTable
.of(Types.INTEGER, "order_id",
Types.INTEGER, "site_id",
Types.INTEGER, "price",
@@ -88,7 +87,7 @@ public class BeamJoinRelUnboundedVsUnboundedTest {
+ " o1.order_id=o2.order_id"
;
- PCollection<BeamRecord> rows = BeamSqlCli.compilePipeline(sql, pipeline, beamSqlEnv);
+ PCollection<BeamRecord> rows = compilePipeline(sql, pipeline, BEAM_SQL_ENV);
PAssert.that(rows.apply(ParDo.of(new TestUtils.BeamSqlRow2StringDoFn())))
.containsInAnyOrder(
TestUtils.RowsBuilder.of(
@@ -121,7 +120,7 @@ public class BeamJoinRelUnboundedVsUnboundedTest {
// 2, 2 | 2, 5
// 3, 3 | NULL, NULL
- PCollection<BeamRecord> rows = BeamSqlCli.compilePipeline(sql, pipeline, beamSqlEnv);
+ PCollection<BeamRecord> rows = compilePipeline(sql, pipeline, BEAM_SQL_ENV);
PAssert.that(rows.apply(ParDo.of(new TestUtils.BeamSqlRow2StringDoFn())))
.containsInAnyOrder(
TestUtils.RowsBuilder.of(
@@ -151,7 +150,7 @@ public class BeamJoinRelUnboundedVsUnboundedTest {
+ " o1.order_id=o2.order_id"
;
- PCollection<BeamRecord> rows = BeamSqlCli.compilePipeline(sql, pipeline, beamSqlEnv);
+ PCollection<BeamRecord> rows = compilePipeline(sql, pipeline, BEAM_SQL_ENV);
PAssert.that(rows.apply(ParDo.of(new TestUtils.BeamSqlRow2StringDoFn())))
.containsInAnyOrder(
TestUtils.RowsBuilder.of(
@@ -181,7 +180,7 @@ public class BeamJoinRelUnboundedVsUnboundedTest {
+ " o1.order_id1=o2.order_id"
;
- PCollection<BeamRecord> rows = BeamSqlCli.compilePipeline(sql, pipeline, beamSqlEnv);
+ PCollection<BeamRecord> rows = compilePipeline(sql, pipeline, BEAM_SQL_ENV);
rows.apply(ParDo.of(new BeamSqlOutputToConsoleFn("hello")));
PAssert.that(rows.apply(ParDo.of(new TestUtils.BeamSqlRow2StringDoFn())))
.containsInAnyOrder(
@@ -213,7 +212,7 @@ public class BeamJoinRelUnboundedVsUnboundedTest {
+ " o1.order_id=o2.order_id"
;
pipeline.enableAbandonedNodeEnforcement(false);
- BeamSqlCli.compilePipeline(sql, pipeline, beamSqlEnv);
+ compilePipeline(sql, pipeline, BEAM_SQL_ENV);
pipeline.run();
}
}
http://git-wip-us.apache.org/repos/asf/beam/blob/49aad927/sdks/java/extensions/sql/src/test/java/org/apache/beam/sdk/extensions/sql/impl/rel/BeamMinusRelTest.java
----------------------------------------------------------------------
diff --git a/sdks/java/extensions/sql/src/test/java/org/apache/beam/sdk/extensions/sql/impl/rel/BeamMinusRelTest.java b/sdks/java/extensions/sql/src/test/java/org/apache/beam/sdk/extensions/sql/impl/rel/BeamMinusRelTest.java
index 425e554..5c4ae2c 100644
--- a/sdks/java/extensions/sql/src/test/java/org/apache/beam/sdk/extensions/sql/impl/rel/BeamMinusRelTest.java
+++ b/sdks/java/extensions/sql/src/test/java/org/apache/beam/sdk/extensions/sql/impl/rel/BeamMinusRelTest.java
@@ -19,9 +19,8 @@
package org.apache.beam.sdk.extensions.sql.impl.rel;
import java.sql.Types;
-import org.apache.beam.sdk.extensions.sql.BeamSqlCli;
-import org.apache.beam.sdk.extensions.sql.BeamSqlEnv;
import org.apache.beam.sdk.extensions.sql.TestUtils;
+import org.apache.beam.sdk.extensions.sql.impl.BeamSqlEnv;
import org.apache.beam.sdk.extensions.sql.mock.MockedBoundedTable;
import org.apache.beam.sdk.testing.PAssert;
import org.apache.beam.sdk.testing.TestPipeline;
@@ -34,7 +33,7 @@ import org.junit.Test;
/**
* Test for {@code BeamMinusRel}.
*/
-public class BeamMinusRelTest {
+public class BeamMinusRelTest extends BaseRelTest {
static BeamSqlEnv sqlEnv = new BeamSqlEnv();
@Rule
@@ -78,7 +77,7 @@ public class BeamMinusRelTest {
+ "SELECT order_id, site_id, price "
+ "FROM ORDER_DETAILS2 ";
- PCollection<BeamRecord> rows = BeamSqlCli.compilePipeline(sql, pipeline, sqlEnv);
+ PCollection<BeamRecord> rows = compilePipeline(sql, pipeline, sqlEnv);
PAssert.that(rows).containsInAnyOrder(
TestUtils.RowsBuilder.of(
Types.BIGINT, "order_id",
@@ -100,7 +99,7 @@ public class BeamMinusRelTest {
+ "SELECT order_id, site_id, price "
+ "FROM ORDER_DETAILS2 ";
- PCollection<BeamRecord> rows = BeamSqlCli.compilePipeline(sql, pipeline, sqlEnv);
+ PCollection<BeamRecord> rows = compilePipeline(sql, pipeline, sqlEnv);
PAssert.that(rows).satisfies(new CheckSize(2));
PAssert.that(rows).containsInAnyOrder(
http://git-wip-us.apache.org/repos/asf/beam/blob/49aad927/sdks/java/extensions/sql/src/test/java/org/apache/beam/sdk/extensions/sql/impl/rel/BeamSetOperatorRelBaseTest.java
----------------------------------------------------------------------
diff --git a/sdks/java/extensions/sql/src/test/java/org/apache/beam/sdk/extensions/sql/impl/rel/BeamSetOperatorRelBaseTest.java b/sdks/java/extensions/sql/src/test/java/org/apache/beam/sdk/extensions/sql/impl/rel/BeamSetOperatorRelBaseTest.java
index 4de493a..cd0297a 100644
--- a/sdks/java/extensions/sql/src/test/java/org/apache/beam/sdk/extensions/sql/impl/rel/BeamSetOperatorRelBaseTest.java
+++ b/sdks/java/extensions/sql/src/test/java/org/apache/beam/sdk/extensions/sql/impl/rel/BeamSetOperatorRelBaseTest.java
@@ -21,9 +21,8 @@ package org.apache.beam.sdk.extensions.sql.impl.rel;
import java.sql.Types;
import java.util.Date;
import org.apache.beam.sdk.Pipeline;
-import org.apache.beam.sdk.extensions.sql.BeamSqlCli;
-import org.apache.beam.sdk.extensions.sql.BeamSqlEnv;
import org.apache.beam.sdk.extensions.sql.TestUtils;
+import org.apache.beam.sdk.extensions.sql.impl.BeamSqlEnv;
import org.apache.beam.sdk.extensions.sql.mock.MockedBoundedTable;
import org.apache.beam.sdk.options.PipelineOptionsFactory;
import org.apache.beam.sdk.testing.PAssert;
@@ -38,7 +37,7 @@ import org.junit.Test;
/**
* Test for {@code BeamSetOperatorRelBase}.
*/
-public class BeamSetOperatorRelBaseTest {
+public class BeamSetOperatorRelBaseTest extends BaseRelTest {
static BeamSqlEnv sqlEnv = new BeamSqlEnv();
@Rule
@@ -71,7 +70,7 @@ public class BeamSetOperatorRelBaseTest {
+ "FROM ORDER_DETAILS GROUP BY order_id, site_id"
+ ", TUMBLE(order_time, INTERVAL '1' HOUR) ";
- PCollection<BeamRecord> rows = BeamSqlCli.compilePipeline(sql, pipeline, sqlEnv);
+ PCollection<BeamRecord> rows = compilePipeline(sql, pipeline, sqlEnv);
// compare valueInString to ignore the windowStart & windowEnd
PAssert.that(rows.apply(ParDo.of(new TestUtils.BeamSqlRow2StringDoFn())))
.containsInAnyOrder(
@@ -100,7 +99,7 @@ public class BeamSetOperatorRelBaseTest {
// use a real pipeline rather than the TestPipeline because we are
// testing exceptions, the pipeline will not actually run.
Pipeline pipeline1 = Pipeline.create(PipelineOptionsFactory.create());
- BeamSqlCli.compilePipeline(sql, pipeline1, sqlEnv);
+ compilePipeline(sql, pipeline1, sqlEnv);
pipeline.run();
}
}
http://git-wip-us.apache.org/repos/asf/beam/blob/49aad927/sdks/java/extensions/sql/src/test/java/org/apache/beam/sdk/extensions/sql/impl/rel/BeamSortRelTest.java
----------------------------------------------------------------------
diff --git a/sdks/java/extensions/sql/src/test/java/org/apache/beam/sdk/extensions/sql/impl/rel/BeamSortRelTest.java b/sdks/java/extensions/sql/src/test/java/org/apache/beam/sdk/extensions/sql/impl/rel/BeamSortRelTest.java
index f033fa0..19ba0d0 100644
--- a/sdks/java/extensions/sql/src/test/java/org/apache/beam/sdk/extensions/sql/impl/rel/BeamSortRelTest.java
+++ b/sdks/java/extensions/sql/src/test/java/org/apache/beam/sdk/extensions/sql/impl/rel/BeamSortRelTest.java
@@ -20,9 +20,8 @@ package org.apache.beam.sdk.extensions.sql.impl.rel;
import java.sql.Types;
import java.util.Date;
-import org.apache.beam.sdk.extensions.sql.BeamSqlCli;
-import org.apache.beam.sdk.extensions.sql.BeamSqlEnv;
import org.apache.beam.sdk.extensions.sql.TestUtils;
+import org.apache.beam.sdk.extensions.sql.impl.BeamSqlEnv;
import org.apache.beam.sdk.extensions.sql.mock.MockedBoundedTable;
import org.apache.beam.sdk.testing.PAssert;
import org.apache.beam.sdk.testing.TestPipeline;
@@ -35,7 +34,7 @@ import org.junit.Test;
/**
* Test for {@code BeamSortRel}.
*/
-public class BeamSortRelTest {
+public class BeamSortRelTest extends BaseRelTest {
static BeamSqlEnv sqlEnv = new BeamSqlEnv();
@Rule
@@ -78,7 +77,7 @@ public class BeamSortRelTest {
+ "FROM ORDER_DETAILS "
+ "ORDER BY order_id asc, site_id desc limit 4";
- PCollection<BeamRecord> rows = BeamSqlCli.compilePipeline(sql, pipeline, sqlEnv);
+ PCollection<BeamRecord> rows = compilePipeline(sql, pipeline, sqlEnv);
PAssert.that(rows).containsInAnyOrder(TestUtils.RowsBuilder.of(
Types.BIGINT, "order_id",
Types.INTEGER, "site_id",
@@ -117,7 +116,7 @@ public class BeamSortRelTest {
+ "FROM ORDER_DETAILS "
+ "ORDER BY order_id asc, site_id desc NULLS FIRST limit 4";
- PCollection<BeamRecord> rows = BeamSqlCli.compilePipeline(sql, pipeline, sqlEnv);
+ PCollection<BeamRecord> rows = compilePipeline(sql, pipeline, sqlEnv);
PAssert.that(rows).containsInAnyOrder(
TestUtils.RowsBuilder.of(
Types.BIGINT, "order_id",
@@ -155,7 +154,7 @@ public class BeamSortRelTest {
+ "FROM ORDER_DETAILS "
+ "ORDER BY order_id asc, site_id desc NULLS LAST limit 4";
- PCollection<BeamRecord> rows = BeamSqlCli.compilePipeline(sql, pipeline, sqlEnv);
+ PCollection<BeamRecord> rows = compilePipeline(sql, pipeline, sqlEnv);
PAssert.that(rows).containsInAnyOrder(
TestUtils.RowsBuilder.of(
Types.BIGINT, "order_id",
@@ -178,7 +177,7 @@ public class BeamSortRelTest {
+ "FROM ORDER_DETAILS "
+ "ORDER BY order_id asc, site_id desc limit 4 offset 4";
- PCollection<BeamRecord> rows = BeamSqlCli.compilePipeline(sql, pipeline, sqlEnv);
+ PCollection<BeamRecord> rows = compilePipeline(sql, pipeline, sqlEnv);
PAssert.that(rows).containsInAnyOrder(
TestUtils.RowsBuilder.of(
Types.BIGINT, "order_id",
@@ -201,7 +200,7 @@ public class BeamSortRelTest {
+ "FROM ORDER_DETAILS "
+ "ORDER BY order_id asc, site_id desc limit 11";
- PCollection<BeamRecord> rows = BeamSqlCli.compilePipeline(sql, pipeline, sqlEnv);
+ PCollection<BeamRecord> rows = compilePipeline(sql, pipeline, sqlEnv);
PAssert.that(rows).containsInAnyOrder(
TestUtils.RowsBuilder.of(
Types.BIGINT, "order_id",
@@ -232,6 +231,6 @@ public class BeamSortRelTest {
+ "ORDER BY order_id asc limit 11";
TestPipeline pipeline = TestPipeline.create();
- BeamSqlCli.compilePipeline(sql, pipeline, sqlEnv);
+ compilePipeline(sql, pipeline, sqlEnv);
}
}
http://git-wip-us.apache.org/repos/asf/beam/blob/49aad927/sdks/java/extensions/sql/src/test/java/org/apache/beam/sdk/extensions/sql/impl/rel/BeamUnionRelTest.java
----------------------------------------------------------------------
diff --git a/sdks/java/extensions/sql/src/test/java/org/apache/beam/sdk/extensions/sql/impl/rel/BeamUnionRelTest.java b/sdks/java/extensions/sql/src/test/java/org/apache/beam/sdk/extensions/sql/impl/rel/BeamUnionRelTest.java
index 7cc52da..d79a54e 100644
--- a/sdks/java/extensions/sql/src/test/java/org/apache/beam/sdk/extensions/sql/impl/rel/BeamUnionRelTest.java
+++ b/sdks/java/extensions/sql/src/test/java/org/apache/beam/sdk/extensions/sql/impl/rel/BeamUnionRelTest.java
@@ -19,9 +19,8 @@
package org.apache.beam.sdk.extensions.sql.impl.rel;
import java.sql.Types;
-import org.apache.beam.sdk.extensions.sql.BeamSqlCli;
-import org.apache.beam.sdk.extensions.sql.BeamSqlEnv;
import org.apache.beam.sdk.extensions.sql.TestUtils;
+import org.apache.beam.sdk.extensions.sql.impl.BeamSqlEnv;
import org.apache.beam.sdk.extensions.sql.mock.MockedBoundedTable;
import org.apache.beam.sdk.testing.PAssert;
import org.apache.beam.sdk.testing.TestPipeline;
@@ -34,7 +33,7 @@ import org.junit.Test;
/**
* Test for {@code BeamUnionRel}.
*/
-public class BeamUnionRelTest {
+public class BeamUnionRelTest extends BaseRelTest {
static BeamSqlEnv sqlEnv = new BeamSqlEnv();
@Rule
@@ -63,7 +62,7 @@ public class BeamUnionRelTest {
+ " order_id, site_id, price "
+ "FROM ORDER_DETAILS ";
- PCollection<BeamRecord> rows = BeamSqlCli.compilePipeline(sql, pipeline, sqlEnv);
+ PCollection<BeamRecord> rows = compilePipeline(sql, pipeline, sqlEnv);
PAssert.that(rows).containsInAnyOrder(
TestUtils.RowsBuilder.of(
Types.BIGINT, "order_id",
@@ -86,7 +85,7 @@ public class BeamUnionRelTest {
+ " SELECT order_id, site_id, price "
+ "FROM ORDER_DETAILS";
- PCollection<BeamRecord> rows = BeamSqlCli.compilePipeline(sql, pipeline, sqlEnv);
+ PCollection<BeamRecord> rows = compilePipeline(sql, pipeline, sqlEnv);
PAssert.that(rows).containsInAnyOrder(
TestUtils.RowsBuilder.of(
Types.BIGINT, "order_id",
http://git-wip-us.apache.org/repos/asf/beam/blob/49aad927/sdks/java/extensions/sql/src/test/java/org/apache/beam/sdk/extensions/sql/impl/rel/BeamValuesRelTest.java
----------------------------------------------------------------------
diff --git a/sdks/java/extensions/sql/src/test/java/org/apache/beam/sdk/extensions/sql/impl/rel/BeamValuesRelTest.java b/sdks/java/extensions/sql/src/test/java/org/apache/beam/sdk/extensions/sql/impl/rel/BeamValuesRelTest.java
index ff31e55..5604e32 100644
--- a/sdks/java/extensions/sql/src/test/java/org/apache/beam/sdk/extensions/sql/impl/rel/BeamValuesRelTest.java
+++ b/sdks/java/extensions/sql/src/test/java/org/apache/beam/sdk/extensions/sql/impl/rel/BeamValuesRelTest.java
@@ -19,9 +19,8 @@
package org.apache.beam.sdk.extensions.sql.impl.rel;
import java.sql.Types;
-import org.apache.beam.sdk.extensions.sql.BeamSqlCli;
-import org.apache.beam.sdk.extensions.sql.BeamSqlEnv;
import org.apache.beam.sdk.extensions.sql.TestUtils;
+import org.apache.beam.sdk.extensions.sql.impl.BeamSqlEnv;
import org.apache.beam.sdk.extensions.sql.mock.MockedBoundedTable;
import org.apache.beam.sdk.testing.PAssert;
import org.apache.beam.sdk.testing.TestPipeline;
@@ -34,7 +33,7 @@ import org.junit.Test;
/**
* Test for {@code BeamValuesRel}.
*/
-public class BeamValuesRelTest {
+public class BeamValuesRelTest extends BaseRelTest {
static BeamSqlEnv sqlEnv = new BeamSqlEnv();
@Rule
@@ -60,7 +59,7 @@ public class BeamValuesRelTest {
public void testValues() throws Exception {
String sql = "insert into string_table(name, description) values "
+ "('hello', 'world'), ('james', 'bond')";
- PCollection<BeamRecord> rows = BeamSqlCli.compilePipeline(sql, pipeline, sqlEnv);
+ PCollection<BeamRecord> rows = compilePipeline(sql, pipeline, sqlEnv);
PAssert.that(rows).containsInAnyOrder(
TestUtils.RowsBuilder.of(
Types.VARCHAR, "name",
@@ -76,7 +75,7 @@ public class BeamValuesRelTest {
@Test
public void testValues_castInt() throws Exception {
String sql = "insert into int_table (c0, c1) values(cast(1 as int), cast(2 as int))";
- PCollection<BeamRecord> rows = BeamSqlCli.compilePipeline(sql, pipeline, sqlEnv);
+ PCollection<BeamRecord> rows = compilePipeline(sql, pipeline, sqlEnv);
PAssert.that(rows).containsInAnyOrder(
TestUtils.RowsBuilder.of(
Types.INTEGER, "c0",
@@ -91,7 +90,7 @@ public class BeamValuesRelTest {
@Test
public void testValues_onlySelect() throws Exception {
String sql = "select 1, '1'";
- PCollection<BeamRecord> rows = BeamSqlCli.compilePipeline(sql, pipeline, sqlEnv);
+ PCollection<BeamRecord> rows = compilePipeline(sql, pipeline, sqlEnv);
PAssert.that(rows).containsInAnyOrder(
TestUtils.RowsBuilder.of(
Types.INTEGER, "EXPR$0",
http://git-wip-us.apache.org/repos/asf/beam/blob/49aad927/sdks/java/extensions/sql/src/test/java/org/apache/beam/sdk/extensions/sql/impl/schema/BeamSqlRowCoderTest.java
----------------------------------------------------------------------
diff --git a/sdks/java/extensions/sql/src/test/java/org/apache/beam/sdk/extensions/sql/impl/schema/BeamSqlRowCoderTest.java b/sdks/java/extensions/sql/src/test/java/org/apache/beam/sdk/extensions/sql/impl/schema/BeamSqlRowCoderTest.java
new file mode 100644
index 0000000..0a320db
--- /dev/null
+++ b/sdks/java/extensions/sql/src/test/java/org/apache/beam/sdk/extensions/sql/impl/schema/BeamSqlRowCoderTest.java
@@ -0,0 +1,77 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements. See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership. The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License. You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.beam.sdk.extensions.sql.impl.schema;
+
+import java.math.BigDecimal;
+import java.util.Date;
+import java.util.GregorianCalendar;
+import org.apache.beam.sdk.coders.BeamRecordCoder;
+import org.apache.beam.sdk.extensions.sql.BeamRecordSqlType;
+import org.apache.beam.sdk.extensions.sql.impl.utils.CalciteUtils;
+import org.apache.beam.sdk.testing.CoderProperties;
+import org.apache.beam.sdk.values.BeamRecord;
+import org.apache.calcite.jdbc.JavaTypeFactoryImpl;
+import org.apache.calcite.rel.type.RelDataType;
+import org.apache.calcite.rel.type.RelDataTypeFactory;
+import org.apache.calcite.rel.type.RelDataTypeSystem;
+import org.apache.calcite.rel.type.RelProtoDataType;
+import org.apache.calcite.sql.type.SqlTypeName;
+import org.junit.Test;
+
+/**
+ * Tests for BeamSqlRowCoder.
+ */
+public class BeamSqlRowCoderTest {
+
+ @Test
+ public void encodeAndDecode() throws Exception {
+ final RelProtoDataType protoRowType = new RelProtoDataType() {
+ @Override
+ public RelDataType apply(RelDataTypeFactory a0) {
+ return a0.builder()
+ .add("col_tinyint", SqlTypeName.TINYINT)
+ .add("col_smallint", SqlTypeName.SMALLINT)
+ .add("col_integer", SqlTypeName.INTEGER)
+ .add("col_bigint", SqlTypeName.BIGINT)
+ .add("col_float", SqlTypeName.FLOAT)
+ .add("col_double", SqlTypeName.DOUBLE)
+ .add("col_decimal", SqlTypeName.DECIMAL)
+ .add("col_string_varchar", SqlTypeName.VARCHAR)
+ .add("col_time", SqlTypeName.TIME)
+ .add("col_timestamp", SqlTypeName.TIMESTAMP)
+ .add("col_boolean", SqlTypeName.BOOLEAN)
+ .build();
+ }
+ };
+
+ BeamRecordSqlType beamSQLRowType = CalciteUtils.toBeamRowType(
+ protoRowType.apply(new JavaTypeFactoryImpl(
+ RelDataTypeSystem.DEFAULT)));
+
+ GregorianCalendar calendar = new GregorianCalendar();
+ calendar.setTime(new Date());
+ BeamRecord row = new BeamRecord(beamSQLRowType
+ , Byte.valueOf("1"), Short.valueOf("1"), 1, 1L, 1.1F, 1.1
+ , BigDecimal.ZERO, "hello", calendar, new Date(), true);
+
+
+ BeamRecordCoder coder = beamSQLRowType.getRecordCoder();
+ CoderProperties.coderDecodeEncodeEqual(coder, row);
+ }
+}
http://git-wip-us.apache.org/repos/asf/beam/blob/49aad927/sdks/java/extensions/sql/src/test/java/org/apache/beam/sdk/extensions/sql/impl/schema/kafka/BeamKafkaCSVTableTest.java
----------------------------------------------------------------------
diff --git a/sdks/java/extensions/sql/src/test/java/org/apache/beam/sdk/extensions/sql/impl/schema/kafka/BeamKafkaCSVTableTest.java b/sdks/java/extensions/sql/src/test/java/org/apache/beam/sdk/extensions/sql/impl/schema/kafka/BeamKafkaCSVTableTest.java
new file mode 100644
index 0000000..fd88448
--- /dev/null
+++ b/sdks/java/extensions/sql/src/test/java/org/apache/beam/sdk/extensions/sql/impl/schema/kafka/BeamKafkaCSVTableTest.java
@@ -0,0 +1,107 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements. See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership. The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License. You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.beam.sdk.extensions.sql.impl.schema.kafka;
+
+import java.io.Serializable;
+import org.apache.beam.sdk.extensions.sql.BeamRecordSqlType;
+import org.apache.beam.sdk.extensions.sql.impl.planner.BeamQueryPlanner;
+import org.apache.beam.sdk.extensions.sql.impl.utils.CalciteUtils;
+import org.apache.beam.sdk.testing.PAssert;
+import org.apache.beam.sdk.testing.TestPipeline;
+import org.apache.beam.sdk.transforms.Create;
+import org.apache.beam.sdk.transforms.DoFn;
+import org.apache.beam.sdk.transforms.ParDo;
+import org.apache.beam.sdk.values.BeamRecord;
+import org.apache.beam.sdk.values.KV;
+import org.apache.beam.sdk.values.PCollection;
+import org.apache.calcite.rel.type.RelDataType;
+import org.apache.calcite.rel.type.RelDataTypeFactory;
+import org.apache.calcite.rel.type.RelProtoDataType;
+import org.apache.calcite.sql.type.SqlTypeName;
+import org.apache.commons.csv.CSVFormat;
+import org.junit.BeforeClass;
+import org.junit.Rule;
+import org.junit.Test;
+
+/**
+ * Test for BeamKafkaCSVTable.
+ */
+public class BeamKafkaCSVTableTest {
+ @Rule
+ public TestPipeline pipeline = TestPipeline.create();
+ public static BeamRecord row1;
+ public static BeamRecord row2;
+
+ @BeforeClass
+ public static void setUp() {
+ row1 = new BeamRecord(genRowType(), 1L, 1, 1.0);
+
+ row2 = new BeamRecord(genRowType(), 2L, 2, 2.0);
+ }
+
+ @Test public void testCsvRecorderDecoder() throws Exception {
+ PCollection<BeamRecord> result = pipeline
+ .apply(
+ Create.of("1,\"1\",1.0", "2,2,2.0")
+ )
+ .apply(ParDo.of(new String2KvBytes()))
+ .apply(
+ new BeamKafkaCSVTable.CsvRecorderDecoder(genRowType(), CSVFormat.DEFAULT)
+ );
+
+ PAssert.that(result).containsInAnyOrder(row1, row2);
+
+ pipeline.run();
+ }
+
+ @Test public void testCsvRecorderEncoder() throws Exception {
+ PCollection<BeamRecord> result = pipeline
+ .apply(
+ Create.of(row1, row2)
+ )
+ .apply(
+ new BeamKafkaCSVTable.CsvRecorderEncoder(genRowType(), CSVFormat.DEFAULT)
+ ).apply(
+ new BeamKafkaCSVTable.CsvRecorderDecoder(genRowType(), CSVFormat.DEFAULT)
+ );
+
+ PAssert.that(result).containsInAnyOrder(row1, row2);
+
+ pipeline.run();
+ }
+
+ private static BeamRecordSqlType genRowType() {
+ return CalciteUtils.toBeamRowType(new RelProtoDataType() {
+
+ @Override public RelDataType apply(RelDataTypeFactory a0) {
+ return a0.builder().add("order_id", SqlTypeName.BIGINT)
+ .add("site_id", SqlTypeName.INTEGER)
+ .add("price", SqlTypeName.DOUBLE).build();
+ }
+ }.apply(BeamQueryPlanner.TYPE_FACTORY));
+ }
+
+ private static class String2KvBytes extends DoFn<String, KV<byte[], byte[]>>
+ implements Serializable {
+ @ProcessElement
+ public void processElement(ProcessContext ctx) {
+ ctx.output(KV.of(new byte[] {}, ctx.element().getBytes()));
+ }
+ }
+}
http://git-wip-us.apache.org/repos/asf/beam/blob/49aad927/sdks/java/extensions/sql/src/test/java/org/apache/beam/sdk/extensions/sql/impl/schema/text/BeamTextCSVTableTest.java
----------------------------------------------------------------------
diff --git a/sdks/java/extensions/sql/src/test/java/org/apache/beam/sdk/extensions/sql/impl/schema/text/BeamTextCSVTableTest.java b/sdks/java/extensions/sql/src/test/java/org/apache/beam/sdk/extensions/sql/impl/schema/text/BeamTextCSVTableTest.java
new file mode 100644
index 0000000..9a57a5f
--- /dev/null
+++ b/sdks/java/extensions/sql/src/test/java/org/apache/beam/sdk/extensions/sql/impl/schema/text/BeamTextCSVTableTest.java
@@ -0,0 +1,176 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements. See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership. The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License. You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.beam.sdk.extensions.sql.impl.schema.text;
+
+import java.io.File;
+import java.io.FileOutputStream;
+import java.io.IOException;
+import java.io.OutputStream;
+import java.io.PrintStream;
+import java.nio.file.FileVisitResult;
+import java.nio.file.Files;
+import java.nio.file.Path;
+import java.nio.file.SimpleFileVisitor;
+import java.nio.file.attribute.BasicFileAttributes;
+import java.util.ArrayList;
+import java.util.Arrays;
+import java.util.List;
+import org.apache.beam.sdk.extensions.sql.BeamRecordSqlType;
+import org.apache.beam.sdk.extensions.sql.impl.planner.BeamQueryPlanner;
+import org.apache.beam.sdk.extensions.sql.impl.utils.CalciteUtils;
+import org.apache.beam.sdk.testing.PAssert;
+import org.apache.beam.sdk.testing.TestPipeline;
+import org.apache.beam.sdk.values.BeamRecord;
+import org.apache.beam.sdk.values.PCollection;
+import org.apache.calcite.rel.type.RelDataType;
+import org.apache.calcite.rel.type.RelDataTypeFactory;
+import org.apache.calcite.rel.type.RelProtoDataType;
+import org.apache.calcite.sql.type.SqlTypeName;
+import org.apache.commons.csv.CSVFormat;
+import org.apache.commons.csv.CSVPrinter;
+import org.junit.AfterClass;
+import org.junit.BeforeClass;
+import org.junit.Rule;
+import org.junit.Test;
+
+/**
+ * Tests for {@code BeamTextCSVTable}.
+ */
+public class BeamTextCSVTableTest {
+
+ @Rule public TestPipeline pipeline = TestPipeline.create();
+ @Rule public TestPipeline pipeline2 = TestPipeline.create();
+
+ /**
+ * testData.
+ *
+ * <p>
+ * The types of the csv fields are:
+ * integer,bigint,float,double,string
+ * </p>
+ */
+ private static Object[] data1 = new Object[] { 1, 1L, 1.1F, 1.1, "james" };
+ private static Object[] data2 = new Object[] { 2, 2L, 2.2F, 2.2, "bond" };
+
+ private static List<Object[]> testData = Arrays.asList(data1, data2);
+ private static List<BeamRecord> testDataRows = new ArrayList<BeamRecord>() {{
+ for (Object[] data : testData) {
+ add(buildRow(data));
+ }
+ }};
+
+ private static Path tempFolder;
+ private static File readerSourceFile;
+ private static File writerTargetFile;
+
+ @Test public void testBuildIOReader() {
+ PCollection<BeamRecord> rows = new BeamTextCSVTable(buildBeamSqlRowType(),
+ readerSourceFile.getAbsolutePath()).buildIOReader(pipeline);
+ PAssert.that(rows).containsInAnyOrder(testDataRows);
+ pipeline.run();
+ }
+
+ @Test public void testBuildIOWriter() {
+ new BeamTextCSVTable(buildBeamSqlRowType(),
+ readerSourceFile.getAbsolutePath()).buildIOReader(pipeline)
+ .apply(new BeamTextCSVTable(buildBeamSqlRowType(), writerTargetFile.getAbsolutePath())
+ .buildIOWriter());
+ pipeline.run();
+
+ PCollection<BeamRecord> rows = new BeamTextCSVTable(buildBeamSqlRowType(),
+ writerTargetFile.getAbsolutePath()).buildIOReader(pipeline2);
+
+ // confirm the two reads match
+ PAssert.that(rows).containsInAnyOrder(testDataRows);
+ pipeline2.run();
+ }
+
+ @BeforeClass public static void setUp() throws IOException {
+ tempFolder = Files.createTempDirectory("BeamTextTableTest");
+ readerSourceFile = writeToFile(testData, "readerSourceFile.txt");
+ writerTargetFile = writeToFile(testData, "writerTargetFile.txt");
+ }
+
+ @AfterClass public static void teardownClass() throws IOException {
+ Files.walkFileTree(tempFolder, new SimpleFileVisitor<Path>() {
+
+ @Override public FileVisitResult visitFile(Path file, BasicFileAttributes attrs)
+ throws IOException {
+ Files.delete(file);
+ return FileVisitResult.CONTINUE;
+ }
+
+ @Override public FileVisitResult postVisitDirectory(Path dir, IOException exc)
+ throws IOException {
+ Files.delete(dir);
+ return FileVisitResult.CONTINUE;
+ }
+ });
+ }
+
+ private static File writeToFile(List<Object[]> rows, String filename) throws IOException {
+ File file = tempFolder.resolve(filename).toFile();
+ OutputStream output = new FileOutputStream(file);
+ writeToStreamAndClose(rows, output);
+ return file;
+ }
+
+ /**
+ * Helper that writes the given lines (adding a newline in between) to a stream, then closes the
+ * stream.
+ */
+ private static void writeToStreamAndClose(List<Object[]> rows, OutputStream outputStream) {
+ try (PrintStream writer = new PrintStream(outputStream)) {
+ CSVPrinter printer = CSVFormat.DEFAULT.print(writer);
+ for (Object[] row : rows) {
+ for (Object field : row) {
+ printer.print(field);
+ }
+ printer.println();
+ }
+ } catch (IOException e) {
+ e.printStackTrace();
+ }
+ }
+
+ private RelProtoDataType buildRowType() {
+ return new RelProtoDataType() {
+
+ @Override public RelDataType apply(RelDataTypeFactory a0) {
+ return a0.builder().add("id", SqlTypeName.INTEGER).add("order_id", SqlTypeName.BIGINT)
+ .add("price", SqlTypeName.FLOAT).add("amount", SqlTypeName.DOUBLE)
+ .add("user_name", SqlTypeName.VARCHAR).build();
+ }
+ };
+ }
+
+ private static RelDataType buildRelDataType() {
+ return BeamQueryPlanner.TYPE_FACTORY.builder().add("id", SqlTypeName.INTEGER)
+ .add("order_id", SqlTypeName.BIGINT).add("price", SqlTypeName.FLOAT)
+ .add("amount", SqlTypeName.DOUBLE).add("user_name", SqlTypeName.VARCHAR).build();
+ }
+
+ private static BeamRecordSqlType buildBeamSqlRowType() {
+ return CalciteUtils.toBeamRowType(buildRelDataType());
+ }
+
+ private static BeamRecord buildRow(Object[] data) {
+ return new BeamRecord(buildBeamSqlRowType(), Arrays.asList(data));
+ }
+}
http://git-wip-us.apache.org/repos/asf/beam/blob/49aad927/sdks/java/extensions/sql/src/test/java/org/apache/beam/sdk/extensions/sql/impl/schema/transform/BeamAggregationTransformTest.java
----------------------------------------------------------------------
diff --git a/sdks/java/extensions/sql/src/test/java/org/apache/beam/sdk/extensions/sql/impl/schema/transform/BeamAggregationTransformTest.java b/sdks/java/extensions/sql/src/test/java/org/apache/beam/sdk/extensions/sql/impl/schema/transform/BeamAggregationTransformTest.java
new file mode 100644
index 0000000..948e86c
--- /dev/null
+++ b/sdks/java/extensions/sql/src/test/java/org/apache/beam/sdk/extensions/sql/impl/schema/transform/BeamAggregationTransformTest.java
@@ -0,0 +1,453 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements. See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership. The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License. You may obtain a copy of the License at
+ * <p>
+ * http://www.apache.org/licenses/LICENSE-2.0
+ * <p>
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.apache.beam.sdk.extensions.sql.impl.schema.transform;
+
+import java.text.ParseException;
+import java.util.ArrayList;
+import java.util.Arrays;
+import java.util.List;
+import org.apache.beam.sdk.coders.BeamRecordCoder;
+import org.apache.beam.sdk.coders.IterableCoder;
+import org.apache.beam.sdk.coders.KvCoder;
+import org.apache.beam.sdk.extensions.sql.BeamRecordSqlType;
+import org.apache.beam.sdk.extensions.sql.impl.planner.BeamQueryPlanner;
+import org.apache.beam.sdk.extensions.sql.impl.transform.BeamAggregationTransforms;
+import org.apache.beam.sdk.extensions.sql.impl.utils.CalciteUtils;
+import org.apache.beam.sdk.testing.PAssert;
+import org.apache.beam.sdk.testing.TestPipeline;
+import org.apache.beam.sdk.transforms.Combine;
+import org.apache.beam.sdk.transforms.Create;
+import org.apache.beam.sdk.transforms.GroupByKey;
+import org.apache.beam.sdk.transforms.ParDo;
+import org.apache.beam.sdk.transforms.WithKeys;
+import org.apache.beam.sdk.values.BeamRecord;
+import org.apache.beam.sdk.values.KV;
+import org.apache.beam.sdk.values.PCollection;
+import org.apache.calcite.rel.core.AggregateCall;
+import org.apache.calcite.rel.type.RelDataTypeFactory.FieldInfoBuilder;
+import org.apache.calcite.rel.type.RelDataTypeSystem;
+import org.apache.calcite.sql.SqlKind;
+import org.apache.calcite.sql.fun.SqlAvgAggFunction;
+import org.apache.calcite.sql.fun.SqlCountAggFunction;
+import org.apache.calcite.sql.fun.SqlMinMaxAggFunction;
+import org.apache.calcite.sql.fun.SqlSumAggFunction;
+import org.apache.calcite.sql.type.BasicSqlType;
+import org.apache.calcite.sql.type.SqlTypeName;
+import org.apache.calcite.util.ImmutableBitSet;
+import org.junit.Rule;
+import org.junit.Test;
+
+/**
+ * Unit tests for {@link BeamAggregationTransforms}.
+ *
+ */
+public class BeamAggregationTransformTest extends BeamTransformBaseTest{
+
+ @Rule
+ public TestPipeline p = TestPipeline.create();
+
+ private List<AggregateCall> aggCalls;
+
+ private BeamRecordSqlType keyType;
+ private BeamRecordSqlType aggPartType;
+ private BeamRecordSqlType outputType;
+
+ private BeamRecordCoder inRecordCoder;
+ private BeamRecordCoder keyCoder;
+ private BeamRecordCoder aggCoder;
+ private BeamRecordCoder outRecordCoder;
+
+ /**
+ * This step is equivalent to the query below.
+ * <pre>
+ * SELECT `f_int`
+ * , COUNT(*) AS `size`
+ * , SUM(`f_long`) AS `sum1`, AVG(`f_long`) AS `avg1`
+ * , MAX(`f_long`) AS `max1`, MIN(`f_long`) AS `min1`
+ * , SUM(`f_short`) AS `sum2`, AVG(`f_short`) AS `avg2`
+ * , MAX(`f_short`) AS `max2`, MIN(`f_short`) AS `min2`
+ * , SUM(`f_byte`) AS `sum3`, AVG(`f_byte`) AS `avg3`
+ * , MAX(`f_byte`) AS `max3`, MIN(`f_byte`) AS `min3`
+ * , SUM(`f_float`) AS `sum4`, AVG(`f_float`) AS `avg4`
+ * , MAX(`f_float`) AS `max4`, MIN(`f_float`) AS `min4`
+ * , SUM(`f_double`) AS `sum5`, AVG(`f_double`) AS `avg5`
+ * , MAX(`f_double`) AS `max5`, MIN(`f_double`) AS `min5`
+ * , MAX(`f_timestamp`) AS `max7`, MIN(`f_timestamp`) AS `min7`
+ * , SUM(`f_int2`) AS `sum8`, AVG(`f_int2`) AS `avg8`
+ * , MAX(`f_int2`) AS `max8`, MIN(`f_int2`) AS `min8`
+ * FROM TABLE_NAME
+ * GROUP BY `f_int`
+ * </pre>
+ * @throws ParseException if a timestamp literal used in the expected results cannot be parsed
+ */
+ @Test
+ public void testCountPerElementBasic() throws ParseException {
+ setupEnvironment();
+
+ PCollection<BeamRecord> input = p.apply(Create.of(inputRows));
+
+ //1. extract fields in group-by key part
+ PCollection<KV<BeamRecord, BeamRecord>> exGroupByStream = input.apply("exGroupBy",
+ WithKeys
+ .of(new BeamAggregationTransforms.AggregationGroupByKeyFn(-1, ImmutableBitSet.of(0))))
+ .setCoder(KvCoder.<BeamRecord, BeamRecord>of(keyCoder, inRecordCoder));
+
+ //2. apply a GroupByKey.
+ PCollection<KV<BeamRecord, Iterable<BeamRecord>>> groupedStream = exGroupByStream
+ .apply("groupBy", GroupByKey.<BeamRecord, BeamRecord>create())
+ .setCoder(KvCoder.<BeamRecord, Iterable<BeamRecord>>of(keyCoder,
+ IterableCoder.<BeamRecord>of(inRecordCoder)));
+
+ //3. run aggregation functions
+ PCollection<KV<BeamRecord, BeamRecord>> aggregatedStream = groupedStream.apply("aggregation",
+ Combine.<BeamRecord, BeamRecord, BeamRecord>groupedValues(
+ new BeamAggregationTransforms.AggregationAdaptor(aggCalls, inputRowType)))
+ .setCoder(KvCoder.<BeamRecord, BeamRecord>of(keyCoder, aggCoder));
+
+ //4. flat KV to a single record
+ PCollection<BeamRecord> mergedStream = aggregatedStream.apply("mergeRecord",
+ ParDo.of(new BeamAggregationTransforms.MergeAggregationRecord(outputType, aggCalls, -1)));
+ mergedStream.setCoder(outRecordCoder);
+
+ //assert function BeamAggregationTransform.AggregationGroupByKeyFn
+ PAssert.that(exGroupByStream).containsInAnyOrder(prepareResultOfAggregationGroupByKeyFn());
+
+ //assert BeamAggregationTransform.AggregationCombineFn
+ PAssert.that(aggregatedStream).containsInAnyOrder(prepareResultOfAggregationCombineFn());
+
+ //assert BeamAggregationTransform.MergeAggregationRecord
+ PAssert.that(mergedStream).containsInAnyOrder(prepareResultOfMergeAggregationRecord());
+
+ p.run();
+}
+
+ private void setupEnvironment() {
+ prepareAggregationCalls();
+ prepareTypeAndCoder();
+ }
+
+ /**
+ * create list of all {@link AggregateCall}.
+ */
+ @SuppressWarnings("deprecation")
+ private void prepareAggregationCalls() {
+ //aggregations for all data type
+ aggCalls = new ArrayList<>();
+ aggCalls.add(
+ new AggregateCall(new SqlCountAggFunction(), false,
+ Arrays.<Integer>asList(),
+ new BasicSqlType(RelDataTypeSystem.DEFAULT, SqlTypeName.BIGINT),
+ "count")
+ );
+ aggCalls.add(
+ new AggregateCall(new SqlSumAggFunction(
+ new BasicSqlType(RelDataTypeSystem.DEFAULT, SqlTypeName.BIGINT)), false,
+ Arrays.<Integer>asList(1),
+ new BasicSqlType(RelDataTypeSystem.DEFAULT, SqlTypeName.BIGINT),
+ "sum1")
+ );
+ aggCalls.add(
+ new AggregateCall(new SqlAvgAggFunction(SqlKind.AVG), false,
+ Arrays.<Integer>asList(1),
+ new BasicSqlType(RelDataTypeSystem.DEFAULT, SqlTypeName.BIGINT),
+ "avg1")
+ );
+ aggCalls.add(
+ new AggregateCall(new SqlMinMaxAggFunction(SqlKind.MAX), false,
+ Arrays.<Integer>asList(1),
+ new BasicSqlType(RelDataTypeSystem.DEFAULT, SqlTypeName.BIGINT),
+ "max1")
+ );
+ aggCalls.add(
+ new AggregateCall(new SqlMinMaxAggFunction(SqlKind.MIN), false,
+ Arrays.<Integer>asList(1),
+ new BasicSqlType(RelDataTypeSystem.DEFAULT, SqlTypeName.BIGINT),
+ "min1")
+ );
+
+ aggCalls.add(
+ new AggregateCall(new SqlSumAggFunction(
+ new BasicSqlType(RelDataTypeSystem.DEFAULT, SqlTypeName.SMALLINT)), false,
+ Arrays.<Integer>asList(2),
+ new BasicSqlType(RelDataTypeSystem.DEFAULT, SqlTypeName.SMALLINT),
+ "sum2")
+ );
+ aggCalls.add(
+ new AggregateCall(new SqlAvgAggFunction(SqlKind.AVG), false,
+ Arrays.<Integer>asList(2),
+ new BasicSqlType(RelDataTypeSystem.DEFAULT, SqlTypeName.SMALLINT),
+ "avg2")
+ );
+ aggCalls.add(
+ new AggregateCall(new SqlMinMaxAggFunction(SqlKind.MAX), false,
+ Arrays.<Integer>asList(2),
+ new BasicSqlType(RelDataTypeSystem.DEFAULT, SqlTypeName.SMALLINT),
+ "max2")
+ );
+ aggCalls.add(
+ new AggregateCall(new SqlMinMaxAggFunction(SqlKind.MIN), false,
+ Arrays.<Integer>asList(2),
+ new BasicSqlType(RelDataTypeSystem.DEFAULT, SqlTypeName.SMALLINT),
+ "min2")
+ );
+
+ aggCalls.add(
+ new AggregateCall(
+ new SqlSumAggFunction(new BasicSqlType(RelDataTypeSystem.DEFAULT, SqlTypeName.TINYINT)),
+ false,
+ Arrays.<Integer>asList(3),
+ new BasicSqlType(RelDataTypeSystem.DEFAULT, SqlTypeName.TINYINT),
+ "sum3")
+ );
+ aggCalls.add(
+ new AggregateCall(new SqlAvgAggFunction(SqlKind.AVG), false,
+ Arrays.<Integer>asList(3),
+ new BasicSqlType(RelDataTypeSystem.DEFAULT, SqlTypeName.TINYINT),
+ "avg3")
+ );
+ aggCalls.add(
+ new AggregateCall(new SqlMinMaxAggFunction(SqlKind.MAX), false,
+ Arrays.<Integer>asList(3),
+ new BasicSqlType(RelDataTypeSystem.DEFAULT, SqlTypeName.TINYINT),
+ "max3")
+ );
+ aggCalls.add(
+ new AggregateCall(new SqlMinMaxAggFunction(SqlKind.MIN), false,
+ Arrays.<Integer>asList(3),
+ new BasicSqlType(RelDataTypeSystem.DEFAULT, SqlTypeName.TINYINT),
+ "min3")
+ );
+
+ aggCalls.add(
+ new AggregateCall(
+ new SqlSumAggFunction(new BasicSqlType(RelDataTypeSystem.DEFAULT, SqlTypeName.FLOAT)),
+ false,
+ Arrays.<Integer>asList(4),
+ new BasicSqlType(RelDataTypeSystem.DEFAULT, SqlTypeName.FLOAT),
+ "sum4")
+ );
+ aggCalls.add(
+ new AggregateCall(new SqlAvgAggFunction(SqlKind.AVG), false,
+ Arrays.<Integer>asList(4),
+ new BasicSqlType(RelDataTypeSystem.DEFAULT, SqlTypeName.FLOAT),
+ "avg4")
+ );
+ aggCalls.add(
+ new AggregateCall(new SqlMinMaxAggFunction(SqlKind.MAX), false,
+ Arrays.<Integer>asList(4),
+ new BasicSqlType(RelDataTypeSystem.DEFAULT, SqlTypeName.FLOAT),
+ "max4")
+ );
+ aggCalls.add(
+ new AggregateCall(new SqlMinMaxAggFunction(SqlKind.MIN), false,
+ Arrays.<Integer>asList(4),
+ new BasicSqlType(RelDataTypeSystem.DEFAULT, SqlTypeName.FLOAT),
+ "min4")
+ );
+
+ aggCalls.add(
+ new AggregateCall(
+ new SqlSumAggFunction(new BasicSqlType(RelDataTypeSystem.DEFAULT, SqlTypeName.DOUBLE)),
+ false,
+ Arrays.<Integer>asList(5),
+ new BasicSqlType(RelDataTypeSystem.DEFAULT, SqlTypeName.DOUBLE),
+ "sum5")
+ );
+ aggCalls.add(
+ new AggregateCall(new SqlAvgAggFunction(SqlKind.AVG), false,
+ Arrays.<Integer>asList(5),
+ new BasicSqlType(RelDataTypeSystem.DEFAULT, SqlTypeName.DOUBLE),
+ "avg5")
+ );
+ aggCalls.add(
+ new AggregateCall(new SqlMinMaxAggFunction(SqlKind.MAX), false,
+ Arrays.<Integer>asList(5),
+ new BasicSqlType(RelDataTypeSystem.DEFAULT, SqlTypeName.DOUBLE),
+ "max5")
+ );
+ aggCalls.add(
+ new AggregateCall(new SqlMinMaxAggFunction(SqlKind.MIN), false,
+ Arrays.<Integer>asList(5),
+ new BasicSqlType(RelDataTypeSystem.DEFAULT, SqlTypeName.DOUBLE),
+ "min5")
+ );
+
+ aggCalls.add(
+ new AggregateCall(new SqlMinMaxAggFunction(SqlKind.MAX), false,
+ Arrays.<Integer>asList(7),
+ new BasicSqlType(RelDataTypeSystem.DEFAULT, SqlTypeName.TIMESTAMP),
+ "max7")
+ );
+ aggCalls.add(
+ new AggregateCall(new SqlMinMaxAggFunction(SqlKind.MIN), false,
+ Arrays.<Integer>asList(7),
+ new BasicSqlType(RelDataTypeSystem.DEFAULT, SqlTypeName.TIMESTAMP),
+ "min7")
+ );
+
+ aggCalls.add(
+ new AggregateCall(
+ new SqlSumAggFunction(new BasicSqlType(RelDataTypeSystem.DEFAULT, SqlTypeName.INTEGER)),
+ false,
+ Arrays.<Integer>asList(8),
+ new BasicSqlType(RelDataTypeSystem.DEFAULT, SqlTypeName.INTEGER),
+ "sum8")
+ );
+ aggCalls.add(
+ new AggregateCall(new SqlAvgAggFunction(SqlKind.AVG), false,
+ Arrays.<Integer>asList(8),
+ new BasicSqlType(RelDataTypeSystem.DEFAULT, SqlTypeName.INTEGER),
+ "avg8")
+ );
+ aggCalls.add(
+ new AggregateCall(new SqlMinMaxAggFunction(SqlKind.MAX), false,
+ Arrays.<Integer>asList(8),
+ new BasicSqlType(RelDataTypeSystem.DEFAULT, SqlTypeName.INTEGER),
+ "max8")
+ );
+ aggCalls.add(
+ new AggregateCall(new SqlMinMaxAggFunction(SqlKind.MIN), false,
+ Arrays.<Integer>asList(8),
+ new BasicSqlType(RelDataTypeSystem.DEFAULT, SqlTypeName.INTEGER),
+ "min8")
+ );
+ }
+
+ /**
+ * Coders used in aggregation steps.
+ */
+ private void prepareTypeAndCoder() {
+ inRecordCoder = inputRowType.getRecordCoder();
+
+ keyType = initTypeOfSqlRow(Arrays.asList(KV.of("f_int", SqlTypeName.INTEGER)));
+ keyCoder = keyType.getRecordCoder();
+
+ aggPartType = initTypeOfSqlRow(
+ Arrays.asList(KV.of("count", SqlTypeName.BIGINT),
+
+ KV.of("sum1", SqlTypeName.BIGINT), KV.of("avg1", SqlTypeName.BIGINT),
+ KV.of("max1", SqlTypeName.BIGINT), KV.of("min1", SqlTypeName.BIGINT),
+
+ KV.of("sum2", SqlTypeName.SMALLINT), KV.of("avg2", SqlTypeName.SMALLINT),
+ KV.of("max2", SqlTypeName.SMALLINT), KV.of("min2", SqlTypeName.SMALLINT),
+
+ KV.of("sum3", SqlTypeName.TINYINT), KV.of("avg3", SqlTypeName.TINYINT),
+ KV.of("max3", SqlTypeName.TINYINT), KV.of("min3", SqlTypeName.TINYINT),
+
+ KV.of("sum4", SqlTypeName.FLOAT), KV.of("avg4", SqlTypeName.FLOAT),
+ KV.of("max4", SqlTypeName.FLOAT), KV.of("min4", SqlTypeName.FLOAT),
+
+ KV.of("sum5", SqlTypeName.DOUBLE), KV.of("avg5", SqlTypeName.DOUBLE),
+ KV.of("max5", SqlTypeName.DOUBLE), KV.of("min5", SqlTypeName.DOUBLE),
+
+ KV.of("max7", SqlTypeName.TIMESTAMP), KV.of("min7", SqlTypeName.TIMESTAMP),
+
+ KV.of("sum8", SqlTypeName.INTEGER), KV.of("avg8", SqlTypeName.INTEGER),
+ KV.of("max8", SqlTypeName.INTEGER), KV.of("min8", SqlTypeName.INTEGER)
+ ));
+ aggCoder = aggPartType.getRecordCoder();
+
+ outputType = prepareFinalRowType();
+ outRecordCoder = outputType.getRecordCoder();
+ }
+
+ /**
+ * Expected results after {@link BeamAggregationTransforms.AggregationGroupByKeyFn}.
+ */
+ private List<KV<BeamRecord, BeamRecord>> prepareResultOfAggregationGroupByKeyFn() {
+ return Arrays.asList(
+ KV.of(new BeamRecord(keyType, Arrays.<Object>asList(inputRows.get(0).getInteger(0))),
+ inputRows.get(0)),
+ KV.of(new BeamRecord(keyType, Arrays.<Object>asList(inputRows.get(1).getInteger(0))),
+ inputRows.get(1)),
+ KV.of(new BeamRecord(keyType, Arrays.<Object>asList(inputRows.get(2).getInteger(0))),
+ inputRows.get(2)),
+ KV.of(new BeamRecord(keyType, Arrays.<Object>asList(inputRows.get(3).getInteger(0))),
+ inputRows.get(3)));
+ }
+
+ /**
+ * Expected results after {@link BeamAggregationTransforms.AggregationCombineFn}.
+ */
+ private List<KV<BeamRecord, BeamRecord>> prepareResultOfAggregationCombineFn()
+ throws ParseException {
+ return Arrays.asList(
+ KV.of(new BeamRecord(keyType, Arrays.<Object>asList(inputRows.get(0).getInteger(0))),
+ new BeamRecord(aggPartType, Arrays.<Object>asList(
+ 4L,
+ 10000L, 2500L, 4000L, 1000L,
+ (short) 10, (short) 2, (short) 4, (short) 1,
+ (byte) 10, (byte) 2, (byte) 4, (byte) 1,
+ 10.0F, 2.5F, 4.0F, 1.0F,
+ 10.0, 2.5, 4.0, 1.0,
+ format.parse("2017-01-01 02:04:03"), format.parse("2017-01-01 01:01:03"),
+ 10, 2, 4, 1
+ )))
+ );
+ }
+
+ /**
+ * Row type of final output row.
+ */
+ private BeamRecordSqlType prepareFinalRowType() {
+ FieldInfoBuilder builder = BeamQueryPlanner.TYPE_FACTORY.builder();
+ List<KV<String, SqlTypeName>> columnMetadata =
+ Arrays.asList(KV.of("f_int", SqlTypeName.INTEGER), KV.of("count", SqlTypeName.BIGINT),
+
+ KV.of("sum1", SqlTypeName.BIGINT), KV.of("avg1", SqlTypeName.BIGINT),
+ KV.of("max1", SqlTypeName.BIGINT), KV.of("min1", SqlTypeName.BIGINT),
+
+ KV.of("sum2", SqlTypeName.SMALLINT), KV.of("avg2", SqlTypeName.SMALLINT),
+ KV.of("max2", SqlTypeName.SMALLINT), KV.of("min2", SqlTypeName.SMALLINT),
+
+ KV.of("sum3", SqlTypeName.TINYINT), KV.of("avg3", SqlTypeName.TINYINT),
+ KV.of("max3", SqlTypeName.TINYINT), KV.of("min3", SqlTypeName.TINYINT),
+
+ KV.of("sum4", SqlTypeName.FLOAT), KV.of("avg4", SqlTypeName.FLOAT),
+ KV.of("max4", SqlTypeName.FLOAT), KV.of("min4", SqlTypeName.FLOAT),
+
+ KV.of("sum5", SqlTypeName.DOUBLE), KV.of("avg5", SqlTypeName.DOUBLE),
+ KV.of("max5", SqlTypeName.DOUBLE), KV.of("min5", SqlTypeName.DOUBLE),
+
+ KV.of("max7", SqlTypeName.TIMESTAMP), KV.of("min7", SqlTypeName.TIMESTAMP),
+
+ KV.of("sum8", SqlTypeName.INTEGER), KV.of("avg8", SqlTypeName.INTEGER),
+ KV.of("max8", SqlTypeName.INTEGER), KV.of("min8", SqlTypeName.INTEGER)
+ );
+ for (KV<String, SqlTypeName> cm : columnMetadata) {
+ builder.add(cm.getKey(), cm.getValue());
+ }
+ return CalciteUtils.toBeamRowType(builder.build());
+ }
+
+ /**
+ * Expected results after {@link BeamAggregationTransforms.MergeAggregationRecord}.
+ */
+ private BeamRecord prepareResultOfMergeAggregationRecord() throws ParseException {
+ return new BeamRecord(outputType, Arrays.<Object>asList(
+ 1, 4L,
+ 10000L, 2500L, 4000L, 1000L,
+ (short) 10, (short) 2, (short) 4, (short) 1,
+ (byte) 10, (byte) 2, (byte) 4, (byte) 1,
+ 10.0F, 2.5F, 4.0F, 1.0F,
+ 10.0, 2.5, 4.0, 1.0,
+ format.parse("2017-01-01 02:04:03"), format.parse("2017-01-01 01:01:03"),
+ 10, 2, 4, 1
+ ));
+ }
+}
http://git-wip-us.apache.org/repos/asf/beam/blob/49aad927/sdks/java/extensions/sql/src/test/java/org/apache/beam/sdk/extensions/sql/impl/schema/transform/BeamTransformBaseTest.java
----------------------------------------------------------------------
diff --git a/sdks/java/extensions/sql/src/test/java/org/apache/beam/sdk/extensions/sql/impl/schema/transform/BeamTransformBaseTest.java b/sdks/java/extensions/sql/src/test/java/org/apache/beam/sdk/extensions/sql/impl/schema/transform/BeamTransformBaseTest.java
new file mode 100644
index 0000000..3c8f040
--- /dev/null
+++ b/sdks/java/extensions/sql/src/test/java/org/apache/beam/sdk/extensions/sql/impl/schema/transform/BeamTransformBaseTest.java
@@ -0,0 +1,97 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements. See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership. The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License. You may obtain a copy of the License at
+ * <p>
+ * http://www.apache.org/licenses/LICENSE-2.0
+ * <p>
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.apache.beam.sdk.extensions.sql.impl.schema.transform;
+
+import java.text.DateFormat;
+import java.text.ParseException;
+import java.text.SimpleDateFormat;
+import java.util.Arrays;
+import java.util.List;
+import org.apache.beam.sdk.extensions.sql.BeamRecordSqlType;
+import org.apache.beam.sdk.extensions.sql.impl.planner.BeamQueryPlanner;
+import org.apache.beam.sdk.extensions.sql.impl.utils.CalciteUtils;
+import org.apache.beam.sdk.values.BeamRecord;
+import org.apache.beam.sdk.values.KV;
+import org.apache.calcite.rel.type.RelDataTypeFactory.FieldInfoBuilder;
+import org.apache.calcite.sql.type.SqlTypeName;
+import org.junit.BeforeClass;
+
+/**
+ * Shared methods for testing PTransforms that execute Beam SQL steps.
+ *
+ */
+public class BeamTransformBaseTest {
+ public static DateFormat format = new SimpleDateFormat("yyyy-MM-dd HH:mm:ss");
+
+ public static BeamRecordSqlType inputRowType;
+ public static List<BeamRecord> inputRows;
+
+ @BeforeClass
+ public static void prepareInput() throws NumberFormatException, ParseException{
+ List<KV<String, SqlTypeName>> columnMetadata = Arrays.asList(
+ KV.of("f_int", SqlTypeName.INTEGER), KV.of("f_long", SqlTypeName.BIGINT),
+ KV.of("f_short", SqlTypeName.SMALLINT), KV.of("f_byte", SqlTypeName.TINYINT),
+ KV.of("f_float", SqlTypeName.FLOAT), KV.of("f_double", SqlTypeName.DOUBLE),
+ KV.of("f_string", SqlTypeName.VARCHAR), KV.of("f_timestamp", SqlTypeName.TIMESTAMP),
+ KV.of("f_int2", SqlTypeName.INTEGER)
+ );
+ inputRowType = initTypeOfSqlRow(columnMetadata);
+ inputRows = Arrays.asList(
+ initBeamSqlRow(columnMetadata,
+ Arrays.<Object>asList(1, 1000L, Short.valueOf("1"), Byte.valueOf("1"), 1.0F, 1.0,
+ "string_row1", format.parse("2017-01-01 01:01:03"), 1)),
+ initBeamSqlRow(columnMetadata,
+ Arrays.<Object>asList(1, 2000L, Short.valueOf("2"), Byte.valueOf("2"), 2.0F, 2.0,
+ "string_row2", format.parse("2017-01-01 01:02:03"), 2)),
+ initBeamSqlRow(columnMetadata,
+ Arrays.<Object>asList(1, 3000L, Short.valueOf("3"), Byte.valueOf("3"), 3.0F, 3.0,
+ "string_row3", format.parse("2017-01-01 01:03:03"), 3)),
+ initBeamSqlRow(columnMetadata, Arrays.<Object>asList(1, 4000L, Short.valueOf("4"),
+ Byte.valueOf("4"), 4.0F, 4.0, "string_row4", format.parse("2017-01-01 02:04:03"), 4)));
+ }
+
+ /**
+ * Creates a {@code BeamSqlRowType} for the given column metadata.
+ */
+ public static BeamRecordSqlType initTypeOfSqlRow(List<KV<String, SqlTypeName>> columnMetadata){
+ FieldInfoBuilder builder = BeamQueryPlanner.TYPE_FACTORY.builder();
+ for (KV<String, SqlTypeName> cm : columnMetadata) {
+ builder.add(cm.getKey(), cm.getValue());
+ }
+ return CalciteUtils.toBeamRowType(builder.build());
+ }
+
+ /**
+ * Create an empty row with given column metadata.
+ */
+ public static BeamRecord initBeamSqlRow(List<KV<String, SqlTypeName>> columnMetadata) {
+ return initBeamSqlRow(columnMetadata, Arrays.asList());
+ }
+
+ /**
+ * Create a row with given column metadata, and values for each column.
+ *
+ */
+ public static BeamRecord initBeamSqlRow(List<KV<String, SqlTypeName>> columnMetadata,
+ List<Object> rowValues){
+ BeamRecordSqlType rowType = initTypeOfSqlRow(columnMetadata);
+
+ return new BeamRecord(rowType, rowValues);
+ }
+
+}
http://git-wip-us.apache.org/repos/asf/beam/blob/49aad927/sdks/java/extensions/sql/src/test/java/org/apache/beam/sdk/extensions/sql/integrationtest/BeamSqlBuiltinFunctionsIntegrationTestBase.java
----------------------------------------------------------------------
diff --git a/sdks/java/extensions/sql/src/test/java/org/apache/beam/sdk/extensions/sql/integrationtest/BeamSqlBuiltinFunctionsIntegrationTestBase.java b/sdks/java/extensions/sql/src/test/java/org/apache/beam/sdk/extensions/sql/integrationtest/BeamSqlBuiltinFunctionsIntegrationTestBase.java
index 5898e2e..a64afa6 100644
--- a/sdks/java/extensions/sql/src/test/java/org/apache/beam/sdk/extensions/sql/integrationtest/BeamSqlBuiltinFunctionsIntegrationTestBase.java
+++ b/sdks/java/extensions/sql/src/test/java/org/apache/beam/sdk/extensions/sql/integrationtest/BeamSqlBuiltinFunctionsIntegrationTestBase.java
@@ -29,10 +29,10 @@ import java.util.HashMap;
import java.util.List;
import java.util.Map;
import java.util.TimeZone;
+import org.apache.beam.sdk.extensions.sql.BeamRecordSqlType;
import org.apache.beam.sdk.extensions.sql.BeamSql;
import org.apache.beam.sdk.extensions.sql.TestUtils;
import org.apache.beam.sdk.extensions.sql.mock.MockedBoundedTable;
-import org.apache.beam.sdk.extensions.sql.schema.BeamRecordSqlType;
import org.apache.beam.sdk.testing.PAssert;
import org.apache.beam.sdk.testing.TestPipeline;
import org.apache.beam.sdk.values.BeamRecord;
http://git-wip-us.apache.org/repos/asf/beam/blob/49aad927/sdks/java/extensions/sql/src/test/java/org/apache/beam/sdk/extensions/sql/integrationtest/BeamSqlComparisonOperatorsIntegrationTest.java
----------------------------------------------------------------------
diff --git a/sdks/java/extensions/sql/src/test/java/org/apache/beam/sdk/extensions/sql/integrationtest/BeamSqlComparisonOperatorsIntegrationTest.java b/sdks/java/extensions/sql/src/test/java/org/apache/beam/sdk/extensions/sql/integrationtest/BeamSqlComparisonOperatorsIntegrationTest.java
index 4ce2f45..a836f79 100644
--- a/sdks/java/extensions/sql/src/test/java/org/apache/beam/sdk/extensions/sql/integrationtest/BeamSqlComparisonOperatorsIntegrationTest.java
+++ b/sdks/java/extensions/sql/src/test/java/org/apache/beam/sdk/extensions/sql/integrationtest/BeamSqlComparisonOperatorsIntegrationTest.java
@@ -21,8 +21,8 @@ package org.apache.beam.sdk.extensions.sql.integrationtest;
import java.math.BigDecimal;
import java.sql.Types;
import java.util.Arrays;
+import org.apache.beam.sdk.extensions.sql.BeamRecordSqlType;
import org.apache.beam.sdk.extensions.sql.mock.MockedBoundedTable;
-import org.apache.beam.sdk.extensions.sql.schema.BeamRecordSqlType;
import org.apache.beam.sdk.values.BeamRecord;
import org.apache.beam.sdk.values.PCollection;
import org.junit.Test;
http://git-wip-us.apache.org/repos/asf/beam/blob/49aad927/sdks/java/extensions/sql/src/test/java/org/apache/beam/sdk/extensions/sql/mock/MockedBoundedTable.java
----------------------------------------------------------------------
diff --git a/sdks/java/extensions/sql/src/test/java/org/apache/beam/sdk/extensions/sql/mock/MockedBoundedTable.java b/sdks/java/extensions/sql/src/test/java/org/apache/beam/sdk/extensions/sql/mock/MockedBoundedTable.java
index 60e8211..cf66268 100644
--- a/sdks/java/extensions/sql/src/test/java/org/apache/beam/sdk/extensions/sql/mock/MockedBoundedTable.java
+++ b/sdks/java/extensions/sql/src/test/java/org/apache/beam/sdk/extensions/sql/mock/MockedBoundedTable.java
@@ -25,8 +25,8 @@ import java.util.Arrays;
import java.util.List;
import java.util.concurrent.ConcurrentLinkedQueue;
import org.apache.beam.sdk.Pipeline;
-import org.apache.beam.sdk.extensions.sql.schema.BeamIOType;
-import org.apache.beam.sdk.extensions.sql.schema.BeamRecordSqlType;
+import org.apache.beam.sdk.extensions.sql.BeamRecordSqlType;
+import org.apache.beam.sdk.extensions.sql.impl.schema.BeamIOType;
import org.apache.beam.sdk.transforms.Create;
import org.apache.beam.sdk.transforms.DoFn;
import org.apache.beam.sdk.transforms.PTransform;
http://git-wip-us.apache.org/repos/asf/beam/blob/49aad927/sdks/java/extensions/sql/src/test/java/org/apache/beam/sdk/extensions/sql/mock/MockedTable.java
----------------------------------------------------------------------
diff --git a/sdks/java/extensions/sql/src/test/java/org/apache/beam/sdk/extensions/sql/mock/MockedTable.java b/sdks/java/extensions/sql/src/test/java/org/apache/beam/sdk/extensions/sql/mock/MockedTable.java
index 426789c..d661866 100644
--- a/sdks/java/extensions/sql/src/test/java/org/apache/beam/sdk/extensions/sql/mock/MockedTable.java
+++ b/sdks/java/extensions/sql/src/test/java/org/apache/beam/sdk/extensions/sql/mock/MockedTable.java
@@ -19,8 +19,8 @@
package org.apache.beam.sdk.extensions.sql.mock;
import java.util.concurrent.atomic.AtomicInteger;
-import org.apache.beam.sdk.extensions.sql.schema.BaseBeamTable;
-import org.apache.beam.sdk.extensions.sql.schema.BeamRecordSqlType;
+import org.apache.beam.sdk.extensions.sql.BeamRecordSqlType;
+import org.apache.beam.sdk.extensions.sql.impl.schema.BaseBeamTable;
import org.apache.beam.sdk.transforms.PTransform;
import org.apache.beam.sdk.values.BeamRecord;
import org.apache.beam.sdk.values.PCollection;
http://git-wip-us.apache.org/repos/asf/beam/blob/49aad927/sdks/java/extensions/sql/src/test/java/org/apache/beam/sdk/extensions/sql/mock/MockedUnboundedTable.java
----------------------------------------------------------------------
diff --git a/sdks/java/extensions/sql/src/test/java/org/apache/beam/sdk/extensions/sql/mock/MockedUnboundedTable.java b/sdks/java/extensions/sql/src/test/java/org/apache/beam/sdk/extensions/sql/mock/MockedUnboundedTable.java
index 465705d..31234e1 100644
--- a/sdks/java/extensions/sql/src/test/java/org/apache/beam/sdk/extensions/sql/mock/MockedUnboundedTable.java
+++ b/sdks/java/extensions/sql/src/test/java/org/apache/beam/sdk/extensions/sql/mock/MockedUnboundedTable.java
@@ -22,9 +22,9 @@ import java.util.ArrayList;
import java.util.Arrays;
import java.util.List;
import org.apache.beam.sdk.Pipeline;
+import org.apache.beam.sdk.extensions.sql.BeamRecordSqlType;
import org.apache.beam.sdk.extensions.sql.TestUtils;
-import org.apache.beam.sdk.extensions.sql.schema.BeamIOType;
-import org.apache.beam.sdk.extensions.sql.schema.BeamRecordSqlType;
+import org.apache.beam.sdk.extensions.sql.impl.schema.BeamIOType;
import org.apache.beam.sdk.testing.TestStream;
import org.apache.beam.sdk.values.BeamRecord;
import org.apache.beam.sdk.values.PCollection;
[5/5] beam git commit: [BEAM-2740] This closes #3708
Posted by ta...@apache.org.
[BEAM-2740] This closes #3708
Project: http://git-wip-us.apache.org/repos/asf/beam/repo
Commit: http://git-wip-us.apache.org/repos/asf/beam/commit/a1cc5518
Tree: http://git-wip-us.apache.org/repos/asf/beam/tree/a1cc5518
Diff: http://git-wip-us.apache.org/repos/asf/beam/diff/a1cc5518
Branch: refs/heads/DSL_SQL
Commit: a1cc5518e0c1cc83c33c3e2e87128fc59935a314
Parents: 9eec6a0 49aad92
Author: Tyler Akidau <ta...@apache.org>
Authored: Tue Aug 15 11:40:59 2017 -0700
Committer: Tyler Akidau <ta...@apache.org>
Committed: Tue Aug 15 11:40:59 2017 -0700
----------------------------------------------------------------------
.../sdk/extensions/sql/BeamRecordSqlType.java | 185 ++++++++
.../apache/beam/sdk/extensions/sql/BeamSql.java | 113 ++---
.../beam/sdk/extensions/sql/BeamSqlCli.java | 65 ---
.../beam/sdk/extensions/sql/BeamSqlEnv.java | 127 ------
.../sdk/extensions/sql/BeamSqlRecordHelper.java | 217 +++++++++
.../beam/sdk/extensions/sql/BeamSqlUdf.java | 41 ++
.../extensions/sql/example/BeamSqlExample.java | 2 +-
.../sdk/extensions/sql/impl/BeamSqlCli.java | 65 +++
.../sdk/extensions/sql/impl/BeamSqlEnv.java | 135 ++++++
.../sdk/extensions/sql/impl/package-info.java | 22 +
.../sql/impl/planner/BeamQueryPlanner.java | 9 +-
.../sql/impl/rel/BeamAggregationRel.java | 4 +-
.../extensions/sql/impl/rel/BeamFilterRel.java | 2 +-
.../extensions/sql/impl/rel/BeamIOSinkRel.java | 6 +-
.../sql/impl/rel/BeamIOSourceRel.java | 6 +-
.../sql/impl/rel/BeamIntersectRel.java | 2 +-
.../extensions/sql/impl/rel/BeamJoinRel.java | 4 +-
.../extensions/sql/impl/rel/BeamMinusRel.java | 2 +-
.../extensions/sql/impl/rel/BeamProjectRel.java | 2 +-
.../extensions/sql/impl/rel/BeamRelNode.java | 5 +-
.../sql/impl/rel/BeamSetOperatorRelBase.java | 2 +-
.../extensions/sql/impl/rel/BeamSortRel.java | 4 +-
.../extensions/sql/impl/rel/BeamUnionRel.java | 2 +-
.../extensions/sql/impl/rel/BeamValuesRel.java | 6 +-
.../sql/impl/schema/BaseBeamTable.java | 35 ++
.../extensions/sql/impl/schema/BeamIOType.java | 28 ++
.../sql/impl/schema/BeamPCollectionTable.java | 63 +++
.../sql/impl/schema/BeamSqlTable.java | 54 +++
.../sql/impl/schema/BeamTableUtils.java | 118 +++++
.../impl/schema/kafka/BeamKafkaCSVTable.java | 109 +++++
.../sql/impl/schema/kafka/BeamKafkaTable.java | 109 +++++
.../sql/impl/schema/kafka/package-info.java | 22 +
.../sql/impl/schema/package-info.java | 22 +
.../sql/impl/schema/text/BeamTextCSVTable.java | 70 +++
.../schema/text/BeamTextCSVTableIOReader.java | 58 +++
.../schema/text/BeamTextCSVTableIOWriter.java | 58 +++
.../sql/impl/schema/text/BeamTextTable.java | 41 ++
.../sql/impl/schema/text/package-info.java | 22 +
.../transform/BeamAggregationTransforms.java | 4 +-
.../sql/impl/transform/BeamJoinTransforms.java | 4 +-
.../sql/impl/transform/BeamSqlProjectFn.java | 4 +-
.../extensions/sql/impl/utils/CalciteUtils.java | 2 +-
.../extensions/sql/schema/BaseBeamTable.java | 34 --
.../sdk/extensions/sql/schema/BeamIOType.java | 28 --
.../sql/schema/BeamPCollectionTable.java | 62 ---
.../sql/schema/BeamRecordSqlType.java | 185 --------
.../sql/schema/BeamSqlRecordHelper.java | 217 ---------
.../sdk/extensions/sql/schema/BeamSqlTable.java | 53 ---
.../sdk/extensions/sql/schema/BeamSqlUdf.java | 41 --
.../extensions/sql/schema/BeamTableUtils.java | 117 -----
.../sql/schema/kafka/BeamKafkaCSVTable.java | 109 -----
.../sql/schema/kafka/BeamKafkaTable.java | 109 -----
.../sql/schema/kafka/package-info.java | 22 -
.../sdk/extensions/sql/schema/package-info.java | 22 -
.../sql/schema/text/BeamTextCSVTable.java | 70 ---
.../schema/text/BeamTextCSVTableIOReader.java | 58 ---
.../schema/text/BeamTextCSVTableIOWriter.java | 58 ---
.../sql/schema/text/BeamTextTable.java | 41 --
.../sql/schema/text/package-info.java | 22 -
.../extensions/sql/BeamSqlApiSurfaceTest.java | 12 +-
.../sql/BeamSqlDslAggregationTest.java | 1 -
.../beam/sdk/extensions/sql/BeamSqlDslBase.java | 1 -
.../sdk/extensions/sql/BeamSqlDslJoinTest.java | 1 -
.../extensions/sql/BeamSqlDslProjectTest.java | 1 -
.../extensions/sql/BeamSqlDslUdfUdafTest.java | 2 -
.../beam/sdk/extensions/sql/TestUtils.java | 1 -
.../interpreter/BeamSqlFnExecutorTestBase.java | 2 +-
.../extensions/sql/impl/rel/BaseRelTest.java | 34 ++
.../sql/impl/rel/BeamIntersectRelTest.java | 9 +-
.../rel/BeamJoinRelBoundedVsBoundedTest.java | 23 +-
.../rel/BeamJoinRelUnboundedVsBoundedTest.java | 25 +-
.../BeamJoinRelUnboundedVsUnboundedTest.java | 19 +-
.../sql/impl/rel/BeamMinusRelTest.java | 9 +-
.../impl/rel/BeamSetOperatorRelBaseTest.java | 9 +-
.../sql/impl/rel/BeamSortRelTest.java | 17 +-
.../sql/impl/rel/BeamUnionRelTest.java | 9 +-
.../sql/impl/rel/BeamValuesRelTest.java | 11 +-
.../sql/impl/schema/BeamSqlRowCoderTest.java | 77 ++++
.../schema/kafka/BeamKafkaCSVTableTest.java | 107 +++++
.../impl/schema/text/BeamTextCSVTableTest.java | 176 +++++++
.../transform/BeamAggregationTransformTest.java | 453 +++++++++++++++++++
.../schema/transform/BeamTransformBaseTest.java | 97 ++++
...mSqlBuiltinFunctionsIntegrationTestBase.java | 2 +-
...amSqlComparisonOperatorsIntegrationTest.java | 2 +-
.../extensions/sql/mock/MockedBoundedTable.java | 4 +-
.../sdk/extensions/sql/mock/MockedTable.java | 4 +-
.../sql/mock/MockedUnboundedTable.java | 4 +-
.../sql/schema/BeamSqlRowCoderTest.java | 76 ----
.../sql/schema/kafka/BeamKafkaCSVTableTest.java | 107 -----
.../sql/schema/text/BeamTextCSVTableTest.java | 176 -------
.../transform/BeamAggregationTransformTest.java | 453 -------------------
.../schema/transform/BeamTransformBaseTest.java | 97 ----
92 files changed, 2575 insertions(+), 2545 deletions(-)
----------------------------------------------------------------------
[4/5] beam git commit: [BEAM-2740] Hide BeamSqlEnv.
Posted by ta...@apache.org.
[BEAM-2740] Hide BeamSqlEnv.
Project: http://git-wip-us.apache.org/repos/asf/beam/repo
Commit: http://git-wip-us.apache.org/repos/asf/beam/commit/49aad927
Tree: http://git-wip-us.apache.org/repos/asf/beam/tree/49aad927
Diff: http://git-wip-us.apache.org/repos/asf/beam/diff/49aad927
Branch: refs/heads/DSL_SQL
Commit: 49aad927d4d9cf58c30c04641c766a62d44f44b7
Parents: 9eec6a0
Author: James Xu <xu...@gmail.com>
Authored: Wed Aug 9 18:54:54 2017 +0800
Committer: Tyler Akidau <ta...@apache.org>
Committed: Tue Aug 15 11:40:39 2017 -0700
----------------------------------------------------------------------
.../sdk/extensions/sql/BeamRecordSqlType.java | 185 ++++++++
.../apache/beam/sdk/extensions/sql/BeamSql.java | 113 ++---
.../beam/sdk/extensions/sql/BeamSqlCli.java | 65 ---
.../beam/sdk/extensions/sql/BeamSqlEnv.java | 127 ------
.../sdk/extensions/sql/BeamSqlRecordHelper.java | 217 +++++++++
.../beam/sdk/extensions/sql/BeamSqlUdf.java | 41 ++
.../extensions/sql/example/BeamSqlExample.java | 2 +-
.../sdk/extensions/sql/impl/BeamSqlCli.java | 65 +++
.../sdk/extensions/sql/impl/BeamSqlEnv.java | 135 ++++++
.../sdk/extensions/sql/impl/package-info.java | 22 +
.../sql/impl/planner/BeamQueryPlanner.java | 9 +-
.../sql/impl/rel/BeamAggregationRel.java | 4 +-
.../extensions/sql/impl/rel/BeamFilterRel.java | 2 +-
.../extensions/sql/impl/rel/BeamIOSinkRel.java | 6 +-
.../sql/impl/rel/BeamIOSourceRel.java | 6 +-
.../sql/impl/rel/BeamIntersectRel.java | 2 +-
.../extensions/sql/impl/rel/BeamJoinRel.java | 4 +-
.../extensions/sql/impl/rel/BeamMinusRel.java | 2 +-
.../extensions/sql/impl/rel/BeamProjectRel.java | 2 +-
.../extensions/sql/impl/rel/BeamRelNode.java | 5 +-
.../sql/impl/rel/BeamSetOperatorRelBase.java | 2 +-
.../extensions/sql/impl/rel/BeamSortRel.java | 4 +-
.../extensions/sql/impl/rel/BeamUnionRel.java | 2 +-
.../extensions/sql/impl/rel/BeamValuesRel.java | 6 +-
.../sql/impl/schema/BaseBeamTable.java | 35 ++
.../extensions/sql/impl/schema/BeamIOType.java | 28 ++
.../sql/impl/schema/BeamPCollectionTable.java | 63 +++
.../sql/impl/schema/BeamSqlTable.java | 54 +++
.../sql/impl/schema/BeamTableUtils.java | 118 +++++
.../impl/schema/kafka/BeamKafkaCSVTable.java | 109 +++++
.../sql/impl/schema/kafka/BeamKafkaTable.java | 109 +++++
.../sql/impl/schema/kafka/package-info.java | 22 +
.../sql/impl/schema/package-info.java | 22 +
.../sql/impl/schema/text/BeamTextCSVTable.java | 70 +++
.../schema/text/BeamTextCSVTableIOReader.java | 58 +++
.../schema/text/BeamTextCSVTableIOWriter.java | 58 +++
.../sql/impl/schema/text/BeamTextTable.java | 41 ++
.../sql/impl/schema/text/package-info.java | 22 +
.../transform/BeamAggregationTransforms.java | 4 +-
.../sql/impl/transform/BeamJoinTransforms.java | 4 +-
.../sql/impl/transform/BeamSqlProjectFn.java | 4 +-
.../extensions/sql/impl/utils/CalciteUtils.java | 2 +-
.../extensions/sql/schema/BaseBeamTable.java | 34 --
.../sdk/extensions/sql/schema/BeamIOType.java | 28 --
.../sql/schema/BeamPCollectionTable.java | 62 ---
.../sql/schema/BeamRecordSqlType.java | 185 --------
.../sql/schema/BeamSqlRecordHelper.java | 217 ---------
.../sdk/extensions/sql/schema/BeamSqlTable.java | 53 ---
.../sdk/extensions/sql/schema/BeamSqlUdf.java | 41 --
.../extensions/sql/schema/BeamTableUtils.java | 117 -----
.../sql/schema/kafka/BeamKafkaCSVTable.java | 109 -----
.../sql/schema/kafka/BeamKafkaTable.java | 109 -----
.../sql/schema/kafka/package-info.java | 22 -
.../sdk/extensions/sql/schema/package-info.java | 22 -
.../sql/schema/text/BeamTextCSVTable.java | 70 ---
.../schema/text/BeamTextCSVTableIOReader.java | 58 ---
.../schema/text/BeamTextCSVTableIOWriter.java | 58 ---
.../sql/schema/text/BeamTextTable.java | 41 --
.../sql/schema/text/package-info.java | 22 -
.../extensions/sql/BeamSqlApiSurfaceTest.java | 12 +-
.../sql/BeamSqlDslAggregationTest.java | 1 -
.../beam/sdk/extensions/sql/BeamSqlDslBase.java | 1 -
.../sdk/extensions/sql/BeamSqlDslJoinTest.java | 1 -
.../extensions/sql/BeamSqlDslProjectTest.java | 1 -
.../extensions/sql/BeamSqlDslUdfUdafTest.java | 2 -
.../beam/sdk/extensions/sql/TestUtils.java | 1 -
.../interpreter/BeamSqlFnExecutorTestBase.java | 2 +-
.../extensions/sql/impl/rel/BaseRelTest.java | 34 ++
.../sql/impl/rel/BeamIntersectRelTest.java | 9 +-
.../rel/BeamJoinRelBoundedVsBoundedTest.java | 23 +-
.../rel/BeamJoinRelUnboundedVsBoundedTest.java | 25 +-
.../BeamJoinRelUnboundedVsUnboundedTest.java | 19 +-
.../sql/impl/rel/BeamMinusRelTest.java | 9 +-
.../impl/rel/BeamSetOperatorRelBaseTest.java | 9 +-
.../sql/impl/rel/BeamSortRelTest.java | 17 +-
.../sql/impl/rel/BeamUnionRelTest.java | 9 +-
.../sql/impl/rel/BeamValuesRelTest.java | 11 +-
.../sql/impl/schema/BeamSqlRowCoderTest.java | 77 ++++
.../schema/kafka/BeamKafkaCSVTableTest.java | 107 +++++
.../impl/schema/text/BeamTextCSVTableTest.java | 176 +++++++
.../transform/BeamAggregationTransformTest.java | 453 +++++++++++++++++++
.../schema/transform/BeamTransformBaseTest.java | 97 ++++
...mSqlBuiltinFunctionsIntegrationTestBase.java | 2 +-
...amSqlComparisonOperatorsIntegrationTest.java | 2 +-
.../extensions/sql/mock/MockedBoundedTable.java | 4 +-
.../sdk/extensions/sql/mock/MockedTable.java | 4 +-
.../sql/mock/MockedUnboundedTable.java | 4 +-
.../sql/schema/BeamSqlRowCoderTest.java | 76 ----
.../sql/schema/kafka/BeamKafkaCSVTableTest.java | 107 -----
.../sql/schema/text/BeamTextCSVTableTest.java | 176 -------
.../transform/BeamAggregationTransformTest.java | 453 -------------------
.../schema/transform/BeamTransformBaseTest.java | 97 ----
92 files changed, 2575 insertions(+), 2545 deletions(-)
----------------------------------------------------------------------
http://git-wip-us.apache.org/repos/asf/beam/blob/49aad927/sdks/java/extensions/sql/src/main/java/org/apache/beam/sdk/extensions/sql/BeamRecordSqlType.java
----------------------------------------------------------------------
diff --git a/sdks/java/extensions/sql/src/main/java/org/apache/beam/sdk/extensions/sql/BeamRecordSqlType.java b/sdks/java/extensions/sql/src/main/java/org/apache/beam/sdk/extensions/sql/BeamRecordSqlType.java
new file mode 100644
index 0000000..5269867
--- /dev/null
+++ b/sdks/java/extensions/sql/src/main/java/org/apache/beam/sdk/extensions/sql/BeamRecordSqlType.java
@@ -0,0 +1,185 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements. See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership. The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License. You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.apache.beam.sdk.extensions.sql;
+
+import java.math.BigDecimal;
+import java.sql.Types;
+import java.util.ArrayList;
+import java.util.Date;
+import java.util.GregorianCalendar;
+import java.util.HashMap;
+import java.util.List;
+import java.util.Map;
+import org.apache.beam.sdk.coders.BigDecimalCoder;
+import org.apache.beam.sdk.coders.BigEndianIntegerCoder;
+import org.apache.beam.sdk.coders.BigEndianLongCoder;
+import org.apache.beam.sdk.coders.ByteCoder;
+import org.apache.beam.sdk.coders.Coder;
+import org.apache.beam.sdk.coders.StringUtf8Coder;
+import org.apache.beam.sdk.extensions.sql.BeamSqlRecordHelper.BooleanCoder;
+import org.apache.beam.sdk.extensions.sql.BeamSqlRecordHelper.DateCoder;
+import org.apache.beam.sdk.extensions.sql.BeamSqlRecordHelper.DoubleCoder;
+import org.apache.beam.sdk.extensions.sql.BeamSqlRecordHelper.FloatCoder;
+import org.apache.beam.sdk.extensions.sql.BeamSqlRecordHelper.ShortCoder;
+import org.apache.beam.sdk.extensions.sql.BeamSqlRecordHelper.TimeCoder;
+import org.apache.beam.sdk.values.BeamRecord;
+import org.apache.beam.sdk.values.BeamRecordType;
+
+/**
+ * Type provider for {@link BeamRecord} with SQL types.
+ *
+ * <p>Limited SQL types are supported now, visit
+ * <a href="https://beam.apache.org/blog/2017/07/21/sql-dsl.html#data-type">data types</a>
+ * for more details.
+ *
+ */
+public class BeamRecordSqlType extends BeamRecordType {
+ private static final Map<Integer, Class> SQL_TYPE_TO_JAVA_CLASS = new HashMap<>();
+ static {
+ SQL_TYPE_TO_JAVA_CLASS.put(Types.TINYINT, Byte.class);
+ SQL_TYPE_TO_JAVA_CLASS.put(Types.SMALLINT, Short.class);
+ SQL_TYPE_TO_JAVA_CLASS.put(Types.INTEGER, Integer.class);
+ SQL_TYPE_TO_JAVA_CLASS.put(Types.BIGINT, Long.class);
+ SQL_TYPE_TO_JAVA_CLASS.put(Types.FLOAT, Float.class);
+ SQL_TYPE_TO_JAVA_CLASS.put(Types.DOUBLE, Double.class);
+ SQL_TYPE_TO_JAVA_CLASS.put(Types.DECIMAL, BigDecimal.class);
+
+ SQL_TYPE_TO_JAVA_CLASS.put(Types.BOOLEAN, Boolean.class);
+
+ SQL_TYPE_TO_JAVA_CLASS.put(Types.CHAR, String.class);
+ SQL_TYPE_TO_JAVA_CLASS.put(Types.VARCHAR, String.class);
+
+ SQL_TYPE_TO_JAVA_CLASS.put(Types.TIME, GregorianCalendar.class);
+
+ SQL_TYPE_TO_JAVA_CLASS.put(Types.DATE, Date.class);
+ SQL_TYPE_TO_JAVA_CLASS.put(Types.TIMESTAMP, Date.class);
+ }
+
+ public List<Integer> fieldTypes;
+
+ protected BeamRecordSqlType(List<String> fieldsName, List<Coder> fieldsCoder) {
+ super(fieldsName, fieldsCoder);
+ }
+
+ private BeamRecordSqlType(List<String> fieldsName, List<Integer> fieldTypes
+ , List<Coder> fieldsCoder) {
+ super(fieldsName, fieldsCoder);
+ this.fieldTypes = fieldTypes;
+ }
+
+ public static BeamRecordSqlType create(List<String> fieldNames,
+ List<Integer> fieldTypes) {
+ if (fieldNames.size() != fieldTypes.size()) {
+ throw new IllegalStateException("the sizes of 'dataType' and 'fieldTypes' must match.");
+ }
+ List<Coder> fieldCoders = new ArrayList<>(fieldTypes.size());
+ for (int idx = 0; idx < fieldTypes.size(); ++idx) {
+ switch (fieldTypes.get(idx)) {
+ case Types.INTEGER:
+ fieldCoders.add(BigEndianIntegerCoder.of());
+ break;
+ case Types.SMALLINT:
+ fieldCoders.add(ShortCoder.of());
+ break;
+ case Types.TINYINT:
+ fieldCoders.add(ByteCoder.of());
+ break;
+ case Types.DOUBLE:
+ fieldCoders.add(DoubleCoder.of());
+ break;
+ case Types.FLOAT:
+ fieldCoders.add(FloatCoder.of());
+ break;
+ case Types.DECIMAL:
+ fieldCoders.add(BigDecimalCoder.of());
+ break;
+ case Types.BIGINT:
+ fieldCoders.add(BigEndianLongCoder.of());
+ break;
+ case Types.VARCHAR:
+ case Types.CHAR:
+ fieldCoders.add(StringUtf8Coder.of());
+ break;
+ case Types.TIME:
+ fieldCoders.add(TimeCoder.of());
+ break;
+ case Types.DATE:
+ case Types.TIMESTAMP:
+ fieldCoders.add(DateCoder.of());
+ break;
+ case Types.BOOLEAN:
+ fieldCoders.add(BooleanCoder.of());
+ break;
+
+ default:
+ throw new UnsupportedOperationException(
+ "Data type: " + fieldTypes.get(idx) + " not supported yet!");
+ }
+ }
+ return new BeamRecordSqlType(fieldNames, fieldTypes, fieldCoders);
+ }
+
+ @Override
+ public void validateValueType(int index, Object fieldValue) throws IllegalArgumentException {
+ if (null == fieldValue) {// no need to do type check for NULL value
+ return;
+ }
+
+ int fieldType = fieldTypes.get(index);
+ Class javaClazz = SQL_TYPE_TO_JAVA_CLASS.get(fieldType);
+ if (javaClazz == null) {
+ throw new IllegalArgumentException("Data type: " + fieldType + " not supported yet!");
+ }
+
+ if (!fieldValue.getClass().equals(javaClazz)) {
+ throw new IllegalArgumentException(
+ String.format("[%s](%s) doesn't match type [%s]",
+ fieldValue, fieldValue.getClass(), fieldType)
+ );
+ }
+ }
+
+ public List<Integer> getFieldTypes() {
+ return fieldTypes;
+ }
+
+ public Integer getFieldTypeByIndex(int index){
+ return fieldTypes.get(index);
+ }
+
+ @Override
+ public boolean equals(Object obj) {
+ if (obj != null && obj instanceof BeamRecordSqlType) {
+ BeamRecordSqlType ins = (BeamRecordSqlType) obj;
+ return fieldTypes.equals(ins.getFieldTypes()) && getFieldNames().equals(ins.getFieldNames());
+ } else {
+ return false;
+ }
+ }
+
+ @Override
+ public int hashCode() {
+ return 31 * getFieldNames().hashCode() + getFieldTypes().hashCode();
+ }
+
+ @Override
+ public String toString() {
+ return "BeamRecordSqlType [fieldNames=" + getFieldNames()
+ + ", fieldTypes=" + fieldTypes + "]";
+ }
+}
http://git-wip-us.apache.org/repos/asf/beam/blob/49aad927/sdks/java/extensions/sql/src/main/java/org/apache/beam/sdk/extensions/sql/BeamSql.java
----------------------------------------------------------------------
diff --git a/sdks/java/extensions/sql/src/main/java/org/apache/beam/sdk/extensions/sql/BeamSql.java b/sdks/java/extensions/sql/src/main/java/org/apache/beam/sdk/extensions/sql/BeamSql.java
index bf6a9c0..34355fb 100644
--- a/sdks/java/extensions/sql/src/main/java/org/apache/beam/sdk/extensions/sql/BeamSql.java
+++ b/sdks/java/extensions/sql/src/main/java/org/apache/beam/sdk/extensions/sql/BeamSql.java
@@ -17,13 +17,11 @@
*/
package org.apache.beam.sdk.extensions.sql;
-import com.google.auto.value.AutoValue;
import org.apache.beam.sdk.annotations.Experimental;
import org.apache.beam.sdk.coders.BeamRecordCoder;
+import org.apache.beam.sdk.extensions.sql.impl.BeamSqlEnv;
import org.apache.beam.sdk.extensions.sql.impl.rel.BeamRelNode;
-import org.apache.beam.sdk.extensions.sql.schema.BeamPCollectionTable;
-import org.apache.beam.sdk.extensions.sql.schema.BeamRecordSqlType;
-import org.apache.beam.sdk.extensions.sql.schema.BeamSqlUdf;
+import org.apache.beam.sdk.extensions.sql.impl.schema.BeamPCollectionTable;
import org.apache.beam.sdk.transforms.Combine.CombineFn;
import org.apache.beam.sdk.transforms.PTransform;
import org.apache.beam.sdk.transforms.SerializableFunction;
@@ -94,10 +92,7 @@ public class BeamSql {
* </ul>
*/
public static QueryTransform query(String sqlQuery) {
- return QueryTransform.builder()
- .setSqlEnv(new BeamSqlEnv())
- .setSqlQuery(sqlQuery)
- .build();
+ return new QueryTransform(sqlQuery);
}
/**
@@ -109,10 +104,7 @@ public class BeamSql {
* <p>Make sure to query it from a static table name <em>PCOLLECTION</em>.
*/
public static SimpleQueryTransform simpleQuery(String sqlQuery) {
- return SimpleQueryTransform.builder()
- .setSqlEnv(new BeamSqlEnv())
- .setSqlQuery(sqlQuery)
- .build();
+ return new SimpleQueryTransform(sqlQuery);
}
/**
@@ -121,28 +113,22 @@ public class BeamSql {
* <p>The table names in the input {@code PCollectionTuple} are only valid during the current
* query.
*/
- @AutoValue
- public abstract static class QueryTransform extends
+ public static class QueryTransform extends
PTransform<PCollectionTuple, PCollection<BeamRecord>> {
- abstract BeamSqlEnv getSqlEnv();
- abstract String getSqlQuery();
+ private BeamSqlEnv beamSqlEnv = new BeamSqlEnv();
+ private String sqlQuery;
- static Builder builder() {
- return new AutoValue_BeamSql_QueryTransform.Builder();
- }
-
- @AutoValue.Builder
- abstract static class Builder {
- abstract Builder setSqlQuery(String sqlQuery);
- abstract Builder setSqlEnv(BeamSqlEnv sqlEnv);
- abstract QueryTransform build();
+ public QueryTransform(String sqlQuery) {
+ this.sqlQuery = sqlQuery;
}
/**
* register a UDF function used in this query.
+ *
+ * <p>Refer to {@link BeamSqlUdf} for more about how to implement a UDF in BeamSql.
*/
public QueryTransform withUdf(String functionName, Class<? extends BeamSqlUdf> clazz){
- getSqlEnv().registerUdf(functionName, clazz);
+ beamSqlEnv.registerUdf(functionName, clazz);
return this;
}
/**
@@ -150,7 +136,7 @@ public class BeamSql {
* Note, {@link SerializableFunction} must have a constructor without arguments.
*/
public QueryTransform withUdf(String functionName, SerializableFunction sfn){
- getSqlEnv().registerUdf(functionName, sfn);
+ beamSqlEnv.registerUdf(functionName, sfn);
return this;
}
@@ -158,7 +144,7 @@ public class BeamSql {
* register a {@link CombineFn} as UDAF function used in this query.
*/
public QueryTransform withUdaf(String functionName, CombineFn combineFn){
- getSqlEnv().registerUdaf(functionName, combineFn);
+ beamSqlEnv.registerUdaf(functionName, combineFn);
return this;
}
@@ -168,13 +154,13 @@ public class BeamSql {
BeamRelNode beamRelNode = null;
try {
- beamRelNode = getSqlEnv().planner.convertToBeamRel(getSqlQuery());
+ beamRelNode = beamSqlEnv.getPlanner().convertToBeamRel(sqlQuery);
} catch (ValidationException | RelConversionException | SqlParseException e) {
throw new IllegalStateException(e);
}
try {
- return beamRelNode.buildBeamPipeline(input, getSqlEnv());
+ return beamRelNode.buildBeamPipeline(input, beamSqlEnv);
} catch (Exception e) {
throw new IllegalStateException(e);
}
@@ -186,7 +172,7 @@ public class BeamSql {
PCollection<BeamRecord> sourceStream = (PCollection<BeamRecord>) input.get(sourceTag);
BeamRecordCoder sourceCoder = (BeamRecordCoder) sourceStream.getCoder();
- getSqlEnv().registerTable(sourceTag.getId(),
+ beamSqlEnv.registerTable(sourceTag.getId(),
new BeamPCollectionTable(sourceStream,
(BeamRecordSqlType) sourceCoder.getRecordType()));
}
@@ -197,53 +183,47 @@ public class BeamSql {
* A {@link PTransform} representing an execution plan for a SQL query referencing
* a single table.
*/
- @AutoValue
- public abstract static class SimpleQueryTransform
+ public static class SimpleQueryTransform
extends PTransform<PCollection<BeamRecord>, PCollection<BeamRecord>> {
private static final String PCOLLECTION_TABLE_NAME = "PCOLLECTION";
- abstract BeamSqlEnv getSqlEnv();
- abstract String getSqlQuery();
+ private QueryTransform delegate;
- static Builder builder() {
- return new AutoValue_BeamSql_SimpleQueryTransform.Builder();
+ public SimpleQueryTransform(String sqlQuery) {
+ this.delegate = new QueryTransform(sqlQuery);
}
- @AutoValue.Builder
- abstract static class Builder {
- abstract Builder setSqlQuery(String sqlQuery);
- abstract Builder setSqlEnv(BeamSqlEnv sqlEnv);
- abstract SimpleQueryTransform build();
+ /**
+ * register a UDF function used in this query.
+ *
+ * <p>Refer to {@link BeamSqlUdf} for more about how to implement a UDAF in BeamSql.
+ */
+ public SimpleQueryTransform withUdf(String functionName, Class<? extends BeamSqlUdf> clazz){
+ delegate.withUdf(functionName, clazz);
+ return this;
}
/**
- * register a UDF function used in this query.
+ * register {@link SerializableFunction} as a UDF function used in this query.
+ * Note, {@link SerializableFunction} must have a constructor without arguments.
*/
- public SimpleQueryTransform withUdf(String functionName, Class<? extends BeamSqlUdf> clazz){
- getSqlEnv().registerUdf(functionName, clazz);
- return this;
- }
- /**
- * register {@link SerializableFunction} as a UDF function used in this query.
- * Note, {@link SerializableFunction} must have a constructor without arguments.
- */
- public SimpleQueryTransform withUdf(String functionName, SerializableFunction sfn){
- getSqlEnv().registerUdf(functionName, sfn);
- return this;
- }
+ public SimpleQueryTransform withUdf(String functionName, SerializableFunction sfn){
+ delegate.withUdf(functionName, sfn);
+ return this;
+ }
- /**
- * register a {@link CombineFn} as UDAF function used in this query.
- */
- public SimpleQueryTransform withUdaf(String functionName, CombineFn combineFn){
- getSqlEnv().registerUdaf(functionName, combineFn);
- return this;
- }
+ /**
+ * register a {@link CombineFn} as UDAF function used in this query.
+ */
+ public SimpleQueryTransform withUdaf(String functionName, CombineFn combineFn){
+ delegate.withUdaf(functionName, combineFn);
+ return this;
+ }
private void validateQuery() {
SqlNode sqlNode;
try {
- sqlNode = getSqlEnv().planner.parseQuery(getSqlQuery());
- getSqlEnv().planner.getPlanner().close();
+ sqlNode = delegate.beamSqlEnv.getPlanner().parseQuery(delegate.sqlQuery);
+ delegate.beamSqlEnv.getPlanner().getPlanner().close();
} catch (SqlParseException e) {
throw new IllegalStateException(e);
}
@@ -264,10 +244,7 @@ public class BeamSql {
public PCollection<BeamRecord> expand(PCollection<BeamRecord> input) {
validateQuery();
return PCollectionTuple.of(new TupleTag<BeamRecord>(PCOLLECTION_TABLE_NAME), input)
- .apply(QueryTransform.builder()
- .setSqlEnv(getSqlEnv())
- .setSqlQuery(getSqlQuery())
- .build());
+ .apply(delegate);
}
}
}
http://git-wip-us.apache.org/repos/asf/beam/blob/49aad927/sdks/java/extensions/sql/src/main/java/org/apache/beam/sdk/extensions/sql/BeamSqlCli.java
----------------------------------------------------------------------
diff --git a/sdks/java/extensions/sql/src/main/java/org/apache/beam/sdk/extensions/sql/BeamSqlCli.java b/sdks/java/extensions/sql/src/main/java/org/apache/beam/sdk/extensions/sql/BeamSqlCli.java
deleted file mode 100644
index a43808e..0000000
--- a/sdks/java/extensions/sql/src/main/java/org/apache/beam/sdk/extensions/sql/BeamSqlCli.java
+++ /dev/null
@@ -1,65 +0,0 @@
-/*
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements. See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership. The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License. You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-package org.apache.beam.sdk.extensions.sql;
-
-import org.apache.beam.sdk.Pipeline;
-import org.apache.beam.sdk.annotations.Experimental;
-import org.apache.beam.sdk.extensions.sql.impl.rel.BeamRelNode;
-import org.apache.beam.sdk.options.PipelineOptions;
-import org.apache.beam.sdk.options.PipelineOptionsFactory;
-import org.apache.beam.sdk.values.BeamRecord;
-import org.apache.beam.sdk.values.PCollection;
-import org.apache.calcite.plan.RelOptUtil;
-
-/**
- * {@link BeamSqlCli} provides methods to execute Beam SQL with an interactive client.
- */
-@Experimental
-public class BeamSqlCli {
- /**
- * Returns a human readable representation of the query execution plan.
- */
- public static String explainQuery(String sqlString, BeamSqlEnv sqlEnv) throws Exception {
- BeamRelNode exeTree = sqlEnv.planner.convertToBeamRel(sqlString);
- String beamPlan = RelOptUtil.toString(exeTree);
- return beamPlan;
- }
-
- /**
- * compile SQL, and return a {@link Pipeline}.
- */
- public static PCollection<BeamRecord> compilePipeline(String sqlStatement, BeamSqlEnv sqlEnv)
- throws Exception{
- PipelineOptions options = PipelineOptionsFactory.fromArgs(new String[] {}).withValidation()
- .as(PipelineOptions.class); // FlinkPipelineOptions.class
- options.setJobName("BeamPlanCreator");
- Pipeline pipeline = Pipeline.create(options);
-
- return compilePipeline(sqlStatement, pipeline, sqlEnv);
- }
-
- /**
- * compile SQL, and return a {@link Pipeline}.
- */
- public static PCollection<BeamRecord> compilePipeline(String sqlStatement, Pipeline basePipeline,
- BeamSqlEnv sqlEnv) throws Exception{
- PCollection<BeamRecord> resultStream =
- sqlEnv.planner.compileBeamPipeline(sqlStatement, basePipeline, sqlEnv);
- return resultStream;
- }
-}
http://git-wip-us.apache.org/repos/asf/beam/blob/49aad927/sdks/java/extensions/sql/src/main/java/org/apache/beam/sdk/extensions/sql/BeamSqlEnv.java
----------------------------------------------------------------------
diff --git a/sdks/java/extensions/sql/src/main/java/org/apache/beam/sdk/extensions/sql/BeamSqlEnv.java b/sdks/java/extensions/sql/src/main/java/org/apache/beam/sdk/extensions/sql/BeamSqlEnv.java
deleted file mode 100644
index 79f2b32..0000000
--- a/sdks/java/extensions/sql/src/main/java/org/apache/beam/sdk/extensions/sql/BeamSqlEnv.java
+++ /dev/null
@@ -1,127 +0,0 @@
-/*
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements. See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership. The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License. You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-package org.apache.beam.sdk.extensions.sql;
-
-import java.io.Serializable;
-import org.apache.beam.sdk.extensions.sql.impl.interpreter.operator.UdafImpl;
-import org.apache.beam.sdk.extensions.sql.impl.planner.BeamQueryPlanner;
-import org.apache.beam.sdk.extensions.sql.impl.utils.CalciteUtils;
-import org.apache.beam.sdk.extensions.sql.schema.BaseBeamTable;
-import org.apache.beam.sdk.extensions.sql.schema.BeamRecordSqlType;
-import org.apache.beam.sdk.extensions.sql.schema.BeamSqlUdf;
-import org.apache.beam.sdk.transforms.Combine.CombineFn;
-import org.apache.beam.sdk.transforms.SerializableFunction;
-import org.apache.calcite.DataContext;
-import org.apache.calcite.linq4j.Enumerable;
-import org.apache.calcite.rel.type.RelDataType;
-import org.apache.calcite.rel.type.RelDataTypeFactory;
-import org.apache.calcite.schema.ScannableTable;
-import org.apache.calcite.schema.Schema;
-import org.apache.calcite.schema.SchemaPlus;
-import org.apache.calcite.schema.Statistic;
-import org.apache.calcite.schema.Statistics;
-import org.apache.calcite.schema.impl.ScalarFunctionImpl;
-import org.apache.calcite.tools.Frameworks;
-
-/**
- * {@link BeamSqlEnv} prepares the execution context for {@link BeamSql} and {@link BeamSqlCli}.
- *
- * <p>It contains a {@link SchemaPlus} which holds the metadata of tables/UDF functions, and
- * a {@link BeamQueryPlanner} which parse/validate/optimize/translate input SQL queries.
- */
-public class BeamSqlEnv implements Serializable{
- transient SchemaPlus schema;
- transient BeamQueryPlanner planner;
-
- public BeamSqlEnv() {
- schema = Frameworks.createRootSchema(true);
- planner = new BeamQueryPlanner(schema);
- }
-
- /**
- * Register a UDF function which can be used in SQL expression.
- */
- public void registerUdf(String functionName, Class<? extends BeamSqlUdf> clazz) {
- schema.add(functionName, ScalarFunctionImpl.create(clazz, BeamSqlUdf.UDF_METHOD));
- }
-
- /**
- * register {@link SerializableFunction} as a UDF function which can be used in SQL expression.
- * Note, {@link SerializableFunction} must have a constructor without arguments.
- */
- public void registerUdf(String functionName, SerializableFunction sfn) {
- schema.add(functionName, ScalarFunctionImpl.create(sfn.getClass(), "apply"));
- }
-
- /**
- * Register a {@link CombineFn} as UDAF function which can be used in GROUP-BY expression.
- */
- public void registerUdaf(String functionName, CombineFn combineFn) {
- schema.add(functionName, new UdafImpl(combineFn));
- }
-
- /**
- * Registers a {@link BaseBeamTable} which can be used for all subsequent queries.
- *
- */
- public void registerTable(String tableName, BaseBeamTable table) {
- schema.add(tableName, new BeamCalciteTable(table.getRowType()));
- planner.getSourceTables().put(tableName, table);
- }
-
- /**
- * Find {@link BaseBeamTable} by table name.
- */
- public BaseBeamTable findTable(String tableName){
- return planner.getSourceTables().get(tableName);
- }
-
- private static class BeamCalciteTable implements ScannableTable, Serializable {
- private BeamRecordSqlType beamSqlRowType;
- public BeamCalciteTable(BeamRecordSqlType beamSqlRowType) {
- this.beamSqlRowType = beamSqlRowType;
- }
- @Override
- public RelDataType getRowType(RelDataTypeFactory typeFactory) {
- return CalciteUtils.toCalciteRowType(this.beamSqlRowType)
- .apply(BeamQueryPlanner.TYPE_FACTORY);
- }
-
- @Override
- public Enumerable<Object[]> scan(DataContext root) {
- // not used as Beam SQL uses its own execution engine
- return null;
- }
-
- /**
- * Not used {@link Statistic} to optimize the plan.
- */
- @Override
- public Statistic getStatistic() {
- return Statistics.UNKNOWN;
- }
-
- /**
- * all sources are treated as TABLE in Beam SQL.
- */
- @Override
- public Schema.TableType getJdbcTableType() {
- return Schema.TableType.TABLE;
- }
- }
-}
http://git-wip-us.apache.org/repos/asf/beam/blob/49aad927/sdks/java/extensions/sql/src/main/java/org/apache/beam/sdk/extensions/sql/BeamSqlRecordHelper.java
----------------------------------------------------------------------
diff --git a/sdks/java/extensions/sql/src/main/java/org/apache/beam/sdk/extensions/sql/BeamSqlRecordHelper.java b/sdks/java/extensions/sql/src/main/java/org/apache/beam/sdk/extensions/sql/BeamSqlRecordHelper.java
new file mode 100644
index 0000000..870165d
--- /dev/null
+++ b/sdks/java/extensions/sql/src/main/java/org/apache/beam/sdk/extensions/sql/BeamSqlRecordHelper.java
@@ -0,0 +1,217 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements. See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership. The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License. You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.apache.beam.sdk.extensions.sql;
+
+import java.io.DataInputStream;
+import java.io.DataOutputStream;
+import java.io.IOException;
+import java.io.InputStream;
+import java.io.OutputStream;
+import java.math.BigDecimal;
+import java.util.Date;
+import java.util.GregorianCalendar;
+import org.apache.beam.sdk.annotations.Experimental;
+import org.apache.beam.sdk.coders.BigDecimalCoder;
+import org.apache.beam.sdk.coders.BigEndianLongCoder;
+import org.apache.beam.sdk.coders.Coder;
+import org.apache.beam.sdk.coders.CoderException;
+import org.apache.beam.sdk.coders.CustomCoder;
+import org.apache.beam.sdk.values.BeamRecord;
+
+/**
+ * Helper methods and {@link Coder} implementations for SQL-typed {@link BeamRecord} fields.
+ */
+@Experimental
+public class BeamSqlRecordHelper {
+
+ public static BeamRecordSqlType getSqlRecordType(BeamRecord record) {
+ return (BeamRecordSqlType) record.getDataType();
+ }
+
+ /**
+ * {@link Coder} for Java type {@link Short}.
+ */
+ public static class ShortCoder extends CustomCoder<Short> {
+ private static final ShortCoder INSTANCE = new ShortCoder();
+
+ public static ShortCoder of() {
+ return INSTANCE;
+ }
+
+ private ShortCoder() {
+ }
+
+ @Override
+ public void encode(Short value, OutputStream outStream) throws CoderException, IOException {
+ new DataOutputStream(outStream).writeShort(value);
+ }
+
+ @Override
+ public Short decode(InputStream inStream) throws CoderException, IOException {
+ return new DataInputStream(inStream).readShort();
+ }
+
+ @Override
+ public void verifyDeterministic() throws NonDeterministicException {
+ }
+ }
+ /**
+ * {@link Coder} for Java type {@link Float}, it's stored as {@link BigDecimal}.
+ */
+ public static class FloatCoder extends CustomCoder<Float> {
+ private static final FloatCoder INSTANCE = new FloatCoder();
+ private static final BigDecimalCoder CODER = BigDecimalCoder.of();
+
+ public static FloatCoder of() {
+ return INSTANCE;
+ }
+
+ private FloatCoder() {
+ }
+
+ @Override
+ public void encode(Float value, OutputStream outStream) throws CoderException, IOException {
+ CODER.encode(new BigDecimal(value), outStream);
+ }
+
+ @Override
+ public Float decode(InputStream inStream) throws CoderException, IOException {
+ return CODER.decode(inStream).floatValue();
+ }
+
+ @Override
+ public void verifyDeterministic() throws NonDeterministicException {
+ }
+ }
+ /**
+ * {@link Coder} for Java type {@link Double}, it's stored as {@link BigDecimal}.
+ */
+ public static class DoubleCoder extends CustomCoder<Double> {
+ private static final DoubleCoder INSTANCE = new DoubleCoder();
+ private static final BigDecimalCoder CODER = BigDecimalCoder.of();
+
+ public static DoubleCoder of() {
+ return INSTANCE;
+ }
+
+ private DoubleCoder() {
+ }
+
+ @Override
+ public void encode(Double value, OutputStream outStream) throws CoderException, IOException {
+ CODER.encode(new BigDecimal(value), outStream);
+ }
+
+ @Override
+ public Double decode(InputStream inStream) throws CoderException, IOException {
+ return CODER.decode(inStream).doubleValue();
+ }
+
+ @Override
+ public void verifyDeterministic() throws NonDeterministicException {
+ }
+ }
+
+ /**
+ * {@link Coder} for Java type {@link GregorianCalendar}, it's stored as {@link Long}.
+ */
+ public static class TimeCoder extends CustomCoder<GregorianCalendar> {
+ private static final BigEndianLongCoder longCoder = BigEndianLongCoder.of();
+ private static final TimeCoder INSTANCE = new TimeCoder();
+
+ public static TimeCoder of() {
+ return INSTANCE;
+ }
+
+ private TimeCoder() {
+ }
+
+ @Override
+ public void encode(GregorianCalendar value, OutputStream outStream)
+ throws CoderException, IOException {
+ longCoder.encode(value.getTime().getTime(), outStream);
+ }
+
+ @Override
+ public GregorianCalendar decode(InputStream inStream) throws CoderException, IOException {
+ GregorianCalendar calendar = new GregorianCalendar();
+ calendar.setTime(new Date(longCoder.decode(inStream)));
+ return calendar;
+ }
+
+ @Override
+ public void verifyDeterministic() throws NonDeterministicException {
+ }
+ }
+ /**
+ * {@link Coder} for Java type {@link Date}, it's stored as {@link Long}.
+ */
+ public static class DateCoder extends CustomCoder<Date> {
+ private static final BigEndianLongCoder longCoder = BigEndianLongCoder.of();
+ private static final DateCoder INSTANCE = new DateCoder();
+
+ public static DateCoder of() {
+ return INSTANCE;
+ }
+
+ private DateCoder() {
+ }
+
+ @Override
+ public void encode(Date value, OutputStream outStream) throws CoderException, IOException {
+ longCoder.encode(value.getTime(), outStream);
+ }
+
+ @Override
+ public Date decode(InputStream inStream) throws CoderException, IOException {
+ return new Date(longCoder.decode(inStream));
+ }
+
+ @Override
+ public void verifyDeterministic() throws NonDeterministicException {
+ }
+ }
+
+ /**
+ * {@link Coder} for Java type {@link Boolean}.
+ */
+ public static class BooleanCoder extends CustomCoder<Boolean> {
+ private static final BooleanCoder INSTANCE = new BooleanCoder();
+
+ public static BooleanCoder of() {
+ return INSTANCE;
+ }
+
+ private BooleanCoder() {
+ }
+
+ @Override
+ public void encode(Boolean value, OutputStream outStream) throws CoderException, IOException {
+ new DataOutputStream(outStream).writeBoolean(value);
+ }
+
+ @Override
+ public Boolean decode(InputStream inStream) throws CoderException, IOException {
+ return new DataInputStream(inStream).readBoolean();
+ }
+
+ @Override
+ public void verifyDeterministic() throws NonDeterministicException {
+ }
+ }
+}
http://git-wip-us.apache.org/repos/asf/beam/blob/49aad927/sdks/java/extensions/sql/src/main/java/org/apache/beam/sdk/extensions/sql/BeamSqlUdf.java
----------------------------------------------------------------------
diff --git a/sdks/java/extensions/sql/src/main/java/org/apache/beam/sdk/extensions/sql/BeamSqlUdf.java b/sdks/java/extensions/sql/src/main/java/org/apache/beam/sdk/extensions/sql/BeamSqlUdf.java
new file mode 100644
index 0000000..d4828e7
--- /dev/null
+++ b/sdks/java/extensions/sql/src/main/java/org/apache/beam/sdk/extensions/sql/BeamSqlUdf.java
@@ -0,0 +1,41 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements. See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership. The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License. You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.apache.beam.sdk.extensions.sql;
+
+import java.io.Serializable;
+
+/**
+ * Interface to create a UDF in Beam SQL.
+ *
+ * <p>A static method {@code eval} is required. Here is an example:
+ *
+ * <blockquote><pre>
+ * public static class MyLeftFunction {
+ * public String eval(
+ * @Parameter(name = "s") String s,
+ * @Parameter(name = "n", optional = true) Integer n) {
+ * return s.substring(0, n == null ? 1 : n);
+ * }
+ * }</pre></blockquote>
+ *
+ * <p>The first parameter is named "s" and is mandatory,
+ * and the second parameter is named "n" and is optional.
+ */
+public interface BeamSqlUdf extends Serializable {
+ String UDF_METHOD = "eval";
+}
http://git-wip-us.apache.org/repos/asf/beam/blob/49aad927/sdks/java/extensions/sql/src/main/java/org/apache/beam/sdk/extensions/sql/example/BeamSqlExample.java
----------------------------------------------------------------------
diff --git a/sdks/java/extensions/sql/src/main/java/org/apache/beam/sdk/extensions/sql/example/BeamSqlExample.java b/sdks/java/extensions/sql/src/main/java/org/apache/beam/sdk/extensions/sql/example/BeamSqlExample.java
index 91251cf..0c5dae1 100644
--- a/sdks/java/extensions/sql/src/main/java/org/apache/beam/sdk/extensions/sql/example/BeamSqlExample.java
+++ b/sdks/java/extensions/sql/src/main/java/org/apache/beam/sdk/extensions/sql/example/BeamSqlExample.java
@@ -21,8 +21,8 @@ import java.sql.Types;
import java.util.Arrays;
import java.util.List;
import org.apache.beam.sdk.Pipeline;
+import org.apache.beam.sdk.extensions.sql.BeamRecordSqlType;
import org.apache.beam.sdk.extensions.sql.BeamSql;
-import org.apache.beam.sdk.extensions.sql.schema.BeamRecordSqlType;
import org.apache.beam.sdk.options.PipelineOptions;
import org.apache.beam.sdk.options.PipelineOptionsFactory;
import org.apache.beam.sdk.transforms.Create;
http://git-wip-us.apache.org/repos/asf/beam/blob/49aad927/sdks/java/extensions/sql/src/main/java/org/apache/beam/sdk/extensions/sql/impl/BeamSqlCli.java
----------------------------------------------------------------------
diff --git a/sdks/java/extensions/sql/src/main/java/org/apache/beam/sdk/extensions/sql/impl/BeamSqlCli.java b/sdks/java/extensions/sql/src/main/java/org/apache/beam/sdk/extensions/sql/impl/BeamSqlCli.java
new file mode 100644
index 0000000..5c7d920
--- /dev/null
+++ b/sdks/java/extensions/sql/src/main/java/org/apache/beam/sdk/extensions/sql/impl/BeamSqlCli.java
@@ -0,0 +1,65 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements. See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership. The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License. You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.apache.beam.sdk.extensions.sql.impl;
+
+import org.apache.beam.sdk.Pipeline;
+import org.apache.beam.sdk.annotations.Experimental;
+import org.apache.beam.sdk.extensions.sql.impl.rel.BeamRelNode;
+import org.apache.beam.sdk.options.PipelineOptions;
+import org.apache.beam.sdk.options.PipelineOptionsFactory;
+import org.apache.beam.sdk.values.BeamRecord;
+import org.apache.beam.sdk.values.PCollection;
+import org.apache.calcite.plan.RelOptUtil;
+
+/**
+ * {@link BeamSqlCli} provides methods to execute Beam SQL with an interactive client.
+ */
+@Experimental
+public class BeamSqlCli {
+ /**
+ * Returns a human readable representation of the query execution plan.
+ */
+ public static String explainQuery(String sqlString, BeamSqlEnv sqlEnv) throws Exception {
+ BeamRelNode exeTree = sqlEnv.getPlanner().convertToBeamRel(sqlString);
+ String beamPlan = RelOptUtil.toString(exeTree);
+ return beamPlan;
+ }
+
+ /**
+ * Compiles a SQL statement and returns the result as a {@link PCollection}.
+ */
+ public static PCollection<BeamRecord> compilePipeline(String sqlStatement, BeamSqlEnv sqlEnv)
+ throws Exception{
+ PipelineOptions options = PipelineOptionsFactory.fromArgs(new String[] {}).withValidation()
+ .as(PipelineOptions.class);
+ options.setJobName("BeamPlanCreator");
+ Pipeline pipeline = Pipeline.create(options);
+
+ return compilePipeline(sqlStatement, pipeline, sqlEnv);
+ }
+
+ /**
+ * Compiles a SQL statement and returns the result as a {@link PCollection}.
+ */
+ public static PCollection<BeamRecord> compilePipeline(String sqlStatement, Pipeline basePipeline,
+ BeamSqlEnv sqlEnv) throws Exception{
+ PCollection<BeamRecord> resultStream = sqlEnv.getPlanner()
+ .compileBeamPipeline(sqlStatement, basePipeline, sqlEnv);
+ return resultStream;
+ }
+}
http://git-wip-us.apache.org/repos/asf/beam/blob/49aad927/sdks/java/extensions/sql/src/main/java/org/apache/beam/sdk/extensions/sql/impl/BeamSqlEnv.java
----------------------------------------------------------------------
diff --git a/sdks/java/extensions/sql/src/main/java/org/apache/beam/sdk/extensions/sql/impl/BeamSqlEnv.java b/sdks/java/extensions/sql/src/main/java/org/apache/beam/sdk/extensions/sql/impl/BeamSqlEnv.java
new file mode 100644
index 0000000..fcc9079
--- /dev/null
+++ b/sdks/java/extensions/sql/src/main/java/org/apache/beam/sdk/extensions/sql/impl/BeamSqlEnv.java
@@ -0,0 +1,135 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements. See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership. The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License. You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.apache.beam.sdk.extensions.sql.impl;
+
+import java.io.Serializable;
+import org.apache.beam.sdk.extensions.sql.BeamRecordSqlType;
+import org.apache.beam.sdk.extensions.sql.BeamSql;
+import org.apache.beam.sdk.extensions.sql.BeamSqlUdf;
+import org.apache.beam.sdk.extensions.sql.impl.interpreter.operator.UdafImpl;
+import org.apache.beam.sdk.extensions.sql.impl.planner.BeamQueryPlanner;
+import org.apache.beam.sdk.extensions.sql.impl.schema.BaseBeamTable;
+import org.apache.beam.sdk.extensions.sql.impl.schema.BeamSqlTable;
+import org.apache.beam.sdk.extensions.sql.impl.utils.CalciteUtils;
+import org.apache.beam.sdk.transforms.Combine;
+import org.apache.beam.sdk.transforms.SerializableFunction;
+import org.apache.calcite.DataContext;
+import org.apache.calcite.linq4j.Enumerable;
+import org.apache.calcite.rel.type.RelDataType;
+import org.apache.calcite.rel.type.RelDataTypeFactory;
+import org.apache.calcite.schema.ScannableTable;
+import org.apache.calcite.schema.Schema;
+import org.apache.calcite.schema.SchemaPlus;
+import org.apache.calcite.schema.Statistic;
+import org.apache.calcite.schema.Statistics;
+import org.apache.calcite.schema.impl.ScalarFunctionImpl;
+import org.apache.calcite.tools.Frameworks;
+
+/**
+ * {@link BeamSqlEnv} prepares the execution context for {@link BeamSql} and
+ * {@link BeamSqlCli}.
+ *
+ * <p>It contains a {@link SchemaPlus} which holds the metadata of tables/UDF functions,
+ * and a {@link BeamQueryPlanner} which parse/validate/optimize/translate input SQL queries.
+ */
+public class BeamSqlEnv implements Serializable{
+ transient SchemaPlus schema;
+ transient BeamQueryPlanner planner;
+
+ public BeamSqlEnv() {
+ schema = Frameworks.createRootSchema(true);
+ planner = new BeamQueryPlanner(schema);
+ }
+
+ /**
+ * Register a UDF function which can be used in SQL expression.
+ */
+ public void registerUdf(String functionName, Class<? extends BeamSqlUdf> clazz) {
+ schema.add(functionName, ScalarFunctionImpl.create(clazz, BeamSqlUdf.UDF_METHOD));
+ }
+
+ /**
+ * Register {@link SerializableFunction} as a UDF function which can be used in SQL expression.
+ * Note, {@link SerializableFunction} must have a constructor without arguments.
+ */
+ public void registerUdf(String functionName, SerializableFunction sfn) {
+ schema.add(functionName, ScalarFunctionImpl.create(sfn.getClass(), "apply"));
+ }
+
+ /**
+ * Register a UDAF function which can be used in GROUP-BY expression.
+ * See {@link org.apache.beam.sdk.transforms.Combine.CombineFn} on how to implement a UDAF.
+ */
+ public void registerUdaf(String functionName, Combine.CombineFn combineFn) {
+ schema.add(functionName, new UdafImpl(combineFn));
+ }
+
+ /**
+ * Registers a {@link BeamSqlTable} which can be used for all subsequent queries.
+ *
+ */
+ public void registerTable(String tableName, BeamSqlTable table) {
+ schema.add(tableName, new BeamCalciteTable(table.getRowType()));
+ planner.getSourceTables().put(tableName, table);
+ }
+
+ /**
+ * Finds a {@link BeamSqlTable} by table name.
+ */
+ public BeamSqlTable findTable(String tableName){
+ return planner.getSourceTables().get(tableName);
+ }
+
+ private static class BeamCalciteTable implements ScannableTable, Serializable {
+ private BeamRecordSqlType beamSqlRowType;
+ public BeamCalciteTable(BeamRecordSqlType beamSqlRowType) {
+ this.beamSqlRowType = beamSqlRowType;
+ }
+ @Override
+ public RelDataType getRowType(RelDataTypeFactory typeFactory) {
+ return CalciteUtils.toCalciteRowType(this.beamSqlRowType)
+ .apply(BeamQueryPlanner.TYPE_FACTORY);
+ }
+
+ @Override
+ public Enumerable<Object[]> scan(DataContext root) {
+ // not used as Beam SQL uses its own execution engine
+ return null;
+ }
+
+ /**
+ * {@link Statistic} is not used to optimize the plan.
+ */
+ @Override
+ public Statistic getStatistic() {
+ return Statistics.UNKNOWN;
+ }
+
+ /**
+ * All sources are treated as TABLE in Beam SQL.
+ */
+ @Override
+ public Schema.TableType getJdbcTableType() {
+ return Schema.TableType.TABLE;
+ }
+ }
+
+ public BeamQueryPlanner getPlanner() {
+ return planner;
+ }
+}
http://git-wip-us.apache.org/repos/asf/beam/blob/49aad927/sdks/java/extensions/sql/src/main/java/org/apache/beam/sdk/extensions/sql/impl/package-info.java
----------------------------------------------------------------------
diff --git a/sdks/java/extensions/sql/src/main/java/org/apache/beam/sdk/extensions/sql/impl/package-info.java b/sdks/java/extensions/sql/src/main/java/org/apache/beam/sdk/extensions/sql/impl/package-info.java
new file mode 100644
index 0000000..de237d6
--- /dev/null
+++ b/sdks/java/extensions/sql/src/main/java/org/apache/beam/sdk/extensions/sql/impl/package-info.java
@@ -0,0 +1,22 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements. See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership. The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License. You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+/**
+ * Implementation classes of BeamSql.
+ */
+package org.apache.beam.sdk.extensions.sql.impl;
http://git-wip-us.apache.org/repos/asf/beam/blob/49aad927/sdks/java/extensions/sql/src/main/java/org/apache/beam/sdk/extensions/sql/impl/planner/BeamQueryPlanner.java
----------------------------------------------------------------------
diff --git a/sdks/java/extensions/sql/src/main/java/org/apache/beam/sdk/extensions/sql/impl/planner/BeamQueryPlanner.java b/sdks/java/extensions/sql/src/main/java/org/apache/beam/sdk/extensions/sql/impl/planner/BeamQueryPlanner.java
index b421bc3..410c783 100644
--- a/sdks/java/extensions/sql/src/main/java/org/apache/beam/sdk/extensions/sql/impl/planner/BeamQueryPlanner.java
+++ b/sdks/java/extensions/sql/src/main/java/org/apache/beam/sdk/extensions/sql/impl/planner/BeamQueryPlanner.java
@@ -23,10 +23,11 @@ import java.util.HashMap;
import java.util.List;
import java.util.Map;
import org.apache.beam.sdk.Pipeline;
-import org.apache.beam.sdk.extensions.sql.BeamSqlEnv;
+import org.apache.beam.sdk.extensions.sql.impl.BeamSqlEnv;
import org.apache.beam.sdk.extensions.sql.impl.rel.BeamLogicalConvention;
import org.apache.beam.sdk.extensions.sql.impl.rel.BeamRelNode;
-import org.apache.beam.sdk.extensions.sql.schema.BaseBeamTable;
+import org.apache.beam.sdk.extensions.sql.impl.schema.BaseBeamTable;
+import org.apache.beam.sdk.extensions.sql.impl.schema.BeamSqlTable;
import org.apache.beam.sdk.values.BeamRecord;
import org.apache.beam.sdk.values.PCollection;
import org.apache.beam.sdk.values.PCollectionTuple;
@@ -67,7 +68,7 @@ public class BeamQueryPlanner {
private static final Logger LOG = LoggerFactory.getLogger(BeamQueryPlanner.class);
protected final Planner planner;
- private Map<String, BaseBeamTable> sourceTables = new HashMap<>();
+ private Map<String, BeamSqlTable> sourceTables = new HashMap<>();
public static final JavaTypeFactory TYPE_FACTORY = new JavaTypeFactoryImpl(
RelDataTypeSystem.DEFAULT);
@@ -156,7 +157,7 @@ public class BeamQueryPlanner {
return planner.validate(sqlNode);
}
- public Map<String, BaseBeamTable> getSourceTables() {
+ public Map<String, BeamSqlTable> getSourceTables() {
return sourceTables;
}
http://git-wip-us.apache.org/repos/asf/beam/blob/49aad927/sdks/java/extensions/sql/src/main/java/org/apache/beam/sdk/extensions/sql/impl/rel/BeamAggregationRel.java
----------------------------------------------------------------------
diff --git a/sdks/java/extensions/sql/src/main/java/org/apache/beam/sdk/extensions/sql/impl/rel/BeamAggregationRel.java b/sdks/java/extensions/sql/src/main/java/org/apache/beam/sdk/extensions/sql/impl/rel/BeamAggregationRel.java
index 4b557f9..e49e79c 100644
--- a/sdks/java/extensions/sql/src/main/java/org/apache/beam/sdk/extensions/sql/impl/rel/BeamAggregationRel.java
+++ b/sdks/java/extensions/sql/src/main/java/org/apache/beam/sdk/extensions/sql/impl/rel/BeamAggregationRel.java
@@ -21,10 +21,10 @@ import java.util.ArrayList;
import java.util.List;
import org.apache.beam.sdk.coders.BeamRecordCoder;
import org.apache.beam.sdk.coders.KvCoder;
-import org.apache.beam.sdk.extensions.sql.BeamSqlEnv;
+import org.apache.beam.sdk.extensions.sql.BeamRecordSqlType;
+import org.apache.beam.sdk.extensions.sql.impl.BeamSqlEnv;
import org.apache.beam.sdk.extensions.sql.impl.transform.BeamAggregationTransforms;
import org.apache.beam.sdk.extensions.sql.impl.utils.CalciteUtils;
-import org.apache.beam.sdk.extensions.sql.schema.BeamRecordSqlType;
import org.apache.beam.sdk.transforms.Combine;
import org.apache.beam.sdk.transforms.ParDo;
import org.apache.beam.sdk.transforms.WithKeys;
http://git-wip-us.apache.org/repos/asf/beam/blob/49aad927/sdks/java/extensions/sql/src/main/java/org/apache/beam/sdk/extensions/sql/impl/rel/BeamFilterRel.java
----------------------------------------------------------------------
diff --git a/sdks/java/extensions/sql/src/main/java/org/apache/beam/sdk/extensions/sql/impl/rel/BeamFilterRel.java b/sdks/java/extensions/sql/src/main/java/org/apache/beam/sdk/extensions/sql/impl/rel/BeamFilterRel.java
index 8fe5be4..9d36a47 100644
--- a/sdks/java/extensions/sql/src/main/java/org/apache/beam/sdk/extensions/sql/impl/rel/BeamFilterRel.java
+++ b/sdks/java/extensions/sql/src/main/java/org/apache/beam/sdk/extensions/sql/impl/rel/BeamFilterRel.java
@@ -17,7 +17,7 @@
*/
package org.apache.beam.sdk.extensions.sql.impl.rel;
-import org.apache.beam.sdk.extensions.sql.BeamSqlEnv;
+import org.apache.beam.sdk.extensions.sql.impl.BeamSqlEnv;
import org.apache.beam.sdk.extensions.sql.impl.interpreter.BeamSqlExpressionExecutor;
import org.apache.beam.sdk.extensions.sql.impl.interpreter.BeamSqlFnExecutor;
import org.apache.beam.sdk.extensions.sql.impl.transform.BeamSqlFilterFn;
http://git-wip-us.apache.org/repos/asf/beam/blob/49aad927/sdks/java/extensions/sql/src/main/java/org/apache/beam/sdk/extensions/sql/impl/rel/BeamIOSinkRel.java
----------------------------------------------------------------------
diff --git a/sdks/java/extensions/sql/src/main/java/org/apache/beam/sdk/extensions/sql/impl/rel/BeamIOSinkRel.java b/sdks/java/extensions/sql/src/main/java/org/apache/beam/sdk/extensions/sql/impl/rel/BeamIOSinkRel.java
index 1e3eb4c..7bb08c2 100644
--- a/sdks/java/extensions/sql/src/main/java/org/apache/beam/sdk/extensions/sql/impl/rel/BeamIOSinkRel.java
+++ b/sdks/java/extensions/sql/src/main/java/org/apache/beam/sdk/extensions/sql/impl/rel/BeamIOSinkRel.java
@@ -19,8 +19,8 @@ package org.apache.beam.sdk.extensions.sql.impl.rel;
import com.google.common.base.Joiner;
import java.util.List;
-import org.apache.beam.sdk.extensions.sql.BeamSqlEnv;
-import org.apache.beam.sdk.extensions.sql.schema.BaseBeamTable;
+import org.apache.beam.sdk.extensions.sql.impl.BeamSqlEnv;
+import org.apache.beam.sdk.extensions.sql.impl.schema.BeamSqlTable;
import org.apache.beam.sdk.values.BeamRecord;
import org.apache.beam.sdk.values.PCollection;
import org.apache.beam.sdk.values.PCollectionTuple;
@@ -65,7 +65,7 @@ public class BeamIOSinkRel extends TableModify implements BeamRelNode {
String sourceName = Joiner.on('.').join(getTable().getQualifiedName());
- BaseBeamTable targetTable = sqlEnv.findTable(sourceName);
+ BeamSqlTable targetTable = sqlEnv.findTable(sourceName);
upstream.apply(stageName, targetTable.buildIOWriter());
http://git-wip-us.apache.org/repos/asf/beam/blob/49aad927/sdks/java/extensions/sql/src/main/java/org/apache/beam/sdk/extensions/sql/impl/rel/BeamIOSourceRel.java
----------------------------------------------------------------------
diff --git a/sdks/java/extensions/sql/src/main/java/org/apache/beam/sdk/extensions/sql/impl/rel/BeamIOSourceRel.java b/sdks/java/extensions/sql/src/main/java/org/apache/beam/sdk/extensions/sql/impl/rel/BeamIOSourceRel.java
index 254f990..1e4f506 100644
--- a/sdks/java/extensions/sql/src/main/java/org/apache/beam/sdk/extensions/sql/impl/rel/BeamIOSourceRel.java
+++ b/sdks/java/extensions/sql/src/main/java/org/apache/beam/sdk/extensions/sql/impl/rel/BeamIOSourceRel.java
@@ -18,9 +18,9 @@
package org.apache.beam.sdk.extensions.sql.impl.rel;
import com.google.common.base.Joiner;
-import org.apache.beam.sdk.extensions.sql.BeamSqlEnv;
+import org.apache.beam.sdk.extensions.sql.impl.BeamSqlEnv;
+import org.apache.beam.sdk.extensions.sql.impl.schema.BeamSqlTable;
import org.apache.beam.sdk.extensions.sql.impl.utils.CalciteUtils;
-import org.apache.beam.sdk.extensions.sql.schema.BaseBeamTable;
import org.apache.beam.sdk.values.BeamRecord;
import org.apache.beam.sdk.values.PCollection;
import org.apache.beam.sdk.values.PCollectionTuple;
@@ -53,7 +53,7 @@ public class BeamIOSourceRel extends TableScan implements BeamRelNode {
return sourceStream;
} else {
 //If not, the source PCollection is provided with BaseBeamTable.buildIOReader().
- BaseBeamTable sourceTable = sqlEnv.findTable(sourceName);
+ BeamSqlTable sourceTable = sqlEnv.findTable(sourceName);
return sourceTable.buildIOReader(inputPCollections.getPipeline())
.setCoder(CalciteUtils.toBeamRowType(getRowType()).getRecordCoder());
}
http://git-wip-us.apache.org/repos/asf/beam/blob/49aad927/sdks/java/extensions/sql/src/main/java/org/apache/beam/sdk/extensions/sql/impl/rel/BeamIntersectRel.java
----------------------------------------------------------------------
diff --git a/sdks/java/extensions/sql/src/main/java/org/apache/beam/sdk/extensions/sql/impl/rel/BeamIntersectRel.java b/sdks/java/extensions/sql/src/main/java/org/apache/beam/sdk/extensions/sql/impl/rel/BeamIntersectRel.java
index 5919329..1ffb636 100644
--- a/sdks/java/extensions/sql/src/main/java/org/apache/beam/sdk/extensions/sql/impl/rel/BeamIntersectRel.java
+++ b/sdks/java/extensions/sql/src/main/java/org/apache/beam/sdk/extensions/sql/impl/rel/BeamIntersectRel.java
@@ -19,7 +19,7 @@
package org.apache.beam.sdk.extensions.sql.impl.rel;
import java.util.List;
-import org.apache.beam.sdk.extensions.sql.BeamSqlEnv;
+import org.apache.beam.sdk.extensions.sql.impl.BeamSqlEnv;
import org.apache.beam.sdk.values.BeamRecord;
import org.apache.beam.sdk.values.PCollection;
import org.apache.beam.sdk.values.PCollectionTuple;
http://git-wip-us.apache.org/repos/asf/beam/blob/49aad927/sdks/java/extensions/sql/src/main/java/org/apache/beam/sdk/extensions/sql/impl/rel/BeamJoinRel.java
----------------------------------------------------------------------
diff --git a/sdks/java/extensions/sql/src/main/java/org/apache/beam/sdk/extensions/sql/impl/rel/BeamJoinRel.java b/sdks/java/extensions/sql/src/main/java/org/apache/beam/sdk/extensions/sql/impl/rel/BeamJoinRel.java
index 5ac9575..cc26aa6 100644
--- a/sdks/java/extensions/sql/src/main/java/org/apache/beam/sdk/extensions/sql/impl/rel/BeamJoinRel.java
+++ b/sdks/java/extensions/sql/src/main/java/org/apache/beam/sdk/extensions/sql/impl/rel/BeamJoinRel.java
@@ -25,10 +25,10 @@ import java.util.Map;
import java.util.Set;
import org.apache.beam.sdk.coders.Coder;
import org.apache.beam.sdk.coders.KvCoder;
-import org.apache.beam.sdk.extensions.sql.BeamSqlEnv;
+import org.apache.beam.sdk.extensions.sql.BeamRecordSqlType;
+import org.apache.beam.sdk.extensions.sql.impl.BeamSqlEnv;
import org.apache.beam.sdk.extensions.sql.impl.transform.BeamJoinTransforms;
import org.apache.beam.sdk.extensions.sql.impl.utils.CalciteUtils;
-import org.apache.beam.sdk.extensions.sql.schema.BeamRecordSqlType;
import org.apache.beam.sdk.transforms.MapElements;
import org.apache.beam.sdk.transforms.ParDo;
import org.apache.beam.sdk.transforms.View;
http://git-wip-us.apache.org/repos/asf/beam/blob/49aad927/sdks/java/extensions/sql/src/main/java/org/apache/beam/sdk/extensions/sql/impl/rel/BeamMinusRel.java
----------------------------------------------------------------------
diff --git a/sdks/java/extensions/sql/src/main/java/org/apache/beam/sdk/extensions/sql/impl/rel/BeamMinusRel.java b/sdks/java/extensions/sql/src/main/java/org/apache/beam/sdk/extensions/sql/impl/rel/BeamMinusRel.java
index b55252a..6f5dff2 100644
--- a/sdks/java/extensions/sql/src/main/java/org/apache/beam/sdk/extensions/sql/impl/rel/BeamMinusRel.java
+++ b/sdks/java/extensions/sql/src/main/java/org/apache/beam/sdk/extensions/sql/impl/rel/BeamMinusRel.java
@@ -19,7 +19,7 @@
package org.apache.beam.sdk.extensions.sql.impl.rel;
import java.util.List;
-import org.apache.beam.sdk.extensions.sql.BeamSqlEnv;
+import org.apache.beam.sdk.extensions.sql.impl.BeamSqlEnv;
import org.apache.beam.sdk.values.BeamRecord;
import org.apache.beam.sdk.values.PCollection;
import org.apache.beam.sdk.values.PCollectionTuple;
http://git-wip-us.apache.org/repos/asf/beam/blob/49aad927/sdks/java/extensions/sql/src/main/java/org/apache/beam/sdk/extensions/sql/impl/rel/BeamProjectRel.java
----------------------------------------------------------------------
diff --git a/sdks/java/extensions/sql/src/main/java/org/apache/beam/sdk/extensions/sql/impl/rel/BeamProjectRel.java b/sdks/java/extensions/sql/src/main/java/org/apache/beam/sdk/extensions/sql/impl/rel/BeamProjectRel.java
index b1ff629..501feb3 100644
--- a/sdks/java/extensions/sql/src/main/java/org/apache/beam/sdk/extensions/sql/impl/rel/BeamProjectRel.java
+++ b/sdks/java/extensions/sql/src/main/java/org/apache/beam/sdk/extensions/sql/impl/rel/BeamProjectRel.java
@@ -18,7 +18,7 @@
package org.apache.beam.sdk.extensions.sql.impl.rel;
import java.util.List;
-import org.apache.beam.sdk.extensions.sql.BeamSqlEnv;
+import org.apache.beam.sdk.extensions.sql.impl.BeamSqlEnv;
import org.apache.beam.sdk.extensions.sql.impl.interpreter.BeamSqlExpressionExecutor;
import org.apache.beam.sdk.extensions.sql.impl.interpreter.BeamSqlFnExecutor;
import org.apache.beam.sdk.extensions.sql.impl.transform.BeamSqlProjectFn;
http://git-wip-us.apache.org/repos/asf/beam/blob/49aad927/sdks/java/extensions/sql/src/main/java/org/apache/beam/sdk/extensions/sql/impl/rel/BeamRelNode.java
----------------------------------------------------------------------
diff --git a/sdks/java/extensions/sql/src/main/java/org/apache/beam/sdk/extensions/sql/impl/rel/BeamRelNode.java b/sdks/java/extensions/sql/src/main/java/org/apache/beam/sdk/extensions/sql/impl/rel/BeamRelNode.java
index b8b4293..9e8d46d 100644
--- a/sdks/java/extensions/sql/src/main/java/org/apache/beam/sdk/extensions/sql/impl/rel/BeamRelNode.java
+++ b/sdks/java/extensions/sql/src/main/java/org/apache/beam/sdk/extensions/sql/impl/rel/BeamRelNode.java
@@ -17,7 +17,7 @@
*/
package org.apache.beam.sdk.extensions.sql.impl.rel;
-import org.apache.beam.sdk.extensions.sql.BeamSqlEnv;
+import org.apache.beam.sdk.extensions.sql.impl.BeamSqlEnv;
import org.apache.beam.sdk.values.BeamRecord;
import org.apache.beam.sdk.values.PCollection;
import org.apache.beam.sdk.values.PCollectionTuple;
@@ -33,6 +33,7 @@ public interface BeamRelNode extends RelNode {
* {@code BeamQueryPlanner} visits it with a DFS(Depth-First-Search)
* algorithm.
*/
- PCollection<BeamRecord> buildBeamPipeline(PCollectionTuple inputPCollections, BeamSqlEnv sqlEnv)
+ PCollection<BeamRecord> buildBeamPipeline(
+ PCollectionTuple inputPCollections, BeamSqlEnv sqlEnv)
throws Exception;
}
http://git-wip-us.apache.org/repos/asf/beam/blob/49aad927/sdks/java/extensions/sql/src/main/java/org/apache/beam/sdk/extensions/sql/impl/rel/BeamSetOperatorRelBase.java
----------------------------------------------------------------------
diff --git a/sdks/java/extensions/sql/src/main/java/org/apache/beam/sdk/extensions/sql/impl/rel/BeamSetOperatorRelBase.java b/sdks/java/extensions/sql/src/main/java/org/apache/beam/sdk/extensions/sql/impl/rel/BeamSetOperatorRelBase.java
index f9cbf4f..a1f3e2b 100644
--- a/sdks/java/extensions/sql/src/main/java/org/apache/beam/sdk/extensions/sql/impl/rel/BeamSetOperatorRelBase.java
+++ b/sdks/java/extensions/sql/src/main/java/org/apache/beam/sdk/extensions/sql/impl/rel/BeamSetOperatorRelBase.java
@@ -20,7 +20,7 @@ package org.apache.beam.sdk.extensions.sql.impl.rel;
import java.io.Serializable;
import java.util.List;
-import org.apache.beam.sdk.extensions.sql.BeamSqlEnv;
+import org.apache.beam.sdk.extensions.sql.impl.BeamSqlEnv;
import org.apache.beam.sdk.extensions.sql.impl.transform.BeamSetOperatorsTransforms;
import org.apache.beam.sdk.transforms.MapElements;
import org.apache.beam.sdk.transforms.ParDo;
http://git-wip-us.apache.org/repos/asf/beam/blob/49aad927/sdks/java/extensions/sql/src/main/java/org/apache/beam/sdk/extensions/sql/impl/rel/BeamSortRel.java
----------------------------------------------------------------------
diff --git a/sdks/java/extensions/sql/src/main/java/org/apache/beam/sdk/extensions/sql/impl/rel/BeamSortRel.java b/sdks/java/extensions/sql/src/main/java/org/apache/beam/sdk/extensions/sql/impl/rel/BeamSortRel.java
index 80f3c97..d658638 100644
--- a/sdks/java/extensions/sql/src/main/java/org/apache/beam/sdk/extensions/sql/impl/rel/BeamSortRel.java
+++ b/sdks/java/extensions/sql/src/main/java/org/apache/beam/sdk/extensions/sql/impl/rel/BeamSortRel.java
@@ -25,9 +25,9 @@ import java.util.ArrayList;
import java.util.Comparator;
import java.util.List;
import org.apache.beam.sdk.coders.ListCoder;
-import org.apache.beam.sdk.extensions.sql.BeamSqlEnv;
+import org.apache.beam.sdk.extensions.sql.BeamSqlRecordHelper;
+import org.apache.beam.sdk.extensions.sql.impl.BeamSqlEnv;
import org.apache.beam.sdk.extensions.sql.impl.utils.CalciteUtils;
-import org.apache.beam.sdk.extensions.sql.schema.BeamSqlRecordHelper;
import org.apache.beam.sdk.transforms.DoFn;
import org.apache.beam.sdk.transforms.Flatten;
import org.apache.beam.sdk.transforms.ParDo;
http://git-wip-us.apache.org/repos/asf/beam/blob/49aad927/sdks/java/extensions/sql/src/main/java/org/apache/beam/sdk/extensions/sql/impl/rel/BeamUnionRel.java
----------------------------------------------------------------------
diff --git a/sdks/java/extensions/sql/src/main/java/org/apache/beam/sdk/extensions/sql/impl/rel/BeamUnionRel.java b/sdks/java/extensions/sql/src/main/java/org/apache/beam/sdk/extensions/sql/impl/rel/BeamUnionRel.java
index 63ebdf3..85d676e 100644
--- a/sdks/java/extensions/sql/src/main/java/org/apache/beam/sdk/extensions/sql/impl/rel/BeamUnionRel.java
+++ b/sdks/java/extensions/sql/src/main/java/org/apache/beam/sdk/extensions/sql/impl/rel/BeamUnionRel.java
@@ -19,7 +19,7 @@
package org.apache.beam.sdk.extensions.sql.impl.rel;
import java.util.List;
-import org.apache.beam.sdk.extensions.sql.BeamSqlEnv;
+import org.apache.beam.sdk.extensions.sql.impl.BeamSqlEnv;
import org.apache.beam.sdk.transforms.windowing.WindowFn;
import org.apache.beam.sdk.values.BeamRecord;
import org.apache.beam.sdk.values.PCollection;
http://git-wip-us.apache.org/repos/asf/beam/blob/49aad927/sdks/java/extensions/sql/src/main/java/org/apache/beam/sdk/extensions/sql/impl/rel/BeamValuesRel.java
----------------------------------------------------------------------
diff --git a/sdks/java/extensions/sql/src/main/java/org/apache/beam/sdk/extensions/sql/impl/rel/BeamValuesRel.java b/sdks/java/extensions/sql/src/main/java/org/apache/beam/sdk/extensions/sql/impl/rel/BeamValuesRel.java
index c4caff3..d684294 100644
--- a/sdks/java/extensions/sql/src/main/java/org/apache/beam/sdk/extensions/sql/impl/rel/BeamValuesRel.java
+++ b/sdks/java/extensions/sql/src/main/java/org/apache/beam/sdk/extensions/sql/impl/rel/BeamValuesRel.java
@@ -21,10 +21,10 @@ package org.apache.beam.sdk.extensions.sql.impl.rel;
import com.google.common.collect.ImmutableList;
import java.util.ArrayList;
import java.util.List;
-import org.apache.beam.sdk.extensions.sql.BeamSqlEnv;
+import org.apache.beam.sdk.extensions.sql.BeamRecordSqlType;
+import org.apache.beam.sdk.extensions.sql.impl.BeamSqlEnv;
+import org.apache.beam.sdk.extensions.sql.impl.schema.BeamTableUtils;
import org.apache.beam.sdk.extensions.sql.impl.utils.CalciteUtils;
-import org.apache.beam.sdk.extensions.sql.schema.BeamRecordSqlType;
-import org.apache.beam.sdk.extensions.sql.schema.BeamTableUtils;
import org.apache.beam.sdk.transforms.Create;
import org.apache.beam.sdk.values.BeamRecord;
import org.apache.beam.sdk.values.PCollection;
http://git-wip-us.apache.org/repos/asf/beam/blob/49aad927/sdks/java/extensions/sql/src/main/java/org/apache/beam/sdk/extensions/sql/impl/schema/BaseBeamTable.java
----------------------------------------------------------------------
diff --git a/sdks/java/extensions/sql/src/main/java/org/apache/beam/sdk/extensions/sql/impl/schema/BaseBeamTable.java b/sdks/java/extensions/sql/src/main/java/org/apache/beam/sdk/extensions/sql/impl/schema/BaseBeamTable.java
new file mode 100644
index 0000000..73e0863
--- /dev/null
+++ b/sdks/java/extensions/sql/src/main/java/org/apache/beam/sdk/extensions/sql/impl/schema/BaseBeamTable.java
@@ -0,0 +1,35 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements. See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership. The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License. You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.apache.beam.sdk.extensions.sql.impl.schema;
+
+import java.io.Serializable;
+import org.apache.beam.sdk.extensions.sql.BeamRecordSqlType;
+
+/**
+ * Each IO in Beam has one table schema, by extending {@link BaseBeamTable}.
+ */
+public abstract class BaseBeamTable implements BeamSqlTable, Serializable {
+ protected BeamRecordSqlType beamSqlRowType;
+ public BaseBeamTable(BeamRecordSqlType beamSqlRowType) {
+ this.beamSqlRowType = beamSqlRowType;
+ }
+
+ @Override public BeamRecordSqlType getRowType() {
+ return beamSqlRowType;
+ }
+}
http://git-wip-us.apache.org/repos/asf/beam/blob/49aad927/sdks/java/extensions/sql/src/main/java/org/apache/beam/sdk/extensions/sql/impl/schema/BeamIOType.java
----------------------------------------------------------------------
diff --git a/sdks/java/extensions/sql/src/main/java/org/apache/beam/sdk/extensions/sql/impl/schema/BeamIOType.java b/sdks/java/extensions/sql/src/main/java/org/apache/beam/sdk/extensions/sql/impl/schema/BeamIOType.java
new file mode 100644
index 0000000..5ced467
--- /dev/null
+++ b/sdks/java/extensions/sql/src/main/java/org/apache/beam/sdk/extensions/sql/impl/schema/BeamIOType.java
@@ -0,0 +1,28 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements. See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership. The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License. You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.apache.beam.sdk.extensions.sql.impl.schema;
+
+import java.io.Serializable;
+
+/**
+ * Type of a source IO, determining whether it's a STREAMING process or a batch
+ * process.
+ */
+public enum BeamIOType implements Serializable {
+ BOUNDED, UNBOUNDED;
+}
http://git-wip-us.apache.org/repos/asf/beam/blob/49aad927/sdks/java/extensions/sql/src/main/java/org/apache/beam/sdk/extensions/sql/impl/schema/BeamPCollectionTable.java
----------------------------------------------------------------------
diff --git a/sdks/java/extensions/sql/src/main/java/org/apache/beam/sdk/extensions/sql/impl/schema/BeamPCollectionTable.java b/sdks/java/extensions/sql/src/main/java/org/apache/beam/sdk/extensions/sql/impl/schema/BeamPCollectionTable.java
new file mode 100644
index 0000000..31e60e0
--- /dev/null
+++ b/sdks/java/extensions/sql/src/main/java/org/apache/beam/sdk/extensions/sql/impl/schema/BeamPCollectionTable.java
@@ -0,0 +1,63 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements. See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership. The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License. You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.apache.beam.sdk.extensions.sql.impl.schema;
+
+import org.apache.beam.sdk.Pipeline;
+import org.apache.beam.sdk.extensions.sql.BeamRecordSqlType;
+import org.apache.beam.sdk.transforms.PTransform;
+import org.apache.beam.sdk.values.BeamRecord;
+import org.apache.beam.sdk.values.PCollection;
+import org.apache.beam.sdk.values.PCollection.IsBounded;
+import org.apache.beam.sdk.values.PDone;
+
+/**
+ * {@code BeamPCollectionTable} converts a {@code PCollection<BeamSqlRow>} into a virtual table,
+ * then a downstream query can query directly.
+ */
+public class BeamPCollectionTable extends BaseBeamTable {
+ private BeamIOType ioType;
+ private transient PCollection<BeamRecord> upstream;
+
+ protected BeamPCollectionTable(BeamRecordSqlType beamSqlRowType) {
+ super(beamSqlRowType);
+ }
+
+ public BeamPCollectionTable(PCollection<BeamRecord> upstream,
+ BeamRecordSqlType beamSqlRowType){
+ this(beamSqlRowType);
+ ioType = upstream.isBounded().equals(IsBounded.BOUNDED)
+ ? BeamIOType.BOUNDED : BeamIOType.UNBOUNDED;
+ this.upstream = upstream;
+ }
+
+ @Override
+ public BeamIOType getSourceType() {
+ return ioType;
+ }
+
+ @Override
+ public PCollection<BeamRecord> buildIOReader(Pipeline pipeline) {
+ return upstream;
+ }
+
+ @Override
+ public PTransform<? super PCollection<BeamRecord>, PDone> buildIOWriter() {
+ throw new IllegalArgumentException("cannot use [BeamPCollectionTable] as target");
+ }
+
+}
http://git-wip-us.apache.org/repos/asf/beam/blob/49aad927/sdks/java/extensions/sql/src/main/java/org/apache/beam/sdk/extensions/sql/impl/schema/BeamSqlTable.java
----------------------------------------------------------------------
diff --git a/sdks/java/extensions/sql/src/main/java/org/apache/beam/sdk/extensions/sql/impl/schema/BeamSqlTable.java b/sdks/java/extensions/sql/src/main/java/org/apache/beam/sdk/extensions/sql/impl/schema/BeamSqlTable.java
new file mode 100644
index 0000000..46fba59
--- /dev/null
+++ b/sdks/java/extensions/sql/src/main/java/org/apache/beam/sdk/extensions/sql/impl/schema/BeamSqlTable.java
@@ -0,0 +1,54 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements. See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership. The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License. You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.beam.sdk.extensions.sql.impl.schema;
+
+import org.apache.beam.sdk.Pipeline;
+import org.apache.beam.sdk.extensions.sql.BeamRecordSqlType;
+import org.apache.beam.sdk.transforms.PTransform;
+import org.apache.beam.sdk.values.BeamRecord;
+import org.apache.beam.sdk.values.PCollection;
+import org.apache.beam.sdk.values.PDone;
+
+/**
+ * This interface defines a Beam Sql Table.
+ */
+public interface BeamSqlTable {
+ /**
+ * In Beam SQL, there's no difference between a batch query and a streaming
+ * query. {@link BeamIOType} is used to validate the sources.
+ */
+ BeamIOType getSourceType();
+
+ /**
+ * create a {@code PCollection<BeamSqlRow>} from source.
+ *
+ */
+ PCollection<BeamRecord> buildIOReader(Pipeline pipeline);
+
+ /**
+ * create a {@code IO.write()} instance to write to target.
+ *
+ */
+ PTransform<? super PCollection<BeamRecord>, PDone> buildIOWriter();
+
+ /**
+ * Get the schema info of the table.
+ */
+ BeamRecordSqlType getRowType();
+}
http://git-wip-us.apache.org/repos/asf/beam/blob/49aad927/sdks/java/extensions/sql/src/main/java/org/apache/beam/sdk/extensions/sql/impl/schema/BeamTableUtils.java
----------------------------------------------------------------------
diff --git a/sdks/java/extensions/sql/src/main/java/org/apache/beam/sdk/extensions/sql/impl/schema/BeamTableUtils.java b/sdks/java/extensions/sql/src/main/java/org/apache/beam/sdk/extensions/sql/impl/schema/BeamTableUtils.java
new file mode 100644
index 0000000..6f7f09b
--- /dev/null
+++ b/sdks/java/extensions/sql/src/main/java/org/apache/beam/sdk/extensions/sql/impl/schema/BeamTableUtils.java
@@ -0,0 +1,118 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements. See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership. The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License. You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.beam.sdk.extensions.sql.impl.schema;
+
+import java.io.IOException;
+import java.io.StringReader;
+import java.io.StringWriter;
+import java.math.BigDecimal;
+import java.util.ArrayList;
+import java.util.List;
+import org.apache.beam.sdk.extensions.sql.BeamRecordSqlType;
+import org.apache.beam.sdk.extensions.sql.impl.utils.CalciteUtils;
+import org.apache.beam.sdk.values.BeamRecord;
+import org.apache.calcite.sql.type.SqlTypeName;
+import org.apache.calcite.util.NlsString;
+import org.apache.commons.csv.CSVFormat;
+import org.apache.commons.csv.CSVParser;
+import org.apache.commons.csv.CSVPrinter;
+import org.apache.commons.csv.CSVRecord;
+
+/**
+ * Utility methods for working with {@code BeamTable}.
+ */
+public final class BeamTableUtils {
+ public static BeamRecord csvLine2BeamSqlRow(
+ CSVFormat csvFormat,
+ String line,
+ BeamRecordSqlType beamRecordSqlType) {
+ List<Object> fieldsValue = new ArrayList<>(beamRecordSqlType.getFieldCount());
+ try (StringReader reader = new StringReader(line)) {
+ CSVParser parser = csvFormat.parse(reader);
+ CSVRecord rawRecord = parser.getRecords().get(0);
+
+ if (rawRecord.size() != beamRecordSqlType.getFieldCount()) {
+ throw new IllegalArgumentException(String.format(
+ "Expect %d fields, but actually %d",
+ beamRecordSqlType.getFieldCount(), rawRecord.size()
+ ));
+ } else {
+ for (int idx = 0; idx < beamRecordSqlType.getFieldCount(); idx++) {
+ String raw = rawRecord.get(idx);
+ fieldsValue.add(autoCastField(beamRecordSqlType.getFieldTypeByIndex(idx), raw));
+ }
+ }
+ } catch (IOException e) {
+ throw new IllegalArgumentException("decodeRecord failed!", e);
+ }
+ return new BeamRecord(beamRecordSqlType, fieldsValue);
+ }
+
+ public static String beamSqlRow2CsvLine(BeamRecord row, CSVFormat csvFormat) {
+ StringWriter writer = new StringWriter();
+ try (CSVPrinter printer = csvFormat.print(writer)) {
+ for (int i = 0; i < row.getFieldCount(); i++) {
+ printer.print(row.getFieldValue(i).toString());
+ }
+ printer.println();
+ } catch (IOException e) {
+ throw new IllegalArgumentException("encodeRecord failed!", e);
+ }
+ return writer.toString();
+ }
+
+ public static Object autoCastField(int fieldType, Object rawObj) {
+ if (rawObj == null) {
+ return null;
+ }
+
+ SqlTypeName columnType = CalciteUtils.toCalciteType(fieldType);
+ // auto-casting for numerics
+ if ((rawObj instanceof String && SqlTypeName.NUMERIC_TYPES.contains(columnType))
+ || (rawObj instanceof BigDecimal && columnType != SqlTypeName.DECIMAL)) {
+ String raw = rawObj.toString();
+ switch (columnType) {
+ case TINYINT:
+ return Byte.valueOf(raw);
+ case SMALLINT:
+ return Short.valueOf(raw);
+ case INTEGER:
+ return Integer.valueOf(raw);
+ case BIGINT:
+ return Long.valueOf(raw);
+ case FLOAT:
+ return Float.valueOf(raw);
+ case DOUBLE:
+ return Double.valueOf(raw);
+ default:
+ throw new UnsupportedOperationException(
+ String.format("Column type %s is not supported yet!", columnType));
+ }
+ } else if (SqlTypeName.CHAR_TYPES.contains(columnType)) {
+ // convert NlsString to String
+ if (rawObj instanceof NlsString) {
+ return ((NlsString) rawObj).getValue();
+ } else {
+ return rawObj;
+ }
+ } else {
+ return rawObj;
+ }
+ }
+}
[3/5] beam git commit: [BEAM-2740] Hide BeamSqlEnv.
Posted by ta...@apache.org.
http://git-wip-us.apache.org/repos/asf/beam/blob/49aad927/sdks/java/extensions/sql/src/main/java/org/apache/beam/sdk/extensions/sql/impl/schema/kafka/BeamKafkaCSVTable.java
----------------------------------------------------------------------
diff --git a/sdks/java/extensions/sql/src/main/java/org/apache/beam/sdk/extensions/sql/impl/schema/kafka/BeamKafkaCSVTable.java b/sdks/java/extensions/sql/src/main/java/org/apache/beam/sdk/extensions/sql/impl/schema/kafka/BeamKafkaCSVTable.java
new file mode 100644
index 0000000..4bedec1
--- /dev/null
+++ b/sdks/java/extensions/sql/src/main/java/org/apache/beam/sdk/extensions/sql/impl/schema/kafka/BeamKafkaCSVTable.java
@@ -0,0 +1,109 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements. See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership. The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License. You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.apache.beam.sdk.extensions.sql.impl.schema.kafka;
+
+import java.util.List;
+import org.apache.beam.sdk.extensions.sql.BeamRecordSqlType;
+import org.apache.beam.sdk.extensions.sql.impl.schema.BeamTableUtils;
+import org.apache.beam.sdk.transforms.DoFn;
+import org.apache.beam.sdk.transforms.PTransform;
+import org.apache.beam.sdk.transforms.ParDo;
+import org.apache.beam.sdk.values.BeamRecord;
+import org.apache.beam.sdk.values.KV;
+import org.apache.beam.sdk.values.PCollection;
+import org.apache.commons.csv.CSVFormat;
+
+/**
+ * A Kafka topic that saves records as CSV format.
+ *
+ */
+public class BeamKafkaCSVTable extends BeamKafkaTable {
+ private CSVFormat csvFormat;
+ public BeamKafkaCSVTable(BeamRecordSqlType beamSqlRowType, String bootstrapServers,
+ List<String> topics) {
+ this(beamSqlRowType, bootstrapServers, topics, CSVFormat.DEFAULT);
+ }
+
+ public BeamKafkaCSVTable(BeamRecordSqlType beamSqlRowType, String bootstrapServers,
+ List<String> topics, CSVFormat format) {
+ super(beamSqlRowType, bootstrapServers, topics);
+ this.csvFormat = format;
+ }
+
+ @Override
+ public PTransform<PCollection<KV<byte[], byte[]>>, PCollection<BeamRecord>>
+ getPTransformForInput() {
+ return new CsvRecorderDecoder(beamSqlRowType, csvFormat);
+ }
+
+ @Override
+ public PTransform<PCollection<BeamRecord>, PCollection<KV<byte[], byte[]>>>
+ getPTransformForOutput() {
+ return new CsvRecorderEncoder(beamSqlRowType, csvFormat);
+ }
+
+ /**
+ * A PTransform to convert {@code KV<byte[], byte[]>} to {@link BeamRecord}.
+ *
+ */
+ public static class CsvRecorderDecoder
+ extends PTransform<PCollection<KV<byte[], byte[]>>, PCollection<BeamRecord>> {
+ private BeamRecordSqlType rowType;
+ private CSVFormat format;
+ public CsvRecorderDecoder(BeamRecordSqlType rowType, CSVFormat format) {
+ this.rowType = rowType;
+ this.format = format;
+ }
+
+ @Override
+ public PCollection<BeamRecord> expand(PCollection<KV<byte[], byte[]>> input) {
+ return input.apply("decodeRecord", ParDo.of(new DoFn<KV<byte[], byte[]>, BeamRecord>() {
+ @ProcessElement
+ public void processElement(ProcessContext c) {
+ String rowInString = new String(c.element().getValue());
+ c.output(BeamTableUtils.csvLine2BeamSqlRow(format, rowInString, rowType));
+ }
+ }));
+ }
+ }
+
+ /**
+ * A PTransform to convert {@link BeamRecord} to {@code KV<byte[], byte[]>}.
+ *
+ */
+ public static class CsvRecorderEncoder
+ extends PTransform<PCollection<BeamRecord>, PCollection<KV<byte[], byte[]>>> {
+ private BeamRecordSqlType rowType;
+ private CSVFormat format;
+ public CsvRecorderEncoder(BeamRecordSqlType rowType, CSVFormat format) {
+ this.rowType = rowType;
+ this.format = format;
+ }
+
+ @Override
+ public PCollection<KV<byte[], byte[]>> expand(PCollection<BeamRecord> input) {
+ return input.apply("encodeRecord", ParDo.of(new DoFn<BeamRecord, KV<byte[], byte[]>>() {
+ @ProcessElement
+ public void processElement(ProcessContext c) {
+ BeamRecord in = c.element();
+ c.output(KV.of(new byte[] {}, BeamTableUtils.beamSqlRow2CsvLine(in, format).getBytes()));
+ }
+ }));
+ }
+ }
+}
http://git-wip-us.apache.org/repos/asf/beam/blob/49aad927/sdks/java/extensions/sql/src/main/java/org/apache/beam/sdk/extensions/sql/impl/schema/kafka/BeamKafkaTable.java
----------------------------------------------------------------------
diff --git a/sdks/java/extensions/sql/src/main/java/org/apache/beam/sdk/extensions/sql/impl/schema/kafka/BeamKafkaTable.java b/sdks/java/extensions/sql/src/main/java/org/apache/beam/sdk/extensions/sql/impl/schema/kafka/BeamKafkaTable.java
new file mode 100644
index 0000000..1113abf
--- /dev/null
+++ b/sdks/java/extensions/sql/src/main/java/org/apache/beam/sdk/extensions/sql/impl/schema/kafka/BeamKafkaTable.java
@@ -0,0 +1,109 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements. See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership. The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License. You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.apache.beam.sdk.extensions.sql.impl.schema.kafka;
+
+import static com.google.common.base.Preconditions.checkArgument;
+
+import java.io.Serializable;
+import java.util.List;
+import java.util.Map;
+import org.apache.beam.sdk.Pipeline;
+import org.apache.beam.sdk.coders.ByteArrayCoder;
+import org.apache.beam.sdk.extensions.sql.BeamRecordSqlType;
+import org.apache.beam.sdk.extensions.sql.impl.schema.BaseBeamTable;
+import org.apache.beam.sdk.extensions.sql.impl.schema.BeamIOType;
+import org.apache.beam.sdk.io.kafka.KafkaIO;
+import org.apache.beam.sdk.transforms.PTransform;
+import org.apache.beam.sdk.values.BeamRecord;
+import org.apache.beam.sdk.values.KV;
+import org.apache.beam.sdk.values.PBegin;
+import org.apache.beam.sdk.values.PCollection;
+import org.apache.beam.sdk.values.PDone;
+import org.apache.kafka.common.serialization.ByteArrayDeserializer;
+import org.apache.kafka.common.serialization.ByteArraySerializer;
+
+/**
+ * {@code BeamKafkaTable} represents a Kafka topic, as a source or a target.
+ * Subclasses need to extend it to convert between {@code BeamSqlRow} and
+ * {@code KV<byte[], byte[]>}.
+ */
+public abstract class BeamKafkaTable extends BaseBeamTable implements Serializable {
+
+ private String bootstrapServers;
+ private List<String> topics;
+ private Map<String, Object> configUpdates;
+
+ protected BeamKafkaTable(BeamRecordSqlType beamSqlRowType) {
+ super(beamSqlRowType);
+ }
+
+ public BeamKafkaTable(BeamRecordSqlType beamSqlRowType, String bootstrapServers,
+ List<String> topics) {
+ super(beamSqlRowType);
+ this.bootstrapServers = bootstrapServers;
+ this.topics = topics;
+ }
+
+ public BeamKafkaTable updateConsumerProperties(Map<String, Object> configUpdates) {
+ this.configUpdates = configUpdates;
+ return this;
+ }
+
+ @Override
+ public BeamIOType getSourceType() {
+ return BeamIOType.UNBOUNDED;
+ }
+
+ public abstract PTransform<PCollection<KV<byte[], byte[]>>, PCollection<BeamRecord>>
+ getPTransformForInput();
+
+ public abstract PTransform<PCollection<BeamRecord>, PCollection<KV<byte[], byte[]>>>
+ getPTransformForOutput();
+
+ @Override
+ public PCollection<BeamRecord> buildIOReader(Pipeline pipeline) {
+ return PBegin.in(pipeline).apply("read",
+ KafkaIO.<byte[], byte[]>read()
+ .withBootstrapServers(bootstrapServers)
+ .withTopics(topics)
+ .updateConsumerProperties(configUpdates)
+ .withKeyDeserializerAndCoder(ByteArrayDeserializer.class, ByteArrayCoder.of())
+ .withValueDeserializerAndCoder(ByteArrayDeserializer.class, ByteArrayCoder.of())
+ .withoutMetadata())
+ .apply("in_format", getPTransformForInput());
+ }
+
+ @Override
+ public PTransform<? super PCollection<BeamRecord>, PDone> buildIOWriter() {
+ checkArgument(topics != null && topics.size() == 1,
+ "Only one topic can be acceptable as output.");
+
+ return new PTransform<PCollection<BeamRecord>, PDone>() {
+ @Override
+ public PDone expand(PCollection<BeamRecord> input) {
+ return input.apply("out_reformat", getPTransformForOutput()).apply("persistent",
+ KafkaIO.<byte[], byte[]>write()
+ .withBootstrapServers(bootstrapServers)
+ .withTopic(topics.get(0))
+ .withKeySerializer(ByteArraySerializer.class)
+ .withValueSerializer(ByteArraySerializer.class));
+ }
+ };
+ }
+
+}
http://git-wip-us.apache.org/repos/asf/beam/blob/49aad927/sdks/java/extensions/sql/src/main/java/org/apache/beam/sdk/extensions/sql/impl/schema/kafka/package-info.java
----------------------------------------------------------------------
diff --git a/sdks/java/extensions/sql/src/main/java/org/apache/beam/sdk/extensions/sql/impl/schema/kafka/package-info.java b/sdks/java/extensions/sql/src/main/java/org/apache/beam/sdk/extensions/sql/impl/schema/kafka/package-info.java
new file mode 100644
index 0000000..6752e3c
--- /dev/null
+++ b/sdks/java/extensions/sql/src/main/java/org/apache/beam/sdk/extensions/sql/impl/schema/kafka/package-info.java
@@ -0,0 +1,22 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements. See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership. The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License. You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+/**
+ * Table schema for KafkaIO.
+ */
+package org.apache.beam.sdk.extensions.sql.impl.schema.kafka;
http://git-wip-us.apache.org/repos/asf/beam/blob/49aad927/sdks/java/extensions/sql/src/main/java/org/apache/beam/sdk/extensions/sql/impl/schema/package-info.java
----------------------------------------------------------------------
diff --git a/sdks/java/extensions/sql/src/main/java/org/apache/beam/sdk/extensions/sql/impl/schema/package-info.java b/sdks/java/extensions/sql/src/main/java/org/apache/beam/sdk/extensions/sql/impl/schema/package-info.java
new file mode 100644
index 0000000..86e7d06
--- /dev/null
+++ b/sdks/java/extensions/sql/src/main/java/org/apache/beam/sdk/extensions/sql/impl/schema/package-info.java
@@ -0,0 +1,22 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements. See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership. The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License. You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+/**
+ * Defines table schemas that map to Beam IO components.
+ *
+ */
+package org.apache.beam.sdk.extensions.sql.impl.schema;
http://git-wip-us.apache.org/repos/asf/beam/blob/49aad927/sdks/java/extensions/sql/src/main/java/org/apache/beam/sdk/extensions/sql/impl/schema/text/BeamTextCSVTable.java
----------------------------------------------------------------------
diff --git a/sdks/java/extensions/sql/src/main/java/org/apache/beam/sdk/extensions/sql/impl/schema/text/BeamTextCSVTable.java b/sdks/java/extensions/sql/src/main/java/org/apache/beam/sdk/extensions/sql/impl/schema/text/BeamTextCSVTable.java
new file mode 100644
index 0000000..a2dd6fb
--- /dev/null
+++ b/sdks/java/extensions/sql/src/main/java/org/apache/beam/sdk/extensions/sql/impl/schema/text/BeamTextCSVTable.java
@@ -0,0 +1,70 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements. See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership. The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License. You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.beam.sdk.extensions.sql.impl.schema.text;
+
+import org.apache.beam.sdk.Pipeline;
+import org.apache.beam.sdk.extensions.sql.BeamRecordSqlType;
+import org.apache.beam.sdk.io.TextIO;
+import org.apache.beam.sdk.transforms.PTransform;
+import org.apache.beam.sdk.values.BeamRecord;
+import org.apache.beam.sdk.values.PBegin;
+import org.apache.beam.sdk.values.PCollection;
+import org.apache.beam.sdk.values.PDone;
+import org.apache.commons.csv.CSVFormat;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
+
+/**
+ * {@code BeamTextCSVTable} is a {@code BeamTextTable} which is formatted in CSV.
+ *
+ * <p>
+ * {@link CSVFormat} itself has many dialects; check its javadoc for more info.
+ * </p>
+ */
+public class BeamTextCSVTable extends BeamTextTable {
+ private static final Logger LOG = LoggerFactory
+ .getLogger(BeamTextCSVTable.class);
+
+ private CSVFormat csvFormat;
+
+ /**
+ * CSV table with {@link CSVFormat#DEFAULT DEFAULT} format.
+ */
+ public BeamTextCSVTable(BeamRecordSqlType beamSqlRowType, String filePattern) {
+ this(beamSqlRowType, filePattern, CSVFormat.DEFAULT);
+ }
+
+ public BeamTextCSVTable(BeamRecordSqlType beamSqlRowType, String filePattern,
+ CSVFormat csvFormat) {
+ super(beamSqlRowType, filePattern);
+ this.csvFormat = csvFormat;
+ }
+
+ @Override
+ public PCollection<BeamRecord> buildIOReader(Pipeline pipeline) {
+ return PBegin.in(pipeline).apply("decodeRecord", TextIO.read().from(filePattern))
+ .apply("parseCSVLine",
+ new BeamTextCSVTableIOReader(beamSqlRowType, filePattern, csvFormat));
+ }
+
+ @Override
+ public PTransform<? super PCollection<BeamRecord>, PDone> buildIOWriter() {
+ return new BeamTextCSVTableIOWriter(beamSqlRowType, filePattern, csvFormat);
+ }
+}
http://git-wip-us.apache.org/repos/asf/beam/blob/49aad927/sdks/java/extensions/sql/src/main/java/org/apache/beam/sdk/extensions/sql/impl/schema/text/BeamTextCSVTableIOReader.java
----------------------------------------------------------------------
diff --git a/sdks/java/extensions/sql/src/main/java/org/apache/beam/sdk/extensions/sql/impl/schema/text/BeamTextCSVTableIOReader.java b/sdks/java/extensions/sql/src/main/java/org/apache/beam/sdk/extensions/sql/impl/schema/text/BeamTextCSVTableIOReader.java
new file mode 100644
index 0000000..95f7063
--- /dev/null
+++ b/sdks/java/extensions/sql/src/main/java/org/apache/beam/sdk/extensions/sql/impl/schema/text/BeamTextCSVTableIOReader.java
@@ -0,0 +1,58 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements. See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership. The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License. You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.beam.sdk.extensions.sql.impl.schema.text;
+
+import java.io.Serializable;
+import org.apache.beam.sdk.extensions.sql.BeamRecordSqlType;
+import org.apache.beam.sdk.extensions.sql.impl.schema.BeamTableUtils;
+import org.apache.beam.sdk.transforms.DoFn;
+import org.apache.beam.sdk.transforms.PTransform;
+import org.apache.beam.sdk.transforms.ParDo;
+import org.apache.beam.sdk.values.BeamRecord;
+import org.apache.beam.sdk.values.PCollection;
+import org.apache.commons.csv.CSVFormat;
+
+/**
+ * IOReader for {@code BeamTextCSVTable}.
+ */
+public class BeamTextCSVTableIOReader
+ extends PTransform<PCollection<String>, PCollection<BeamRecord>>
+ implements Serializable {
+ private String filePattern;
+ protected BeamRecordSqlType beamSqlRowType;
+ protected CSVFormat csvFormat;
+
+ public BeamTextCSVTableIOReader(BeamRecordSqlType beamSqlRowType, String filePattern,
+ CSVFormat csvFormat) {
+ this.filePattern = filePattern;
+ this.beamSqlRowType = beamSqlRowType;
+ this.csvFormat = csvFormat;
+ }
+
+ @Override
+ public PCollection<BeamRecord> expand(PCollection<String> input) {
+ return input.apply(ParDo.of(new DoFn<String, BeamRecord>() {
+ @ProcessElement
+ public void processElement(ProcessContext ctx) {
+ String str = ctx.element();
+ ctx.output(BeamTableUtils.csvLine2BeamSqlRow(csvFormat, str, beamSqlRowType));
+ }
+ }));
+ }
+}
http://git-wip-us.apache.org/repos/asf/beam/blob/49aad927/sdks/java/extensions/sql/src/main/java/org/apache/beam/sdk/extensions/sql/impl/schema/text/BeamTextCSVTableIOWriter.java
----------------------------------------------------------------------
diff --git a/sdks/java/extensions/sql/src/main/java/org/apache/beam/sdk/extensions/sql/impl/schema/text/BeamTextCSVTableIOWriter.java b/sdks/java/extensions/sql/src/main/java/org/apache/beam/sdk/extensions/sql/impl/schema/text/BeamTextCSVTableIOWriter.java
new file mode 100644
index 0000000..4660ccb
--- /dev/null
+++ b/sdks/java/extensions/sql/src/main/java/org/apache/beam/sdk/extensions/sql/impl/schema/text/BeamTextCSVTableIOWriter.java
@@ -0,0 +1,58 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements. See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership. The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License. You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.beam.sdk.extensions.sql.impl.schema.text;
+
+import java.io.Serializable;
+import org.apache.beam.sdk.extensions.sql.BeamRecordSqlType;
+import org.apache.beam.sdk.extensions.sql.impl.schema.BeamTableUtils;
+import org.apache.beam.sdk.io.TextIO;
+import org.apache.beam.sdk.transforms.DoFn;
+import org.apache.beam.sdk.transforms.PTransform;
+import org.apache.beam.sdk.transforms.ParDo;
+import org.apache.beam.sdk.values.BeamRecord;
+import org.apache.beam.sdk.values.PCollection;
+import org.apache.beam.sdk.values.PDone;
+import org.apache.commons.csv.CSVFormat;
+
+/**
+ * IOWriter for {@code BeamTextCSVTable}.
+ */
+public class BeamTextCSVTableIOWriter extends PTransform<PCollection<BeamRecord>, PDone>
+ implements Serializable {
+ private String filePattern;
+ protected BeamRecordSqlType beamSqlRowType;
+ protected CSVFormat csvFormat;
+
+ public BeamTextCSVTableIOWriter(BeamRecordSqlType beamSqlRowType, String filePattern,
+ CSVFormat csvFormat) {
+ this.filePattern = filePattern;
+ this.beamSqlRowType = beamSqlRowType;
+ this.csvFormat = csvFormat;
+ }
+
+ @Override public PDone expand(PCollection<BeamRecord> input) {
+ return input.apply("encodeRecord", ParDo.of(new DoFn<BeamRecord, String>() {
+
+ @ProcessElement public void processElement(ProcessContext ctx) {
+ BeamRecord row = ctx.element();
+ ctx.output(BeamTableUtils.beamSqlRow2CsvLine(row, csvFormat));
+ }
+ })).apply(TextIO.write().to(filePattern));
+ }
+}
http://git-wip-us.apache.org/repos/asf/beam/blob/49aad927/sdks/java/extensions/sql/src/main/java/org/apache/beam/sdk/extensions/sql/impl/schema/text/BeamTextTable.java
----------------------------------------------------------------------
diff --git a/sdks/java/extensions/sql/src/main/java/org/apache/beam/sdk/extensions/sql/impl/schema/text/BeamTextTable.java b/sdks/java/extensions/sql/src/main/java/org/apache/beam/sdk/extensions/sql/impl/schema/text/BeamTextTable.java
new file mode 100644
index 0000000..b0d9c11
--- /dev/null
+++ b/sdks/java/extensions/sql/src/main/java/org/apache/beam/sdk/extensions/sql/impl/schema/text/BeamTextTable.java
@@ -0,0 +1,41 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements. See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership. The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License. You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.beam.sdk.extensions.sql.impl.schema.text;
+
+import java.io.Serializable;
+import org.apache.beam.sdk.extensions.sql.BeamRecordSqlType;
+import org.apache.beam.sdk.extensions.sql.impl.schema.BaseBeamTable;
+import org.apache.beam.sdk.extensions.sql.impl.schema.BeamIOType;
+
+/**
+ * {@code BeamTextTable} represents a text file or directory (backed by {@code TextIO}).
+ */
+public abstract class BeamTextTable extends BaseBeamTable implements Serializable {
+ protected String filePattern;
+
+ protected BeamTextTable(BeamRecordSqlType beamSqlRowType, String filePattern) {
+ super(beamSqlRowType);
+ this.filePattern = filePattern;
+ }
+
+ @Override
+ public BeamIOType getSourceType() {
+ return BeamIOType.BOUNDED;
+ }
+}
http://git-wip-us.apache.org/repos/asf/beam/blob/49aad927/sdks/java/extensions/sql/src/main/java/org/apache/beam/sdk/extensions/sql/impl/schema/text/package-info.java
----------------------------------------------------------------------
diff --git a/sdks/java/extensions/sql/src/main/java/org/apache/beam/sdk/extensions/sql/impl/schema/text/package-info.java b/sdks/java/extensions/sql/src/main/java/org/apache/beam/sdk/extensions/sql/impl/schema/text/package-info.java
new file mode 100644
index 0000000..8927dca
--- /dev/null
+++ b/sdks/java/extensions/sql/src/main/java/org/apache/beam/sdk/extensions/sql/impl/schema/text/package-info.java
@@ -0,0 +1,22 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements. See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership. The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License. You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+/**
+ * Table schema for text files.
+ */
+package org.apache.beam.sdk.extensions.sql.impl.schema.text;
http://git-wip-us.apache.org/repos/asf/beam/blob/49aad927/sdks/java/extensions/sql/src/main/java/org/apache/beam/sdk/extensions/sql/impl/transform/BeamAggregationTransforms.java
----------------------------------------------------------------------
diff --git a/sdks/java/extensions/sql/src/main/java/org/apache/beam/sdk/extensions/sql/impl/transform/BeamAggregationTransforms.java b/sdks/java/extensions/sql/src/main/java/org/apache/beam/sdk/extensions/sql/impl/transform/BeamAggregationTransforms.java
index 40b7b58..9a50e21 100644
--- a/sdks/java/extensions/sql/src/main/java/org/apache/beam/sdk/extensions/sql/impl/transform/BeamAggregationTransforms.java
+++ b/sdks/java/extensions/sql/src/main/java/org/apache/beam/sdk/extensions/sql/impl/transform/BeamAggregationTransforms.java
@@ -33,11 +33,11 @@ import org.apache.beam.sdk.coders.CoderException;
import org.apache.beam.sdk.coders.CoderRegistry;
import org.apache.beam.sdk.coders.CustomCoder;
import org.apache.beam.sdk.coders.VarIntCoder;
+import org.apache.beam.sdk.extensions.sql.BeamRecordSqlType;
+import org.apache.beam.sdk.extensions.sql.BeamSqlRecordHelper;
import org.apache.beam.sdk.extensions.sql.impl.interpreter.operator.BeamSqlInputRefExpression;
import org.apache.beam.sdk.extensions.sql.impl.interpreter.operator.UdafImpl;
import org.apache.beam.sdk.extensions.sql.impl.utils.CalciteUtils;
-import org.apache.beam.sdk.extensions.sql.schema.BeamRecordSqlType;
-import org.apache.beam.sdk.extensions.sql.schema.BeamSqlRecordHelper;
import org.apache.beam.sdk.transforms.Combine.CombineFn;
import org.apache.beam.sdk.transforms.Count;
import org.apache.beam.sdk.transforms.DoFn;
http://git-wip-us.apache.org/repos/asf/beam/blob/49aad927/sdks/java/extensions/sql/src/main/java/org/apache/beam/sdk/extensions/sql/impl/transform/BeamJoinTransforms.java
----------------------------------------------------------------------
diff --git a/sdks/java/extensions/sql/src/main/java/org/apache/beam/sdk/extensions/sql/impl/transform/BeamJoinTransforms.java b/sdks/java/extensions/sql/src/main/java/org/apache/beam/sdk/extensions/sql/impl/transform/BeamJoinTransforms.java
index 7a8d10d..3c6b20f 100644
--- a/sdks/java/extensions/sql/src/main/java/org/apache/beam/sdk/extensions/sql/impl/transform/BeamJoinTransforms.java
+++ b/sdks/java/extensions/sql/src/main/java/org/apache/beam/sdk/extensions/sql/impl/transform/BeamJoinTransforms.java
@@ -22,8 +22,8 @@ import java.util.ArrayList;
import java.util.Iterator;
import java.util.List;
import java.util.Map;
-import org.apache.beam.sdk.extensions.sql.schema.BeamRecordSqlType;
-import org.apache.beam.sdk.extensions.sql.schema.BeamSqlRecordHelper;
+import org.apache.beam.sdk.extensions.sql.BeamRecordSqlType;
+import org.apache.beam.sdk.extensions.sql.BeamSqlRecordHelper;
import org.apache.beam.sdk.transforms.DoFn;
import org.apache.beam.sdk.transforms.SimpleFunction;
import org.apache.beam.sdk.values.BeamRecord;
http://git-wip-us.apache.org/repos/asf/beam/blob/49aad927/sdks/java/extensions/sql/src/main/java/org/apache/beam/sdk/extensions/sql/impl/transform/BeamSqlProjectFn.java
----------------------------------------------------------------------
diff --git a/sdks/java/extensions/sql/src/main/java/org/apache/beam/sdk/extensions/sql/impl/transform/BeamSqlProjectFn.java b/sdks/java/extensions/sql/src/main/java/org/apache/beam/sdk/extensions/sql/impl/transform/BeamSqlProjectFn.java
index aac38c7..719fbf3 100644
--- a/sdks/java/extensions/sql/src/main/java/org/apache/beam/sdk/extensions/sql/impl/transform/BeamSqlProjectFn.java
+++ b/sdks/java/extensions/sql/src/main/java/org/apache/beam/sdk/extensions/sql/impl/transform/BeamSqlProjectFn.java
@@ -19,10 +19,10 @@ package org.apache.beam.sdk.extensions.sql.impl.transform;
import java.util.ArrayList;
import java.util.List;
+import org.apache.beam.sdk.extensions.sql.BeamRecordSqlType;
import org.apache.beam.sdk.extensions.sql.impl.interpreter.BeamSqlExpressionExecutor;
import org.apache.beam.sdk.extensions.sql.impl.rel.BeamProjectRel;
-import org.apache.beam.sdk.extensions.sql.schema.BeamRecordSqlType;
-import org.apache.beam.sdk.extensions.sql.schema.BeamTableUtils;
+import org.apache.beam.sdk.extensions.sql.impl.schema.BeamTableUtils;
import org.apache.beam.sdk.transforms.DoFn;
import org.apache.beam.sdk.transforms.windowing.BoundedWindow;
import org.apache.beam.sdk.values.BeamRecord;
http://git-wip-us.apache.org/repos/asf/beam/blob/49aad927/sdks/java/extensions/sql/src/main/java/org/apache/beam/sdk/extensions/sql/impl/utils/CalciteUtils.java
----------------------------------------------------------------------
diff --git a/sdks/java/extensions/sql/src/main/java/org/apache/beam/sdk/extensions/sql/impl/utils/CalciteUtils.java b/sdks/java/extensions/sql/src/main/java/org/apache/beam/sdk/extensions/sql/impl/utils/CalciteUtils.java
index 8b6206b..8c44780 100644
--- a/sdks/java/extensions/sql/src/main/java/org/apache/beam/sdk/extensions/sql/impl/utils/CalciteUtils.java
+++ b/sdks/java/extensions/sql/src/main/java/org/apache/beam/sdk/extensions/sql/impl/utils/CalciteUtils.java
@@ -23,7 +23,7 @@ import java.util.ArrayList;
import java.util.HashMap;
import java.util.List;
import java.util.Map;
-import org.apache.beam.sdk.extensions.sql.schema.BeamRecordSqlType;
+import org.apache.beam.sdk.extensions.sql.BeamRecordSqlType;
import org.apache.calcite.rel.type.RelDataType;
import org.apache.calcite.rel.type.RelDataTypeFactory;
import org.apache.calcite.rel.type.RelDataTypeField;
http://git-wip-us.apache.org/repos/asf/beam/blob/49aad927/sdks/java/extensions/sql/src/main/java/org/apache/beam/sdk/extensions/sql/schema/BaseBeamTable.java
----------------------------------------------------------------------
diff --git a/sdks/java/extensions/sql/src/main/java/org/apache/beam/sdk/extensions/sql/schema/BaseBeamTable.java b/sdks/java/extensions/sql/src/main/java/org/apache/beam/sdk/extensions/sql/schema/BaseBeamTable.java
deleted file mode 100644
index 0564820..0000000
--- a/sdks/java/extensions/sql/src/main/java/org/apache/beam/sdk/extensions/sql/schema/BaseBeamTable.java
+++ /dev/null
@@ -1,34 +0,0 @@
-/*
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements. See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership. The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License. You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-package org.apache.beam.sdk.extensions.sql.schema;
-
-import java.io.Serializable;
-
-/**
- * Each IO in Beam has one table schema, by extending {@link BaseBeamTable}.
- */
-public abstract class BaseBeamTable implements BeamSqlTable, Serializable {
- protected BeamRecordSqlType beamSqlRowType;
- public BaseBeamTable(BeamRecordSqlType beamSqlRowType) {
- this.beamSqlRowType = beamSqlRowType;
- }
-
- @Override public BeamRecordSqlType getRowType() {
- return beamSqlRowType;
- }
-}
http://git-wip-us.apache.org/repos/asf/beam/blob/49aad927/sdks/java/extensions/sql/src/main/java/org/apache/beam/sdk/extensions/sql/schema/BeamIOType.java
----------------------------------------------------------------------
diff --git a/sdks/java/extensions/sql/src/main/java/org/apache/beam/sdk/extensions/sql/schema/BeamIOType.java b/sdks/java/extensions/sql/src/main/java/org/apache/beam/sdk/extensions/sql/schema/BeamIOType.java
deleted file mode 100644
index bda3ca1..0000000
--- a/sdks/java/extensions/sql/src/main/java/org/apache/beam/sdk/extensions/sql/schema/BeamIOType.java
+++ /dev/null
@@ -1,28 +0,0 @@
-/*
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements. See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership. The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License. You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-package org.apache.beam.sdk.extensions.sql.schema;
-
-import java.io.Serializable;
-
-/**
- * Type as a source IO, determined whether it's a STREAMING process, or batch
- * process.
- */
-public enum BeamIOType implements Serializable {
- BOUNDED, UNBOUNDED;
-}
http://git-wip-us.apache.org/repos/asf/beam/blob/49aad927/sdks/java/extensions/sql/src/main/java/org/apache/beam/sdk/extensions/sql/schema/BeamPCollectionTable.java
----------------------------------------------------------------------
diff --git a/sdks/java/extensions/sql/src/main/java/org/apache/beam/sdk/extensions/sql/schema/BeamPCollectionTable.java b/sdks/java/extensions/sql/src/main/java/org/apache/beam/sdk/extensions/sql/schema/BeamPCollectionTable.java
deleted file mode 100644
index 9d9988e..0000000
--- a/sdks/java/extensions/sql/src/main/java/org/apache/beam/sdk/extensions/sql/schema/BeamPCollectionTable.java
+++ /dev/null
@@ -1,62 +0,0 @@
-/*
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements. See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership. The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License. You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-package org.apache.beam.sdk.extensions.sql.schema;
-
-import org.apache.beam.sdk.Pipeline;
-import org.apache.beam.sdk.transforms.PTransform;
-import org.apache.beam.sdk.values.BeamRecord;
-import org.apache.beam.sdk.values.PCollection;
-import org.apache.beam.sdk.values.PCollection.IsBounded;
-import org.apache.beam.sdk.values.PDone;
-
-/**
- * {@code BeamPCollectionTable} converts a {@code PCollection<BeamSqlRow>} as a virtual table,
- * then a downstream query can query directly.
- */
-public class BeamPCollectionTable extends BaseBeamTable {
- private BeamIOType ioType;
- private transient PCollection<BeamRecord> upstream;
-
- protected BeamPCollectionTable(BeamRecordSqlType beamSqlRowType) {
- super(beamSqlRowType);
- }
-
- public BeamPCollectionTable(PCollection<BeamRecord> upstream,
- BeamRecordSqlType beamSqlRowType){
- this(beamSqlRowType);
- ioType = upstream.isBounded().equals(IsBounded.BOUNDED)
- ? BeamIOType.BOUNDED : BeamIOType.UNBOUNDED;
- this.upstream = upstream;
- }
-
- @Override
- public BeamIOType getSourceType() {
- return ioType;
- }
-
- @Override
- public PCollection<BeamRecord> buildIOReader(Pipeline pipeline) {
- return upstream;
- }
-
- @Override
- public PTransform<? super PCollection<BeamRecord>, PDone> buildIOWriter() {
- throw new IllegalArgumentException("cannot use [BeamPCollectionTable] as target");
- }
-
-}
http://git-wip-us.apache.org/repos/asf/beam/blob/49aad927/sdks/java/extensions/sql/src/main/java/org/apache/beam/sdk/extensions/sql/schema/BeamRecordSqlType.java
----------------------------------------------------------------------
diff --git a/sdks/java/extensions/sql/src/main/java/org/apache/beam/sdk/extensions/sql/schema/BeamRecordSqlType.java b/sdks/java/extensions/sql/src/main/java/org/apache/beam/sdk/extensions/sql/schema/BeamRecordSqlType.java
deleted file mode 100644
index 1845988..0000000
--- a/sdks/java/extensions/sql/src/main/java/org/apache/beam/sdk/extensions/sql/schema/BeamRecordSqlType.java
+++ /dev/null
@@ -1,185 +0,0 @@
-/*
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements. See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership. The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License. You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-package org.apache.beam.sdk.extensions.sql.schema;
-
-import java.math.BigDecimal;
-import java.sql.Types;
-import java.util.ArrayList;
-import java.util.Date;
-import java.util.GregorianCalendar;
-import java.util.HashMap;
-import java.util.List;
-import java.util.Map;
-import org.apache.beam.sdk.coders.BigDecimalCoder;
-import org.apache.beam.sdk.coders.BigEndianIntegerCoder;
-import org.apache.beam.sdk.coders.BigEndianLongCoder;
-import org.apache.beam.sdk.coders.ByteCoder;
-import org.apache.beam.sdk.coders.Coder;
-import org.apache.beam.sdk.coders.StringUtf8Coder;
-import org.apache.beam.sdk.extensions.sql.schema.BeamSqlRecordHelper.BooleanCoder;
-import org.apache.beam.sdk.extensions.sql.schema.BeamSqlRecordHelper.DateCoder;
-import org.apache.beam.sdk.extensions.sql.schema.BeamSqlRecordHelper.DoubleCoder;
-import org.apache.beam.sdk.extensions.sql.schema.BeamSqlRecordHelper.FloatCoder;
-import org.apache.beam.sdk.extensions.sql.schema.BeamSqlRecordHelper.ShortCoder;
-import org.apache.beam.sdk.extensions.sql.schema.BeamSqlRecordHelper.TimeCoder;
-import org.apache.beam.sdk.values.BeamRecord;
-import org.apache.beam.sdk.values.BeamRecordType;
-
-/**
- * Type provider for {@link BeamRecord} with SQL types.
- *
- * <p>Limited SQL types are supported now, visit
- * <a href="https://beam.apache.org/blog/2017/07/21/sql-dsl.html#data-type">data types</a>
- * for more details.
- *
- */
-public class BeamRecordSqlType extends BeamRecordType {
- private static final Map<Integer, Class> SQL_TYPE_TO_JAVA_CLASS = new HashMap<>();
- static {
- SQL_TYPE_TO_JAVA_CLASS.put(Types.TINYINT, Byte.class);
- SQL_TYPE_TO_JAVA_CLASS.put(Types.SMALLINT, Short.class);
- SQL_TYPE_TO_JAVA_CLASS.put(Types.INTEGER, Integer.class);
- SQL_TYPE_TO_JAVA_CLASS.put(Types.BIGINT, Long.class);
- SQL_TYPE_TO_JAVA_CLASS.put(Types.FLOAT, Float.class);
- SQL_TYPE_TO_JAVA_CLASS.put(Types.DOUBLE, Double.class);
- SQL_TYPE_TO_JAVA_CLASS.put(Types.DECIMAL, BigDecimal.class);
-
- SQL_TYPE_TO_JAVA_CLASS.put(Types.BOOLEAN, Boolean.class);
-
- SQL_TYPE_TO_JAVA_CLASS.put(Types.CHAR, String.class);
- SQL_TYPE_TO_JAVA_CLASS.put(Types.VARCHAR, String.class);
-
- SQL_TYPE_TO_JAVA_CLASS.put(Types.TIME, GregorianCalendar.class);
-
- SQL_TYPE_TO_JAVA_CLASS.put(Types.DATE, Date.class);
- SQL_TYPE_TO_JAVA_CLASS.put(Types.TIMESTAMP, Date.class);
- }
-
- public List<Integer> fieldTypes;
-
- protected BeamRecordSqlType(List<String> fieldsName, List<Coder> fieldsCoder) {
- super(fieldsName, fieldsCoder);
- }
-
- private BeamRecordSqlType(List<String> fieldsName, List<Integer> fieldTypes
- , List<Coder> fieldsCoder) {
- super(fieldsName, fieldsCoder);
- this.fieldTypes = fieldTypes;
- }
-
- public static BeamRecordSqlType create(List<String> fieldNames,
- List<Integer> fieldTypes) {
- if (fieldNames.size() != fieldTypes.size()) {
- throw new IllegalStateException("the sizes of 'dataType' and 'fieldTypes' must match.");
- }
- List<Coder> fieldCoders = new ArrayList<>(fieldTypes.size());
- for (int idx = 0; idx < fieldTypes.size(); ++idx) {
- switch (fieldTypes.get(idx)) {
- case Types.INTEGER:
- fieldCoders.add(BigEndianIntegerCoder.of());
- break;
- case Types.SMALLINT:
- fieldCoders.add(ShortCoder.of());
- break;
- case Types.TINYINT:
- fieldCoders.add(ByteCoder.of());
- break;
- case Types.DOUBLE:
- fieldCoders.add(DoubleCoder.of());
- break;
- case Types.FLOAT:
- fieldCoders.add(FloatCoder.of());
- break;
- case Types.DECIMAL:
- fieldCoders.add(BigDecimalCoder.of());
- break;
- case Types.BIGINT:
- fieldCoders.add(BigEndianLongCoder.of());
- break;
- case Types.VARCHAR:
- case Types.CHAR:
- fieldCoders.add(StringUtf8Coder.of());
- break;
- case Types.TIME:
- fieldCoders.add(TimeCoder.of());
- break;
- case Types.DATE:
- case Types.TIMESTAMP:
- fieldCoders.add(DateCoder.of());
- break;
- case Types.BOOLEAN:
- fieldCoders.add(BooleanCoder.of());
- break;
-
- default:
- throw new UnsupportedOperationException(
- "Data type: " + fieldTypes.get(idx) + " not supported yet!");
- }
- }
- return new BeamRecordSqlType(fieldNames, fieldTypes, fieldCoders);
- }
-
- @Override
- public void validateValueType(int index, Object fieldValue) throws IllegalArgumentException {
- if (null == fieldValue) {// no need to do type check for NULL value
- return;
- }
-
- int fieldType = fieldTypes.get(index);
- Class javaClazz = SQL_TYPE_TO_JAVA_CLASS.get(fieldType);
- if (javaClazz == null) {
- throw new IllegalArgumentException("Data type: " + fieldType + " not supported yet!");
- }
-
- if (!fieldValue.getClass().equals(javaClazz)) {
- throw new IllegalArgumentException(
- String.format("[%s](%s) doesn't match type [%s]",
- fieldValue, fieldValue.getClass(), fieldType)
- );
- }
- }
-
- public List<Integer> getFieldTypes() {
- return fieldTypes;
- }
-
- public Integer getFieldTypeByIndex(int index){
- return fieldTypes.get(index);
- }
-
- @Override
- public boolean equals(Object obj) {
- if (obj != null && obj instanceof BeamRecordSqlType) {
- BeamRecordSqlType ins = (BeamRecordSqlType) obj;
- return fieldTypes.equals(ins.getFieldTypes()) && getFieldNames().equals(ins.getFieldNames());
- } else {
- return false;
- }
- }
-
- @Override
- public int hashCode() {
- return 31 * getFieldNames().hashCode() + getFieldTypes().hashCode();
- }
-
- @Override
- public String toString() {
- return "BeamRecordSqlType [fieldNames=" + getFieldNames()
- + ", fieldTypes=" + fieldTypes + "]";
- }
-}
http://git-wip-us.apache.org/repos/asf/beam/blob/49aad927/sdks/java/extensions/sql/src/main/java/org/apache/beam/sdk/extensions/sql/schema/BeamSqlRecordHelper.java
----------------------------------------------------------------------
diff --git a/sdks/java/extensions/sql/src/main/java/org/apache/beam/sdk/extensions/sql/schema/BeamSqlRecordHelper.java b/sdks/java/extensions/sql/src/main/java/org/apache/beam/sdk/extensions/sql/schema/BeamSqlRecordHelper.java
deleted file mode 100644
index 89eefd1..0000000
--- a/sdks/java/extensions/sql/src/main/java/org/apache/beam/sdk/extensions/sql/schema/BeamSqlRecordHelper.java
+++ /dev/null
@@ -1,217 +0,0 @@
-/*
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements. See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership. The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License. You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-package org.apache.beam.sdk.extensions.sql.schema;
-
-import java.io.DataInputStream;
-import java.io.DataOutputStream;
-import java.io.IOException;
-import java.io.InputStream;
-import java.io.OutputStream;
-import java.math.BigDecimal;
-import java.util.Date;
-import java.util.GregorianCalendar;
-import org.apache.beam.sdk.annotations.Experimental;
-import org.apache.beam.sdk.coders.BigDecimalCoder;
-import org.apache.beam.sdk.coders.BigEndianLongCoder;
-import org.apache.beam.sdk.coders.Coder;
-import org.apache.beam.sdk.coders.CoderException;
-import org.apache.beam.sdk.coders.CustomCoder;
-import org.apache.beam.sdk.values.BeamRecord;
-
-/**
- * A {@link Coder} encodes {@link BeamRecord}.
- */
-@Experimental
-public class BeamSqlRecordHelper {
-
- public static BeamRecordSqlType getSqlRecordType(BeamRecord record) {
- return (BeamRecordSqlType) record.getDataType();
- }
-
- /**
- * {@link Coder} for Java type {@link Short}.
- */
- public static class ShortCoder extends CustomCoder<Short> {
- private static final ShortCoder INSTANCE = new ShortCoder();
-
- public static ShortCoder of() {
- return INSTANCE;
- }
-
- private ShortCoder() {
- }
-
- @Override
- public void encode(Short value, OutputStream outStream) throws CoderException, IOException {
- new DataOutputStream(outStream).writeShort(value);
- }
-
- @Override
- public Short decode(InputStream inStream) throws CoderException, IOException {
- return new DataInputStream(inStream).readShort();
- }
-
- @Override
- public void verifyDeterministic() throws NonDeterministicException {
- }
- }
- /**
- * {@link Coder} for Java type {@link Float}, it's stored as {@link BigDecimal}.
- */
- public static class FloatCoder extends CustomCoder<Float> {
- private static final FloatCoder INSTANCE = new FloatCoder();
- private static final BigDecimalCoder CODER = BigDecimalCoder.of();
-
- public static FloatCoder of() {
- return INSTANCE;
- }
-
- private FloatCoder() {
- }
-
- @Override
- public void encode(Float value, OutputStream outStream) throws CoderException, IOException {
- CODER.encode(new BigDecimal(value), outStream);
- }
-
- @Override
- public Float decode(InputStream inStream) throws CoderException, IOException {
- return CODER.decode(inStream).floatValue();
- }
-
- @Override
- public void verifyDeterministic() throws NonDeterministicException {
- }
- }
- /**
- * {@link Coder} for Java type {@link Double}, it's stored as {@link BigDecimal}.
- */
- public static class DoubleCoder extends CustomCoder<Double> {
- private static final DoubleCoder INSTANCE = new DoubleCoder();
- private static final BigDecimalCoder CODER = BigDecimalCoder.of();
-
- public static DoubleCoder of() {
- return INSTANCE;
- }
-
- private DoubleCoder() {
- }
-
- @Override
- public void encode(Double value, OutputStream outStream) throws CoderException, IOException {
- CODER.encode(new BigDecimal(value), outStream);
- }
-
- @Override
- public Double decode(InputStream inStream) throws CoderException, IOException {
- return CODER.decode(inStream).doubleValue();
- }
-
- @Override
- public void verifyDeterministic() throws NonDeterministicException {
- }
- }
-
- /**
- * {@link Coder} for Java type {@link GregorianCalendar}, it's stored as {@link Long}.
- */
- public static class TimeCoder extends CustomCoder<GregorianCalendar> {
- private static final BigEndianLongCoder longCoder = BigEndianLongCoder.of();
- private static final TimeCoder INSTANCE = new TimeCoder();
-
- public static TimeCoder of() {
- return INSTANCE;
- }
-
- private TimeCoder() {
- }
-
- @Override
- public void encode(GregorianCalendar value, OutputStream outStream)
- throws CoderException, IOException {
- longCoder.encode(value.getTime().getTime(), outStream);
- }
-
- @Override
- public GregorianCalendar decode(InputStream inStream) throws CoderException, IOException {
- GregorianCalendar calendar = new GregorianCalendar();
- calendar.setTime(new Date(longCoder.decode(inStream)));
- return calendar;
- }
-
- @Override
- public void verifyDeterministic() throws NonDeterministicException {
- }
- }
- /**
- * {@link Coder} for Java type {@link Date}, it's stored as {@link Long}.
- */
- public static class DateCoder extends CustomCoder<Date> {
- private static final BigEndianLongCoder longCoder = BigEndianLongCoder.of();
- private static final DateCoder INSTANCE = new DateCoder();
-
- public static DateCoder of() {
- return INSTANCE;
- }
-
- private DateCoder() {
- }
-
- @Override
- public void encode(Date value, OutputStream outStream) throws CoderException, IOException {
- longCoder.encode(value.getTime(), outStream);
- }
-
- @Override
- public Date decode(InputStream inStream) throws CoderException, IOException {
- return new Date(longCoder.decode(inStream));
- }
-
- @Override
- public void verifyDeterministic() throws NonDeterministicException {
- }
- }
-
- /**
- * {@link Coder} for Java type {@link Boolean}.
- */
- public static class BooleanCoder extends CustomCoder<Boolean> {
- private static final BooleanCoder INSTANCE = new BooleanCoder();
-
- public static BooleanCoder of() {
- return INSTANCE;
- }
-
- private BooleanCoder() {
- }
-
- @Override
- public void encode(Boolean value, OutputStream outStream) throws CoderException, IOException {
- new DataOutputStream(outStream).writeBoolean(value);
- }
-
- @Override
- public Boolean decode(InputStream inStream) throws CoderException, IOException {
- return new DataInputStream(inStream).readBoolean();
- }
-
- @Override
- public void verifyDeterministic() throws NonDeterministicException {
- }
- }
-}
http://git-wip-us.apache.org/repos/asf/beam/blob/49aad927/sdks/java/extensions/sql/src/main/java/org/apache/beam/sdk/extensions/sql/schema/BeamSqlTable.java
----------------------------------------------------------------------
diff --git a/sdks/java/extensions/sql/src/main/java/org/apache/beam/sdk/extensions/sql/schema/BeamSqlTable.java b/sdks/java/extensions/sql/src/main/java/org/apache/beam/sdk/extensions/sql/schema/BeamSqlTable.java
deleted file mode 100644
index 828ac43..0000000
--- a/sdks/java/extensions/sql/src/main/java/org/apache/beam/sdk/extensions/sql/schema/BeamSqlTable.java
+++ /dev/null
@@ -1,53 +0,0 @@
-/*
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements. See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership. The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License. You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-package org.apache.beam.sdk.extensions.sql.schema;
-
-import org.apache.beam.sdk.Pipeline;
-import org.apache.beam.sdk.transforms.PTransform;
-import org.apache.beam.sdk.values.BeamRecord;
-import org.apache.beam.sdk.values.PCollection;
-import org.apache.beam.sdk.values.PDone;
-
-/**
- * This interface defines a Beam Sql Table.
- */
-public interface BeamSqlTable {
- /**
- * In Beam SQL, there's no difference between a batch query and a streaming
- * query. {@link BeamIOType} is used to validate the sources.
- */
- BeamIOType getSourceType();
-
- /**
- * create a {@code PCollection<BeamSqlRow>} from source.
- *
- */
- PCollection<BeamRecord> buildIOReader(Pipeline pipeline);
-
- /**
- * create a {@code IO.write()} instance to write to target.
- *
- */
- PTransform<? super PCollection<BeamRecord>, PDone> buildIOWriter();
-
- /**
- * Get the schema info of the table.
- */
- BeamRecordSqlType getRowType();
-}
http://git-wip-us.apache.org/repos/asf/beam/blob/49aad927/sdks/java/extensions/sql/src/main/java/org/apache/beam/sdk/extensions/sql/schema/BeamSqlUdf.java
----------------------------------------------------------------------
diff --git a/sdks/java/extensions/sql/src/main/java/org/apache/beam/sdk/extensions/sql/schema/BeamSqlUdf.java b/sdks/java/extensions/sql/src/main/java/org/apache/beam/sdk/extensions/sql/schema/BeamSqlUdf.java
deleted file mode 100644
index 191b78e..0000000
--- a/sdks/java/extensions/sql/src/main/java/org/apache/beam/sdk/extensions/sql/schema/BeamSqlUdf.java
+++ /dev/null
@@ -1,41 +0,0 @@
-/*
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements. See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership. The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License. You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-package org.apache.beam.sdk.extensions.sql.schema;
-
-import java.io.Serializable;
-
-/**
- * Interface to create a UDF in Beam SQL.
- *
- * <p>A static method {@code eval} is required. Here is an example:
- *
- * <blockquote><pre>
- * public static class MyLeftFunction {
- * public String eval(
- * @Parameter(name = "s") String s,
- * @Parameter(name = "n", optional = true) Integer n) {
- * return s.substring(0, n == null ? 1 : n);
- * }
- * }</pre></blockquote>
- *
- * <p>The first parameter is named "s" and is mandatory,
- * and the second parameter is named "n" and is optional.
- */
-public interface BeamSqlUdf extends Serializable {
- String UDF_METHOD = "eval";
-}
http://git-wip-us.apache.org/repos/asf/beam/blob/49aad927/sdks/java/extensions/sql/src/main/java/org/apache/beam/sdk/extensions/sql/schema/BeamTableUtils.java
----------------------------------------------------------------------
diff --git a/sdks/java/extensions/sql/src/main/java/org/apache/beam/sdk/extensions/sql/schema/BeamTableUtils.java b/sdks/java/extensions/sql/src/main/java/org/apache/beam/sdk/extensions/sql/schema/BeamTableUtils.java
deleted file mode 100644
index 687a082..0000000
--- a/sdks/java/extensions/sql/src/main/java/org/apache/beam/sdk/extensions/sql/schema/BeamTableUtils.java
+++ /dev/null
@@ -1,117 +0,0 @@
-/*
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements. See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership. The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License. You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-package org.apache.beam.sdk.extensions.sql.schema;
-
-import java.io.IOException;
-import java.io.StringReader;
-import java.io.StringWriter;
-import java.math.BigDecimal;
-import java.util.ArrayList;
-import java.util.List;
-import org.apache.beam.sdk.extensions.sql.impl.utils.CalciteUtils;
-import org.apache.beam.sdk.values.BeamRecord;
-import org.apache.calcite.sql.type.SqlTypeName;
-import org.apache.calcite.util.NlsString;
-import org.apache.commons.csv.CSVFormat;
-import org.apache.commons.csv.CSVParser;
-import org.apache.commons.csv.CSVPrinter;
-import org.apache.commons.csv.CSVRecord;
-
-/**
- * Utility methods for working with {@code BeamTable}.
- */
-public final class BeamTableUtils {
- public static BeamRecord csvLine2BeamSqlRow(
- CSVFormat csvFormat,
- String line,
- BeamRecordSqlType beamRecordSqlType) {
- List<Object> fieldsValue = new ArrayList<>(beamRecordSqlType.getFieldCount());
- try (StringReader reader = new StringReader(line)) {
- CSVParser parser = csvFormat.parse(reader);
- CSVRecord rawRecord = parser.getRecords().get(0);
-
- if (rawRecord.size() != beamRecordSqlType.getFieldCount()) {
- throw new IllegalArgumentException(String.format(
- "Expect %d fields, but actually %d",
- beamRecordSqlType.getFieldCount(), rawRecord.size()
- ));
- } else {
- for (int idx = 0; idx < beamRecordSqlType.getFieldCount(); idx++) {
- String raw = rawRecord.get(idx);
- fieldsValue.add(autoCastField(beamRecordSqlType.getFieldTypeByIndex(idx), raw));
- }
- }
- } catch (IOException e) {
- throw new IllegalArgumentException("decodeRecord failed!", e);
- }
- return new BeamRecord(beamRecordSqlType, fieldsValue);
- }
-
- public static String beamSqlRow2CsvLine(BeamRecord row, CSVFormat csvFormat) {
- StringWriter writer = new StringWriter();
- try (CSVPrinter printer = csvFormat.print(writer)) {
- for (int i = 0; i < row.getFieldCount(); i++) {
- printer.print(row.getFieldValue(i).toString());
- }
- printer.println();
- } catch (IOException e) {
- throw new IllegalArgumentException("encodeRecord failed!", e);
- }
- return writer.toString();
- }
-
- public static Object autoCastField(int fieldType, Object rawObj) {
- if (rawObj == null) {
- return null;
- }
-
- SqlTypeName columnType = CalciteUtils.toCalciteType(fieldType);
- // auto-casting for numberics
- if ((rawObj instanceof String && SqlTypeName.NUMERIC_TYPES.contains(columnType))
- || (rawObj instanceof BigDecimal && columnType != SqlTypeName.DECIMAL)) {
- String raw = rawObj.toString();
- switch (columnType) {
- case TINYINT:
- return Byte.valueOf(raw);
- case SMALLINT:
- return Short.valueOf(raw);
- case INTEGER:
- return Integer.valueOf(raw);
- case BIGINT:
- return Long.valueOf(raw);
- case FLOAT:
- return Float.valueOf(raw);
- case DOUBLE:
- return Double.valueOf(raw);
- default:
- throw new UnsupportedOperationException(
- String.format("Column type %s is not supported yet!", columnType));
- }
- } else if (SqlTypeName.CHAR_TYPES.contains(columnType)) {
- // convert NlsString to String
- if (rawObj instanceof NlsString) {
- return ((NlsString) rawObj).getValue();
- } else {
- return rawObj;
- }
- } else {
- return rawObj;
- }
- }
-}
http://git-wip-us.apache.org/repos/asf/beam/blob/49aad927/sdks/java/extensions/sql/src/main/java/org/apache/beam/sdk/extensions/sql/schema/kafka/BeamKafkaCSVTable.java
----------------------------------------------------------------------
diff --git a/sdks/java/extensions/sql/src/main/java/org/apache/beam/sdk/extensions/sql/schema/kafka/BeamKafkaCSVTable.java b/sdks/java/extensions/sql/src/main/java/org/apache/beam/sdk/extensions/sql/schema/kafka/BeamKafkaCSVTable.java
deleted file mode 100644
index 8c7e6f0..0000000
--- a/sdks/java/extensions/sql/src/main/java/org/apache/beam/sdk/extensions/sql/schema/kafka/BeamKafkaCSVTable.java
+++ /dev/null
@@ -1,109 +0,0 @@
-/*
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements. See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership. The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License. You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-package org.apache.beam.sdk.extensions.sql.schema.kafka;
-
-import java.util.List;
-import org.apache.beam.sdk.extensions.sql.schema.BeamRecordSqlType;
-import org.apache.beam.sdk.extensions.sql.schema.BeamTableUtils;
-import org.apache.beam.sdk.transforms.DoFn;
-import org.apache.beam.sdk.transforms.PTransform;
-import org.apache.beam.sdk.transforms.ParDo;
-import org.apache.beam.sdk.values.BeamRecord;
-import org.apache.beam.sdk.values.KV;
-import org.apache.beam.sdk.values.PCollection;
-import org.apache.commons.csv.CSVFormat;
-
-/**
- * A Kafka topic that saves records as CSV format.
- *
- */
-public class BeamKafkaCSVTable extends BeamKafkaTable {
- private CSVFormat csvFormat;
- public BeamKafkaCSVTable(BeamRecordSqlType beamSqlRowType, String bootstrapServers,
- List<String> topics) {
- this(beamSqlRowType, bootstrapServers, topics, CSVFormat.DEFAULT);
- }
-
- public BeamKafkaCSVTable(BeamRecordSqlType beamSqlRowType, String bootstrapServers,
- List<String> topics, CSVFormat format) {
- super(beamSqlRowType, bootstrapServers, topics);
- this.csvFormat = format;
- }
-
- @Override
- public PTransform<PCollection<KV<byte[], byte[]>>, PCollection<BeamRecord>>
- getPTransformForInput() {
- return new CsvRecorderDecoder(beamSqlRowType, csvFormat);
- }
-
- @Override
- public PTransform<PCollection<BeamRecord>, PCollection<KV<byte[], byte[]>>>
- getPTransformForOutput() {
- return new CsvRecorderEncoder(beamSqlRowType, csvFormat);
- }
-
- /**
- * A PTransform to convert {@code KV<byte[], byte[]>} to {@link BeamRecord}.
- *
- */
- public static class CsvRecorderDecoder
- extends PTransform<PCollection<KV<byte[], byte[]>>, PCollection<BeamRecord>> {
- private BeamRecordSqlType rowType;
- private CSVFormat format;
- public CsvRecorderDecoder(BeamRecordSqlType rowType, CSVFormat format) {
- this.rowType = rowType;
- this.format = format;
- }
-
- @Override
- public PCollection<BeamRecord> expand(PCollection<KV<byte[], byte[]>> input) {
- return input.apply("decodeRecord", ParDo.of(new DoFn<KV<byte[], byte[]>, BeamRecord>() {
- @ProcessElement
- public void processElement(ProcessContext c) {
- String rowInString = new String(c.element().getValue());
- c.output(BeamTableUtils.csvLine2BeamSqlRow(format, rowInString, rowType));
- }
- }));
- }
- }
-
- /**
- * A PTransform to convert {@link BeamRecord} to {@code KV<byte[], byte[]>}.
- *
- */
- public static class CsvRecorderEncoder
- extends PTransform<PCollection<BeamRecord>, PCollection<KV<byte[], byte[]>>> {
- private BeamRecordSqlType rowType;
- private CSVFormat format;
- public CsvRecorderEncoder(BeamRecordSqlType rowType, CSVFormat format) {
- this.rowType = rowType;
- this.format = format;
- }
-
- @Override
- public PCollection<KV<byte[], byte[]>> expand(PCollection<BeamRecord> input) {
- return input.apply("encodeRecord", ParDo.of(new DoFn<BeamRecord, KV<byte[], byte[]>>() {
- @ProcessElement
- public void processElement(ProcessContext c) {
- BeamRecord in = c.element();
- c.output(KV.of(new byte[] {}, BeamTableUtils.beamSqlRow2CsvLine(in, format).getBytes()));
- }
- }));
- }
- }
-}
http://git-wip-us.apache.org/repos/asf/beam/blob/49aad927/sdks/java/extensions/sql/src/main/java/org/apache/beam/sdk/extensions/sql/schema/kafka/BeamKafkaTable.java
----------------------------------------------------------------------
diff --git a/sdks/java/extensions/sql/src/main/java/org/apache/beam/sdk/extensions/sql/schema/kafka/BeamKafkaTable.java b/sdks/java/extensions/sql/src/main/java/org/apache/beam/sdk/extensions/sql/schema/kafka/BeamKafkaTable.java
deleted file mode 100644
index 1d57839..0000000
--- a/sdks/java/extensions/sql/src/main/java/org/apache/beam/sdk/extensions/sql/schema/kafka/BeamKafkaTable.java
+++ /dev/null
@@ -1,109 +0,0 @@
-/*
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements. See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership. The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License. You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-package org.apache.beam.sdk.extensions.sql.schema.kafka;
-
-import static com.google.common.base.Preconditions.checkArgument;
-
-import java.io.Serializable;
-import java.util.List;
-import java.util.Map;
-import org.apache.beam.sdk.Pipeline;
-import org.apache.beam.sdk.coders.ByteArrayCoder;
-import org.apache.beam.sdk.extensions.sql.schema.BaseBeamTable;
-import org.apache.beam.sdk.extensions.sql.schema.BeamIOType;
-import org.apache.beam.sdk.extensions.sql.schema.BeamRecordSqlType;
-import org.apache.beam.sdk.io.kafka.KafkaIO;
-import org.apache.beam.sdk.transforms.PTransform;
-import org.apache.beam.sdk.values.BeamRecord;
-import org.apache.beam.sdk.values.KV;
-import org.apache.beam.sdk.values.PBegin;
-import org.apache.beam.sdk.values.PCollection;
-import org.apache.beam.sdk.values.PDone;
-import org.apache.kafka.common.serialization.ByteArrayDeserializer;
-import org.apache.kafka.common.serialization.ByteArraySerializer;
-
-/**
- * {@code BeamKafkaTable} represent a Kafka topic, as source or target. Need to
- * extend to convert between {@code BeamSqlRow} and {@code KV<byte[], byte[]>}.
- *
- */
-public abstract class BeamKafkaTable extends BaseBeamTable implements Serializable {
-
- private String bootstrapServers;
- private List<String> topics;
- private Map<String, Object> configUpdates;
-
- protected BeamKafkaTable(BeamRecordSqlType beamSqlRowType) {
- super(beamSqlRowType);
- }
-
- public BeamKafkaTable(BeamRecordSqlType beamSqlRowType, String bootstrapServers,
- List<String> topics) {
- super(beamSqlRowType);
- this.bootstrapServers = bootstrapServers;
- this.topics = topics;
- }
-
- public BeamKafkaTable updateConsumerProperties(Map<String, Object> configUpdates) {
- this.configUpdates = configUpdates;
- return this;
- }
-
- @Override
- public BeamIOType getSourceType() {
- return BeamIOType.UNBOUNDED;
- }
-
- public abstract PTransform<PCollection<KV<byte[], byte[]>>, PCollection<BeamRecord>>
- getPTransformForInput();
-
- public abstract PTransform<PCollection<BeamRecord>, PCollection<KV<byte[], byte[]>>>
- getPTransformForOutput();
-
- @Override
- public PCollection<BeamRecord> buildIOReader(Pipeline pipeline) {
- return PBegin.in(pipeline).apply("read",
- KafkaIO.<byte[], byte[]>read()
- .withBootstrapServers(bootstrapServers)
- .withTopics(topics)
- .updateConsumerProperties(configUpdates)
- .withKeyDeserializerAndCoder(ByteArrayDeserializer.class, ByteArrayCoder.of())
- .withValueDeserializerAndCoder(ByteArrayDeserializer.class, ByteArrayCoder.of())
- .withoutMetadata())
- .apply("in_format", getPTransformForInput());
- }
-
- @Override
- public PTransform<? super PCollection<BeamRecord>, PDone> buildIOWriter() {
- checkArgument(topics != null && topics.size() == 1,
- "Only one topic can be acceptable as output.");
-
- return new PTransform<PCollection<BeamRecord>, PDone>() {
- @Override
- public PDone expand(PCollection<BeamRecord> input) {
- return input.apply("out_reformat", getPTransformForOutput()).apply("persistent",
- KafkaIO.<byte[], byte[]>write()
- .withBootstrapServers(bootstrapServers)
- .withTopic(topics.get(0))
- .withKeySerializer(ByteArraySerializer.class)
- .withValueSerializer(ByteArraySerializer.class));
- }
- };
- }
-
-}
http://git-wip-us.apache.org/repos/asf/beam/blob/49aad927/sdks/java/extensions/sql/src/main/java/org/apache/beam/sdk/extensions/sql/schema/kafka/package-info.java
----------------------------------------------------------------------
diff --git a/sdks/java/extensions/sql/src/main/java/org/apache/beam/sdk/extensions/sql/schema/kafka/package-info.java b/sdks/java/extensions/sql/src/main/java/org/apache/beam/sdk/extensions/sql/schema/kafka/package-info.java
deleted file mode 100644
index f0ddeb6..0000000
--- a/sdks/java/extensions/sql/src/main/java/org/apache/beam/sdk/extensions/sql/schema/kafka/package-info.java
+++ /dev/null
@@ -1,22 +0,0 @@
-/*
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements. See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership. The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License. You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-/**
- * Table schema for KafkaIO.
- */
-package org.apache.beam.sdk.extensions.sql.schema.kafka;
http://git-wip-us.apache.org/repos/asf/beam/blob/49aad927/sdks/java/extensions/sql/src/main/java/org/apache/beam/sdk/extensions/sql/schema/package-info.java
----------------------------------------------------------------------
diff --git a/sdks/java/extensions/sql/src/main/java/org/apache/beam/sdk/extensions/sql/schema/package-info.java b/sdks/java/extensions/sql/src/main/java/org/apache/beam/sdk/extensions/sql/schema/package-info.java
deleted file mode 100644
index 9655ebd..0000000
--- a/sdks/java/extensions/sql/src/main/java/org/apache/beam/sdk/extensions/sql/schema/package-info.java
+++ /dev/null
@@ -1,22 +0,0 @@
-/*
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements. See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership. The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License. You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-/**
- * Defines table schemas that map to Beam IO components.
- *
- */
-package org.apache.beam.sdk.extensions.sql.schema;
http://git-wip-us.apache.org/repos/asf/beam/blob/49aad927/sdks/java/extensions/sql/src/main/java/org/apache/beam/sdk/extensions/sql/schema/text/BeamTextCSVTable.java
----------------------------------------------------------------------
diff --git a/sdks/java/extensions/sql/src/main/java/org/apache/beam/sdk/extensions/sql/schema/text/BeamTextCSVTable.java b/sdks/java/extensions/sql/src/main/java/org/apache/beam/sdk/extensions/sql/schema/text/BeamTextCSVTable.java
deleted file mode 100644
index 79e56e6..0000000
--- a/sdks/java/extensions/sql/src/main/java/org/apache/beam/sdk/extensions/sql/schema/text/BeamTextCSVTable.java
+++ /dev/null
@@ -1,70 +0,0 @@
-/*
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements. See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership. The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License. You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-package org.apache.beam.sdk.extensions.sql.schema.text;
-
-import org.apache.beam.sdk.Pipeline;
-import org.apache.beam.sdk.extensions.sql.schema.BeamRecordSqlType;
-import org.apache.beam.sdk.io.TextIO;
-import org.apache.beam.sdk.transforms.PTransform;
-import org.apache.beam.sdk.values.BeamRecord;
-import org.apache.beam.sdk.values.PBegin;
-import org.apache.beam.sdk.values.PCollection;
-import org.apache.beam.sdk.values.PDone;
-import org.apache.commons.csv.CSVFormat;
-import org.slf4j.Logger;
-import org.slf4j.LoggerFactory;
-
-/**
- * {@code BeamTextCSVTable} is a {@code BeamTextTable} whose content is formatted as CSV.
- *
- * <p>
- * {@link CSVFormat} itself has many dialects; see its Javadoc for more info.
- * </p>
- */
-public class BeamTextCSVTable extends BeamTextTable {
- private static final Logger LOG = LoggerFactory
- .getLogger(BeamTextCSVTable.class);
-
- private CSVFormat csvFormat;
-
- /**
- * CSV table with {@link CSVFormat#DEFAULT DEFAULT} format.
- */
- public BeamTextCSVTable(BeamRecordSqlType beamSqlRowType, String filePattern) {
- this(beamSqlRowType, filePattern, CSVFormat.DEFAULT);
- }
-
- public BeamTextCSVTable(BeamRecordSqlType beamSqlRowType, String filePattern,
- CSVFormat csvFormat) {
- super(beamSqlRowType, filePattern);
- this.csvFormat = csvFormat;
- }
-
- @Override
- public PCollection<BeamRecord> buildIOReader(Pipeline pipeline) {
- return PBegin.in(pipeline).apply("decodeRecord", TextIO.read().from(filePattern))
- .apply("parseCSVLine",
- new BeamTextCSVTableIOReader(beamSqlRowType, filePattern, csvFormat));
- }
-
- @Override
- public PTransform<? super PCollection<BeamRecord>, PDone> buildIOWriter() {
- return new BeamTextCSVTableIOWriter(beamSqlRowType, filePattern, csvFormat);
- }
-}
http://git-wip-us.apache.org/repos/asf/beam/blob/49aad927/sdks/java/extensions/sql/src/main/java/org/apache/beam/sdk/extensions/sql/schema/text/BeamTextCSVTableIOReader.java
----------------------------------------------------------------------
diff --git a/sdks/java/extensions/sql/src/main/java/org/apache/beam/sdk/extensions/sql/schema/text/BeamTextCSVTableIOReader.java b/sdks/java/extensions/sql/src/main/java/org/apache/beam/sdk/extensions/sql/schema/text/BeamTextCSVTableIOReader.java
deleted file mode 100644
index 018dae5..0000000
--- a/sdks/java/extensions/sql/src/main/java/org/apache/beam/sdk/extensions/sql/schema/text/BeamTextCSVTableIOReader.java
+++ /dev/null
@@ -1,58 +0,0 @@
-/*
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements. See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership. The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License. You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-package org.apache.beam.sdk.extensions.sql.schema.text;
-
-import java.io.Serializable;
-import org.apache.beam.sdk.extensions.sql.schema.BeamRecordSqlType;
-import org.apache.beam.sdk.extensions.sql.schema.BeamTableUtils;
-import org.apache.beam.sdk.transforms.DoFn;
-import org.apache.beam.sdk.transforms.PTransform;
-import org.apache.beam.sdk.transforms.ParDo;
-import org.apache.beam.sdk.values.BeamRecord;
-import org.apache.beam.sdk.values.PCollection;
-import org.apache.commons.csv.CSVFormat;
-
-/**
- * IOReader for {@code BeamTextCSVTable}.
- */
-public class BeamTextCSVTableIOReader
- extends PTransform<PCollection<String>, PCollection<BeamRecord>>
- implements Serializable {
- private String filePattern;
- protected BeamRecordSqlType beamSqlRowType;
- protected CSVFormat csvFormat;
-
- public BeamTextCSVTableIOReader(BeamRecordSqlType beamSqlRowType, String filePattern,
- CSVFormat csvFormat) {
- this.filePattern = filePattern;
- this.beamSqlRowType = beamSqlRowType;
- this.csvFormat = csvFormat;
- }
-
- @Override
- public PCollection<BeamRecord> expand(PCollection<String> input) {
- return input.apply(ParDo.of(new DoFn<String, BeamRecord>() {
- @ProcessElement
- public void processElement(ProcessContext ctx) {
- String str = ctx.element();
- ctx.output(BeamTableUtils.csvLine2BeamSqlRow(csvFormat, str, beamSqlRowType));
- }
- }));
- }
-}
http://git-wip-us.apache.org/repos/asf/beam/blob/49aad927/sdks/java/extensions/sql/src/main/java/org/apache/beam/sdk/extensions/sql/schema/text/BeamTextCSVTableIOWriter.java
----------------------------------------------------------------------
diff --git a/sdks/java/extensions/sql/src/main/java/org/apache/beam/sdk/extensions/sql/schema/text/BeamTextCSVTableIOWriter.java b/sdks/java/extensions/sql/src/main/java/org/apache/beam/sdk/extensions/sql/schema/text/BeamTextCSVTableIOWriter.java
deleted file mode 100644
index 53eb382..0000000
--- a/sdks/java/extensions/sql/src/main/java/org/apache/beam/sdk/extensions/sql/schema/text/BeamTextCSVTableIOWriter.java
+++ /dev/null
@@ -1,58 +0,0 @@
-/*
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements. See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership. The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License. You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-package org.apache.beam.sdk.extensions.sql.schema.text;
-
-import java.io.Serializable;
-import org.apache.beam.sdk.extensions.sql.schema.BeamRecordSqlType;
-import org.apache.beam.sdk.extensions.sql.schema.BeamTableUtils;
-import org.apache.beam.sdk.io.TextIO;
-import org.apache.beam.sdk.transforms.DoFn;
-import org.apache.beam.sdk.transforms.PTransform;
-import org.apache.beam.sdk.transforms.ParDo;
-import org.apache.beam.sdk.values.BeamRecord;
-import org.apache.beam.sdk.values.PCollection;
-import org.apache.beam.sdk.values.PDone;
-import org.apache.commons.csv.CSVFormat;
-
-/**
- * IOWriter for {@code BeamTextCSVTable}.
- */
-public class BeamTextCSVTableIOWriter extends PTransform<PCollection<BeamRecord>, PDone>
- implements Serializable {
- private String filePattern;
- protected BeamRecordSqlType beamSqlRowType;
- protected CSVFormat csvFormat;
-
- public BeamTextCSVTableIOWriter(BeamRecordSqlType beamSqlRowType, String filePattern,
- CSVFormat csvFormat) {
- this.filePattern = filePattern;
- this.beamSqlRowType = beamSqlRowType;
- this.csvFormat = csvFormat;
- }
-
- @Override public PDone expand(PCollection<BeamRecord> input) {
- return input.apply("encodeRecord", ParDo.of(new DoFn<BeamRecord, String>() {
-
- @ProcessElement public void processElement(ProcessContext ctx) {
- BeamRecord row = ctx.element();
- ctx.output(BeamTableUtils.beamSqlRow2CsvLine(row, csvFormat));
- }
- })).apply(TextIO.write().to(filePattern));
- }
-}
http://git-wip-us.apache.org/repos/asf/beam/blob/49aad927/sdks/java/extensions/sql/src/main/java/org/apache/beam/sdk/extensions/sql/schema/text/BeamTextTable.java
----------------------------------------------------------------------
diff --git a/sdks/java/extensions/sql/src/main/java/org/apache/beam/sdk/extensions/sql/schema/text/BeamTextTable.java b/sdks/java/extensions/sql/src/main/java/org/apache/beam/sdk/extensions/sql/schema/text/BeamTextTable.java
deleted file mode 100644
index 80e81aa..0000000
--- a/sdks/java/extensions/sql/src/main/java/org/apache/beam/sdk/extensions/sql/schema/text/BeamTextTable.java
+++ /dev/null
@@ -1,41 +0,0 @@
-/*
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements. See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership. The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License. You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-package org.apache.beam.sdk.extensions.sql.schema.text;
-
-import java.io.Serializable;
-import org.apache.beam.sdk.extensions.sql.schema.BaseBeamTable;
-import org.apache.beam.sdk.extensions.sql.schema.BeamIOType;
-import org.apache.beam.sdk.extensions.sql.schema.BeamRecordSqlType;
-
-/**
- * {@code BeamTextTable} represents a text file/directory (backed by {@code TextIO}).
- */
-public abstract class BeamTextTable extends BaseBeamTable implements Serializable {
- protected String filePattern;
-
- protected BeamTextTable(BeamRecordSqlType beamSqlRowType, String filePattern) {
- super(beamSqlRowType);
- this.filePattern = filePattern;
- }
-
- @Override
- public BeamIOType getSourceType() {
- return BeamIOType.BOUNDED;
- }
-}
http://git-wip-us.apache.org/repos/asf/beam/blob/49aad927/sdks/java/extensions/sql/src/main/java/org/apache/beam/sdk/extensions/sql/schema/text/package-info.java
----------------------------------------------------------------------
diff --git a/sdks/java/extensions/sql/src/main/java/org/apache/beam/sdk/extensions/sql/schema/text/package-info.java b/sdks/java/extensions/sql/src/main/java/org/apache/beam/sdk/extensions/sql/schema/text/package-info.java
deleted file mode 100644
index f914e2e..0000000
--- a/sdks/java/extensions/sql/src/main/java/org/apache/beam/sdk/extensions/sql/schema/text/package-info.java
+++ /dev/null
@@ -1,22 +0,0 @@
-/*
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements. See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership. The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License. You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-/**
- * Table schema for text files.
- */
-package org.apache.beam.sdk.extensions.sql.schema.text;