You are viewing a plain text version of this content. The canonical link for it is here.
Posted to commits@drill.apache.org by ar...@apache.org on 2017/11/13 12:07:15 UTC
[01/11] drill git commit: DRILL-5337: OpenTSDB storage plugin
Repository: drill
Updated Branches:
refs/heads/master 29e054769 -> 3036d3700
http://git-wip-us.apache.org/repos/asf/drill/blob/496c97d1/contrib/storage-opentsdb/src/main/java/org/apache/drill/exec/store/openTSDB/schema/OpenTSDBSchemaFactory.java
----------------------------------------------------------------------
diff --git a/contrib/storage-opentsdb/src/main/java/org/apache/drill/exec/store/openTSDB/schema/OpenTSDBSchemaFactory.java b/contrib/storage-opentsdb/src/main/java/org/apache/drill/exec/store/openTSDB/schema/OpenTSDBSchemaFactory.java
new file mode 100644
index 0000000..cca39d8
--- /dev/null
+++ b/contrib/storage-opentsdb/src/main/java/org/apache/drill/exec/store/openTSDB/schema/OpenTSDBSchemaFactory.java
@@ -0,0 +1,77 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements. See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership. The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License. You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.apache.drill.exec.store.openTSDB.schema;
+
+import org.apache.calcite.schema.SchemaPlus;
+import org.apache.calcite.schema.Table;
+import org.apache.drill.exec.store.AbstractSchema;
+import org.apache.drill.exec.store.SchemaConfig;
+import org.apache.drill.exec.store.SchemaFactory;
+import org.apache.drill.exec.store.openTSDB.DrillOpenTSDBTable;
+import org.apache.drill.exec.store.openTSDB.OpenTSDBScanSpec;
+import org.apache.drill.exec.store.openTSDB.OpenTSDBStoragePlugin;
+import org.apache.drill.exec.store.openTSDB.OpenTSDBStoragePluginConfig;
+import org.apache.drill.exec.store.openTSDB.client.Schema;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
+
+import java.io.IOException;
+import java.util.Collections;
+import java.util.Set;
+
+/**
+ * Schema factory for the OpenTSDB storage plugin: registers a single
+ * {@link OpenTSDBSchema} (named after the plugin instance) with Drill's
+ * root schema so that OpenTSDB metrics appear as queryable tables.
+ */
+public class OpenTSDBSchemaFactory implements SchemaFactory {
+
+ // NOTE(review): logger is declared but never used in the visible code.
+ private static final Logger log = LoggerFactory.getLogger(OpenTSDBSchemaFactory.class);
+
+ private final String schemaName;
+ private final OpenTSDBStoragePlugin plugin;
+
+ public OpenTSDBSchemaFactory(OpenTSDBStoragePlugin plugin, String schemaName) {
+ this.plugin = plugin;
+ this.schemaName = schemaName;
+ }
+
+ // Adds this plugin's schema under schemaName in the parent (root) schema.
+ @Override
+ public void registerSchemas(SchemaConfig schemaConfig, SchemaPlus parent) throws IOException {
+ OpenTSDBSchema schema = new OpenTSDBSchema(schemaName);
+ parent.add(schemaName, schema);
+ }
+
+ /**
+ * Drill schema backed by an OpenTSDB server. Table names are OpenTSDB
+ * metric names fetched from the plugin's client.
+ */
+ class OpenTSDBSchema extends AbstractSchema {
+
+ OpenTSDBSchema(String name) {
+ // No parent schema path: this schema lives directly under the root.
+ super(Collections.<String>emptyList(), name);
+ }
+
+ // Resolves a metric name to a Drill table; a new scan spec and OpenTSDB
+ // Schema are built per lookup (no caching visible here).
+ @Override
+ public Table getTable(String name) {
+ OpenTSDBScanSpec scanSpec = new OpenTSDBScanSpec(name);
+ return new DrillOpenTSDBTable(schemaName, plugin, new Schema(plugin.getClient(), name), scanSpec);
+ }
+
+ // All metric names known to the OpenTSDB server, exposed as table names.
+ @Override
+ public Set<String> getTableNames() {
+ return plugin.getClient().getAllMetricNames();
+ }
+
+ @Override
+ public String getTypeName() {
+ return OpenTSDBStoragePluginConfig.NAME;
+ }
+ }
+}
http://git-wip-us.apache.org/repos/asf/drill/blob/496c97d1/contrib/storage-opentsdb/src/main/resources/bootstrap-storage-plugins.json
----------------------------------------------------------------------
diff --git a/contrib/storage-opentsdb/src/main/resources/bootstrap-storage-plugins.json b/contrib/storage-opentsdb/src/main/resources/bootstrap-storage-plugins.json
new file mode 100644
index 0000000..d1055c1
--- /dev/null
+++ b/contrib/storage-opentsdb/src/main/resources/bootstrap-storage-plugins.json
@@ -0,0 +1,9 @@
+{
+ "storage": {
+ openTSDB: {
+ type: "openTSDB",
+ connection: "http://localhost:10000",
+ enabled: false
+ }
+ }
+}
http://git-wip-us.apache.org/repos/asf/drill/blob/496c97d1/contrib/storage-opentsdb/src/main/resources/drill-module.conf
----------------------------------------------------------------------
diff --git a/contrib/storage-opentsdb/src/main/resources/drill-module.conf b/contrib/storage-opentsdb/src/main/resources/drill-module.conf
new file mode 100644
index 0000000..d5743da
--- /dev/null
+++ b/contrib/storage-opentsdb/src/main/resources/drill-module.conf
@@ -0,0 +1,21 @@
+// Licensed to the Apache Software Foundation (ASF) under one or more
+// contributor license agreements. See the NOTICE file distributed with
+// this work for additional information regarding copyright ownership.
+// The ASF licenses this file to You under the Apache License, Version 2.0
+// (the "License"); you may not use this file except in compliance with
+// the License. You may obtain a copy of the License at
+//
+// http://www.apache.org/licenses/LICENSE-2.0
+//
+// Unless required by applicable law or agreed to in writing, software
+// distributed under the License is distributed on an "AS IS" BASIS,
+// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+// See the License for the specific language governing permissions and
+// limitations under the License.
+//
+// This file tells Drill to consider this module when class path scanning.
+// This file can also include any supplementary configuration information.
+// This file is in HOCON format, see https://github.com/typesafehub/config/blob/master/HOCON.md for more information.
+drill.classpath.scanning: {
+ packages += "org.apache.drill.exec.store.openTSDB"
+}
http://git-wip-us.apache.org/repos/asf/drill/blob/496c97d1/contrib/storage-opentsdb/src/test/java/org/apache/drill/store/openTSDB/TestDataHolder.java
----------------------------------------------------------------------
diff --git a/contrib/storage-opentsdb/src/test/java/org/apache/drill/store/openTSDB/TestDataHolder.java b/contrib/storage-opentsdb/src/test/java/org/apache/drill/store/openTSDB/TestDataHolder.java
new file mode 100644
index 0000000..c6e7228
--- /dev/null
+++ b/contrib/storage-opentsdb/src/test/java/org/apache/drill/store/openTSDB/TestDataHolder.java
@@ -0,0 +1,247 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements. See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership. The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License. You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.apache.drill.store.openTSDB;
+
+/**
+ * Canned JSON fixtures for {@code TestOpenTSDBPlugin}: expected request
+ * bodies Drill sends to OpenTSDB's /api/query endpoint and the sample
+ * responses WireMock returns for them.
+ *
+ * NOTE(review): the identifiers spelled "WTIHOUT" are typos for "WITHOUT";
+ * they are kept as-is because the test class references them by this name.
+ */
+public class TestDataHolder {
+
+ // OpenTSDB response: per-tag ("symbol") series for warp.speed.test.
+ public static final String SAMPLE_DATA_FOR_POST_REQUEST_WITH_TAGS = "[{" +
+ "\"metric\":\"warp.speed.test\"," +
+ "\"tags\":{\"symbol\":\"VOD.L\"}," +
+ "\"aggregateTags\":[]," +
+ "\"dps\":{" +
+ "\"1407165399\":196.3000030517578," +
+ "\"1407165402\":196.3000030517578," +
+ "\"1407165405\":196.3000030517578," +
+ "\"1407165407\":196.3000030517578," +
+ "\"1407165410\":196.3000030517578," +
+ "\"1407165422\":196.3000030517578," +
+ "\"1488271956\":111.11000061035156}}," +
+ "{\"metric\":\"warp.speed.test\"," +
+ "\"tags\":{\"symbol\":\"BP.L\"}," +
+ "\"aggregateTags\":[]," +
+ "\"dps\":{" +
+ "\"1407165399\":484.20001220703125," +
+ "\"1407165403\":484.1499938964844," +
+ "\"1407165405\":484.1499938964844," +
+ "\"1407165408\":484.1499938964844," +
+ "\"1407165419\":484.1499938964844," +
+ "\"1407165423\":484.2550048828125}}," +
+ "{\"metric\":\"warp.speed.test\"," +
+ "\"tags\":{\"symbol\":\"BARC.L\"}," +
+ "\"aggregateTags\":[]," +
+ "\"dps\":{" +
+ "\"1407165401\":224.14999389648438," +
+ "\"1407165404\":224.14999389648438," +
+ "\"1407165406\":224.14999389648438," +
+ "\"1407165409\":224.14999389648438," +
+ "\"1407165422\":224.14999389648438}" +
+ "}]";
+
+ // OpenTSDB response: aggregated (sum over "symbol") series; 15 datapoints.
+ public static final String SAMPLE_DATA_FOR_GET_TABLE_REQUEST =
+ "[{" +
+ "\"metric\":\"warp.speed.test\"," +
+ "\"tags\":{}," +
+ "\"aggregateTags\":[\"symbol\"]," +
+ "\"dps\":{" +
+ "\"1407165399\":680.5000152587891," +
+ "\"1407165401\":904.625," +
+ "\"1407165402\":904.6124954223633," +
+ "\"1407165403\":904.5999908447266," +
+ "\"1407165404\":904.5999908447266," +
+ "\"1407165405\":904.5999908447266," +
+ "\"1407165406\":904.5999908447266," +
+ "\"1407165407\":904.5999908447266," +
+ "\"1407165408\":904.5999908447266," +
+ "\"1407165409\":904.5999908447266," +
+ "\"1407165410\":904.5999908447266," +
+ "\"1407165419\":904.5999908447266," +
+ "\"1407165422\":904.6787490844727," +
+ "\"1407165423\":680.5550068842233," +
+ "\"1488271956\":111.11000061035156}" +
+ "}]";
+
+ // OpenTSDB response: 5y-avg downsampled series, split per "symbol" tag.
+ public static final String SAMPLE_DATA_FOR_POST_DOWNSAMPLE_REQUEST_WITH_TAGS =
+ "[{" +
+ "\"metric\":\"warp.speed.test\"," +
+ "\"tags\":{\"symbol\":\"VOD.L\"}," +
+ "\"aggregateTags\":[]," +
+ "\"dps\":{" +
+ "\"1261440000\":196.3000030517578," +
+ "\"1419120000\":111.11000061035156}" +
+ "},{" +
+ "\"metric\":\"warp.speed.test\"" +
+ ",\"tags\":{\"symbol\":\"BP.L\"}," +
+ "\"aggregateTags\":[]," +
+ "\"dps\":{" +
+ "\"1261440000\":484.1758321126302}" +
+ "},{" +
+ "\"metric\":\"warp.speed.test\"," +
+ "\"tags\":{" +
+ "\"symbol\":\"BARC.L\"}," +
+ "\"aggregateTags\":[]," +
+ "\"dps\":{" +
+ "\"1261440000\":224.14999389648438}" +
+ "}]";
+
+ // OpenTSDB /api/suggest response: the single known metric name.
+ public static final String SAMPLE_DATA_FOR_GET_TABLE_NAME_REQUEST = "[\"warp.speed.test\"]";
+
+ // OpenTSDB response: 5y-avg downsampled series aggregated over "symbol".
+ public static final String SAMPLE_DATA_FOR_POST_DOWNSAMPLE_REQUEST_WITHOUT_TAGS =
+ "[{" +
+ "\"metric\":\"warp.speed.test\"," +
+ "\"tags\":{}," +
+ "\"aggregateTags\":[" +
+ "\"symbol\"]," +
+ "\"dps\":{" +
+ "\"1261440000\":904.6258290608723," +
+ "\"1419120000\":111.11000061035156}" +
+ "}]";
+
+ // OpenTSDB response: aggregated series truncated by an "end" timestamp.
+ public static final String SAMPLE_DATA_FOR_POST_END_REQUEST_WITHOUT_TAGS =
+ "[{" +
+ "\"metric\":\"warp.speed.test\"," +
+ "\"tags\":{}," +
+ "\"aggregateTags\":[" +
+ "\"symbol\"]," +
+ "\"dps\":{" +
+ "\"1407165399\":680.5000152587891," +
+ "\"1407165401\":904.625," +
+ "\"1407165402\":904.6124954223633," +
+ "\"1419120000\":904.5999908447266}" +
+ "}]";
+
+ // Expected request body: sum aggregator with 5y-avg downsample, no tags.
+ // NOTE(review): "WTIHOUT" typo — see class comment.
+ public static final String DOWNSAMPLE_REQUEST_WTIHOUT_TAGS =
+ "{" +
+ "\"start\":\"47y-ago\"," +
+ "\"end\":null," +
+ "\"queries\":[{" +
+ "\"aggregator\":\"sum\"," +
+ "\"metric\":\"warp.speed.test\"," +
+ "\"rate\":null," +
+ "\"downsample\":\"5y-avg\"," +
+ "\"tags\":{}" +
+ "}]" +
+ "}";
+
+ // Expected request body: explicit "end" timestamp, no tags.
+ // NOTE(review): "WTIHOUT" typo — see class comment.
+ public static final String END_PARAM_REQUEST_WTIHOUT_TAGS =
+ "{" +
+ "\"start\":\"47y-ago\"," +
+ "\"end\":\"1407165403000\"," +
+ "\"queries\":[{" +
+ "\"aggregator\":\"sum\"," +
+ "\"metric\":\"warp.speed.test\"," +
+ "\"rate\":null," +
+ "\"downsample\":null," +
+ "\"tags\":{}" +
+ "}]" +
+ "}";
+
+
+ // Expected request body: 5y-avg downsample with wildcard "symbol" tag.
+ public static final String DOWNSAMPLE_REQUEST_WITH_TAGS =
+ "{" +
+ "\"start\":\"47y-ago\"," +
+ "\"end\":null," +
+ "\"queries\":[{" +
+ "\"aggregator\":\"sum\"," +
+ "\"metric\":\"warp.speed.test\"," +
+ "\"rate\":null," +
+ "\"downsample\":\"5y-avg\"," +
+ "\"tags\":{" +
+ "\"symbol\":\"*\"}" +
+ "}]" +
+ "}";
+
+ // Expected request body: explicit "end" timestamp with wildcard "symbol" tag.
+ public static final String END_PARAM_REQUEST_WITH_TAGS =
+ "{" +
+ "\"start\":\"47y-ago\"," +
+ "\"end\":\"1407165403000\"," +
+ "\"queries\":[{" +
+ "\"aggregator\":\"sum\"," +
+ "\"metric\":\"warp.speed.test\"," +
+ "\"rate\":null," +
+ "\"downsample\":null," +
+ "\"tags\":{" +
+ "\"symbol\":\"*\"}" +
+ "}]" +
+ "}";
+
+ // Expected request body for a metric that does not exist ("warp.spee");
+ // the WireMock stub answers this with HTTP 400.
+ public static final String REQUEST_TO_NONEXISTENT_METRIC =
+ "{" +
+ "\"start\":\"47y-ago\"," +
+ "\"end\":null," +
+ "\"queries\":[{" +
+ "\"aggregator\":\"sum\"," +
+ "\"metric\":\"warp.spee\"," +
+ "\"rate\":null," +
+ "\"downsample\":null," +
+ "\"tags\":{" + "}" +
+ "}]" +
+ "}";
+
+
+ // Expected request body: minimal required params (sum, 47y-ago), no tags.
+ public static final String POST_REQUEST_WITHOUT_TAGS =
+ "{" +
+ "\"start\":\"47y-ago\"," +
+ "\"end\":null," +
+ "\"queries\":[{" +
+ "\"aggregator\":\"sum\"," +
+ "\"metric\":\"warp.speed.test\"," +
+ "\"rate\":null," +
+ "\"downsample\":null," +
+ "\"tags\":{}" +
+ "}]" +
+ "}";
+
+
+ // Expected request body: minimal required params with wildcard "symbol" tag.
+ public static final String POST_REQUEST_WITH_TAGS =
+ "{" +
+ "\"start\":\"47y-ago\"," +
+ "\"end\":null," +
+ "\"queries\":[{" +
+ "\"aggregator\":\"sum\"," +
+ "\"metric\":\"warp.speed.test\"," +
+ "\"rate\":null," +
+ "\"downsample\":null," +
+ "\"tags\":{" +
+ "\"symbol\":\"*\"}" +
+ "}]" +
+ "}";
+
+ // OpenTSDB response: per-tag series truncated by an "end" timestamp.
+ public static final String SAMPLE_DATA_FOR_POST_END_REQUEST_WITH_TAGS =
+ "[{" +
+ "\"metric\":\"warp.speed.test\"," +
+ "\"tags\":{\"symbol\":\"VOD.L\"}," +
+ "\"aggregateTags\":[]," +
+ "\"dps\":{" +
+ "\"1407165399\":196.3000030517578," +
+ "\"1407165402\":196.3000030517578}" +
+ "},{" +
+ "\"metric\":\"warp.speed.test\"" +
+ ",\"tags\":{\"symbol\":\"BP.L\"}," +
+ "\"aggregateTags\":[]," +
+ "\"dps\":{" +
+ "\"1407165399\":484.20001220703125," +
+ "\"1407165403\":484.1499938964844}" +
+ "},{" +
+ "\"metric\":\"warp.speed.test\"," +
+ "\"tags\":{" +
+ "\"symbol\":\"BARC.L\"}," +
+ "\"aggregateTags\":[]," +
+ "\"dps\":{" +
+ "\"1407165401\":224.14999389648438}" +
+ "}]";
+}
http://git-wip-us.apache.org/repos/asf/drill/blob/496c97d1/contrib/storage-opentsdb/src/test/java/org/apache/drill/store/openTSDB/TestOpenTSDBPlugin.java
----------------------------------------------------------------------
diff --git a/contrib/storage-opentsdb/src/test/java/org/apache/drill/store/openTSDB/TestOpenTSDBPlugin.java b/contrib/storage-opentsdb/src/test/java/org/apache/drill/store/openTSDB/TestOpenTSDBPlugin.java
new file mode 100644
index 0000000..2d6c506
--- /dev/null
+++ b/contrib/storage-opentsdb/src/test/java/org/apache/drill/store/openTSDB/TestOpenTSDBPlugin.java
@@ -0,0 +1,189 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements. See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership. The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License. You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.apache.drill.store.openTSDB;
+
+import com.github.tomakehurst.wiremock.junit.WireMockRule;
+import org.apache.drill.PlanTestBase;
+import org.apache.drill.common.exceptions.UserRemoteException;
+import org.apache.drill.exec.store.StoragePluginRegistry;
+import org.apache.drill.exec.store.openTSDB.OpenTSDBStoragePlugin;
+import org.apache.drill.exec.store.openTSDB.OpenTSDBStoragePluginConfig;
+import org.junit.Assert;
+import org.junit.Before;
+import org.junit.BeforeClass;
+import org.junit.Rule;
+import org.junit.Test;
+
+import static com.github.tomakehurst.wiremock.client.WireMock.aResponse;
+import static com.github.tomakehurst.wiremock.client.WireMock.equalToJson;
+import static com.github.tomakehurst.wiremock.client.WireMock.get;
+import static com.github.tomakehurst.wiremock.client.WireMock.post;
+import static com.github.tomakehurst.wiremock.client.WireMock.urlEqualTo;
+import static org.apache.drill.store.openTSDB.TestDataHolder.DOWNSAMPLE_REQUEST_WITH_TAGS;
+import static org.apache.drill.store.openTSDB.TestDataHolder.DOWNSAMPLE_REQUEST_WTIHOUT_TAGS;
+import static org.apache.drill.store.openTSDB.TestDataHolder.END_PARAM_REQUEST_WITH_TAGS;
+import static org.apache.drill.store.openTSDB.TestDataHolder.END_PARAM_REQUEST_WTIHOUT_TAGS;
+import static org.apache.drill.store.openTSDB.TestDataHolder.POST_REQUEST_WITHOUT_TAGS;
+import static org.apache.drill.store.openTSDB.TestDataHolder.POST_REQUEST_WITH_TAGS;
+import static org.apache.drill.store.openTSDB.TestDataHolder.REQUEST_TO_NONEXISTENT_METRIC;
+import static org.apache.drill.store.openTSDB.TestDataHolder.SAMPLE_DATA_FOR_GET_TABLE_NAME_REQUEST;
+import static org.apache.drill.store.openTSDB.TestDataHolder.SAMPLE_DATA_FOR_GET_TABLE_REQUEST;
+import static org.apache.drill.store.openTSDB.TestDataHolder.SAMPLE_DATA_FOR_POST_DOWNSAMPLE_REQUEST_WITHOUT_TAGS;
+import static org.apache.drill.store.openTSDB.TestDataHolder.SAMPLE_DATA_FOR_POST_DOWNSAMPLE_REQUEST_WITH_TAGS;
+import static org.apache.drill.store.openTSDB.TestDataHolder.SAMPLE_DATA_FOR_POST_END_REQUEST_WITHOUT_TAGS;
+import static org.apache.drill.store.openTSDB.TestDataHolder.SAMPLE_DATA_FOR_POST_END_REQUEST_WITH_TAGS;
+import static org.apache.drill.store.openTSDB.TestDataHolder.SAMPLE_DATA_FOR_POST_REQUEST_WITH_TAGS;
+
+/**
+ * Integration tests for the OpenTSDB storage plugin. A WireMock server on
+ * port 10000 impersonates OpenTSDB; stubs pair the canned request bodies
+ * from {@link TestDataHolder} with canned JSON responses.
+ */
+public class TestOpenTSDBPlugin extends PlanTestBase {
+
+ protected static OpenTSDBStoragePlugin storagePlugin;
+ protected static OpenTSDBStoragePluginConfig storagePluginConfig;
+
+ // Per-test WireMock server; port 10000 matches the connection URL in
+ // bootstrap-storage-plugins.json.
+ @Rule
+ public WireMockRule wireMockRule = new WireMockRule(10000);
+
+ // Looks up the registered openTSDB plugin and enables it for the tests.
+ @BeforeClass
+ public static void setup() throws Exception {
+ final StoragePluginRegistry pluginRegistry = getDrillbitContext().getStorage();
+ storagePlugin = (OpenTSDBStoragePlugin) pluginRegistry.getPlugin(OpenTSDBStoragePluginConfig.NAME);
+ storagePluginConfig = storagePlugin.getConfig();
+ storagePluginConfig.setEnabled(true);
+ pluginRegistry.createOrUpdate(OpenTSDBStoragePluginConfig.NAME, storagePluginConfig, true);
+ }
+
+ // Re-registers all stubs before each test (the WireMock rule resets state).
+ @Before
+ public void init() {
+ setupPostStubs();
+ setupGetStubs();
+ }
+
+ // Stubs for GET endpoints: metric-name suggestion and a simple query.
+ private void setupGetStubs() {
+ wireMockRule.stubFor(get(urlEqualTo("/api/suggest?type=metrics&max=" + Integer.MAX_VALUE))
+ .willReturn(aResponse()
+ .withStatus(200)
+ .withHeader("Content-Type", "application/json")
+ .withBody(SAMPLE_DATA_FOR_GET_TABLE_NAME_REQUEST)));
+
+ wireMockRule.stubFor(get(urlEqualTo("/api/query?start=47y-ago&m=sum:warp.speed.test"))
+ .willReturn(aResponse()
+ .withStatus(200)
+ .withBody(SAMPLE_DATA_FOR_GET_TABLE_REQUEST)
+ ));
+ }
+
+ // Stubs for POST /api/query: each expected request body maps to a canned
+ // response; the nonexistent-metric request is answered with HTTP 400.
+ private void setupPostStubs() {
+ wireMockRule.stubFor(post(urlEqualTo("/api/query"))
+ .withRequestBody(equalToJson(POST_REQUEST_WITHOUT_TAGS))
+ .willReturn(aResponse()
+ .withStatus(200)
+ .withHeader("Content-Type", "application/json")
+ .withBody(SAMPLE_DATA_FOR_GET_TABLE_REQUEST)));
+
+ wireMockRule.stubFor(post(urlEqualTo("/api/query"))
+ .withRequestBody(equalToJson(POST_REQUEST_WITH_TAGS))
+ .willReturn(aResponse()
+ .withStatus(200)
+ .withHeader("Content-Type", "application/json")
+ .withBody(SAMPLE_DATA_FOR_POST_REQUEST_WITH_TAGS)));
+
+ wireMockRule.stubFor(post(urlEqualTo("/api/query"))
+ .withRequestBody(equalToJson(DOWNSAMPLE_REQUEST_WTIHOUT_TAGS))
+ .willReturn(aResponse()
+ .withStatus(200)
+ .withHeader("Content-Type", "application/json")
+ .withBody(SAMPLE_DATA_FOR_POST_DOWNSAMPLE_REQUEST_WITHOUT_TAGS)));
+
+ wireMockRule.stubFor(post(urlEqualTo("/api/query"))
+ .withRequestBody(equalToJson(END_PARAM_REQUEST_WTIHOUT_TAGS))
+ .willReturn(aResponse()
+ .withStatus(200)
+ .withHeader("Content-Type", "application/json")
+ .withBody(SAMPLE_DATA_FOR_POST_END_REQUEST_WITHOUT_TAGS)));
+
+ wireMockRule.stubFor(post(urlEqualTo("/api/query"))
+ .withRequestBody(equalToJson(DOWNSAMPLE_REQUEST_WITH_TAGS))
+ .willReturn(aResponse()
+ .withStatus(200)
+ .withHeader("Content-Type", "application/json")
+ .withBody(SAMPLE_DATA_FOR_POST_DOWNSAMPLE_REQUEST_WITH_TAGS)));
+
+ wireMockRule.stubFor(post(urlEqualTo("/api/query"))
+ .withRequestBody(equalToJson(END_PARAM_REQUEST_WITH_TAGS))
+ .willReturn(aResponse()
+ .withStatus(200)
+ .withHeader("Content-Type", "application/json")
+ .withBody(SAMPLE_DATA_FOR_POST_END_REQUEST_WITH_TAGS)));
+
+ wireMockRule.stubFor(post(urlEqualTo("/api/query"))
+ .withRequestBody(equalToJson(REQUEST_TO_NONEXISTENT_METRIC))
+ .willReturn(aResponse()
+ .withStatus(400)
+ .withHeader("Content-Type", "application/json")
+ ));
+ }
+
+ // 18 rows: three tagged series (7 + 6 + 5 datapoints).
+ @Test
+ public void testBasicQueryFromWithRequiredParams() throws Exception {
+ String query =
+ "select * from openTSDB.`(metric=warp.speed.test, start=47y-ago, aggregator=sum)`";
+ Assert.assertEquals(18, testSql(query));
+ }
+
+ // 15 rows: one group per distinct timestamp in the aggregated response.
+ @Test
+ public void testBasicQueryGroupBy() throws Exception {
+ String query =
+ "select `timestamp`, sum(`aggregated value`) from openTSDB.`(metric=warp.speed.test, aggregator=sum, start=47y-ago)` group by `timestamp`";
+ Assert.assertEquals(15, testSql(query));
+ }
+
+ // 4 rows from the downsampled (5y-avg) per-tag response.
+ @Test
+ public void testBasicQueryFromWithInterpolationParam() throws Exception {
+ String query = "select * from openTSDB.`(metric=warp.speed.test, aggregator=sum, start=47y-ago, downsample=5y-avg)`";
+ Assert.assertEquals(4, testSql(query));
+ }
+
+ // 5 rows from the end-bounded per-tag response.
+ // NOTE(review): the table spec ends with "))" — looks like an extra
+ // closing paren; confirm the spec parser tolerates it.
+ @Test
+ public void testBasicQueryFromWithEndParam() throws Exception {
+ String query = "select * from openTSDB.`(metric=warp.speed.test, aggregator=sum, start=47y-ago, end=1407165403000))`";
+ Assert.assertEquals(5, testSql(query));
+ }
+
+ // An empty table name must be rejected.
+ @Test(expected = UserRemoteException.class)
+ public void testBasicQueryWithoutTableName() throws Exception {
+ test("select * from openTSDB.``;");
+ }
+
+ // A metric unknown to OpenTSDB (stubbed 400) must surface as a user error.
+ @Test(expected = UserRemoteException.class)
+ public void testBasicQueryWithNonExistentTableName() throws Exception {
+ test("select * from openTSDB.`warp.spee`");
+ }
+
+ // Executes the physical plan produced by EXPLAIN and checks the row count.
+ @Test
+ public void testPhysicalPlanExecutionBasedOnQuery() throws Exception {
+ String query = "EXPLAIN PLAN for select * from openTSDB.`(metric=warp.speed.test, start=47y-ago, aggregator=sum)`";
+ String plan = getPlanInString(query, JSON_FORMAT);
+ Assert.assertEquals(18, testPhysical(plan));
+ }
+
+ // USE / DESCRIBE / SHOW TABLES against the openTSDB schema; the suggest
+ // stub returns exactly one metric name.
+ @Test
+ public void testDescribe() throws Exception {
+ test("use openTSDB");
+ test("describe `warp.speed.test`");
+ Assert.assertEquals(1, testSql("show tables"));
+ }
+}
http://git-wip-us.apache.org/repos/asf/drill/blob/496c97d1/distribution/pom.xml
----------------------------------------------------------------------
diff --git a/distribution/pom.xml b/distribution/pom.xml
index 86c3d11..9bb21d6 100644
--- a/distribution/pom.xml
+++ b/distribution/pom.xml
@@ -223,6 +223,11 @@
</dependency>
<dependency>
<groupId>org.apache.drill.contrib</groupId>
+ <artifactId>drill-opentsdb-storage</artifactId>
+ <version>${project.version}</version>
+ </dependency>
+ <dependency>
+ <groupId>org.apache.drill.contrib</groupId>
<artifactId>drill-mongo-storage</artifactId>
<version>${project.version}</version>
</dependency>
[09/11] drill git commit: DRILL-5921: Display counter metrics in table
Posted by ar...@apache.org.
DRILL-5921: Display counter metrics in table
closes #1020
Project: http://git-wip-us.apache.org/repos/asf/drill/repo
Commit: http://git-wip-us.apache.org/repos/asf/drill/commit/ed6c4bc8
Tree: http://git-wip-us.apache.org/repos/asf/drill/tree/ed6c4bc8
Diff: http://git-wip-us.apache.org/repos/asf/drill/diff/ed6c4bc8
Branch: refs/heads/master
Commit: ed6c4bc8e18b55e2aaac731f6cb4bc0f4480e490
Parents: 99e4504
Author: Prasad Nagaraj Subramanya <pr...@gmail.com>
Authored: Wed Nov 8 23:18:42 2017 -0800
Committer: Arina Ielchiieva <ar...@gmail.com>
Committed: Mon Nov 13 11:45:20 2017 +0200
----------------------------------------------------------------------
.../src/main/resources/rest/metrics/metrics.ftl | 34 +++++++++++---------
1 file changed, 18 insertions(+), 16 deletions(-)
----------------------------------------------------------------------
http://git-wip-us.apache.org/repos/asf/drill/blob/ed6c4bc8/exec/java-exec/src/main/resources/rest/metrics/metrics.ftl
----------------------------------------------------------------------
diff --git a/exec/java-exec/src/main/resources/rest/metrics/metrics.ftl b/exec/java-exec/src/main/resources/rest/metrics/metrics.ftl
index 1600a2d..fbd2f4e 100644
--- a/exec/java-exec/src/main/resources/rest/metrics/metrics.ftl
+++ b/exec/java-exec/src/main/resources/rest/metrics/metrics.ftl
@@ -63,12 +63,6 @@
<strong>No histograms.</strong>
</div>
</div>
- <h3 id="meters">Meters</h3>
- <div id="metersVal">
- <div class="alert alert-info">
- <strong>No meters.</strong>
- </div>
- </div>
<h3 id="timers">Timers</h3>
<div id="timersVal">
<div class="alert alert-info">
@@ -98,6 +92,22 @@
});
};
+ // Renders the metrics "counters" object as a striped two-column table
+ // (name, count) and installs it into #countersVal, replacing the previous
+ // raw-JSON rendering.
+ // NOTE(review): key/value are interpolated into HTML without escaping;
+ // metric names are server-controlled here, but confirm none can contain
+ // markup.
+ function createCountersTable(counters) {
+ $("#countersVal").html(function() {
+ var table = "<table class=\"table table-striped\" id=\"countersTable\">";
+ table += "<tbody>";
+ $.each(counters, function(key, value) {
+ table += "<tr>";
+ table += "<td>" + key + "</td>";
+ table += "<td>" + value.count + "</td>";
+ table += "</tr>";
+ });
+ table += "</tbody>";
+ table += "</table>";
+ return table;
+ });
+ };
+
function updateBars(gauges) {
$.each(["heap","non-heap","total"], function(i, key) {
var used = gauges[key + ".used"].value;
@@ -138,21 +148,13 @@
});
};
- function updateOthers(metrics) {
- $.each(["counters", "meters"], function(i, key) {
- if(! $.isEmptyObject(metrics[key])) {
- $("#" + key + "Val").html(JSON.stringify(metrics[key], null, 2));
- }
- });
- };
-
var update = function() {
$.get("/status/metrics", function(metrics) {
updateGauges(metrics.gauges);
updateBars(metrics.gauges);
if(! $.isEmptyObject(metrics.timers)) createTable(metrics.timers, "timers");
if(! $.isEmptyObject(metrics.histograms)) createTable(metrics.histograms, "histograms");
- updateOthers(metrics);
+ if(! $.isEmptyObject(metrics.counters)) createCountersTable(metrics.counters);
});
};
@@ -161,4 +163,4 @@
</script>
</#macro>
-<@page_html/>
\ No newline at end of file
+<@page_html/>
[08/11] drill git commit: DRILL-5909: Added new Counter metrics
Posted by ar...@apache.org.
DRILL-5909: Added new Counter metrics
closes #1019
Project: http://git-wip-us.apache.org/repos/asf/drill/repo
Commit: http://git-wip-us.apache.org/repos/asf/drill/commit/99e4504f
Tree: http://git-wip-us.apache.org/repos/asf/drill/tree/99e4504f
Diff: http://git-wip-us.apache.org/repos/asf/drill/diff/99e4504f
Branch: refs/heads/master
Commit: 99e4504f2aa9d11a6bc49a57f5edb70f8d621747
Parents: df95709
Author: Prasad Nagaraj Subramanya <pr...@gmail.com>
Authored: Wed Nov 1 13:49:43 2017 -0700
Committer: Arina Ielchiieva <ar...@gmail.com>
Committed: Mon Nov 13 11:06:53 2017 +0200
----------------------------------------------------------------------
.../org/apache/drill/exec/work/foreman/Foreman.java | 16 ++++++++++++++++
1 file changed, 16 insertions(+)
----------------------------------------------------------------------
http://git-wip-us.apache.org/repos/asf/drill/blob/99e4504f/exec/java-exec/src/main/java/org/apache/drill/exec/work/foreman/Foreman.java
----------------------------------------------------------------------
diff --git a/exec/java-exec/src/main/java/org/apache/drill/exec/work/foreman/Foreman.java b/exec/java-exec/src/main/java/org/apache/drill/exec/work/foreman/Foreman.java
index 10d8537..a1d150e 100644
--- a/exec/java-exec/src/main/java/org/apache/drill/exec/work/foreman/Foreman.java
+++ b/exec/java-exec/src/main/java/org/apache/drill/exec/work/foreman/Foreman.java
@@ -126,6 +126,9 @@ public class Foreman implements Runnable {
private static final Counter enqueuedQueries = DrillMetrics.getRegistry().counter("drill.queries.enqueued");
private static final Counter runningQueries = DrillMetrics.getRegistry().counter("drill.queries.running");
private static final Counter completedQueries = DrillMetrics.getRegistry().counter("drill.queries.completed");
+ private static final Counter succeededQueries = DrillMetrics.getRegistry().counter("drill.queries.succeeded");
+ private static final Counter failedQueries = DrillMetrics.getRegistry().counter("drill.queries.failed");
+ private static final Counter canceledQueries = DrillMetrics.getRegistry().counter("drill.queries.canceled");
private final QueryId queryId;
private final String queryIdString;
@@ -837,6 +840,19 @@ public class Foreman implements Runnable {
logger.warn("unable to close query manager", e);
}
+ // Incrementing QueryState counters
+ switch (state) {
+ case FAILED:
+ failedQueries.inc();
+ break;
+ case CANCELED:
+ canceledQueries.inc();
+ break;
+ case COMPLETED:
+ succeededQueries.inc();
+ break;
+ }
+
runningQueries.dec();
completedQueries.inc();
try {
[05/11] drill git commit: DRILL-5863: Sortable table incorrectly
sorts fragments/time lexically
Posted by ar...@apache.org.
DRILL-5863: Sortable table incorrectly sorts fragments/time lexically
The DataTables jQuery library sorts data based on the value of the element in a column.
However, since Drill publishes sortable items like fragment IDs and time durations as non-numeric text, the sorting is incorrect.
This PR fixes the fragment and duration ordering based on their implicit numeric values (minor ID and millisecond representation, respectively).
Support memory chaining
closes #987
Project: http://git-wip-us.apache.org/repos/asf/drill/repo
Commit: http://git-wip-us.apache.org/repos/asf/drill/commit/59c74472
Tree: http://git-wip-us.apache.org/repos/asf/drill/tree/59c74472
Diff: http://git-wip-us.apache.org/repos/asf/drill/diff/59c74472
Branch: refs/heads/master
Commit: 59c7447262a22f7f1099f1e0f6d33d44acf8813f
Parents: c1118a3
Author: Kunal Khatua <kk...@maprtech.com>
Authored: Wed Oct 11 21:35:47 2017 -0700
Committer: Arina Ielchiieva <ar...@gmail.com>
Committed: Mon Nov 13 11:05:51 2017 +0200
----------------------------------------------------------------------
.../server/rest/profile/FragmentWrapper.java | 7 ++-
.../server/rest/profile/OperatorWrapper.java | 5 +-
.../exec/server/rest/profile/TableBuilder.java | 54 +++++++++++++++-----
3 files changed, 52 insertions(+), 14 deletions(-)
----------------------------------------------------------------------
http://git-wip-us.apache.org/repos/asf/drill/blob/59c74472/exec/java-exec/src/main/java/org/apache/drill/exec/server/rest/profile/FragmentWrapper.java
----------------------------------------------------------------------
diff --git a/exec/java-exec/src/main/java/org/apache/drill/exec/server/rest/profile/FragmentWrapper.java b/exec/java-exec/src/main/java/org/apache/drill/exec/server/rest/profile/FragmentWrapper.java
index 2233f2e..5496f83 100644
--- a/exec/java-exec/src/main/java/org/apache/drill/exec/server/rest/profile/FragmentWrapper.java
+++ b/exec/java-exec/src/main/java/org/apache/drill/exec/server/rest/profile/FragmentWrapper.java
@@ -19,7 +19,9 @@ package org.apache.drill.exec.server.rest.profile;
import java.util.ArrayList;
import java.util.Collections;
+import java.util.HashMap;
import java.util.List;
+import java.util.Map;
import org.apache.drill.exec.proto.UserBitShared.MajorFragmentProfile;
import org.apache.drill.exec.proto.UserBitShared.MinorFragmentProfile;
@@ -228,6 +230,8 @@ public class FragmentWrapper {
Collections2.filter(major.getMinorFragmentProfileList(), Filters.missingOperatorsOrTimes));
Collections.sort(complete, Comparators.minorId);
+
+ Map<String, String> attributeMap = new HashMap<String, String>(); //Reusing for different fragments
for (final MinorFragmentProfile minor : complete) {
final ArrayList<OperatorProfile> ops = new ArrayList<>(minor.getOperatorProfileList());
@@ -244,7 +248,8 @@ public class FragmentWrapper {
biggestBatches = Math.max(biggestBatches, batches);
}
- builder.appendCell(new OperatorPathBuilder().setMajor(major).setMinor(minor).build());
+ attributeMap.put("data-order", String.valueOf(minor.getMinorFragmentId())); //Overwrite values from previous fragments
+ builder.appendCell(new OperatorPathBuilder().setMajor(major).setMinor(minor).build(), attributeMap);
builder.appendCell(minor.getEndpoint().getAddress());
builder.appendMillis(minor.getStartTime() - start);
builder.appendMillis(minor.getEndTime() - start);
http://git-wip-us.apache.org/repos/asf/drill/blob/59c74472/exec/java-exec/src/main/java/org/apache/drill/exec/server/rest/profile/OperatorWrapper.java
----------------------------------------------------------------------
diff --git a/exec/java-exec/src/main/java/org/apache/drill/exec/server/rest/profile/OperatorWrapper.java b/exec/java-exec/src/main/java/org/apache/drill/exec/server/rest/profile/OperatorWrapper.java
index cca9563..6322435 100644
--- a/exec/java-exec/src/main/java/org/apache/drill/exec/server/rest/profile/OperatorWrapper.java
+++ b/exec/java-exec/src/main/java/org/apache/drill/exec/server/rest/profile/OperatorWrapper.java
@@ -21,6 +21,7 @@ import java.util.ArrayList;
import java.util.Collections;
import java.util.HashMap;
import java.util.List;
+import java.util.Map;
import java.util.Set;
import java.util.TreeSet;
@@ -76,12 +77,14 @@ public class OperatorWrapper {
public String getContent() {
TableBuilder builder = new TableBuilder(OPERATOR_COLUMNS, OPERATOR_COLUMNS_TOOLTIP, true);
+ Map<String, String> attributeMap = new HashMap<String, String>(); //Reusing for different fragments
for (ImmutablePair<ImmutablePair<OperatorProfile, Integer>, String> ip : opsAndHosts) {
int minor = ip.getLeft().getRight();
OperatorProfile op = ip.getLeft().getLeft();
+ attributeMap.put("data-order", String.valueOf(minor)); //Overwrite values from previous fragments
String path = new OperatorPathBuilder().setMajor(major).setMinor(minor).setOperator(op).build();
- builder.appendCell(path);
+ builder.appendCell(path, attributeMap);
builder.appendCell(ip.getRight());
builder.appendNanos(op.getSetupNanos());
builder.appendNanos(op.getProcessNanos());
http://git-wip-us.apache.org/repos/asf/drill/blob/59c74472/exec/java-exec/src/main/java/org/apache/drill/exec/server/rest/profile/TableBuilder.java
----------------------------------------------------------------------
diff --git a/exec/java-exec/src/main/java/org/apache/drill/exec/server/rest/profile/TableBuilder.java b/exec/java-exec/src/main/java/org/apache/drill/exec/server/rest/profile/TableBuilder.java
index b49382b..3833f51 100644
--- a/exec/java-exec/src/main/java/org/apache/drill/exec/server/rest/profile/TableBuilder.java
+++ b/exec/java-exec/src/main/java/org/apache/drill/exec/server/rest/profile/TableBuilder.java
@@ -21,7 +21,9 @@ import java.text.DateFormat;
import java.text.DecimalFormat;
import java.text.NumberFormat;
import java.text.SimpleDateFormat;
+import java.util.HashMap;
import java.util.Locale;
+import java.util.Map;
public class TableBuilder {
private final NumberFormat format = NumberFormat.getInstance(Locale.US);
@@ -71,16 +73,35 @@ public class TableBuilder {
}
public void appendCell(final String s, final String link, final String titleText, final String backgroundColor) {
+ appendCell(s, link, titleText, backgroundColor, null);
+ }
+
+ public void appendCell(final String s, final Map<String, String> kvPairs) {
+ appendCell(s, null, null, null, kvPairs);
+ }
+
+ public void appendCell(final String s, final String link, final String titleText, final String backgroundColor,
+ final Map<String, String> kvPairs) {
if (w == 0) {
sb.append("<tr"
+ (backgroundColor == null ? "" : " style=\"background-color:"+backgroundColor+"\"")
+ ">");
}
+ StringBuilder tdElemSB = new StringBuilder("<td");
+ //Injecting title if specified (legacy impl)
if (titleText != null && titleText.length() > 0) {
- sb.append(String.format("<td title=\""+titleText+"\">%s%s</td>", s, link != null ? link : ""));
- } else {
- sb.append(String.format("<td>%s%s</td>", s, link != null ? link : ""));
+ tdElemSB.append(" title=\""+titleText+"\"");
+ }
+ //Extract other attributes for injection into element
+ if (kvPairs != null) {
+ for (String attributeName : kvPairs.keySet()) {
+ String attributeText = " " + attributeName + "=\"" + kvPairs.get(attributeName) + "\"";
+ tdElemSB.append(attributeText);
+ }
}
+ //Closing <td>
+ tdElemSB.append(String.format(">%s%s</td>", s, link != null ? link : ""));
+ sb.append(tdElemSB);
if (++w >= width) {
sb.append("</tr>\n");
w = 0;
@@ -98,27 +119,33 @@ public class TableBuilder {
}
public void appendTime(final long d) {
- appendCell(dateFormat.format(d), null, null);
+ appendTime(d, null);
}
public void appendTime(final long d, final String link) {
- appendCell(dateFormat.format(d), link, null);
+ appendTime(d, link, null);
}
public void appendTime(final long d, final String link, final String tooltip) {
- appendCell(dateFormat.format(d), link, tooltip);
+ //Embedding dataTable's data-order attribute
+ Map<String, String> attributeMap = new HashMap<String, String>();
+ attributeMap.put("data-order", String.valueOf(d));
+ appendCell(dateFormat.format(d), link, tooltip, null, attributeMap);
}
public void appendMillis(final long p) {
- appendCell((new SimpleDurationFormat(0, p)).compact(), null, null);
+ appendMillis(p, null);
}
public void appendMillis(final long p, final String link) {
- appendCell((new SimpleDurationFormat(0, p)).compact(), link, null);
+ appendMillis(p, link, null);
}
public void appendMillis(final long p, final String link, final String tooltip) {
- appendCell((new SimpleDurationFormat(0, p)).compact(), link, tooltip);
+ //Embedding dataTable's data-order attribute
+ Map<String, String> attributeMap = new HashMap<String, String>();
+ attributeMap.put("data-order", String.valueOf(p));
+ appendCell((new SimpleDurationFormat(0, p)).compact(), link, tooltip, null, attributeMap);
}
public void appendNanos(final long p) {
@@ -174,15 +201,18 @@ public class TableBuilder {
}
public void appendBytes(final long l) {
- appendCell(bytePrint(l), null, null);
+ appendBytes(l, null);
}
public void appendBytes(final long l, final String link) {
- appendCell(bytePrint(l), link, null);
+ appendBytes(l, link, null);
}
public void appendBytes(final long l, final String link, final String tooltip) {
- appendCell(bytePrint(l), link, tooltip);
+ //Embedding dataTable's data-order attribute
+ Map<String, String> attributeMap = new HashMap<String, String>();
+ attributeMap.put("data-order", String.valueOf(l));
+ appendCell(bytePrint(l), link, tooltip, null, attributeMap);
}
private String bytePrint(final long size) {
[11/11] drill git commit: DRILL-5795: Parquet Filter push down now
works at rowgroup level
Posted by ar...@apache.org.
DRILL-5795: Parquet Filter push down now works at rowgroup level
Before this commit, the filter was pruning complete files. When a file
is composed of multiple rowgroups, it was not able to prune a single
rowgroup from the file. Now, when the filter finds that a rowgroup
doesn't match, it will be removed from the scan.
closes #949
Project: http://git-wip-us.apache.org/repos/asf/drill/repo
Commit: http://git-wip-us.apache.org/repos/asf/drill/commit/3036d370
Tree: http://git-wip-us.apache.org/repos/asf/drill/tree/3036d370
Diff: http://git-wip-us.apache.org/repos/asf/drill/diff/3036d370
Branch: refs/heads/master
Commit: 3036d3700aa620bbbffc260e52f633cdaae1172c
Parents: 30da051
Author: Damien Profeta <da...@amadeus.com>
Authored: Fri Sep 15 11:01:58 2017 -0700
Committer: Arina Ielchiieva <ar...@gmail.com>
Committed: Mon Nov 13 11:45:21 2017 +0200
----------------------------------------------------------------------
.../exec/store/parquet/ParquetGroupScan.java | 171 ++++++++++---------
.../parquet/TestParquetFilterPushDown.java | 10 ++
.../resources/parquet/multirowgroup.parquet | Bin 0 -> 398 bytes
3 files changed, 103 insertions(+), 78 deletions(-)
----------------------------------------------------------------------
http://git-wip-us.apache.org/repos/asf/drill/blob/3036d370/exec/java-exec/src/main/java/org/apache/drill/exec/store/parquet/ParquetGroupScan.java
----------------------------------------------------------------------
diff --git a/exec/java-exec/src/main/java/org/apache/drill/exec/store/parquet/ParquetGroupScan.java b/exec/java-exec/src/main/java/org/apache/drill/exec/store/parquet/ParquetGroupScan.java
index 4e38ce9..972332c 100644
--- a/exec/java-exec/src/main/java/org/apache/drill/exec/store/parquet/ParquetGroupScan.java
+++ b/exec/java-exec/src/main/java/org/apache/drill/exec/store/parquet/ParquetGroupScan.java
@@ -738,7 +738,7 @@ public class ParquetGroupScan extends AbstractFileGroupScan {
private EndpointByteMap byteMap;
private int rowGroupIndex;
- private String root;
+ private List<? extends ColumnMetadata> columns;
private long rowCount; // rowCount = -1 indicates to include all rows.
private long numRecordsToRead;
@@ -791,6 +791,14 @@ public class ParquetGroupScan extends AbstractFileGroupScan {
return rowCount;
}
+ public List<? extends ColumnMetadata> getColumns() {
+ return columns;
+ }
+
+ public void setColumns(List<? extends ColumnMetadata> columns) {
+ this.columns = columns;
+ }
+
}
/**
@@ -962,69 +970,70 @@ public class ParquetGroupScan extends AbstractFileGroupScan {
}
}
rowGroupInfo.setEndpointByteMap(endpointByteMap);
+ rowGroupInfo.setColumns(rg.getColumns());
rgIndex++;
rowGroupInfos.add(rowGroupInfo);
}
}
this.endpointAffinities = AffinityCreator.getAffinityMap(rowGroupInfos);
+ updatePartitionColTypeMap();
+ }
+ private void updatePartitionColTypeMap() {
columnValueCounts = Maps.newHashMap();
this.rowCount = 0;
boolean first = true;
- for (ParquetFileMetadata file : parquetTableMetadata.getFiles()) {
- for (RowGroupMetadata rowGroup : file.getRowGroups()) {
- long rowCount = rowGroup.getRowCount();
- for (ColumnMetadata column : rowGroup.getColumns()) {
- SchemaPath schemaPath = SchemaPath.getCompoundPath(column.getName());
- Long previousCount = columnValueCounts.get(schemaPath);
- if (previousCount != null) {
- if (previousCount != GroupScan.NO_COLUMN_STATS) {
- if (column.getNulls() != null) {
- Long newCount = rowCount - column.getNulls();
- columnValueCounts.put(schemaPath, columnValueCounts.get(schemaPath) + newCount);
- }
- }
- } else {
+ for (RowGroupInfo rowGroup : this.rowGroupInfos) {
+ long rowCount = rowGroup.getRowCount();
+ for (ColumnMetadata column : rowGroup.getColumns()) {
+ SchemaPath schemaPath = SchemaPath.getCompoundPath(column.getName());
+ Long previousCount = columnValueCounts.get(schemaPath);
+ if (previousCount != null) {
+ if (previousCount != GroupScan.NO_COLUMN_STATS) {
if (column.getNulls() != null) {
Long newCount = rowCount - column.getNulls();
- columnValueCounts.put(schemaPath, newCount);
- } else {
- columnValueCounts.put(schemaPath, GroupScan.NO_COLUMN_STATS);
+ columnValueCounts.put(schemaPath, columnValueCounts.get(schemaPath) + newCount);
}
}
- boolean partitionColumn = checkForPartitionColumn(column, first, rowCount);
- if (partitionColumn) {
- Map<SchemaPath, Object> map = partitionValueMap.get(file.getPath());
- if (map == null) {
- map = Maps.newHashMap();
- partitionValueMap.put(file.getPath(), map);
+ } else {
+ if (column.getNulls() != null) {
+ Long newCount = rowCount - column.getNulls();
+ columnValueCounts.put(schemaPath, newCount);
+ } else {
+ columnValueCounts.put(schemaPath, GroupScan.NO_COLUMN_STATS);
+ }
+ }
+ boolean partitionColumn = checkForPartitionColumn(column, first, rowCount);
+ if (partitionColumn) {
+ Map<SchemaPath, Object> map = partitionValueMap.get(rowGroup.getPath());
+ if (map == null) {
+ map = Maps.newHashMap();
+ partitionValueMap.put(rowGroup.getPath(), map);
+ }
+ Object value = map.get(schemaPath);
+ Object currentValue = column.getMaxValue();
+ if (value != null) {
+ if (value != currentValue) {
+ partitionColTypeMap.remove(schemaPath);
}
- Object value = map.get(schemaPath);
- Object currentValue = column.getMaxValue();
- if (value != null) {
- if (value != currentValue) {
- partitionColTypeMap.remove(schemaPath);
- }
+ } else {
+ // the value of a column with primitive type can not be null,
+ // so checks that there are really null value and puts it to the map
+ if (rowCount == column.getNulls()) {
+ map.put(schemaPath, null);
} else {
- // the value of a column with primitive type can not be null,
- // so checks that there are really null value and puts it to the map
- if (rowCount == column.getNulls()) {
- map.put(schemaPath, null);
- } else {
- map.put(schemaPath, currentValue);
- }
+ map.put(schemaPath, currentValue);
}
- } else {
- partitionColTypeMap.remove(schemaPath);
}
+ } else {
+ partitionColTypeMap.remove(schemaPath);
}
- this.rowCount += rowGroup.getRowCount();
- first = false;
}
+ this.rowCount += rowGroup.getRowCount();
+ first = false;
}
}
-
private ParquetTableMetadataBase removeUnneededRowGroups(ParquetTableMetadataBase parquetTableMetadata) {
List<ParquetFileMetadata> newFileMetadataList = Lists.newArrayList();
for (ParquetFileMetadata file : parquetTableMetadata.getFiles()) {
@@ -1121,6 +1130,7 @@ public class ParquetGroupScan extends AbstractFileGroupScan {
return "ParquetGroupScan [entries=" + entries
+ ", selectionRoot=" + selectionRoot
+ ", numFiles=" + getEntries().size()
+ + ", numRowGroups=" + rowGroupInfos.size()
+ ", usedMetadataFile=" + usedMetadataCache
+ filterStr
+ cacheFileString
@@ -1231,7 +1241,7 @@ public class ParquetGroupScan extends AbstractFileGroupScan {
public GroupScan applyFilter(LogicalExpression filterExpr, UdfUtilities udfUtilities,
FunctionImplementationRegistry functionImplementationRegistry, OptionManager optionManager) {
- if (fileSet.size() == 1 ||
+ if (rowGroupInfos.size() == 1 ||
! (parquetTableMetadata.isRowGroupPrunable()) ||
rowGroupInfos.size() > optionManager.getOption(PlannerSettings.PARQUET_ROWGROUP_FILTER_PUSHDOWN_PLANNING_THRESHOLD)
) {
@@ -1244,66 +1254,71 @@ public class ParquetGroupScan extends AbstractFileGroupScan {
final Set<SchemaPath> schemaPathsInExpr = filterExpr.accept(new ParquetRGFilterEvaluator.FieldReferenceFinder(), null);
- final List<RowGroupMetadata> qualifiedRGs = new ArrayList<>(parquetTableMetadata.getFiles().size());
+ final List<RowGroupInfo> qualifiedRGs = new ArrayList<>(rowGroupInfos.size());
Set<String> qualifiedFileNames = Sets.newHashSet(); // HashSet keeps a fileName unique.
ParquetFilterPredicate filterPredicate = null;
- for (ParquetFileMetadata file : parquetTableMetadata.getFiles()) {
+ for (RowGroupInfo rowGroup : rowGroupInfos) {
final ColumnExplorer columnExplorer = new ColumnExplorer(optionManager, this.columns);
- Map<String, String> implicitColValues = columnExplorer.populateImplicitColumns(file.getPath(), selectionRoot);
+ Map<String, String> implicitColValues = columnExplorer.populateImplicitColumns(rowGroup.getPath(), selectionRoot);
- for (RowGroupMetadata rowGroup : file.getRowGroups()) {
- ParquetMetaStatCollector statCollector = new ParquetMetaStatCollector(
- parquetTableMetadata,
- rowGroup.getColumns(),
- implicitColValues);
+ ParquetMetaStatCollector statCollector = new ParquetMetaStatCollector(
+ parquetTableMetadata,
+ rowGroup.getColumns(),
+ implicitColValues);
- Map<SchemaPath, ColumnStatistics> columnStatisticsMap = statCollector.collectColStat(schemaPathsInExpr);
+ Map<SchemaPath, ColumnStatistics> columnStatisticsMap = statCollector.collectColStat(schemaPathsInExpr);
- if (filterPredicate == null) {
- ErrorCollector errorCollector = new ErrorCollectorImpl();
- LogicalExpression materializedFilter = ExpressionTreeMaterializer.materializeFilterExpr(
- filterExpr, columnStatisticsMap, errorCollector, functionImplementationRegistry);
-
- if (errorCollector.hasErrors()) {
- logger.error("{} error(s) encountered when materialize filter expression : {}",
- errorCollector.getErrorCount(), errorCollector.toErrorString());
- return null;
- }
- // logger.debug("materializedFilter : {}", ExpressionStringBuilder.toString(materializedFilter));
+ if (filterPredicate == null) {
+ ErrorCollector errorCollector = new ErrorCollectorImpl();
+ LogicalExpression materializedFilter = ExpressionTreeMaterializer.materializeFilterExpr(
+ filterExpr, columnStatisticsMap, errorCollector, functionImplementationRegistry);
- Set<LogicalExpression> constantBoundaries = ConstantExpressionIdentifier.getConstantExpressionSet(materializedFilter);
- filterPredicate = (ParquetFilterPredicate) ParquetFilterBuilder.buildParquetFilterPredicate(
- materializedFilter, constantBoundaries, udfUtilities);
-
- if (filterPredicate == null) {
- return null;
- }
+ if (errorCollector.hasErrors()) {
+ logger.error("{} error(s) encountered when materialize filter expression : {}",
+ errorCollector.getErrorCount(), errorCollector.toErrorString());
+ return null;
}
+ // logger.debug("materializedFilter : {}", ExpressionStringBuilder.toString(materializedFilter));
- if (ParquetRGFilterEvaluator.canDrop(filterPredicate, columnStatisticsMap, rowGroup.getRowCount())) {
- continue;
+ Set<LogicalExpression> constantBoundaries = ConstantExpressionIdentifier.getConstantExpressionSet(materializedFilter);
+ filterPredicate = (ParquetFilterPredicate) ParquetFilterBuilder.buildParquetFilterPredicate(
+ materializedFilter, constantBoundaries, udfUtilities);
+
+ if (filterPredicate == null) {
+ return null;
}
+ }
- qualifiedRGs.add(rowGroup);
- qualifiedFileNames.add(file.getPath()); // TODO : optimize when 1 file contains m row groups.
+ if (ParquetRGFilterEvaluator.canDrop(filterPredicate, columnStatisticsMap, rowGroup.getRowCount())) {
+ continue;
}
+
+ qualifiedRGs.add(rowGroup);
+ qualifiedFileNames.add(rowGroup.getPath()); // TODO : optimize when 1 file contains m row groups.
}
- if (qualifiedFileNames.size() == fileSet.size() ) {
+
+ if (qualifiedRGs.size() == rowGroupInfos.size() ) {
// There is no reduction of rowGroups. Return the original groupScan.
logger.debug("applyFilter does not have any pruning!");
return null;
} else if (qualifiedFileNames.size() == 0) {
logger.warn("All rowgroups have been filtered out. Add back one to get schema from scannner");
- qualifiedFileNames.add(fileSet.iterator().next());
+ RowGroupInfo rg = rowGroupInfos.iterator().next();
+ qualifiedFileNames.add(rg.getPath());
+ qualifiedRGs.add(rg);
}
try {
FileSelection newSelection = new FileSelection(null, Lists.newArrayList(qualifiedFileNames), getSelectionRoot(), cacheFileRoot, false);
- logger.info("applyFilter {} reduce parquet file # from {} to {}", ExpressionStringBuilder.toString(filterExpr), fileSet.size(), qualifiedFileNames.size());
- return this.clone(newSelection);
+ logger.info("applyFilter {} reduce parquet rowgroup # from {} to {}", ExpressionStringBuilder.toString(filterExpr), rowGroupInfos.size(), qualifiedRGs.size());
+ ParquetGroupScan clonegroupscan = this.clone(newSelection);
+ clonegroupscan.rowGroupInfos = qualifiedRGs;
+ clonegroupscan.updatePartitionColTypeMap();
+ return clonegroupscan;
+
} catch (IOException e) {
logger.warn("Could not apply filter prune due to Exception : {}", e);
return null;
http://git-wip-us.apache.org/repos/asf/drill/blob/3036d370/exec/java-exec/src/test/java/org/apache/drill/exec/store/parquet/TestParquetFilterPushDown.java
----------------------------------------------------------------------
diff --git a/exec/java-exec/src/test/java/org/apache/drill/exec/store/parquet/TestParquetFilterPushDown.java b/exec/java-exec/src/test/java/org/apache/drill/exec/store/parquet/TestParquetFilterPushDown.java
index fa5c8b2..8f56c45 100644
--- a/exec/java-exec/src/test/java/org/apache/drill/exec/store/parquet/TestParquetFilterPushDown.java
+++ b/exec/java-exec/src/test/java/org/apache/drill/exec/store/parquet/TestParquetFilterPushDown.java
@@ -384,6 +384,16 @@ public class TestParquetFilterPushDown extends PlanTestBase {
}
+ @Test
+ public void testMultiRowGroup() throws Exception {
+ // multirowgroup is a parquet file with 2 rowgroups inside. One with a = 1 and the other with a = 2;
+ // FilterPushDown should be able to remove the rowgroup with a = 1 from the scan operator.
+ final String sql = String.format("select * from dfs_test.`%s/parquet/multirowgroup.parquet` where a > 1", TEST_RES_PATH);
+ final String[] expectedPlan = {"numRowGroups=1"};
+ final String[] excludedPlan = {};
+ PlanTestBase.testPlanMatchingPatterns(sql, expectedPlan, excludedPlan);
+ }
+
//////////////////////////////////////////////////////////////////////////////////////////////////
// Some test helper functions.
//////////////////////////////////////////////////////////////////////////////////////////////////
http://git-wip-us.apache.org/repos/asf/drill/blob/3036d370/exec/java-exec/src/test/resources/parquet/multirowgroup.parquet
----------------------------------------------------------------------
diff --git a/exec/java-exec/src/test/resources/parquet/multirowgroup.parquet b/exec/java-exec/src/test/resources/parquet/multirowgroup.parquet
new file mode 100644
index 0000000..1cb5551
Binary files /dev/null and b/exec/java-exec/src/test/resources/parquet/multirowgroup.parquet differ
[03/11] drill git commit: DRILL-5717: Let some test cases be Locale or
TimeZone independent.
Posted by ar...@apache.org.
DRILL-5717: Let some test cases be Locale or TimeZone independent.
closes #904
Project: http://git-wip-us.apache.org/repos/asf/drill/repo
Commit: http://git-wip-us.apache.org/repos/asf/drill/commit/17ca6181
Tree: http://git-wip-us.apache.org/repos/asf/drill/tree/17ca6181
Diff: http://git-wip-us.apache.org/repos/asf/drill/diff/17ca6181
Branch: refs/heads/master
Commit: 17ca6181ba924837d493ae789637b3d4514bd560
Parents: 496c97d
Author: weijie.tong <we...@alipay.com>
Authored: Sun Aug 20 20:07:41 2017 +0800
Committer: Arina Ielchiieva <ar...@gmail.com>
Committed: Mon Nov 13 11:05:13 2017 +0200
----------------------------------------------------------------------
.../java/org/apache/drill/exec/ExecTest.java | 35 ++-
.../drill/exec/fn/impl/TestCastFunctions.java | 22 +-
.../drill/exec/fn/impl/TestDateFunctions.java | 226 ++++++++++---------
.../exec/fn/impl/TestNewDateFunctions.java | 8 +-
.../fn/impl/testing/TestDateConversions.java | 19 +-
.../exec/fn/interp/TestConstantFolding.java | 17 +-
.../complex/writer/TestExtendedTypes.java | 19 +-
.../expression/fn/JodaDateValidatorTest.java | 7 +-
8 files changed, 200 insertions(+), 153 deletions(-)
----------------------------------------------------------------------
http://git-wip-us.apache.org/repos/asf/drill/blob/17ca6181/exec/java-exec/src/test/java/org/apache/drill/exec/ExecTest.java
----------------------------------------------------------------------
diff --git a/exec/java-exec/src/test/java/org/apache/drill/exec/ExecTest.java b/exec/java-exec/src/test/java/org/apache/drill/exec/ExecTest.java
index 4200f89..418e4a4 100644
--- a/exec/java-exec/src/test/java/org/apache/drill/exec/ExecTest.java
+++ b/exec/java-exec/src/test/java/org/apache/drill/exec/ExecTest.java
@@ -19,11 +19,13 @@ package org.apache.drill.exec;
import com.codahale.metrics.MetricRegistry;
import com.google.common.io.Files;
+import mockit.Mock;
+import mockit.MockUp;
import mockit.NonStrictExpectations;
-import org.apache.commons.io.FileUtils;
import org.antlr.runtime.ANTLRStringStream;
import org.antlr.runtime.CommonTokenStream;
import org.antlr.runtime.RecognitionException;
+import org.apache.commons.io.FileUtils;
import org.apache.drill.common.config.DrillConfig;
import org.apache.drill.common.expression.LogicalExpression;
import org.apache.drill.common.expression.parser.ExprLexer;
@@ -35,18 +37,20 @@ import org.apache.drill.exec.metrics.DrillMetrics;
import org.apache.drill.exec.physical.impl.OperatorCreatorRegistry;
import org.apache.drill.exec.planner.PhysicalPlanReaderTestFactory;
import org.apache.drill.exec.server.DrillbitContext;
-import org.apache.drill.exec.server.options.OptionValue;
import org.apache.drill.exec.server.options.SystemOptionManager;
import org.apache.drill.exec.store.sys.store.provider.LocalPersistentStoreProvider;
import org.apache.drill.exec.util.GuavaPatcher;
import org.apache.drill.test.DrillTest;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.FileSystem;
+import org.joda.time.DateTimeUtils;
+import org.joda.time.DateTimeZone;
import org.junit.After;
import org.junit.BeforeClass;
-
import java.io.File;
import java.io.IOException;
+import java.text.DateFormatSymbols;
+import java.util.Locale;
public class ExecTest extends DrillTest {
@@ -120,4 +124,29 @@ public class ExecTest extends DrillTest {
return ret.e;
}
+ /**
+ * This utility is to mock the method DateTimeUtils.getDateFormatSymbols()
+ * to mock the current local as US.
+ */
+ public static void mockUsDateFormatSymbols() {
+ new MockUp<DateTimeUtils>() {
+ @Mock
+ public DateFormatSymbols getDateFormatSymbols(Locale locale) {
+ return new DateFormatSymbols(Locale.US);
+ }
+ };
+ }
+
+ /**
+ * This utility is to mock the method DateTimeZone.getDefault() to
+ * mock current timezone as UTC.
+ */
+ public static void mockUtcDateTimeZone() {
+ new MockUp<DateTimeZone>() {
+ @Mock
+ public DateTimeZone getDefault() {
+ return DateTimeZone.UTC;
+ }
+ };
+ }
}
http://git-wip-us.apache.org/repos/asf/drill/blob/17ca6181/exec/java-exec/src/test/java/org/apache/drill/exec/fn/impl/TestCastFunctions.java
----------------------------------------------------------------------
diff --git a/exec/java-exec/src/test/java/org/apache/drill/exec/fn/impl/TestCastFunctions.java b/exec/java-exec/src/test/java/org/apache/drill/exec/fn/impl/TestCastFunctions.java
index 42a048e..4aeb396 100644
--- a/exec/java-exec/src/test/java/org/apache/drill/exec/fn/impl/TestCastFunctions.java
+++ b/exec/java-exec/src/test/java/org/apache/drill/exec/fn/impl/TestCastFunctions.java
@@ -19,6 +19,7 @@ package org.apache.drill.exec.fn.impl;
import com.google.common.collect.Lists;
import com.google.common.collect.Maps;
+import mockit.integration.junit4.JMockit;
import org.apache.drill.BaseTestQuery;
import org.apache.drill.categories.SqlFunctionTest;
import org.apache.drill.categories.UnlikelyTest;
@@ -26,11 +27,12 @@ import org.apache.drill.common.util.FileUtils;
import org.joda.time.DateTime;
import org.junit.Test;
import org.junit.experimental.categories.Category;
-
+import org.junit.runner.RunWith;
import java.math.BigDecimal;
import java.util.List;
import java.util.Map;
+@RunWith(JMockit.class)
@Category({UnlikelyTest.class, SqlFunctionTest.class})
public class TestCastFunctions extends BaseTestQuery {
@@ -77,16 +79,18 @@ public class TestCastFunctions extends BaseTestQuery {
@Test // DRILL-3769
public void testToDateForTimeStamp() throws Exception {
- final String query = "select to_date(to_timestamp(-1)) as col \n" +
- "from (values(1))";
+ mockUtcDateTimeZone();
+
+ final String query = "select to_date(to_timestamp(-1)) as col \n"
+ + "from (values(1))";
testBuilder()
- .sqlQuery(query)
- .ordered()
- .baselineColumns("col")
- .baselineValues(new DateTime(1969, 12, 31, 0, 0))
- .build()
- .run();
+ .sqlQuery(query)
+ .ordered()
+ .baselineColumns("col")
+ .baselineValues(new DateTime(1969, 12, 31, 0, 0))
+ .build()
+ .run();
}
@Test
http://git-wip-us.apache.org/repos/asf/drill/blob/17ca6181/exec/java-exec/src/test/java/org/apache/drill/exec/fn/impl/TestDateFunctions.java
----------------------------------------------------------------------
diff --git a/exec/java-exec/src/test/java/org/apache/drill/exec/fn/impl/TestDateFunctions.java b/exec/java-exec/src/test/java/org/apache/drill/exec/fn/impl/TestDateFunctions.java
index a491131..622d8f4 100644
--- a/exec/java-exec/src/test/java/org/apache/drill/exec/fn/impl/TestDateFunctions.java
+++ b/exec/java-exec/src/test/java/org/apache/drill/exec/fn/impl/TestDateFunctions.java
@@ -6,9 +6,9 @@
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
- *
+ * <p>
* http://www.apache.org/licenses/LICENSE-2.0
- *
+ * <p>
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
@@ -17,11 +17,9 @@
*/
package org.apache.drill.exec.fn.impl;
-import static org.junit.Assert.assertEquals;
-import static org.junit.Assert.assertTrue;
-
-import java.util.List;
-
+import com.google.common.base.Charsets;
+import com.google.common.io.Files;
+import mockit.integration.junit4.JMockit;
import org.apache.drill.categories.SqlFunctionTest;
import org.apache.drill.categories.UnlikelyTest;
import org.apache.drill.common.util.FileUtils;
@@ -34,127 +32,131 @@ import org.apache.drill.exec.server.Drillbit;
import org.apache.drill.exec.server.RemoteServiceSet;
import org.apache.drill.exec.vector.ValueVector;
import org.joda.time.LocalDate;
-import org.joda.time.LocalTime;
import org.joda.time.LocalDateTime;
+import org.joda.time.LocalTime;
import org.junit.Ignore;
import org.junit.Test;
-
-import com.google.common.base.Charsets;
-import com.google.common.io.Files;
import org.junit.experimental.categories.Category;
+import org.junit.runner.RunWith;
+import java.util.List;
+import static org.junit.Assert.assertEquals;
+import static org.junit.Assert.assertTrue;
+@RunWith(JMockit.class)
@Category({UnlikelyTest.class, SqlFunctionTest.class})
public class TestDateFunctions extends PopUnitTestBase {
- static final org.slf4j.Logger logger = org.slf4j.LoggerFactory.getLogger(TestDateFunctions.class);
+ static final org.slf4j.Logger logger = org.slf4j.LoggerFactory.getLogger(TestDateFunctions.class);
- public void testCommon(String[] expectedResults, String physicalPlan, String resourceFile) throws Exception {
- try (RemoteServiceSet serviceSet = RemoteServiceSet.getLocalServiceSet();
- Drillbit bit = new Drillbit(CONFIG, serviceSet);
- DrillClient client = new DrillClient(CONFIG, serviceSet.getCoordinator())) {
- // run query.
- bit.run();
- client.connect();
- List<QueryDataBatch> results = client.runQuery(org.apache.drill.exec.proto.UserBitShared.QueryType.PHYSICAL,
- Files.toString(FileUtils.getResourceAsFile(physicalPlan), Charsets.UTF_8)
- .replace("#{TEST_FILE}", resourceFile));
+ public void testCommon(String[] expectedResults, String physicalPlan, String resourceFile) throws Exception {
+ try (RemoteServiceSet serviceSet = RemoteServiceSet.getLocalServiceSet();
+ Drillbit bit = new Drillbit(CONFIG, serviceSet);
+ DrillClient client = new DrillClient(CONFIG, serviceSet.getCoordinator())) {
- RecordBatchLoader batchLoader = new RecordBatchLoader(bit.getContext().getAllocator());
+ // run query.
+ bit.run();
+ client.connect();
+ List<QueryDataBatch> results = client.runQuery(org.apache.drill.exec.proto.UserBitShared.QueryType.PHYSICAL,
+ Files.toString(FileUtils.getResourceAsFile(physicalPlan), Charsets.UTF_8)
+ .replace("#{TEST_FILE}", resourceFile));
- QueryDataBatch batch = results.get(0);
- assertTrue(batchLoader.load(batch.getHeader().getDef(), batch.getData()));
+ RecordBatchLoader batchLoader = new RecordBatchLoader(bit.getContext().getAllocator());
+ QueryDataBatch batch = results.get(0);
+ assertTrue(batchLoader.load(batch.getHeader().getDef(), batch.getData()));
- int i = 0;
- for (VectorWrapper<?> v : batchLoader) {
- ValueVector.Accessor accessor = v.getValueVector().getAccessor();
- System.out.println(accessor.getObject(0));
- assertEquals( expectedResults[i++], accessor.getObject(0).toString());
- }
-
- batchLoader.clear();
- for(QueryDataBatch b : results){
- b.release();
- }
- }
- }
-
- @Test
- @Ignore("relies on particular timezone")
- public void testDateIntervalArithmetic() throws Exception {
- String expectedResults[] = {"2009-02-23T00:00:00.000-08:00",
- "2008-02-24T00:00:00.000-08:00",
- "1970-01-01T13:20:33.000-08:00",
- "2008-02-24T12:00:00.000-08:00",
- "2009-04-23T12:00:00.000-07:00",
- "2008-02-24T12:00:00.000-08:00",
- "2009-04-23T12:00:00.000-07:00",
- "2009-02-23T00:00:00.000-08:00",
- "2008-02-24T00:00:00.000-08:00",
- "1970-01-01T13:20:33.000-08:00",
- "2008-02-24T12:00:00.000-08:00",
- "2009-04-23T12:00:00.000-07:00",
- "2008-02-24T12:00:00.000-08:00",
- "2009-04-23T12:00:00.000-07:00"};
- testCommon(expectedResults, "/functions/date/date_interval_arithmetic.json", "/test_simple_date.json");
- }
-
- @Test
- public void testDateDifferenceArithmetic() throws Exception {
-
- String[] expectedResults = {"P365D",
- "P-366DT-60S",
- "PT39600S"};
- testCommon(expectedResults, "/functions/date/date_difference_arithmetic.json", "/test_simple_date.json");
- }
-
- @Test
- public void testAge() throws Exception {
- String[] expectedResults = { "P109M16DT82800S",
- "P172M27D",
- "P-172M-27D",
- "P-39M-18DT-63573S"};
- testCommon(expectedResults, "/functions/date/age.json", "/test_simple_date.json");
- }
-
- @Test
- public void testIntervalArithmetic() throws Exception {
-
- String expectedResults[] = {"P2Y2M",
- "P2DT3723S",
- "P2M",
- "PT3723S",
- "P28M",
- "PT7206S",
- "P7M",
- "PT1801.500S",
- "P33M18D",
- "PT8647.200S",
- "P6M19DT86399.999S",
- "PT1715.714S"};
-
- testCommon(expectedResults, "/functions/date/interval_arithmetic.json", "/test_simple_date.json");
- }
-
- @Test
- public void testToChar() throws Exception {
-
- String expectedResults[] = {(new LocalDate(2008, 2, 23)).toString("yyyy-MMM-dd"),
- (new LocalTime(12, 20, 30)).toString("HH mm ss"),
- (new LocalDateTime(2008, 2, 23, 12, 0, 0)).toString("yyyy MMM dd HH:mm:ss")};
- testCommon(expectedResults, "/functions/date/to_char.json", "/test_simple_date.json");
- }
+ int i = 0;
+ for (VectorWrapper<?> v : batchLoader) {
- @Test
- @Ignore("relies on particular time zone")
- public void testToDateType() throws Exception {
- String expectedResults[] = {"2008-02-23T00:00:00.000-08:00",
- "1970-01-01T12:20:30.000-08:00",
- "2008-02-23T12:00:00.000-08:00",
- "2008-02-23T12:00:00.000-08:00"};
+ ValueVector.Accessor accessor = v.getValueVector().getAccessor();
+ System.out.println(accessor.getObject(0));
+ assertEquals(expectedResults[i++], accessor.getObject(0).toString());
+ }
- testCommon(expectedResults, "/functions/date/to_date_type.json", "/test_simple_date.json");
+ batchLoader.clear();
+ for (QueryDataBatch b : results) {
+ b.release();
+ }
}
+ }
+
+ @Test
+ @Ignore("relies on particular timezone")
+ public void testDateIntervalArithmetic() throws Exception {
+ String expectedResults[] = {"2009-02-23T00:00:00.000-08:00",
+ "2008-02-24T00:00:00.000-08:00",
+ "1970-01-01T13:20:33.000-08:00",
+ "2008-02-24T12:00:00.000-08:00",
+ "2009-04-23T12:00:00.000-07:00",
+ "2008-02-24T12:00:00.000-08:00",
+ "2009-04-23T12:00:00.000-07:00",
+ "2009-02-23T00:00:00.000-08:00",
+ "2008-02-24T00:00:00.000-08:00",
+ "1970-01-01T13:20:33.000-08:00",
+ "2008-02-24T12:00:00.000-08:00",
+ "2009-04-23T12:00:00.000-07:00",
+ "2008-02-24T12:00:00.000-08:00",
+ "2009-04-23T12:00:00.000-07:00"};
+ testCommon(expectedResults, "/functions/date/date_interval_arithmetic.json", "/test_simple_date.json");
+ }
+
+ @Test
+ public void testDateDifferenceArithmetic() throws Exception {
+
+ String[] expectedResults = {"P365D",
+ "P-366DT-60S",
+ "PT39600S"};
+ testCommon(expectedResults, "/functions/date/date_difference_arithmetic.json", "/test_simple_date.json");
+ }
+
+ @Test
+ public void testAge() throws Exception {
+ String[] expectedResults = {"P109M16DT82800S",
+ "P172M27D",
+ "P-172M-27D",
+ "P-39M-18DT-63573S"};
+ testCommon(expectedResults, "/functions/date/age.json", "/test_simple_date.json");
+ }
+
+ @Test
+ public void testIntervalArithmetic() throws Exception {
+
+ String expectedResults[] = {"P2Y2M",
+ "P2DT3723S",
+ "P2M",
+ "PT3723S",
+ "P28M",
+ "PT7206S",
+ "P7M",
+ "PT1801.500S",
+ "P33M18D",
+ "PT8647.200S",
+ "P6M19DT86399.999S",
+ "PT1715.714S"};
+
+ testCommon(expectedResults, "/functions/date/interval_arithmetic.json", "/test_simple_date.json");
+ }
+
+ @Test
+ public void testToChar() throws Exception {
+ mockUsDateFormatSymbols();
+
+ String expectedResults[] = {(new LocalDate(2008, 2, 23)).toString("yyyy-MMM-dd"),
+ (new LocalTime(12, 20, 30)).toString("HH mm ss"),
+ (new LocalDateTime(2008, 2, 23, 12, 0, 0)).toString("yyyy MMM dd HH:mm:ss")};
+ testCommon(expectedResults, "/functions/date/to_char.json", "/test_simple_date.json");
+ }
+
+ @Test
+ @Ignore("relies on particular time zone")
+ public void testToDateType() throws Exception {
+ String expectedResults[] = {"2008-02-23T00:00:00.000-08:00",
+ "1970-01-01T12:20:30.000-08:00",
+ "2008-02-23T12:00:00.000-08:00",
+ "2008-02-23T12:00:00.000-08:00"};
+
+ testCommon(expectedResults, "/functions/date/to_date_type.json", "/test_simple_date.json");
+ }
}
http://git-wip-us.apache.org/repos/asf/drill/blob/17ca6181/exec/java-exec/src/test/java/org/apache/drill/exec/fn/impl/TestNewDateFunctions.java
----------------------------------------------------------------------
diff --git a/exec/java-exec/src/test/java/org/apache/drill/exec/fn/impl/TestNewDateFunctions.java b/exec/java-exec/src/test/java/org/apache/drill/exec/fn/impl/TestNewDateFunctions.java
index 49eb795..e339117 100644
--- a/exec/java-exec/src/test/java/org/apache/drill/exec/fn/impl/TestNewDateFunctions.java
+++ b/exec/java-exec/src/test/java/org/apache/drill/exec/fn/impl/TestNewDateFunctions.java
@@ -26,8 +26,6 @@ import org.joda.time.format.DateTimeFormatter;
import org.junit.Test;
import org.junit.experimental.categories.Category;
-import java.sql.Date;
-
@Category({UnlikelyTest.class, SqlFunctionTest.class})
public class TestNewDateFunctions extends BaseTestQuery {
DateTime date;
@@ -55,9 +53,9 @@ public class TestNewDateFunctions extends BaseTestQuery {
.sqlQuery("select case when isdate(date1) then cast(date1 as date) else null end res1 from " + dateValues)
.unOrdered()
.baselineColumns("res1")
- .baselineValues(new DateTime(Date.valueOf("1900-01-01").getTime()))
- .baselineValues(new DateTime(Date.valueOf("3500-01-01").getTime()))
- .baselineValues(new DateTime(Date.valueOf("2000-12-31").getTime()))
+ .baselineValues(new DateTime(1900, 1, 1, 0, 0))
+ .baselineValues(new DateTime(3500, 1, 1, 0, 0))
+ .baselineValues(new DateTime(2000, 12, 31, 0, 0))
.baselineValues(new Object[] {null})
.baselineValues(new Object[] {null})
.baselineValues(new Object[] {null})
http://git-wip-us.apache.org/repos/asf/drill/blob/17ca6181/exec/java-exec/src/test/java/org/apache/drill/exec/fn/impl/testing/TestDateConversions.java
----------------------------------------------------------------------
diff --git a/exec/java-exec/src/test/java/org/apache/drill/exec/fn/impl/testing/TestDateConversions.java b/exec/java-exec/src/test/java/org/apache/drill/exec/fn/impl/testing/TestDateConversions.java
index 27599c2..4da6db3 100644
--- a/exec/java-exec/src/test/java/org/apache/drill/exec/fn/impl/testing/TestDateConversions.java
+++ b/exec/java-exec/src/test/java/org/apache/drill/exec/fn/impl/testing/TestDateConversions.java
@@ -16,6 +16,7 @@
*/
package org.apache.drill.exec.fn.impl.testing;
+import mockit.integration.junit4.JMockit;
import org.apache.drill.BaseTestQuery;
import org.apache.drill.categories.SqlFunctionTest;
import org.apache.drill.categories.UnlikelyTest;
@@ -24,15 +25,15 @@ import org.junit.AfterClass;
import org.junit.BeforeClass;
import org.junit.Test;
import org.junit.experimental.categories.Category;
-
+import org.junit.runner.RunWith;
import java.io.BufferedWriter;
import java.io.File;
import java.io.FileWriter;
import java.io.IOException;
-
import static org.hamcrest.CoreMatchers.startsWith;
import static org.junit.Assert.assertThat;
+@RunWith(JMockit.class)
@Category({UnlikelyTest.class, SqlFunctionTest.class})
public class TestDateConversions extends BaseTestQuery {
@@ -104,6 +105,8 @@ public class TestDateConversions extends BaseTestQuery {
@Test
public void testJodaTime() throws Exception {
+ mockUsDateFormatSymbols();
+
String query = String.format("SELECT to_time(time1, 'H:m:ss') = "
+ "to_time(time2, 'h:m:ssa') as col1, "
+ "to_time(time1, 'H:m:ss') = "
@@ -121,6 +124,8 @@ public class TestDateConversions extends BaseTestQuery {
@Test
public void testPostgresTime() throws Exception {
+ mockUsDateFormatSymbols();
+
String query = String.format("SELECT sql_to_time(time1, 'HH24:MI:SS') = "
+ "sql_to_time(time2, 'HH12:MI:SSam') as col1, "
+ "sql_to_time(time1, 'HH24:MI:SS') = "
@@ -138,6 +143,8 @@ public class TestDateConversions extends BaseTestQuery {
@Test
public void testPostgresDateTime() throws Exception {
+ mockUsDateFormatSymbols();
+
String query = String.format("SELECT sql_to_timestamp(time1, 'yyyy-DD-MMHH24:MI:SS') = "
+ "sql_to_timestamp(time2, 'DDMMyyyyHH12:MI:SSam') as col1, "
+ "sql_to_timestamp(time1, 'yyyy-DD-MMHH24:MI:SS') = "
@@ -151,11 +158,12 @@ public class TestDateConversions extends BaseTestQuery {
.baselineValues(true, true)
.baselineValues(false, true)
.go();
-
}
@Test
public void testJodaDateTime() throws Exception {
+ mockUsDateFormatSymbols();
+
String query = String.format("SELECT to_timestamp(time1, 'yyyy-dd-MMH:m:ss') = "
+ "to_timestamp(time2, 'ddMMyyyyh:m:ssa') as col1, "
+ "to_timestamp(time1, 'yyyy-dd-MMH:m:ss') = "
@@ -173,6 +181,8 @@ public class TestDateConversions extends BaseTestQuery {
@Test
public void testJodaDateTimeNested() throws Exception {
+ mockUsDateFormatSymbols();
+
String query = String.format("SELECT date_add(to_date(time1, concat('yyyy-dd-MM','H:m:ss')), 22)= "
+ "date_add(to_date(time2, concat('ddMMyyyy', 'h:m:ssa')), 22) as col1, "
+ "date_add(to_date(time1, concat('yyyy-dd-MM', 'H:m:ss')), 22) = "
@@ -186,11 +196,12 @@ public class TestDateConversions extends BaseTestQuery {
.baselineValues(true, true)
.baselineValues(false, true)
.go();
-
}
@Test
public void testPostgresDateTimeNested() throws Exception {
+ mockUsDateFormatSymbols();
+
String query = String.format("SELECT date_add(sql_to_date(time1, concat('yyyy-DD-MM', 'HH24:MI:SS')), 22) = "
+ "date_add(sql_to_date(time2, concat('DDMMyyyy', 'HH12:MI:SSam')), 22) as col1, "
+ "date_add(sql_to_date(time1, concat('yyyy-DD-MM', 'HH24:MI:SS')), 10) = "
http://git-wip-us.apache.org/repos/asf/drill/blob/17ca6181/exec/java-exec/src/test/java/org/apache/drill/exec/fn/interp/TestConstantFolding.java
----------------------------------------------------------------------
diff --git a/exec/java-exec/src/test/java/org/apache/drill/exec/fn/interp/TestConstantFolding.java b/exec/java-exec/src/test/java/org/apache/drill/exec/fn/interp/TestConstantFolding.java
index 5f55d2d..206bf97 100644
--- a/exec/java-exec/src/test/java/org/apache/drill/exec/fn/interp/TestConstantFolding.java
+++ b/exec/java-exec/src/test/java/org/apache/drill/exec/fn/interp/TestConstantFolding.java
@@ -17,10 +17,9 @@
******************************************************************************/
package org.apache.drill.exec.fn.interp;
-import java.io.File;
-import java.io.PrintWriter;
-import java.util.List;
-
+import com.google.common.base.Joiner;
+import com.google.common.collect.Lists;
+import mockit.integration.junit4.JMockit;
import org.apache.drill.PlanTestBase;
import org.apache.drill.categories.SqlTest;
import org.apache.drill.exec.planner.physical.PlannerSettings;
@@ -29,10 +28,12 @@ import org.junit.Rule;
import org.junit.Test;
import org.junit.experimental.categories.Category;
import org.junit.rules.TemporaryFolder;
+import org.junit.runner.RunWith;
+import java.io.File;
+import java.io.PrintWriter;
+import java.util.List;
-import com.google.common.base.Joiner;
-import com.google.common.collect.Lists;
-
+@RunWith(JMockit.class)
@Category(SqlTest.class)
public class TestConstantFolding extends PlanTestBase {
@@ -117,6 +118,8 @@ public class TestConstantFolding extends PlanTestBase {
@Test
public void testConstantFolding_allTypes() throws Exception {
+ mockUsDateFormatSymbols();
+
try {
test("alter session set `store.json.all_text_mode` = true;");
test(String.format("alter session set `%s` = true", PlannerSettings.ENABLE_DECIMAL_DATA_TYPE_KEY));
http://git-wip-us.apache.org/repos/asf/drill/blob/17ca6181/exec/java-exec/src/test/java/org/apache/drill/exec/vector/complex/writer/TestExtendedTypes.java
----------------------------------------------------------------------
diff --git a/exec/java-exec/src/test/java/org/apache/drill/exec/vector/complex/writer/TestExtendedTypes.java b/exec/java-exec/src/test/java/org/apache/drill/exec/vector/complex/writer/TestExtendedTypes.java
index 4d2da29..0710818 100644
--- a/exec/java-exec/src/test/java/org/apache/drill/exec/vector/complex/writer/TestExtendedTypes.java
+++ b/exec/java-exec/src/test/java/org/apache/drill/exec/vector/complex/writer/TestExtendedTypes.java
@@ -17,14 +17,7 @@
*/
package org.apache.drill.exec.vector.complex.writer;
-import static org.junit.Assert.assertEquals;
-
-import java.nio.file.Files;
-import java.nio.file.Paths;
-import java.util.List;
-import java.util.regex.Matcher;
-import java.util.regex.Pattern;
-
+import mockit.integration.junit4.JMockit;
import org.apache.drill.BaseTestQuery;
import org.apache.drill.common.util.TestTools;
import org.apache.drill.exec.ExecConstants;
@@ -32,12 +25,21 @@ import org.apache.drill.exec.rpc.user.QueryDataBatch;
import org.apache.drill.test.OperatorFixture;
import org.junit.Assert;
import org.junit.Test;
+import org.junit.runner.RunWith;
+import java.nio.file.Files;
+import java.nio.file.Paths;
+import java.util.List;
+import java.util.regex.Matcher;
+import java.util.regex.Pattern;
+import static org.junit.Assert.assertEquals;
+@RunWith(JMockit.class)
public class TestExtendedTypes extends BaseTestQuery {
static final org.slf4j.Logger logger = org.slf4j.LoggerFactory.getLogger(TestExtendedTypes.class);
@Test
public void checkReadWriteExtended() throws Exception {
+ mockUtcDateTimeZone();
final String originalFile = "${WORKING_PATH}/src/test/resources/vector/complex/extended.json".replaceAll(
Pattern.quote("${WORKING_PATH}"),
@@ -53,7 +55,6 @@ public class TestExtendedTypes extends BaseTestQuery {
// check query of table.
test("select * from dfs_test.tmp.`%s`", newTable);
-
// check that original file and new file match.
final byte[] originalData = Files.readAllBytes(Paths.get(originalFile));
final byte[] newData = Files.readAllBytes(Paths.get(BaseTestQuery.getDfsTestTmpSchemaLocation() + '/' + newTable
http://git-wip-us.apache.org/repos/asf/drill/blob/17ca6181/logical/src/test/java/org/apache/drill/common/expression/fn/JodaDateValidatorTest.java
----------------------------------------------------------------------
diff --git a/logical/src/test/java/org/apache/drill/common/expression/fn/JodaDateValidatorTest.java b/logical/src/test/java/org/apache/drill/common/expression/fn/JodaDateValidatorTest.java
index 8398bcf..2bd4fd4 100644
--- a/logical/src/test/java/org/apache/drill/common/expression/fn/JodaDateValidatorTest.java
+++ b/logical/src/test/java/org/apache/drill/common/expression/fn/JodaDateValidatorTest.java
@@ -23,9 +23,8 @@ import org.joda.time.format.DateTimeFormatter;
import org.junit.Assert;
import org.junit.BeforeClass;
import org.junit.Test;
-
+import java.util.Locale;
import java.util.Map;
-
import static org.apache.drill.common.expression.fn.JodaDateValidator.toJodaFormat;
import static org.joda.time.DateTime.parse;
import static org.joda.time.format.DateTimeFormat.forPattern;
@@ -125,7 +124,7 @@ public class JodaDateValidatorTest {
DateTime date = parseDateFromPostgres(hours + ":" + minutes + ":" + seconds + " am", "hh12:mi:ss am");
Assert.assertTrue(date.getHourOfDay() == hours &&
date.getMinuteOfHour() == minutes &&
- date.getSecondOfMinute() == Integer.parseInt(seconds));
+ date.getSecondOfMinute() == Integer.parseInt(seconds));
}
@Test
@@ -197,7 +196,7 @@ public class JodaDateValidatorTest {
private DateTime parseDateFromPostgres(String date, String pattern) {
String jodaFormat = toJodaFormat(pattern);
- DateTimeFormatter format = forPattern(jodaFormat);
+ DateTimeFormatter format = forPattern(jodaFormat).withLocale(Locale.US);
return parse(date, format).withZoneRetainFields(DateTimeZone.UTC);
}
}
[04/11] drill git commit: DRILL-5822: The query with "SELECT *" with
"ORDER BY" clause and `planner.slice_target`=1 doesn't preserve column order
Posted by ar...@apache.org.
DRILL-5822: The query with "SELECT *" with "ORDER BY" clause and `planner.slice_target`=1 doesn't preserve column order
- The commit for DRILL-847 is outdated. There is no need to canonicalize the batch or container since RecordBatchLoader
swallows the "schema change" for now if two batches have different column ordering.
closes #1017
Project: http://git-wip-us.apache.org/repos/asf/drill/repo
Commit: http://git-wip-us.apache.org/repos/asf/drill/commit/c1118a3d
Tree: http://git-wip-us.apache.org/repos/asf/drill/tree/c1118a3d
Diff: http://git-wip-us.apache.org/repos/asf/drill/diff/c1118a3d
Branch: refs/heads/master
Commit: c1118a3d9a74cf24f28bc69efca2c21d2a6d5b1d
Parents: 17ca618
Author: Vitalii Diravka <vi...@gmail.com>
Authored: Thu Oct 26 18:07:33 2017 +0000
Committer: Arina Ielchiieva <ar...@gmail.com>
Committed: Mon Nov 13 11:05:29 2017 +0200
----------------------------------------------------------------------
.../exec/physical/impl/TopN/TopNBatch.java | 11 +++------
.../impl/mergereceiver/MergingRecordBatch.java | 7 +-----
.../physical/impl/sort/RecordBatchData.java | 8 +------
.../impl/sort/SortRecordBatchBuilder.java | 6 -----
.../drill/exec/record/RecordBatchLoader.java | 18 --------------
.../drill/exec/record/VectorContainer.java | 25 --------------------
.../java/org/apache/drill/TestStarQueries.java | 17 +++++++++++++
7 files changed, 22 insertions(+), 70 deletions(-)
----------------------------------------------------------------------
http://git-wip-us.apache.org/repos/asf/drill/blob/c1118a3d/exec/java-exec/src/main/java/org/apache/drill/exec/physical/impl/TopN/TopNBatch.java
----------------------------------------------------------------------
diff --git a/exec/java-exec/src/main/java/org/apache/drill/exec/physical/impl/TopN/TopNBatch.java b/exec/java-exec/src/main/java/org/apache/drill/exec/physical/impl/TopN/TopNBatch.java
index 950e1fe..dcf67d4 100644
--- a/exec/java-exec/src/main/java/org/apache/drill/exec/physical/impl/TopN/TopNBatch.java
+++ b/exec/java-exec/src/main/java/org/apache/drill/exec/physical/impl/TopN/TopNBatch.java
@@ -238,7 +238,6 @@ public class TopNBatch extends AbstractRecordBatch<TopN> {
}
boolean success = false;
try {
- batch.canonicalize();
if (priorityQueue == null) {
assert !schemaChanged;
priorityQueue = createNewPriorityQueue(context, config.getOrderings(), new ExpandableHyperContainer(batch.getContainer()), MAIN_MAPPING, LEFT_MAPPING, RIGHT_MAPPING);
@@ -323,7 +322,6 @@ public class TopNBatch extends AbstractRecordBatch<TopN> {
selectionVector4.clear();
c.clear();
VectorContainer newQueue = new VectorContainer();
- builder.canonicalize();
builder.build(context, newQueue);
priorityQueue.resetQueue(newQueue, builder.getSv4().createNewWrapperCurrent());
builder.getSv4().clear();
@@ -414,16 +412,13 @@ public class TopNBatch extends AbstractRecordBatch<TopN> {
selectionVector4.clear();
c.clear();
final VectorContainer oldSchemaContainer = new VectorContainer(oContext);
- builder.canonicalize();
builder.build(context, oldSchemaContainer);
oldSchemaContainer.setRecordCount(builder.getSv4().getCount());
final VectorContainer newSchemaContainer = SchemaUtil.coerceContainer(oldSchemaContainer, this.schema, oContext);
- // Canonicalize new container since we canonicalize incoming batches before adding to queue.
- final VectorContainer canonicalizedContainer = VectorContainer.canonicalize(newSchemaContainer);
- canonicalizedContainer.buildSchema(SelectionVectorMode.FOUR_BYTE);
+ newSchemaContainer.buildSchema(SelectionVectorMode.FOUR_BYTE);
priorityQueue.cleanup();
- priorityQueue = createNewPriorityQueue(context, config.getOrderings(), canonicalizedContainer, MAIN_MAPPING, LEFT_MAPPING, RIGHT_MAPPING);
- priorityQueue.resetQueue(canonicalizedContainer, builder.getSv4().createNewWrapperCurrent());
+ priorityQueue = createNewPriorityQueue(context, config.getOrderings(), newSchemaContainer, MAIN_MAPPING, LEFT_MAPPING, RIGHT_MAPPING);
+ priorityQueue.resetQueue(newSchemaContainer, builder.getSv4().createNewWrapperCurrent());
} finally {
builder.clear();
builder.close();
http://git-wip-us.apache.org/repos/asf/drill/blob/c1118a3d/exec/java-exec/src/main/java/org/apache/drill/exec/physical/impl/mergereceiver/MergingRecordBatch.java
----------------------------------------------------------------------
diff --git a/exec/java-exec/src/main/java/org/apache/drill/exec/physical/impl/mergereceiver/MergingRecordBatch.java b/exec/java-exec/src/main/java/org/apache/drill/exec/physical/impl/mergereceiver/MergingRecordBatch.java
index eff1ae9..ec945d6 100644
--- a/exec/java-exec/src/main/java/org/apache/drill/exec/physical/impl/mergereceiver/MergingRecordBatch.java
+++ b/exec/java-exec/src/main/java/org/apache/drill/exec/physical/impl/mergereceiver/MergingRecordBatch.java
@@ -325,12 +325,8 @@ public class MergingRecordBatch extends AbstractRecordBatch<MergingReceiverPOP>
// after this point all batches have been released and their bytebuf are in batchLoaders
- // Canonicalize each incoming batch, so that vectors are alphabetically sorted based on SchemaPath.
- for (final RecordBatchLoader loader : batchLoaders) {
- loader.canonicalize();
- }
-
// Ensure all the incoming batches have the identical schema.
+ // Note: RecordBatchLoader permutes the columns to obtain the same columns order for all batches.
if (!isSameSchemaAmongBatches(batchLoaders)) {
context.fail(new SchemaChangeException("Incoming batches for merging receiver have different schemas!"));
return IterOutcome.STOP;
@@ -581,7 +577,6 @@ public class MergingRecordBatch extends AbstractRecordBatch<MergingReceiverPOP>
} catch (final IOException e) {
throw new DrillRuntimeException(e);
}
- outgoingContainer = VectorContainer.canonicalize(outgoingContainer);
outgoingContainer.buildSchema(SelectionVectorMode.NONE);
}
http://git-wip-us.apache.org/repos/asf/drill/blob/c1118a3d/exec/java-exec/src/main/java/org/apache/drill/exec/physical/impl/sort/RecordBatchData.java
----------------------------------------------------------------------
diff --git a/exec/java-exec/src/main/java/org/apache/drill/exec/physical/impl/sort/RecordBatchData.java b/exec/java-exec/src/main/java/org/apache/drill/exec/physical/impl/sort/RecordBatchData.java
index 0cd55eb..6de4df6 100644
--- a/exec/java-exec/src/main/java/org/apache/drill/exec/physical/impl/sort/RecordBatchData.java
+++ b/exec/java-exec/src/main/java/org/apache/drill/exec/physical/impl/sort/RecordBatchData.java
@@ -1,4 +1,4 @@
-/**
+/*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
@@ -65,12 +65,6 @@ public class RecordBatchData {
container.buildSchema(batch.getSchema().getSelectionVectorMode());
}
- public void canonicalize() {
- SelectionVectorMode mode = container.getSchema().getSelectionVectorMode();
- container = VectorContainer.canonicalize(container);
- container.buildSchema(mode);
- }
-
public int getRecordCount() {
return recordCount;
}
http://git-wip-us.apache.org/repos/asf/drill/blob/c1118a3d/exec/java-exec/src/main/java/org/apache/drill/exec/physical/impl/sort/SortRecordBatchBuilder.java
----------------------------------------------------------------------
diff --git a/exec/java-exec/src/main/java/org/apache/drill/exec/physical/impl/sort/SortRecordBatchBuilder.java b/exec/java-exec/src/main/java/org/apache/drill/exec/physical/impl/sort/SortRecordBatchBuilder.java
index 999fb04..6b3de25 100644
--- a/exec/java-exec/src/main/java/org/apache/drill/exec/physical/impl/sort/SortRecordBatchBuilder.java
+++ b/exec/java-exec/src/main/java/org/apache/drill/exec/physical/impl/sort/SortRecordBatchBuilder.java
@@ -132,12 +132,6 @@ public class SortRecordBatchBuilder implements AutoCloseable {
recordCount += rbd.getRecordCount();
}
- public void canonicalize() {
- for (RecordBatchData batch : batches.values()) {
- batch.canonicalize();
- }
- }
-
public boolean isEmpty() {
return batches.isEmpty();
}
http://git-wip-us.apache.org/repos/asf/drill/blob/c1118a3d/exec/java-exec/src/main/java/org/apache/drill/exec/record/RecordBatchLoader.java
----------------------------------------------------------------------
diff --git a/exec/java-exec/src/main/java/org/apache/drill/exec/record/RecordBatchLoader.java b/exec/java-exec/src/main/java/org/apache/drill/exec/record/RecordBatchLoader.java
index 20b5cb5..3e6bf64 100644
--- a/exec/java-exec/src/main/java/org/apache/drill/exec/record/RecordBatchLoader.java
+++ b/exec/java-exec/src/main/java/org/apache/drill/exec/record/RecordBatchLoader.java
@@ -270,22 +270,4 @@ public class RecordBatchLoader implements VectorAccessible, Iterable<VectorWrapp
resetRecordCount();
}
- /**
- * Sorts vectors into canonical order (by field name). Updates schema and
- * internal vector container.
- */
- public void canonicalize() {
- //logger.debug( "RecordBatchLoader : before schema " + schema);
- container = VectorContainer.canonicalize(container);
-
- // rebuild the schema.
- SchemaBuilder b = BatchSchema.newBuilder();
- for(final VectorWrapper<?> v : container){
- b.addField(v.getField());
- }
- b.setSelectionVectorMode(BatchSchema.SelectionVectorMode.NONE);
- this.schema = b.build();
-
- //logger.debug( "RecordBatchLoader : after schema " + schema);
- }
}
http://git-wip-us.apache.org/repos/asf/drill/blob/c1118a3d/exec/java-exec/src/main/java/org/apache/drill/exec/record/VectorContainer.java
----------------------------------------------------------------------
diff --git a/exec/java-exec/src/main/java/org/apache/drill/exec/record/VectorContainer.java b/exec/java-exec/src/main/java/org/apache/drill/exec/record/VectorContainer.java
index abcb846..9564f11 100644
--- a/exec/java-exec/src/main/java/org/apache/drill/exec/record/VectorContainer.java
+++ b/exec/java-exec/src/main/java/org/apache/drill/exec/record/VectorContainer.java
@@ -201,31 +201,6 @@ public class VectorContainer implements VectorAccessible {
return vc;
}
- /**
- * Sorts vectors into canonical order (by field name) in new VectorContainer.
- */
- public static VectorContainer canonicalize(VectorContainer original) {
- VectorContainer vc = new VectorContainer();
- List<VectorWrapper<?>> canonicalWrappers = new ArrayList<>(original.wrappers);
- // Sort list of VectorWrapper alphabetically based on SchemaPath.
- Collections.sort(canonicalWrappers, new Comparator<VectorWrapper<?>>() {
- @Override
- public int compare(VectorWrapper<?> v1, VectorWrapper<?> v2) {
- return v1.getField().getName().compareTo(v2.getField().getName());
- }
- });
-
- for (VectorWrapper<?> w : canonicalWrappers) {
- if (w.isHyper()) {
- vc.add(w.getValueVectors());
- } else {
- vc.add(w.getValueVector());
- }
- }
- vc.allocator = original.allocator;
- return vc;
- }
-
private void cloneAndTransfer(VectorWrapper<?> wrapper) {
wrappers.add(wrapper.cloneAndTransfer(getAllocator()));
}
http://git-wip-us.apache.org/repos/asf/drill/blob/c1118a3d/exec/java-exec/src/test/java/org/apache/drill/TestStarQueries.java
----------------------------------------------------------------------
diff --git a/exec/java-exec/src/test/java/org/apache/drill/TestStarQueries.java b/exec/java-exec/src/test/java/org/apache/drill/TestStarQueries.java
index bdb080c..b4ac11f 100644
--- a/exec/java-exec/src/test/java/org/apache/drill/TestStarQueries.java
+++ b/exec/java-exec/src/test/java/org/apache/drill/TestStarQueries.java
@@ -531,4 +531,21 @@ public class TestStarQueries extends BaseTestQuery{
.run();
}
+ @Test // DRILL-5822
+ public void testSchemaForParallelizedStarOrderBy() throws Exception {
+ final String query = "select * from cp.`tpch/region.parquet` order by r_name";
+ final BatchSchema expectedSchema = new SchemaBuilder()
+ .add("r_regionkey", TypeProtos.MinorType.INT)
+ .add("r_name",TypeProtos.MinorType.VARCHAR)
+ .add("r_comment", TypeProtos.MinorType.VARCHAR)
+ .build();
+
+ testBuilder()
+ .sqlQuery(query)
+ .optionSettingQueriesForTestQuery("alter session set `planner.slice_target`=1")
+ .schemaBaseLine(expectedSchema)
+ .build()
+ .run();
+ }
+
}
[02/11] drill git commit: DRILL-5337: OpenTSDB storage plugin
Posted by ar...@apache.org.
DRILL-5337: OpenTSDB storage plugin
closes #774
Project: http://git-wip-us.apache.org/repos/asf/drill/repo
Commit: http://git-wip-us.apache.org/repos/asf/drill/commit/496c97d1
Tree: http://git-wip-us.apache.org/repos/asf/drill/tree/496c97d1
Diff: http://git-wip-us.apache.org/repos/asf/drill/diff/496c97d1
Branch: refs/heads/master
Commit: 496c97d14eb428a5aff74e82d662a0da6930e94f
Parents: 29e0547
Author: Vlad Storona <vs...@cybervisiontech.com>
Authored: Fri Nov 25 20:28:02 2016 +0200
Committer: Arina Ielchiieva <ar...@gmail.com>
Committed: Mon Nov 13 11:04:54 2017 +0200
----------------------------------------------------------------------
contrib/pom.xml | 1 +
contrib/storage-opentsdb/README.md | 69 +++++
contrib/storage-opentsdb/pom.xml | 80 ++++++
.../drill/exec/store/openTSDB/Constants.java | 32 +++
.../exec/store/openTSDB/DrillOpenTSDBTable.java | 81 ++++++
.../store/openTSDB/OpenTSDBBatchCreator.java | 53 ++++
.../exec/store/openTSDB/OpenTSDBGroupScan.java | 169 ++++++++++++
.../store/openTSDB/OpenTSDBRecordReader.java | 258 +++++++++++++++++++
.../exec/store/openTSDB/OpenTSDBScanSpec.java | 42 +++
.../store/openTSDB/OpenTSDBStoragePlugin.java | 77 ++++++
.../openTSDB/OpenTSDBStoragePluginConfig.java | 77 ++++++
.../exec/store/openTSDB/OpenTSDBSubScan.java | 132 ++++++++++
.../apache/drill/exec/store/openTSDB/Util.java | 66 +++++
.../exec/store/openTSDB/client/OpenTSDB.java | 50 ++++
.../store/openTSDB/client/OpenTSDBTypes.java | 28 ++
.../exec/store/openTSDB/client/Schema.java | 124 +++++++++
.../exec/store/openTSDB/client/Service.java | 55 ++++
.../store/openTSDB/client/query/DBQuery.java | 148 +++++++++++
.../exec/store/openTSDB/client/query/Query.java | 187 ++++++++++++++
.../openTSDB/client/services/ServiceImpl.java | 174 +++++++++++++
.../exec/store/openTSDB/dto/ColumnDTO.java | 63 +++++
.../exec/store/openTSDB/dto/MetricDTO.java | 77 ++++++
.../openTSDB/schema/OpenTSDBSchemaFactory.java | 77 ++++++
.../resources/bootstrap-storage-plugins.json | 9 +
.../src/main/resources/drill-module.conf | 21 ++
.../drill/store/openTSDB/TestDataHolder.java | 247 ++++++++++++++++++
.../store/openTSDB/TestOpenTSDBPlugin.java | 189 ++++++++++++++
distribution/pom.xml | 5 +
28 files changed, 2591 insertions(+)
----------------------------------------------------------------------
http://git-wip-us.apache.org/repos/asf/drill/blob/496c97d1/contrib/pom.xml
----------------------------------------------------------------------
diff --git a/contrib/pom.xml b/contrib/pom.xml
index 2014923..d4ad434 100644
--- a/contrib/pom.xml
+++ b/contrib/pom.xml
@@ -38,6 +38,7 @@
<module>storage-mongo</module>
<module>storage-jdbc</module>
<module>storage-kudu</module>
+ <module>storage-opentsdb</module>
<module>sqlline</module>
<module>data</module>
<module>gis</module>
http://git-wip-us.apache.org/repos/asf/drill/blob/496c97d1/contrib/storage-opentsdb/README.md
----------------------------------------------------------------------
diff --git a/contrib/storage-opentsdb/README.md b/contrib/storage-opentsdb/README.md
new file mode 100644
index 0000000..0c616b5
--- /dev/null
+++ b/contrib/storage-opentsdb/README.md
@@ -0,0 +1,69 @@
+# drill-storage-openTSDB
+
+Implementation of an openTSDB storage plugin. The plugin uses the REST API to work with TSDB.
+
+For more information about openTSDB follow this link <http://opentsdb.net>
+
+Here is the list of required params:
+
+* metric - The name of a metric stored in the db.
+
+* start - The start time for the query. This can be a relative or absolute timestamp.
+
+* aggregator - The name of an aggregation function to use.
+
+Optional params are:
+
+* downsample - An optional downsampling function to reduce the amount of data returned.
+
+* end - An end time for the query. If not supplied, the TSD will assume the local system time on the server.
+This may be a relative or absolute timestamp. This param is optional; if it isn't specified, null will be sent
+to the db in this field, and in that case the db will assume the local system time on the server.
+
+List of supported aggregators
+
+<http://opentsdb.net/docs/build/html/user_guide/query/aggregators.html>
+
+List of supported time
+
+<http://opentsdb.net/docs/build/html/user_guide/query/dates.html>
+
+Params must be specified in FROM clause of the query separated by commas. For example
+
+`openTSDB.(metric=metric_name, start=4d-ago, aggregator=sum)`
+
+Supported queries for now are listed below:
+
+```
+USE openTSDB
+```
+
+```
+SHOW tables
+```
+Will print available metrics. The max number of printed results is Integer.MAX_VALUE.
+
+```
+SELECT * FROM openTSDB.`(metric=warp.speed.test, start=47y-ago, aggregator=sum)`
+```
+Return aggregated elements from `warp.speed.test` table since 47y-ago
+
+```
+SELECT * FROM openTSDB.`(metric=warp.speed.test, aggregator=avg, start=47y-ago)`
+```
+Return aggregated elements from `warp.speed.test` table
+
+```
+SELECT `timestamp`, sum(`aggregated value`) FROM openTSDB.`(metric=warp.speed.test, aggregator=avg, start=47y-ago)` GROUP BY `timestamp`
+```
+Return values aggregated and grouped by standard Drill functions from the `warp.speed.test` table, but with the custom aggregator
+
+```
+SELECT * FROM openTSDB.`(metric=warp.speed.test, aggregator=avg, start=47y-ago, downsample=5m-avg)`
+```
+Return aggregated data limited by downsample
+
+```
+SELECT * FROM openTSDB.`(metric=warp.speed.test, aggregator=avg, start=47y-ago, end=1407165403000)`
+```
+Return aggregated data limited by end time
\ No newline at end of file
http://git-wip-us.apache.org/repos/asf/drill/blob/496c97d1/contrib/storage-opentsdb/pom.xml
----------------------------------------------------------------------
diff --git a/contrib/storage-opentsdb/pom.xml b/contrib/storage-opentsdb/pom.xml
new file mode 100644
index 0000000..aff1bfa
--- /dev/null
+++ b/contrib/storage-opentsdb/pom.xml
@@ -0,0 +1,80 @@
+<?xml version="1.0" encoding="UTF-8"?>
+<!--
+ Licensed to the Apache Software Foundation (ASF) under one or more
+ contributor license agreements. See the NOTICE file distributed with
+ this work for additional information regarding copyright ownership.
+ The ASF licenses this file to You under the Apache License, Version 2.0
+ (the "License"); you may not use this file except in compliance with
+ the License. You may obtain a copy of the License at
+
+ http://www.apache.org/licenses/LICENSE-2.0
+
+ Unless required by applicable law or agreed to in writing, software
+ distributed under the License is distributed on an "AS IS" BASIS,
+ WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ See the License for the specific language governing permissions and
+ limitations under the License.
+-->
+<project xmlns="http://maven.apache.org/POM/4.0.0"
+ xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance"
+ xsi:schemaLocation="http://maven.apache.org/POM/4.0.0 http://maven.apache.org/xsd/maven-4.0.0.xsd">
+ <modelVersion>4.0.0</modelVersion>
+ <parent>
+ <artifactId>drill-contrib-parent</artifactId>
+ <groupId>org.apache.drill.contrib</groupId>
+ <version>1.12.0-SNAPSHOT</version>
+ </parent>
+
+ <artifactId>drill-opentsdb-storage</artifactId>
+
+ <name>contrib/opentsdb-storage-plugin</name>
+
+ <dependencies>
+ <dependency>
+ <groupId>org.apache.drill.exec</groupId>
+ <artifactId>drill-java-exec</artifactId>
+ <version>${project.version}</version>
+ </dependency>
+
+ <!-- Test dependencies -->
+ <dependency>
+ <groupId>org.apache.drill.exec</groupId>
+ <artifactId>drill-java-exec</artifactId>
+ <classifier>tests</classifier>
+ <version>${project.version}</version>
+ <scope>test</scope>
+ </dependency>
+
+ <dependency>
+ <groupId>org.apache.drill</groupId>
+ <artifactId>drill-common</artifactId>
+ <classifier>tests</classifier>
+ <version>${project.version}</version>
+ <scope>test</scope>
+ </dependency>
+
+ <dependency>
+ <groupId>com.github.tomakehurst</groupId>
+ <artifactId>wiremock-standalone</artifactId>
+ <version>2.5.1</version>
+ <scope>test</scope>
+ </dependency>
+
+ <dependency>
+ <groupId>com.squareup.retrofit2</groupId>
+ <artifactId>retrofit</artifactId>
+ <version>2.1.0</version>
+ </dependency>
+ <dependency>
+ <groupId>com.squareup.retrofit2</groupId>
+ <artifactId>converter-jackson</artifactId>
+ <version>2.1.0</version>
+ </dependency>
+ <dependency>
+ <groupId>com.madhukaraphatak</groupId>
+ <artifactId>java-sizeof_2.11</artifactId>
+ <version>0.1</version>
+ </dependency>
+ </dependencies>
+
+</project>
http://git-wip-us.apache.org/repos/asf/drill/blob/496c97d1/contrib/storage-opentsdb/src/main/java/org/apache/drill/exec/store/openTSDB/Constants.java
----------------------------------------------------------------------
diff --git a/contrib/storage-opentsdb/src/main/java/org/apache/drill/exec/store/openTSDB/Constants.java b/contrib/storage-opentsdb/src/main/java/org/apache/drill/exec/store/openTSDB/Constants.java
new file mode 100644
index 0000000..c812ff5
--- /dev/null
+++ b/contrib/storage-opentsdb/src/main/java/org/apache/drill/exec/store/openTSDB/Constants.java
@@ -0,0 +1,32 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements. See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership. The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License. You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.apache.drill.exec.store.openTSDB;
+
/**
 * Names of the openTSDB REST API query parameters, together with the
 * default values the plugin substitutes when a query does not supply them.
 *
 * <p>Interface fields are implicitly {@code public static final}, so the
 * redundant modifiers are omitted.</p>
 */
public interface Constants {
  /** Default start time used when a query does not specify one. */
  String DEFAULT_TIME = "47y-ago";
  /** Default aggregation function. */
  String SUM_AGGREGATOR = "sum";

  /** Query start time (relative or absolute timestamp). Required. */
  String TIME_PARAM = "start";
  /** Query end time. Optional; the TSD falls back to server-local time. */
  String END_TIME_PARAM = "end";
  /** Name of the metric (table) to read. Required. */
  String METRIC_PARAM = "metric";
  /** Name of the aggregation function to apply. Required. */
  String AGGREGATOR_PARAM = "aggregator";
  /** Downsampling function to reduce the amount of returned data. Optional. */
  String DOWNSAMPLE_PARAM = "downsample";
}
http://git-wip-us.apache.org/repos/asf/drill/blob/496c97d1/contrib/storage-opentsdb/src/main/java/org/apache/drill/exec/store/openTSDB/DrillOpenTSDBTable.java
----------------------------------------------------------------------
diff --git a/contrib/storage-opentsdb/src/main/java/org/apache/drill/exec/store/openTSDB/DrillOpenTSDBTable.java b/contrib/storage-opentsdb/src/main/java/org/apache/drill/exec/store/openTSDB/DrillOpenTSDBTable.java
new file mode 100644
index 0000000..bdbb670
--- /dev/null
+++ b/contrib/storage-opentsdb/src/main/java/org/apache/drill/exec/store/openTSDB/DrillOpenTSDBTable.java
@@ -0,0 +1,81 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements. See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership. The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License. You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.apache.drill.exec.store.openTSDB;
+
+import com.google.common.collect.Lists;
+import org.apache.calcite.rel.type.RelDataType;
+import org.apache.calcite.rel.type.RelDataTypeFactory;
+import org.apache.calcite.sql.type.SqlTypeName;
+import org.apache.drill.common.exceptions.UserException;
+import org.apache.drill.exec.planner.logical.DynamicDrillTable;
+import org.apache.drill.exec.store.openTSDB.client.OpenTSDBTypes;
+import org.apache.drill.exec.store.openTSDB.client.Schema;
+import org.apache.drill.exec.store.openTSDB.dto.ColumnDTO;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
+
+import java.util.List;
+
+import static org.apache.drill.exec.store.openTSDB.client.OpenTSDBTypes.DOUBLE;
+import static org.apache.drill.exec.store.openTSDB.client.OpenTSDBTypes.STRING;
+import static org.apache.drill.exec.store.openTSDB.client.OpenTSDBTypes.TIMESTAMP;
+
+public class DrillOpenTSDBTable extends DynamicDrillTable {
+
+ private static final Logger log =
+ LoggerFactory.getLogger(DrillOpenTSDBTable.class);
+
+ private final Schema schema;
+
+ public DrillOpenTSDBTable(String storageEngineName, OpenTSDBStoragePlugin plugin, Schema schema, OpenTSDBScanSpec scanSpec) {
+ super(plugin, storageEngineName, scanSpec);
+ this.schema = schema;
+ }
+
+ @Override
+ public RelDataType getRowType(final RelDataTypeFactory typeFactory) {
+ List<String> names = Lists.newArrayList();
+ List<RelDataType> types = Lists.newArrayList();
+ convertToRelDataType(typeFactory, names, types);
+ return typeFactory.createStructType(types, names);
+ }
+
+ private void convertToRelDataType(RelDataTypeFactory typeFactory, List<String> names, List<RelDataType> types) {
+ for (ColumnDTO column : schema.getColumns()) {
+ names.add(column.getColumnName());
+ RelDataType type = getSqlTypeFromOpenTSDBType(typeFactory, column.getColumnType());
+ type = typeFactory.createTypeWithNullability(type, column.isNullable());
+ types.add(type);
+ }
+ }
+
+ private RelDataType getSqlTypeFromOpenTSDBType(RelDataTypeFactory typeFactory, OpenTSDBTypes type) {
+ switch (type) {
+ case STRING:
+ return typeFactory.createSqlType(SqlTypeName.VARCHAR, Integer.MAX_VALUE);
+ case DOUBLE:
+ return typeFactory.createSqlType(SqlTypeName.DOUBLE);
+ case TIMESTAMP:
+ return typeFactory.createSqlType(SqlTypeName.TIMESTAMP);
+ default:
+ throw UserException.unsupportedError()
+ .message(String.format("%s is unsupported now. Currently supported types is %s, %s, %s", type, STRING, DOUBLE, TIMESTAMP))
+ .build(log);
+ }
+ }
+}
http://git-wip-us.apache.org/repos/asf/drill/blob/496c97d1/contrib/storage-opentsdb/src/main/java/org/apache/drill/exec/store/openTSDB/OpenTSDBBatchCreator.java
----------------------------------------------------------------------
diff --git a/contrib/storage-opentsdb/src/main/java/org/apache/drill/exec/store/openTSDB/OpenTSDBBatchCreator.java b/contrib/storage-opentsdb/src/main/java/org/apache/drill/exec/store/openTSDB/OpenTSDBBatchCreator.java
new file mode 100644
index 0000000..935aaa5
--- /dev/null
+++ b/contrib/storage-opentsdb/src/main/java/org/apache/drill/exec/store/openTSDB/OpenTSDBBatchCreator.java
@@ -0,0 +1,53 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements. See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership. The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License. You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.apache.drill.exec.store.openTSDB;
+
+import com.google.common.collect.Lists;
+import org.apache.drill.common.exceptions.ExecutionSetupException;
+import org.apache.drill.common.expression.SchemaPath;
+import org.apache.drill.exec.ops.FragmentContext;
+import org.apache.drill.exec.physical.base.GroupScan;
+import org.apache.drill.exec.physical.impl.BatchCreator;
+import org.apache.drill.exec.physical.impl.ScanBatch;
+import org.apache.drill.exec.record.CloseableRecordBatch;
+import org.apache.drill.exec.record.RecordBatch;
+import org.apache.drill.exec.store.RecordReader;
+
+import java.util.List;
+
+public class OpenTSDBBatchCreator implements BatchCreator<OpenTSDBSubScan> {
+
+ @Override
+ public CloseableRecordBatch getBatch(FragmentContext context, OpenTSDBSubScan subScan,
+ List<RecordBatch> children) throws ExecutionSetupException {
+ List<RecordReader> readers = Lists.newArrayList();
+ List<SchemaPath> columns;
+
+ for (OpenTSDBSubScan.OpenTSDBSubScanSpec scanSpec : subScan.getTabletScanSpecList()) {
+ try {
+ if ((columns = subScan.getColumns()) == null) {
+ columns = GroupScan.ALL_COLUMNS;
+ }
+ readers.add(new OpenTSDBRecordReader(subScan.getStorageEngine().getClient(), scanSpec, columns));
+ } catch (Exception e) {
+ throw new ExecutionSetupException(e);
+ }
+ }
+ return new ScanBatch(subScan, context, readers);
+ }
+}
http://git-wip-us.apache.org/repos/asf/drill/blob/496c97d1/contrib/storage-opentsdb/src/main/java/org/apache/drill/exec/store/openTSDB/OpenTSDBGroupScan.java
----------------------------------------------------------------------
diff --git a/contrib/storage-opentsdb/src/main/java/org/apache/drill/exec/store/openTSDB/OpenTSDBGroupScan.java b/contrib/storage-opentsdb/src/main/java/org/apache/drill/exec/store/openTSDB/OpenTSDBGroupScan.java
new file mode 100644
index 0000000..47c805a
--- /dev/null
+++ b/contrib/storage-opentsdb/src/main/java/org/apache/drill/exec/store/openTSDB/OpenTSDBGroupScan.java
@@ -0,0 +1,169 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements. See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership. The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License. You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.apache.drill.exec.store.openTSDB;
+
+import com.fasterxml.jackson.annotation.JacksonInject;
+import com.fasterxml.jackson.annotation.JsonCreator;
+import com.fasterxml.jackson.annotation.JsonIgnore;
+import com.fasterxml.jackson.annotation.JsonProperty;
+import com.fasterxml.jackson.annotation.JsonTypeName;
+import com.google.common.base.Preconditions;
+import com.google.common.collect.Lists;
+import com.madhukaraphatak.sizeof.SizeEstimator;
+import org.apache.drill.common.exceptions.ExecutionSetupException;
+import org.apache.drill.common.expression.SchemaPath;
+import org.apache.drill.exec.physical.base.AbstractGroupScan;
+import org.apache.drill.exec.physical.base.GroupScan;
+import org.apache.drill.exec.physical.base.PhysicalOperator;
+import org.apache.drill.exec.physical.base.ScanStats;
+import org.apache.drill.exec.proto.CoordinationProtos.DrillbitEndpoint;
+import org.apache.drill.exec.store.StoragePluginRegistry;
+import org.apache.drill.exec.store.openTSDB.OpenTSDBSubScan.OpenTSDBSubScanSpec;
+import org.apache.drill.exec.store.openTSDB.client.services.ServiceImpl;
+import org.apache.drill.exec.store.openTSDB.dto.MetricDTO;
+
+import java.io.IOException;
+import java.util.List;
+import java.util.Map;
+import java.util.Set;
+
+import static org.apache.drill.exec.store.openTSDB.Util.fromRowData;
+
/**
 * Logical (group) scan over an openTSDB "table" — the raw parameter string
 * of the form {@code (metric=..., start=..., aggregator=...)} carried by
 * {@link OpenTSDBScanSpec}. The planner clones this scan for projection
 * pushdown and converts it into an {@link OpenTSDBSubScan} per fragment via
 * {@link #getSpecificScan(int)}. Serialized with Jackson so it can travel
 * through the plan.
 */
@JsonTypeName("openTSDB-scan")
public class OpenTSDBGroupScan extends AbstractGroupScan {

  // Plugin configuration; exposed to Jackson under the "storage" property.
  private OpenTSDBStoragePluginConfig storagePluginConfig;
  // Scan spec holding the raw table-name/parameter string.
  private OpenTSDBScanSpec openTSDBScanSpec;
  // Resolved plugin instance; re-resolved from the registry on deserialization.
  private OpenTSDBStoragePlugin storagePlugin;

  // Projected columns; ALL_COLUMNS when the query selects '*'.
  private List<SchemaPath> columns;

  /**
   * Deserialization constructor used by Jackson: looks the plugin instance
   * up in the injected registry using the serialized config, then delegates
   * to the main constructor.
   */
  @JsonCreator
  public OpenTSDBGroupScan(@JsonProperty("openTSDBScanSpec") OpenTSDBScanSpec openTSDBScanSpec,
                           @JsonProperty("storage") OpenTSDBStoragePluginConfig openTSDBStoragePluginConfig,
                           @JsonProperty("columns") List<SchemaPath> columns,
                           @JacksonInject StoragePluginRegistry pluginRegistry) throws IOException, ExecutionSetupException {
    this((OpenTSDBStoragePlugin) pluginRegistry.getPlugin(openTSDBStoragePluginConfig), openTSDBScanSpec, columns);
  }

  /**
   * Main constructor.
   *
   * @param storagePlugin resolved plugin instance
   * @param scanSpec      spec carrying the table/parameter string
   * @param columns       projected columns; null or empty means all columns
   */
  public OpenTSDBGroupScan(OpenTSDBStoragePlugin storagePlugin,
                           OpenTSDBScanSpec scanSpec, List<SchemaPath> columns) {
    super((String) null);
    this.storagePlugin = storagePlugin;
    this.storagePluginConfig = storagePlugin.getConfig();
    this.openTSDBScanSpec = scanSpec;
    // Treat a missing/empty projection list as "select everything".
    this.columns = columns == null || columns.size() == 0 ? ALL_COLUMNS : columns;
  }

  /**
   * Private constructor, used for cloning.
   *
   * @param that The OpenTSDBGroupScan to clone
   */
  private OpenTSDBGroupScan(OpenTSDBGroupScan that) {
    super((String) null);
    this.columns = that.columns;
    this.openTSDBScanSpec = that.openTSDBScanSpec;
    this.storagePlugin = that.storagePlugin;
    this.storagePluginConfig = that.storagePluginConfig;
  }

  // The scan is never parallelized: a single fragment reads all the data.
  @Override
  public int getMaxParallelizationWidth() {
    return 1;
  }

  // No endpoint affinity to record — the REST client can run on any drillbit.
  @Override
  public void applyAssignments(List<DrillbitEndpoint> incomingEndpoints) {
  }

  /** Builds the single-spec sub-scan executed by the (only) minor fragment. */
  @Override
  public OpenTSDBSubScan getSpecificScan(int minorFragmentId) {
    List<OpenTSDBSubScanSpec> scanSpecList = Lists.newArrayList();
    scanSpecList.add(new OpenTSDBSubScanSpec(getTableName()));
    return new OpenTSDBSubScan(storagePlugin, storagePluginConfig, scanSpecList, this.columns);
  }

  /**
   * Estimates row count and disk cost by fetching all metrics matching the
   * query parameters and multiplying the estimated in-memory size of one
   * result object by the metric count.
   * NOTE(review): this performs a live REST call at planning time — confirm
   * that is acceptable for large result sets.
   */
  @Override
  public ScanStats getScanStats() {
    ServiceImpl client = storagePlugin.getClient();
    Map<String, String> params = fromRowData(openTSDBScanSpec.getTableName());
    Set<MetricDTO> allMetrics = client.getAllMetrics(params);
    long numMetrics = allMetrics.size();
    float approxDiskCost = 0;
    if (numMetrics != 0) {
      MetricDTO metricDTO = allMetrics.iterator().next();
      // This method estimates the sizes of Java objects (number of bytes of memory they occupy).
      // more detailed information about how this estimation method work you can find in this article
      // http://www.javaworld.com/javaworld/javaqa/2003-12/02-qa-1226-sizeof.html
      approxDiskCost = SizeEstimator.estimate(metricDTO) * numMetrics;
    }
    return new ScanStats(ScanStats.GroupScanProperty.EXACT_ROW_COUNT, numMetrics, 1, approxDiskCost);
  }

  // Physical-operator contract: a scan has no children; return a copy of self.
  @Override
  @JsonIgnore
  public PhysicalOperator getNewWithChildren(List<PhysicalOperator> children) {
    Preconditions.checkArgument(children.isEmpty());
    return new OpenTSDBGroupScan(this);
  }

  @Override
  public String getDigest() {
    return toString();
  }

  // Projection pushdown is supported; see clone(List).
  @Override
  @JsonIgnore
  public boolean canPushdownProjects(List<SchemaPath> columns) {
    return true;
  }

  /** Raw table-name/parameter string from the scan spec. Not serialized. */
  @JsonIgnore
  public String getTableName() {
    return getOpenTSDBScanSpec().getTableName();
  }

  @JsonProperty
  public OpenTSDBScanSpec getOpenTSDBScanSpec() {
    return openTSDBScanSpec;
  }

  @JsonProperty("storage")
  public OpenTSDBStoragePluginConfig getStoragePluginConfig() {
    return storagePluginConfig;
  }

  @JsonProperty
  public List<SchemaPath> getColumns() {
    return columns;
  }

  /** Clone with a new projection list; used by projection pushdown. */
  @Override
  public GroupScan clone(List<SchemaPath> columns) {
    OpenTSDBGroupScan newScan = new OpenTSDBGroupScan(this);
    newScan.columns = columns;
    return newScan;
  }

  @Override
  public String toString() {
    return "OpenTSDBGroupScan [OpenTSDBScanSpec=" + openTSDBScanSpec + ", columns=" + columns
        + "]";
  }
}
http://git-wip-us.apache.org/repos/asf/drill/blob/496c97d1/contrib/storage-opentsdb/src/main/java/org/apache/drill/exec/store/openTSDB/OpenTSDBRecordReader.java
----------------------------------------------------------------------
diff --git a/contrib/storage-opentsdb/src/main/java/org/apache/drill/exec/store/openTSDB/OpenTSDBRecordReader.java b/contrib/storage-opentsdb/src/main/java/org/apache/drill/exec/store/openTSDB/OpenTSDBRecordReader.java
new file mode 100644
index 0000000..044c232
--- /dev/null
+++ b/contrib/storage-opentsdb/src/main/java/org/apache/drill/exec/store/openTSDB/OpenTSDBRecordReader.java
@@ -0,0 +1,258 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements. See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership. The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License. You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.apache.drill.exec.store.openTSDB;
+
+import com.google.common.collect.ImmutableList;
+import com.google.common.collect.ImmutableMap;
+import org.apache.drill.common.exceptions.DrillRuntimeException;
+import org.apache.drill.common.exceptions.ExecutionSetupException;
+import org.apache.drill.common.exceptions.UserException;
+import org.apache.drill.common.expression.SchemaPath;
+import org.apache.drill.common.types.TypeProtos;
+import org.apache.drill.common.types.TypeProtos.MajorType;
+import org.apache.drill.common.types.TypeProtos.MinorType;
+import org.apache.drill.common.types.Types;
+import org.apache.drill.exec.exception.SchemaChangeException;
+import org.apache.drill.exec.expr.TypeHelper;
+import org.apache.drill.exec.ops.OperatorContext;
+import org.apache.drill.exec.physical.impl.OutputMutator;
+import org.apache.drill.exec.record.MaterializedField;
+import org.apache.drill.exec.store.AbstractRecordReader;
+import org.apache.drill.exec.store.openTSDB.client.OpenTSDBTypes;
+import org.apache.drill.exec.store.openTSDB.client.Schema;
+import org.apache.drill.exec.store.openTSDB.client.Service;
+import org.apache.drill.exec.store.openTSDB.dto.ColumnDTO;
+import org.apache.drill.exec.store.openTSDB.dto.MetricDTO;
+import org.apache.drill.exec.vector.NullableFloat8Vector;
+import org.apache.drill.exec.vector.NullableTimeStampVector;
+import org.apache.drill.exec.vector.NullableVarCharVector;
+import org.apache.drill.exec.vector.ValueVector;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
+
+import java.io.IOException;
+import java.nio.ByteBuffer;
+import java.util.Iterator;
+import java.util.List;
+import java.util.Map;
+import java.util.Set;
+
+import static java.nio.charset.StandardCharsets.UTF_8;
+import static org.apache.drill.exec.store.openTSDB.Constants.METRIC_PARAM;
+import static org.apache.drill.exec.store.openTSDB.Util.fromRowData;
+
/**
 * Record reader that pulls time-series data from openTSDB through the REST
 * {@link Service} client and copies it into Drill value vectors. One reader
 * handles one sub-scan spec (one metric/parameter string).
 */
public class OpenTSDBRecordReader extends AbstractRecordReader {

  private static final Logger log = LoggerFactory.getLogger(OpenTSDBRecordReader.class);

  // batch size should not exceed max allowed record count
  private static final int TARGET_RECORD_COUNT = 4000;

  // Mapping from openTSDB column types to Drill minor types (see static block).
  private static final Map<OpenTSDBTypes, MinorType> TYPES;

  // REST client used to talk to the db.
  private Service db;

  // Iterator over the metrics returned for this scan; created in setup().
  private Iterator<MetricDTO> tableIterator;
  private OutputMutator output;
  // Lazily-built (vector, column) pairs; null until the first batch is produced.
  private ImmutableList<ProjectedColumnInfo> projectedCols;

  // Query parameters parsed from the sub-scan's table-name string.
  private Map<String, String> params;

  /**
   * @param client           REST client for the db
   * @param subScanSpec      spec carrying the table/parameter string
   * @param projectedColumns columns requested by the query
   */
  public OpenTSDBRecordReader(Service client, OpenTSDBSubScan.OpenTSDBSubScanSpec subScanSpec,
                              List<SchemaPath> projectedColumns) throws IOException {
    setColumns(projectedColumns);
    this.db = client;
    this.params =
        fromRowData(subScanSpec.getTableName());
    log.debug("Scan spec: {}", subScanSpec);
  }

  /**
   * Fetches all metrics matching the query parameters and prepares the
   * iterator that next() drains batch by batch.
   *
   * @throws UserException (validation) if the db returns no metric set
   */
  @Override
  public void setup(OperatorContext context, OutputMutator output) throws ExecutionSetupException {
    this.output = output;
    Set<MetricDTO> metrics =
        db.getAllMetrics(params);
    if (metrics == null) {
      throw UserException.validationError()
          .message(String.format("Table '%s' not found", params.get(METRIC_PARAM)))
          .build(log);
    }
    this.tableIterator = metrics.iterator();
  }

  /** Produces the next batch; returns the number of rows written. */
  @Override
  public int next() {
    try {
      return processOpenTSDBTablesData();
    } catch (SchemaChangeException e) {
      // Record readers cannot propagate checked exceptions from next().
      throw new DrillRuntimeException(e);
    }
  }

  // Nothing to release here; the REST client is owned by the plugin.
  @Override
  public void close() throws Exception {
  }

  static {
    TYPES = ImmutableMap.<OpenTSDBTypes, MinorType>builder()
        .put(OpenTSDBTypes.STRING, MinorType.VARCHAR)
        .put(OpenTSDBTypes.DOUBLE, MinorType.FLOAT8)
        .put(OpenTSDBTypes.TIMESTAMP, MinorType.TIMESTAMP)
        .build();
  }

  /** Pairs a value vector with the openTSDB column it is populated from. */
  private static class ProjectedColumnInfo {
    ValueVector vv;
    ColumnDTO openTSDBColumn;
  }

  /**
   * Drains the metric iterator into vectors until the batch is full
   * (TARGET_RECORD_COUNT) or the data is exhausted. Returns the row count.
   */
  private int processOpenTSDBTablesData() throws SchemaChangeException {
    int rowCounter = 0;
    while (tableIterator.hasNext() && rowCounter < TARGET_RECORD_COUNT) {
      MetricDTO metricDTO = tableIterator.next();
      rowCounter = addRowResult(metricDTO, rowCounter);
    }
    return rowCounter;
  }

  /**
   * Writes one output row per data point (dps entry) of the given metric.
   * Returns the updated row counter.
   */
  private int addRowResult(MetricDTO table, int rowCounter) throws SchemaChangeException {
    setupProjectedColsIfItNull();
    for (String time : table.getDps().keySet()) {
      String value = table.getDps().get(time);
      setupDataToDrillTable(table, time, value, table.getTags(), rowCounter);
      rowCounter++;
    }
    return rowCounter;
  }

  // Lazily builds the projection info on first use; the schema is derived
  // from the metric named in the query parameters.
  private void setupProjectedColsIfItNull() throws SchemaChangeException {
    if (projectedCols == null) {
      initCols(new Schema(db, params.get(METRIC_PARAM)));
    }
  }

  /**
   * Writes one row: well-known columns ("metric", "aggregate tags",
   * "timestamp", "aggregated value") come from the metric/data point; any
   * other column name is treated as a tag lookup.
   */
  private void setupDataToDrillTable(MetricDTO table, String timestamp, String value, Map<String, String> tags, int rowCount) {
    for (ProjectedColumnInfo pci : projectedCols) {
      switch (pci.openTSDBColumn.getColumnName()) {
        case "metric":
          setStringColumnValue(table.getMetric(), pci, rowCount);
          break;
        case "aggregate tags":
          setStringColumnValue(table.getAggregateTags().toString(), pci, rowCount);
          break;
        case "timestamp":
          setTimestampColumnValue(timestamp, pci, rowCount);
          break;
        case "aggregated value":
          setDoubleColumnValue(value, pci, rowCount);
          break;
        default:
          setStringColumnValue(tags.get(pci.openTSDBColumn.getColumnName()), pci, rowCount);
      }
    }
  }

  // A null timestamp string is written as epoch 0 rather than SQL NULL.
  private void setTimestampColumnValue(String timestamp, ProjectedColumnInfo pci, int rowCount) {
    setTimestampColumnValue(timestamp != null ? Long.parseLong(timestamp) : Long.parseLong("0"), pci, rowCount);
  }

  // A null value string is written as 0.0 rather than SQL NULL.
  private void setDoubleColumnValue(String value, ProjectedColumnInfo pci, int rowCount) {
    setDoubleColumnValue(value != null ? Double.parseDouble(value) : 0.0, pci, rowCount);
  }

  // A missing string (e.g. an absent tag) is rendered as the literal "null".
  private void setStringColumnValue(String data, ProjectedColumnInfo pci, int rowCount) {
    if (data == null) {
      data = "null";
    }
    ByteBuffer value = ByteBuffer.wrap(data.getBytes(UTF_8));
    ((NullableVarCharVector.Mutator) pci.vv.getMutator())
        .setSafe(rowCount, value, 0, value.remaining());
  }

  // The * 1000 presumably converts openTSDB epoch-second timestamps to the
  // epoch-millisecond values stored in TIMESTAMP vectors — TODO confirm
  // against the REST response format.
  private void setTimestampColumnValue(Long data, ProjectedColumnInfo pci, int rowCount) {
    ((NullableTimeStampVector.Mutator) pci.vv.getMutator())
        .setSafe(rowCount, data * 1000);
  }

  private void setDoubleColumnValue(Double data, ProjectedColumnInfo pci, int rowCount) {
    ((NullableFloat8Vector.Mutator) pci.vv.getMutator())
        .setSafe(rowCount, data);
  }

  /**
   * Builds the (vector, column) projection list for every column of the
   * schema, allocating one value vector per column through the output
   * mutator.
   *
   * @throws UserException (unsupported) for a column type with no Drill mapping
   */
  private void initCols(Schema schema) throws SchemaChangeException {
    ImmutableList.Builder<ProjectedColumnInfo> pciBuilder = ImmutableList.builder();

    for (int i = 0; i < schema.getColumnCount(); i++) {

      ColumnDTO column = schema.getColumnByIndex(i);
      final String name = column.getColumnName();
      final OpenTSDBTypes type = column.getColumnType();
      TypeProtos.MinorType minorType = TYPES.get(type);

      if (isMinorTypeNull(minorType)) {
        String message = String.format(
            "A column you queried has a data type that is not currently supported by the OpenTSDB storage plugin. "
                + "The column's name was %s and its OpenTSDB data type was %s. ", name, type.toString());
        throw UserException.unsupportedError()
            .message(message)
            .build(log);
      }

      ProjectedColumnInfo pci = getProjectedColumnInfo(column, name, minorType);
      pciBuilder.add(pci);
    }
    projectedCols = pciBuilder.build();
  }

  private boolean isMinorTypeNull(MinorType minorType) {
    return minorType == null;
  }

  /** Creates the materialized field and value vector for one column. */
  private ProjectedColumnInfo getProjectedColumnInfo(ColumnDTO column, String name, MinorType minorType) throws SchemaChangeException {
    MajorType majorType = getMajorType(minorType);

    MaterializedField field =
        MaterializedField.create(name, majorType);

    ValueVector vector =
        getValueVector(minorType, majorType, field);

    return getProjectedColumnInfo(column, vector);
  }

  // All columns are created as OPTIONAL (nullable) types.
  private MajorType getMajorType(MinorType minorType) {
    MajorType majorType;
    majorType = Types.optional(minorType);
    return majorType;
  }

  /** Registers the field with the output mutator and allocates its vector. */
  private ValueVector getValueVector(MinorType minorType, MajorType majorType, MaterializedField field) throws SchemaChangeException {
    final Class<? extends ValueVector> clazz = TypeHelper.getValueVectorClass(
        minorType, majorType.getMode());
    ValueVector vector = output.addField(field, clazz);
    vector.allocateNew();
    return vector;
  }

  private ProjectedColumnInfo getProjectedColumnInfo(ColumnDTO column, ValueVector vector) {
    ProjectedColumnInfo pci = new ProjectedColumnInfo();
    pci.vv = vector;
    pci.openTSDBColumn = column;
    return pci;
  }
}
http://git-wip-us.apache.org/repos/asf/drill/blob/496c97d1/contrib/storage-opentsdb/src/main/java/org/apache/drill/exec/store/openTSDB/OpenTSDBScanSpec.java
----------------------------------------------------------------------
diff --git a/contrib/storage-opentsdb/src/main/java/org/apache/drill/exec/store/openTSDB/OpenTSDBScanSpec.java b/contrib/storage-opentsdb/src/main/java/org/apache/drill/exec/store/openTSDB/OpenTSDBScanSpec.java
new file mode 100644
index 0000000..f93758d
--- /dev/null
+++ b/contrib/storage-opentsdb/src/main/java/org/apache/drill/exec/store/openTSDB/OpenTSDBScanSpec.java
@@ -0,0 +1,42 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements. See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership. The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License. You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.apache.drill.exec.store.openTSDB;
+
+import com.fasterxml.jackson.annotation.JsonCreator;
+import com.fasterxml.jackson.annotation.JsonProperty;
+
+public class OpenTSDBScanSpec {
+
+ private final String tableName;
+
+ @JsonCreator
+ public OpenTSDBScanSpec(@JsonProperty("tableName") String tableName) {
+ this.tableName = tableName;
+ }
+
+ public String getTableName() {
+ return tableName;
+ }
+
+ @Override
+ public String toString() {
+ return "OpenTSDBScanSpec{" +
+ "tableName='" + tableName + '\'' +
+ '}';
+ }
+}
http://git-wip-us.apache.org/repos/asf/drill/blob/496c97d1/contrib/storage-opentsdb/src/main/java/org/apache/drill/exec/store/openTSDB/OpenTSDBStoragePlugin.java
----------------------------------------------------------------------
diff --git a/contrib/storage-opentsdb/src/main/java/org/apache/drill/exec/store/openTSDB/OpenTSDBStoragePlugin.java b/contrib/storage-opentsdb/src/main/java/org/apache/drill/exec/store/openTSDB/OpenTSDBStoragePlugin.java
new file mode 100644
index 0000000..176dff0
--- /dev/null
+++ b/contrib/storage-opentsdb/src/main/java/org/apache/drill/exec/store/openTSDB/OpenTSDBStoragePlugin.java
@@ -0,0 +1,77 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements. See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership. The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License. You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.apache.drill.exec.store.openTSDB;
+
+import com.fasterxml.jackson.core.type.TypeReference;
+import com.fasterxml.jackson.databind.ObjectMapper;
+import org.apache.calcite.schema.SchemaPlus;
+import org.apache.drill.common.JSONOptions;
+import org.apache.drill.exec.server.DrillbitContext;
+import org.apache.drill.exec.store.AbstractStoragePlugin;
+import org.apache.drill.exec.store.SchemaConfig;
+import org.apache.drill.exec.store.openTSDB.client.services.ServiceImpl;
+import org.apache.drill.exec.store.openTSDB.schema.OpenTSDBSchemaFactory;
+
+import java.io.IOException;
+
+public class OpenTSDBStoragePlugin extends AbstractStoragePlugin {
+
+ private final DrillbitContext context;
+
+ private final OpenTSDBStoragePluginConfig engineConfig;
+ private final OpenTSDBSchemaFactory schemaFactory;
+
+ private final ServiceImpl db;
+
+ public OpenTSDBStoragePlugin(OpenTSDBStoragePluginConfig configuration, DrillbitContext context, String name) throws IOException {
+ this.context = context;
+ this.schemaFactory = new OpenTSDBSchemaFactory(this, name);
+ this.engineConfig = configuration;
+ this.db = new ServiceImpl(configuration.getConnection());
+ }
+
+ @Override
+ public boolean supportsRead() {
+ return true;
+ }
+
+ @Override
+ public OpenTSDBStoragePluginConfig getConfig() {
+ return engineConfig;
+ }
+
+ @Override
+ public OpenTSDBGroupScan getPhysicalScan(String userName, JSONOptions selection) throws IOException {
+ OpenTSDBScanSpec scanSpec = selection.getListWith(new ObjectMapper(), new TypeReference<OpenTSDBScanSpec>() {
+ });
+ return new OpenTSDBGroupScan(this, scanSpec, null);
+ }
+
+ @Override
+ public void registerSchemas(SchemaConfig schemaConfig, SchemaPlus parent) throws IOException {
+ schemaFactory.registerSchemas(schemaConfig, parent);
+ }
+
+ public ServiceImpl getClient() {
+ return db;
+ }
+
+ DrillbitContext getContext() {
+ return this.context;
+ }
+}
http://git-wip-us.apache.org/repos/asf/drill/blob/496c97d1/contrib/storage-opentsdb/src/main/java/org/apache/drill/exec/store/openTSDB/OpenTSDBStoragePluginConfig.java
----------------------------------------------------------------------
diff --git a/contrib/storage-opentsdb/src/main/java/org/apache/drill/exec/store/openTSDB/OpenTSDBStoragePluginConfig.java b/contrib/storage-opentsdb/src/main/java/org/apache/drill/exec/store/openTSDB/OpenTSDBStoragePluginConfig.java
new file mode 100644
index 0000000..1b67c1d
--- /dev/null
+++ b/contrib/storage-opentsdb/src/main/java/org/apache/drill/exec/store/openTSDB/OpenTSDBStoragePluginConfig.java
@@ -0,0 +1,77 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements. See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership. The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License. You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.apache.drill.exec.store.openTSDB;
+
+import com.fasterxml.jackson.annotation.JsonCreator;
+import com.fasterxml.jackson.annotation.JsonProperty;
+import com.fasterxml.jackson.annotation.JsonTypeName;
+import org.apache.drill.common.exceptions.UserException;
+import org.apache.drill.common.logical.StoragePluginConfigBase;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
+
+import java.io.IOException;
+import java.util.Objects;
+
+@JsonTypeName(OpenTSDBStoragePluginConfig.NAME)
+public class OpenTSDBStoragePluginConfig extends StoragePluginConfigBase {
+
+ private static final Logger log = LoggerFactory.getLogger(OpenTSDBStoragePluginConfig.class);
+
+ public static final String NAME = "openTSDB";
+
+ private final String connection;
+
+ @JsonCreator
+ public OpenTSDBStoragePluginConfig(@JsonProperty("connection") String connection) throws IOException {
+ if (connection == null || connection.isEmpty()) {
+ throw UserException.validationError()
+ .message("Connection property must not be null. Check plugin configuration.")
+ .build(log);
+ }
+ this.connection = connection;
+ }
+
+ public String getConnection() {
+ return connection;
+ }
+
+ @Override
+ public boolean equals(Object o) {
+ if (this == o) {
+ return true;
+ }
+ if (o == null || getClass() != o.getClass()) {
+ return false;
+ }
+ OpenTSDBStoragePluginConfig that = (OpenTSDBStoragePluginConfig) o;
+ return Objects.equals(connection, that.connection);
+ }
+
+ @Override
+ public int hashCode() {
+ return connection != null ? connection.hashCode() : 0;
+ }
+
+ @Override
+ public String toString() {
+ return "OpenTSDBStoragePluginConfig{" +
+ "connection='" + connection + '\'' +
+ '}';
+ }
+}
http://git-wip-us.apache.org/repos/asf/drill/blob/496c97d1/contrib/storage-opentsdb/src/main/java/org/apache/drill/exec/store/openTSDB/OpenTSDBSubScan.java
----------------------------------------------------------------------
diff --git a/contrib/storage-opentsdb/src/main/java/org/apache/drill/exec/store/openTSDB/OpenTSDBSubScan.java b/contrib/storage-opentsdb/src/main/java/org/apache/drill/exec/store/openTSDB/OpenTSDBSubScan.java
new file mode 100644
index 0000000..4e93804
--- /dev/null
+++ b/contrib/storage-opentsdb/src/main/java/org/apache/drill/exec/store/openTSDB/OpenTSDBSubScan.java
@@ -0,0 +1,132 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements. See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership. The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License. You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.apache.drill.exec.store.openTSDB;
+
+import com.fasterxml.jackson.annotation.JacksonInject;
+import com.fasterxml.jackson.annotation.JsonCreator;
+import com.fasterxml.jackson.annotation.JsonIgnore;
+import com.fasterxml.jackson.annotation.JsonProperty;
+import com.fasterxml.jackson.annotation.JsonTypeName;
+import com.google.common.base.Preconditions;
+import org.apache.drill.common.exceptions.ExecutionSetupException;
+import org.apache.drill.common.expression.SchemaPath;
+import org.apache.drill.exec.physical.base.AbstractBase;
+import org.apache.drill.exec.physical.base.PhysicalOperator;
+import org.apache.drill.exec.physical.base.PhysicalVisitor;
+import org.apache.drill.exec.physical.base.SubScan;
+import org.apache.drill.exec.store.StoragePluginRegistry;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
+
+import java.util.Collections;
+import java.util.Iterator;
+import java.util.LinkedList;
+import java.util.List;
+
/**
 * Sub-scan (minor-fragment) physical operator for the openTSDB storage plugin.
 * Carries the plugin configuration, the per-tablet scan specs and the
 * projected columns; (de)serialized in the physical plan under the type name
 * "openTSDB-sub-scan".
 */
@JsonTypeName("openTSDB-sub-scan")
public class OpenTSDBSubScan extends AbstractBase implements SubScan {

  private static final Logger log =
      LoggerFactory.getLogger(OpenTSDBSubScan.class);

  // Public so Jackson serializes it directly; also exposed via getStorageConfig().
  public final OpenTSDBStoragePluginConfig storage;

  private final List<SchemaPath> columns;
  private final OpenTSDBStoragePlugin openTSDBStoragePlugin;
  private final List<OpenTSDBSubScanSpec> tabletScanSpecList;

  /**
   * Deserialization constructor: resolves the plugin instance from the
   * injected registry using the serialized plugin configuration.
   */
  @JsonCreator
  public OpenTSDBSubScan(@JacksonInject StoragePluginRegistry registry,
                         @JsonProperty("storage") OpenTSDBStoragePluginConfig storage,
                         @JsonProperty("tabletScanSpecList") LinkedList<OpenTSDBSubScanSpec> tabletScanSpecList,
                         @JsonProperty("columns") List<SchemaPath> columns) throws ExecutionSetupException {
    super((String) null);
    openTSDBStoragePlugin = (OpenTSDBStoragePlugin) registry.getPlugin(storage);
    this.tabletScanSpecList = tabletScanSpecList;
    this.storage = storage;
    this.columns = columns;
  }

  /** Programmatic constructor used when the plugin instance is already known. */
  public OpenTSDBSubScan(OpenTSDBStoragePlugin plugin, OpenTSDBStoragePluginConfig config,
                         List<OpenTSDBSubScanSpec> tabletInfoList, List<SchemaPath> columns) {
    super((String) null);
    openTSDBStoragePlugin = plugin;
    storage = config;
    this.tabletScanSpecList = tabletInfoList;
    this.columns = columns;
  }

  // NOTE(review): returns 0, i.e. no dedicated operator-type constant is
  // registered for the openTSDB sub-scan — confirm whether one should be added.
  @Override
  public int getOperatorType() {
    return 0;
  }

  @Override
  public boolean isExecutable() {
    return false;
  }

  /** Copy with new children; a sub-scan is a leaf, so children must be empty. */
  @Override
  public PhysicalOperator getNewWithChildren(List<PhysicalOperator> children) throws ExecutionSetupException {
    Preconditions.checkArgument(children.isEmpty());
    return new OpenTSDBSubScan(openTSDBStoragePlugin, storage, tabletScanSpecList, columns);
  }

  /** Leaf operator: iterates over no child physical operators. */
  @Override
  public Iterator<PhysicalOperator> iterator() {
    return Collections.emptyIterator();
  }

  @Override
  public <T, X, E extends Throwable> T accept(PhysicalVisitor<T, X, E> physicalVisitor, X value) throws E {
    return physicalVisitor.visitSubScan(this, value);
  }

  public List<SchemaPath> getColumns() {
    return columns;
  }

  public List<OpenTSDBSubScanSpec> getTabletScanSpecList() {
    return tabletScanSpecList;
  }

  // Ignored by Jackson: the plugin instance is reconstructed from the config
  // at deserialization time rather than serialized itself.
  @JsonIgnore
  public OpenTSDBStoragePlugin getStorageEngine() {
    return openTSDBStoragePlugin;
  }

  @JsonProperty("storage")
  public OpenTSDBStoragePluginConfig getStorageConfig() {
    return storage;
  }

  /** Scan spec for a single tablet: just the table (metric) name. */
  public static class OpenTSDBSubScanSpec {

    private final String tableName;

    @JsonCreator
    public OpenTSDBSubScanSpec(@JsonProperty("tableName") String tableName) {
      this.tableName = tableName;
    }

    public String getTableName() {
      return tableName;
    }

  }
}
http://git-wip-us.apache.org/repos/asf/drill/blob/496c97d1/contrib/storage-opentsdb/src/main/java/org/apache/drill/exec/store/openTSDB/Util.java
----------------------------------------------------------------------
diff --git a/contrib/storage-opentsdb/src/main/java/org/apache/drill/exec/store/openTSDB/Util.java b/contrib/storage-opentsdb/src/main/java/org/apache/drill/exec/store/openTSDB/Util.java
new file mode 100644
index 0000000..6e0ef05
--- /dev/null
+++ b/contrib/storage-opentsdb/src/main/java/org/apache/drill/exec/store/openTSDB/Util.java
@@ -0,0 +1,66 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements. See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership. The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License. You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.apache.drill.exec.store.openTSDB;
+
+import com.google.common.base.Splitter;
+import org.apache.drill.common.exceptions.UserException;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
+
+import java.util.Map;
+
+public class Util {
+
+ private static final Logger log = LoggerFactory.getLogger(Util.class);
+
+ /**
+ * Parse FROM parameters to Map representation
+ *
+ * @param rowData with this syntax (metric=warp.speed.test)
+ * @return Map with params key: metric, value: warp.speed.test
+ */
+ public static Map<String, String> fromRowData(String rowData) {
+ try {
+ String fromRowData = rowData.replaceAll("[()]", "");
+ return Splitter.on(",").trimResults().omitEmptyStrings().withKeyValueSeparator("=").split(fromRowData);
+ } catch (IllegalArgumentException e) {
+ throw UserException.validationError()
+ .message(String.format("Syntax error in the query %s", rowData))
+ .build(log);
+ }
+ }
+
+ /**
+ * @param name Metric name
+ * @return Valid metric name
+ */
+ public static String getValidTableName(String name) {
+ if (!isTableNameValid(name)) {
+ name = fromRowData(name).get("metric");
+ }
+ return name;
+ }
+
+ /**
+ * @param name Metric name
+ * @return true if name is valid
+ */
+ public static boolean isTableNameValid(String name) {
+ return !name.contains("=");
+ }
+}
http://git-wip-us.apache.org/repos/asf/drill/blob/496c97d1/contrib/storage-opentsdb/src/main/java/org/apache/drill/exec/store/openTSDB/client/OpenTSDB.java
----------------------------------------------------------------------
diff --git a/contrib/storage-opentsdb/src/main/java/org/apache/drill/exec/store/openTSDB/client/OpenTSDB.java b/contrib/storage-opentsdb/src/main/java/org/apache/drill/exec/store/openTSDB/client/OpenTSDB.java
new file mode 100644
index 0000000..1d561c2
--- /dev/null
+++ b/contrib/storage-opentsdb/src/main/java/org/apache/drill/exec/store/openTSDB/client/OpenTSDB.java
@@ -0,0 +1,50 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements. See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership. The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License. You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.apache.drill.exec.store.openTSDB.client;
+
+import org.apache.drill.exec.store.openTSDB.client.query.DBQuery;
+import org.apache.drill.exec.store.openTSDB.dto.MetricDTO;
+import retrofit2.Call;
+import retrofit2.http.Body;
+import retrofit2.http.GET;
+import retrofit2.http.POST;
+
+import java.util.Set;
+
/**
 * Retrofit client for API requests to openTSDB.
 */
public interface OpenTSDB {

  /**
   * Used for getting all metrics names from openTSDB.
   * The "max" query parameter is set to Integer.MAX_VALUE so the suggest
   * endpoint does not truncate the list of metric names.
   *
   * @return Set<String> with all tables names
   */
  @GET("api/suggest?type=metrics&max=" + Integer.MAX_VALUE)
  Call<Set<String>> getAllTablesName();

  /**
   * Overloaded getTables for POST request to DB
   *
   * @param query Query for selecting data
   * @return Set<Table> with metrics from openTSDB
   */
  @POST("api/query")
  Call<Set<MetricDTO>> getTables(@Body DBQuery query);
}
http://git-wip-us.apache.org/repos/asf/drill/blob/496c97d1/contrib/storage-opentsdb/src/main/java/org/apache/drill/exec/store/openTSDB/client/OpenTSDBTypes.java
----------------------------------------------------------------------
diff --git a/contrib/storage-opentsdb/src/main/java/org/apache/drill/exec/store/openTSDB/client/OpenTSDBTypes.java b/contrib/storage-opentsdb/src/main/java/org/apache/drill/exec/store/openTSDB/client/OpenTSDBTypes.java
new file mode 100644
index 0000000..2a6b802
--- /dev/null
+++ b/contrib/storage-opentsdb/src/main/java/org/apache/drill/exec/store/openTSDB/client/OpenTSDBTypes.java
@@ -0,0 +1,28 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements. See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership. The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License. You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.apache.drill.exec.store.openTSDB.client;
+
/**
 * Types in openTSDB records,
 * used for converting openTSDB data to Sql representation
 */
public enum OpenTSDBTypes {
  // Metric names, aggregate tags and tag values (see Schema's fixed columns).
  STRING,
  // Aggregated numeric data-point values.
  DOUBLE,
  // Data-point timestamps.
  TIMESTAMP
}
http://git-wip-us.apache.org/repos/asf/drill/blob/496c97d1/contrib/storage-opentsdb/src/main/java/org/apache/drill/exec/store/openTSDB/client/Schema.java
----------------------------------------------------------------------
diff --git a/contrib/storage-opentsdb/src/main/java/org/apache/drill/exec/store/openTSDB/client/Schema.java b/contrib/storage-opentsdb/src/main/java/org/apache/drill/exec/store/openTSDB/client/Schema.java
new file mode 100644
index 0000000..2c8dc9f
--- /dev/null
+++ b/contrib/storage-opentsdb/src/main/java/org/apache/drill/exec/store/openTSDB/client/Schema.java
@@ -0,0 +1,124 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements. See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership. The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License. You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.apache.drill.exec.store.openTSDB.client;
+
+import org.apache.drill.exec.store.openTSDB.dto.ColumnDTO;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
+
+import java.util.ArrayList;
+import java.util.Collections;
+import java.util.HashMap;
+import java.util.List;
+import java.util.Map;
+
+import static org.apache.drill.exec.store.openTSDB.Constants.AGGREGATOR_PARAM;
+import static org.apache.drill.exec.store.openTSDB.Constants.DEFAULT_TIME;
+import static org.apache.drill.exec.store.openTSDB.Constants.METRIC_PARAM;
+import static org.apache.drill.exec.store.openTSDB.Constants.SUM_AGGREGATOR;
+import static org.apache.drill.exec.store.openTSDB.Constants.TIME_PARAM;
+import static org.apache.drill.exec.store.openTSDB.Util.getValidTableName;
+import static org.apache.drill.exec.store.openTSDB.client.Schema.DefaultColumns.AGGREGATED_VALUE;
+import static org.apache.drill.exec.store.openTSDB.client.Schema.DefaultColumns.AGGREGATE_TAGS;
+import static org.apache.drill.exec.store.openTSDB.client.Schema.DefaultColumns.METRIC;
+import static org.apache.drill.exec.store.openTSDB.client.Schema.DefaultColumns.TIMESTAMP;
+
+/**
+ * Abstraction for representing structure of openTSDB table
+ */
+public class Schema {
+
+ private static final Logger log =
+ LoggerFactory.getLogger(Schema.class);
+
+ private final List<ColumnDTO> columns = new ArrayList<>();
+ private final Service db;
+ private final String name;
+
+ public Schema(Service db, String name) {
+ this.db = db;
+ this.name = name;
+ setupStructure();
+ }
+
+ private void setupStructure() {
+ columns.add(new ColumnDTO(METRIC.toString(), OpenTSDBTypes.STRING));
+ columns.add(new ColumnDTO(AGGREGATE_TAGS.toString(), OpenTSDBTypes.STRING));
+ columns.add(new ColumnDTO(TIMESTAMP.toString(), OpenTSDBTypes.TIMESTAMP));
+ columns.add(new ColumnDTO(AGGREGATED_VALUE.toString(), OpenTSDBTypes.DOUBLE));
+ columns.addAll(db.getUnfixedColumns(getParamsForQuery()));
+ }
+
+ /**
+ * Return list with all columns names and its types
+ *
+ * @return List<ColumnDTO>
+ */
+ public List<ColumnDTO> getColumns() {
+ return Collections.unmodifiableList(columns);
+ }
+
+ /**
+ * Number of columns in table
+ *
+ * @return number of table columns
+ */
+ public int getColumnCount() {
+ return columns.size();
+ }
+
+ /**
+ * @param columnIndex index of required column in table
+ * @return ColumnDTO
+ */
+ public ColumnDTO getColumnByIndex(int columnIndex) {
+ return columns.get(columnIndex);
+ }
+
+ // Create map with required params, for querying metrics.
+ // Without this params, we cannot make API request to db.
+ private Map<String, String> getParamsForQuery() {
+ HashMap<String, String> params = new HashMap<>();
+ params.put(METRIC_PARAM, getValidTableName(name));
+ params.put(AGGREGATOR_PARAM, SUM_AGGREGATOR);
+ params.put(TIME_PARAM, DEFAULT_TIME);
+ return params;
+ }
+
+ /**
+ * Structure with constant openTSDB columns
+ */
+ enum DefaultColumns {
+
+ METRIC("metric"),
+ TIMESTAMP("timestamp"),
+ AGGREGATE_TAGS("aggregate tags"),
+ AGGREGATED_VALUE("aggregated value");
+
+ private String columnName;
+
+ DefaultColumns(String name) {
+ this.columnName = name;
+ }
+
+ @Override
+ public String toString() {
+ return columnName;
+ }
+ }
+}
http://git-wip-us.apache.org/repos/asf/drill/blob/496c97d1/contrib/storage-opentsdb/src/main/java/org/apache/drill/exec/store/openTSDB/client/Service.java
----------------------------------------------------------------------
diff --git a/contrib/storage-opentsdb/src/main/java/org/apache/drill/exec/store/openTSDB/client/Service.java b/contrib/storage-opentsdb/src/main/java/org/apache/drill/exec/store/openTSDB/client/Service.java
new file mode 100644
index 0000000..0be7394
--- /dev/null
+++ b/contrib/storage-opentsdb/src/main/java/org/apache/drill/exec/store/openTSDB/client/Service.java
@@ -0,0 +1,55 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements. See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership. The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License. You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.apache.drill.exec.store.openTSDB.client;
+
+import org.apache.drill.exec.store.openTSDB.dto.ColumnDTO;
+import org.apache.drill.exec.store.openTSDB.dto.MetricDTO;
+
+import java.util.List;
+import java.util.Map;
+import java.util.Set;
+
/**
 * Facade over the openTSDB REST API used by the storage plugin.
 */
public interface Service {
  /**
   *
   * Used for getting all Metrics from openTSDB.
   * The required params metric, start and aggregator must be present in queryParam.
   *
   * @param queryParam parameters for the API request
   * @return Set<MetricDTO> all metrics
   */
  Set<MetricDTO> getAllMetrics(Map<String, String> queryParam);

  /**
   *
   * Used for getting all metric names from openTSDB
   *
   * @return Set<String> metric names
   */
  Set<String> getAllMetricNames();

  /**
   *
   * Used for getting all non-fixed (tag-based) columns from openTSDB.
   * The required params metric, start and aggregator must be present in queryParam.
   *
   * @param queryParam parameters for the API request
   * @return List<ColumnDTO> columns based on tags
   */
  List<ColumnDTO> getUnfixedColumns(Map<String, String> queryParam);
}
http://git-wip-us.apache.org/repos/asf/drill/blob/496c97d1/contrib/storage-opentsdb/src/main/java/org/apache/drill/exec/store/openTSDB/client/query/DBQuery.java
----------------------------------------------------------------------
diff --git a/contrib/storage-opentsdb/src/main/java/org/apache/drill/exec/store/openTSDB/client/query/DBQuery.java b/contrib/storage-opentsdb/src/main/java/org/apache/drill/exec/store/openTSDB/client/query/DBQuery.java
new file mode 100644
index 0000000..e79d0ce
--- /dev/null
+++ b/contrib/storage-opentsdb/src/main/java/org/apache/drill/exec/store/openTSDB/client/query/DBQuery.java
@@ -0,0 +1,148 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements. See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership. The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License. You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.apache.drill.exec.store.openTSDB.client.query;
+
import org.apache.drill.common.exceptions.UserException;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;

import java.util.HashSet;
import java.util.Objects;
import java.util.Set;
+
+/**
+ * DBQuery is an abstraction of an openTSDB query,
+ * that used for extracting data from the storage system by POST request to DB.
+ * <p>
+ * An OpenTSDB query requires at least one sub query,
+ * a means of selecting which time series should be included in the result set.
+ */
+public class DBQuery {
+
+ private static final Logger log =
+ LoggerFactory.getLogger(DBQuery.class);
+ /**
+ * The start time for the query. This can be a relative or absolute timestamp.
+ */
+ private String start;
+ /**
+ * An end time for the query. If not supplied, the TSD will assume the local system time on the server.
+ * This may be a relative or absolute timestamp. This param is optional, and if it isn't specified we will send null
+ * to the db in this field, but in this case db will assume the local system time on the server.
+ */
+ private String end;
+ /**
+ * One or more sub subQueries used to select the time series to return.
+ */
+ private Set<Query> queries;
+
+ private DBQuery(Builder builder) {
+ this.start = builder.start;
+ this.end = builder.end;
+ this.queries = builder.queries;
+ }
+
+ public String getStart() {
+ return start;
+ }
+
+ public String getEnd() {
+ return end;
+ }
+
+ public Set<Query> getQueries() {
+ return queries;
+ }
+
+ public static class Builder {
+
+ private String start;
+ private String end;
+ private Set<Query> queries = new HashSet<>();
+
+ public Builder() {
+ }
+
+ public Builder setStartTime(String startTime) {
+ if (startTime == null) {
+ throw UserException.validationError()
+ .message("start param must be specified")
+ .build(log);
+ }
+ this.start = startTime;
+ return this;
+ }
+
+ public Builder setEndTime(String endTime) {
+ this.end = endTime;
+ return this;
+ }
+
+ public Builder setQueries(Set<Query> queries) {
+ if (queries.isEmpty()) {
+ throw UserException.validationError()
+ .message("Required params such as metric, aggregator weren't specified. " +
+ "Add these params to the query")
+ .build(log);
+ }
+ this.queries = queries;
+ return this;
+ }
+
+ public DBQuery build() {
+ return new DBQuery(this);
+ }
+
+ }
+
+ @Override
+ public boolean equals(Object o) {
+ if (this == o) {
+ return true;
+ }
+ if (o == null || getClass() != o.getClass()) {
+ return false;
+ }
+
+ DBQuery dbQuery = (DBQuery) o;
+
+ if (!start.equals(dbQuery.start)) {
+ return false;
+ }
+ if (!end.equals(dbQuery.end)) {
+ return false;
+ }
+ return queries.equals(dbQuery.queries);
+ }
+
+ @Override
+ public int hashCode() {
+ int result = start.hashCode();
+ result = 31 * result + end.hashCode();
+ result = 31 * result + queries.hashCode();
+ return result;
+ }
+
+ @Override
+ public String toString() {
+ return "DBQuery{" +
+ "start='" + start + '\'' +
+ ", end='" + end + '\'' +
+ ", queries=" + queries +
+ '}';
+ }
+}
http://git-wip-us.apache.org/repos/asf/drill/blob/496c97d1/contrib/storage-opentsdb/src/main/java/org/apache/drill/exec/store/openTSDB/client/query/Query.java
----------------------------------------------------------------------
diff --git a/contrib/storage-opentsdb/src/main/java/org/apache/drill/exec/store/openTSDB/client/query/Query.java b/contrib/storage-opentsdb/src/main/java/org/apache/drill/exec/store/openTSDB/client/query/Query.java
new file mode 100644
index 0000000..bdcd1c4
--- /dev/null
+++ b/contrib/storage-opentsdb/src/main/java/org/apache/drill/exec/store/openTSDB/client/query/Query.java
@@ -0,0 +1,187 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements. See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership. The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License. You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.apache.drill.exec.store.openTSDB.client.query;
+
import org.apache.drill.common.exceptions.UserException;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;

import java.util.HashMap;
import java.util.Map;
import java.util.Objects;
+
+/**
+ * Query is an abstraction of openTSDB subQuery
+ * and it is integral part of DBQuery
+ * <p>
+ * Each sub query can retrieve individual or groups of timeseries data,
+ * performing aggregation on each set.
+ */
+public class Query {
+ private static final Logger log =
+ LoggerFactory.getLogger(Query.class);
+ /**
+ * The name of an aggregation function to use.
+ */
+ private String aggregator;
+ /**
+ * The name of a metric stored in the system
+ */
+ private String metric;
+ /**
+ * Whether or not the data should be converted into deltas before returning.
+ * This is useful if the metric is a continuously incrementing counter
+ * and you want to view the rate of change between data points.
+ */
+ private String rate;
+ /**
+ * An optional downsampling function to reduce the amount of data returned.
+ */
+ private String downsample;
+ /**
+ * To drill down to specific timeseries or group results by tag,
+ * supply one or more map values in the same format as the query string.
+ */
+ private Map<String, String> tags;
+
+ private Query(Builder builder) {
+ this.aggregator = builder.aggregator;
+ this.metric = builder.metric;
+ this.rate = builder.rate;
+ this.downsample = builder.downsample;
+ this.tags = builder.tags;
+ }
+
+ public String getAggregator() {
+ return aggregator;
+ }
+
+ public String getMetric() {
+ return metric;
+ }
+
+ public String getRate() {
+ return rate;
+ }
+
+ public String getDownsample() {
+ return downsample;
+ }
+
+ public Map<String, String> getTags() {
+ return tags;
+ }
+
+ public static class Builder {
+
+ private String aggregator;
+ private String metric;
+ private String rate;
+ private String downsample;
+ private Map<String, String> tags = new HashMap<>();
+
+ public Builder(String metric) {
+ this.metric = metric;
+ }
+
+ public Builder setAggregator(String aggregator) {
+ if (aggregator == null) {
+ throw UserException.validationError()
+ .message("aggregator param must be specified")
+ .build(log);
+ }
+ this.aggregator = aggregator;
+ return this;
+ }
+
+ public Builder setMetric(String metric) {
+ if (metric == null) {
+ throw UserException.validationError()
+ .message("metric param must be specified")
+ .build(log);
+ }
+ this.metric = metric;
+ return this;
+ }
+
+ public Builder setRate(String rate) {
+ this.rate = rate;
+ return this;
+ }
+
+ public Builder setDownsample(String downsample) {
+ this.downsample = downsample;
+ return this;
+ }
+
+ public Builder setTags(Map<String, String> tags) {
+ this.tags = tags;
+ return this;
+ }
+
+ public Query build() {
+ return new Query(this);
+ }
+ }
+
+ @Override
+ public boolean equals(Object o) {
+ if (this == o) {
+ return true;
+ }
+ if (o == null || getClass() != o.getClass()) {
+ return false;
+ }
+
+ Query subQuery = (Query) o;
+
+ if (aggregator != null ? !aggregator.equals(subQuery.aggregator) : subQuery.aggregator != null) {
+ return false;
+ }
+ if (metric != null ? !metric.equals(subQuery.metric) : subQuery.metric != null) {
+ return false;
+ }
+ if (rate != null ? !rate.equals(subQuery.rate) : subQuery.rate != null) {
+ return false;
+ }
+ if (downsample != null ? !downsample.equals(subQuery.downsample) : subQuery.downsample != null) {
+ return false;
+ }
+ return tags != null ? tags.equals(subQuery.tags) : subQuery.tags == null;
+ }
+
+ @Override
+ public int hashCode() {
+ int result = aggregator != null ? aggregator.hashCode() : 0;
+ result = 31 * result + (metric != null ? metric.hashCode() : 0);
+ result = 31 * result + (rate != null ? rate.hashCode() : 0);
+ result = 31 * result + (downsample != null ? downsample.hashCode() : 0);
+ result = 31 * result + (tags != null ? tags.hashCode() : 0);
+ return result;
+ }
+
+ @Override
+ public String toString() {
+ return "SubQuery{" +
+ "aggregator='" + aggregator + '\'' +
+ ", metric='" + metric + '\'' +
+ ", rate='" + rate + '\'' +
+ ", downsample='" + downsample + '\'' +
+ ", tags=" + tags +
+ '}';
+ }
+}
http://git-wip-us.apache.org/repos/asf/drill/blob/496c97d1/contrib/storage-opentsdb/src/main/java/org/apache/drill/exec/store/openTSDB/client/services/ServiceImpl.java
----------------------------------------------------------------------
diff --git a/contrib/storage-opentsdb/src/main/java/org/apache/drill/exec/store/openTSDB/client/services/ServiceImpl.java b/contrib/storage-opentsdb/src/main/java/org/apache/drill/exec/store/openTSDB/client/services/ServiceImpl.java
new file mode 100644
index 0000000..41730bd
--- /dev/null
+++ b/contrib/storage-opentsdb/src/main/java/org/apache/drill/exec/store/openTSDB/client/services/ServiceImpl.java
@@ -0,0 +1,174 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements. See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership. The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License. You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.apache.drill.exec.store.openTSDB.client.services;
+
+import org.apache.drill.common.exceptions.UserException;
+import org.apache.drill.exec.store.openTSDB.client.OpenTSDB;
+import org.apache.drill.exec.store.openTSDB.client.OpenTSDBTypes;
+import org.apache.drill.exec.store.openTSDB.client.Service;
+import org.apache.drill.exec.store.openTSDB.client.query.DBQuery;
+import org.apache.drill.exec.store.openTSDB.client.query.Query;
+import org.apache.drill.exec.store.openTSDB.dto.ColumnDTO;
+import org.apache.drill.exec.store.openTSDB.dto.MetricDTO;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
+import retrofit2.Retrofit;
+import retrofit2.converter.jackson.JacksonConverterFactory;
+
+import java.io.IOException;
+import java.util.ArrayList;
+
+import java.util.HashMap;
+import java.util.HashSet;
+import java.util.List;
+import java.util.Map;
+import java.util.Set;
+
+import static org.apache.drill.exec.store.openTSDB.Constants.AGGREGATOR_PARAM;
+import static org.apache.drill.exec.store.openTSDB.Constants.DOWNSAMPLE_PARAM;
+import static org.apache.drill.exec.store.openTSDB.Constants.END_TIME_PARAM;
+import static org.apache.drill.exec.store.openTSDB.Constants.METRIC_PARAM;
+import static org.apache.drill.exec.store.openTSDB.Constants.TIME_PARAM;
+
+public class ServiceImpl implements Service {
+
+ private static final Logger log =
+ LoggerFactory.getLogger(ServiceImpl.class);
+
+ private final OpenTSDB client;
+
+ public ServiceImpl(String connectionURL) {
+ this.client = new Retrofit.Builder()
+ .baseUrl(connectionURL)
+ .addConverterFactory(JacksonConverterFactory.create())
+ .build()
+ .create(OpenTSDB.class);
+ }
+
+ @Override
+ public Set<MetricDTO> getAllMetrics(Map<String, String> queryParams) {
+ return getAllMetricsByTags(queryParams);
+ }
+
+ @Override
+ public Set<String> getAllMetricNames() {
+ return getTableNames();
+ }
+
+ @Override
+ public List<ColumnDTO> getUnfixedColumns(Map<String, String> queryParam) {
+ Set<MetricDTO> metrics = getAllMetricsByTags(queryParam);
+ List<ColumnDTO> unfixedColumns = new ArrayList<>();
+
+ for (MetricDTO metric : metrics) {
+ for (String tag : metric.getTags().keySet()) {
+ ColumnDTO tmp = new ColumnDTO(tag, OpenTSDBTypes.STRING);
+ if (!unfixedColumns.contains(tmp)) {
+ unfixedColumns.add(tmp);
+ }
+ }
+ }
+ return unfixedColumns;
+ }
+
+ private Set<MetricDTO> getAllMetricsByTags(Map<String, String> queryParams) {
+ try {
+ return getAllMetricsFromDBByTags(queryParams);
+ } catch (IOException e) {
+ throw UserException.connectionError(e)
+ .message("Cannot connect to the db. " +
+ "Maybe you have incorrect connection params or db unavailable now")
+ .build(log);
+ }
+ }
+
+ private Set<String> getTableNames() {
+ try {
+ return client.getAllTablesName().execute().body();
+ } catch (IOException e) {
+ throw UserException.connectionError(e)
+ .message("Cannot connect to the db. " +
+ "Maybe you have incorrect connection params or db unavailable now")
+ .build(log);
+ }
+ }
+
+ private Set<MetricDTO> getMetricsByTags(DBQuery base) throws IOException {
+ return client.getTables(base).execute().body();
+ }
+
+ private Set<MetricDTO> getAllMetricsFromDBByTags(Map<String, String> queryParams) throws IOException {
+ Map<String, String> tags = new HashMap<>();
+ DBQuery baseQuery = getConfiguredDbQuery(tags, queryParams);
+
+ Set<MetricDTO> metrics = getBaseMetric(baseQuery);
+ if (metrics == null || metrics.isEmpty()) {
+ throw UserException.validationError()
+ .message(String.format("Table '%s' not found. Please check your query and params", queryParams.get(METRIC_PARAM)))
+ .build(log);
+ }
+ Set<String> extractedTags = getTagsFromMetrics(metrics);
+
+ return getMetricsByTags(extractedTags, queryParams);
+ }
+
+ private Set<MetricDTO> getMetricsByTags(Set<String> extractedTags, Map<String, String> queryParams) throws IOException {
+ Set<MetricDTO> metrics = new HashSet<>();
+ for (String value : extractedTags) {
+ metrics.addAll(getMetricsByTags(getConfiguredDbQuery(getTransformedTag(value), queryParams)));
+ }
+ return metrics;
+ }
+
+ private DBQuery getConfiguredDbQuery(Map<String, String> tags, Map<String, String> queryParams) {
+ Query subQuery = new Query.Builder(queryParams.get(METRIC_PARAM))
+ .setAggregator(queryParams.get(AGGREGATOR_PARAM))
+ .setDownsample(queryParams.get(DOWNSAMPLE_PARAM))
+ .setTags(tags).build();
+
+ Set<Query> queries = new HashSet<>();
+ queries.add(subQuery);
+
+ return new DBQuery.Builder()
+ .setStartTime(queryParams.get(TIME_PARAM))
+ .setEndTime(queryParams.get(END_TIME_PARAM))
+ .setQueries(queries)
+ .build();
+ }
+
+ private Set<MetricDTO> getBaseMetric(DBQuery base) throws IOException {
+ return getMetricsByTags(base);
+ }
+
+ private Set<String> getTagsFromMetrics(Set<MetricDTO> metrics) {
+ Set<String> extractedTags = new HashSet<>();
+
+ for (MetricDTO table : metrics) {
+ extractedTags.addAll(table.getAggregateTags());
+ extractedTags.addAll(table.getTags().keySet());
+ }
+
+ return extractedTags;
+ }
+
+ private Map<String, String> getTransformedTag(String tag) {
+ Map<String, String> tags = new HashMap<>();
+ tags.put(tag, "*");
+ return tags;
+ }
+}
http://git-wip-us.apache.org/repos/asf/drill/blob/496c97d1/contrib/storage-opentsdb/src/main/java/org/apache/drill/exec/store/openTSDB/dto/ColumnDTO.java
----------------------------------------------------------------------
diff --git a/contrib/storage-opentsdb/src/main/java/org/apache/drill/exec/store/openTSDB/dto/ColumnDTO.java b/contrib/storage-opentsdb/src/main/java/org/apache/drill/exec/store/openTSDB/dto/ColumnDTO.java
new file mode 100644
index 0000000..03c5952
--- /dev/null
+++ b/contrib/storage-opentsdb/src/main/java/org/apache/drill/exec/store/openTSDB/dto/ColumnDTO.java
@@ -0,0 +1,63 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements. See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership. The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License. You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.apache.drill.exec.store.openTSDB.dto;
+
+import org.apache.drill.exec.store.openTSDB.client.OpenTSDBTypes;
+
+import java.util.Objects;
+
+public class ColumnDTO {
+
+ private final String columnName;
+ private final OpenTSDBTypes columnType;
+
+ public ColumnDTO(String columnName, OpenTSDBTypes columnType) {
+ this.columnName = columnName;
+ this.columnType = columnType;
+ }
+
+ public String getColumnName() {
+ return columnName;
+ }
+
+ public OpenTSDBTypes getColumnType() {
+ return columnType;
+ }
+
+ public boolean isNullable() {
+ return true;
+ }
+
+ @Override
+ public boolean equals(Object o) {
+ if (this == o) {
+ return true;
+ }
+ if (o == null || getClass() != o.getClass()) {
+ return false;
+ }
+ ColumnDTO columnDTO = (ColumnDTO) o;
+ return Objects.equals(columnName, columnDTO.columnName) &&
+ columnType == columnDTO.columnType;
+ }
+
+ @Override
+ public int hashCode() {
+ return Objects.hash(columnName, columnType);
+ }
+}
http://git-wip-us.apache.org/repos/asf/drill/blob/496c97d1/contrib/storage-opentsdb/src/main/java/org/apache/drill/exec/store/openTSDB/dto/MetricDTO.java
----------------------------------------------------------------------
diff --git a/contrib/storage-opentsdb/src/main/java/org/apache/drill/exec/store/openTSDB/dto/MetricDTO.java b/contrib/storage-opentsdb/src/main/java/org/apache/drill/exec/store/openTSDB/dto/MetricDTO.java
new file mode 100644
index 0000000..7e6285f
--- /dev/null
+++ b/contrib/storage-opentsdb/src/main/java/org/apache/drill/exec/store/openTSDB/dto/MetricDTO.java
@@ -0,0 +1,77 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements. See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership. The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License. You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.apache.drill.exec.store.openTSDB.dto;
+
+import java.util.List;
+import java.util.Map;
+import java.util.Objects;
+
+public class MetricDTO {
+
+ private String metric;
+ private Map<String, String> tags;
+ private List<String> aggregateTags;
+ private Map<String, String> dps;
+
+ public String getMetric() {
+ return metric;
+ }
+
+ public Map<String, String> getTags() {
+ return tags;
+ }
+
+ public List<String> getAggregateTags() {
+ return aggregateTags;
+ }
+
+ public Map<String, String> getDps() {
+ return dps;
+ }
+
+
+ @Override
+ public boolean equals(Object o) {
+ if (this == o) {
+ return true;
+ }
+ if (o == null || getClass() != o.getClass()) {
+ return false;
+ }
+ MetricDTO metricDTO = (MetricDTO) o;
+ return Objects.equals(metric, metricDTO.metric) &&
+ Objects.equals(tags, metricDTO.tags) &&
+ Objects.equals(aggregateTags, metricDTO.aggregateTags) &&
+ Objects.equals(dps, metricDTO.dps);
+ }
+
+ @Override
+ public int hashCode() {
+ return Objects.hash(metric, tags, aggregateTags, dps);
+ }
+
+ @Override
+ public String toString() {
+ return "Table{" +
+ "metric='" + metric + '\'' +
+ ", tags=" + tags +
+ ", aggregateTags=" + aggregateTags +
+ ", dps=" + dps +
+ '}';
+ }
+}
[10/11] drill git commit: DRILL-5923: Display name for query state
Posted by ar...@apache.org.
DRILL-5923: Display name for query state
closes #1021
Project: http://git-wip-us.apache.org/repos/asf/drill/repo
Commit: http://git-wip-us.apache.org/repos/asf/drill/commit/30da051b
Tree: http://git-wip-us.apache.org/repos/asf/drill/tree/30da051b
Diff: http://git-wip-us.apache.org/repos/asf/drill/diff/30da051b
Branch: refs/heads/master
Commit: 30da051b968926b7ead388b7df56c402dbdd5cb8
Parents: ed6c4bc
Author: Prasad Nagaraj Subramanya <pr...@gmail.com>
Authored: Thu Nov 9 15:00:15 2017 -0800
Committer: Arina Ielchiieva <ar...@gmail.com>
Committed: Mon Nov 13 11:45:20 2017 +0200
----------------------------------------------------------------------
.../server/rest/profile/ProfileResources.java | 8 ++--
.../exec/server/rest/profile/ProfileUtil.java | 48 ++++++++++++++++++++
.../server/rest/profile/ProfileWrapper.java | 4 ++
.../src/main/resources/rest/profile/profile.ftl | 3 +-
4 files changed, 59 insertions(+), 4 deletions(-)
----------------------------------------------------------------------
http://git-wip-us.apache.org/repos/asf/drill/blob/30da051b/exec/java-exec/src/main/java/org/apache/drill/exec/server/rest/profile/ProfileResources.java
----------------------------------------------------------------------
diff --git a/exec/java-exec/src/main/java/org/apache/drill/exec/server/rest/profile/ProfileResources.java b/exec/java-exec/src/main/java/org/apache/drill/exec/server/rest/profile/ProfileResources.java
index 875c96e..14056b0 100644
--- a/exec/java-exec/src/main/java/org/apache/drill/exec/server/rest/profile/ProfileResources.java
+++ b/exec/java-exec/src/main/java/org/apache/drill/exec/server/rest/profile/ProfileResources.java
@@ -63,7 +63,7 @@ import com.google.common.collect.Lists;
@Path("/")
@RolesAllowed(DrillUserPrincipal.AUTHENTICATED_ROLE)
public class ProfileResources {
- static final org.slf4j.Logger logger = org.slf4j.LoggerFactory.getLogger(ProfileResources.class);
+ private static final org.slf4j.Logger logger = org.slf4j.LoggerFactory.getLogger(ProfileResources.class);
@Inject UserAuthEnabled authEnabled;
@Inject WorkManager work;
@@ -217,7 +217,8 @@ public class ProfileResources {
runningQueries.add(
new ProfileInfo(work.getContext().getConfig(),
runningEntry.getKey(), profile.getStart(), System.currentTimeMillis(),
- profile.getForeman().getAddress(), profile.getQuery(), profile.getState().name(),
+ profile.getForeman().getAddress(), profile.getQuery(),
+ ProfileUtil.getQueryStateDisplayName(profile.getState()),
profile.getUser(), profile.getTotalCost(), profile.getQueueName()));
}
} catch (Exception e) {
@@ -247,7 +248,8 @@ public class ProfileResources {
finishedQueries.add(
new ProfileInfo(work.getContext().getConfig(),
profileEntry.getKey(), profile.getStart(), profile.getEnd(),
- profile.getForeman().getAddress(), profile.getQuery(), profile.getState().name(),
+ profile.getForeman().getAddress(), profile.getQuery(),
+ ProfileUtil.getQueryStateDisplayName(profile.getState()),
profile.getUser(), profile.getTotalCost(), profile.getQueueName()));
}
} catch (Exception e) {
http://git-wip-us.apache.org/repos/asf/drill/blob/30da051b/exec/java-exec/src/main/java/org/apache/drill/exec/server/rest/profile/ProfileUtil.java
----------------------------------------------------------------------
diff --git a/exec/java-exec/src/main/java/org/apache/drill/exec/server/rest/profile/ProfileUtil.java b/exec/java-exec/src/main/java/org/apache/drill/exec/server/rest/profile/ProfileUtil.java
new file mode 100644
index 0000000..cfc7977
--- /dev/null
+++ b/exec/java-exec/src/main/java/org/apache/drill/exec/server/rest/profile/ProfileUtil.java
@@ -0,0 +1,48 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements. See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership. The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License. You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.apache.drill.exec.server.rest.profile;
+
+import org.apache.drill.exec.proto.UserBitShared.QueryResult.QueryState;
+
+public class ProfileUtil {
+ // Display names for QueryState enum in UserBitShared.proto
+ private static final String[] queryStateDisplayNames = {
+ "Starting", // STARTING = 0
+ "Running", // RUNNING = 1
+ "Succeeded", // COMPLETED = 2
+ "Canceled", // CANCELED = 3
+ "Failed", // FAILED = 4
+ "CancellationRequested", // CANCELLATION_REQUESTED = 5
+ "Enqueued" // ENQUEUED = 6
+ };
+
+
+ /**
+ * Utility to return display name for query state
+ * @param queryState
+ * @return display string for query state
+ */
+ public final static String getQueryStateDisplayName(QueryState queryState) {
+ int queryStateOrdinal = queryState.getNumber();
+ if (queryStateOrdinal >= queryStateDisplayNames.length) {
+ return queryState.name();
+ } else {
+ return queryStateDisplayNames[queryStateOrdinal];
+ }
+ }
+}
http://git-wip-us.apache.org/repos/asf/drill/blob/30da051b/exec/java-exec/src/main/java/org/apache/drill/exec/server/rest/profile/ProfileWrapper.java
----------------------------------------------------------------------
diff --git a/exec/java-exec/src/main/java/org/apache/drill/exec/server/rest/profile/ProfileWrapper.java b/exec/java-exec/src/main/java/org/apache/drill/exec/server/rest/profile/ProfileWrapper.java
index ef9ccc3..3a7d432 100644
--- a/exec/java-exec/src/main/java/org/apache/drill/exec/server/rest/profile/ProfileWrapper.java
+++ b/exec/java-exec/src/main/java/org/apache/drill/exec/server/rest/profile/ProfileWrapper.java
@@ -148,6 +148,10 @@ public class ProfileWrapper {
return id;
}
+ public String getQueryStateDisplayName() {
+ return ProfileUtil.getQueryStateDisplayName(profile.getState());
+ }
+
public String getPlanningDuration() {
//Check if Planning End is known
if (profile.getPlanEnd() > 0L) {
http://git-wip-us.apache.org/repos/asf/drill/blob/30da051b/exec/java-exec/src/main/resources/rest/profile/profile.ftl
----------------------------------------------------------------------
diff --git a/exec/java-exec/src/main/resources/rest/profile/profile.ftl b/exec/java-exec/src/main/resources/rest/profile/profile.ftl
index 889e17a..ff78da3 100644
--- a/exec/java-exec/src/main/resources/rest/profile/profile.ftl
+++ b/exec/java-exec/src/main/resources/rest/profile/profile.ftl
@@ -135,6 +135,7 @@ table.sortable thead .sorting_desc { background-image: url("/static/img/black-de
<#assign queueName = model.getProfile().getQueueName() />
<#assign queued = queueName != "" && queueName != "-" />
+
<div class="page-header"></div>
<h3>Query Profile</h3>
<div class="panel-group" id="query-profile-accordion">
@@ -162,7 +163,7 @@ table.sortable thead .sorting_desc { background-image: url("/static/img/black-de
</thead>
<tbody>
<tr>
- <td>${model.getProfile().getState().name()}</td>
+ <td>${model.getQueryStateDisplayName()}</td>
<td>${model.getProfile().getForeman().getAddress()}</td>
<td>${model.getProfile().getTotalFragments()}</td>
<#if queued>
[06/11] drill git commit: DRILL-5896: Handle HBase columns vector
creation in the HBaseRecordReader
Posted by ar...@apache.org.
DRILL-5896: Handle HBase columns vector creation in the HBaseRecordReader
closes #1005
Project: http://git-wip-us.apache.org/repos/asf/drill/repo
Commit: http://git-wip-us.apache.org/repos/asf/drill/commit/dfd43d02
Tree: http://git-wip-us.apache.org/repos/asf/drill/tree/dfd43d02
Diff: http://git-wip-us.apache.org/repos/asf/drill/diff/dfd43d02
Branch: refs/heads/master
Commit: dfd43d020498c09dcb2c3fed4e8c6df23d755d55
Parents: 59c7447
Author: Prasad Nagaraj Subramanya <pr...@gmail.com>
Authored: Wed Oct 25 21:20:07 2017 -0700
Committer: Arina Ielchiieva <ar...@gmail.com>
Committed: Mon Nov 13 11:06:20 2017 +0200
----------------------------------------------------------------------
.../exec/store/hbase/HBaseRecordReader.java | 34 +++++++++++++-------
1 file changed, 23 insertions(+), 11 deletions(-)
----------------------------------------------------------------------
http://git-wip-us.apache.org/repos/asf/drill/blob/dfd43d02/contrib/storage-hbase/src/main/java/org/apache/drill/exec/store/hbase/HBaseRecordReader.java
----------------------------------------------------------------------
diff --git a/contrib/storage-hbase/src/main/java/org/apache/drill/exec/store/hbase/HBaseRecordReader.java b/contrib/storage-hbase/src/main/java/org/apache/drill/exec/store/hbase/HBaseRecordReader.java
index 4e822df..631c44d 100644
--- a/contrib/storage-hbase/src/main/java/org/apache/drill/exec/store/hbase/HBaseRecordReader.java
+++ b/contrib/storage-hbase/src/main/java/org/apache/drill/exec/store/hbase/HBaseRecordReader.java
@@ -75,6 +75,9 @@ public class HBaseRecordReader extends AbstractRecordReader implements DrillHBas
private TableName hbaseTableName;
private Scan hbaseScan;
+ // scan instance to capture columns for vector creation
+ private Scan hbaseScanColumnsOnly;
+ private Set<String> completeFamilies;
private OperatorContext operatorContext;
private boolean rowKeyOnly;
@@ -87,6 +90,7 @@ public class HBaseRecordReader extends AbstractRecordReader implements DrillHBas
hbaseTableName = TableName.valueOf(
Preconditions.checkNotNull(subScanSpec, "HBase reader needs a sub-scan spec").getTableName());
hbaseScan = new Scan(subScanSpec.getStartRow(), subScanSpec.getStopRow());
+ hbaseScanColumnsOnly = new Scan();
hbaseScan
.setFilter(subScanSpec.getScanFilter())
.setCaching(TARGET_RECORD_COUNT);
@@ -108,7 +112,8 @@ public class HBaseRecordReader extends AbstractRecordReader implements DrillHBas
@Override
protected Collection<SchemaPath> transformColumns(Collection<SchemaPath> columns) {
Set<SchemaPath> transformed = Sets.newLinkedHashSet();
- Set<String> completeFamilies = Sets.newHashSet();
+ completeFamilies = Sets.newHashSet();
+
rowKeyOnly = true;
if (!isStarQuery()) {
for (SchemaPath column : columns) {
@@ -121,16 +126,18 @@ public class HBaseRecordReader extends AbstractRecordReader implements DrillHBas
byte[] family = root.getPath().getBytes();
transformed.add(SchemaPath.getSimplePath(root.getPath()));
PathSegment child = root.getChild();
- if (!completeFamilies.contains(new String(family, StandardCharsets.UTF_8).toLowerCase())) {
- if (child != null && child.isNamed()) {
- byte[] qualifier = child.getNameSegment().getPath().getBytes();
+ if (child != null && child.isNamed()) {
+ byte[] qualifier = child.getNameSegment().getPath().getBytes();
+ hbaseScanColumnsOnly.addColumn(family, qualifier);
+ if (!completeFamilies.contains(root.getPath())) {
hbaseScan.addColumn(family, qualifier);
- } else {
- hbaseScan.addFamily(family);
- completeFamilies.add(new String(family, StandardCharsets.UTF_8).toLowerCase());
}
+ } else {
+ hbaseScan.addFamily(family);
+ completeFamilies.add(root.getPath());
}
}
+
/* if only the row key was requested, add a FirstKeyOnlyFilter to the scan
* to fetch only one KV from each row. If a filter is already part of this
* scan, add the FirstKeyOnlyFilter as the LAST filter of a MUST_PASS_ALL
@@ -168,11 +175,10 @@ public class HBaseRecordReader extends AbstractRecordReader implements DrillHBas
}
}
- // Add map and child vectors for any HBase column families and/or HBase
- // columns that are requested (in order to avoid later creation of dummy
- // NullableIntVectors for them).
+ // Add map and child vectors for any HBase columns that are requested (in
+ // order to avoid later creation of dummy NullableIntVectors for them).
final Set<Map.Entry<byte[], NavigableSet<byte []>>> familiesEntries =
- hbaseScan.getFamilyMap().entrySet();
+ hbaseScanColumnsOnly.getFamilyMap().entrySet();
for (Map.Entry<byte[], NavigableSet<byte []>> familyEntry : familiesEntries) {
final String familyName = new String(familyEntry.getKey(),
StandardCharsets.UTF_8);
@@ -186,6 +192,12 @@ public class HBaseRecordReader extends AbstractRecordReader implements DrillHBas
}
}
}
+
+ // Add map vectors for any HBase column families that are requested.
+ for (String familyName : completeFamilies) {
+ getOrCreateFamilyVector(familyName, false);
+ }
+
resultScanner = hTable.getScanner(hbaseScan);
} catch (SchemaChangeException | IOException e) {
throw new ExecutionSetupException(e);
[07/11] drill git commit: DRILL-5899: Simple pattern matchers can
work with DrillBuf directly
Posted by ar...@apache.org.
DRILL-5899: Simple pattern matchers can work with DrillBuf directly
closes #1015
Project: http://git-wip-us.apache.org/repos/asf/drill/repo
Commit: http://git-wip-us.apache.org/repos/asf/drill/commit/df95709a
Tree: http://git-wip-us.apache.org/repos/asf/drill/tree/df95709a
Diff: http://git-wip-us.apache.org/repos/asf/drill/diff/df95709a
Branch: refs/heads/master
Commit: df95709a2dac47e46be59c47dc776efbbe615726
Parents: dfd43d0
Author: Padma Penumarthy <pp...@yahoo.com>
Authored: Wed Oct 25 13:37:25 2017 -0700
Committer: Arina Ielchiieva <ar...@gmail.com>
Committed: Mon Nov 13 11:06:34 2017 +0200
----------------------------------------------------------------------
.../expr/fn/impl/AbstractSqlPatternMatcher.java | 66 ++
.../expr/fn/impl/SqlPatternComplexMatcher.java | 18 +-
.../expr/fn/impl/SqlPatternConstantMatcher.java | 29 +-
.../expr/fn/impl/SqlPatternContainsMatcher.java | 59 +-
.../expr/fn/impl/SqlPatternEndsWithMatcher.java | 35 +-
.../exec/expr/fn/impl/SqlPatternFactory.java | 15 +-
.../exec/expr/fn/impl/SqlPatternMatcher.java | 4 +-
.../fn/impl/SqlPatternStartsWithMatcher.java | 32 +-
.../exec/expr/fn/impl/StringFunctions.java | 16 +-
.../exec/expr/fn/impl/TestSqlPatterns.java | 729 +++++++------------
.../exec/expr/fn/impl/TestStringFunctions.java | 149 ++++
11 files changed, 574 insertions(+), 578 deletions(-)
----------------------------------------------------------------------
http://git-wip-us.apache.org/repos/asf/drill/blob/df95709a/exec/java-exec/src/main/java/org/apache/drill/exec/expr/fn/impl/AbstractSqlPatternMatcher.java
----------------------------------------------------------------------
diff --git a/exec/java-exec/src/main/java/org/apache/drill/exec/expr/fn/impl/AbstractSqlPatternMatcher.java b/exec/java-exec/src/main/java/org/apache/drill/exec/expr/fn/impl/AbstractSqlPatternMatcher.java
new file mode 100644
index 0000000..245d585
--- /dev/null
+++ b/exec/java-exec/src/main/java/org/apache/drill/exec/expr/fn/impl/AbstractSqlPatternMatcher.java
@@ -0,0 +1,66 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements. See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership. The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License. You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.apache.drill.exec.expr.fn.impl;
+
+import com.google.common.base.Charsets;
+import org.apache.drill.common.exceptions.UserException;
+import java.nio.ByteBuffer;
+import java.nio.CharBuffer;
+import java.nio.charset.CharacterCodingException;
+import java.nio.charset.CharsetEncoder;
+import static org.apache.drill.exec.expr.fn.impl.StringFunctionHelpers.logger;
+
+/**
+ * To get good performance for most commonly used pattern matches
+ * CONSTANT('ABC') {@link SqlPatternConstantMatcher}
+ * STARTSWITH('ABC%') {@link SqlPatternStartsWithMatcher}
+ * ENDSWITH('%ABC') {@link SqlPatternEndsWithMatcher}
+ * CONTAINS('%ABC%') {@link SqlPatternContainsMatcher}
+ * we have simple pattern matchers.
+ * Idea is to have our own implementation for simple pattern matchers so we can
+ * avoid heavy weight regex processing, skip UTF-8 decoding and char conversion.
+ * Instead, we encode the pattern string and do byte comparison against native memory.
+ * Overall, this approach
+ * gives us orders of magnitude performance improvement for simple pattern matches.
+ * Anything that is not simple is considered
+ * complex pattern and we use Java regex for complex pattern matches.
+ */
+
+public abstract class AbstractSqlPatternMatcher implements SqlPatternMatcher {
+ protected final String patternString;
+ protected final int patternLength;
+ protected final ByteBuffer patternByteBuffer;
+
+ public AbstractSqlPatternMatcher(String patternString) {
+ this.patternString = patternString;
+
+ final CharsetEncoder charsetEncoder = Charsets.UTF_8.newEncoder();
+ final CharBuffer patternCharBuffer = CharBuffer.wrap(patternString);
+
+ try {
+ patternByteBuffer = charsetEncoder.encode(patternCharBuffer);
+ } catch (CharacterCodingException e) {
+ throw UserException.validationError(e)
+ .message("Failure to encode pattern %s using UTF-8", patternString)
+ .addContext("Message: ", e.getMessage())
+ .build(logger);
+ }
+ patternLength = patternByteBuffer.limit();
+ }
+
+}
http://git-wip-us.apache.org/repos/asf/drill/blob/df95709a/exec/java-exec/src/main/java/org/apache/drill/exec/expr/fn/impl/SqlPatternComplexMatcher.java
----------------------------------------------------------------------
diff --git a/exec/java-exec/src/main/java/org/apache/drill/exec/expr/fn/impl/SqlPatternComplexMatcher.java b/exec/java-exec/src/main/java/org/apache/drill/exec/expr/fn/impl/SqlPatternComplexMatcher.java
index 91cc85d..abe46ab 100644
--- a/exec/java-exec/src/main/java/org/apache/drill/exec/expr/fn/impl/SqlPatternComplexMatcher.java
+++ b/exec/java-exec/src/main/java/org/apache/drill/exec/expr/fn/impl/SqlPatternComplexMatcher.java
@@ -17,18 +17,22 @@
*/
package org.apache.drill.exec.expr.fn.impl;
+import io.netty.buffer.DrillBuf;
+import java.util.regex.Pattern;
+import java.util.regex.Matcher;
+
public class SqlPatternComplexMatcher implements SqlPatternMatcher {
- java.util.regex.Matcher matcher;
- CharSequence charSequenceWrapper;
+ private final Matcher matcher;
+ private final CharSequenceWrapper charSequenceWrapper;
- public SqlPatternComplexMatcher(String patternString, CharSequence charSequenceWrapper) {
- this.charSequenceWrapper = charSequenceWrapper;
- matcher = java.util.regex.Pattern.compile(patternString).matcher("");
- matcher.reset(charSequenceWrapper);
+ public SqlPatternComplexMatcher(String patternString) {
+ charSequenceWrapper = new CharSequenceWrapper();
+ matcher = Pattern.compile(patternString).matcher(charSequenceWrapper);
}
@Override
- public int match() {
+ public int match(int start, int end, DrillBuf drillBuf) {
+ charSequenceWrapper.setBuffer(start, end, drillBuf);
matcher.reset();
return matcher.matches() ? 1 : 0;
}
http://git-wip-us.apache.org/repos/asf/drill/blob/df95709a/exec/java-exec/src/main/java/org/apache/drill/exec/expr/fn/impl/SqlPatternConstantMatcher.java
----------------------------------------------------------------------
diff --git a/exec/java-exec/src/main/java/org/apache/drill/exec/expr/fn/impl/SqlPatternConstantMatcher.java b/exec/java-exec/src/main/java/org/apache/drill/exec/expr/fn/impl/SqlPatternConstantMatcher.java
index 3294575..d0e8aca 100644
--- a/exec/java-exec/src/main/java/org/apache/drill/exec/expr/fn/impl/SqlPatternConstantMatcher.java
+++ b/exec/java-exec/src/main/java/org/apache/drill/exec/expr/fn/impl/SqlPatternConstantMatcher.java
@@ -17,36 +17,31 @@
*/
package org.apache.drill.exec.expr.fn.impl;
-public class SqlPatternConstantMatcher implements SqlPatternMatcher {
- final String patternString;
- CharSequence charSequenceWrapper;
- final int patternLength;
-
- public SqlPatternConstantMatcher(String patternString, CharSequence charSequenceWrapper) {
- this.patternString = patternString;
- this.charSequenceWrapper = charSequenceWrapper;
- patternLength = patternString.length();
+import io.netty.buffer.DrillBuf;
+
+public class SqlPatternConstantMatcher extends AbstractSqlPatternMatcher {
+
+ public SqlPatternConstantMatcher(String patternString) {
+ super(patternString);
}
@Override
- public int match() {
- int index = 0;
+ public int match(int start, int end, DrillBuf drillBuf) {
// If the lengths are not same, there cannot be a match
- if (patternLength != charSequenceWrapper.length()) {
+ if (patternLength != (end - start)) {
return 0;
}
// simplePattern string has meta characters i.e % and _ and escape characters removed.
// so, we can just directly compare.
- while (index < patternLength) {
- if (patternString.charAt(index) != charSequenceWrapper.charAt(index)) {
- break;
+ for (int index = 0; index < patternLength; index++) {
+ if (patternByteBuffer.get(index) != drillBuf.getByte(start + index)) {
+ return 0;
}
- index++;
}
- return index == patternLength ? 1 : 0;
+ return 1;
}
}
http://git-wip-us.apache.org/repos/asf/drill/blob/df95709a/exec/java-exec/src/main/java/org/apache/drill/exec/expr/fn/impl/SqlPatternContainsMatcher.java
----------------------------------------------------------------------
diff --git a/exec/java-exec/src/main/java/org/apache/drill/exec/expr/fn/impl/SqlPatternContainsMatcher.java b/exec/java-exec/src/main/java/org/apache/drill/exec/expr/fn/impl/SqlPatternContainsMatcher.java
index 2602dc8..04f5dac 100644
--- a/exec/java-exec/src/main/java/org/apache/drill/exec/expr/fn/impl/SqlPatternContainsMatcher.java
+++ b/exec/java-exec/src/main/java/org/apache/drill/exec/expr/fn/impl/SqlPatternContainsMatcher.java
@@ -17,37 +17,46 @@
*/
package org.apache.drill.exec.expr.fn.impl;
-public class SqlPatternContainsMatcher implements SqlPatternMatcher {
- final String patternString;
- CharSequence charSequenceWrapper;
- final int patternLength;
-
- public SqlPatternContainsMatcher(String patternString, CharSequence charSequenceWrapper) {
- this.patternString = patternString;
- this.charSequenceWrapper = charSequenceWrapper;
- patternLength = patternString.length();
+import io.netty.buffer.DrillBuf;
+
+public class SqlPatternContainsMatcher extends AbstractSqlPatternMatcher {
+
+ public SqlPatternContainsMatcher(String patternString) {
+ super(patternString);
}
@Override
- public int match() {
- final int txtLength = charSequenceWrapper.length();
- int patternIndex = 0;
- int txtIndex = 0;
-
- // simplePattern string has meta characters i.e % and _ and escape characters removed.
- // so, we can just directly compare.
- while (patternIndex < patternLength && txtIndex < txtLength) {
- if (patternString.charAt(patternIndex) != charSequenceWrapper.charAt(txtIndex)) {
- // Go back if there is no match
- txtIndex = txtIndex - patternIndex;
- patternIndex = 0;
- } else {
- patternIndex++;
+ public int match(int start, int end, DrillBuf drillBuf) {
+
+ if (patternLength == 0) { // Everything should match for null pattern string
+ return 1;
+ }
+
+ final int txtLength = end - start;
+
+ // no match if input string length is less than pattern length
+ if (txtLength < patternLength) {
+ return 0;
+ }
+
+
+ final int outerEnd = txtLength - patternLength;
+
+ outer:
+ for (int txtIndex = 0; txtIndex <= outerEnd; txtIndex++) {
+
+ // simplePattern string has meta characters i.e % and _ and escape characters removed.
+ // so, we can just directly compare.
+ for (int patternIndex = 0; patternIndex < patternLength; patternIndex++) {
+ if (patternByteBuffer.get(patternIndex) != drillBuf.getByte(start + txtIndex + patternIndex)) {
+ continue outer;
+ }
}
- txtIndex++;
+
+ return 1;
}
- return patternIndex == patternLength ? 1 : 0;
+ return 0;
}
}
http://git-wip-us.apache.org/repos/asf/drill/blob/df95709a/exec/java-exec/src/main/java/org/apache/drill/exec/expr/fn/impl/SqlPatternEndsWithMatcher.java
----------------------------------------------------------------------
diff --git a/exec/java-exec/src/main/java/org/apache/drill/exec/expr/fn/impl/SqlPatternEndsWithMatcher.java b/exec/java-exec/src/main/java/org/apache/drill/exec/expr/fn/impl/SqlPatternEndsWithMatcher.java
index 15fed22..7c83c91 100644
--- a/exec/java-exec/src/main/java/org/apache/drill/exec/expr/fn/impl/SqlPatternEndsWithMatcher.java
+++ b/exec/java-exec/src/main/java/org/apache/drill/exec/expr/fn/impl/SqlPatternEndsWithMatcher.java
@@ -17,33 +17,32 @@
*/
package org.apache.drill.exec.expr.fn.impl;
-public class SqlPatternEndsWithMatcher implements SqlPatternMatcher {
- final String patternString;
- CharSequence charSequenceWrapper;
- final int patternLength;
-
- public SqlPatternEndsWithMatcher(String patternString, CharSequence charSequenceWrapper) {
- this.charSequenceWrapper = charSequenceWrapper;
- this.patternString = patternString;
- this.patternLength = patternString.length();
+import io.netty.buffer.DrillBuf;
+
+public class SqlPatternEndsWithMatcher extends AbstractSqlPatternMatcher {
+
+ public SqlPatternEndsWithMatcher(String patternString) {
+ super(patternString);
}
@Override
- public int match() {
- int txtIndex = charSequenceWrapper.length();
- int patternIndex = patternLength;
- boolean matchFound = true; // if pattern is empty string, we always match.
+ public int match(int start, int end, DrillBuf drillBuf) {
+
+ // No match if input string length is less than pattern length.
+ final int txtStart = end - patternLength;
+ if (txtStart < start) {
+ return 0;
+ }
// simplePattern string has meta characters i.e % and _ and escape characters removed.
// so, we can just directly compare.
- while (patternIndex > 0 && txtIndex > 0) {
- if (charSequenceWrapper.charAt(--txtIndex) != patternString.charAt(--patternIndex)) {
- matchFound = false;
- break;
+ for (int index = 0; index < patternLength; index++) {
+ if (patternByteBuffer.get(index) != drillBuf.getByte(txtStart + index)) {
+ return 0;
}
}
- return (patternIndex == 0 && matchFound == true) ? 1 : 0;
+ return 1;
}
}
http://git-wip-us.apache.org/repos/asf/drill/blob/df95709a/exec/java-exec/src/main/java/org/apache/drill/exec/expr/fn/impl/SqlPatternFactory.java
----------------------------------------------------------------------
diff --git a/exec/java-exec/src/main/java/org/apache/drill/exec/expr/fn/impl/SqlPatternFactory.java b/exec/java-exec/src/main/java/org/apache/drill/exec/expr/fn/impl/SqlPatternFactory.java
index 9c85a01..871f660 100644
--- a/exec/java-exec/src/main/java/org/apache/drill/exec/expr/fn/impl/SqlPatternFactory.java
+++ b/exec/java-exec/src/main/java/org/apache/drill/exec/expr/fn/impl/SqlPatternFactory.java
@@ -18,21 +18,22 @@
package org.apache.drill.exec.expr.fn.impl;
+import org.apache.drill.exec.expr.fn.impl.RegexpUtil.SqlPatternInfo;
+
public class SqlPatternFactory {
- public static SqlPatternMatcher getSqlPatternMatcher(org.apache.drill.exec.expr.fn.impl.RegexpUtil.SqlPatternInfo patternInfo,
- CharSequence charSequenceWrapper)
+ public static SqlPatternMatcher getSqlPatternMatcher(SqlPatternInfo patternInfo)
{
switch (patternInfo.getPatternType()) {
case COMPLEX:
- return new SqlPatternComplexMatcher(patternInfo.getJavaPatternString(), charSequenceWrapper);
+ return new SqlPatternComplexMatcher(patternInfo.getJavaPatternString());
case STARTS_WITH:
- return new SqlPatternStartsWithMatcher(patternInfo.getSimplePatternString(), charSequenceWrapper);
+ return new SqlPatternStartsWithMatcher(patternInfo.getSimplePatternString());
case CONSTANT:
- return new SqlPatternConstantMatcher(patternInfo.getSimplePatternString(), charSequenceWrapper);
+ return new SqlPatternConstantMatcher(patternInfo.getSimplePatternString());
case ENDS_WITH:
- return new SqlPatternEndsWithMatcher(patternInfo.getSimplePatternString(), charSequenceWrapper);
+ return new SqlPatternEndsWithMatcher(patternInfo.getSimplePatternString());
case CONTAINS:
- return new SqlPatternContainsMatcher(patternInfo.getSimplePatternString(), charSequenceWrapper);
+ return new SqlPatternContainsMatcher(patternInfo.getSimplePatternString());
default:
break;
}
http://git-wip-us.apache.org/repos/asf/drill/blob/df95709a/exec/java-exec/src/main/java/org/apache/drill/exec/expr/fn/impl/SqlPatternMatcher.java
----------------------------------------------------------------------
diff --git a/exec/java-exec/src/main/java/org/apache/drill/exec/expr/fn/impl/SqlPatternMatcher.java b/exec/java-exec/src/main/java/org/apache/drill/exec/expr/fn/impl/SqlPatternMatcher.java
index 9c0c6e2..98c4877 100644
--- a/exec/java-exec/src/main/java/org/apache/drill/exec/expr/fn/impl/SqlPatternMatcher.java
+++ b/exec/java-exec/src/main/java/org/apache/drill/exec/expr/fn/impl/SqlPatternMatcher.java
@@ -17,6 +17,8 @@
*/
package org.apache.drill.exec.expr.fn.impl;
+import io.netty.buffer.DrillBuf;
+
public interface SqlPatternMatcher {
- public int match();
+ int match(int start, int end, DrillBuf drillBuf);
}
http://git-wip-us.apache.org/repos/asf/drill/blob/df95709a/exec/java-exec/src/main/java/org/apache/drill/exec/expr/fn/impl/SqlPatternStartsWithMatcher.java
----------------------------------------------------------------------
diff --git a/exec/java-exec/src/main/java/org/apache/drill/exec/expr/fn/impl/SqlPatternStartsWithMatcher.java b/exec/java-exec/src/main/java/org/apache/drill/exec/expr/fn/impl/SqlPatternStartsWithMatcher.java
index 9faae8a..d17ca21 100644
--- a/exec/java-exec/src/main/java/org/apache/drill/exec/expr/fn/impl/SqlPatternStartsWithMatcher.java
+++ b/exec/java-exec/src/main/java/org/apache/drill/exec/expr/fn/impl/SqlPatternStartsWithMatcher.java
@@ -17,32 +17,30 @@
*/
package org.apache.drill.exec.expr.fn.impl;
-public class SqlPatternStartsWithMatcher implements SqlPatternMatcher {
- final String patternString;
- CharSequence charSequenceWrapper;
- final int patternLength;
-
- public SqlPatternStartsWithMatcher(String patternString, CharSequence charSequenceWrapper) {
- this.charSequenceWrapper = charSequenceWrapper;
- this.patternString = patternString;
- patternLength = patternString.length();
+import io.netty.buffer.DrillBuf;
+
+public class SqlPatternStartsWithMatcher extends AbstractSqlPatternMatcher {
+
+ public SqlPatternStartsWithMatcher(String patternString) {
+ super(patternString);
}
@Override
- public int match() {
- int index = 0;
- final int txtLength = charSequenceWrapper.length();
+ public int match(int start, int end, DrillBuf drillBuf) {
+
+ if (patternLength > (end - start)) {
+ return 0;
+ }
// simplePattern string has meta characters i.e % and _ and escape characters removed.
// so, we can just directly compare.
- while (index < patternLength && index < txtLength) {
- if (patternString.charAt(index) != charSequenceWrapper.charAt(index)) {
- break;
+ for (int index = 0; index < patternLength; index++) {
+ if (patternByteBuffer.get(index) != drillBuf.getByte(start + index)) {
+ return 0;
}
- index++;
}
- return (index == patternLength ? 1 : 0);
+ return 1;
}
}
http://git-wip-us.apache.org/repos/asf/drill/blob/df95709a/exec/java-exec/src/main/java/org/apache/drill/exec/expr/fn/impl/StringFunctions.java
----------------------------------------------------------------------
diff --git a/exec/java-exec/src/main/java/org/apache/drill/exec/expr/fn/impl/StringFunctions.java b/exec/java-exec/src/main/java/org/apache/drill/exec/expr/fn/impl/StringFunctions.java
index 2a99ffa..ad3f379 100644
--- a/exec/java-exec/src/main/java/org/apache/drill/exec/expr/fn/impl/StringFunctions.java
+++ b/exec/java-exec/src/main/java/org/apache/drill/exec/expr/fn/impl/StringFunctions.java
@@ -55,7 +55,6 @@ public class StringFunctions{
@Param VarCharHolder input;
@Param(constant=true) VarCharHolder pattern;
@Output BitHolder out;
- @Workspace org.apache.drill.exec.expr.fn.impl.CharSequenceWrapper charSequenceWrapper;
@Workspace org.apache.drill.exec.expr.fn.impl.RegexpUtil.SqlPatternInfo sqlPatternInfo;
@Workspace org.apache.drill.exec.expr.fn.impl.SqlPatternMatcher sqlPatternMatcher;
@@ -63,15 +62,12 @@ public class StringFunctions{
public void setup() {
sqlPatternInfo = org.apache.drill.exec.expr.fn.impl.RegexpUtil.sqlToRegexLike(
org.apache.drill.exec.expr.fn.impl.StringFunctionHelpers.toStringFromUTF8(pattern.start, pattern.end, pattern.buffer));
- charSequenceWrapper = new org.apache.drill.exec.expr.fn.impl.CharSequenceWrapper();
- sqlPatternMatcher = org.apache.drill.exec.expr.fn.impl.SqlPatternFactory.getSqlPatternMatcher(sqlPatternInfo, charSequenceWrapper);
+ sqlPatternMatcher = org.apache.drill.exec.expr.fn.impl.SqlPatternFactory.getSqlPatternMatcher(sqlPatternInfo);
}
@Override
public void eval() {
- // Reusing same charSequenceWrapper, no need to pass it in.
- charSequenceWrapper.setBuffer(input.start, input.end, input.buffer);
- out.value = sqlPatternMatcher.match();
+ out.value = sqlPatternMatcher.match(input.start, input.end, input.buffer);
}
}
@@ -82,7 +78,6 @@ public class StringFunctions{
@Param(constant=true) VarCharHolder pattern;
@Param(constant=true) VarCharHolder escape;
@Output BitHolder out;
- @Workspace org.apache.drill.exec.expr.fn.impl.CharSequenceWrapper charSequenceWrapper;
@Workspace org.apache.drill.exec.expr.fn.impl.RegexpUtil.SqlPatternInfo sqlPatternInfo;
@Workspace org.apache.drill.exec.expr.fn.impl.SqlPatternMatcher sqlPatternMatcher;
@@ -91,15 +86,12 @@ public class StringFunctions{
sqlPatternInfo = org.apache.drill.exec.expr.fn.impl.RegexpUtil.sqlToRegexLike(
org.apache.drill.exec.expr.fn.impl.StringFunctionHelpers.toStringFromUTF8(pattern.start, pattern.end, pattern.buffer),
org.apache.drill.exec.expr.fn.impl.StringFunctionHelpers.toStringFromUTF8(escape.start, escape.end, escape.buffer));
- charSequenceWrapper = new org.apache.drill.exec.expr.fn.impl.CharSequenceWrapper();
- sqlPatternMatcher = org.apache.drill.exec.expr.fn.impl.SqlPatternFactory.getSqlPatternMatcher(sqlPatternInfo, charSequenceWrapper);
+ sqlPatternMatcher = org.apache.drill.exec.expr.fn.impl.SqlPatternFactory.getSqlPatternMatcher(sqlPatternInfo);
}
@Override
public void eval() {
- // Reusing same charSequenceWrapper, no need to pass it in.
- charSequenceWrapper.setBuffer(input.start, input.end, input.buffer);
- out.value = sqlPatternMatcher.match();
+ out.value = sqlPatternMatcher.match(input.start, input.end, input.buffer);
}
}
http://git-wip-us.apache.org/repos/asf/drill/blob/df95709a/exec/java-exec/src/test/java/org/apache/drill/exec/expr/fn/impl/TestSqlPatterns.java
----------------------------------------------------------------------
diff --git a/exec/java-exec/src/test/java/org/apache/drill/exec/expr/fn/impl/TestSqlPatterns.java b/exec/java-exec/src/test/java/org/apache/drill/exec/expr/fn/impl/TestSqlPatterns.java
index d8c1410..2eecb54 100644
--- a/exec/java-exec/src/test/java/org/apache/drill/exec/expr/fn/impl/TestSqlPatterns.java
+++ b/exec/java-exec/src/test/java/org/apache/drill/exec/expr/fn/impl/TestSqlPatterns.java
@@ -17,11 +17,100 @@
*/
package org.apache.drill.exec.expr.fn.impl;
+import io.netty.buffer.DrillBuf;
+import org.apache.drill.common.exceptions.DrillRuntimeException;
+import org.apache.drill.exec.memory.BufferAllocator;
+import org.apache.drill.exec.memory.RootAllocatorFactory;
+import org.junit.After;
+import org.junit.Before;
+import org.junit.Ignore;
import org.junit.Test;
+
+import java.nio.ByteBuffer;
+import java.nio.CharBuffer;
+import java.nio.charset.CharacterCodingException;
+import java.nio.charset.Charset;
+import java.nio.charset.CharsetEncoder;
+
import static org.junit.Assert.assertEquals;
import static org.junit.Assert.assertTrue;
public class TestSqlPatterns {
+ BufferAllocator allocator;
+ DrillBuf drillBuf;
+ CharsetEncoder charsetEncoder;
+ CharBuffer charBuffer;
+ ByteBuffer byteBuffer;
+
+ String wideString = "b00dUrA0oa2i4ZEHg6zvPXPXlVQYB2BXe8T5gIEtvUDzcN6yUkIqyS07gaAy8k4" +
+ "ac6Bn1cxblsXFnkp8g8hiQkUMJPyl6l0jTdsIzQ4PkVCURGGyF0aduGqCXUaKp91gqkRMvL" +
+ "g1Lh6u0NrGCBoJajPxnwZCyh58cN5aFiNscBFKIqqLPTS1vnbR39nmzU88FM8qDepJRhvein" +
+ "hHhmrHdEb22QN20dXEHSygR7vrb2zZhhfWeJbXRsesuYDqdGig801IAS6VWRIdQtJ6gaRhCdNz" +
+ " DWnQWRXlMhcrR4MKJXeBgDtjzbHd0ZS53K8u8ORl6FKxtvdKmwUuHiuMJrQQm6Rgx6WJrAtvTf" +
+ "UE8a5I3nYXdRppnm3MbRsLu4IxXIblh8kmAIG6n2yHwGhpWYkRI7cwl4dOB3bsxxtdaaTlZMMx6T" +
+ "XPaUK10UzfZCAkWG9Du3QhJxxJBZaP3HPebXmw1l5swPohmG3L6zOcEWp7f" +
+ "saldC7TOrFa3ReYFHooclSGTgZ9sWjJ5SYJ0vEkI1RMWoeGcdJq5v4lrcB6YjrMqQJIaxAdRnIaNG" +
+ "V6oR9SkI4diiXspIvRWj6PMkpqI02ovI3va49bHauTrqTyM9eIhS" +
+ "0Mc3SHzknQwHJAFkqmhV9Lm2VLULou2iJDvc5sWW8W48IODGqGytqLogA01Cuo3gURmH2057nCld9" +
+ "PDHQEieFMddi4gKPOv4es1YX2aBo4RfYiTlUyXd6gGujVPgU2j" +
+ "AAhcz6JqVC08O73gM9zOAM2l4PwN2TN3lBufkQUGyOzHtoTDjSdQ2DPXIks9A6ehIpn92n1UtdrJeMz" +
+ "4oMN4kwP95YjQk1ko2e3DVAiPVlCiaWqnzXKa41kLVs3KiBhfAff5" +
+ "hoTnBGn9CaXed6g6kLs2YBTQYM9yLW9Wb5qNhLeCM4GGJM8dUWqqEsWYPrcPAkCMa6LXfgEcsCwQ6ij" +
+ "JhhjcxwoafBRyyEvQ6Pfhg8IqJ0afBpAZHhR2y4I11zbaJZqs3WG3H3aQHT" +
+ "wcPHdBHnk65GdL3Njuoo0K4mcmN6lk7pWptHwTjkw59zTw834PZ8TWm5XiUnsi9JKy41MPqHcbO0nN" +
+ "SYl9Q6kEjv4nt8p9unhUYqgrGvLl42nvqGb1F47f6PvxkewuouxMFAszYhaMjZzIf5" +
+ "AgmvaXbSP9MKYu6EkkvM9CIhYGZuq7PJUk6wmoG6IxIfOokUcnrGzuU9INFUuXf4LptQ987GU3hw0d" +
+ "yMNf6nncwABOOoC5EnqYBNoq29Mf54H5k2Xi8y1fh8ldtKcW9T4WsaXun9fKofegfhwY8wgfoG" +
+ "eW2YNW3fdalIsggRzMEAXVDxj7oieReUGiT53uV2kcmcQRQLdUDUcOC1JEiSRpgZl38c1DDVRlz8Rbhi" +
+ "KUxMqNCPx6PABXCPocpfXJa0yBT0l3ssgMlDfKsxAHX6aEC86zk0CDmTqZPmBjLAoYaHA3" +
+ "uGqoARbQ6rhIBHOdkb7PoRImjmF4sQ60TBIWdao9dqLMjslhOQrGQlPIniW5I1V9nisc5lV0jEqeaC3y" +
+ "lSnjhieVJ7H0FYjcsihjQryhyRwUZBGxWFuh0hI9rOv8h5jHKb549hOHPcIjSdLa6M048G" +
+ "9drX0LNEixfp7WUqq2DyRfBioybmoHVzFWzhXrMJXzwHakzLwb4T2BHcLK6VpC4b2GodYlZe43ggxTNUErif" +
+ "NEfEfxZhDj6HBMYobKvn4ofOsyKPGn6NXnCqIbCCvqOyBikxAYukgCmWHRJRGX4RjNbL" +
+ "BVjY5eoXJB7xisnrqOieXuEnZ9n7rnK8qM4RuOSA8EaDd5n58JU9SUUNRqpZZgK2nPy9Pv90ORiGr1Y30rZS" +
+ "bKT7SucjEZJ00WBF9FlJp6v8OcVvMBjRriaYYjVlOiLvVDQQ2NvYfbv5bLbEhkrJi5Nlg" +
+ "3Tq5jsgSTEBqSKTD5UIukFP194LvVMQIOQ9YM7m9iZHMpCCoIL99FJLsNmzRDVETCjyFoXxSputp6ufupS1n" +
+ "1SHRVlXm7Bx3bjJ79O3bGqjzxT1EZV39isegIyKx2H0zEUpnlXzzbusS0tusECmG3C3eGDOTs" +
+ "FZbYTp5ZxtXCrudDSX3kaeLtCstfqAHGsjHkPd87aSNaJJjPaSaMmGo7zTJGUIX1VCA2KJP37USIAa5NGHtM" +
+ "ChmtfO8kmrO9PZl6Ld18Yi7OlBsEUkMQE0yKwtSpkTK76XS5CG8S7S2S07vtYaBJJ9Bvuzr0F" +
+ "tLsQ1gYWPF1geDalS5MdWfpDvF5MaeJMd2fK0m3jui7xY1IfuSxqZs7SEL6wUVGdWc5tsVroCMMy6Nqjdz5T4vW" +
+ "zdSmpjrFnnB8edB5AOekeHua16I9qcNHuCcOgeYZIc6GzG0O1XAcQu6cEi1ZivUPoYf2sKr4uPvcD" +
+ "gnaIN1KmhwSmxPgkErJVroPAUO18E2apxRlmZkhS6CInyzcLkvycSDCGtFaAZBO3QDO5nmvPFgVxfSbwG8BhhY" +
+ "cWXqwnsbEEejtlXH3Zr5BtxTzd3Bo08s8HxjIXF6Z0CPXcvQzDoemL8M2A1AIrnBkT7vIHgvMuH475M" +
+ "TXIR4K0njrS4X4KrBQFxvuZey8tnUnm8oiJWdUFzdM4N0KioJsG8UzxRODxKh4e3GqxmZxsSwwL0nNnV1syiCCC" +
+ "zTgrtT6fcxpAfcFeTct7FNd4BjzbNCgBrSspzhxnEFMZXuqBGaOS9d9qcuUulwF0lAWGBauWI57qyjXfQnQ" +
+ "i6Sy6nXOcUIOZWJ9BVJf4A27Pa4Pi7ZFznFnIdiQOrxCbb2ZCVkCftWsmcEMnXWXUkGOuA5JXo9YvGyPGq2wgO1wj" +
+ "qAKyqxhBVOL48L2D0PYU16Ursxe0ckoBYXJheQi2d1eIa0pTD78325f8jCHclqINvuvj0GZfJENlc1e" +
+ "ULPRd358aPnsx2DOmN1UojjBI1hacijCtFCE8zGCa9M0L7aZbRUHe8lmlaqhx0Su6nPnPgfbJr6idfxTJHqCT4t8" +
+ "4BfZeqRZ5rgIS15Z7HFYSCPZixMPf683GQoQEFWIM0EqNTJmoHW3K7jDHOUpVutyyWt5VO5ray6rBrq1nAF" +
+ "QEN59RqxM04eXxAOBWnPB17TdvDmyXuXDpjnjXReJLNqJVgB2VFPxsqhQWQupAtjBGvffU7exZMM92fiYdBArV" +
+ "4SE1mBFewTNRz4PmwFVmUoxWj74rzZQuDMhAlx3jBXcaX8eD7PlaADdiMT1mF3faVyScA6bHbV2jU79XvppOfoD" +
+ "YtBFj3a5LtAhTy5BnN2v1XlTQtk6MZ0Ej6g7sW96w9n2XV8wqdWGgjeKHaqH7Pn1XFw7IHvpVYK4wFvIGubp4bpms" +
+ "C3ARq1Gqq8zvDQtoLZSZYOvXCZOIElGZLscqjbRckX5aRhTJX6CxjVcT7S3TScnCbqNdfqMpEsNl2GY3fprQF" +
+ "CTtiZv12uCj0WILSesMc5ct2tQcIvwnOHAuE6fw7lD8EgQ0emU4zxUIDowhTvJ46k27rXTctIX7HlBEZXInV9r49" +
+ "VbJdA3des3ZqGPbBYXTwQcns1jJTmnIf1S0jLWN0Wgk9bH5gkdhl53l2yc1AlZCyJdm9vktH5sctTDdMZrDPPHNUG2" +
+ "pTBg4DDR9Zc6YvkrO4f5O3mfOl441bJkmOSNwoOc3krHTQlN6SBGLEptT4m7MFwqVyrbsEXHegwa53aN4W0J7qwV0" +
+ "EMN2VHLtoHQDfXVOVDXnE1rK3cDJRMhCIvIRmywkA5T9GchtDVfek2qZq1H5wfe92RoXBseAuMoWtTCJiXOJraCxmj" +
+ "cluokF3eK0NpycncoQcObLiS1rield0fdx8UJhsV9QnNtok5a0f4L1MKtjnYJmvItSqn3Lo2VkWagxGSEJzKnK2gO3pH" +
+ "Whlarr6bRQeIwCXckALEVdGZBTPiqjYPBfk5H5wYXqkieh04tjSmnWytNebBNmGjTNgrqNVO7ftCbhh7wICOn" +
+ "lpSMt6BoFvjHYW1IpEyTlVlvNl5NzPPAn2119ttZTfXpifXfQtBGzlCNYTD6m1FvpmOydzqEq8YadgybW76HDtnBdU" +
+ "M1djhNcHfR12NkPc7UIvVJDiTTJ440pU1tqYISyEVr5QZBrhOP2y6RsZnlJy7Mqh56Jw0fJkbI2yQaoc7Jh2Wsh7" +
+ "R58SXBXsalwNM9TmTeBMrc8Hghx9hDpai8agUclHTCoyK2hkEpKLlEJiXUKOE8JPugYE8yFVYF49UAjJUbsj6we3Ocii" +
+ "FXs6oXGymttSxcRksGdfUaIonkrqniea31SgiGmhCjKi0x5ZDNFS26CqSEU0FKiLJyhui8HOJCddX64Ers0VTMHppS" +
+ "ydpQX7PndzDuhT7k8Wj2kGJvKCqzVxTGCssDHoedKmMULEjUqU2EcjT5VOaCFeHKUXyP1B7qfYPtKLcgXHH5bmSgRs8gY" +
+ "2JkPOST2Vr35mNKoulUMqFeo0s1y5hcVY39a3mBMytwZn7HgPhEJScwZdWJd6E5tZ13evEmcn1A5YPBYbm91CdJFXhj" +
+ "iuqmJS71Xq4j56K35TmCJCb4jAAbcGTGEHzcCP1HKVFfsNnLqwflvHwMYQMA3EumrMn1nXnETZFdZJRHlnO8dwgnT" +
+ "ehbB2XtrpErgaFbEWfWEinoiMd4Vs7kgHzs8UiuagYyyCxmg5gEvza3CXzjUnG2lfjI6ox6EYPgXvRySHmL" +
+ "atXzj4x3CgF6j1gn10aUJknF7KQLJ84DIA5fy33YaLLbeOoGJHsdr9rQZCjaIqZKH870sslgm0tnGw5yOddnj" +
+ "FDI2KwL6UVGr3YExI1p5sGaY0Su4G30PMJsOX9ZWvRF72Lk0pVMnjVugkzsnQrbyGezZ8WN8y8kOvrysQuhTt5" +
+ "AFyMJ4kLsONE52kZsJYYyDpWw9a8BZ";
+
+ @Before
+ public void setup() {
+ allocator = RootAllocatorFactory.newRoot(16384);
+ drillBuf = allocator.buffer(8192);
+ charsetEncoder = Charset.forName("UTF-8").newEncoder();
+ }
@Test
public void testSqlRegexLike() {
@@ -30,7 +119,7 @@ public class TestSqlPatterns {
// Simple pattern should have meta (% and _) and escape characters removed.
// A%B is complex
- RegexpUtil.SqlPatternInfo patternInfo = RegexpUtil.sqlToRegexLike("A%B");
+ RegexpUtil.SqlPatternInfo patternInfo = RegexpUtil.sqlToRegexLike("A%B");
assertEquals("A.*B", patternInfo.getJavaPatternString());
assertEquals(RegexpUtil.SqlPatternType.COMPLEX, patternInfo.getPatternType());
@@ -108,567 +197,259 @@ public class TestSqlPatterns {
}
+ private void setDrillBuf(String input) {
+ drillBuf.clear();
+ charBuffer = CharBuffer.wrap(input);
+ try {
+ byteBuffer = charsetEncoder.encode(charBuffer);
+ } catch (CharacterCodingException e) {
+ throw new DrillRuntimeException("Error while encoding the pattern string ", e);
+ }
+ drillBuf.setBytes(0, byteBuffer, byteBuffer.position(), byteBuffer.remaining());
+ }
+
@Test
public void testSqlPatternStartsWith() {
- RegexpUtil.SqlPatternInfo patternInfo = new RegexpUtil.SqlPatternInfo(RegexpUtil.SqlPatternType.STARTS_WITH,"", "ABC");
+ RegexpUtil.SqlPatternInfo patternInfo = new RegexpUtil.SqlPatternInfo(RegexpUtil.SqlPatternType.STARTS_WITH, "", "ABC");
+ SqlPatternMatcher sqlPatternStartsWith = SqlPatternFactory.getSqlPatternMatcher(patternInfo);
- StringBuffer sb = new StringBuffer("ABCD");
- SqlPatternMatcher sqlPatternStartsWith = SqlPatternFactory.getSqlPatternMatcher(patternInfo, sb);
- assertEquals(sqlPatternStartsWith.match(), 1); // ABCD should match StartsWith ABC
+ setDrillBuf("ABCD");
+ assertEquals(1, sqlPatternStartsWith.match(0, byteBuffer.limit(), drillBuf)); // ABCD should match StartsWith ABC
- sb.setLength(0);
- sb.append("BCD");
- assertEquals(sqlPatternStartsWith.match(), 0); // BCD should not match StartsWith ABC
+ setDrillBuf("BCD");
+ assertEquals(0, sqlPatternStartsWith.match(0, byteBuffer.limit(), drillBuf)); // BCD should not match StartsWith ABC
- sb.setLength(0);
- sb.append("XYZABC");
- assertEquals(sqlPatternStartsWith.match(), 0); // XYZABC should not match StartsWith ABC
+ setDrillBuf("XYZABC");
+ assertEquals(0, sqlPatternStartsWith.match(0, byteBuffer.limit(), drillBuf)); // XYZABC should not match StartsWith ABC
// null text
- sb.setLength(0);
- assertEquals(sqlPatternStartsWith.match(), 0); // null String should not match StartsWith ABC
+ setDrillBuf("");
+ assertEquals(0, sqlPatternStartsWith.match(0, byteBuffer.limit(), drillBuf)); // null String should not match StartsWith ABC
// pattern length > txt length
- sb.append("AB");
- assertEquals(sqlPatternStartsWith.match(), 0); // AB should not match StartsWith ABC
+ setDrillBuf("AB");
+ assertEquals(0, sqlPatternStartsWith.match(0, byteBuffer.limit(), drillBuf)); // AB should not match StartsWith ABC
// startsWith null pattern should match anything
- patternInfo = new RegexpUtil.SqlPatternInfo(RegexpUtil.SqlPatternType.STARTS_WITH,"", "");
- sqlPatternStartsWith = SqlPatternFactory.getSqlPatternMatcher(patternInfo, sb);
- assertEquals(sqlPatternStartsWith.match(), 1); // AB should match StartsWith null pattern
+ patternInfo = new RegexpUtil.SqlPatternInfo(RegexpUtil.SqlPatternType.STARTS_WITH, "", "");
+ sqlPatternStartsWith = SqlPatternFactory.getSqlPatternMatcher(patternInfo);
+
+ setDrillBuf("AB");
+ assertEquals(1, sqlPatternStartsWith.match(0, byteBuffer.limit(), drillBuf)); // AB should match StartsWith null pattern
// null pattern and null text
- sb.setLength(0);
- assertEquals(sqlPatternStartsWith.match(), 1); // null text should match null pattern
+ setDrillBuf("");
+ assertEquals(1, sqlPatternStartsWith.match(0, byteBuffer.limit(), drillBuf)); // null text should match null pattern
// wide character string.
- sb.append("b00dUrA0oa2i4ZEHg6zvPXPXlVQYB2BXe8T5gIEtvUDzcN6yUkIqyS07gaAy8k4" +
- "ac6Bn1cxblsXFnkp8g8hiQkUMJPyl6l0jTdsIzQ4PkVCURGGyF0aduGqCXUaKp91gqkRMvL" +
- "g1Lh6u0NrGCBoJajPxnwZCyh58cN5aFiNscBFKIqqLPTS1vnbR39nmzU88FM8qDepJRhvein" +
- "hHhmrHdEb22QN20dXEHSygR7vrb2zZhhfWeJbXRsesuYDqdGig801IAS6VWRIdQtJ6gaRhCdNz" +
- " DWnQWRXlMhcrR4MKJXeBgDtjzbHd0ZS53K8u8ORl6FKxtvdKmwUuHiuMJrQQm6Rgx6WJrAtvTf" +
- "UE8a5I3nYXdRppnm3MbRsLu4IxXIblh8kmAIG6n2yHwGhpWYkRI7cwl4dOB3bsxxtdaaTlZMMx6T" +
- "XPaUK10UzfZCAkWG9Du3QhJxxJBZaP3HPebXmw1l5swPohmG3L6zOcEWp7f" +
- "saldC7TOrFa3ReYFHooclSGTgZ9sWjJ5SYJ0vEkI1RMWoeGcdJq5v4lrcB6YjrMqQJIaxAdRnIaNG" +
- "V6oR9SkI4diiXspIvRWj6PMkpqI02ovI3va49bHauTrqTyM9eIhS" +
- "0Mc3SHzknQwHJAFkqmhV9Lm2VLULou2iJDvc5sWW8W48IODGqGytqLogA01Cuo3gURmH2057nCld9" +
- "PDHQEieFMddi4gKPOv4es1YX2aBo4RfYiTlUyXd6gGujVPgU2j" +
- "AAhcz6JqVC08O73gM9zOAM2l4PwN2TN3lBufkQUGyOzHtoTDjSdQ2DPXIks9A6ehIpn92n1UtdrJeMz" +
- "4oMN4kwP95YjQk1ko2e3DVAiPVlCiaWqnzXKa41kLVs3KiBhfAff5" +
- "hoTnBGn9CaXed6g6kLs2YBTQYM9yLW9Wb5qNhLeCM4GGJM8dUWqqEsWYPrcPAkCMa6LXfgEcsCwQ6ij" +
- "JhhjcxwoafBRyyEvQ6Pfhg8IqJ0afBpAZHhR2y4I11zbaJZqs3WG3H3aQHT" +
- "wcPHdBHnk65GdL3Njuoo0K4mcmN6lk7pWptHwTjkw59zTw834PZ8TWm5XiUnsi9JKy41MPqHcbO0nN" +
- "SYl9Q6kEjv4nt8p9unhUYqgrGvLl42nvqGb1F47f6PvxkewuouxMFAszYhaMjZzIf5" +
- "AgmvaXbSP9MKYu6EkkvM9CIhYGZuq7PJUk6wmoG6IxIfOokUcnrGzuU9INFUuXf4LptQ987GU3hw0d" +
- "yMNf6nncwABOOoC5EnqYBNoq29Mf54H5k2Xi8y1fh8ldtKcW9T4WsaXun9fKofegfhwY8wgfoG" +
- "eW2YNW3fdalIsggRzMEAXVDxj7oieReUGiT53uV2kcmcQRQLdUDUcOC1JEiSRpgZl38c1DDVRlz8Rbhi" +
- "KUxMqNCPx6PABXCPocpfXJa0yBT0l3ssgMlDfKsxAHX6aEC86zk0CDmTqZPmBjLAoYaHA3" +
- "uGqoARbQ6rhIBHOdkb7PoRImjmF4sQ60TBIWdao9dqLMjslhOQrGQlPIniW5I1V9nisc5lV0jEqeaC3y" +
- "lSnjhieVJ7H0FYjcsihjQryhyRwUZBGxWFuh0hI9rOv8h5jHKb549hOHPcIjSdLa6M048G" +
- "9drX0LNEixfp7WUqq2DyRfBioybmoHVzFWzhXrMJXzwHakzLwb4T2BHcLK6VpC4b2GodYlZe43ggxTNUErif" +
- "NEfEfxZhDj6HBMYobKvn4ofOsyKPGn6NXnCqIbCCvqOyBikxAYukgCmWHRJRGX4RjNbL" +
- "BVjY5eoXJB7xisnrqOieXuEnZ9n7rnK8qM4RuOSA8EaDd5n58JU9SUUNRqpZZgK2nPy9Pv90ORiGr1Y30rZS" +
- "bKT7SucjEZJ00WBF9FlJp6v8OcVvMBjRriaYYjVlOiLvVDQQ2NvYfbv5bLbEhkrJi5Nlg" +
- "3Tq5jsgSTEBqSKTD5UIukFP194LvVMQIOQ9YM7m9iZHMpCCoIL99FJLsNmzRDVETCjyFoXxSputp6ufupS1n" +
- "1SHRVlXm7Bx3bjJ79O3bGqjzxT1EZV39isegIyKx2H0zEUpnlXzzbusS0tusECmG3C3eGDOTs" +
- "FZbYTp5ZxtXCrudDSX3kaeLtCstfqAHGsjHkPd87aSNaJJjPaSaMmGo7zTJGUIX1VCA2KJP37USIAa5NGHtM" +
- "ChmtfO8kmrO9PZl6Ld18Yi7OlBsEUkMQE0yKwtSpkTK76XS5CG8S7S2S07vtYaBJJ9Bvuzr0F" +
- "tLsQ1gYWPF1geDalS5MdWfpDvF5MaeJMd2fK0m3jui7xY1IfuSxqZs7SEL6wUVGdWc5tsVroCMMy6Nqjdz5T4vW" +
- "zdSmpjrFnnB8edB5AOekeHua16I9qcNHuCcOgeYZIc6GzG0O1XAcQu6cEi1ZivUPoYf2sKr4uPvcD" +
- "gnaIN1KmhwSmxPgkErJVroPAUO18E2apxRlmZkhS6CInyzcLkvycSDCGtFaAZBO3QDO5nmvPFgVxfSbwG8BhhY" +
- "cWXqwnsbEEejtlXH3Zr5BtxTzd3Bo08s8HxjIXF6Z0CPXcvQzDoemL8M2A1AIrnBkT7vIHgvMuH475M" +
- "TXIR4K0njrS4X4KrBQFxvuZey8tnUnm8oiJWdUFzdM4N0KioJsG8UzxRODxKh4e3GqxmZxsSwwL0nNnV1syiCCC" +
- "zTgrtT6fcxpAfcFeTct7FNd4BjzbNCgBrSspzhxnEFMZXuqBGaOS9d9qcuUulwF0lAWGBauWI57qyjXfQnQ" +
- "i6Sy6nXOcUIOZWJ9BVJf4A27Pa4Pi7ZFznFnIdiQOrxCbb2ZCVkCftWsmcEMnXWXUkGOuA5JXo9YvGyPGq2wgO1wj" +
- "qAKyqxhBVOL48L2D0PYU16Ursxe0ckoBYXJheQi2d1eIa0pTD78325f8jCHclqINvuvj0GZfJENlc1e" +
- "ULPRd358aPnsx2DOmN1UojjBI1hacijCtFCE8zGCa9M0L7aZbRUHe8lmlaqhx0Su6nPnPgfbJr6idfxTJHqCT4t8" +
- "4BfZeqRZ5rgIS15Z7HFYSCPZixMPf683GQoQEFWIM0EqNTJmoHW3K7jDHOUpVutyyWt5VO5ray6rBrq1nAF" +
- "QEN59RqxM04eXxAOBWnPB17TdvDmyXuXDpjnjXReJLNqJVgB2VFPxsqhQWQupAtjBGvffU7exZMM92fiYdBArV" +
- "4SE1mBFewTNRz4PmwFVmUoxWj74rzZQuDMhAlx3jBXcaX8eD7PlaADdiMT1mF3faVyScA6bHbV2jU79XvppOfoD" +
- "YtBFj3a5LtAhTy5BnN2v1XlTQtk6MZ0Ej6g7sW96w9n2XV8wqdWGgjeKHaqH7Pn1XFw7IHvpVYK4wFvIGubp4bpms" +
- "C3ARq1Gqq8zvDQtoLZSZYOvXCZOIElGZLscqjbRckX5aRhTJX6CxjVcT7S3TScnCbqNdfqMpEsNl2GY3fprQF" +
- "CTtiZv12uCj0WILSesMc5ct2tQcIvwnOHAuE6fw7lD8EgQ0emU4zxUIDowhTvJ46k27rXTctIX7HlBEZXInV9r49" +
- "VbJdA3des3ZqGPbBYXTwQcns1jJTmnIf1S0jLWN0Wgk9bH5gkdhl53l2yc1AlZCyJdm9vktH5sctTDdMZrDPPHNUG2" +
- "pTBg4DDR9Zc6YvkrO4f5O3mfOl441bJkmOSNwoOc3krHTQlN6SBGLEptT4m7MFwqVyrbsEXHegwa53aN4W0J7qwV0" +
- "EMN2VHLtoHQDfXVOVDXnE1rK3cDJRMhCIvIRmywkA5T9GchtDVfek2qZq1H5wfe92RoXBseAuMoWtTCJiXOJraCxmj" +
- "cluokF3eK0NpycncoQcObLiS1rield0fdx8UJhsV9QnNtok5a0f4L1MKtjnYJmvItSqn3Lo2VkWagxGSEJzKnK2gO3pH" +
- "Whlarr6bRQeIwCXckALEVdGZBTPiqjYPBfk5H5wYXqkieh04tjSmnWytNebBNmGjTNgrqNVO7ftCbhh7wICOn" +
- "lpSMt6BoFvjHYW1IpEyTlVlvNl5NzPPAn2119ttZTfXpifXfQtBGzlCNYTD6m1FvpmOydzqEq8YadgybW76HDtnBdU" +
- "M1djhNcHfR12NkPc7UIvVJDiTTJ440pU1tqYISyEVr5QZBrhOP2y6RsZnlJy7Mqh56Jw0fJkbI2yQaoc7Jh2Wsh7" +
- "R58SXBXsalwNM9TmTeBMrc8Hghx9hDpai8agUclHTCoyK2hkEpKLlEJiXUKOE8JPugYE8yFVYF49UAjJUbsj6we3Ocii" +
- "FXs6oXGymttSxcRksGdfUaIonkrqniea31SgiGmhCjKi0x5ZDNFS26CqSEU0FKiLJyhui8HOJCddX64Ers0VTMHppS" +
- "ydpQX7PndzDuhT7k8Wj2kGJvKCqzVxTGCssDHoedKmMULEjUqU2EcjT5VOaCFeHKUXyP1B7qfYPtKLcgXHH5bmSgRs8gY" +
- "2JkPOST2Vr35mNKoulUMqFeo0s1y5hcVY39a3mBMytwZn7HgPhEJScwZdWJd6E5tZ13evEmcn1A5YPBYbm91CdJFXhj" +
- "iuqmJS71Xq4j56K35TmCJCb4jAAbcGTGEHzcCP1HKVFfsNnLqwflvHwMYQMA3EumrMn1nXnETZFdZJRHlnO8dwgnT" +
- "ehbB2XtrpErgaFbEWfWEinoiMd4Vs7kgHzs8UiuagYyyCxmg5gEvza3CXzjUnG2lfjI6ox6EYPgXvRySHmL" +
- "atXzj4x3CgF6j1gn10aUJknF7KQLJ84DIA5fy33YaLLbeOoGJHsdr9rQZCjaIqZKH870sslgm0tnGw5yOddnj" +
- "FDI2KwL6UVGr3YExI1p5sGaY0Su4G30PMJsOX9ZWvRF72Lk0pVMnjVugkzsnQrbyGezZ8WN8y8kOvrysQuhTt5" +
- "AFyMJ4kLsONE52kZsJYYyDpWw9a8BZ");
-
- patternInfo = new RegexpUtil.SqlPatternInfo(RegexpUtil.SqlPatternType.STARTS_WITH,"",
- "b00dUrA0oa2i4ZEHg6zvPXPXlVQYB2BXe8T5gIEtvUDzcN6yUkIqyS07gaAy8k4");
- sqlPatternStartsWith = SqlPatternFactory.getSqlPatternMatcher(patternInfo, sb);
- assertEquals(sqlPatternStartsWith.match(), 1); // should match
-
- patternInfo = new RegexpUtil.SqlPatternInfo(RegexpUtil.SqlPatternType.STARTS_WITH,"",
- "AFyMJ4kLsONE52kZsJYYyDpWw9a8BZ");
- sqlPatternStartsWith = SqlPatternFactory.getSqlPatternMatcher(patternInfo, sb);
- assertEquals(sqlPatternStartsWith.match(), 0); // should not match
+ setDrillBuf(wideString);
- // non ascii
- sb.setLength(0);
- sb.append("¤EÀsÆW°ê»Ú®i¶T¤¤¤ß3¼Ó®i¶TÆU2~~");
+ patternInfo = new RegexpUtil.SqlPatternInfo(RegexpUtil.SqlPatternType.STARTS_WITH, "", "b00dUrA0oa2i4ZEHg6zvPXPXlVQYB2BXe8T5gIEtvUDzcN6yUkIqyS07gaAy8k4");
+ sqlPatternStartsWith = SqlPatternFactory.getSqlPatternMatcher(patternInfo);
+ assertEquals(1, sqlPatternStartsWith.match(0, byteBuffer.limit(), drillBuf)); // should match
- assertEquals(sqlPatternStartsWith.match(), 0); // should not match
+ patternInfo = new RegexpUtil.SqlPatternInfo(RegexpUtil.SqlPatternType.STARTS_WITH, "", "AFyMJ4kLsONE52kZsJYYyDpWw9a8BZ");
+ sqlPatternStartsWith = SqlPatternFactory.getSqlPatternMatcher(patternInfo);
+ assertEquals(0, sqlPatternStartsWith.match(0, byteBuffer.limit(), drillBuf)); // should not match
- patternInfo = new RegexpUtil.SqlPatternInfo(RegexpUtil.SqlPatternType.STARTS_WITH,"", "¤EÀsÆW");
- sqlPatternStartsWith = SqlPatternFactory.getSqlPatternMatcher(patternInfo, sb);
- assertEquals(sqlPatternStartsWith.match(), 1); // should match
+ // non ascii
+ setDrillBuf("¤EÀsÆW°ê»Ú®i¶T¤¤¤ß3¼Ó®i¶TÆU2~~");
+ assertEquals(0, sqlPatternStartsWith.match(0, byteBuffer.limit(), drillBuf)); // should not match
+ patternInfo = new RegexpUtil.SqlPatternInfo(RegexpUtil.SqlPatternType.STARTS_WITH, "", "¤EÀsÆW");
+ sqlPatternStartsWith = SqlPatternFactory.getSqlPatternMatcher(patternInfo);
+ setDrillBuf("¤EÀsÆW");
+ assertEquals(1, sqlPatternStartsWith.match(0, byteBuffer.limit(), drillBuf)); // should match
}
@Test
public void testSqlPatternEndsWith() {
RegexpUtil.SqlPatternInfo patternInfo = new RegexpUtil.SqlPatternInfo(RegexpUtil.SqlPatternType.ENDS_WITH, "", "BCD");
- StringBuffer sb = new StringBuffer("ABCD");
- SqlPatternMatcher sqlPatternEndsWith = SqlPatternFactory.getSqlPatternMatcher(patternInfo, sb);
+ SqlPatternMatcher sqlPatternEndsWith = SqlPatternFactory.getSqlPatternMatcher(patternInfo);
- assertEquals(sqlPatternEndsWith.match(), 1); // ABCD should match EndsWith BCD
+ setDrillBuf("ABCD");
+ assertEquals(1, sqlPatternEndsWith.match(0, byteBuffer.limit(), drillBuf)); // ABCD should match EndsWith BCD
- sb.setLength(0);
- sb.append("ABC");
- assertEquals(sqlPatternEndsWith.match(), 0); // ABC should not match EndsWith BCD
+ setDrillBuf("ABC");
+ assertEquals(0, sqlPatternEndsWith.match(0, byteBuffer.limit(), drillBuf)); // ABC should not match EndsWith BCD
- sb.setLength(0);
- assertEquals(sqlPatternEndsWith.match(), 0); // null string should not match EndsWith BCD
+ setDrillBuf("");
+ assertEquals(0, sqlPatternEndsWith.match(0, byteBuffer.limit(), drillBuf)); // null string should not match EndsWith BCD
- sb.append("A");
- assertEquals(sqlPatternEndsWith.match(), 0); // ABCD should not match EndsWith A
+ setDrillBuf("A");
+ assertEquals(0, sqlPatternEndsWith.match(0, byteBuffer.limit(), drillBuf)); // A should not match EndsWith BCD
- sb.setLength(0);
- sb.append("XYZBCD");
- assertEquals(sqlPatternEndsWith.match(), 1); // XYZBCD should match EndsWith BCD
+ setDrillBuf("XYZBCD");
+ assertEquals(1, sqlPatternEndsWith.match(0, byteBuffer.limit(), drillBuf)); // XYZBCD should match EndsWith BCD
// EndsWith null pattern should match anything
- patternInfo = new RegexpUtil.SqlPatternInfo(RegexpUtil.SqlPatternType.ENDS_WITH,"", "");
- sqlPatternEndsWith = SqlPatternFactory.getSqlPatternMatcher(patternInfo, sb);
- assertEquals(sqlPatternEndsWith.match(), 1); // AB should match StartsWith null pattern
+ patternInfo = new RegexpUtil.SqlPatternInfo(RegexpUtil.SqlPatternType.ENDS_WITH, "", "");
+ sqlPatternEndsWith = SqlPatternFactory.getSqlPatternMatcher(patternInfo);
+ assertEquals(1, sqlPatternEndsWith.match(0, byteBuffer.limit(), drillBuf)); // XYZBCD should match EndsWith null pattern
// null pattern and null text
- sb.setLength(0);
- assertEquals(sqlPatternEndsWith.match(), 1); // null text should match null pattern
+ setDrillBuf("");
+ assertEquals(1, sqlPatternEndsWith.match(0, byteBuffer.limit(), drillBuf)); // null text should match null pattern
// wide character string.
- sb.append("b00dUrA0oa2i4ZEHg6zvPXPXlVQYB2BXe8T5gIEtvUDzcN6yUkIqyS07gaAy8k4" +
- "ac6Bn1cxblsXFnkp8g8hiQkUMJPyl6l0jTdsIzQ4PkVCURGGyF0aduGqCXUaKp91gqkRMvL" +
- "g1Lh6u0NrGCBoJajPxnwZCyh58cN5aFiNscBFKIqqLPTS1vnbR39nmzU88FM8qDepJRhvein" +
- "hHhmrHdEb22QN20dXEHSygR7vrb2zZhhfWeJbXRsesuYDqdGig801IAS6VWRIdQtJ6gaRhCdNz" +
- " DWnQWRXlMhcrR4MKJXeBgDtjzbHd0ZS53K8u8ORl6FKxtvdKmwUuHiuMJrQQm6Rgx6WJrAtvTf" +
- "UE8a5I3nYXdRppnm3MbRsLu4IxXIblh8kmAIG6n2yHwGhpWYkRI7cwl4dOB3bsxxtdaaTlZMMx6T" +
- "XPaUK10UzfZCAkWG9Du3QhJxxJBZaP3HPebXmw1l5swPohmG3L6zOcEWp7f" +
- "saldC7TOrFa3ReYFHooclSGTgZ9sWjJ5SYJ0vEkI1RMWoeGcdJq5v4lrcB6YjrMqQJIaxAdRnIaNG" +
- "V6oR9SkI4diiXspIvRWj6PMkpqI02ovI3va49bHauTrqTyM9eIhS" +
- "0Mc3SHzknQwHJAFkqmhV9Lm2VLULou2iJDvc5sWW8W48IODGqGytqLogA01Cuo3gURmH2057nCld9" +
- "PDHQEieFMddi4gKPOv4es1YX2aBo4RfYiTlUyXd6gGujVPgU2j" +
- "AAhcz6JqVC08O73gM9zOAM2l4PwN2TN3lBufkQUGyOzHtoTDjSdQ2DPXIks9A6ehIpn92n1UtdrJeMz" +
- "4oMN4kwP95YjQk1ko2e3DVAiPVlCiaWqnzXKa41kLVs3KiBhfAff5" +
- "hoTnBGn9CaXed6g6kLs2YBTQYM9yLW9Wb5qNhLeCM4GGJM8dUWqqEsWYPrcPAkCMa6LXfgEcsCwQ6ij" +
- "JhhjcxwoafBRyyEvQ6Pfhg8IqJ0afBpAZHhR2y4I11zbaJZqs3WG3H3aQHT" +
- "wcPHdBHnk65GdL3Njuoo0K4mcmN6lk7pWptHwTjkw59zTw834PZ8TWm5XiUnsi9JKy41MPqHcbO0nN" +
- "SYl9Q6kEjv4nt8p9unhUYqgrGvLl42nvqGb1F47f6PvxkewuouxMFAszYhaMjZzIf5" +
- "AgmvaXbSP9MKYu6EkkvM9CIhYGZuq7PJUk6wmoG6IxIfOokUcnrGzuU9INFUuXf4LptQ987GU3hw0d" +
- "yMNf6nncwABOOoC5EnqYBNoq29Mf54H5k2Xi8y1fh8ldtKcW9T4WsaXun9fKofegfhwY8wgfoG" +
- "eW2YNW3fdalIsggRzMEAXVDxj7oieReUGiT53uV2kcmcQRQLdUDUcOC1JEiSRpgZl38c1DDVRlz8Rbhi" +
- "KUxMqNCPx6PABXCPocpfXJa0yBT0l3ssgMlDfKsxAHX6aEC86zk0CDmTqZPmBjLAoYaHA3" +
- "uGqoARbQ6rhIBHOdkb7PoRImjmF4sQ60TBIWdao9dqLMjslhOQrGQlPIniW5I1V9nisc5lV0jEqeaC3y" +
- "lSnjhieVJ7H0FYjcsihjQryhyRwUZBGxWFuh0hI9rOv8h5jHKb549hOHPcIjSdLa6M048G" +
- "9drX0LNEixfp7WUqq2DyRfBioybmoHVzFWzhXrMJXzwHakzLwb4T2BHcLK6VpC4b2GodYlZe43ggxTNUErif" +
- "NEfEfxZhDj6HBMYobKvn4ofOsyKPGn6NXnCqIbCCvqOyBikxAYukgCmWHRJRGX4RjNbL" +
- "BVjY5eoXJB7xisnrqOieXuEnZ9n7rnK8qM4RuOSA8EaDd5n58JU9SUUNRqpZZgK2nPy9Pv90ORiGr1Y30rZS" +
- "bKT7SucjEZJ00WBF9FlJp6v8OcVvMBjRriaYYjVlOiLvVDQQ2NvYfbv5bLbEhkrJi5Nlg" +
- "3Tq5jsgSTEBqSKTD5UIukFP194LvVMQIOQ9YM7m9iZHMpCCoIL99FJLsNmzRDVETCjyFoXxSputp6ufupS1n" +
- "1SHRVlXm7Bx3bjJ79O3bGqjzxT1EZV39isegIyKx2H0zEUpnlXzzbusS0tusECmG3C3eGDOTs" +
- "FZbYTp5ZxtXCrudDSX3kaeLtCstfqAHGsjHkPd87aSNaJJjPaSaMmGo7zTJGUIX1VCA2KJP37USIAa5NGHtM" +
- "ChmtfO8kmrO9PZl6Ld18Yi7OlBsEUkMQE0yKwtSpkTK76XS5CG8S7S2S07vtYaBJJ9Bvuzr0F" +
- "tLsQ1gYWPF1geDalS5MdWfpDvF5MaeJMd2fK0m3jui7xY1IfuSxqZs7SEL6wUVGdWc5tsVroCMMy6Nqjdz5T4vW" +
- "zdSmpjrFnnB8edB5AOekeHua16I9qcNHuCcOgeYZIc6GzG0O1XAcQu6cEi1ZivUPoYf2sKr4uPvcD" +
- "gnaIN1KmhwSmxPgkErJVroPAUO18E2apxRlmZkhS6CInyzcLkvycSDCGtFaAZBO3QDO5nmvPFgVxfSbwG8BhhY" +
- "cWXqwnsbEEejtlXH3Zr5BtxTzd3Bo08s8HxjIXF6Z0CPXcvQzDoemL8M2A1AIrnBkT7vIHgvMuH475M" +
- "TXIR4K0njrS4X4KrBQFxvuZey8tnUnm8oiJWdUFzdM4N0KioJsG8UzxRODxKh4e3GqxmZxsSwwL0nNnV1syiCCC" +
- "zTgrtT6fcxpAfcFeTct7FNd4BjzbNCgBrSspzhxnEFMZXuqBGaOS9d9qcuUulwF0lAWGBauWI57qyjXfQnQ" +
- "i6Sy6nXOcUIOZWJ9BVJf4A27Pa4Pi7ZFznFnIdiQOrxCbb2ZCVkCftWsmcEMnXWXUkGOuA5JXo9YvGyPGq2wgO1wj" +
- "qAKyqxhBVOL48L2D0PYU16Ursxe0ckoBYXJheQi2d1eIa0pTD78325f8jCHclqINvuvj0GZfJENlc1e" +
- "ULPRd358aPnsx2DOmN1UojjBI1hacijCtFCE8zGCa9M0L7aZbRUHe8lmlaqhx0Su6nPnPgfbJr6idfxTJHqCT4t8" +
- "4BfZeqRZ5rgIS15Z7HFYSCPZixMPf683GQoQEFWIM0EqNTJmoHW3K7jDHOUpVutyyWt5VO5ray6rBrq1nAF" +
- "QEN59RqxM04eXxAOBWnPB17TdvDmyXuXDpjnjXReJLNqJVgB2VFPxsqhQWQupAtjBGvffU7exZMM92fiYdBArV" +
- "4SE1mBFewTNRz4PmwFVmUoxWj74rzZQuDMhAlx3jBXcaX8eD7PlaADdiMT1mF3faVyScA6bHbV2jU79XvppOfoD" +
- "YtBFj3a5LtAhTy5BnN2v1XlTQtk6MZ0Ej6g7sW96w9n2XV8wqdWGgjeKHaqH7Pn1XFw7IHvpVYK4wFvIGubp4bpms" +
- "C3ARq1Gqq8zvDQtoLZSZYOvXCZOIElGZLscqjbRckX5aRhTJX6CxjVcT7S3TScnCbqNdfqMpEsNl2GY3fprQF" +
- "CTtiZv12uCj0WILSesMc5ct2tQcIvwnOHAuE6fw7lD8EgQ0emU4zxUIDowhTvJ46k27rXTctIX7HlBEZXInV9r49" +
- "VbJdA3des3ZqGPbBYXTwQcns1jJTmnIf1S0jLWN0Wgk9bH5gkdhl53l2yc1AlZCyJdm9vktH5sctTDdMZrDPPHNUG2" +
- "pTBg4DDR9Zc6YvkrO4f5O3mfOl441bJkmOSNwoOc3krHTQlN6SBGLEptT4m7MFwqVyrbsEXHegwa53aN4W0J7qwV0" +
- "EMN2VHLtoHQDfXVOVDXnE1rK3cDJRMhCIvIRmywkA5T9GchtDVfek2qZq1H5wfe92RoXBseAuMoWtTCJiXOJraCxmj" +
- "cluokF3eK0NpycncoQcObLiS1rield0fdx8UJhsV9QnNtok5a0f4L1MKtjnYJmvItSqn3Lo2VkWagxGSEJzKnK2gO3pH" +
- "Whlarr6bRQeIwCXckALEVdGZBTPiqjYPBfk5H5wYXqkieh04tjSmnWytNebBNmGjTNgrqNVO7ftCbhh7wICOn" +
- "lpSMt6BoFvjHYW1IpEyTlVlvNl5NzPPAn2119ttZTfXpifXfQtBGzlCNYTD6m1FvpmOydzqEq8YadgybW76HDtnBdU" +
- "M1djhNcHfR12NkPc7UIvVJDiTTJ440pU1tqYISyEVr5QZBrhOP2y6RsZnlJy7Mqh56Jw0fJkbI2yQaoc7Jh2Wsh7" +
- "R58SXBXsalwNM9TmTeBMrc8Hghx9hDpai8agUclHTCoyK2hkEpKLlEJiXUKOE8JPugYE8yFVYF49UAjJUbsj6we3Ocii" +
- "FXs6oXGymttSxcRksGdfUaIonkrqniea31SgiGmhCjKi0x5ZDNFS26CqSEU0FKiLJyhui8HOJCddX64Ers0VTMHppS" +
- "ydpQX7PndzDuhT7k8Wj2kGJvKCqzVxTGCssDHoedKmMULEjUqU2EcjT5VOaCFeHKUXyP1B7qfYPtKLcgXHH5bmSgRs8gY" +
- "2JkPOST2Vr35mNKoulUMqFeo0s1y5hcVY39a3mBMytwZn7HgPhEJScwZdWJd6E5tZ13evEmcn1A5YPBYbm91CdJFXhj" +
- "iuqmJS71Xq4j56K35TmCJCb4jAAbcGTGEHzcCP1HKVFfsNnLqwflvHwMYQMA3EumrMn1nXnETZFdZJRHlnO8dwgnT" +
- "ehbB2XtrpErgaFbEWfWEinoiMd4Vs7kgHzs8UiuagYyyCxmg5gEvza3CXzjUnG2lfjI6ox6EYPgXvRySHmL" +
- "atXzj4x3CgF6j1gn10aUJknF7KQLJ84DIA5fy33YaLLbeOoGJHsdr9rQZCjaIqZKH870sslgm0tnGw5yOddnj" +
- "FDI2KwL6UVGr3YExI1p5sGaY0Su4G30PMJsOX9ZWvRF72Lk0pVMnjVugkzsnQrbyGezZ8WN8y8kOvrysQuhTt5" +
- "AFyMJ4kLsONE52kZsJYYyDpWw9a8BZ");
-
- patternInfo = new RegexpUtil.SqlPatternInfo(RegexpUtil.SqlPatternType.ENDS_WITH,"",
- "AFyMJ4kLsONE52kZsJYYyDpWw9a8BZ");
- sqlPatternEndsWith = SqlPatternFactory.getSqlPatternMatcher(patternInfo, sb);
- assertEquals(sqlPatternEndsWith.match(), 1); // should match
-
- patternInfo = new RegexpUtil.SqlPatternInfo(RegexpUtil.SqlPatternType.ENDS_WITH,"",
- "atXzj4x3CgF6j1gn10aUJknF7KQLJ84D");
- sqlPatternEndsWith = SqlPatternFactory.getSqlPatternMatcher(patternInfo, sb);
- assertEquals(sqlPatternEndsWith.match(), 0); // should not match
+ setDrillBuf(wideString);
- // non ascii
- sb.setLength(0);
- sb.append("¤EÀsÆW°ê»Ú®i¶T¤¤¤ß3¼Ó®i¶TÆU2~~");
+ patternInfo = new RegexpUtil.SqlPatternInfo(RegexpUtil.SqlPatternType.ENDS_WITH, "", "AFyMJ4kLsONE52kZsJYYyDpWw9a8BZ");
+ sqlPatternEndsWith = SqlPatternFactory.getSqlPatternMatcher(patternInfo);
+ assertEquals(1, sqlPatternEndsWith.match(0, byteBuffer.limit(), drillBuf)); // should match
+
+ patternInfo = new RegexpUtil.SqlPatternInfo(RegexpUtil.SqlPatternType.ENDS_WITH, "", "");
+ sqlPatternEndsWith = SqlPatternFactory.getSqlPatternMatcher(patternInfo);
- assertEquals(sqlPatternEndsWith.match(), 0); // should not match
+ setDrillBuf("");
+ assertEquals(1, sqlPatternEndsWith.match(0, byteBuffer.limit(), drillBuf)); // null text should match null pattern
- patternInfo = new RegexpUtil.SqlPatternInfo(RegexpUtil.SqlPatternType.ENDS_WITH,"", "TÆU2~~");
- sqlPatternEndsWith = SqlPatternFactory.getSqlPatternMatcher(patternInfo, sb);
- assertEquals(sqlPatternEndsWith.match(), 1); // should match
+ patternInfo = new RegexpUtil.SqlPatternInfo(RegexpUtil.SqlPatternType.ENDS_WITH, "", "atXzj4x3CgF6j1gn10aUJknF7KQLJ84D");
+ sqlPatternEndsWith = SqlPatternFactory.getSqlPatternMatcher(patternInfo);
+ assertEquals(0, sqlPatternEndsWith.match(0, byteBuffer.limit(), drillBuf)); // should not match
+ // non ascii
+ setDrillBuf("¤EÀsÆW°ê»Ú®i¶T¤¤¤ß3¼Ó®i¶TÆU2~~");
+ assertEquals(0, sqlPatternEndsWith.match(0, byteBuffer.limit(), drillBuf)); // should not match
+
+ patternInfo = new RegexpUtil.SqlPatternInfo(RegexpUtil.SqlPatternType.ENDS_WITH, "", "TÆU2~~");
+ sqlPatternEndsWith = SqlPatternFactory.getSqlPatternMatcher(patternInfo);
+ assertEquals(1, sqlPatternEndsWith.match(0, byteBuffer.limit(), drillBuf)); // should match
}
@Test
public void testSqlPatternContains() {
- RegexpUtil.SqlPatternInfo patternInfo = new RegexpUtil.SqlPatternInfo(RegexpUtil.SqlPatternType.CONTAINS,".*ABC.*", "ABCD");
+ RegexpUtil.SqlPatternInfo patternInfo = new RegexpUtil.SqlPatternInfo(RegexpUtil.SqlPatternType.CONTAINS, ".*ABC.*", "ABCD");
+ SqlPatternMatcher sqlPatternContains = SqlPatternFactory.getSqlPatternMatcher(patternInfo);
- StringBuffer sb = new StringBuffer("ABCD");
- SqlPatternMatcher sqlPatternContains = SqlPatternFactory.getSqlPatternMatcher(patternInfo, sb);
+ setDrillBuf("ABCD");
+ assertEquals(1, sqlPatternContains.match(0, byteBuffer.limit(), drillBuf)); // ABCD should contain ABCD
- assertEquals(1, sqlPatternContains.match()); // ABCD should contain ABCD
+ setDrillBuf("BC");
+ assertEquals(0, sqlPatternContains.match(0, byteBuffer.limit(), drillBuf)); // BC cannot contain ABCD
- sb.setLength(0);
- sb.append("BC");
- assertEquals(0, sqlPatternContains.match()); // ABCD should contain BC
+ setDrillBuf("");
+ assertEquals(0, sqlPatternContains.match(0, byteBuffer.limit(), drillBuf)); // null string should not match contains ABCD
- sb.setLength(0);
- assertEquals(0, sqlPatternContains.match()); // null string should not match contains ABCD
+ setDrillBuf("DE");
+ assertEquals(0, sqlPatternContains.match(0, byteBuffer.limit(), drillBuf)); // DE should not contain ABCD
- sb.append("DE");
- assertEquals(0, sqlPatternContains.match()); // ABCD should not contain DE
-
- sb.setLength(0);
- sb.append("xyzABCDqrs");
- assertEquals(1, sqlPatternContains.match()); // xyzABCDqrs should contain ABCD
+ setDrillBuf("xyzABCDqrs");
+ assertEquals(1, sqlPatternContains.match(0, byteBuffer.limit(), drillBuf)); // xyzABCDqrs should contain ABCD
// contains null pattern should match anything
- patternInfo = new RegexpUtil.SqlPatternInfo(RegexpUtil.SqlPatternType.CONTAINS,"", "");
- sqlPatternContains = SqlPatternFactory.getSqlPatternMatcher(patternInfo, sb);
- assertEquals(sqlPatternContains.match(), 1); // should match
+ patternInfo = new RegexpUtil.SqlPatternInfo(RegexpUtil.SqlPatternType.CONTAINS, "", "");
+ sqlPatternContains = SqlPatternFactory.getSqlPatternMatcher(patternInfo);
+
+ setDrillBuf("xyzABCDqrs");
+ assertEquals(1, sqlPatternContains.match(0, byteBuffer.limit(), drillBuf)); // should match
// null pattern and null text
- sb.setLength(0);
- assertEquals(sqlPatternContains.match(), 1); // null text should match null pattern
+ setDrillBuf("");
+ assertEquals(1, sqlPatternContains.match(0, byteBuffer.limit(), drillBuf)); // null text should match null pattern
// wide character string.
- sb.append("b00dUrA0oa2i4ZEHg6zvPXPXlVQYB2BXe8T5gIEtvUDzcN6yUkIqyS07gaAy8k4" +
- "ac6Bn1cxblsXFnkp8g8hiQkUMJPyl6l0jTdsIzQ4PkVCURGGyF0aduGqCXUaKp91gqkRMvL" +
- "g1Lh6u0NrGCBoJajPxnwZCyh58cN5aFiNscBFKIqqLPTS1vnbR39nmzU88FM8qDepJRhvein" +
- "hHhmrHdEb22QN20dXEHSygR7vrb2zZhhfWeJbXRsesuYDqdGig801IAS6VWRIdQtJ6gaRhCdNz" +
- " DWnQWRXlMhcrR4MKJXeBgDtjzbHd0ZS53K8u8ORl6FKxtvdKmwUuHiuMJrQQm6Rgx6WJrAtvTf" +
- "UE8a5I3nYXdRppnm3MbRsLu4IxXIblh8kmAIG6n2yHwGhpWYkRI7cwl4dOB3bsxxtdaaTlZMMx6T" +
- "XPaUK10UzfZCAkWG9Du3QhJxxJBZaP3HPebXmw1l5swPohmG3L6zOcEWp7f" +
- "saldC7TOrFa3ReYFHooclSGTgZ9sWjJ5SYJ0vEkI1RMWoeGcdJq5v4lrcB6YjrMqQJIaxAdRnIaNG" +
- "V6oR9SkI4diiXspIvRWj6PMkpqI02ovI3va49bHauTrqTyM9eIhS" +
- "0Mc3SHzknQwHJAFkqmhV9Lm2VLULou2iJDvc5sWW8W48IODGqGytqLogA01Cuo3gURmH2057nCld9" +
- "PDHQEieFMddi4gKPOv4es1YX2aBo4RfYiTlUyXd6gGujVPgU2j" +
- "AAhcz6JqVC08O73gM9zOAM2l4PwN2TN3lBufkQUGyOzHtoTDjSdQ2DPXIks9A6ehIpn92n1UtdrJeMz" +
- "4oMN4kwP95YjQk1ko2e3DVAiPVlCiaWqnzXKa41kLVs3KiBhfAff5" +
- "hoTnBGn9CaXed6g6kLs2YBTQYM9yLW9Wb5qNhLeCM4GGJM8dUWqqEsWYPrcPAkCMa6LXfgEcsCwQ6ij" +
- "JhhjcxwoafBRyyEvQ6Pfhg8IqJ0afBpAZHhR2y4I11zbaJZqs3WG3H3aQHT" +
- "wcPHdBHnk65GdL3Njuoo0K4mcmN6lk7pWptHwTjkw59zTw834PZ8TWm5XiUnsi9JKy41MPqHcbO0nN" +
- "SYl9Q6kEjv4nt8p9unhUYqgrGvLl42nvqGb1F47f6PvxkewuouxMFAszYhaMjZzIf5" +
- "AgmvaXbSP9MKYu6EkkvM9CIhYGZuq7PJUk6wmoG6IxIfOokUcnrGzuU9INFUuXf4LptQ987GU3hw0d" +
- "yMNf6nncwABOOoC5EnqYBNoq29Mf54H5k2Xi8y1fh8ldtKcW9T4WsaXun9fKofegfhwY8wgfoG" +
- "eW2YNW3fdalIsggRzMEAXVDxj7oieReUGiT53uV2kcmcQRQLdUDUcOC1JEiSRpgZl38c1DDVRlz8Rbhi" +
- "KUxMqNCPx6PABXCPocpfXJa0yBT0l3ssgMlDfKsxAHX6aEC86zk0CDmTqZPmBjLAoYaHA3" +
- "uGqoARbQ6rhIBHOdkb7PoRImjmF4sQ60TBIWdao9dqLMjslhOQrGQlPIniW5I1V9nisc5lV0jEqeaC3y" +
- "lSnjhieVJ7H0FYjcsihjQryhyRwUZBGxWFuh0hI9rOv8h5jHKb549hOHPcIjSdLa6M048G" +
- "9drX0LNEixfp7WUqq2DyRfBioybmoHVzFWzhXrMJXzwHakzLwb4T2BHcLK6VpC4b2GodYlZe43ggxTNUErif" +
- "NEfEfxZhDj6HBMYobKvn4ofOsyKPGn6NXnCqIbCCvqOyBikxAYukgCmWHRJRGX4RjNbL" +
- "BVjY5eoXJB7xisnrqOieXuEnZ9n7rnK8qM4RuOSA8EaDd5n58JU9SUUNRqpZZgK2nPy9Pv90ORiGr1Y30rZS" +
- "bKT7SucjEZJ00WBF9FlJp6v8OcVvMBjRriaYYjVlOiLvVDQQ2NvYfbv5bLbEhkrJi5Nlg" +
- "3Tq5jsgSTEBqSKTD5UIukFP194LvVMQIOQ9YM7m9iZHMpCCoIL99FJLsNmzRDVETCjyFoXxSputp6ufupS1n" +
- "1SHRVlXm7Bx3bjJ79O3bGqjzxT1EZV39isegIyKx2H0zEUpnlXzzbusS0tusECmG3C3eGDOTs" +
- "FZbYTp5ZxtXCrudDSX3kaeLtCstfqAHGsjHkPd87aSNaJJjPaSaMmGo7zTJGUIX1VCA2KJP37USIAa5NGHtM" +
- "ChmtfO8kmrO9PZl6Ld18Yi7OlBsEUkMQE0yKwtSpkTK76XS5CG8S7S2S07vtYaBJJ9Bvuzr0F" +
- "tLsQ1gYWPF1geDalS5MdWfpDvF5MaeJMd2fK0m3jui7xY1IfuSxqZs7SEL6wUVGdWc5tsVroCMMy6Nqjdz5T4vW" +
- "zdSmpjrFnnB8edB5AOekeHua16I9qcNHuCcOgeYZIc6GzG0O1XAcQu6cEi1ZivUPoYf2sKr4uPvcD" +
- "gnaIN1KmhwSmxPgkErJVroPAUO18E2apxRlmZkhS6CInyzcLkvycSDCGtFaAZBO3QDO5nmvPFgVxfSbwG8BhhY" +
- "cWXqwnsbEEejtlXH3Zr5BtxTzd3Bo08s8HxjIXF6Z0CPXcvQzDoemL8M2A1AIrnBkT7vIHgvMuH475M" +
- "TXIR4K0njrS4X4KrBQFxvuZey8tnUnm8oiJWdUFzdM4N0KioJsG8UzxRODxKh4e3GqxmZxsSwwL0nNnV1syiCCC" +
- "zTgrtT6fcxpAfcFeTct7FNd4BjzbNCgBrSspzhxnEFMZXuqBGaOS9d9qcuUulwF0lAWGBauWI57qyjXfQnQ" +
- "i6Sy6nXOcUIOZWJ9BVJf4A27Pa4Pi7ZFznFnIdiQOrxCbb2ZCVkCftWsmcEMnXWXUkGOuA5JXo9YvGyPGq2wgO1wj" +
- "qAKyqxhBVOL48L2D0PYU16Ursxe0ckoBYXJheQi2d1eIa0pTD78325f8jCHclqINvuvj0GZfJENlc1e" +
- "ULPRd358aPnsx2DOmN1UojjBI1hacijCtFCE8zGCa9M0L7aZbRUHe8lmlaqhx0Su6nPnPgfbJr6idfxTJHqCT4t8" +
- "4BfZeqRZ5rgIS15Z7HFYSCPZixMPf683GQoQEFWIM0EqNTJmoHW3K7jDHOUpVutyyWt5VO5ray6rBrq1nAF" +
- "QEN59RqxM04eXxAOBWnPB17TdvDmyXuXDpjnjXReJLNqJVgB2VFPxsqhQWQupAtjBGvffU7exZMM92fiYdBArV" +
- "4SE1mBFewTNRz4PmwFVmUoxWj74rzZQuDMhAlx3jBXcaX8eD7PlaADdiMT1mF3faVyScA6bHbV2jU79XvppOfoD" +
- "YtBFj3a5LtAhTy5BnN2v1XlTQtk6MZ0Ej6g7sW96w9n2XV8wqdWGgjeKHaqH7Pn1XFw7IHvpVYK4wFvIGubp4bpms" +
- "C3ARq1Gqq8zvDQtoLZSZYOvXCZOIElGZLscqjbRckX5aRhTJX6CxjVcT7S3TScnCbqNdfqMpEsNl2GY3fprQF" +
- "CTtiZv12uCj0WILSesMc5ct2tQcIvwnOHAuE6fw7lD8EgQ0emU4zxUIDowhTvJ46k27rXTctIX7HlBEZXInV9r49" +
- "VbJdA3des3ZqGPbBYXTwQcns1jJTmnIf1S0jLWN0Wgk9bH5gkdhl53l2yc1AlZCyJdm9vktH5sctTDdMZrDPPHNUG2" +
- "pTBg4DDR9Zc6YvkrO4f5O3mfOl441bJkmOSNwoOc3krHTQlN6SBGLEptT4m7MFwqVyrbsEXHegwa53aN4W0J7qwV0" +
- "EMN2VHLtoHQDfXVOVDXnE1rK3cDJRMhCIvIRmywkA5T9GchtDVfek2qZq1H5wfe92RoXBseAuMoWtTCJiXOJraCxmj" +
- "cluokF3eK0NpycncoQcObLiS1rield0fdx8UJhsV9QnNtok5a0f4L1MKtjnYJmvItSqn3Lo2VkWagxGSEJzKnK2gO3pH" +
- "Whlarr6bRQeIwCXckALEVdGZBTPiqjYPBfk5H5wYXqkieh04tjSmnWytNebBNmGjTNgrqNVO7ftCbhh7wICOn" +
- "lpSMt6BoFvjHYW1IpEyTlVlvNl5NzPPAn2119ttZTfXpifXfQtBGzlCNYTD6m1FvpmOydzqEq8YadgybW76HDtnBdU" +
- "M1djhNcHfR12NkPc7UIvVJDiTTJ440pU1tqYISyEVr5QZBrhOP2y6RsZnlJy7Mqh56Jw0fJkbI2yQaoc7Jh2Wsh7" +
- "R58SXBXsalwNM9TmTeBMrc8Hghx9hDpai8agUclHTCoyK2hkEpKLlEJiXUKOE8JPugYE8yFVYF49UAjJUbsj6we3Ocii" +
- "FXs6oXGymttSxcRksGdfUaIonkrqniea31SgiGmhCjKi0x5ZDNFS26CqSEU0FKiLJyhui8HOJCddX64Ers0VTMHppS" +
- "ydpQX7PndzDuhT7k8Wj2kGJvKCqzVxTGCssDHoedKmMULEjUqU2EcjT5VOaCFeHKUXyP1B7qfYPtKLcgXHH5bmSgRs8gY" +
- "2JkPOST2Vr35mNKoulUMqFeo0s1y5hcVY39a3mBMytwZn7HgPhEJScwZdWJd6E5tZ13evEmcn1A5YPBYbm91CdJFXhj" +
- "iuqmJS71Xq4j56K35TmCJCb4jAAbcGTGEHzcCP1HKVFfsNnLqwflvHwMYQMA3EumrMn1nXnETZFdZJRHlnO8dwgnT" +
- "ehbB2XtrpErgaFbEWfWEinoiMd4Vs7kgHzs8UiuagYyyCxmg5gEvza3CXzjUnG2lfjI6ox6EYPgXvRySHmL" +
- "atXzj4x3CgF6j1gn10aUJknF7KQLJ84DIA5fy33YaLLbeOoGJHsdr9rQZCjaIqZKH870sslgm0tnGw5yOddnj" +
- "FDI2KwL6UVGr3YExI1p5sGaY0Su4G30PMJsOX9ZWvRF72Lk0pVMnjVugkzsnQrbyGezZ8WN8y8kOvrysQuhTt5" +
- "AFyMJ4kLsONE52kZsJYYyDpWw9a8BZ");
-
- patternInfo = new RegexpUtil.SqlPatternInfo(RegexpUtil.SqlPatternType.CONTAINS,"",
- "tLsQ1gYWPF1geDalS5MdWfpDvF5MaeJMd2fK0m3jui7xY1IfuSxqZs7SEL6wUVGdWc5tsVroCMMy6Nqjdz5T4vW");
- sqlPatternContains = SqlPatternFactory.getSqlPatternMatcher(patternInfo, sb);
- assertEquals(sqlPatternContains.match(), 1);
-
- patternInfo = new RegexpUtil.SqlPatternInfo(RegexpUtil.SqlPatternType.CONTAINS,"",
- "ABCDEF");
- sqlPatternContains = SqlPatternFactory.getSqlPatternMatcher(patternInfo, sb);
- assertEquals(sqlPatternContains.match(), 0);
+ setDrillBuf(wideString);
- // non ascii
- sb.setLength(0);
- sb.append("¤EÀsÆW°ê»Ú®i¶T¤¤¤ß3¼Ó®i¶TÆU2~~");
+ patternInfo = new RegexpUtil.SqlPatternInfo(RegexpUtil.SqlPatternType.CONTAINS, "", "tLsQ1gYWPF1geDalS5MdWfpDvF5MaeJMd2fK0m3jui7xY1IfuSxqZs7SEL6wUVGdWc5tsVroCMMy6Nqjdz5T4vW");
+ sqlPatternContains = SqlPatternFactory.getSqlPatternMatcher(patternInfo);
+
+ assertEquals(1, sqlPatternContains.match(0, byteBuffer.limit(), drillBuf));
- assertEquals(sqlPatternContains.match(), 0); // should not match
+ patternInfo = new RegexpUtil.SqlPatternInfo(RegexpUtil.SqlPatternType.CONTAINS, "", "ABCDEF");
+ sqlPatternContains = SqlPatternFactory.getSqlPatternMatcher(patternInfo);
+ assertEquals(0, sqlPatternContains.match(0, byteBuffer.limit(), drillBuf));
- patternInfo = new RegexpUtil.SqlPatternInfo(RegexpUtil.SqlPatternType.CONTAINS,"", "¶T¤¤¤ß");
- sqlPatternContains = SqlPatternFactory.getSqlPatternMatcher(patternInfo, sb);
- assertEquals(sqlPatternContains.match(), 1); // should match
+ // non ascii
+ setDrillBuf("¤EÀsÆW°ê»Ú®i¶T¤¤¤ß3¼Ó®i¶TÆU2~~");
+ assertEquals(0, sqlPatternContains.match(0, byteBuffer.limit(), drillBuf)); // should not match
+ patternInfo = new RegexpUtil.SqlPatternInfo(RegexpUtil.SqlPatternType.CONTAINS, "", "¶T¤¤¤ß");
+ sqlPatternContains = SqlPatternFactory.getSqlPatternMatcher(patternInfo);
+ assertEquals(1, sqlPatternContains.match(0, byteBuffer.limit(), drillBuf)); // should match
}
@Test
public void testSqlPatternConstant() {
- RegexpUtil.SqlPatternInfo patternInfo = new RegexpUtil.SqlPatternInfo(RegexpUtil.SqlPatternType.CONSTANT,"ABC.*", "ABC");
-
- StringBuffer sb = new StringBuffer("ABC");
- SqlPatternMatcher sqlPatternContains = SqlPatternFactory.getSqlPatternMatcher(patternInfo, sb);
+ RegexpUtil.SqlPatternInfo patternInfo = new RegexpUtil.SqlPatternInfo(RegexpUtil.SqlPatternType.CONSTANT, "ABC.*", "ABC");
+ SqlPatternMatcher sqlPatternConstant = SqlPatternFactory.getSqlPatternMatcher(patternInfo);
- assertEquals(1, sqlPatternContains.match()); // ABC should match ABC
+ setDrillBuf("ABC");
+ assertEquals(1, sqlPatternConstant.match(0, byteBuffer.limit(), drillBuf)); // ABC should match ABC
- sb.setLength(0);
- sb.append("BC");
- assertEquals(0, sqlPatternContains.match()); // ABC not same as BC
+ setDrillBuf("BC");
+ assertEquals(0, sqlPatternConstant.match(0, byteBuffer.limit(), drillBuf)); // ABC not same as BC
- sb.setLength(0);
- assertEquals(0, sqlPatternContains.match()); // null string not same as ABC
+ setDrillBuf("");
+ assertEquals(0, sqlPatternConstant.match(0, byteBuffer.limit(), drillBuf)); // null string not same as ABC
- sb.append("DE");
- assertEquals(0, sqlPatternContains.match()); // ABC not same as DE
+ setDrillBuf("DE");
+ assertEquals(0, sqlPatternConstant.match(0, byteBuffer.limit(), drillBuf)); // ABC not same as DE
// null pattern should match null string
- patternInfo = new RegexpUtil.SqlPatternInfo(RegexpUtil.SqlPatternType.CONSTANT,"", "");
- sqlPatternContains = SqlPatternFactory.getSqlPatternMatcher(patternInfo, sb);
- sb.setLength(0);
- assertEquals(sqlPatternContains.match(), 1); // null text should match null pattern
+ patternInfo = new RegexpUtil.SqlPatternInfo(RegexpUtil.SqlPatternType.CONSTANT, "", "");
+ sqlPatternConstant = SqlPatternFactory.getSqlPatternMatcher(patternInfo);
- // wide character string.
- sb.append("b00dUrA0oa2i4ZEHg6zvPXPXlVQYB2BXe8T5gIEtvUDzcN6yUkIqyS07gaAy8k4" +
- "ac6Bn1cxblsXFnkp8g8hiQkUMJPyl6l0jTdsIzQ4PkVCURGGyF0aduGqCXUaKp91gqkRMvL" +
- "g1Lh6u0NrGCBoJajPxnwZCyh58cN5aFiNscBFKIqqLPTS1vnbR39nmzU88FM8qDepJRhvein" +
- "hHhmrHdEb22QN20dXEHSygR7vrb2zZhhfWeJbXRsesuYDqdGig801IAS6VWRIdQtJ6gaRhCdNz" +
- " DWnQWRXlMhcrR4MKJXeBgDtjzbHd0ZS53K8u8ORl6FKxtvdKmwUuHiuMJrQQm6Rgx6WJrAtvTf" +
- "UE8a5I3nYXdRppnm3MbRsLu4IxXIblh8kmAIG6n2yHwGhpWYkRI7cwl4dOB3bsxxtdaaTlZMMx6T" +
- "XPaUK10UzfZCAkWG9Du3QhJxxJBZaP3HPebXmw1l5swPohmG3L6zOcEWp7f" +
- "saldC7TOrFa3ReYFHooclSGTgZ9sWjJ5SYJ0vEkI1RMWoeGcdJq5v4lrcB6YjrMqQJIaxAdRnIaNG" +
- "V6oR9SkI4diiXspIvRWj6PMkpqI02ovI3va49bHauTrqTyM9eIhS" +
- "0Mc3SHzknQwHJAFkqmhV9Lm2VLULou2iJDvc5sWW8W48IODGqGytqLogA01Cuo3gURmH2057nCld9" +
- "PDHQEieFMddi4gKPOv4es1YX2aBo4RfYiTlUyXd6gGujVPgU2j" +
- "AAhcz6JqVC08O73gM9zOAM2l4PwN2TN3lBufkQUGyOzHtoTDjSdQ2DPXIks9A6ehIpn92n1UtdrJeMz" +
- "4oMN4kwP95YjQk1ko2e3DVAiPVlCiaWqnzXKa41kLVs3KiBhfAff5" +
- "hoTnBGn9CaXed6g6kLs2YBTQYM9yLW9Wb5qNhLeCM4GGJM8dUWqqEsWYPrcPAkCMa6LXfgEcsCwQ6ij" +
- "JhhjcxwoafBRyyEvQ6Pfhg8IqJ0afBpAZHhR2y4I11zbaJZqs3WG3H3aQHT" +
- "wcPHdBHnk65GdL3Njuoo0K4mcmN6lk7pWptHwTjkw59zTw834PZ8TWm5XiUnsi9JKy41MPqHcbO0nN" +
- "SYl9Q6kEjv4nt8p9unhUYqgrGvLl42nvqGb1F47f6PvxkewuouxMFAszYhaMjZzIf5" +
- "AgmvaXbSP9MKYu6EkkvM9CIhYGZuq7PJUk6wmoG6IxIfOokUcnrGzuU9INFUuXf4LptQ987GU3hw0d" +
- "yMNf6nncwABOOoC5EnqYBNoq29Mf54H5k2Xi8y1fh8ldtKcW9T4WsaXun9fKofegfhwY8wgfoG" +
- "eW2YNW3fdalIsggRzMEAXVDxj7oieReUGiT53uV2kcmcQRQLdUDUcOC1JEiSRpgZl38c1DDVRlz8Rbhi" +
- "KUxMqNCPx6PABXCPocpfXJa0yBT0l3ssgMlDfKsxAHX6aEC86zk0CDmTqZPmBjLAoYaHA3" +
- "uGqoARbQ6rhIBHOdkb7PoRImjmF4sQ60TBIWdao9dqLMjslhOQrGQlPIniW5I1V9nisc5lV0jEqeaC3y" +
- "lSnjhieVJ7H0FYjcsihjQryhyRwUZBGxWFuh0hI9rOv8h5jHKb549hOHPcIjSdLa6M048G" +
- "9drX0LNEixfp7WUqq2DyRfBioybmoHVzFWzhXrMJXzwHakzLwb4T2BHcLK6VpC4b2GodYlZe43ggxTNUErif" +
- "NEfEfxZhDj6HBMYobKvn4ofOsyKPGn6NXnCqIbCCvqOyBikxAYukgCmWHRJRGX4RjNbL" +
- "BVjY5eoXJB7xisnrqOieXuEnZ9n7rnK8qM4RuOSA8EaDd5n58JU9SUUNRqpZZgK2nPy9Pv90ORiGr1Y30rZS" +
- "bKT7SucjEZJ00WBF9FlJp6v8OcVvMBjRriaYYjVlOiLvVDQQ2NvYfbv5bLbEhkrJi5Nlg" +
- "3Tq5jsgSTEBqSKTD5UIukFP194LvVMQIOQ9YM7m9iZHMpCCoIL99FJLsNmzRDVETCjyFoXxSputp6ufupS1n" +
- "1SHRVlXm7Bx3bjJ79O3bGqjzxT1EZV39isegIyKx2H0zEUpnlXzzbusS0tusECmG3C3eGDOTs" +
- "FZbYTp5ZxtXCrudDSX3kaeLtCstfqAHGsjHkPd87aSNaJJjPaSaMmGo7zTJGUIX1VCA2KJP37USIAa5NGHtM" +
- "ChmtfO8kmrO9PZl6Ld18Yi7OlBsEUkMQE0yKwtSpkTK76XS5CG8S7S2S07vtYaBJJ9Bvuzr0F" +
- "tLsQ1gYWPF1geDalS5MdWfpDvF5MaeJMd2fK0m3jui7xY1IfuSxqZs7SEL6wUVGdWc5tsVroCMMy6Nqjdz5T4vW" +
- "zdSmpjrFnnB8edB5AOekeHua16I9qcNHuCcOgeYZIc6GzG0O1XAcQu6cEi1ZivUPoYf2sKr4uPvcD" +
- "gnaIN1KmhwSmxPgkErJVroPAUO18E2apxRlmZkhS6CInyzcLkvycSDCGtFaAZBO3QDO5nmvPFgVxfSbwG8BhhY" +
- "cWXqwnsbEEejtlXH3Zr5BtxTzd3Bo08s8HxjIXF6Z0CPXcvQzDoemL8M2A1AIrnBkT7vIHgvMuH475M" +
- "TXIR4K0njrS4X4KrBQFxvuZey8tnUnm8oiJWdUFzdM4N0KioJsG8UzxRODxKh4e3GqxmZxsSwwL0nNnV1syiCCC" +
- "zTgrtT6fcxpAfcFeTct7FNd4BjzbNCgBrSspzhxnEFMZXuqBGaOS9d9qcuUulwF0lAWGBauWI57qyjXfQnQ" +
- "i6Sy6nXOcUIOZWJ9BVJf4A27Pa4Pi7ZFznFnIdiQOrxCbb2ZCVkCftWsmcEMnXWXUkGOuA5JXo9YvGyPGq2wgO1wj" +
- "qAKyqxhBVOL48L2D0PYU16Ursxe0ckoBYXJheQi2d1eIa0pTD78325f8jCHclqINvuvj0GZfJENlc1e" +
- "ULPRd358aPnsx2DOmN1UojjBI1hacijCtFCE8zGCa9M0L7aZbRUHe8lmlaqhx0Su6nPnPgfbJr6idfxTJHqCT4t8" +
- "4BfZeqRZ5rgIS15Z7HFYSCPZixMPf683GQoQEFWIM0EqNTJmoHW3K7jDHOUpVutyyWt5VO5ray6rBrq1nAF" +
- "QEN59RqxM04eXxAOBWnPB17TdvDmyXuXDpjnjXReJLNqJVgB2VFPxsqhQWQupAtjBGvffU7exZMM92fiYdBArV" +
- "4SE1mBFewTNRz4PmwFVmUoxWj74rzZQuDMhAlx3jBXcaX8eD7PlaADdiMT1mF3faVyScA6bHbV2jU79XvppOfoD" +
- "YtBFj3a5LtAhTy5BnN2v1XlTQtk6MZ0Ej6g7sW96w9n2XV8wqdWGgjeKHaqH7Pn1XFw7IHvpVYK4wFvIGubp4bpms" +
- "C3ARq1Gqq8zvDQtoLZSZYOvXCZOIElGZLscqjbRckX5aRhTJX6CxjVcT7S3TScnCbqNdfqMpEsNl2GY3fprQF" +
- "CTtiZv12uCj0WILSesMc5ct2tQcIvwnOHAuE6fw7lD8EgQ0emU4zxUIDowhTvJ46k27rXTctIX7HlBEZXInV9r49" +
- "VbJdA3des3ZqGPbBYXTwQcns1jJTmnIf1S0jLWN0Wgk9bH5gkdhl53l2yc1AlZCyJdm9vktH5sctTDdMZrDPPHNUG2" +
- "pTBg4DDR9Zc6YvkrO4f5O3mfOl441bJkmOSNwoOc3krHTQlN6SBGLEptT4m7MFwqVyrbsEXHegwa53aN4W0J7qwV0" +
- "EMN2VHLtoHQDfXVOVDXnE1rK3cDJRMhCIvIRmywkA5T9GchtDVfek2qZq1H5wfe92RoXBseAuMoWtTCJiXOJraCxmj" +
- "cluokF3eK0NpycncoQcObLiS1rield0fdx8UJhsV9QnNtok5a0f4L1MKtjnYJmvItSqn3Lo2VkWagxGSEJzKnK2gO3pH" +
- "Whlarr6bRQeIwCXckALEVdGZBTPiqjYPBfk5H5wYXqkieh04tjSmnWytNebBNmGjTNgrqNVO7ftCbhh7wICOn" +
- "lpSMt6BoFvjHYW1IpEyTlVlvNl5NzPPAn2119ttZTfXpifXfQtBGzlCNYTD6m1FvpmOydzqEq8YadgybW76HDtnBdU" +
- "M1djhNcHfR12NkPc7UIvVJDiTTJ440pU1tqYISyEVr5QZBrhOP2y6RsZnlJy7Mqh56Jw0fJkbI2yQaoc7Jh2Wsh7" +
- "R58SXBXsalwNM9TmTeBMrc8Hghx9hDpai8agUclHTCoyK2hkEpKLlEJiXUKOE8JPugYE8yFVYF49UAjJUbsj6we3Ocii" +
- "FXs6oXGymttSxcRksGdfUaIonkrqniea31SgiGmhCjKi0x5ZDNFS26CqSEU0FKiLJyhui8HOJCddX64Ers0VTMHppS" +
- "ydpQX7PndzDuhT7k8Wj2kGJvKCqzVxTGCssDHoedKmMULEjUqU2EcjT5VOaCFeHKUXyP1B7qfYPtKLcgXHH5bmSgRs8gY" +
- "2JkPOST2Vr35mNKoulUMqFeo0s1y5hcVY39a3mBMytwZn7HgPhEJScwZdWJd6E5tZ13evEmcn1A5YPBYbm91CdJFXhj" +
- "iuqmJS71Xq4j56K35TmCJCb4jAAbcGTGEHzcCP1HKVFfsNnLqwflvHwMYQMA3EumrMn1nXnETZFdZJRHlnO8dwgnT" +
- "ehbB2XtrpErgaFbEWfWEinoiMd4Vs7kgHzs8UiuagYyyCxmg5gEvza3CXzjUnG2lfjI6ox6EYPgXvRySHmL" +
- "atXzj4x3CgF6j1gn10aUJknF7KQLJ84DIA5fy33YaLLbeOoGJHsdr9rQZCjaIqZKH870sslgm0tnGw5yOddnj" +
- "FDI2KwL6UVGr3YExI1p5sGaY0Su4G30PMJsOX9ZWvRF72Lk0pVMnjVugkzsnQrbyGezZ8WN8y8kOvrysQuhTt5" +
- "AFyMJ4kLsONE52kZsJYYyDpWw9a8BZ");
-
- patternInfo = new RegexpUtil.SqlPatternInfo(RegexpUtil.SqlPatternType.CONTAINS,"", sb.toString());
- sqlPatternContains = SqlPatternFactory.getSqlPatternMatcher(patternInfo, sb);
-
- assertEquals(sqlPatternContains.match(), 1);
+ setDrillBuf("");
+ assertEquals(1, sqlPatternConstant.match(0, byteBuffer.limit(), drillBuf)); // null text should match null pattern
- // non ascii
- sb.setLength(0);
- sb.append("¤EÀsÆW°ê»Ú®i¶T¤¤¤ß3¼Ó®i¶TÆU2~~");
+ // wide character string.
+ setDrillBuf(wideString);
- assertEquals(sqlPatternContains.match(), 0); // should not match
+ patternInfo = new RegexpUtil.SqlPatternInfo(RegexpUtil.SqlPatternType.CONTAINS, "", wideString);
+ sqlPatternConstant = SqlPatternFactory.getSqlPatternMatcher(patternInfo);
+ assertEquals(1, sqlPatternConstant.match(0, byteBuffer.limit(), drillBuf));
- patternInfo = new RegexpUtil.SqlPatternInfo(RegexpUtil.SqlPatternType.CONSTANT,"", "¤EÀsÆW°ê»Ú®i¶T¤¤¤ß3¼Ó®i¶TÆU2~~");
- sqlPatternContains = SqlPatternFactory.getSqlPatternMatcher(patternInfo, sb);
- assertEquals(sqlPatternContains.match(), 1); // should match
+ // non ascii
+ setDrillBuf("¤EÀsÆW°ê»Ú®i¶T¤¤¤ß3¼Ó®i¶TÆU2~~");
+ assertEquals(0, sqlPatternConstant.match(0, byteBuffer.limit(), drillBuf)); // should not match
+ patternInfo = new RegexpUtil.SqlPatternInfo(RegexpUtil.SqlPatternType.CONSTANT, "", "¤EÀsÆW°ê»Ú®i¶T¤¤¤ß3¼Ó®i¶TÆU2~~");
+ sqlPatternConstant = SqlPatternFactory.getSqlPatternMatcher(patternInfo);
+ assertEquals(1, sqlPatternConstant.match(0, byteBuffer.limit(), drillBuf)); // should match
}
@Test
- public void testSqlPatternNotSimple() {
- RegexpUtil.SqlPatternInfo patternInfo = new RegexpUtil.SqlPatternInfo(RegexpUtil.SqlPatternType.COMPLEX,"A.*BC.*", "");
+ public void testSqlPatternComplex() {
+ RegexpUtil.SqlPatternInfo patternInfo = new RegexpUtil.SqlPatternInfo(RegexpUtil.SqlPatternType.COMPLEX, "A.*BC.*", "");
+ SqlPatternMatcher sqlPatternComplex = SqlPatternFactory.getSqlPatternMatcher(patternInfo);
- StringBuffer sb = new StringBuffer("ADEBCDF");
- SqlPatternMatcher sqlPatternComplex = SqlPatternFactory.getSqlPatternMatcher(patternInfo, sb);
+ setDrillBuf("ABCDEF");
+ assertEquals(1, sqlPatternComplex.match(0, byteBuffer.limit(), drillBuf)); // ABCDEF should match A.*BC.*
- assertEquals(sqlPatternComplex.match(), 1); // ADEBCDF should match A.*BC.*
+ setDrillBuf("BC");
+ assertEquals(0, sqlPatternComplex.match(0, byteBuffer.limit(), drillBuf)); // BC should not match A.*BC.*
- sb.setLength(0);
- sb.append("BC");
- assertEquals(sqlPatternComplex.match(), 0); // BC should not match A.*BC.*
+ setDrillBuf("");
+ assertEquals(sqlPatternComplex.match(0, byteBuffer.limit(), drillBuf), 0); // null string should not match
- sb.setLength(0);
- assertEquals(sqlPatternComplex.match(), 0); // null string should not match
-
- sb.append("DEFGHIJ");
- assertEquals(sqlPatternComplex.match(), 0); // DEFGHIJ should not match A.*BC.*
+ setDrillBuf("DEFGHIJ");
+ assertEquals(sqlPatternComplex.match(0, byteBuffer.limit(), drillBuf), 0); // DEFGHIJ should not match A.*BC.*
java.util.regex.Matcher matcher;
matcher = java.util.regex.Pattern.compile("b00dUrA0.*").matcher("");
- patternInfo = new RegexpUtil.SqlPatternInfo(RegexpUtil.SqlPatternType.COMPLEX,"b00dUrA0.*42.*9a8BZ", "");
- sb.setLength(0);
- // wide character string.
- sb.append("b00dUrA0oa2i4ZEHg6zvPXPXlVQYB2BXe8T5gIEtvUDzcN6yUkIqyS07gaAy8k4" +
- "ac6Bn1cxblsXFnkp8g8hiQkUMJPyl6l0jTdsIzQ4PkVCURGGyF0aduGqCXUaKp91gqkRMvL" +
- "g1Lh6u0NrGCBoJajPxnwZCyh58cN5aFiNscBFKIqqLPTS1vnbR39nmzU88FM8qDepJRhvein" +
- "hHhmrHdEb22QN20dXEHSygR7vrb2zZhhfWeJbXRsesuYDqdGig801IAS6VWRIdQtJ6gaRhCdNz" +
- " DWnQWRXlMhcrR4MKJXeBgDtjzbHd0ZS53K8u8ORl6FKxtvdKmwUuHiuMJrQQm6Rgx6WJrAtvTf" +
- "UE8a5I3nYXdRppnm3MbRsLu4IxXIblh8kmAIG6n2yHwGhpWYkRI7cwl4dOB3bsxxtdaaTlZMMx6T" +
- "XPaUK10UzfZCAkWG9Du3QhJxxJBZaP3HPebXmw1l5swPohmG3L6zOcEWp7f" +
- "saldC7TOrFa3ReYFHooclSGTgZ9sWjJ5SYJ0vEkI1RMWoeGcdJq5v4lrcB6YjrMqQJIaxAdRnIaNG" +
- "V6oR9SkI4diiXspIvRWj6PMkpqI02ovI3va49bHauTrqTyM9eIhS" +
- "0Mc3SHzknQwHJAFkqmhV9Lm2VLULou2iJDvc5sWW8W48IODGqGytqLogA01Cuo3gURmH2057nCld9" +
- "PDHQEieFMddi4gKPOv4es1YX2aBo4RfYiTlUyXd6gGujVPgU2j" +
- "AAhcz6JqVC08O73gM9zOAM2l4PwN2TN3lBufkQUGyOzHtoTDjSdQ2DPXIks9A6ehIpn92n1UtdrJeMz" +
- "4oMN4kwP95YjQk1ko2e3DVAiPVlCiaWqnzXKa41kLVs3KiBhfAff5" +
- "hoTnBGn9CaXed6g6kLs2YBTQYM9yLW9Wb5qNhLeCM4GGJM8dUWqqEsWYPrcPAkCMa6LXfgEcsCwQ6ij" +
- "JhhjcxwoafBRyyEvQ6Pfhg8IqJ0afBpAZHhR2y4I11zbaJZqs3WG3H3aQHT" +
- "wcPHdBHnk65GdL3Njuoo0K4mcmN6lk7pWptHwTjkw59zTw834PZ8TWm5XiUnsi9JKy41MPqHcbO0nN" +
- "SYl9Q6kEjv4nt8p9unhUYqgrGvLl42nvqGb1F47f6PvxkewuouxMFAszYhaMjZzIf5" +
- "AgmvaXbSP9MKYu6EkkvM9CIhYGZuq7PJUk6wmoG6IxIfOokUcnrGzuU9INFUuXf4LptQ987GU3hw0d" +
- "yMNf6nncwABOOoC5EnqYBNoq29Mf54H5k2Xi8y1fh8ldtKcW9T4WsaXun9fKofegfhwY8wgfoG" +
- "eW2YNW3fdalIsggRzMEAXVDxj7oieReUGiT53uV2kcmcQRQLdUDUcOC1JEiSRpgZl38c1DDVRlz8Rbhi" +
- "KUxMqNCPx6PABXCPocpfXJa0yBT0l3ssgMlDfKsxAHX6aEC86zk0CDmTqZPmBjLAoYaHA3" +
- "uGqoARbQ6rhIBHOdkb7PoRImjmF4sQ60TBIWdao9dqLMjslhOQrGQlPIniW5I1V9nisc5lV0jEqeaC3y" +
- "lSnjhieVJ7H0FYjcsihjQryhyRwUZBGxWFuh0hI9rOv8h5jHKb549hOHPcIjSdLa6M048G" +
- "9drX0LNEixfp7WUqq2DyRfBioybmoHVzFWzhXrMJXzwHakzLwb4T2BHcLK6VpC4b2GodYlZe43ggxTNUErif" +
- "NEfEfxZhDj6HBMYobKvn4ofOsyKPGn6NXnCqIbCCvqOyBikxAYukgCmWHRJRGX4RjNbL" +
- "BVjY5eoXJB7xisnrqOieXuEnZ9n7rnK8qM4RuOSA8EaDd5n58JU9SUUNRqpZZgK2nPy9Pv90ORiGr1Y30rZS" +
- "bKT7SucjEZJ00WBF9FlJp6v8OcVvMBjRriaYYjVlOiLvVDQQ2NvYfbv5bLbEhkrJi5Nlg" +
- "3Tq5jsgSTEBqSKTD5UIukFP194LvVMQIOQ9YM7m9iZHMpCCoIL99FJLsNmzRDVETCjyFoXxSputp6ufupS1n" +
- "1SHRVlXm7Bx3bjJ79O3bGqjzxT1EZV39isegIyKx2H0zEUpnlXzzbusS0tusECmG3C3eGDOTs" +
- "FZbYTp5ZxtXCrudDSX3kaeLtCstfqAHGsjHkPd87aSNaJJjPaSaMmGo7zTJGUIX1VCA2KJP37USIAa5NGHtM" +
- "ChmtfO8kmrO9PZl6Ld18Yi7OlBsEUkMQE0yKwtSpkTK76XS5CG8S7S2S07vtYaBJJ9Bvuzr0F" +
- "tLsQ1gYWPF1geDalS5MdWfpDvF5MaeJMd2fK0m3jui7xY1IfuSxqZs7SEL6wUVGdWc5tsVroCMMy6Nqjdz5T4vW" +
- "zdSmpjrFnnB8edB5AOekeHua16I9qcNHuCcOgeYZIc6GzG0O1XAcQu6cEi1ZivUPoYf2sKr4uPvcD" +
- "gnaIN1KmhwSmxPgkErJVroPAUO18E2apxRlmZkhS6CInyzcLkvycSDCGtFaAZBO3QDO5nmvPFgVxfSbwG8BhhY" +
- "cWXqwnsbEEejtlXH3Zr5BtxTzd3Bo08s8HxjIXF6Z0CPXcvQzDoemL8M2A1AIrnBkT7vIHgvMuH475M" +
- "TXIR4K0njrS4X4KrBQFxvuZey8tnUnm8oiJWdUFzdM4N0KioJsG8UzxRODxKh4e3GqxmZxsSwwL0nNnV1syiCCC" +
- "zTgrtT6fcxpAfcFeTct7FNd4BjzbNCgBrSspzhxnEFMZXuqBGaOS9d9qcuUulwF0lAWGBauWI57qyjXfQnQ" +
- "i6Sy6nXOcUIOZWJ9BVJf4A27Pa4Pi7ZFznFnIdiQOrxCbb2ZCVkCftWsmcEMnXWXUkGOuA5JXo9YvGyPGq2wgO1wj" +
- "qAKyqxhBVOL48L2D0PYU16Ursxe0ckoBYXJheQi2d1eIa0pTD78325f8jCHclqINvuvj0GZfJENlc1e" +
- "ULPRd358aPnsx2DOmN1UojjBI1hacijCtFCE8zGCa9M0L7aZbRUHe8lmlaqhx0Su6nPnPgfbJr6idfxTJHqCT4t8" +
- "4BfZeqRZ5rgIS15Z7HFYSCPZixMPf683GQoQEFWIM0EqNTJmoHW3K7jDHOUpVutyyWt5VO5ray6rBrq1nAF" +
- "QEN59RqxM04eXxAOBWnPB17TdvDmyXuXDpjnjXReJLNqJVgB2VFPxsqhQWQupAtjBGvffU7exZMM92fiYdBArV" +
- "4SE1mBFewTNRz4PmwFVmUoxWj74rzZQuDMhAlx3jBXcaX8eD7PlaADdiMT1mF3faVyScA6bHbV2jU79XvppOfoD" +
- "YtBFj3a5LtAhTy5BnN2v1XlTQtk6MZ0Ej6g7sW96w9n2XV8wqdWGgjeKHaqH7Pn1XFw7IHvpVYK4wFvIGubp4bpms" +
- "C3ARq1Gqq8zvDQtoLZSZYOvXCZOIElGZLscqjbRckX5aRhTJX6CxjVcT7S3TScnCbqNdfqMpEsNl2GY3fprQF" +
- "CTtiZv12uCj0WILSesMc5ct2tQcIvwnOHAuE6fw7lD8EgQ0emU4zxUIDowhTvJ46k27rXTctIX7HlBEZXInV9r49" +
- "VbJdA3des3ZqGPbBYXTwQcns1jJTmnIf1S0jLWN0Wgk9bH5gkdhl53l2yc1AlZCyJdm9vktH5sctTDdMZrDPPHNUG2" +
- "pTBg4DDR9Zc6YvkrO4f5O3mfOl441bJkmOSNwoOc3krHTQlN6SBGLEptT4m7MFwqVyrbsEXHegwa53aN4W0J7qwV0" +
- "EMN2VHLtoHQDfXVOVDXnE1rK3cDJRMhCIvIRmywkA5T9GchtDVfek2qZq1H5wfe92RoXBseAuMoWtTCJiXOJraCxmj" +
- "cluokF3eK0NpycncoQcObLiS1rield0fdx8UJhsV9QnNtok5a0f4L1MKtjnYJmvItSqn3Lo2VkWagxGSEJzKnK2gO3pH" +
- "Whlarr6bRQeIwCXckALEVdGZBTPiqjYPBfk5H5wYXqkieh04tjSmnWytNebBNmGjTNgrqNVO7ftCbhh7wICOn" +
- "lpSMt6BoFvjHYW1IpEyTlVlvNl5NzPPAn2119ttZTfXpifXfQtBGzlCNYTD6m1FvpmOydzqEq8YadgybW76HDtnBdU" +
- "M1djhNcHfR12NkPc7UIvVJDiTTJ440pU1tqYISyEVr5QZBrhOP2y6RsZnlJy7Mqh56Jw0fJkbI2yQaoc7Jh2Wsh7" +
- "R58SXBXsalwNM9TmTeBMrc8Hghx9hDpai8agUclHTCoyK2hkEpKLlEJiXUKOE8JPugYE8yFVYF49UAjJUbsj6we3Ocii" +
- "FXs6oXGymttSxcRksGdfUaIonkrqniea31SgiGmhCjKi0x5ZDNFS26CqSEU0FKiLJyhui8HOJCddX64Ers0VTMHppS" +
- "ydpQX7PndzDuhT7k8Wj2kGJvKCqzVxTGCssDHoedKmMULEjUqU2EcjT5VOaCFeHKUXyP1B7qfYPtKLcgXHH5bmSgRs8gY" +
- "2JkPOST2Vr35mNKoulUMqFeo0s1y5hcVY39a3mBMytwZn7HgPhEJScwZdWJd6E5tZ13evEmcn1A5YPBYbm91CdJFXhj" +
- "iuqmJS71Xq4j56K35TmCJCb4jAAbcGTGEHzcCP1HKVFfsNnLqwflvHwMYQMA3EumrMn1nXnETZFdZJRHlnO8dwgnT" +
- "ehbB2XtrpErgaFbEWfWEinoiMd4Vs7kgHzs8UiuagYyyCxmg5gEvza3CXzjUnG2lfjI6ox6EYPgXvRySHmL" +
- "atXzj4x3CgF6j1gn10aUJknF7KQLJ84DIA5fy33YaLLbeOoGJHsdr9rQZCjaIqZKH870sslgm0tnGw5yOddnj" +
- "FDI2KwL6UVGr3YExI1p5sGaY0Su4G30PMJsOX9ZWvRF72Lk0pVMnjVugkzsnQrbyGezZ8WN8y8kOvrysQuhTt5" +
- "AFyMJ4kLsONE52kZsJYYyDpWw9a8BZ");
-
- sqlPatternComplex = SqlPatternFactory.getSqlPatternMatcher(patternInfo, sb);
- assertEquals(sqlPatternComplex.match(), 1);
+ patternInfo = new RegexpUtil.SqlPatternInfo(RegexpUtil.SqlPatternType.COMPLEX, "b00dUrA0.*42.*9a8BZ", "");
- // non ascii
- sb.setLength(0);
- sb.append("¤EÀsÆW°ê»Ú®i¶T¤¤¤ß3¼Ó®i¶TÆU2~~");
+ // wide character string.
+ setDrillBuf(wideString);
- assertEquals(sqlPatternComplex.match(), 0); // should not match
+ sqlPatternComplex = SqlPatternFactory.getSqlPatternMatcher(patternInfo);
+ assertEquals(1, sqlPatternComplex.match(0, byteBuffer.limit(), drillBuf));
- patternInfo = new RegexpUtil.SqlPatternInfo(RegexpUtil.SqlPatternType.COMPLEX,".*»Ú®i¶T¤¤¤.*¼Ó®i.*ÆU2~~", "");
- sqlPatternComplex = SqlPatternFactory.getSqlPatternMatcher(patternInfo, sb);
- assertEquals(sqlPatternComplex.match(), 1); // should match
+ // non ascii
+ setDrillBuf("¤EÀsÆW°ê»Ú®i¶T¤¤¤ß3¼Ó®i¶TÆU2~~");
+ assertEquals(0, sqlPatternComplex.match(0, byteBuffer.limit(), drillBuf)); // non-ASCII string should not match b00dUrA0.*42.*9a8BZ
+ patternInfo = new RegexpUtil.SqlPatternInfo(RegexpUtil.SqlPatternType.COMPLEX, ".*»Ú®i¶T¤¤¤.*¼Ó®i.*ÆU2~~", "");
+ sqlPatternComplex = SqlPatternFactory.getSqlPatternMatcher(patternInfo);
+ assertEquals(1, sqlPatternComplex.match(0, byteBuffer.limit(), drillBuf)); // should match
}
+ @After
+ public void cleanup() {
+ drillBuf.close();
+ allocator.close();
+ }
}
+
http://git-wip-us.apache.org/repos/asf/drill/blob/df95709a/exec/java-exec/src/test/java/org/apache/drill/exec/expr/fn/impl/TestStringFunctions.java
----------------------------------------------------------------------
diff --git a/exec/java-exec/src/test/java/org/apache/drill/exec/expr/fn/impl/TestStringFunctions.java b/exec/java-exec/src/test/java/org/apache/drill/exec/expr/fn/impl/TestStringFunctions.java
index 4249af6..a8a69e1 100644
--- a/exec/java-exec/src/test/java/org/apache/drill/exec/expr/fn/impl/TestStringFunctions.java
+++ b/exec/java-exec/src/test/java/org/apache/drill/exec/expr/fn/impl/TestStringFunctions.java
@@ -1113,6 +1113,155 @@ public class TestStringFunctions extends BaseTestQuery {
.build()
.run();
+ testBuilder()
+ .sqlQuery(" SELECT id FROM (" +
+ "VALUES('abc')) tbl(id)" +
+ "where id like 'ab'")
+ .unOrdered()
+ .baselineColumns("id")
+ .expectsEmptyResultSet()
+ .build()
+ .run();
+
+ testBuilder()
+ .sqlQuery(" SELECT id FROM (" +
+ "VALUES('abc')) tbl(id)" +
+ "where id like '%ab'")
+ .unOrdered()
+ .baselineColumns("id")
+ .expectsEmptyResultSet()
+ .build()
+ .run();
+
+ testBuilder()
+ .sqlQuery(" SELECT id FROM (" +
+ "VALUES('abc')) tbl(id)" +
+ "where id like 'ab%'")
+ .unOrdered()
+ .baselineColumns("id")
+ .baselineValues("abc")
+ .build()
+ .run();
+
+ testBuilder()
+ .sqlQuery(" SELECT id FROM (" +
+ "VALUES('abc')) tbl(id)" +
+ "where id like '%ab%'")
+ .unOrdered()
+ .baselineColumns("id")
+ .baselineValues("abc")
+ .build()
+ .run();
+
+ testBuilder()
+ .sqlQuery(" SELECT id FROM (" +
+ "VALUES('abc')) tbl(id)" +
+ "where id like 'abc'")
+ .unOrdered()
+ .baselineColumns("id")
+ .baselineValues("abc")
+ .build()
+ .run();
+
+ testBuilder()
+ .sqlQuery(" SELECT id FROM (" +
+ "VALUES('abc')) tbl(id)" +
+ "where id like 'abc%'")
+ .unOrdered()
+ .baselineColumns("id")
+ .baselineValues("abc")
+ .build()
+ .run();
+
+ testBuilder()
+ .sqlQuery(" SELECT id FROM (" +
+ "VALUES('abc')) tbl(id)" +
+ "where id like '%abc'")
+ .unOrdered()
+ .baselineColumns("id")
+ .baselineValues("abc")
+ .build()
+ .run();
+
+ testBuilder()
+ .sqlQuery(" SELECT id FROM (" +
+ "VALUES('abc')) tbl(id)" +
+ "where id like '%abc%'")
+ .unOrdered()
+ .baselineColumns("id")
+ .baselineValues("abc")
+ .build()
+ .run();
+
+ testBuilder()
+ .sqlQuery(" SELECT id FROM (" +
+ "VALUES('abc')) tbl(id)" +
+ "where id like 'abcd'")
+ .unOrdered()
+ .baselineColumns("id")
+ .expectsEmptyResultSet()
+ .build()
+ .run();
+
+ testBuilder()
+ .sqlQuery(" SELECT id FROM (" +
+ "VALUES('abc')) tbl(id)" +
+ "where id like 'abcd%'")
+ .unOrdered()
+ .baselineColumns("id")
+ .expectsEmptyResultSet()
+ .build()
+ .run();
+
+ testBuilder()
+ .sqlQuery(" SELECT id FROM (" +
+ "VALUES('abc')) tbl(id)" +
+ "where id like '%abcd'")
+ .unOrdered()
+ .baselineColumns("id")
+ .expectsEmptyResultSet()
+ .build()
+ .run();
+
+ testBuilder()
+ .sqlQuery(" SELECT id FROM (" +
+ "VALUES('abc')) tbl(id)" +
+ "where id like '%abcd%'")
+ .unOrdered()
+ .baselineColumns("id")
+ .expectsEmptyResultSet()
+ .build()
+ .run();
+
+ testBuilder()
+ .sqlQuery(" SELECT id FROM (" +
+ "VALUES('abc')) tbl(id)" +
+ "where id like ''")
+ .unOrdered()
+ .baselineColumns("id")
+ .expectsEmptyResultSet()
+ .build()
+ .run();
+
+ testBuilder()
+ .sqlQuery(" SELECT id FROM (" +
+ "VALUES('abc')) tbl(id)" +
+ "where id like '%'")
+ .unOrdered()
+ .baselineColumns("id")
+ .baselineValues("abc")
+ .build()
+ .run();
+
+ testBuilder()
+ .sqlQuery(" SELECT id FROM (" +
+ "VALUES('abc')) tbl(id)" +
+ "where id like '%%'")
+ .unOrdered()
+ .baselineColumns("id")
+ .baselineValues("abc")
+ .build()
+ .run();
}
@Test