You are viewing a plain text version of this content. The canonical link for it is here.
Posted to commits@phoenix.apache.org by mi...@apache.org on 2019/07/16 11:10:17 UTC
[phoenix] branch master updated: PHOENIX-5327 PherfMainIT fails
with duplicate TABLE/INDEX.
This is an automated email from the ASF dual-hosted git repository.
mihir6692 pushed a commit to branch master
in repository https://gitbox.apache.org/repos/asf/phoenix.git
The following commit(s) were added to refs/heads/master by this push:
new d611422 PHOENIX-5327 PherfMainIT fails with duplicate TABLE/INDEX.
d611422 is described below
commit d61142206389b845376384ed1b2ef5e01e806a9a
Author: Viraj Jasani <vi...@gmail.com>
AuthorDate: Tue Jul 16 16:37:01 2019 +0530
PHOENIX-5327 PherfMainIT fails with duplicate TABLE/INDEX.
Signed-off-by: Monani Mihir <mo...@gmail.com>
---
.../org/apache/phoenix/pherf/DataIngestIT.java | 6 +--
.../java/org/apache/phoenix/pherf/PherfMainIT.java | 3 +-
.../main/java/org/apache/phoenix/pherf/Pherf.java | 7 ++-
.../phoenix/pherf/workload/QueryExecutor.java | 16 +++++--
.../apache/phoenix/pherf/workload/Workload.java | 13 +++---
.../phoenix/pherf/workload/WriteWorkload.java | 10 ++---
.../scenario/prod_test_unsalted_scenario.xml | 50 ++++++++++++++--------
7 files changed, 64 insertions(+), 41 deletions(-)
diff --git a/phoenix-pherf/src/it/java/org/apache/phoenix/pherf/DataIngestIT.java b/phoenix-pherf/src/it/java/org/apache/phoenix/pherf/DataIngestIT.java
index 973ce2c..bc768e2 100644
--- a/phoenix-pherf/src/it/java/org/apache/phoenix/pherf/DataIngestIT.java
+++ b/phoenix-pherf/src/it/java/org/apache/phoenix/pherf/DataIngestIT.java
@@ -44,7 +44,6 @@ import org.apache.phoenix.pherf.workload.Workload;
import org.apache.phoenix.pherf.workload.WorkloadExecutor;
import org.apache.phoenix.pherf.workload.WriteWorkload;
import org.junit.Before;
-import org.junit.Ignore;
import org.junit.Test;
import com.jcabi.jdbc.JdbcSession;
@@ -217,8 +216,7 @@ public class DataIngestIT extends ResultBaseTestIT {
assertExpectedNumberOfRecordsWritten(scenario);
}
- private void assertExpectedNumberOfRecordsWritten(Scenario scenario) throws Exception,
- SQLException {
+ private void assertExpectedNumberOfRecordsWritten(Scenario scenario) throws Exception {
Connection connection = util.getConnection(scenario.getTenantId());
String sql = "select count(*) from " + scenario.getTableName();
Integer count = new JdbcSession(connection).sql(sql).select(new Outcome<Integer>() {
@@ -230,7 +228,7 @@ public class DataIngestIT extends ResultBaseTestIT {
return null;
}
});
- assertNotNull("Could not retrieve count. " + count);
+ assertNotNull("Could not retrieve count. ", count);
assertEquals("Expected 100 rows to have been inserted",
scenario.getRowCount(), count.intValue());
}
diff --git a/phoenix-pherf/src/it/java/org/apache/phoenix/pherf/PherfMainIT.java b/phoenix-pherf/src/it/java/org/apache/phoenix/pherf/PherfMainIT.java
index 7a080c8..3ee9327 100644
--- a/phoenix-pherf/src/it/java/org/apache/phoenix/pherf/PherfMainIT.java
+++ b/phoenix-pherf/src/it/java/org/apache/phoenix/pherf/PherfMainIT.java
@@ -25,10 +25,11 @@ import org.junit.contrib.java.lang.system.ExpectedSystemExit;
import java.util.concurrent.Future;
public class PherfMainIT extends ResultBaseTestIT {
+
@Rule
public final ExpectedSystemExit exit = ExpectedSystemExit.none();
- //@Test disabled until PHOENIX-5327 is fixed
+ @Test
public void testPherfMain() throws Exception {
String[] args = { "-q", "-l",
"--schemaFile", ".*create_prod_test_unsalted.sql",
diff --git a/phoenix-pherf/src/main/java/org/apache/phoenix/pherf/Pherf.java b/phoenix-pherf/src/main/java/org/apache/phoenix/pherf/Pherf.java
index 2b55e29..05e747a 100644
--- a/phoenix-pherf/src/main/java/org/apache/phoenix/pherf/Pherf.java
+++ b/phoenix-pherf/src/main/java/org/apache/phoenix/pherf/Pherf.java
@@ -25,7 +25,6 @@ import java.util.List;
import java.util.Properties;
import com.google.common.annotations.VisibleForTesting;
-import jline.internal.TestAccessible;
import org.apache.commons.cli.CommandLine;
import org.apache.commons.cli.CommandLineParser;
import org.apache.commons.cli.HelpFormatter;
@@ -229,10 +228,10 @@ public class Pherf {
}
// Compare results and exit
- if (null != compareResults) {
+ if (null != compareResults) {
LOGGER.info("\nStarting to compare results and exiting for " + compareResults);
- new GoogleChartGenerator(compareResults, compareType).readAndRender();
- return;
+ new GoogleChartGenerator(compareResults, compareType).readAndRender();
+ return;
}
XMLConfigParser parser = new XMLConfigParser(scenarioFile);
diff --git a/phoenix-pherf/src/main/java/org/apache/phoenix/pherf/workload/QueryExecutor.java b/phoenix-pherf/src/main/java/org/apache/phoenix/pherf/workload/QueryExecutor.java
index d894a96..c15cf1a 100644
--- a/phoenix-pherf/src/main/java/org/apache/phoenix/pherf/workload/QueryExecutor.java
+++ b/phoenix-pherf/src/main/java/org/apache/phoenix/pherf/workload/QueryExecutor.java
@@ -20,9 +20,18 @@ package org.apache.phoenix.pherf.workload;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.hbase.HBaseConfiguration;
-import org.apache.phoenix.pherf.PherfConstants.GeneratePhoenixStats;
-import org.apache.phoenix.pherf.configuration.*;
-import org.apache.phoenix.pherf.result.*;
+import org.apache.phoenix.pherf.configuration.DataModel;
+import org.apache.phoenix.pherf.configuration.ExecutionType;
+import org.apache.phoenix.pherf.configuration.Query;
+import org.apache.phoenix.pherf.configuration.QuerySet;
+import org.apache.phoenix.pherf.configuration.Scenario;
+import org.apache.phoenix.pherf.configuration.XMLConfigParser;
+import org.apache.phoenix.pherf.result.DataModelResult;
+import org.apache.phoenix.pherf.result.QueryResult;
+import org.apache.phoenix.pherf.result.QuerySetResult;
+import org.apache.phoenix.pherf.result.ResultManager;
+import org.apache.phoenix.pherf.result.ScenarioResult;
+import org.apache.phoenix.pherf.result.ThreadTime;
import org.apache.phoenix.pherf.rules.RulesApplier;
import org.apache.phoenix.pherf.util.PhoenixUtil;
import org.slf4j.Logger;
@@ -91,6 +100,7 @@ public class QueryExecutor implements Workload {
return callable;
}
+
/**
* Export all queries results to CSV
*
diff --git a/phoenix-pherf/src/main/java/org/apache/phoenix/pherf/workload/Workload.java b/phoenix-pherf/src/main/java/org/apache/phoenix/pherf/workload/Workload.java
index 0532201..8dbda91 100644
--- a/phoenix-pherf/src/main/java/org/apache/phoenix/pherf/workload/Workload.java
+++ b/phoenix-pherf/src/main/java/org/apache/phoenix/pherf/workload/Workload.java
@@ -20,10 +20,13 @@ package org.apache.phoenix.pherf.workload;
import java.util.concurrent.Callable;
public interface Workload {
- public Callable<Void> execute() throws Exception;
- /**
- * Use this method to perform any cleanup or forced shutdown of the thread.
- */
- public void complete();
+
+ Callable<Void> execute() throws Exception;
+
+ /**
+ * Use this method to perform any cleanup or forced shutdown of the thread.
+ */
+ void complete();
+
}
diff --git a/phoenix-pherf/src/main/java/org/apache/phoenix/pherf/workload/WriteWorkload.java b/phoenix-pherf/src/main/java/org/apache/phoenix/pherf/workload/WriteWorkload.java
index 3df5fe8..c209d98 100644
--- a/phoenix-pherf/src/main/java/org/apache/phoenix/pherf/workload/WriteWorkload.java
+++ b/phoenix-pherf/src/main/java/org/apache/phoenix/pherf/workload/WriteWorkload.java
@@ -177,10 +177,11 @@ public class WriteWorkload implements Workload {
};
}
+
private synchronized void exec(DataLoadTimeSummary dataLoadTimeSummary,
DataLoadThreadTime dataLoadThreadTime, Scenario scenario) throws Exception {
LOGGER.info("\nLoading " + scenario.getRowCount() + " rows for " + scenario.getTableName());
-
+
// Execute any pre dataload scenario DDLs
pUtil.executeScenarioDdl(scenario.getPreScenarioDdls(), scenario.getTenantId(), dataLoadTimeSummary);
@@ -263,11 +264,8 @@ public class WriteWorkload implements Workload {
try {
connection = pUtil.getConnection(scenario.getTenantId());
long logStartTime = System.currentTimeMillis();
- long
- maxDuration =
- (WriteWorkload.this.writeParams == null) ?
- Long.MAX_VALUE :
- WriteWorkload.this.writeParams.getExecutionDurationInMs();
+ long maxDuration = WriteWorkload.this.writeParams == null ? Long.MAX_VALUE :
+ WriteWorkload.this.writeParams.getExecutionDurationInMs();
last = start = System.currentTimeMillis();
String sql = buildSql(columns, tableName);
diff --git a/phoenix-pherf/src/main/resources/scenario/prod_test_unsalted_scenario.xml b/phoenix-pherf/src/main/resources/scenario/prod_test_unsalted_scenario.xml
index fb89ef3..0c1de9e 100644
--- a/phoenix-pherf/src/main/resources/scenario/prod_test_unsalted_scenario.xml
+++ b/phoenix-pherf/src/main/resources/scenario/prod_test_unsalted_scenario.xml
@@ -289,6 +289,7 @@
<maxValue>2014-10-17 00:00:00.000</maxValue>
</datavalue>
<datavalue distribution="2">
+ <minValue>2014-10-17 00:00:00.000</minValue>
<maxValue>2014-10-18 00:00:00.000</maxValue>
</datavalue>
<datavalue distribution="2">
@@ -310,20 +311,32 @@
<column>
<type>VARCHAR</type>
<userDefined>true</userDefined>
+ <length>15</length>
<dataSequence>LIST</dataSequence>
+ <valueList>
+ <datavalue>
+ <value>00Dxx0000001gER</value>
+ </datavalue>
+ <datavalue>
+ <value>00Dxx0000001gES</value>
+ </datavalue>
+ <datavalue>
+ <value>00Dxx0000001gET</value>
+ </datavalue>
+ </valueList>
<name>TENANT_ID</name>
</column>
</dataOverride>
-
- <preScenarioDdls>
- <ddl statement="CREATE INDEX IDX_DIVISION ON PHERF.PHERF_PROD_TEST_UNSALTED (DIVISION)"/>
+
+ <preScenarioDdls>
+ <ddl statement="CREATE INDEX IF NOT EXISTS IDX_DIVISION ON PHERF.PHERF_PROD_TEST_UNSALTED (DIVISION)"/>
</preScenarioDdls>
- <postScenarioDdls>
- <ddl statement="CREATE INDEX IDX_OLDVAL_STRING ON PHERF.PHERF_PROD_TEST_UNSALTED (OLDVAL_STRING)"/>
- <ddl statement="CREATE INDEX IDX_CONNECTION_ID ON PHERF.PHERF_PROD_TEST_UNSALTED (CONNECTION_ID)"/>
+ <postScenarioDdls>
+ <ddl statement="CREATE INDEX IF NOT EXISTS IDX_OLDVAL_STRING ON PHERF.PHERF_PROD_TEST_UNSALTED (OLDVAL_STRING)"/>
+ <ddl statement="CREATE INDEX IF NOT EXISTS IDX_CONNECTION_ID ON PHERF.PHERF_PROD_TEST_UNSALTED (CONNECTION_ID)"/>
</postScenarioDdls>
-
+
<writeParams executionDurationInMs="10000">
<!--
Number of writer threads to insert into the threadpool
@@ -340,19 +353,20 @@
</writeParams>
<!--Minimum of executionDurationInMs or numberOfExecutions. Whichever is reached first -->
<querySet concurrency="1" executionType="PARALLEL" executionDurationInMs="60000"
- numberOfExecutions="100">
+ numberOfExecutions="100">
<!-- Aggregate queries on a per tenant basis -->
<query tenantId="00Dxx0000001gER"
- ddl="CREATE VIEW IF NOT EXISTS PHERF.PHERF_TEST_VIEW_UNSALTED AS SELECT * FROM PHERF.PHERF_PROD_TEST_UNSALTED"
- statement="select count(*) from PHERF.PHERF_TEST_VIEW_UNSALTED"/>
+ ddl="CREATE VIEW IF NOT EXISTS PHERF.PHERF_TEST_VIEW_UNSALTED AS SELECT * FROM PHERF.PHERF_PROD_TEST_UNSALTED"
+ statement="select count(*) from PHERF.PHERF_TEST_VIEW_UNSALTED"/>
</querySet>
</scenario>
- <scenario tableName="PHERF.PHERF_PROD_TEST_UNSALTED" rowCount="10">
+ <scenario tableName="PHERF.PHERF_PROD_TEST_UNSALTED" rowCount="10" name="readWriteScenario">
<dataOverride>
<column>
<type>VARCHAR</type>
<userDefined>true</userDefined>
+ <length>15</length>
<dataSequence>LIST</dataSequence>
<valueList>
<datavalue>
@@ -371,12 +385,12 @@
<!-- Pre and post scenario indexes -->
<preScenarioDdls>
- <ddl>CREATE INDEX IDX_DIVISION ON PHERF.PHERF_PROD_TEST_UNSALTED (DIVISION)</ddl>
+ <ddl statement="CREATE INDEX IF NOT EXISTS IDX_DIVISION ON PHERF.PHERF_PROD_TEST_UNSALTED (DIVISION)"/>
</preScenarioDdls>
<postScenarioDdls>
- <ddl>CREATE INDEX IDX_OLDVAL_STRING ON PHERF.PHERF_PROD_TEST_UNSALTED (OLDVAL_STRING)</ddl>
- <ddl>CREATE INDEX IDX_CONNECTION_ID ON PHERF.PHERF_PROD_TEST_UNSALTED (CONNECTION_ID)</ddl>
+ <ddl statement="CREATE INDEX IF NOT EXISTS IDX_OLDVAL_STRING ON PHERF.PHERF_PROD_TEST_UNSALTED (OLDVAL_STRING)"/>
+ <ddl statement="CREATE INDEX IF NOT EXISTS IDX_CONNECTION_ID ON PHERF.PHERF_PROD_TEST_UNSALTED (CONNECTION_ID)"/>
</postScenarioDdls>
<!--Minimum of executionDurationInMs or numberOfExecutions. Whichever is reached first -->
@@ -384,11 +398,11 @@
<query statement="select count(*) from PHERF.PHERF_PROD_TEST_UNSALTED WHERE TENANT_ID=[TENANT_ID] AND TENANT_ID=[TENANT_ID]"/>
<!-- Aggregate queries on a per tenant basis -->
<query tenantId="00Dxx0000001gER"
- ddl="CREATE VIEW IF NOT EXISTS PHERF.PHERF_TEST_VIEW_UNSALTED AS SELECT * FROM PHERF.PHERF_PROD_TEST_UNSALTED"
- statement="select count(*) from PHERF.PHERF_TEST_VIEW_UNSALTED"/>
+ ddl="CREATE VIEW IF NOT EXISTS PHERF.PHERF_TEST_VIEW_UNSALTED AS SELECT * FROM PHERF.PHERF_PROD_TEST_UNSALTED"
+ statement="select count(*) from PHERF.PHERF_TEST_VIEW_UNSALTED"/>
<query tenantId="00Dxx0000001gES"
- ddl="CREATE VIEW IF NOT EXISTS PHERF.PHERF_TEST_VIEW_UNSALTED AS SELECT * FROM PHERF.PHERF_PROD_TEST_UNSALTED"
- statement="select /*+ SMALL*/ count(*) from PHERF.PHERF_TEST_VIEW_UNSALTED"/>
+ ddl="CREATE VIEW IF NOT EXISTS PHERF.PHERF_TEST_VIEW_UNSALTED AS SELECT * FROM PHERF.PHERF_PROD_TEST_UNSALTED"
+ statement="select /*+ SMALL*/ count(*) from PHERF.PHERF_TEST_VIEW_UNSALTED"/>
</querySet>
</scenario>