You are viewing a plain text version of this content. The canonical link for it is here.
Posted to commits@phoenix.apache.org by pb...@apache.org on 2019/05/01 07:36:38 UTC

[phoenix] branch 5.x-cdh6 updated (f3e17d3 -> 14fcab4)

This is an automated email from the ASF dual-hosted git repository.

pboado pushed a change to branch 5.x-cdh6
in repository https://gitbox.apache.org/repos/asf/phoenix.git.


    from f3e17d3  PHOENIX-5217 Incorrect result for COUNT DISTINCT limit
     new f2d9659  PHOENIX-5246: PhoenixAccessControllers.getAccessControllers() method is not correctly implementing the double-checked locking
     new cbca95d  PHOENIX-5173: LIKE and ILIKE statements return empty result list for search without wildcard
     new 5d37370  PhoenixResultSet#next() closes the result set if scanner returns null
     new 3ac1a48  PHOENIX-5187 Avoid using FileInputStream and FileOutputStream
     new d55fc51  PHOENIX-5235: Update SQLline version to the latest
     new 045e0bd  PHOENIX-5252 Add job priority option to UpdateStatisticsTool
     new 2f225e3  PHOENIX-5181 support Math sin/cos/tan functions
     new b66d81c  PHOENIX-5195 PHERF:- Handle batch failure in connection.commit() in WriteWorkload#upsertData
     new ffc630f  PHOENIX-5199 Pherf overrides user provided properties like dataloader threadpool, monitor frequency etc with pherf.properties
     new 30b73ed  PHOENIX-5168 IndexScrutinyTool to output to Table when that option is given
     new abe6dfe  PHOENIX-5251: Avoid taking explicit lock by using AtomicReference in PhoenixAccessController class
     new 14fcab4  PHOENIX-5213 Phoenix-client improvements: add more relocations, exclude log binding, add source jar

The 12 revisions listed above as "new" are entirely new to this
repository and will be described in separate emails.  The revisions
listed as "add" were already present in the repository and have only
been added to this reference.


Summary of changes:
 phoenix-assembly/pom.xml                           |  22 +
 .../src/build/components/all-common-jars.xml       |   4 +-
 phoenix-client/pom.xml                             | 580 ++++++++++++---------
 .../phoenix/end2end/IndexScrutinyToolIT.java       |  38 +-
 .../apache/phoenix/end2end/LikeExpressionIT.java   |  24 +
 ...d2EndIT.java => MathTrigFunctionEnd2EndIT.java} |  57 +-
 .../org/apache/phoenix/end2end/QueryLoggerIT.java  | 193 +++----
 .../apache/phoenix/cache/ServerCacheClient.java    |   9 +-
 .../apache/phoenix/compile/ExpressionCompiler.java |   3 -
 .../coprocessor/PhoenixAccessController.java       |  23 +-
 .../apache/phoenix/expression/ExpressionType.java  |   3 +
 .../{ExpFunction.java => CosFunction.java}         |  13 +-
 .../{ExpFunction.java => SinFunction.java}         |  13 +-
 .../{ExpFunction.java => TanFunction.java}         |  13 +-
 .../org/apache/phoenix/iterate/BufferedQueue.java  |   7 +-
 .../phoenix/iterate/SpoolingResultIterator.java    |   4 +-
 .../org/apache/phoenix/jdbc/PhoenixResultSet.java  |   4 +-
 .../phoenix/mapreduce/index/IndexScrutinyTool.java |   4 +-
 .../phoenix/schema/stats/UpdateStatisticsTool.java |  32 +-
 .../apache/phoenix/compile/WhereOptimizerTest.java |   8 +-
 .../phoenix/expression/MathTrigFunctionTest.java   | 179 +++++++
 .../schema/stats/UpdateStatisticsToolTest.java     |  15 +
 .../main/java/org/apache/phoenix/pherf/Pherf.java  |   2 +-
 .../phoenix/pherf/workload/WriteWorkload.java      |  36 +-
 pom.xml                                            |   2 +-
 25 files changed, 834 insertions(+), 454 deletions(-)
 copy phoenix-core/src/it/java/org/apache/phoenix/end2end/{OctetLengthFunctionEnd2EndIT.java => MathTrigFunctionEnd2EndIT.java} (53%)
 copy phoenix-core/src/main/java/org/apache/phoenix/expression/function/{ExpFunction.java => CosFunction.java} (80%)
 copy phoenix-core/src/main/java/org/apache/phoenix/expression/function/{ExpFunction.java => SinFunction.java} (80%)
 copy phoenix-core/src/main/java/org/apache/phoenix/expression/function/{ExpFunction.java => TanFunction.java} (80%)
 create mode 100644 phoenix-core/src/test/java/org/apache/phoenix/expression/MathTrigFunctionTest.java


[phoenix] 11/12: PHOENIX-5251: Avoid taking explicit lock by using AtomicReference in PhoenixAccessController class

Posted by pb...@apache.org.
This is an automated email from the ASF dual-hosted git repository.

pboado pushed a commit to branch 5.x-cdh6
in repository https://gitbox.apache.org/repos/asf/phoenix.git

commit abe6dfec042f96fc0ce66ce923ef3aaf5c84826a
Author: s.kadam <s....@salesforce.com>
AuthorDate: Fri Apr 26 23:41:30 2019 +0100

    PHOENIX-5251: Avoid taking explicit lock by using AtomicReference in PhoenixAccessController class
    
    Signed-off-by: Geoffrey Jacoby <gj...@apache.org>
---
 .../coprocessor/PhoenixAccessController.java       | 23 +++++++++++-----------
 1 file changed, 11 insertions(+), 12 deletions(-)

diff --git a/phoenix-core/src/main/java/org/apache/phoenix/coprocessor/PhoenixAccessController.java b/phoenix-core/src/main/java/org/apache/phoenix/coprocessor/PhoenixAccessController.java
index 1303363..dad663d 100644
--- a/phoenix-core/src/main/java/org/apache/phoenix/coprocessor/PhoenixAccessController.java
+++ b/phoenix-core/src/main/java/org/apache/phoenix/coprocessor/PhoenixAccessController.java
@@ -26,6 +26,7 @@ import java.util.HashSet;
 import java.util.List;
 import java.util.Optional;
 import java.util.Set;
+import java.util.concurrent.atomic.AtomicReference;
 
 import org.apache.commons.logging.Log;
 import org.apache.commons.logging.LogFactory;
@@ -77,7 +78,7 @@ import com.google.protobuf.RpcController;
 public class PhoenixAccessController extends BaseMetaDataEndpointObserver {
 
     private PhoenixMetaDataControllerEnvironment env;
-    private volatile ArrayList<MasterObserver> accessControllers;
+    AtomicReference<ArrayList<MasterObserver>> accessControllers = new AtomicReference<>();
     private boolean accessCheckEnabled;
     private UserProvider userProvider;
     public static final Log LOG = LogFactory.getLog(PhoenixAccessController.class);
@@ -90,20 +91,18 @@ public class PhoenixAccessController extends BaseMetaDataEndpointObserver {
     }
     
     private List<MasterObserver> getAccessControllers() throws IOException {
-        if (accessControllers == null) {
-            synchronized (this) {
-                if (accessControllers == null) {
-                    accessControllers = new ArrayList<MasterObserver>();
-                    RegionCoprocessorHost cpHost = this.env.getCoprocessorHost();
-                    for (RegionCoprocessor cp : cpHost.findCoprocessors(RegionCoprocessor.class)) {
-                        if (cp instanceof AccessControlService.Interface && cp instanceof MasterObserver) {
-                            accessControllers.add((MasterObserver)cp);
-                        }
-                    }
+        ArrayList<MasterObserver> oldAccessControllers = accessControllers.get();
+        if (oldAccessControllers == null) {
+            oldAccessControllers = new ArrayList<>();
+            RegionCoprocessorHost cpHost = this.env.getCoprocessorHost();
+            for (RegionCoprocessor cp : cpHost.findCoprocessors(RegionCoprocessor.class)) {
+                if (cp instanceof AccessControlService.Interface && cp instanceof MasterObserver) {
+                    oldAccessControllers.add((MasterObserver)cp);
                 }
             }
+            accessControllers.set(oldAccessControllers);
         }
-        return accessControllers;
+        return accessControllers.get();
     }
 
     public ObserverContext<MasterCoprocessorEnvironment> getMasterObsevrverContext() throws IOException {


[phoenix] 12/12: PHOENIX-5213 Phoenix-client improvements: add more relocations, exclude log binding, add source jar

Posted by pb...@apache.org.
This is an automated email from the ASF dual-hosted git repository.

pboado pushed a commit to branch 5.x-cdh6
in repository https://gitbox.apache.org/repos/asf/phoenix.git

commit 14fcab420f1acc8f4bbc10760f7feb63f735a676
Author: Vincent Poon <vi...@apache.org>
AuthorDate: Sat Apr 27 01:31:51 2019 +0100

    PHOENIX-5213 Phoenix-client improvements: add more relocations, exclude log binding, add source jar
---
 phoenix-assembly/pom.xml                           |  22 +
 .../src/build/components/all-common-jars.xml       |   4 +-
 phoenix-client/pom.xml                             | 580 ++++++++++++---------
 3 files changed, 346 insertions(+), 260 deletions(-)

diff --git a/phoenix-assembly/pom.xml b/phoenix-assembly/pom.xml
index f147ad0..0356040 100644
--- a/phoenix-assembly/pom.xml
+++ b/phoenix-assembly/pom.xml
@@ -42,6 +42,28 @@
 
   <build>
     <plugins>
+      <plugin>
+        <artifactId>exec-maven-plugin</artifactId>
+        <groupId>org.codehaus.mojo</groupId>
+        <executions>
+          <execution>
+            <id>Symlink to deprecated client jar name</id>
+            <phase>compile</phase>
+            <goals>
+              <goal>exec</goal>
+            </goals>
+            <configuration>
+              <executable>ln</executable>
+              <workingDirectory>${project.basedir}/../phoenix-client/target</workingDirectory>
+              <arguments>
+                <argument>-fnsv</argument>
+                <argument>phoenix-client-${project.version}.jar</argument>
+                <argument>phoenix-${project.version}-client.jar</argument>
+              </arguments>
+            </configuration>
+          </execution>
+        </executions>
+      </plugin>
       <!-- No jars created for this module -->
       <plugin>
         <groupId>org.apache.maven.plugins</groupId>
diff --git a/phoenix-assembly/src/build/components/all-common-jars.xml b/phoenix-assembly/src/build/components/all-common-jars.xml
index 08ca29a..d7e8735 100644
--- a/phoenix-assembly/src/build/components/all-common-jars.xml
+++ b/phoenix-assembly/src/build/components/all-common-jars.xml
@@ -27,7 +27,9 @@
       <directory>${project.basedir}/../phoenix-client/target</directory>
       <outputDirectory>/</outputDirectory>
       <includes>
-        <include>phoenix-*-client.jar</include>
+        <include>phoenix-client-${project.version}.jar</include>
+        <!-- deprecated jar name, which we now have a symlink for -->
+        <include>phoenix-${project.version}-client.jar</include>
       </includes>
     </fileSet>
     <fileSet>
diff --git a/phoenix-client/pom.xml b/phoenix-client/pom.xml
index fb0712b..c1bc549 100644
--- a/phoenix-client/pom.xml
+++ b/phoenix-client/pom.xml
@@ -57,69 +57,340 @@
       <plugin>
         <groupId>org.apache.maven.plugins</groupId>
         <artifactId>maven-jar-plugin</artifactId>
-        <configuration>
-          <finalName>phoenix-${project.version}-client</finalName>
-        </configuration>
       </plugin>
+
       <plugin>
         <groupId>org.apache.maven.plugins</groupId>
-        <artifactId>maven-install-plugin</artifactId>
+        <artifactId>maven-shade-plugin</artifactId>
+        <configuration>
+          <transformers>
+            <transformer
+                implementation="org.apache.maven.plugins.shade.resource.ServicesResourceTransformer" />
+            <transformer
+                implementation="org.apache.maven.plugins.shade.resource.IncludeResourceTransformer">
+              <resource>csv-bulk-load-config.properties</resource>
+              <file>
+                ${project.basedir}/../config/csv-bulk-load-config.properties
+              </file>
+            </transformer>
+            <transformer
+                implementation="org.apache.maven.plugins.shade.resource.IncludeResourceTransformer">
+              <resource>README.md</resource>
+              <file>${project.basedir}/../README.md</file>
+            </transformer>
+            <transformer
+                implementation="org.apache.maven.plugins.shade.resource.IncludeResourceTransformer">
+              <resource>LICENSE.txt</resource>
+              <file>${project.basedir}/../LICENSE</file>
+            </transformer>
+            <transformer
+                implementation="org.apache.maven.plugins.shade.resource.IncludeResourceTransformer">
+              <resource>NOTICE</resource>
+              <file>${project.basedir}/../NOTICE</file>
+            </transformer>
+          </transformers>
+          <relocations>
+
+            <!-- COM relocation -->
+            <relocation>
+              <pattern>com.beust.jcommander</pattern>
+              <shadedPattern>${shaded.package}.com.beust.jcommander</shadedPattern>
+            </relocation>
+            <relocation>
+              <pattern>com.codahale</pattern>
+              <shadedPattern>${shaded.package}.com.codahale</shadedPattern>
+            </relocation>
+            <relocation>
+              <pattern>com.fasterxml</pattern>
+              <shadedPattern>${shaded.package}.com.fasterxml</shadedPattern>
+            </relocation>
+            <relocation>
+              <pattern>com.google</pattern>
+              <shadedPattern>${shaded.package}.com.google</shadedPattern>
+            </relocation>
+            <relocation>
+              <pattern>com.jamesmurty</pattern>
+              <shadedPattern>${shaded.package}.com.jamesmurty</shadedPattern>
+            </relocation>
+            <relocation>
+              <pattern>com.jcraft</pattern>
+              <shadedPattern>${shaded.package}.com.jcraft</shadedPattern>
+            </relocation>
+            <relocation>
+              <pattern>com.lmax</pattern>
+              <shadedPattern>${shaded.package}.com.lmax</shadedPattern>
+            </relocation>
+            <relocation>
+              <pattern>com.sun.xml</pattern>
+              <shadedPattern>${shaded.package}.com.sun.xml</shadedPattern>
+            </relocation>
+            <relocation>
+              <pattern>com.sun.istack</pattern>
+              <shadedPattern>${shaded.package}.com.sun.istack</shadedPattern>
+            </relocation>
+            <relocation>
+              <pattern>com.sun.research</pattern>
+              <shadedPattern>${shaded.package}.com.sun.research</shadedPattern>
+            </relocation>
+            <relocation>
+              <pattern>com.sun.activation</pattern>
+              <shadedPattern>${shaded.package}.com.sun.activation</shadedPattern>
+            </relocation>
+            <relocation>
+              <pattern>com.thoughtworks</pattern>
+              <shadedPattern>${shaded.package}.com.thoughtworks</shadedPattern>
+            </relocation>
+            <relocation>
+              <pattern>com.yammer</pattern>
+              <shadedPattern>${shaded.package}.com.yammer</shadedPattern>
+            </relocation>
+
+            <!-- IO relocations -->
+            <relocation>
+              <pattern>io.netty</pattern>
+              <shadedPattern>${shaded.package}.io.netty</shadedPattern>
+            </relocation>
+
+            <!-- ORG relocations -->
+            <relocation>
+              <pattern>org.antlr</pattern>
+              <shadedPattern>${shaded.package}.org.antlr</shadedPattern>
+            </relocation>
+            <relocation>
+              <pattern>org.aopalliance</pattern>
+              <shadedPattern>${shaded.package}.org.aopalliance</shadedPattern>
+            </relocation>
+            <relocation>
+              <pattern>org.codehaus</pattern>
+              <shadedPattern>${shaded.package}.org.codehaus</shadedPattern>
+            </relocation>
+            <relocation>
+              <pattern>org.fusesource</pattern>
+              <shadedPattern>${shaded.package}.org.fusesource</shadedPattern>
+            </relocation>
+            <relocation>
+              <pattern>org.hamcrest</pattern>
+              <shadedPattern>${shaded.package}.org.hamcrest</shadedPattern>
+            </relocation>
+            <relocation>
+              <pattern>org.hsqldb</pattern>
+              <shadedPattern>${shaded.package}.org.hsqldb</shadedPattern>
+            </relocation>
+            <relocation>
+              <pattern>org.iq80</pattern>
+              <shadedPattern>${shaded.package}.org.iq80</shadedPattern>
+            </relocation>
+            <relocation>
+              <pattern>org.jamon</pattern>
+              <shadedPattern>${shaded.package}.org.jamon</shadedPattern>
+            </relocation>
+            <relocation>
+              <pattern>org.jboss</pattern>
+              <shadedPattern>${shaded.package}.org.jboss</shadedPattern>
+            </relocation>
+            <relocation>
+              <pattern>org.jcodings</pattern>
+              <shadedPattern>${shaded.package}.org.jcodings</shadedPattern>
+            </relocation>
+            <relocation>
+              <pattern>org.jets3t</pattern>
+              <shadedPattern>${shaded.package}.org.jets3t</shadedPattern>
+            </relocation>
+            <relocation>
+              <pattern>org.joda</pattern>
+              <shadedPattern>${shaded.package}.org.joda</shadedPattern>
+            </relocation>
+            <relocation>
+              <pattern>org.joni</pattern>
+              <shadedPattern>${shaded.package}.org.joni</shadedPattern>
+            </relocation>
+            <relocation>
+              <pattern>org.junit</pattern>
+              <shadedPattern>${shaded.package}.org.junit</shadedPattern>
+            </relocation>
+            <relocation>
+              <pattern>org.kosmix</pattern>
+              <shadedPattern>${shaded.package}.org.kosmix</shadedPattern>
+            </relocation>
+            <relocation>
+              <pattern>org.mortbay</pattern>
+              <shadedPattern>${shaded.package}.org.mortbay</shadedPattern>
+            </relocation>
+            <relocation>
+              <pattern>org.objectweb</pattern>
+              <shadedPattern>${shaded.package}.org.objectweb</shadedPattern>
+            </relocation>
+            <relocation>
+              <pattern>org.stringtemplate</pattern>
+              <shadedPattern>${shaded.package}.org.stringtemplate</shadedPattern>
+            </relocation>
+            <relocation>
+              <pattern>org.tukaani</pattern>
+              <shadedPattern>${shaded.package}.org.tukaani</shadedPattern>
+            </relocation>
+            <relocation>
+              <pattern>org.znerd</pattern>
+              <shadedPattern>${shaded.package}.org.znerd</shadedPattern>
+            </relocation>
+            <relocation>
+              <pattern>org.apache.avro</pattern>
+              <shadedPattern>${shaded.package}.org.apache.avro</shadedPattern>
+            </relocation>
+            <relocation>
+              <pattern>org.apache.commons</pattern>
+              <shadedPattern>${shaded.package}.org.apache.commons</shadedPattern>
+              <excludes>
+                <exclude>org.apache.commons.csv.**</exclude>
+                <exclude>org.apache.commons.logging.**</exclude>
+                <exclude>org.apache.commons.configuration.**</exclude>
+              </excludes>
+            </relocation>
+            <relocation>
+              <pattern>org.apache.directory</pattern>
+              <shadedPattern>${shaded.package}.org.apache.directory</shadedPattern>
+            </relocation>
+            <relocation>
+              <pattern>org.apache.http</pattern>
+              <shadedPattern>${shaded.package}.org.apache.http</shadedPattern>
+            </relocation>
+            <relocation>
+              <pattern>org.apache.jasper</pattern>
+              <shadedPattern>${shaded.package}.org.apache.jasper</shadedPattern>
+            </relocation>
+            <relocation>
+              <pattern>org.apache.jute</pattern>
+              <shadedPattern>${shaded.package}.org.apache.jute</shadedPattern>
+            </relocation>
+            <relocation>
+              <pattern>org.apache.mina</pattern>
+              <shadedPattern>${shaded.package}.org.apache.mina</shadedPattern>
+            </relocation>
+            <relocation>
+              <pattern>org.apache.oro</pattern>
+              <shadedPattern>${shaded.package}.org.apache.oro</shadedPattern>
+            </relocation>
+            <relocation>
+              <pattern>org.apache.taglibs</pattern>
+              <shadedPattern>${shaded.package}.org.apache.taglibs</shadedPattern>
+            </relocation>
+            <relocation>
+              <pattern>org.apache.thrift</pattern>
+              <shadedPattern>${shaded.package}.org.apache.thrift</shadedPattern>
+            </relocation>
+            <relocation>
+              <pattern>org.apache.tools</pattern>
+              <shadedPattern>${shaded.package}.org.apache.tools</shadedPattern>
+            </relocation>
+            <relocation>
+              <pattern>org.apache.twill</pattern>
+              <shadedPattern>${shaded.package}.org.apache.twill</shadedPattern>
+            </relocation>
+            <relocation>
+              <pattern>org.apache.velocity</pattern>
+              <shadedPattern>${shaded.package}.org.apache.velocity</shadedPattern>
+            </relocation>
+            <relocation>
+              <pattern>org.apache.zookeeper</pattern>
+              <shadedPattern>${shaded.package}.org.apache.zookeeper</shadedPattern>
+            </relocation>
+
+            <!-- NET relocations -->
+            <relocation>
+              <pattern>net</pattern>
+              <shadedPattern>${shaded.package}.net</shadedPattern>
+            </relocation>
+
+            <!-- Misc relocations -->
+            <relocation>
+              <pattern>antlr</pattern>
+              <shadedPattern>${shaded.package}.antlr</shadedPattern>
+            </relocation>
+            <relocation>
+              <pattern>it.unimi</pattern>
+              <shadedPattern>${shaded.package}.it.unimi</shadedPattern>
+            </relocation>
+            <relocation>
+              <pattern>jline</pattern>
+              <shadedPattern>${shaded.package}.jline</shadedPattern>
+            </relocation>
+            <relocation>
+              <pattern>junit</pattern>
+              <shadedPattern>${shaded.package}.junit</shadedPattern>
+            </relocation>
+            <relocation>
+              <pattern>javax.activation</pattern>
+              <shadedPattern>${shaded.package}.javax.activation</shadedPattern>
+            </relocation>
+            <relocation>
+              <pattern>javax.annotation</pattern>
+              <shadedPattern>${shaded.package}.javax.annotation</shadedPattern>
+            </relocation>
+            <relocation>
+              <pattern>javax.inject</pattern>
+              <shadedPattern>${shaded.package}.javax.inject</shadedPattern>
+            </relocation>
+            <relocation>
+              <pattern>javax.xml.bind</pattern>
+              <shadedPattern>${shaded.package}.javax.xml.bind</shadedPattern>
+            </relocation>
+            <relocation>
+              <pattern>javax.servlet</pattern>
+              <shadedPattern>${shaded.package}.javax.servlet</shadedPattern>
+            </relocation>
+            <relocation>
+              <pattern>javax.ws</pattern>
+              <shadedPattern>${shaded.package}.javax.ws</shadedPattern>
+            </relocation>
+          </relocations>
+        </configuration>
         <executions>
           <execution>
+            <id>default-shaded</id>
+            <phase>package</phase>
             <goals>
-              <goal>install-file</goal>
+              <goal>shade</goal>
             </goals>
-            <id>default-install</id>
             <configuration>
-              <skip>true</skip>
+              <shadedArtifactAttached>false</shadedArtifactAttached>
+              <promoteTransitiveDependencies>true</promoteTransitiveDependencies>
+              <shadeTestJar>false</shadeTestJar>
+              <createSourcesJar>true</createSourcesJar>
+              <dependencyReducedPomLocation>${basedir}/target/pom.xml</dependencyReducedPomLocation>
+              <artifactSet>
+                <includes>
+                  <include>*:*</include>
+                </includes>
+                <excludes>
+                  <exclude>org.apache.phoenix:phoenix-client</exclude>
+                  <exclude>xom:xom</exclude>
+                </excludes>
+              </artifactSet>
+              <filters>
+                <filter>
+                  <artifact>*:*</artifact>
+                  <excludes>
+                    <exclude>META-INF/*.SF</exclude>
+                    <exclude>META-INF/*.DSA</exclude>
+                    <exclude>META-INF/*.RSA</exclude>
+                    <exclude>META-INF/license/*</exclude>
+                    <exclude>LICENSE.*</exclude>
+                    <exclude>NOTICE.*</exclude>
+                  </excludes>
+                </filter>
+              </filters>
             </configuration>
-            <phase>install</phase>
           </execution>
-        </executions>
-        <configuration>
-          <file>${basedir}/target/phoenix-${project.version}-client.jar</file>
-	  <pomFile>${basedir}/pom.xml</pomFile>
-        </configuration>
-      </plugin>
-      <plugin>
-        <groupId>org.apache.maven.plugins</groupId>
-        <artifactId>maven-shade-plugin</artifactId>
-        <executions>
           <execution>
+            <id>embedded-shaded</id>
             <phase>package</phase>
             <goals>
               <goal>shade</goal>
             </goals>
             <configuration>
-              <finalName>phoenix-${project.version}-client</finalName>
-              <shadedArtifactAttached>false</shadedArtifactAttached>
+              <shadedClassifierName>embedded</shadedClassifierName>
+              <shadedArtifactAttached>true</shadedArtifactAttached>
               <promoteTransitiveDependencies>true</promoteTransitiveDependencies>
               <shadeTestJar>false</shadeTestJar>
-              <transformers>
-                <transformer implementation="org.apache.maven.plugins.shade.resource.ServicesResourceTransformer"/>
-                <transformer
-                        implementation="org.apache.maven.plugins.shade.resource.IncludeResourceTransformer">
-                  <resource>csv-bulk-load-config.properties</resource>
-                  <file>
-                    ${project.basedir}/../config/csv-bulk-load-config.properties
-                  </file>
-                </transformer>
-                <transformer
-                        implementation="org.apache.maven.plugins.shade.resource.IncludeResourceTransformer">
-                  <resource>README.md</resource>
-                  <file>${project.basedir}/../README.md</file>
-                </transformer>
-                <transformer
-                        implementation="org.apache.maven.plugins.shade.resource.IncludeResourceTransformer">
-                  <resource>LICENSE.txt</resource>
-                  <file>${project.basedir}/../LICENSE</file>
-                </transformer>
-                <transformer
-                    implementation="org.apache.maven.plugins.shade.resource.IncludeResourceTransformer">
-                  <resource>NOTICE</resource>
-                  <file>${project.basedir}/../NOTICE</file>
-                </transformer>
-              </transformers>
               <artifactSet>
                 <includes>
                   <include>*:*</include>
@@ -127,7 +398,6 @@
                 <excludes>
                   <exclude>org.apache.phoenix:phoenix-client</exclude>
                   <exclude>xom:xom</exclude>
-                  <exclude>log4j:log4j</exclude>
                   <exclude>org.slf4j:slf4j-log4j12</exclude>
                 </excludes>
               </artifactSet>
@@ -144,219 +414,6 @@
                   </excludes>
                 </filter>
               </filters>
-
-              <relocations>
-
-                <!-- COM relocation -->
-                <relocation>
-                  <pattern>com.codahale</pattern>
-                  <shadedPattern>${shaded.package}.com.codahale</shadedPattern>
-                </relocation>
-                <relocation>
-                  <pattern>com.fasterxml</pattern>
-                  <shadedPattern>${shaded.package}.com.fasterxml</shadedPattern>
-                </relocation>
-                <relocation>
-                  <pattern>com.jamesmurty</pattern>
-                  <shadedPattern>${shaded.package}.com.jamesmurty</shadedPattern>
-                </relocation>
-                <relocation>
-                  <pattern>com.google</pattern>
-                  <shadedPattern>${shaded.package}.com.google</shadedPattern>
-                  <excludes>
-                    <exclude>com.google.protobuf.**</exclude>
-                  </excludes>
-                </relocation>
-                <relocation>
-                  <pattern>com.jcraft</pattern>
-                  <shadedPattern>${shaded.package}.com.jcraft</shadedPattern>
-                </relocation>
-                <relocation>
-                  <pattern>com.lmax</pattern>
-                  <shadedPattern>${shaded.package}.com.lmax</shadedPattern>
-                </relocation>
-                <relocation>
-                  <pattern>com.thoughtworks</pattern>
-                  <shadedPattern>${shaded.package}.com.thoughtworks</shadedPattern>
-                </relocation>
-                <relocation>
-                  <pattern>com.yammer</pattern>
-                  <shadedPattern>${shaded.package}.com.yammer</shadedPattern>
-                </relocation>
-
-                <!-- IO relocations -->
-                <relocation>
-                  <pattern>io.netty</pattern>
-                  <shadedPattern>${shaded.package}.io.netty</shadedPattern>
-                </relocation>
-
-                <!-- ORG relocations -->
-                <relocation>
-                  <pattern>org.antlr</pattern>
-                  <shadedPattern>${shaded.package}.org.antlr</shadedPattern>
-                </relocation>
-                <relocation>
-                  <pattern>org.aopalliance</pattern>
-                  <shadedPattern>${shaded.package}.org.aopalliance</shadedPattern>
-                </relocation>
-                <relocation>
-                  <pattern>org.codehaus</pattern>
-                  <shadedPattern>${shaded.package}.org.codehaus</shadedPattern>
-                </relocation>
-                <relocation>
-                  <pattern>org.fusesource</pattern>
-                  <shadedPattern>${shaded.package}.org.fusesource</shadedPattern>
-                </relocation>
-                <relocation>
-                  <pattern>org.hamcrest</pattern>
-                  <shadedPattern>${shaded.package}.org.hamcrest</shadedPattern>
-                </relocation>
-                <relocation>
-                  <pattern>org.hsqldb</pattern>
-                  <shadedPattern>${shaded.package}.org.hsqldb</shadedPattern>
-                </relocation>
-                <relocation>
-                  <pattern>org.iq80</pattern>
-                  <shadedPattern>${shaded.package}.org.iq80</shadedPattern>
-                </relocation>
-                <relocation>
-                  <pattern>org.jamon</pattern>
-                  <shadedPattern>${shaded.package}.org.jamon</shadedPattern>
-                </relocation>
-                <relocation>
-                  <pattern>org.jboss</pattern>
-                  <shadedPattern>${shaded.package}.org.jboss</shadedPattern>
-                </relocation>
-                <relocation>
-                  <pattern>org.jcodings</pattern>
-                  <shadedPattern>${shaded.package}.org.jcodings</shadedPattern>
-                </relocation>
-                <relocation>
-                  <pattern>org.jets3t</pattern>
-                  <shadedPattern>${shaded.package}.org.jets3t</shadedPattern>
-                </relocation>
-                <relocation>
-                  <pattern>org.joda</pattern>
-                  <shadedPattern>${shaded.package}.org.joda</shadedPattern>
-                </relocation>
-                <relocation>
-                  <pattern>org.joni</pattern>
-                  <shadedPattern>${shaded.package}.org.joni</shadedPattern>
-                </relocation>
-                <relocation>
-                  <pattern>org.junit</pattern>
-                  <shadedPattern>${shaded.package}.org.junit</shadedPattern>
-                </relocation>
-                <relocation>
-                  <pattern>org.kosmix</pattern>
-                  <shadedPattern>${shaded.package}.org.kosmix</shadedPattern>
-                </relocation>
-                <relocation>
-                  <pattern>org.mortbay</pattern>
-                  <shadedPattern>${shaded.package}.org.mortbay</shadedPattern>
-                </relocation>
-                <relocation>
-                  <pattern>org.objectweb</pattern>
-                  <shadedPattern>${shaded.package}.org.objectweb</shadedPattern>
-                </relocation>
-                <relocation>
-                  <pattern>org.stringtemplate</pattern>
-                  <shadedPattern>${shaded.package}.org.stringtemplate</shadedPattern>
-                </relocation>
-                <relocation>
-                  <pattern>org.tukaani</pattern>
-                  <shadedPattern>${shaded.package}.org.tukaani</shadedPattern>
-                </relocation>
-                <relocation>
-                  <pattern>org.znerd</pattern>
-                  <shadedPattern>${shaded.package}.org.znerd</shadedPattern>
-                </relocation>
-                <relocation>
-                  <pattern>org.apache.avro</pattern>
-                  <shadedPattern>${shaded.package}.org.apache.avro</shadedPattern>
-                </relocation>
-                <relocation>
-                  <pattern>org.apache.commons</pattern>
-                  <shadedPattern>${shaded.package}.org.apache.commons</shadedPattern>
-                  <excludes>
-                    <exclude>org.apache.commons.csv.**</exclude>
-                    <exclude>org.apache.commons.logging.**</exclude>
-                    <exclude>org.apache.commons.configuration.**</exclude>
-                  </excludes>
-                </relocation>
-                <relocation>
-                  <pattern>org.apache.directory</pattern>
-                  <shadedPattern>${shaded.package}.org.apache.directory</shadedPattern>
-                </relocation>
-                <relocation>
-                  <pattern>org.apache.http</pattern>
-                  <shadedPattern>${shaded.package}.org.apache.http</shadedPattern>
-                </relocation>
-                <relocation>
-                  <pattern>org.apache.jasper</pattern>
-                  <shadedPattern>${shaded.package}.org.apache.jasper</shadedPattern>
-                </relocation>
-                <relocation>
-                  <pattern>org.apache.jute</pattern>
-                  <shadedPattern>${shaded.package}.org.apache.jute</shadedPattern>
-                </relocation>
-                <relocation>
-                  <pattern>org.apache.mina</pattern>
-                  <shadedPattern>${shaded.package}.org.apache.mina</shadedPattern>
-                </relocation>
-                <relocation>
-                  <pattern>org.apache.oro</pattern>
-                  <shadedPattern>${shaded.package}.org.apache.oro</shadedPattern>
-                </relocation>
-                <relocation>
-                  <pattern>org.apache.taglibs</pattern>
-                  <shadedPattern>${shaded.package}.org.apache.taglibs</shadedPattern>
-                </relocation>
-                <relocation>
-                  <pattern>org.apache.thrift</pattern>
-                  <shadedPattern>${shaded.package}.org.apache.thrift</shadedPattern>
-                </relocation>
-                <relocation>
-                  <pattern>org.apache.tools</pattern>
-                  <shadedPattern>${shaded.package}.org.apache.tools</shadedPattern>
-                </relocation>
-                <relocation>
-                  <pattern>org.apache.twill</pattern>
-                  <shadedPattern>${shaded.package}.org.apache.twill</shadedPattern>
-                </relocation>
-                <relocation>
-                  <pattern>org.apache.velocity</pattern>
-                  <shadedPattern>${shaded.package}.org.apache.velocity</shadedPattern>
-                </relocation>
-                <relocation>
-                  <pattern>org.apache.zookeeper</pattern>
-                  <shadedPattern>${shaded.package}.org.apache.zookeeper</shadedPattern>
-                </relocation>
-
-                <!-- NET relocations -->
-                <relocation>
-                  <pattern>net</pattern>
-                  <shadedPattern>${shaded.package}.net</shadedPattern>
-                </relocation>
-
-                <!-- Misc relocations -->
-                <relocation>
-                  <pattern>antlr</pattern>
-                  <shadedPattern>${shaded.package}.antlr</shadedPattern>
-                </relocation>
-                <relocation>
-                  <pattern>it.unimi</pattern>
-                  <shadedPattern>${shaded.package}.it.unimi</shadedPattern>
-                </relocation>
-                <relocation>
-                  <pattern>jline</pattern>
-                  <shadedPattern>${shaded.package}.jline</shadedPattern>
-                </relocation>
-                <relocation>
-                  <pattern>junit</pattern>
-                  <shadedPattern>${shaded.package}.junit</shadedPattern>
-                </relocation>
-              </relocations>
             </configuration>
           </execution>
         </executions>
@@ -370,5 +427,10 @@
       <groupId>org.apache.phoenix</groupId>
       <artifactId>phoenix-core</artifactId>
     </dependency>
+    <dependency>
+      <groupId>org.slf4j</groupId>
+      <artifactId>slf4j-log4j12</artifactId>
+      <version>1.7.7</version>
+    </dependency>
   </dependencies>
 </project>


[phoenix] 01/12: PHOENIX-5246: PhoenixAccessControllers.getAccessControllers() method is not correctly implementing the double-checked locking

Posted by pb...@apache.org.
This is an automated email from the ASF dual-hosted git repository.

pboado pushed a commit to branch 5.x-cdh6
in repository https://gitbox.apache.org/repos/asf/phoenix.git

commit f2d96590fa9be307f93ec56538c0ef1301d0ec9b
Author: s.kadam <s....@salesforce.com>
AuthorDate: Fri Apr 19 21:10:27 2019 +0100

    PHOENIX-5246: PhoenixAccessControllers.getAccessControllers() method is not correctly implementing the double-checked locking
---
 .../java/org/apache/phoenix/coprocessor/PhoenixAccessController.java    | 2 +-
 1 file changed, 1 insertion(+), 1 deletion(-)

diff --git a/phoenix-core/src/main/java/org/apache/phoenix/coprocessor/PhoenixAccessController.java b/phoenix-core/src/main/java/org/apache/phoenix/coprocessor/PhoenixAccessController.java
index d07f4f7..1303363 100644
--- a/phoenix-core/src/main/java/org/apache/phoenix/coprocessor/PhoenixAccessController.java
+++ b/phoenix-core/src/main/java/org/apache/phoenix/coprocessor/PhoenixAccessController.java
@@ -77,7 +77,7 @@ import com.google.protobuf.RpcController;
 public class PhoenixAccessController extends BaseMetaDataEndpointObserver {
 
     private PhoenixMetaDataControllerEnvironment env;
-    private ArrayList<MasterObserver> accessControllers;
+    private volatile ArrayList<MasterObserver> accessControllers;
     private boolean accessCheckEnabled;
     private UserProvider userProvider;
     public static final Log LOG = LogFactory.getLog(PhoenixAccessController.class);


[phoenix] 10/12: PHOENIX-5168 IndexScrutinyTool to output to Table when that option is given

Posted by pb...@apache.org.
This is an automated email from the ASF dual-hosted git repository.

pboado pushed a commit to branch 5.x-cdh6
in repository https://gitbox.apache.org/repos/asf/phoenix.git

commit 30b73ed2f2d796fc9113f7b8847046b36124bc49
Author: Gokcen Iskender <gi...@salesforce.com>
AuthorDate: Wed Apr 24 21:16:34 2019 +0100

    PHOENIX-5168 IndexScrutinyTool to output to Table when that option is given
---
 .../phoenix/end2end/IndexScrutinyToolIT.java       | 38 ++++++++++++++--------
 .../phoenix/mapreduce/index/IndexScrutinyTool.java |  4 ++-
 2 files changed, 27 insertions(+), 15 deletions(-)

diff --git a/phoenix-core/src/it/java/org/apache/phoenix/end2end/IndexScrutinyToolIT.java b/phoenix-core/src/it/java/org/apache/phoenix/end2end/IndexScrutinyToolIT.java
index 046c3f0..72857e7 100644
--- a/phoenix-core/src/it/java/org/apache/phoenix/end2end/IndexScrutinyToolIT.java
+++ b/phoenix-core/src/it/java/org/apache/phoenix/end2end/IndexScrutinyToolIT.java
@@ -10,6 +10,7 @@
  */
 package org.apache.phoenix.end2end;
 
+import static org.apache.phoenix.mapreduce.index.IndexScrutinyTableOutput.OUTPUT_TABLE_NAME;
 import static org.apache.phoenix.mapreduce.index.PhoenixScrutinyJobCounters.BAD_COVERED_COL_VAL_COUNT;
 import static org.apache.phoenix.mapreduce.index.PhoenixScrutinyJobCounters.BATCHES_PROCESSED_COUNT;
 import static org.apache.phoenix.mapreduce.index.PhoenixScrutinyJobCounters.INVALID_ROW_COUNT;
@@ -172,6 +173,14 @@ public class IndexScrutinyToolIT {
         protected long getCounterValue(Counters counters, Enum<PhoenixScrutinyJobCounters> counter) {
             return counters.findCounter(counter).getValue();
         }
+
+        protected int countRows(Connection conn, String tableFullName) throws SQLException {
+            ResultSet count = conn.createStatement().executeQuery("select count(*) from " + tableFullName);
+            count.next();
+            int numRows = count.getInt(1);
+            return numRows;
+        }
+
     }
 
     @RunWith(Parameterized.class) public static class IndexScrutinyToolNonTenantIT extends SharedIndexToolIT {
@@ -247,8 +256,8 @@ public class IndexScrutinyToolIT {
             upsertRow(dataTableUpsertStmt, 2, "name-2", 95123);
             conn.commit();
 
-            int numDataRows = countRows(dataTableFullName);
-            int numIndexRows = countRows(indexTableFullName);
+            int numDataRows = countRows(conn, dataTableFullName);
+            int numIndexRows = countRows(conn, indexTableFullName);
 
             // scrutiny should report everything as ok
             List<Job> completedJobs = runScrutiny(schemaName, dataTableName, indexTableName);
@@ -259,8 +268,8 @@ public class IndexScrutinyToolIT {
             assertEquals(0, getCounterValue(counters, INVALID_ROW_COUNT));
 
             // make sure row counts weren't modified by scrutiny
-            assertEquals(numDataRows, countRows(dataTableFullName));
-            assertEquals(numIndexRows, countRows(indexTableFullName));
+            assertEquals(numDataRows, countRows(conn, dataTableFullName));
+            assertEquals(numIndexRows, countRows(conn, indexTableFullName));
         }
 
         /**
@@ -405,7 +414,7 @@ public class IndexScrutinyToolIT {
                 deleteRow(indexTableFullName, "WHERE \":ID\"=" + idToDelete);
             }
             conn.commit();
-            int numRows = countRows(indexTableFullName);
+            int numRows = countRows(conn, indexTableFullName);
             int numDeleted = numTestRows - numRows;
 
             // run scrutiny with batch size of 10
@@ -683,13 +692,6 @@ public class IndexScrutinyToolIT {
             indexTableFullName = SchemaUtil.getTableName(schemaName, indexTableName);
         }
 
-        private int countRows(String tableFullName) throws SQLException {
-            ResultSet count = conn.createStatement().executeQuery("select count(*) from " + tableFullName);
-            count.next();
-            int numRows = count.getInt(1);
-            return numRows;
-        }
-
         private void upsertIndexRow(String name, int id, int zip) throws SQLException {
             indexTableUpsertStmt.setString(1, name);
             indexTableUpsertStmt.setInt(2, id); // id
@@ -898,9 +900,17 @@ public class IndexScrutinyToolIT {
         * Add 3 rows to Tenant view.
         * Empty index table and observe they are not equal.
         * Use data table as source and output to file.
-        * Output to table doesn't work for tenantid connection because it can't create the scrutiny table as tenant.
         **/
         @Test public void testWithEmptyIndexTableOutputToFile() throws Exception{
+            testWithOutput(OutputFormat.FILE);
+        }
+
+        @Test public void testWithEmptyIndexTableOutputToTable() throws Exception{
+            testWithOutput(OutputFormat.TABLE);
+            assertEquals(3, countRows(connGlobal, OUTPUT_TABLE_NAME));
+        }
+
+        private void testWithOutput(OutputFormat outputFormat) throws Exception {
             connTenant.createStatement()
                     .execute(String.format(upsertQueryStr, tenantViewName, tenantId, 1, "x"));
             connTenant.createStatement()
@@ -919,7 +929,7 @@ public class IndexScrutinyToolIT {
 
             String[]
                     argValues =
-                    getArgValues("", tenantViewName, indexNameTenant, 10L, SourceTable.DATA_TABLE_SOURCE, true, OutputFormat.FILE, null,
+                    getArgValues("", tenantViewName, indexNameTenant, 10L, SourceTable.DATA_TABLE_SOURCE, true, outputFormat, null,
                             tenantId, EnvironmentEdgeManager.currentTimeMillis());
             List<Job> completedJobs = runScrutiny(argValues);
 
diff --git a/phoenix-core/src/main/java/org/apache/phoenix/mapreduce/index/IndexScrutinyTool.java b/phoenix-core/src/main/java/org/apache/phoenix/mapreduce/index/IndexScrutinyTool.java
index 26d7336..39df6ac 100644
--- a/phoenix-core/src/main/java/org/apache/phoenix/mapreduce/index/IndexScrutinyTool.java
+++ b/phoenix-core/src/main/java/org/apache/phoenix/mapreduce/index/IndexScrutinyTool.java
@@ -428,7 +428,9 @@ public class IndexScrutinyTool extends Configured implements Tool {
 
             if (outputInvalidRows && OutputFormat.TABLE.equals(outputFormat)) {
                 // create the output table if it doesn't exist
-                try (Connection outputConn = ConnectionUtil.getOutputConnection(configuration)) {
+                Configuration outputConfiguration = HBaseConfiguration.create(configuration);
+                outputConfiguration.unset(PhoenixRuntime.TENANT_ID_ATTRIB);
+                try (Connection outputConn = ConnectionUtil.getOutputConnection(outputConfiguration)) {
                     outputConn.createStatement().execute(IndexScrutinyTableOutput.OUTPUT_TABLE_DDL);
                     outputConn.createStatement()
                             .execute(IndexScrutinyTableOutput.OUTPUT_METADATA_DDL);


[phoenix] 04/12: PHOENIX-5187 Avoid using FileInputStream and FileOutputStream

Posted by pb...@apache.org.
This is an automated email from the ASF dual-hosted git repository.

pboado pushed a commit to branch 5.x-cdh6
in repository https://gitbox.apache.org/repos/asf/phoenix.git

commit 3ac1a4830a8aa6140c2491dc2417551453baf590
Author: Aman Poonia <am...@gmail.com>
AuthorDate: Mon Mar 11 17:44:23 2019 +0000

    PHOENIX-5187 Avoid using FileInputStream and FileOutputStream
---
 .../main/java/org/apache/phoenix/cache/ServerCacheClient.java    | 9 +++++----
 .../src/main/java/org/apache/phoenix/iterate/BufferedQueue.java  | 7 +++----
 .../java/org/apache/phoenix/iterate/SpoolingResultIterator.java  | 4 ++--
 3 files changed, 10 insertions(+), 10 deletions(-)

diff --git a/phoenix-core/src/main/java/org/apache/phoenix/cache/ServerCacheClient.java b/phoenix-core/src/main/java/org/apache/phoenix/cache/ServerCacheClient.java
index 822e255..bb96637 100644
--- a/phoenix-core/src/main/java/org/apache/phoenix/cache/ServerCacheClient.java
+++ b/phoenix-core/src/main/java/org/apache/phoenix/cache/ServerCacheClient.java
@@ -22,9 +22,10 @@ import static org.apache.phoenix.util.LogUtil.addCustomAnnotations;
 
 import java.io.Closeable;
 import java.io.File;
-import java.io.FileInputStream;
-import java.io.FileOutputStream;
 import java.io.IOException;
+import java.io.InputStream;
+import java.io.OutputStream;
+import java.nio.file.Files;
 import java.sql.SQLException;
 import java.util.ArrayList;
 import java.util.Collections;
@@ -148,7 +149,7 @@ public class ServerCacheClient {
                 } catch (InsufficientMemoryException e) {
                     this.outputFile = File.createTempFile("HashJoinCacheSpooler", ".bin", new File(services.getProps()
                             .get(QueryServices.SPOOL_DIRECTORY, QueryServicesOptions.DEFAULT_SPOOL_DIRECTORY)));
-                    try (FileOutputStream fio = new FileOutputStream(outputFile)) {
+                    try (OutputStream fio = Files.newOutputStream(outputFile.toPath())) {
                         fio.write(cachePtr.get(), cachePtr.getOffset(), cachePtr.getLength());
                     }
                 }
@@ -158,7 +159,7 @@ public class ServerCacheClient {
 
         public ImmutableBytesWritable getCachePtr() throws IOException {
             if(this.outputFile!=null){
-                try (FileInputStream fio = new FileInputStream(outputFile)) {
+                try (InputStream fio = Files.newInputStream(outputFile.toPath())) {
                     byte[] b = new byte[this.size];
                     fio.read(b);
                     cachePtr = new ImmutableBytesWritable(b);
diff --git a/phoenix-core/src/main/java/org/apache/phoenix/iterate/BufferedQueue.java b/phoenix-core/src/main/java/org/apache/phoenix/iterate/BufferedQueue.java
index 1a646e6..3352641 100644
--- a/phoenix-core/src/main/java/org/apache/phoenix/iterate/BufferedQueue.java
+++ b/phoenix-core/src/main/java/org/apache/phoenix/iterate/BufferedQueue.java
@@ -23,9 +23,8 @@ import java.io.Closeable;
 import java.io.DataInputStream;
 import java.io.DataOutputStream;
 import java.io.File;
-import java.io.FileInputStream;
-import java.io.FileOutputStream;
 import java.io.IOException;
+import java.nio.file.Files;
 import java.util.AbstractQueue;
 import java.util.Comparator;
 import java.util.Iterator;
@@ -304,7 +303,7 @@ public abstract class BufferedQueue<T> extends AbstractQueue<T> implements SizeA
             if (totalResultSize >= thresholdBytes) {
                 this.file = File.createTempFile(UUID.randomUUID().toString(), null);
                 try (DataOutputStream out = new DataOutputStream(
-                        new BufferedOutputStream(new FileOutputStream(file)))) {
+                        new BufferedOutputStream(Files.newOutputStream(file.toPath())))) {
                     int resSize = inMemQueue.size();
                     for (int i = 0; i < resSize; i++) {
                         T e = inMemQueue.poll();
@@ -342,7 +341,7 @@ public abstract class BufferedQueue<T> extends AbstractQueue<T> implements SizeA
                 this.next = null;
                 try {
                     this.in = new DataInputStream(
-                            new BufferedInputStream(new FileInputStream(file)));
+                            new BufferedInputStream(Files.newInputStream(file.toPath())));
                 } catch (IOException e) {
                     throw new RuntimeException(e);
                 }
diff --git a/phoenix-core/src/main/java/org/apache/phoenix/iterate/SpoolingResultIterator.java b/phoenix-core/src/main/java/org/apache/phoenix/iterate/SpoolingResultIterator.java
index fa90b1a..0823026 100644
--- a/phoenix-core/src/main/java/org/apache/phoenix/iterate/SpoolingResultIterator.java
+++ b/phoenix-core/src/main/java/org/apache/phoenix/iterate/SpoolingResultIterator.java
@@ -27,8 +27,8 @@ import java.io.DataInputStream;
 import java.io.DataOutputStream;
 import java.io.EOFException;
 import java.io.File;
-import java.io.FileInputStream;
 import java.io.IOException;
+import java.nio.file.Files;
 import java.sql.SQLException;
 import java.util.List;
 
@@ -273,7 +273,7 @@ public class SpoolingResultIterator implements PeekingResultIterator {
 
         private synchronized void init() throws IOException {
             if (spoolFrom == null) {
-                spoolFrom = new DataInputStream(new BufferedInputStream(new FileInputStream(file)));
+                spoolFrom = new DataInputStream(new BufferedInputStream(Files.newInputStream(file.toPath())));
                 advance();
             }
         }


[phoenix] 03/12: PhoenixResultSet#next() closes the result set if scanner returns null

Posted by pb...@apache.org.
This is an automated email from the ASF dual-hosted git repository.

pboado pushed a commit to branch 5.x-cdh6
in repository https://gitbox.apache.org/repos/asf/phoenix.git

commit 5d37370c7c48558f924dce32a6f2c9c5dd52efe6
Author: s.kadam <s....@salesforce.com>
AuthorDate: Thu Apr 18 22:05:21 2019 +0100

    PhoenixResultSet#next() closes the result set if scanner returns null
---
 .../org/apache/phoenix/end2end/QueryLoggerIT.java  | 193 +++++++++++----------
 .../org/apache/phoenix/jdbc/PhoenixResultSet.java  |   4 +-
 2 files changed, 102 insertions(+), 95 deletions(-)

diff --git a/phoenix-core/src/it/java/org/apache/phoenix/end2end/QueryLoggerIT.java b/phoenix-core/src/it/java/org/apache/phoenix/end2end/QueryLoggerIT.java
index 208eddd..8a08d37 100644
--- a/phoenix-core/src/it/java/org/apache/phoenix/end2end/QueryLoggerIT.java
+++ b/phoenix-core/src/it/java/org/apache/phoenix/end2end/QueryLoggerIT.java
@@ -103,44 +103,47 @@ public class QueryLoggerIT extends BaseUniqueNamesOwnClusterIT {
         Connection conn = DriverManager.getConnection(getUrl(),props);
         assertEquals(conn.unwrap(PhoenixConnection.class).getLogLevel(),LogLevel.DEBUG);
         String query = "SELECT * FROM " + tableName;
-        ResultSet rs = conn.createStatement().executeQuery(query);
-        StatementContext context = ((PhoenixResultSet)rs).getContext();
-        String queryId = context.getQueryLogger().getQueryId();
-        while (rs.next()) {
-            rs.getString(1);
-            rs.getString(2);
+        StatementContext context;
+        try (ResultSet rs = conn.createStatement().executeQuery(query)) {
+            context = ((PhoenixResultSet) rs).getContext();
+            while (rs.next()) {
+                rs.getString(1);
+                rs.getString(2);
+            }
         }
-        ResultSet explainRS = conn.createStatement().executeQuery("Explain " + query);
+        String queryId = context.getQueryLogger().getQueryId();
 
         String logQuery = "SELECT * FROM " + SYSTEM_CATALOG_SCHEMA + ".\"" + SYSTEM_LOG_TABLE + "\"";
         int delay = 5000;
 
         // sleep for sometime to let query log committed
         Thread.sleep(delay);
-        rs = conn.createStatement().executeQuery(logQuery);
-        boolean foundQueryLog = false;
+        try (ResultSet explainRS = conn.createStatement().executeQuery("Explain " + query);
+             ResultSet rs = conn.createStatement().executeQuery(logQuery)) {
+            boolean foundQueryLog = false;
 
-        while (rs.next()) {
-            if (rs.getString(QUERY_ID).equals(queryId)) {
-                foundQueryLog = true;
-                assertEquals(rs.getString(BIND_PARAMETERS), null);
-                assertEquals(rs.getString(USER), System.getProperty("user.name"));
-                assertEquals(rs.getString(CLIENT_IP), InetAddress.getLocalHost().getHostAddress());
-                assertEquals(rs.getString(EXPLAIN_PLAN), QueryUtil.getExplainPlan(explainRS));
-                assertEquals(rs.getString(GLOBAL_SCAN_DETAILS), context.getScan().toJSON());
-                assertEquals(rs.getLong(NO_OF_RESULTS_ITERATED), 10);
-                assertEquals(rs.getString(QUERY), query);
-                assertEquals(rs.getString(QUERY_STATUS), QueryStatus.COMPLETED.toString());
-                assertEquals(rs.getString(TENANT_ID), null);
-                assertTrue(rs.getString(SCAN_METRICS_JSON)==null);
-                assertEquals(rs.getString(EXCEPTION_TRACE),null);
-            }else{
-                //confirm we are not logging system queries
-                assertFalse(rs.getString(QUERY).toString().contains(SYSTEM_CATALOG_SCHEMA));
+            while (rs.next()) {
+                if (rs.getString(QUERY_ID).equals(queryId)) {
+                    foundQueryLog = true;
+                    assertEquals(rs.getString(BIND_PARAMETERS), null);
+                    assertEquals(rs.getString(USER), System.getProperty("user.name"));
+                    assertEquals(rs.getString(CLIENT_IP), InetAddress.getLocalHost().getHostAddress());
+                    assertEquals(rs.getString(EXPLAIN_PLAN), QueryUtil.getExplainPlan(explainRS));
+                    assertEquals(rs.getString(GLOBAL_SCAN_DETAILS), context.getScan().toJSON());
+                    assertEquals(rs.getLong(NO_OF_RESULTS_ITERATED), 10);
+                    assertEquals(rs.getString(QUERY), query);
+                    assertEquals(rs.getString(QUERY_STATUS), QueryStatus.COMPLETED.toString());
+                    assertEquals(rs.getString(TENANT_ID), null);
+                    assertTrue(rs.getString(SCAN_METRICS_JSON) == null);
+                    assertEquals(rs.getString(EXCEPTION_TRACE), null);
+                } else {
+                    //confirm we are not logging system queries
+                    assertFalse(rs.getString(QUERY).toString().contains(SYSTEM_CATALOG_SCHEMA));
+                }
             }
+            assertTrue(foundQueryLog);
+            conn.close();
         }
-        assertTrue(foundQueryLog);
-        conn.close();
     }
     
     @Test
@@ -155,12 +158,12 @@ public class QueryLoggerIT extends BaseUniqueNamesOwnClusterIT {
         String query = "SELECT * FROM " + tableName;
         int count=100;
         for (int i = 0; i < count; i++) {
-            ResultSet rs = conn.createStatement().executeQuery(query);
-            while(rs.next()){
-                
+            try (ResultSet rs = conn.createStatement().executeQuery(query)) {
+                while (rs.next()) {
+
+                }
             }
         }
-        
         String logQuery = "SELECT * FROM " + SYSTEM_CATALOG_SCHEMA + ".\"" + SYSTEM_LOG_TABLE + "\"";
         
         int delay = 5000;
@@ -187,37 +190,39 @@ public class QueryLoggerIT extends BaseUniqueNamesOwnClusterIT {
         Connection conn = DriverManager.getConnection(getUrl(),props);
         assertEquals(conn.unwrap(PhoenixConnection.class).getLogLevel(),LogLevel.INFO);
         String query = "SELECT * FROM " + tableName;
-        
-        ResultSet rs = conn.createStatement().executeQuery(query);
-        StatementContext context = ((PhoenixResultSet)rs).getContext();
-        String queryId = context.getQueryLogger().getQueryId();
-        while (rs.next()) {
-            rs.getString(1);
-            rs.getString(2);
+        StatementContext context;
+        try (ResultSet rs = conn.createStatement().executeQuery(query)) {
+            context = ((PhoenixResultSet) rs).getContext();
+            while (rs.next()) {
+                rs.getString(1);
+                rs.getString(2);
+            }
         }
+        String queryId = context.getQueryLogger().getQueryId();
 
         String logQuery = "SELECT * FROM " + SYSTEM_CATALOG_SCHEMA + ".\"" + SYSTEM_LOG_TABLE + "\"";
         int delay = 5000;
 
         // sleep for sometime to let query log committed
         Thread.sleep(delay);
-        rs = conn.createStatement().executeQuery(logQuery);
-        boolean foundQueryLog = false;
-        while (rs.next()) {
-            if (rs.getString(QUERY_ID).equals(queryId)) {
-                foundQueryLog = true;
-                assertEquals(rs.getString(USER), System.getProperty("user.name"));
-                assertEquals(rs.getString(CLIENT_IP), InetAddress.getLocalHost().getHostAddress());
-                assertEquals(rs.getString(EXPLAIN_PLAN), null);
-                assertEquals(rs.getString(GLOBAL_SCAN_DETAILS),null);
-                assertEquals(rs.getLong(NO_OF_RESULTS_ITERATED), 10);
-                assertEquals(rs.getString(QUERY), query);
-                assertEquals(rs.getString(QUERY_STATUS),QueryStatus.COMPLETED.toString());
-                assertEquals(rs.getString(TENANT_ID), null);
+        try (ResultSet rs = conn.createStatement().executeQuery(logQuery)) {
+            boolean foundQueryLog = false;
+            while (rs.next()) {
+                if (rs.getString(QUERY_ID).equals(queryId)) {
+                    foundQueryLog = true;
+                    assertEquals(rs.getString(USER), System.getProperty("user.name"));
+                    assertEquals(rs.getString(CLIENT_IP), InetAddress.getLocalHost().getHostAddress());
+                    assertEquals(rs.getString(EXPLAIN_PLAN), null);
+                    assertEquals(rs.getString(GLOBAL_SCAN_DETAILS), null);
+                    assertEquals(rs.getLong(NO_OF_RESULTS_ITERATED), 10);
+                    assertEquals(rs.getString(QUERY), query);
+                    assertEquals(rs.getString(QUERY_STATUS), QueryStatus.COMPLETED.toString());
+                    assertEquals(rs.getString(TENANT_ID), null);
+                }
             }
+            assertTrue(foundQueryLog);
+            conn.close();
         }
-        assertTrue(foundQueryLog);
-        conn.close();
     }
     
     @Test
@@ -275,46 +280,50 @@ public class QueryLoggerIT extends BaseUniqueNamesOwnClusterIT {
         final MyClock clock = new MyClock(100);
         EnvironmentEdgeManager.injectEdge(clock);
         try{
-        String query = "SELECT * FROM " + tableName +" where V = ?";
-        
-        PreparedStatement pstmt = conn.prepareStatement(query);
-        pstmt.setString(1, "value5");
-        ResultSet rs = pstmt.executeQuery();
-        StatementContext context = ((PhoenixResultSet)rs).getContext();
-        String queryId = context.getQueryLogger().getQueryId();
-        while (rs.next()) {
-            rs.getString(1);
-            rs.getString(2);
-        }
-        ResultSet explainRS = conn.createStatement()
-                .executeQuery("Explain " + "SELECT * FROM " + tableName + " where V = 'value5'");
-        String logQuery = "SELECT * FROM " + SYSTEM_CATALOG_SCHEMA + ".\"" + SYSTEM_LOG_TABLE + "\"";
-        int delay = 5000;
+            String query = "SELECT * FROM " + tableName +" where V = ?";
+            StatementContext context;
+            PreparedStatement pstmt = conn.prepareStatement(query);
+            pstmt.setString(1, "value5");
+            try (ResultSet rs = pstmt.executeQuery()) {
+                 context = ((PhoenixResultSet) rs).getContext();
+                while (rs.next()) {
+                    rs.getString(1);
+                    rs.getString(2);
+                }
+            }
+            String queryId = context.getQueryLogger().getQueryId();
 
-        // sleep for sometime to let query log committed
-        Thread.sleep(delay);
-        rs = conn.createStatement().executeQuery(logQuery);
-        boolean foundQueryLog = false;
-        while (rs.next()) {
-            if (rs.getString(QUERY_ID).equals(queryId)) {
-                foundQueryLog = true;
-                assertEquals(rs.getString(BIND_PARAMETERS), loglevel == LogLevel.TRACE ? "value5" : null);
-                assertEquals(rs.getString(USER), System.getProperty("user.name"));
-                assertEquals(rs.getString(CLIENT_IP), InetAddress.getLocalHost().getHostAddress());
-                assertEquals(rs.getString(EXPLAIN_PLAN), QueryUtil.getExplainPlan(explainRS));
-                assertEquals(rs.getString(GLOBAL_SCAN_DETAILS), context.getScan().toJSON());
-                assertEquals(rs.getLong(NO_OF_RESULTS_ITERATED), 1);
-                assertEquals(rs.getString(QUERY), query);
-                assertEquals(rs.getString(QUERY_STATUS), QueryStatus.COMPLETED.toString());
-                assertTrue(LogLevel.TRACE == loglevel ? rs.getString(SCAN_METRICS_JSON).contains("scanMetrics")
-                        : rs.getString(SCAN_METRICS_JSON) == null);
-                assertEquals(rs.getTimestamp(START_TIME).getTime(),100);
-                assertEquals(rs.getString(TENANT_ID), null);
+            String logQuery = "SELECT * FROM " + SYSTEM_CATALOG_SCHEMA + ".\"" + SYSTEM_LOG_TABLE + "\"";
+            int delay = 5000;
+
+            // sleep for sometime to let query log committed
+            Thread.sleep(delay);
+            String explainQuery = "Explain " + "SELECT * FROM " + tableName + " where V = 'value5'";
+            try (ResultSet explainRS = conn.createStatement()
+                    .executeQuery(explainQuery);
+                 ResultSet rs = conn.createStatement().executeQuery(logQuery)) {
+                boolean foundQueryLog = false;
+                while (rs.next()) {
+                    if (rs.getString(QUERY_ID).equals(queryId)) {
+                        foundQueryLog = true;
+                        assertEquals(rs.getString(BIND_PARAMETERS), loglevel == LogLevel.TRACE ? "value5" : null);
+                        assertEquals(rs.getString(USER), System.getProperty("user.name"));
+                        assertEquals(rs.getString(CLIENT_IP), InetAddress.getLocalHost().getHostAddress());
+                        assertEquals(rs.getString(EXPLAIN_PLAN), QueryUtil.getExplainPlan(explainRS));
+                        assertEquals(rs.getString(GLOBAL_SCAN_DETAILS), context.getScan().toJSON());
+                        assertEquals(rs.getLong(NO_OF_RESULTS_ITERATED), 1);
+                        assertEquals(rs.getString(QUERY), query);
+                        assertEquals(rs.getString(QUERY_STATUS), QueryStatus.COMPLETED.toString());
+                        assertTrue(LogLevel.TRACE == loglevel ? rs.getString(SCAN_METRICS_JSON).contains("scanMetrics")
+                                : rs.getString(SCAN_METRICS_JSON) == null);
+                        assertEquals(rs.getTimestamp(START_TIME).getTime(), 100);
+                        assertEquals(rs.getString(TENANT_ID), null);
+                    }
+                }
+                assertTrue(foundQueryLog);
+                conn.close();
             }
-        }
-        assertTrue(foundQueryLog);
-        conn.close();
-        }finally{
+        }finally {
             EnvironmentEdgeManager.injectEdge(null);
         }
     }
diff --git a/phoenix-core/src/main/java/org/apache/phoenix/jdbc/PhoenixResultSet.java b/phoenix-core/src/main/java/org/apache/phoenix/jdbc/PhoenixResultSet.java
index b99ece6..016aa8f 100644
--- a/phoenix-core/src/main/java/org/apache/phoenix/jdbc/PhoenixResultSet.java
+++ b/phoenix-core/src/main/java/org/apache/phoenix/jdbc/PhoenixResultSet.java
@@ -839,9 +839,7 @@ public class PhoenixResultSet implements ResultSet, SQLCloseable {
                 overAllQueryMetrics.startResultSetWatch();
             }
             currentRow = scanner.next();
-            if (currentRow == null) {
-                close();
-            }else{
+            if (currentRow != null) {
                 count++;
                 // Reset this projector with each row
                 if (this.rowProjectorWithDynamicCols != null) {


[phoenix] 02/12: PHOENIX-5173: LIKE and ILIKE statements return empty result list for search without wildcard

Posted by pb...@apache.org.
This is an automated email from the ASF dual-hosted git repository.

pboado pushed a commit to branch 5.x-cdh6
in repository https://gitbox.apache.org/repos/asf/phoenix.git

commit cbca95d870abe1e831f343263bcf750be330198f
Author: s.kadam <s....@salesforce.com>
AuthorDate: Fri Apr 19 23:53:54 2019 +0100

    PHOENIX-5173: LIKE and ILIKE statements return empty result list for search without wildcard
---
 .../apache/phoenix/end2end/LikeExpressionIT.java   | 24 ++++++++++++++++++++++
 .../apache/phoenix/compile/ExpressionCompiler.java |  3 ---
 .../apache/phoenix/compile/WhereOptimizerTest.java |  8 ++++++--
 3 files changed, 30 insertions(+), 5 deletions(-)

diff --git a/phoenix-core/src/it/java/org/apache/phoenix/end2end/LikeExpressionIT.java b/phoenix-core/src/it/java/org/apache/phoenix/end2end/LikeExpressionIT.java
index 0b061d5..65d55cc 100644
--- a/phoenix-core/src/it/java/org/apache/phoenix/end2end/LikeExpressionIT.java
+++ b/phoenix-core/src/it/java/org/apache/phoenix/end2end/LikeExpressionIT.java
@@ -430,4 +430,28 @@ public class LikeExpressionIT extends ParallelStatsDisabledIT {
         rs = select.executeQuery();
         assertFalse(rs.next());
     }
+    //associated to PHOENIX-5173 jira
+    @Test
+    public void testLikeExpressionWithoutWildcards() throws Exception {
+        String table = generateUniqueName();
+        final String createTable = "CREATE TABLE "
+                + table + " (ID BIGINT NOT NULL PRIMARY KEY, USER_NAME VARCHAR(255))";
+        final String upsertTable = "UPSERT INTO " + table + " VALUES(1, 'Some Name')";
+        String likeSelect = "SELECT * FROM " + table + " WHERE USER_NAME LIKE 'Some Name'";
+        String iLikeSelect = "SELECT * FROM " + table + " WHERE USER_NAME ILIKE 'soMe nAme'";
+
+        try(Connection conn = DriverManager.getConnection(getUrl())) {
+            conn.setAutoCommit(true);
+            conn.createStatement().execute(createTable);
+            conn.createStatement().executeUpdate(upsertTable);
+            try(ResultSet rs = conn.createStatement().executeQuery(likeSelect)) {
+                assertTrue(rs.next());
+                assertFalse(rs.next());
+            }
+            try(ResultSet rs = conn.createStatement().executeQuery(iLikeSelect)) {
+                assertTrue(rs.next());
+                assertFalse(rs.next());
+            }
+        }
+    }
 }
diff --git a/phoenix-core/src/main/java/org/apache/phoenix/compile/ExpressionCompiler.java b/phoenix-core/src/main/java/org/apache/phoenix/compile/ExpressionCompiler.java
index 3b0f6d7..807c2e2 100644
--- a/phoenix-core/src/main/java/org/apache/phoenix/compile/ExpressionCompiler.java
+++ b/phoenix-core/src/main/java/org/apache/phoenix/compile/ExpressionCompiler.java
@@ -507,9 +507,6 @@ public class ExpressionCompiler extends UnsupportedAllParseNodeVisitor<Expressio
             }
             if (index == -1) {
                 String rhsLiteral = LikeExpression.unescapeLike(pattern);
-                if (lhsMaxLength != null && lhsMaxLength != rhsLiteral.length()) {
-                    return LiteralExpression.newConstant(false, rhs.getDeterminism());
-                }
                 if (node.getLikeType() == LikeType.CASE_SENSITIVE) {
                   CompareOp op = node.isNegate() ? CompareOp.NOT_EQUAL : CompareOp.EQUAL;
                   if (pattern.equals(rhsLiteral)) {
diff --git a/phoenix-core/src/test/java/org/apache/phoenix/compile/WhereOptimizerTest.java b/phoenix-core/src/test/java/org/apache/phoenix/compile/WhereOptimizerTest.java
index cc6f55a..f2d5cfe 100644
--- a/phoenix-core/src/test/java/org/apache/phoenix/compile/WhereOptimizerTest.java
+++ b/phoenix-core/src/test/java/org/apache/phoenix/compile/WhereOptimizerTest.java
@@ -921,14 +921,18 @@ public class WhereOptimizerTest extends BaseConnectionlessQueryTest {
     }
 
     @Test
-    public void testDegenerateLikeNoWildcard() throws SQLException {
+    public void testLikeNoWildcardExpression() throws SQLException {
         String tenantId = "000000000000001";
         String keyPrefix = "002";
         String query = "select * from atable where organization_id LIKE ? and entity_id  LIKE '" + keyPrefix + "'";
         List<Object> binds = Arrays.<Object>asList(tenantId);
         StatementContext context = compileStatement(query, binds);
         Scan scan = context.getScan();
-        assertDegenerate(scan);
+        byte[] startRow = ByteUtil.concat(
+                PVarchar.INSTANCE.toBytes(tenantId),StringUtil.padChar(PVarchar.INSTANCE.toBytes(keyPrefix),15));
+        assertArrayEquals(startRow, scan.getStartRow());
+        byte[] stopRow = ByteUtil.nextKey(startRow);
+        assertArrayEquals(stopRow, scan.getStopRow());
     }
 
     @Test


[phoenix] 09/12: PHOENIX-5199 Pherf overrides user provided properties like dataloader threadpool, monitor frequency etc with pherf.properties

Posted by pb...@apache.org.
This is an automated email from the ASF dual-hosted git repository.

pboado pushed a commit to branch 5.x-cdh6
in repository https://gitbox.apache.org/repos/asf/phoenix.git

commit ffc630f8d9cade551bddba05a69c40e4f89331fa
Author: Monani Mihir <mm...@salesforce.com>
AuthorDate: Fri Mar 15 11:18:12 2019 +0000

    PHOENIX-5199 Pherf overrides user provided properties like dataloader threadpool, monitor frequency etc with pherf.properties
---
 phoenix-pherf/src/main/java/org/apache/phoenix/pherf/Pherf.java       | 2 +-
 .../main/java/org/apache/phoenix/pherf/workload/WriteWorkload.java    | 4 ++--
 2 files changed, 3 insertions(+), 3 deletions(-)

diff --git a/phoenix-pherf/src/main/java/org/apache/phoenix/pherf/Pherf.java b/phoenix-pherf/src/main/java/org/apache/phoenix/pherf/Pherf.java
index 43061e0..d92ffde 100644
--- a/phoenix-pherf/src/main/java/org/apache/phoenix/pherf/Pherf.java
+++ b/phoenix-pherf/src/main/java/org/apache/phoenix/pherf/Pherf.java
@@ -156,7 +156,7 @@ public class Pherf {
                 writerThreadPoolSize =
                 command.getOptionValue("writerThreadSize",
                         properties.getProperty("pherf.default.dataloader.threadpool"));
-        properties.setProperty("pherf. default.dataloader.threadpool", writerThreadPoolSize);
+        properties.setProperty("pherf.default.dataloader.threadpool", writerThreadPoolSize);
         label = command.getOptionValue("label", null);
         compareResults = command.getOptionValue("compare", null);
         compareType = command.hasOption("useAverageCompareType") ? CompareType.AVERAGE : CompareType.MINIMUM;
diff --git a/phoenix-pherf/src/main/java/org/apache/phoenix/pherf/workload/WriteWorkload.java b/phoenix-pherf/src/main/java/org/apache/phoenix/pherf/workload/WriteWorkload.java
index c482b3f..b340a2b 100644
--- a/phoenix-pherf/src/main/java/org/apache/phoenix/pherf/workload/WriteWorkload.java
+++ b/phoenix-pherf/src/main/java/org/apache/phoenix/pherf/workload/WriteWorkload.java
@@ -84,8 +84,8 @@ public class WriteWorkload implements Workload {
 
     public WriteWorkload(PhoenixUtil phoenixUtil, XMLConfigParser parser, Scenario scenario, GeneratePhoenixStats generateStatistics)
             throws Exception {
-        this(phoenixUtil, PherfConstants.create().getProperties(PherfConstants.PHERF_PROPERTIES,
-                false),
+        this(phoenixUtil,
+                PherfConstants.create().getProperties(PherfConstants.PHERF_PROPERTIES, true),
                 parser, scenario, generateStatistics);
     }
 


[phoenix] 08/12: PHOENIX-5195 PHERF:- Handle batch failure in connection.commit() in WriteWorkload#upsertData

Posted by pb...@apache.org.
This is an automated email from the ASF dual-hosted git repository.

pboado pushed a commit to branch 5.x-cdh6
in repository https://gitbox.apache.org/repos/asf/phoenix.git

commit b66d81c8b55f003db70dd146f891e5e3602d926e
Author: Monani Mihir <mm...@salesforce.com>
AuthorDate: Fri Mar 15 08:26:35 2019 +0000

    PHOENIX-5195 PHERF:- Handle batch failure in connection.commit() in WriteWorkload#upsertData
---
 .../phoenix/pherf/workload/WriteWorkload.java      | 32 ++++++++++++----------
 1 file changed, 18 insertions(+), 14 deletions(-)

diff --git a/phoenix-pherf/src/main/java/org/apache/phoenix/pherf/workload/WriteWorkload.java b/phoenix-pherf/src/main/java/org/apache/phoenix/pherf/workload/WriteWorkload.java
index 4023383..c482b3f 100644
--- a/phoenix-pherf/src/main/java/org/apache/phoenix/pherf/workload/WriteWorkload.java
+++ b/phoenix-pherf/src/main/java/org/apache/phoenix/pherf/workload/WriteWorkload.java
@@ -24,7 +24,6 @@ import java.sql.Connection;
 import java.sql.Date;
 import java.sql.PreparedStatement;
 import java.sql.SQLException;
-import java.sql.Timestamp;
 import java.sql.Types;
 import java.text.SimpleDateFormat;
 import java.util.ArrayList;
@@ -35,7 +34,6 @@ import java.util.concurrent.ExecutorService;
 import java.util.concurrent.Executors;
 import java.util.concurrent.Future;
 
-import org.apache.hadoop.hbase.util.Bytes;
 import org.apache.phoenix.pherf.PherfConstants;
 import org.apache.phoenix.pherf.PherfConstants.GeneratePhoenixStats;
 import org.apache.phoenix.pherf.configuration.Column;
@@ -294,26 +292,32 @@ public class WriteWorkload implements Workload {
                                     rowsCreated += result;
                                 }
                             }
-                            connection.commit();
-                            duration = System.currentTimeMillis() - last;
-                            logger.info("Writer (" + Thread.currentThread().getName()
-                                    + ") committed Batch. Total " + getBatchSize()
-                                    + " rows for this thread (" + this.hashCode() + ") in ("
-                                    + duration + ") Ms");
-
-                            if (i % PherfConstants.LOG_PER_NROWS == 0 && i != 0) {
-                                dataLoadThreadTime
-                                        .add(tableName, Thread.currentThread().getName(), i,
-                                                System.currentTimeMillis() - logStartTime);
-                                logStartTime = System.currentTimeMillis();
+                            try {
+                                connection.commit();
+                                duration = System.currentTimeMillis() - last;
+                                logger.info("Writer (" + Thread.currentThread().getName()
+                                        + ") committed Batch. Total " + getBatchSize()
+                                        + " rows for this thread (" + this.hashCode() + ") in ("
+                                        + duration + ") Ms");
+
+                                if (i % PherfConstants.LOG_PER_NROWS == 0 && i != 0) {
+                                    dataLoadThreadTime.add(tableName,
+                                        Thread.currentThread().getName(), i,
+                                        System.currentTimeMillis() - logStartTime);
+                                }
+                            } catch (SQLException e) {
+                                logger.warn("SQLException in commit operation", e);
                             }
 
+                            logStartTime = System.currentTimeMillis();
                             // Pause for throttling if configured to do so
                             Thread.sleep(threadSleepDuration);
                             // Re-compute the start time for the next batch
                             last = System.currentTimeMillis();
                         }
                     }
+                } catch (SQLException e) {
+                    throw e;
                 } finally {
                     // Need to keep the statement open to send the remaining batch of updates
                     if (!useBatchApi && stmt != null) {


[phoenix] 06/12: PHOENIX-5252 Add job priority option to UpdateStatisticsTool

Posted by pb...@apache.org.
This is an automated email from the ASF dual-hosted git repository.

pboado pushed a commit to branch 5.x-cdh6
in repository https://gitbox.apache.org/repos/asf/phoenix.git

commit 045e0bdcbea71907d62871c8bd496e9e23a1fec0
Author: Xinyi Yan <xy...@salesforce.com>
AuthorDate: Sat Apr 20 01:25:02 2019 +0100

    PHOENIX-5252 Add job priority option to UpdateStatisticsTool
---
 .../phoenix/schema/stats/UpdateStatisticsTool.java | 32 +++++++++++++++++++++-
 .../schema/stats/UpdateStatisticsToolTest.java     | 15 ++++++++++
 2 files changed, 46 insertions(+), 1 deletion(-)

diff --git a/phoenix-core/src/main/java/org/apache/phoenix/schema/stats/UpdateStatisticsTool.java b/phoenix-core/src/main/java/org/apache/phoenix/schema/stats/UpdateStatisticsTool.java
index 88b0f0a..110682d 100644
--- a/phoenix-core/src/main/java/org/apache/phoenix/schema/stats/UpdateStatisticsTool.java
+++ b/phoenix-core/src/main/java/org/apache/phoenix/schema/stats/UpdateStatisticsTool.java
@@ -35,6 +35,7 @@ import org.apache.hadoop.hbase.metrics.Gauge;
 import org.apache.hadoop.hbase.metrics.impl.MetricRegistriesImpl;
 import org.apache.hadoop.io.NullWritable;
 import org.apache.hadoop.mapreduce.Job;
+import org.apache.hadoop.mapreduce.JobPriority;
 import org.apache.hadoop.mapreduce.Mapper;
 import org.apache.hadoop.mapreduce.lib.db.DBInputFormat.NullDBWritable;
 import org.apache.hadoop.mapreduce.lib.output.NullOutputFormat;
@@ -78,6 +79,8 @@ public class UpdateStatisticsTool extends Configured implements Tool {
             "HBase Snapshot Name");
     private static final Option RESTORE_DIR_OPTION = new Option("d", "restore-dir", true,
             "Restore Directory for HBase snapshot");
+    private static final Option JOB_PRIORITY_OPTION = new Option("p", "job-priority", true,
+            "Define job priority from 0(highest) to 4");
     private static final Option RUN_FOREGROUND_OPTION =
             new Option("runfg", "run-foreground", false,
                     "If specified, runs UpdateStatisticsTool in Foreground. Default - Runs the build in background");
@@ -90,6 +93,7 @@ public class UpdateStatisticsTool extends Configured implements Tool {
     private String tableName;
     private String snapshotName;
     private Path restoreDir;
+    private JobPriority jobPriority;
     private boolean manageSnapshot;
     private boolean isForeground;
 
@@ -164,12 +168,35 @@ public class UpdateStatisticsTool extends Configured implements Tool {
         if (restoreDirOptionValue == null) {
             restoreDirOptionValue = getConf().get(FS_DEFAULT_NAME_KEY) + "/tmp";
         }
-        
+
+        jobPriority = getJobPriority(cmdLine);
+
         restoreDir = new Path(restoreDirOptionValue);
         manageSnapshot = cmdLine.hasOption(MANAGE_SNAPSHOT_OPTION.getOpt());
         isForeground = cmdLine.hasOption(RUN_FOREGROUND_OPTION.getOpt());
     }
 
+    public String getJobPriority() {
+        return this.jobPriority.toString();
+    }
+
+    private JobPriority getJobPriority(CommandLine cmdLine) {
+        String jobPriorityOption = cmdLine.getOptionValue(JOB_PRIORITY_OPTION.getOpt());
+         if (jobPriorityOption == null) {
+             return JobPriority.NORMAL;
+         }
+
+         switch (jobPriorityOption) {
+             case "0" : return JobPriority.VERY_HIGH;
+             case "1" : return JobPriority.HIGH;
+             case "2" : return JobPriority.NORMAL;
+             case "3" : return JobPriority.LOW;
+             case "4" : return JobPriority.VERY_LOW;
+             default:
+                 return JobPriority.NORMAL;
+         }
+    }
+
     private void configureJob() throws Exception {
         job = Job.getInstance(getConf(),
                 "UpdateStatistics-" + tableName + "-" + snapshotName);
@@ -187,6 +214,8 @@ public class UpdateStatisticsTool extends Configured implements Tool {
         job.setMapOutputValueClass(NullWritable.class);
         job.setOutputFormatClass(NullOutputFormat.class);
         job.setNumReduceTasks(0);
+        job.setPriority(this.jobPriority);
+
         TableMapReduceUtil.addDependencyJars(job);
         TableMapReduceUtil.addDependencyJarsForClasses(job.getConfiguration(), PhoenixConnection.class, Chronology.class,
                 CharStream.class, TransactionSystemClient.class, TransactionNotInProgressException.class,
@@ -265,6 +294,7 @@ public class UpdateStatisticsTool extends Configured implements Tool {
         options.addOption(SNAPSHOT_NAME_OPTION);
         options.addOption(HELP_OPTION);
         options.addOption(RESTORE_DIR_OPTION);
+        options.addOption(JOB_PRIORITY_OPTION);
         options.addOption(RUN_FOREGROUND_OPTION);
         options.addOption(MANAGE_SNAPSHOT_OPTION);
         return options;
diff --git a/phoenix-core/src/test/java/org/apache/phoenix/schema/stats/UpdateStatisticsToolTest.java b/phoenix-core/src/test/java/org/apache/phoenix/schema/stats/UpdateStatisticsToolTest.java
index 2262b0e..5c0a488 100644
--- a/phoenix-core/src/test/java/org/apache/phoenix/schema/stats/UpdateStatisticsToolTest.java
+++ b/phoenix-core/src/test/java/org/apache/phoenix/schema/stats/UpdateStatisticsToolTest.java
@@ -90,4 +90,19 @@ public class UpdateStatisticsToolTest {
         assertEquals("hdfs://base-dir/tmp", tool.getRestoreDir().toString());
     }
 
+    @Test
+    public void testJobPriorityInput() {
+        UpdateStatisticsTool tool = new UpdateStatisticsTool();
+        tool.parseArgs(new String[] {"-t", "table1"});
+        assertEquals("NORMAL", tool.getJobPriority());
+
+        tool.parseArgs(new String[] {"-t", "table1", "-p", "0"});
+        assertEquals("VERY_HIGH", tool.getJobPriority());
+
+        tool.parseArgs(new String[] {"-t", "table1", "-p", "-1"});
+        assertEquals("NORMAL", tool.getJobPriority());
+
+        tool.parseArgs(new String[] {"-t", "table1", "-p", "DSAFDAS"});
+        assertEquals("NORMAL", tool.getJobPriority());
+    }
 }
\ No newline at end of file


[phoenix] 07/12: PHOENIX-5181 support Math sin/cos/tan functions

Posted by pb...@apache.org.
This is an automated email from the ASF dual-hosted git repository.

pboado pushed a commit to branch 5.x-cdh6
in repository https://gitbox.apache.org/repos/asf/phoenix.git

commit 2f225e3680977d3b4e0ace578d6ccacb69994718
Author: Xinyi Yan <xy...@salesforce.com>
AuthorDate: Thu Mar 7 18:48:57 2019 +0000

    PHOENIX-5181 support Math sin/cos/tan functions
---
 .../phoenix/end2end/MathTrigFunctionEnd2EndIT.java |  94 +++++++++++
 .../apache/phoenix/expression/ExpressionType.java  |   3 +
 .../phoenix/expression/function/CosFunction.java   |  56 +++++++
 .../phoenix/expression/function/SinFunction.java   |  56 +++++++
 .../phoenix/expression/function/TanFunction.java   |  56 +++++++
 .../phoenix/expression/MathTrigFunctionTest.java   | 179 +++++++++++++++++++++
 6 files changed, 444 insertions(+)

diff --git a/phoenix-core/src/it/java/org/apache/phoenix/end2end/MathTrigFunctionEnd2EndIT.java b/phoenix-core/src/it/java/org/apache/phoenix/end2end/MathTrigFunctionEnd2EndIT.java
new file mode 100644
index 0000000..b4f2b4f
--- /dev/null
+++ b/phoenix-core/src/it/java/org/apache/phoenix/end2end/MathTrigFunctionEnd2EndIT.java
@@ -0,0 +1,94 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.apache.phoenix.end2end;
+
+import static org.apache.phoenix.util.TestUtil.closeStmtAndConn;
+import static org.junit.Assert.assertTrue;
+
+import java.sql.Connection;
+import java.sql.DriverManager;
+import java.sql.PreparedStatement;
+import java.sql.ResultSet;
+
+import org.apache.phoenix.expression.function.CosFunction;
+import org.apache.phoenix.expression.function.SinFunction;
+import org.apache.phoenix.expression.function.TanFunction;
+import org.junit.Before;
+import org.junit.Test;
+
+/**
+ * End to end tests for
+ * {@link org.apache.phoenix.expression.function.CosFunction}
+ * {@link org.apache.phoenix.expression.function.SinFunction}
+ * {@link org.apache.phoenix.expression.function.TanFunction}
+ */
+
+public class MathTrigFunctionEnd2EndIT extends ParallelStatsDisabledIT {
+
+    private static final String KEY = "key";
+    private String tableName;
+
+    @Before
+    public void initTable() throws Exception {
+        Connection conn = null;
+        PreparedStatement stmt = null;
+        tableName = generateUniqueName();
+
+        try {
+            conn = DriverManager.getConnection(getUrl());
+            String ddl;
+            ddl =
+                    "CREATE TABLE " + tableName + " (k VARCHAR NOT NULL PRIMARY KEY, doub DOUBLE)";
+            conn.createStatement().execute(ddl);
+            conn.commit();
+        } finally {
+            closeStmtAndConn(stmt, conn);
+        }
+    }
+
+    private void updateTableSpec(Connection conn, double data, String tableName) throws Exception {
+        PreparedStatement stmt =
+                conn.prepareStatement("UPSERT INTO " + tableName + " VALUES (?, ?)");
+        stmt.setString(1, KEY);
+        stmt.setDouble(2, data);
+        stmt.executeUpdate();
+        conn.commit();
+    }
+
+    private void testNumberSpec(Connection conn, double data, String tableName) throws Exception {
+        updateTableSpec(conn, data, tableName);
+        ResultSet rs =
+                conn.createStatement().executeQuery(
+                        "SELECT SIN(doub),COS(doub),TAN(doub) FROM " + tableName);
+        assertTrue(rs.next());
+        Double d = Double.valueOf(data);
+        assertTrue(twoDoubleEquals(rs.getDouble(1), Math.sin(data)));
+        assertTrue(twoDoubleEquals(rs.getDouble(2), Math.cos(data)));
+        assertTrue(twoDoubleEquals(rs.getDouble(3), Math.tan(data)));
+
+        assertTrue(!rs.next());
+    }
+
+    @Test
+    public void test() throws Exception {
+        Connection conn = DriverManager.getConnection(getUrl());
+        for (double d : new double[] { 0.0, 1.0, -1.0, 123.1234, -123.1234 }) {
+            testNumberSpec(conn, d, tableName);
+        }
+    }
+}
diff --git a/phoenix-core/src/main/java/org/apache/phoenix/expression/ExpressionType.java b/phoenix-core/src/main/java/org/apache/phoenix/expression/ExpressionType.java
index a18928c..8f36e23 100644
--- a/phoenix-core/src/main/java/org/apache/phoenix/expression/ExpressionType.java
+++ b/phoenix-core/src/main/java/org/apache/phoenix/expression/ExpressionType.java
@@ -188,6 +188,9 @@ public enum ExpressionType {
     ArrayRemoveFunction(ArrayRemoveFunction.class),
     TransactionProviderNameFunction(TransactionProviderNameFunction.class),
     MathPIFunction(MathPIFunction.class),
+    SinFunction(SinFunction.class),
+    CosFunction(CosFunction.class),
+    TanFunction(TanFunction.class),
     ;
 
     ExpressionType(Class<? extends Expression> clazz) {
diff --git a/phoenix-core/src/main/java/org/apache/phoenix/expression/function/CosFunction.java b/phoenix-core/src/main/java/org/apache/phoenix/expression/function/CosFunction.java
new file mode 100644
index 0000000..b6532d8
--- /dev/null
+++ b/phoenix-core/src/main/java/org/apache/phoenix/expression/function/CosFunction.java
@@ -0,0 +1,56 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
package org.apache.phoenix.expression.function;

import java.sql.SQLException;
import java.util.List;

import org.apache.phoenix.expression.Expression;
import org.apache.phoenix.parse.FunctionParseNode.Argument;
import org.apache.phoenix.parse.FunctionParseNode.BuiltInFunction;
import org.apache.phoenix.schema.types.PDecimal;
import org.apache.phoenix.schema.types.PDouble;

/**
 * Built-in COS(x) function: the trigonometric cosine of its numeric argument
 * (interpreted as radians), computed with {@link Math#cos(double)}.
 */
@BuiltInFunction(name = CosFunction.NAME, args = { @Argument(allowedTypes = { PDouble.class,
        PDecimal.class }) })
public class CosFunction extends JavaMathOneArgumentFunction {

    public static final String NAME = "COS";

    public CosFunction() {
    }

    public CosFunction(List<Expression> children) throws SQLException {
        super(children);
    }

    @Override
    public String getName() {
        return NAME;
    }

    @Override
    protected double compute(double firstArg) {
        return Math.cos(firstArg);
    }

    @Override
    public OrderPreserving preservesOrder() {
        // Cosine is not monotonic, so the ordering of the input does NOT carry
        // over to the output. Returning YES (as originally written) would let
        // the optimizer incorrectly skip a sort for ORDER BY COS(col).
        return OrderPreserving.NO;
    }
}
diff --git a/phoenix-core/src/main/java/org/apache/phoenix/expression/function/SinFunction.java b/phoenix-core/src/main/java/org/apache/phoenix/expression/function/SinFunction.java
new file mode 100644
index 0000000..3b29f7f
--- /dev/null
+++ b/phoenix-core/src/main/java/org/apache/phoenix/expression/function/SinFunction.java
@@ -0,0 +1,56 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
package org.apache.phoenix.expression.function;

import java.sql.SQLException;
import java.util.List;

import org.apache.phoenix.expression.Expression;
import org.apache.phoenix.parse.FunctionParseNode.Argument;
import org.apache.phoenix.parse.FunctionParseNode.BuiltInFunction;
import org.apache.phoenix.schema.types.PDecimal;
import org.apache.phoenix.schema.types.PDouble;

/**
 * Built-in SIN(x) function: the trigonometric sine of its numeric argument
 * (interpreted as radians), computed with {@link Math#sin(double)}.
 */
@BuiltInFunction(name = SinFunction.NAME, args = { @Argument(allowedTypes = { PDouble.class,
        PDecimal.class }) })
public class SinFunction extends JavaMathOneArgumentFunction {

    public static final String NAME = "SIN";

    public SinFunction() {
    }

    public SinFunction(List<Expression> children) throws SQLException {
        super(children);
    }

    @Override
    public String getName() {
        return NAME;
    }

    @Override
    protected double compute(double firstArg) {
        return Math.sin(firstArg);
    }

    @Override
    public OrderPreserving preservesOrder() {
        // Sine is not monotonic, so input ordering does NOT carry over to the
        // output. Returning YES (as originally written) would let the optimizer
        // incorrectly skip a sort for ORDER BY SIN(col).
        return OrderPreserving.NO;
    }
}
diff --git a/phoenix-core/src/main/java/org/apache/phoenix/expression/function/TanFunction.java b/phoenix-core/src/main/java/org/apache/phoenix/expression/function/TanFunction.java
new file mode 100644
index 0000000..5951cab
--- /dev/null
+++ b/phoenix-core/src/main/java/org/apache/phoenix/expression/function/TanFunction.java
@@ -0,0 +1,56 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
package org.apache.phoenix.expression.function;

import java.sql.SQLException;
import java.util.List;

import org.apache.phoenix.expression.Expression;
import org.apache.phoenix.parse.FunctionParseNode.Argument;
import org.apache.phoenix.parse.FunctionParseNode.BuiltInFunction;
import org.apache.phoenix.schema.types.PDecimal;
import org.apache.phoenix.schema.types.PDouble;

/**
 * Built-in TAN(x) function: the trigonometric tangent of its numeric argument
 * (interpreted as radians), computed with {@link Math#tan(double)}.
 */
@BuiltInFunction(name = TanFunction.NAME, args = { @Argument(allowedTypes = { PDouble.class,
        PDecimal.class }) })
public class TanFunction extends JavaMathOneArgumentFunction {

    public static final String NAME = "TAN";

    public TanFunction() {
    }

    public TanFunction(List<Expression> children) throws SQLException {
        super(children);
    }

    @Override
    public String getName() {
        return NAME;
    }

    @Override
    protected double compute(double firstArg) {
        return Math.tan(firstArg);
    }

    @Override
    public OrderPreserving preservesOrder() {
        // Tangent is neither monotonic nor continuous over its full domain, so
        // input ordering does NOT carry over to the output. Returning YES (as
        // originally written) would let the optimizer incorrectly skip a sort
        // for ORDER BY TAN(col).
        return OrderPreserving.NO;
    }
}
diff --git a/phoenix-core/src/test/java/org/apache/phoenix/expression/MathTrigFunctionTest.java b/phoenix-core/src/test/java/org/apache/phoenix/expression/MathTrigFunctionTest.java
new file mode 100644
index 0000000..796106f
--- /dev/null
+++ b/phoenix-core/src/test/java/org/apache/phoenix/expression/MathTrigFunctionTest.java
@@ -0,0 +1,179 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.apache.phoenix.expression;
+
import static org.junit.Assert.assertEquals;
import static org.junit.Assert.assertTrue;

import java.math.BigDecimal;
import java.sql.SQLException;
import java.util.Arrays;
import java.util.Collection;
import java.util.List;

import org.apache.hadoop.hbase.io.ImmutableBytesWritable;
import org.apache.phoenix.expression.function.CosFunction;
import org.apache.phoenix.expression.function.SinFunction;
import org.apache.phoenix.expression.function.TanFunction;
import org.apache.phoenix.query.BaseTest;
import org.apache.phoenix.schema.SortOrder;
import org.apache.phoenix.schema.types.PDecimal;
import org.apache.phoenix.schema.types.PDouble;
import org.apache.phoenix.schema.types.PFloat;
import org.apache.phoenix.schema.types.PInteger;
import org.apache.phoenix.schema.types.PLong;
import org.apache.phoenix.schema.types.PNumericType;
import org.apache.phoenix.schema.types.PSmallint;
import org.apache.phoenix.schema.types.PTinyint;
import org.apache.phoenix.schema.types.PUnsignedDouble;
import org.apache.phoenix.schema.types.PUnsignedFloat;
import org.apache.phoenix.schema.types.PUnsignedInt;
import org.apache.phoenix.schema.types.PUnsignedLong;
import org.apache.phoenix.schema.types.PUnsignedSmallint;
import org.apache.phoenix.schema.types.PUnsignedTinyint;
import org.junit.Test;
import org.junit.runner.RunWith;
import org.junit.runners.Parameterized;
import org.junit.runners.Parameterized.Parameters;

import com.google.common.collect.Lists;
+/**
+ * Unit tests for {@link SinFunction}
+ * Unit tests for {@link CosFunction}
+ * Unit tests for {@link TanFunction}
+ */
+
+@RunWith(Parameterized.class)
+public class MathTrigFunctionTest {
+
+    private Number[] value;
+    private PNumericType dataType;
+
+    public MathTrigFunctionTest(Number[] value, PNumericType dataType) {
+        this.value = value;
+        this.dataType = dataType;
+    }
+
+    @Parameters(name = "{0} {1}")
+    public static Collection<Object> data() {
+        return Arrays.asList(new Object[][]{
+            {
+                new BigDecimal[]{BigDecimal.valueOf(1.0), BigDecimal.valueOf(0.0),
+                    BigDecimal.valueOf(-1.0), BigDecimal.valueOf(123.1234),
+                    BigDecimal.valueOf(-123.1234)},
+                PDecimal.INSTANCE
+            },
+            {
+                new Float[]{1.0f, 0.0f, -1.0f, Float.MAX_VALUE, Float.MIN_VALUE,
+                    -Float.MAX_VALUE, -Float.MIN_VALUE, 123.1234f, -123.1234f},
+                PFloat.INSTANCE
+            },
+            {
+                new Float[]{1.0f, 0.0f, Float.MAX_VALUE, Float.MIN_VALUE, 123.1234f},
+                PUnsignedFloat.INSTANCE
+            },
+            {
+                new Double[]{1.0, 0.0, -1.0, Double.MAX_VALUE, Double.MIN_VALUE,
+                    -Double.MAX_VALUE, -Double.MIN_VALUE, 123.1234, -123.1234},
+                PDouble.INSTANCE
+            },
+            {
+                new Double[]{1.0, 0.0, Double.MAX_VALUE, Double.MIN_VALUE, 123.1234},
+                PUnsignedDouble.INSTANCE
+            },
+            {
+                new Long[]{(long) 1, (long) 0, (long) -1, Long.MAX_VALUE,
+                    Long.MIN_VALUE, (long) 123, (long) -123},
+                PLong.INSTANCE
+            },
+            {
+                new Long[]{(long) 1, (long) 0, Long.MAX_VALUE, (long) 123},
+                PUnsignedLong.INSTANCE
+            },
+            {
+                new Integer[]{1, 0, -1, Integer.MAX_VALUE, Integer.MIN_VALUE, 123, -123},
+                PInteger.INSTANCE
+            },
+            {
+                new Integer[]{1, 0, Integer.MAX_VALUE, 123},
+                PUnsignedInt.INSTANCE
+            },
+            {
+                new Short[]{(short) 1, (short) 0, (short) -1, Short.MAX_VALUE,
+                    Short.MIN_VALUE, (short) 123, (short) -123},
+                PSmallint.INSTANCE
+            },
+            {
+                new Short[]{(short) 1, (short) 0, Short.MAX_VALUE, (short) 123},
+                PSmallint.INSTANCE
+            },
+            {
+                new Byte[]{(byte) 1, (byte) 0, (byte) -1, Byte.MAX_VALUE,
+                    Byte.MIN_VALUE, (byte) 123, (byte) -123},
+                PTinyint.INSTANCE
+            },
+            {
+                new Byte[]{(byte) 1, (byte) 0, Byte.MAX_VALUE, (byte) 123},
+                PTinyint.INSTANCE
+            }
+    });
+    }
+
+    private boolean testExpression(LiteralExpression literal, double expectedResult,
+                                          String testedFunction) throws SQLException {
+        List<Expression> expressions = Lists.newArrayList((Expression) literal);
+        ImmutableBytesWritable ptr = new ImmutableBytesWritable();
+        Expression mathFunction = null;
+
+        if (testedFunction.equals("SIN")) {
+            mathFunction = new SinFunction(expressions);
+        } else if (testedFunction.equals("COS")) {
+            mathFunction = new CosFunction(expressions);
+        } else if (testedFunction.equals("TAN")) {
+            mathFunction = new TanFunction(expressions);
+        }
+
+        boolean ret = mathFunction.evaluate(null, ptr);
+        if (ret) {
+            Double result =
+                    (Double) mathFunction.getDataType().toObject(ptr, mathFunction.getSortOrder());
+            assertTrue(BaseTest.twoDoubleEquals(result.doubleValue(), expectedResult));
+        }
+
+        return ret;
+    }
+
+    private void test(Number value, PNumericType dataType, double expectedResult,
+                             String testedFunction)
+            throws SQLException {
+        LiteralExpression literal = LiteralExpression.newConstant(value, dataType, SortOrder.ASC);
+        boolean ret1 = testExpression(literal, expectedResult, testedFunction);
+
+        literal = LiteralExpression.newConstant(value, dataType, SortOrder.DESC);
+        boolean ret2 = testExpression(literal, expectedResult, testedFunction);
+        assertEquals(ret1, ret2);
+    }
+
+    @Test
+    public void testBatch()
+            throws SQLException {
+        for (int i = 0; i < value.length; ++i) {
+            test(value[i], dataType, Math.sin(value[i].doubleValue()), "SIN");
+            test(value[i], dataType, Math.cos(value[i].doubleValue()), "COS");
+            test(value[i], dataType, Math.tan(value[i].doubleValue()), "TAN");
+        }
+    }
+}


[phoenix] 05/12: PHOENIX-5235: Update SQLline version to the latest

Posted by pb...@apache.org.
This is an automated email from the ASF dual-hosted git repository.

pboado pushed a commit to branch 5.x-cdh6
in repository https://gitbox.apache.org/repos/asf/phoenix.git

commit d55fc511616c1b2d33f79b8671549657e2c3e6de
Author: s.kadam <s....@salesforce.com>
AuthorDate: Fri Apr 19 23:05:27 2019 +0100

    PHOENIX-5235: Update SQLline version to the latest
---
 pom.xml | 2 +-
 1 file changed, 1 insertion(+), 1 deletion(-)

diff --git a/pom.xml b/pom.xml
index dd4dda9..a356365 100644
--- a/pom.xml
+++ b/pom.xml
@@ -98,7 +98,7 @@
     <commons-lang.version>${cdh.commons-lang3.version}</commons-lang.version>
     <commons-logging.version>${cdh.commons-logging.version}</commons-logging.version>
     <commons-csv.version>1.0</commons-csv.version>
-    <sqlline.version>1.2.0</sqlline.version>
+    <sqlline.version>1.7.0</sqlline.version>
     <guava.version>13.0.1</guava.version>
     <flume.version>${cdh.flume-ng.version}</flume.version>
     <kafka.version>${cdh.kafka.version}</kafka.version>