You are viewing a plain text version of this content. The canonical link for it is here.
Posted to commits@hive.apache.org by br...@apache.org on 2013/10/12 18:38:13 UTC
svn commit: r1531557 [4/20] - in /hive/branches/maven: ./ ant/
ant/src/org/apache/hadoop/hive/ant/ beeline/
beeline/src/java/org/apache/hive/beeline/
beeline/src/test/org/apache/hive/beeline/src/test/ cli/
cli/src/test/org/apache/hadoop/hive/cli/ commo...
Modified: hive/branches/maven/pom.xml
URL: http://svn.apache.org/viewvc/hive/branches/maven/pom.xml?rev=1531557&r1=1531556&r2=1531557&view=diff
==============================================================================
--- hive/branches/maven/pom.xml (original)
+++ hive/branches/maven/pom.xml Sat Oct 12 16:37:47 2013
@@ -35,7 +35,6 @@
<module>hbase-handler</module>
<module>hcatalog</module>
<module>hwi</module>
- <module>itests</module>
<module>jdbc</module>
<module>metastore</module>
<module>ql</module>
@@ -43,12 +42,16 @@
<module>service</module>
<module>shims</module>
<module>testutils</module>
+ <module>packaging</module>
</modules>
<properties>
<project.build.sourceEncoding>UTF-8</project.build.sourceEncoding>
-
- <hive.path.to.root>..</hive.path.to.root>
+ <maven.local.repository>${user.home}/.m2/repository</maven.local.repository>
+ <hive.path.to.root>.</hive.path.to.root>
+ <test.tmp.dir>${project.build.directory}/tmp</test.tmp.dir>
+ <test.warehouse.dir>${project.build.directory}/warehouse</test.warehouse.dir>
+ <test.warehouse.scheme>pfile://</test.warehouse.scheme>
<!-- the versions of libraries that we use -->
<activemq.version>5.5.0</activemq.version>
@@ -151,25 +154,6 @@
</repository>
</repositories>
- <profiles>
- <profile>
- <id>hadoop-1</id>
- <activation>
- <activeByDefault>true</activeByDefault>
- </activation>
- <properties>
- <active.hadoop.version>${hadoop-20S.version}</active.hadoop.version>
- </properties>
- </profile>
- <profile>
- <id>hadoop-2</id>
- <properties>
- <active.hadoop.version>${hadoop-23.version}</active.hadoop.version>
- </properties>
- </profile>
- </profiles>
-
-
<dependencies>
<!-- global dependencies -->
<dependency>
@@ -212,33 +196,11 @@
<version>20020829</version>
</dependency>
</dependencies>
- <executions>
- <execution>
- <id>define-classpath</id>
- <phase>process-resources</phase>
- <goals>
- <goal>run</goal>
- </goals>
- <configuration>
- <exportAntProperties>true</exportAntProperties>
- <target>
- <property name="maven.test.classpath" refid="maven.test.classpath"/>
- </target>
- </configuration>
- </execution>
- <execution>
- <id>delete-test-tempdir</id>
- <phase>process-test-resources</phase>
- <goals>
- <goal>run</goal>
- </goals>
- <configuration>
- <target>
- <delete dir="${project.build.directory}/test-tmp" />
- </target>
- </configuration>
- </execution>
- </executions>
+ </plugin>
+ <plugin>
+ <groupId>org.apache.maven.plugins</groupId>
+ <artifactId>maven-assembly-plugin</artifactId>
+ <version>2.4</version>
</plugin>
<plugin>
<groupId>org.apache.maven.plugins</groupId>
@@ -254,28 +216,6 @@
<groupId>org.apache.maven.plugins</groupId>
<artifactId>maven-surefire-plugin</artifactId>
<version>2.16</version>
- <configuration>
- <redirectTestOutputToFile>true</redirectTestOutputToFile>
- <reuseForks>false</reuseForks>
- <failIfNoTests>false</failIfNoTests>
- <additionalClasspathElements>
- <additionalClasspathElement>${basedir}/${hive.path.to.root}/data/conf</additionalClasspathElement>
- <additionalClasspathElement>${basedir}/${hive.path.to.root}/conf</additionalClasspathElement>
- </additionalClasspathElements>
- <environmentVariables>
- <TZ>US/Pacific</TZ>
- <LANG>en_US.UTF-8</LANG>
- <HADOOP_CLASSPATH>${basedir}/${hive.path.to.root}/data/conf:${basedir}/${hive.path.to.root}/conf</HADOOP_CLASSPATH>
- <HIVE_HADOOP_TEST_CLASSPATH>${maven.test.classpath}</HIVE_HADOOP_TEST_CLASSPATH>
- </environmentVariables>
- <systemPropertyVariables>
- <build.dir>${project.build.directory}</build.dir>
- <hadoop.bin.path>${basedir}/${hive.path.to.root}/testutils/hadoop</hadoop.bin.path>
- <hive.version>${project.version}</hive.version>
- <test.data.dir>${basedir}/${hive.path.to.root}/data/files</test.data.dir>
- <test.tmp.dir>${project.build.directory}/test-tmp</test.tmp.dir>
- </systemPropertyVariables>
- </configuration>
</plugin>
<plugin>
<groupId>org.apache.maven.plugins</groupId>
@@ -306,6 +246,94 @@
</plugin>
</plugins>
</pluginManagement>
+
+ <plugins>
+ <plugin>
+ <groupId>org.apache.maven.plugins</groupId>
+ <artifactId>maven-antrun-plugin</artifactId>
+ <executions>
+ <execution>
+ <id>define-classpath</id>
+ <phase>process-resources</phase>
+ <goals>
+ <goal>run</goal>
+ </goals>
+ <configuration>
+ <exportAntProperties>true</exportAntProperties>
+ <target>
+ <property name="maven.test.classpath" refid="maven.test.classpath"/>
+ </target>
+ </configuration>
+ </execution>
+ <execution>
+ <id>setup-test-dirs</id>
+ <phase>process-test-resources</phase>
+ <goals>
+ <goal>run</goal>
+ </goals>
+ <configuration>
+ <target>
+ <delete dir="${test.tmp.dir}" />
+ <delete dir="${test.warehouse.dir}" />
+ <mkdir dir="${test.tmp.dir}" />
+ <mkdir dir="${test.warehouse.dir}" />
+ <mkdir dir="${test.tmp.dir}/conf" />
+ <!-- copies hive-site.xml so it can be modified -->
+ <copy todir="${test.tmp.dir}/conf/">
+ <fileset dir="${basedir}/${hive.path.to.root}/data/conf/"/>
+ </copy>
+ </target>
+ </configuration>
+ </execution>
+ </executions>
+ </plugin>
+ <plugin>
+ <groupId>org.apache.maven.plugins</groupId>
+ <artifactId>maven-surefire-plugin</artifactId>
+ <configuration>
+ <excludes>
+ <exclude>**/TestSerDe.java</exclude>
+ <exclude>**/TestHiveMetaStore.java</exclude>
+ <exclude>**/ql/exec/vector/util/*.java</exclude>
+ <exclude>**/ql/exec/vector/udf/legacy/*.java</exclude>
+ <exclude>**/ql/exec/vector/udf/generic/*.java</exclude>
+ <exclude>**/TestHiveServer2Concurrency.java</exclude>
+ <exclude>**/TestHiveMetaStore.java</exclude>
+ </excludes>
+ <redirectTestOutputToFile>true</redirectTestOutputToFile>
+ <reuseForks>false</reuseForks>
+ <failIfNoTests>false</failIfNoTests>
+ <additionalClasspathElements>
+ <additionalClasspathElement>${test.tmp.dir}/conf</additionalClasspathElement>
+ <additionalClasspathElement>${basedir}/${hive.path.to.root}/conf</additionalClasspathElement>
+ </additionalClasspathElements>
+ <environmentVariables>
+ <TZ>US/Pacific</TZ>
+ <LANG>en_US.UTF-8</LANG>
+ <HADOOP_CLASSPATH>${test.tmp.dir}/conf:${basedir}/${hive.path.to.root}/conf</HADOOP_CLASSPATH>
+ <HIVE_HADOOP_TEST_CLASSPATH>${maven.test.classpath}</HIVE_HADOOP_TEST_CLASSPATH>
+ </environmentVariables>
+ <systemPropertyVariables>
+ <build.dir>${project.build.directory}</build.dir>
+ <derby.version>${derby.version}</derby.version>
+ <derby.stream.error.file>${test.tmp.dir}/derby.log</derby.stream.error.file>
+ <hadoop.bin.path>${basedir}/${hive.path.to.root}/testutils/hadoop</hadoop.bin.path>
+ <hive.root>${basedir}/${hive.path.to.root}/</hive.root>
+ <hive.version>${project.version}</hive.version>
+ <maven.local.repository>${maven.local.repository}</maven.local.repository>
+ <log4j.configuration>file://${test.tmp.dir}/conf/hive-log4j.properties</log4j.configuration>
+ <java.io.tmpdir>${test.tmp.dir}</java.io.tmpdir>
+ <test.data.files>${basedir}/${hive.path.to.root}/data/files</test.data.files>
+ <test.data.dir>${basedir}/${hive.path.to.root}/data/files</test.data.dir>
+ <test.tmp.dir>${test.tmp.dir}</test.tmp.dir>
+ <test.dfs.mkdir>${test.dfs.mkdir}</test.dfs.mkdir>
+ <test.output.overwrite>${test.output.overwrite}</test.output.overwrite>
+ <test.warehouse.dir>${test.warehouse.scheme}${test.warehouse.dir}</test.warehouse.dir>
+ <test.src.tables>src,src1,srcbucket,srcbucket2,src_json,src_thrift,src_sequencefile,srcpart,alltypesorc</test.src.tables>
+ </systemPropertyVariables>
+ </configuration>
+ </plugin>
+ </plugins>
</build>
</project>
Modified: hive/branches/maven/ql/pom.xml
URL: http://svn.apache.org/viewvc/hive/branches/maven/ql/pom.xml?rev=1531557&r1=1531556&r2=1531557&view=diff
==============================================================================
--- hive/branches/maven/ql/pom.xml (original)
+++ hive/branches/maven/ql/pom.xml Sat Oct 12 16:37:47 2013
@@ -27,6 +27,10 @@
<packaging>jar</packaging>
<name>Hive Query Language</name>
+ <properties>
+ <hive.path.to.root>..</hive.path.to.root>
+ </properties>
+
<dependencies>
<!-- intra-project -->
<dependency>
@@ -34,6 +38,11 @@
<artifactId>hive-metastore</artifactId>
<version>${project.version}</version>
</dependency>
+ <dependency>
+ <groupId>org.apache.hive</groupId>
+ <artifactId>hive-ant</artifactId>
+ <version>${project.version}</version>
+ </dependency>
<!-- inter-project -->
<dependency>
<groupId>com.esotericsoftware.kryo</groupId>
@@ -226,41 +235,26 @@
</configuration>
</plugin>
<plugin>
- <groupId>org.codehaus.mojo</groupId>
- <artifactId>build-helper-maven-plugin</artifactId>
+ <groupId>org.apache.maven.plugins</groupId>
+ <artifactId>maven-antrun-plugin</artifactId>
<executions>
<execution>
- <id>add-source</id>
+ <id>generate-sources</id>
<phase>generate-sources</phase>
- <goals>
- <goal>add-source</goal>
- </goals>
<configuration>
- <sources>
- <source>src/gen/protobuf/gen-java</source>
- <source>src/gen/thrift/gen-javabean</source>
- </sources>
+ <target>
+ <property name="compile.classpath" refid="maven.compile.classpath"/>
+ <taskdef name="vectorcodegen" classname="org.apache.hadoop.hive.ant.GenVectorCode"
+ classpath="${compile.classpath}"/>
+ <mkdir dir="${project.build.directory}/generated-sources/java/org/apache/hadoop/hive/ql/exec/vector/expressions/gen/"/>
+ <mkdir dir="${project.build.directory}/generated-sources/java/org/apache/hadoop/hive/ql/exec/vector/expressions/aggregates/gen/"/>
+ <mkdir dir="${project.build.directory}/generated-test-sources/java/org/apache/hadoop/hive/ql/exec/vector/expressions/gen/"/>
+ <vectorcodegen templateBaseDir="${basedir}/src/gen/vectorization/" buildDir="${project.build.directory}" />
+ </target>
</configuration>
- </execution>
- </executions>
- </plugin>
- <plugin>
- <groupId>org.apache.maven.plugins</groupId>
- <artifactId>maven-install-plugin</artifactId>
- <executions>
- <execution>
- <id>install-exec-jar</id>
- <phase>install</phase>
<goals>
- <goal>install-file</goal>
+ <goal>run</goal>
</goals>
- <configuration>
- <file>${basedir}/target/hive-ql-${project.version}-jar-with-dependencies.jar</file>
- <groupId>org.apache.hive</groupId>
- <artifactId>hive-exec</artifactId>
- <version>${project.version}</version>
- <packaging>jar</packaging>
- </configuration>
</execution>
</executions>
</plugin>
@@ -280,13 +274,14 @@
<artifactId>maven-shade-plugin</artifactId>
<executions>
<execution>
+ <id>build-exec-bundle</id>
<phase>package</phase>
<goals>
<goal>shade</goal>
</goals>
<configuration>
<shadedArtifactAttached>true</shadedArtifactAttached>
- <shadedClassifierName>jar-with-dependencies</shadedClassifierName>
+ <shadedClassifierName>exec-bundle</shadedClassifierName>
<artifactSet>
<includes>
<!-- order is meant to be the same as the ant build -->
@@ -312,6 +307,38 @@
</execution>
</executions>
</plugin>
+ <plugin>
+ <groupId>org.codehaus.mojo</groupId>
+ <artifactId>build-helper-maven-plugin</artifactId>
+ <executions>
+ <execution>
+ <id>add-source</id>
+ <phase>generate-sources</phase>
+ <goals>
+ <goal>add-source</goal>
+ </goals>
+ <configuration>
+ <sources>
+ <source>src/gen/protobuf/gen-java</source>
+ <source>src/gen/thrift/gen-javabean</source>
+ <source>${project.build.directory}/generated-sources/java</source>
+ </sources>
+ </configuration>
+ </execution>
+ <execution>
+ <id>add-test-sources</id>
+ <phase>generate-test-sources</phase>
+ <goals>
+ <goal>add-test-source</goal>
+ </goals>
+ <configuration>
+ <sources>
+ <source>${project.build.directory}/generated-test-sources/java</source>
+ </sources>
+ </configuration>
+ </execution>
+ </executions>
+ </plugin>
</plugins>
</build>
Modified: hive/branches/maven/ql/src/java/conf/hive-exec-log4j.properties
URL: http://svn.apache.org/viewvc/hive/branches/maven/ql/src/java/conf/hive-exec-log4j.properties?rev=1531557&r1=1531556&r2=1531557&view=diff
==============================================================================
--- hive/branches/maven/ql/src/java/conf/hive-exec-log4j.properties (original)
+++ hive/branches/maven/ql/src/java/conf/hive-exec-log4j.properties Sat Oct 12 16:37:47 2013
@@ -17,7 +17,8 @@
# Define some default values that can be overridden by system properties
hive.log.threshold=ALL
hive.root.logger=INFO,FA
-hive.log.dir=/tmp/${user.name}
+hive.log.dir=${java.io.tmpdir}/${user.name}
+hive.query.id=hadoop
hive.log.file=${hive.query.id}.log
# Define the root logger to the system property "hadoop.root.logger".
Modified: hive/branches/maven/ql/src/java/org/apache/hadoop/hive/ql/exec/mr/ExecDriver.java
URL: http://svn.apache.org/viewvc/hive/branches/maven/ql/src/java/org/apache/hadoop/hive/ql/exec/mr/ExecDriver.java?rev=1531557&r1=1531556&r2=1531557&view=diff
==============================================================================
--- hive/branches/maven/ql/src/java/org/apache/hadoop/hive/ql/exec/mr/ExecDriver.java (original)
+++ hive/branches/maven/ql/src/java/org/apache/hadoop/hive/ql/exec/mr/ExecDriver.java Sat Oct 12 16:37:47 2013
@@ -658,6 +658,12 @@ public class ExecDriver extends Task<Map
boolean isSilent = HiveConf.getBoolVar(conf, HiveConf.ConfVars.HIVESESSIONSILENT);
+ String queryId = HiveConf.getVar(conf, HiveConf.ConfVars.HIVEQUERYID, "").trim();
+ if(queryId.isEmpty()) {
+ queryId = "unknown-" + System.currentTimeMillis();
+ }
+ System.setProperty(HiveConf.ConfVars.HIVEQUERYID.toString(), queryId);
+
if (noLog) {
// If started from main(), and noLog is on, we should not output
// any logs. To turn the log on, please set -Dtest.silent=false
Modified: hive/branches/maven/ql/src/java/org/apache/hadoop/hive/ql/exec/mr/MapRedTask.java
URL: http://svn.apache.org/viewvc/hive/branches/maven/ql/src/java/org/apache/hadoop/hive/ql/exec/mr/MapRedTask.java?rev=1531557&r1=1531556&r2=1531557&view=diff
==============================================================================
--- hive/branches/maven/ql/src/java/org/apache/hadoop/hive/ql/exec/mr/MapRedTask.java (original)
+++ hive/branches/maven/ql/src/java/org/apache/hadoop/hive/ql/exec/mr/MapRedTask.java Sat Oct 12 16:37:47 2013
@@ -61,7 +61,7 @@ public class MapRedTask extends ExecDriv
static final String HIVE_DEBUG_RECURSIVE = "HIVE_DEBUG_RECURSIVE";
static final String HIVE_MAIN_CLIENT_DEBUG_OPTS = "HIVE_MAIN_CLIENT_DEBUG_OPTS";
static final String HIVE_CHILD_CLIENT_DEBUG_OPTS = "HIVE_CHILD_CLIENT_DEBUG_OPTS";
- static final String[] HIVE_SYS_PROP = {"build.dir", "build.dir.hive"};
+ static final String[] HIVE_SYS_PROP = {"build.dir", "build.dir.hive", "hive.query.id"};
private transient ContentSummary inputSummary = null;
private transient boolean runningViaChild = false;
Modified: hive/branches/maven/ql/src/java/org/apache/hadoop/hive/ql/exec/mr/MapredLocalTask.java
URL: http://svn.apache.org/viewvc/hive/branches/maven/ql/src/java/org/apache/hadoop/hive/ql/exec/mr/MapredLocalTask.java?rev=1531557&r1=1531556&r2=1531557&view=diff
==============================================================================
--- hive/branches/maven/ql/src/java/org/apache/hadoop/hive/ql/exec/mr/MapredLocalTask.java (original)
+++ hive/branches/maven/ql/src/java/org/apache/hadoop/hive/ql/exec/mr/MapredLocalTask.java Sat Oct 12 16:37:47 2013
@@ -87,7 +87,7 @@ public class MapredLocalTask extends Tas
public static transient final Log l4j = LogFactory.getLog(MapredLocalTask.class);
static final String HADOOP_MEM_KEY = "HADOOP_HEAPSIZE";
static final String HADOOP_OPTS_KEY = "HADOOP_OPTS";
- static final String[] HIVE_SYS_PROP = {"build.dir", "build.dir.hive"};
+ static final String[] HIVE_SYS_PROP = {"build.dir", "build.dir.hive", "hive.query.id"};
public static MemoryMXBean memoryMXBean;
private static final Log LOG = LogFactory.getLog(MapredLocalTask.class);
Modified: hive/branches/maven/ql/src/java/org/apache/hadoop/hive/ql/metadata/formatting/TextMetaDataFormatter.java
URL: http://svn.apache.org/viewvc/hive/branches/maven/ql/src/java/org/apache/hadoop/hive/ql/metadata/formatting/TextMetaDataFormatter.java?rev=1531557&r1=1531556&r2=1531557&view=diff
==============================================================================
--- hive/branches/maven/ql/src/java/org/apache/hadoop/hive/ql/metadata/formatting/TextMetaDataFormatter.java (original)
+++ hive/branches/maven/ql/src/java/org/apache/hadoop/hive/ql/metadata/formatting/TextMetaDataFormatter.java Sat Oct 12 16:37:47 2013
@@ -274,6 +274,8 @@ class TextMetaDataFormatter implements M
int numOfFiles = 0;
boolean unknown = false;
+ System.err.println("XXX tblPath " + tblPath);
+ System.err.println("XXX locations " + locations);
FileSystem fs = tblPath.getFileSystem(conf);
// in case all files in locations do not exist
try {
Modified: hive/branches/maven/ql/src/java/org/apache/hadoop/hive/ql/parse/EximUtil.java
URL: http://svn.apache.org/viewvc/hive/branches/maven/ql/src/java/org/apache/hadoop/hive/ql/parse/EximUtil.java?rev=1531557&r1=1531556&r2=1531557&view=diff
==============================================================================
--- hive/branches/maven/ql/src/java/org/apache/hadoop/hive/ql/parse/EximUtil.java (original)
+++ hive/branches/maven/ql/src/java/org/apache/hadoop/hive/ql/parse/EximUtil.java Sat Oct 12 16:37:47 2013
@@ -75,11 +75,11 @@ public class EximUtil {
String scheme = uri.getScheme();
String authority = uri.getAuthority();
String path = uri.getPath();
- LOG.debug("Path before norm :" + path);
+ LOG.info("Path before norm :" + path);
// generate absolute path relative to home directory
if (!path.startsWith("/")) {
if (testMode) {
- path = (new Path(System.getProperty("build.dir.hive"),
+ path = (new Path(System.getProperty("test.tmp.dir"),
path)).toUri().getPath();
} else {
path = (new Path(new Path("/user/" + System.getProperty("user.name")),
@@ -102,7 +102,7 @@ public class EximUtil {
authority = defaultURI.getAuthority();
}
- LOG.debug("Scheme:" + scheme + ", authority:" + authority + ", path:" + path);
+ LOG.info("Scheme:" + scheme + ", authority:" + authority + ", path:" + path);
Collection<String> eximSchemes = conf.getStringCollection(
HiveConf.ConfVars.HIVE_EXIM_URI_SCHEME_WL.varname);
if (!eximSchemes.contains(scheme)) {
@@ -144,7 +144,7 @@ public class EximUtil {
String authority = uri.getAuthority();
String path = uri.getPath();
if (!path.startsWith("/")) {
- path = (new Path(System.getProperty("build.dir.hive"),
+ path = (new Path(System.getProperty("test.tmp.dir"),
path)).toUri().getPath();
}
if (StringUtils.isEmpty(scheme)) {
Modified: hive/branches/maven/ql/src/test/org/apache/hadoop/hive/ql/QTestUtil.java
URL: http://svn.apache.org/viewvc/hive/branches/maven/ql/src/test/org/apache/hadoop/hive/ql/QTestUtil.java?rev=1531557&r1=1531556&r2=1531557&view=diff
==============================================================================
--- hive/branches/maven/ql/src/test/org/apache/hadoop/hive/ql/QTestUtil.java (original)
+++ hive/branches/maven/ql/src/test/org/apache/hadoop/hive/ql/QTestUtil.java Sat Oct 12 16:37:47 2013
@@ -108,12 +108,7 @@ public class QTestUtil {
private final Set<String> qSkipSet;
private final Set<String> qSortSet;
private static final String SORT_SUFFIX = ".sorted";
- public static final HashSet<String> srcTables = new HashSet<String>
- (Arrays.asList(new String [] {
- "src", "src1", "srcbucket", "srcbucket2", "src_json", "src_thrift",
- "src_sequencefile", "srcpart", "alltypesorc"
- }));
-
+ public static final HashSet<String> srcTables = new HashSet<String>();
private ParseDriver pd;
private Hive db;
protected HiveConf conf;
@@ -128,6 +123,18 @@ public class QTestUtil {
private String hadoopVer = null;
private QTestSetup setup = null;
+ static {
+ for (String srcTable : System.getProperty("test.src.tables", "").trim().split(",")) {
+ srcTable = srcTable.trim();
+ if (!srcTable.isEmpty()) {
+ srcTables.add(srcTable);
+ }
+ }
+ if (srcTables.isEmpty()) {
+ throw new AssertionError("Source tables cannot be empty");
+ }
+ }
+
public boolean deleteDirectory(File path) {
if (path.exists()) {
File[] files = path.listFiles();
@@ -1156,7 +1163,6 @@ public class QTestUtil {
});
public int checkCliDriverResults(String tname) throws Exception {
- String[] cmdArray;
assert(qMap.containsKey(tname));
String outFileName = outPath(outDir, tname + ".out");
@@ -1178,7 +1184,7 @@ public class QTestUtil {
private static int overwriteResults(String inFileName, String outFileName) throws Exception {
// This method can be replaced with Files.copy(source, target, REPLACE_EXISTING)
// once Hive uses JAVA 7.
- System.out.println("Overwriting results");
+ System.out.println("Overwriting results " + inFileName + " to " + outFileName);
return executeCmd(new String[] {
"cp",
getQuotedString(inFileName),
@@ -1352,7 +1358,7 @@ public class QTestUtil {
public void preTest(HiveConf conf) throws Exception {
if (zooKeeperCluster == null) {
- String tmpdir = System.getProperty("user.dir")+"/../build/ql/tmp";
+ String tmpdir = System.getProperty("test.tmp.dir");
zooKeeperCluster = new MiniZooKeeperCluster();
zkPort = zooKeeperCluster.startup(new File(tmpdir, "zookeeper"));
}
Modified: hive/branches/maven/ql/src/test/org/apache/hadoop/hive/ql/exec/TestExecDriver.java
URL: http://svn.apache.org/viewvc/hive/branches/maven/ql/src/test/org/apache/hadoop/hive/ql/exec/TestExecDriver.java?rev=1531557&r1=1531556&r2=1531557&view=diff
==============================================================================
--- hive/branches/maven/ql/src/test/org/apache/hadoop/hive/ql/exec/TestExecDriver.java (original)
+++ hive/branches/maven/ql/src/test/org/apache/hadoop/hive/ql/exec/TestExecDriver.java Sat Oct 12 16:37:47 2013
@@ -69,10 +69,9 @@ public class TestExecDriver extends Test
static HiveConf conf;
- private static String tmpdir = System.getProperty("java.io.tmpdir") + File.separator + System.getProperty("user.name")
- + File.separator;
- private static Log LOG = LogFactory.getLog(TestExecDriver.class);
- private static Path tmppath = new Path(tmpdir);
+ private static final String tmpdir = System.getProperty("test.tmp.dir");
+ private static final Log LOG = LogFactory.getLog(TestExecDriver.class);
+ private static final Path tmppath = new Path(tmpdir);
private static Hive db;
private static FileSystem fs;
@@ -131,8 +130,7 @@ public class TestExecDriver extends Test
}
} catch (Throwable e) {
- e.printStackTrace();
- throw new RuntimeException("Encountered throwable");
+ throw new RuntimeException("Encountered throwable", e);
}
}
@@ -156,10 +154,10 @@ public class TestExecDriver extends Test
// inbuilt assumption that the testdir has only one output file.
Path di_test = new Path(tmppath, testdir);
if (!fs.exists(di_test)) {
- throw new RuntimeException(tmpdir + testdir + " does not exist");
+ throw new RuntimeException(tmpdir + File.separator + testdir + " does not exist");
}
if (!fs.getFileStatus(di_test).isDir()) {
- throw new RuntimeException(tmpdir + testdir + " is not a directory");
+ throw new RuntimeException(tmpdir + File.separator + testdir + " is not a directory");
}
FSDataInputStream fi_test = fs.open((fs.listStatus(di_test))[0].getPath());
@@ -198,7 +196,7 @@ public class TestExecDriver extends Test
@SuppressWarnings("unchecked")
private void populateMapPlan1(Table src) {
- Operator<FileSinkDesc> op2 = OperatorFactory.get(new FileSinkDesc(tmpdir
+ Operator<FileSinkDesc> op2 = OperatorFactory.get(new FileSinkDesc(tmpdir + File.separator
+ "mapplan1.out", Utilities.defaultTd, true));
Operator<FilterDesc> op1 = OperatorFactory.get(getTestFilterDesc("key"),
op2);
@@ -209,7 +207,7 @@ public class TestExecDriver extends Test
@SuppressWarnings("unchecked")
private void populateMapPlan2(Table src) {
- Operator<FileSinkDesc> op3 = OperatorFactory.get(new FileSinkDesc(tmpdir
+ Operator<FileSinkDesc> op3 = OperatorFactory.get(new FileSinkDesc(tmpdir + File.separator
+ "mapplan2.out", Utilities.defaultTd, false));
Operator<ScriptDesc> op2 = OperatorFactory.get(new ScriptDesc("cat",
@@ -245,7 +243,7 @@ public class TestExecDriver extends Test
mr.setReduceWork(rWork);
// reduce side work
- Operator<FileSinkDesc> op3 = OperatorFactory.get(new FileSinkDesc(tmpdir
+ Operator<FileSinkDesc> op3 = OperatorFactory.get(new FileSinkDesc(tmpdir + File.separator
+ "mapredplan1.out", Utilities.defaultTd, false));
Operator<ExtractDesc> op2 = OperatorFactory.get(new ExtractDesc(
@@ -275,7 +273,7 @@ public class TestExecDriver extends Test
mr.setReduceWork(rWork);
// reduce side work
- Operator<FileSinkDesc> op4 = OperatorFactory.get(new FileSinkDesc(tmpdir
+ Operator<FileSinkDesc> op4 = OperatorFactory.get(new FileSinkDesc(tmpdir + File.separator
+ "mapredplan2.out", Utilities.defaultTd, false));
Operator<FilterDesc> op3 = OperatorFactory.get(getTestFilterDesc("0"), op4);
@@ -319,7 +317,7 @@ public class TestExecDriver extends Test
rWork.getTagToValueDesc().add(op2.getConf().getValueSerializeInfo());
// reduce side work
- Operator<FileSinkDesc> op4 = OperatorFactory.get(new FileSinkDesc(tmpdir
+ Operator<FileSinkDesc> op4 = OperatorFactory.get(new FileSinkDesc(tmpdir + File.separator
+ "mapredplan3.out", Utilities.defaultTd, false));
Operator<SelectDesc> op5 = OperatorFactory.get(new SelectDesc(Utilities
@@ -362,7 +360,7 @@ public class TestExecDriver extends Test
mr.setReduceWork(rWork);
// reduce side work
- Operator<FileSinkDesc> op3 = OperatorFactory.get(new FileSinkDesc(tmpdir
+ Operator<FileSinkDesc> op3 = OperatorFactory.get(new FileSinkDesc(tmpdir + File.separator
+ "mapredplan4.out", Utilities.defaultTd, false));
Operator<ExtractDesc> op2 = OperatorFactory.get(new ExtractDesc(
@@ -401,7 +399,7 @@ public class TestExecDriver extends Test
rWork.getTagToValueDesc().add(op0.getConf().getValueSerializeInfo());
// reduce side work
- Operator<FileSinkDesc> op3 = OperatorFactory.get(new FileSinkDesc(tmpdir
+ Operator<FileSinkDesc> op3 = OperatorFactory.get(new FileSinkDesc(tmpdir + File.separator
+ "mapredplan5.out", Utilities.defaultTd, false));
Operator<ExtractDesc> op2 = OperatorFactory.get(new ExtractDesc(
@@ -442,7 +440,7 @@ public class TestExecDriver extends Test
rWork.getTagToValueDesc().add(op1.getConf().getValueSerializeInfo());
// reduce side work
- Operator<FileSinkDesc> op3 = OperatorFactory.get(new FileSinkDesc(tmpdir
+ Operator<FileSinkDesc> op3 = OperatorFactory.get(new FileSinkDesc(tmpdir + File.separator
+ "mapredplan6.out", Utilities.defaultTd, false));
Operator<FilterDesc> op2 = OperatorFactory.get(getTestFilterDesc("0"), op3);
@@ -472,118 +470,70 @@ public class TestExecDriver extends Test
public void testMapPlan1() throws Exception {
LOG.info("Beginning testMapPlan1");
-
- try {
- populateMapPlan1(db.getTable(MetaStoreUtils.DEFAULT_DATABASE_NAME, "src"));
- executePlan();
- fileDiff("lt100.txt.deflate", "mapplan1.out");
- } catch (Throwable e) {
- e.printStackTrace();
- fail("Got Throwable");
- }
+ populateMapPlan1(db.getTable(MetaStoreUtils.DEFAULT_DATABASE_NAME, "src"));
+ executePlan();
+ fileDiff("lt100.txt.deflate", "mapplan1.out");
}
public void testMapPlan2() throws Exception {
LOG.info("Beginning testMapPlan2");
-
- try {
- populateMapPlan2(db.getTable(MetaStoreUtils.DEFAULT_DATABASE_NAME, "src"));
- executePlan();
- fileDiff("lt100.txt", "mapplan2.out");
- } catch (Throwable e) {
- e.printStackTrace();
- fail("Got Throwable");
- }
+ populateMapPlan2(db.getTable(MetaStoreUtils.DEFAULT_DATABASE_NAME, "src"));
+ executePlan();
+ fileDiff("lt100.txt", "mapplan2.out");
}
public void testMapRedPlan1() throws Exception {
LOG.info("Beginning testMapRedPlan1");
-
- try {
- populateMapRedPlan1(db.getTable(MetaStoreUtils.DEFAULT_DATABASE_NAME,
- "src"));
- executePlan();
- fileDiff("kv1.val.sorted.txt", "mapredplan1.out");
- } catch (Throwable e) {
- e.printStackTrace();
- fail("Got Throwable");
- }
+ populateMapRedPlan1(db.getTable(MetaStoreUtils.DEFAULT_DATABASE_NAME,
+ "src"));
+ executePlan();
+ fileDiff("kv1.val.sorted.txt", "mapredplan1.out");
}
public void testMapRedPlan2() throws Exception {
LOG.info("Beginning testMapPlan2");
-
- try {
- populateMapRedPlan2(db.getTable(MetaStoreUtils.DEFAULT_DATABASE_NAME,
- "src"));
- executePlan();
- fileDiff("lt100.sorted.txt", "mapredplan2.out");
- } catch (Throwable e) {
- e.printStackTrace();
- fail("Got Throwable");
- }
+ populateMapRedPlan2(db.getTable(MetaStoreUtils.DEFAULT_DATABASE_NAME,
+ "src"));
+ executePlan();
+ fileDiff("lt100.sorted.txt", "mapredplan2.out");
}
public void testMapRedPlan3() throws Exception {
LOG.info("Beginning testMapPlan3");
-
- try {
- populateMapRedPlan3(db.getTable(MetaStoreUtils.DEFAULT_DATABASE_NAME,
- "src"), db.getTable(MetaStoreUtils.DEFAULT_DATABASE_NAME, "src2"));
- executePlan();
- fileDiff("kv1kv2.cogroup.txt", "mapredplan3.out");
- } catch (Throwable e) {
- e.printStackTrace();
- fail("Got Throwable");
- }
+ populateMapRedPlan3(db.getTable(MetaStoreUtils.DEFAULT_DATABASE_NAME,
+ "src"), db.getTable(MetaStoreUtils.DEFAULT_DATABASE_NAME, "src2"));
+ executePlan();
+ fileDiff("kv1kv2.cogroup.txt", "mapredplan3.out");
}
public void testMapRedPlan4() throws Exception {
LOG.info("Beginning testMapPlan4");
-
- try {
- populateMapRedPlan4(db.getTable(MetaStoreUtils.DEFAULT_DATABASE_NAME,
- "src"));
- executePlan();
- fileDiff("kv1.string-sorted.txt", "mapredplan4.out");
- } catch (Throwable e) {
- e.printStackTrace();
- fail("Got Throwable");
- }
+ populateMapRedPlan4(db.getTable(MetaStoreUtils.DEFAULT_DATABASE_NAME,
+ "src"));
+ executePlan();
+ fileDiff("kv1.string-sorted.txt", "mapredplan4.out");
}
public void testMapRedPlan5() throws Exception {
LOG.info("Beginning testMapPlan5");
-
- try {
- populateMapRedPlan5(db.getTable(MetaStoreUtils.DEFAULT_DATABASE_NAME,
- "src"));
- executePlan();
- fileDiff("kv1.string-sorted.txt", "mapredplan5.out");
- } catch (Throwable e) {
- e.printStackTrace();
- fail("Got Throwable");
- }
+ populateMapRedPlan5(db.getTable(MetaStoreUtils.DEFAULT_DATABASE_NAME,
+ "src"));
+ executePlan();
+ fileDiff("kv1.string-sorted.txt", "mapredplan5.out");
}
public void testMapRedPlan6() throws Exception {
LOG.info("Beginning testMapPlan6");
-
- try {
- populateMapRedPlan6(db.getTable(MetaStoreUtils.DEFAULT_DATABASE_NAME,
- "src"));
- executePlan();
- fileDiff("lt100.sorted.txt", "mapredplan6.out");
- } catch (Throwable e) {
- e.printStackTrace();
- fail("Got Throwable");
- }
+ populateMapRedPlan6(db.getTable(MetaStoreUtils.DEFAULT_DATABASE_NAME,
+ "src"));
+ executePlan();
+ fileDiff("lt100.sorted.txt", "mapredplan6.out");
}
}
Modified: hive/branches/maven/ql/src/test/org/apache/hadoop/hive/ql/hooks/EnforceReadOnlyTables.java
URL: http://svn.apache.org/viewvc/hive/branches/maven/ql/src/test/org/apache/hadoop/hive/ql/hooks/EnforceReadOnlyTables.java?rev=1531557&r1=1531556&r2=1531557&view=diff
==============================================================================
--- hive/branches/maven/ql/src/test/org/apache/hadoop/hive/ql/hooks/EnforceReadOnlyTables.java (original)
+++ hive/branches/maven/ql/src/test/org/apache/hadoop/hive/ql/hooks/EnforceReadOnlyTables.java Sat Oct 12 16:37:47 2013
@@ -20,9 +20,9 @@ package org.apache.hadoop.hive.ql.hooks;
import static org.apache.hadoop.hive.metastore.MetaStoreUtils.DEFAULT_DATABASE_NAME;
+import java.util.HashSet;
import java.util.Set;
-import org.apache.hadoop.hive.ql.QTestUtil;
import org.apache.hadoop.hive.ql.metadata.Table;
import org.apache.hadoop.hive.ql.session.SessionState;
import org.apache.hadoop.security.UserGroupInformation;
@@ -33,6 +33,20 @@ import org.apache.hadoop.security.UserGr
*/
public class EnforceReadOnlyTables implements ExecuteWithHookContext {
+ private static final Set<String> READ_ONLY_TABLES = new HashSet<String>();
+
+ static {
+ for (String srcTable : System.getProperty("test.src.tables", "").trim().split(",")) {
+ srcTable = srcTable.trim();
+ if (!srcTable.isEmpty()) {
+ READ_ONLY_TABLES.add(srcTable);
+ }
+ }
+ if (READ_ONLY_TABLES.isEmpty()) {
+ throw new AssertionError("Source tables cannot be empty");
+ }
+ }
+
@Override
public void run(HookContext hookContext) throws Exception {
SessionState ss = SessionState.get();
@@ -53,7 +67,7 @@ public class EnforceReadOnlyTables imple
(w.getTyp() == WriteEntity.Type.PARTITION)) {
Table t = w.getTable();
if (DEFAULT_DATABASE_NAME.equalsIgnoreCase(t.getDbName())
- && QTestUtil.srcTables.contains(t.getTableName())) {
+ && READ_ONLY_TABLES.contains(t.getTableName())) {
throw new RuntimeException ("Cannot overwrite read-only table: " + t.getTableName());
}
}
Modified: hive/branches/maven/ql/src/test/org/apache/hadoop/hive/ql/io/PerformTestRCFileAndSeqFile.java
URL: http://svn.apache.org/viewvc/hive/branches/maven/ql/src/test/org/apache/hadoop/hive/ql/io/PerformTestRCFileAndSeqFile.java?rev=1531557&r1=1531556&r2=1531557&view=diff
==============================================================================
--- hive/branches/maven/ql/src/test/org/apache/hadoop/hive/ql/io/PerformTestRCFileAndSeqFile.java (original)
+++ hive/branches/maven/ql/src/test/org/apache/hadoop/hive/ql/io/PerformTestRCFileAndSeqFile.java Sat Oct 12 16:37:47 2013
@@ -63,7 +63,7 @@ public class PerformTestRCFileAndSeqFile
}
conf.setInt(RCFile.Writer.COLUMNS_BUFFER_SIZE_CONF_STR, 1 * 1024 * 1024);
if (file == null) {
- Path dir = new Path(System.getProperty("test.data.dir", ".") + "/mapred");
+ Path dir = new Path(System.getProperty("test.tmp.dir", ".") + "/mapred");
testRCFile = new Path(dir, "test_rcfile");
testSeqFile = new Path(dir, "test_seqfile");
} else {
Modified: hive/branches/maven/ql/src/test/org/apache/hadoop/hive/ql/io/TestFlatFileInputFormat.java
URL: http://svn.apache.org/viewvc/hive/branches/maven/ql/src/test/org/apache/hadoop/hive/ql/io/TestFlatFileInputFormat.java?rev=1531557&r1=1531556&r2=1531557&view=diff
==============================================================================
--- hive/branches/maven/ql/src/test/org/apache/hadoop/hive/ql/io/TestFlatFileInputFormat.java (original)
+++ hive/branches/maven/ql/src/test/org/apache/hadoop/hive/ql/io/TestFlatFileInputFormat.java Sat Oct 12 16:37:47 2013
@@ -60,7 +60,7 @@ public class TestFlatFileInputFormat ext
conf = new Configuration();
job = new JobConf(conf);
fs = FileSystem.getLocal(conf);
- dir = new Path(System.getProperty("test.data.dir", ".") + "/mapred");
+ dir = new Path(System.getProperty("test.tmp.dir", ".") + "/mapred");
file = new Path(dir, "test.txt");
reporter = Reporter.NULL;
fs.delete(dir, true);
@@ -142,7 +142,7 @@ public class TestFlatFileInputFormat ext
conf = new Configuration();
job = new JobConf(conf);
fs = FileSystem.getLocal(conf);
- dir = new Path(System.getProperty("test.data.dir", ".") + "/mapred");
+ dir = new Path(System.getProperty("test.tmp.dir", ".") + "/mapred");
file = new Path(dir, "test.txt");
reporter = Reporter.NULL;
fs.delete(dir, true);
@@ -215,7 +215,7 @@ public class TestFlatFileInputFormat ext
*
* try { // // create job and filesystem and reporter and such. // conf = new
* Configuration(); job = new JobConf(conf); fs = FileSystem.getLocal(conf);
- * dir = new Path(System.getProperty("test.data.dir",".") + "/mapred"); file =
+ * dir = new Path(System.getProperty("test.tmp.dir",".") + "/mapred"); file =
* new Path(dir, "test.txt"); reporter = Reporter.NULL; fs.delete(dir, true);
*
* job.setClass(FlatFileInputFormat.SerializationContextFromConf.
Modified: hive/branches/maven/ql/src/test/org/apache/hadoop/hive/ql/io/TestRCFile.java
URL: http://svn.apache.org/viewvc/hive/branches/maven/ql/src/test/org/apache/hadoop/hive/ql/io/TestRCFile.java?rev=1531557&r1=1531556&r2=1531557&view=diff
==============================================================================
--- hive/branches/maven/ql/src/test/org/apache/hadoop/hive/ql/io/TestRCFile.java (original)
+++ hive/branches/maven/ql/src/test/org/apache/hadoop/hive/ql/io/TestRCFile.java Sat Oct 12 16:37:47 2013
@@ -105,7 +105,7 @@ public class TestRCFile {
conf = new Configuration();
ColumnProjectionUtils.setReadAllColumns(conf);
fs = FileSystem.getLocal(conf);
- dir = new Path(System.getProperty("test.data.dir", ".") + "/mapred");
+ dir = new Path(System.getProperty("test.tmp.dir", ".") + "/mapred");
file = new Path(dir, "test_rcfile");
cleanup();
// the SerDe part is from TestLazySimpleSerDe
@@ -611,7 +611,7 @@ public class TestRCFile {
@Test
public void testSync() throws IOException {
- Path testDir = new Path(System.getProperty("test.data.dir", ".")
+ Path testDir = new Path(System.getProperty("test.tmp.dir", ".")
+ "/mapred/testsync");
Path testFile = new Path(testDir, "test_rcfile");
fs.delete(testFile, true);
@@ -678,7 +678,7 @@ public class TestRCFile {
private void writeThenReadByRecordReader(int intervalRecordCount,
int writeCount, int splitNumber, long minSplitSize, CompressionCodec codec)
throws IOException {
- Path testDir = new Path(System.getProperty("test.data.dir", ".")
+ Path testDir = new Path(System.getProperty("test.tmp.dir", ".")
+ "/mapred/testsmallfirstsplit");
Path testFile = new Path(testDir, "test_rcfile");
fs.delete(testFile, true);
@@ -750,7 +750,7 @@ public class TestRCFile {
Configuration conf = new Configuration();
LocalFileSystem fs = FileSystem.getLocal(conf);
// create an empty file (which is not a valid rcfile)
- Path path = new Path(System.getProperty("test.build.data", ".")
+ Path path = new Path(System.getProperty("test.tmp.dir", ".")
+ "/broken.rcfile");
fs.create(path).close();
// try to create RCFile.Reader
Modified: hive/branches/maven/ql/src/test/org/apache/hadoop/hive/ql/io/TestSymlinkTextInputFormat.java
URL: http://svn.apache.org/viewvc/hive/branches/maven/ql/src/test/org/apache/hadoop/hive/ql/io/TestSymlinkTextInputFormat.java?rev=1531557&r1=1531556&r2=1531557&view=diff
==============================================================================
--- hive/branches/maven/ql/src/test/org/apache/hadoop/hive/ql/io/TestSymlinkTextInputFormat.java (original)
+++ hive/branches/maven/ql/src/test/org/apache/hadoop/hive/ql/io/TestSymlinkTextInputFormat.java Sat Oct 12 16:37:47 2013
@@ -71,7 +71,7 @@ public class TestSymlinkTextInputFormat
conf = new Configuration();
job = new JobConf(conf);
fileSystem = FileSystem.getLocal(conf);
- testDir = new Path(System.getProperty("test.data.dir", System.getProperty(
+ testDir = new Path(System.getProperty("test.tmp.dir", System.getProperty(
"user.dir", new File(".").getAbsolutePath()))
+ "/TestSymlinkTextInputFormat");
reporter = Reporter.NULL;
Modified: hive/branches/maven/ql/src/test/org/apache/hadoop/hive/ql/io/orc/TestFileDump.java
URL: http://svn.apache.org/viewvc/hive/branches/maven/ql/src/test/org/apache/hadoop/hive/ql/io/orc/TestFileDump.java?rev=1531557&r1=1531556&r2=1531557&view=diff
==============================================================================
--- hive/branches/maven/ql/src/test/org/apache/hadoop/hive/ql/io/orc/TestFileDump.java (original)
+++ hive/branches/maven/ql/src/test/org/apache/hadoop/hive/ql/io/orc/TestFileDump.java Sat Oct 12 16:37:47 2013
@@ -34,16 +34,13 @@ import org.apache.hadoop.fs.Path;
import org.apache.hadoop.hive.conf.HiveConf;
import org.apache.hadoop.hive.serde2.objectinspector.ObjectInspector;
import org.apache.hadoop.hive.serde2.objectinspector.ObjectInspectorFactory;
+import org.apache.hive.common.util.HiveTestUtils;
import org.junit.Before;
import org.junit.Test;
public class TestFileDump {
- Path workDir = new Path(System.getProperty("test.tmp.dir",
- "target" + File.separator + "test" + File.separator + "tmp"));
- Path resourceDir = new Path(System.getProperty("test.build.resources",
- "ql" + File.separator + "src" + File.separator + "test" + File.separator + "resources"));
-
+ Path workDir = new Path(System.getProperty("test.tmp.dir"));
Configuration conf;
FileSystem fs;
Path testFilePath;
@@ -71,13 +68,16 @@ public class TestFileDump {
private static void checkOutput(String expected,
String actual) throws Exception {
BufferedReader eStream =
- new BufferedReader(new FileReader(expected));
+ new BufferedReader(new FileReader(HiveTestUtils.getFileFromClasspath(expected)));
BufferedReader aStream =
new BufferedReader(new FileReader(actual));
- String line = eStream.readLine();
- while (line != null) {
- assertEquals(line, aStream.readLine());
- line = eStream.readLine();
+ String expectedLine = eStream.readLine();
+ while (expectedLine != null) {
+ String actualLine = aStream.readLine();
+ System.out.println("actual: " + actualLine);
+ System.out.println("expected: " + expectedLine);
+ assertEquals(expectedLine, actualLine);
+ expectedLine = eStream.readLine();
}
assertNull(eStream.readLine());
assertNull(aStream.readLine());
@@ -110,8 +110,8 @@ public class TestFileDump {
}
writer.close();
PrintStream origOut = System.out;
- String outputFilename = File.separator + "orc-file-dump.out";
- FileOutputStream myOut = new FileOutputStream(workDir + outputFilename);
+ String outputFilename = "orc-file-dump.out";
+ FileOutputStream myOut = new FileOutputStream(workDir + File.separator + outputFilename);
// replace stdout and run command
System.setOut(new PrintStream(myOut));
@@ -120,7 +120,7 @@ public class TestFileDump {
System.setOut(origOut);
- checkOutput(resourceDir + outputFilename, workDir + outputFilename);
+ checkOutput(outputFilename, workDir + File.separator + outputFilename);
}
// Test that if the fraction of rows that have distinct strings is greater than the configured
@@ -164,8 +164,8 @@ public class TestFileDump {
}
writer.close();
PrintStream origOut = System.out;
- String outputFilename = File.separator + "orc-file-dump-dictionary-threshold.out";
- FileOutputStream myOut = new FileOutputStream(workDir + outputFilename);
+ String outputFilename = "orc-file-dump-dictionary-threshold.out";
+ FileOutputStream myOut = new FileOutputStream(workDir + File.separator + outputFilename);
// replace stdout and run command
System.setOut(new PrintStream(myOut));
@@ -173,6 +173,6 @@ public class TestFileDump {
System.out.flush();
System.setOut(origOut);
- checkOutput(resourceDir + outputFilename, workDir + outputFilename);
+ checkOutput(outputFilename, workDir + File.separator + outputFilename);
}
}
Modified: hive/branches/maven/ql/src/test/org/apache/hadoop/hive/ql/io/orc/TestInputOutputFormat.java
URL: http://svn.apache.org/viewvc/hive/branches/maven/ql/src/test/org/apache/hadoop/hive/ql/io/orc/TestInputOutputFormat.java?rev=1531557&r1=1531556&r2=1531557&view=diff
==============================================================================
--- hive/branches/maven/ql/src/test/org/apache/hadoop/hive/ql/io/orc/TestInputOutputFormat.java (original)
+++ hive/branches/maven/ql/src/test/org/apache/hadoop/hive/ql/io/orc/TestInputOutputFormat.java Sat Oct 12 16:37:47 2013
@@ -71,7 +71,7 @@ import org.junit.rules.TestName;
public class TestInputOutputFormat {
- Path workDir = new Path(System.getProperty("test.tmp.dir","target/test/tmp"));
+ Path workDir = new Path(System.getProperty("test.tmp.dir","target/tmp"));
public static class MyRow implements Writable {
int x;
Modified: hive/branches/maven/ql/src/test/org/apache/hadoop/hive/ql/io/orc/TestMemoryManager.java
URL: http://svn.apache.org/viewvc/hive/branches/maven/ql/src/test/org/apache/hadoop/hive/ql/io/orc/TestMemoryManager.java?rev=1531557&r1=1531556&r2=1531557&view=diff
==============================================================================
--- hive/branches/maven/ql/src/test/org/apache/hadoop/hive/ql/io/orc/TestMemoryManager.java (original)
+++ hive/branches/maven/ql/src/test/org/apache/hadoop/hive/ql/io/orc/TestMemoryManager.java Sat Oct 12 16:37:47 2013
@@ -51,7 +51,7 @@ public class TestMemoryManager {
NullCallback callback = new NullCallback();
long poolSize = mgr.getTotalMemoryPool();
assertEquals(Math.round(ManagementFactory.getMemoryMXBean().
- getHeapMemoryUsage().getMax() * 0.5f), poolSize);
+ getHeapMemoryUsage().getMax() * 0.5d), poolSize);
assertEquals(1.0, mgr.getAllocationScale(), 0.00001);
mgr.addWriter(new Path("p1"), 1000, callback);
assertEquals(1.0, mgr.getAllocationScale(), 0.00001);
Modified: hive/branches/maven/ql/src/test/org/apache/hadoop/hive/ql/io/orc/TestOrcFile.java
URL: http://svn.apache.org/viewvc/hive/branches/maven/ql/src/test/org/apache/hadoop/hive/ql/io/orc/TestOrcFile.java?rev=1531557&r1=1531556&r2=1531557&view=diff
==============================================================================
--- hive/branches/maven/ql/src/test/org/apache/hadoop/hive/ql/io/orc/TestOrcFile.java (original)
+++ hive/branches/maven/ql/src/test/org/apache/hadoop/hive/ql/io/orc/TestOrcFile.java Sat Oct 12 16:37:47 2013
@@ -66,6 +66,7 @@ import org.apache.hadoop.io.FloatWritabl
import org.apache.hadoop.io.IntWritable;
import org.apache.hadoop.io.LongWritable;
import org.apache.hadoop.io.Text;
+import org.apache.hive.common.util.HiveTestUtils;
import org.junit.Before;
import org.junit.Rule;
import org.junit.Test;
@@ -205,10 +206,7 @@ public class TestOrcFile {
@Test
public void testReadFormat_0_11() throws Exception {
- Path resourceDir = new Path(System.getProperty("test.build.resources", "ql"
- + File.separator + "src" + File.separator + "test" + File.separator
- + "resources"));
- Path oldFilePath = new Path(resourceDir, "orc-file-11-format.orc");
+ Path oldFilePath = new Path(HiveTestUtils.getFileFromClasspath("orc-file-11-format.orc"));
Reader reader = OrcFile.createReader(fs, oldFilePath);
int stripeCount = 0;
Modified: hive/branches/maven/ql/src/test/org/apache/hadoop/hive/ql/io/orc/TestOrcSerDeStats.java
URL: http://svn.apache.org/viewvc/hive/branches/maven/ql/src/test/org/apache/hadoop/hive/ql/io/orc/TestOrcSerDeStats.java?rev=1531557&r1=1531556&r2=1531557&view=diff
==============================================================================
--- hive/branches/maven/ql/src/test/org/apache/hadoop/hive/ql/io/orc/TestOrcSerDeStats.java (original)
+++ hive/branches/maven/ql/src/test/org/apache/hadoop/hive/ql/io/orc/TestOrcSerDeStats.java Sat Oct 12 16:37:47 2013
@@ -41,6 +41,7 @@ import org.apache.hadoop.hive.serde2.obj
import org.apache.hadoop.hive.serde2.objectinspector.primitive.StringObjectInspector;
import org.apache.hadoop.io.BytesWritable;
import org.apache.hadoop.io.Text;
+import org.apache.hive.common.util.HiveTestUtils;
import org.junit.Before;
import org.junit.Rule;
import org.junit.Test;
@@ -571,10 +572,7 @@ public class TestOrcSerDeStats {
@Test(expected = ClassCastException.class)
public void testSerdeStatsOldFormat() throws Exception {
- Path resourceDir = new Path(System.getProperty("test.build.resources", "ql"
- + File.separator + "src" + File.separator + "test" + File.separator
- + "resources"));
- Path oldFilePath = new Path(resourceDir, "orc-file-11-format.orc");
+ Path oldFilePath = new Path(HiveTestUtils.getFileFromClasspath("orc-file-11-format.orc"));
Reader reader = OrcFile.createReader(fs, oldFilePath);
int stripeCount = 0;
Modified: hive/branches/maven/ql/src/test/org/apache/hadoop/hive/ql/util/TestDosToUnix.java
URL: http://svn.apache.org/viewvc/hive/branches/maven/ql/src/test/org/apache/hadoop/hive/ql/util/TestDosToUnix.java?rev=1531557&r1=1531556&r2=1531557&view=diff
==============================================================================
--- hive/branches/maven/ql/src/test/org/apache/hadoop/hive/ql/util/TestDosToUnix.java (original)
+++ hive/branches/maven/ql/src/test/org/apache/hadoop/hive/ql/util/TestDosToUnix.java Sat Oct 12 16:37:47 2013
@@ -27,7 +27,7 @@ import junit.framework.TestCase;
public class TestDosToUnix extends TestCase {
- private static final String dataFile = System.getProperty("test.data.dir", ".") + "data_TestDosToUnix";
- private static final String dataFile = System.getProperty("test.data.dir", ".") + "data_TestDosToUnix";
+ private static final String dataFile = System.getProperty("test.tmp.dir", ".") + "/data_TestDosToUnix";
@Override
protected void setUp() throws Exception {
super.setUp();
Modified: hive/branches/maven/ql/src/test/queries/clientnegative/alter_concatenate_indexed_table.q
URL: http://svn.apache.org/viewvc/hive/branches/maven/ql/src/test/queries/clientnegative/alter_concatenate_indexed_table.q?rev=1531557&r1=1531556&r2=1531557&view=diff
==============================================================================
--- hive/branches/maven/ql/src/test/queries/clientnegative/alter_concatenate_indexed_table.q (original)
+++ hive/branches/maven/ql/src/test/queries/clientnegative/alter_concatenate_indexed_table.q Sat Oct 12 16:37:47 2013
@@ -1,9 +1,9 @@
set hive.exec.concatenate.check.index=true;
create table src_rc_concatenate_test(key int, value string) stored as rcfile;
-load data local inpath '../data/files/smbbucket_1.rc' into table src_rc_concatenate_test;
-load data local inpath '../data/files/smbbucket_2.rc' into table src_rc_concatenate_test;
-load data local inpath '../data/files/smbbucket_3.rc' into table src_rc_concatenate_test;
+load data local inpath '../../data/files/smbbucket_1.rc' into table src_rc_concatenate_test;
+load data local inpath '../../data/files/smbbucket_2.rc' into table src_rc_concatenate_test;
+load data local inpath '../../data/files/smbbucket_3.rc' into table src_rc_concatenate_test;
show table extended like `src_rc_concatenate_test`;
Modified: hive/branches/maven/ql/src/test/queries/clientnegative/alter_partition_invalidspec.q
URL: http://svn.apache.org/viewvc/hive/branches/maven/ql/src/test/queries/clientnegative/alter_partition_invalidspec.q?rev=1531557&r1=1531556&r2=1531557&view=diff
==============================================================================
--- hive/branches/maven/ql/src/test/queries/clientnegative/alter_partition_invalidspec.q (original)
+++ hive/branches/maven/ql/src/test/queries/clientnegative/alter_partition_invalidspec.q Sat Oct 12 16:37:47 2013
@@ -2,7 +2,7 @@
create table if not exists alter_part_invalidspec(key string, value string ) partitioned by (year string, month string) stored as textfile ;
-- Load data
-load data local inpath '../data/files/T1.txt' overwrite into table alter_part_invalidspec partition (year='1996', month='10');
-load data local inpath '../data/files/T1.txt' overwrite into table alter_part_invalidspec partition (year='1996', month='12');
+load data local inpath '../../data/files/T1.txt' overwrite into table alter_part_invalidspec partition (year='1996', month='10');
+load data local inpath '../../data/files/T1.txt' overwrite into table alter_part_invalidspec partition (year='1996', month='12');
alter table alter_part_invalidspec partition (year='1997') enable no_drop;
Modified: hive/branches/maven/ql/src/test/queries/clientnegative/alter_partition_nodrop.q
URL: http://svn.apache.org/viewvc/hive/branches/maven/ql/src/test/queries/clientnegative/alter_partition_nodrop.q?rev=1531557&r1=1531556&r2=1531557&view=diff
==============================================================================
--- hive/branches/maven/ql/src/test/queries/clientnegative/alter_partition_nodrop.q (original)
+++ hive/branches/maven/ql/src/test/queries/clientnegative/alter_partition_nodrop.q Sat Oct 12 16:37:47 2013
@@ -2,8 +2,8 @@
create table if not exists alter_part_nodrop_part(key string, value string ) partitioned by (year string, month string) stored as textfile ;
-- Load data
-load data local inpath '../data/files/T1.txt' overwrite into table alter_part_nodrop_part partition (year='1996', month='10');
-load data local inpath '../data/files/T1.txt' overwrite into table alter_part_nodrop_part partition (year='1996', month='12');
+load data local inpath '../../data/files/T1.txt' overwrite into table alter_part_nodrop_part partition (year='1996', month='10');
+load data local inpath '../../data/files/T1.txt' overwrite into table alter_part_nodrop_part partition (year='1996', month='12');
alter table alter_part_nodrop_part partition (year='1996') enable no_drop;
alter table alter_part_nodrop_part drop partition (year='1996');
Modified: hive/branches/maven/ql/src/test/queries/clientnegative/alter_partition_nodrop_table.q
URL: http://svn.apache.org/viewvc/hive/branches/maven/ql/src/test/queries/clientnegative/alter_partition_nodrop_table.q?rev=1531557&r1=1531556&r2=1531557&view=diff
==============================================================================
--- hive/branches/maven/ql/src/test/queries/clientnegative/alter_partition_nodrop_table.q (original)
+++ hive/branches/maven/ql/src/test/queries/clientnegative/alter_partition_nodrop_table.q Sat Oct 12 16:37:47 2013
@@ -2,8 +2,8 @@
create table if not exists alter_part_nodrop_table(key string, value string ) partitioned by (year string, month string) stored as textfile ;
-- Load data
-load data local inpath '../data/files/T1.txt' overwrite into table alter_part_nodrop_table partition (year='1996', month='10');
-load data local inpath '../data/files/T1.txt' overwrite into table alter_part_nodrop_table partition (year='1996', month='12');
+load data local inpath '../../data/files/T1.txt' overwrite into table alter_part_nodrop_table partition (year='1996', month='10');
+load data local inpath '../../data/files/T1.txt' overwrite into table alter_part_nodrop_table partition (year='1996', month='12');
alter table alter_part_nodrop_table partition (year='1996') enable no_drop;
drop table alter_part_nodrop_table;
Modified: hive/branches/maven/ql/src/test/queries/clientnegative/alter_partition_offline.q
URL: http://svn.apache.org/viewvc/hive/branches/maven/ql/src/test/queries/clientnegative/alter_partition_offline.q?rev=1531557&r1=1531556&r2=1531557&view=diff
==============================================================================
--- hive/branches/maven/ql/src/test/queries/clientnegative/alter_partition_offline.q (original)
+++ hive/branches/maven/ql/src/test/queries/clientnegative/alter_partition_offline.q Sat Oct 12 16:37:47 2013
@@ -2,8 +2,8 @@
create table if not exists alter_part_offline (key string, value string ) partitioned by (year string, month string) stored as textfile ;
-- Load data
-load data local inpath '../data/files/T1.txt' overwrite into table alter_part_offline partition (year='1996', month='10');
-load data local inpath '../data/files/T1.txt' overwrite into table alter_part_offline partition (year='1996', month='12');
+load data local inpath '../../data/files/T1.txt' overwrite into table alter_part_offline partition (year='1996', month='10');
+load data local inpath '../../data/files/T1.txt' overwrite into table alter_part_offline partition (year='1996', month='12');
alter table alter_part_offline partition (year='1996') disable offline;
select * from alter_part_offline where year = '1996';
Modified: hive/branches/maven/ql/src/test/queries/clientnegative/alter_rename_partition_failure.q
URL: http://svn.apache.org/viewvc/hive/branches/maven/ql/src/test/queries/clientnegative/alter_rename_partition_failure.q?rev=1531557&r1=1531556&r2=1531557&view=diff
==============================================================================
--- hive/branches/maven/ql/src/test/queries/clientnegative/alter_rename_partition_failure.q (original)
+++ hive/branches/maven/ql/src/test/queries/clientnegative/alter_rename_partition_failure.q Sat Oct 12 16:37:47 2013
@@ -1,5 +1,5 @@
create table alter_rename_partition_src ( col1 string ) stored as textfile ;
-load data local inpath '../data/files/test.dat' overwrite into table alter_rename_partition_src ;
+load data local inpath '../../data/files/test.dat' overwrite into table alter_rename_partition_src ;
create table alter_rename_partition ( col1 string ) partitioned by (pcol1 string , pcol2 string) stored as sequencefile;
insert overwrite table alter_rename_partition partition (pCol1='old_part1:', pcol2='old_part2:') select col1 from alter_rename_partition_src ;
Modified: hive/branches/maven/ql/src/test/queries/clientnegative/alter_rename_partition_failure2.q
URL: http://svn.apache.org/viewvc/hive/branches/maven/ql/src/test/queries/clientnegative/alter_rename_partition_failure2.q?rev=1531557&r1=1531556&r2=1531557&view=diff
==============================================================================
--- hive/branches/maven/ql/src/test/queries/clientnegative/alter_rename_partition_failure2.q (original)
+++ hive/branches/maven/ql/src/test/queries/clientnegative/alter_rename_partition_failure2.q Sat Oct 12 16:37:47 2013
@@ -1,5 +1,5 @@
create table alter_rename_partition_src ( col1 string ) stored as textfile ;
-load data local inpath '../data/files/test.dat' overwrite into table alter_rename_partition_src ;
+load data local inpath '../../data/files/test.dat' overwrite into table alter_rename_partition_src ;
create table alter_rename_partition ( col1 string ) partitioned by (pcol1 string , pcol2 string) stored as sequencefile;
insert overwrite table alter_rename_partition partition (pCol1='old_part1:', pcol2='old_part2:') select col1 from alter_rename_partition_src ;
Modified: hive/branches/maven/ql/src/test/queries/clientnegative/alter_rename_partition_failure3.q
URL: http://svn.apache.org/viewvc/hive/branches/maven/ql/src/test/queries/clientnegative/alter_rename_partition_failure3.q?rev=1531557&r1=1531556&r2=1531557&view=diff
==============================================================================
--- hive/branches/maven/ql/src/test/queries/clientnegative/alter_rename_partition_failure3.q (original)
+++ hive/branches/maven/ql/src/test/queries/clientnegative/alter_rename_partition_failure3.q Sat Oct 12 16:37:47 2013
@@ -1,5 +1,5 @@
create table alter_rename_partition_src ( col1 string ) stored as textfile ;
-load data local inpath '../data/files/test.dat' overwrite into table alter_rename_partition_src ;
+load data local inpath '../../data/files/test.dat' overwrite into table alter_rename_partition_src ;
create table alter_rename_partition ( col1 string ) partitioned by (pcol1 string , pcol2 string) stored as sequencefile;
insert overwrite table alter_rename_partition partition (pCol1='old_part1:', pcol2='old_part2:') select col1 from alter_rename_partition_src ;
Modified: hive/branches/maven/ql/src/test/queries/clientnegative/archive_corrupt.q
URL: http://svn.apache.org/viewvc/hive/branches/maven/ql/src/test/queries/clientnegative/archive_corrupt.q?rev=1531557&r1=1531556&r2=1531557&view=diff
==============================================================================
--- hive/branches/maven/ql/src/test/queries/clientnegative/archive_corrupt.q (original)
+++ hive/branches/maven/ql/src/test/queries/clientnegative/archive_corrupt.q Sat Oct 12 16:37:47 2013
@@ -14,5 +14,5 @@ create table tstsrcpart like srcpart;
-- to be thrown during the LOAD step. This former behavior is tested
-- in clientpositive/archive_corrupt.q
-load data local inpath '../data/files/archive_corrupt.rc' overwrite into table tstsrcpart partition (ds='2008-04-08', hr='11');
+load data local inpath '../../data/files/archive_corrupt.rc' overwrite into table tstsrcpart partition (ds='2008-04-08', hr='11');
Modified: hive/branches/maven/ql/src/test/queries/clientnegative/bucket_mapjoin_mismatch1.q
URL: http://svn.apache.org/viewvc/hive/branches/maven/ql/src/test/queries/clientnegative/bucket_mapjoin_mismatch1.q?rev=1531557&r1=1531556&r2=1531557&view=diff
==============================================================================
--- hive/branches/maven/ql/src/test/queries/clientnegative/bucket_mapjoin_mismatch1.q (original)
+++ hive/branches/maven/ql/src/test/queries/clientnegative/bucket_mapjoin_mismatch1.q Sat Oct 12 16:37:47 2013
@@ -1,19 +1,19 @@
CREATE TABLE srcbucket_mapjoin_part (key int, value string)
partitioned by (ds string) CLUSTERED BY (key) INTO 3 BUCKETS
STORED AS TEXTFILE;
-load data local inpath '../data/files/srcbucket20.txt'
+load data local inpath '../../data/files/srcbucket20.txt'
INTO TABLE srcbucket_mapjoin_part partition(ds='2008-04-08');
-load data local inpath '../data/files/srcbucket21.txt'
+load data local inpath '../../data/files/srcbucket21.txt'
INTO TABLE srcbucket_mapjoin_part partition(ds='2008-04-08');
-load data local inpath '../data/files/srcbucket22.txt'
+load data local inpath '../../data/files/srcbucket22.txt'
INTO TABLE srcbucket_mapjoin_part partition(ds='2008-04-08');
CREATE TABLE srcbucket_mapjoin_part_2 (key int, value string)
partitioned by (ds string) CLUSTERED BY (key) INTO 2 BUCKETS
STORED AS TEXTFILE;
-load data local inpath '../data/files/srcbucket22.txt'
+load data local inpath '../../data/files/srcbucket22.txt'
INTO TABLE srcbucket_mapjoin_part_2 partition(ds='2008-04-08');
-load data local inpath '../data/files/srcbucket23.txt'
+load data local inpath '../../data/files/srcbucket23.txt'
INTO TABLE srcbucket_mapjoin_part_2 partition(ds='2008-04-08');
-- The number of buckets in the 2 tables above (being joined later) dont match.
Modified: hive/branches/maven/ql/src/test/queries/clientnegative/bucket_mapjoin_wrong_table_metadata_1.q
URL: http://svn.apache.org/viewvc/hive/branches/maven/ql/src/test/queries/clientnegative/bucket_mapjoin_wrong_table_metadata_1.q?rev=1531557&r1=1531556&r2=1531557&view=diff
==============================================================================
--- hive/branches/maven/ql/src/test/queries/clientnegative/bucket_mapjoin_wrong_table_metadata_1.q (original)
+++ hive/branches/maven/ql/src/test/queries/clientnegative/bucket_mapjoin_wrong_table_metadata_1.q Sat Oct 12 16:37:47 2013
@@ -8,10 +8,10 @@ into 2 BUCKETS stored as textfile;
create table table2(key string, value string) clustered by (value, key)
into 2 BUCKETS stored as textfile;
-load data local inpath '../data/files/T1.txt' overwrite into table table1;
+load data local inpath '../../data/files/T1.txt' overwrite into table table1;
-load data local inpath '../data/files/T1.txt' overwrite into table table2;
-load data local inpath '../data/files/T2.txt' overwrite into table table2;
+load data local inpath '../../data/files/T1.txt' overwrite into table table2;
+load data local inpath '../../data/files/T2.txt' overwrite into table table2;
set hive.optimize.bucketmapjoin = true;
set hive.input.format = org.apache.hadoop.hive.ql.io.BucketizedHiveInputFormat;
Modified: hive/branches/maven/ql/src/test/queries/clientnegative/bucket_mapjoin_wrong_table_metadata_2.q
URL: http://svn.apache.org/viewvc/hive/branches/maven/ql/src/test/queries/clientnegative/bucket_mapjoin_wrong_table_metadata_2.q?rev=1531557&r1=1531556&r2=1531557&view=diff
==============================================================================
--- hive/branches/maven/ql/src/test/queries/clientnegative/bucket_mapjoin_wrong_table_metadata_2.q (original)
+++ hive/branches/maven/ql/src/test/queries/clientnegative/bucket_mapjoin_wrong_table_metadata_2.q Sat Oct 12 16:37:47 2013
@@ -8,13 +8,13 @@ into 2 BUCKETS stored as textfile;
create table table2(key string, value string) clustered by (value, key)
into 2 BUCKETS stored as textfile;
-load data local inpath '../data/files/T1.txt' overwrite into table table1 partition (ds='1');
-load data local inpath '../data/files/T2.txt' overwrite into table table1 partition (ds='1');
+load data local inpath '../../data/files/T1.txt' overwrite into table table1 partition (ds='1');
+load data local inpath '../../data/files/T2.txt' overwrite into table table1 partition (ds='1');
-load data local inpath '../data/files/T1.txt' overwrite into table table1 partition (ds='2');
+load data local inpath '../../data/files/T1.txt' overwrite into table table1 partition (ds='2');
-load data local inpath '../data/files/T1.txt' overwrite into table table2;
-load data local inpath '../data/files/T2.txt' overwrite into table table2;
+load data local inpath '../../data/files/T1.txt' overwrite into table table2;
+load data local inpath '../../data/files/T2.txt' overwrite into table table2;
set hive.optimize.bucketmapjoin = true;
set hive.input.format = org.apache.hadoop.hive.ql.io.BucketizedHiveInputFormat;
Modified: hive/branches/maven/ql/src/test/queries/clientnegative/columnstats_partlvl_dp.q
URL: http://svn.apache.org/viewvc/hive/branches/maven/ql/src/test/queries/clientnegative/columnstats_partlvl_dp.q?rev=1531557&r1=1531556&r2=1531557&view=diff
==============================================================================
--- hive/branches/maven/ql/src/test/queries/clientnegative/columnstats_partlvl_dp.q (original)
+++ hive/branches/maven/ql/src/test/queries/clientnegative/columnstats_partlvl_dp.q Sat Oct 12 16:37:47 2013
@@ -3,12 +3,12 @@ DROP TABLE Employee_Part;
CREATE TABLE Employee_Part(employeeID int, employeeName String) partitioned by (employeeSalary double, country string)
row format delimited fields terminated by '|' stored as textfile;
-LOAD DATA LOCAL INPATH "../data/files/employee2.dat" INTO TABLE Employee_Part partition(employeeSalary='2000.0', country='USA');
-LOAD DATA LOCAL INPATH "../data/files/employee2.dat" INTO TABLE Employee_Part partition(employeeSalary='2000.0', country='UK');
-LOAD DATA LOCAL INPATH "../data/files/employee2.dat" INTO TABLE Employee_Part partition(employeeSalary='3000.0', country='USA');
-LOAD DATA LOCAL INPATH "../data/files/employee2.dat" INTO TABLE Employee_Part partition(employeeSalary='4000.0', country='USA');
-LOAD DATA LOCAL INPATH "../data/files/employee2.dat" INTO TABLE Employee_Part partition(employeeSalary='3500.0', country='UK');
-LOAD DATA LOCAL INPATH "../data/files/employee2.dat" INTO TABLE Employee_Part partition(employeeSalary='3000.0', country='UK');
+LOAD DATA LOCAL INPATH "../../data/files/employee2.dat" INTO TABLE Employee_Part partition(employeeSalary='2000.0', country='USA');
+LOAD DATA LOCAL INPATH "../../data/files/employee2.dat" INTO TABLE Employee_Part partition(employeeSalary='2000.0', country='UK');
+LOAD DATA LOCAL INPATH "../../data/files/employee2.dat" INTO TABLE Employee_Part partition(employeeSalary='3000.0', country='USA');
+LOAD DATA LOCAL INPATH "../../data/files/employee2.dat" INTO TABLE Employee_Part partition(employeeSalary='4000.0', country='USA');
+LOAD DATA LOCAL INPATH "../../data/files/employee2.dat" INTO TABLE Employee_Part partition(employeeSalary='3500.0', country='UK');
+LOAD DATA LOCAL INPATH "../../data/files/employee2.dat" INTO TABLE Employee_Part partition(employeeSalary='3000.0', country='UK');
-- dynamic partitioning syntax
explain
Modified: hive/branches/maven/ql/src/test/queries/clientnegative/columnstats_partlvl_incorrect_num_keys.q
URL: http://svn.apache.org/viewvc/hive/branches/maven/ql/src/test/queries/clientnegative/columnstats_partlvl_incorrect_num_keys.q?rev=1531557&r1=1531556&r2=1531557&view=diff
==============================================================================
--- hive/branches/maven/ql/src/test/queries/clientnegative/columnstats_partlvl_incorrect_num_keys.q (original)
+++ hive/branches/maven/ql/src/test/queries/clientnegative/columnstats_partlvl_incorrect_num_keys.q Sat Oct 12 16:37:47 2013
@@ -3,12 +3,12 @@ DROP TABLE Employee_Part;
CREATE TABLE Employee_Part(employeeID int, employeeName String) partitioned by (employeeSalary double, country string)
row format delimited fields terminated by '|' stored as textfile;
-LOAD DATA LOCAL INPATH "../data/files/employee2.dat" INTO TABLE Employee_Part partition(employeeSalary='2000.0', country='USA');
-LOAD DATA LOCAL INPATH "../data/files/employee2.dat" INTO TABLE Employee_Part partition(employeeSalary='2000.0', country='UK');
-LOAD DATA LOCAL INPATH "../data/files/employee2.dat" INTO TABLE Employee_Part partition(employeeSalary='3000.0', country='USA');
-LOAD DATA LOCAL INPATH "../data/files/employee2.dat" INTO TABLE Employee_Part partition(employeeSalary='4000.0', country='USA');
-LOAD DATA LOCAL INPATH "../data/files/employee2.dat" INTO TABLE Employee_Part partition(employeeSalary='3500.0', country='UK');
-LOAD DATA LOCAL INPATH "../data/files/employee2.dat" INTO TABLE Employee_Part partition(employeeSalary='3000.0', country='UK');
+LOAD DATA LOCAL INPATH "../../data/files/employee2.dat" INTO TABLE Employee_Part partition(employeeSalary='2000.0', country='USA');
+LOAD DATA LOCAL INPATH "../../data/files/employee2.dat" INTO TABLE Employee_Part partition(employeeSalary='2000.0', country='UK');
+LOAD DATA LOCAL INPATH "../../data/files/employee2.dat" INTO TABLE Employee_Part partition(employeeSalary='3000.0', country='USA');
+LOAD DATA LOCAL INPATH "../../data/files/employee2.dat" INTO TABLE Employee_Part partition(employeeSalary='4000.0', country='USA');
+LOAD DATA LOCAL INPATH "../../data/files/employee2.dat" INTO TABLE Employee_Part partition(employeeSalary='3500.0', country='UK');
+LOAD DATA LOCAL INPATH "../../data/files/employee2.dat" INTO TABLE Employee_Part partition(employeeSalary='3000.0', country='UK');
-- don't specify all partitioning keys
explain
Modified: hive/branches/maven/ql/src/test/queries/clientnegative/columnstats_partlvl_invalid_values.q
URL: http://svn.apache.org/viewvc/hive/branches/maven/ql/src/test/queries/clientnegative/columnstats_partlvl_invalid_values.q?rev=1531557&r1=1531556&r2=1531557&view=diff
==============================================================================
--- hive/branches/maven/ql/src/test/queries/clientnegative/columnstats_partlvl_invalid_values.q (original)
+++ hive/branches/maven/ql/src/test/queries/clientnegative/columnstats_partlvl_invalid_values.q Sat Oct 12 16:37:47 2013
@@ -3,12 +3,12 @@ DROP TABLE Employee_Part;
CREATE TABLE Employee_Part(employeeID int, employeeName String) partitioned by (employeeSalary double, country string)
row format delimited fields terminated by '|' stored as textfile;
-LOAD DATA LOCAL INPATH "../data/files/employee2.dat" INTO TABLE Employee_Part partition(employeeSalary='2000.0', country='USA');
-LOAD DATA LOCAL INPATH "../data/files/employee2.dat" INTO TABLE Employee_Part partition(employeeSalary='2000.0', country='UK');
-LOAD DATA LOCAL INPATH "../data/files/employee2.dat" INTO TABLE Employee_Part partition(employeeSalary='3000.0', country='USA');
-LOAD DATA LOCAL INPATH "../data/files/employee2.dat" INTO TABLE Employee_Part partition(employeeSalary='4000.0', country='USA');
-LOAD DATA LOCAL INPATH "../data/files/employee2.dat" INTO TABLE Employee_Part partition(employeeSalary='3500.0', country='UK');
-LOAD DATA LOCAL INPATH "../data/files/employee2.dat" INTO TABLE Employee_Part partition(employeeSalary='3000.0', country='UK');
+LOAD DATA LOCAL INPATH "../../data/files/employee2.dat" INTO TABLE Employee_Part partition(employeeSalary='2000.0', country='USA');
+LOAD DATA LOCAL INPATH "../../data/files/employee2.dat" INTO TABLE Employee_Part partition(employeeSalary='2000.0', country='UK');
+LOAD DATA LOCAL INPATH "../../data/files/employee2.dat" INTO TABLE Employee_Part partition(employeeSalary='3000.0', country='USA');
+LOAD DATA LOCAL INPATH "../../data/files/employee2.dat" INTO TABLE Employee_Part partition(employeeSalary='4000.0', country='USA');
+LOAD DATA LOCAL INPATH "../../data/files/employee2.dat" INTO TABLE Employee_Part partition(employeeSalary='3500.0', country='UK');
+LOAD DATA LOCAL INPATH "../../data/files/employee2.dat" INTO TABLE Employee_Part partition(employeeSalary='3000.0', country='UK');
-- specify invalid values for the partitioning keys
explain
Modified: hive/branches/maven/ql/src/test/queries/clientnegative/columnstats_partlvl_multiple_part_clause.q
URL: http://svn.apache.org/viewvc/hive/branches/maven/ql/src/test/queries/clientnegative/columnstats_partlvl_multiple_part_clause.q?rev=1531557&r1=1531556&r2=1531557&view=diff
==============================================================================
--- hive/branches/maven/ql/src/test/queries/clientnegative/columnstats_partlvl_multiple_part_clause.q (original)
+++ hive/branches/maven/ql/src/test/queries/clientnegative/columnstats_partlvl_multiple_part_clause.q Sat Oct 12 16:37:47 2013
@@ -3,12 +3,12 @@ DROP TABLE Employee_Part;
CREATE TABLE Employee_Part(employeeID int, employeeName String) partitioned by (employeeSalary double, country string)
row format delimited fields terminated by '|' stored as textfile;
-LOAD DATA LOCAL INPATH "../data/files/employee2.dat" INTO TABLE Employee_Part partition(employeeSalary='2000.0', country='USA');
-LOAD DATA LOCAL INPATH "../data/files/employee2.dat" INTO TABLE Employee_Part partition(employeeSalary='2000.0', country='UK');
-LOAD DATA LOCAL INPATH "../data/files/employee2.dat" INTO TABLE Employee_Part partition(employeeSalary='3000.0', country='USA');
-LOAD DATA LOCAL INPATH "../data/files/employee2.dat" INTO TABLE Employee_Part partition(employeeSalary='4000.0', country='USA');
-LOAD DATA LOCAL INPATH "../data/files/employee2.dat" INTO TABLE Employee_Part partition(employeeSalary='3500.0', country='UK');
-LOAD DATA LOCAL INPATH "../data/files/employee2.dat" INTO TABLE Employee_Part partition(employeeSalary='3000.0', country='UK');
+LOAD DATA LOCAL INPATH "../../data/files/employee2.dat" INTO TABLE Employee_Part partition(employeeSalary='2000.0', country='USA');
+LOAD DATA LOCAL INPATH "../../data/files/employee2.dat" INTO TABLE Employee_Part partition(employeeSalary='2000.0', country='UK');
+LOAD DATA LOCAL INPATH "../../data/files/employee2.dat" INTO TABLE Employee_Part partition(employeeSalary='3000.0', country='USA');
+LOAD DATA LOCAL INPATH "../../data/files/employee2.dat" INTO TABLE Employee_Part partition(employeeSalary='4000.0', country='USA');
+LOAD DATA LOCAL INPATH "../../data/files/employee2.dat" INTO TABLE Employee_Part partition(employeeSalary='3500.0', country='UK');
+LOAD DATA LOCAL INPATH "../../data/files/employee2.dat" INTO TABLE Employee_Part partition(employeeSalary='3000.0', country='UK');
-- specify partitioning clause multiple times
explain
Modified: hive/branches/maven/ql/src/test/queries/clientnegative/columnstats_tbllvl.q
URL: http://svn.apache.org/viewvc/hive/branches/maven/ql/src/test/queries/clientnegative/columnstats_tbllvl.q?rev=1531557&r1=1531556&r2=1531557&view=diff
==============================================================================
--- hive/branches/maven/ql/src/test/queries/clientnegative/columnstats_tbllvl.q (original)
+++ hive/branches/maven/ql/src/test/queries/clientnegative/columnstats_tbllvl.q Sat Oct 12 16:37:47 2013
@@ -13,7 +13,7 @@ CREATE TABLE UserVisits_web_text_none (
avgTimeOnSite int)
row format delimited fields terminated by '|' stored as textfile;
-LOAD DATA LOCAL INPATH "../data/files/UserVisits.dat" INTO TABLE UserVisits_web_text_none;
+LOAD DATA LOCAL INPATH "../../data/files/UserVisits.dat" INTO TABLE UserVisits_web_text_none;
explain
analyze table UserVisits_web_text_none compute statistics for columns destIP;
Modified: hive/branches/maven/ql/src/test/queries/clientnegative/columnstats_tbllvl_complex_type.q
URL: http://svn.apache.org/viewvc/hive/branches/maven/ql/src/test/queries/clientnegative/columnstats_tbllvl_complex_type.q?rev=1531557&r1=1531556&r2=1531557&view=diff
==============================================================================
--- hive/branches/maven/ql/src/test/queries/clientnegative/columnstats_tbllvl_complex_type.q (original)
+++ hive/branches/maven/ql/src/test/queries/clientnegative/columnstats_tbllvl_complex_type.q Sat Oct 12 16:37:47 2013
@@ -8,7 +8,7 @@ CREATE TABLE table_complex_type (
d MAP<STRING,ARRAY<STRING>>
) STORED AS TEXTFILE;
-LOAD DATA LOCAL INPATH '../data/files/create_nested_type.txt' OVERWRITE INTO TABLE table_complex_type;
+LOAD DATA LOCAL INPATH '../../data/files/create_nested_type.txt' OVERWRITE INTO TABLE table_complex_type;
explain
Modified: hive/branches/maven/ql/src/test/queries/clientnegative/columnstats_tbllvl_incorrect_column.q
URL: http://svn.apache.org/viewvc/hive/branches/maven/ql/src/test/queries/clientnegative/columnstats_tbllvl_incorrect_column.q?rev=1531557&r1=1531556&r2=1531557&view=diff
==============================================================================
--- hive/branches/maven/ql/src/test/queries/clientnegative/columnstats_tbllvl_incorrect_column.q (original)
+++ hive/branches/maven/ql/src/test/queries/clientnegative/columnstats_tbllvl_incorrect_column.q Sat Oct 12 16:37:47 2013
@@ -13,7 +13,7 @@ CREATE TABLE UserVisits_web_text_none (
avgTimeOnSite int)
row format delimited fields terminated by '|' stored as textfile;
-LOAD DATA LOCAL INPATH "../data/files/UserVisits.dat" INTO TABLE UserVisits_web_text_none;
+LOAD DATA LOCAL INPATH "../../data/files/UserVisits.dat" INTO TABLE UserVisits_web_text_none;
explain
analyze table UserVisits_web_text_none compute statistics for columns destIP;
Modified: hive/branches/maven/ql/src/test/queries/clientnegative/deletejar.q
URL: http://svn.apache.org/viewvc/hive/branches/maven/ql/src/test/queries/clientnegative/deletejar.q?rev=1531557&r1=1531556&r2=1531557&view=diff
==============================================================================
--- hive/branches/maven/ql/src/test/queries/clientnegative/deletejar.q (original)
+++ hive/branches/maven/ql/src/test/queries/clientnegative/deletejar.q Sat Oct 12 16:37:47 2013
@@ -1,4 +1,4 @@
-ADD JAR ../build/ql/test/TestSerDe.jar;
-DELETE JAR ../build/ql/test/TestSerDe.jar;
+ADD JAR ${system:maven.local.repository}/org/apache/hive/hive-internal/${system:hive.version}/hive-internal-${system:hive.version}-test-serde.jar;
+DELETE JAR ${system:maven.local.repository}/org/apache/hive/hive-internal/${system:hive.version}/hive-internal-${system:hive.version}-test-serde.jar;
CREATE TABLE DELETEJAR(KEY STRING, VALUE STRING) ROW FORMAT SERDE 'org.apache.hadoop.hive.serde2.TestSerDe' STORED AS TEXTFILE;
Modified: hive/branches/maven/ql/src/test/queries/clientnegative/dynamic_partitions_with_whitelist.q
URL: http://svn.apache.org/viewvc/hive/branches/maven/ql/src/test/queries/clientnegative/dynamic_partitions_with_whitelist.q?rev=1531557&r1=1531556&r2=1531557&view=diff
==============================================================================
--- hive/branches/maven/ql/src/test/queries/clientnegative/dynamic_partitions_with_whitelist.q (original)
+++ hive/branches/maven/ql/src/test/queries/clientnegative/dynamic_partitions_with_whitelist.q Sat Oct 12 16:37:47 2013
@@ -8,7 +8,7 @@ create table source_table like srcpart;
create table dest_table like srcpart;
-load data local inpath '../data/files/srcbucket20.txt' INTO TABLE source_table partition(ds='2008-04-08', hr=11);
+load data local inpath '../../data/files/srcbucket20.txt' INTO TABLE source_table partition(ds='2008-04-08', hr=11);
-- Tests creating dynamic partitions with characters not in the whitelist (i.e. 9)
-- If the directory is not empty the hook will throw an error, instead the error should come from the metastore
Modified: hive/branches/maven/ql/src/test/queries/clientnegative/exim_00_unsupported_schema.q
URL: http://svn.apache.org/viewvc/hive/branches/maven/ql/src/test/queries/clientnegative/exim_00_unsupported_schema.q?rev=1531557&r1=1531556&r2=1531557&view=diff
==============================================================================
--- hive/branches/maven/ql/src/test/queries/clientnegative/exim_00_unsupported_schema.q (original)
+++ hive/branches/maven/ql/src/test/queries/clientnegative/exim_00_unsupported_schema.q Sat Oct 12 16:37:47 2013
@@ -4,9 +4,9 @@ set hive.test.mode.prefix=;
create table exim_department ( dep_id int comment "department id")
stored as textfile
tblproperties("creator"="krishna");
-load data local inpath "../data/files/test.dat" into table exim_department;
-dfs ${system:test.dfs.mkdir} ../build/ql/test/data/exports/exim_department/temp;
-dfs -rmr ../build/ql/test/data/exports/exim_department;
+load data local inpath "../../data/files/test.dat" into table exim_department;
+dfs ${system:test.dfs.mkdir} target/tmp/ql/test/data/exports/exim_department/temp;
+dfs -rmr target/tmp/ql/test/data/exports/exim_department;
export table exim_department to 'nosuchschema://nosuchauthority/ql/test/data/exports/exim_department';
drop table exim_department;
Modified: hive/branches/maven/ql/src/test/queries/clientnegative/exim_01_nonpart_over_loaded.q
URL: http://svn.apache.org/viewvc/hive/branches/maven/ql/src/test/queries/clientnegative/exim_01_nonpart_over_loaded.q?rev=1531557&r1=1531556&r2=1531557&view=diff
==============================================================================
--- hive/branches/maven/ql/src/test/queries/clientnegative/exim_01_nonpart_over_loaded.q (original)
+++ hive/branches/maven/ql/src/test/queries/clientnegative/exim_01_nonpart_over_loaded.q Sat Oct 12 16:37:47 2013
@@ -4,9 +4,9 @@ set hive.test.mode.prefix=;
create table exim_department ( dep_id int comment "department id")
stored as textfile
tblproperties("creator"="krishna");
-load data local inpath "../data/files/test.dat" into table exim_department;
-dfs ${system:test.dfs.mkdir} ../build/ql/test/data/exports/exim_department/temp;
-dfs -rmr ../build/ql/test/data/exports/exim_department;
+load data local inpath "../../data/files/test.dat" into table exim_department;
+dfs ${system:test.dfs.mkdir} target/tmp/ql/test/data/exports/exim_department/temp;
+dfs -rmr target/tmp/ql/test/data/exports/exim_department;
export table exim_department to 'ql/test/data/exports/exim_department';
drop table exim_department;
@@ -16,9 +16,9 @@ use importer;
create table exim_department ( dep_id int comment "department identifier")
stored as textfile
tblproperties("maker"="krishna");
-load data local inpath "../data/files/test.dat" into table exim_department;
+load data local inpath "../../data/files/test.dat" into table exim_department;
import from 'ql/test/data/exports/exim_department';
drop table exim_department;
-dfs -rmr ../build/ql/test/data/exports/exim_department;
+dfs -rmr target/tmp/ql/test/data/exports/exim_department;
drop database importer;
Modified: hive/branches/maven/ql/src/test/queries/clientnegative/exim_02_all_part_over_overlap.q
URL: http://svn.apache.org/viewvc/hive/branches/maven/ql/src/test/queries/clientnegative/exim_02_all_part_over_overlap.q?rev=1531557&r1=1531556&r2=1531557&view=diff
==============================================================================
--- hive/branches/maven/ql/src/test/queries/clientnegative/exim_02_all_part_over_overlap.q (original)
+++ hive/branches/maven/ql/src/test/queries/clientnegative/exim_02_all_part_over_overlap.q Sat Oct 12 16:37:47 2013
@@ -6,16 +6,16 @@ create table exim_employee ( emp_id int
partitioned by (emp_country string comment "two char iso code", emp_state string comment "free text")
stored as textfile
tblproperties("creator"="krishna");
-load data local inpath "../data/files/test.dat"
+load data local inpath "../../data/files/test.dat"
into table exim_employee partition (emp_country="in", emp_state="tn");
-load data local inpath "../data/files/test.dat"
+load data local inpath "../../data/files/test.dat"
into table exim_employee partition (emp_country="in", emp_state="ka");
-load data local inpath "../data/files/test.dat"
+load data local inpath "../../data/files/test.dat"
into table exim_employee partition (emp_country="us", emp_state="tn");
-load data local inpath "../data/files/test.dat"
+load data local inpath "../../data/files/test.dat"
into table exim_employee partition (emp_country="us", emp_state="ka");
-dfs ${system:test.dfs.mkdir} ../build/ql/test/data/exports/exim_employee/temp;
-dfs -rmr ../build/ql/test/data/exports/exim_employee;
+dfs ${system:test.dfs.mkdir} target/tmp/ql/test/data/exports/exim_employee/temp;
+dfs -rmr target/tmp/ql/test/data/exports/exim_employee;
export table exim_employee to 'ql/test/data/exports/exim_employee';
drop table exim_employee;
@@ -27,12 +27,12 @@ create table exim_employee ( emp_id int
partitioned by (emp_country string comment "iso code", emp_state string comment "free-form text")
stored as textfile
tblproperties("maker"="krishna");
-load data local inpath "../data/files/test.dat"
+load data local inpath "../../data/files/test.dat"
into table exim_employee partition (emp_country="us", emp_state="ka");
import from 'ql/test/data/exports/exim_employee';
describe extended exim_employee;
select * from exim_employee;
drop table exim_employee;
-dfs -rmr ../build/ql/test/data/exports/exim_employee;
+dfs -rmr target/tmp/ql/test/data/exports/exim_employee;
drop database importer;