Posted to commits@hive.apache.org by ke...@apache.org on 2012/07/25 08:19:21 UTC
svn commit: r1365460 - in /hive/trunk:
common/src/java/org/apache/hadoop/hive/common/CompressionUtils.java
common/src/java/org/apache/hadoop/hive/common/FileUtils.java
ql/src/java/org/apache/hadoop/hive/ql/exec/ExecDriver.java
Author: kevinwilfong
Date: Wed Jul 25 06:19:21 2012
New Revision: 1365460
URL: http://svn.apache.org/viewvc?rev=1365460&view=rev
Log:
HIVE-3295. HIVE-3128 introduced a bug causing dynamic partitioning to fail. (kevinwilfong reviewed by njain, ashutoshc)
Added:
hive/trunk/common/src/java/org/apache/hadoop/hive/common/CompressionUtils.java
Modified:
hive/trunk/common/src/java/org/apache/hadoop/hive/common/FileUtils.java
hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/exec/ExecDriver.java
Added: hive/trunk/common/src/java/org/apache/hadoop/hive/common/CompressionUtils.java
URL: http://svn.apache.org/viewvc/hive/trunk/common/src/java/org/apache/hadoop/hive/common/CompressionUtils.java?rev=1365460&view=auto
==============================================================================
--- hive/trunk/common/src/java/org/apache/hadoop/hive/common/CompressionUtils.java (added)
+++ hive/trunk/common/src/java/org/apache/hadoop/hive/common/CompressionUtils.java Wed Jul 25 06:19:21 2012
@@ -0,0 +1,73 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements. See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership. The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License. You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.hadoop.hive.common;
+
+import java.io.BufferedOutputStream;
+import java.io.File;
+import java.io.FileInputStream;
+import java.io.FileOutputStream;
+import java.io.IOException;
+
+import org.apache.commons.compress.archivers.tar.TarArchiveEntry;
+import org.apache.commons.compress.archivers.tar.TarArchiveOutputStream;
+import org.apache.commons.compress.compressors.gzip.GzipCompressorOutputStream;
+import org.apache.commons.compress.utils.IOUtils;
+
+/**
+ * This class contains methods used for compression; it should not be
+ * accessed from code that runs inside Hadoop.
+ */
+public class CompressionUtils {
+
+  /**
+   * Archive all the files listed in inputFiles, resolved against parentDir, into outputFile.
+   *
+   * @param parentDir directory containing the input files; the archive is also created here
+   * @param inputFiles names of the files to archive, relative to parentDir
+   * @param outputFile name of the gzipped tar archive to create, relative to parentDir
+   * @throws IOException
+   */
+  public static void tar(String parentDir, String[] inputFiles, String outputFile)
+      throws IOException {
+
+    FileOutputStream out = null;
+    try {
+      out = new FileOutputStream(new File(parentDir, outputFile));
+      TarArchiveOutputStream tOut = new TarArchiveOutputStream(
+          new GzipCompressorOutputStream(new BufferedOutputStream(out)));
+
+      for (int i = 0; i < inputFiles.length; i++) {
+        File f = new File(parentDir, inputFiles[i]);
+        TarArchiveEntry tarEntry = new TarArchiveEntry(f, f.getName());
+        tOut.setLongFileMode(TarArchiveOutputStream.LONGFILE_GNU);
+        tOut.putArchiveEntry(tarEntry);
+        FileInputStream input = new FileInputStream(f);
+        try {
+          IOUtils.copy(input, tOut); // copies with an 8K buffer and leaves both streams open
+        } finally {
+          input.close();
+        }
+        tOut.closeArchiveEntry();
+      }
+      tOut.close(); // close() also finishes the archive
+    } finally {
+      // TarArchiveOutputStream does not reliably close the underlying file on error,
+      // so close the FileOutputStream explicitly.
+      org.apache.hadoop.io.IOUtils.closeStream(out);
+    }
+  }
+}
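
For readers who want to exercise the new helper outside of Hive, the following is a minimal, self-contained sketch of how CompressionUtils.tar might be invoked. The class name CompressionUtilsExample, the temporary file names, and the archive name files.tar.gz are illustrative only and are not part of this commit; it assumes the commons-compress jar is on the classpath and Java 7+ for java.nio.file.

    import java.io.File;
    import java.io.IOException;
    import java.nio.file.Files;

    import org.apache.hadoop.hive.common.CompressionUtils;

    public class CompressionUtilsExample {
      public static void main(String[] args) throws IOException {
        // Scratch directory with two small files to archive.
        File parentDir = Files.createTempDirectory("tar-example").toFile();
        Files.write(new File(parentDir, "a.txt").toPath(), "hello".getBytes("UTF-8"));
        Files.write(new File(parentDir, "b.txt").toPath(), "world".getBytes("UTF-8"));

        // Archive both files into files.tar.gz inside the same directory.
        CompressionUtils.tar(parentDir.getAbsolutePath(),
            new String[] {"a.txt", "b.txt"}, "files.tar.gz");

        File archive = new File(parentDir, "files.tar.gz");
        System.out.println("Wrote " + archive + " (" + archive.length() + " bytes)");
      }
    }
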
Modified: hive/trunk/common/src/java/org/apache/hadoop/hive/common/FileUtils.java
URL: http://svn.apache.org/viewvc/hive/trunk/common/src/java/org/apache/hadoop/hive/common/FileUtils.java?rev=1365460&r1=1365459&r2=1365460&view=diff
==============================================================================
--- hive/trunk/common/src/java/org/apache/hadoop/hive/common/FileUtils.java (original)
+++ hive/trunk/common/src/java/org/apache/hadoop/hive/common/FileUtils.java Wed Jul 25 06:19:21 2012
@@ -18,19 +18,11 @@
package org.apache.hadoop.hive.common;
-import java.io.BufferedOutputStream;
-import java.io.File;
-import java.io.FileInputStream;
-import java.io.FileOutputStream;
import java.io.IOException;
import java.net.URI;
import java.util.BitSet;
import java.util.List;
-import org.apache.commons.compress.archivers.tar.TarArchiveEntry;
-import org.apache.commons.compress.archivers.tar.TarArchiveOutputStream;
-import org.apache.commons.compress.compressors.gzip.GzipCompressorOutputStream;
-import org.apache.commons.compress.utils.IOUtils;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.FileStatus;
import org.apache.hadoop.fs.FileSystem;
@@ -245,40 +237,4 @@ public final class FileUtils {
results.add(fileStatus);
}
}
-
-  /**
-   * Archive all the files in the inputFiles into outputFile
-   *
-   * @param inputFiles
-   * @param outputFile
-   * @throws IOException
-   */
-  public static void tar(String parentDir, String[] inputFiles, String outputFile)
-      throws IOException {
-
-    FileOutputStream out = null;
-    try {
-      out = new FileOutputStream(new File(parentDir, outputFile));
-      TarArchiveOutputStream tOut = new TarArchiveOutputStream(
-          new GzipCompressorOutputStream(new BufferedOutputStream(out)));
-
-      for (int i = 0; i < inputFiles.length; i++) {
-        File f = new File(parentDir, inputFiles[i]);
-        TarArchiveEntry tarEntry = new TarArchiveEntry(f, f.getName());
-        tOut.setLongFileMode(TarArchiveOutputStream.LONGFILE_GNU);
-        tOut.putArchiveEntry(tarEntry);
-        FileInputStream input = new FileInputStream(f);
-        try {
-          IOUtils.copy(input, tOut); // copy with 8K buffer, not close
-        } finally {
-          input.close();
-        }
-        tOut.closeArchiveEntry();
-      }
-      tOut.close(); // finishes inside
-    } finally {
-      // TarArchiveOutputStream seemed not to close files properly in error situation
-      org.apache.hadoop.io.IOUtils.closeStream(out);
-    }
-  }
}
Modified: hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/exec/ExecDriver.java
URL: http://svn.apache.org/viewvc/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/exec/ExecDriver.java?rev=1365460&r1=1365459&r2=1365460&view=diff
==============================================================================
--- hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/exec/ExecDriver.java (original)
+++ hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/exec/ExecDriver.java Wed Jul 25 06:19:21 2012
@@ -46,7 +46,7 @@ import org.apache.hadoop.filecache.Distr
import org.apache.hadoop.fs.FileStatus;
import org.apache.hadoop.fs.FileSystem;
import org.apache.hadoop.fs.Path;
-import org.apache.hadoop.hive.common.FileUtils;
+import org.apache.hadoop.hive.common.CompressionUtils;
import org.apache.hadoop.hive.common.LogUtils;
import org.apache.hadoop.hive.conf.HiveConf;
import org.apache.hadoop.hive.conf.HiveConf.ConfVars;
@@ -387,7 +387,7 @@ public class ExecDriver extends Task<Map
String archiveFileName = Utilities.generateTarFileName(stageId);
localwork.setStageID(stageId);
- FileUtils.tar(parentDir, fileNames,archiveFileName);
+ CompressionUtils.tar(parentDir, fileNames,archiveFileName);
Path archivePath = new Path(archiveFileURI);
LOG.info("Archive "+ hashtableFiles.length+" hash table files to " + archiveFileURI);