You are viewing a plain text version of this content. The canonical link for it is here.
Posted to commits@hive.apache.org by sz...@apache.org on 2015/01/29 00:40:05 UTC
svn commit: r1655500 - in /hive/trunk:
itests/hive-unit/src/test/java/org/apache/hadoop/hive/ql/security/FolderPermissionBase.java
ql/src/java/org/apache/hadoop/hive/ql/exec/DDLTask.java
Author: szehon
Date: Wed Jan 28 23:40:05 2015
New Revision: 1655500
URL: http://svn.apache.org/r1655500
Log:
HIVE-9474 : truncate table changes permissions on the target (Aihua Xu via Szehon)
Modified:
hive/trunk/itests/hive-unit/src/test/java/org/apache/hadoop/hive/ql/security/FolderPermissionBase.java
hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/exec/DDLTask.java
Modified: hive/trunk/itests/hive-unit/src/test/java/org/apache/hadoop/hive/ql/security/FolderPermissionBase.java
URL: http://svn.apache.org/viewvc/hive/trunk/itests/hive-unit/src/test/java/org/apache/hadoop/hive/ql/security/FolderPermissionBase.java?rev=1655500&r1=1655499&r2=1655500&view=diff
==============================================================================
--- hive/trunk/itests/hive-unit/src/test/java/org/apache/hadoop/hive/ql/security/FolderPermissionBase.java (original)
+++ hive/trunk/itests/hive-unit/src/test/java/org/apache/hadoop/hive/ql/security/FolderPermissionBase.java Wed Jan 28 23:40:05 2015
@@ -22,7 +22,7 @@ import java.net.URI;
import java.util.ArrayList;
import java.util.List;
-import junit.framework.Assert;
+import org.junit.Assert;
import org.apache.hadoop.fs.FileStatus;
import org.apache.hadoop.fs.FileSystem;
@@ -645,6 +645,40 @@ public abstract class FolderPermissionBa
}
}
+ /**
+ * Tests that the permissions on the table do not change after truncation
+ * @throws Exception
+ */
+ @Test
+ public void testTruncateTable() throws Exception {
+ String tableName = "truncatetable";
+ String partition = warehouseDir + "/" + tableName + "/part1=1";
+
+ CommandProcessorResponse ret = driver.run("CREATE TABLE " + tableName + " (key STRING, value STRING) PARTITIONED BY (part1 INT)");
+ Assert.assertEquals(0, ret.getResponseCode());
+
+ setPermission(warehouseDir + "/" + tableName);
+
+ ret = driver.run("insert into table " + tableName + " partition(part1='1') select key,value from mysrc where part1='1' and part2='1'");
+ Assert.assertEquals(0, ret.getResponseCode());
+
+ assertExistence(warehouseDir + "/" + tableName);
+
+ verifyPermission(warehouseDir + "/" + tableName);
+ verifyPermission(partition);
+
+ ret = driver.run("TRUNCATE TABLE " + tableName);
+ Assert.assertEquals(0, ret.getResponseCode());
+
+ ret = driver.run("insert into table " + tableName + " partition(part1='1') select key,value from mysrc where part1='1' and part2='1'");
+ Assert.assertEquals(0, ret.getResponseCode());
+
+ verifyPermission(warehouseDir + "/" + tableName);
+
+ assertExistence(partition);
+ verifyPermission(partition);
+ }
+
private void verifySinglePartition(String tableLoc, int index) throws Exception {
verifyPermission(tableLoc + "/part1=1", index);
verifyPermission(tableLoc + "/part1=2", index);
Modified: hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/exec/DDLTask.java
URL: http://svn.apache.org/viewvc/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/exec/DDLTask.java?rev=1655500&r1=1655499&r2=1655500&view=diff
==============================================================================
--- hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/exec/DDLTask.java (original)
+++ hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/exec/DDLTask.java Wed Jan 28 23:40:05 2015
@@ -169,6 +169,9 @@ import org.apache.hadoop.hive.serde2.typ
import org.apache.hadoop.hive.serde2.typeinfo.TypeInfo;
import org.apache.hadoop.hive.serde2.typeinfo.TypeInfoFactory;
import org.apache.hadoop.hive.serde2.typeinfo.TypeInfoUtils;
+import org.apache.hadoop.hive.shims.HadoopShims;
+import org.apache.hadoop.hive.shims.ShimLoader;
+import org.apache.hadoop.hive.shims.HadoopShims.HdfsFileStatus;
import org.apache.hadoop.io.IOUtils;
import org.apache.hadoop.tools.HadoopArchives;
import org.apache.hadoop.util.ReflectionUtils;
@@ -4228,10 +4231,18 @@ public class DDLTask extends Task<DDLWor
try {
// this is not transactional
+ HadoopShims shim = ShimLoader.getHadoopShims();
for (Path location : getLocations(db, table, partSpec)) {
FileSystem fs = location.getFileSystem(conf);
+
+ HdfsFileStatus fullFileStatus = shim.getFullFileStatus(conf, fs, location);
fs.delete(location, true);
fs.mkdirs(location);
+ try {
+ shim.setFullFileStatus(conf, fullFileStatus, fs, location);
+ } catch (Exception e) {
+ LOG.warn("Error setting permissions of " + location, e);
+ }
}
} catch (Exception e) {
throw new HiveException(e, ErrorMsg.GENERIC_ERROR);