Posted to commits@hive.apache.org by we...@apache.org on 2017/05/08 20:43:00 UTC
[10/51] [partial] hive git commit: Revert "HIVE-14671 : merge master into hive-14535 (Wei Zheng)"
http://git-wip-us.apache.org/repos/asf/hive/blob/ed64a74e/metastore/src/test/org/apache/hadoop/hive/metastore/DummyRawStoreForJdoConnection.java
----------------------------------------------------------------------
diff --git a/metastore/src/test/org/apache/hadoop/hive/metastore/DummyRawStoreForJdoConnection.java b/metastore/src/test/org/apache/hadoop/hive/metastore/DummyRawStoreForJdoConnection.java
index df05af1..d6460cd 100644
--- a/metastore/src/test/org/apache/hadoop/hive/metastore/DummyRawStoreForJdoConnection.java
+++ b/metastore/src/test/org/apache/hadoop/hive/metastore/DummyRawStoreForJdoConnection.java
@@ -31,7 +31,6 @@ import org.apache.hadoop.hive.conf.HiveConf;
import org.apache.hadoop.hive.metastore.TableType;
import org.apache.hadoop.hive.metastore.api.AggrStats;
import org.apache.hadoop.hive.metastore.api.ColumnStatistics;
-import org.apache.hadoop.hive.metastore.api.ColumnStatisticsObj;
import org.apache.hadoop.hive.metastore.api.CurrentNotificationEventId;
import org.apache.hadoop.hive.metastore.api.Database;
import org.apache.hadoop.hive.metastore.api.FileMetadataExprType;
@@ -893,13 +892,6 @@ public class DummyRawStoreForJdoConnection implements RawStore {
}
@Override
- public Map<String, ColumnStatisticsObj> getAggrColStatsForTablePartitions(String dbName,
- String tableName) throws MetaException, NoSuchObjectException {
- // TODO Auto-generated method stub
- return null;
- }
-
- @Override
public void createTableWrite(Table tbl, long writeId, char state, long heartbeat) {
}
http://git-wip-us.apache.org/repos/asf/hive/blob/ed64a74e/metastore/src/test/org/apache/hadoop/hive/metastore/TestHiveMetaStoreTimeout.java
----------------------------------------------------------------------
diff --git a/metastore/src/test/org/apache/hadoop/hive/metastore/TestHiveMetaStoreTimeout.java b/metastore/src/test/org/apache/hadoop/hive/metastore/TestHiveMetaStoreTimeout.java
index 2166c20..f8eed18 100644
--- a/metastore/src/test/org/apache/hadoop/hive/metastore/TestHiveMetaStoreTimeout.java
+++ b/metastore/src/test/org/apache/hadoop/hive/metastore/TestHiveMetaStoreTimeout.java
@@ -42,6 +42,7 @@ public class TestHiveMetaStoreTimeout {
public static void setUp() throws Exception {
HiveMetaStore.TEST_TIMEOUT_ENABLED = true;
hiveConf = new HiveConf(TestHiveMetaStoreTimeout.class);
+ hiveConf.setBoolean(HiveConf.ConfVars.HIVE_WAREHOUSE_SUBDIR_INHERIT_PERMS.varname, true);
hiveConf.set(HiveConf.ConfVars.METASTORE_EXPRESSION_PROXY_CLASS.varname,
MockPartitionExpressionForMetastore.class.getCanonicalName());
hiveConf.setTimeVar(HiveConf.ConfVars.METASTORE_CLIENT_SOCKET_TIMEOUT, 10 * 1000,
http://git-wip-us.apache.org/repos/asf/hive/blob/ed64a74e/metastore/src/test/org/apache/hadoop/hive/metastore/TestObjectStore.java
----------------------------------------------------------------------
diff --git a/metastore/src/test/org/apache/hadoop/hive/metastore/TestObjectStore.java b/metastore/src/test/org/apache/hadoop/hive/metastore/TestObjectStore.java
index d008c75..aaa03fb 100644
--- a/metastore/src/test/org/apache/hadoop/hive/metastore/TestObjectStore.java
+++ b/metastore/src/test/org/apache/hadoop/hive/metastore/TestObjectStore.java
@@ -33,7 +33,6 @@ import org.apache.hadoop.hive.common.metrics.metrics2.MetricsReporting;
import org.apache.hadoop.hive.common.metrics.MetricsTestUtils;
import org.apache.hadoop.hive.conf.HiveConf;
import org.apache.hadoop.hive.conf.HiveConf.ConfVars;
-import org.apache.hadoop.hive.metastore.api.CurrentNotificationEventId;
import org.apache.hadoop.hive.metastore.api.Database;
import org.apache.hadoop.hive.metastore.api.FieldSchema;
import org.apache.hadoop.hive.metastore.api.FileMetadataExprType;
@@ -42,9 +41,6 @@ import org.apache.hadoop.hive.metastore.api.Index;
import org.apache.hadoop.hive.metastore.api.InvalidInputException;
import org.apache.hadoop.hive.metastore.api.InvalidObjectException;
import org.apache.hadoop.hive.metastore.api.MetaException;
-import org.apache.hadoop.hive.metastore.api.NotificationEvent;
-import org.apache.hadoop.hive.metastore.api.NotificationEventRequest;
-import org.apache.hadoop.hive.metastore.api.NotificationEventResponse;
import org.apache.hadoop.hive.metastore.api.NoSuchObjectException;
import org.apache.hadoop.hive.metastore.api.Partition;
import org.apache.hadoop.hive.metastore.api.PrincipalType;
@@ -55,7 +51,6 @@ import org.apache.hadoop.hive.metastore.api.SerDeInfo;
import org.apache.hadoop.hive.metastore.api.StorageDescriptor;
import org.apache.hadoop.hive.metastore.api.Table;
import org.apache.hadoop.hive.metastore.api.hive_metastoreConstants;
-import org.apache.hadoop.hive.metastore.messaging.EventMessage;
import org.apache.hadoop.hive.metastore.model.MTableWrite;
import org.apache.hadoop.hive.ql.io.sarg.SearchArgument;
import org.apache.hadoop.hive.serde.serdeConstants;
@@ -67,12 +62,10 @@ import org.junit.Assert;
import org.junit.Before;
import org.junit.Test;
-import org.mockito.Mockito;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import com.google.common.base.Supplier;
-import javax.jdo.Query;
public class TestObjectStore {
private ObjectStore objectStore = null;
@@ -142,56 +135,6 @@ public class TestObjectStore {
}
/**
- * Test notification operations
- */
- @Test
- public void testNotificationOps() throws InterruptedException {
- final int NO_EVENT_ID = 0;
- final int FIRST_EVENT_ID = 1;
- final int SECOND_EVENT_ID = 2;
-
- NotificationEvent event =
- new NotificationEvent(0, 0, EventMessage.EventType.CREATE_DATABASE.toString(), "");
- NotificationEventResponse eventResponse;
- CurrentNotificationEventId eventId;
-
- // Verify that there is no notifications available yet
- eventId = objectStore.getCurrentNotificationEventId();
- Assert.assertEquals(NO_EVENT_ID, eventId.getEventId());
-
- // Verify that addNotificationEvent() updates the NotificationEvent with the new event ID
- objectStore.addNotificationEvent(event);
- Assert.assertEquals(FIRST_EVENT_ID, event.getEventId());
- objectStore.addNotificationEvent(event);
- Assert.assertEquals(SECOND_EVENT_ID, event.getEventId());
-
- // Verify that objectStore fetches the latest notification event ID
- eventId = objectStore.getCurrentNotificationEventId();
- Assert.assertEquals(SECOND_EVENT_ID, eventId.getEventId());
-
- // Verify that getNextNotification() returns all events
- eventResponse = objectStore.getNextNotification(new NotificationEventRequest());
- Assert.assertEquals(2, eventResponse.getEventsSize());
- Assert.assertEquals(FIRST_EVENT_ID, eventResponse.getEvents().get(0).getEventId());
- Assert.assertEquals(SECOND_EVENT_ID, eventResponse.getEvents().get(1).getEventId());
-
- // Verify that getNextNotification(last) returns events after a specified event
- eventResponse = objectStore.getNextNotification(new NotificationEventRequest(FIRST_EVENT_ID));
- Assert.assertEquals(1, eventResponse.getEventsSize());
- Assert.assertEquals(SECOND_EVENT_ID, eventResponse.getEvents().get(0).getEventId());
-
- // Verify that getNextNotification(last) returns zero events if there are no more notifications available
- eventResponse = objectStore.getNextNotification(new NotificationEventRequest(SECOND_EVENT_ID));
- Assert.assertEquals(0, eventResponse.getEventsSize());
-
- // Verify that cleanNotificationEvents() cleans up all old notifications
- Thread.sleep(1);
- objectStore.cleanNotificationEvents(1);
- eventResponse = objectStore.getNextNotification(new NotificationEventRequest());
- Assert.assertEquals(0, eventResponse.getEventsSize());
- }
-
- /**
* Test database operations
*/
@Test
@@ -582,15 +525,4 @@ public class TestObjectStore {
} catch (NoSuchObjectException e) {
}
}
-
- @Test
- public void testQueryCloseOnError() throws Exception {
- ObjectStore spy = Mockito.spy(objectStore);
- spy.getAllDatabases();
- spy.getAllFunctions();
- spy.getAllTables(DB1);
- spy.getPartitionCount();
- Mockito.verify(spy, Mockito.times(3))
- .rollbackAndCleanup(Mockito.anyBoolean(), Mockito.<Query>anyObject());
- }
}
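For reference, the removed testQueryCloseOnError leans on Mockito's spy-and-verify pattern: wrap the real object, drive it through its public methods, then assert how many times the cleanup hook fired. A minimal self-contained sketch of that pattern, with an illustrative Store class rather than Hive's ObjectStore:

    import org.mockito.Mockito;

    public class SpyVerifySketch {
      // Illustrative stand-in for a store whose queries must clean up after themselves.
      static class Store {
        void query()   { cleanup(); }
        void cleanup() { /* close query resources here */ }
      }

      public static void main(String[] args) {
        Store spy = Mockito.spy(new Store());
        spy.query();
        spy.query();
        // Passes only if cleanup() ran exactly twice during the real calls above.
        Mockito.verify(spy, Mockito.times(2)).cleanup();
      }
    }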
http://git-wip-us.apache.org/repos/asf/hive/blob/ed64a74e/metastore/src/test/org/apache/hadoop/hive/metastore/VerifyingObjectStore.java
----------------------------------------------------------------------
diff --git a/metastore/src/test/org/apache/hadoop/hive/metastore/VerifyingObjectStore.java b/metastore/src/test/org/apache/hadoop/hive/metastore/VerifyingObjectStore.java
index a8c7ac3..9acf9d7 100644
--- a/metastore/src/test/org/apache/hadoop/hive/metastore/VerifyingObjectStore.java
+++ b/metastore/src/test/org/apache/hadoop/hive/metastore/VerifyingObjectStore.java
@@ -41,7 +41,7 @@ import org.apache.hadoop.hive.metastore.api.NoSuchObjectException;
import org.apache.hadoop.hive.metastore.api.Partition;
import org.apache.thrift.TException;
-public class VerifyingObjectStore extends ObjectStore {
+class VerifyingObjectStore extends ObjectStore {
private static final Logger LOG = LoggerFactory.getLogger(VerifyingObjectStore.class);
public VerifyingObjectStore() {
http://git-wip-us.apache.org/repos/asf/hive/blob/ed64a74e/metastore/src/test/org/apache/hadoop/hive/metastore/cache/TestCachedStore.java
----------------------------------------------------------------------
diff --git a/metastore/src/test/org/apache/hadoop/hive/metastore/cache/TestCachedStore.java b/metastore/src/test/org/apache/hadoop/hive/metastore/cache/TestCachedStore.java
deleted file mode 100644
index 0ab20d6..0000000
--- a/metastore/src/test/org/apache/hadoop/hive/metastore/cache/TestCachedStore.java
+++ /dev/null
@@ -1,238 +0,0 @@
-/**
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements. See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership. The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License. You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-package org.apache.hadoop.hive.metastore.cache;
-
-import java.util.ArrayList;
-import java.util.Arrays;
-import java.util.HashMap;
-import java.util.List;
-import java.util.Map;
-
-import org.apache.hadoop.hive.conf.HiveConf;
-import org.apache.hadoop.hive.metastore.ObjectStore;
-import org.apache.hadoop.hive.metastore.TestObjectStore.MockPartitionExpressionProxy;
-import org.apache.hadoop.hive.metastore.api.Database;
-import org.apache.hadoop.hive.metastore.api.FieldSchema;
-import org.apache.hadoop.hive.metastore.api.Partition;
-import org.apache.hadoop.hive.metastore.api.StorageDescriptor;
-import org.apache.hadoop.hive.metastore.api.Table;
-import org.junit.Assert;
-import org.junit.Before;
-import org.junit.Test;
-
-public class TestCachedStore {
-
- private CachedStore cachedStore = new CachedStore();
-
- @Before
- public void setUp() throws Exception {
- HiveConf conf = new HiveConf();
- conf.setVar(HiveConf.ConfVars.METASTORE_EXPRESSION_PROXY_CLASS, MockPartitionExpressionProxy.class.getName());
-
- ObjectStore objectStore = new ObjectStore();
- objectStore.setConf(conf);
-
- cachedStore.setRawStore(objectStore);
-
- SharedCache.getDatabaseCache().clear();
- SharedCache.getTableCache().clear();
- SharedCache.getPartitionCache().clear();
- SharedCache.getSdCache().clear();
- SharedCache.getPartitionColStatsCache().clear();
- }
-
- @Test
- public void testSharedStoreDb() {
- Database db1 = new Database();
- Database db2 = new Database();
- Database db3 = new Database();
- Database newDb1 = new Database();
- newDb1.setName("db1");
-
- SharedCache.addDatabaseToCache("db1", db1);
- SharedCache.addDatabaseToCache("db2", db2);
- SharedCache.addDatabaseToCache("db3", db3);
-
- Assert.assertEquals(SharedCache.getCachedDatabaseCount(), 3);
-
- SharedCache.alterDatabaseInCache("db1", newDb1);
-
- Assert.assertEquals(SharedCache.getCachedDatabaseCount(), 3);
-
- SharedCache.removeDatabaseFromCache("db2");
-
- Assert.assertEquals(SharedCache.getCachedDatabaseCount(), 2);
-
- List<String> dbs = SharedCache.listCachedDatabases();
- Assert.assertEquals(dbs.size(), 2);
- Assert.assertTrue(dbs.contains("db1"));
- Assert.assertTrue(dbs.contains("db3"));
- }
-
- @Test
- public void testSharedStoreTable() {
- Table tbl1 = new Table();
- StorageDescriptor sd1 = new StorageDescriptor();
- List<FieldSchema> cols1 = new ArrayList<FieldSchema>();
- cols1.add(new FieldSchema("col1", "int", ""));
- Map<String, String> params1 = new HashMap<String, String>();
- params1.put("key", "value");
- sd1.setCols(cols1);
- sd1.setParameters(params1);
- sd1.setLocation("loc1");
- tbl1.setSd(sd1);
- tbl1.setPartitionKeys(new ArrayList<FieldSchema>());
-
- Table tbl2 = new Table();
- StorageDescriptor sd2 = new StorageDescriptor();
- List<FieldSchema> cols2 = new ArrayList<FieldSchema>();
- cols2.add(new FieldSchema("col1", "int", ""));
- Map<String, String> params2 = new HashMap<String, String>();
- params2.put("key", "value");
- sd2.setCols(cols2);
- sd2.setParameters(params2);
- sd2.setLocation("loc2");
- tbl2.setSd(sd2);
- tbl2.setPartitionKeys(new ArrayList<FieldSchema>());
-
- Table tbl3 = new Table();
- StorageDescriptor sd3 = new StorageDescriptor();
- List<FieldSchema> cols3 = new ArrayList<FieldSchema>();
- cols3.add(new FieldSchema("col3", "int", ""));
- Map<String, String> params3 = new HashMap<String, String>();
- params3.put("key2", "value2");
- sd3.setCols(cols3);
- sd3.setParameters(params3);
- sd3.setLocation("loc3");
- tbl3.setSd(sd3);
- tbl3.setPartitionKeys(new ArrayList<FieldSchema>());
-
- Table newTbl1 = new Table();
- newTbl1.setDbName("db2");
- newTbl1.setTableName("tbl1");
- StorageDescriptor newSd1 = new StorageDescriptor();
- List<FieldSchema> newCols1 = new ArrayList<FieldSchema>();
- newCols1.add(new FieldSchema("newcol1", "int", ""));
- Map<String, String> newParams1 = new HashMap<String, String>();
- newParams1.put("key", "value");
- newSd1.setCols(newCols1);
- newSd1.setParameters(params1);
- newSd1.setLocation("loc1");
- newTbl1.setSd(newSd1);
- newTbl1.setPartitionKeys(new ArrayList<FieldSchema>());
-
- SharedCache.addTableToCache("db1", "tbl1", tbl1);
- SharedCache.addTableToCache("db1", "tbl2", tbl2);
- SharedCache.addTableToCache("db1", "tbl3", tbl3);
- SharedCache.addTableToCache("db2", "tbl1", tbl1);
-
- Assert.assertEquals(SharedCache.getCachedTableCount(), 4);
- Assert.assertEquals(SharedCache.getSdCache().size(), 2);
-
- Table t = SharedCache.getTableFromCache("db1", "tbl1");
- Assert.assertEquals(t.getSd().getLocation(), "loc1");
-
- SharedCache.removeTableFromCache("db1", "tbl1");
- Assert.assertEquals(SharedCache.getCachedTableCount(), 3);
- Assert.assertEquals(SharedCache.getSdCache().size(), 2);
-
- SharedCache.alterTableInCache("db2", "tbl1", newTbl1);
- Assert.assertEquals(SharedCache.getCachedTableCount(), 3);
- Assert.assertEquals(SharedCache.getSdCache().size(), 3);
-
- SharedCache.removeTableFromCache("db1", "tbl2");
- Assert.assertEquals(SharedCache.getCachedTableCount(), 2);
- Assert.assertEquals(SharedCache.getSdCache().size(), 2);
- }
-
- @Test
- public void testSharedStorePartition() {
- Partition part1 = new Partition();
- StorageDescriptor sd1 = new StorageDescriptor();
- List<FieldSchema> cols1 = new ArrayList<FieldSchema>();
- cols1.add(new FieldSchema("col1", "int", ""));
- Map<String, String> params1 = new HashMap<String, String>();
- params1.put("key", "value");
- sd1.setCols(cols1);
- sd1.setParameters(params1);
- sd1.setLocation("loc1");
- part1.setSd(sd1);
- part1.setValues(Arrays.asList("201701"));
-
- Partition part2 = new Partition();
- StorageDescriptor sd2 = new StorageDescriptor();
- List<FieldSchema> cols2 = new ArrayList<FieldSchema>();
- cols2.add(new FieldSchema("col1", "int", ""));
- Map<String, String> params2 = new HashMap<String, String>();
- params2.put("key", "value");
- sd2.setCols(cols2);
- sd2.setParameters(params2);
- sd2.setLocation("loc2");
- part2.setSd(sd2);
- part2.setValues(Arrays.asList("201702"));
-
- Partition part3 = new Partition();
- StorageDescriptor sd3 = new StorageDescriptor();
- List<FieldSchema> cols3 = new ArrayList<FieldSchema>();
- cols3.add(new FieldSchema("col3", "int", ""));
- Map<String, String> params3 = new HashMap<String, String>();
- params3.put("key2", "value2");
- sd3.setCols(cols3);
- sd3.setParameters(params3);
- sd3.setLocation("loc3");
- part3.setSd(sd3);
- part3.setValues(Arrays.asList("201703"));
-
- Partition newPart1 = new Partition();
- newPart1.setDbName("db1");
- newPart1.setTableName("tbl1");
- StorageDescriptor newSd1 = new StorageDescriptor();
- List<FieldSchema> newCols1 = new ArrayList<FieldSchema>();
- newCols1.add(new FieldSchema("newcol1", "int", ""));
- Map<String, String> newParams1 = new HashMap<String, String>();
- newParams1.put("key", "value");
- newSd1.setCols(newCols1);
- newSd1.setParameters(params1);
- newSd1.setLocation("loc1");
- newPart1.setSd(newSd1);
- newPart1.setValues(Arrays.asList("201701"));
-
- SharedCache.addPartitionToCache("db1", "tbl1", part1);
- SharedCache.addPartitionToCache("db1", "tbl1", part2);
- SharedCache.addPartitionToCache("db1", "tbl1", part3);
- SharedCache.addPartitionToCache("db1", "tbl2", part1);
-
- Assert.assertEquals(SharedCache.getCachedPartitionCount(), 4);
- Assert.assertEquals(SharedCache.getSdCache().size(), 2);
-
- Partition t = SharedCache.getPartitionFromCache("db1", "tbl1", Arrays.asList("201701"));
- Assert.assertEquals(t.getSd().getLocation(), "loc1");
-
- SharedCache.removePartitionFromCache("db1", "tbl2", Arrays.asList("201701"));
- Assert.assertEquals(SharedCache.getCachedPartitionCount(), 3);
- Assert.assertEquals(SharedCache.getSdCache().size(), 2);
-
- SharedCache.alterPartitionInCache("db1", "tbl1", Arrays.asList("201701"), newPart1);
- Assert.assertEquals(SharedCache.getCachedPartitionCount(), 3);
- Assert.assertEquals(SharedCache.getSdCache().size(), 3);
-
- SharedCache.removePartitionFromCache("db1", "tbl1", Arrays.asList("201702"));
- Assert.assertEquals(SharedCache.getCachedPartitionCount(), 2);
- Assert.assertEquals(SharedCache.getSdCache().size(), 2);
- }
-}
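The deleted SharedCache assertions encode one idea worth spelling out: storage descriptors are interned, so the SD cache counts distinct values, not references -- four cached tables built from two distinct descriptors leave getSdCache().size() at 2. A rough sketch of that interning behavior, assuming a reference-counted map (names here are illustrative, not Hive's SharedCache API):

    import java.util.HashMap;
    import java.util.Map;

    public class InterningCacheSketch {
      // Each distinct descriptor value is stored once with a usage count.
      private final Map<String, Integer> refCounts = new HashMap<>();

      public void add(String descriptor) {
        refCounts.merge(descriptor, 1, Integer::sum);
      }

      public void remove(String descriptor) {
        // Drop the entry only when its last user goes away.
        refCounts.computeIfPresent(descriptor, (k, n) -> n > 1 ? n - 1 : null);
      }

      public int size() { return refCounts.size(); }

      public static void main(String[] args) {
        InterningCacheSketch sds = new InterningCacheSketch();
        sds.add("loc1"); sds.add("loc2"); sds.add("loc1"); // two distinct values
        System.out.println(sds.size()); // 2 -- mirrors getSdCache().size() == 2
      }
    }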
http://git-wip-us.apache.org/repos/asf/hive/blob/ed64a74e/metastore/src/test/org/apache/hadoop/hive/metastore/messaging/json/JSONMessageDeserializerTest.java
----------------------------------------------------------------------
diff --git a/metastore/src/test/org/apache/hadoop/hive/metastore/messaging/json/JSONMessageDeserializerTest.java b/metastore/src/test/org/apache/hadoop/hive/metastore/messaging/json/JSONMessageDeserializerTest.java
deleted file mode 100644
index c278338..0000000
--- a/metastore/src/test/org/apache/hadoop/hive/metastore/messaging/json/JSONMessageDeserializerTest.java
+++ /dev/null
@@ -1,106 +0,0 @@
-/**
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements. See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership. The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License. You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-package org.apache.hadoop.hive.metastore.messaging.json;
-
-import org.codehaus.jackson.annotate.JsonProperty;
-import org.json.JSONException;
-import org.junit.Test;
-import org.skyscreamer.jsonassert.JSONAssert;
-
-import java.io.IOException;
-import java.util.HashMap;
-import java.util.Map;
-
-import static org.junit.Assert.*;
-
-public class JSONMessageDeserializerTest {
-
- public static class MyClass {
- @JsonProperty
- private int a;
- @JsonProperty
- private Map<String, String> map;
- private long l;
- private String shouldNotSerialize = "shouldNotSerialize";
-
- //for jackson to instantiate
- MyClass() {
- }
-
- MyClass(int a, Map<String, String> map, long l) {
- this.a = a;
- this.map = map;
- this.l = l;
- }
-
- @JsonProperty
- long getL() {
- return l;
- }
-
- @Override
- public boolean equals(Object o) {
- if (this == o)
- return true;
- if (o == null || getClass() != o.getClass())
- return false;
-
- MyClass myClass = (MyClass) o;
-
- if (a != myClass.a)
- return false;
- if (l != myClass.l)
- return false;
- if (!map.equals(myClass.map))
- return false;
- return shouldNotSerialize.equals(myClass.shouldNotSerialize);
- }
-
- @Override
- public int hashCode() {
- int result = a;
- result = 31 * result + map.hashCode();
- result = 31 * result + (int) (l ^ (l >>> 32));
- result = 31 * result + shouldNotSerialize.hashCode();
- return result;
- }
- }
-
- @Test
- public void shouldNotSerializePropertiesNotAnnotated() throws IOException, JSONException {
- MyClass obj = new MyClass(Integer.MAX_VALUE, new HashMap<String, String>() {{
- put("a", "a");
- put("b", "b");
- }}, Long.MAX_VALUE);
- String json = JSONMessageDeserializer.mapper.writeValueAsString(obj);
- JSONAssert.assertEquals(
- "{\"a\":2147483647,\"map\":{\"b\":\"b\",\"a\":\"a\"},\"l\":9223372036854775807}", json,
- false);
- }
-
- @Test
- public void shouldDeserializeJsonStringToObject() throws IOException {
- String json = "{\"a\":47,\"map\":{\"a\":\"a\",\"b\":\"a value for b\"},\"l\":98}";
- MyClass actual = JSONMessageDeserializer.mapper.readValue(json, MyClass.class);
- MyClass expected = new MyClass(47, new HashMap<String, String>() {{
- put("a", "a");
- put("b", "a value for b");
- }}, 98L);
- assertEquals(expected, actual);
- }
-}
\ No newline at end of file
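The deleted test documents Jackson 1's annotation-driven visibility, which JSONMessageDeserializer.mapper relies on: private members are serialized only when marked @JsonProperty (on the field or a getter), so shouldNotSerialize never reaches the wire. A standalone sketch of that rule (Payload is an illustrative class, not from Hive):

    import org.codehaus.jackson.annotate.JsonProperty;
    import org.codehaus.jackson.map.ObjectMapper;

    public class JsonPropertyDemo {
      static class Payload {
        @JsonProperty
        private int a = 42;          // serialized: private but annotated
        private String hidden = "x"; // skipped: private and unannotated
      }

      public static void main(String[] args) throws Exception {
        // Prints {"a":42} -- only the annotated field appears.
        System.out.println(new ObjectMapper().writeValueAsString(new Payload()));
      }
    }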
http://git-wip-us.apache.org/repos/asf/hive/blob/ed64a74e/metastore/src/test/org/apache/hadoop/hive/metastore/txn/TestValidCompactorTxnList.java
----------------------------------------------------------------------
diff --git a/metastore/src/test/org/apache/hadoop/hive/metastore/txn/TestValidCompactorTxnList.java b/metastore/src/test/org/apache/hadoop/hive/metastore/txn/TestValidCompactorTxnList.java
index ec653ed..79ccc6b 100644
--- a/metastore/src/test/org/apache/hadoop/hive/metastore/txn/TestValidCompactorTxnList.java
+++ b/metastore/src/test/org/apache/hadoop/hive/metastore/txn/TestValidCompactorTxnList.java
@@ -22,64 +22,52 @@ import org.apache.hadoop.hive.common.ValidTxnList;
import org.junit.Assert;
import org.junit.Test;
-import java.util.BitSet;
-
public class TestValidCompactorTxnList {
@Test
public void minTxnHigh() {
- BitSet bitSet = new BitSet(2);
- bitSet.set(0, bitSet.length());
- ValidTxnList txns = new ValidCompactorTxnList(new long[]{3, 4}, bitSet, 2);
+ ValidTxnList txns = new ValidCompactorTxnList(new long[]{3, 4}, 2);
ValidTxnList.RangeResponse rsp = txns.isTxnRangeValid(7, 9);
Assert.assertEquals(ValidTxnList.RangeResponse.NONE, rsp);
}
@Test
public void maxTxnLow() {
- BitSet bitSet = new BitSet(2);
- bitSet.set(0, bitSet.length());
- ValidTxnList txns = new ValidCompactorTxnList(new long[]{13, 14}, bitSet, 12);
+ ValidTxnList txns = new ValidCompactorTxnList(new long[]{13, 14}, 12);
ValidTxnList.RangeResponse rsp = txns.isTxnRangeValid(7, 9);
Assert.assertEquals(ValidTxnList.RangeResponse.ALL, rsp);
}
@Test
public void minTxnHighNoExceptions() {
- ValidTxnList txns = new ValidCompactorTxnList(new long[0], new BitSet(), 5);
+ ValidTxnList txns = new ValidCompactorTxnList(new long[0], 5);
ValidTxnList.RangeResponse rsp = txns.isTxnRangeValid(7, 9);
Assert.assertEquals(ValidTxnList.RangeResponse.NONE, rsp);
}
@Test
public void maxTxnLowNoExceptions() {
- ValidTxnList txns = new ValidCompactorTxnList(new long[0], new BitSet(), 15);
+ ValidTxnList txns = new ValidCompactorTxnList(new long[0], 15);
ValidTxnList.RangeResponse rsp = txns.isTxnRangeValid(7, 9);
Assert.assertEquals(ValidTxnList.RangeResponse.ALL, rsp);
}
@Test
public void exceptionsAllBelow() {
- BitSet bitSet = new BitSet(2);
- bitSet.set(0, bitSet.length());
- ValidTxnList txns = new ValidCompactorTxnList(new long[]{3, 6}, bitSet, 3);
+ ValidTxnList txns = new ValidCompactorTxnList(new long[]{3, 6}, 3);
ValidTxnList.RangeResponse rsp = txns.isTxnRangeValid(7, 9);
Assert.assertEquals(ValidTxnList.RangeResponse.NONE, rsp);
}
@Test
public void exceptionsInMidst() {
- BitSet bitSet = new BitSet(1);
- bitSet.set(0, bitSet.length());
- ValidTxnList txns = new ValidCompactorTxnList(new long[]{8}, bitSet, 7);
+ ValidTxnList txns = new ValidCompactorTxnList(new long[]{8}, 7);
ValidTxnList.RangeResponse rsp = txns.isTxnRangeValid(7, 9);
Assert.assertEquals(ValidTxnList.RangeResponse.NONE, rsp);
}
@Test
public void exceptionsAbveHighWaterMark() {
- BitSet bitSet = new BitSet(4);
- bitSet.set(0, bitSet.length());
- ValidTxnList txns = new ValidCompactorTxnList(new long[]{8, 11, 17, 29}, bitSet, 15);
+ ValidTxnList txns = new ValidCompactorTxnList(new long[]{8, 11, 17, 29}, 15);
Assert.assertArrayEquals("", new long[]{8, 11}, txns.getInvalidTransactions());
ValidTxnList.RangeResponse rsp = txns.isTxnRangeValid(7, 9);
Assert.assertEquals(ValidTxnList.RangeResponse.ALL, rsp);
@@ -89,19 +77,17 @@ public class TestValidCompactorTxnList {
@Test
public void writeToString() {
- BitSet bitSet = new BitSet(4);
- bitSet.set(0, bitSet.length());
- ValidTxnList txns = new ValidCompactorTxnList(new long[]{7, 9, 10, Long.MAX_VALUE}, bitSet, 8);
- Assert.assertEquals("8:" + Long.MAX_VALUE + ":7:", txns.writeToString());
+ ValidTxnList txns = new ValidCompactorTxnList(new long[]{9, 7, 10, Long.MAX_VALUE}, 8);
+ Assert.assertEquals("8:" + Long.MAX_VALUE + ":7", txns.writeToString());
txns = new ValidCompactorTxnList();
- Assert.assertEquals(Long.toString(Long.MAX_VALUE) + ":" + Long.MAX_VALUE + "::", txns.writeToString());
- txns = new ValidCompactorTxnList(new long[0], new BitSet(), 23);
- Assert.assertEquals("23:" + Long.MAX_VALUE + "::", txns.writeToString());
+ Assert.assertEquals(Long.toString(Long.MAX_VALUE) + ":" + Long.MAX_VALUE + ":", txns.writeToString());
+ txns = new ValidCompactorTxnList(new long[0], 23);
+ Assert.assertEquals("23:" + Long.MAX_VALUE + ":", txns.writeToString());
}
@Test
public void readFromString() {
- ValidCompactorTxnList txns = new ValidCompactorTxnList("37:" + Long.MAX_VALUE + "::7,9,10");
+ ValidCompactorTxnList txns = new ValidCompactorTxnList("37:" + Long.MAX_VALUE + ":7:9:10");
Assert.assertEquals(37L, txns.getHighWatermark());
Assert.assertEquals(Long.MAX_VALUE, txns.getMinOpenTxn());
Assert.assertArrayEquals(new long[]{7L, 9L, 10L}, txns.getInvalidTransactions());
@@ -110,27 +96,4 @@ public class TestValidCompactorTxnList {
Assert.assertEquals(Long.MAX_VALUE, txns.getMinOpenTxn());
Assert.assertEquals(0, txns.getInvalidTransactions().length);
}
-
- @Test
- public void testAbortedTxn() throws Exception {
- ValidCompactorTxnList txnList = new ValidCompactorTxnList("5:4::1,2,3");
- Assert.assertEquals(5L, txnList.getHighWatermark());
- Assert.assertEquals(4, txnList.getMinOpenTxn());
- Assert.assertArrayEquals(new long[]{1L, 2L, 3L}, txnList.getInvalidTransactions());
- }
-
- @Test
- public void testAbortedRange() throws Exception {
- ValidCompactorTxnList txnList = new ValidCompactorTxnList("11:4::5,6,7,8");
- ValidTxnList.RangeResponse rsp = txnList.isTxnRangeAborted(1L, 3L);
- Assert.assertEquals(ValidTxnList.RangeResponse.NONE, rsp);
- rsp = txnList.isTxnRangeAborted(9L, 10L);
- Assert.assertEquals(ValidTxnList.RangeResponse.NONE, rsp);
- rsp = txnList.isTxnRangeAborted(6L, 7L);
- Assert.assertEquals(ValidTxnList.RangeResponse.ALL, rsp);
- rsp = txnList.isTxnRangeAborted(4L, 6L);
- Assert.assertEquals(ValidTxnList.RangeResponse.SOME, rsp);
- rsp = txnList.isTxnRangeAborted(6L, 13L);
- Assert.assertEquals(ValidTxnList.RangeResponse.SOME, rsp);
- }
}
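The string round-trips above are the visible surface of this revert: the newer format carries separate open and aborted sections ("hwm:minOpen:openTxns:abortedTxns", comma-separated lists), while the restored older format is a single colon-separated exception list ("hwm:minOpen:txn:txn:..."). A rough standalone parser for the two layouts, inferred only from the literals in this test (not Hive's actual readFromString):

    import java.util.Arrays;

    public class TxnListFormats {
      // Newer layout being reverted away, e.g. "37:MAX::7,9,10";
      // the fourth field is the comma-separated aborted-txn section.
      static long[] parseNew(String s) {
        String[] parts = s.split(":", -1);   // [hwm, minOpen, open, aborted]
        String aborted = parts.length > 3 ? parts[3] : "";
        if (aborted.isEmpty()) return new long[0];
        return Arrays.stream(aborted.split(",")).mapToLong(Long::parseLong).toArray();
      }

      // Restored older layout, e.g. "37:MAX:7:9:10";
      // everything after minOpen is an exception txn.
      static long[] parseOld(String s) {
        String[] parts = s.split(":", -1);
        return Arrays.stream(parts, 2, parts.length)
            .filter(p -> !p.isEmpty())
            .mapToLong(Long::parseLong).toArray();
      }

      public static void main(String[] args) {
        System.out.println(Arrays.toString(parseNew("37:" + Long.MAX_VALUE + "::7,9,10")));
        System.out.println(Arrays.toString(parseOld("37:" + Long.MAX_VALUE + ":7:9:10")));
        // Both print [7, 9, 10].
      }
    }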
http://git-wip-us.apache.org/repos/asf/hive/blob/ed64a74e/packaging/pom.xml
----------------------------------------------------------------------
diff --git a/packaging/pom.xml b/packaging/pom.xml
index beddd1c..a128036 100644
--- a/packaging/pom.xml
+++ b/packaging/pom.xml
@@ -19,7 +19,7 @@
<parent>
<groupId>org.apache.hive</groupId>
<artifactId>hive</artifactId>
- <version>3.0.0-SNAPSHOT</version>
+ <version>2.2.0-SNAPSHOT</version>
<relativePath>../pom.xml</relativePath>
</parent>
http://git-wip-us.apache.org/repos/asf/hive/blob/ed64a74e/packaging/src/main/assembly/src.xml
----------------------------------------------------------------------
diff --git a/packaging/src/main/assembly/src.xml b/packaging/src/main/assembly/src.xml
index 8626922..0529e90 100644
--- a/packaging/src/main/assembly/src.xml
+++ b/packaging/src/main/assembly/src.xml
@@ -67,6 +67,7 @@
<include>contrib/**/*</include>
<include>data/**/*</include>
<include>dev-support/**/*</include>
+ <include>docs/**/*</include>
<include>druid-handler/**/*</include>
<include>jdbc-handler/**/*</include>
<include>find-bugs/**/*</include>
@@ -96,7 +97,6 @@
<include>spark-client/**/*</include>
<include>storage-api/**/*</include>
<include>testutils/**/*</include>
- <include>vector-code-gen/**/*</include>
</includes>
<outputDirectory>/</outputDirectory>
</fileSet>
http://git-wip-us.apache.org/repos/asf/hive/blob/ed64a74e/pom.xml
----------------------------------------------------------------------
diff --git a/pom.xml b/pom.xml
index e0aae27..3ea3c77 100644
--- a/pom.xml
+++ b/pom.xml
@@ -17,11 +17,11 @@
<parent>
<groupId>org.apache</groupId>
<artifactId>apache</artifactId>
- <version>18</version>
+ <version>14</version>
</parent>
<groupId>org.apache.hive</groupId>
<artifactId>hive</artifactId>
- <version>3.0.0-SNAPSHOT</version>
+ <version>2.2.0-SNAPSHOT</version>
<packaging>pom</packaging>
<name>Hive</name>
@@ -61,12 +61,10 @@
</modules>
<properties>
- <hive.version.shortname>3.0.0</hive.version.shortname>
+ <hive.version.shortname>2.2.0</hive.version.shortname>
<!-- Build Properties -->
<project.build.sourceEncoding>UTF-8</project.build.sourceEncoding>
- <maven.compiler.source>1.8</maven.compiler.source>
- <maven.compiler.target>1.8</maven.compiler.target>
<maven.compiler.useIncrementalCompilation>false</maven.compiler.useIncrementalCompilation>
<maven.repo.local>${settings.localRepository}</maven.repo.local>
<hive.path.to.root>.</hive.path.to.root>
@@ -92,11 +90,11 @@
<!-- Plugin and Plugin Dependency Versions -->
<ant.contrib.version>1.0b3</ant.contrib.version>
<datanucleus.maven.plugin.version>3.3.0-release</datanucleus.maven.plugin.version>
- <maven.test.jvm.args>-Xmx1024m</maven.test.jvm.args>
+ <maven.test.jvm.args>-Xmx1024m -XX:MaxPermSize=256M</maven.test.jvm.args>
<maven.antrun.plugin.version>1.7</maven.antrun.plugin.version>
<maven.assembly.plugin.version>2.3</maven.assembly.plugin.version>
<maven.checkstyle.plugin.version>2.12.1</maven.checkstyle.plugin.version>
- <maven.compiler.plugin.version>3.6.1</maven.compiler.plugin.version>
+ <maven.compiler.plugin.version>3.1</maven.compiler.plugin.version>
<maven.enforcer.plugin.version>1.3.1</maven.enforcer.plugin.version>
<maven.install.plugin.version>2.4</maven.install.plugin.version>
<maven.jar.plugin.version>2.4</maven.jar.plugin.version>
@@ -115,10 +113,10 @@
<antlr.version>3.5.2</antlr.version>
<apache-directory-server.version>1.5.6</apache-directory-server.version>
<apache-directory-clientapi.version>0.1</apache-directory-clientapi.version>
- <avatica.version>1.9.0</avatica.version>
+ <avatica.version>1.8.0</avatica.version>
<avro.version>1.7.7</avro.version>
<bonecp.version>0.8.0.RELEASE</bonecp.version>
- <calcite.version>1.12.0</calcite.version>
+ <calcite.version>1.10.0</calcite.version>
<datanucleus-api-jdo.version>4.2.4</datanucleus-api-jdo.version>
<datanucleus-core.version>4.1.17</datanucleus-core.version>
<datanucleus-rdbms.version>4.1.19</datanucleus-rdbms.version>
@@ -141,16 +139,16 @@
<guava.version>14.0.1</guava.version>
<groovy.version>2.4.4</groovy.version>
<h2database.version>1.3.166</h2database.version>
- <hadoop.version>2.8.0</hadoop.version>
+ <hadoop.version>2.7.2</hadoop.version>
<hadoop.bin.path>${basedir}/${hive.path.to.root}/testutils/hadoop</hadoop.bin.path>
<hamcrest.version>1.1</hamcrest.version>
<hbase.version>1.1.1</hbase.version>
<!-- required for logging test to avoid including hbase which pulls disruptor transitively -->
<disruptor.version>3.3.0</disruptor.version>
- <hikaricp.version>2.6.1</hikaricp.version>
+ <hikaricp.version>2.5.1</hikaricp.version>
<!-- httpcomponents are not always in version sync -->
- <httpcomponents.client.version>4.5.2</httpcomponents.client.version>
- <httpcomponents.core.version>4.4.4</httpcomponents.core.version>
+ <httpcomponents.client.version>4.4</httpcomponents.client.version>
+ <httpcomponents.core.version>4.4</httpcomponents.core.version>
<ivy.version>2.4.0</ivy.version>
<jackson.version>1.9.13</jackson.version>
<!-- jackson 1 and 2 lines can coexist without issue, as they have different artifactIds -->
@@ -159,10 +157,10 @@
<jamon.plugin.version>2.3.4</jamon.plugin.version>
<jamon-runtime.version>2.3.1</jamon-runtime.version>
<javaewah.version>0.3.2</javaewah.version>
- <javax-servlet.version>3.1.0</javax-servlet.version>
+ <javax-servlet.version>3.0.0.v201112011016</javax-servlet.version>
<javolution.version>5.5.1</javolution.version>
<jdo-api.version>3.0.1</jdo-api.version>
- <jetty.version>9.3.8.v20160314</jetty.version>
+ <jetty.version>7.6.0.v20120127</jetty.version>
<jersey.version>1.14</jersey.version>
<!-- Glassfish jersey is included for Spark client test only -->
<glassfish.jersey.version>2.22.2</glassfish.jersey.version>
@@ -177,7 +175,7 @@
<libthrift.version>0.9.3</libthrift.version>
<log4j2.version>2.6.2</log4j2.version>
<opencsv.version>2.3</opencsv.version>
- <orc.version>1.3.3</orc.version>
+ <orc.version>1.3.1</orc.version>
<mockito-all.version>1.9.5</mockito-all.version>
<mina.version>2.0.0-M5</mina.version>
<netty.version>4.0.29.Final</netty.version>
@@ -187,9 +185,9 @@
<stax.version>1.0.1</stax.version>
<slf4j.version>1.7.10</slf4j.version>
<ST4.version>4.0.4</ST4.version>
- <storage-api.version>3.0.0-SNAPSHOT</storage-api.version>
+ <storage-api.version>2.3.0-SNAPSHOT</storage-api.version>
<tez.version>0.8.4</tez.version>
- <slider.version>0.92.0-incubating</slider.version>
+ <slider.version>0.90.2-incubating</slider.version>
<super-csv.version>2.2.0</super-csv.version>
<spark.version>2.0.0</spark.version>
<scala.binary.version>2.11</scala.binary.version>
@@ -610,31 +608,11 @@
<version>${jackson.version}</version>
</dependency>
<dependency>
- <groupId>org.eclipse.jetty</groupId>
- <artifactId>jetty-rewrite</artifactId>
+ <groupId>org.eclipse.jetty.aggregate</groupId>
+ <artifactId>jetty-all-server</artifactId>
<version>${jetty.version}</version>
</dependency>
<dependency>
- <groupId>org.eclipse.jetty</groupId>
- <artifactId>jetty-server</artifactId>
- <version>${jetty.version}</version>
- </dependency>
- <dependency>
- <groupId>org.eclipse.jetty</groupId>
- <artifactId>jetty-servlet</artifactId>
- <version>${jetty.version}</version>
- </dependency>
- <dependency>
- <groupId>org.eclipse.jetty</groupId>
- <artifactId>jetty-webapp</artifactId>
- <version>${jetty.version}</version>
- </dependency>
- <dependency>
- <groupId>javax.servlet</groupId>
- <artifactId>javax.servlet-api</artifactId>
- <version>${javax-servlet.version}</version>
- </dependency>
- <dependency>
<groupId>org.datanucleus</groupId>
<artifactId>datanucleus-api-jdo</artifactId>
<version>${datanucleus-api-jdo.version}</version>
@@ -684,24 +662,13 @@
<artifactId>commons-logging</artifactId>
</exclusion>
</exclusions>
- </dependency>
- <dependency>
- <groupId>org.apache.hadoop</groupId>
- <artifactId>hadoop-auth</artifactId>
- <version>${hadoop.version}</version>
- <exclusions>
- <exclusion>
- <groupId>commmons-logging</groupId>
- <artifactId>commons-logging</artifactId>
- </exclusion>
- </exclusions>
- </dependency>
+ </dependency>
<dependency>
<groupId>org.apache.hadoop</groupId>
<artifactId>hadoop-common</artifactId>
<version>${hadoop.version}</version>
<exclusions>
- <exclusion>
+ <exclusion>
<groupId>org.slf4j</groupId>
<artifactId>slf4j-log4j12</artifactId>
</exclusion>
@@ -741,21 +708,6 @@
</dependency>
<dependency>
<groupId>org.apache.hadoop</groupId>
- <artifactId>hadoop-mapreduce-client-common</artifactId>
- <version>${hadoop.version}</version>
- <exclusions>
- <exclusion>
- <groupId>org.slf4j</groupId>
- <artifactId>slf4j-log4j12</artifactId>
- </exclusion>
- <exclusion>
- <groupId>commmons-logging</groupId>
- <artifactId>commons-logging</artifactId>
- </exclusion>
- </exclusions>
- </dependency>
- <dependency>
- <groupId>org.apache.hadoop</groupId>
<artifactId>hadoop-mapreduce-client-core</artifactId>
<version>${hadoop.version}</version>
<exclusions>
@@ -768,43 +720,13 @@
<artifactId>commons-logging</artifactId>
</exclusion>
</exclusions>
- </dependency>
+ </dependency>
<dependency>
<groupId>org.apache.hadoop</groupId>
<artifactId>hadoop-minikdc</artifactId>
<version>${hadoop.version}</version>
</dependency>
<dependency>
- <groupId>org.apache.hadoop</groupId>
- <artifactId>hadoop-yarn-api</artifactId>
- <version>${hadoop.version}</version>
- </dependency>
- <dependency>
- <groupId>org.apache.hadoop</groupId>
- <artifactId>hadoop-yarn-client</artifactId>
- <version>${hadoop.version}</version>
- </dependency>
- <dependency>
- <groupId>org.apache.hadoop</groupId>
- <artifactId>hadoop-yarn-common</artifactId>
- <version>${hadoop.version}</version>
- </dependency>
- <dependency>
- <groupId>org.apache.hadoop</groupId>
- <artifactId>hadoop-yarn-registry</artifactId>
- <version>${hadoop.version}</version>
- </dependency>
- <dependency>
- <groupId>org.apache.hadoop</groupId>
- <artifactId>hadoop-yarn-server-web-common</artifactId>
- <version>${hadoop.version}</version>
- </dependency>
- <dependency>
- <groupId>org.apache.hadoop</groupId>
- <artifactId>hadoop-yarn-server-web-proxy</artifactId>
- <version>${hadoop.version}</version>
- </dependency>
- <dependency>
<groupId>org.apache.hbase</groupId>
<artifactId>hbase-common</artifactId>
<version>${hbase.version}</version>
@@ -881,6 +803,10 @@
<groupId>org.apache.maven.plugins</groupId>
<artifactId>maven-compiler-plugin</artifactId>
<version>${maven.compiler.plugin.version}</version>
+ <configuration>
+ <source>1.7</source>
+ <target>1.7</target>
+ </configuration>
</plugin>
<plugin>
<groupId>org.apache.maven.plugins</groupId>
@@ -1180,12 +1106,12 @@
<version>0.10</version>
<configuration>
<excludes>
- <exclude>binary-package-licenses/**</exclude>
<exclude>data/**</exclude>
<exclude>conf/**</exclude>
<exclude>checkstyle/**</exclude>
<exclude>bin/**</exclude>
<exclude>itests/**</exclude>
+ <exclude>docs/**</exclude>
<exclude>**/README.md</exclude>
<exclude>**/*.iml</exclude>
<exclude>**/*.txt</exclude>
http://git-wip-us.apache.org/repos/asf/hive/blob/ed64a74e/ql/pom.xml
----------------------------------------------------------------------
diff --git a/ql/pom.xml b/ql/pom.xml
index 40a216b..7db0ede 100644
--- a/ql/pom.xml
+++ b/ql/pom.xml
@@ -19,7 +19,7 @@
<parent>
<groupId>org.apache.hive</groupId>
<artifactId>hive</artifactId>
- <version>3.0.0-SNAPSHOT</version>
+ <version>2.2.0-SNAPSHOT</version>
<relativePath>../pom.xml</relativePath>
</parent>
@@ -229,13 +229,6 @@
</dependency>
<dependency>
<groupId>org.apache.hadoop</groupId>
- <artifactId>hadoop-yarn-registry</artifactId>
- <version>${hadoop.version}</version>
- <optional>true</optional>
- <scope>test</scope>
- </dependency>
- <dependency>
- <groupId>org.apache.hadoop</groupId>
<artifactId>hadoop-mapreduce-client-core</artifactId>
<version>${hadoop.version}</version>
<optional>true</optional>
@@ -386,22 +379,12 @@
<groupId>com.fasterxml.jackson.core</groupId>
<artifactId>jackson-core</artifactId>
</exclusion>
- <exclusion>
- <groupId>org.apache.calcite.avatica</groupId>
- <artifactId>avatica-core</artifactId>
- </exclusion>
</exclusions>
</dependency>
<dependency>
<groupId>org.apache.calcite</groupId>
<artifactId>calcite-druid</artifactId>
<version>${calcite.version}</version>
- <exclusions>
- <exclusion>
- <groupId>org.apache.calcite.avatica</groupId>
- <artifactId>avatica-core</artifactId>
- </exclusion>
- </exclusions>
</dependency>
<dependency>
<groupId>org.apache.calcite.avatica</groupId>
@@ -729,12 +712,6 @@
<version>${glassfish.jersey.version}</version>
<scope>test</scope>
</dependency>
- <dependency>
- <groupId>org.hamcrest</groupId>
- <artifactId>hamcrest-all</artifactId>
- <version>${hamcrest.version}</version>
- <scope>test</scope>
- </dependency>
</dependencies>
<profiles>
http://git-wip-us.apache.org/repos/asf/hive/blob/ed64a74e/ql/src/gen/vectorization/UDAFTemplates/VectorUDAFAvg.txt
----------------------------------------------------------------------
diff --git a/ql/src/gen/vectorization/UDAFTemplates/VectorUDAFAvg.txt b/ql/src/gen/vectorization/UDAFTemplates/VectorUDAFAvg.txt
index 46cbb5b..4393c3b 100644
--- a/ql/src/gen/vectorization/UDAFTemplates/VectorUDAFAvg.txt
+++ b/ql/src/gen/vectorization/UDAFTemplates/VectorUDAFAvg.txt
@@ -471,7 +471,7 @@ public class <ClassName> extends VectorAggregateExpression {
}
@Override
- public long getAggregationBufferFixedSize() {
+ public int getAggregationBufferFixedSize() {
JavaDataModel model = JavaDataModel.get();
return JavaDataModel.alignUp(
model.object() +
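This long-to-int revert of getAggregationBufferFixedSize repeats verbatim in every UDAF template below. The value those methods compute is a JavaDataModel-style size estimate, rounded up to the model's alignment; a minimal sketch of that rounding (8-byte alignment assumed; this is an illustration, not Hive's JavaDataModel):

    public class AlignUpSketch {
      // Round size up to the next multiple of align (align must be a power of two).
      static int alignUp(int size, int align) {
        return (size + align - 1) & ~(align - 1);
      }

      public static void main(String[] args) {
        // A 16-byte object header plus a 4-byte field pads out to 24 bytes.
        System.out.println(alignUp(16 + 4, 8)); // 24
      }
    }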
http://git-wip-us.apache.org/repos/asf/hive/blob/ed64a74e/ql/src/gen/vectorization/UDAFTemplates/VectorUDAFMinMax.txt
----------------------------------------------------------------------
diff --git a/ql/src/gen/vectorization/UDAFTemplates/VectorUDAFMinMax.txt b/ql/src/gen/vectorization/UDAFTemplates/VectorUDAFMinMax.txt
index 2261e1b..7468c2f 100644
--- a/ql/src/gen/vectorization/UDAFTemplates/VectorUDAFMinMax.txt
+++ b/ql/src/gen/vectorization/UDAFTemplates/VectorUDAFMinMax.txt
@@ -442,7 +442,7 @@ public class <ClassName> extends VectorAggregateExpression {
}
@Override
- public long getAggregationBufferFixedSize() {
+ public int getAggregationBufferFixedSize() {
JavaDataModel model = JavaDataModel.get();
return JavaDataModel.alignUp(
model.object() +
http://git-wip-us.apache.org/repos/asf/hive/blob/ed64a74e/ql/src/gen/vectorization/UDAFTemplates/VectorUDAFMinMaxDecimal.txt
----------------------------------------------------------------------
diff --git a/ql/src/gen/vectorization/UDAFTemplates/VectorUDAFMinMaxDecimal.txt b/ql/src/gen/vectorization/UDAFTemplates/VectorUDAFMinMaxDecimal.txt
index 58d2d22..57b7ea5 100644
--- a/ql/src/gen/vectorization/UDAFTemplates/VectorUDAFMinMaxDecimal.txt
+++ b/ql/src/gen/vectorization/UDAFTemplates/VectorUDAFMinMaxDecimal.txt
@@ -458,7 +458,7 @@ public class <ClassName> extends VectorAggregateExpression {
}
@Override
- public long getAggregationBufferFixedSize() {
+ public int getAggregationBufferFixedSize() {
JavaDataModel model = JavaDataModel.get();
return JavaDataModel.alignUp(
model.object() +
http://git-wip-us.apache.org/repos/asf/hive/blob/ed64a74e/ql/src/gen/vectorization/UDAFTemplates/VectorUDAFMinMaxIntervalDayTime.txt
----------------------------------------------------------------------
diff --git a/ql/src/gen/vectorization/UDAFTemplates/VectorUDAFMinMaxIntervalDayTime.txt b/ql/src/gen/vectorization/UDAFTemplates/VectorUDAFMinMaxIntervalDayTime.txt
index 515692e..749e97e 100644
--- a/ql/src/gen/vectorization/UDAFTemplates/VectorUDAFMinMaxIntervalDayTime.txt
+++ b/ql/src/gen/vectorization/UDAFTemplates/VectorUDAFMinMaxIntervalDayTime.txt
@@ -441,7 +441,7 @@ public class <ClassName> extends VectorAggregateExpression {
}
@Override
- public long getAggregationBufferFixedSize() {
+ public int getAggregationBufferFixedSize() {
JavaDataModel model = JavaDataModel.get();
return JavaDataModel.alignUp(
model.object() +
http://git-wip-us.apache.org/repos/asf/hive/blob/ed64a74e/ql/src/gen/vectorization/UDAFTemplates/VectorUDAFMinMaxString.txt
----------------------------------------------------------------------
diff --git a/ql/src/gen/vectorization/UDAFTemplates/VectorUDAFMinMaxString.txt b/ql/src/gen/vectorization/UDAFTemplates/VectorUDAFMinMaxString.txt
index c210e4c..9dfc147 100644
--- a/ql/src/gen/vectorization/UDAFTemplates/VectorUDAFMinMaxString.txt
+++ b/ql/src/gen/vectorization/UDAFTemplates/VectorUDAFMinMaxString.txt
@@ -81,7 +81,7 @@ public class <ClassName> extends VectorAggregateExpression {
@Override
public int getVariableSize() {
JavaDataModel model = JavaDataModel.get();
- return (int) model.lengthForByteArrayOfSize(bytes.length);
+ return model.lengthForByteArrayOfSize(bytes.length);
}
@Override
@@ -388,7 +388,7 @@ public class <ClassName> extends VectorAggregateExpression {
}
@Override
- public long getAggregationBufferFixedSize() {
+ public int getAggregationBufferFixedSize() {
JavaDataModel model = JavaDataModel.get();
return JavaDataModel.alignUp(
model.object() +
http://git-wip-us.apache.org/repos/asf/hive/blob/ed64a74e/ql/src/gen/vectorization/UDAFTemplates/VectorUDAFMinMaxTimestamp.txt
----------------------------------------------------------------------
diff --git a/ql/src/gen/vectorization/UDAFTemplates/VectorUDAFMinMaxTimestamp.txt b/ql/src/gen/vectorization/UDAFTemplates/VectorUDAFMinMaxTimestamp.txt
index 074aefd..32ecb34 100644
--- a/ql/src/gen/vectorization/UDAFTemplates/VectorUDAFMinMaxTimestamp.txt
+++ b/ql/src/gen/vectorization/UDAFTemplates/VectorUDAFMinMaxTimestamp.txt
@@ -443,7 +443,7 @@ public class <ClassName> extends VectorAggregateExpression {
}
@Override
- public long getAggregationBufferFixedSize() {
+ public int getAggregationBufferFixedSize() {
JavaDataModel model = JavaDataModel.get();
return JavaDataModel.alignUp(
model.object() +
http://git-wip-us.apache.org/repos/asf/hive/blob/ed64a74e/ql/src/gen/vectorization/UDAFTemplates/VectorUDAFSum.txt
----------------------------------------------------------------------
diff --git a/ql/src/gen/vectorization/UDAFTemplates/VectorUDAFSum.txt b/ql/src/gen/vectorization/UDAFTemplates/VectorUDAFSum.txt
index a89ae0a..bd0f14d 100644
--- a/ql/src/gen/vectorization/UDAFTemplates/VectorUDAFSum.txt
+++ b/ql/src/gen/vectorization/UDAFTemplates/VectorUDAFSum.txt
@@ -433,7 +433,7 @@ public class <ClassName> extends VectorAggregateExpression {
}
@Override
- public long getAggregationBufferFixedSize() {
+ public int getAggregationBufferFixedSize() {
JavaDataModel model = JavaDataModel.get();
return JavaDataModel.alignUp(
model.object(),
http://git-wip-us.apache.org/repos/asf/hive/blob/ed64a74e/ql/src/gen/vectorization/UDAFTemplates/VectorUDAFVar.txt
----------------------------------------------------------------------
diff --git a/ql/src/gen/vectorization/UDAFTemplates/VectorUDAFVar.txt b/ql/src/gen/vectorization/UDAFTemplates/VectorUDAFVar.txt
index 1e3516b..dc9d4b1 100644
--- a/ql/src/gen/vectorization/UDAFTemplates/VectorUDAFVar.txt
+++ b/ql/src/gen/vectorization/UDAFTemplates/VectorUDAFVar.txt
@@ -513,7 +513,7 @@ public class <ClassName> extends VectorAggregateExpression {
}
@Override
- public long getAggregationBufferFixedSize() {
+ public int getAggregationBufferFixedSize() {
JavaDataModel model = JavaDataModel.get();
return JavaDataModel.alignUp(
model.object() +
http://git-wip-us.apache.org/repos/asf/hive/blob/ed64a74e/ql/src/gen/vectorization/UDAFTemplates/VectorUDAFVarDecimal.txt
----------------------------------------------------------------------
diff --git a/ql/src/gen/vectorization/UDAFTemplates/VectorUDAFVarDecimal.txt b/ql/src/gen/vectorization/UDAFTemplates/VectorUDAFVarDecimal.txt
index b3ec7e9..01062a9 100644
--- a/ql/src/gen/vectorization/UDAFTemplates/VectorUDAFVarDecimal.txt
+++ b/ql/src/gen/vectorization/UDAFTemplates/VectorUDAFVarDecimal.txt
@@ -467,7 +467,7 @@ public class <ClassName> extends VectorAggregateExpression {
}
@Override
- public long getAggregationBufferFixedSize() {
+ public int getAggregationBufferFixedSize() {
JavaDataModel model = JavaDataModel.get();
return JavaDataModel.alignUp(
model.object() +
@@ -488,4 +488,4 @@ public class <ClassName> extends VectorAggregateExpression {
public void setInputExpression(VectorExpression inputExpression) {
this.inputExpression = inputExpression;
}
-}
+}
\ No newline at end of file
http://git-wip-us.apache.org/repos/asf/hive/blob/ed64a74e/ql/src/java/org/apache/hadoop/hive/ql/Context.java
----------------------------------------------------------------------
diff --git a/ql/src/java/org/apache/hadoop/hive/ql/Context.java b/ql/src/java/org/apache/hadoop/hive/ql/Context.java
index da1d3a5..758c536 100644
--- a/ql/src/java/org/apache/hadoop/hive/ql/Context.java
+++ b/ql/src/java/org/apache/hadoop/hive/ql/Context.java
@@ -356,7 +356,9 @@ public class Context {
if (mkdir) {
try {
- if (!FileUtils.mkdir(fs, dir, conf)) {
+ boolean inheritPerms = HiveConf.getBoolVar(conf,
+ HiveConf.ConfVars.HIVE_WAREHOUSE_SUBDIR_INHERIT_PERMS);
+ if (!FileUtils.mkdir(fs, dir, inheritPerms, conf)) {
throw new IllegalStateException("Cannot create staging directory '" + dir.toString() + "'");
}
@@ -949,13 +951,6 @@ public class Context {
public ExplainConfiguration getExplainConfig() {
return explainConfig;
}
- private boolean isExplainPlan = false;
- public boolean isExplainPlan() {
- return isExplainPlan;
- }
- public void setExplainPlan(boolean t) {
- this.isExplainPlan = t;
- }
public void setExplainConfig(ExplainConfiguration explainConfig) {
this.explainConfig = explainConfig;
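The Context.java hunk above (like the TestHiveMetaStoreTimeout change earlier) restores the behavior where HIVE_WAREHOUSE_SUBDIR_INHERIT_PERMS gates whether a freshly created staging directory copies its parent's permissions. A standalone sketch of that config-gated pattern using plain java.nio instead of Hive's FileUtils (the flag semantics are the only piece taken from the diff; POSIX filesystems only):

    import java.io.IOException;
    import java.nio.file.Files;
    import java.nio.file.Path;
    import java.nio.file.Paths;
    import java.nio.file.attribute.PosixFilePermission;
    import java.util.Set;

    public class InheritPermsMkdir {
      // Create dir and, when inheritPerms is set, copy the parent's permissions.
      static void mkdir(Path dir, boolean inheritPerms) throws IOException {
        Files.createDirectories(dir);
        if (inheritPerms) {
          Set<PosixFilePermission> parentPerms =
              Files.getPosixFilePermissions(dir.getParent());
          Files.setPosixFilePermissions(dir, parentPerms);
        }
      }

      public static void main(String[] args) throws IOException {
        mkdir(Paths.get("/tmp/hive-staging-demo"), true);
      }
    }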