Posted to commits@phoenix.apache.org by el...@apache.org on 2017/09/12 23:07:31 UTC
[1/4] phoenix git commit: PHOENIX-4191 Categorize uncategorized integration tests
Repository: phoenix
Updated Branches:
refs/heads/4.x-HBase-0.98 ae6077609 -> 7f38f7e70
refs/heads/4.x-HBase-1.1 acb9e13af -> ca8bd4a90
refs/heads/4.x-HBase-1.2 8f2b2be26 -> b1751c4de
refs/heads/master b53de2041 -> d9ac3f109
PHOENIX-4191 Categorize uncategorized integration tests
Uncategorized tests result in Maven not running them.
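
For readers unfamiliar with the mechanism behind that one-line rationale: JUnit 4 categories are plain marker types referenced from @Category annotations, and the Maven Failsafe plugin can be restricted to a category via its <groups> parameter; a test class carrying no category is then silently skipped. Below is a minimal sketch, assuming a Failsafe configuration keyed on the NeedsOwnMiniClusterTest category used in this commit (the actual Phoenix pom settings are not quoted here, and ExampleToolIT is a hypothetical class for illustration):

import org.junit.Test;
import org.junit.experimental.categories.Category;

// A JUnit 4 category is just a marker type; this mirrors the assumed shape
// of org.apache.phoenix.end2end.NeedsOwnMiniClusterTest.
public interface NeedsOwnMiniClusterTest {
}

// Hypothetical test class. With Failsafe configured as
// <groups>org.apache.phoenix.end2end.NeedsOwnMiniClusterTest</groups>,
// removing the @Category line below would silently exclude the class from
// the build -- the failure mode this commit fixes.
@Category(NeedsOwnMiniClusterTest.class)
public class ExampleToolIT {
    @Test
    public void testRoundTrip() {
        // test body elided
    }
}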
Project: http://git-wip-us.apache.org/repos/asf/phoenix/repo
Commit: http://git-wip-us.apache.org/repos/asf/phoenix/commit/d9ac3f10
Tree: http://git-wip-us.apache.org/repos/asf/phoenix/tree/d9ac3f10
Diff: http://git-wip-us.apache.org/repos/asf/phoenix/diff/d9ac3f10
Branch: refs/heads/master
Commit: d9ac3f109b86d70a3ec8f5e82175f03343be10d0
Parents: b53de20
Author: Josh Elser <el...@apache.org>
Authored: Tue Sep 12 18:26:36 2017 -0400
Committer: Josh Elser <el...@apache.org>
Committed: Tue Sep 12 18:34:32 2017 -0400
----------------------------------------------------------------------
.../wal/ReadWriteKeyValuesWithCodecIT.java | 184 ------------------
.../phoenix/end2end/IndexScrutinyToolIT.java | 2 +
.../end2end/SystemTablePermissionsIT.java | 2 +
.../wal/ReadWriteKeyValuesWithCodecTest.java | 186 +++++++++++++++++++
.../phoenix/hive/BaseHivePhoenixStoreIT.java | 3 +
.../apache/phoenix/hive/HivePhoenixStoreIT.java | 3 +
6 files changed, 196 insertions(+), 184 deletions(-)
----------------------------------------------------------------------
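
A note on the rename visible in the diffstat above: ReadWriteKeyValuesWithCodecIT moves from src/it to src/test and gains a Test suffix. Under Maven's default naming conventions (assumed to apply in this build), Surefire runs *Test classes in the test phase while Failsafe runs *IT classes in the integration-test phase, so this codec round-trip check is reclassified as a plain unit test; as its own javadoc notes, it never starts a cluster. A sketch of the convention, with the include patterns being the plugin defaults rather than anything quoted from the Phoenix poms:

// Default maven-surefire-plugin includes (unit tests, "test" phase):
//   **/Test*.java, **/*Test.java, **/*Tests.java, **/*TestCase.java
// Default maven-failsafe-plugin includes (integration tests, "integration-test" phase):
//   **/IT*.java, **/*IT.java, **/*ITCase.java

public class ReadWriteKeyValuesWithCodecTest { }   // picked up by Surefire
// public class ReadWriteKeyValuesWithCodecIT { }  // would be picked up by Failsafe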
http://git-wip-us.apache.org/repos/asf/phoenix/blob/d9ac3f10/phoenix-core/src/it/java/org/apache/hadoop/hbase/regionserver/wal/ReadWriteKeyValuesWithCodecIT.java
----------------------------------------------------------------------
diff --git a/phoenix-core/src/it/java/org/apache/hadoop/hbase/regionserver/wal/ReadWriteKeyValuesWithCodecIT.java b/phoenix-core/src/it/java/org/apache/hadoop/hbase/regionserver/wal/ReadWriteKeyValuesWithCodecIT.java
deleted file mode 100644
index 39eb871..0000000
--- a/phoenix-core/src/it/java/org/apache/hadoop/hbase/regionserver/wal/ReadWriteKeyValuesWithCodecIT.java
+++ /dev/null
@@ -1,184 +0,0 @@
-/*
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements. See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership. The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License. You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-package org.apache.hadoop.hbase.regionserver.wal;
-
-import static org.junit.Assert.assertEquals;
-
-import java.io.IOException;
-import java.util.ArrayList;
-import java.util.List;
-
-import org.apache.hadoop.conf.Configuration;
-import org.apache.hadoop.fs.FSDataInputStream;
-import org.apache.hadoop.fs.FSDataOutputStream;
-import org.apache.hadoop.fs.FileSystem;
-import org.apache.hadoop.fs.Path;
-import org.apache.hadoop.hbase.Cell;
-import org.apache.hadoop.hbase.HBaseTestingUtility;
-import org.apache.hadoop.hbase.KeyValue;
-import org.apache.hadoop.hbase.KeyValueUtil;
-import org.apache.hadoop.hbase.client.Delete;
-import org.apache.hadoop.hbase.client.Mutation;
-import org.apache.hadoop.hbase.client.Put;
-import org.apache.hadoop.hbase.codec.Codec;
-import org.apache.hadoop.hbase.io.util.LRUDictionary;
-import org.apache.hadoop.hbase.util.Bytes;
-import org.apache.phoenix.hbase.index.IndexTestingUtils;
-import org.apache.phoenix.hbase.index.wal.IndexedKeyValue;
-import org.junit.BeforeClass;
-import org.junit.Test;
-
-/**
- * Simple test to read/write simple files via our custom {@link WALCellCodec} to ensure properly
- * encoding/decoding without going through a cluster.
- */
-public class ReadWriteKeyValuesWithCodecIT {
-
- private static final HBaseTestingUtility UTIL = new HBaseTestingUtility();
- private static final byte[] ROW = Bytes.toBytes("row");
- private static final byte[] FAMILY = Bytes.toBytes("family");
-
- @BeforeClass
- public static void setupCodec() {
- Configuration conf = UTIL.getConfiguration();
- IndexTestingUtils.setupConfig(conf);
- conf.set(WALCellCodec.WAL_CELL_CODEC_CLASS_KEY, IndexedWALEditCodec.class.getName());
- }
-
- @Test
- public void testWithoutCompression() throws Exception {
- // get the FS ready to read/write the edits
- Path testDir = UTIL.getDataTestDir("TestReadWriteCustomEdits_withoutCompression");
- Path testFile = new Path(testDir, "testfile");
- FileSystem fs = UTIL.getTestFileSystem();
-
- List<WALEdit> edits = getEdits();
- writeReadAndVerify(null, fs, edits, testFile);
- }
-
- @Test
- public void testWithCompression() throws Exception {
- // get the FS ready to read/write the edit
- Path testDir = UTIL.getDataTestDir("TestReadWriteCustomEdits_withCompression");
- Path testFile = new Path(testDir, "testfile");
- FileSystem fs = UTIL.getTestFileSystem();
-
- List<WALEdit> edits = getEdits();
- CompressionContext compression = new CompressionContext(LRUDictionary.class, false, false);
- writeReadAndVerify(compression, fs, edits, testFile);
- }
-
- /**
- * @return a bunch of {@link WALEdit}s that test a range of serialization possibilities.
- */
- private List<WALEdit> getEdits() {
- // Build up a couple of edits
- List<WALEdit> edits = new ArrayList<WALEdit>();
- Put p = new Put(ROW);
- p.add(FAMILY, null, Bytes.toBytes("v1"));
-
- WALEdit withPut = new WALEdit();
- addMutation(withPut, p, FAMILY);
- edits.add(withPut);
-
- Delete d = new Delete(ROW);
- d.deleteColumn(FAMILY, null);
- WALEdit withDelete = new WALEdit();
- addMutation(withDelete, d, FAMILY);
- edits.add(withDelete);
-
- WALEdit withPutsAndDeletes = new WALEdit();
- addMutation(withPutsAndDeletes, d, FAMILY);
- addMutation(withPutsAndDeletes, p, FAMILY);
- edits.add(withPutsAndDeletes);
-
- WALEdit justIndexUpdates = new WALEdit();
- byte[] table = Bytes.toBytes("targetTable");
- IndexedKeyValue ikv = new IndexedKeyValue(table, p);
- justIndexUpdates.add(ikv);
- edits.add(justIndexUpdates);
-
- WALEdit mixed = new WALEdit();
- addMutation(mixed, d, FAMILY);
- mixed.add(ikv);
- addMutation(mixed, p, FAMILY);
- edits.add(mixed);
-
- return edits;
- }
-
- /**
- * Add all the {@link KeyValue}s in the {@link Mutation}, for the pass family, to the given
- * {@link WALEdit}.
- */
- private void addMutation(WALEdit edit, Mutation m, byte[] family) {
- List<Cell> kvs = m.getFamilyCellMap().get(FAMILY);
- for (Cell kv : kvs) {
- edit.add(KeyValueUtil.ensureKeyValue(kv));
- }
- }
-
-
- private void writeWALEdit(WALCellCodec codec, List<Cell> kvs, FSDataOutputStream out) throws IOException {
- out.writeInt(kvs.size());
- Codec.Encoder cellEncoder = codec.getEncoder(out);
- // We interleave the two lists for code simplicity
- for (Cell kv : kvs) {
- cellEncoder.write(kv);
- }
- }
-
- /**
- * Write the edits to the specified path on the {@link FileSystem} using the given codec and then
- * read them back in and ensure that we read the same thing we wrote.
- */
- private void writeReadAndVerify(final CompressionContext compressionContext, FileSystem fs, List<WALEdit> edits,
- Path testFile) throws IOException {
-
- WALCellCodec codec = WALCellCodec.create(UTIL.getConfiguration(), compressionContext);
- // write the edits out
- FSDataOutputStream out = fs.create(testFile);
- for (WALEdit edit : edits) {
- writeWALEdit(codec, edit.getCells(), out);
- }
- out.close();
-
- // read in the edits
- FSDataInputStream in = fs.open(testFile);
- List<WALEdit> read = new ArrayList<WALEdit>();
- for (int i = 0; i < edits.size(); i++) {
- WALEdit edit = new WALEdit();
- int numEdits = in.readInt();
- edit.readFromCells(codec.getDecoder(in), numEdits);
- read.add(edit);
- }
- in.close();
-
- // make sure the read edits match the written
- for(int i=0; i< edits.size(); i++){
- WALEdit expected = edits.get(i);
- WALEdit found = read.get(i);
- for(int j=0; j< expected.getCells().size(); j++){
- Cell fkv = found.getCells().get(j);
- Cell ekv = expected.getCells().get(j);
- assertEquals("KV mismatch for edit! Expected: "+expected+", but found: "+found, ekv, fkv);
- }
- }
- }
-}
http://git-wip-us.apache.org/repos/asf/phoenix/blob/d9ac3f10/phoenix-core/src/it/java/org/apache/phoenix/end2end/IndexScrutinyToolIT.java
----------------------------------------------------------------------
diff --git a/phoenix-core/src/it/java/org/apache/phoenix/end2end/IndexScrutinyToolIT.java b/phoenix-core/src/it/java/org/apache/phoenix/end2end/IndexScrutinyToolIT.java
index 5068610..f868cef 100644
--- a/phoenix-core/src/it/java/org/apache/phoenix/end2end/IndexScrutinyToolIT.java
+++ b/phoenix-core/src/it/java/org/apache/phoenix/end2end/IndexScrutinyToolIT.java
@@ -58,6 +58,7 @@ import org.junit.After;
import org.junit.Before;
import org.junit.BeforeClass;
import org.junit.Test;
+import org.junit.experimental.categories.Category;
import com.google.common.collect.Lists;
import com.google.common.collect.Maps;
@@ -65,6 +66,7 @@ import com.google.common.collect.Maps;
/**
* Tests for the {@link IndexScrutinyTool}
*/
+@Category(NeedsOwnMiniClusterTest.class)
public class IndexScrutinyToolIT extends BaseTest {
private static final String DATA_TABLE_DDL =
http://git-wip-us.apache.org/repos/asf/phoenix/blob/d9ac3f10/phoenix-core/src/it/java/org/apache/phoenix/end2end/SystemTablePermissionsIT.java
----------------------------------------------------------------------
diff --git a/phoenix-core/src/it/java/org/apache/phoenix/end2end/SystemTablePermissionsIT.java b/phoenix-core/src/it/java/org/apache/phoenix/end2end/SystemTablePermissionsIT.java
index 9f213c8..166b135 100644
--- a/phoenix-core/src/it/java/org/apache/phoenix/end2end/SystemTablePermissionsIT.java
+++ b/phoenix-core/src/it/java/org/apache/phoenix/end2end/SystemTablePermissionsIT.java
@@ -45,10 +45,12 @@ import org.apache.phoenix.query.QueryServices;
import org.junit.After;
import org.junit.BeforeClass;
import org.junit.Test;
+import org.junit.experimental.categories.Category;
/**
* Test that verifies a user can read Phoenix tables with a minimal set of permissions.
*/
+@Category(NeedsOwnMiniClusterTest.class)
public class SystemTablePermissionsIT {
private static String SUPERUSER;
http://git-wip-us.apache.org/repos/asf/phoenix/blob/d9ac3f10/phoenix-core/src/test/java/org/apache/hadoop/hbase/regionserver/wal/ReadWriteKeyValuesWithCodecTest.java
----------------------------------------------------------------------
diff --git a/phoenix-core/src/test/java/org/apache/hadoop/hbase/regionserver/wal/ReadWriteKeyValuesWithCodecTest.java b/phoenix-core/src/test/java/org/apache/hadoop/hbase/regionserver/wal/ReadWriteKeyValuesWithCodecTest.java
new file mode 100644
index 0000000..469dd21
--- /dev/null
+++ b/phoenix-core/src/test/java/org/apache/hadoop/hbase/regionserver/wal/ReadWriteKeyValuesWithCodecTest.java
@@ -0,0 +1,186 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements. See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership. The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License. You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.hadoop.hbase.regionserver.wal;
+
+import static org.junit.Assert.assertEquals;
+
+import java.io.IOException;
+import java.util.ArrayList;
+import java.util.List;
+
+import org.apache.hadoop.conf.Configuration;
+import org.apache.hadoop.fs.FSDataInputStream;
+import org.apache.hadoop.fs.FSDataOutputStream;
+import org.apache.hadoop.fs.FileSystem;
+import org.apache.hadoop.fs.Path;
+import org.apache.hadoop.hbase.Cell;
+import org.apache.hadoop.hbase.HBaseTestingUtility;
+import org.apache.hadoop.hbase.KeyValue;
+import org.apache.hadoop.hbase.KeyValueUtil;
+import org.apache.hadoop.hbase.client.Delete;
+import org.apache.hadoop.hbase.client.Mutation;
+import org.apache.hadoop.hbase.client.Put;
+import org.apache.hadoop.hbase.codec.Codec;
+import org.apache.hadoop.hbase.io.util.LRUDictionary;
+import org.apache.hadoop.hbase.util.Bytes;
+import org.apache.phoenix.end2end.NeedsOwnMiniClusterTest;
+import org.apache.phoenix.hbase.index.IndexTestingUtils;
+import org.apache.phoenix.hbase.index.wal.IndexedKeyValue;
+import org.junit.BeforeClass;
+import org.junit.Test;
+import org.junit.experimental.categories.Category;
+
+/**
+ * Simple test to read/write simple files via our custom {@link WALCellCodec} to ensure properly
+ * encoding/decoding without going through a cluster.
+ */
+public class ReadWriteKeyValuesWithCodecTest {
+
+ private static final HBaseTestingUtility UTIL = new HBaseTestingUtility();
+ private static final byte[] ROW = Bytes.toBytes("row");
+ private static final byte[] FAMILY = Bytes.toBytes("family");
+
+ @BeforeClass
+ public static void setupCodec() {
+ Configuration conf = UTIL.getConfiguration();
+ IndexTestingUtils.setupConfig(conf);
+ conf.set(WALCellCodec.WAL_CELL_CODEC_CLASS_KEY, IndexedWALEditCodec.class.getName());
+ }
+
+ @Test
+ public void testWithoutCompression() throws Exception {
+ // get the FS ready to read/write the edits
+ Path testDir = UTIL.getDataTestDir("TestReadWriteCustomEdits_withoutCompression");
+ Path testFile = new Path(testDir, "testfile");
+ FileSystem fs = UTIL.getTestFileSystem();
+
+ List<WALEdit> edits = getEdits();
+ writeReadAndVerify(null, fs, edits, testFile);
+ }
+
+ @Test
+ public void testWithCompression() throws Exception {
+ // get the FS ready to read/write the edit
+ Path testDir = UTIL.getDataTestDir("TestReadWriteCustomEdits_withCompression");
+ Path testFile = new Path(testDir, "testfile");
+ FileSystem fs = UTIL.getTestFileSystem();
+
+ List<WALEdit> edits = getEdits();
+ CompressionContext compression = new CompressionContext(LRUDictionary.class, false, false);
+ writeReadAndVerify(compression, fs, edits, testFile);
+ }
+
+ /**
+ * @return a bunch of {@link WALEdit}s that test a range of serialization possibilities.
+ */
+ private List<WALEdit> getEdits() {
+ // Build up a couple of edits
+ List<WALEdit> edits = new ArrayList<WALEdit>();
+ Put p = new Put(ROW);
+ p.add(FAMILY, null, Bytes.toBytes("v1"));
+
+ WALEdit withPut = new WALEdit();
+ addMutation(withPut, p, FAMILY);
+ edits.add(withPut);
+
+ Delete d = new Delete(ROW);
+ d.deleteColumn(FAMILY, null);
+ WALEdit withDelete = new WALEdit();
+ addMutation(withDelete, d, FAMILY);
+ edits.add(withDelete);
+
+ WALEdit withPutsAndDeletes = new WALEdit();
+ addMutation(withPutsAndDeletes, d, FAMILY);
+ addMutation(withPutsAndDeletes, p, FAMILY);
+ edits.add(withPutsAndDeletes);
+
+ WALEdit justIndexUpdates = new WALEdit();
+ byte[] table = Bytes.toBytes("targetTable");
+ IndexedKeyValue ikv = new IndexedKeyValue(table, p);
+ justIndexUpdates.add(ikv);
+ edits.add(justIndexUpdates);
+
+ WALEdit mixed = new WALEdit();
+ addMutation(mixed, d, FAMILY);
+ mixed.add(ikv);
+ addMutation(mixed, p, FAMILY);
+ edits.add(mixed);
+
+ return edits;
+ }
+
+ /**
+ * Add all the {@link KeyValue}s in the {@link Mutation}, for the pass family, to the given
+ * {@link WALEdit}.
+ */
+ private void addMutation(WALEdit edit, Mutation m, byte[] family) {
+ List<Cell> kvs = m.getFamilyCellMap().get(FAMILY);
+ for (Cell kv : kvs) {
+ edit.add(KeyValueUtil.ensureKeyValue(kv));
+ }
+ }
+
+
+ private void writeWALEdit(WALCellCodec codec, List<Cell> kvs, FSDataOutputStream out) throws IOException {
+ out.writeInt(kvs.size());
+ Codec.Encoder cellEncoder = codec.getEncoder(out);
+ // We interleave the two lists for code simplicity
+ for (Cell kv : kvs) {
+ cellEncoder.write(kv);
+ }
+ }
+
+ /**
+ * Write the edits to the specified path on the {@link FileSystem} using the given codec and then
+ * read them back in and ensure that we read the same thing we wrote.
+ */
+ private void writeReadAndVerify(final CompressionContext compressionContext, FileSystem fs, List<WALEdit> edits,
+ Path testFile) throws IOException {
+
+ WALCellCodec codec = WALCellCodec.create(UTIL.getConfiguration(), compressionContext);
+ // write the edits out
+ FSDataOutputStream out = fs.create(testFile);
+ for (WALEdit edit : edits) {
+ writeWALEdit(codec, edit.getCells(), out);
+ }
+ out.close();
+
+ // read in the edits
+ FSDataInputStream in = fs.open(testFile);
+ List<WALEdit> read = new ArrayList<WALEdit>();
+ for (int i = 0; i < edits.size(); i++) {
+ WALEdit edit = new WALEdit();
+ int numEdits = in.readInt();
+ edit.readFromCells(codec.getDecoder(in), numEdits);
+ read.add(edit);
+ }
+ in.close();
+
+ // make sure the read edits match the written
+ for(int i=0; i< edits.size(); i++){
+ WALEdit expected = edits.get(i);
+ WALEdit found = read.get(i);
+ for(int j=0; j< expected.getCells().size(); j++){
+ Cell fkv = found.getCells().get(j);
+ Cell ekv = expected.getCells().get(j);
+ assertEquals("KV mismatch for edit! Expected: "+expected+", but found: "+found, ekv, fkv);
+ }
+ }
+ }
+}
http://git-wip-us.apache.org/repos/asf/phoenix/blob/d9ac3f10/phoenix-hive/src/it/java/org/apache/phoenix/hive/BaseHivePhoenixStoreIT.java
----------------------------------------------------------------------
diff --git a/phoenix-hive/src/it/java/org/apache/phoenix/hive/BaseHivePhoenixStoreIT.java b/phoenix-hive/src/it/java/org/apache/phoenix/hive/BaseHivePhoenixStoreIT.java
index afb06ae..c705e2d 100644
--- a/phoenix-hive/src/it/java/org/apache/phoenix/hive/BaseHivePhoenixStoreIT.java
+++ b/phoenix-hive/src/it/java/org/apache/phoenix/hive/BaseHivePhoenixStoreIT.java
@@ -25,12 +25,14 @@ import org.apache.hadoop.fs.Path;
import org.apache.hadoop.hbase.HBaseTestingUtility;
import org.apache.hadoop.hbase.MiniHBaseCluster;
import org.apache.hadoop.hdfs.MiniDFSCluster;
+import org.apache.phoenix.end2end.NeedsOwnMiniClusterTest;
import org.apache.phoenix.jdbc.PhoenixDriver;
import org.apache.phoenix.query.QueryServices;
import org.apache.phoenix.util.PhoenixRuntime;
import org.apache.phoenix.util.PropertiesUtil;
import org.apache.phoenix.util.TestUtil;
import org.junit.AfterClass;
+import org.junit.experimental.categories.Category;
import java.io.File;
import java.io.IOException;
@@ -44,6 +46,7 @@ import static org.junit.Assert.fail;
/**
* Base class for all Hive Phoenix integration tests that may be run with Tez or MR mini cluster
*/
+@Category(NeedsOwnMiniClusterTest.class)
public class BaseHivePhoenixStoreIT {
private static final Log LOG = LogFactory.getLog(BaseHivePhoenixStoreIT.class);
http://git-wip-us.apache.org/repos/asf/phoenix/blob/d9ac3f10/phoenix-hive/src/it/java/org/apache/phoenix/hive/HivePhoenixStoreIT.java
----------------------------------------------------------------------
diff --git a/phoenix-hive/src/it/java/org/apache/phoenix/hive/HivePhoenixStoreIT.java b/phoenix-hive/src/it/java/org/apache/phoenix/hive/HivePhoenixStoreIT.java
index cf12a80..1828818 100644
--- a/phoenix-hive/src/it/java/org/apache/phoenix/hive/HivePhoenixStoreIT.java
+++ b/phoenix-hive/src/it/java/org/apache/phoenix/hive/HivePhoenixStoreIT.java
@@ -18,9 +18,11 @@
package org.apache.phoenix.hive;
import org.apache.hadoop.fs.Path;
+import org.apache.phoenix.end2end.NeedsOwnMiniClusterTest;
import org.apache.phoenix.util.StringUtil;
import org.junit.Ignore;
import org.junit.Test;
+import org.junit.experimental.categories.Category;
import java.sql.PreparedStatement;
import java.sql.ResultSet;
@@ -31,6 +33,7 @@ import static org.junit.Assert.assertTrue;
* Test methods only. All supporting methods should be placed to BaseHivePhoenixStoreIT
*/
+@Category(NeedsOwnMiniClusterTest.class)
@Ignore("This class contains only test methods and should not be executed directly")
public class HivePhoenixStoreIT extends BaseHivePhoenixStoreIT {
[4/4] phoenix git commit: PHOENIX-4191 Categorize uncategorized integration tests
Posted by el...@apache.org.
PHOENIX-4191 Categorize uncategorized integration tests
Uncategorized tests result in Maven not running them.
Project: http://git-wip-us.apache.org/repos/asf/phoenix/repo
Commit: http://git-wip-us.apache.org/repos/asf/phoenix/commit/7f38f7e7
Tree: http://git-wip-us.apache.org/repos/asf/phoenix/tree/7f38f7e7
Diff: http://git-wip-us.apache.org/repos/asf/phoenix/diff/7f38f7e7
Branch: refs/heads/4.x-HBase-0.98
Commit: 7f38f7e70ad420a253f0ce2709ebebccb4572062
Parents: ae60776
Author: Josh Elser <el...@apache.org>
Authored: Tue Sep 12 18:26:36 2017 -0400
Committer: Josh Elser <el...@apache.org>
Committed: Tue Sep 12 19:00:00 2017 -0400
----------------------------------------------------------------------
.../wal/ReadWriteKeyValuesWithCodecIT.java | 184 ------------------
.../phoenix/end2end/IndexScrutinyToolIT.java | 2 +
.../end2end/SystemTablePermissionsIT.java | 2 +
.../wal/ReadWriteKeyValuesWithCodecTest.java | 186 +++++++++++++++++++
.../phoenix/hive/BaseHivePhoenixStoreIT.java | 3 +
.../apache/phoenix/hive/HivePhoenixStoreIT.java | 3 +
6 files changed, 196 insertions(+), 184 deletions(-)
----------------------------------------------------------------------
http://git-wip-us.apache.org/repos/asf/phoenix/blob/7f38f7e7/phoenix-core/src/it/java/org/apache/hadoop/hbase/regionserver/wal/ReadWriteKeyValuesWithCodecIT.java
----------------------------------------------------------------------
diff --git a/phoenix-core/src/it/java/org/apache/hadoop/hbase/regionserver/wal/ReadWriteKeyValuesWithCodecIT.java b/phoenix-core/src/it/java/org/apache/hadoop/hbase/regionserver/wal/ReadWriteKeyValuesWithCodecIT.java
deleted file mode 100644
index 39eb871..0000000
--- a/phoenix-core/src/it/java/org/apache/hadoop/hbase/regionserver/wal/ReadWriteKeyValuesWithCodecIT.java
+++ /dev/null
@@ -1,184 +0,0 @@
-/*
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements. See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership. The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License. You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-package org.apache.hadoop.hbase.regionserver.wal;
-
-import static org.junit.Assert.assertEquals;
-
-import java.io.IOException;
-import java.util.ArrayList;
-import java.util.List;
-
-import org.apache.hadoop.conf.Configuration;
-import org.apache.hadoop.fs.FSDataInputStream;
-import org.apache.hadoop.fs.FSDataOutputStream;
-import org.apache.hadoop.fs.FileSystem;
-import org.apache.hadoop.fs.Path;
-import org.apache.hadoop.hbase.Cell;
-import org.apache.hadoop.hbase.HBaseTestingUtility;
-import org.apache.hadoop.hbase.KeyValue;
-import org.apache.hadoop.hbase.KeyValueUtil;
-import org.apache.hadoop.hbase.client.Delete;
-import org.apache.hadoop.hbase.client.Mutation;
-import org.apache.hadoop.hbase.client.Put;
-import org.apache.hadoop.hbase.codec.Codec;
-import org.apache.hadoop.hbase.io.util.LRUDictionary;
-import org.apache.hadoop.hbase.util.Bytes;
-import org.apache.phoenix.hbase.index.IndexTestingUtils;
-import org.apache.phoenix.hbase.index.wal.IndexedKeyValue;
-import org.junit.BeforeClass;
-import org.junit.Test;
-
-/**
- * Simple test to read/write simple files via our custom {@link WALCellCodec} to ensure properly
- * encoding/decoding without going through a cluster.
- */
-public class ReadWriteKeyValuesWithCodecIT {
-
- private static final HBaseTestingUtility UTIL = new HBaseTestingUtility();
- private static final byte[] ROW = Bytes.toBytes("row");
- private static final byte[] FAMILY = Bytes.toBytes("family");
-
- @BeforeClass
- public static void setupCodec() {
- Configuration conf = UTIL.getConfiguration();
- IndexTestingUtils.setupConfig(conf);
- conf.set(WALCellCodec.WAL_CELL_CODEC_CLASS_KEY, IndexedWALEditCodec.class.getName());
- }
-
- @Test
- public void testWithoutCompression() throws Exception {
- // get the FS ready to read/write the edits
- Path testDir = UTIL.getDataTestDir("TestReadWriteCustomEdits_withoutCompression");
- Path testFile = new Path(testDir, "testfile");
- FileSystem fs = UTIL.getTestFileSystem();
-
- List<WALEdit> edits = getEdits();
- writeReadAndVerify(null, fs, edits, testFile);
- }
-
- @Test
- public void testWithCompression() throws Exception {
- // get the FS ready to read/write the edit
- Path testDir = UTIL.getDataTestDir("TestReadWriteCustomEdits_withCompression");
- Path testFile = new Path(testDir, "testfile");
- FileSystem fs = UTIL.getTestFileSystem();
-
- List<WALEdit> edits = getEdits();
- CompressionContext compression = new CompressionContext(LRUDictionary.class, false, false);
- writeReadAndVerify(compression, fs, edits, testFile);
- }
-
- /**
- * @return a bunch of {@link WALEdit}s that test a range of serialization possibilities.
- */
- private List<WALEdit> getEdits() {
- // Build up a couple of edits
- List<WALEdit> edits = new ArrayList<WALEdit>();
- Put p = new Put(ROW);
- p.add(FAMILY, null, Bytes.toBytes("v1"));
-
- WALEdit withPut = new WALEdit();
- addMutation(withPut, p, FAMILY);
- edits.add(withPut);
-
- Delete d = new Delete(ROW);
- d.deleteColumn(FAMILY, null);
- WALEdit withDelete = new WALEdit();
- addMutation(withDelete, d, FAMILY);
- edits.add(withDelete);
-
- WALEdit withPutsAndDeletes = new WALEdit();
- addMutation(withPutsAndDeletes, d, FAMILY);
- addMutation(withPutsAndDeletes, p, FAMILY);
- edits.add(withPutsAndDeletes);
-
- WALEdit justIndexUpdates = new WALEdit();
- byte[] table = Bytes.toBytes("targetTable");
- IndexedKeyValue ikv = new IndexedKeyValue(table, p);
- justIndexUpdates.add(ikv);
- edits.add(justIndexUpdates);
-
- WALEdit mixed = new WALEdit();
- addMutation(mixed, d, FAMILY);
- mixed.add(ikv);
- addMutation(mixed, p, FAMILY);
- edits.add(mixed);
-
- return edits;
- }
-
- /**
- * Add all the {@link KeyValue}s in the {@link Mutation}, for the pass family, to the given
- * {@link WALEdit}.
- */
- private void addMutation(WALEdit edit, Mutation m, byte[] family) {
- List<Cell> kvs = m.getFamilyCellMap().get(FAMILY);
- for (Cell kv : kvs) {
- edit.add(KeyValueUtil.ensureKeyValue(kv));
- }
- }
-
-
- private void writeWALEdit(WALCellCodec codec, List<Cell> kvs, FSDataOutputStream out) throws IOException {
- out.writeInt(kvs.size());
- Codec.Encoder cellEncoder = codec.getEncoder(out);
- // We interleave the two lists for code simplicity
- for (Cell kv : kvs) {
- cellEncoder.write(kv);
- }
- }
-
- /**
- * Write the edits to the specified path on the {@link FileSystem} using the given codec and then
- * read them back in and ensure that we read the same thing we wrote.
- */
- private void writeReadAndVerify(final CompressionContext compressionContext, FileSystem fs, List<WALEdit> edits,
- Path testFile) throws IOException {
-
- WALCellCodec codec = WALCellCodec.create(UTIL.getConfiguration(), compressionContext);
- // write the edits out
- FSDataOutputStream out = fs.create(testFile);
- for (WALEdit edit : edits) {
- writeWALEdit(codec, edit.getCells(), out);
- }
- out.close();
-
- // read in the edits
- FSDataInputStream in = fs.open(testFile);
- List<WALEdit> read = new ArrayList<WALEdit>();
- for (int i = 0; i < edits.size(); i++) {
- WALEdit edit = new WALEdit();
- int numEdits = in.readInt();
- edit.readFromCells(codec.getDecoder(in), numEdits);
- read.add(edit);
- }
- in.close();
-
- // make sure the read edits match the written
- for(int i=0; i< edits.size(); i++){
- WALEdit expected = edits.get(i);
- WALEdit found = read.get(i);
- for(int j=0; j< expected.getCells().size(); j++){
- Cell fkv = found.getCells().get(j);
- Cell ekv = expected.getCells().get(j);
- assertEquals("KV mismatch for edit! Expected: "+expected+", but found: "+found, ekv, fkv);
- }
- }
- }
-}
http://git-wip-us.apache.org/repos/asf/phoenix/blob/7f38f7e7/phoenix-core/src/it/java/org/apache/phoenix/end2end/IndexScrutinyToolIT.java
----------------------------------------------------------------------
diff --git a/phoenix-core/src/it/java/org/apache/phoenix/end2end/IndexScrutinyToolIT.java b/phoenix-core/src/it/java/org/apache/phoenix/end2end/IndexScrutinyToolIT.java
index 5068610..f868cef 100644
--- a/phoenix-core/src/it/java/org/apache/phoenix/end2end/IndexScrutinyToolIT.java
+++ b/phoenix-core/src/it/java/org/apache/phoenix/end2end/IndexScrutinyToolIT.java
@@ -58,6 +58,7 @@ import org.junit.After;
import org.junit.Before;
import org.junit.BeforeClass;
import org.junit.Test;
+import org.junit.experimental.categories.Category;
import com.google.common.collect.Lists;
import com.google.common.collect.Maps;
@@ -65,6 +66,7 @@ import com.google.common.collect.Maps;
/**
* Tests for the {@link IndexScrutinyTool}
*/
+@Category(NeedsOwnMiniClusterTest.class)
public class IndexScrutinyToolIT extends BaseTest {
private static final String DATA_TABLE_DDL =
http://git-wip-us.apache.org/repos/asf/phoenix/blob/7f38f7e7/phoenix-core/src/it/java/org/apache/phoenix/end2end/SystemTablePermissionsIT.java
----------------------------------------------------------------------
diff --git a/phoenix-core/src/it/java/org/apache/phoenix/end2end/SystemTablePermissionsIT.java b/phoenix-core/src/it/java/org/apache/phoenix/end2end/SystemTablePermissionsIT.java
index e99f322..1499f27 100644
--- a/phoenix-core/src/it/java/org/apache/phoenix/end2end/SystemTablePermissionsIT.java
+++ b/phoenix-core/src/it/java/org/apache/phoenix/end2end/SystemTablePermissionsIT.java
@@ -45,10 +45,12 @@ import org.apache.phoenix.query.QueryServices;
import org.junit.After;
import org.junit.BeforeClass;
import org.junit.Test;
+import org.junit.experimental.categories.Category;
/**
* Test that verifies a user can read Phoenix tables with a minimal set of permissions.
*/
+@Category(NeedsOwnMiniClusterTest.class)
public class SystemTablePermissionsIT {
private static String SUPERUSER;
http://git-wip-us.apache.org/repos/asf/phoenix/blob/7f38f7e7/phoenix-core/src/test/java/org/apache/hadoop/hbase/regionserver/wal/ReadWriteKeyValuesWithCodecTest.java
----------------------------------------------------------------------
diff --git a/phoenix-core/src/test/java/org/apache/hadoop/hbase/regionserver/wal/ReadWriteKeyValuesWithCodecTest.java b/phoenix-core/src/test/java/org/apache/hadoop/hbase/regionserver/wal/ReadWriteKeyValuesWithCodecTest.java
new file mode 100644
index 0000000..469dd21
--- /dev/null
+++ b/phoenix-core/src/test/java/org/apache/hadoop/hbase/regionserver/wal/ReadWriteKeyValuesWithCodecTest.java
@@ -0,0 +1,186 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements. See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership. The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License. You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.hadoop.hbase.regionserver.wal;
+
+import static org.junit.Assert.assertEquals;
+
+import java.io.IOException;
+import java.util.ArrayList;
+import java.util.List;
+
+import org.apache.hadoop.conf.Configuration;
+import org.apache.hadoop.fs.FSDataInputStream;
+import org.apache.hadoop.fs.FSDataOutputStream;
+import org.apache.hadoop.fs.FileSystem;
+import org.apache.hadoop.fs.Path;
+import org.apache.hadoop.hbase.Cell;
+import org.apache.hadoop.hbase.HBaseTestingUtility;
+import org.apache.hadoop.hbase.KeyValue;
+import org.apache.hadoop.hbase.KeyValueUtil;
+import org.apache.hadoop.hbase.client.Delete;
+import org.apache.hadoop.hbase.client.Mutation;
+import org.apache.hadoop.hbase.client.Put;
+import org.apache.hadoop.hbase.codec.Codec;
+import org.apache.hadoop.hbase.io.util.LRUDictionary;
+import org.apache.hadoop.hbase.util.Bytes;
+import org.apache.phoenix.end2end.NeedsOwnMiniClusterTest;
+import org.apache.phoenix.hbase.index.IndexTestingUtils;
+import org.apache.phoenix.hbase.index.wal.IndexedKeyValue;
+import org.junit.BeforeClass;
+import org.junit.Test;
+import org.junit.experimental.categories.Category;
+
+/**
+ * Simple test to read/write simple files via our custom {@link WALCellCodec} to ensure properly
+ * encoding/decoding without going through a cluster.
+ */
+public class ReadWriteKeyValuesWithCodecTest {
+
+ private static final HBaseTestingUtility UTIL = new HBaseTestingUtility();
+ private static final byte[] ROW = Bytes.toBytes("row");
+ private static final byte[] FAMILY = Bytes.toBytes("family");
+
+ @BeforeClass
+ public static void setupCodec() {
+ Configuration conf = UTIL.getConfiguration();
+ IndexTestingUtils.setupConfig(conf);
+ conf.set(WALCellCodec.WAL_CELL_CODEC_CLASS_KEY, IndexedWALEditCodec.class.getName());
+ }
+
+ @Test
+ public void testWithoutCompression() throws Exception {
+ // get the FS ready to read/write the edits
+ Path testDir = UTIL.getDataTestDir("TestReadWriteCustomEdits_withoutCompression");
+ Path testFile = new Path(testDir, "testfile");
+ FileSystem fs = UTIL.getTestFileSystem();
+
+ List<WALEdit> edits = getEdits();
+ writeReadAndVerify(null, fs, edits, testFile);
+ }
+
+ @Test
+ public void testWithCompression() throws Exception {
+ // get the FS ready to read/write the edit
+ Path testDir = UTIL.getDataTestDir("TestReadWriteCustomEdits_withCompression");
+ Path testFile = new Path(testDir, "testfile");
+ FileSystem fs = UTIL.getTestFileSystem();
+
+ List<WALEdit> edits = getEdits();
+ CompressionContext compression = new CompressionContext(LRUDictionary.class, false, false);
+ writeReadAndVerify(compression, fs, edits, testFile);
+ }
+
+ /**
+ * @return a bunch of {@link WALEdit}s that test a range of serialization possibilities.
+ */
+ private List<WALEdit> getEdits() {
+ // Build up a couple of edits
+ List<WALEdit> edits = new ArrayList<WALEdit>();
+ Put p = new Put(ROW);
+ p.add(FAMILY, null, Bytes.toBytes("v1"));
+
+ WALEdit withPut = new WALEdit();
+ addMutation(withPut, p, FAMILY);
+ edits.add(withPut);
+
+ Delete d = new Delete(ROW);
+ d.deleteColumn(FAMILY, null);
+ WALEdit withDelete = new WALEdit();
+ addMutation(withDelete, d, FAMILY);
+ edits.add(withDelete);
+
+ WALEdit withPutsAndDeletes = new WALEdit();
+ addMutation(withPutsAndDeletes, d, FAMILY);
+ addMutation(withPutsAndDeletes, p, FAMILY);
+ edits.add(withPutsAndDeletes);
+
+ WALEdit justIndexUpdates = new WALEdit();
+ byte[] table = Bytes.toBytes("targetTable");
+ IndexedKeyValue ikv = new IndexedKeyValue(table, p);
+ justIndexUpdates.add(ikv);
+ edits.add(justIndexUpdates);
+
+ WALEdit mixed = new WALEdit();
+ addMutation(mixed, d, FAMILY);
+ mixed.add(ikv);
+ addMutation(mixed, p, FAMILY);
+ edits.add(mixed);
+
+ return edits;
+ }
+
+ /**
+ * Add all the {@link KeyValue}s in the {@link Mutation}, for the pass family, to the given
+ * {@link WALEdit}.
+ */
+ private void addMutation(WALEdit edit, Mutation m, byte[] family) {
+ List<Cell> kvs = m.getFamilyCellMap().get(FAMILY);
+ for (Cell kv : kvs) {
+ edit.add(KeyValueUtil.ensureKeyValue(kv));
+ }
+ }
+
+
+ private void writeWALEdit(WALCellCodec codec, List<Cell> kvs, FSDataOutputStream out) throws IOException {
+ out.writeInt(kvs.size());
+ Codec.Encoder cellEncoder = codec.getEncoder(out);
+ // We interleave the two lists for code simplicity
+ for (Cell kv : kvs) {
+ cellEncoder.write(kv);
+ }
+ }
+
+ /**
+ * Write the edits to the specified path on the {@link FileSystem} using the given codec and then
+ * read them back in and ensure that we read the same thing we wrote.
+ */
+ private void writeReadAndVerify(final CompressionContext compressionContext, FileSystem fs, List<WALEdit> edits,
+ Path testFile) throws IOException {
+
+ WALCellCodec codec = WALCellCodec.create(UTIL.getConfiguration(), compressionContext);
+ // write the edits out
+ FSDataOutputStream out = fs.create(testFile);
+ for (WALEdit edit : edits) {
+ writeWALEdit(codec, edit.getCells(), out);
+ }
+ out.close();
+
+ // read in the edits
+ FSDataInputStream in = fs.open(testFile);
+ List<WALEdit> read = new ArrayList<WALEdit>();
+ for (int i = 0; i < edits.size(); i++) {
+ WALEdit edit = new WALEdit();
+ int numEdits = in.readInt();
+ edit.readFromCells(codec.getDecoder(in), numEdits);
+ read.add(edit);
+ }
+ in.close();
+
+ // make sure the read edits match the written
+ for(int i=0; i< edits.size(); i++){
+ WALEdit expected = edits.get(i);
+ WALEdit found = read.get(i);
+ for(int j=0; j< expected.getCells().size(); j++){
+ Cell fkv = found.getCells().get(j);
+ Cell ekv = expected.getCells().get(j);
+ assertEquals("KV mismatch for edit! Expected: "+expected+", but found: "+found, ekv, fkv);
+ }
+ }
+ }
+}
http://git-wip-us.apache.org/repos/asf/phoenix/blob/7f38f7e7/phoenix-hive/src/it/java/org/apache/phoenix/hive/BaseHivePhoenixStoreIT.java
----------------------------------------------------------------------
diff --git a/phoenix-hive/src/it/java/org/apache/phoenix/hive/BaseHivePhoenixStoreIT.java b/phoenix-hive/src/it/java/org/apache/phoenix/hive/BaseHivePhoenixStoreIT.java
index afb06ae..c705e2d 100644
--- a/phoenix-hive/src/it/java/org/apache/phoenix/hive/BaseHivePhoenixStoreIT.java
+++ b/phoenix-hive/src/it/java/org/apache/phoenix/hive/BaseHivePhoenixStoreIT.java
@@ -25,12 +25,14 @@ import org.apache.hadoop.fs.Path;
import org.apache.hadoop.hbase.HBaseTestingUtility;
import org.apache.hadoop.hbase.MiniHBaseCluster;
import org.apache.hadoop.hdfs.MiniDFSCluster;
+import org.apache.phoenix.end2end.NeedsOwnMiniClusterTest;
import org.apache.phoenix.jdbc.PhoenixDriver;
import org.apache.phoenix.query.QueryServices;
import org.apache.phoenix.util.PhoenixRuntime;
import org.apache.phoenix.util.PropertiesUtil;
import org.apache.phoenix.util.TestUtil;
import org.junit.AfterClass;
+import org.junit.experimental.categories.Category;
import java.io.File;
import java.io.IOException;
@@ -44,6 +46,7 @@ import static org.junit.Assert.fail;
/**
* Base class for all Hive Phoenix integration tests that may be run with Tez or MR mini cluster
*/
+@Category(NeedsOwnMiniClusterTest.class)
public class BaseHivePhoenixStoreIT {
private static final Log LOG = LogFactory.getLog(BaseHivePhoenixStoreIT.class);
http://git-wip-us.apache.org/repos/asf/phoenix/blob/7f38f7e7/phoenix-hive/src/it/java/org/apache/phoenix/hive/HivePhoenixStoreIT.java
----------------------------------------------------------------------
diff --git a/phoenix-hive/src/it/java/org/apache/phoenix/hive/HivePhoenixStoreIT.java b/phoenix-hive/src/it/java/org/apache/phoenix/hive/HivePhoenixStoreIT.java
index cf12a80..1828818 100644
--- a/phoenix-hive/src/it/java/org/apache/phoenix/hive/HivePhoenixStoreIT.java
+++ b/phoenix-hive/src/it/java/org/apache/phoenix/hive/HivePhoenixStoreIT.java
@@ -18,9 +18,11 @@
package org.apache.phoenix.hive;
import org.apache.hadoop.fs.Path;
+import org.apache.phoenix.end2end.NeedsOwnMiniClusterTest;
import org.apache.phoenix.util.StringUtil;
import org.junit.Ignore;
import org.junit.Test;
+import org.junit.experimental.categories.Category;
import java.sql.PreparedStatement;
import java.sql.ResultSet;
@@ -31,6 +33,7 @@ import static org.junit.Assert.assertTrue;
* Test methods only. All supporting methods should be placed to BaseHivePhoenixStoreIT
*/
+@Category(NeedsOwnMiniClusterTest.class)
@Ignore("This class contains only test methods and should not be executed directly")
public class HivePhoenixStoreIT extends BaseHivePhoenixStoreIT {
[2/4] phoenix git commit: PHOENIX-4191 Categorize uncategorized integration tests
Posted by el...@apache.org.
PHOENIX-4191 Categorize uncategorized integration tests
Uncategorized tests result in Maven not running them.
Project: http://git-wip-us.apache.org/repos/asf/phoenix/repo
Commit: http://git-wip-us.apache.org/repos/asf/phoenix/commit/b1751c4d
Tree: http://git-wip-us.apache.org/repos/asf/phoenix/tree/b1751c4d
Diff: http://git-wip-us.apache.org/repos/asf/phoenix/diff/b1751c4d
Branch: refs/heads/4.x-HBase-1.2
Commit: b1751c4deb9855c8a9e46425b4969684f629c5d3
Parents: 8f2b2be
Author: Josh Elser <el...@apache.org>
Authored: Tue Sep 12 18:26:36 2017 -0400
Committer: Josh Elser <el...@apache.org>
Committed: Tue Sep 12 18:49:19 2017 -0400
----------------------------------------------------------------------
.../wal/ReadWriteKeyValuesWithCodecIT.java | 184 ------------------
.../phoenix/end2end/IndexScrutinyToolIT.java | 2 +
.../end2end/SystemTablePermissionsIT.java | 2 +
.../wal/ReadWriteKeyValuesWithCodecTest.java | 186 +++++++++++++++++++
.../phoenix/hive/BaseHivePhoenixStoreIT.java | 3 +
.../apache/phoenix/hive/HivePhoenixStoreIT.java | 3 +
6 files changed, 196 insertions(+), 184 deletions(-)
----------------------------------------------------------------------
http://git-wip-us.apache.org/repos/asf/phoenix/blob/b1751c4d/phoenix-core/src/it/java/org/apache/hadoop/hbase/regionserver/wal/ReadWriteKeyValuesWithCodecIT.java
----------------------------------------------------------------------
diff --git a/phoenix-core/src/it/java/org/apache/hadoop/hbase/regionserver/wal/ReadWriteKeyValuesWithCodecIT.java b/phoenix-core/src/it/java/org/apache/hadoop/hbase/regionserver/wal/ReadWriteKeyValuesWithCodecIT.java
deleted file mode 100644
index 39eb871..0000000
--- a/phoenix-core/src/it/java/org/apache/hadoop/hbase/regionserver/wal/ReadWriteKeyValuesWithCodecIT.java
+++ /dev/null
@@ -1,184 +0,0 @@
-/*
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements. See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership. The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License. You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-package org.apache.hadoop.hbase.regionserver.wal;
-
-import static org.junit.Assert.assertEquals;
-
-import java.io.IOException;
-import java.util.ArrayList;
-import java.util.List;
-
-import org.apache.hadoop.conf.Configuration;
-import org.apache.hadoop.fs.FSDataInputStream;
-import org.apache.hadoop.fs.FSDataOutputStream;
-import org.apache.hadoop.fs.FileSystem;
-import org.apache.hadoop.fs.Path;
-import org.apache.hadoop.hbase.Cell;
-import org.apache.hadoop.hbase.HBaseTestingUtility;
-import org.apache.hadoop.hbase.KeyValue;
-import org.apache.hadoop.hbase.KeyValueUtil;
-import org.apache.hadoop.hbase.client.Delete;
-import org.apache.hadoop.hbase.client.Mutation;
-import org.apache.hadoop.hbase.client.Put;
-import org.apache.hadoop.hbase.codec.Codec;
-import org.apache.hadoop.hbase.io.util.LRUDictionary;
-import org.apache.hadoop.hbase.util.Bytes;
-import org.apache.phoenix.hbase.index.IndexTestingUtils;
-import org.apache.phoenix.hbase.index.wal.IndexedKeyValue;
-import org.junit.BeforeClass;
-import org.junit.Test;
-
-/**
- * Simple test to read/write simple files via our custom {@link WALCellCodec} to ensure properly
- * encoding/decoding without going through a cluster.
- */
-public class ReadWriteKeyValuesWithCodecIT {
-
- private static final HBaseTestingUtility UTIL = new HBaseTestingUtility();
- private static final byte[] ROW = Bytes.toBytes("row");
- private static final byte[] FAMILY = Bytes.toBytes("family");
-
- @BeforeClass
- public static void setupCodec() {
- Configuration conf = UTIL.getConfiguration();
- IndexTestingUtils.setupConfig(conf);
- conf.set(WALCellCodec.WAL_CELL_CODEC_CLASS_KEY, IndexedWALEditCodec.class.getName());
- }
-
- @Test
- public void testWithoutCompression() throws Exception {
- // get the FS ready to read/write the edits
- Path testDir = UTIL.getDataTestDir("TestReadWriteCustomEdits_withoutCompression");
- Path testFile = new Path(testDir, "testfile");
- FileSystem fs = UTIL.getTestFileSystem();
-
- List<WALEdit> edits = getEdits();
- writeReadAndVerify(null, fs, edits, testFile);
- }
-
- @Test
- public void testWithCompression() throws Exception {
- // get the FS ready to read/write the edit
- Path testDir = UTIL.getDataTestDir("TestReadWriteCustomEdits_withCompression");
- Path testFile = new Path(testDir, "testfile");
- FileSystem fs = UTIL.getTestFileSystem();
-
- List<WALEdit> edits = getEdits();
- CompressionContext compression = new CompressionContext(LRUDictionary.class, false, false);
- writeReadAndVerify(compression, fs, edits, testFile);
- }
-
- /**
- * @return a bunch of {@link WALEdit}s that test a range of serialization possibilities.
- */
- private List<WALEdit> getEdits() {
- // Build up a couple of edits
- List<WALEdit> edits = new ArrayList<WALEdit>();
- Put p = new Put(ROW);
- p.add(FAMILY, null, Bytes.toBytes("v1"));
-
- WALEdit withPut = new WALEdit();
- addMutation(withPut, p, FAMILY);
- edits.add(withPut);
-
- Delete d = new Delete(ROW);
- d.deleteColumn(FAMILY, null);
- WALEdit withDelete = new WALEdit();
- addMutation(withDelete, d, FAMILY);
- edits.add(withDelete);
-
- WALEdit withPutsAndDeletes = new WALEdit();
- addMutation(withPutsAndDeletes, d, FAMILY);
- addMutation(withPutsAndDeletes, p, FAMILY);
- edits.add(withPutsAndDeletes);
-
- WALEdit justIndexUpdates = new WALEdit();
- byte[] table = Bytes.toBytes("targetTable");
- IndexedKeyValue ikv = new IndexedKeyValue(table, p);
- justIndexUpdates.add(ikv);
- edits.add(justIndexUpdates);
-
- WALEdit mixed = new WALEdit();
- addMutation(mixed, d, FAMILY);
- mixed.add(ikv);
- addMutation(mixed, p, FAMILY);
- edits.add(mixed);
-
- return edits;
- }
-
- /**
- * Add all the {@link KeyValue}s in the {@link Mutation}, for the pass family, to the given
- * {@link WALEdit}.
- */
- private void addMutation(WALEdit edit, Mutation m, byte[] family) {
- List<Cell> kvs = m.getFamilyCellMap().get(FAMILY);
- for (Cell kv : kvs) {
- edit.add(KeyValueUtil.ensureKeyValue(kv));
- }
- }
-
-
- private void writeWALEdit(WALCellCodec codec, List<Cell> kvs, FSDataOutputStream out) throws IOException {
- out.writeInt(kvs.size());
- Codec.Encoder cellEncoder = codec.getEncoder(out);
- // We interleave the two lists for code simplicity
- for (Cell kv : kvs) {
- cellEncoder.write(kv);
- }
- }
-
- /**
- * Write the edits to the specified path on the {@link FileSystem} using the given codec and then
- * read them back in and ensure that we read the same thing we wrote.
- */
- private void writeReadAndVerify(final CompressionContext compressionContext, FileSystem fs, List<WALEdit> edits,
- Path testFile) throws IOException {
-
- WALCellCodec codec = WALCellCodec.create(UTIL.getConfiguration(), compressionContext);
- // write the edits out
- FSDataOutputStream out = fs.create(testFile);
- for (WALEdit edit : edits) {
- writeWALEdit(codec, edit.getCells(), out);
- }
- out.close();
-
- // read in the edits
- FSDataInputStream in = fs.open(testFile);
- List<WALEdit> read = new ArrayList<WALEdit>();
- for (int i = 0; i < edits.size(); i++) {
- WALEdit edit = new WALEdit();
- int numEdits = in.readInt();
- edit.readFromCells(codec.getDecoder(in), numEdits);
- read.add(edit);
- }
- in.close();
-
- // make sure the read edits match the written
- for(int i=0; i< edits.size(); i++){
- WALEdit expected = edits.get(i);
- WALEdit found = read.get(i);
- for(int j=0; j< expected.getCells().size(); j++){
- Cell fkv = found.getCells().get(j);
- Cell ekv = expected.getCells().get(j);
- assertEquals("KV mismatch for edit! Expected: "+expected+", but found: "+found, ekv, fkv);
- }
- }
- }
-}
http://git-wip-us.apache.org/repos/asf/phoenix/blob/b1751c4d/phoenix-core/src/it/java/org/apache/phoenix/end2end/IndexScrutinyToolIT.java
----------------------------------------------------------------------
diff --git a/phoenix-core/src/it/java/org/apache/phoenix/end2end/IndexScrutinyToolIT.java b/phoenix-core/src/it/java/org/apache/phoenix/end2end/IndexScrutinyToolIT.java
index 5068610..f868cef 100644
--- a/phoenix-core/src/it/java/org/apache/phoenix/end2end/IndexScrutinyToolIT.java
+++ b/phoenix-core/src/it/java/org/apache/phoenix/end2end/IndexScrutinyToolIT.java
@@ -58,6 +58,7 @@ import org.junit.After;
import org.junit.Before;
import org.junit.BeforeClass;
import org.junit.Test;
+import org.junit.experimental.categories.Category;
import com.google.common.collect.Lists;
import com.google.common.collect.Maps;
@@ -65,6 +66,7 @@ import com.google.common.collect.Maps;
/**
* Tests for the {@link IndexScrutinyTool}
*/
+@Category(NeedsOwnMiniClusterTest.class)
public class IndexScrutinyToolIT extends BaseTest {
private static final String DATA_TABLE_DDL =
http://git-wip-us.apache.org/repos/asf/phoenix/blob/b1751c4d/phoenix-core/src/it/java/org/apache/phoenix/end2end/SystemTablePermissionsIT.java
----------------------------------------------------------------------
diff --git a/phoenix-core/src/it/java/org/apache/phoenix/end2end/SystemTablePermissionsIT.java b/phoenix-core/src/it/java/org/apache/phoenix/end2end/SystemTablePermissionsIT.java
index 9f213c8..166b135 100644
--- a/phoenix-core/src/it/java/org/apache/phoenix/end2end/SystemTablePermissionsIT.java
+++ b/phoenix-core/src/it/java/org/apache/phoenix/end2end/SystemTablePermissionsIT.java
@@ -45,10 +45,12 @@ import org.apache.phoenix.query.QueryServices;
import org.junit.After;
import org.junit.BeforeClass;
import org.junit.Test;
+import org.junit.experimental.categories.Category;
/**
* Test that verifies a user can read Phoenix tables with a minimal set of permissions.
*/
+@Category(NeedsOwnMiniClusterTest.class)
public class SystemTablePermissionsIT {
private static String SUPERUSER;
http://git-wip-us.apache.org/repos/asf/phoenix/blob/b1751c4d/phoenix-core/src/test/java/org/apache/hadoop/hbase/regionserver/wal/ReadWriteKeyValuesWithCodecTest.java
----------------------------------------------------------------------
diff --git a/phoenix-core/src/test/java/org/apache/hadoop/hbase/regionserver/wal/ReadWriteKeyValuesWithCodecTest.java b/phoenix-core/src/test/java/org/apache/hadoop/hbase/regionserver/wal/ReadWriteKeyValuesWithCodecTest.java
new file mode 100644
index 0000000..469dd21
--- /dev/null
+++ b/phoenix-core/src/test/java/org/apache/hadoop/hbase/regionserver/wal/ReadWriteKeyValuesWithCodecTest.java
@@ -0,0 +1,186 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements. See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership. The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License. You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.hadoop.hbase.regionserver.wal;
+
+import static org.junit.Assert.assertEquals;
+
+import java.io.IOException;
+import java.util.ArrayList;
+import java.util.List;
+
+import org.apache.hadoop.conf.Configuration;
+import org.apache.hadoop.fs.FSDataInputStream;
+import org.apache.hadoop.fs.FSDataOutputStream;
+import org.apache.hadoop.fs.FileSystem;
+import org.apache.hadoop.fs.Path;
+import org.apache.hadoop.hbase.Cell;
+import org.apache.hadoop.hbase.HBaseTestingUtility;
+import org.apache.hadoop.hbase.KeyValue;
+import org.apache.hadoop.hbase.KeyValueUtil;
+import org.apache.hadoop.hbase.client.Delete;
+import org.apache.hadoop.hbase.client.Mutation;
+import org.apache.hadoop.hbase.client.Put;
+import org.apache.hadoop.hbase.codec.Codec;
+import org.apache.hadoop.hbase.io.util.LRUDictionary;
+import org.apache.hadoop.hbase.util.Bytes;
+import org.apache.phoenix.end2end.NeedsOwnMiniClusterTest;
+import org.apache.phoenix.hbase.index.IndexTestingUtils;
+import org.apache.phoenix.hbase.index.wal.IndexedKeyValue;
+import org.junit.BeforeClass;
+import org.junit.Test;
+import org.junit.experimental.categories.Category;
+
+/**
+ * Simple test to read/write simple files via our custom {@link WALCellCodec} to ensure properly
+ * encoding/decoding without going through a cluster.
+ */
+public class ReadWriteKeyValuesWithCodecTest {
+
+ private static final HBaseTestingUtility UTIL = new HBaseTestingUtility();
+ private static final byte[] ROW = Bytes.toBytes("row");
+ private static final byte[] FAMILY = Bytes.toBytes("family");
+
+ @BeforeClass
+ public static void setupCodec() {
+ Configuration conf = UTIL.getConfiguration();
+ IndexTestingUtils.setupConfig(conf);
+ conf.set(WALCellCodec.WAL_CELL_CODEC_CLASS_KEY, IndexedWALEditCodec.class.getName());
+ }
+
+ @Test
+ public void testWithoutCompression() throws Exception {
+ // get the FS ready to read/write the edits
+ Path testDir = UTIL.getDataTestDir("TestReadWriteCustomEdits_withoutCompression");
+ Path testFile = new Path(testDir, "testfile");
+ FileSystem fs = UTIL.getTestFileSystem();
+
+ List<WALEdit> edits = getEdits();
+ writeReadAndVerify(null, fs, edits, testFile);
+ }
+
+ @Test
+ public void testWithCompression() throws Exception {
+ // get the FS ready to read/write the edit
+ Path testDir = UTIL.getDataTestDir("TestReadWriteCustomEdits_withCompression");
+ Path testFile = new Path(testDir, "testfile");
+ FileSystem fs = UTIL.getTestFileSystem();
+
+ List<WALEdit> edits = getEdits();
+ CompressionContext compression = new CompressionContext(LRUDictionary.class, false, false);
+ writeReadAndVerify(compression, fs, edits, testFile);
+ }
+
+ /**
+ * @return a bunch of {@link WALEdit}s that test a range of serialization possibilities.
+ */
+ private List<WALEdit> getEdits() {
+ // Build up a couple of edits
+ List<WALEdit> edits = new ArrayList<WALEdit>();
+ Put p = new Put(ROW);
+ p.add(FAMILY, null, Bytes.toBytes("v1"));
+
+ WALEdit withPut = new WALEdit();
+ addMutation(withPut, p, FAMILY);
+ edits.add(withPut);
+
+ Delete d = new Delete(ROW);
+ d.deleteColumn(FAMILY, null);
+ WALEdit withDelete = new WALEdit();
+ addMutation(withDelete, d, FAMILY);
+ edits.add(withDelete);
+
+ WALEdit withPutsAndDeletes = new WALEdit();
+ addMutation(withPutsAndDeletes, d, FAMILY);
+ addMutation(withPutsAndDeletes, p, FAMILY);
+ edits.add(withPutsAndDeletes);
+
+ WALEdit justIndexUpdates = new WALEdit();
+ byte[] table = Bytes.toBytes("targetTable");
+ IndexedKeyValue ikv = new IndexedKeyValue(table, p);
+ justIndexUpdates.add(ikv);
+ edits.add(justIndexUpdates);
+
+ WALEdit mixed = new WALEdit();
+ addMutation(mixed, d, FAMILY);
+ mixed.add(ikv);
+ addMutation(mixed, p, FAMILY);
+ edits.add(mixed);
+
+ return edits;
+ }
+
+ /**
+ * Add all the {@link KeyValue}s in the {@link Mutation}, for the pass family, to the given
+ * {@link WALEdit}.
+ */
+ private void addMutation(WALEdit edit, Mutation m, byte[] family) {
+ List<Cell> kvs = m.getFamilyCellMap().get(FAMILY);
+ for (Cell kv : kvs) {
+ edit.add(KeyValueUtil.ensureKeyValue(kv));
+ }
+ }
+
+
+ private void writeWALEdit(WALCellCodec codec, List<Cell> kvs, FSDataOutputStream out) throws IOException {
+ out.writeInt(kvs.size());
+ Codec.Encoder cellEncoder = codec.getEncoder(out);
+ // write each cell in the edit through the codec's encoder
+ for (Cell kv : kvs) {
+ cellEncoder.write(kv);
+ }
+ }
+
+ /**
+ * Write the edits to the specified path on the {@link FileSystem} using the given codec and then
+ * read them back in and ensure that we read the same thing we wrote.
+ */
+ private void writeReadAndVerify(final CompressionContext compressionContext, FileSystem fs, List<WALEdit> edits,
+ Path testFile) throws IOException {
+
+ WALCellCodec codec = WALCellCodec.create(UTIL.getConfiguration(), compressionContext);
+ // write the edits out
+ FSDataOutputStream out = fs.create(testFile);
+ for (WALEdit edit : edits) {
+ writeWALEdit(codec, edit.getCells(), out);
+ }
+ out.close();
+
+ // read in the edits
+ FSDataInputStream in = fs.open(testFile);
+ List<WALEdit> read = new ArrayList<WALEdit>();
+ for (int i = 0; i < edits.size(); i++) {
+ WALEdit edit = new WALEdit();
+ int numEdits = in.readInt();
+ edit.readFromCells(codec.getDecoder(in), numEdits);
+ read.add(edit);
+ }
+ in.close();
+
+ // make sure the read edits match the written
+ for (int i = 0; i < edits.size(); i++) {
+ WALEdit expected = edits.get(i);
+ WALEdit found = read.get(i);
+ for (int j = 0; j < expected.getCells().size(); j++) {
+ Cell fkv = found.getCells().get(j);
+ Cell ekv = expected.getCells().get(j);
+ assertEquals("KV mismatch for edit! Expected: " + expected + ", but found: " + found, ekv, fkv);
+ }
+ }
+ }
+}
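Aside on the relocation above: as this project wires its build, failsafe picks up *IT classes under src/it while surefire runs *Test classes under src/test, so the rename lets this codec check run as a plain unit test, which fits a test that never starts a cluster. The write/read cycle it verifies is count-prefixed framing; below is a minimal standalone sketch of the same pattern, with illustrative names and a byte-array stand-in for the real cell codec, not Phoenix code:

import java.io.ByteArrayInputStream;
import java.io.ByteArrayOutputStream;
import java.io.DataInputStream;
import java.io.DataOutputStream;
import java.io.IOException;
import java.util.Arrays;

public class CountPrefixedFramingSketch {

    // Write side, mirroring writeWALEdit(): a record count, then each record.
    static void write(DataOutputStream out, byte[][] records) throws IOException {
        out.writeInt(records.length);
        for (byte[] r : records) {
            out.writeInt(r.length); // stand-in for the real cell encoder
            out.write(r);
        }
    }

    // Read side: the leading count tells the decoder how many records follow.
    static byte[][] read(DataInputStream in) throws IOException {
        byte[][] records = new byte[in.readInt()][];
        for (int i = 0; i < records.length; i++) {
            records[i] = new byte[in.readInt()];
            in.readFully(records[i]); // readFully: a plain read() may be short
        }
        return records;
    }

    public static void main(String[] args) throws IOException {
        byte[][] original = { "v1".getBytes(), "v2".getBytes() };
        ByteArrayOutputStream buf = new ByteArrayOutputStream();
        write(new DataOutputStream(buf), original);
        byte[][] roundTripped = read(new DataInputStream(
                new ByteArrayInputStream(buf.toByteArray())));
        assert Arrays.deepEquals(original, roundTripped);
    }
}

The readFully call is the one subtle choice: a plain read() may return fewer bytes than requested, which would corrupt every frame after it.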
http://git-wip-us.apache.org/repos/asf/phoenix/blob/b1751c4d/phoenix-hive/src/it/java/org/apache/phoenix/hive/BaseHivePhoenixStoreIT.java
----------------------------------------------------------------------
diff --git a/phoenix-hive/src/it/java/org/apache/phoenix/hive/BaseHivePhoenixStoreIT.java b/phoenix-hive/src/it/java/org/apache/phoenix/hive/BaseHivePhoenixStoreIT.java
index afb06ae..c705e2d 100644
--- a/phoenix-hive/src/it/java/org/apache/phoenix/hive/BaseHivePhoenixStoreIT.java
+++ b/phoenix-hive/src/it/java/org/apache/phoenix/hive/BaseHivePhoenixStoreIT.java
@@ -25,12 +25,14 @@ import org.apache.hadoop.fs.Path;
import org.apache.hadoop.hbase.HBaseTestingUtility;
import org.apache.hadoop.hbase.MiniHBaseCluster;
import org.apache.hadoop.hdfs.MiniDFSCluster;
+import org.apache.phoenix.end2end.NeedsOwnMiniClusterTest;
import org.apache.phoenix.jdbc.PhoenixDriver;
import org.apache.phoenix.query.QueryServices;
import org.apache.phoenix.util.PhoenixRuntime;
import org.apache.phoenix.util.PropertiesUtil;
import org.apache.phoenix.util.TestUtil;
import org.junit.AfterClass;
+import org.junit.experimental.categories.Category;
import java.io.File;
import java.io.IOException;
@@ -44,6 +46,7 @@ import static org.junit.Assert.fail;
/**
* Base class for all Hive Phoenix integration tests that may be run with Tez or MR mini cluster
*/
+@Category(NeedsOwnMiniClusterTest.class)
public class BaseHivePhoenixStoreIT {
private static final Log LOG = LogFactory.getLog(BaseHivePhoenixStoreIT.class);
http://git-wip-us.apache.org/repos/asf/phoenix/blob/b1751c4d/phoenix-hive/src/it/java/org/apache/phoenix/hive/HivePhoenixStoreIT.java
----------------------------------------------------------------------
diff --git a/phoenix-hive/src/it/java/org/apache/phoenix/hive/HivePhoenixStoreIT.java b/phoenix-hive/src/it/java/org/apache/phoenix/hive/HivePhoenixStoreIT.java
index cf12a80..1828818 100644
--- a/phoenix-hive/src/it/java/org/apache/phoenix/hive/HivePhoenixStoreIT.java
+++ b/phoenix-hive/src/it/java/org/apache/phoenix/hive/HivePhoenixStoreIT.java
@@ -18,9 +18,11 @@
package org.apache.phoenix.hive;
import org.apache.hadoop.fs.Path;
+import org.apache.phoenix.end2end.NeedsOwnMiniClusterTest;
import org.apache.phoenix.util.StringUtil;
import org.junit.Ignore;
import org.junit.Test;
+import org.junit.experimental.categories.Category;
import java.sql.PreparedStatement;
import java.sql.ResultSet;
@@ -31,6 +33,7 @@ import static org.junit.Assert.assertTrue;
* Test methods only. All supporting methods should be placed in BaseHivePhoenixStoreIT
*/
+@Category(NeedsOwnMiniClusterTest.class)
@Ignore("This class contains only test methods and should not be executed directly")
public class HivePhoenixStoreIT extends BaseHivePhoenixStoreIT {
[3/4] phoenix git commit: PHOENIX-4191 Categorize uncategorized integration tests
Posted by el...@apache.org.
PHOENIX-4191 Categorize uncategorized integration tests
Uncategorized tests result in Maven not running them.
Project: http://git-wip-us.apache.org/repos/asf/phoenix/repo
Commit: http://git-wip-us.apache.org/repos/asf/phoenix/commit/ca8bd4a9
Tree: http://git-wip-us.apache.org/repos/asf/phoenix/tree/ca8bd4a9
Diff: http://git-wip-us.apache.org/repos/asf/phoenix/diff/ca8bd4a9
Branch: refs/heads/4.x-HBase-1.1
Commit: ca8bd4a9043b36868cba5ea4acd9ceaf3985a3c8
Parents: acb9e13
Author: Josh Elser <el...@apache.org>
Authored: Tue Sep 12 18:26:36 2017 -0400
Committer: Josh Elser <el...@apache.org>
Committed: Tue Sep 12 18:54:29 2017 -0400
----------------------------------------------------------------------
.../wal/ReadWriteKeyValuesWithCodecIT.java | 184 ------------------
.../phoenix/end2end/IndexScrutinyToolIT.java | 2 +
.../end2end/SystemTablePermissionsIT.java | 2 +
.../wal/ReadWriteKeyValuesWithCodecTest.java | 186 +++++++++++++++++++
.../phoenix/hive/BaseHivePhoenixStoreIT.java | 3 +
.../apache/phoenix/hive/HivePhoenixStoreIT.java | 3 +
6 files changed, 196 insertions(+), 184 deletions(-)
----------------------------------------------------------------------
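For context on the note above about uncategorized tests: as the commit message implies, the failsafe executions here select integration tests by JUnit category, so a class with no @Category matches no execution group and is silently skipped. A hedged sketch of the mechanism follows; the names are illustrative (the marker interface mirrors the role of Phoenix's NeedsOwnMiniClusterTest), not taken from the Phoenix source:

import org.junit.Test;
import org.junit.experimental.categories.Category;

// A JUnit 4 category is nothing more than an empty marker interface.
interface ExampleMiniClusterCategory {}

// @Category attaches the marker to the class; build tooling (here,
// maven-failsafe-plugin's <groups> setting) filters on it at execution time.
@Category(ExampleMiniClusterCategory.class)
public class ExampleCategorizedIT {

    @Test
    public void runsOnlyWhenItsCategoryIsSelected() {
        // An execution configured with this category in <groups> runs the
        // class; a class with no @Category matches no execution and is
        // silently skipped, which is the gap PHOENIX-4191 closes.
    }
}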
http://git-wip-us.apache.org/repos/asf/phoenix/blob/ca8bd4a9/phoenix-core/src/it/java/org/apache/hadoop/hbase/regionserver/wal/ReadWriteKeyValuesWithCodecIT.java
----------------------------------------------------------------------
diff --git a/phoenix-core/src/it/java/org/apache/hadoop/hbase/regionserver/wal/ReadWriteKeyValuesWithCodecIT.java b/phoenix-core/src/it/java/org/apache/hadoop/hbase/regionserver/wal/ReadWriteKeyValuesWithCodecIT.java
deleted file mode 100644
index 39eb871..0000000
--- a/phoenix-core/src/it/java/org/apache/hadoop/hbase/regionserver/wal/ReadWriteKeyValuesWithCodecIT.java
+++ /dev/null
@@ -1,184 +0,0 @@
-/*
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements. See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership. The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License. You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-package org.apache.hadoop.hbase.regionserver.wal;
-
-import static org.junit.Assert.assertEquals;
-
-import java.io.IOException;
-import java.util.ArrayList;
-import java.util.List;
-
-import org.apache.hadoop.conf.Configuration;
-import org.apache.hadoop.fs.FSDataInputStream;
-import org.apache.hadoop.fs.FSDataOutputStream;
-import org.apache.hadoop.fs.FileSystem;
-import org.apache.hadoop.fs.Path;
-import org.apache.hadoop.hbase.Cell;
-import org.apache.hadoop.hbase.HBaseTestingUtility;
-import org.apache.hadoop.hbase.KeyValue;
-import org.apache.hadoop.hbase.KeyValueUtil;
-import org.apache.hadoop.hbase.client.Delete;
-import org.apache.hadoop.hbase.client.Mutation;
-import org.apache.hadoop.hbase.client.Put;
-import org.apache.hadoop.hbase.codec.Codec;
-import org.apache.hadoop.hbase.io.util.LRUDictionary;
-import org.apache.hadoop.hbase.util.Bytes;
-import org.apache.phoenix.hbase.index.IndexTestingUtils;
-import org.apache.phoenix.hbase.index.wal.IndexedKeyValue;
-import org.junit.BeforeClass;
-import org.junit.Test;
-
-/**
- * Simple test to read/write simple files via our custom {@link WALCellCodec} to ensure properly
- * encoding/decoding without going through a cluster.
- */
-public class ReadWriteKeyValuesWithCodecIT {
-
- private static final HBaseTestingUtility UTIL = new HBaseTestingUtility();
- private static final byte[] ROW = Bytes.toBytes("row");
- private static final byte[] FAMILY = Bytes.toBytes("family");
-
- @BeforeClass
- public static void setupCodec() {
- Configuration conf = UTIL.getConfiguration();
- IndexTestingUtils.setupConfig(conf);
- conf.set(WALCellCodec.WAL_CELL_CODEC_CLASS_KEY, IndexedWALEditCodec.class.getName());
- }
-
- @Test
- public void testWithoutCompression() throws Exception {
- // get the FS ready to read/write the edits
- Path testDir = UTIL.getDataTestDir("TestReadWriteCustomEdits_withoutCompression");
- Path testFile = new Path(testDir, "testfile");
- FileSystem fs = UTIL.getTestFileSystem();
-
- List<WALEdit> edits = getEdits();
- writeReadAndVerify(null, fs, edits, testFile);
- }
-
- @Test
- public void testWithCompression() throws Exception {
- // get the FS ready to read/write the edit
- Path testDir = UTIL.getDataTestDir("TestReadWriteCustomEdits_withCompression");
- Path testFile = new Path(testDir, "testfile");
- FileSystem fs = UTIL.getTestFileSystem();
-
- List<WALEdit> edits = getEdits();
- CompressionContext compression = new CompressionContext(LRUDictionary.class, false, false);
- writeReadAndVerify(compression, fs, edits, testFile);
- }
-
- /**
- * @return a bunch of {@link WALEdit}s that test a range of serialization possibilities.
- */
- private List<WALEdit> getEdits() {
- // Build up a couple of edits
- List<WALEdit> edits = new ArrayList<WALEdit>();
- Put p = new Put(ROW);
- p.add(FAMILY, null, Bytes.toBytes("v1"));
-
- WALEdit withPut = new WALEdit();
- addMutation(withPut, p, FAMILY);
- edits.add(withPut);
-
- Delete d = new Delete(ROW);
- d.deleteColumn(FAMILY, null);
- WALEdit withDelete = new WALEdit();
- addMutation(withDelete, d, FAMILY);
- edits.add(withDelete);
-
- WALEdit withPutsAndDeletes = new WALEdit();
- addMutation(withPutsAndDeletes, d, FAMILY);
- addMutation(withPutsAndDeletes, p, FAMILY);
- edits.add(withPutsAndDeletes);
-
- WALEdit justIndexUpdates = new WALEdit();
- byte[] table = Bytes.toBytes("targetTable");
- IndexedKeyValue ikv = new IndexedKeyValue(table, p);
- justIndexUpdates.add(ikv);
- edits.add(justIndexUpdates);
-
- WALEdit mixed = new WALEdit();
- addMutation(mixed, d, FAMILY);
- mixed.add(ikv);
- addMutation(mixed, p, FAMILY);
- edits.add(mixed);
-
- return edits;
- }
-
- /**
- * Add all the {@link KeyValue}s in the {@link Mutation}, for the pass family, to the given
- * {@link WALEdit}.
- */
- private void addMutation(WALEdit edit, Mutation m, byte[] family) {
- List<Cell> kvs = m.getFamilyCellMap().get(FAMILY);
- for (Cell kv : kvs) {
- edit.add(KeyValueUtil.ensureKeyValue(kv));
- }
- }
-
-
- private void writeWALEdit(WALCellCodec codec, List<Cell> kvs, FSDataOutputStream out) throws IOException {
- out.writeInt(kvs.size());
- Codec.Encoder cellEncoder = codec.getEncoder(out);
- // We interleave the two lists for code simplicity
- for (Cell kv : kvs) {
- cellEncoder.write(kv);
- }
- }
-
- /**
- * Write the edits to the specified path on the {@link FileSystem} using the given codec and then
- * read them back in and ensure that we read the same thing we wrote.
- */
- private void writeReadAndVerify(final CompressionContext compressionContext, FileSystem fs, List<WALEdit> edits,
- Path testFile) throws IOException {
-
- WALCellCodec codec = WALCellCodec.create(UTIL.getConfiguration(), compressionContext);
- // write the edits out
- FSDataOutputStream out = fs.create(testFile);
- for (WALEdit edit : edits) {
- writeWALEdit(codec, edit.getCells(), out);
- }
- out.close();
-
- // read in the edits
- FSDataInputStream in = fs.open(testFile);
- List<WALEdit> read = new ArrayList<WALEdit>();
- for (int i = 0; i < edits.size(); i++) {
- WALEdit edit = new WALEdit();
- int numEdits = in.readInt();
- edit.readFromCells(codec.getDecoder(in), numEdits);
- read.add(edit);
- }
- in.close();
-
- // make sure the read edits match the written
- for(int i=0; i< edits.size(); i++){
- WALEdit expected = edits.get(i);
- WALEdit found = read.get(i);
- for(int j=0; j< expected.getCells().size(); j++){
- Cell fkv = found.getCells().get(j);
- Cell ekv = expected.getCells().get(j);
- assertEquals("KV mismatch for edit! Expected: "+expected+", but found: "+found, ekv, fkv);
- }
- }
- }
-}
http://git-wip-us.apache.org/repos/asf/phoenix/blob/ca8bd4a9/phoenix-core/src/it/java/org/apache/phoenix/end2end/IndexScrutinyToolIT.java
----------------------------------------------------------------------
diff --git a/phoenix-core/src/it/java/org/apache/phoenix/end2end/IndexScrutinyToolIT.java b/phoenix-core/src/it/java/org/apache/phoenix/end2end/IndexScrutinyToolIT.java
index 5068610..f868cef 100644
--- a/phoenix-core/src/it/java/org/apache/phoenix/end2end/IndexScrutinyToolIT.java
+++ b/phoenix-core/src/it/java/org/apache/phoenix/end2end/IndexScrutinyToolIT.java
@@ -58,6 +58,7 @@ import org.junit.After;
import org.junit.Before;
import org.junit.BeforeClass;
import org.junit.Test;
+import org.junit.experimental.categories.Category;
import com.google.common.collect.Lists;
import com.google.common.collect.Maps;
@@ -65,6 +66,7 @@ import com.google.common.collect.Maps;
/**
* Tests for the {@link IndexScrutinyTool}
*/
+@Category(NeedsOwnMiniClusterTest.class)
public class IndexScrutinyToolIT extends BaseTest {
private static final String DATA_TABLE_DDL =
http://git-wip-us.apache.org/repos/asf/phoenix/blob/ca8bd4a9/phoenix-core/src/it/java/org/apache/phoenix/end2end/SystemTablePermissionsIT.java
----------------------------------------------------------------------
diff --git a/phoenix-core/src/it/java/org/apache/phoenix/end2end/SystemTablePermissionsIT.java b/phoenix-core/src/it/java/org/apache/phoenix/end2end/SystemTablePermissionsIT.java
index 9f213c8..166b135 100644
--- a/phoenix-core/src/it/java/org/apache/phoenix/end2end/SystemTablePermissionsIT.java
+++ b/phoenix-core/src/it/java/org/apache/phoenix/end2end/SystemTablePermissionsIT.java
@@ -45,10 +45,12 @@ import org.apache.phoenix.query.QueryServices;
import org.junit.After;
import org.junit.BeforeClass;
import org.junit.Test;
+import org.junit.experimental.categories.Category;
/**
* Test that verifies a user can read Phoenix tables with a minimal set of permissions.
*/
+@Category(NeedsOwnMiniClusterTest.class)
public class SystemTablePermissionsIT {
private static String SUPERUSER;
http://git-wip-us.apache.org/repos/asf/phoenix/blob/ca8bd4a9/phoenix-core/src/test/java/org/apache/hadoop/hbase/regionserver/wal/ReadWriteKeyValuesWithCodecTest.java
----------------------------------------------------------------------
diff --git a/phoenix-core/src/test/java/org/apache/hadoop/hbase/regionserver/wal/ReadWriteKeyValuesWithCodecTest.java b/phoenix-core/src/test/java/org/apache/hadoop/hbase/regionserver/wal/ReadWriteKeyValuesWithCodecTest.java
new file mode 100644
index 0000000..469dd21
--- /dev/null
+++ b/phoenix-core/src/test/java/org/apache/hadoop/hbase/regionserver/wal/ReadWriteKeyValuesWithCodecTest.java
@@ -0,0 +1,186 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements. See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership. The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License. You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.hadoop.hbase.regionserver.wal;
+
+import static org.junit.Assert.assertEquals;
+
+import java.io.IOException;
+import java.util.ArrayList;
+import java.util.List;
+
+import org.apache.hadoop.conf.Configuration;
+import org.apache.hadoop.fs.FSDataInputStream;
+import org.apache.hadoop.fs.FSDataOutputStream;
+import org.apache.hadoop.fs.FileSystem;
+import org.apache.hadoop.fs.Path;
+import org.apache.hadoop.hbase.Cell;
+import org.apache.hadoop.hbase.HBaseTestingUtility;
+import org.apache.hadoop.hbase.KeyValue;
+import org.apache.hadoop.hbase.KeyValueUtil;
+import org.apache.hadoop.hbase.client.Delete;
+import org.apache.hadoop.hbase.client.Mutation;
+import org.apache.hadoop.hbase.client.Put;
+import org.apache.hadoop.hbase.codec.Codec;
+import org.apache.hadoop.hbase.io.util.LRUDictionary;
+import org.apache.hadoop.hbase.util.Bytes;
+import org.apache.phoenix.end2end.NeedsOwnMiniClusterTest;
+import org.apache.phoenix.hbase.index.IndexTestingUtils;
+import org.apache.phoenix.hbase.index.wal.IndexedKeyValue;
+import org.junit.BeforeClass;
+import org.junit.Test;
+import org.junit.experimental.categories.Category;
+
+/**
+ * Simple test that reads and writes files via our custom {@link WALCellCodec} to ensure
+ * proper encoding/decoding without going through a cluster.
+ */
+public class ReadWriteKeyValuesWithCodecTest {
+
+ private static final HBaseTestingUtility UTIL = new HBaseTestingUtility();
+ private static final byte[] ROW = Bytes.toBytes("row");
+ private static final byte[] FAMILY = Bytes.toBytes("family");
+
+ @BeforeClass
+ public static void setupCodec() {
+ Configuration conf = UTIL.getConfiguration();
+ IndexTestingUtils.setupConfig(conf);
+ conf.set(WALCellCodec.WAL_CELL_CODEC_CLASS_KEY, IndexedWALEditCodec.class.getName());
+ }
+
+ @Test
+ public void testWithoutCompression() throws Exception {
+ // get the FS ready to read/write the edits
+ Path testDir = UTIL.getDataTestDir("TestReadWriteCustomEdits_withoutCompression");
+ Path testFile = new Path(testDir, "testfile");
+ FileSystem fs = UTIL.getTestFileSystem();
+
+ List<WALEdit> edits = getEdits();
+ writeReadAndVerify(null, fs, edits, testFile);
+ }
+
+ @Test
+ public void testWithCompression() throws Exception {
+ // get the FS ready to read/write the edits
+ Path testDir = UTIL.getDataTestDir("TestReadWriteCustomEdits_withCompression");
+ Path testFile = new Path(testDir, "testfile");
+ FileSystem fs = UTIL.getTestFileSystem();
+
+ List<WALEdit> edits = getEdits();
+ CompressionContext compression = new CompressionContext(LRUDictionary.class, false, false);
+ writeReadAndVerify(compression, fs, edits, testFile);
+ }
+
+ /**
+ * @return a bunch of {@link WALEdit}s that test a range of serialization possibilities.
+ */
+ private List<WALEdit> getEdits() {
+ // Build up a couple of edits
+ List<WALEdit> edits = new ArrayList<WALEdit>();
+ Put p = new Put(ROW);
+ p.add(FAMILY, null, Bytes.toBytes("v1"));
+
+ WALEdit withPut = new WALEdit();
+ addMutation(withPut, p, FAMILY);
+ edits.add(withPut);
+
+ Delete d = new Delete(ROW);
+ d.deleteColumn(FAMILY, null);
+ WALEdit withDelete = new WALEdit();
+ addMutation(withDelete, d, FAMILY);
+ edits.add(withDelete);
+
+ WALEdit withPutsAndDeletes = new WALEdit();
+ addMutation(withPutsAndDeletes, d, FAMILY);
+ addMutation(withPutsAndDeletes, p, FAMILY);
+ edits.add(withPutsAndDeletes);
+
+ WALEdit justIndexUpdates = new WALEdit();
+ byte[] table = Bytes.toBytes("targetTable");
+ IndexedKeyValue ikv = new IndexedKeyValue(table, p);
+ justIndexUpdates.add(ikv);
+ edits.add(justIndexUpdates);
+
+ WALEdit mixed = new WALEdit();
+ addMutation(mixed, d, FAMILY);
+ mixed.add(ikv);
+ addMutation(mixed, p, FAMILY);
+ edits.add(mixed);
+
+ return edits;
+ }
+
+ /**
+ * Add all the {@link KeyValue}s in the {@link Mutation}, for the passed family, to the given
+ * {@link WALEdit}.
+ */
+ private void addMutation(WALEdit edit, Mutation m, byte[] family) {
+ List<Cell> kvs = m.getFamilyCellMap().get(family);
+ for (Cell kv : kvs) {
+ edit.add(KeyValueUtil.ensureKeyValue(kv));
+ }
+ }
+
+
+ private void writeWALEdit(WALCellCodec codec, List<Cell> kvs, FSDataOutputStream out) throws IOException {
+ out.writeInt(kvs.size());
+ Codec.Encoder cellEncoder = codec.getEncoder(out);
+ // write each cell in the edit through the codec's encoder
+ for (Cell kv : kvs) {
+ cellEncoder.write(kv);
+ }
+ }
+
+ /**
+ * Write the edits to the specified path on the {@link FileSystem} using the given codec and then
+ * read them back in and ensure that we read the same thing we wrote.
+ */
+ private void writeReadAndVerify(final CompressionContext compressionContext, FileSystem fs, List<WALEdit> edits,
+ Path testFile) throws IOException {
+
+ WALCellCodec codec = WALCellCodec.create(UTIL.getConfiguration(), compressionContext);
+ // write the edits out
+ FSDataOutputStream out = fs.create(testFile);
+ for (WALEdit edit : edits) {
+ writeWALEdit(codec, edit.getCells(), out);
+ }
+ out.close();
+
+ // read in the edits
+ FSDataInputStream in = fs.open(testFile);
+ List<WALEdit> read = new ArrayList<WALEdit>();
+ for (int i = 0; i < edits.size(); i++) {
+ WALEdit edit = new WALEdit();
+ int numEdits = in.readInt();
+ edit.readFromCells(codec.getDecoder(in), numEdits);
+ read.add(edit);
+ }
+ in.close();
+
+ // make sure the read edits match the written
+ for (int i = 0; i < edits.size(); i++) {
+ WALEdit expected = edits.get(i);
+ WALEdit found = read.get(i);
+ for (int j = 0; j < expected.getCells().size(); j++) {
+ Cell fkv = found.getCells().get(j);
+ Cell ekv = expected.getCells().get(j);
+ assertEquals("KV mismatch for edit! Expected: " + expected + ", but found: " + found, ekv, fkv);
+ }
+ }
+ }
+}
http://git-wip-us.apache.org/repos/asf/phoenix/blob/ca8bd4a9/phoenix-hive/src/it/java/org/apache/phoenix/hive/BaseHivePhoenixStoreIT.java
----------------------------------------------------------------------
diff --git a/phoenix-hive/src/it/java/org/apache/phoenix/hive/BaseHivePhoenixStoreIT.java b/phoenix-hive/src/it/java/org/apache/phoenix/hive/BaseHivePhoenixStoreIT.java
index afb06ae..c705e2d 100644
--- a/phoenix-hive/src/it/java/org/apache/phoenix/hive/BaseHivePhoenixStoreIT.java
+++ b/phoenix-hive/src/it/java/org/apache/phoenix/hive/BaseHivePhoenixStoreIT.java
@@ -25,12 +25,14 @@ import org.apache.hadoop.fs.Path;
import org.apache.hadoop.hbase.HBaseTestingUtility;
import org.apache.hadoop.hbase.MiniHBaseCluster;
import org.apache.hadoop.hdfs.MiniDFSCluster;
+import org.apache.phoenix.end2end.NeedsOwnMiniClusterTest;
import org.apache.phoenix.jdbc.PhoenixDriver;
import org.apache.phoenix.query.QueryServices;
import org.apache.phoenix.util.PhoenixRuntime;
import org.apache.phoenix.util.PropertiesUtil;
import org.apache.phoenix.util.TestUtil;
import org.junit.AfterClass;
+import org.junit.experimental.categories.Category;
import java.io.File;
import java.io.IOException;
@@ -44,6 +46,7 @@ import static org.junit.Assert.fail;
/**
* Base class for all Hive Phoenix integration tests that may be run with Tez or MR mini cluster
*/
+@Category(NeedsOwnMiniClusterTest.class)
public class BaseHivePhoenixStoreIT {
private static final Log LOG = LogFactory.getLog(BaseHivePhoenixStoreIT.class);
http://git-wip-us.apache.org/repos/asf/phoenix/blob/ca8bd4a9/phoenix-hive/src/it/java/org/apache/phoenix/hive/HivePhoenixStoreIT.java
----------------------------------------------------------------------
diff --git a/phoenix-hive/src/it/java/org/apache/phoenix/hive/HivePhoenixStoreIT.java b/phoenix-hive/src/it/java/org/apache/phoenix/hive/HivePhoenixStoreIT.java
index cf12a80..1828818 100644
--- a/phoenix-hive/src/it/java/org/apache/phoenix/hive/HivePhoenixStoreIT.java
+++ b/phoenix-hive/src/it/java/org/apache/phoenix/hive/HivePhoenixStoreIT.java
@@ -18,9 +18,11 @@
package org.apache.phoenix.hive;
import org.apache.hadoop.fs.Path;
+import org.apache.phoenix.end2end.NeedsOwnMiniClusterTest;
import org.apache.phoenix.util.StringUtil;
import org.junit.Ignore;
import org.junit.Test;
+import org.junit.experimental.categories.Category;
import java.sql.PreparedStatement;
import java.sql.ResultSet;
@@ -31,6 +33,7 @@ import static org.junit.Assert.assertTrue;
* Test methods only. All supporting methods should be placed in BaseHivePhoenixStoreIT
*/
+@Category(NeedsOwnMiniClusterTest.class)
@Ignore("This class contains only test methods and should not be executed directly")
public class HivePhoenixStoreIT extends BaseHivePhoenixStoreIT {