Posted to dev@rya.apache.org by mi...@apache.org on 2015/12/22 17:49:49 UTC

[25/56] [abbrv] incubator-rya git commit: RYA-7 POM and License Clean-up for Apache Move

http://git-wip-us.apache.org/repos/asf/incubator-rya/blob/80faf06d/partition/common-query/src/test/java/IteratorTest.java
----------------------------------------------------------------------
diff --git a/partition/common-query/src/test/java/IteratorTest.java b/partition/common-query/src/test/java/IteratorTest.java
deleted file mode 100644
index 1b5cf14..0000000
--- a/partition/common-query/src/test/java/IteratorTest.java
+++ /dev/null
@@ -1,554 +0,0 @@
-import static org.junit.Assert.assertEquals;
-import static org.junit.Assert.assertTrue;
-
-import java.io.IOException;
-import java.util.HashMap;
-import java.util.Map;
-import java.util.Map.Entry;
-
-import org.apache.hadoop.io.Text;
-import org.junit.Test;
-
-import ss.cloudbase.core.iterators.CellLevelFilteringIterator;
-import ss.cloudbase.core.iterators.CellLevelRecordIterator;
-import ss.cloudbase.core.iterators.ConversionIterator;
-import ss.cloudbase.core.iterators.GMDenIntersectingIterator;
-import ss.cloudbase.core.iterators.SortedMinIterator;
-import ss.cloudbase.core.iterators.SortedRangeIterator;
-import ss.cloudbase.core.iterators.UniqueIterator;
-import ss.cloudbase.core.iterators.filter.CBConverter;
-import cloudbase.core.client.Connector;
-import cloudbase.core.client.Scanner;
-import cloudbase.core.client.TableNotFoundException;
-import cloudbase.core.data.Key;
-import cloudbase.core.data.PartialKey;
-import cloudbase.core.data.Range;
-import cloudbase.core.data.Value;
-import cloudbase.core.security.Authorizations;
-
-public class IteratorTest {
-	private Connector cellLevelConn;
-	private Connector serializedConn;
-	
-	private static final String TABLE = "partition";
-	private static final Authorizations AUTHS = new Authorizations("ALPHA,BETA,GAMMA".split(","));
-	
-	public IteratorTest() {
-		
-	}
-	
-	protected Connector getCellLevelConnector() {
-		if (cellLevelConn == null) {
-			cellLevelConn = SampleData.initConnector();
-			SampleData.writeDenCellLevel(cellLevelConn, SampleData.sampleData());
-		}
-		return cellLevelConn;
-	}
-	
-	protected Connector getSerializedConnector() {
-		if (serializedConn == null) {
-			serializedConn = SampleData.initConnector();
-			SampleData.writeDenSerialized(serializedConn, SampleData.sampleData());
-			SampleData.writeDenProvenance(serializedConn);
-			SampleData.writeMinIndexes(serializedConn);
-		}
-		return serializedConn;
-	}
-	
-	protected Scanner getProvenanceScanner() {
-		Connector c = getSerializedConnector();
-		try {
-			return c.createScanner("provenance", AUTHS);
-		} catch (TableNotFoundException e) {
-			return null;
-		}
-	}
-	
-	protected Scanner getCellLevelScanner() {
-		Connector c = getCellLevelConnector();
-		try {
-			return c.createScanner(TABLE, AUTHS);
-		} catch (TableNotFoundException e) {
-			return null;
-		}
-	}
-	
-	protected Scanner getSerializedScanner() {
-		Connector c = getSerializedConnector();
-		try {
-			return c.createScanner(TABLE, AUTHS);
-		} catch (TableNotFoundException e) {
-			return null;
-		}
-	}
-	
-	protected Scanner setUpIntersectingIterator(Scanner s, Text[] terms, boolean multiDoc) {
-		try {
-			s.setScanIterators(50, GMDenIntersectingIterator.class.getName(), "ii");
-		} catch (IOException e) {
-			// TODO Auto-generated catch block
-			e.printStackTrace();
-		}
-		s.setScanIteratorOption("ii", GMDenIntersectingIterator.indexFamilyOptionName, "index");
-		s.setScanIteratorOption("ii", GMDenIntersectingIterator.docFamilyOptionName, "event");
-		s.setScanIteratorOption("ii", GMDenIntersectingIterator.OPTION_MULTI_DOC, "" + multiDoc);
-		s.setScanIteratorOption("ii", GMDenIntersectingIterator.columnFamiliesOptionName, GMDenIntersectingIterator.encodeColumns(terms));
-		return s;
-	}
-	
-	protected String checkSerialized(Scanner s) {
-		StringBuilder sb = new StringBuilder();
-		boolean first = true;
-		for (Entry<Key, Value> e: s) {
-			if (!first) {
-				sb.append(",");
-			} else {
-				first = false;
-			}
-			
-			String colq = e.getKey().getColumnQualifier().toString();
-			
-			sb.append(colq);
-		}
-		return sb.toString();
-	}
-	
-	protected String checkCellLevel(Scanner s) {
-		StringBuilder sb = new StringBuilder();
-		boolean first = true;
-		for (Entry<Key, Value> e: s) {
-			String colq = e.getKey().getColumnQualifier().toString();
-			int i = colq.indexOf("\u0000");
-			if (i > -1) {
-				if (!first) {
-					sb.append(",");
-				} else {
-					first = false;
-				}
-				sb.append(colq.substring(0, i));
-				sb.append(".");
-				sb.append(colq.substring(i + 1));
-				sb.append("=");
-				sb.append(e.getValue().toString());
-			}
-		}
-		return sb.toString();
-	}
-	
-	@Test
-	public void testSerializedSingleDuplicate() {
-		Text[] terms = new Text[] {
-			new Text("A"),
-			new Text("A")
-		};
-		
-		String test = "01";
-		Scanner s = setUpIntersectingIterator(getSerializedScanner(), terms, false);
-		s.setRange(new Range());
-		assertTrue(test.equals(checkSerialized(s)));
-	}
-	
-	@Test
-	public void testCellLevelSingleDuplicate() {
-		Text[] terms = new Text[] {
-			new Text("A"),
-			new Text("A")
-		};
-		String test = "01.field0=A,01.field1=B,01.field2=C,01.field3=D,01.field4=E";
-		Scanner s = setUpIntersectingIterator(getCellLevelScanner(), terms, true);
-		s.setRange(new Range());
-		assertTrue(test.equals(checkCellLevel(s)));
-	}
-	
-	@Test
-	public void testSerializedTwoTerms() {
-		Text[] terms = new Text[] {
-			new Text("C"),
-			new Text("D")
-		};
-		// all the evens will come first
-		String test = "02,01,03";
-		Scanner s = setUpIntersectingIterator(getSerializedScanner(), terms, false);
-		s.setRange(new Range());
-		assertTrue(test.equals(checkSerialized(s)));
-	}
-	
-	@Test
-	public void testCellLevelTwoTerms() {
-		Text[] terms = new Text[] {
-			new Text("C"),
-			new Text("D")
-		};
-		
-		String test = "02.field0=B,02.field1=C,02.field2=D,02.field3=E,02.field4=F,"
-			+ "01.field0=A,01.field1=B,01.field2=C,01.field3=D,01.field4=E,"
-			+ "03.field0=C,03.field1=D,03.field2=E,03.field3=F,03.field4=G";
-		Scanner s = setUpIntersectingIterator(getCellLevelScanner(), terms, true);
-		s.setRange(new Range());
-		assertTrue(test.equals(checkCellLevel(s)));
-	}
-	
-	@Test
-	public void testSerializedTwoTermsWithRange() {
-		Text[] terms = new Text[] {
-			new Text("C"),
-			new Text("D")
-		};
-		
-		String test = "02";
-		Scanner s = setUpIntersectingIterator(getSerializedScanner(), terms, false);
-		s.setRange(new Range(new Key(new Text("0")), true, new Key(new Text("1")), false));
-		assertTrue(test.equals(checkSerialized(s)));
-	}
-	
-	@Test
-	public void testCellLevelTwoTermsWithRange() {
-		Text[] terms = new Text[] {
-			new Text("C"),
-			new Text("D")
-		};
-		
-		String test = "02.field0=B,02.field1=C,02.field2=D,02.field3=E,02.field4=F";
-		Scanner s = setUpIntersectingIterator(getCellLevelScanner(), terms, true);
-		s.setRange(new Range(new Key(new Text("0")), true, new Key(new Text("1")), false));
-		assertTrue(test.equals(checkCellLevel(s)));
-	}
-	
-	@Test
-	public void testSerializedSingleRange() {
-		Text[] terms = new Text[] {
-			new Text(GMDenIntersectingIterator.getRangeTerm("index", "A", true, "B", true)),
-			new Text(GMDenIntersectingIterator.getRangeTerm("index", "A", true, "B", true))
-		};
-		
-		String test = "02,01";
-		Scanner s = setUpIntersectingIterator(getSerializedScanner(), terms, false);
-		s.setRange(new Range());
-		assertTrue(test.equals(checkSerialized(s)));
-	}
-	
-	@Test
-	public void testSerializedMultiRange() {
-		Text[] terms = new Text[] {
-			new Text(GMDenIntersectingIterator.getRangeTerm("index", "A", true, "B", true)),
-			new Text(GMDenIntersectingIterator.getRangeTerm("index", "B", true, "C", true))
-		};
-		
-		String test = "02,01";
-		Scanner s = setUpIntersectingIterator(getSerializedScanner(), terms, false);
-		s.setRange(new Range());
-		assertTrue(test.equals(checkSerialized(s)));
-	}
-	
-	@Test
-	public void testSerializedTermAndRange() {
-		Text[] terms = new Text[] {
-			new Text("B"),
-			new Text(GMDenIntersectingIterator.getRangeTerm("index", "A", true, "E", true))
-		};
-		
-		String test = "02,01";
-		Scanner s = setUpIntersectingIterator(getSerializedScanner(), terms, false);
-		s.setRange(new Range());
-		assertTrue(test.equals(checkSerialized(s)));
-	}
-	
-	protected Scanner setUpSortedRangeIterator(Scanner s, boolean multiDoc) {
-		try {
-			s.setScanIterators(50, SortedRangeIterator.class.getName(), "ri");
-			s.setScanIteratorOption("ri", SortedRangeIterator.OPTION_COLF, "index");
-			s.setScanIteratorOption("ri", SortedRangeIterator.OPTION_DOC_COLF, "event");
-			s.setScanIteratorOption("ri", SortedRangeIterator.OPTION_LOWER_BOUND, "A");
-			s.setScanIteratorOption("ri", SortedRangeIterator.OPTION_UPPER_BOUND, "C");
-			s.setScanIteratorOption("ri", SortedRangeIterator.OPTION_START_INCLUSIVE, "true");
-			s.setScanIteratorOption("ri", SortedRangeIterator.OPTION_END_INCLUSIVE, "true");
-			s.setScanIteratorOption("ri", SortedRangeIterator.OPTION_MULTI_DOC, "" + multiDoc);
-			return s;
-		} catch (IOException e) {
-			e.printStackTrace();
-			return null;
-		}
-	}
-	
-	@Test
-	public void testSerializedSortedRangeIterator() {
-		Scanner s = setUpSortedRangeIterator(getSerializedScanner(), false);
-		String test = "02,01,03";
-		s.setRange(new Range());
-		assertTrue(test.equals(checkSerialized(s)));
-	}
-	
-	@Test
-	public void testCellLevelSortedRangeIterator() {
-		Scanner s = setUpSortedRangeIterator(getCellLevelScanner(), true);
-		String test = "02.field0=B,02.field1=C,02.field2=D,02.field3=E,02.field4=F,"
-			+ "01.field0=A,01.field1=B,01.field2=C,01.field3=D,01.field4=E,"
-			+ "03.field0=C,03.field1=D,03.field2=E,03.field3=F,03.field4=G";
-		s.setRange(new Range());
-		assertTrue(test.equals(checkCellLevel(s)));
-	}
-	
-	@Test
-	public void testUniqueIterator() {
-		Scanner s = getProvenanceScanner();
-		try {
-			s.setScanIterators(50, UniqueIterator.class.getName(), "skipper");
-			Key start = new Key(new Text("sid1"));
-			s.setRange(new Range(start, start.followingKey(PartialKey.ROW)));
-			
-			int count = 0;
-			for (Entry<Key, Value> e: s) {
-				count++;
-			}
-			
-			assertEquals(count, 3);
-		} catch (IOException e) {
-			e.printStackTrace();
-		}
-	}
-	
-	protected Scanner setUpConversionIterator(Scanner s) {
-		String[] conversions = new String[] {
-			"field0 + 10",
-			"field1 - 10",
-			"field2 * 10",
-			"field3 / 10",
-			"field4 % 10"
-		};
-		
-		try {
-			s.setScanIterators(50, ConversionIterator.class.getName(), "ci");
-		} catch (IOException e) {
-			// TODO Auto-generated catch block
-			e.printStackTrace();
-		}
-		
-		s.setScanIteratorOption("ci", ConversionIterator.OPTION_CONVERSIONS, ConversionIterator.encodeConversions(conversions));
-		Key start = new Key(new Text("1"), new Text("event"), new Text("01"));
-		s.setRange(new Range(start, true, start.followingKey(PartialKey.ROW_COLFAM_COLQUAL), false));
-		
-		return s;
-	}
-	
-	@Test
-	public void testConversionIteratorSerialized() {
-		Scanner s = getSerializedScanner();
-		s = setUpConversionIterator(s);
-		
-		CBConverter c = new CBConverter();
-		
-		boolean test = true;
-		Map<String, Double> expected = new HashMap<String, Double>();
-		
-		expected.put("field0", 20.0);
-		expected.put("field1", 1.0);
-		expected.put("field2", 120.0);
-		expected.put("field3", 1.3);
-		expected.put("field4", 4.0);
-		
-		Map<String, String> record;
-		
-		for (Entry<Key, Value> e: s) {
-			record = c.toMap(e.getKey(), e.getValue());
-			
-			for (Entry<String, String> pair: record.entrySet()) {
-				test = test && expected.get(pair.getKey()).equals(new Double(Double.parseDouble(record.get(pair.getKey()))));
-			}
-		}
-		
-		assertTrue(test);
-	}
-	
-	@Test
-	public void testConversionIteratorCellLevel() {
-		Scanner s = getCellLevelScanner();
-		s = setUpConversionIterator(s);
-		s.setScanIteratorOption("ci", ConversionIterator.OPTION_MULTI_DOC, "true");
-		
-		boolean test = true;
-		Map<String, Double> expected = new HashMap<String, Double>();
-		
-		expected.put("field0", 20.0);
-		expected.put("field1", 1.0);
-		expected.put("field2", 120.0);
-		expected.put("field3", 1.3);
-		expected.put("field4", 4.0);
-		
-		for (Entry<Key, Value> e: s) {
-			String field = getField(e.getKey());
-			if (field != null) {
-				test = test && expected.get(field).equals(new Double(Double.parseDouble(e.getValue().toString())));
-			}
-		}
-		
-		assertTrue(test);
-	}
-	
-	protected String getField(Key key) {
-		String colq = key.getColumnQualifier().toString();
-		int start = colq.indexOf("\u0000");
-		if (start == -1) {
-			return null;
-		}
-		
-		int end = colq.indexOf("\u0000", start + 1);
-		if (end == -1) {
-			end = colq.length();
-		}
-		
-		return colq.substring(start + 1, end);
-	}
-	
-	@Test
-	public void testCellLevelOGCFilter() {
-		Scanner s = getCellLevelScanner();
-		s.fetchColumnFamily(new Text("event"));
-		
-		try {
-			s.setScanIterators(60, CellLevelFilteringIterator.class.getName(), "fi");
-		} catch (IOException e) {
-			e.printStackTrace();
-		}
-		
-		s.setScanIteratorOption("fi", CellLevelFilteringIterator.OPTION_FILTER, "<PropertyIsBetween><PropertyName>field0</PropertyName>"
-			+ "<LowerBoundary><Literal>A</Literal></LowerBoundary>"
-			+ "<UpperBoundary><Literal>C</Literal></UpperBoundary>"
-			+ "</PropertyIsBetween>");
-		
-		String test = "02.field0=B,02.field1=C,02.field2=D,02.field3=E,02.field4=F,"
-			+ "01.field0=A,01.field1=B,01.field2=C,01.field3=D,01.field4=E,"
-			+ "03.field0=C,03.field1=D,03.field2=E,03.field3=F,03.field4=G";
-		assertTrue(test.equals(checkCellLevel(s)));
-	}
-	
-	@Test
-	public void testMultiLevelIterator() {
-		Scanner s = getCellLevelScanner();
-		Text[] terms = new Text[] {
-			new Text("C"),
-			new Text("D")
-		};
-		
-		s = setUpIntersectingIterator(s, terms, true);
-		
-		try {
-			s.setScanIterators(60, CellLevelFilteringIterator.class.getName(), "fi");
-		} catch (IOException e) {
-			e.printStackTrace();
-		}
-		
-		s.setScanIteratorOption("fi", CellLevelFilteringIterator.OPTION_FILTER, "<PropertyIsEqualTo><PropertyName>field0</PropertyName>"
-			+ "<Literal>A</Literal>"
-			+ "</PropertyIsEqualTo>");
-		
-		String test = "01.field0=A,01.field1=B,01.field2=C,01.field3=D,01.field4=E";
-		assertTrue(test.equals(checkCellLevel(s)));
-	}
-	
-	@Test
-	public void testMultiLevelIterator2() {
-		Scanner s = getCellLevelScanner();
-		s = setUpSortedRangeIterator(s, true);
-		try {
-			s.setScanIterators(60, CellLevelFilteringIterator.class.getName(), "fi");
-		} catch (IOException e) {
-			e.printStackTrace();
-		}
-		s.setScanIteratorOption("fi", CellLevelFilteringIterator.OPTION_FILTER, "<PropertyIsEqualTo><PropertyName>field0</PropertyName>"
-			+ "<Literal>A</Literal>"
-			+ "</PropertyIsEqualTo>");
-		
-		String test = "01.field0=A,01.field1=B,01.field2=C,01.field3=D,01.field4=E";
-		assertTrue(test.equals(checkCellLevel(s)));
-	}
-	
-	@Test
-	public void testCellLevelRecordIterator() {
-		Scanner s = getCellLevelScanner();
-		s = setUpSortedRangeIterator(s, true);
-		try {
-			s.setScanIterators(60, CellLevelRecordIterator.class.getName(), "recordItr");
-		} catch (IOException e) {
-			e.printStackTrace();
-		}
-		
-//		for (Entry<Key, Value> e: s) {
-//			String v = e.getValue().toString();
-//			v = v.replaceAll("\\u0000", ",");
-//			v = v.replaceAll("\\uFFFD", "=");
-//			System.out.println(e.getKey() + "\t" + v);
-//		}
-		String test = "02,01,03";
-		assertTrue(test.equals(checkSerialized(s)));
-	}
-	
-	@Test
-	public void testIntersectionWithoutDocLookup() {
-		Text[] terms = new Text[] {
-			new Text("C"),
-			new Text("D")
-		};
-		// all the evens will come first
-		String test = "\u000002,\u000001,\u000003";
-		Scanner s = setUpIntersectingIterator(getSerializedScanner(), terms, false);
-		s.setScanIteratorOption("ii", GMDenIntersectingIterator.OPTION_DOC_LOOKUP, "false");
-		s.setRange(new Range());
-		assertTrue(test.equals(checkSerialized(s)));
-	}
-	
-	@Test
-	public void testSimpleNot() {
-		Text[] terms = new Text[] {
-			new Text("B"),
-			new Text("F")
-		};
-		
-		boolean[] nots = new boolean[] {
-			false,
-			true
-		};
-		
-		String test="01";
-		Scanner s = setUpIntersectingIterator(getSerializedScanner(), terms, false);
-		s.setScanIteratorOption("ii", GMDenIntersectingIterator.notFlagOptionName, GMDenIntersectingIterator.encodeBooleans(nots));
-		s.setRange(new Range());
-		
-		assertTrue(test.equals(checkSerialized(s)));
-	}
-	
-	@Test
-	public void testRangeNot() {
-		Text[] terms = new Text[] {
-			new Text("B"),
-			new Text(GMDenIntersectingIterator.getRangeTerm("index", "F", true, "H", true))
-		};
-		
-		boolean[] nots = new boolean[] {
-			false,
-			true
-		};
-		
-		String test = "01";
-		Scanner s = setUpIntersectingIterator(getSerializedScanner(), terms, false);
-		s.setScanIteratorOption("ii", GMDenIntersectingIterator.notFlagOptionName, GMDenIntersectingIterator.encodeBooleans(nots));
-		s.setRange(new Range());
-		
-		assertTrue(test.equals(checkSerialized(s)));
-	}
-	
-	@Test
-	public void testMinIteratorOnLastKeys() {
-		Scanner s = getSerializedScanner();
-		try {
-			s.setScanIterators(50, SortedMinIterator.class.getName(), "min");
-		} catch (IOException e) {
-			// TODO Auto-generated catch block
-			e.printStackTrace();
-		}
-		s.setScanIteratorOption("min", SortedMinIterator.OPTION_PREFIX, "z");
-		s.setRange(new Range());
-		
-		String test = "02,04,06,08,10,01,03,05,07,09";
-		assertTrue(test.equals(checkSerialized(s)));
-	}
-}
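
The expected-order strings above, such as "02,01,03", follow from the partitioning scheme used by the SampleData writers later in this message: each record is bucketed into row (count % NUM_PARTITIONS) with NUM_PARTITIONS = 2, so even-numbered records land in partition "0" and scan back before the odd-numbered records in partition "1". A minimal sketch of that id/partition mapping (illustrative only; it mirrors SampleData.writeDenSerialized rather than adding anything new):

    // Zero-padded ids bucketed by (count % 2): even ids go to row "0", odd ids to row "1",
    // and row "0" sorts first, which is why "02" precedes "01" and "03" in the expectations.
    int numPartitions = 2;
    for (int count = 1; count <= 3; count++) {
        String id = count < 10 ? "0" + count : "" + count;
        String partition = "" + (count % numPartitions);
        System.out.println(partition + " -> " + id);  // prints 1 -> 01, 0 -> 02, 1 -> 03
    }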

http://git-wip-us.apache.org/repos/asf/incubator-rya/blob/80faf06d/partition/common-query/src/test/java/JTSFilterTest.java
----------------------------------------------------------------------
diff --git a/partition/common-query/src/test/java/JTSFilterTest.java b/partition/common-query/src/test/java/JTSFilterTest.java
deleted file mode 100644
index 8224f64..0000000
--- a/partition/common-query/src/test/java/JTSFilterTest.java
+++ /dev/null
@@ -1,181 +0,0 @@
-/*
- * To change this template, choose Tools | Templates
- * and open the template in the editor.
- */
-
-import static org.junit.Assert.assertTrue;
-
-import java.io.IOException;
-import java.util.Map.Entry;
-
-import org.apache.hadoop.io.Text;
-import org.junit.Test;
-
-import ss.cloudbase.core.iterators.GMDenIntersectingIterator;
-import ss.cloudbase.core.iterators.filter.jts.JTSFilter;
-
-import cloudbase.core.client.Connector;
-import cloudbase.core.client.Scanner;
-import cloudbase.core.client.TableNotFoundException;
-import cloudbase.core.data.Key;
-import cloudbase.core.data.Range;
-import cloudbase.core.data.Value;
-import cloudbase.core.iterators.FilteringIterator;
-import cloudbase.core.security.Authorizations;
-
-/**
- *
- * @author rashah
- */
-public class JTSFilterTest
-{
-
-  private Connector cellLevelConn;
-  private Connector serializedConn;
-  private static final String TABLE = "partition";
-  private static final Authorizations AUTHS = new Authorizations("ALPHA,BETA,GAMMA".split(","));
-
-
-
-  protected Connector getSerializedConnector()
-  {
-    if (serializedConn == null)
-    {
-      serializedConn = SampleJTSData.initConnector();
-      SampleJTSData.writeDenSerialized(serializedConn, SampleJTSData.sampleData());
-    }
-    return serializedConn;
-  }
-
-
-
-  protected Scanner getSerializedScanner()
-  {
-    Connector c = getSerializedConnector();
-    try
-    {
-      return c.createScanner(TABLE, AUTHS);
-    }
-    catch (TableNotFoundException e)
-    {
-      return null;
-    }
-  }
-
-  protected Scanner setUpJTSFilter(Scanner s, String latitude, String longitude, boolean change_name)
-  {
-    try
-    {
-  
-      s.setScanIterators(50, FilteringIterator.class.getName(), "gvdf");
-      s.setScanIteratorOption("gvdf", "0", JTSFilter.class.getName());
-      s.setScanIteratorOption("gvdf", "0." + JTSFilter.OPTIONCenterPointLat, latitude);
-      s.setScanIteratorOption("gvdf", "0." + JTSFilter.OPTIONCenterPointLon, longitude);
-      if (change_name)
-          s.setScanIteratorOption("gvdf", "0." + JTSFilter.OPTIONGeometryKeyName, "beam-footprint");
-
-
-    }
-    catch (IOException e)
-    {
-      // TODO Auto-generated catch block
-      e.printStackTrace();
-    }
-    return s;
-  }
-
-  protected String checkSerialized(Scanner s)
-  {
-    StringBuilder sb = new StringBuilder();
-    boolean first = true;
-    for (Entry<Key, Value> e : s)
-    {
-
-      if (!first)
-      {
-        sb.append(",");
-      }
-      else
-      {
-        first = false;
-      }
-
-      String colq = e.getKey().getColumnQualifier().toString();
-
-      sb.append(colq);
-    }
-    return sb.toString();
-  }
-
-
-  @Test
-  public void testNoResults()
-  {
-    //London is in neither beam - 51°30'0.00"N   0° 7'0.00"W
-    String latitude = "51.5";
-    String longitude = "0.11";
-
-    Scanner s = setUpJTSFilter(getSerializedScanner(), latitude, longitude, false);
-    s.setRange(new Range());
-
-//    System.out.println("{" + checkSerialized(s) + "}");
-    assertTrue(checkSerialized(s).isEmpty());
-  }
-
-
-  @Test
-  public void testOneResultAmerica()
-  {
-    //This is North America
-    //Points  39°44'21.00"N 104°59'3.00"W (Denver) are in the footprint
-    String latitude = "33";
-    String longitude = "-93.0";
-
-    Scanner s = setUpJTSFilter(getSerializedScanner(), latitude, longitude, false);
-    s.setRange(new Range());
-
-    System.out.println("{" + checkSerialized(s) + "}");
-    assertTrue(checkSerialized(s).equals("02"));
-  }
-
-
-  @Test
-  public void testOneResultAustralia()
-  {
-    //This is Australia
-    //Points like 22S 135E are in the beam
-    String latitude = "-9";
-    String longitude = "100.0";
-
-    Scanner s = setUpJTSFilter(getSerializedScanner(), latitude, longitude, false);
-    s.setRange(new Range());
-
-    System.out.println("{" + checkSerialized(s) + "}");
-    assertTrue(checkSerialized(s).equals("01"));
-  }
-
-  @Test
-  public void testOneResultHawaii()
-  {
-    // -164 40 - somewhere near hawaii
-
-    //This is the "testa" footprint near Hawaii
-    //Points like 40N 164W are in the beam
-    String latitude = "40";
-    String longitude = "-164.0";
-
-    Scanner s = setUpJTSFilter(getSerializedScanner(), latitude, longitude, true);
-    s.setRange(new Range());
-
-    System.out.println("{" + checkSerialized(s) + "}");
-    assertTrue(checkSerialized(s).equals("03"));
-  }
-
-
-  @Test
-  public void testDummyTest()
-  {
-    assertTrue(true);
-  }
-
-}
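
JTSFilter is attached here through Cloudbase's FilteringIterator option convention (option "0" names the filter class, "0.<option>" its settings), and the change_name flag switches OPTIONGeometryKeyName to "beam-footprint" - the key the WKT footprints in SampleJTSData (below) are stored under - instead of the "geometry-contour" key the other records use. The check itself is presumably an ordinary JTS point-in-geometry test; a minimal standalone sketch under that assumption, using the com.vividsolutions.jts API directly rather than the filter's internals:

    import com.vividsolutions.jts.geom.Coordinate;
    import com.vividsolutions.jts.geom.Geometry;
    import com.vividsolutions.jts.geom.GeometryFactory;
    import com.vividsolutions.jts.io.WKTReader;

    public class FootprintCheck {
        // True when the lon/lat point falls inside the WKT footprint, e.g. the
        // near-Hawaii point (-164, 40) against the "testa" MULTIPOLYGON in SampleJTSData.
        public static boolean contains(String wkt, double lon, double lat) throws Exception {
            Geometry footprint = new WKTReader().read(wkt);
            return footprint.contains(new GeometryFactory().createPoint(new Coordinate(lon, lat)));
        }
    }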

http://git-wip-us.apache.org/repos/asf/incubator-rya/blob/80faf06d/partition/common-query/src/test/java/OGCFilterTest.java
----------------------------------------------------------------------
diff --git a/partition/common-query/src/test/java/OGCFilterTest.java b/partition/common-query/src/test/java/OGCFilterTest.java
deleted file mode 100644
index fd54945..0000000
--- a/partition/common-query/src/test/java/OGCFilterTest.java
+++ /dev/null
@@ -1,163 +0,0 @@
-import java.util.HashMap;
-import java.util.Map;
-
-import org.apache.hadoop.io.Text;
-import org.junit.Test;
-
-import cloudbase.core.data.Key;
-import cloudbase.core.data.Value;
-import ss.cloudbase.core.iterators.filter.ogc.OGCFilter;
-
-import static org.junit.Assert.*;
-
-public class OGCFilterTest {
-	private Key testKey = new Key(new Text("row"), new Text("colf"), new Text("colq"));
-	private Value testValue = new Value("uuid~event\uFFFDmy-event-hash-1\u0000date\uFFFD20100819\u0000time~dss\uFFFD212706.000\u0000frequency\uFFFD3.368248181443644E8\u0000latitude\uFFFD48.74571142707959\u0000longitude\uFFFD13.865561564126812\u0000altitude\uFFFD1047.0\u0000datetime\uFFFD2010-08-19T21:27:06.000Z\u0000test~key\uFFFD\u0000key\uFFFDa\uFFFDb".getBytes());
-
-	public OGCFilterTest() {
-
-	}
-
-	private OGCFilter getFilter(String filter) {
-		OGCFilter f = new OGCFilter();
-		Map<String, String> options = new HashMap<String, String>();
-		options.put(OGCFilter.OPTION_FILTER, filter);
-		f.init(options);
-		return f;
-	}
-
-	@Test
-	public void testBBOX() {
-		OGCFilter f = getFilter("<BBOX><gml:Envelope>"
-			+ "<gml:LowerCorner>13 48</gml:LowerCorner>"
-			+ "<gml:UpperCorner>14 49</gml:UpperCorner>"
-			+ "</gml:Envelope></BBOX>");
-		assertTrue(f.accept(testKey, testValue));
-	}
-
-	@Test
-	public void testBetweenStr() {
-		OGCFilter f = getFilter("<PropertyIsBetween><PropertyName>datetime</PropertyName>"
-			+ "<LowerBoundary><Literal>2010-08-19</Literal></LowerBoundary>"
-			+ "<UpperBoundary><Literal>2010-08-20</Literal></UpperBoundary>"
-			+ "</PropertyIsBetween>");
-
-		assertTrue(f.accept(testKey, testValue));
-	}
-
-	@Test
-	public void testBetweenNum() {
-		OGCFilter f = getFilter("<PropertyIsBetween><PropertyName>frequency</PropertyName>"
-			+ "<LowerBoundary><Literal>330000000</Literal></LowerBoundary>"
-			+ "<UpperBoundary><Literal>340000000</Literal></UpperBoundary>"
-			+ "</PropertyIsBetween>");
-
-		assertTrue(f.accept(testKey, testValue));
-	}
-
-	@Test
-	public void testEqualStr() {
-		OGCFilter f = getFilter("<PropertyIsEqualTo><PropertyName>uuid~event</PropertyName><Literal>my-event-hash-1</Literal></PropertyIsEqualTo>");
-		assertTrue(f.accept(testKey, testValue));
-	}
-
-	@Test
-	public void testEqualNum() {
-		OGCFilter f = getFilter("<PropertyIsEqualTo><PropertyName>altitude</PropertyName><Literal>1047</Literal></PropertyIsEqualTo>");
-		assertTrue(f.accept(testKey, testValue));
-	}
-
-	@Test
-	public void testGreaterThanStr() {
-		OGCFilter f = getFilter("<PropertyIsGreaterThan><PropertyName>datetime</PropertyName><Literal>2010-08-15</Literal></PropertyIsGreaterThan>");
-		assertTrue(f.accept(testKey, testValue));
-	}
-
-	@Test
-	public void testGreaterThanNum() {
-		OGCFilter f = getFilter("<PropertyIsGreaterThan><PropertyName>altitude</PropertyName><Literal>1000</Literal></PropertyIsGreaterThan>");
-		assertTrue(f.accept(testKey, testValue));
-	}
-
-	@Test
-	public void testLessThanStr() {
-		OGCFilter f = getFilter("<PropertyIsLessThan><PropertyName>datetime</PropertyName><Literal>2010-08-20</Literal></PropertyIsLessThan>");
-		assertTrue(f.accept(testKey, testValue));
-	}
-
-	@Test
-	public void testLessThanNum() {
-		OGCFilter f = getFilter("<PropertyIsLessThan><PropertyName>altitude</PropertyName><Literal>1200</Literal></PropertyIsLessThan>");
-		assertTrue(f.accept(testKey, testValue));
-	}
-
-	@Test
-	public void testLike() {
-		OGCFilter f = getFilter("<PropertyIsLike><PropertyName>uuid~event</PropertyName><Literal>*event*</Literal></PropertyIsLike>");
-		assertTrue(f.accept(testKey, testValue));
-	}
-
-	@Test
-	public void testNotEqualNum() {
-		OGCFilter f = getFilter("<PropertyIsNotEqualTo><PropertyName>altitude</PropertyName><Literal>1046</Literal></PropertyIsNotEqualTo>");
-		assertTrue(f.accept(testKey, testValue));
-	}
-
-	@Test
-	public void testNull() {
-		OGCFilter f = getFilter("<PropertyIsNull><PropertyName>test~key</PropertyName></PropertyIsNull>");
-		assertTrue(f.accept(testKey, testValue));
-	}
-
-	@Test
-	public void testNot() {
-		OGCFilter f = getFilter("<Not><PropertyIsEqualTo><PropertyName>altitude</PropertyName><Literal>1047</Literal></PropertyIsEqualTo></Not>");
-		assertFalse(f.accept(testKey, testValue));
-	}
-
-	@Test
-	public void testAnd() {
-		OGCFilter f = getFilter("<And>"
-			+ "<PropertyIsEqualTo><PropertyName>altitude</PropertyName><Literal>1047</Literal></PropertyIsEqualTo>"
-			+ "<PropertyIsNull><PropertyName>test~key</PropertyName></PropertyIsNull>"
-			+ "</And>");
-
-		assertTrue(f.accept(testKey, testValue));
-	}
-
-	@Test
-	public void testOr() {
-		OGCFilter f = getFilter("<Or>"
-			+ "<PropertyIsLike><PropertyName>uuid~event</PropertyName><Literal>*event*</Literal></PropertyIsLike>"
-			+ "<PropertyIsNull><PropertyName>uuid~event</PropertyName></PropertyIsNull>"
-			+ "</Or>");
-
-		assertTrue(f.accept(testKey, testValue));
-	}
-
-	@Test
-	public void testNand() {
-		OGCFilter f = getFilter("<Not><And>"
-			+ "<PropertyIsNull><PropertyName>uuid~event</PropertyName></PropertyIsNull>"
-			+ "<PropertyIsNull><PropertyName>test~key</PropertyName></PropertyIsNull>"
-			+ "</And></Not>");
-
-		assertTrue(f.accept(testKey, testValue));
-	}
-
-	@Test
-	public void testNor() {
-		OGCFilter f = getFilter("<Not>"
-			+ "<PropertyIsNull><PropertyName>uuid~event</PropertyName></PropertyIsNull>"
-			+ "<PropertyIsNull><PropertyName>altitude</PropertyName></PropertyIsNull>"
-			+ "</Not>");
-
-		assertTrue(f.accept(testKey, testValue));
-	}
-
-	@Test
-	public void testParse() {
-		OGCFilter f = getFilter("<PropertyIsEqualTo><PropertyName>key</PropertyName><Literal>a</Literal></PropertyIsEqualTo>");
-		assertTrue(f.accept(testKey, testValue));
-	}
-}
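
The Value driving these assertions is a record in the serialized event encoding used throughout the partition tests: fields are separated by \u0000 and each field name is separated from its value by \uFFFD (the format written by SampleData.writeDenSerialized below and unpacked by CBConverter in IteratorTest above). A minimal decoding sketch, assuming only that encoding and ignoring the multi-valued "key\uFFFDa\uFFFDb" case that testParse exercises:

    import java.util.LinkedHashMap;
    import java.util.Map;

    public class SerializedRecord {
        // Splits "name\uFFFDvalue\u0000name\uFFFDvalue..." into a field map.
        public static Map<String, String> toMap(String value) {
            Map<String, String> fields = new LinkedHashMap<String, String>();
            for (String pair : value.split("\u0000")) {
                String[] kv = pair.split("\uFFFD", 2);
                fields.put(kv[0], kv.length > 1 ? kv[1] : "");
            }
            return fields;
        }
    }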

http://git-wip-us.apache.org/repos/asf/incubator-rya/blob/80faf06d/partition/common-query/src/test/java/SampleData.java
----------------------------------------------------------------------
diff --git a/partition/common-query/src/test/java/SampleData.java b/partition/common-query/src/test/java/SampleData.java
deleted file mode 100644
index 071076b..0000000
--- a/partition/common-query/src/test/java/SampleData.java
+++ /dev/null
@@ -1,228 +0,0 @@
-import java.util.ArrayList;
-import java.util.Collection;
-import java.util.HashMap;
-import java.util.List;
-import java.util.Map;
-import java.util.Map.Entry;
-
-import org.apache.hadoop.io.Text;
-
-import cloudbase.core.client.BatchWriter;
-import cloudbase.core.client.CBException;
-import cloudbase.core.client.CBSecurityException;
-import cloudbase.core.client.Connector;
-import cloudbase.core.client.Instance;
-import cloudbase.core.client.MultiTableBatchWriter;
-import cloudbase.core.client.TableExistsException;
-import cloudbase.core.client.TableNotFoundException;
-import cloudbase.core.client.mock.MockInstance;
-import cloudbase.core.data.Mutation;
-import cloudbase.core.security.Authorizations;
-
-
-public class SampleData {
-	public static int NUM_PARTITIONS = 2;
-	public static int NUM_SAMPLES = 10;
-	
-	public static Connector initConnector() {
-		Instance instance = new MockInstance();
-		
-		try {
-			Connector connector = instance.getConnector("root", "password".getBytes());
-			
-			// set up table
-			connector.tableOperations().create("partition");
-			connector.tableOperations().create("provenance");
-			
-			// set up root's auths
-			connector.securityOperations().changeUserAuthorizations("root", new Authorizations("ALPHA,BETA,GAMMA".split(",")));
-			
-			return connector;
-		} catch (CBException e) {
-			e.printStackTrace();
-		} catch (CBSecurityException e) {
-			e.printStackTrace();
-		} catch (TableExistsException e) {
-			e.printStackTrace();
-		}
-		
-		return null;
-	}
-	
-	public static Collection<Map<String, String>> sampleData() {
-		List<Map<String, String>> list = new ArrayList<Map<String, String>>();
-		Map<String, String> item;
-		
-		for (int i = 0; i < NUM_SAMPLES; i++) {
-			item = new HashMap<String, String>();
-			for (int j = 0; j < 5; j++) {
-				item.put("field" + j , new String(new char[] {(char) ('A' + ((j + i) % 26))}));
-			}
-			list.add(item);
-		}
-		return list;
-	}
-	
-	public static void writeDenCellLevel(Connector connector, Collection<Map<String, String>> data) {
-		// write sample data
-		MultiTableBatchWriter mtbw = connector.createMultiTableBatchWriter(200000, 10000, 1);
-		try {
-			BatchWriter writer;
-			if (mtbw != null) {
-				writer = mtbw.getBatchWriter("partition");
-			} else {
-				writer = connector.createBatchWriter("partition", 200000, 10000, 1);
-			}
-			int count = 0;
-			Mutation m;
-			for (Map<String, String> object: data) {
-				count++;
-				String id = (count < 10 ? "0" + count: "" + count);
-				Text partition = new Text("" + (count % NUM_PARTITIONS));
-				
-				// write dummy record
-				m = new Mutation(partition);
-				m.put("event", id, "");
-				writer.addMutation(m);
-				
-				for (Entry<String, String> entry: object.entrySet()) {
-					// write the event mutation
-					m = new Mutation(partition);
-					m.put("event", id + "\u0000" + entry.getKey(), entry.getValue());
-					writer.addMutation(m);
-					
-					// write the general index mutation
-					m = new Mutation(partition);
-					m.put("index", entry.getValue() + "\u0000" + id, "");
-					writer.addMutation(m);
-					
-					// write the specific index mutation
-					m = new Mutation(partition);
-					m.put("index", entry.getKey() + "//" + entry.getValue() + "\u0000" + id, "");
-					writer.addMutation(m);
-				}
-			}
-			writer.close();
-		} catch (CBException e) {
-			e.printStackTrace();
-		} catch (CBSecurityException e) {
-			e.printStackTrace();
-		} catch (TableNotFoundException e) {
-			e.printStackTrace();
-		}
-	}
-	
-	public static void writeDenSerialized(Connector connector, Collection<Map<String, String>> data) {
-		// write sample data
-		MultiTableBatchWriter mtbw = connector.createMultiTableBatchWriter(200000, 10000, 1);
-		try {
-			BatchWriter writer;
-			if (mtbw != null) {
-				writer = mtbw.getBatchWriter("partition");
-			} else {
-				writer = connector.createBatchWriter("partition", 200000, 10000, 1);
-			}
-			int count = 0;
-			Mutation m;
-			for (Map<String, String> object: data) {
-				count++;
-				String id = (count < 10 ? "0" + count: "" + count);
-				Text partition = new Text("" + (count % NUM_PARTITIONS));
-				
-				StringBuilder value = new StringBuilder();
-				boolean first = true;
-				for (Entry<String, String> entry: object.entrySet()) {
-					if (!first) {
-						value.append("\u0000");
-					} else {
-						first = false;
-					}
-					value.append(entry.getKey());
-					value.append("\uFFFD");
-					value.append(entry.getValue());
-					
-					// write the general index mutation
-					m = new Mutation(partition);
-					m.put("index", entry.getValue() + "\u0000" + id, "");
-					writer.addMutation(m);
-					
-					// write the specific index mutation
-					m = new Mutation(partition);
-					m.put("index", entry.getKey() + "//" + entry.getValue() + "\u0000" + id, "");
-					writer.addMutation(m);
-				}
-				
-				// write the event mutation
-				m = new Mutation(partition);
-				m.put("event", id, value.toString());
-				writer.addMutation(m);
-			}
-			writer.close();
-		} catch (CBException e) {
-			e.printStackTrace();
-		} catch (CBSecurityException e) {
-			e.printStackTrace();
-		} catch (TableNotFoundException e) {
-			e.printStackTrace();
-		}
-	}
-	
-	public static void writeDenProvenance(Connector connector) {
-		// write sample data
-		MultiTableBatchWriter mtbw = connector.createMultiTableBatchWriter(200000, 10000, 1);
-		try {
-			BatchWriter writer;
-			if (mtbw != null) {
-				writer = mtbw.getBatchWriter("provenance");
-			} else {
-				writer = connector.createBatchWriter("provenance", 200000, 10000, 1);
-			}
-			Mutation m;
-			for (int sid = 1; sid <= 2; sid++) {
-				for (int time = 1; time <= 3; time++) {
-					for (int uuid = 1; uuid <= (6 + 2 * time); uuid++) {
-						m = new Mutation(new Text("sid" + sid));
-						m.put("time" + time, "uuid-" + Integer.toHexString(uuid), "");
-						writer.addMutation(m);
-					}
-				}
-			}
-			writer.close();
-		} catch (CBException e) {
-			e.printStackTrace();
-		} catch (CBSecurityException e) {
-			e.printStackTrace();
-		} catch (TableNotFoundException e) {
-			e.printStackTrace();
-		}
-	}
-	
-	public static void writeMinIndexes(Connector connector) {
-		// write sample data
-		MultiTableBatchWriter mtbw = connector.createMultiTableBatchWriter(200000, 10000, 1);
-		try {
-			BatchWriter writer;
-			if (mtbw != null) {
-				writer = mtbw.getBatchWriter("partition");
-			} else {
-				writer = connector.createBatchWriter("partition", 200000, 10000, 1);
-			}
-			Mutation m;
-			for (int i = 1; i <= NUM_SAMPLES; i++) {
-				m = new Mutation(new Text("" + (i % NUM_PARTITIONS)));
-				
-				String id = (i < 10 ? "0" + i: "" + i);
-				
-				m.put("index", "z_" + id + "_rdate\u0000" + id, "");
-				writer.addMutation(m);
-			}
-			writer.close();
-		} catch (CBException e) {
-			e.printStackTrace();
-		} catch (CBSecurityException e) {
-			e.printStackTrace();
-		} catch (TableNotFoundException e) {
-			e.printStackTrace();
-		}
-	}
-}
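
writeMinIndexes adds a second index entry per record of the form "z_<id>_rdate\u0000<id>". The "z_" prefix makes those entries sort after the ordinary value and field//value index entries within each partition, which is presumably what testMinIteratorOnLastKeys in IteratorTest above depends on when it configures SortedMinIterator with OPTION_PREFIX "z" and expects "02,04,06,08,10,01,03,05,07,09" (partition "0" first, then partition "1", each in id order). A small sketch of the qualifiers this produces for partition "0":

    // Even-numbered records land in partition "0"; their min-index qualifiers sort
    // last within the "index" family because of the "z_" prefix, and in id order.
    for (int i = 2; i <= 10; i += 2) {
        String id = i < 10 ? "0" + i : "" + i;
        System.out.println("z_" + id + "_rdate" + "\u0000" + id);
    }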

http://git-wip-us.apache.org/repos/asf/incubator-rya/blob/80faf06d/partition/common-query/src/test/java/SampleGVData.java
----------------------------------------------------------------------
diff --git a/partition/common-query/src/test/java/SampleGVData.java b/partition/common-query/src/test/java/SampleGVData.java
deleted file mode 100644
index d8168de..0000000
--- a/partition/common-query/src/test/java/SampleGVData.java
+++ /dev/null
@@ -1,182 +0,0 @@
-
-import java.util.ArrayList;
-import java.util.Collection;
-import java.util.HashMap;
-import java.util.List;
-import java.util.Map;
-import java.util.Map.Entry;
-
-import org.apache.hadoop.io.Text;
-
-import cloudbase.core.client.BatchWriter;
-import cloudbase.core.client.CBException;
-import cloudbase.core.client.CBSecurityException;
-import cloudbase.core.client.Connector;
-import cloudbase.core.client.Instance;
-import cloudbase.core.client.MultiTableBatchWriter;
-import cloudbase.core.client.TableExistsException;
-import cloudbase.core.client.TableNotFoundException;
-import cloudbase.core.client.mock.MockInstance;
-import cloudbase.core.data.Mutation;
-import cloudbase.core.security.Authorizations;
-
-// For use in testing the Date Filter and Frequency Filter classes
-public class SampleGVData
-{
-
-  public static int NUM_PARTITIONS = 2;
-
-
-  public static Connector initConnector()
-  {
-    Instance instance = new MockInstance();
-
-    try
-    {
-      Connector connector = instance.getConnector("root", "password".getBytes());
-
-      // set up table
-      connector.tableOperations().create("partition");
-
-      // set up root's auths
-      connector.securityOperations().changeUserAuthorizations("root", new Authorizations("ALPHA,BETA,GAMMA".split(",")));
-
-      return connector;
-    }
-    catch (CBException e)
-    {
-      e.printStackTrace();
-    }
-    catch (CBSecurityException e)
-    {
-      e.printStackTrace();
-    }
-    catch (TableExistsException e)
-    {
-      e.printStackTrace();
-    }
-
-    return null;
-  }
-
-  public static Collection<Map<String, String>> sampleData()
-  {
-    List<Map<String, String>> list = new ArrayList<Map<String, String>>();
-    Map<String, String> item;
-
-    item = new HashMap<String, String>();
-    item.put("a",  "a");
-    item.put("b",  "b");
-
-    //This one is like RB
-    item.put("date-start",  "2009-01-01");
-    item.put("date-end",    "2011-02-24");
-    item.put("date-update", "2011-02-24T00:00:00Z");
-    item.put("frequency",  "1250000000");
-    item.put("bandwidth",   "500000000");
-    item.put("version",     "1");
-    list.add(item);
-
-    item = new HashMap<String, String>();
-    item.put("a",  "a");
-    item.put("b",  "b");
-    list.add(item);
-
-    //This one is like GV
-    item = new HashMap<String, String>();
-    item.put("a",  "a");
-    item.put("b",  "b");
-    item.put("date-start",  "2010-01-01");
-    item.put("date-update", "2010-01-23");
-    item.put("frequency",  "1150000000");
-    item.put("bandwidth",   "300000000");
-    list.add(item);
-
-    item = new HashMap<String, String>();
-    item.put("a",  "a");
-    item.put("b",  "b");
-    item.put("date-start",  "2009-01-01");
-    item.put("date-end",    "2011-02-24");
-    item.put("date-update", "2008-01-23");
-    list.add(item);
-
-    item = new HashMap<String, String>();
-    item.put("a",  "a");
-    item.put("b",  "b");
-    list.add(item);
-
-    return list;
-  }
-
-
-  public static void writeDenSerialized(Connector connector, Collection<Map<String, String>> data)
-  {
-    // write sample data
-    MultiTableBatchWriter mtbw = connector.createMultiTableBatchWriter(200000, 10000, 1);
-    try
-    {
-      BatchWriter writer;
-      if (mtbw != null)
-      {
-        writer = mtbw.getBatchWriter("partition");
-      }
-      else
-      {
-        writer = connector.createBatchWriter("partition", 200000, 10000, 1);
-      }
-      int count = 0;
-      Mutation m;
-      for (Map<String, String> object : data)
-      {
-        count++;
-        String id = (count < 10 ? "0" + count : "" + count);
-        Text partition = new Text("" + (count % NUM_PARTITIONS));
-
-        StringBuilder value = new StringBuilder();
-        boolean first = true;
-        for (Entry<String, String> entry : object.entrySet())
-        {
-          if (!first)
-          {
-            value.append("\u0000");
-          }
-          else
-          {
-            first = false;
-          }
-          value.append(entry.getKey());
-          value.append("\uFFFD");
-          value.append(entry.getValue());
-
-          // write the general index mutation
-          m = new Mutation(partition);
-          m.put("index", entry.getValue() + "\u0000" + id, "");
-          writer.addMutation(m);
-
-          // write the specific index mutation
-          m = new Mutation(partition);
-          m.put("index", entry.getKey() + "//" + entry.getValue() + "\u0000" + id, "");
-          writer.addMutation(m);
-        }
-
-        // write the event mutation
-        m = new Mutation(partition);
-        m.put("event", id, value.toString());
-        writer.addMutation(m);
-      }
-      writer.close();
-    }
-    catch (CBException e)
-    {
-      e.printStackTrace();
-    }
-    catch (CBSecurityException e)
-    {
-      e.printStackTrace();
-    }
-    catch (TableNotFoundException e)
-    {
-      e.printStackTrace();
-    }
-  }
-}

http://git-wip-us.apache.org/repos/asf/incubator-rya/blob/80faf06d/partition/common-query/src/test/java/SampleJTSData.java
----------------------------------------------------------------------
diff --git a/partition/common-query/src/test/java/SampleJTSData.java b/partition/common-query/src/test/java/SampleJTSData.java
deleted file mode 100644
index 41df658..0000000
--- a/partition/common-query/src/test/java/SampleJTSData.java
+++ /dev/null
@@ -1,171 +0,0 @@
-
-import java.util.ArrayList;
-import java.util.Collection;
-import java.util.HashMap;
-import java.util.List;
-import java.util.Map;
-import java.util.Map.Entry;
-
-import org.apache.hadoop.io.Text;
-
-import cloudbase.core.client.BatchWriter;
-import cloudbase.core.client.CBException;
-import cloudbase.core.client.CBSecurityException;
-import cloudbase.core.client.Connector;
-import cloudbase.core.client.Instance;
-import cloudbase.core.client.MultiTableBatchWriter;
-import cloudbase.core.client.TableExistsException;
-import cloudbase.core.client.TableNotFoundException;
-import cloudbase.core.client.mock.MockInstance;
-import cloudbase.core.data.Mutation;
-import cloudbase.core.security.Authorizations;
-
-// For use in testing the JTSFilter class
-public class SampleJTSData
-{
-
-  public static int NUM_PARTITIONS = 2;
-
-
-  public static Connector initConnector()
-  {
-    Instance instance = new MockInstance();
-
-    try
-    {
-      Connector connector = instance.getConnector("root", "password".getBytes());
-
-      // set up table
-      connector.tableOperations().create("partition");
-
-      // set up root's auths
-      connector.securityOperations().changeUserAuthorizations("root", new Authorizations("ALPHA,BETA,GAMMA".split(",")));
-
-      return connector;
-    }
-    catch (CBException e)
-    {
-      e.printStackTrace();
-    }
-    catch (CBSecurityException e)
-    {
-      e.printStackTrace();
-    }
-    catch (TableExistsException e)
-    {
-      e.printStackTrace();
-    }
-
-    return null;
-  }
-
-  public static Collection<Map<String, String>> sampleData()
-  {
-    List<Map<String, String>> list = new ArrayList<Map<String, String>>();
-    Map<String, String> item;
-
-    item = new HashMap<String, String>();
-    item.put("geometry-contour",  "SDO_GEOMETRY(2007, 8307, NULL, SDO_ELEM_INFO_ARRAY(1, 1003, 1), SDO_ORDINATE_ARRAY(91.985, -12.108, 94.657, -12.059, 98.486, -11.988, 101.385, -12.296, 102.911, -12.569, 103.93, -12.852, 105.005, -12.531, 106.37, -12.204, 108.446, -11.503, 109.585, -10.88, 110.144, -10.207, 108.609, -9.573, 106.05, -8.535, 104.145, -7.606, 102.191, -7.522, 99.522, -7.691, 97.64, -7.606, 95.482, -7.947, 94.546, -8.084, 92.465, -8.605, 90.554, -9.366, 90.197, -10.436, 89.84, -11.729, 90.554, -12.175, 91.985, -12.108))");
-    item.put("beam-name",    "OPTUS D1 Ku-BAND NATIONAL A & B AUSTRALIA Downlink");
-    list.add(item);
-    //This is Australia
-    //Points like 22S 135E are in the beam
-
-    //This one is like GV
-    item = new HashMap<String, String>();
-    item.put("beam-name",  "AMC 1 Ku-BAND ZONAL NORTH AMERICA Down HV");
-    item.put("geometry-contour",   "SDO_GEOMETRY(2007, 8307, NULL, SDO_ELEM_INFO_ARRAY(1, 1003, 1), SDO_ORDINATE_ARRAY(-70.838, 39.967, -70.506, 40.331, -70.698, 41.679, -71.179, 42.401, -71.578, 42.38, -72.994, 42.924, -74.353, 43.242, -75.715, 43.26, -77.318, 42.981, -78.684, 42.774, -80.05, 42.491, -82.005, 42.517, -83.608, 42.312, -84.977, 41.805, -86.58, 41.525, -88.127, 41.02, -89.731, 40.741, -90.905, 41.582, -92.264, 41.9, -93.861, 42.147, -95.411, 41.341, -96.257, 40.076, -97.222, 38.737, -98.011, 37.17, -98.031, 35.593, -97.691, 34.312, -96.875, 33.25, -97.307, 31.904, -97.916, 30.561, -98.702, 29.295, -99.134, 27.949, -98.14, 26.884, -97.205, 25.821, -95.842, 25.803, -94.42, 25.784, -92.876, 26.064, -91.277, 26.043, -90.085, 26.553, -88.729, 26.01, -87.38, 24.941, -86.031, 23.797, -84.616, 23.253, -83.256, 23.01, -81.887, 23.517, -80.866, 24.555, -80.254, 26.124, -79.642, 27.693, -78.444, 28.728, -77.486, 29.542, -76.463, 30.805, -76.088, 32.377, -75.656, 33.723, -76.051, 35.305, -75.442, 36.649, -74.426, 37.386, -73.228, 38.422, -72.032, 39.232, -70.838, 39.967))");
-    list.add(item);
-    //This is North America
-    //Points  39°44'21.00"N 104°59'3.00"W (Denver) are in the footprint
-
-    item = new HashMap<String, String>();
-    item.put("beam-name",  "testa");
-    item.put("beam-footprint",   "MULTIPOLYGON (((-169.286 40.431, -164.971 39.992, -155.397 38.482, -146.566 36.233, -136.975 32.539, -128.124 27.742, -121.946 24.548, -116.849 21.339, -112.156 17.479, -109.391 14.206, -107.301 11.715, -105.274 9.477, -103.443 8.229, -102.108 7.7, -99.109 7.428, -96.681 7.745, -93.894 8.843, -89.917 11.687, -85.953 15.017, -81.148 17.266, -78.145 17.986, -75.582 17.887, -68.1 17.987, -64.696 18.493, -61.445 19.38, -60.094 20.288, -59.315 21.564, -57.026 26.51, -55.089 30.962, -53.59 33.657, -52.495 34.691, -50.468 36.204, -46.146 38.672, -41.684 40.663, -37.914 42.055, -33.806 43.082, -27.523 44.149, -21.645 44.96, -16.578 45.406, -13.807 45.771, -14.929 50.108, -16.186 53.919, -17.051 56.0, -18.388 58.824, -19.861 61.567, -21.807 64.188, -23.104 65.742, -25.28 67.904, -27.699 69.823, -28.955 70.728, -32.415 72.768, -34.968 73.998, -38.468 75.309, -48.292 73.025, -56.545 71.12, -64.023 70.474, -72.753 70.357, -78.41 70.827, -80.466 71.093, -82.412 71.876, -83.02 72.944, -83.175 74.04, -82.493 74.782, -82.412 75.552, -82.697 76.778, -84.041 78.398, -86.316 81.078, -104.098 80.819, -110.861 80.482, -115.73 80.17, -120.936 79.669, -125.84 79.176, -126.696 79.02, -134.316 77.732, -139.505 76.478, -144.823 74.826, -148.231 73.417, -151.517 71.687, -153.87 70.165, -154.536 69.672, -155.868 68.678, -156.482 68.098, -158.281 66.421, -159.716 64.804, -160.996 63.126, -161.878 61.786, -163.046 59.875, -164.369 57.254, -165.563 54.479, -166.73 51.089, -167.811 47.267, -168.581 44.041, -169.286 40.431)), ((-171.333 23.244, -171.523 18.894, -170.127 18.986, -161.559 18.555, -156.977 18.134, -153.574 18.116, -151.108 18.324, -149.947 18.45, -149.018 18.957, -148.515 19.822, -148.524 20.914, -149.018 21.766, -149.947 22.272, -152.185 23.054, -155.563 23.434, -158.075 23.75, -160.272 24.034, -162.184 24.008, -163.514 23.99, -164.595 23.976, -166.52 23.687, -169.159 23.18, -171.333 23.244)))");
-    list.add(item);
-// this point should be in there...
-    // -164 40 - somewhere near hawaii
-
-    item = new HashMap<String, String>();
-    item.put("beam-name",  "testb");
-    item.put("beam-footprint",   "POLYGON ((-140.153 34.772, -140.341 33.272, -137.024 33.026, -132.723 32.369, -130.947 31.916, -128.664 31.225, -125.293 29.612, -121.813 27.871, -118.699 25.892, -115.589 23.79, -112.593 21.875, -109.136 19.335, -106.939 16.701, -105.006 14.97, -104.195 14.407, -103.049 13.659, -100.363 12.717, -98.063 12.288, -94.299 11.612, -90.825 11.097, -87.997 11.584, -86.815 12.109, -86.163 12.893, -85.014 14.342, -83.804 15.788, -82.104 16.998, -80.413 17.269, -78.005 16.574, -76.181 16.531, -74.65 16.68, -73.552 17.392, -72.957 18.3, -72.917 19.651, -73.526 21.325, -74.913 23.018, -76.036 24.519, -76.159 26.428, -75.741 28.447, -74.257 30.072, -72.771 31.331, -70.517 34.328, -69.638 36.04, -68.624 39.467, -68.015 41.851, -67.607 43.501, -67.548 45.528, -67.586 47.308, -68.601 49.066, -69.868 50.07, -71.621 50.778, -73.285 50.888, -74.9 50.926, -76.994 50.975, -79.332 50.846, -81.066 50.887, -83.842 51.136, -86.569 51.016, -87.95 50.864, -90.831 50.563, -94.27 50.644, -98.068 50.733, -102.937 51.032, -106.455 51.484, -109.973 51.936, -114.119 52.402, -117.363 53.031, -119.899 53.276, -123.243 53.539, -127.017 54.427, -130.519 55.431, -133.643 56.058, -134.826 56.279, -135.354 55.029, -135.792 53.864, -136.168965072136 52.8279962761917, -136.169 52.828, -136.169497186166 52.8264970826432, -136.192 52.763, -136.556548517884 51.6453176911637, -136.703232746756 51.2152965828266, -136.781220290925 50.9919311116929, -136.793 50.959, -136.80274055379 50.9259886895048, -136.992 50.295, -137.200898649547 49.5808675274021, -137.202 49.581, -137.200962495599 49.5806459535167, -137.360714473458 49.0197683891632, -137.459 48.677, -137.462166719028 48.6649126473121, -137.471 48.634, -137.515105536699 48.4619710228524, -137.74710368039 47.5528216167105, -137.793718522461 47.3758260237407, -137.854 47.152, -137.977773277882 46.6610808974241, -138.044 46.403, -138.330834102374 45.1674736036557, -138.365 45.019, -138.38180854655 44.9421315900087, -138.449801069917 44.6389849661384, -138.485 44.484, -138.497077239724 44.4262941289417, -138.536 44.25, -138.622787032392 43.8206200438395, -138.743816168807 43.232032787661, -138.981390224617 42.0843314825185, -138.989 42.048, -138.990605533614 42.0389442888447, -138.991 42.037, -138.997785044232 41.9994454595406, -139.004 41.969, -139.035645873997 41.7890661698517, -139.061212567475 41.6462082823816, -139.428 39.584, -139.673 38.073, -139.713116752585 37.8001474769807, -139.766 37.457, -139.764942047737 37.4567768906428, -139.898 36.573, -139.897723683259 36.5729429963606, -139.986 35.994, -140.04777653037 35.5462970502163, -140.094 35.232, -140.090797568766 35.2315144621917, -140.153 34.772))");
-    list.add(item);
-
-
-
-    //London is in neither footprint - 51°30'0.00"N   0° 7'0.00"W
-    return list;
-  }
-
-
-  public static void writeDenSerialized(Connector connector, Collection<Map<String, String>> data)
-  {
-    // write sample data
-    MultiTableBatchWriter mtbw = connector.createMultiTableBatchWriter(200000, 10000, 1);
-    try
-    {
-      BatchWriter writer;
-      if (mtbw != null)
-      {
-        writer = mtbw.getBatchWriter("partition");
-      }
-      else
-      {
-        writer = connector.createBatchWriter("partition", 200000, 10000, 1);
-      }
-      int count = 0;
-      Mutation m;
-      for (Map<String, String> object : data)
-      {
-        count++;
-        String id = (count < 10 ? "0" + count : "" + count);
-        Text partition = new Text("" + (count % NUM_PARTITIONS));
-
-        StringBuilder value = new StringBuilder();
-        boolean first = true;
-        for (Entry<String, String> entry : object.entrySet())
-        {
-          if (!first)
-          {
-            value.append("\u0000");
-          }
-          else
-          {
-            first = false;
-          }
-          value.append(entry.getKey());
-          value.append("\uFFFD");
-          value.append(entry.getValue());
-
-          // write the general index mutation
-          m = new Mutation(partition);
-          m.put("index", entry.getValue() + "\u0000" + id, "");
-          writer.addMutation(m);
-
-          // write the specific index mutation
-          m = new Mutation(partition);
-          m.put("index", entry.getKey() + "//" + entry.getValue() + "\u0000" + id, "");
-          writer.addMutation(m);
-        }
-
-        // write the event mutation
-        m = new Mutation(partition);
-        m.put("event", id, value.toString());
-        writer.addMutation(m);
-      }
-      writer.close();
-    }
-    catch (CBException e)
-    {
-      e.printStackTrace();
-    }
-    catch (CBSecurityException e)
-    {
-      e.printStackTrace();
-    }
-    catch (TableNotFoundException e)
-    {
-      e.printStackTrace();
-    }
-  }
-}

http://git-wip-us.apache.org/repos/asf/incubator-rya/blob/80faf06d/partition/iterator-test/filter.txt
----------------------------------------------------------------------
diff --git a/partition/iterator-test/filter.txt b/partition/iterator-test/filter.txt
deleted file mode 100644
index b53773f..0000000
--- a/partition/iterator-test/filter.txt
+++ /dev/null
@@ -1,6 +0,0 @@
-<BBOX>
-	<gml:Envelope>
-		<gml:LowerCorner>119 33</gml:LowerCorner>
-		<gml:UpperCorner>120 34</gml:UpperCorner>
-	</gml:Envelope>
-</BBOX>

http://git-wip-us.apache.org/repos/asf/incubator-rya/blob/80faf06d/partition/iterator-test/pom.xml
----------------------------------------------------------------------
diff --git a/partition/iterator-test/pom.xml b/partition/iterator-test/pom.xml
deleted file mode 100644
index daed27f..0000000
--- a/partition/iterator-test/pom.xml
+++ /dev/null
@@ -1,99 +0,0 @@
-<project xmlns="http://maven.apache.org/POM/4.0.0" xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance"
-	xsi:schemaLocation="http://maven.apache.org/POM/4.0.0 http://maven.apache.org/maven-v4_0_0.xsd">
-	<parent>
-		<artifactId>dss</artifactId>
-		<groupId>dss</groupId>
-		<version>2.0.0-SNAPSHOT</version>
-	</parent>
-	<modelVersion>4.0.0</modelVersion>
-	<groupId>dss.webservice</groupId>
-	<artifactId>iterator-test</artifactId>
-	<packaging>jar</packaging>
-	<name>webservice-test</name>
-	<version>0.2.0-SNAPSHOT</version>
-	<description />
-	
-	<properties>
-		<env>USER</env>
-	</properties>
-	
-	<build>
-		<plugins>
-			<plugin>
-				<groupId>org.apache.maven.plugins</groupId>
-				<artifactId>maven-jar-plugin</artifactId>
-				<configuration>
-					<archive>
-						<manifest>
-							<mainClass>dss.webservice.itr.Main</mainClass>
-						</manifest>
-					</archive>
-				</configuration>
-			</plugin>
-			<plugin>
-				<artifactId>maven-dependency-plugin</artifactId>
-				<executions>
-					<execution>	
-						<id>unpack-dependencies</id>
-						<phase>generate-resources</phase>
-						<goals>
-							<goal>unpack-dependencies</goal>
-						</goals>
-					</execution>
-				</executions>
-			</plugin>
-		</plugins>
-		<resources>
-			<resource>
-				<directory>${basedir}/target/dependency</directory>
-			</resource>
-		</resources>
-	</build>
-	<dependencies>
-		<dependency>
-			<groupId>cloudbase</groupId>
-			<artifactId>cloudbase-core</artifactId>
-			<version>1.3.1</version>
-		</dependency>
-		<dependency>
-			<groupId>cloudbase</groupId>
-			<artifactId>cloudbase-start</artifactId>
-			<version>1.3.1</version>
-		</dependency>
-		<dependency>
-			<groupId>org.apache.thrift</groupId>
-			<artifactId>thrift</artifactId>
-			<version>0.3</version>
-		</dependency>
-		<dependency>
-			<groupId>javax.servlet</groupId>
-			<artifactId>servlet-api</artifactId>
-			<scope>provided</scope>
-			<version>2.5</version>
-		</dependency>
-		<dependency>
-			<groupId>log4j</groupId>
-			<artifactId>log4j</artifactId>
-		</dependency>
-		<dependency>
-			<groupId>commons-logging</groupId>
-			<artifactId>commons-logging</artifactId>
-			<version>1.0.4</version>
-		</dependency>
-		<dependency>
-			<groupId>org.apache.hadoop</groupId>
-			<artifactId>hadoop-common</artifactId>
-			<version>0.20.1</version>
-		</dependency>
-		<dependency>
-			<groupId>org.apache</groupId>
-			<artifactId>zookeeper</artifactId>
-			<version>3.3.0</version>
-		</dependency>
-		<dependency>
-			<groupId>sitestore.common</groupId>
-			<artifactId>common-query</artifactId>
-			<version>2.0.0-SNAPSHOT</version>
-		</dependency>
-	</dependencies>
-</project>

http://git-wip-us.apache.org/repos/asf/incubator-rya/blob/80faf06d/partition/iterator-test/src/main/java/dss/webservice/itr/Main.java
----------------------------------------------------------------------
diff --git a/partition/iterator-test/src/main/java/dss/webservice/itr/Main.java b/partition/iterator-test/src/main/java/dss/webservice/itr/Main.java
deleted file mode 100644
index 6b040fc..0000000
--- a/partition/iterator-test/src/main/java/dss/webservice/itr/Main.java
+++ /dev/null
@@ -1,348 +0,0 @@
-package dss.webservice.itr;
-
-import java.io.File;
-import java.util.HashSet;
-import java.util.Map;
-import java.util.Scanner;
-import java.util.Set;
-import java.util.TreeMap;
-import java.util.Map.Entry;
-
-import org.apache.hadoop.io.Text;
-import org.apache.log4j.Level;
-import org.apache.log4j.Logger;
-
-import ss.cloudbase.core.iterators.CellLevelFilteringIterator;
-import ss.cloudbase.core.iterators.CellLevelRecordIterator;
-import ss.cloudbase.core.iterators.ConversionIterator;
-import ss.cloudbase.core.iterators.GMDenIntersectingIterator;
-import ss.cloudbase.core.iterators.SortedMinIterator;
-import ss.cloudbase.core.iterators.SortedRangeIterator;
-import ss.cloudbase.core.iterators.filter.ogc.OGCFilter;
-import cloudbase.core.client.BatchScanner;
-import cloudbase.core.client.Connector;
-import cloudbase.core.client.ZooKeeperInstance;
-import cloudbase.core.data.Key;
-import cloudbase.core.data.PartialKey;
-import cloudbase.core.data.Range;
-import cloudbase.core.data.Value;
-import cloudbase.core.iterators.FilteringIterator;
-import cloudbase.core.iterators.filter.RegExFilter;
-import cloudbase.core.security.Authorizations;
-
-public class Main {
-	private static final Logger logger = Logger.getLogger(Main.class);
-	
-	private static String CB_INSTANCE = "INSTANCENAME"; // INSERT INSTANCE NAME
-	private static String ZK_SERVERS = "r02sv22:2181,r03sv23:2181,r04sv22:2181,r05sv23:2181";
-	private static String CB_USER = "user"; // SET USERNAME
-	private static String CB_PASS = "pass"; // SET PASSWORD
-	private static String CB_AUTH = "U,FOUO";
-	private static String CB_TABLE = "partition_gi";
-	
-	public static void main(String[] args) {
-		Map<String,String> request = new TreeMap<String, String>();
-		
-		int itrLevel = 50;
-		
-		for (String pair: args) {
-			String[] parts = pair.split("[=]");
-			if (parts.length == 1) {
-				request.put(parts[0], parts[0]);
-			} else if (parts.length == 2) {
-				request.put(parts[0], parts[1]);
-			}
-		}
-		
-		BatchScanner reader = null;
-		
-		String filter = request.remove("filter");
-		String terms = request.remove("terms");
-		String ranges = request.remove("ranges");
-		String partition = request.remove("partition");
-		String rangeFamily = request.remove("rangeFamily");
-		String prefix = request.remove("prefix");
-		String index = request.remove("index");
-		String test = request.remove("test");
-		String testKey = request.remove("testKey");
-		String convert = request.remove("convert");
-		String grep = request.remove("grep");
-		int print = -1;
-		
-		try {
-			print = Integer.parseInt(request.remove("print"));
-		} catch (NumberFormatException e) {
-			print = 0;
-		}
-	
-		boolean dryRun = request.remove("dryRun") != null;
-		boolean debug = request.remove("debug") != null;
-		boolean startInclusive = request.remove("start") != null;
-		boolean endInclusive = request.remove("end") != null;
-		boolean nodoc = request.remove("nodoc") != null;
-		boolean multiDoc = request.remove("multiDoc") != null;
-		boolean aggregate = request.remove("aggregate") != null;
-		
-		int threads = 5;
-		if (request.containsKey("threads")) {
-			threads = Integer.parseInt(request.remove("threads"));
-		}
-		
-		if (partition != null) {
-			partition = partition.replace(".", "\u0000");
-		}
-		
-		if (index != null) {
-			index = index.replace(':', '=');
-		}
-		
-		if (testKey != null) {
-			testKey = testKey.replace(".", "\u0000");
-		}
-		
-		if (request.containsKey("c")) {
-			CB_INSTANCE = request.remove("c");
-		}
-		
-		if (request.containsKey("z")) {
-			ZK_SERVERS = request.remove("z");
-		}
-		
-		if (request.containsKey("u")) {
-			CB_USER = request.remove("u");
-		}
-		
-		if (request.containsKey("p")) {
-			CB_PASS = request.remove("p");
-		}
-		
-		if (request.containsKey("s")) {
-			CB_AUTH = request.remove("s");
-		}
-		
-		if (request.containsKey("t")) {
-			CB_TABLE = request.remove("t");
-		}
-		
-		logger.info("Cloudbase Connection: ");
-		logger.info("\tc (instance):\t" + CB_INSTANCE);
-		logger.info("\tz (zk servers):\t" + ZK_SERVERS);
-		logger.info("\tu (user):\t" + CB_USER);
-		logger.info("\tp (pass):\t" + CB_PASS);
-		logger.info("\ts (auths):\t" + CB_AUTH);
-		logger.info("\tt (table):\t" + CB_TABLE);
-		
-		logger.info("Query Parameters:");
-		logger.info("\tindex:\t\t" + index);
-		logger.info("\tfilter:\t\t" + filter);
-		logger.info("\tterms:\t\t" + terms);
-		logger.info("\tgrep:\t\t" + grep);
-		logger.info("\tprefix:\t\t" + prefix);
-		logger.info("\tranges:\t\t" + ranges);
-		logger.info("\trangeFamily:\t" + rangeFamily);
-		logger.info("\tpartition:\t" + partition);
-		logger.info("\tstartInc:\t" + startInclusive);
-		logger.info("\tendInc:\t\t" + endInclusive);
-		logger.info("\tthreads:\t" + threads);
-		logger.info("\tprint:\t\t" + print);
-		logger.info("\tdryRun:\t\t" + dryRun);
-		logger.info("\tdebug:\t\t" + debug);
-		logger.info("\ttestKey:\t" + testKey);
-		logger.info("\tmultiDoc:\t" + multiDoc);
-		logger.info("\taggregate:\t" + aggregate);
-		logger.info("\tconvert:\t" + convert);
-		
-		logger.info("Unknown Parameters: ");
-		for (Entry<String,String> entry: request.entrySet()) {
-			logger.info("\t" + entry.getKey() + ":\t\t" + entry.getValue());
-		}
-		
-		if (debug) {
-			// set the cloudbase logging to trace
-			Logger.getLogger("cloudbase").setLevel(Level.TRACE);
-		}
-		
-		boolean iteratorSet = false;
-		
-		try {
-			ZooKeeperInstance zk = new ZooKeeperInstance(CB_INSTANCE, ZK_SERVERS);
-			Connector connector = new Connector(zk, CB_USER, CB_PASS.getBytes());
-			if (test != null) {
-				Test t = (Test) Class.forName("dss.webservice.itr.test." + test).newInstance();
-				t.runTest(request, connector, CB_TABLE, CB_AUTH);
-				logger.info("done.");
-				System.exit(0);
-			}
-			reader = connector.createBatchScanner(CB_TABLE, new Authorizations(CB_AUTH.split(",")), threads);
-	
-			Set<Range> partitionRanges = new HashSet<Range>();
-			if (partition != null) {
-				partition = partition.replace(".", "\u0000");
-				Key startKey = null;
-				Key endKey = null;
-				if (partition.contains(",")) {
-					startKey = new Key(new Text(partition.split(",")[0]));
-					endKey = new Key(new Text(partition.split(",")[1]));
-				} else {
-					startKey = new Key(new Text(partition));
-					endKey = startKey.followingKey(PartialKey.ROW);
-				}
-				
-				Range range = new Range(startKey, true, endKey, false);
-				if (testKey != null) {
-					Key kTest = new Key(new Text(testKey));
-					if (range.contains(kTest)) {
-						logger.info("Key " + kTest + " is in the current range");
-					} else {
-						logger.info("Key " + kTest + " is not in the current range");
-					}
-				}
-				partitionRanges.add(range);
-			} else {
-				partitionRanges.add(new Range());
-			}
-	
-			if (terms != null && terms.trim().length() > 0) {
-				String[] parts = terms.trim().split(",");
-				if (parts.length == 1) {
-					logger.info("Creating range iterator from '" + parts[0] + "' to '" + parts[0] + "\\u0000'.");
-					reader.setScanIterators(itrLevel++, SortedRangeIterator.class.getName(), "ri");
-					reader.setScanIteratorOption("ri", SortedRangeIterator.OPTION_DOC_COLF, "event");
-					reader.setScanIteratorOption("ri", SortedRangeIterator.OPTION_COLF, "index");
-					reader.setScanIteratorOption("ri", SortedRangeIterator.OPTION_LOWER_BOUND, parts[0]);
-					reader.setScanIteratorOption("ri", SortedRangeIterator.OPTION_UPPER_BOUND, parts[0] + "\u0000");
-					reader.setScanIteratorOption("ri", SortedRangeIterator.OPTION_MULTI_DOC, "" + multiDoc);
-					iteratorSet = true;
-				} else if (parts.length > 1) {
-					logger.info("Creating intersecting iterator from all terms");
-					Text[] t = new Text[parts.length];
-					for (int i = 0; i < parts.length; i++) {
-						if (parts[i].startsWith("range")) {
-							parts[i] = parts[i].replace("_", "\u0000");
-						}
-						
-						t[i] = new Text(parts[i]);
-						logger.info("Adding Term: " + parts[i]);
-					}
-
-					reader.setScanIterators(itrLevel++, GMDenIntersectingIterator.class.getName(), "ii");
-					reader.setScanIteratorOption("ii", GMDenIntersectingIterator.docFamilyOptionName, "event");
-					reader.setScanIteratorOption("ii", GMDenIntersectingIterator.indexFamilyOptionName, "index");
-					reader.setScanIteratorOption("ii", GMDenIntersectingIterator.columnFamiliesOptionName, GMDenIntersectingIterator.encodeColumns(t));
-					reader.setScanIteratorOption("ii", GMDenIntersectingIterator.OPTION_MULTI_DOC, "" + multiDoc);
-					iteratorSet = true;
-				}
-			} else if (ranges != null && ranges.trim().length() > 0) {
-				// set up a range iterator
-				logger.info("Creating range iterator on " + (rangeFamily != null ? rangeFamily: "index") + " for all ranges startInclusive: " + startInclusive + " endInclusive: " + endInclusive);
-				String[] parts = ranges.trim().split(",");
-				if (parts.length > 1 && parts.length % 2 == 0) {
-//					reader.setScanIterators(itrLevel++, RangeIterator.class.getName(), "ri");
-//					reader.setScanIteratorOption("ri", RangeIterator.OPTION_INDEX_COLF, rangeFamily != null ? rangeFamily: "index");
-//					reader.setScanIteratorOption("ri", RangeIterator.OPTION_START_INCLUSIVE, "" + startInclusive);
-//					reader.setScanIteratorOption("ri", RangeIterator.OPTION_END_INCLUSIVE, "" + endInclusive);
-//					reader.setScanIteratorOption("ri", RangeIterator.OPTION_RANGES, RangeIterator.encodeRanges(parts));
-					
-					reader.setScanIterators(itrLevel++, SortedRangeIterator.class.getName(), "ir");
-					reader.setScanIteratorOption("ir", SortedRangeIterator.OPTION_COLF, rangeFamily != null ? rangeFamily: "index");
-					reader.setScanIteratorOption("ir", SortedRangeIterator.OPTION_START_INCLUSIVE, "" + startInclusive);
-					reader.setScanIteratorOption("ir", SortedRangeIterator.OPTION_END_INCLUSIVE, "" + endInclusive);
-					reader.setScanIteratorOption("ir", SortedRangeIterator.OPTION_LOWER_BOUND, parts[0]);
-					reader.setScanIteratorOption("ir", SortedRangeIterator.OPTION_UPPER_BOUND, parts[1]);
-					reader.setScanIteratorOption("ir", SortedRangeIterator.OPTION_MULTI_DOC, "" + multiDoc);
-					iteratorSet = true;
-				} else {
-					throw new RuntimeException("A start and end range must be given for each range");
-				}
-			} else if (index != null && index.trim().length() > 0 && partition != null) {
-				// look for an index on a partition
-				
-				// get out the ranges and add the index colf and term colq
-				Range r = partitionRanges.iterator().next();
-				Key start = new Key (r.getStartKey().getRow(), new Text("index"), new Text(index));
-				Key end = new Key (r.getStartKey().getRow(), new Text("index"), new Text(index + "\uFFFD"));
-				partitionRanges.clear();
-				partitionRanges.add(new Range(start, true, end, false));
-				iteratorSet = true;
-				
-			} else if (prefix != null && prefix.trim().length() > 0) {
-				logger.info("Setting a min iterator on " + prefix);
-				reader.setScanIterators(itrLevel++, SortedMinIterator.class.getName(), "mi");
-				reader.setScanIteratorOption("mi", SortedMinIterator.OPTION_PREFIX, prefix);
-				reader.setScanIteratorOption("mi", SortedMinIterator.OPTION_MULTI_DOC, "" + multiDoc);
-				iteratorSet = true;
-			}
-			
-			if (aggregate) {
-				reader.setScanIterators(itrLevel++, CellLevelRecordIterator.class.getName(), "aggregator");
-			}
-			
-			if (filter != null && filter.trim().length() > 0) {
-				logger.info("Creating filtering iterator from filter in " + filter);
-				Scanner scanner = new Scanner(new File(filter));
-				
-				filter = "";
-				while (scanner.hasNextLine()) {
-					filter += scanner.nextLine().trim(); 
-				}
-				
-				// set up a filtering iterator
-				logger.info("Filer = " + filter);
-				
-				if (multiDoc && !aggregate) {
-					reader.setScanIterators(itrLevel++, CellLevelFilteringIterator.class.getName(), "fi");
-					reader.setScanIteratorOption("fi", CellLevelFilteringIterator.OPTION_FILTER, filter);
-				} else {
-					reader.setScanIterators(itrLevel++, FilteringIterator.class.getName(), "fi");
-					reader.setScanIteratorOption("fi", "0", OGCFilter.class.getName());
-					reader.setScanIteratorOption("fi", "0." + OGCFilter.OPTION_FILTER, filter);
-//					reader.setScanIteratorOption("fi", "1", RegExFilter.class.getName());
-//					reader.setScanIteratorOption("fi", "1." + RegExFilter.ROW_REGEX, "theRegex");
-				}
-				iteratorSet = true;
-			}
-			
-			if (convert != null && convert.trim().length() > 0) {
-				convert = convert.replaceAll("_", " ");
-				String[] conversions = convert.split(",");
-				reader.setScanIterators(itrLevel++, ConversionIterator.class.getName(), "ci");
-				reader.setScanIteratorOption("ci", ConversionIterator.OPTION_CONVERSIONS, ConversionIterator.encodeConversions(conversions));
-				reader.setScanIteratorOption("ci", ConversionIterator.OPTION_MULTI_DOC, "" + (multiDoc && ! aggregate));
-			}
-			
-			logger.info("Setting range to: " + partitionRanges.iterator().next());
-			reader.setRanges(partitionRanges);
-			
-			if (!iteratorSet) {
-				reader.fetchColumnFamily(new Text("event"));
-			}
-			if (!dryRun) {
-				long start = System.currentTimeMillis();
-				int count = 0;
-				String id = null;
-				for (Entry<Key, Value> entry: reader) {
-					count++;
-					if (print == -1 || count <= print) {
-						String text = entry.getKey() + "\t" + entry.getValue();
-						
-						if ((grep != null && text.contains(grep)) || grep == null) {
-							logger.info(text);
-						}
-					}
-				}
-				reader.close();
-				logger.info("Time: " + (System.currentTimeMillis() - start) + " ms");
-				logger.info("Count: " + count);
-			} else if (!iteratorSet) {
-				logger.info("No iterator was set from the provided parameters (and I'm not doing a full table scan... so there).");
-			} else {
-				logger.info("Dry run complete.");
-			}
-			logger.info("Done");
-			System.exit(0);
-		} catch (Exception e) {
-			logger.error(e, e);
-			System.exit(1);
-		}
-	}
-}

http://git-wip-us.apache.org/repos/asf/incubator-rya/blob/80faf06d/partition/iterator-test/src/main/java/dss/webservice/itr/Test.java
----------------------------------------------------------------------
diff --git a/partition/iterator-test/src/main/java/dss/webservice/itr/Test.java b/partition/iterator-test/src/main/java/dss/webservice/itr/Test.java
deleted file mode 100644
index 0b036b7..0000000
--- a/partition/iterator-test/src/main/java/dss/webservice/itr/Test.java
+++ /dev/null
@@ -1,9 +0,0 @@
-package dss.webservice.itr;
-
-import java.util.Map;
-
-import cloudbase.core.client.Connector;
-
-public interface Test {
-	void runTest(Map<String, String> request, Connector connector, String table, String auths);
-}

http://git-wip-us.apache.org/repos/asf/incubator-rya/blob/80faf06d/partition/iterator-test/src/main/java/dss/webservice/itr/test/AddTestRecords.java
----------------------------------------------------------------------
diff --git a/partition/iterator-test/src/main/java/dss/webservice/itr/test/AddTestRecords.java b/partition/iterator-test/src/main/java/dss/webservice/itr/test/AddTestRecords.java
deleted file mode 100644
index 2139528..0000000
--- a/partition/iterator-test/src/main/java/dss/webservice/itr/test/AddTestRecords.java
+++ /dev/null
@@ -1,43 +0,0 @@
-package dss.webservice.itr.test;
-
-import java.util.Map;
-
-import org.apache.hadoop.io.Text;
-
-import cloudbase.core.client.BatchWriter;
-import cloudbase.core.client.CBException;
-import cloudbase.core.client.CBSecurityException;
-import cloudbase.core.client.Connector;
-import cloudbase.core.client.MultiTableBatchWriter;
-import cloudbase.core.client.TableNotFoundException;
-import cloudbase.core.data.Mutation;
-import cloudbase.core.data.Value;
-import cloudbase.core.security.ColumnVisibility;
-import dss.webservice.itr.Test;
-
-public class AddTestRecords implements Test {
-
-	@Override
-	public void runTest(Map<String, String> request, Connector connector, String table, String auths) {
-		MultiTableBatchWriter mtbw = connector.createMultiTableBatchWriter(200000000, 500, 4);
-		try {
-			BatchWriter writer = mtbw.getBatchWriter(table);
-			Mutation m = new Mutation(new Text("elint//rdate:79899179//geokey:20223312022200"));
-			m.put(new Text("event"), new Text("02eacfa1-b548-11df-b72e-002219501672"), new ColumnVisibility(new Text("U&FOUO")), new Value("uuid~event\uFFFD02eacfa1-b548-11df-b72e-002219501672\u0000date\uFFFD20100820\u0000time~dss\uFFFD010226.000\u0000technology\uFFFDelint\u0000feedName\uFFFDParserBinarySpSigFlat\u0000systemName\uFFFDSP\u0000pddg\uFFFDBJ\u0000latitude\uFFFD46.79429069085071\u0000longitude\uFFFD9.852863417535763\u0000altitude\uFFFD1841.0\u0000geoerror~semimajor\uFFFD3709.1270902747297\u0000geoerror~semiminor\uFFFD1896.9438653491684\u0000geoerror~tilt\uFFFD68.68795738630202\u0000frequency\uFFFD\u0000cenot_elnot\uFFFD008LJ\u0000datetime\uFFFD2010-08-20T01:02:26.000Z".getBytes()));
-			
-			writer.addMutation(m);
-			mtbw.flush();
-			mtbw.close();
-		} catch (CBException e) {
-			// TODO Auto-generated catch block
-			e.printStackTrace();
-		} catch (CBSecurityException e) {
-			// TODO Auto-generated catch block
-			e.printStackTrace();
-		} catch (TableNotFoundException e) {
-			// TODO Auto-generated catch block
-			e.printStackTrace();
-		}
-	}
-
-}

http://git-wip-us.apache.org/repos/asf/incubator-rya/blob/80faf06d/partition/iterator-test/src/main/java/dss/webservice/itr/test/BaseTileTest.java
----------------------------------------------------------------------
diff --git a/partition/iterator-test/src/main/java/dss/webservice/itr/test/BaseTileTest.java b/partition/iterator-test/src/main/java/dss/webservice/itr/test/BaseTileTest.java
deleted file mode 100644
index a8f01a9..0000000
--- a/partition/iterator-test/src/main/java/dss/webservice/itr/test/BaseTileTest.java
+++ /dev/null
@@ -1,132 +0,0 @@
-package dss.webservice.itr.test;
-
-import java.util.ArrayList;
-import java.util.HashSet;
-import java.util.List;
-import java.util.Map;
-import java.util.Set;
-import java.util.Map.Entry;
-
-import org.apache.hadoop.io.Text;
-import org.apache.log4j.Logger;
-
-import cloudbase.core.client.BatchScanner;
-import cloudbase.core.client.Connector;
-import cloudbase.core.data.Key;
-import cloudbase.core.data.Range;
-import cloudbase.core.data.Value;
-import cloudbase.core.security.Authorizations;
-import dss.webservice.itr.Test;
-
-public class BaseTileTest implements Test {
-	private static final Logger logger = Logger.getLogger(BaseTileTest.class);
-	
-	String comboIndexTable = "index_v2";
-	String type = "hpcp";
-	
-	@Override
-	public void runTest(Map<String, String> request, Connector connector, String table, String auths) {
-		if (!request.containsKey("dates")) {
-			logger.warn("No 'dates' parameter supplied. e.g. dates=20100720,20100721...");
-			return;
-		}
-		
-		if (request.containsKey("type")) {
-			type = request.get("type");
-		}
-		
-		String[] dates = request.get("dates").split(",");		
-		
-		List<Long> comboTimes = new ArrayList<Long>();
-		List<Long> partTimes = new ArrayList<Long>();
-		List<Long> comboCounts = new ArrayList<Long>();
-		List<Long> partCounts = new ArrayList<Long>();
-		List<String> errors = new ArrayList<String>();
-		try {
-			for (String date: dates) {
-				long rdate = 99999999 - Long.parseLong(date);
-				for (int g = 0; g < 8; g++) {
-					String begin = type + "//rdate:" + rdate + "//geokey:" + g;
-					String end   = type + "//rdate:" + rdate + "//geokey:" + (g+1);
-					long count = 0;
-					Set<Range> ranges = new HashSet<Range>();
-
-					logger.info("Running test for " + begin + " ...");
-					// run combo index test
-					BatchScanner reader = connector.createBatchScanner(table, new Authorizations(auths.split(",")), 30);
-					ranges.add(new Range(new Key(new Text(begin)), true, new Key(new Text(end)), false));
-					
-					reader.setRanges(ranges);
-					long start = System.currentTimeMillis();
-					for (Entry<Key, Value> entry: reader) {
-						count++;
-					}
-					comboTimes.add(System.currentTimeMillis() - start);
-					comboCounts.add(count);
-					
-					logger.info("\tC count=" + count + " time=" + comboTimes.get(comboTimes.size() - 1) + " ms");
-					
-					count = 0;
-					
-					// run partition index test
-//					reader = connector.createBatchScanner(table, new Authorizations(auths.split(",")), 30);
-//					
-//					reader.setScanIterators(3, SortedRangeIterator.class.getName(), "ri");
-//					reader.setScanIteratorOption("ri", SortedRangeIterator.OPTION_LOWER_BOUND, begin.replace("geokey", "geoKey"));
-//					reader.setScanIteratorOption("ri", SortedRangeIterator.OPTION_UPPER_BOUND, end.replace("geokey", "geoKey"));
-//					
-//					ranges.clear();
-//					ranges.add(new Range(new Key(new Text("date:" + date)), true, new Key(new Text("date:" + date + "z")), false));
-//					reader.setRanges(ranges);
-//					
-//					start = System.currentTimeMillis();
-//					for (Entry<Key, Value> entry: reader) {
-//						count++;
-//					}
-//					partTimes.add(System.currentTimeMillis() - start);
-//					partCounts.add(count);
-//					
-//					if (count != comboCounts.get(comboCounts.size() - 1)) {
-//						String msg = "Counts differed for " + begin + " C: " + comboCounts.get(comboCounts.size() - 1) + " P: " + count; 
-//						logger.warn(msg);
-//						errors.add(msg);
-//					}
-//					logger.info("\tP count=" + count + " time=" + partTimes.get(partTimes.size() - 1) + " ms");
-				}
-			}
-			
-			logger.info("********************* RESULTS *********************");
-			logger.info("Tested all 0 level tiles on " + type + " for " + request.get("dates"));
-			//logger.info("This is a test of SortedRangeIterator performance");
-			
-			double comboSum = 0, partSum = 0;
-			for (int i = 0; i < comboTimes.size(); i++) {
-				comboSum += comboTimes.get(i);
-				//partSum += partTimes.get(i);
-			}
-			
-			logger.info("Average C Time: " + (comboSum / comboTimes.size()) + " ms");
-			//logger.info("Average P Time: " + (partSum / partTimes.size()) + " ms");
-			
-			comboSum = 0; 
-			partSum = 0;
-			
-			for (int i = 0; i < comboCounts.size(); i++) {
-				comboSum += comboCounts.get(i);
-				//partSum += partCounts.get(i);
-			}
-			
-			logger.info("Average C Count: " + (comboSum / comboCounts.size()));
-			//logger.info("Average P Count: " + (partSum / partCounts.size()));
-			
-			if (errors.size() > 0) {
-				logger.warn("ERRORS!!!:");
-				for (String e: errors) {
-					logger.warn(e);
-				}
-			}
-		} catch (Exception e) {
-			logger.error(e, e);
-		}
-	}
-}

http://git-wip-us.apache.org/repos/asf/incubator-rya/blob/80faf06d/partition/iterator-test/src/main/java/dss/webservice/itr/test/ConversionTest.java
----------------------------------------------------------------------
diff --git a/partition/iterator-test/src/main/java/dss/webservice/itr/test/ConversionTest.java b/partition/iterator-test/src/main/java/dss/webservice/itr/test/ConversionTest.java
deleted file mode 100644
index 01abb31..0000000
--- a/partition/iterator-test/src/main/java/dss/webservice/itr/test/ConversionTest.java
+++ /dev/null
@@ -1,159 +0,0 @@
-package dss.webservice.itr.test;
-
-import java.util.ArrayList;
-import java.util.HashSet;
-import java.util.List;
-import java.util.Map;
-import java.util.Set;
-import java.util.Map.Entry;
-
-import org.apache.hadoop.io.Text;
-import org.apache.log4j.Logger;
-
-import ss.cloudbase.core.iterators.ConversionIterator;
-import cloudbase.core.client.BatchScanner;
-import cloudbase.core.client.Connector;
-import cloudbase.core.data.Key;
-import cloudbase.core.data.Range;
-import cloudbase.core.data.Value;
-import cloudbase.core.security.Authorizations;
-import dss.webservice.itr.Test;
-
-public class ConversionTest implements Test {
-	private static final Logger logger = Logger.getLogger(ConversionTest.class);
-	
-	String comboIndexTable = "index_v3";
-	String type = "hpcp";
-	
-	@Override
-	public void runTest(Map<String, String> request, Connector connector, String table, String auths) {
-		if (!request.containsKey("dates")) {
-			logger.warn("No 'dates' parameter supplied. e.g. dates=20100720,20100721...");
-			return;
-		}
-		
-		if (request.containsKey("type")) {
-			type = request.get("type");
-		}
-		
-		int threads = 5;
-		if (request.containsKey("threads")) {
-			threads = Integer.parseInt(request.remove("threads"));
-		}
-		
-		String[] dates = request.get("dates").split(",");		
-		
-		List<Long> baseTimes = new ArrayList<Long>();
-		List<Long> convertTimes = new ArrayList<Long>();
-		List<Long> baseCounts = new ArrayList<Long>();
-		List<Long> convertCounts = new ArrayList<Long>();
-		List<String> errors = new ArrayList<String>();
-		
-		List<Value> values = new ArrayList<Value>();
-		
-		try {
-			for (String date: dates) {
-				long rdate = 99999999 - Long.parseLong(date);
-				for (int g = 0; g < 8; g++) {
-					String begin = type + "//rdate:" + rdate + "//geokey:" + g;
-					String end   = type + "//rdate:" + rdate + "//geokey:" + (g+1);
-					long count = 0;
-					Set<Range> ranges = new HashSet<Range>();
-
-					logger.info("Running test for " + begin + " ...");
-					// run combo index test
-					BatchScanner reader = connector.createBatchScanner(table, new Authorizations(auths.split(",")), threads);
-					ranges.add(new Range(new Key(new Text(begin)), true, new Key(new Text(end)), false));
-					
-					reader.setRanges(ranges);
-					values.clear();
-					long start = System.currentTimeMillis();
-					for (Entry<Key, Value> entry: reader) {
-						values.add(entry.getValue());
-						count++;
-					}
-					baseTimes.add(System.currentTimeMillis() - start);
-					baseCounts.add(count);
-					
-					logger.info("\tBase    count=" + count + " time=" + baseTimes.get(baseTimes.size() - 1) + " ms");
-					
-					count = 0;
-					for (Value value: values) {
-						logger.info("\t"  + value.toString());
-						count++;
-						if (count == 2) {
-							break;
-						}
-					}
-					
-					count = 0;
-					values.clear();
-					
-					reader = connector.createBatchScanner(table, new Authorizations(auths.split(",")), threads);
-					ranges.add(new Range(new Key(new Text(begin)), true, new Key(new Text(end)), false));
-					
-					reader.setScanIterators(50, ConversionIterator.class.getName(), "ci");
-					reader.setScanIteratorOption("ci", ConversionIterator.OPTION_CONVERSIONS, ConversionIterator.encodeConversions(new String[] {
-						"frequency / 1000000"
-					}));
-					
-					reader.setRanges(ranges);
-					values.clear();
-					start = System.currentTimeMillis();
-					for (Entry<Key, Value> entry: reader) {
-						values.add(entry.getValue());
-						count++;
-					}
-					
-					convertTimes.add(System.currentTimeMillis() - start);
-					convertCounts.add(count);
-					
-					logger.info("\tConvert count=" + count + " time=" + convertTimes.get(convertTimes.size() - 1) + " ms");
-					
-					count = 0;
-					for (Value value: values) {
-						logger.info("\t"  + value.toString());
-						count++;
-						if (count == 2) {
-							break;
-						}
-					}
-				}
-			}
-			
-			logger.info("********************* RESULTS *********************");
-			logger.info("Tested all 0 level tiles on " + type + " for " + request.get("dates"));
-			logger.info("This is a test of ConversionIterator performance");
-			
-			double baseSum = 0, convertSum = 0;
-			for (int i = 0; i < baseTimes.size(); i++) {
-				baseSum += baseTimes.get(i);
-				convertSum += convertTimes.get(i);
-			}
-			
-			logger.info("Average Base    Time: " + (baseSum / baseTimes.size()) + " ms");
-			logger.info("Average Convert Time: " + (convertSum / convertTimes.size()) + " ms");
-			
-			baseSum = 0; 
-			convertSum = 0;
-			
-			for (int i = 0; i < baseCounts.size(); i++) {
-				baseSum += baseCounts.get(i);
-				convertSum += convertCounts.get(i);
-			}
-			
-			logger.info("Average Base    Count: " + (baseSum / baseCounts.size()));
-			logger.info("Average Convert Count: " + (convertSum / convertCounts.size()));
-			
-			if (errors.size() > 0) {
-				logger.warn("ERRORS!!!:");
-				for (String e: errors) {
-					logger.warn(e);
-				}
-			}
-		} catch (Exception e) {
-			logger.error(e, e);
-		}
-	}
-
-}

http://git-wip-us.apache.org/repos/asf/incubator-rya/blob/80faf06d/partition/iterator-test/test.sh
----------------------------------------------------------------------
diff --git a/partition/iterator-test/test.sh b/partition/iterator-test/test.sh
deleted file mode 100644
index 5a6cf1f..0000000
--- a/partition/iterator-test/test.sh
+++ /dev/null
@@ -1,3 +0,0 @@
-#!/bin/bash
-
-java -jar target/iterator-test-0.2.0-SNAPSHOT.jar z=localhost t=test $@

http://git-wip-us.apache.org/repos/asf/incubator-rya/blob/80faf06d/partition/mr.partition.rdf/pom.xml
----------------------------------------------------------------------
diff --git a/partition/mr.partition.rdf/pom.xml b/partition/mr.partition.rdf/pom.xml
deleted file mode 100644
index bb1f0b2..0000000
--- a/partition/mr.partition.rdf/pom.xml
+++ /dev/null
@@ -1,79 +0,0 @@
-<?xml version="1.0" encoding="UTF-8"?>
-<project xmlns="http://maven.apache.org/POM/4.0.0"
-         xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance"
-         xsi:schemaLocation="http://maven.apache.org/POM/4.0.0 http://maven.apache.org/xsd/maven-4.0.0.xsd">
-    <modelVersion>4.0.0</modelVersion>
-    <parent>
-        <groupId>mvm.rya</groupId>
-		<artifactId>parent</artifactId>
-		<version>2.0.0-SNAPSHOT</version>
-    </parent>
-
-    <groupId>mvm.mmrts.rdf</groupId>
-    <artifactId>mr.partition.rdf</artifactId>
-    <version>1.0.0-SNAPSHOT</version>
-    <name>${project.groupId}.${project.artifactId}</name>
-    
-    <dependencies>
-        <dependency>
-            <groupId>mvm.mmrts.rdf</groupId>
-            <artifactId>partition.rdf</artifactId>
-            <version>1.0.0-SNAPSHOT</version>
-        </dependency>
-        <dependency>
-            <groupId>org.openrdf.sesame</groupId>
-            <artifactId>sesame-queryparser-sparql</artifactId>
-            <version>${openrdf.sesame.version}</version>
-        </dependency>
-        <dependency>
-            <groupId>org.apache.hadoop</groupId>
-            <artifactId>hadoop-common</artifactId>
-            <version>0.20.2</version>
-        </dependency>
-        <dependency>
-            <groupId>org.apache.mrunit</groupId>
-            <artifactId>mrunit</artifactId>
-            <version>0.5.0-incubating</version>
-            <scope>test</scope>
-        </dependency>
-    </dependencies>
-    <build>
-		<plugins>
-			<plugin>
-                <!-- NOTE: We don't need a groupId specification because the group is
-                        org.apache.maven.plugins ...which is assumed by default. -->
-                <artifactId>maven-assembly-plugin</artifactId>
-                <dependencies>
-                    <dependency>
-                        <groupId>mvm.cloud</groupId>
-                        <artifactId>hadoop-job-assembly</artifactId>
-                        <version>1.0.0-SNAPSHOT</version>
-                    </dependency>
-                </dependencies>
-                <executions>
-                    <execution>
-                        <id>make-assembly</id>
-                        <phase>package</phase>
-                        <goals>
-                            <goal>single</goal>
-                        </goals>
-                        <configuration>
-                            <attach>false</attach>
-                            <descriptors>
-                                <descriptor>assemblies/job.xml</descriptor>
-                            </descriptors>
-                        </configuration>
-                    </execution>
-                </executions>
-            </plugin>
-			<plugin>
-				<groupId>org.apache.maven.plugins</groupId>
-				<artifactId>maven-compiler-plugin</artifactId>
-				<configuration>
-					<source>1.6</source>
-					<target>1.6</target>
-				</configuration>
-			</plugin>
-		</plugins>
-	</build>
-</project>