Posted to dev@rya.apache.org by mi...@apache.org on 2015/12/22 17:49:25 UTC

[01/56] [abbrv] incubator-rya git commit: RYA-7 POM and License Clean-up for Apache Move

Repository: incubator-rya
Updated Branches:
  refs/heads/master 92ddfa591 -> 7743a42a5


http://git-wip-us.apache.org/repos/asf/incubator-rya/blob/80faf06d/web/web.rya/src/test/resources/controllerTest-context.xml
----------------------------------------------------------------------
diff --git a/web/web.rya/src/test/resources/controllerTest-context.xml b/web/web.rya/src/test/resources/controllerTest-context.xml
index 27fd8bf..ec43c5b 100644
--- a/web/web.rya/src/test/resources/controllerTest-context.xml
+++ b/web/web.rya/src/test/resources/controllerTest-context.xml
@@ -1,22 +1,23 @@
+<?xml version='1.0'?>
 <!--
-  #%L
-  mvm.rya.web.rya
-  %%
-  Copyright (C) 2014 Rya
-  %%
-  Licensed under the Apache License, Version 2.0 (the "License");
-  you may not use this file except in compliance with the License.
-  You may obtain a copy of the License at
-  
-       http://www.apache.org/licenses/LICENSE-2.0
-  
-  Unless required by applicable law or agreed to in writing, software
-  distributed under the License is distributed on an "AS IS" BASIS,
-  WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-  See the License for the specific language governing permissions and
-  limitations under the License.
-  #L%
-  -->
+Licensed to the Apache Software Foundation (ASF) under one
+or more contributor license agreements.  See the NOTICE file
+distributed with this work for additional information
+regarding copyright ownership.  The ASF licenses this file
+to you under the Apache License, Version 2.0 (the
+"License"); you may not use this file except in compliance
+with the License.  You may obtain a copy of the License at
+
+  http://www.apache.org/licenses/LICENSE-2.0
+
+Unless required by applicable law or agreed to in writing,
+software distributed under the License is distributed on an
+"AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+KIND, either express or implied.  See the License for the
+specific language governing permissions and limitations
+under the License.
+-->
+
 <beans xmlns="http://www.springframework.org/schema/beans"
        xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance"
        xsi:schemaLocation="


[49/56] [abbrv] incubator-rya git commit: RYA-7 POM and License Clean-up for Apache Move

Posted by mi...@apache.org.
RYA-7 POM and License Clean-up for Apache Move

Numerous POM and license modifications:
 - Make the Apache POM the parent
 - Depend on Apache releases for Hadoop and ZooKeeper
 - Update dependencies to their most recent bug-fix releases
 - Move dependency versions to the parent POM
 - Change the groupId from mvm.rya to org.apache.rya (see the sketch after this list)
 - Set a proper name for each Maven module
 - Fix the SCM block in the parent POM
 - Remove dead/Cloudbase (CB) projects
 - Remove extra repositories/profiles
 - Move rya.sail.impl to rya.sail
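
As an illustration, here is a minimal sketch of a child module's parent
declaration after these changes. The coordinates are taken from the
common/pom.xml hunk later in this message; anything beyond them is assumed:

    <!-- Sketch only: coordinates as they appear in the common/pom.xml diff below. -->
    <parent>
        <groupId>org.apache.rya</groupId>
        <artifactId>rya-project</artifactId>
        <version>3.2.10-SNAPSHOT</version>
    </parent>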


Project: http://git-wip-us.apache.org/repos/asf/incubator-rya/repo
Commit: http://git-wip-us.apache.org/repos/asf/incubator-rya/commit/80faf06d
Tree: http://git-wip-us.apache.org/repos/asf/incubator-rya/tree/80faf06d
Diff: http://git-wip-us.apache.org/repos/asf/incubator-rya/diff/80faf06d

Branch: refs/heads/master
Commit: 80faf06d48165e65271a52856238896521fcf218
Parents: ce4a10f
Author: Aaron Mihalik <mi...@alum.mit.edu>
Authored: Tue Dec 1 15:11:17 2015 -0500
Committer: Aaron Mihalik <mi...@alum.mit.edu>
Committed: Fri Dec 4 11:14:06 2015 -0500

----------------------------------------------------------------------
 NOTICE                                          |     6 +
 README.md                                       |    17 +
 common/pom.xml                                  |    32 +-
 common/rya.api/pom.xml                          |    48 +-
 .../InvalidValueTypeMarkerRuntimeException.java |    31 +-
 .../api/RdfCloudTripleStoreConfiguration.java   |    31 +-
 .../rya/api/RdfCloudTripleStoreConstants.java   |    31 +-
 .../rya/api/RdfCloudTripleStoreStatement.java   |    31 +-
 .../mvm/rya/api/RdfCloudTripleStoreUtils.java   |    33 +-
 .../rya/api/date/DateTimeTtlValueConverter.java |    31 +-
 .../api/date/TimestampTtlStrValueConverter.java |    31 +-
 .../api/date/TimestampTtlValueConverter.java    |    31 +-
 .../mvm/rya/api/date/TtlValueConverter.java     |    31 +-
 .../src/main/java/mvm/rya/api/domain/Node.java  |    31 +-
 .../main/java/mvm/rya/api/domain/RangeURI.java  |    31 +-
 .../java/mvm/rya/api/domain/RangeValue.java     |    31 +-
 .../main/java/mvm/rya/api/domain/RyaRange.java  |    31 +-
 .../main/java/mvm/rya/api/domain/RyaSchema.java |    31 +-
 .../java/mvm/rya/api/domain/RyaStatement.java   |    31 +-
 .../main/java/mvm/rya/api/domain/RyaType.java   |    31 +-
 .../java/mvm/rya/api/domain/RyaTypePrefix.java  |    31 +-
 .../java/mvm/rya/api/domain/RyaTypeRange.java   |    31 +-
 .../main/java/mvm/rya/api/domain/RyaURI.java    |    31 +-
 .../java/mvm/rya/api/domain/RyaURIPrefix.java   |    31 +-
 .../java/mvm/rya/api/domain/RyaURIRange.java    |    31 +-
 .../api/domain/utils/RyaStatementWritable.java  |    31 +-
 .../mvm/rya/api/layout/TableLayoutStrategy.java |    31 +-
 .../api/layout/TablePrefixLayoutStrategy.java   |    31 +-
 .../mvm/rya/api/persist/RdfDAOException.java    |    31 +-
 .../mvm/rya/api/persist/RdfEvalStatsDAO.java    |    31 +-
 .../java/mvm/rya/api/persist/RyaConfigured.java |    31 +-
 .../main/java/mvm/rya/api/persist/RyaDAO.java   |    31 +-
 .../mvm/rya/api/persist/RyaDAOException.java    |    31 +-
 .../rya/api/persist/RyaNamespaceManager.java    |    31 +-
 .../api/persist/index/RyaSecondaryIndexer.java  |    20 +
 .../persist/joinselect/SelectivityEvalDAO.java  |    31 +-
 .../rya/api/persist/query/BatchRyaQuery.java    |    31 +-
 .../mvm/rya/api/persist/query/RyaQuery.java     |    31 +-
 .../rya/api/persist/query/RyaQueryEngine.java   |    31 +-
 .../rya/api/persist/query/RyaQueryOptions.java  |    31 +-
 .../rya/api/persist/query/join/HashJoin.java    |    31 +-
 .../api/persist/query/join/IterativeJoin.java   |    31 +-
 .../mvm/rya/api/persist/query/join/Join.java    |    31 +-
 .../rya/api/persist/query/join/MergeJoin.java   |    31 +-
 .../mvm/rya/api/persist/utils/RyaDAOHelper.java |    31 +-
 .../strategy/AbstractTriplePatternStrategy.java |    31 +-
 .../mvm/rya/api/query/strategy/ByteRange.java   |    31 +-
 .../query/strategy/TriplePatternStrategy.java   |    31 +-
 .../HashedPoWholeRowTriplePatternStrategy.java  |    31 +-
 .../HashedSpoWholeRowTriplePatternStrategy.java |    31 +-
 .../OspWholeRowTriplePatternStrategy.java       |    31 +-
 .../PoWholeRowTriplePatternStrategy.java        |    31 +-
 .../SpoWholeRowTriplePatternStrategy.java       |    31 +-
 .../resolver/CustomRyaTypeResolverMapping.java  |    31 +-
 .../rya/api/resolver/RdfToRyaConversions.java   |    31 +-
 .../java/mvm/rya/api/resolver/RyaContext.java   |    31 +-
 .../rya/api/resolver/RyaToRdfConversions.java   |    31 +-
 .../mvm/rya/api/resolver/RyaTripleContext.java  |    31 +-
 .../mvm/rya/api/resolver/RyaTypeResolver.java   |    31 +-
 .../api/resolver/RyaTypeResolverException.java  |    31 +-
 .../api/resolver/RyaTypeResolverMapping.java    |    31 +-
 .../resolver/impl/BooleanRyaTypeResolver.java   |    31 +-
 .../api/resolver/impl/ByteRyaTypeResolver.java  |    31 +-
 .../resolver/impl/CustomDatatypeResolver.java   |    31 +-
 .../resolver/impl/DateTimeRyaTypeResolver.java  |    31 +-
 .../resolver/impl/DoubleRyaTypeResolver.java    |    31 +-
 .../api/resolver/impl/FloatRyaTypeResolver.java |    31 +-
 .../resolver/impl/IntegerRyaTypeResolver.java   |    31 +-
 .../api/resolver/impl/LongRyaTypeResolver.java  |    31 +-
 .../api/resolver/impl/RyaTypeResolverImpl.java  |    31 +-
 .../rya/api/resolver/impl/RyaURIResolver.java   |    31 +-
 .../ServiceBackedRyaTypeResolverMappings.java   |    31 +-
 .../api/resolver/impl/ShortRyaTypeResolver.java |    31 +-
 .../mvm/rya/api/resolver/triple/TripleRow.java  |    31 +-
 .../rya/api/resolver/triple/TripleRowRegex.java |    31 +-
 .../api/resolver/triple/TripleRowResolver.java  |    31 +-
 .../triple/TripleRowResolverException.java      |    31 +-
 .../impl/WholeRowHashedTripleResolver.java      |    31 +-
 .../triple/impl/WholeRowTripleResolver.java     |    31 +-
 .../mvm/rya/api/security/SecurityProvider.java  |    20 +
 .../api/utils/CloseableIterableIteration.java   |    31 +-
 .../mvm/rya/api/utils/EnumerationWrapper.java   |    31 +-
 .../java/mvm/rya/api/utils/IteratorWrapper.java |    31 +-
 .../rya/api/utils/NullableStatementImpl.java    |    31 +-
 .../api/utils/PeekingCloseableIteration.java    |    31 +-
 .../RyaStatementAddBindingSetFunction.java      |    31 +-
 ...ementRemoveBindingSetCloseableIteration.java |    31 +-
 .../mvm/rya/api/domain/RyaURIPrefixTest.java    |    31 +-
 .../mvm/rya/api/persist/query/RyaQueryTest.java |    31 +-
 .../AbstractTriplePatternStrategyTest.java      |    31 +-
 ...shedPoWholeRowTriplePatternStrategyTest.java |    31 +-
 ...hedSpoWholeRowTriplePatternStrategyTest.java |    31 +-
 .../wholerow/MockRdfCloudConfiguration.java     |    20 +
 .../OspWholeRowTriplePatternStrategyTest.java   |    34 +-
 .../PoWholeRowTriplePatternStrategyTest.java    |    34 +-
 .../SpoWholeRowTriplePatternStrategyTest.java   |    34 +-
 .../mvm/rya/api/resolver/RyaContextTest.java    |    31 +-
 .../impl/CustomDatatypeResolverTest.java        |    31 +-
 .../impl/DateTimeRyaTypeResolverTest.java       |    31 +-
 .../impl/DoubleRyaTypeResolverTest.java         |    31 +-
 .../impl/IntegerRyaTypeResolverTest.java        |    31 +-
 .../resolver/impl/LongRyaTypeResolverTest.java  |    31 +-
 .../api/resolver/impl/RyaURIResolverTest.java   |    31 +-
 .../impl/HashedWholeRowTripleResolverTest.java  |    31 +-
 .../triple/impl/WholeRowTripleResolverTest.java |    31 +-
 .../test/java/mvm/rya/api/utils/RdfIOTest.java  |    34 +-
 common/rya.provenance/pom.xml                   |    64 +-
 .../provenance/LoggingProvenanceCollector.java  |    31 +-
 .../ProvenanceCollectionException.java          |    20 +
 .../provenance/ProvenanceCollector.java         |    20 +
 .../TriplestoreProvenanceCollector.java         |    20 +
 .../provenance/rdf/BaseProvenanceModel.java     |    20 +
 .../provenance/rdf/RDFProvenanceModel.java      |    20 +
 .../TriplestoreProvenanceCollectorTest.java     |    20 +
 .../provenance/rdf/BaseProvenanceModelTest.java |    20 +
 dao/accumulo.rya/pom.xml                        |   103 +-
 .../AccumuloNamespaceTableIterator.java         |    31 +-
 .../rya/accumulo/AccumuloRdfConfiguration.java  |    31 +-
 .../mvm/rya/accumulo/AccumuloRdfConstants.java  |    31 +-
 .../rya/accumulo/AccumuloRdfEvalStatsDAO.java   |    31 +-
 .../rya/accumulo/AccumuloRdfQueryIterator.java  |    34 +-
 .../java/mvm/rya/accumulo/AccumuloRdfUtils.java |    31 +-
 .../java/mvm/rya/accumulo/AccumuloRyaDAO.java   |    31 +-
 .../accumulo/DefineTripleQueryRangeFactory.java |    36 +-
 .../mvm/rya/accumulo/RyaTableKeyValues.java     |    33 +-
 .../rya/accumulo/RyaTableMutationsFactory.java  |    33 +-
 .../experimental/AbstractAccumuloIndexer.java   |    20 +
 .../accumulo/experimental/AccumuloIndexer.java  |    22 +-
 .../rya/accumulo/mr/AbstractAccumuloMRTool.java |    31 +-
 .../accumulo/mr/eval/AccumuloRdfCountTool.java  |    33 +-
 .../mr/fileinput/BulkNtripsInputTool.java       |    31 +-
 .../mr/fileinput/RdfFileInputByLineTool.java    |    31 +-
 .../mr/fileinput/RdfFileInputFormat.java        |    31 +-
 .../accumulo/mr/fileinput/RdfFileInputTool.java |    31 +-
 .../rya/accumulo/mr/upgrade/Upgrade322Tool.java |    31 +-
 .../mr/utils/AccumuloHDFSFileInputFormat.java   |    31 +-
 .../rya/accumulo/mr/utils/AccumuloProps.java    |    31 +-
 .../java/mvm/rya/accumulo/mr/utils/MRUtils.java |    31 +-
 .../accumulo/query/AccumuloRyaQueryEngine.java  |    42 +-
 .../query/KeyValueToRyaStatementFunction.java   |    31 +-
 .../accumulo/query/RangeBindingSetEntries.java  |    31 +-
 .../RyaStatementBindingSetKeyValueIterator.java |    31 +-
 .../query/RyaStatementKeyValueIterator.java     |    31 +-
 .../query/ScannerBaseCloseableIterable.java     |    31 +-
 .../mvm/rya/accumulo/utils/TimeRangeFilter.java |    33 +-
 .../accumulo/AccumuloRdfConfigurationTest.java  |    31 +-
 .../mvm/rya/accumulo/AccumuloRyaDAOTest.java    |    31 +-
 .../DefineTripleQueryRangeFactoryTest.java      |    34 +-
 .../mr/eval/AccumuloRdfCountToolTest.java       |    31 +-
 .../mr/fileinput/RdfFileInputToolTest.java      |    31 +-
 .../accumulo/mr/upgrade/Upgrade322ToolTest.java |    31 +-
 .../upgrade/UpgradeObjectSerializationTest.java |    33 +-
 dao/cloudbase.rya/pom.xml                       |   103 -
 .../mvm/rya/cloudbase/BatchScannerIterator.java |    59 -
 .../CloudbaseNamespaceTableIterator.java        |    78 -
 .../cloudbase/CloudbaseRdfConfiguration.java    |    44 -
 .../rya/cloudbase/CloudbaseRdfConstants.java    |    20 -
 .../rya/cloudbase/CloudbaseRdfEvalStatsDAO.java |   138 -
 .../mvm/rya/cloudbase/CloudbaseRdfUtils.java    |    50 -
 .../java/mvm/rya/cloudbase/CloudbaseRyaDAO.java |   428 -
 .../mvm/rya/cloudbase/RyaTableKeyValues.java    |    93 -
 .../rya/cloudbase/RyaTableMutationsFactory.java |    81 -
 .../mr/eval/CloudbaseRdfCountTool.java          |   350 -
 .../mr/fileinput/BulkNtripsInputTool.java       |   318 -
 .../mr/fileinput/RdfFileInputByLineTool.java    |   230 -
 .../mr/fileinput/RdfFileInputFormat.java        |   115 -
 .../mr/fileinput/RdfFileInputTool.java          |   185 -
 .../fileinput/ShardedBulkNtripsInputTool.java   |   314 -
 .../mr/upgrade/UpgradeCloudbaseRdfTables.java   |   350 -
 .../mvm/rya/cloudbase/mr/utils/MRUtils.java     |    94 -
 .../query/BatchScannerCloseableIterable.java    |    34 -
 .../query/CloudbaseRyaQueryEngine.java          |   385 -
 .../query/KeyValueToRyaStatementFunction.java   |    47 -
 .../cloudbase/query/RangeBindingSetEntries.java |    37 -
 .../RyaStatementBindingSetKeyValueIterator.java |   129 -
 .../query/RyaStatementKeyValueIterator.java     |    82 -
 .../query/ScannerCloseableIterable.java         |    35 -
 .../rya/cloudbase/CloudbaseResearchMain.java    |    77 -
 .../mvm/rya/cloudbase/CloudbaseRyaDAOTest.java  |   588 -
 .../DefineTripleQueryRangeFactoryTest.java      |   242 -
 dao/mongodb.rya/pom.xml                         |    72 +-
 .../mvm/rya/mongodb/MongoDBQueryEngine.java     |    20 +
 .../rya/mongodb/MongoDBRdfConfiguration.java    |    22 +-
 .../java/mvm/rya/mongodb/MongoDBRyaDAO.java     |    20 +
 .../mongodb/dao/MongoDBNamespaceManager.java    |    20 +
 .../rya/mongodb/dao/MongoDBStorageStrategy.java |    20 +
 .../dao/SimpleMongoDBNamespaceManager.java      |    28 +-
 .../dao/SimpleMongoDBStorageStrategy.java       |    20 +
 .../NonCloseableRyaStatementCursorIterator.java |    20 +
 .../RyaStatementBindingSetCursorIterator.java   |    20 +
 .../iter/RyaStatementCursorIterable.java        |    20 +
 .../iter/RyaStatementCursorIterator.java        |    20 +
 dao/pom.xml                                     |    32 +-
 extras/cloudbase.rya.giraph/pom.xml             |    69 -
 .../format/CloudbaseRyaVertexInputFormat.java   |    88 -
 .../format/CloudbaseRyaVertexOutputFormat.java  |    94 -
 .../format/CloudbaseVertexInputFormat.java      |   179 -
 .../format/CloudbaseVertexOutputFormat.java     |   189 -
 .../giraph/format/PrintVertexOutputFormat.java  |    94 -
 .../rya/cloudbase/giraph/format/BspCase.java    |   253 -
 .../format/TestCloudbaseVertexFormat.java       |   211 -
 extras/generic.mr/generic.mr.accumulo/pom.xml   |    58 -
 .../generic/mr/accumulo/AccumuloMRInfo.groovy   |   146 -
 .../services/mvm.rya.generic.mr.api.MRInfo      |     1 -
 extras/generic.mr/generic.mr.api/pom.xml        |    32 -
 .../groovy/mvm/rya/generic/mr/api/MRInfo.groovy |    43 -
 .../mvm/rya/generic/mr/api/MRInfoContext.groovy |    28 -
 extras/generic.mr/generic.mr.cloudbase/pom.xml  |    33 -
 .../generic/mr/cloudbase/CloudbaseMRInfo.groovy |   146 -
 .../services/mvm.rya.generic.mr.api.MRInfo      |     1 -
 extras/generic.mr/pom.xml                       |    27 -
 extras/indexing/pom.xml                         |   177 +-
 .../documentIndex/DocIndexIteratorUtil.java     |    20 +
 .../rya/accumulo/documentIndex/TextColumn.java  |    20 +
 .../rya/accumulo/mr/NullFreeTextIndexer.java    |    31 +-
 .../mvm/rya/accumulo/mr/NullGeoIndexer.java     |    33 +-
 .../rya/accumulo/mr/NullTemporalIndexer.java    |    20 +
 .../mvm/rya/accumulo/mr/RyaOutputFormat.java    |    31 +-
 .../mvm/rya/accumulo/mr/StatementWritable.java  |    31 +-
 .../fileinput/BulkNtripsInputToolIndexing.java  |    31 +-
 .../mr/fileinput/RyaBatchWriterInputTool.java   |    31 +-
 .../AccumuloPrecompQueryIndexer.java            |    20 +
 .../java/mvm/rya/indexing/DocIdIndexer.java     |    26 +-
 .../rya/indexing/FilterFunctionOptimizer.java   |    20 +
 .../java/mvm/rya/indexing/FreeTextIndexer.java  |    33 +-
 .../main/java/mvm/rya/indexing/GeoIndexer.java  |    31 +-
 .../ExternalIndexMatcher.java                   |    24 +-
 .../GeneralizedExternalProcessor.java           |    37 +-
 .../IndexPlanValidator/IndexListPruner.java     |    20 +
 .../IndexPlanValidator/IndexPlanValidator.java  |    20 +
 .../IndexPlanValidator/IndexTupleGenerator.java |    22 +-
 .../IndexedExecutionPlanGenerator.java          |    20 +
 .../IndexedQueryPlanSelector.java               |    21 +-
 .../ThreshholdPlanSelector.java                 |    21 +-
 .../TupleExecutionPlanGenerator.java            |    25 +-
 .../IndexPlanValidator/TupleReArranger.java     |    20 +
 .../IndexPlanValidator/TupleValidator.java      |    21 +-
 .../ValidIndexCombinationGenerator.java         |    22 +-
 .../VarConstantIndexListPruner.java             |    20 +
 .../java/mvm/rya/indexing/IndexingExpr.java     |    22 +-
 .../rya/indexing/IndexingFunctionRegistry.java  |    21 +-
 .../java/mvm/rya/indexing/IteratorFactory.java  |    20 +
 .../main/java/mvm/rya/indexing/KeyParts.java    |    24 +-
 .../mvm/rya/indexing/PrecompQueryIndexer.java   |    23 +-
 .../java/mvm/rya/indexing/RyaSailFactory.java   |    20 +
 .../java/mvm/rya/indexing/SearchFunction.java   |    22 +-
 .../mvm/rya/indexing/SearchFunctionFactory.java |    22 +-
 .../mvm/rya/indexing/StatementContraints.java   |    31 +-
 .../java/mvm/rya/indexing/TemporalIndexer.java  |    23 +-
 .../java/mvm/rya/indexing/TemporalInstant.java  |    22 +-
 .../java/mvm/rya/indexing/TemporalInterval.java |    20 +
 .../mvm/rya/indexing/accumulo/ConfigUtils.java  |    33 +-
 .../java/mvm/rya/indexing/accumulo/Md5Hash.java |    31 +-
 .../indexing/accumulo/StatementSerializer.java  |    31 +-
 .../accumulo/entity/AccumuloDocIdIndexer.java   |    20 +
 .../accumulo/entity/EntityCentricIndex.java     |    20 +
 .../entity/EntityLocalityGroupSetter.java       |    20 +
 .../accumulo/entity/EntityOptimizer.java        |    20 +
 .../accumulo/entity/EntityTupleSet.java         |    20 +
 .../rya/indexing/accumulo/entity/StarQuery.java |    20 +
 .../freetext/AccumuloFreeTextIndexer.java       |    36 +-
 .../accumulo/freetext/ColumnPrefixes.java       |    31 +-
 .../accumulo/freetext/FreeTextTupleSet.java     |    21 +-
 .../accumulo/freetext/LuceneTokenizer.java      |    31 +-
 .../accumulo/freetext/SimpleTokenizer.java      |    31 +-
 .../indexing/accumulo/freetext/Tokenizer.java   |    33 +-
 .../freetext/iterators/AndingIterator.java      |    31 +-
 .../freetext/iterators/BooleanTreeIterator.java |    31 +-
 .../accumulo/freetext/query/ASTExpression.java  |    31 +-
 .../accumulo/freetext/query/ASTNodeUtils.java   |    31 +-
 .../accumulo/freetext/query/ASTSimpleNode.java  |   915 +-
 .../accumulo/freetext/query/ASTTerm.java        |    31 +-
 .../freetext/query/JJTQueryParserState.java     |   915 +-
 .../indexing/accumulo/freetext/query/Node.java  |   915 +-
 .../accumulo/freetext/query/ParseException.java |   915 +-
 .../accumulo/freetext/query/QueryParser.java    |   915 +-
 .../accumulo/freetext/query/QueryParser.jjt     |    23 +-
 .../freetext/query/QueryParserConstants.java    |   915 +-
 .../freetext/query/QueryParserTokenManager.java |   915 +-
 .../query/QueryParserTreeConstants.java         |   915 +-
 .../freetext/query/SimpleCharStream.java        |   915 +-
 .../accumulo/freetext/query/SimpleNode.java     |   915 +-
 .../indexing/accumulo/freetext/query/Token.java |   915 +-
 .../accumulo/freetext/query/TokenMgrError.java  |   915 +-
 .../rya/indexing/accumulo/geo/GeoConstants.java |    31 +-
 .../accumulo/geo/GeoMesaGeoIndexer.java         |    31 +-
 .../indexing/accumulo/geo/GeoParseUtils.java    |    20 +
 .../rya/indexing/accumulo/geo/GeoTupleSet.java  |    20 +
 .../temporal/AccumuloTemporalIndexer.java       |    20 +
 .../temporal/TemporalInstantRfc3339.java        |    20 +
 .../accumulo/temporal/TemporalTupleSet.java     |    20 +
 .../indexing/external/ExternalIndexMain.java    |    31 +-
 .../indexing/external/ExternalProcessor.java    |    31 +-
 .../mvm/rya/indexing/external/ExternalSail.java |    31 +-
 .../indexing/external/ExternalSailExample.java  |    31 +-
 .../indexing/external/PrecompJoinOptimizer.java |    20 +
 .../external/QueryVariableNormalizer.java       |    20 +
 .../external/tupleSet/AccumuloIndexSet.java     |    31 +-
 .../external/tupleSet/ExternalTupleSet.java     |    35 +-
 .../tupleSet/SimpleExternalTupleSet.java        |    31 +-
 .../indexing/mongodb/AbstractMongoIndexer.java  |    25 +-
 .../mongodb/GeoMongoDBStorageStrategy.java      |    21 +-
 .../rya/indexing/mongodb/MongoGeoIndexer.java   |    24 +-
 .../rya/indexing/mongodb/MongoGeoTupleSet.java  |    20 +
 .../ValidIndexCombinationGeneratorTest.java     |    19 +
 .../DocumentIndexIntersectingIteratorTest.java  |    20 +
 .../GeneralizedExternalProcessorTest.java       |    20 +
 .../IndexPlanValidatorTest.java                 |    20 +
 .../IndexedExecutionPlanGeneratorTest.java      |    20 +
 .../ThreshholdPlanSelectorTest.java             |    20 +
 .../TupleExecutionPlanGeneratorTest.java        |    20 +
 .../IndexPlanValidator/TupleReArrangerTest.java |    20 +
 .../ValidIndexCombinationGeneratorTest.java     |    20 +
 .../VarConstantIndexListPrunerTest.java         |    20 +
 .../accumulo/StatementSerializerTest.java       |    31 +-
 .../accumulo/entity/AccumuloDocIndexerTest.java |    20 +
 .../accumulo/entity/EntityOptimizerTest.java    |    20 +
 .../indexing/accumulo/entity/StarQueryTest.java |    20 +
 .../freetext/AccumuloFreeTextIndexerTest.java   |    31 +-
 .../freetext/query/QueryParserTest.java         |    31 +-
 .../indexing/accumulo/geo/GeoIndexerSfTest.java |    31 +-
 .../indexing/accumulo/geo/GeoIndexerTest.java   |    31 +-
 .../temporal/AccumuloTemporalIndexerTest.java   |    20 +
 .../accumulo/temporal/TemporalInstantTest.java  |    20 +
 .../accumulo/temporal/TemporalIntervalTest.java |    20 +
 .../external/AccumuloConstantIndexSetTest.java  |    20 +
 .../indexing/external/AccumuloIndexSetTest.java |    31 +-
 .../external/AccumuloIndexSetTest2.java         |    20 +
 .../PrecompJoinOptimizerIntegrationTest.java    |    22 +-
 .../external/PrecompJoinOptimizerTest.java      |    20 +
 .../tupleSet/ExternalProcessorTest.java         |    31 +-
 .../tupleSet/QueryVariableNormalizerTest.java   |    31 +-
 .../tupleSet/VarConstExternalProcessorTest.java |    20 +
 .../VarConstQueryVariableNormalizerTest.java    |    20 +
 extras/indexingExample/pom.xml                  |    99 +
 .../src/main/assembly/assembly.xml              |    70 +
 .../src/main/java/EntityDirectExample.java      |   311 +
 .../src/main/java/MongoRyaDirectExample.java    |   307 +
 .../src/main/java/RyaDirectExample.java         |   700 +
 .../src/main/scripts/RunRyaDirectExample.bat    |    41 +
 extras/indexingSailExample/pom.xml              |    80 -
 .../src/main/assembly/assembly.xml              |    50 -
 .../src/main/java/EntityDirectExample.java      |   292 -
 .../src/main/java/MongoRyaDirectExample.java    |   288 -
 .../src/main/java/RyaDirectExample.java         |   681 -
 .../src/main/scripts/RunRyaDirectExample.bat    |    25 -
 extras/pom.xml                                  |    42 +-
 extras/rya.console/pom.xml                      |   106 +-
 .../java/mvm/rya/console/RyaBannerProvider.java |    40 +-
 .../mvm/rya/console/RyaConsoleCommands.java     |    21 +-
 .../rya/console/RyaHistoryFileNameProvider.java |    31 +-
 .../java/mvm/rya/console/RyaPromptProvider.java |    31 +-
 .../META-INF/spring/spring-shell-plugin.xml     |    22 +-
 extras/rya.geo/pom.xml                          |    25 -
 .../src/main/java/mvm/rya/geo/GeoDistance.java  |    34 -
 .../java/mvm/rya/geo/GeoRyaTypeResolver.java    |    16 -
 .../src/main/java/mvm/rya/geo/RyaGeoSchema.java |    16 -
 .../src/main/java/mvm/rya/geo/Verify.java       |    68 -
 .../src/main/java/mvm/rya/geo/WithinRange.java  |    69 -
 .../mvm.rya.api.resolver.RyaTypeResolver        |     1 -
 ...f.query.algebra.evaluation.function.Function |     1 -
 .../mvm/rya/geo/GeoRyaTypeResolverTest.java     |    25 -
 .../test/java/mvm/rya/geo/WithinRangeTest.java  |    43 -
 extras/rya.manual/pom.xml                       |    29 +-
 extras/rya.manual/src/site/markdown/_index.md   |    21 +
 extras/rya.manual/src/site/markdown/alx.md      |    21 +
 .../src/site/markdown/build-source.md           |    21 +
 extras/rya.manual/src/site/markdown/eval.md     |    21 +
 extras/rya.manual/src/site/markdown/index.md    |    21 +
 extras/rya.manual/src/site/markdown/infer.md    |    21 +
 .../src/site/markdown/loadPrecomputedJoin.md    |    21 +
 extras/rya.manual/src/site/markdown/loaddata.md |    21 +
 extras/rya.manual/src/site/markdown/overview.md |    21 +
 .../rya.manual/src/site/markdown/querydata.md   |    21 +
 .../rya.manual/src/site/markdown/quickstart.md  |    21 +
 .../rya.manual/src/site/markdown/sm-addauth.md  |    21 +
 .../src/site/markdown/sm-firststeps.md          |    21 +
 extras/rya.manual/src/site/markdown/sm-infer.md |    21 +
 .../src/site/markdown/sm-namedgraph.md          |    21 +
 .../src/site/markdown/sm-simpleaqr.md           |    21 +
 .../src/site/markdown/sm-sparqlquery.md         |    21 +
 .../src/site/markdown/sm-updatedata.md          |    21 +
 .../src/site/resources/js/fixmarkdownlinks.js   |    21 +-
 extras/rya.manual/src/site/site.xml             |    22 +-
 extras/rya.prospector/pom.xml                   |   178 +-
 .../mvm/rya/prospector/domain/IndexEntry.groovy |    19 +
 .../domain/IntermediateProspect.groovy          |    19 +
 .../rya/prospector/domain/TripleValueType.java  |    22 +-
 .../mvm/rya/prospector/mr/Prospector.groovy     |    19 +
 .../rya/prospector/mr/ProspectorCombiner.groovy |    21 +-
 .../rya/prospector/mr/ProspectorMapper.groovy   |    21 +-
 .../rya/prospector/mr/ProspectorReducer.groovy  |    21 +-
 .../rya/prospector/plans/IndexWorkPlan.groovy   |    21 +-
 .../plans/IndexWorkPlanManager.groovy           |    21 +-
 .../rya/prospector/plans/impl/CountPlan.groovy  |    19 +
 .../ServicesBackedIndexWorkPlanManager.groovy   |    19 +
 .../prospector/service/ProspectorService.groovy |    19 +
 .../ProspectorServiceEvalStatsDAO.groovy        |    19 +
 .../mvm/rya/prospector/utils/CustomEntry.groovy |    19 +
 .../prospector/utils/ProspectorConstants.groovy |    19 +
 .../rya/prospector/utils/ProspectorUtils.groovy |    19 +
 .../joinselect/AccumuloSelectivityEvalDAO.java  |    31 +-
 .../mvm/rya/joinselect/CardinalityCalcUtil.java |    31 +-
 .../mvm/rya/joinselect/mr/FullTableSize.java    |    31 +-
 .../rya/joinselect/mr/JoinSelectAggregate.java  |    38 +-
 .../mvm/rya/joinselect/mr/JoinSelectDriver.java |    31 +-
 .../joinselect/mr/JoinSelectProspectOutput.java |    36 +-
 .../joinselect/mr/JoinSelectSpoTableOutput.java |    36 +-
 .../joinselect/mr/JoinSelectStatisticsSum.java  |    31 +-
 .../mvm/rya/joinselect/mr/utils/CardList.java   |    31 +-
 .../joinselect/mr/utils/CardinalityType.java    |    31 +-
 .../rya/joinselect/mr/utils/CompositeType.java  |    31 +-
 .../mr/utils/JoinSelectConstants.java           |    31 +-
 .../mr/utils/JoinSelectStatsUtil.java           |    31 +-
 .../mvm/rya/joinselect/mr/utils/TripleCard.java |    31 +-
 .../rya/joinselect/mr/utils/TripleEntry.java    |    31 +-
 .../mvm/rya/prospector/mr/ProspectorTest.groovy |    40 +-
 .../ProspectorServiceEvalStatsDAOTest.groovy    |    38 +-
 .../AccumuloSelectivityEvalDAOTest.java         |    31 +-
 .../mr/CardinalityIdentityReducerTest.java      |    31 +-
 .../joinselect/mr/CardinalityMapperTest.java    |    31 +-
 .../rya/joinselect/mr/FullTableSizeTest.java    |    31 +-
 .../mvm/rya/joinselect/mr/JoinReducerTest.java  |    31 +-
 .../rya/joinselect/mr/JoinSelectMapperTest.java |    31 +-
 .../mr/JoinSelectProspectOutputTest.java        |    20 +
 .../mr/JoinSelectStatisticsSumTest.java         |    31 +-
 .../joinselect/mr/JoinSelectStatisticsTest.java |    42 +-
 .../src/test/resources/stats_cluster_config.xml |    21 +
 extras/tinkerpop.rya/pom.xml                    |   100 +-
 .../config/RyaGraphConfiguration.groovy         |    19 +
 .../mvm/rya/blueprints/sail/RyaSailEdge.groovy  |    19 +
 .../blueprints/sail/RyaSailEdgeSequence.groovy  |    19 +
 .../mvm/rya/blueprints/sail/RyaSailGraph.groovy |    19 +
 .../rya/blueprints/sail/RyaSailVertex.groovy    |    19 +
 .../sail/RyaSailVertexSequence.groovy           |    19 +
 .../mvm/rya/blueprints/sail/RyaSailEdge.java    |    31 +-
 .../mvm/rya/blueprints/sail/RyaSailVertex.java  |    31 +-
 .../mvm/rya/blueprints/TstGremlinRya.groovy     |    38 +-
 .../config/RyaGraphConfigurationTest.groovy     |    38 +-
 .../sail/RyaSailVertexSequenceTest.groovy       |    38 +-
 .../src/test/resources/log4j.properties         |    34 +-
 iterators/accumulo.iterators/pom.xml            |    34 -
 .../mvm/rya/iterators/LimitingAgeOffFilter.java |   138 -
 .../rya/iterators/LimitingAgeOffFilterTest.java |    80 -
 iterators/cloudbase.iterators/pom.xml           |    29 -
 .../mvm/rya/iterators/LimitingAgeOffFilter.java |    76 -
 .../rya/iterators/LimitingAgeOffFilterTest.java |    59 -
 iterators/pom.xml                               |    25 -
 osgi/alx.rya.console/pom.xml                    |    36 +-
 .../mvm/rya/alx/command/AbstractRyaCommand.java |    33 +-
 .../alx/command/GetStatementsRyaCommand.java    |    31 +-
 .../mvm/rya/alx/command/InfoRyaCommand.java     |    31 +-
 .../blueprint/alx.rya.console-blueprint.xml     |    20 +
 osgi/alx.rya/pom.xml                            |    46 +-
 .../src/main/features/alx.rya-features.xml      |    22 +-
 .../mvm/rya/alx/util/ConfigurationFactory.java  |    31 +-
 .../META-INF/spring/alx.rya-spring-osgi.xml     |    31 +-
 .../META-INF/spring/alx.rya-spring.xml          |    28 +-
 .../src/main/resources/ROOT/crossdomain.xml     |    22 +-
 osgi/camel.rya/pom.xml                          |    76 +-
 .../mvm/rya/camel/cbsail/CbSailComponent.java   |    31 +-
 .../mvm/rya/camel/cbsail/CbSailEndpoint.java    |    31 +-
 .../mvm/rya/camel/cbsail/CbSailProducer.java    |    31 +-
 .../rya/camel/cbsail/CbSailIntegrationTest.java |    33 +-
 .../mvm/rya/camel/cbsail/CbSailPojoMain.java    |    31 +-
 .../java/mvm/rya/camel/cbsail/CbSailTest.java   |    31 +-
 osgi/pom.xml                                    |    51 +-
 osgi/sesame-runtime-osgi/pom.xml                |    35 +-
 partition/common-query-ext/pom.xml              |    71 -
 .../ext/EncodedSortedRangeIterator.java         |    44 -
 .../src/test/java/GVDateFilterTest.java         |   156 -
 .../src/test/java/GVFrequencyFilterTest.java    |   144 -
 .../src/test/java/IteratorTest.java             |   554 -
 .../src/test/java/JTSFilterTest.java            |   181 -
 .../src/test/java/OGCFilterTest.java            |   163 -
 .../src/test/java/SampleData.java               |   228 -
 .../src/test/java/SampleGVData.java             |   182 -
 .../src/test/java/SampleJTSData.java            |   171 -
 partition/common-query/pom.xml                  |   103 -
 .../iterators/CellLevelFilteringIterator.java   |   163 -
 .../core/iterators/CellLevelRecordIterator.java |   144 -
 .../core/iterators/ConversionIterator.java      |   151 -
 .../iterators/GMDenIntersectingIterator.java    |   363 -
 .../core/iterators/IntersectingIterator.java    |   557 -
 .../core/iterators/IntersectionRange.java       |   330 -
 .../core/iterators/IteratorConstants.java       |    11 -
 .../core/iterators/SortedMinIterator.java       |   173 -
 .../core/iterators/SortedRangeIterator.java     |   136 -
 .../core/iterators/UniqueIterator.java          |    95 -
 .../core/iterators/conversion/Operation.java    |   109 -
 .../core/iterators/filter/CBConverter.java      |   117 -
 .../iterators/filter/general/GVDateFilter.java  |   169 -
 .../filter/general/GVFrequencyFilter.java       |    92 -
 .../core/iterators/filter/jts/JTSFilter.java    |   191 -
 .../core/iterators/filter/ogc/OGCFilter.java    |   241 -
 .../ogc/operation/AbstractComparisonOp.java     |    80 -
 .../filter/ogc/operation/AbstractLogicalOp.java |    40 -
 .../iterators/filter/ogc/operation/And.java     |    29 -
 .../iterators/filter/ogc/operation/BBOX.java    |   125 -
 .../filter/ogc/operation/IOperation.java        |    30 -
 .../iterators/filter/ogc/operation/Not.java     |    35 -
 .../core/iterators/filter/ogc/operation/Or.java |    29 -
 .../filter/ogc/operation/PropertyIsBetween.java |    76 -
 .../filter/ogc/operation/PropertyIsEqualTo.java |    30 -
 .../ogc/operation/PropertyIsGreaterThan.java    |    29 -
 .../PropertyIsGreaterThanOrEqualTo.java         |    29 -
 .../ogc/operation/PropertyIsLessThan.java       |    31 -
 .../operation/PropertyIsLessThanOrEqualTo.java  |    29 -
 .../filter/ogc/operation/PropertyIsLike.java    |   144 -
 .../ogc/operation/PropertyIsNotEqualTo.java     |    30 -
 .../filter/ogc/operation/PropertyIsNull.java    |    38 -
 .../filter/ogc/operation/ShapeFactory.java      |   133 -
 .../core/iterators/filter/ogc/util/GeoUtil.java |    32 -
 .../src/test/java/GVDateFilterTest.java         |   156 -
 .../src/test/java/GVFrequencyFilterTest.java    |   144 -
 .../src/test/java/IteratorTest.java             |   554 -
 .../src/test/java/JTSFilterTest.java            |   181 -
 .../src/test/java/OGCFilterTest.java            |   163 -
 .../common-query/src/test/java/SampleData.java  |   228 -
 .../src/test/java/SampleGVData.java             |   182 -
 .../src/test/java/SampleJTSData.java            |   171 -
 partition/iterator-test/filter.txt              |     6 -
 partition/iterator-test/pom.xml                 |    99 -
 .../src/main/java/dss/webservice/itr/Main.java  |   348 -
 .../src/main/java/dss/webservice/itr/Test.java  |     9 -
 .../dss/webservice/itr/test/AddTestRecords.java |    43 -
 .../dss/webservice/itr/test/BaseTileTest.java   |   132 -
 .../dss/webservice/itr/test/ConversionTest.java |   159 -
 partition/iterator-test/test.sh                 |     3 -
 partition/mr.partition.rdf/pom.xml              |    79 -
 .../mr.partition.rdf/src/main/assembly/job.xml  |    38 -
 .../src/main/groovy/convertrdfdir.groovy        |    33 -
 .../mvm/mmrts/rdf/partition/mr/MrTstBed.java    |   104 -
 .../mr/SparqlPartitionStoreInputFormat.java     |   411 -
 .../rdf/partition/mr/SparqlTestDriver.java      |   155 -
 .../mvm/mmrts/rdf/partition/mr/TestDriver.java  |   154 -
 .../mr/compat/ChangeShardDateFormatTool.java    |   229 -
 .../partition/mr/compat/MoveShardIndexTool.java |   171 -
 .../mr/fileinput/RdfFileInputFormat.java        |   155 -
 .../fileinput/RdfFileInputToCloudbaseTool.java  |   210 -
 .../mr/fileinput/RdfFileInputToFileTool.java    |   159 -
 .../mr/fileinput/bulk/BulkNtripsInputTool.java  |   326 -
 .../bulk/EmbedKeyRangePartitioner.java          |    28 -
 .../iterators/SortedEncodedRangeIterator.java   |    45 -
 .../AggregateTriplesBySubjectCombiner.java      |    31 -
 .../AggregateTriplesBySubjectReducer.java       |    37 -
 .../mr/transform/KeyValueToMapWrMapper.java     |    78 -
 .../mr/transform/SparqlCloudbaseIFJob.java      |   118 -
 .../transform/SparqlCloudbaseIFTransformer.java |   331 -
 .../SparqlCloudbaseIFTransformerConstants.java  |    12 -
 .../compat/ChangeShardDateFormatToolTest.java   |    33 -
 .../mr/fileinput/RdfFileInputToolTest.java      |    80 -
 .../bulk/EmbedKeyRangePartitionerTest.java      |    20 -
 partition/partition.rdf/pom.xml                 |   281 -
 .../InvalidValueTypeMarkerRuntimeException.java |    34 -
 .../rdf/partition/PartitionConnection.java      |   306 -
 .../mmrts/rdf/partition/PartitionConstants.java |   141 -
 .../mvm/mmrts/rdf/partition/PartitionSail.java  |   122 -
 .../rdf/partition/PartitionTripleSource.java    |    40 -
 .../converter/ContextColVisConverter.java       |    14 -
 .../partition/iterators/NamespaceIterator.java  |    93 -
 .../evaluation/FilterTimeIndexVisitor.java      |   113 -
 .../evaluation/PartitionEvaluationStrategy.java |    70 -
 .../ShardSubjectLookupStatementIterator.java    |   493 -
 .../evaluation/SubjectGroupingOptimizer.java    |   178 -
 .../query/evaluation/select/FilterIterator.java |   100 -
 .../evaluation/select/SelectAllIterator.java    |    54 -
 .../query/evaluation/select/SelectIterator.java |   270 -
 .../select/SubjectSelectIterator.java           |    40 -
 .../select/utils/DocumentIterator.java          |   107 -
 .../query/operators/ShardSubjectLookup.java     |   167 -
 .../shard/DateHashModShardValueGenerator.java   |    52 -
 .../partition/shard/ShardValueGenerator.java    |    12 -
 .../partition/utils/ContextsStatementImpl.java  |    30 -
 .../rdf/partition/utils/CountPredObjPairs.java  |    39 -
 .../rdf/partition/utils/PartitionUtils.java     |     9 -
 .../mvm/mmrts/rdf/partition/utils/RdfIO.java    |   166 -
 .../main/resources/partitionTableLoad.cbexec    |     4 -
 .../mmrts/rdf/partition/LoadPartitionData.java  |    79 -
 .../mmrts/rdf/partition/LoadPartitionData2.java |    69 -
 .../mvm/mmrts/rdf/partition/LoadSampleData.java |    64 -
 .../mvm/mmrts/rdf/partition/MemStoreTst.java    |    71 -
 .../rdf/partition/PartitionConnectionTest.java  |   771 -
 .../mmrts/rdf/partition/QueryPartitionData.java |   675 -
 .../java/mvm/mmrts/rdf/partition/TstBed.java    |    90 -
 .../mvm/mmrts/rdf/partition/TstBedGMDen.java    |    94 -
 .../mmrts/rdf/partition/TstDocumentReader.java  |   217 -
 .../mvm/mmrts/rdf/partition/TstScanner.java     |    59 -
 .../DateHashModShardValueGeneratorTest.java     |    30 -
 .../mmrts/rdf/partition/utils/RdfIOTest.java    |    82 -
 partition/web.partition.rdf/cimRdf.xml          | 15740 -----------------
 partition/web.partition.rdf/pom.xml             |    63 -
 .../web/partition/AbstractRDFWebServlet.java    |    66 -
 .../rdf/web/partition/DeleteDataServlet.java    |    46 -
 .../rdf/web/partition/LoadDataServlet.java      |    56 -
 .../rdf/web/partition/QueryDataServlet.java     |   158 -
 .../web/partition/QuerySerqlDataServlet.java    |   116 -
 .../rdf/web/partition/RDFWebConstants.java      |    15 -
 .../src/main/webapp/WEB-INF/web.xml             |    77 -
 .../src/main/webapp/crossdomain.xml             |     5 -
 .../src/main/webapp/serqlQuery.jsp              |    36 -
 .../src/main/webapp/sparqlQuery.jsp             |    47 -
 .../cloudbase/sail/DeleteDataServletRun.java    |   455 -
 .../web/cloudbase/sail/LoadDataServletRun.java  |    51 -
 .../web/cloudbase/sail/QueryDataServletRun.java |   444 -
 .../src/test/resources/cdrdf.xml                |    20 -
 .../src/test/resources/n3trips.txt              |     4 -
 pig/accumulo.pig/pom.xml                        |   111 +-
 .../mvm/rya/accumulo/pig/AccumuloStorage.java   |    33 +-
 .../mvm/rya/accumulo/pig/IndexWritingTool.java  |    31 +-
 .../rya/accumulo/pig/SparqlQueryPigEngine.java  |    31 +-
 .../pig/SparqlToPigTransformVisitor.java        |    31 +-
 .../accumulo/pig/StatementPatternStorage.java   |    31 +-
 .../pig/optimizer/SimilarVarJoinOptimizer.java  |    33 +-
 .../rya/accumulo/pig/AccumuloStorageTest.java   |    31 +-
 .../rya/accumulo/pig/IndexWritingToolTest.java  |    20 +
 .../accumulo/pig/SparqlQueryPigEngineTest.java  |    31 +-
 .../pig/SparqlToPigTransformVisitorTest.java    |    31 +-
 .../pig/StatementPatternStorageTest.java        |    31 +-
 pig/cloudbase.pig/pom.xml                       |    65 -
 .../mvm/rya/cloudbase/pig/CloudbaseStorage.java |   318 -
 .../rya/cloudbase/pig/SparqlQueryPigEngine.java |   237 -
 .../pig/SparqlToPigTransformVisitor.java        |   323 -
 .../cloudbase/pig/StatementPatternStorage.java  |   278 -
 .../rya/cloudbase/pig/dep/CloudbaseStorage.java |   299 -
 .../pig/dep/StatementPatternStorage.java        |   178 -
 .../pig/optimizer/SimilarVarJoinOptimizer.java  |   189 -
 .../mapred/PreferLocalMapTaskSelector.java      |    39 -
 .../cloudbase/pig/CloudbaseInputFormatMain.java |    50 -
 .../rya/cloudbase/pig/CloudbaseStorageTest.java |   250 -
 .../cloudbase/pig/SparqlQueryPigEngineTest.java |    55 -
 .../pig/SparqlToPigTransformVisitorTest.java    |   367 -
 .../pig/StatementPatternStorageTest.java        |   148 -
 pig/pom.xml                                     |    30 +-
 pom.xml                                         |   508 +-
 sail/pom.xml                                    |   100 +-
 sail/rya.sail.impl/pom.xml                      |   126 -
 .../rya/rdftriplestore/RdfCloudTripleStore.java |   178 -
 .../RdfCloudTripleStoreConnection.java          |   622 -
 .../RdfCloudTripleStoreFactory.java             |    55 -
 .../RdfCloudTripleStoreSailConfig.java          |   132 -
 .../rya/rdftriplestore/RyaSailRepository.java   |    52 -
 .../RyaSailRepositoryConnection.java            |   108 -
 .../evaluation/ExternalBatchingIterator.java    |    32 -
 .../ExternalMultipleBindingSetsIterator.java    |   108 -
 .../evaluation/FilterRangeVisitor.java          |    96 -
 .../evaluation/MultipleBindingSetsIterator.java |   107 -
 .../ParallelEvaluationStrategyImpl.java         |   280 -
 .../evaluation/ParallelJoinIterator.java        |   138 -
 .../evaluation/PushJoinDownVisitor.java         |    56 -
 .../evaluation/QueryJoinOptimizer.java          |   283 -
 .../evaluation/QueryJoinSelectOptimizer.java    |   259 -
 ...RdfCloudTripleStoreEvaluationStatistics.java |   280 -
 ...pleStoreSelectivityEvaluationStatistics.java |   127 -
 .../evaluation/ReorderJoinVisitor.java          |    69 -
 .../evaluation/SeparateFilterJoinsVisitor.java  |    54 -
 .../inference/AbstractInferVisitor.java         |   107 -
 .../rdftriplestore/inference/DoNotExpandSP.java |    50 -
 .../inference/InferConstants.java               |    33 -
 .../rya/rdftriplestore/inference/InferJoin.java |    49 -
 .../rdftriplestore/inference/InferUnion.java    |    47 -
 .../inference/InferenceEngine.java              |   409 -
 .../inference/InferenceEngineException.java     |    42 -
 .../inference/InverseOfVisitor.java             |    79 -
 .../rdftriplestore/inference/SameAsVisitor.java |   186 -
 .../inference/SubClassOfVisitor.java            |   107 -
 .../inference/SubPropertyOfVisitor.java         |   120 -
 .../inference/SymmetricPropertyVisitor.java     |    77 -
 .../inference/TransitivePropertyVisitor.java    |    68 -
 .../namespace/NamespaceManager.java             |   166 -
 .../utils/CombineContextsRdfInserter.java       |   164 -
 .../rdftriplestore/utils/DefaultStatistics.java |    57 -
 .../utils/FixedStatementPattern.java            |    58 -
 .../utils/TransitivePropertySP.java             |    51 -
 .../META-INF/org.openrdf.store.schemas          |     1 -
 .../META-INF/schemas/cloudbasestore-schema.ttl  |    20 -
 .../org.openrdf.sail.config.SailFactory         |     1 -
 .../src/main/resources/ehcache.xml              |    26 -
 .../java/mvm/rya/ArbitraryLengthQueryTest.java  |   499 -
 .../src/test/java/mvm/rya/HashJoinTest.java     |   373 -
 .../test/java/mvm/rya/IterativeJoinTest.java    |   364 -
 .../src/test/java/mvm/rya/MergeJoinTest.java    |   369 -
 .../rya/RdfCloudTripleStoreConnectionTest.java  |  1362 --
 .../java/mvm/rya/RdfCloudTripleStoreTest.java   |   698 -
 .../mvm/rya/RdfCloudTripleStoreUtilsTest.java   |    86 -
 .../QueryJoinSelectOptimizerTest.java           |   991 --
 ...toreSelectivityEvaluationStatisticsTest.java |   303 -
 .../rya/triplestore/inference/SameAsTest.java   |   114 -
 sail/rya.sail.impl/src/test/resources/cdrdf.xml |    40 -
 .../src/test/resources/namedgraphs.trig         |    37 -
 sail/rya.sail.impl/src/test/resources/ntriples  |     1 -
 .../src/test/resources/reification.xml          |    35 -
 .../src/test/resources/univ-bench.owl           |   466 -
 .../rya/rdftriplestore/RdfCloudTripleStore.java |   179 +
 .../RdfCloudTripleStoreConnection.java          |   623 +
 .../RdfCloudTripleStoreFactory.java             |    56 +
 .../RdfCloudTripleStoreSailConfig.java          |   133 +
 .../rya/rdftriplestore/RyaSailRepository.java   |    53 +
 .../RyaSailRepositoryConnection.java            |   109 +
 .../evaluation/ExternalBatchingIterator.java    |    33 +
 .../ExternalMultipleBindingSetsIterator.java    |   109 +
 .../evaluation/FilterRangeVisitor.java          |    97 +
 .../evaluation/MultipleBindingSetsIterator.java |   108 +
 .../ParallelEvaluationStrategyImpl.java         |   281 +
 .../evaluation/ParallelJoinIterator.java        |   139 +
 .../evaluation/PushJoinDownVisitor.java         |    57 +
 .../evaluation/QueryJoinOptimizer.java          |   284 +
 .../evaluation/QueryJoinSelectOptimizer.java    |   260 +
 ...RdfCloudTripleStoreEvaluationStatistics.java |   281 +
 ...pleStoreSelectivityEvaluationStatistics.java |   128 +
 .../evaluation/ReorderJoinVisitor.java          |    70 +
 .../evaluation/SeparateFilterJoinsVisitor.java  |    55 +
 .../inference/AbstractInferVisitor.java         |   108 +
 .../rdftriplestore/inference/DoNotExpandSP.java |    51 +
 .../inference/InferConstants.java               |    34 +
 .../rya/rdftriplestore/inference/InferJoin.java |    50 +
 .../rdftriplestore/inference/InferUnion.java    |    48 +
 .../inference/InferenceEngine.java              |   410 +
 .../inference/InferenceEngineException.java     |    43 +
 .../inference/InverseOfVisitor.java             |    80 +
 .../rdftriplestore/inference/SameAsVisitor.java |   187 +
 .../inference/SubClassOfVisitor.java            |   108 +
 .../inference/SubPropertyOfVisitor.java         |   121 +
 .../inference/SymmetricPropertyVisitor.java     |    78 +
 .../inference/TransitivePropertyVisitor.java    |    69 +
 .../namespace/NamespaceManager.java             |   167 +
 .../utils/CombineContextsRdfInserter.java       |   165 +
 .../rdftriplestore/utils/DefaultStatistics.java |    58 +
 .../utils/FixedStatementPattern.java            |    59 +
 .../utils/TransitivePropertySP.java             |    52 +
 .../META-INF/org.openrdf.store.schemas          |     1 +
 .../META-INF/schemas/cloudbasestore-schema.ttl  |    20 +
 .../org.openrdf.sail.config.SailFactory         |     1 +
 sail/src/main/resources/ehcache.xml             |    46 +
 .../java/mvm/rya/ArbitraryLengthQueryTest.java  |   500 +
 sail/src/test/java/mvm/rya/HashJoinTest.java    |   374 +
 .../test/java/mvm/rya/IterativeJoinTest.java    |   365 +
 sail/src/test/java/mvm/rya/MergeJoinTest.java   |   370 +
 .../rya/RdfCloudTripleStoreConnectionTest.java  |  1363 ++
 .../java/mvm/rya/RdfCloudTripleStoreTest.java   |   699 +
 .../mvm/rya/RdfCloudTripleStoreUtilsTest.java   |    86 +
 .../QueryJoinSelectOptimizerTest.java           |   992 ++
 ...toreSelectivityEvaluationStatisticsTest.java |   304 +
 .../rya/triplestore/inference/SameAsTest.java   |   115 +
 sail/src/test/resources/cdrdf.xml               |    41 +
 sail/src/test/resources/namedgraphs.trig        |    37 +
 sail/src/test/resources/ntriples.nt             |     1 +
 sail/src/test/resources/reification.xml         |    36 +
 sail/src/test/resources/univ-bench.owl          |   466 +
 utils/cloudbase.utils/pom.xml                   |    67 -
 .../utils/bulk/KeyRangePartitioner.java         |    35 -
 .../utils/filters/TimeRangeFilter.java          |    64 -
 .../input/CloudbaseBatchScannerInputFormat.java |   872 -
 .../utils/scanner/BatchScannerList.java         |   108 -
 .../cloudbase/utils/scanner/RangesScanner.java  |   236 -
 .../cloudbase/utils/shard/HashAlgorithm.java    |     9 -
 .../utils/shard/HashCodeHashAlgorithm.java      |    14 -
 .../utils/shard/ShardedBatchWriter.java         |    51 -
 .../cloudbase/utils/shard/ShardedConnector.java |   158 -
 ...mentTabletServerBatchReaderIteratorTest.java |    15 -
 .../utils/filters/TimeRangeFilterTest.java      |    53 -
 .../utils/shard/ShardedConnectorTest.java       |    76 -
 utils/pom.xml                                   |    22 -
 web/pom.xml                                     |    30 +-
 web/web.rya/pom.xml                             |   163 +-
 web/web.rya/resources/environment.properties    |    19 +-
 .../cloudbase/sail/AbstractRDFWebServlet.java   |    34 +-
 .../web/cloudbase/sail/DeleteDataServlet.java   |    34 +-
 .../rdf/web/cloudbase/sail/LoadDataServlet.java |    34 +-
 .../web/cloudbase/sail/QueryDataServlet.java    |    34 +-
 .../cloudbase/sail/QuerySerqlDataServlet.java   |    34 +-
 .../rdf/web/cloudbase/sail/RDFWebConstants.java |    34 +-
 .../mvm/cloud/rdf/web/sail/RdfController.java   |    31 +-
 .../mvm/cloud/rdf/web/sail/ResultFormat.java    |    31 +-
 .../rdf/web/sail/SecurityProviderImpl.java      |    20 +
 .../webapp/WEB-INF/spring/spring-accumulo.xml   |    20 +
 .../webapp/WEB-INF/spring/spring-cloudbase.xml  |    20 +
 .../webapp/WEB-INF/spring/spring-mongodb.xml    |    20 +
 .../WEB-INF/spring/spring-root-extensions.xml   |    20 +
 .../main/webapp/WEB-INF/spring/spring-root.xml  |    20 +
 .../webapp/WEB-INF/spring/spring-security.xml   |    20 +
 web/web.rya/src/main/webapp/WEB-INF/web.xml     |    20 +
 web/web.rya/src/main/webapp/crossdomain.xml     |    22 +-
 web/web.rya/src/main/webapp/sparqlQuery.jsp     |    21 +-
 .../cloudbase/sail/DeleteDataServletRun.java    |    31 +-
 .../web/cloudbase/sail/LoadDataServletRun.java  |    31 +-
 .../web/cloudbase/sail/QueryDataServletRun.java |    31 +-
 .../web/sail/RdfControllerIntegrationTest.java  |    20 +
 .../cloud/rdf/web/sail/RdfControllerTest.java   |    31 +-
 web/web.rya/src/test/resources/cdrdf.xml        |    39 +-
 .../controllerIntegrationTest-accumulo.xml      |    37 +-
 .../controllerIntegrationTest-root.xml          |    21 +
 .../test/resources/controllerTest-context.xml   |    37 +-
 793 files changed, 31450 insertions(+), 63210 deletions(-)
----------------------------------------------------------------------


http://git-wip-us.apache.org/repos/asf/incubator-rya/blob/80faf06d/NOTICE
----------------------------------------------------------------------
diff --git a/NOTICE b/NOTICE
new file mode 100644
index 0000000..7148307
--- /dev/null
+++ b/NOTICE
@@ -0,0 +1,6 @@
+Apache Rya
+Copyright 2015 The Apache Software Foundation
+
+This product includes software developed at
+The Apache Software Foundation (http://www.apache.org/).
+

http://git-wip-us.apache.org/repos/asf/incubator-rya/blob/80faf06d/README.md
----------------------------------------------------------------------
diff --git a/README.md b/README.md
index bc548a0..7869bef 100644
--- a/README.md
+++ b/README.md
@@ -1,3 +1,20 @@
+<!-- Licensed to the Apache Software Foundation (ASF) under one
+or more contributor license agreements.  See the NOTICE file
+distributed with this work for additional information
+regarding copyright ownership.  The ASF licenses this file
+to you under the Apache License, Version 2.0 (the
+"License"); you may not use this file except in compliance
+with the License.  You may obtain a copy of the License at
+
+  http://www.apache.org/licenses/LICENSE-2.0
+
+Unless required by applicable law or agreed to in writing,
+software distributed under the License is distributed on an
+"AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+KIND, either express or implied.  See the License for the
+specific language governing permissions and limitations
+under the License. -->
+
 # RYA
 
 ## Overview

http://git-wip-us.apache.org/repos/asf/incubator-rya/blob/80faf06d/common/pom.xml
----------------------------------------------------------------------
diff --git a/common/pom.xml b/common/pom.xml
index 478c6db..c14d712 100644
--- a/common/pom.xml
+++ b/common/pom.xml
@@ -1,17 +1,39 @@
 <?xml version="1.0" encoding="utf-8"?>
-<project xmlns="http://maven.apache.org/POM/4.0.0" xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance" xsi:schemaLocation="http://maven.apache.org/POM/4.0.0 http://maven.apache.org/xsd/maven-4.0.0.xsd">
 
+<!--
+Licensed to the Apache Software Foundation (ASF) under one
+or more contributor license agreements.  See the NOTICE file
+distributed with this work for additional information
+regarding copyright ownership.  The ASF licenses this file
+to you under the Apache License, Version 2.0 (the
+"License"); you may not use this file except in compliance
+with the License.  You may obtain a copy of the License at
+
+  http://www.apache.org/licenses/LICENSE-2.0
+
+Unless required by applicable law or agreed to in writing,
+software distributed under the License is distributed on an
+"AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+KIND, either express or implied.  See the License for the
+specific language governing permissions and limitations
+under the License.
+-->
+
+<project xmlns="http://maven.apache.org/POM/4.0.0" xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance" xsi:schemaLocation="http://maven.apache.org/POM/4.0.0 http://maven.apache.org/xsd/maven-4.0.0.xsd">
     <modelVersion>4.0.0</modelVersion>
     <parent>
-        <groupId>mvm.rya</groupId>
-        <artifactId>parent</artifactId>
+        <groupId>org.apache.rya</groupId>
+        <artifactId>rya-project</artifactId>
         <version>3.2.10-SNAPSHOT</version>
     </parent>
+
     <artifactId>rya.common</artifactId>
+    <name>Apache Rya Common Projects</name>
+
     <packaging>pom</packaging>
-    <name>${project.groupId}.${project.artifactId}</name>
+
     <modules>
         <module>rya.api</module>
-      <module>rya.provenance</module>
+        <module>rya.provenance</module>
     </modules>
 </project>

http://git-wip-us.apache.org/repos/asf/incubator-rya/blob/80faf06d/common/rya.api/pom.xml
----------------------------------------------------------------------
diff --git a/common/rya.api/pom.xml b/common/rya.api/pom.xml
index 386d2c8..7c90521 100644
--- a/common/rya.api/pom.xml
+++ b/common/rya.api/pom.xml
@@ -1,44 +1,58 @@
 <?xml version="1.0" encoding="UTF-8"?>
+
+<!--
+Licensed to the Apache Software Foundation (ASF) under one
+or more contributor license agreements.  See the NOTICE file
+distributed with this work for additional information
+regarding copyright ownership.  The ASF licenses this file
+to you under the Apache License, Version 2.0 (the
+"License"); you may not use this file except in compliance
+with the License.  You may obtain a copy of the License at
+
+  http://www.apache.org/licenses/LICENSE-2.0
+
+Unless required by applicable law or agreed to in writing,
+software distributed under the License is distributed on an
+"AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+KIND, either express or implied.  See the License for the
+specific language governing permissions and limitations
+under the License.
+-->
+
 <project xmlns="http://maven.apache.org/POM/4.0.0" xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance" xsi:schemaLocation="http://maven.apache.org/POM/4.0.0 http://maven.apache.org/xsd/maven-4.0.0.xsd">
+    <modelVersion>4.0.0</modelVersion>
     <parent>
-        <groupId>mvm.rya</groupId>
+        <groupId>org.apache.rya</groupId>
         <artifactId>rya.common</artifactId>
         <version>3.2.10-SNAPSHOT</version>
     </parent>
-    <modelVersion>4.0.0</modelVersion>
 
     <artifactId>rya.api</artifactId>
-    <name>${project.groupId}.${project.artifactId}</name>
+    <name>Apache Rya Common API</name>
+
     <dependencies>
         <dependency>
             <groupId>org.calrissian.mango</groupId>
             <artifactId>mango-core</artifactId>
         </dependency>
+
         <dependency>
             <groupId>org.openrdf.sesame</groupId>
             <artifactId>sesame-model</artifactId>
-            <version>${openrdf.sesame.version}</version>
-        </dependency>
-        <dependency>
-            <groupId>junit</groupId>
-            <artifactId>junit</artifactId>
-            <scope>test</scope>
         </dependency>
         <dependency>
             <groupId>org.openrdf.sesame</groupId>
             <artifactId>sesame-query</artifactId>
-            <version>${openrdf.sesame.version}</version>
         </dependency>
         <dependency>
             <groupId>org.openrdf.sesame</groupId>
             <artifactId>sesame-queryalgebra-model</artifactId>
-            <version>${openrdf.sesame.version}</version>
         </dependency>
         <dependency>
             <groupId>org.openrdf.sesame</groupId>
             <artifactId>sesame-queryalgebra-evaluation</artifactId>
-            <version>${openrdf.sesame.version}</version>
         </dependency>
+
         <dependency>
             <groupId>com.google.guava</groupId>
             <artifactId>guava</artifactId>
@@ -48,13 +62,15 @@
             <artifactId>hadoop-common</artifactId>
         </dependency>
         <dependency>
-            <groupId>org.apache.hadoop</groupId>
-            <artifactId>hadoop-core</artifactId>
-        </dependency>
-        <dependency>
             <groupId>joda-time</groupId>
             <artifactId>joda-time</artifactId>
         </dependency>
+
+        <dependency>
+            <groupId>junit</groupId>
+            <artifactId>junit</artifactId>
+            <scope>test</scope>
+        </dependency>
     </dependencies>
 
 </project>

http://git-wip-us.apache.org/repos/asf/incubator-rya/blob/80faf06d/common/rya.api/src/main/java/mvm/rya/api/InvalidValueTypeMarkerRuntimeException.java
----------------------------------------------------------------------
diff --git a/common/rya.api/src/main/java/mvm/rya/api/InvalidValueTypeMarkerRuntimeException.java b/common/rya.api/src/main/java/mvm/rya/api/InvalidValueTypeMarkerRuntimeException.java
index 4d8ce5a..eea50f4 100644
--- a/common/rya.api/src/main/java/mvm/rya/api/InvalidValueTypeMarkerRuntimeException.java
+++ b/common/rya.api/src/main/java/mvm/rya/api/InvalidValueTypeMarkerRuntimeException.java
@@ -1,25 +1,26 @@
 package mvm.rya.api;
 
 /*
- * #%L
- * mvm.rya.rya.api
- * %%
- * Copyright (C) 2014 Rya
- * %%
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * You may obtain a copy of the License at
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
  * 
- *      http://www.apache.org/licenses/LICENSE-2.0
+ *   http://www.apache.org/licenses/LICENSE-2.0
  * 
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- * #L%
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied.  See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
  */
 
+
+
 /**
  * Class InvalidValueTypeMarkerRuntimeException
  * Date: Jan 7, 2011

http://git-wip-us.apache.org/repos/asf/incubator-rya/blob/80faf06d/common/rya.api/src/main/java/mvm/rya/api/RdfCloudTripleStoreConfiguration.java
----------------------------------------------------------------------
diff --git a/common/rya.api/src/main/java/mvm/rya/api/RdfCloudTripleStoreConfiguration.java b/common/rya.api/src/main/java/mvm/rya/api/RdfCloudTripleStoreConfiguration.java
index 077e268..1d0e165 100644
--- a/common/rya.api/src/main/java/mvm/rya/api/RdfCloudTripleStoreConfiguration.java
+++ b/common/rya.api/src/main/java/mvm/rya/api/RdfCloudTripleStoreConfiguration.java
@@ -1,25 +1,26 @@
 package mvm.rya.api;
 
 /*
- * #%L
- * mvm.rya.rya.api
- * %%
- * Copyright (C) 2014 Rya
- * %%
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * You may obtain a copy of the License at
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
  * 
- *      http://www.apache.org/licenses/LICENSE-2.0
+ *   http://www.apache.org/licenses/LICENSE-2.0
  * 
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- * #L%
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied.  See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
  */
 
+
+
 import java.util.List;
 
 import mvm.rya.api.layout.TableLayoutStrategy;

http://git-wip-us.apache.org/repos/asf/incubator-rya/blob/80faf06d/common/rya.api/src/main/java/mvm/rya/api/RdfCloudTripleStoreConstants.java
----------------------------------------------------------------------
diff --git a/common/rya.api/src/main/java/mvm/rya/api/RdfCloudTripleStoreConstants.java b/common/rya.api/src/main/java/mvm/rya/api/RdfCloudTripleStoreConstants.java
index d04ba27..5311bd9 100644
--- a/common/rya.api/src/main/java/mvm/rya/api/RdfCloudTripleStoreConstants.java
+++ b/common/rya.api/src/main/java/mvm/rya/api/RdfCloudTripleStoreConstants.java
@@ -1,25 +1,26 @@
 package mvm.rya.api;
 
 /*
- * #%L
- * mvm.rya.rya.api
- * %%
- * Copyright (C) 2014 Rya
- * %%
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * You may obtain a copy of the License at
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
  * 
- *      http://www.apache.org/licenses/LICENSE-2.0
+ *   http://www.apache.org/licenses/LICENSE-2.0
  * 
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- * #L%
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied.  See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
  */
 
+
+
 import mvm.rya.api.domain.RyaSchema;
 import mvm.rya.api.domain.RyaType;
 import mvm.rya.api.domain.RyaURI;

http://git-wip-us.apache.org/repos/asf/incubator-rya/blob/80faf06d/common/rya.api/src/main/java/mvm/rya/api/RdfCloudTripleStoreStatement.java
----------------------------------------------------------------------
diff --git a/common/rya.api/src/main/java/mvm/rya/api/RdfCloudTripleStoreStatement.java b/common/rya.api/src/main/java/mvm/rya/api/RdfCloudTripleStoreStatement.java
index fa2bf5f..4a13c01 100644
--- a/common/rya.api/src/main/java/mvm/rya/api/RdfCloudTripleStoreStatement.java
+++ b/common/rya.api/src/main/java/mvm/rya/api/RdfCloudTripleStoreStatement.java
@@ -1,25 +1,26 @@
 package mvm.rya.api;
 
 /*
- * #%L
- * mvm.rya.rya.api
- * %%
- * Copyright (C) 2014 Rya
- * %%
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * You may obtain a copy of the License at
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
  * 
- *      http://www.apache.org/licenses/LICENSE-2.0
+ *   http://www.apache.org/licenses/LICENSE-2.0
  * 
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- * #L%
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied.  See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
  */
 
+
+
 import org.openrdf.model.Resource;
 import org.openrdf.model.Statement;
 import org.openrdf.model.URI;

http://git-wip-us.apache.org/repos/asf/incubator-rya/blob/80faf06d/common/rya.api/src/main/java/mvm/rya/api/RdfCloudTripleStoreUtils.java
----------------------------------------------------------------------
diff --git a/common/rya.api/src/main/java/mvm/rya/api/RdfCloudTripleStoreUtils.java b/common/rya.api/src/main/java/mvm/rya/api/RdfCloudTripleStoreUtils.java
index 2d04c11..eeadb9b 100644
--- a/common/rya.api/src/main/java/mvm/rya/api/RdfCloudTripleStoreUtils.java
+++ b/common/rya.api/src/main/java/mvm/rya/api/RdfCloudTripleStoreUtils.java
@@ -1,25 +1,26 @@
 package mvm.rya.api;
 
 /*
- * #%L
- * mvm.rya.rya.api
- * %%
- * Copyright (C) 2014 Rya
- * %%
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * You may obtain a copy of the License at
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
  * 
- *      http://www.apache.org/licenses/LICENSE-2.0
+ *   http://www.apache.org/licenses/LICENSE-2.0
  * 
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- * #L%
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied.  See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
  */
 
+
+
 import mvm.rya.api.layout.TableLayoutStrategy;
 import mvm.rya.api.layout.TablePrefixLayoutStrategy;
 import org.openrdf.model.Literal;
@@ -416,4 +417,4 @@ public class RdfCloudTripleStoreUtils {
             return null;
     }
 
-}
\ No newline at end of file
+}

http://git-wip-us.apache.org/repos/asf/incubator-rya/blob/80faf06d/common/rya.api/src/main/java/mvm/rya/api/date/DateTimeTtlValueConverter.java
----------------------------------------------------------------------
diff --git a/common/rya.api/src/main/java/mvm/rya/api/date/DateTimeTtlValueConverter.java b/common/rya.api/src/main/java/mvm/rya/api/date/DateTimeTtlValueConverter.java
index e73a71b..199b63d 100644
--- a/common/rya.api/src/main/java/mvm/rya/api/date/DateTimeTtlValueConverter.java
+++ b/common/rya.api/src/main/java/mvm/rya/api/date/DateTimeTtlValueConverter.java
@@ -1,25 +1,26 @@
 package mvm.rya.api.date;
 
 /*
- * #%L
- * mvm.rya.rya.api
- * %%
- * Copyright (C) 2014 Rya
- * %%
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * You may obtain a copy of the License at
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
  * 
- *      http://www.apache.org/licenses/LICENSE-2.0
+ *   http://www.apache.org/licenses/LICENSE-2.0
  * 
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- * #L%
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied.  See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
  */
 
+
+
 import org.openrdf.model.Value;
 
 import javax.xml.datatype.DatatypeConfigurationException;

http://git-wip-us.apache.org/repos/asf/incubator-rya/blob/80faf06d/common/rya.api/src/main/java/mvm/rya/api/date/TimestampTtlStrValueConverter.java
----------------------------------------------------------------------
diff --git a/common/rya.api/src/main/java/mvm/rya/api/date/TimestampTtlStrValueConverter.java b/common/rya.api/src/main/java/mvm/rya/api/date/TimestampTtlStrValueConverter.java
index 297cf5c..de4ff8b 100644
--- a/common/rya.api/src/main/java/mvm/rya/api/date/TimestampTtlStrValueConverter.java
+++ b/common/rya.api/src/main/java/mvm/rya/api/date/TimestampTtlStrValueConverter.java
@@ -1,25 +1,26 @@
 package mvm.rya.api.date;
 
 /*
- * #%L
- * mvm.rya.rya.api
- * %%
- * Copyright (C) 2014 Rya
- * %%
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * You may obtain a copy of the License at
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
  * 
- *      http://www.apache.org/licenses/LICENSE-2.0
+ *   http://www.apache.org/licenses/LICENSE-2.0
  * 
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- * #L%
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied.  See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
  */
 
+
+
 import org.openrdf.model.Value;
 
 /**

http://git-wip-us.apache.org/repos/asf/incubator-rya/blob/80faf06d/common/rya.api/src/main/java/mvm/rya/api/date/TimestampTtlValueConverter.java
----------------------------------------------------------------------
diff --git a/common/rya.api/src/main/java/mvm/rya/api/date/TimestampTtlValueConverter.java b/common/rya.api/src/main/java/mvm/rya/api/date/TimestampTtlValueConverter.java
index a709148..75366dc 100644
--- a/common/rya.api/src/main/java/mvm/rya/api/date/TimestampTtlValueConverter.java
+++ b/common/rya.api/src/main/java/mvm/rya/api/date/TimestampTtlValueConverter.java
@@ -1,25 +1,26 @@
 package mvm.rya.api.date;
 
 /*
- * #%L
- * mvm.rya.rya.api
- * %%
- * Copyright (C) 2014 Rya
- * %%
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * You may obtain a copy of the License at
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
  * 
- *      http://www.apache.org/licenses/LICENSE-2.0
+ *   http://www.apache.org/licenses/LICENSE-2.0
  * 
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- * #L%
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied.  See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
  */
 
+
+
 import org.openrdf.model.Value;
 
 /**

http://git-wip-us.apache.org/repos/asf/incubator-rya/blob/80faf06d/common/rya.api/src/main/java/mvm/rya/api/date/TtlValueConverter.java
----------------------------------------------------------------------
diff --git a/common/rya.api/src/main/java/mvm/rya/api/date/TtlValueConverter.java b/common/rya.api/src/main/java/mvm/rya/api/date/TtlValueConverter.java
index 078d0f6..1ba9841 100644
--- a/common/rya.api/src/main/java/mvm/rya/api/date/TtlValueConverter.java
+++ b/common/rya.api/src/main/java/mvm/rya/api/date/TtlValueConverter.java
@@ -1,25 +1,26 @@
 package mvm.rya.api.date;
 
 /*
- * #%L
- * mvm.rya.rya.api
- * %%
- * Copyright (C) 2014 Rya
- * %%
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * You may obtain a copy of the License at
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
  * 
- *      http://www.apache.org/licenses/LICENSE-2.0
+ *   http://www.apache.org/licenses/LICENSE-2.0
  * 
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- * #L%
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied.  See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
  */
 
+
+
 import org.openrdf.model.Value;
 import org.openrdf.model.ValueFactory;
 import org.openrdf.model.impl.ValueFactoryImpl;

http://git-wip-us.apache.org/repos/asf/incubator-rya/blob/80faf06d/common/rya.api/src/main/java/mvm/rya/api/domain/Node.java
----------------------------------------------------------------------
diff --git a/common/rya.api/src/main/java/mvm/rya/api/domain/Node.java b/common/rya.api/src/main/java/mvm/rya/api/domain/Node.java
index dea3a76..f5ca08c 100644
--- a/common/rya.api/src/main/java/mvm/rya/api/domain/Node.java
+++ b/common/rya.api/src/main/java/mvm/rya/api/domain/Node.java
@@ -1,25 +1,26 @@
 package mvm.rya.api.domain;
 
 /*
- * #%L
- * mvm.rya.rya.api
- * %%
- * Copyright (C) 2014 Rya
- * %%
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * You may obtain a copy of the License at
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
  * 
- *      http://www.apache.org/licenses/LICENSE-2.0
+ *   http://www.apache.org/licenses/LICENSE-2.0
  * 
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- * #L%
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied.  See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
  */
 
+
+
 import org.openrdf.model.impl.URIImpl;
 
 /**

http://git-wip-us.apache.org/repos/asf/incubator-rya/blob/80faf06d/common/rya.api/src/main/java/mvm/rya/api/domain/RangeURI.java
----------------------------------------------------------------------
diff --git a/common/rya.api/src/main/java/mvm/rya/api/domain/RangeURI.java b/common/rya.api/src/main/java/mvm/rya/api/domain/RangeURI.java
index f3e2302..67d5742 100644
--- a/common/rya.api/src/main/java/mvm/rya/api/domain/RangeURI.java
+++ b/common/rya.api/src/main/java/mvm/rya/api/domain/RangeURI.java
@@ -1,25 +1,26 @@
 package mvm.rya.api.domain;
 
 /*
- * #%L
- * mvm.rya.rya.api
- * %%
- * Copyright (C) 2014 Rya
- * %%
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * You may obtain a copy of the License at
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
  * 
- *      http://www.apache.org/licenses/LICENSE-2.0
+ *   http://www.apache.org/licenses/LICENSE-2.0
  * 
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- * #L%
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied.  See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
  */
 
+
+
 import org.openrdf.model.URI;
 import org.openrdf.model.Value;
 

http://git-wip-us.apache.org/repos/asf/incubator-rya/blob/80faf06d/common/rya.api/src/main/java/mvm/rya/api/domain/RangeValue.java
----------------------------------------------------------------------
diff --git a/common/rya.api/src/main/java/mvm/rya/api/domain/RangeValue.java b/common/rya.api/src/main/java/mvm/rya/api/domain/RangeValue.java
index 67c37e7..c27edfd 100644
--- a/common/rya.api/src/main/java/mvm/rya/api/domain/RangeValue.java
+++ b/common/rya.api/src/main/java/mvm/rya/api/domain/RangeValue.java
@@ -1,25 +1,26 @@
 package mvm.rya.api.domain;
 
 /*
- * #%L
- * mvm.rya.rya.api
- * %%
- * Copyright (C) 2014 Rya
- * %%
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * You may obtain a copy of the License at
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
  * 
- *      http://www.apache.org/licenses/LICENSE-2.0
+ *   http://www.apache.org/licenses/LICENSE-2.0
  * 
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- * #L%
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied.  See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
  */
 
+
+
 import org.openrdf.model.Value;
 
 /**

http://git-wip-us.apache.org/repos/asf/incubator-rya/blob/80faf06d/common/rya.api/src/main/java/mvm/rya/api/domain/RyaRange.java
----------------------------------------------------------------------
diff --git a/common/rya.api/src/main/java/mvm/rya/api/domain/RyaRange.java b/common/rya.api/src/main/java/mvm/rya/api/domain/RyaRange.java
index 2214044..e99f451 100644
--- a/common/rya.api/src/main/java/mvm/rya/api/domain/RyaRange.java
+++ b/common/rya.api/src/main/java/mvm/rya/api/domain/RyaRange.java
@@ -1,25 +1,26 @@
 package mvm.rya.api.domain;
 
 /*
- * #%L
- * mvm.rya.rya.api
- * %%
- * Copyright (C) 2014 Rya
- * %%
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * You may obtain a copy of the License at
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
  * 
- *      http://www.apache.org/licenses/LICENSE-2.0
+ *   http://www.apache.org/licenses/LICENSE-2.0
  * 
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- * #L%
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied.  See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
  */
 
+
+
 /**
  * Date: 7/17/12
  * Time: 10:02 AM

http://git-wip-us.apache.org/repos/asf/incubator-rya/blob/80faf06d/common/rya.api/src/main/java/mvm/rya/api/domain/RyaSchema.java
----------------------------------------------------------------------
diff --git a/common/rya.api/src/main/java/mvm/rya/api/domain/RyaSchema.java b/common/rya.api/src/main/java/mvm/rya/api/domain/RyaSchema.java
index 380657c..6744d20 100644
--- a/common/rya.api/src/main/java/mvm/rya/api/domain/RyaSchema.java
+++ b/common/rya.api/src/main/java/mvm/rya/api/domain/RyaSchema.java
@@ -1,25 +1,26 @@
 package mvm.rya.api.domain;
 
 /*
- * #%L
- * mvm.rya.rya.api
- * %%
- * Copyright (C) 2014 Rya
- * %%
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * You may obtain a copy of the License at
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
  * 
- *      http://www.apache.org/licenses/LICENSE-2.0
+ *   http://www.apache.org/licenses/LICENSE-2.0
  * 
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- * #L%
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied.  See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
  */
 
+
+
 import mvm.rya.api.RdfCloudTripleStoreConstants;
 import org.openrdf.model.URI;
 

http://git-wip-us.apache.org/repos/asf/incubator-rya/blob/80faf06d/common/rya.api/src/main/java/mvm/rya/api/domain/RyaStatement.java
----------------------------------------------------------------------
diff --git a/common/rya.api/src/main/java/mvm/rya/api/domain/RyaStatement.java b/common/rya.api/src/main/java/mvm/rya/api/domain/RyaStatement.java
index 439eee6..18bde98 100644
--- a/common/rya.api/src/main/java/mvm/rya/api/domain/RyaStatement.java
+++ b/common/rya.api/src/main/java/mvm/rya/api/domain/RyaStatement.java
@@ -1,25 +1,26 @@
 package mvm.rya.api.domain;
 
 /*
- * #%L
- * mvm.rya.rya.api
- * %%
- * Copyright (C) 2014 Rya
- * %%
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * You may obtain a copy of the License at
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
  * 
- *      http://www.apache.org/licenses/LICENSE-2.0
+ *   http://www.apache.org/licenses/LICENSE-2.0
  * 
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- * #L%
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied.  See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
  */
 
+
+
 import java.util.Arrays;
 
 /**

http://git-wip-us.apache.org/repos/asf/incubator-rya/blob/80faf06d/common/rya.api/src/main/java/mvm/rya/api/domain/RyaType.java
----------------------------------------------------------------------
diff --git a/common/rya.api/src/main/java/mvm/rya/api/domain/RyaType.java b/common/rya.api/src/main/java/mvm/rya/api/domain/RyaType.java
index 9af0cc7..ab580d6 100644
--- a/common/rya.api/src/main/java/mvm/rya/api/domain/RyaType.java
+++ b/common/rya.api/src/main/java/mvm/rya/api/domain/RyaType.java
@@ -1,26 +1,27 @@
 package mvm.rya.api.domain;
 
 /*
- * #%L
- * mvm.rya.rya.api
- * %%
- * Copyright (C) 2014 Rya
- * %%
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * You may obtain a copy of the License at
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
  * 
- *      http://www.apache.org/licenses/LICENSE-2.0
+ *   http://www.apache.org/licenses/LICENSE-2.0
  * 
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- * #L%
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied.  See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
  */
 
 
+
+
 import org.openrdf.model.URI;
 import org.openrdf.model.vocabulary.XMLSchema;
 



[54/56] [abbrv] incubator-rya git commit: RYA-25 Apache RAT check during maven build

Posted by mi...@apache.org.
RYA-25 Apache RAT check during maven build

Modified the parent POM so that the Apache RAT plugin runs by default
during the build; modified the blueprints log4j properties to write the
temporary log file under target; removed an extraneous maven-shade-plugin
declaration.
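
With the check goal bound to a default execution, the RAT audit now runs
as part of an ordinary build rather than only when invoked by hand. The
plugin can still be run directly; a minimal invocation (matching the
command noted in the comments removed from the old POM) is:

    mvn apache-rat:check

Files that lack the ASF license header and are not covered by an exclude
pattern (such as the RDF data files and META-INF/services resources) will
fail the check.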


Project: http://git-wip-us.apache.org/repos/asf/incubator-rya/repo
Commit: http://git-wip-us.apache.org/repos/asf/incubator-rya/commit/1eae9015
Tree: http://git-wip-us.apache.org/repos/asf/incubator-rya/tree/1eae9015
Diff: http://git-wip-us.apache.org/repos/asf/incubator-rya/diff/1eae9015

Branch: refs/heads/master
Commit: 1eae9015ede1a95a881fd3bc26476a244695e030
Parents: e6be84a
Author: Aaron Mihalik <mi...@alum.mit.edu>
Authored: Fri Dec 18 15:24:13 2015 -0500
Committer: Aaron Mihalik <mi...@alum.mit.edu>
Committed: Fri Dec 18 15:24:13 2015 -0500

----------------------------------------------------------------------
 .../src/test/resources/log4j.properties         | 10 +++++++
 pom.xml                                         | 31 ++++++++++----------
 2 files changed, 26 insertions(+), 15 deletions(-)
----------------------------------------------------------------------


http://git-wip-us.apache.org/repos/asf/incubator-rya/blob/1eae9015/extras/tinkerpop.rya/src/test/resources/log4j.properties
----------------------------------------------------------------------
diff --git a/extras/tinkerpop.rya/src/test/resources/log4j.properties b/extras/tinkerpop.rya/src/test/resources/log4j.properties
index 598d7b5..7142ee3 100644
--- a/extras/tinkerpop.rya/src/test/resources/log4j.properties
+++ b/extras/tinkerpop.rya/src/test/resources/log4j.properties
@@ -16,4 +16,14 @@
 # under the License.
 
 
+log4j.debug=false
 
+#log4j.rootLogger=INFO, console
+log4j.category.org.openrdf=INFO, mob
+log4j.category.org.xml=INFO, mob
+
+log4j.appender.mob=org.apache.log4j.FileAppender
+log4j.appender.mob.file=target/blueprints.log
+log4j.appender.mob.append=true
+log4j.appender.mob.layout=org.apache.log4j.PatternLayout
+log4j.appender.mob.layout.ConversionPattern=%d [%t] %p %C{1} - %m%n

http://git-wip-us.apache.org/repos/asf/incubator-rya/blob/1eae9015/pom.xml
----------------------------------------------------------------------
diff --git a/pom.xml b/pom.xml
index dce799f..dcbba4c 100644
--- a/pom.xml
+++ b/pom.xml
@@ -608,12 +608,20 @@ under the License.
                         </execution>
                     </executions>
                 </plugin>
-                <plugin>
-                    <!-- Apache Release Audit Tool - reports missing license headers and other issues. -->
-                    <!-- mvn apache-rat:rat -->
-                    <!-- mvn apache-rat:check -->
-                    <groupId>org.apache.rat</groupId>
-                    <artifactId>apache-rat-plugin</artifactId>
+            </plugins>
+        </pluginManagement>
+
+        <plugins>
+            <plugin>
+                <!-- Apache Release Audit Tool - reports missing license headers and other issues. -->
+                <groupId>org.apache.rat</groupId>
+                <artifactId>apache-rat-plugin</artifactId>
+                <executions>
+                    <execution>
+                        <id>check-licenses</id>
+                        <goals>
+                            <goal>check</goal>
+                        </goals>
                     <configuration>
                         <excludes>
                             <!-- RDF data Files -->
@@ -627,15 +635,8 @@ under the License.
                             <exclude>**/resources/META-INF/services/**</exclude>
                         </excludes>
                     </configuration>
-                </plugin>
-            </plugins>
-        </pluginManagement>
-
-        <plugins>
-            <plugin>
-                <groupId>org.apache.maven.plugins</groupId>
-                <artifactId>maven-shade-plugin</artifactId>
-                <version>2.3</version>
+                    </execution>
+                </executions>
             </plugin>
             <plugin>
                 <artifactId>maven-source-plugin</artifactId>


[30/56] [abbrv] incubator-rya git commit: RYA-7 POM and License Clean-up for Apache Move

Posted by mi...@apache.org.
http://git-wip-us.apache.org/repos/asf/incubator-rya/blob/80faf06d/extras/rya.manual/src/site/markdown/sm-infer.md
----------------------------------------------------------------------
diff --git a/extras/rya.manual/src/site/markdown/sm-infer.md b/extras/rya.manual/src/site/markdown/sm-infer.md
index 712af78..a2b0b66 100644
--- a/extras/rya.manual/src/site/markdown/sm-infer.md
+++ b/extras/rya.manual/src/site/markdown/sm-infer.md
@@ -1,3 +1,24 @@
+
+<!--
+
+[comment]: # Licensed to the Apache Software Foundation (ASF) under one
+[comment]: # or more contributor license agreements.  See the NOTICE file
+[comment]: # distributed with this work for additional information
+[comment]: # regarding copyright ownership.  The ASF licenses this file
+[comment]: # to you under the Apache License, Version 2.0 (the
+[comment]: # "License"); you may not use this file except in compliance
+[comment]: # with the License.  You may obtain a copy of the License at
+[comment]: # 
+[comment]: #   http://www.apache.org/licenses/LICENSE-2.0
+[comment]: # 
+[comment]: # Unless required by applicable law or agreed to in writing,
+[comment]: # software distributed under the License is distributed on an
+[comment]: # "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+[comment]: # KIND, either express or implied.  See the License for the
+[comment]: # specific language governing permissions and limitations
+[comment]: # under the License.
+
+-->
 # Inferencing
 
 Rya currently provides simple inferencing. The supported list of inferred relationships include:

http://git-wip-us.apache.org/repos/asf/incubator-rya/blob/80faf06d/extras/rya.manual/src/site/markdown/sm-namedgraph.md
----------------------------------------------------------------------
diff --git a/extras/rya.manual/src/site/markdown/sm-namedgraph.md b/extras/rya.manual/src/site/markdown/sm-namedgraph.md
index f869adc..6826345 100644
--- a/extras/rya.manual/src/site/markdown/sm-namedgraph.md
+++ b/extras/rya.manual/src/site/markdown/sm-namedgraph.md
@@ -1,3 +1,24 @@
+
+<!--
+
+[comment]: # Licensed to the Apache Software Foundation (ASF) under one
+[comment]: # or more contributor license agreements.  See the NOTICE file
+[comment]: # distributed with this work for additional information
+[comment]: # regarding copyright ownership.  The ASF licenses this file
+[comment]: # to you under the Apache License, Version 2.0 (the
+[comment]: # "License"); you may not use this file except in compliance
+[comment]: # with the License.  You may obtain a copy of the License at
+[comment]: # 
+[comment]: #   http://www.apache.org/licenses/LICENSE-2.0
+[comment]: # 
+[comment]: # Unless required by applicable law or agreed to in writing,
+[comment]: # software distributed under the License is distributed on an
+[comment]: # "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+[comment]: # KIND, either express or implied.  See the License for the
+[comment]: # specific language governing permissions and limitations
+[comment]: # under the License.
+
+-->
 # Named Graphs
 
 Named graphs are supported simply in the Rdf Store in a few ways. OpenRdf supports sending `contexts` as each triple is saved.

http://git-wip-us.apache.org/repos/asf/incubator-rya/blob/80faf06d/extras/rya.manual/src/site/markdown/sm-simpleaqr.md
----------------------------------------------------------------------
diff --git a/extras/rya.manual/src/site/markdown/sm-simpleaqr.md b/extras/rya.manual/src/site/markdown/sm-simpleaqr.md
index a509195..cb8f068 100644
--- a/extras/rya.manual/src/site/markdown/sm-simpleaqr.md
+++ b/extras/rya.manual/src/site/markdown/sm-simpleaqr.md
@@ -1,3 +1,24 @@
+
+<!--
+
+[comment]: # Licensed to the Apache Software Foundation (ASF) under one
+[comment]: # or more contributor license agreements.  See the NOTICE file
+[comment]: # distributed with this work for additional information
+[comment]: # regarding copyright ownership.  The ASF licenses this file
+[comment]: # to you under the Apache License, Version 2.0 (the
+[comment]: # "License"); you may not use this file except in compliance
+[comment]: # with the License.  You may obtain a copy of the License at
+[comment]: # 
+[comment]: #   http://www.apache.org/licenses/LICENSE-2.0
+[comment]: # 
+[comment]: # Unless required by applicable law or agreed to in writing,
+[comment]: # software distributed under the License is distributed on an
+[comment]: # "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+[comment]: # KIND, either express or implied.  See the License for the
+[comment]: # specific language governing permissions and limitations
+[comment]: # under the License.
+
+-->
 # Simple Add Query and Remove of Statements
 
 This quick tutorial will give a small example on how to add, query, and remove statements from Rya

http://git-wip-us.apache.org/repos/asf/incubator-rya/blob/80faf06d/extras/rya.manual/src/site/markdown/sm-sparqlquery.md
----------------------------------------------------------------------
diff --git a/extras/rya.manual/src/site/markdown/sm-sparqlquery.md b/extras/rya.manual/src/site/markdown/sm-sparqlquery.md
index cff732a..639ca02 100644
--- a/extras/rya.manual/src/site/markdown/sm-sparqlquery.md
+++ b/extras/rya.manual/src/site/markdown/sm-sparqlquery.md
@@ -1,3 +1,24 @@
+
+<!--
+
+[comment]: # Licensed to the Apache Software Foundation (ASF) under one
+[comment]: # or more contributor license agreements.  See the NOTICE file
+[comment]: # distributed with this work for additional information
+[comment]: # regarding copyright ownership.  The ASF licenses this file
+[comment]: # to you under the Apache License, Version 2.0 (the
+[comment]: # "License"); you may not use this file except in compliance
+[comment]: # with the License.  You may obtain a copy of the License at
+[comment]: # 
+[comment]: #   http://www.apache.org/licenses/LICENSE-2.0
+[comment]: # 
+[comment]: # Unless required by applicable law or agreed to in writing,
+[comment]: # software distributed under the License is distributed on an
+[comment]: # "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+[comment]: # KIND, either express or implied.  See the License for the
+[comment]: # specific language governing permissions and limitations
+[comment]: # under the License.
+
+-->
 # Simple Add Query and Remove of Statements
 
 This quick tutorial will give a small example on how to query data with SPARQL

http://git-wip-us.apache.org/repos/asf/incubator-rya/blob/80faf06d/extras/rya.manual/src/site/markdown/sm-updatedata.md
----------------------------------------------------------------------
diff --git a/extras/rya.manual/src/site/markdown/sm-updatedata.md b/extras/rya.manual/src/site/markdown/sm-updatedata.md
index 7227f54..f0fe664 100644
--- a/extras/rya.manual/src/site/markdown/sm-updatedata.md
+++ b/extras/rya.manual/src/site/markdown/sm-updatedata.md
@@ -1,3 +1,24 @@
+
+<!--
+
+[comment]: # Licensed to the Apache Software Foundation (ASF) under one
+[comment]: # or more contributor license agreements.  See the NOTICE file
+[comment]: # distributed with this work for additional information
+[comment]: # regarding copyright ownership.  The ASF licenses this file
+[comment]: # to you under the Apache License, Version 2.0 (the
+[comment]: # "License"); you may not use this file except in compliance
+[comment]: # with the License.  You may obtain a copy of the License at
+[comment]: # 
+[comment]: #   http://www.apache.org/licenses/LICENSE-2.0
+[comment]: # 
+[comment]: # Unless required by applicable law or agreed to in writing,
+[comment]: # software distributed under the License is distributed on an
+[comment]: # "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+[comment]: # KIND, either express or implied.  See the License for the
+[comment]: # specific language governing permissions and limitations
+[comment]: # under the License.
+
+-->
 # Sparql Update
 
 OpenRDF supports the Sparql Update functionality. Here are a few samples:

http://git-wip-us.apache.org/repos/asf/incubator-rya/blob/80faf06d/extras/rya.manual/src/site/resources/js/fixmarkdownlinks.js
----------------------------------------------------------------------
diff --git a/extras/rya.manual/src/site/resources/js/fixmarkdownlinks.js b/extras/rya.manual/src/site/resources/js/fixmarkdownlinks.js
index 484c5d3..7fe8834 100644
--- a/extras/rya.manual/src/site/resources/js/fixmarkdownlinks.js
+++ b/extras/rya.manual/src/site/resources/js/fixmarkdownlinks.js
@@ -1,6 +1,25 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ * 
+ *   http://www.apache.org/licenses/LICENSE-2.0
+ * 
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied.  See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+
 window.onload = function() {
     var anchors = document.getElementsByTagName("a");
         for (var i = 0; i < anchors.length; i++) {
             anchors[i].href = anchors[i].href.replace(/\.md$/,'\.html');
         }
-    }
\ No newline at end of file
+    }

http://git-wip-us.apache.org/repos/asf/incubator-rya/blob/80faf06d/extras/rya.manual/src/site/site.xml
----------------------------------------------------------------------
diff --git a/extras/rya.manual/src/site/site.xml b/extras/rya.manual/src/site/site.xml
index 19078a0..a671d3d 100644
--- a/extras/rya.manual/src/site/site.xml
+++ b/extras/rya.manual/src/site/site.xml
@@ -1,4 +1,24 @@
 <?xml version="1.0" encoding="ISO-8859-1"?>
+
+<!--
+Licensed to the Apache Software Foundation (ASF) under one
+or more contributor license agreements.  See the NOTICE file
+distributed with this work for additional information
+regarding copyright ownership.  The ASF licenses this file
+to you under the Apache License, Version 2.0 (the
+"License"); you may not use this file except in compliance
+with the License.  You may obtain a copy of the License at
+
+  http://www.apache.org/licenses/LICENSE-2.0
+
+Unless required by applicable law or agreed to in writing,
+software distributed under the License is distributed on an
+"AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+KIND, either express or implied.  See the License for the
+specific language governing permissions and limitations
+under the License.
+-->
+
 <project name="Maven">
   <skin>
     <groupId>org.apache.maven.skins</groupId>
@@ -42,4 +62,4 @@
         <item name="Building From Source" href="build-source.html"/>
     </menu>
   </body>
-</project>
\ No newline at end of file
+</project>

http://git-wip-us.apache.org/repos/asf/incubator-rya/blob/80faf06d/extras/rya.prospector/pom.xml
----------------------------------------------------------------------
diff --git a/extras/rya.prospector/pom.xml b/extras/rya.prospector/pom.xml
index 7acd2f6..a9b5c61 100644
--- a/extras/rya.prospector/pom.xml
+++ b/extras/rya.prospector/pom.xml
@@ -1,25 +1,46 @@
 <?xml version="1.0" encoding="UTF-8"?>
+
+<!--
+Licensed to the Apache Software Foundation (ASF) under one
+or more contributor license agreements.  See the NOTICE file
+distributed with this work for additional information
+regarding copyright ownership.  The ASF licenses this file
+to you under the Apache License, Version 2.0 (the
+"License"); you may not use this file except in compliance
+with the License.  You may obtain a copy of the License at
+
+  http://www.apache.org/licenses/LICENSE-2.0
+
+Unless required by applicable law or agreed to in writing,
+software distributed under the License is distributed on an
+"AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+KIND, either express or implied.  See the License for the
+specific language governing permissions and limitations
+under the License.
+-->
+
 <project xmlns="http://maven.apache.org/POM/4.0.0" xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance" xsi:schemaLocation="http://maven.apache.org/POM/4.0.0 http://maven.apache.org/xsd/maven-4.0.0.xsd">
+    <modelVersion>4.0.0</modelVersion>
     <parent>
-        <groupId>mvm.rya</groupId>
+        <groupId>org.apache.rya</groupId>
         <artifactId>rya.extras</artifactId>
         <version>3.2.10-SNAPSHOT</version>
     </parent>
-    <modelVersion>4.0.0</modelVersion>
-    <name>${project.groupId}.${project.artifactId}</name>
+
     <artifactId>rya.prospector</artifactId>
+    <name>Apache Rya Prospector</name>
+
     <dependencies>
         <dependency>
-            <groupId>mvm.rya</groupId>
+            <groupId>org.apache.rya</groupId>
             <artifactId>rya.api</artifactId>
-            <exclusions>
-				<exclusion>
-					<artifactId>mockito-all</artifactId>
-					<groupId>org.mockito</groupId>
-				</exclusion>
-			</exclusions>
         </dependency>
         <dependency>
+            <groupId>org.apache.rya</groupId>
+            <artifactId>accumulo.rya</artifactId>
+        </dependency>
+
+        <dependency>
             <groupId>commons-lang</groupId>
             <artifactId>commons-lang</artifactId>
         </dependency>
@@ -28,104 +49,61 @@
             <artifactId>guava</artifactId>
         </dependency>
         <dependency>
-            <groupId>mvm.rya</groupId>
-            <artifactId>accumulo.rya</artifactId>
-        </dependency>
-        <dependency>
             <groupId>org.codehaus.groovy</groupId>
             <artifactId>groovy-all</artifactId>
         </dependency>
+
         <dependency>
             <groupId>org.apache.mrunit</groupId>
             <artifactId>mrunit</artifactId>
-            <version>1.1.0</version>
             <classifier>hadoop2</classifier>
             <scope>test</scope>
         </dependency>
-        
     </dependencies>
+
     <build>
- <plugins>
-<plugin>
-<artifactId>maven-compiler-plugin</artifactId>
-<!-- 2.8.0-01 and later require maven-compiler-plugin 3.1 or higher -->
-<version>3.1</version>
-<configuration>
-<compilerId>groovy-eclipse-compiler</compilerId>
-</configuration>
-<dependencies>
-<dependency>
-<groupId>org.codehaus.groovy</groupId>
-<artifactId>groovy-eclipse-compiler</artifactId>
-<version>2.9.1-01</version>
-</dependency>
-<!-- for 2.8.0-01 and later you must have an explicit dependency on groovy-eclipse-batch -->
-<dependency>
-<groupId>org.codehaus.groovy</groupId>
-<artifactId>groovy-eclipse-batch</artifactId>
-<version>2.3.7-01</version>
-</dependency>
-</dependencies>
-</plugin>
-<plugin>
-<groupId>org.codehaus.groovy</groupId>
-<artifactId>groovy-eclipse-compiler</artifactId>
-<version>2.9.1-01</version>
-<extensions>true</extensions>
-</plugin>
-<plugin>
-<groupId>org.apache.maven.plugins</groupId>
-<artifactId>maven-shade-plugin</artifactId>
-<executions>
-<execution>
-<configuration>
-<shadedArtifactAttached>true</shadedArtifactAttached>
-<shadedClassifierName>map-reduce</shadedClassifierName>
-<transformers>
-<transformer implementation="org.apache.maven.plugins.shade.resource.ServicesResourceTransformer" />
-</transformers>
-</configuration>
-</execution>
-</executions>
-</plugin>
-</plugins>
-     </build>
-    <profiles>
-        <profile>
-            <id>accumulo</id>
-            <activation>
-                <activeByDefault>true</activeByDefault>
-            </activation>
-            <dependencies>
-                <dependency>
-                    <groupId>org.apache.accumulo</groupId>
-                    <artifactId>accumulo-core</artifactId>
-                    <optional>true</optional>
-                </dependency>
-                <dependency>
-                    <groupId>mvm.rya</groupId>
-                    <artifactId>accumulo.iterators</artifactId>
-                    <optional>true</optional>
-                </dependency>
-            </dependencies>
-        </profile>
-        <profile>
-            <id>cloudbase</id>
-            <activation>
-                <activeByDefault>false</activeByDefault>
-            </activation>
-            <dependencies>
-                <dependency>
-                    <groupId>com.texeltek</groupId>
-                    <artifactId>accumulo-cloudbase-shim</artifactId>
-                    <optional>true</optional>
-                </dependency>
-                <dependency>
-                    <groupId>mvm.rya</groupId>
-                    <artifactId>cloudbase.iterators</artifactId>
-                    <optional>true</optional>
-                </dependency>
-            </dependencies>
-        </profile>
-    </profiles>
-</project>
\ No newline at end of file
+        <plugins>
+            <plugin>
+                <artifactId>maven-compiler-plugin</artifactId>
+                <configuration>
+                    <compilerId>groovy-eclipse-compiler</compilerId>
+                </configuration>
+                <dependencies>
+                    <dependency>
+                        <groupId>org.codehaus.groovy</groupId>
+                        <artifactId>groovy-eclipse-compiler</artifactId>
+                        <version>2.9.1-01</version>
+                    </dependency>
+                    <!-- for 2.8.0-01 and later you must have an explicit 
+                        dependency on groovy-eclipse-batch -->
+                    <dependency>
+                        <groupId>org.codehaus.groovy</groupId>
+                        <artifactId>groovy-eclipse-batch</artifactId>
+                        <version>2.3.7-01</version>
+                    </dependency>
+                </dependencies>
+            </plugin>
+            <plugin>
+                <groupId>org.codehaus.groovy</groupId>
+                <artifactId>groovy-eclipse-compiler</artifactId>
+                <version>2.9.1-01</version>
+                <extensions>true</extensions>
+            </plugin>
+            <plugin>
+                <groupId>org.apache.maven.plugins</groupId>
+                <artifactId>maven-shade-plugin</artifactId>
+                <executions>
+                    <execution>
+                        <configuration>
+                            <shadedArtifactAttached>true</shadedArtifactAttached>
+                            <shadedClassifierName>map-reduce</shadedClassifierName>
+                            <transformers>
+                                <transformer implementation="org.apache.maven.plugins.shade.resource.ServicesResourceTransformer" />
+                            </transformers>
+                        </configuration>
+                    </execution>
+                </executions>
+            </plugin>
+        </plugins>
+    </build>
+</project>
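
A note on the shade configuration retained above: the ServicesResourceTransformer concatenates the META-INF/services descriptors of every shaded dependency, which is what keeps java.util.ServiceLoader lookups working inside the "map-reduce" fat jar. Below is a minimal Java sketch of the kind of lookup this protects; the use of ServiceLoader for IndexWorkPlan is suggested by the ServicesBackedIndexWorkPlanManager class later in this commit, but treat the exact lookup as an illustration rather than the project's verbatim wiring.

    import java.util.ServiceLoader;

    import mvm.rya.prospector.plans.IndexWorkPlan;

    public class PlanDiscoverySketch {
        public static void main(String[] args) {
            // Without the ServicesResourceTransformer, shading keeps only one
            // dependency's META-INF/services/mvm.rya.prospector.plans.IndexWorkPlan
            // file and silently drops the rest; with it, every registered plan loads.
            ServiceLoader<IndexWorkPlan> plans = ServiceLoader.load(IndexWorkPlan.class);
            for (IndexWorkPlan plan : plans) {
                System.out.println("Discovered work plan: " + plan.getClass().getName());
            }
        }
    }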

http://git-wip-us.apache.org/repos/asf/incubator-rya/blob/80faf06d/extras/rya.prospector/src/main/groovy/mvm/rya/prospector/domain/IndexEntry.groovy
----------------------------------------------------------------------
diff --git a/extras/rya.prospector/src/main/groovy/mvm/rya/prospector/domain/IndexEntry.groovy b/extras/rya.prospector/src/main/groovy/mvm/rya/prospector/domain/IndexEntry.groovy
index ae08089..6017da4 100644
--- a/extras/rya.prospector/src/main/groovy/mvm/rya/prospector/domain/IndexEntry.groovy
+++ b/extras/rya.prospector/src/main/groovy/mvm/rya/prospector/domain/IndexEntry.groovy
@@ -1,3 +1,22 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ * 
+ *   http://www.apache.org/licenses/LICENSE-2.0
+ * 
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied.  See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+
 package mvm.rya.prospector.domain
 
 /**

http://git-wip-us.apache.org/repos/asf/incubator-rya/blob/80faf06d/extras/rya.prospector/src/main/groovy/mvm/rya/prospector/domain/IntermediateProspect.groovy
----------------------------------------------------------------------
diff --git a/extras/rya.prospector/src/main/groovy/mvm/rya/prospector/domain/IntermediateProspect.groovy b/extras/rya.prospector/src/main/groovy/mvm/rya/prospector/domain/IntermediateProspect.groovy
index f80ac93..fadf6e8 100644
--- a/extras/rya.prospector/src/main/groovy/mvm/rya/prospector/domain/IntermediateProspect.groovy
+++ b/extras/rya.prospector/src/main/groovy/mvm/rya/prospector/domain/IntermediateProspect.groovy
@@ -1,3 +1,22 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ * 
+ *   http://www.apache.org/licenses/LICENSE-2.0
+ * 
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied.  See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+
 package mvm.rya.prospector.domain
 
 import org.apache.hadoop.io.WritableComparable

http://git-wip-us.apache.org/repos/asf/incubator-rya/blob/80faf06d/extras/rya.prospector/src/main/groovy/mvm/rya/prospector/domain/TripleValueType.java
----------------------------------------------------------------------
diff --git a/extras/rya.prospector/src/main/groovy/mvm/rya/prospector/domain/TripleValueType.java b/extras/rya.prospector/src/main/groovy/mvm/rya/prospector/domain/TripleValueType.java
index c2f0a0e..183b0d2 100644
--- a/extras/rya.prospector/src/main/groovy/mvm/rya/prospector/domain/TripleValueType.java
+++ b/extras/rya.prospector/src/main/groovy/mvm/rya/prospector/domain/TripleValueType.java
@@ -1,6 +1,26 @@
 package mvm.rya.prospector.domain;
 
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ * 
+ *   http://www.apache.org/licenses/LICENSE-2.0
+ * 
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied.  See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+
+
 public enum TripleValueType {
 
     subject, predicate, object, entity, subjectpredicate, predicateobject, subjectobject
-}
\ No newline at end of file
+}

http://git-wip-us.apache.org/repos/asf/incubator-rya/blob/80faf06d/extras/rya.prospector/src/main/groovy/mvm/rya/prospector/mr/Prospector.groovy
----------------------------------------------------------------------
diff --git a/extras/rya.prospector/src/main/groovy/mvm/rya/prospector/mr/Prospector.groovy b/extras/rya.prospector/src/main/groovy/mvm/rya/prospector/mr/Prospector.groovy
index 54ffcf9..6c4a055 100644
--- a/extras/rya.prospector/src/main/groovy/mvm/rya/prospector/mr/Prospector.groovy
+++ b/extras/rya.prospector/src/main/groovy/mvm/rya/prospector/mr/Prospector.groovy
@@ -1,3 +1,22 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ * 
+ *   http://www.apache.org/licenses/LICENSE-2.0
+ * 
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied.  See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+
 package mvm.rya.prospector.mr
 
 import mvm.rya.prospector.utils.ProspectorUtils

http://git-wip-us.apache.org/repos/asf/incubator-rya/blob/80faf06d/extras/rya.prospector/src/main/groovy/mvm/rya/prospector/mr/ProspectorCombiner.groovy
----------------------------------------------------------------------
diff --git a/extras/rya.prospector/src/main/groovy/mvm/rya/prospector/mr/ProspectorCombiner.groovy b/extras/rya.prospector/src/main/groovy/mvm/rya/prospector/mr/ProspectorCombiner.groovy
index 65ec71b..fe1c5b2 100644
--- a/extras/rya.prospector/src/main/groovy/mvm/rya/prospector/mr/ProspectorCombiner.groovy
+++ b/extras/rya.prospector/src/main/groovy/mvm/rya/prospector/mr/ProspectorCombiner.groovy
@@ -1,3 +1,22 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ * 
+ *   http://www.apache.org/licenses/LICENSE-2.0
+ * 
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied.  See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+
 package mvm.rya.prospector.mr
 
 import mvm.rya.prospector.plans.IndexWorkPlan
@@ -39,4 +58,4 @@ class ProspectorCombiner extends Reducer {
             }
         }
     }
-}
\ No newline at end of file
+}

http://git-wip-us.apache.org/repos/asf/incubator-rya/blob/80faf06d/extras/rya.prospector/src/main/groovy/mvm/rya/prospector/mr/ProspectorMapper.groovy
----------------------------------------------------------------------
diff --git a/extras/rya.prospector/src/main/groovy/mvm/rya/prospector/mr/ProspectorMapper.groovy b/extras/rya.prospector/src/main/groovy/mvm/rya/prospector/mr/ProspectorMapper.groovy
index 1eef226..18fa32b 100644
--- a/extras/rya.prospector/src/main/groovy/mvm/rya/prospector/mr/ProspectorMapper.groovy
+++ b/extras/rya.prospector/src/main/groovy/mvm/rya/prospector/mr/ProspectorMapper.groovy
@@ -1,3 +1,22 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ * 
+ *   http://www.apache.org/licenses/LICENSE-2.0
+ * 
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied.  See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+
 package mvm.rya.prospector.mr
 
 import mvm.rya.accumulo.AccumuloRdfConfiguration
@@ -53,4 +72,4 @@ class ProspectorMapper extends Mapper {
             }
         }
     }
-}
\ No newline at end of file
+}

http://git-wip-us.apache.org/repos/asf/incubator-rya/blob/80faf06d/extras/rya.prospector/src/main/groovy/mvm/rya/prospector/mr/ProspectorReducer.groovy
----------------------------------------------------------------------
diff --git a/extras/rya.prospector/src/main/groovy/mvm/rya/prospector/mr/ProspectorReducer.groovy b/extras/rya.prospector/src/main/groovy/mvm/rya/prospector/mr/ProspectorReducer.groovy
index 8beabcf..8b12aae 100644
--- a/extras/rya.prospector/src/main/groovy/mvm/rya/prospector/mr/ProspectorReducer.groovy
+++ b/extras/rya.prospector/src/main/groovy/mvm/rya/prospector/mr/ProspectorReducer.groovy
@@ -1,3 +1,22 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ * 
+ *   http://www.apache.org/licenses/LICENSE-2.0
+ * 
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied.  See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+
 package mvm.rya.prospector.mr
 
 import mvm.rya.prospector.plans.IndexWorkPlan
@@ -35,4 +54,4 @@ class ProspectorReducer extends Reducer {
             plan.reduce(prospect, values, truncatedDate, context)
         }
     }
-}
\ No newline at end of file
+}

http://git-wip-us.apache.org/repos/asf/incubator-rya/blob/80faf06d/extras/rya.prospector/src/main/groovy/mvm/rya/prospector/plans/IndexWorkPlan.groovy
----------------------------------------------------------------------
diff --git a/extras/rya.prospector/src/main/groovy/mvm/rya/prospector/plans/IndexWorkPlan.groovy b/extras/rya.prospector/src/main/groovy/mvm/rya/prospector/plans/IndexWorkPlan.groovy
index 0e53f6d..d9ba719 100644
--- a/extras/rya.prospector/src/main/groovy/mvm/rya/prospector/plans/IndexWorkPlan.groovy
+++ b/extras/rya.prospector/src/main/groovy/mvm/rya/prospector/plans/IndexWorkPlan.groovy
@@ -1,3 +1,22 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ * 
+ *   http://www.apache.org/licenses/LICENSE-2.0
+ * 
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied.  See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+
 package mvm.rya.prospector.plans
 
 import mvm.rya.api.domain.RyaStatement
@@ -29,4 +48,4 @@ public interface IndexWorkPlan {
 	
     public List<IndexEntry> query(def connector, String tableName, List<Long> prospectTimes, String type, String index, String dataType, String[] auths)
 
-}
\ No newline at end of file
+}

http://git-wip-us.apache.org/repos/asf/incubator-rya/blob/80faf06d/extras/rya.prospector/src/main/groovy/mvm/rya/prospector/plans/IndexWorkPlanManager.groovy
----------------------------------------------------------------------
diff --git a/extras/rya.prospector/src/main/groovy/mvm/rya/prospector/plans/IndexWorkPlanManager.groovy b/extras/rya.prospector/src/main/groovy/mvm/rya/prospector/plans/IndexWorkPlanManager.groovy
index 915b66a..555f84a 100644
--- a/extras/rya.prospector/src/main/groovy/mvm/rya/prospector/plans/IndexWorkPlanManager.groovy
+++ b/extras/rya.prospector/src/main/groovy/mvm/rya/prospector/plans/IndexWorkPlanManager.groovy
@@ -1,3 +1,22 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ * 
+ *   http://www.apache.org/licenses/LICENSE-2.0
+ * 
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied.  See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+
 package mvm.rya.prospector.plans
 
 /**
@@ -7,4 +26,4 @@ package mvm.rya.prospector.plans
 public interface IndexWorkPlanManager {
 
     public Collection<IndexWorkPlan> getPlans();
-}
\ No newline at end of file
+}

http://git-wip-us.apache.org/repos/asf/incubator-rya/blob/80faf06d/extras/rya.prospector/src/main/groovy/mvm/rya/prospector/plans/impl/CountPlan.groovy
----------------------------------------------------------------------
diff --git a/extras/rya.prospector/src/main/groovy/mvm/rya/prospector/plans/impl/CountPlan.groovy b/extras/rya.prospector/src/main/groovy/mvm/rya/prospector/plans/impl/CountPlan.groovy
index ae64e3b..091c295 100644
--- a/extras/rya.prospector/src/main/groovy/mvm/rya/prospector/plans/impl/CountPlan.groovy
+++ b/extras/rya.prospector/src/main/groovy/mvm/rya/prospector/plans/impl/CountPlan.groovy
@@ -1,3 +1,22 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ * 
+ *   http://www.apache.org/licenses/LICENSE-2.0
+ * 
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied.  See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+
 package mvm.rya.prospector.plans.impl
 
 import mvm.rya.api.domain.RyaStatement

http://git-wip-us.apache.org/repos/asf/incubator-rya/blob/80faf06d/extras/rya.prospector/src/main/groovy/mvm/rya/prospector/plans/impl/ServicesBackedIndexWorkPlanManager.groovy
----------------------------------------------------------------------
diff --git a/extras/rya.prospector/src/main/groovy/mvm/rya/prospector/plans/impl/ServicesBackedIndexWorkPlanManager.groovy b/extras/rya.prospector/src/main/groovy/mvm/rya/prospector/plans/impl/ServicesBackedIndexWorkPlanManager.groovy
index 931f6a7..6f3f7a6 100644
--- a/extras/rya.prospector/src/main/groovy/mvm/rya/prospector/plans/impl/ServicesBackedIndexWorkPlanManager.groovy
+++ b/extras/rya.prospector/src/main/groovy/mvm/rya/prospector/plans/impl/ServicesBackedIndexWorkPlanManager.groovy
@@ -1,3 +1,22 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ * 
+ *   http://www.apache.org/licenses/LICENSE-2.0
+ * 
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied.  See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+
 package mvm.rya.prospector.plans.impl
 
 import mvm.rya.prospector.plans.IndexWorkPlan

http://git-wip-us.apache.org/repos/asf/incubator-rya/blob/80faf06d/extras/rya.prospector/src/main/groovy/mvm/rya/prospector/service/ProspectorService.groovy
----------------------------------------------------------------------
diff --git a/extras/rya.prospector/src/main/groovy/mvm/rya/prospector/service/ProspectorService.groovy b/extras/rya.prospector/src/main/groovy/mvm/rya/prospector/service/ProspectorService.groovy
index eb3a975..bb8ceb4 100644
--- a/extras/rya.prospector/src/main/groovy/mvm/rya/prospector/service/ProspectorService.groovy
+++ b/extras/rya.prospector/src/main/groovy/mvm/rya/prospector/service/ProspectorService.groovy
@@ -1,3 +1,22 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ * 
+ *   http://www.apache.org/licenses/LICENSE-2.0
+ * 
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied.  See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+
 package mvm.rya.prospector.service
 
 import mvm.rya.prospector.utils.ProspectorUtils

http://git-wip-us.apache.org/repos/asf/incubator-rya/blob/80faf06d/extras/rya.prospector/src/main/groovy/mvm/rya/prospector/service/ProspectorServiceEvalStatsDAO.groovy
----------------------------------------------------------------------
diff --git a/extras/rya.prospector/src/main/groovy/mvm/rya/prospector/service/ProspectorServiceEvalStatsDAO.groovy b/extras/rya.prospector/src/main/groovy/mvm/rya/prospector/service/ProspectorServiceEvalStatsDAO.groovy
index 4e9c3d1..3e8aba1 100644
--- a/extras/rya.prospector/src/main/groovy/mvm/rya/prospector/service/ProspectorServiceEvalStatsDAO.groovy
+++ b/extras/rya.prospector/src/main/groovy/mvm/rya/prospector/service/ProspectorServiceEvalStatsDAO.groovy
@@ -1,3 +1,22 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ * 
+ *   http://www.apache.org/licenses/LICENSE-2.0
+ * 
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied.  See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+
 package mvm.rya.prospector.service
 
 import mvm.rya.api.RdfCloudTripleStoreConfiguration

http://git-wip-us.apache.org/repos/asf/incubator-rya/blob/80faf06d/extras/rya.prospector/src/main/groovy/mvm/rya/prospector/utils/CustomEntry.groovy
----------------------------------------------------------------------
diff --git a/extras/rya.prospector/src/main/groovy/mvm/rya/prospector/utils/CustomEntry.groovy b/extras/rya.prospector/src/main/groovy/mvm/rya/prospector/utils/CustomEntry.groovy
index 4d7ae1d..c550b92 100644
--- a/extras/rya.prospector/src/main/groovy/mvm/rya/prospector/utils/CustomEntry.groovy
+++ b/extras/rya.prospector/src/main/groovy/mvm/rya/prospector/utils/CustomEntry.groovy
@@ -1,3 +1,22 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ * 
+ *   http://www.apache.org/licenses/LICENSE-2.0
+ * 
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied.  See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+
 package mvm.rya.prospector.utils
 
 /**

http://git-wip-us.apache.org/repos/asf/incubator-rya/blob/80faf06d/extras/rya.prospector/src/main/groovy/mvm/rya/prospector/utils/ProspectorConstants.groovy
----------------------------------------------------------------------
diff --git a/extras/rya.prospector/src/main/groovy/mvm/rya/prospector/utils/ProspectorConstants.groovy b/extras/rya.prospector/src/main/groovy/mvm/rya/prospector/utils/ProspectorConstants.groovy
index edca753..197e735 100644
--- a/extras/rya.prospector/src/main/groovy/mvm/rya/prospector/utils/ProspectorConstants.groovy
+++ b/extras/rya.prospector/src/main/groovy/mvm/rya/prospector/utils/ProspectorConstants.groovy
@@ -1,3 +1,22 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ * 
+ *   http://www.apache.org/licenses/LICENSE-2.0
+ * 
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied.  See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+
 package mvm.rya.prospector.utils
 
 /**

http://git-wip-us.apache.org/repos/asf/incubator-rya/blob/80faf06d/extras/rya.prospector/src/main/groovy/mvm/rya/prospector/utils/ProspectorUtils.groovy
----------------------------------------------------------------------
diff --git a/extras/rya.prospector/src/main/groovy/mvm/rya/prospector/utils/ProspectorUtils.groovy b/extras/rya.prospector/src/main/groovy/mvm/rya/prospector/utils/ProspectorUtils.groovy
index ba90fa2..640f17e 100644
--- a/extras/rya.prospector/src/main/groovy/mvm/rya/prospector/utils/ProspectorUtils.groovy
+++ b/extras/rya.prospector/src/main/groovy/mvm/rya/prospector/utils/ProspectorUtils.groovy
@@ -1,3 +1,22 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ * 
+ *   http://www.apache.org/licenses/LICENSE-2.0
+ * 
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied.  See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+
 package mvm.rya.prospector.utils
 
 import org.apache.accumulo.core.client.Connector

http://git-wip-us.apache.org/repos/asf/incubator-rya/blob/80faf06d/extras/rya.prospector/src/main/java/mvm/rya/joinselect/AccumuloSelectivityEvalDAO.java
----------------------------------------------------------------------
diff --git a/extras/rya.prospector/src/main/java/mvm/rya/joinselect/AccumuloSelectivityEvalDAO.java b/extras/rya.prospector/src/main/java/mvm/rya/joinselect/AccumuloSelectivityEvalDAO.java
index 2551625..0ed8026 100644
--- a/extras/rya.prospector/src/main/java/mvm/rya/joinselect/AccumuloSelectivityEvalDAO.java
+++ b/extras/rya.prospector/src/main/java/mvm/rya/joinselect/AccumuloSelectivityEvalDAO.java
@@ -1,25 +1,26 @@
 package mvm.rya.joinselect;
 
 /*
- * #%L
- * mvm.rya.rya.prospector
- * %%
- * Copyright (C) 2014 - 2015 Rya
- * %%
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * You may obtain a copy of the License at
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
  * 
- *      http://www.apache.org/licenses/LICENSE-2.0
+ *   http://www.apache.org/licenses/LICENSE-2.0
  * 
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- * #L%
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied.  See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
  */
 
+
+
 import static com.google.common.base.Preconditions.checkNotNull;
 
 import java.util.ArrayList;

http://git-wip-us.apache.org/repos/asf/incubator-rya/blob/80faf06d/extras/rya.prospector/src/main/java/mvm/rya/joinselect/CardinalityCalcUtil.java
----------------------------------------------------------------------
diff --git a/extras/rya.prospector/src/main/java/mvm/rya/joinselect/CardinalityCalcUtil.java b/extras/rya.prospector/src/main/java/mvm/rya/joinselect/CardinalityCalcUtil.java
index f49ba13..a54a5af 100644
--- a/extras/rya.prospector/src/main/java/mvm/rya/joinselect/CardinalityCalcUtil.java
+++ b/extras/rya.prospector/src/main/java/mvm/rya/joinselect/CardinalityCalcUtil.java
@@ -1,25 +1,26 @@
 package mvm.rya.joinselect;
 
 /*
- * #%L
- * mvm.rya.rya.prospector
- * %%
- * Copyright (C) 2014 - 2015 Rya
- * %%
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * You may obtain a copy of the License at
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
  * 
- *      http://www.apache.org/licenses/LICENSE-2.0
+ *   http://www.apache.org/licenses/LICENSE-2.0
  * 
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- * #L%
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied.  See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
  */
 
+
+
 import java.util.ArrayList;
 import java.util.List;
 

http://git-wip-us.apache.org/repos/asf/incubator-rya/blob/80faf06d/extras/rya.prospector/src/main/java/mvm/rya/joinselect/mr/FullTableSize.java
----------------------------------------------------------------------
diff --git a/extras/rya.prospector/src/main/java/mvm/rya/joinselect/mr/FullTableSize.java b/extras/rya.prospector/src/main/java/mvm/rya/joinselect/mr/FullTableSize.java
index c84130b..5d3d643 100644
--- a/extras/rya.prospector/src/main/java/mvm/rya/joinselect/mr/FullTableSize.java
+++ b/extras/rya.prospector/src/main/java/mvm/rya/joinselect/mr/FullTableSize.java
@@ -1,25 +1,26 @@
 package mvm.rya.joinselect.mr;
 
 /*
- * #%L
- * mvm.rya.rya.prospector
- * %%
- * Copyright (C) 2014 - 2015 Rya
- * %%
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * You may obtain a copy of the License at
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
  * 
- *      http://www.apache.org/licenses/LICENSE-2.0
+ *   http://www.apache.org/licenses/LICENSE-2.0
  * 
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- * #L%
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied.  See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
  */
 
+
+
 import static mvm.rya.joinselect.mr.utils.JoinSelectConstants.AUTHS;
 import static mvm.rya.joinselect.mr.utils.JoinSelectConstants.SELECTIVITY_TABLE;
 import static mvm.rya.joinselect.mr.utils.JoinSelectConstants.SPO_TABLE;

http://git-wip-us.apache.org/repos/asf/incubator-rya/blob/80faf06d/extras/rya.prospector/src/main/java/mvm/rya/joinselect/mr/JoinSelectAggregate.java
----------------------------------------------------------------------
diff --git a/extras/rya.prospector/src/main/java/mvm/rya/joinselect/mr/JoinSelectAggregate.java b/extras/rya.prospector/src/main/java/mvm/rya/joinselect/mr/JoinSelectAggregate.java
index 165b18d..bb227f3 100644
--- a/extras/rya.prospector/src/main/java/mvm/rya/joinselect/mr/JoinSelectAggregate.java
+++ b/extras/rya.prospector/src/main/java/mvm/rya/joinselect/mr/JoinSelectAggregate.java
@@ -1,25 +1,26 @@
 package mvm.rya.joinselect.mr;
 
 /*
- * #%L
- * mvm.rya.rya.prospector
- * %%
- * Copyright (C) 2014 - 2015 Rya
- * %%
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * You may obtain a copy of the License at
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
  * 
- *      http://www.apache.org/licenses/LICENSE-2.0
+ *   http://www.apache.org/licenses/LICENSE-2.0
  * 
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- * #L%
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied.  See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
  */
 
+
+
 import static mvm.rya.joinselect.mr.utils.JoinSelectConstants.AUTHS;
 import static mvm.rya.joinselect.mr.utils.JoinSelectConstants.OUTPUTPATH;
 import static mvm.rya.joinselect.mr.utils.JoinSelectConstants.PROSPECTS_OUTPUTPATH;
@@ -39,6 +40,7 @@ import org.apache.hadoop.conf.Configured;
 import org.apache.hadoop.io.WritableComparable;
 import org.apache.hadoop.io.WritableComparator;
 import org.apache.hadoop.mapreduce.Job;
+import org.apache.hadoop.mapreduce.MRJobConfig;
 import org.apache.hadoop.mapreduce.Mapper;
 import org.apache.hadoop.mapreduce.Partitioner;
 import org.apache.hadoop.mapreduce.Reducer;
@@ -250,10 +252,10 @@ public class JoinSelectAggregate extends Configured implements Tool {
 
     assert inPath1 != null && inPath2 != null && outPath != null;
 
-    Job job = new Job(getConf(), this.getClass().getSimpleName() + "_" + System.currentTimeMillis());
+    Job job = new Job(conf, this.getClass().getSimpleName() + "_" + System.currentTimeMillis());
     job.setJarByClass(this.getClass());
-    job.setUserClassesTakesPrecedence(true);
-
+    conf.setBoolean(MRJobConfig.MAPREDUCE_JOB_USER_CLASSPATH_FIRST, true);
+    
     JoinSelectStatsUtil.initJoinMRJob(job, inPath1, inPath2, JoinSelectAggregateMapper.class, outPath, auths);
 
     job.setSortComparatorClass(JoinSelectSortComparator.class);
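
The functional change in this hunk, beyond the license header, is the Hadoop 2 migration: the non-standard job.setUserClassesTakesPrecedence(true) call is replaced by the stock mapreduce.job.user.classpath.first property (MRJobConfig.MAPREDUCE_JOB_USER_CLASSPATH_FIRST), and the Job is built from the local conf. The same substitution recurs in JoinSelectProspectOutput and JoinSelectSpoTableOutput below. One caveat worth flagging: the Hadoop 2 Job constructor copies the Configuration it receives, so a setBoolean issued after new Job(conf, ...) may not be visible to the job. A minimal sketch of the ordering this implies, using the non-deprecated factory method (the helper name here is hypothetical):

    import java.io.IOException;

    import org.apache.hadoop.conf.Configuration;
    import org.apache.hadoop.mapreduce.Job;
    import org.apache.hadoop.mapreduce.MRJobConfig;

    public class JobSetupSketch {
        public static Job newClasspathFirstJob(Configuration conf, String name) throws IOException {
            // Set the flag before the Job snapshots the Configuration, so user
            // classes shadow Hadoop's bundled copies at task runtime.
            conf.setBoolean(MRJobConfig.MAPREDUCE_JOB_USER_CLASSPATH_FIRST, true);
            // Job.getInstance replaces the deprecated new Job(conf, name) constructor.
            Job job = Job.getInstance(conf, name);
            job.setJarByClass(JobSetupSketch.class);
            return job;
        }
    }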

http://git-wip-us.apache.org/repos/asf/incubator-rya/blob/80faf06d/extras/rya.prospector/src/main/java/mvm/rya/joinselect/mr/JoinSelectDriver.java
----------------------------------------------------------------------
diff --git a/extras/rya.prospector/src/main/java/mvm/rya/joinselect/mr/JoinSelectDriver.java b/extras/rya.prospector/src/main/java/mvm/rya/joinselect/mr/JoinSelectDriver.java
index c97954d..e6a89ce 100644
--- a/extras/rya.prospector/src/main/java/mvm/rya/joinselect/mr/JoinSelectDriver.java
+++ b/extras/rya.prospector/src/main/java/mvm/rya/joinselect/mr/JoinSelectDriver.java
@@ -1,25 +1,26 @@
 package mvm.rya.joinselect.mr;
 
 /*
- * #%L
- * mvm.rya.rya.prospector
- * %%
- * Copyright (C) 2014 - 2015 Rya
- * %%
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * You may obtain a copy of the License at
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
  * 
- *      http://www.apache.org/licenses/LICENSE-2.0
+ *   http://www.apache.org/licenses/LICENSE-2.0
  * 
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- * #L%
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied.  See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
  */
 
+
+
 import org.apache.hadoop.conf.Configuration;
 import org.apache.hadoop.conf.Configured;
 import org.apache.hadoop.util.Tool;

http://git-wip-us.apache.org/repos/asf/incubator-rya/blob/80faf06d/extras/rya.prospector/src/main/java/mvm/rya/joinselect/mr/JoinSelectProspectOutput.java
----------------------------------------------------------------------
diff --git a/extras/rya.prospector/src/main/java/mvm/rya/joinselect/mr/JoinSelectProspectOutput.java b/extras/rya.prospector/src/main/java/mvm/rya/joinselect/mr/JoinSelectProspectOutput.java
index 23838d3..a12793d 100644
--- a/extras/rya.prospector/src/main/java/mvm/rya/joinselect/mr/JoinSelectProspectOutput.java
+++ b/extras/rya.prospector/src/main/java/mvm/rya/joinselect/mr/JoinSelectProspectOutput.java
@@ -1,25 +1,26 @@
 package mvm.rya.joinselect.mr;
 
 /*
- * #%L
- * mvm.rya.rya.prospector
- * %%
- * Copyright (C) 2014 - 2015 Rya
- * %%
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * You may obtain a copy of the License at
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
  * 
- *      http://www.apache.org/licenses/LICENSE-2.0
+ *   http://www.apache.org/licenses/LICENSE-2.0
  * 
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- * #L%
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied.  See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
  */
 
+
+
 import static mvm.rya.joinselect.mr.utils.JoinSelectConstants.AUTHS;
 import static mvm.rya.joinselect.mr.utils.JoinSelectConstants.PROSPECTS_OUTPUTPATH;
 import static mvm.rya.joinselect.mr.utils.JoinSelectConstants.PROSPECTS_TABLE;
@@ -41,6 +42,7 @@ import org.apache.hadoop.io.IntWritable;
 import org.apache.hadoop.io.LongWritable;
 import org.apache.hadoop.io.Text;
 import org.apache.hadoop.mapreduce.Job;
+import org.apache.hadoop.mapreduce.MRJobConfig;
 import org.apache.hadoop.mapreduce.Mapper;
 import org.apache.hadoop.util.Tool;
 
@@ -104,9 +106,9 @@ public class JoinSelectProspectOutput extends Configured implements Tool {
 
     assert inTable != null && outPath != null;
 
-    Job job = new Job(getConf(), this.getClass().getSimpleName() + "_" + System.currentTimeMillis());
+    Job job = new Job(conf, this.getClass().getSimpleName() + "_" + System.currentTimeMillis());
     job.setJarByClass(this.getClass());
-    job.setUserClassesTakesPrecedence(true);
+    conf.setBoolean(MRJobConfig.MAPREDUCE_JOB_USER_CLASSPATH_FIRST, true);
 
     JoinSelectStatsUtil.initTabToSeqFileJob(job, inTable, outPath, auths);
     job.setMapperClass(CardinalityMapper.class);

http://git-wip-us.apache.org/repos/asf/incubator-rya/blob/80faf06d/extras/rya.prospector/src/main/java/mvm/rya/joinselect/mr/JoinSelectSpoTableOutput.java
----------------------------------------------------------------------
diff --git a/extras/rya.prospector/src/main/java/mvm/rya/joinselect/mr/JoinSelectSpoTableOutput.java b/extras/rya.prospector/src/main/java/mvm/rya/joinselect/mr/JoinSelectSpoTableOutput.java
index f968572..f7b1672 100644
--- a/extras/rya.prospector/src/main/java/mvm/rya/joinselect/mr/JoinSelectSpoTableOutput.java
+++ b/extras/rya.prospector/src/main/java/mvm/rya/joinselect/mr/JoinSelectSpoTableOutput.java
@@ -1,25 +1,26 @@
 package mvm.rya.joinselect.mr;
 
 /*
- * #%L
- * mvm.rya.rya.prospector
- * %%
- * Copyright (C) 2014 - 2015 Rya
- * %%
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * You may obtain a copy of the License at
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
  * 
- *      http://www.apache.org/licenses/LICENSE-2.0
+ *   http://www.apache.org/licenses/LICENSE-2.0
  * 
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- * #L%
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied.  See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
  */
 
+
+
 import static mvm.rya.joinselect.mr.utils.JoinSelectConstants.AUTHS;
 import static mvm.rya.joinselect.mr.utils.JoinSelectConstants.SPO_OUTPUTPATH;
 import static mvm.rya.joinselect.mr.utils.JoinSelectConstants.SPO_TABLE;
@@ -44,6 +45,7 @@ import org.apache.hadoop.conf.Configured;
 import org.apache.hadoop.io.IntWritable;
 import org.apache.hadoop.io.Text;
 import org.apache.hadoop.mapreduce.Job;
+import org.apache.hadoop.mapreduce.MRJobConfig;
 import org.apache.hadoop.mapreduce.Mapper;
 import org.apache.hadoop.util.Tool;
 
@@ -108,9 +110,9 @@ public class JoinSelectSpoTableOutput extends Configured implements Tool {
 
     assert inTable != null && outPath != null;
 
-    Job job = new Job(getConf(), this.getClass().getSimpleName() + "_" + System.currentTimeMillis());
+    Job job = new Job(conf, this.getClass().getSimpleName() + "_" + System.currentTimeMillis());
     job.setJarByClass(this.getClass());
-    job.setUserClassesTakesPrecedence(true);
+    conf.setBoolean(MRJobConfig.MAPREDUCE_JOB_USER_CLASSPATH_FIRST, true);
 
     JoinSelectStatsUtil.initTabToSeqFileJob(job, inTable, outPath, auths);
     job.setMapperClass(JoinSelectMapper.class);

http://git-wip-us.apache.org/repos/asf/incubator-rya/blob/80faf06d/extras/rya.prospector/src/main/java/mvm/rya/joinselect/mr/JoinSelectStatisticsSum.java
----------------------------------------------------------------------
diff --git a/extras/rya.prospector/src/main/java/mvm/rya/joinselect/mr/JoinSelectStatisticsSum.java b/extras/rya.prospector/src/main/java/mvm/rya/joinselect/mr/JoinSelectStatisticsSum.java
index c972b83..ef271ff 100644
--- a/extras/rya.prospector/src/main/java/mvm/rya/joinselect/mr/JoinSelectStatisticsSum.java
+++ b/extras/rya.prospector/src/main/java/mvm/rya/joinselect/mr/JoinSelectStatisticsSum.java
@@ -1,25 +1,26 @@
 package mvm.rya.joinselect.mr;
 
 /*
- * #%L
- * mvm.rya.rya.prospector
- * %%
- * Copyright (C) 2014 - 2015 Rya
- * %%
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * You may obtain a copy of the License at
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
  * 
- *      http://www.apache.org/licenses/LICENSE-2.0
+ *   http://www.apache.org/licenses/LICENSE-2.0
  * 
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- * #L%
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied.  See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
  */
 
+
+
 import static mvm.rya.joinselect.mr.utils.JoinSelectConstants.AUTHS;
 import static mvm.rya.joinselect.mr.utils.JoinSelectConstants.INPUTPATH;
 import static mvm.rya.joinselect.mr.utils.JoinSelectConstants.SELECTIVITY_TABLE;

http://git-wip-us.apache.org/repos/asf/incubator-rya/blob/80faf06d/extras/rya.prospector/src/main/java/mvm/rya/joinselect/mr/utils/CardList.java
----------------------------------------------------------------------
diff --git a/extras/rya.prospector/src/main/java/mvm/rya/joinselect/mr/utils/CardList.java b/extras/rya.prospector/src/main/java/mvm/rya/joinselect/mr/utils/CardList.java
index a0aa967..b8fd274 100644
--- a/extras/rya.prospector/src/main/java/mvm/rya/joinselect/mr/utils/CardList.java
+++ b/extras/rya.prospector/src/main/java/mvm/rya/joinselect/mr/utils/CardList.java
@@ -1,25 +1,26 @@
 package mvm.rya.joinselect.mr.utils;
 
 /*
- * #%L
- * mvm.rya.rya.prospector
- * %%
- * Copyright (C) 2014 - 2015 Rya
- * %%
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * You may obtain a copy of the License at
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
  * 
- *      http://www.apache.org/licenses/LICENSE-2.0
+ *   http://www.apache.org/licenses/LICENSE-2.0
  * 
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- * #L%
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied.  See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
  */
 
+
+
 import java.io.DataInput;
 import java.io.DataOutput;
 import java.io.IOException;

http://git-wip-us.apache.org/repos/asf/incubator-rya/blob/80faf06d/extras/rya.prospector/src/main/java/mvm/rya/joinselect/mr/utils/CardinalityType.java
----------------------------------------------------------------------
diff --git a/extras/rya.prospector/src/main/java/mvm/rya/joinselect/mr/utils/CardinalityType.java b/extras/rya.prospector/src/main/java/mvm/rya/joinselect/mr/utils/CardinalityType.java
index fd62b52..924f596 100644
--- a/extras/rya.prospector/src/main/java/mvm/rya/joinselect/mr/utils/CardinalityType.java
+++ b/extras/rya.prospector/src/main/java/mvm/rya/joinselect/mr/utils/CardinalityType.java
@@ -1,25 +1,26 @@
 package mvm.rya.joinselect.mr.utils;
 
 /*
- * #%L
- * mvm.rya.rya.prospector
- * %%
- * Copyright (C) 2014 - 2015 Rya
- * %%
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * You may obtain a copy of the License at
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
  * 
- *      http://www.apache.org/licenses/LICENSE-2.0
+ *   http://www.apache.org/licenses/LICENSE-2.0
  * 
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- * #L%
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied.  See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
  */
 
+
+
 import java.io.DataInput;
 import java.io.DataOutput;
 import java.io.IOException;

http://git-wip-us.apache.org/repos/asf/incubator-rya/blob/80faf06d/extras/rya.prospector/src/main/java/mvm/rya/joinselect/mr/utils/CompositeType.java
----------------------------------------------------------------------
diff --git a/extras/rya.prospector/src/main/java/mvm/rya/joinselect/mr/utils/CompositeType.java b/extras/rya.prospector/src/main/java/mvm/rya/joinselect/mr/utils/CompositeType.java
index 8a40e6a..57e6ee2 100644
--- a/extras/rya.prospector/src/main/java/mvm/rya/joinselect/mr/utils/CompositeType.java
+++ b/extras/rya.prospector/src/main/java/mvm/rya/joinselect/mr/utils/CompositeType.java
@@ -1,25 +1,26 @@
 package mvm.rya.joinselect.mr.utils;
 
 /*
- * #%L
- * mvm.rya.rya.prospector
- * %%
- * Copyright (C) 2014 - 2015 Rya
- * %%
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * You may obtain a copy of the License at
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
  * 
- *      http://www.apache.org/licenses/LICENSE-2.0
+ *   http://www.apache.org/licenses/LICENSE-2.0
  * 
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- * #L%
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied.  See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
  */
 
+
+
 import java.io.DataInput;
 import java.io.DataOutput;
 import java.io.IOException;

http://git-wip-us.apache.org/repos/asf/incubator-rya/blob/80faf06d/extras/rya.prospector/src/main/java/mvm/rya/joinselect/mr/utils/JoinSelectConstants.java
----------------------------------------------------------------------
diff --git a/extras/rya.prospector/src/main/java/mvm/rya/joinselect/mr/utils/JoinSelectConstants.java b/extras/rya.prospector/src/main/java/mvm/rya/joinselect/mr/utils/JoinSelectConstants.java
index 6eac9e3..3ec34d0 100644
--- a/extras/rya.prospector/src/main/java/mvm/rya/joinselect/mr/utils/JoinSelectConstants.java
+++ b/extras/rya.prospector/src/main/java/mvm/rya/joinselect/mr/utils/JoinSelectConstants.java
@@ -1,25 +1,26 @@
 package mvm.rya.joinselect.mr.utils;
 
 /*
- * #%L
- * mvm.rya.rya.prospector
- * %%
- * Copyright (C) 2014 - 2015 Rya
- * %%
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * You may obtain a copy of the License at
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
  * 
- *      http://www.apache.org/licenses/LICENSE-2.0
+ *   http://www.apache.org/licenses/LICENSE-2.0
  * 
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- * #L%
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied.  See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
  */
 
+
+
 public class JoinSelectConstants {
 
   public static final String COUNT = "count";

http://git-wip-us.apache.org/repos/asf/incubator-rya/blob/80faf06d/extras/rya.prospector/src/main/java/mvm/rya/joinselect/mr/utils/JoinSelectStatsUtil.java
----------------------------------------------------------------------
diff --git a/extras/rya.prospector/src/main/java/mvm/rya/joinselect/mr/utils/JoinSelectStatsUtil.java b/extras/rya.prospector/src/main/java/mvm/rya/joinselect/mr/utils/JoinSelectStatsUtil.java
index 59e7611..cf8db40 100644
--- a/extras/rya.prospector/src/main/java/mvm/rya/joinselect/mr/utils/JoinSelectStatsUtil.java
+++ b/extras/rya.prospector/src/main/java/mvm/rya/joinselect/mr/utils/JoinSelectStatsUtil.java
@@ -1,25 +1,26 @@
 package mvm.rya.joinselect.mr.utils;
 
 /*
- * #%L
- * mvm.rya.rya.prospector
- * %%
- * Copyright (C) 2014 - 2015 Rya
- * %%
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * You may obtain a copy of the License at
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
  * 
- *      http://www.apache.org/licenses/LICENSE-2.0
+ *   http://www.apache.org/licenses/LICENSE-2.0
  * 
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- * #L%
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied.  See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
  */
 
+
+
 import static mvm.rya.accumulo.AccumuloRdfConstants.EMPTY_CV;
 import static mvm.rya.accumulo.AccumuloRdfConstants.EMPTY_VALUE;
 import static mvm.rya.api.RdfCloudTripleStoreConstants.EMPTY_TEXT;

http://git-wip-us.apache.org/repos/asf/incubator-rya/blob/80faf06d/extras/rya.prospector/src/main/java/mvm/rya/joinselect/mr/utils/TripleCard.java
----------------------------------------------------------------------
diff --git a/extras/rya.prospector/src/main/java/mvm/rya/joinselect/mr/utils/TripleCard.java b/extras/rya.prospector/src/main/java/mvm/rya/joinselect/mr/utils/TripleCard.java
index 3843600..467f754 100644
--- a/extras/rya.prospector/src/main/java/mvm/rya/joinselect/mr/utils/TripleCard.java
+++ b/extras/rya.prospector/src/main/java/mvm/rya/joinselect/mr/utils/TripleCard.java
@@ -1,25 +1,26 @@
 package mvm.rya.joinselect.mr.utils;
 
 /*
- * #%L
- * mvm.rya.rya.prospector
- * %%
- * Copyright (C) 2014 - 2015 Rya
- * %%
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * You may obtain a copy of the License at
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
  * 
- *      http://www.apache.org/licenses/LICENSE-2.0
+ *   http://www.apache.org/licenses/LICENSE-2.0
  * 
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- * #L%
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied.  See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
  */
 
+
+
 import java.io.DataInput;
 import java.io.DataOutput;
 import java.io.IOException;

http://git-wip-us.apache.org/repos/asf/incubator-rya/blob/80faf06d/extras/rya.prospector/src/main/java/mvm/rya/joinselect/mr/utils/TripleEntry.java
----------------------------------------------------------------------
diff --git a/extras/rya.prospector/src/main/java/mvm/rya/joinselect/mr/utils/TripleEntry.java b/extras/rya.prospector/src/main/java/mvm/rya/joinselect/mr/utils/TripleEntry.java
index 8f3769c..7deb346 100644
--- a/extras/rya.prospector/src/main/java/mvm/rya/joinselect/mr/utils/TripleEntry.java
+++ b/extras/rya.prospector/src/main/java/mvm/rya/joinselect/mr/utils/TripleEntry.java
@@ -1,25 +1,26 @@
 package mvm.rya.joinselect.mr.utils;
 
 /*
- * #%L
- * mvm.rya.rya.prospector
- * %%
- * Copyright (C) 2014 - 2015 Rya
- * %%
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * You may obtain a copy of the License at
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
  * 
- *      http://www.apache.org/licenses/LICENSE-2.0
+ *   http://www.apache.org/licenses/LICENSE-2.0
  * 
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- * #L%
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied.  See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
  */
 
+
+
 import java.io.DataInput;
 import java.io.DataOutput;
 import java.io.IOException;

http://git-wip-us.apache.org/repos/asf/incubator-rya/blob/80faf06d/extras/rya.prospector/src/test/groovy/mvm/rya/prospector/mr/ProspectorTest.groovy
----------------------------------------------------------------------
diff --git a/extras/rya.prospector/src/test/groovy/mvm/rya/prospector/mr/ProspectorTest.groovy b/extras/rya.prospector/src/test/groovy/mvm/rya/prospector/mr/ProspectorTest.groovy
index f3ef96e..766a239 100644
--- a/extras/rya.prospector/src/test/groovy/mvm/rya/prospector/mr/ProspectorTest.groovy
+++ b/extras/rya.prospector/src/test/groovy/mvm/rya/prospector/mr/ProspectorTest.groovy
@@ -1,3 +1,22 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ * 
+ *   http://www.apache.org/licenses/LICENSE-2.0
+ * 
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied.  See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+
 package mvm.rya.prospector.mr
 
 import com.google.common.collect.Iterators
@@ -38,25 +57,6 @@ class ProspectorTest {
 
         Instance mock = new MockInstance("accumulo");
 
-/*
- * #%L
- * mvm.rya.rya.prospector
- * %%
- * Copyright (C) 2014 Rya
- * %%
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * You may obtain a copy of the License at
- * 
- *      http://www.apache.org/licenses/LICENSE-2.0
- * 
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- * #L%
- */
         def connector = mock.getConnector("user", "pass".bytes)
         def intable = "rya_spo"
         def outtable = "rya_prospects"
@@ -175,4 +175,4 @@ class ProspectorTest {
             println it
         }
     }
-}
\ No newline at end of file
+}

http://git-wip-us.apache.org/repos/asf/incubator-rya/blob/80faf06d/extras/rya.prospector/src/test/groovy/mvm/rya/prospector/service/ProspectorServiceEvalStatsDAOTest.groovy
----------------------------------------------------------------------
diff --git a/extras/rya.prospector/src/test/groovy/mvm/rya/prospector/service/ProspectorServiceEvalStatsDAOTest.groovy b/extras/rya.prospector/src/test/groovy/mvm/rya/prospector/service/ProspectorServiceEvalStatsDAOTest.groovy
index 275c6d5..5bbbee8 100644
--- a/extras/rya.prospector/src/test/groovy/mvm/rya/prospector/service/ProspectorServiceEvalStatsDAOTest.groovy
+++ b/extras/rya.prospector/src/test/groovy/mvm/rya/prospector/service/ProspectorServiceEvalStatsDAOTest.groovy
@@ -1,3 +1,22 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ * 
+ *   http://www.apache.org/licenses/LICENSE-2.0
+ * 
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied.  See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+
 package mvm.rya.prospector.service
 
 import com.google.common.collect.Iterators
@@ -33,25 +52,6 @@ class ProspectorServiceEvalStatsDAOTest {
 
         Instance mock = new MockInstance("accumulo");
 
-/*
- * #%L
- * mvm.rya.rya.prospector
- * %%
- * Copyright (C) 2014 Rya
- * %%
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * You may obtain a copy of the License at
- * 
- *      http://www.apache.org/licenses/LICENSE-2.0
- * 
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- * #L%
- */
         def connector = mock.getConnector("user", "pass".bytes)
         def intable = "rya_spo"
         def outtable = "rya_prospects"

http://git-wip-us.apache.org/repos/asf/incubator-rya/blob/80faf06d/extras/rya.prospector/src/test/java/mvm/rya/joinselect/AccumuloSelectivityEvalDAOTest.java
----------------------------------------------------------------------
diff --git a/extras/rya.prospector/src/test/java/mvm/rya/joinselect/AccumuloSelectivityEvalDAOTest.java b/extras/rya.prospector/src/test/java/mvm/rya/joinselect/AccumuloSelectivityEvalDAOTest.java
index 007af96..f40b63f 100644
--- a/extras/rya.prospector/src/test/java/mvm/rya/joinselect/AccumuloSelectivityEvalDAOTest.java
+++ b/extras/rya.prospector/src/test/java/mvm/rya/joinselect/AccumuloSelectivityEvalDAOTest.java
@@ -1,25 +1,26 @@
 package mvm.rya.joinselect;
 
 /*
- * #%L
- * mvm.rya.rya.prospector
- * %%
- * Copyright (C) 2014 - 2015 Rya
- * %%
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * You may obtain a copy of the License at
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
  * 
- *      http://www.apache.org/licenses/LICENSE-2.0
+ *   http://www.apache.org/licenses/LICENSE-2.0
  * 
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- * #L%
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied.  See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
  */
 
+
+
 import java.math.BigDecimal;
 import java.math.MathContext;
 import java.util.ArrayList;



[27/56] [abbrv] incubator-rya git commit: RYA-7 POM and License Clean-up for Apache Move

Posted by mi...@apache.org.
http://git-wip-us.apache.org/repos/asf/incubator-rya/blob/80faf06d/partition/common-query-ext/src/test/java/SampleJTSData.java
----------------------------------------------------------------------
diff --git a/partition/common-query-ext/src/test/java/SampleJTSData.java b/partition/common-query-ext/src/test/java/SampleJTSData.java
deleted file mode 100644
index 41df658..0000000
--- a/partition/common-query-ext/src/test/java/SampleJTSData.java
+++ /dev/null
@@ -1,171 +0,0 @@
-
-import java.util.ArrayList;
-import java.util.Collection;
-import java.util.HashMap;
-import java.util.List;
-import java.util.Map;
-import java.util.Map.Entry;
-
-import org.apache.hadoop.io.Text;
-
-import cloudbase.core.client.BatchWriter;
-import cloudbase.core.client.CBException;
-import cloudbase.core.client.CBSecurityException;
-import cloudbase.core.client.Connector;
-import cloudbase.core.client.Instance;
-import cloudbase.core.client.MultiTableBatchWriter;
-import cloudbase.core.client.TableExistsException;
-import cloudbase.core.client.TableNotFoundException;
-import cloudbase.core.client.mock.MockInstance;
-import cloudbase.core.data.Mutation;
-import cloudbase.core.security.Authorizations;
-
-// For use in testing the Date Filter and Frequency Filter classes
-public class SampleJTSData
-{
-
-  public static int NUM_PARTITIONS = 2;
-
-
-  public static Connector initConnector()
-  {
-    Instance instance = new MockInstance();
-
-    try
-    {
-      Connector connector = instance.getConnector("root", "password".getBytes());
-
-      // set up table
-      connector.tableOperations().create("partition");
-
-      // set up root's auths
-      connector.securityOperations().changeUserAuthorizations("root", new Authorizations("ALPHA,BETA,GAMMA".split(",")));
-
-      return connector;
-    }
-    catch (CBException e)
-    {
-      e.printStackTrace();
-    }
-    catch (CBSecurityException e)
-    {
-      e.printStackTrace();
-    }
-    catch (TableExistsException e)
-    {
-      e.printStackTrace();
-    }
-
-    return null;
-  }
-
-  public static Collection<Map<String, String>> sampleData()
-  {
-    List<Map<String, String>> list = new ArrayList<Map<String, String>>();
-    Map<String, String> item;
-
-    item = new HashMap<String, String>();
-    item.put("geometry-contour",  "SDO_GEOMETRY(2007, 8307, NULL, SDO_ELEM_INFO_ARRAY(1, 1003, 1), SDO_ORDINATE_ARRAY(91.985, -12.108, 94.657, -12.059, 98.486, -11.988, 101.385, -12.296, 102.911, -12.569, 103.93, -12.852, 105.005, -12.531, 106.37, -12.204, 108.446, -11.503, 109.585, -10.88, 110.144, -10.207, 108.609, -9.573, 106.05, -8.535, 104.145, -7.606, 102.191, -7.522, 99.522, -7.691, 97.64, -7.606, 95.482, -7.947, 94.546, -8.084, 92.465, -8.605, 90.554, -9.366, 90.197, -10.436, 89.84, -11.729, 90.554, -12.175, 91.985, -12.108))");
-    item.put("beam-name",    "OPTUS D1 Ku-BAND NATIONAL A & B AUSTRALIA Downlink");
-    list.add(item);
-    //This is Australia
-    //Points like 22S 135E are in the beam
-
-    //This one is like GV
-    item = new HashMap<String, String>();
-    item.put("beam-name",  "AMC 1 Ku-BAND ZONAL NORTH AMERICA Down HV");
-    item.put("geometry-contour",   "SDO_GEOMETRY(2007, 8307, NULL, SDO_ELEM_INFO_ARRAY(1, 1003, 1), SDO_ORDINATE_ARRAY(-70.838, 39.967, -70.506, 40.331, -70.698, 41.679, -71.179, 42.401, -71.578, 42.38, -72.994, 42.924, -74.353, 43.242, -75.715, 43.26, -77.318, 42.981, -78.684, 42.774, -80.05, 42.491, -82.005, 42.517, -83.608, 42.312, -84.977, 41.805, -86.58, 41.525, -88.127, 41.02, -89.731, 40.741, -90.905, 41.582, -92.264, 41.9, -93.861, 42.147, -95.411, 41.341, -96.257, 40.076, -97.222, 38.737, -98.011, 37.17, -98.031, 35.593, -97.691, 34.312, -96.875, 33.25, -97.307, 31.904, -97.916, 30.561, -98.702, 29.295, -99.134, 27.949, -98.14, 26.884, -97.205, 25.821, -95.842, 25.803, -94.42, 25.784, -92.876, 26.064, -91.277, 26.043, -90.085, 26.553, -88.729, 26.01, -87.38, 24.941, -86.031, 23.797, -84.616, 23.253, -83.256, 23.01, -81.887, 23.517, -80.866, 24.555, -80.254, 26.124, -79.642, 27.693, -78.444, 28.728, -77.486, 29.542, -76.463, 30.805, -76.088, 32.377, -75.656, 33.723, -76.051,
  35.305, -75.442, 36.649, -74.426, 37.386, -73.228, 38.422, -72.032, 39.232, -70.838, 39.967))");
-    list.add(item);
-    //This is North America
-    //Points  39°44'21.00"N 104°59'3.00"W (Denver) are in the footprint
-
-    item = new HashMap<String, String>();
-    item.put("beam-name",  "testa");
-    item.put("beam-footprint",   "MULTIPOLYGON (((-169.286 40.431, -164.971 39.992, -155.397 38.482, -146.566 36.233, -136.975 32.539, -128.124 27.742, -121.946 24.548, -116.849 21.339, -112.156 17.479, -109.391 14.206, -107.301 11.715, -105.274 9.477, -103.443 8.229, -102.108 7.7, -99.109 7.428, -96.681 7.745, -93.894 8.843, -89.917 11.687, -85.953 15.017, -81.148 17.266, -78.145 17.986, -75.582 17.887, -68.1 17.987, -64.696 18.493, -61.445 19.38, -60.094 20.288, -59.315 21.564, -57.026 26.51, -55.089 30.962, -53.59 33.657, -52.495 34.691, -50.468 36.204, -46.146 38.672, -41.684 40.663, -37.914 42.055, -33.806 43.082, -27.523 44.149, -21.645 44.96, -16.578 45.406, -13.807 45.771, -14.929 50.108, -16.186 53.919, -17.051 56.0, -18.388 58.824, -19.861 61.567, -21.807 64.188, -23.104 65.742, -25.28 67.904, -27.699 69.823, -28.955 70.728, -32.415 72.768, -34.968 73.998, -38.468 75.309, -48.292 73.025, -56.545 71.12, -64.023 70.474, -72.753 70.357, -78.41 70.827, -80.466 71.093, -82.412 
 71.876, -83.02 72.944, -83.175 74.04, -82.493 74.782, -82.412 75.552, -82.697 76.778, -84.041 78.398, -86.316 81.078, -104.098 80.819, -110.861 80.482, -115.73 80.17, -120.936 79.669, -125.84 79.176, -126.696 79.02, -134.316 77.732, -139.505 76.478, -144.823 74.826, -148.231 73.417, -151.517 71.687, -153.87 70.165, -154.536 69.672, -155.868 68.678, -156.482 68.098, -158.281 66.421, -159.716 64.804, -160.996 63.126, -161.878 61.786, -163.046 59.875, -164.369 57.254, -165.563 54.479, -166.73 51.089, -167.811 47.267, -168.581 44.041, -169.286 40.431)), ((-171.333 23.244, -171.523 18.894, -170.127 18.986, -161.559 18.555, -156.977 18.134, -153.574 18.116, -151.108 18.324, -149.947 18.45, -149.018 18.957, -148.515 19.822, -148.524 20.914, -149.018 21.766, -149.947 22.272, -152.185 23.054, -155.563 23.434, -158.075 23.75, -160.272 24.034, -162.184 24.008, -163.514 23.99, -164.595 23.976, -166.52 23.687, -169.159 23.18, -171.333 23.244)))");
-    list.add(item);
-// this point should be in there...
-    // -164 40 - somewhere near hawaii
-
-    item = new HashMap<String, String>();
-    item.put("beam-name",  "testb");
-    item.put("beam-footprint",   "POLYGON ((-140.153 34.772, -140.341 33.272, -137.024 33.026, -132.723 32.369, -130.947 31.916, -128.664 31.225, -125.293 29.612, -121.813 27.871, -118.699 25.892, -115.589 23.79, -112.593 21.875, -109.136 19.335, -106.939 16.701, -105.006 14.97, -104.195 14.407, -103.049 13.659, -100.363 12.717, -98.063 12.288, -94.299 11.612, -90.825 11.097, -87.997 11.584, -86.815 12.109, -86.163 12.893, -85.014 14.342, -83.804 15.788, -82.104 16.998, -80.413 17.269, -78.005 16.574, -76.181 16.531, -74.65 16.68, -73.552 17.392, -72.957 18.3, -72.917 19.651, -73.526 21.325, -74.913 23.018, -76.036 24.519, -76.159 26.428, -75.741 28.447, -74.257 30.072, -72.771 31.331, -70.517 34.328, -69.638 36.04, -68.624 39.467, -68.015 41.851, -67.607 43.501, -67.548 45.528, -67.586 47.308, -68.601 49.066, -69.868 50.07, -71.621 50.778, -73.285 50.888, -74.9 50.926, -76.994 50.975, -79.332 50.846, -81.066 50.887, -83.842 51.136, -86.569 51.016, -87.95 50.864, -90.831 50.563, -94
 .27 50.644, -98.068 50.733, -102.937 51.032, -106.455 51.484, -109.973 51.936, -114.119 52.402, -117.363 53.031, -119.899 53.276, -123.243 53.539, -127.017 54.427, -130.519 55.431, -133.643 56.058, -134.826 56.279, -135.354 55.029, -135.792 53.864, -136.168965072136 52.8279962761917, -136.169 52.828, -136.169497186166 52.8264970826432, -136.192 52.763, -136.556548517884 51.6453176911637, -136.703232746756 51.2152965828266, -136.781220290925 50.9919311116929, -136.793 50.959, -136.80274055379 50.9259886895048, -136.992 50.295, -137.200898649547 49.5808675274021, -137.202 49.581, -137.200962495599 49.5806459535167, -137.360714473458 49.0197683891632, -137.459 48.677, -137.462166719028 48.6649126473121, -137.471 48.634, -137.515105536699 48.4619710228524, -137.74710368039 47.5528216167105, -137.793718522461 47.3758260237407, -137.854 47.152, -137.977773277882 46.6610808974241, -138.044 46.403, -138.330834102374 45.1674736036557, -138.365 45.019, -138.38180854655 44.9421315900087, -138.
 449801069917 44.6389849661384, -138.485 44.484, -138.497077239724 44.4262941289417, -138.536 44.25, -138.622787032392 43.8206200438395, -138.743816168807 43.232032787661, -138.981390224617 42.0843314825185, -138.989 42.048, -138.990605533614 42.0389442888447, -138.991 42.037, -138.997785044232 41.9994454595406, -139.004 41.969, -139.035645873997 41.7890661698517, -139.061212567475 41.6462082823816, -139.428 39.584, -139.673 38.073, -139.713116752585 37.8001474769807, -139.766 37.457, -139.764942047737 37.4567768906428, -139.898 36.573, -139.897723683259 36.5729429963606, -139.986 35.994, -140.04777653037 35.5462970502163, -140.094 35.232, -140.090797568766 35.2315144621917, -140.153 34.772))");
-    list.add(item);
-
-
-
-    //London is in neither - 51°30'0.00"N   0° 7'0.00"W
-    return list;
-  }
-
-
-  public static void writeDenSerialized(Connector connector, Collection<Map<String, String>> data)
-  {
-    // write sample data
-    MultiTableBatchWriter mtbw = connector.createMultiTableBatchWriter(200000, 10000, 1);
-    try
-    {
-      BatchWriter writer;
-      if (mtbw != null)
-      {
-        writer = mtbw.getBatchWriter("partition");
-      }
-      else
-      {
-        writer = connector.createBatchWriter("partition", 200000, 10000, 1);
-      }
-      int count = 0;
-      Mutation m;
-      for (Map<String, String> object : data)
-      {
-        count++;
-        String id = (count < 10 ? "0" + count : "" + count);
-        Text partition = new Text("" + (count % NUM_PARTITIONS));
-
-        StringBuilder value = new StringBuilder();
-        boolean first = true;
-        for (Entry<String, String> entry : object.entrySet())
-        {
-          if (!first)
-          {
-            value.append("\u0000");
-          }
-          else
-          {
-            first = false;
-          }
-          value.append(entry.getKey());
-          value.append("\uFFFD");
-          value.append(entry.getValue());
-
-          // write the general index mutation
-          m = new Mutation(partition);
-          m.put("index", entry.getValue() + "\u0000" + id, "");
-          writer.addMutation(m);
-
-          // write the specific index mutation
-          m = new Mutation(partition);
-          m.put("index", entry.getKey() + "//" + entry.getValue() + "\u0000" + id, "");
-          writer.addMutation(m);
-        }
-
-        // write the event mutation
-        m = new Mutation(partition);
-        m.put("event", id, value.toString());
-        writer.addMutation(m);
-      }
-      writer.close();
-    }
-    catch (CBException e)
-    {
-      e.printStackTrace();
-    }
-    catch (CBSecurityException e)
-    {
-      e.printStackTrace();
-    }
-    catch (TableNotFoundException e)
-    {
-      e.printStackTrace();
-    }
-  }
-}
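
A note for readers of this archive: the deleted writeDenSerialized() above stores each record in the "partition" table three ways: a general index cell (value + "\u0000" + id), a specific index cell (key + "//" + value + "\u0000" + id), and a single "event" cell whose value packs all field/value pairs, with pairs joined by "\u0000" and each key split from its value by "\uFFFD". A minimal, self-contained sketch of that event serialization follows; the record contents are illustrative only:

    import java.util.LinkedHashMap;
    import java.util.Map;

    public class EventValueSketch {
        public static void main(String[] args) {
            Map<String, String> record = new LinkedHashMap<>();
            record.put("beam-name", "testa");
            record.put("beam-footprint", "POLYGON ((...))");

            StringBuilder value = new StringBuilder();
            boolean first = true;
            for (Map.Entry<String, String> entry : record.entrySet()) {
                if (!first) {
                    value.append("\u0000");   // pair separator (U+0000)
                }
                first = false;
                value.append(entry.getKey())
                     .append("\uFFFD")        // key/value separator (U+FFFD)
                     .append(entry.getValue());
            }
            // value now holds: beam-name U+FFFD testa U+0000 beam-footprint U+FFFD POLYGON ((...))
            System.out.println(value);
        }
    }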

http://git-wip-us.apache.org/repos/asf/incubator-rya/blob/80faf06d/partition/common-query/pom.xml
----------------------------------------------------------------------
diff --git a/partition/common-query/pom.xml b/partition/common-query/pom.xml
deleted file mode 100644
index 6db84bf..0000000
--- a/partition/common-query/pom.xml
+++ /dev/null
@@ -1,103 +0,0 @@
-<project xmlns="http://maven.apache.org/POM/4.0.0" xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance"
-         xsi:schemaLocation="http://maven.apache.org/POM/4.0.0 http://maven.apache.org/maven-v4_0_0.xsd">
-    <modelVersion>4.0.0</modelVersion>
-
-    <!--<parent>-->
-    <!--<groupId>sitestore</groupId>-->
-    <!--<artifactId>sitestore</artifactId>-->
-    <!--<version>2.0.0-SNAPSHOT</version>-->
-    <!--</parent>-->
-
-    <parent>
-        <groupId>mvm.rya</groupId>
-        <artifactId>parent</artifactId>
-        <version>2.0.0-SNAPSHOT</version>
-    </parent>
-
-    <groupId>sitestore.common</groupId>
-    <artifactId>common-query</artifactId>
-    <name>common-query (${project.version})</name>
-    <version>2.0.0-SNAPSHOT</version>
-    <description>A set of filters and iterators for cloudbase queries</description>
-
-    <properties>
-        <skipTests>true</skipTests>
-    </properties>
-
-    <build>
-        <plugins>
-            <plugin>
-                <groupId>org.apache.maven.plugins</groupId>
-                <artifactId>maven-source-plugin</artifactId>
-                <version>2.1.2</version>
-                <executions>
-                    <execution>
-                        <id>attach-sources</id>
-                        <phase>install</phase>
-                        <goals>
-                            <goal>jar</goal>
-                        </goals>
-                    </execution>
-                </executions>
-            </plugin>
-            <plugin>
-                <groupId>org.apache.maven.plugins</groupId>
-                <artifactId>maven-surefire-plugin</artifactId>
-                <version>2.7.2</version>
-                <configuration>
-                    <skipTests>${skipTests}</skipTests>
-                </configuration>
-            </plugin>
-        </plugins>
-    </build>
-    <!--<scm>-->
-    <!--<connection>${scmLocation}/tto/ss/common/trunk/common-query</connection>-->
-    <!--</scm>-->
-    <dependencies>
-        <dependency>
-            <groupId>junit</groupId>
-            <artifactId>junit</artifactId>
-        </dependency>
-        <dependency>
-            <groupId>log4j</groupId>
-            <artifactId>log4j</artifactId>
-            <version>1.2.14</version>
-        </dependency>
-        <dependency>
-            <groupId>cloudbase</groupId>
-            <artifactId>cloudbase-core</artifactId>
-        </dependency>
-        <dependency>
-            <groupId>commons-logging</groupId>
-            <artifactId>commons-logging</artifactId>
-            <version>1.0.4</version>
-        </dependency>
-        <dependency>
-            <groupId>org.apache.hadoop</groupId>
-            <artifactId>hadoop-common</artifactId>
-        </dependency>
-        <dependency>
-            <groupId>cloudbase</groupId>
-            <artifactId>cloudbase-start</artifactId>
-        </dependency>
-        <dependency>
-            <groupId>commons-codec</groupId>
-            <artifactId>commons-codec</artifactId>
-            <version>1.3</version>
-        </dependency>
-        <dependency>
-            <groupId>org.apache.thrift</groupId>
-            <artifactId>thrift</artifactId>
-        </dependency>
-        <dependency>
-            <groupId>com.vividsolutions</groupId>
-            <artifactId>jts</artifactId>
-            <version>1.11</version>
-        </dependency>
-        <dependency>
-            <groupId>xerces</groupId>
-            <artifactId>xercesImpl</artifactId>
-            <version>2.8.1</version>
-        </dependency>
-    </dependencies>
-</project>

http://git-wip-us.apache.org/repos/asf/incubator-rya/blob/80faf06d/partition/common-query/src/main/java/ss/cloudbase/core/iterators/CellLevelFilteringIterator.java
----------------------------------------------------------------------
diff --git a/partition/common-query/src/main/java/ss/cloudbase/core/iterators/CellLevelFilteringIterator.java b/partition/common-query/src/main/java/ss/cloudbase/core/iterators/CellLevelFilteringIterator.java
deleted file mode 100644
index e0126fa..0000000
--- a/partition/common-query/src/main/java/ss/cloudbase/core/iterators/CellLevelFilteringIterator.java
+++ /dev/null
@@ -1,163 +0,0 @@
-package ss.cloudbase.core.iterators;
-
-import java.io.IOException;
-import java.util.Collection;
-import java.util.Collections;
-import java.util.HashMap;
-import java.util.Map;
-
-import org.apache.hadoop.io.Text;
-
-import ss.cloudbase.core.iterators.filter.ogc.OGCFilter;
-import cloudbase.core.data.ByteSequence;
-import cloudbase.core.data.Key;
-import cloudbase.core.data.Range;
-import cloudbase.core.data.Value;
-import cloudbase.core.iterators.IteratorEnvironment;
-import cloudbase.core.iterators.SortedKeyValueIterator;
-import cloudbase.core.iterators.WrappingIterator;
-
-public class CellLevelFilteringIterator extends WrappingIterator {
-	private static final Collection<ByteSequence> EMPTY_SET = Collections.emptySet();
-	
-	/** The OGC Filter string **/
-	public static final String OPTION_FILTER = "filter";
-	
-	/** The character or characters that defines the end of the field in the column qualifier. Defaults to '@' **/
-	public static final String OPTION_FIELD_END = "fieldEnd";
-	
-	protected SortedKeyValueIterator<Key, Value> checkSource;
-	
-	protected Map<String, Boolean> cache = new HashMap<String, Boolean>();
-	
-	protected OGCFilter filter;
-	
-	protected String fieldEnd = "@";
-
-	public CellLevelFilteringIterator() {}
-	
-	public CellLevelFilteringIterator(CellLevelFilteringIterator other, IteratorEnvironment env) {
-		setSource(other.getSource().deepCopy(env));
-		checkSource = other.checkSource.deepCopy(env);
-		cache = other.cache;
-		fieldEnd = other.fieldEnd;
-	}
-	
-	@Override
-	public CellLevelFilteringIterator deepCopy(IteratorEnvironment env) {
-		return new CellLevelFilteringIterator(this, env);
-	}
-
-	@Override
-	public void init(SortedKeyValueIterator<Key, Value> source, Map<String, String> options, IteratorEnvironment env) throws IOException {
-		super.init(source, options, env);
-		if (source instanceof GMDenIntersectingIterator) {
-			checkSource = ((GMDenIntersectingIterator) source).docSource.deepCopy(env);
-		} else if (source instanceof SortedRangeIterator) {
-			checkSource = ((SortedRangeIterator) source).docSource.deepCopy(env);
-		} else {
-			checkSource = source.deepCopy(env);
-		}
-		filter = new OGCFilter();
-		filter.init(options);
-		
-		if (options.containsKey(OPTION_FIELD_END)) {
-			fieldEnd = options.get(OPTION_FIELD_END);
-		}
-	}
-
-	@Override
-	public void next() throws IOException {
-		getSource().next();
-		findTop();
-	}
-	
-	protected String getDocId(Key key) {
-		String colq = key.getColumnQualifier().toString();
-		int i = colq.indexOf("\u0000");
-		if (i == -1) {
-			i = colq.length();	
-		}
-		return colq.substring(0, i);
-	}
-	
-	protected Key getRecordStartKey(Key key, String docId) {
-		return new Key(key.getRow(), key.getColumnFamily(), new Text(docId + "\u0000"));
-	}
-	
-	protected Key getRecordEndKey(Key key, String docId) {
-		return new Key(key.getRow(), key.getColumnFamily(), new Text(docId + "\u0000\uFFFD"));
-	}
-	
-	protected String getField(Key key, Value value) {
-		String colq = key.getColumnQualifier().toString();
-		int i = colq.indexOf("\u0000");
-		if (i == -1) {
-			return null;
-		}
-		
-		int j = colq.indexOf(fieldEnd, i + 1);
-		if (j == -1) {
-			j = colq.length();
-		}
-		
-		return colq.substring(i + 1, j);
-	}
-	
-	protected String getValue(Key key, Value value) {
-		return value.toString();
-	}
-	
-	protected void findTop() throws IOException {
-		boolean goodKey;
-		String docId;
-		Map<String, String> record = new HashMap<String, String>();
-		
-		while (getSource().hasTop()) {
-			docId = getDocId(getSource().getTopKey());
-			
-			// if the document is in the cache, then we have already scanned it
-			if (cache.containsKey(docId)) {
-				goodKey = cache.get(docId);
-			} else {
-				// we need to scan the whole record into a map and evaluate the filter
-				
-				// seek the check source to the beginning of the record
-				Range range = new Range(
-					getRecordStartKey(getSource().getTopKey(), docId),
-					true,
-					getRecordEndKey(getSource().getTopKey(), docId),
-					true
-				);
-				
-				checkSource.seek(range, EMPTY_SET, false);
-				
-				// read in the record to the map
-				record.clear();
-				while (checkSource.hasTop()) {
-					String field = getField(checkSource.getTopKey(), checkSource.getTopValue());
-					if (field != null) {
-						record.put(field, getValue(checkSource.getTopKey(), checkSource.getTopValue()));
-					}
-					checkSource.next();
-				}
-				
-				// evaluate the filter
-				goodKey = filter.accept(record);
-				
-				// cache the result so that we don't do this for every cell
-				cache.put(docId, goodKey);
-			}
-			
-			if (goodKey==true)
-				return;
-			getSource().next();
-		}
-	}
-
-	@Override
-	public void seek(Range range, Collection<ByteSequence> columnFamilies, boolean inclusive) throws IOException {
-		getSource().seek(range, columnFamilies, inclusive);
-		findTop();
-	}
-}

http://git-wip-us.apache.org/repos/asf/incubator-rya/blob/80faf06d/partition/common-query/src/main/java/ss/cloudbase/core/iterators/CellLevelRecordIterator.java
----------------------------------------------------------------------
diff --git a/partition/common-query/src/main/java/ss/cloudbase/core/iterators/CellLevelRecordIterator.java b/partition/common-query/src/main/java/ss/cloudbase/core/iterators/CellLevelRecordIterator.java
deleted file mode 100644
index 1f59882..0000000
--- a/partition/common-query/src/main/java/ss/cloudbase/core/iterators/CellLevelRecordIterator.java
+++ /dev/null
@@ -1,144 +0,0 @@
-package ss.cloudbase.core.iterators;
-
-import java.io.IOException;
-import java.util.HashMap;
-import java.util.Map;
-
-import org.apache.hadoop.io.Text;
-
-import ss.cloudbase.core.iterators.filter.CBConverter;
-
-import cloudbase.core.data.Key;
-import cloudbase.core.data.Range;
-import cloudbase.core.data.Value;
-import cloudbase.core.iterators.IteratorEnvironment;
-import cloudbase.core.iterators.SkippingIterator;
-import cloudbase.core.iterators.SortedKeyValueIterator;
-
-public class CellLevelRecordIterator extends SkippingIterator {
-	public static final String OPTION_FIELD_END = "fieldEnd";
-	public static final String OPTION_MULTIPLE_DELIMITER = "multipleDelimiter";
-	
-	protected String multipleDelimiter = ",";
-	
-	protected Key topKey;
-	protected Value topValue;
-	protected String fieldEnd = "@";
-	protected String docId = null;
-	protected CBConverter converter = new CBConverter();
-	
-	@Override
-	public SortedKeyValueIterator<Key, Value> deepCopy(IteratorEnvironment env) {
-		CellLevelRecordIterator itr = new CellLevelRecordIterator();
-		itr.setSource(this.getSource().deepCopy(env));
-		itr.fieldEnd = this.fieldEnd;
-		return itr;
-	}
-	
-	@Override
-	public void init(SortedKeyValueIterator<Key, Value> source, Map<String, String> options, IteratorEnvironment env) throws IOException {
-		super.init(source, options, env);
-        converter.init(options);
-		if (options.containsKey(OPTION_FIELD_END)) {
-			fieldEnd = options.get(OPTION_FIELD_END);
-		}
-		
-		if (options.containsKey(OPTION_MULTIPLE_DELIMITER)) {
-			multipleDelimiter = options.get(OPTION_MULTIPLE_DELIMITER);
-		}
-	}
-
-	@Override
-	public void next() throws IOException {
-		consume();
-	}
-
-	@Override
-	public boolean hasTop() {
-		return getSource().hasTop() || topKey != null || topValue != null;
-	}
-
-	@Override
-	public Key getTopKey() {
-		return topKey;
-	}
-
-	@Override
-	public Value getTopValue() {
-		return topValue;
-	}
-	
-	protected String getDocId(Key key) {
-		String colq = key.getColumnQualifier().toString();
-		int i = colq.indexOf("\u0000");
-		if (i == -1) {
-			i = colq.length();	
-		}
-		return colq.substring(0, i);
-	}
-
-	protected Key buildTopKey(Key key, String docId) {
-		return new Key(key.getRow(), key.getColumnFamily(), new Text(docId), key.getColumnVisibility(), key.getTimestamp());
-	}
-	
-	protected String getField(Key key, Value value) {
-		String colq = key.getColumnQualifier().toString();
-		int i = colq.indexOf("\u0000");
-		if (i == -1) {
-			return null;
-		}
-		
-		int j = colq.indexOf(fieldEnd, i + 1);
-		if (j == -1) {
-			j = colq.length();
-		}
-		
-		return colq.substring(i + 1, j);
-	}
-	
-	protected String getValue(Key key, Value value) {
-		return value.toString();
-	}
-	
-	protected Key getRecordStartKey(Key key, String docId) {
-		return new Key(key.getRow(), key.getColumnFamily(), new Text(docId));
-	}
-	
-	protected Key getRecordEndKey(Key key, String docId) {
-		return new Key(key.getRow(), key.getColumnFamily(), new Text(docId + "\u0000\uFFFD"));
-	}
-
-	@Override
-	protected void consume() throws IOException {
-		// build the top key
-		if (getSource().hasTop()) {
-			docId = getDocId(getSource().getTopKey());
-			topKey = buildTopKey(getSource().getTopKey(), docId);
-			
-			Range range = new Range(
-				getRecordStartKey(getSource().getTopKey(), docId),
-				true,
-				getRecordEndKey(getSource().getTopKey(), docId),
-				true
-			);
-			
-			Map<String, String> record = new HashMap<String, String>();
-			while (getSource().hasTop() && range.contains(getSource().getTopKey())) {
-				String field = getField(getSource().getTopKey(), getSource().getTopValue());
-				if (field != null) {
-					if (record.get(field) == null) {
-						record.put(field, getValue(getSource().getTopKey(), getSource().getTopValue()));
-					} else {
-						record.put(field, record.get(field) + multipleDelimiter + getValue(getSource().getTopKey(), getSource().getTopValue()));
-					}
-				}
-				getSource().next();
-			}
-			
-			topValue = converter.toValue(record);  
-		} else {
-			topKey = null;
-			topValue = null;
-		}
-	}
-}
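
A note on the record layout the deleted CellLevelRecordIterator assumed: each cell's column qualifier is docId + "\u0000" + field, with the field name terminated by fieldEnd ("@" by default), and repeated fields joined with multipleDelimiter. A small sketch of the qualifier parsing, mirroring the deleted getDocId()/getField(); the sample qualifier is illustrative, not from the repo:

    // Mirrors getDocId()/getField() from the deleted iterator, assuming a
    // well-formed qualifier of the shape: docId U+0000 field fieldEnd ...
    String colq = "uuid-123\u0000beam-name@ALPHA";   // illustrative qualifier
    int i = colq.indexOf("\u0000");
    String docId = colq.substring(0, i);             // "uuid-123"
    int j = colq.indexOf("@", i + 1);                // fieldEnd defaults to "@"
    if (j == -1) {
        j = colq.length();
    }
    String field = colq.substring(i + 1, j);         // "beam-name"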

http://git-wip-us.apache.org/repos/asf/incubator-rya/blob/80faf06d/partition/common-query/src/main/java/ss/cloudbase/core/iterators/ConversionIterator.java
----------------------------------------------------------------------
diff --git a/partition/common-query/src/main/java/ss/cloudbase/core/iterators/ConversionIterator.java b/partition/common-query/src/main/java/ss/cloudbase/core/iterators/ConversionIterator.java
deleted file mode 100644
index 5e75334..0000000
--- a/partition/common-query/src/main/java/ss/cloudbase/core/iterators/ConversionIterator.java
+++ /dev/null
@@ -1,151 +0,0 @@
-package ss.cloudbase.core.iterators;
-
-import java.io.IOException;
-import java.util.HashMap;
-import java.util.Map;
-
-import ss.cloudbase.core.iterators.conversion.Operation;
-import ss.cloudbase.core.iterators.filter.CBConverter;
-import cloudbase.core.data.Key;
-import cloudbase.core.data.Value;
-import cloudbase.core.iterators.IteratorEnvironment;
-import cloudbase.core.iterators.SortedKeyValueIterator;
-import cloudbase.core.iterators.WrappingIterator;
-
-public class ConversionIterator extends WrappingIterator {
-	public static final String OPTION_CONVERSIONS = "conversions";
-	public static final String OPTION_MULTI_DOC = "multiDoc";
-	/** The character or characters that defines the end of the field in the column qualifier. Defaults to '@' **/
-	public static final String OPTION_FIELD_END = "fieldEnd";
-	
-	protected CBConverter serializedConverter;
-	protected Map<String, Operation> conversions;
-	protected boolean multiDoc = false;
-	protected String fieldEnd = "@";
-	
-	public ConversionIterator() {}
-	
-	public ConversionIterator(ConversionIterator other) {
-		this.conversions.putAll(other.conversions);
-		this.multiDoc = other.multiDoc;
-		this.serializedConverter = other.serializedConverter;
-	}
-	
-	@Override
-	public SortedKeyValueIterator<Key, Value> deepCopy(IteratorEnvironment env) {
-		return new ConversionIterator(this);
-	}
-
-	@Override
-	public Value getTopValue() {
-		if (hasTop()) {
-			if (conversions != null) {
-				if (multiDoc) {
-					return multiDocConvert(super.getTopValue());
-				} else {
-					return convert(super.getTopValue());
-				}
-			}
-		}
-		return super.getTopValue();
-	}
-	
-	protected String getMultiDocField(Key key) {
-		String colq = key.getColumnQualifier().toString();
-		int start = colq.indexOf("\u0000");
-		if (start == -1) {
-			return null;
-		}
-		
-		int end = colq.indexOf(fieldEnd, start + 1);
-		if (end == -1) {
-			end = colq.length();
-		}
-		
-		return colq.substring(start + 1, end);
-	}
-	
-	protected Value multiDocConvert(Value value) {
-		String field = getMultiDocField(getTopKey());
-		if (conversions.containsKey(field)) {
-			String newValue = conversions.get(field).execute(value.toString());
-			return new Value(newValue.getBytes());
-		} else {
-			return value;
-		}
-	}
-	
-	protected Value convert(Value value) {
-		Map<String, String> record = serializedConverter.toMap(getTopKey(), value);
-		
-		for (String field: record.keySet()) {
-			if (conversions.containsKey(field)) {
-				record.put(field, conversions.get(field).execute(record.get(field)));
-			}
-		}
-		
-		return serializedConverter.toValue(record);
-	}
-
-	@Override
-	public void init(SortedKeyValueIterator<Key, Value> source, Map<String, String> options, IteratorEnvironment env) throws IOException {
-		super.init(source, options, env);
-		
-		if (options.containsKey(OPTION_MULTI_DOC)) {
-			multiDoc = Boolean.parseBoolean(options.get(OPTION_MULTI_DOC));
-		} else {
-			multiDoc = false;
-		}
-		
-		if (!multiDoc) {
-			serializedConverter = new CBConverter();
-			serializedConverter.init(options);
-		}
-		
-		if (options.containsKey(OPTION_FIELD_END)) {
-			fieldEnd = options.get(OPTION_FIELD_END);
-		}
-		
-		if (options.containsKey(OPTION_CONVERSIONS)) {
-			Operation[] ops = decodeConversions(options.get(OPTION_CONVERSIONS));
-			conversions = new HashMap<String, Operation> ();
-			
-			for (Operation o: ops) {
-				conversions.put(o.getField(), o);
-			}
-		}
-	}
-	
-	/**
-	 * Encodes a set of conversion strings for use with the OPTION_CONVERSIONS options. Each conversion
-	 * string should be in the format 'field op value' (whitespace necessary), where op is +, -, *, /, %, or
-	 * ^ and the value is a number.
-	 * 
-	 * @param conversions
-	 * @return The encoded value to use with OPTION_CONVERSIONS
-	 */
-	public static String encodeConversions(String[] conversions) {
-		StringBuilder sb = new StringBuilder();
-		boolean first = true;
-		for (String conversion: conversions) {
-			if (first) {
-				first = false;
-			} else {
-				sb.append("\u0000");
-			}
-			sb.append(conversion);
-		}
-		return sb.toString();
-	}
-	
-	public static Operation[] decodeConversions(String conversions) {
-		String[] configs = conversions.split("\u0000");
-		Operation[] ops = new Operation[configs.length];
-		
-		for (int i = 0; i < configs.length; i++) {
-			ops[i] = new Operation(configs[i]);
-		}
-		
-		return ops;
-	}
-}
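
Per the Javadoc just above, the OPTION_CONVERSIONS value is a set of 'field op value' strings (whitespace required; op is one of +, -, *, /, % or ^, and value is a number) joined with "\u0000". A hedged usage sketch against the class as it existed before this commit; the field names are invented for illustration:

    // Hypothetical fields; each entry follows the documented "field op value" form.
    String[] conversions = { "speed * 0.514", "altitude + 100" };
    String encoded = ConversionIterator.encodeConversions(conversions);
    // encoded is "speed * 0.514" + "\u0000" + "altitude + 100", the string to
    // pass as the OPTION_CONVERSIONS iterator option; decodeConversions()
    // splits it back into one Operation per entry.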

http://git-wip-us.apache.org/repos/asf/incubator-rya/blob/80faf06d/partition/common-query/src/main/java/ss/cloudbase/core/iterators/GMDenIntersectingIterator.java
----------------------------------------------------------------------
diff --git a/partition/common-query/src/main/java/ss/cloudbase/core/iterators/GMDenIntersectingIterator.java b/partition/common-query/src/main/java/ss/cloudbase/core/iterators/GMDenIntersectingIterator.java
deleted file mode 100644
index 7ec401f..0000000
--- a/partition/common-query/src/main/java/ss/cloudbase/core/iterators/GMDenIntersectingIterator.java
+++ /dev/null
@@ -1,363 +0,0 @@
-// Dear Cloudbase,
-// 		Use protected fields/methods as much as possible in APIs.
-// 		Love,
-//			Will
-
-// since the IntersectingIterator/FamilyIntersectingIterator classes are stingy with their fields, we have to use
-// the exact same package name to get at currentPartition and currentDocID
-package ss.cloudbase.core.iterators;
-
-import java.io.IOException;
-import java.util.ArrayList;
-import java.util.Collection;
-import java.util.Collections;
-import java.util.HashMap;
-import java.util.List;
-import java.util.Map;
-import java.util.Set;
-
-import org.apache.hadoop.io.Text;
-import org.apache.log4j.Logger;
-
-import ss.cloudbase.core.iterators.IntersectingIterator.TermSource;
-
-import cloudbase.core.data.ArrayByteSequence;
-import cloudbase.core.data.ByteSequence;
-import cloudbase.core.data.Key;
-import cloudbase.core.data.Range;
-import cloudbase.core.data.Value;
-import cloudbase.core.iterators.IteratorEnvironment;
-import cloudbase.core.iterators.SortedKeyValueIterator;
-
-/**
- * This class is a copy of FamilyIntersectingIterator with a few minor changes. It assumes a table structure like the following:
- * <table>
- * <tr><th>Row</th><th>Column Family</th><th>Column Qualifier</th><th>Value</th></tr>
- * <tr><td>Partition1</td><td>event</td><td>UUID</td><td>The record value</td></tr>
- * <tr><td>Partition1</td><td>index</td><td>term\u0000UUID</td><td></td></tr>
- * </table>
- * 
- * @author William Wall
- *
- */
-public class GMDenIntersectingIterator extends IntersectingIterator {
-	private static final Logger logger = Logger.getLogger(GMDenIntersectingIterator.class);
-	
-	public static final Text DEFAULT_INDEX_COLF = new Text("i");
-	public static final Text DEFAULT_DOC_COLF = new Text("e");
-	
-	public static final String indexFamilyOptionName = "indexFamily";
-	public static final String docFamilyOptionName = "docFamily";
-	
-	protected static Text indexColf = DEFAULT_INDEX_COLF;
-	protected static Text docColf = DEFAULT_DOC_COLF;
-	protected static Set<ByteSequence> indexColfSet;
-	protected static Set<ByteSequence> docColfSet;
-	
-	protected static final byte[] nullByte = {0};
-	
-	protected SortedKeyValueIterator<Key,Value> docSource;
-	
-	/** 
-	 * Use this option to retrieve all the documents that match the UUID rather than just the first. This 
-	 * is commonly used in cell-level security models that use the column-qualifier like this:
-	 * UUID \0 field1 [] value
-	 * UUID \0 securedField [ALPHA] secretValue
-	 **/
-	public static final String OPTION_MULTI_DOC = "multiDoc";
-	
-	/**
-	 * Use this option to turn off document lookup.
-	 */
-	public static final String OPTION_DOC_LOOKUP = "docLookup";
-	
-	protected boolean multiDoc = false;
-	protected boolean doDocLookup = true;
-	protected Range docRange = null;
-	protected boolean nextId = false;
-	
-	@Override
-	public void init(SortedKeyValueIterator<Key, Value> source, Map<String, String> options, IteratorEnvironment env) throws IOException {
-		if (options.containsKey(indexFamilyOptionName))
-			indexColf = new Text(options.get(indexFamilyOptionName));
-		if (options.containsKey(docFamilyOptionName))
-			docColf = new Text(options.get(docFamilyOptionName));
-		docSource = source.deepCopy(env);
-		indexColfSet = Collections.singleton((ByteSequence)new ArrayByteSequence(indexColf.getBytes(),0,indexColf.getLength()));
-		
-		if (options.containsKey(OPTION_MULTI_DOC)) {
-			multiDoc = Boolean.parseBoolean(options.get(OPTION_MULTI_DOC));
-		}
-		
-		if (options.containsKey(OPTION_DOC_LOOKUP)) {
-			doDocLookup = Boolean.parseBoolean(options.get(OPTION_DOC_LOOKUP));
-		}
-		
-		if (!doDocLookup) {
-			// it makes no sense to turn on multiDoc if doDocLookup is off
-			multiDoc = false;
-		}
-		
-		// remove any range terms
-		Text[] originalTerms = decodeColumns(options.get(columnFamiliesOptionName));
-		boolean[] originalBooleans = decodeBooleans(options.get(notFlagOptionName));
-		
-		List<Text> terms = new ArrayList<Text>();
-		List<Boolean> termBooleans = new ArrayList<Boolean>();
-		List<Text> ranges = new ArrayList<Text>();
-		List<Boolean> rangeBooleans = new ArrayList<Boolean>();
-		
-		boolean boolsExist = originalBooleans != null && originalBooleans.length == originalTerms.length;
-		
-		for (int i = 0; i < originalTerms.length; i++) {
-			if (isRangeTerm(originalTerms[i])) {
-				ranges.add(originalTerms[i]);
-				if (boolsExist) {
-					rangeBooleans.add(originalBooleans[i]);
-				} else {
-					rangeBooleans.add(false);
-				}
-			} else {
-				terms.add(originalTerms[i]);
-				
-				if (boolsExist) {
-					termBooleans.add(originalBooleans[i]);
-				} else {
-					termBooleans.add(false);
-				}
-			}
-		}
-		
-		boolean[] bools = new boolean[termBooleans.size()];
-		for (int i = 0; i < termBooleans.size(); i++) {
-			bools[i] = termBooleans.get(i).booleanValue();
-		}
-		
-		boolean[] rangeBools = new boolean[rangeBooleans.size()];
-		for (int i = 0; i < rangeBooleans.size(); i++) {
-			rangeBools[i] = rangeBooleans.get(i).booleanValue();
-		}
-		
-		// put the modified term/boolean lists back in the options
-		
-		if (terms.size() < 2) {
-			// the intersecting iterator will choke on these, so we'll set it up ourselves
-			if (terms.size() == 1) {
-				sources = new TermSource[1];
-				sources[0] = new TermSource(source, terms.get(0));
-			}
-		} else {
-			options.put(columnFamiliesOptionName, encodeColumns(terms.toArray(new Text[terms.size()])));
-			if (termBooleans.size() > 0) {
-				options.put(notFlagOptionName, encodeBooleans(bools));
-			}
-			
-			super.init(source, options, env);
-		}
-		
-		// add the range terms
-		if (ranges.size() > 0) {
-			
-			TermSource[] localSources;
-			
-			int offset = 0;
-			if (sources != null) {
-				localSources = new TermSource[sources.length + ranges.size()];
-				
-				// copy array
-				for (int i = 0; i < sources.length; i++) {
-					localSources[i] = sources[i];
-				}
-				
-				offset = sources.length;
-			} else {
-				localSources = new TermSource[ranges.size()];
-			}
-			
-			for (int i = 0; i < ranges.size(); i++) {
-				IntersectionRange ri = new IntersectionRange();
-				ri.init(source.deepCopy(env), getRangeIteratorOptions(ranges.get(i)), env);
-				localSources[i + offset] = new TermSource(ri, ri.getOutputTerm(), rangeBools[i]);
-			}
-			
-			sources = localSources;
-		}
-		
-		sourcesCount = sources.length;
-		
-		if (sourcesCount < 2) {
-			throw new IOException("GMDenIntersectingIterator requires two or more terms");
-		}
-		
-		docColfSet = Collections.singleton((ByteSequence)new ArrayByteSequence(docColf.getBytes(),0,docColf.getLength()));
-	}
-
-	@Override
-	protected Key buildKey(Text partition, Text term, Text docID) {
-		Text colq = new Text(term);
-		colq.append(nullByte, 0, 1);
-		colq.append(docID.getBytes(), 0, docID.getLength());
-		return new Key(partition, indexColf, colq);
-	}
-
-	@Override
-	protected Key buildKey(Text partition, Text term) {
-		Text colq = new Text(term);
-		return new Key(partition, indexColf, colq);
-	}
-
-	@Override
-	protected Text getTerm(Key key) {
-		if (indexColf.compareTo(key.getColumnFamily().getBytes(),0,indexColf.getLength())< 0) {
-		 // We're past the index column family, so return a term that will sort lexicographically last.
-		 // The last unicode character should suffice
-		 return new Text("\uFFFD");
-		}
-		Text colq = key.getColumnQualifier();
-		int zeroIndex = colq.find("\0");
-		Text term = new Text();
-		term.set(colq.getBytes(),0,zeroIndex);
-		return term;
-	}
-
-	@Override
-	public SortedKeyValueIterator<Key, Value> deepCopy(IteratorEnvironment env) {
-		GMDenIntersectingIterator newItr = new GMDenIntersectingIterator();
-		if(sources != null) {
-		    newItr.sourcesCount = sourcesCount;
-			newItr.sources = new TermSource[sourcesCount];
-			for(int i = 0; i < sourcesCount; i++) {
-				newItr.sources[i] = new TermSource(sources[i].iter.deepCopy(env), sources[i].term);
-			}
-		}
-		newItr.currentDocID = currentDocID;
-		newItr.currentPartition = currentPartition;
-		newItr.docRange = docRange;
-		newItr.docSource = docSource.deepCopy(env);
-		newItr.inclusive = inclusive;
-		newItr.multiDoc = multiDoc;
-		newItr.nextId = nextId;
-		newItr.overallRange = overallRange;
-		return newItr;
-	}
-	
-	@Override
-	public void seek(Range range, Collection<ByteSequence> seekColumnFamilies, boolean inclusive) throws IOException {
-		super.seek(range, indexColfSet, true);
-		
-	}
-	
-	@Override
-	protected Text getDocID(Key key) {
-		Text colq = key.getColumnQualifier();
-		int firstZeroIndex = colq.find("\0");
-		if (firstZeroIndex < 0) {
-			throw new IllegalArgumentException("bad docid: "+key.toString());
-		}
-		Text docID = new Text();
-		try {
-			docID.set(colq.getBytes(),firstZeroIndex+1, colq.getBytes().length - firstZeroIndex - 1);
-		} catch (ArrayIndexOutOfBoundsException e) {
-			throw new IllegalArgumentException("bad indices for docid: "+key.toString()+" "+firstZeroIndex +" " + (colq.getBytes().length - firstZeroIndex - 1));			
-		}
-		return docID;
-	}
-	
-	protected Key buildStartKey() {
-		return new Key(currentPartition, docColf, currentDocID);
-	}
-	
-	protected Key buildEndKey() {
-		if (multiDoc) {
-			return new Key(currentPartition, docColf, new Text(currentDocID.toString() + "\u0000\uFFFD"));
-		}
-		return null;
-	}
-	
-	@Override
-	public void next() throws IOException {
-		if (multiDoc && nextId) {
-			docSource.next();
-			
-			// check to make sure that the docSource top is less than our max key
-			if (docSource.hasTop() && docRange.contains(docSource.getTopKey())) {
-				topKey = docSource.getTopKey();
-				value = docSource.getTopValue();
-				return;
-			}
-		}
-		
-		nextId = false;
-		super.next();
-	}
-
-	@Override
-	protected void advanceToIntersection() throws IOException {
-		super.advanceToIntersection();
-		
-		if (topKey==null || !doDocLookup)
-			return;
-		
-		if (logger.isTraceEnabled()) logger.trace("using top key to seek for doc: "+topKey.toString());
-		docRange = new Range(buildStartKey(), true, buildEndKey(), false);
-		docSource.seek(docRange, docColfSet, true);
-		logger.debug("got doc key: "+docSource.getTopKey().toString());
-		if (docSource.hasTop()&& docRange.contains(docSource.getTopKey())) {
-			value = docSource.getTopValue();
-		}
-		logger.debug("got doc value: "+value.toString());
-		
-		if (docSource.hasTop()) {
-			if (multiDoc && topKey != null) {
-				nextId = true;
-			}
-			topKey = docSource.getTopKey();
-		}
-	}
-
-	
-	public boolean isRangeTerm(Text term) {
-		return term.toString().startsWith("range\u0000");
-	}
-
-	protected Map<String, String> getRangeIteratorOptions(Text config) {
-		// we want the keys from Range Iterators to look like this:
-		// range|colf|lower|includeLower|upper|includeUpper
-		// e.g. range|geo|21332|true|21333|false
-		
-		// and we'll output a key like this:
-		// partition index:geo\0UUID ...
-		
-		
-		String[] range = config.toString().split("\u0000");
-		Map<String, String> options = new HashMap<String, String>();
-		options.put(IntersectionRange.OPTION_COLF, range[1]);
-		options.put(IntersectionRange.OPTION_OUTPUT_TERM, range[1]);
-		options.put(IntersectionRange.OPTION_LOWER_BOUND, range[2]);
-		options.put(IntersectionRange.OPTION_START_INCLUSIVE, range[3]);
-		options.put(IntersectionRange.OPTION_UPPER_BOUND, range[4]);
-		options.put(IntersectionRange.OPTION_END_INCLUSIVE, range[5]);
-		options.put(IntersectionRange.OPTION_OUTPUT_COLF, indexColf.toString());
-		return options;
-	}
-	
-	/**
-	 * Builds a range term for use with the IntersectingIterator
-	 * @param colf The column family to search
-	 * @param start The start of the range
-	 * @param includeStart Whether the start of the range is inclusive or not
-	 * @param end The end of the range 
-	 * @param includeEnd Whether the end of the range is inclusive or not
-	 * @return A String formatted for use as a term in a GMDenIntersectingIterator
-	 */
-	public static String getRangeTerm(String colf, String start, boolean includeStart, String end, boolean includeEnd) {
-		StringBuilder sb = new StringBuilder();
-		sb.append("range\u0000");
-		sb.append(colf).append("\u0000");
-		sb.append(start).append("\u0000");
-		sb.append(includeStart ? "true": "false").append("\u0000");
-		sb.append(end).append("\u0000");
-		sb.append(includeEnd ? "true": "false").append("\u0000");
-		return sb.toString();
-	}
-}

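For reference, the removed GMDenIntersectingIterator consumed "range terms" in the
null-delimited form documented in getRangeIteratorOptions() above
(range|colf|lower|includeLower|upper|includeUpper, joined by "\u0000"). A minimal
sketch of building one with the static helper; the surrounding term list is
illustrative, not from the original tree, and the Text import is the hadoop one
used by the class above:

    // Matches 21332 <= geo < 21333, per the example in getRangeIteratorOptions().
    String rangeTerm = GMDenIntersectingIterator.getRangeTerm(
            "geo",    // column family holding the indexed values
            "21332",  // lower bound
            true,     // lower bound inclusive
            "21333",  // upper bound
            false);   // upper bound exclusive

    // Range terms mix freely with ordinary terms when encoding the options
    // (encodeColumns is inherited from IntersectingIterator).
    Text[] terms = { new Text("someOrdinaryTerm"), new Text(rangeTerm) };
    String encoded = IntersectingIterator.encodeColumns(terms);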
http://git-wip-us.apache.org/repos/asf/incubator-rya/blob/80faf06d/partition/common-query/src/main/java/ss/cloudbase/core/iterators/IntersectingIterator.java
----------------------------------------------------------------------
diff --git a/partition/common-query/src/main/java/ss/cloudbase/core/iterators/IntersectingIterator.java b/partition/common-query/src/main/java/ss/cloudbase/core/iterators/IntersectingIterator.java
deleted file mode 100644
index 3b4961f..0000000
--- a/partition/common-query/src/main/java/ss/cloudbase/core/iterators/IntersectingIterator.java
+++ /dev/null
@@ -1,557 +0,0 @@
-package ss.cloudbase.core.iterators;
-
-import java.io.IOException;
-import java.util.Collection;
-import java.util.Map;
-
-import org.apache.commons.codec.binary.Base64;
-import org.apache.hadoop.io.Text;
-import org.apache.log4j.Logger;
-
-import cloudbase.core.data.ByteSequence;
-import cloudbase.core.data.Key;
-import cloudbase.core.data.PartialKey;
-import cloudbase.core.data.Range;
-import cloudbase.core.data.Value;
-import cloudbase.core.iterators.IteratorEnvironment;
-import cloudbase.core.iterators.SortedKeyValueIterator;
-import cloudbase.core.util.TextUtil;
-
-public class IntersectingIterator implements SortedKeyValueIterator<Key,Value> {
-	
-    protected Text nullText = new Text();
-    
-	protected Text getPartition(Key key) {
-		return key.getRow();
-	}
-	
-	protected Text getTerm(Key key) {
-		return key.getColumnFamily();
-	}
-	
-	protected Text getDocID(Key key) {
-		return key.getColumnQualifier();
-	}
-	
-	protected Key buildKey(Text partition, Text term) {
-	    return new Key(partition,(term == null) ? nullText : term);
-	}
-	
-	protected Key buildKey(Text partition, Text term, Text docID) {
-	    return new Key(partition,(term == null) ? nullText : term, docID);
-	}
-	
-	protected Key buildFollowingPartitionKey(Key key) {
-		return key.followingKey(PartialKey.ROW);
-	}
-	
-    protected static final Logger log = Logger.getLogger(IntersectingIterator.class); 
-	
-	protected static class TermSource {
-		public SortedKeyValueIterator<Key,Value> iter;
-		public Text term;
-		public boolean notFlag;
-		
-		public TermSource(TermSource other) {
-            this.iter = other.iter;
-            this.term = other.term;
-            this.notFlag = other.notFlag;
-        }
-		
-		public TermSource(SortedKeyValueIterator<Key,Value> iter, Text term) {
-			this.iter = iter;
-			this.term = term;
-			this.notFlag = false;
-		}
-		public TermSource(SortedKeyValueIterator<Key,Value> iter, Text term, boolean notFlag) {
-			this.iter = iter;
-			this.term = term;
-			this.notFlag = notFlag;
-		}
-		
-		public String getTermString() {
-	        return (this.term == null) ? new String("Iterator") : this.term.toString();
-	    }
-	}
-	
-	protected TermSource[] sources;
-	protected int sourcesCount = 0;
-
-	protected Range overallRange;
-	
-	// query-time settings
-	protected Text currentPartition = null;
-	protected Text currentDocID = new Text(emptyByteArray);
-	protected static final byte [] emptyByteArray = new byte[0];
-	
-	protected Key topKey = null;
-	protected Value value = new Value(emptyByteArray);
-
-	protected Collection<ByteSequence> seekColumnFamilies;
-
-	protected boolean inclusive;
-	
-
-	public IntersectingIterator()
-	{}
-	
-	@Override
-	public SortedKeyValueIterator<Key, Value> deepCopy(IteratorEnvironment env) {
-		return new IntersectingIterator(this, env);
-	}
-	
-    public IntersectingIterator(IntersectingIterator other, IteratorEnvironment env)
-	{
-		if(other.sources != null)
-		{
-		    sourcesCount = other.sourcesCount;
-			sources = new TermSource[sourcesCount];
-			for(int i = 0; i < sourcesCount; i++)
-			{
-				sources[i] = new TermSource(other.sources[i].iter.deepCopy(env), other.sources[i].term);
-			}
-		}
-	}
-
-	@Override
-	public Key getTopKey() {
-		return topKey;
-	}
-
-	@Override
-	public Value getTopValue() {
-		// we don't really care about values
-		return value;
-	}
-
-	@Override
-	public boolean hasTop() {
-		return currentPartition != null;
-	}
-
-	// precondition: currentPartition is not null
-	private boolean seekOneSource(int sourceID) throws IOException
-	{
-		// find the next key in the appropriate column family that is at or beyond the cursor (currentRow, currentCQ)
-		// advance the cursor if this source goes beyond it
-		// return whether we advanced the cursor
-
-		// within this loop progress must be made in one of the following forms:
-		// - currentRow or currentCQ must be increased
-		// - the given source must advance its iterator
-		// this loop will end when any of the following criteria are met
-		// - the iterator for the given source is pointing to the key (currentRow, columnFamilies[sourceID], currentCQ)
-		// - the given source is out of data and currentRow is set to null
-		// - the given source has advanced beyond the endRow and currentRow is set to null
-		boolean advancedCursor = false;
-
-		if (sources[sourceID].notFlag)
-		{
-			while(true)
-			{
-				if(sources[sourceID].iter.hasTop() == false)
-				{
-					// an empty column that you are negating is a valid condition
-					break;
-				}
-				// check if we're past the end key
-				int endCompare = -1;
-				// we should compare the row to the end of the range
-				if(overallRange.getEndKey() != null)
-				{
-					endCompare = overallRange.getEndKey().getRow().compareTo(sources[sourceID].iter.getTopKey().getRow());
-					if((!overallRange.isEndKeyInclusive() && endCompare <= 0) || endCompare < 0)
-					{
-						// an empty column that you are negating is a valid condition
-						break;
-					}
-				}
-				int partitionCompare = currentPartition.compareTo(getPartition(sources[sourceID].iter.getTopKey()));
-				// check if this source is already at or beyond currentRow
-				// if not, then seek to at least the current row
-
-				if(partitionCompare > 0)
-				{
-					// seek to at least the currentRow
-					Key seekKey = buildKey(currentPartition,sources[sourceID].term);
-					sources[sourceID].iter.seek(new Range(seekKey,true, null, false), seekColumnFamilies, inclusive);
-					continue;
-				}
-				// check if this source has gone beyond currentRow
-				// if so, this is a valid condition for negation
-				if(partitionCompare < 0)
-				{
-					break;
-				}
-				// we have verified that the current source is positioned in currentRow
-				// now we must make sure we're in the right columnFamily in the current row
-                // Note: Iterators are auto-magically set to the correct columnFamily
-                if(sources[sourceID].term != null)
-                {
-    				int termCompare = sources[sourceID].term.compareTo(getTerm(sources[sourceID].iter.getTopKey()));
-    				// check if this source is already on the right columnFamily
-    				// if not, then seek forwards to the right columnFamily
-    				if(termCompare > 0)
-    				{
-    					Key seekKey = buildKey(currentPartition,sources[sourceID].term,currentDocID);
-    					sources[sourceID].iter.seek(new Range(seekKey,true,null,false), seekColumnFamilies, inclusive);
-    					continue;
-    				}
-    				// check if this source is beyond the right columnFamily
-    				// if so, then this is a valid condition for negating
-    				if(termCompare < 0)
-    				{
-    					break;
-    				}
-                }
-				
-				// we have verified that we are in currentRow and the correct column family
-				// make sure we are at or beyond columnQualifier
-				Text docID = getDocID(sources[sourceID].iter.getTopKey());
-				int docIDCompare = currentDocID.compareTo(docID);
-				// If we are past the target, this is a valid result
-				if(docIDCompare < 0)
-				{
-					break;
-				}
-				// if this source is not yet at the currentCQ then advance in this source
-				if(docIDCompare > 0)
-				{
-					// seek forwards
-					Key seekKey = buildKey(currentPartition, sources[sourceID].term, currentDocID);
-					sources[sourceID].iter.seek(new Range(seekKey, true, null, false), seekColumnFamilies, inclusive);
-					continue;
-				}
-				// if we are equal to the target, this is an invalid result.
-				// Force the entire process to go to the next row.
-				// We are advancing column 0 because we forced that column to not contain a !
-				//    when we did the init()
-				if(docIDCompare == 0)
-				{
-					sources[0].iter.next();
-					advancedCursor = true;
-					break;
-				}	
-			}
-		}
-		else
-		{
-			while(true)
-			{
-				if(sources[sourceID].iter.hasTop() == false)
-				{
-					currentPartition = null;
-					// setting currentRow to null counts as advancing the cursor
-					return true;
-				}
-				// check if we're past the end key
-				int endCompare = -1;
-				// we should compare the row to the end of the range
-
-				if(overallRange.getEndKey() != null)
-				{
-					endCompare = overallRange.getEndKey().getRow().compareTo(sources[sourceID].iter.getTopKey().getRow());
-					if((!overallRange.isEndKeyInclusive() && endCompare <= 0) || endCompare < 0)
-					{
-						currentPartition = null;
-						// setting currentRow to null counts as advancing the cursor
-						return true;
-					}
-				}
-				int partitionCompare = currentPartition.compareTo(getPartition(sources[sourceID].iter.getTopKey()));
-				// check if this source is already at or beyond currentRow
-				// if not, then seek to at least the current row
-				if(partitionCompare > 0)
-				{
-					// seek to at least the currentRow
-					Key seekKey = buildKey(currentPartition,sources[sourceID].term);
-					sources[sourceID].iter.seek(new Range(seekKey,true, null, false), seekColumnFamilies, inclusive);
-					continue;
-				}
-				// check if this source has gone beyond currentRow
-				// if so, advance currentRow
-				if(partitionCompare < 0)
-				{
-					currentPartition.set(getPartition(sources[sourceID].iter.getTopKey()));
-					currentDocID.set(emptyByteArray);
-					advancedCursor = true;
-					continue;
-				}
-				// we have verified that the current source is positioned in currentRow
-				// now we must make sure we're in the right columnFamily in the current row
-                // Note: Iterators are auto-magically set to the correct columnFamily
-
-                if(sources[sourceID].term != null)
-                {
-    				int termCompare = sources[sourceID].term.compareTo(getTerm(sources[sourceID].iter.getTopKey()));
-    				// check if this source is already on the right columnFamily
-    				// if not, then seek forwards to the right columnFamily
-    				if(termCompare > 0)
-    				{
-    					Key seekKey = buildKey(currentPartition,sources[sourceID].term,currentDocID);
-    					sources[sourceID].iter.seek(new Range(seekKey,true,null,false), seekColumnFamilies, inclusive);
-    					continue;
-    				}
-    				// check if this source is beyond the right columnFamily
-    				// if so, then seek to the next row
-    				if(termCompare < 0)
-    				{
-					    // we're out of entries in the current row, so seek to the next one
-                        //              byte[] currentRowBytes = currentRow.getBytes();
-                        //              byte[] nextRow = new byte[currentRowBytes.length + 1];
-                        //              System.arraycopy(currentRowBytes, 0, nextRow, 0, currentRowBytes.length);
-                        //              nextRow[currentRowBytes.length] = (byte)0;
-                        //              // we should reuse text objects here
-                        //              sources[sourceID].seek(new Key(new Text(nextRow),columnFamilies[sourceID]));
-    					if(endCompare == 0)
-    					{
-    						// we're done
-    						currentPartition = null;
-    						// setting currentRow to null counts as advancing the cursor
-    						return true;
-    					}
-    					Key seekKey = buildFollowingPartitionKey(sources[sourceID].iter.getTopKey());
-    					try {
-    						sources[sourceID].iter.seek(new Range(seekKey, true, null, false), seekColumnFamilies, inclusive);
-    					} catch (Exception e) {
-    						// the seek will throw an exception if we have crossed a tablet boundary
-    						// setting the Partition to null will advance to the next tablet
-    						currentPartition = null;
-    						return true;
-    					}
-    					continue;
-    				}
-                }
-				// we have verified that we are in currentRow and the correct column family
-				// make sure we are at or beyond columnQualifier
-				Text docID = getDocID(sources[sourceID].iter.getTopKey());
-				int docIDCompare = currentDocID.compareTo(docID);
-				// if this source has advanced beyond the current column qualifier then advance currentCQ and return true
-				if(docIDCompare < 0)
-				{
-					currentDocID.set(docID);
-					advancedCursor = true;
-					break;
-				}
-				// if this source is not yet at the currentCQ then seek in this source
-				if(docIDCompare > 0)
-				{
-					// seek forwards
-					Key seekKey = buildKey(currentPartition, sources[sourceID].term, currentDocID);
-					sources[sourceID].iter.seek(new Range(seekKey, true, null, false), seekColumnFamilies, inclusive);
-					continue;
-				}
-				// this source is at the current row, in its column family, and at currentCQ
-				break;
-			}
-		}
-		return advancedCursor;
-	}
-
-	@Override
-	public void next() throws IOException {
-		if(currentPartition == null)
-		{
-			return;
-		}
-		// precondition: the current row is set up and the sources all have the same column qualifier
-		// while we don't have a match, seek in the source with the smallest column qualifier
-		sources[0].iter.next();
-		advanceToIntersection();
-	}
-	
-	protected void advanceToIntersection() throws IOException
-	{
-		boolean cursorChanged = true;
-		int numSeeks = 0;
-		while(cursorChanged)
-		{
-			// seek all of the sources to at least the highest seen column qualifier in the current row
-			cursorChanged = false;
-			for(int i = 0; i < sourcesCount; i++)
-			{
-				if(currentPartition == null)
-				{
-					topKey = null;
-					return;
-				}
-				numSeeks++;
-				if(seekOneSource(i))
-				{
-					cursorChanged = true;
-					break;
-				}
-			}
-		}
-		topKey = buildKey(currentPartition,nullText,currentDocID);
-	}
-	
-	public static String stringTopKey(SortedKeyValueIterator<Key, Value> iter) {
-		if (iter.hasTop())
-			return iter.getTopKey().toString();
-		return "";
-	}
-	
-	public static final String columnFamiliesOptionName = "columnFamilies";
-	public static final String notFlagOptionName = "notFlag";
-
-	public static String encodeColumns(Text[] columns)
-	{
-		StringBuilder sb = new StringBuilder();
-		for(int i = 0; i < columns.length; i++)
-		{
-			sb.append(new String(Base64.encodeBase64(TextUtil.getBytes(columns[i]))));
-			sb.append('\n');
-		}
-		return sb.toString();
-	}
-
-	public static String encodeBooleans(boolean[] flags)
-	{
-		byte[] bytes = new byte[flags.length];
-		for(int i = 0; i < flags.length; i++)
-		{
-			if(flags[i])
-				bytes[i] = 1;
-			else
-				bytes[i] = 0;
-		}
-		return new String(Base64.encodeBase64(bytes));
-	}
-	
-	public static Text[] decodeColumns(String columns)
-	{
-		String[] columnStrings = columns.split("\n");
-		Text[] columnTexts = new Text[columnStrings.length];
-		for(int i = 0; i < columnStrings.length; i++)
-		{
-			columnTexts[i] = new Text(Base64.decodeBase64(columnStrings[i].getBytes()));
-		}
-		return columnTexts;
-	}
-
-	public static boolean[] decodeBooleans(String flags)
-	{
-		// return null if there were no flags
-		if(flags == null)
-			return null;				
-		
-		byte[] bytes = Base64.decodeBase64(flags.getBytes());
-		boolean[] bFlags = new boolean[bytes.length];
-		for(int i = 0; i < bytes.length; i++)
-		{
-			if(bytes[i] == 1)
-				bFlags[i] = true;
-			else
-				bFlags[i] = false;
-		}
-		return bFlags; 
-	}
-	
-    @Override
-	public void init(SortedKeyValueIterator<Key, Value> source,
-			Map<String, String> options, IteratorEnvironment env) throws IOException {
-		Text[] terms = decodeColumns(options.get(columnFamiliesOptionName));
-		boolean[] notFlag = decodeBooleans(options.get(notFlagOptionName));
-
-		if(terms.length < 2)
-		{
-			throw new IOException("IntersectionIterator requires two or more columns families");
-		}
-
-		// Scan the not flags.
-		// There must be at least one term that isn't negated
-		// And we are going to re-order such that the first term is not a ! term
-		if(notFlag == null)
-		{
-			notFlag = new boolean[terms.length];
-			for(int i = 0; i < terms.length; i++)
-				notFlag[i] = false;
-		}
-		if(notFlag[0]) {
-			for(int i = 1; i < notFlag.length; i++)
-			{				
-				if(notFlag[i] == false)
-				{
-					Text swapFamily = new Text(terms[0]);
-					terms[0].set(terms[i]);
-					terms[i].set(swapFamily);
-					notFlag[0] = false;
-					notFlag[i] = true;
-					break;
-				}
-			}
-			if(notFlag[0])
-			{
-				throw new IOException("IntersectionIterator requires at lest one column family without not");				
-			}
-		}
-		
-		
-		sources = new TermSource[terms.length];
-		sources[0] = new TermSource(source, terms[0]);
-		for(int i = 1; i < terms.length; i++)
-		{
-			sources[i] = new TermSource(source.deepCopy(env), terms[i], notFlag[i]);
-		}
-		sourcesCount = terms.length;
-	}
-
-	@Override
-	public void seek(Range range, Collection<ByteSequence> seekColumnFamilies, boolean inclusive) throws IOException {
-		overallRange = new Range(range);
-		currentPartition = new Text();
-		currentDocID.set(emptyByteArray);
-		
-		this.seekColumnFamilies = seekColumnFamilies;
-		this.inclusive = inclusive;
-		
-		// seek each of the sources to the right column family within the row given by key
-		for(int i = 0; i < sourcesCount; i++)
-		{
-			Key sourceKey;
-			if(range.getStartKey() != null)
-			{
-                if(range.getStartKey().getColumnQualifier() != null)
-                {
-                    sourceKey = buildKey(getPartition(range.getStartKey()),sources[i].term,range.getStartKey().getColumnQualifier());
-                }
-                else
-                {
-                    sourceKey = buildKey(getPartition(range.getStartKey()),sources[i].term);
-                }
-				sources[i].iter.seek(new Range(sourceKey, true, null, false), seekColumnFamilies, inclusive);
-			}
-			else
-			{
-				sources[i].iter.seek(range, seekColumnFamilies, inclusive);
-			}
-		}
-		advanceToIntersection();
-	}
-
-    public void addSource(SortedKeyValueIterator<Key, Value> source, IteratorEnvironment env, 
-            Text term, boolean notFlag) {
-        // Check if we have space for the added Source
-        if(sources == null)
-        {
-            sources = new TermSource[1];
-        }
-        else 
-        {
-            // allocate space for node, and copy current tree.
-            // TODO:  Should we change this to an ArrayList so that we can just add() ?
-            TermSource[] localSources = new TermSource[sources.length + 1];
-            int currSource = 0;
-            for(TermSource myTerm : sources)
-            {
-                // TODO:  Do I need to call new here? or can I just re-use the term?
-                localSources[currSource] = new TermSource(myTerm);
-                currSource++;
-            }
-            sources = localSources;
-        }
-        sources[sourcesCount] = new TermSource(source.deepCopy(env), term, notFlag);
-        sourcesCount++; 
-    }
-}
\ No newline at end of file

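The iterator above was configured through two Base64-encoded options. A sketch of
the client-side setup, assuming the usual Map<String, String> options plumbing
(java.util imports) and illustrative term values:

    // Intersect documents containing "alpha" and "bravo" but NOT "charlie".
    Text[] terms = { new Text("alpha"), new Text("bravo"), new Text("charlie") };
    boolean[] notFlags = { false, false, true };

    Map<String, String> options = new HashMap<String, String>();
    options.put(IntersectingIterator.columnFamiliesOptionName,
            IntersectingIterator.encodeColumns(terms));
    options.put(IntersectingIterator.notFlagOptionName,
            IntersectingIterator.encodeBooleans(notFlags));
    // init() decodes both options, re-orders the terms so the first one is not
    // negated, and rejects configurations with fewer than two terms.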
http://git-wip-us.apache.org/repos/asf/incubator-rya/blob/80faf06d/partition/common-query/src/main/java/ss/cloudbase/core/iterators/IntersectionRange.java
----------------------------------------------------------------------
diff --git a/partition/common-query/src/main/java/ss/cloudbase/core/iterators/IntersectionRange.java b/partition/common-query/src/main/java/ss/cloudbase/core/iterators/IntersectionRange.java
deleted file mode 100644
index 04d5884..0000000
--- a/partition/common-query/src/main/java/ss/cloudbase/core/iterators/IntersectionRange.java
+++ /dev/null
@@ -1,330 +0,0 @@
-package ss.cloudbase.core.iterators;
-
-import java.io.IOException;
-import java.util.Collection;
-import java.util.Collections;
-import java.util.Iterator;
-import java.util.Map;
-import java.util.Set;
-import java.util.SortedSet;
-import java.util.TreeSet;
-
-import org.apache.hadoop.io.Text;
-import org.apache.log4j.Level;
-import org.apache.log4j.Logger;
-
-import cloudbase.core.client.CBException;
-import cloudbase.core.data.ArrayByteSequence;
-import cloudbase.core.data.ByteSequence;
-import cloudbase.core.data.Key;
-import cloudbase.core.data.PartialKey;
-import cloudbase.core.data.Range;
-import cloudbase.core.data.Value;
-import cloudbase.core.iterators.IteratorEnvironment;
-import cloudbase.core.iterators.SortedKeyValueIterator;
-
-/**
- * When attempting to intersect a term which is a range (lowerval <= x <= upperval), the entire range
- * must first be scanned so that the document keys can be sorted before passing them up to the 
- * intersecting iterator of choice. 
- * 
- * @author William Wall (wawall)
- */
-public class IntersectionRange implements SortedKeyValueIterator<Key, Value>{
-	private static final Logger logger = Logger.getLogger(IntersectionRange.class);
-	
-	public static final String OPTION_OUTPUT_COLF = "outputColf";
-	public static final String OPTION_OUTPUT_TERM = "outputTerm";
-	public static final String OPTION_COLF = "columnFamily";
-	public static final String OPTION_LOWER_BOUND = "lower";
-	public static final String OPTION_UPPER_BOUND = "upper";
-	public static final String OPTION_DELIMITER = "delimiter";
-	public static final String OPTION_START_INCLUSIVE = "startInclusive";
-	public static final String OPTION_END_INCLUSIVE = "endInclusive";
-	public static final String OPTION_TEST_OUTOFMEM = "testOutOfMemory";
-	
-	protected SortedKeyValueIterator<Key, Value> source;
-	protected Text colf = null;
-	protected Text lower = null;
-	protected Text upper = null;
-	protected String delimiter = null;
-	protected String outputTerm = null;
-	protected Text outputColf = null;
-	protected Text currentPartition = null;
-	protected boolean startInclusive = true;
-	protected boolean endInclusive = false;
-	protected boolean testOutOfMemory = false;
-	
-	protected Key topKey = null;
-	
-	protected Iterator<Key> itr;
-	protected boolean sortComplete = false;
-	protected Range overallRange;
-	protected SortedSet<Key> docIds = new TreeSet<Key>();
-	protected static Set<ByteSequence> indexColfSet;
-	
-	@Override
-	public SortedKeyValueIterator<Key, Value> deepCopy(IteratorEnvironment env) {
-		return new IntersectionRange(this, env);
-	}
-	
-	public IntersectionRange() {
-		logger.setLevel(Level.ALL);
-	}
-	
-	public IntersectionRange(IntersectionRange other, IteratorEnvironment env) {
-		source = other.source.deepCopy(env);
-		colf = other.colf;
-		lower = other.lower;
-		upper = other.upper;
-		delimiter = other.delimiter;
-		outputColf = other.outputColf;
-		outputTerm = other.outputTerm;
-		currentPartition = other.currentPartition;
-		startInclusive = other.startInclusive;
-		endInclusive = other.endInclusive;
-		topKey = other.topKey;
-		docIds.addAll(other.docIds);
-		itr = docIds.iterator();
-		sortComplete = other.sortComplete;
-		overallRange = other.overallRange;
-	}
-	
-	public Text getOutputTerm() {
-		return new Text(outputTerm);
-	}
-	
-	public Text getOutputColumnFamily() {
-		return outputColf;
-	}
-
-	@Override
-	public Key getTopKey() {
-		return topKey;
-	}
-
-	@Override
-	public Value getTopValue() {
-		return IteratorConstants.emptyValue;
-	}
-
-	@Override
-	public boolean hasTop() {
-		try {
-			if (topKey == null) next();
-		} catch (IOException e) {
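-			// ignore: a failed lazy next() leaves topKey null, so hasTop() reports false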
-			
-		}
-		
-		return topKey != null;
-	}
-	
-	protected String getDocID(Key key) {
-		try {
-			String s = key.getColumnQualifier().toString();
-			int start = s.indexOf("\u0000") + 1;
-			int end = s.indexOf("\u0000", start);
-			if (end == -1) {
-				end = s.length();
-			}
-			return s.substring(start, end);
-		} catch (Exception e) {
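-			// malformed column qualifier; fall through and return null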
-			
-		}
-		return null;
-	}
-	
-	protected Text getTerm(Key key) {
-		try {
-			Text colq = key.getColumnQualifier(); 
-			Text term = new Text();
-			term.set(colq.getBytes(), 0, colq.find("\0"));
-			return term;
-		} catch (Exception e) {
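-			// malformed column qualifier; return null below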
-		}
-		return null;
-	}
-	
-	protected Text getPartition(Key key) {
-		return key.getRow();
-	}
-	
-	protected Text getFollowingPartition(Key key) {
-		return key.followingKey(PartialKey.ROW).getRow();
-	}
-	
-	@Override
-	public void init(SortedKeyValueIterator<Key, Value> source, Map<String, String> options, IteratorEnvironment env) throws IOException {
-		if (options.containsKey(OPTION_LOWER_BOUND)) {
-			lower = new Text(options.get(OPTION_LOWER_BOUND));
-		} else {
-			lower = new Text("\u0000");
-		}
-		
-		if (options.containsKey(OPTION_UPPER_BOUND)) {
-			upper = new Text(options.get(OPTION_UPPER_BOUND));
-		} else {
-			upper = new Text("\u0000");
-		}
-		
-		if (options.containsKey(OPTION_DELIMITER)) {
-			delimiter = options.get(OPTION_DELIMITER);
-		} else {
-			delimiter = "\u0000";
-		}
-		
-		if (options.containsKey(OPTION_COLF)) {
-			colf = new Text(options.get(OPTION_COLF));
-		} else {
-			colf = new Text("index");
-		}
-		
-		if (options.containsKey(OPTION_OUTPUT_COLF)) {
-			outputColf = new Text(options.get(OPTION_OUTPUT_COLF));
-		} else {
-			outputColf = colf;
-		}
-		
-		if (options.containsKey(OPTION_START_INCLUSIVE)) {
-			startInclusive = Boolean.parseBoolean(options.get(OPTION_START_INCLUSIVE));
-		}
-		
-		if (options.containsKey(OPTION_END_INCLUSIVE)) {
-			endInclusive = Boolean.parseBoolean(options.get(OPTION_END_INCLUSIVE));
-		}
-		
-		if (options.containsKey(OPTION_TEST_OUTOFMEM)) {
-			testOutOfMemory = Boolean.parseBoolean(options.get(OPTION_TEST_OUTOFMEM));
-		}
-		
-		outputTerm = options.get(OPTION_OUTPUT_TERM);
-		this.source = source;
-		
-		indexColfSet = Collections.singleton((ByteSequence) new ArrayByteSequence(colf.getBytes(),0,colf.getLength()));
-	}
-	
-	/**
-	 * Sets up the document/record IDs in a sorted structure. 
-	 * @throws IOException if the source cannot be read or the key sort runs out of memory
-	 */
-	protected void setUpDocIds() throws IOException {
-		int count = 0;
-		try {
-			if (testOutOfMemory) {
-				throw new OutOfMemoryError();
-			}
-			
-			long start = System.currentTimeMillis();
-			if (source.hasTop()) {
-				docIds.clear();
-				currentPartition = getPartition(source.getTopKey());
-				while (currentPartition != null) {
-					Key lowerKey = new Key(currentPartition, colf, lower);
-					try {
-						source.seek(new Range(lowerKey, true, null, false), indexColfSet, true);
-					} catch (IllegalArgumentException e) {
-						// the new seek range does not overlap the overall range, so quit
-						currentPartition = null;
-						break;
-					}
-					
-					// if we don't have a value then quit
-					if (!source.hasTop()) {
-						currentPartition = null;
-						break;
-					}
-					
-					Key top;
-					while(source.hasTop()) {
-						top = source.getTopKey();
-						
-						if (overallRange != null && overallRange.getEndKey() != null) {
-							// see if we're past the end of the partition range
-							int endCompare = overallRange.getEndKey().compareTo(top, PartialKey.ROW);
-							if ((!overallRange.isEndKeyInclusive() && endCompare <= 0) || endCompare < 0) {
-								// we're done
-								currentPartition = null;
-								break;
-							}
-						}
-						
-						// make sure we're still in the right partition
-						if (currentPartition.compareTo(getPartition(top)) < 0) {
-							currentPartition.set(getPartition(top));
-							break;
-						}
-						
-						// make sure we're still in the right column family
-						if (colf.compareTo(top.getColumnFamily()) < 0) {
-							// if not, then get the next partition
-							currentPartition = getFollowingPartition(top);
-							break;
-						}
-						
-						Text term = getTerm(top);
-						int lowerCompare = term.compareTo(lower);
-						int upperCompare = term.compareTo(upper);
-						
-						// if we went past the upper bound, jump to the next partition
-						if ((endInclusive && upperCompare > 0) || (!endInclusive && upperCompare >= 0)) {
-							currentPartition = getFollowingPartition(top);
-							break;
-						} else if ((startInclusive && lowerCompare >= 0) || (!startInclusive && lowerCompare > 0)) {
-							// if the term is lexicographically between the upper and lower bounds,
-							// then add the doc ID
-							docIds.add(buildOutputKey(top));
-							count++;
-						}
-						source.next();
-						
-						// make sure we check to see if we're at the end before potentially seeking back
-						if (!source.hasTop()) {
-							currentPartition = null;
-							break;
-						}
-					}
-				}
-				itr = docIds.iterator();
-				sortComplete = true;
-				logger.debug("setUpDocIds completed for " + lower + "<=" + colf + "<=" + upper + " in " + (System.currentTimeMillis() - start) + " ms. Count = " + count);
-			} else {
-				logger.warn("There appear to be no records on this tablet");
-			}
-		} catch (OutOfMemoryError e) {
-			logger.warn("OutOfMemory error: Count = " + count);
-			throw new IOException("OutOfMemory error while sorting keys");
-		}
-	}
-	
-	protected Key buildOutputKey(Key key) {
-		String id = getDocID(key);
-		return new Key(currentPartition, outputColf, new Text((outputTerm != null ? outputTerm: colf.toString()) + "\u0000" +id));
-	}
-
-	@Override
-	public void next() throws IOException {
-		if (itr != null && itr.hasNext()) {
-			topKey = itr.next();
-		} else {
-			topKey = null;
-		}
-	}
-
-	@Override
-	public void seek(Range range, Collection<ByteSequence> colfs, boolean inclusive) throws IOException {
-		if (!sortComplete) {
-			overallRange = range;
-			source.seek(range, colfs, inclusive);
-			setUpDocIds();
-		}
-		
-		if (range.getStartKey() != null) {
-			while (hasTop() && topKey.compareTo(range.getStartKey(), PartialKey.ROW_COLFAM_COLQUAL) < 0) {
-				next();
-			}
-		} else {
-			next();
-		}
-	}
-}

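IntersectionRange was driven entirely by its OPTION_* constants. A minimal sketch
of an options map for the bounded scan its javadoc describes (lowerval <= x <
upperval); all values here are illustrative:

    // Select documents whose "geo" term t satisfies "21332" <= t < "21333",
    // re-emitting them under the "index" column family that the intersecting
    // iterators expect.
    Map<String, String> options = new HashMap<String, String>();
    options.put(IntersectionRange.OPTION_COLF, "geo");
    options.put(IntersectionRange.OPTION_LOWER_BOUND, "21332");
    options.put(IntersectionRange.OPTION_START_INCLUSIVE, "true");
    options.put(IntersectionRange.OPTION_UPPER_BOUND, "21333");
    options.put(IntersectionRange.OPTION_END_INCLUSIVE, "false");
    options.put(IntersectionRange.OPTION_OUTPUT_COLF, "index");
    options.put(IntersectionRange.OPTION_OUTPUT_TERM, "geo");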
http://git-wip-us.apache.org/repos/asf/incubator-rya/blob/80faf06d/partition/common-query/src/main/java/ss/cloudbase/core/iterators/IteratorConstants.java
----------------------------------------------------------------------
diff --git a/partition/common-query/src/main/java/ss/cloudbase/core/iterators/IteratorConstants.java b/partition/common-query/src/main/java/ss/cloudbase/core/iterators/IteratorConstants.java
deleted file mode 100644
index 0db50f6..0000000
--- a/partition/common-query/src/main/java/ss/cloudbase/core/iterators/IteratorConstants.java
+++ /dev/null
@@ -1,11 +0,0 @@
-package ss.cloudbase.core.iterators;
-
-import org.apache.hadoop.io.Text;
-
-import cloudbase.core.data.Value;
-
-public class IteratorConstants {
-	public static final byte[] emptyByteArray = new byte[0];
-	public static final Value emptyValue = new Value(emptyByteArray);
-	public static final Text emptyText = new Text(emptyByteArray);
-}

http://git-wip-us.apache.org/repos/asf/incubator-rya/blob/80faf06d/partition/common-query/src/main/java/ss/cloudbase/core/iterators/SortedMinIterator.java
----------------------------------------------------------------------
diff --git a/partition/common-query/src/main/java/ss/cloudbase/core/iterators/SortedMinIterator.java b/partition/common-query/src/main/java/ss/cloudbase/core/iterators/SortedMinIterator.java
deleted file mode 100644
index c25cc72..0000000
--- a/partition/common-query/src/main/java/ss/cloudbase/core/iterators/SortedMinIterator.java
+++ /dev/null
@@ -1,173 +0,0 @@
-package ss.cloudbase.core.iterators;
-
-import java.io.IOException;
-import java.util.Map;
-import java.util.SortedSet;
-import java.util.TreeSet;
-
-import org.apache.hadoop.io.Text;
-import org.apache.log4j.Logger;
-
-import cloudbase.core.data.Key;
-import cloudbase.core.data.PartialKey;
-import cloudbase.core.data.Range;
-import cloudbase.core.data.Value;
-import cloudbase.core.iterators.IteratorEnvironment;
-import cloudbase.core.iterators.SortedKeyValueIterator;
-
-/**
- * Iterates over the minimum value of every term with the given prefix and parts delimiter. If, for example, you
- * wanted to find each person's last known position, you would set up the following index:
- * 
- * We want the last date instead of the first, so we'll use reverseDate in our index
- * partitionX index:&lt;prefix&gt;_&lt;personID&gt;_&lt;reverseDate&gt;.&lt;recordID&gt;
- * 
- * (where "." is actually "\u0000")
- * 
- * <code>SortedMinIterator</code> initially seeks to index:prefix in the first partition. From there, it grabs the record
- * as the "document" and then seeks to index:<whatever-the-term-was-up-to-last-delimiter> + "\uFFFD" (last unicode
- * character), which then puts it at the next persion ID in our example.
- * 
- * NOTE that this iterator gives a unique result per tablet server. You may have to process the results to determine
- * the true minimum value.
- * 
- * @author William Wall (wawall)
- */
-public class SortedMinIterator extends SortedRangeIterator {
-	private static final Logger logger = Logger.getLogger(SortedMinIterator.class);
-	
-	/** 
-	 * The option to supply a prefix to the term combination. Defaults to "min"
-	 */
-	public static final String OPTION_PREFIX = "prefix";
-	
-	/**
-	 * The delimiter for the term (note that this must be different from the delimiter between the term and the record ID). Defaults to "_"
-	 */
-	public static final String OPTION_PARTS_DELIMITER = "partsDelimiter";
-	
-	protected String prefix = "min";
-	protected String partsDelimiter = "_";
-	protected boolean firstKey = true;
-	protected String lastPart = null;
-	
-	@Override
-	public void init(SortedKeyValueIterator<Key, Value> source, Map<String, String> options, IteratorEnvironment env) throws IOException {
-		super.init(source, options, env);
-		
-		String p = options.get(OPTION_PREFIX);
-		if (p != null) prefix = p;
-		String s = options.get(OPTION_PARTS_DELIMITER);
-		if (s != null) partsDelimiter = s;
-		// the field defaults ("min" and "_") stand when the options are unset
-		lower = new Text(prefix);
-	}
-	
-	protected String getPrefix(Key key) {
-		String s = key.getColumnQualifier().toString();
-		int i = s.indexOf(partsDelimiter);
-		if (i > 0) {
-			return s.substring(0, i + partsDelimiter.length());
-		}
-		return null;
-	}
-	
-	protected String getPart(Key key) {
-		String s = key.getColumnQualifier().toString();
-		int i = s.lastIndexOf(partsDelimiter);
-		if (i > 0) {
-			return s.substring(0, i + 1);
-		}
-		return null;
-	}
-
-	@Override
-	protected void setUpDocIds() throws IOException {
-		int count = 0;
-		try {
-			if (testOutOfMemory) {
-				throw new OutOfMemoryError();
-			}
-			
-			long start = System.currentTimeMillis();
-			if (source.hasTop()) {
-				SortedSet<Key> docIds = new TreeSet<Key>();
-				currentPartition = getPartition(source.getTopKey());
-				while (currentPartition != null) {
-					// seek to the prefix (aka lower)
-					Key lowerKey = new Key(currentPartition, colf, lower);
-					source.seek(new Range(lowerKey, true, null, false), indexColfSet, true);
-					
-					// if we don't have a value then quit
-					if (!source.hasTop()) {
-						currentPartition = null;
-					}
-					
-					Key top;
-					while(source.hasTop()) {
-						top = source.getTopKey();
-						
-						if (overallRange != null && overallRange.getEndKey() != null) {
-							// see if we're past the end of the partition range
-							int endCompare = overallRange.getEndKey().compareTo(top, PartialKey.ROW);
-							if ((!overallRange.isEndKeyInclusive() && endCompare <= 0) || endCompare < 0) {
-								// we're done
-								currentPartition = null;
-								break;
-							}
-						}
-						
-						// make sure we're still in the right partition
-						if (currentPartition.compareTo(getPartition(top)) < 0) {
-							currentPartition.set(getPartition(top));
-							break;
-						}
-						
-						// make sure we're still in the right column family
-						if (colf.compareTo(top.getColumnFamily()) < 0) {
-							// if not, then get the next partition
-							currentPartition = getFollowingPartition(top);
-							break;
-						}
-						
-						// make sure we're still in the index prefix
-						String p = getPrefix(top);
-						String part = getPart(top);
-						
-						if (p != null && p.startsWith(prefix)) {
-							if (part != null) {
-								if (!part.equals(lastPart)) {
-									// if the part (e.g. "lastPosition_personId_") is different, then it's valid
-									lastPart = part;
-									docIds.add(buildOutputKey(top));
-									count++;
-								}
-								
-								// seek to the next part
-								lowerKey = new Key(currentPartition, colf, new Text(part + "\uFFFD"));
-								source.seek(new Range(lowerKey, true, null, false), indexColfSet, true);
-							}
-						} else {
-							// we're done in this partition
-							currentPartition = getFollowingPartition(top);
-							break;
-						}
-						
-						// make sure we check to see if we're at the end before potentially seeking back
-						if (!source.hasTop()) {
-							currentPartition = null;
-							break;
-						}
-					}
-				}
-				itr = docIds.iterator();
-				sortComplete = true;
-				logger.debug("setUpDocIds completed in " + (System.currentTimeMillis() - start) + " ms. Count = " + count);
-			} else {
-				logger.warn("There appear to be no records on this tablet");
-			}
-		} catch (OutOfMemoryError e) {
-			logger.warn("OutOfMemory error: Count = " + count);
-			throw new IOException("OutOfMemory error while sorting keys");
-		}
-	}
-}

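The "reverseDate" scheme in the javadoc above works because the newest timestamp
must sort first under the shared <prefix>_<personID>_ part. One common encoding
(assumed here; the ingest side is not part of this diff) subtracts the timestamp
from Long.MAX_VALUE and zero-pads it, using the same Key/Text types as the class
above:

    // Index entry for a person's latest position, per the javadoc layout:
    //   partitionX index:<prefix>_<personID>_<reverseDate>\u0000<recordID>
    long now = System.currentTimeMillis();
    String reverseDate = String.format("%019d", Long.MAX_VALUE - now);
    Text colq = new Text("lastPosition_person42_" + reverseDate + "\u0000rec-1");
    Key indexKey = new Key(new Text("partitionX"), new Text("index"), colq);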
http://git-wip-us.apache.org/repos/asf/incubator-rya/blob/80faf06d/partition/common-query/src/main/java/ss/cloudbase/core/iterators/SortedRangeIterator.java
----------------------------------------------------------------------
diff --git a/partition/common-query/src/main/java/ss/cloudbase/core/iterators/SortedRangeIterator.java b/partition/common-query/src/main/java/ss/cloudbase/core/iterators/SortedRangeIterator.java
deleted file mode 100644
index 4541230..0000000
--- a/partition/common-query/src/main/java/ss/cloudbase/core/iterators/SortedRangeIterator.java
+++ /dev/null
@@ -1,136 +0,0 @@
-package ss.cloudbase.core.iterators;
-
-import java.io.IOException;
-import java.util.Collections;
-import java.util.Map;
-import java.util.Set;
-
-import org.apache.hadoop.io.Text;
-import org.apache.log4j.Logger;
-
-import cloudbase.core.data.ArrayByteSequence;
-import cloudbase.core.data.ByteSequence;
-import cloudbase.core.data.Key;
-import cloudbase.core.data.PartialKey;
-import cloudbase.core.data.Range;
-import cloudbase.core.data.Value;
-import cloudbase.core.iterators.IteratorEnvironment;
-import cloudbase.core.iterators.SortedKeyValueIterator;
-
-/**
- * <code>SortedRangeIterator</code> uses the insertion sort functionality of <code>IntersectionRange</code>
- * to store off document keys rather than term keys.
- *  
- * @author William Wall (wawall)
- */
-public class SortedRangeIterator extends IntersectionRange {
-	private static final Logger logger = Logger.getLogger(SortedRangeIterator.class);
-	
-	/** Use this option to set the document column family. Defaults to "event". **/
-	public static final String OPTION_DOC_COLF = "docColf";
-	
-	/** 
-	 * Use this option to retrieve all the documents that match the UUID rather than just the first. This 
-	 * is commonly used in cell-level security models that use the column-qualifier like this:
-	 * UUID \0 field1 [] value
-	 * UUID \0 securedField [ALPHA] secretValue
-	 **/
-	public static final String OPTION_MULTI_DOC = "multiDoc";
-	
-	/** The source document iterator **/
-	protected SortedKeyValueIterator<Key, Value> docSource;
-	
-	/** The document column family. Defaults to "event". **/
-	protected Text docColf;
-	protected Value docValue;
-	
-	protected boolean nextId = false;
-	protected Range docRange = null;
-	protected boolean multiDoc;
-	
-	protected Set<ByteSequence> docColfSet;
-		
-	@Override
-	public void next() throws IOException {
-		if (multiDoc && nextId) {
-			docSource.next();
-			
-			// check to make sure that the docSource top is less than our max key
-			if (docSource.hasTop() && docRange.contains(docSource.getTopKey())) {
-				topKey = docSource.getTopKey();
-				docValue = docSource.getTopValue();
-				return;
-			}
-		}
-		
-		super.next();
-		
-		// if we're looking for multiple documents in the doc source, then
-		// set the max key for our range check
-		if (topKey != null) {
-			Text row = topKey.getRow();
-			Text colf = topKey.getColumnFamily();
-			if (multiDoc) {
-				docRange = new Range(
-					new Key (row, colf, new Text(topKey.getColumnQualifier().toString())),
-					true,
-					new Key (row, colf, new Text(topKey.getColumnQualifier().toString() + "\u0000\uFFFD")),
-					true
-				);
-			} else {
-				docRange = new Range(new Key (row, colf, new Text(topKey.getColumnQualifier().toString())),true, null, false);
-			}
-		}
-		
-		nextId = false;
-		getDocument();
-	}
-
-	@Override
-	public Value getTopValue() {
-		return docValue;
-	}
-
-	@Override
-	protected Key buildOutputKey(Key key) {
-		// we want to build the document key as the output key
-		return new Key(currentPartition, docColf, new Text(getDocID(key)));
-	}
-
-	protected void getDocument() throws IOException {
-		// look up the document value
-		if (topKey != null) {
-			docSource.seek(docRange, docColfSet, true);
-			
-			if (docSource.hasTop() && docRange.contains(docSource.getTopKey())) {
-				// found it!
-				topKey = docSource.getTopKey();
-				docValue = docSource.getTopValue();
-				nextId = true;
-			} else {
-				// does not exist or user had auths that could see the index but not the event
-				logger.warn("Document: " + topKey + " does not exist or user had auths for " + colf + " but not " + docColf);
-				docValue = IteratorConstants.emptyValue;
-			}
-		}
-	}
-
-	@Override
-	public void init(SortedKeyValueIterator<Key, Value> source, Map<String, String> options, IteratorEnvironment env) throws IOException {
-		super.init(source, options, env);
-		docSource = source.deepCopy(env);
-		if (options.containsKey(OPTION_DOC_COLF)) {
-			docColf = new Text(options.get(OPTION_DOC_COLF));
-		} else {
-			docColf = new Text("event");
-		}
-		
-		if (options.containsKey(OPTION_MULTI_DOC)) {
-			multiDoc = Boolean.parseBoolean(options.get(OPTION_MULTI_DOC));
-		} else {
-			multiDoc = false;
-		}
-		
-		docColfSet = Collections.singleton((ByteSequence) new ArrayByteSequence(docColf.getBytes(), 0, docColf.getLength()));
-	}
-}

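SortedRangeIterator layered two options on top of IntersectionRange's. A sketch of
the multi-document configuration described in the OPTION_MULTI_DOC comment; the
column family names and bounds are illustrative:

    Map<String, String> options = new HashMap<String, String>();
    // range bounds are inherited from IntersectionRange
    options.put(IntersectionRange.OPTION_COLF, "index");
    options.put(IntersectionRange.OPTION_LOWER_BOUND, "21332");
    options.put(IntersectionRange.OPTION_UPPER_BOUND, "21333");
    // fetch the documents themselves from the "event" column family, returning
    // every UUID\0field row for a match rather than just the first
    options.put(SortedRangeIterator.OPTION_DOC_COLF, "event");
    options.put(SortedRangeIterator.OPTION_MULTI_DOC, "true");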
http://git-wip-us.apache.org/repos/asf/incubator-rya/blob/80faf06d/partition/common-query/src/main/java/ss/cloudbase/core/iterators/UniqueIterator.java
----------------------------------------------------------------------
diff --git a/partition/common-query/src/main/java/ss/cloudbase/core/iterators/UniqueIterator.java b/partition/common-query/src/main/java/ss/cloudbase/core/iterators/UniqueIterator.java
deleted file mode 100644
index 2111bbd..0000000
--- a/partition/common-query/src/main/java/ss/cloudbase/core/iterators/UniqueIterator.java
+++ /dev/null
@@ -1,95 +0,0 @@
-package ss.cloudbase.core.iterators;
-
-import java.io.IOException;
-import java.util.Collection;
-import java.util.Collections;
-import java.util.Map;
-
-import cloudbase.core.data.ByteSequence;
-import cloudbase.core.data.Key;
-import cloudbase.core.data.PartialKey;
-import cloudbase.core.data.Range;
-import cloudbase.core.data.Value;
-import cloudbase.core.iterators.IteratorEnvironment;
-import cloudbase.core.iterators.SkippingIterator;
-import cloudbase.core.iterators.SortedKeyValueIterator;
-import cloudbase.core.iterators.WrappingIterator;
-
-/**
- * This iterator gets unique keys by the given depth. The depth defaults to PartialKey.ROW_COLFAM.
- * 
- * @author William Wall
- */
-public class UniqueIterator extends WrappingIterator {
-	public static final String OPTION_DEPTH = "depth";
-	private static final Collection<ByteSequence> EMPTY_SET = Collections.emptySet();
-	protected PartialKey depth;
-	protected Range range;
-	protected Key lastKey = null;
-	
-	public UniqueIterator() {}
-	
-	public UniqueIterator(UniqueIterator other) {
-		this.depth = other.depth;
-		this.range = other.range;
-		this.lastKey = other.lastKey;
-	}
-	
-	@Override
-	public void next() throws IOException {
-		consume();
-	}
-
-	protected void consume() throws IOException {
-		if (lastKey != null) {
-			int count = 0;
-			// next is way faster, so we'll try doing that 10 times before seeking 
-			while (getSource().hasTop() && getSource().getTopKey().compareTo(lastKey, depth) == 0 && count < 10) {
-				getSource().next();
-				count++;
-			}
-			if (getSource().hasTop() && getSource().getTopKey().compareTo(lastKey, depth) == 0) {
-				reseek(getSource().getTopKey().followingKey(depth));
-			}
-		}
-		
-		if (getSource().hasTop()) {
-			lastKey = getSource().getTopKey();
-		}
-	}
-	
-	protected void reseek(Key key) throws IOException {
-		if (range.afterEndKey(key)) {
-			range = new Range(range.getEndKey(), true, range.getEndKey(), range.isEndKeyInclusive());
-		} else {
-			range = new Range(key, true, range.getEndKey(), range.isEndKeyInclusive());
-		}
-		getSource().seek(range, EMPTY_SET, false);
-	}
-
-	
-	@Override
-	public void init(SortedKeyValueIterator<Key, Value> source, Map<String, String> options, IteratorEnvironment env) throws IOException {
-		super.init(source, options, env);
-		
-		if (options.containsKey(OPTION_DEPTH)) {
-			depth = PartialKey.getByDepth(Integer.parseInt(options.get(OPTION_DEPTH)));
-		} else {
-			depth = PartialKey.ROW_COLFAM;
-		}
-	}
-
-	@Override
-	public SortedKeyValueIterator<Key, Value> deepCopy(IteratorEnvironment env) {
-		UniqueIterator u = new UniqueIterator(this);
-		u.setSource(getSource().deepCopy(env));
-		return u;
-	}
-	
-	@Override
-	public void seek(Range range, Collection<ByteSequence> columnFamilies, boolean inclusive) throws IOException {
-		this.range = range;
-		getSource().seek(range, columnFamilies, inclusive);
-		consume();
-	}
-}

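UniqueIterator's single option selected how much of the key participates in the
uniqueness check. A minimal sketch, assuming PartialKey.getByDepth() numbers the
depths from 1 with ROW first, as the init() above implies:

    // Deduplicate on row alone (depth 1) instead of the default row + column
    // family (PartialKey.ROW_COLFAM).
    Map<String, String> options = new HashMap<String, String>();
    options.put(UniqueIterator.OPTION_DEPTH, "1");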


[41/56] [abbrv] incubator-rya git commit: RYA-7 POM and License Clean-up for Apache Move

Posted by mi...@apache.org.
http://git-wip-us.apache.org/repos/asf/incubator-rya/blob/80faf06d/extras/indexing/src/main/java/mvm/rya/accumulo/documentIndex/TextColumn.java
----------------------------------------------------------------------
diff --git a/extras/indexing/src/main/java/mvm/rya/accumulo/documentIndex/TextColumn.java b/extras/indexing/src/main/java/mvm/rya/accumulo/documentIndex/TextColumn.java
index 9747f60..661f62b 100644
--- a/extras/indexing/src/main/java/mvm/rya/accumulo/documentIndex/TextColumn.java
+++ b/extras/indexing/src/main/java/mvm/rya/accumulo/documentIndex/TextColumn.java
@@ -1,5 +1,25 @@
 package mvm.rya.accumulo.documentIndex;
 
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ * 
+ *   http://www.apache.org/licenses/LICENSE-2.0
+ * 
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied.  See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+
+
 import org.apache.hadoop.io.Text;
 
 public class TextColumn  {

http://git-wip-us.apache.org/repos/asf/incubator-rya/blob/80faf06d/extras/indexing/src/main/java/mvm/rya/accumulo/mr/NullFreeTextIndexer.java
----------------------------------------------------------------------
diff --git a/extras/indexing/src/main/java/mvm/rya/accumulo/mr/NullFreeTextIndexer.java b/extras/indexing/src/main/java/mvm/rya/accumulo/mr/NullFreeTextIndexer.java
index 11d58c5..3d005cf 100644
--- a/extras/indexing/src/main/java/mvm/rya/accumulo/mr/NullFreeTextIndexer.java
+++ b/extras/indexing/src/main/java/mvm/rya/accumulo/mr/NullFreeTextIndexer.java
@@ -1,25 +1,26 @@
 package mvm.rya.accumulo.mr;
 
 /*
- * #%L
- * mvm.rya.indexing.accumulo
- * %%
- * Copyright (C) 2014 Rya
- * %%
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * You may obtain a copy of the License at
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
  * 
- *      http://www.apache.org/licenses/LICENSE-2.0
+ *   http://www.apache.org/licenses/LICENSE-2.0
  * 
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- * #L%
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied.  See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
  */
 
+
+
 import info.aduna.iteration.CloseableIteration;
 
 import java.io.IOException;

http://git-wip-us.apache.org/repos/asf/incubator-rya/blob/80faf06d/extras/indexing/src/main/java/mvm/rya/accumulo/mr/NullGeoIndexer.java
----------------------------------------------------------------------
diff --git a/extras/indexing/src/main/java/mvm/rya/accumulo/mr/NullGeoIndexer.java b/extras/indexing/src/main/java/mvm/rya/accumulo/mr/NullGeoIndexer.java
index 0629adb..b351c13 100644
--- a/extras/indexing/src/main/java/mvm/rya/accumulo/mr/NullGeoIndexer.java
+++ b/extras/indexing/src/main/java/mvm/rya/accumulo/mr/NullGeoIndexer.java
@@ -1,25 +1,26 @@
 package mvm.rya.accumulo.mr;
 
 /*
- * #%L
- * mvm.rya.indexing.accumulo
- * %%
- * Copyright (C) 2014 Rya
- * %%
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * You may obtain a copy of the License at
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
  * 
- *      http://www.apache.org/licenses/LICENSE-2.0
+ *   http://www.apache.org/licenses/LICENSE-2.0
  * 
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- * #L%
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied.  See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
  */
 
+
+
 import info.aduna.iteration.CloseableIteration;
 
 import java.io.IOException;
@@ -117,4 +118,4 @@ public class NullGeoIndexer extends AbstractAccumuloIndexer implements GeoIndexe
         return null;
     }
 
-}
\ No newline at end of file
+}

http://git-wip-us.apache.org/repos/asf/incubator-rya/blob/80faf06d/extras/indexing/src/main/java/mvm/rya/accumulo/mr/NullTemporalIndexer.java
----------------------------------------------------------------------
diff --git a/extras/indexing/src/main/java/mvm/rya/accumulo/mr/NullTemporalIndexer.java b/extras/indexing/src/main/java/mvm/rya/accumulo/mr/NullTemporalIndexer.java
index cf83178..153a3c3 100644
--- a/extras/indexing/src/main/java/mvm/rya/accumulo/mr/NullTemporalIndexer.java
+++ b/extras/indexing/src/main/java/mvm/rya/accumulo/mr/NullTemporalIndexer.java
@@ -1,5 +1,25 @@
 package mvm.rya.accumulo.mr;
 
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ * 
+ *   http://www.apache.org/licenses/LICENSE-2.0
+ * 
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied.  See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+
+
 import info.aduna.iteration.CloseableIteration;
 
 import java.io.IOException;

http://git-wip-us.apache.org/repos/asf/incubator-rya/blob/80faf06d/extras/indexing/src/main/java/mvm/rya/accumulo/mr/RyaOutputFormat.java
----------------------------------------------------------------------
diff --git a/extras/indexing/src/main/java/mvm/rya/accumulo/mr/RyaOutputFormat.java b/extras/indexing/src/main/java/mvm/rya/accumulo/mr/RyaOutputFormat.java
index 1e26626..8a0d599 100644
--- a/extras/indexing/src/main/java/mvm/rya/accumulo/mr/RyaOutputFormat.java
+++ b/extras/indexing/src/main/java/mvm/rya/accumulo/mr/RyaOutputFormat.java
@@ -1,25 +1,26 @@
 package mvm.rya.accumulo.mr;
 
 /*
- * #%L
- * mvm.rya.indexing.accumulo
- * %%
- * Copyright (C) 2014 Rya
- * %%
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * You may obtain a copy of the License at
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
  * 
- *      http://www.apache.org/licenses/LICENSE-2.0
+ *   http://www.apache.org/licenses/LICENSE-2.0
  * 
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- * #L%
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied.  See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
  */
 
+
+
 import java.io.Closeable;
 import java.io.Flushable;
 import java.io.IOException;

http://git-wip-us.apache.org/repos/asf/incubator-rya/blob/80faf06d/extras/indexing/src/main/java/mvm/rya/accumulo/mr/StatementWritable.java
----------------------------------------------------------------------
diff --git a/extras/indexing/src/main/java/mvm/rya/accumulo/mr/StatementWritable.java b/extras/indexing/src/main/java/mvm/rya/accumulo/mr/StatementWritable.java
index 5c43687..aefdf74 100644
--- a/extras/indexing/src/main/java/mvm/rya/accumulo/mr/StatementWritable.java
+++ b/extras/indexing/src/main/java/mvm/rya/accumulo/mr/StatementWritable.java
@@ -1,25 +1,26 @@
 package mvm.rya.accumulo.mr;
 
 /*
- * #%L
- * mvm.rya.indexing.accumulo
- * %%
- * Copyright (C) 2014 Rya
- * %%
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * You may obtain a copy of the License at
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
  * 
- *      http://www.apache.org/licenses/LICENSE-2.0
+ *   http://www.apache.org/licenses/LICENSE-2.0
  * 
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- * #L%
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied.  See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
  */
 
+
+
 import java.io.DataInput;
 import java.io.DataOutput;
 import java.io.IOException;

http://git-wip-us.apache.org/repos/asf/incubator-rya/blob/80faf06d/extras/indexing/src/main/java/mvm/rya/accumulo/mr/fileinput/BulkNtripsInputToolIndexing.java
----------------------------------------------------------------------
diff --git a/extras/indexing/src/main/java/mvm/rya/accumulo/mr/fileinput/BulkNtripsInputToolIndexing.java b/extras/indexing/src/main/java/mvm/rya/accumulo/mr/fileinput/BulkNtripsInputToolIndexing.java
index 1b469cd..ecc2354 100644
--- a/extras/indexing/src/main/java/mvm/rya/accumulo/mr/fileinput/BulkNtripsInputToolIndexing.java
+++ b/extras/indexing/src/main/java/mvm/rya/accumulo/mr/fileinput/BulkNtripsInputToolIndexing.java
@@ -1,25 +1,26 @@
 package mvm.rya.accumulo.mr.fileinput;
 
 /*
- * #%L
- * mvm.rya.indexing.accumulo
- * %%
- * Copyright (C) 2014 Rya
- * %%
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * You may obtain a copy of the License at
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
  * 
- *      http://www.apache.org/licenses/LICENSE-2.0
+ *   http://www.apache.org/licenses/LICENSE-2.0
  * 
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- * #L%
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied.  See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
  */
 
+
+
 import static com.google.common.base.Preconditions.checkNotNull;
 
 import java.io.IOException;

http://git-wip-us.apache.org/repos/asf/incubator-rya/blob/80faf06d/extras/indexing/src/main/java/mvm/rya/accumulo/mr/fileinput/RyaBatchWriterInputTool.java
----------------------------------------------------------------------
diff --git a/extras/indexing/src/main/java/mvm/rya/accumulo/mr/fileinput/RyaBatchWriterInputTool.java b/extras/indexing/src/main/java/mvm/rya/accumulo/mr/fileinput/RyaBatchWriterInputTool.java
index d85ffd8..fb80804 100644
--- a/extras/indexing/src/main/java/mvm/rya/accumulo/mr/fileinput/RyaBatchWriterInputTool.java
+++ b/extras/indexing/src/main/java/mvm/rya/accumulo/mr/fileinput/RyaBatchWriterInputTool.java
@@ -1,25 +1,26 @@
 package mvm.rya.accumulo.mr.fileinput;
 
 /*
- * #%L
- * mvm.rya.indexing.accumulo
- * %%
- * Copyright (C) 2014 Rya
- * %%
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * You may obtain a copy of the License at
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
  * 
- *      http://www.apache.org/licenses/LICENSE-2.0
+ *   http://www.apache.org/licenses/LICENSE-2.0
  * 
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- * #L%
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied.  See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
  */
 
+
+
 import static com.google.common.base.Preconditions.checkNotNull;
 
 import java.io.IOException;

http://git-wip-us.apache.org/repos/asf/incubator-rya/blob/80faf06d/extras/indexing/src/main/java/mvm/rya/accumulo/precompQuery/AccumuloPrecompQueryIndexer.java
----------------------------------------------------------------------
diff --git a/extras/indexing/src/main/java/mvm/rya/accumulo/precompQuery/AccumuloPrecompQueryIndexer.java b/extras/indexing/src/main/java/mvm/rya/accumulo/precompQuery/AccumuloPrecompQueryIndexer.java
index 9612bce..86cb73e 100644
--- a/extras/indexing/src/main/java/mvm/rya/accumulo/precompQuery/AccumuloPrecompQueryIndexer.java
+++ b/extras/indexing/src/main/java/mvm/rya/accumulo/precompQuery/AccumuloPrecompQueryIndexer.java
@@ -1,5 +1,25 @@
 package mvm.rya.accumulo.precompQuery;
 
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ * 
+ *   http://www.apache.org/licenses/LICENSE-2.0
+ * 
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied.  See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+
+
 import info.aduna.iteration.CloseableIteration;
 
 import java.io.IOException;

http://git-wip-us.apache.org/repos/asf/incubator-rya/blob/80faf06d/extras/indexing/src/main/java/mvm/rya/indexing/DocIdIndexer.java
----------------------------------------------------------------------
diff --git a/extras/indexing/src/main/java/mvm/rya/indexing/DocIdIndexer.java b/extras/indexing/src/main/java/mvm/rya/indexing/DocIdIndexer.java
index 354c025..21d5de7 100644
--- a/extras/indexing/src/main/java/mvm/rya/indexing/DocIdIndexer.java
+++ b/extras/indexing/src/main/java/mvm/rya/indexing/DocIdIndexer.java
@@ -1,22 +1,36 @@
 package mvm.rya.indexing;
 
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ * 
+ *   http://www.apache.org/licenses/LICENSE-2.0
+ * 
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied.  See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+
+
 import info.aduna.iteration.CloseableIteration;
 
 import java.io.Closeable;
-import java.io.Flushable;
 import java.io.IOException;
 import java.util.Collection;
-import java.util.List;
-import java.util.Map;
 
 import mvm.rya.indexing.accumulo.entity.StarQuery;
-import mvm.rya.indexing.external.tupleSet.AccumuloIndexSet.AccValueFactory;
 
 import org.apache.accumulo.core.client.TableNotFoundException;
-import org.openrdf.model.Value;
 import org.openrdf.query.BindingSet;
 import org.openrdf.query.QueryEvaluationException;
-import org.openrdf.query.algebra.TupleExpr;
 
 public interface DocIdIndexer extends Closeable {
 

http://git-wip-us.apache.org/repos/asf/incubator-rya/blob/80faf06d/extras/indexing/src/main/java/mvm/rya/indexing/FilterFunctionOptimizer.java
----------------------------------------------------------------------
diff --git a/extras/indexing/src/main/java/mvm/rya/indexing/FilterFunctionOptimizer.java b/extras/indexing/src/main/java/mvm/rya/indexing/FilterFunctionOptimizer.java
index 8d258b4..5d2678b 100644
--- a/extras/indexing/src/main/java/mvm/rya/indexing/FilterFunctionOptimizer.java
+++ b/extras/indexing/src/main/java/mvm/rya/indexing/FilterFunctionOptimizer.java
@@ -1,5 +1,25 @@
 package mvm.rya.indexing;
 
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ * 
+ *   http://www.apache.org/licenses/LICENSE-2.0
+ * 
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied.  See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+
+
 import java.io.IOException;
 import java.util.ArrayList;
 import java.util.Collection;

http://git-wip-us.apache.org/repos/asf/incubator-rya/blob/80faf06d/extras/indexing/src/main/java/mvm/rya/indexing/FreeTextIndexer.java
----------------------------------------------------------------------
diff --git a/extras/indexing/src/main/java/mvm/rya/indexing/FreeTextIndexer.java b/extras/indexing/src/main/java/mvm/rya/indexing/FreeTextIndexer.java
index 646fe5b..2d8bae9 100644
--- a/extras/indexing/src/main/java/mvm/rya/indexing/FreeTextIndexer.java
+++ b/extras/indexing/src/main/java/mvm/rya/indexing/FreeTextIndexer.java
@@ -1,25 +1,26 @@
 package mvm.rya.indexing;
 
 /*
- * #%L
- * mvm.rya.indexing.accumulo
- * %%
- * Copyright (C) 2014 Rya
- * %%
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * You may obtain a copy of the License at
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
  * 
- *      http://www.apache.org/licenses/LICENSE-2.0
+ *   http://www.apache.org/licenses/LICENSE-2.0
  * 
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- * #L%
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied.  See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
  */
 
+
+
 import info.aduna.iteration.CloseableIteration;
 
 import java.io.IOException;
@@ -58,4 +59,4 @@ public interface FreeTextIndexer extends RyaSecondaryIndexer {
 
 	@Override
 	public abstract void close() throws IOException;
-}
\ No newline at end of file
+}

http://git-wip-us.apache.org/repos/asf/incubator-rya/blob/80faf06d/extras/indexing/src/main/java/mvm/rya/indexing/GeoIndexer.java
----------------------------------------------------------------------
diff --git a/extras/indexing/src/main/java/mvm/rya/indexing/GeoIndexer.java b/extras/indexing/src/main/java/mvm/rya/indexing/GeoIndexer.java
index 9ef2cce..7c04903 100644
--- a/extras/indexing/src/main/java/mvm/rya/indexing/GeoIndexer.java
+++ b/extras/indexing/src/main/java/mvm/rya/indexing/GeoIndexer.java
@@ -1,25 +1,26 @@
 package mvm.rya.indexing;
 
 /*
- * #%L
- * mvm.rya.indexing.accumulo
- * %%
- * Copyright (C) 2014 Rya
- * %%
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * You may obtain a copy of the License at
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
  * 
- *      http://www.apache.org/licenses/LICENSE-2.0
+ *   http://www.apache.org/licenses/LICENSE-2.0
  * 
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- * #L%
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied.  See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
  */
 
+
+
 import info.aduna.iteration.CloseableIteration;
 
 import java.io.IOException;

http://git-wip-us.apache.org/repos/asf/incubator-rya/blob/80faf06d/extras/indexing/src/main/java/mvm/rya/indexing/IndexPlanValidator/ExternalIndexMatcher.java
----------------------------------------------------------------------
diff --git a/extras/indexing/src/main/java/mvm/rya/indexing/IndexPlanValidator/ExternalIndexMatcher.java b/extras/indexing/src/main/java/mvm/rya/indexing/IndexPlanValidator/ExternalIndexMatcher.java
index eb0a397..ee3d444 100644
--- a/extras/indexing/src/main/java/mvm/rya/indexing/IndexPlanValidator/ExternalIndexMatcher.java
+++ b/extras/indexing/src/main/java/mvm/rya/indexing/IndexPlanValidator/ExternalIndexMatcher.java
@@ -1,10 +1,26 @@
 package mvm.rya.indexing.IndexPlanValidator;
 
-import java.util.Iterator;
-import java.util.List;
-import java.util.Set;
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ * 
+ *   http://www.apache.org/licenses/LICENSE-2.0
+ * 
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied.  See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+
 
-import mvm.rya.indexing.external.tupleSet.ExternalTupleSet;
+import java.util.Iterator;
 
 import org.openrdf.query.algebra.TupleExpr;
 

http://git-wip-us.apache.org/repos/asf/incubator-rya/blob/80faf06d/extras/indexing/src/main/java/mvm/rya/indexing/IndexPlanValidator/GeneralizedExternalProcessor.java
----------------------------------------------------------------------
diff --git a/extras/indexing/src/main/java/mvm/rya/indexing/IndexPlanValidator/GeneralizedExternalProcessor.java b/extras/indexing/src/main/java/mvm/rya/indexing/IndexPlanValidator/GeneralizedExternalProcessor.java
index 114fff3..27a0d15 100644
--- a/extras/indexing/src/main/java/mvm/rya/indexing/IndexPlanValidator/GeneralizedExternalProcessor.java
+++ b/extras/indexing/src/main/java/mvm/rya/indexing/IndexPlanValidator/GeneralizedExternalProcessor.java
@@ -1,31 +1,31 @@
 package mvm.rya.indexing.IndexPlanValidator;
 
-
-
 /*
- * #%L
- * mvm.rya.rya.indexing
- * %%
- * Copyright (C) 2014 - 2015 Rya
- * %%
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * You may obtain a copy of the License at
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
  * 
- *      http://www.apache.org/licenses/LICENSE-2.0
+ *   http://www.apache.org/licenses/LICENSE-2.0
  * 
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- * #L%
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied.  See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
  */
 
+
+
+
+
 import java.util.Collection;
 import java.util.HashSet;
 import java.util.List;
-import java.util.Map;
 import java.util.Set;
 
 import mvm.rya.indexing.external.QueryVariableNormalizer.VarCollector;
@@ -43,7 +43,6 @@ import org.openrdf.query.algebra.helpers.QueryModelVisitorBase;
 import org.openrdf.query.algebra.helpers.StatementPatternCollector;
 
 import com.google.common.collect.Lists;
-import com.google.common.collect.Maps;
 import com.google.common.collect.Sets;
 
 /**

http://git-wip-us.apache.org/repos/asf/incubator-rya/blob/80faf06d/extras/indexing/src/main/java/mvm/rya/indexing/IndexPlanValidator/IndexListPruner.java
----------------------------------------------------------------------
diff --git a/extras/indexing/src/main/java/mvm/rya/indexing/IndexPlanValidator/IndexListPruner.java b/extras/indexing/src/main/java/mvm/rya/indexing/IndexPlanValidator/IndexListPruner.java
index 4862a5c..fa1dc13 100644
--- a/extras/indexing/src/main/java/mvm/rya/indexing/IndexPlanValidator/IndexListPruner.java
+++ b/extras/indexing/src/main/java/mvm/rya/indexing/IndexPlanValidator/IndexListPruner.java
@@ -1,5 +1,25 @@
 package mvm.rya.indexing.IndexPlanValidator;
 
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ * 
+ *   http://www.apache.org/licenses/LICENSE-2.0
+ * 
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied.  See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+
+
 import java.util.List;
 
 

http://git-wip-us.apache.org/repos/asf/incubator-rya/blob/80faf06d/extras/indexing/src/main/java/mvm/rya/indexing/IndexPlanValidator/IndexPlanValidator.java
----------------------------------------------------------------------
diff --git a/extras/indexing/src/main/java/mvm/rya/indexing/IndexPlanValidator/IndexPlanValidator.java b/extras/indexing/src/main/java/mvm/rya/indexing/IndexPlanValidator/IndexPlanValidator.java
index 88dfce9..74df958 100644
--- a/extras/indexing/src/main/java/mvm/rya/indexing/IndexPlanValidator/IndexPlanValidator.java
+++ b/extras/indexing/src/main/java/mvm/rya/indexing/IndexPlanValidator/IndexPlanValidator.java
@@ -1,5 +1,25 @@
 package mvm.rya.indexing.IndexPlanValidator;
 
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ * 
+ *   http://www.apache.org/licenses/LICENSE-2.0
+ * 
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied.  See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+
+
 import java.util.Iterator;
 import java.util.NoSuchElementException;
 import java.util.Set;

http://git-wip-us.apache.org/repos/asf/incubator-rya/blob/80faf06d/extras/indexing/src/main/java/mvm/rya/indexing/IndexPlanValidator/IndexTupleGenerator.java
----------------------------------------------------------------------
diff --git a/extras/indexing/src/main/java/mvm/rya/indexing/IndexPlanValidator/IndexTupleGenerator.java b/extras/indexing/src/main/java/mvm/rya/indexing/IndexPlanValidator/IndexTupleGenerator.java
index 6843025..3586a5e 100644
--- a/extras/indexing/src/main/java/mvm/rya/indexing/IndexPlanValidator/IndexTupleGenerator.java
+++ b/extras/indexing/src/main/java/mvm/rya/indexing/IndexPlanValidator/IndexTupleGenerator.java
@@ -1,8 +1,26 @@
 package mvm.rya.indexing.IndexPlanValidator;
 
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ * 
+ *   http://www.apache.org/licenses/LICENSE-2.0
+ * 
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied.  See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+
+
 import java.util.Iterator;
-import java.util.List;
-import java.util.Set;
 
 import org.openrdf.query.algebra.TupleExpr;
 

http://git-wip-us.apache.org/repos/asf/incubator-rya/blob/80faf06d/extras/indexing/src/main/java/mvm/rya/indexing/IndexPlanValidator/IndexedExecutionPlanGenerator.java
----------------------------------------------------------------------
diff --git a/extras/indexing/src/main/java/mvm/rya/indexing/IndexPlanValidator/IndexedExecutionPlanGenerator.java b/extras/indexing/src/main/java/mvm/rya/indexing/IndexPlanValidator/IndexedExecutionPlanGenerator.java
index d65b2bd..acf3f6a 100644
--- a/extras/indexing/src/main/java/mvm/rya/indexing/IndexPlanValidator/IndexedExecutionPlanGenerator.java
+++ b/extras/indexing/src/main/java/mvm/rya/indexing/IndexPlanValidator/IndexedExecutionPlanGenerator.java
@@ -1,5 +1,25 @@
 package mvm.rya.indexing.IndexPlanValidator;
 
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ * 
+ *   http://www.apache.org/licenses/LICENSE-2.0
+ * 
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied.  See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+
+
 import java.util.Iterator;
 import java.util.List;
 import java.util.Map;

http://git-wip-us.apache.org/repos/asf/incubator-rya/blob/80faf06d/extras/indexing/src/main/java/mvm/rya/indexing/IndexPlanValidator/IndexedQueryPlanSelector.java
----------------------------------------------------------------------
diff --git a/extras/indexing/src/main/java/mvm/rya/indexing/IndexPlanValidator/IndexedQueryPlanSelector.java b/extras/indexing/src/main/java/mvm/rya/indexing/IndexPlanValidator/IndexedQueryPlanSelector.java
index 097d37d..dbd1972 100644
--- a/extras/indexing/src/main/java/mvm/rya/indexing/IndexPlanValidator/IndexedQueryPlanSelector.java
+++ b/extras/indexing/src/main/java/mvm/rya/indexing/IndexPlanValidator/IndexedQueryPlanSelector.java
@@ -1,7 +1,26 @@
 package mvm.rya.indexing.IndexPlanValidator;
 
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ * 
+ *   http://www.apache.org/licenses/LICENSE-2.0
+ * 
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied.  See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+
+
 import java.util.Iterator;
-import java.util.List;
 
 import org.openrdf.query.algebra.TupleExpr;
 

http://git-wip-us.apache.org/repos/asf/incubator-rya/blob/80faf06d/extras/indexing/src/main/java/mvm/rya/indexing/IndexPlanValidator/ThreshholdPlanSelector.java
----------------------------------------------------------------------
diff --git a/extras/indexing/src/main/java/mvm/rya/indexing/IndexPlanValidator/ThreshholdPlanSelector.java b/extras/indexing/src/main/java/mvm/rya/indexing/IndexPlanValidator/ThreshholdPlanSelector.java
index 92aa393..a333dcb 100644
--- a/extras/indexing/src/main/java/mvm/rya/indexing/IndexPlanValidator/ThreshholdPlanSelector.java
+++ b/extras/indexing/src/main/java/mvm/rya/indexing/IndexPlanValidator/ThreshholdPlanSelector.java
@@ -1,5 +1,25 @@
 package mvm.rya.indexing.IndexPlanValidator;
 
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ * 
+ *   http://www.apache.org/licenses/LICENSE-2.0
+ * 
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied.  See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+
+
 import java.util.Iterator;
 import java.util.Set;
 
@@ -12,7 +32,6 @@ import org.openrdf.query.algebra.Projection;
 import org.openrdf.query.algebra.QueryModelNode;
 import org.openrdf.query.algebra.StatementPattern;
 import org.openrdf.query.algebra.TupleExpr;
-import org.openrdf.query.algebra.evaluation.impl.ExternalSet;
 import org.openrdf.query.algebra.helpers.QueryModelVisitorBase;
 
 import com.google.common.collect.Sets;

http://git-wip-us.apache.org/repos/asf/incubator-rya/blob/80faf06d/extras/indexing/src/main/java/mvm/rya/indexing/IndexPlanValidator/TupleExecutionPlanGenerator.java
----------------------------------------------------------------------
diff --git a/extras/indexing/src/main/java/mvm/rya/indexing/IndexPlanValidator/TupleExecutionPlanGenerator.java b/extras/indexing/src/main/java/mvm/rya/indexing/IndexPlanValidator/TupleExecutionPlanGenerator.java
index 137323b..2776a9e 100644
--- a/extras/indexing/src/main/java/mvm/rya/indexing/IndexPlanValidator/TupleExecutionPlanGenerator.java
+++ b/extras/indexing/src/main/java/mvm/rya/indexing/IndexPlanValidator/TupleExecutionPlanGenerator.java
@@ -1,16 +1,32 @@
 package mvm.rya.indexing.IndexPlanValidator;
 
-import java.util.ArrayList;
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ * 
+ *   http://www.apache.org/licenses/LICENSE-2.0
+ * 
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied.  See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+
+
 import java.util.Collection;
 import java.util.Iterator;
 import java.util.List;
-import java.util.Map;
 import java.util.NoSuchElementException;
 import java.util.Set;
 
 import mvm.rya.indexing.external.tupleSet.ExternalTupleSet;
-import mvm.rya.rdftriplestore.inference.DoNotExpandSP;
-import mvm.rya.rdftriplestore.utils.FixedStatementPattern;
 
 import org.openrdf.query.algebra.BindingSetAssignment;
 import org.openrdf.query.algebra.Filter;
@@ -23,7 +39,6 @@ import org.openrdf.query.algebra.helpers.QueryModelVisitorBase;
 
 import com.beust.jcommander.internal.Lists;
 import com.google.common.collect.Collections2;
-import com.google.common.collect.Maps;
 import com.google.common.collect.Sets;
 
 public class TupleExecutionPlanGenerator implements IndexTupleGenerator {

http://git-wip-us.apache.org/repos/asf/incubator-rya/blob/80faf06d/extras/indexing/src/main/java/mvm/rya/indexing/IndexPlanValidator/TupleReArranger.java
----------------------------------------------------------------------
diff --git a/extras/indexing/src/main/java/mvm/rya/indexing/IndexPlanValidator/TupleReArranger.java b/extras/indexing/src/main/java/mvm/rya/indexing/IndexPlanValidator/TupleReArranger.java
index ac08d97..089ef5d 100644
--- a/extras/indexing/src/main/java/mvm/rya/indexing/IndexPlanValidator/TupleReArranger.java
+++ b/extras/indexing/src/main/java/mvm/rya/indexing/IndexPlanValidator/TupleReArranger.java
@@ -1,5 +1,25 @@
 package mvm.rya.indexing.IndexPlanValidator;
 
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ * 
+ *   http://www.apache.org/licenses/LICENSE-2.0
+ * 
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied.  See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+
+
 import java.util.Iterator;
 import java.util.List;
 import java.util.Map;

http://git-wip-us.apache.org/repos/asf/incubator-rya/blob/80faf06d/extras/indexing/src/main/java/mvm/rya/indexing/IndexPlanValidator/TupleValidator.java
----------------------------------------------------------------------
diff --git a/extras/indexing/src/main/java/mvm/rya/indexing/IndexPlanValidator/TupleValidator.java b/extras/indexing/src/main/java/mvm/rya/indexing/IndexPlanValidator/TupleValidator.java
index afba7fe..4960d78 100644
--- a/extras/indexing/src/main/java/mvm/rya/indexing/IndexPlanValidator/TupleValidator.java
+++ b/extras/indexing/src/main/java/mvm/rya/indexing/IndexPlanValidator/TupleValidator.java
@@ -1,7 +1,26 @@
 package mvm.rya.indexing.IndexPlanValidator;
 
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ * 
+ *   http://www.apache.org/licenses/LICENSE-2.0
+ * 
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied.  See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+
+
 import java.util.Iterator;
-import java.util.List;
 
 import org.openrdf.query.algebra.TupleExpr;
 

http://git-wip-us.apache.org/repos/asf/incubator-rya/blob/80faf06d/extras/indexing/src/main/java/mvm/rya/indexing/IndexPlanValidator/ValidIndexCombinationGenerator.java
----------------------------------------------------------------------
diff --git a/extras/indexing/src/main/java/mvm/rya/indexing/IndexPlanValidator/ValidIndexCombinationGenerator.java b/extras/indexing/src/main/java/mvm/rya/indexing/IndexPlanValidator/ValidIndexCombinationGenerator.java
index c278fcc..b3c3fcd 100644
--- a/extras/indexing/src/main/java/mvm/rya/indexing/IndexPlanValidator/ValidIndexCombinationGenerator.java
+++ b/extras/indexing/src/main/java/mvm/rya/indexing/IndexPlanValidator/ValidIndexCombinationGenerator.java
@@ -1,7 +1,26 @@
 package mvm.rya.indexing.IndexPlanValidator;
 
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ * 
+ *   http://www.apache.org/licenses/LICENSE-2.0
+ * 
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied.  See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+
+
 import java.util.Collections;
-import java.util.Comparator;
 import java.util.Iterator;
 import java.util.List;
 import java.util.NoSuchElementException;
@@ -19,6 +38,7 @@ import org.openrdf.query.algebra.TupleExpr;
 import org.openrdf.query.algebra.helpers.QueryModelVisitorBase;
 import org.openrdf.query.parser.ParsedQuery;
 import org.openrdf.query.parser.sparql.SPARQLParser;
+
 import com.google.common.base.Joiner;
 import com.google.common.collect.Lists;
 import com.google.common.collect.Sets;

http://git-wip-us.apache.org/repos/asf/incubator-rya/blob/80faf06d/extras/indexing/src/main/java/mvm/rya/indexing/IndexPlanValidator/VarConstantIndexListPruner.java
----------------------------------------------------------------------
diff --git a/extras/indexing/src/main/java/mvm/rya/indexing/IndexPlanValidator/VarConstantIndexListPruner.java b/extras/indexing/src/main/java/mvm/rya/indexing/IndexPlanValidator/VarConstantIndexListPruner.java
index b110522..7e72821 100644
--- a/extras/indexing/src/main/java/mvm/rya/indexing/IndexPlanValidator/VarConstantIndexListPruner.java
+++ b/extras/indexing/src/main/java/mvm/rya/indexing/IndexPlanValidator/VarConstantIndexListPruner.java
@@ -1,5 +1,25 @@
 package mvm.rya.indexing.IndexPlanValidator;
 
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ * 
+ *   http://www.apache.org/licenses/LICENSE-2.0
+ * 
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied.  See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+
+
 import java.util.List;
 import java.util.Map;
 import java.util.Set;

http://git-wip-us.apache.org/repos/asf/incubator-rya/blob/80faf06d/extras/indexing/src/main/java/mvm/rya/indexing/IndexingExpr.java
----------------------------------------------------------------------
diff --git a/extras/indexing/src/main/java/mvm/rya/indexing/IndexingExpr.java b/extras/indexing/src/main/java/mvm/rya/indexing/IndexingExpr.java
index 553c3b8..1d4c4bb 100644
--- a/extras/indexing/src/main/java/mvm/rya/indexing/IndexingExpr.java
+++ b/extras/indexing/src/main/java/mvm/rya/indexing/IndexingExpr.java
@@ -1,8 +1,26 @@
 package mvm.rya.indexing;
 
-import java.util.Set;
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ * 
+ *   http://www.apache.org/licenses/LICENSE-2.0
+ * 
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied.  See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+
 
-import mvm.rya.indexing.accumulo.geo.GeoTupleSet;
+import java.util.Set;
 
 import org.openrdf.model.URI;
 import org.openrdf.model.Value;

http://git-wip-us.apache.org/repos/asf/incubator-rya/blob/80faf06d/extras/indexing/src/main/java/mvm/rya/indexing/IndexingFunctionRegistry.java
----------------------------------------------------------------------
diff --git a/extras/indexing/src/main/java/mvm/rya/indexing/IndexingFunctionRegistry.java b/extras/indexing/src/main/java/mvm/rya/indexing/IndexingFunctionRegistry.java
index 69117fa..e96b8a3 100644
--- a/extras/indexing/src/main/java/mvm/rya/indexing/IndexingFunctionRegistry.java
+++ b/extras/indexing/src/main/java/mvm/rya/indexing/IndexingFunctionRegistry.java
@@ -1,5 +1,25 @@
 package mvm.rya.indexing;
 
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ * 
+ *   http://www.apache.org/licenses/LICENSE-2.0
+ * 
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied.  See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+
+
 import java.util.List;
 import java.util.Map;
 import java.util.Set;
@@ -12,7 +32,6 @@ import org.openrdf.query.algebra.ValueConstant;
 import org.openrdf.query.algebra.ValueExpr;
 import org.openrdf.query.algebra.Var;
 
-import com.google.common.collect.Lists;
 import com.google.common.collect.Maps;
 
 public class IndexingFunctionRegistry {

http://git-wip-us.apache.org/repos/asf/incubator-rya/blob/80faf06d/extras/indexing/src/main/java/mvm/rya/indexing/IteratorFactory.java
----------------------------------------------------------------------
diff --git a/extras/indexing/src/main/java/mvm/rya/indexing/IteratorFactory.java b/extras/indexing/src/main/java/mvm/rya/indexing/IteratorFactory.java
index 16b869d..d61c5ae 100644
--- a/extras/indexing/src/main/java/mvm/rya/indexing/IteratorFactory.java
+++ b/extras/indexing/src/main/java/mvm/rya/indexing/IteratorFactory.java
@@ -1,5 +1,25 @@
 package mvm.rya.indexing;
 
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ * 
+ *   http://www.apache.org/licenses/LICENSE-2.0
+ * 
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied.  See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+
+
 import info.aduna.iteration.CloseableIteration;
 
 import java.util.Collection;

http://git-wip-us.apache.org/repos/asf/incubator-rya/blob/80faf06d/extras/indexing/src/main/java/mvm/rya/indexing/KeyParts.java
----------------------------------------------------------------------
diff --git a/extras/indexing/src/main/java/mvm/rya/indexing/KeyParts.java b/extras/indexing/src/main/java/mvm/rya/indexing/KeyParts.java
index ca27c4a..2caf81c 100644
--- a/extras/indexing/src/main/java/mvm/rya/indexing/KeyParts.java
+++ b/extras/indexing/src/main/java/mvm/rya/indexing/KeyParts.java
@@ -1,5 +1,25 @@
 package mvm.rya.indexing;
 
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ * 
+ *   http://www.apache.org/licenses/LICENSE-2.0
+ * 
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied.  See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+
+
 import java.util.Iterator;
 import java.util.LinkedList;
 import java.util.List;
@@ -17,8 +37,6 @@ import org.openrdf.model.impl.ContextStatementImpl;
 import org.openrdf.model.impl.StatementImpl;
 import org.openrdf.model.impl.URIImpl;
 
-import com.google.common.collect.Constraints;
-
 /**
  * Store and format the various temporal index keys.
  * Row Keys are in these two forms, where [x] denotes x is optional:
@@ -310,4 +328,4 @@ public class KeyParts implements Iterable<KeyParts> {
 			return sb.toString();
 		}
 		
-	}
\ No newline at end of file
+	}

http://git-wip-us.apache.org/repos/asf/incubator-rya/blob/80faf06d/extras/indexing/src/main/java/mvm/rya/indexing/PrecompQueryIndexer.java
----------------------------------------------------------------------
diff --git a/extras/indexing/src/main/java/mvm/rya/indexing/PrecompQueryIndexer.java b/extras/indexing/src/main/java/mvm/rya/indexing/PrecompQueryIndexer.java
index 9f7ef86..1aecd98 100644
--- a/extras/indexing/src/main/java/mvm/rya/indexing/PrecompQueryIndexer.java
+++ b/extras/indexing/src/main/java/mvm/rya/indexing/PrecompQueryIndexer.java
@@ -1,5 +1,25 @@
 package mvm.rya.indexing;
 
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ * 
+ *   http://www.apache.org/licenses/LICENSE-2.0
+ * 
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied.  See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+
+
 import info.aduna.iteration.CloseableIteration;
 
 import java.io.Closeable;
@@ -11,7 +31,6 @@ import java.util.Map;
 
 import mvm.rya.indexing.external.tupleSet.AccumuloIndexSet.AccValueFactory;
 
-import org.apache.accumulo.core.client.BatchScanner;
 import org.apache.accumulo.core.client.TableNotFoundException;
 import org.openrdf.model.Value;
 import org.openrdf.query.BindingSet;
@@ -41,4 +60,4 @@ public interface PrecompQueryIndexer extends Closeable, Flushable {
     public abstract void close() throws IOException;
 }
 
- 
\ No newline at end of file
+ 

http://git-wip-us.apache.org/repos/asf/incubator-rya/blob/80faf06d/extras/indexing/src/main/java/mvm/rya/indexing/RyaSailFactory.java
----------------------------------------------------------------------
diff --git a/extras/indexing/src/main/java/mvm/rya/indexing/RyaSailFactory.java b/extras/indexing/src/main/java/mvm/rya/indexing/RyaSailFactory.java
index 5d9615f..646aab0 100644
--- a/extras/indexing/src/main/java/mvm/rya/indexing/RyaSailFactory.java
+++ b/extras/indexing/src/main/java/mvm/rya/indexing/RyaSailFactory.java
@@ -1,5 +1,25 @@
 package mvm.rya.indexing;
 
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ * 
+ *   http://www.apache.org/licenses/LICENSE-2.0
+ * 
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied.  See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+
+
 import mvm.rya.accumulo.AccumuloRdfConfiguration;
 import mvm.rya.accumulo.AccumuloRyaDAO;
 import mvm.rya.api.RdfCloudTripleStoreConfiguration;

http://git-wip-us.apache.org/repos/asf/incubator-rya/blob/80faf06d/extras/indexing/src/main/java/mvm/rya/indexing/SearchFunction.java
----------------------------------------------------------------------
diff --git a/extras/indexing/src/main/java/mvm/rya/indexing/SearchFunction.java b/extras/indexing/src/main/java/mvm/rya/indexing/SearchFunction.java
index c2cbbfc..ce94556 100644
--- a/extras/indexing/src/main/java/mvm/rya/indexing/SearchFunction.java
+++ b/extras/indexing/src/main/java/mvm/rya/indexing/SearchFunction.java
@@ -1,5 +1,25 @@
 package mvm.rya.indexing;
 
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ * 
+ *   http://www.apache.org/licenses/LICENSE-2.0
+ * 
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied.  See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+
+
 import info.aduna.iteration.CloseableIteration;
 import org.openrdf.model.Statement;
 import org.openrdf.query.QueryEvaluationException;
@@ -22,4 +42,4 @@ public interface SearchFunction {
     public abstract CloseableIteration<Statement, QueryEvaluationException> performSearch(String searchTerms, StatementContraints contraints)
             throws QueryEvaluationException;
 
-}
\ No newline at end of file
+}

http://git-wip-us.apache.org/repos/asf/incubator-rya/blob/80faf06d/extras/indexing/src/main/java/mvm/rya/indexing/SearchFunctionFactory.java
----------------------------------------------------------------------
diff --git a/extras/indexing/src/main/java/mvm/rya/indexing/SearchFunctionFactory.java b/extras/indexing/src/main/java/mvm/rya/indexing/SearchFunctionFactory.java
index dad4e35..719cc2f 100644
--- a/extras/indexing/src/main/java/mvm/rya/indexing/SearchFunctionFactory.java
+++ b/extras/indexing/src/main/java/mvm/rya/indexing/SearchFunctionFactory.java
@@ -1,5 +1,25 @@
 package mvm.rya.indexing;
 
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ * 
+ *   http://www.apache.org/licenses/LICENSE-2.0
+ * 
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied.  See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+
+
 import java.util.Map;
 
 import org.apache.log4j.Logger;
@@ -48,4 +68,4 @@ public abstract class SearchFunctionFactory {
 
    
 }
-  
\ No newline at end of file
+  

http://git-wip-us.apache.org/repos/asf/incubator-rya/blob/80faf06d/extras/indexing/src/main/java/mvm/rya/indexing/StatementContraints.java
----------------------------------------------------------------------
diff --git a/extras/indexing/src/main/java/mvm/rya/indexing/StatementContraints.java b/extras/indexing/src/main/java/mvm/rya/indexing/StatementContraints.java
index 3f3a390..437c74d 100644
--- a/extras/indexing/src/main/java/mvm/rya/indexing/StatementContraints.java
+++ b/extras/indexing/src/main/java/mvm/rya/indexing/StatementContraints.java
@@ -1,25 +1,26 @@
 package mvm.rya.indexing;
 
 /*
- * #%L
- * mvm.rya.indexing.accumulo
- * %%
- * Copyright (C) 2014 Rya
- * %%
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * You may obtain a copy of the License at
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
  * 
- *      http://www.apache.org/licenses/LICENSE-2.0
+ *   http://www.apache.org/licenses/LICENSE-2.0
  * 
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- * #L%
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied.  See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
  */
 
+
+
 import java.util.Set;
 
 import org.openrdf.model.Resource;

http://git-wip-us.apache.org/repos/asf/incubator-rya/blob/80faf06d/extras/indexing/src/main/java/mvm/rya/indexing/TemporalIndexer.java
----------------------------------------------------------------------
diff --git a/extras/indexing/src/main/java/mvm/rya/indexing/TemporalIndexer.java b/extras/indexing/src/main/java/mvm/rya/indexing/TemporalIndexer.java
index f2c6892..be06e25 100644
--- a/extras/indexing/src/main/java/mvm/rya/indexing/TemporalIndexer.java
+++ b/extras/indexing/src/main/java/mvm/rya/indexing/TemporalIndexer.java
@@ -1,11 +1,28 @@
 package mvm.rya.indexing;
 
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ * 
+ *   http://www.apache.org/licenses/LICENSE-2.0
+ * 
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied.  See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+
+
 import info.aduna.iteration.CloseableIteration;
 
-import java.io.Closeable;
-import java.io.Flushable;
 import java.io.IOException;
-import java.util.Collection;
 import java.util.Set;
 
 import mvm.rya.api.persist.index.RyaSecondaryIndexer;

http://git-wip-us.apache.org/repos/asf/incubator-rya/blob/80faf06d/extras/indexing/src/main/java/mvm/rya/indexing/TemporalInstant.java
----------------------------------------------------------------------
diff --git a/extras/indexing/src/main/java/mvm/rya/indexing/TemporalInstant.java b/extras/indexing/src/main/java/mvm/rya/indexing/TemporalInstant.java
index e988b70..f4e6d95 100644
--- a/extras/indexing/src/main/java/mvm/rya/indexing/TemporalInstant.java
+++ b/extras/indexing/src/main/java/mvm/rya/indexing/TemporalInstant.java
@@ -1,5 +1,25 @@
 package mvm.rya.indexing;
 
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ * 
+ *   http://www.apache.org/licenses/LICENSE-2.0
+ * 
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied.  See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+
+
 import java.io.Serializable;
 
 import org.joda.time.DateTime;
@@ -60,4 +80,4 @@ public interface TemporalInstant extends Comparable<TemporalInstant>, Serializab
 	 */
 	public DateTime getAsDateTime();
 
-}
\ No newline at end of file
+}

http://git-wip-us.apache.org/repos/asf/incubator-rya/blob/80faf06d/extras/indexing/src/main/java/mvm/rya/indexing/TemporalInterval.java
----------------------------------------------------------------------
diff --git a/extras/indexing/src/main/java/mvm/rya/indexing/TemporalInterval.java b/extras/indexing/src/main/java/mvm/rya/indexing/TemporalInterval.java
index c8af18d..b23b99c 100644
--- a/extras/indexing/src/main/java/mvm/rya/indexing/TemporalInterval.java
+++ b/extras/indexing/src/main/java/mvm/rya/indexing/TemporalInterval.java
@@ -1,5 +1,25 @@
 package mvm.rya.indexing;
 
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ * 
+ *   http://www.apache.org/licenses/LICENSE-2.0
+ * 
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied.  See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+
+
 import java.io.UnsupportedEncodingException;
 
 /**

http://git-wip-us.apache.org/repos/asf/incubator-rya/blob/80faf06d/extras/indexing/src/main/java/mvm/rya/indexing/accumulo/ConfigUtils.java
----------------------------------------------------------------------
diff --git a/extras/indexing/src/main/java/mvm/rya/indexing/accumulo/ConfigUtils.java b/extras/indexing/src/main/java/mvm/rya/indexing/accumulo/ConfigUtils.java
index aefd3f5..ae16062 100644
--- a/extras/indexing/src/main/java/mvm/rya/indexing/accumulo/ConfigUtils.java
+++ b/extras/indexing/src/main/java/mvm/rya/indexing/accumulo/ConfigUtils.java
@@ -1,31 +1,31 @@
 package mvm.rya.indexing.accumulo;
 
 /*
- * #%L
- * mvm.rya.indexing.accumulo
- * %%
- * Copyright (C) 2014 Rya
- * %%
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * You may obtain a copy of the License at
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
  * 
- *      http://www.apache.org/licenses/LICENSE-2.0
+ *   http://www.apache.org/licenses/LICENSE-2.0
  * 
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- * #L%
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied.  See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
  */
 
+
+
 import java.util.HashSet;
 import java.util.List;
 import java.util.Set;
 
 import mvm.rya.accumulo.AccumuloRdfConfiguration;
-import mvm.rya.accumulo.experimental.AccumuloIndexer;
 import mvm.rya.api.RdfCloudTripleStoreConfiguration;
 import mvm.rya.indexing.FilterFunctionOptimizer;
 import mvm.rya.indexing.accumulo.entity.EntityCentricIndex;
@@ -59,7 +59,6 @@ import org.apache.hadoop.util.ReflectionUtils;
 import org.apache.log4j.Logger;
 import org.openrdf.model.URI;
 import org.openrdf.model.impl.URIImpl;
-import org.openrdf.query.algebra.evaluation.QueryOptimizer;
 
 import com.google.common.collect.Lists;
 

http://git-wip-us.apache.org/repos/asf/incubator-rya/blob/80faf06d/extras/indexing/src/main/java/mvm/rya/indexing/accumulo/Md5Hash.java
----------------------------------------------------------------------
diff --git a/extras/indexing/src/main/java/mvm/rya/indexing/accumulo/Md5Hash.java b/extras/indexing/src/main/java/mvm/rya/indexing/accumulo/Md5Hash.java
index 8d350bf..8fa3008 100644
--- a/extras/indexing/src/main/java/mvm/rya/indexing/accumulo/Md5Hash.java
+++ b/extras/indexing/src/main/java/mvm/rya/indexing/accumulo/Md5Hash.java
@@ -1,25 +1,26 @@
 package mvm.rya.indexing.accumulo;
 
 /*
- * #%L
- * mvm.rya.indexing.accumulo
- * %%
- * Copyright (C) 2014 Rya
- * %%
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * You may obtain a copy of the License at
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
  * 
- *      http://www.apache.org/licenses/LICENSE-2.0
+ *   http://www.apache.org/licenses/LICENSE-2.0
  * 
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- * #L%
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied.  See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
  */
 
+
+
 import org.apache.accumulo.core.data.Value;
 import org.apache.commons.codec.binary.Base64;
 import org.apache.commons.codec.binary.StringUtils;

http://git-wip-us.apache.org/repos/asf/incubator-rya/blob/80faf06d/extras/indexing/src/main/java/mvm/rya/indexing/accumulo/StatementSerializer.java
----------------------------------------------------------------------
diff --git a/extras/indexing/src/main/java/mvm/rya/indexing/accumulo/StatementSerializer.java b/extras/indexing/src/main/java/mvm/rya/indexing/accumulo/StatementSerializer.java
index 773ee39..f5d6d0e 100644
--- a/extras/indexing/src/main/java/mvm/rya/indexing/accumulo/StatementSerializer.java
+++ b/extras/indexing/src/main/java/mvm/rya/indexing/accumulo/StatementSerializer.java
@@ -1,25 +1,26 @@
 package mvm.rya.indexing.accumulo;
 
 /*
- * #%L
- * mvm.rya.indexing.accumulo
- * %%
- * Copyright (C) 2014 Rya
- * %%
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * You may obtain a copy of the License at
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
  * 
- *      http://www.apache.org/licenses/LICENSE-2.0
+ *   http://www.apache.org/licenses/LICENSE-2.0
  * 
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- * #L%
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied.  See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
  */
 
+
+
 import java.io.IOException;
 import java.util.Set;
 

http://git-wip-us.apache.org/repos/asf/incubator-rya/blob/80faf06d/extras/indexing/src/main/java/mvm/rya/indexing/accumulo/entity/AccumuloDocIdIndexer.java
----------------------------------------------------------------------
diff --git a/extras/indexing/src/main/java/mvm/rya/indexing/accumulo/entity/AccumuloDocIdIndexer.java b/extras/indexing/src/main/java/mvm/rya/indexing/accumulo/entity/AccumuloDocIdIndexer.java
index f3eb282..feb894f 100644
--- a/extras/indexing/src/main/java/mvm/rya/indexing/accumulo/entity/AccumuloDocIdIndexer.java
+++ b/extras/indexing/src/main/java/mvm/rya/indexing/accumulo/entity/AccumuloDocIdIndexer.java
@@ -1,5 +1,25 @@
 package mvm.rya.indexing.accumulo.entity;
 
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ * 
+ *   http://www.apache.org/licenses/LICENSE-2.0
+ * 
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied.  See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+
+
 import static mvm.rya.api.RdfCloudTripleStoreConstants.DELIM_BYTE;
 import static mvm.rya.api.RdfCloudTripleStoreConstants.TYPE_DELIM_BYTE;
 import info.aduna.iteration.CloseableIteration;

http://git-wip-us.apache.org/repos/asf/incubator-rya/blob/80faf06d/extras/indexing/src/main/java/mvm/rya/indexing/accumulo/entity/EntityCentricIndex.java
----------------------------------------------------------------------
diff --git a/extras/indexing/src/main/java/mvm/rya/indexing/accumulo/entity/EntityCentricIndex.java b/extras/indexing/src/main/java/mvm/rya/indexing/accumulo/entity/EntityCentricIndex.java
index 2275d41..b8b3f65 100644
--- a/extras/indexing/src/main/java/mvm/rya/indexing/accumulo/entity/EntityCentricIndex.java
+++ b/extras/indexing/src/main/java/mvm/rya/indexing/accumulo/entity/EntityCentricIndex.java
@@ -1,5 +1,25 @@
 package mvm.rya.indexing.accumulo.entity;
 
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ * 
+ *   http://www.apache.org/licenses/LICENSE-2.0
+ * 
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied.  See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+
+
 import static mvm.rya.accumulo.AccumuloRdfConstants.EMPTY_CV;
 import static mvm.rya.accumulo.AccumuloRdfConstants.EMPTY_VALUE;
 import static mvm.rya.api.RdfCloudTripleStoreConstants.DELIM_BYTES;

http://git-wip-us.apache.org/repos/asf/incubator-rya/blob/80faf06d/extras/indexing/src/main/java/mvm/rya/indexing/accumulo/entity/EntityLocalityGroupSetter.java
----------------------------------------------------------------------
diff --git a/extras/indexing/src/main/java/mvm/rya/indexing/accumulo/entity/EntityLocalityGroupSetter.java b/extras/indexing/src/main/java/mvm/rya/indexing/accumulo/entity/EntityLocalityGroupSetter.java
index 0a9a91e..2030e58 100644
--- a/extras/indexing/src/main/java/mvm/rya/indexing/accumulo/entity/EntityLocalityGroupSetter.java
+++ b/extras/indexing/src/main/java/mvm/rya/indexing/accumulo/entity/EntityLocalityGroupSetter.java
@@ -1,5 +1,25 @@
 package mvm.rya.indexing.accumulo.entity;
 
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ * 
+ *   http://www.apache.org/licenses/LICENSE-2.0
+ * 
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied.  See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+
+
 import java.util.Collections;
 import java.util.HashMap;
 import java.util.HashSet;

http://git-wip-us.apache.org/repos/asf/incubator-rya/blob/80faf06d/extras/indexing/src/main/java/mvm/rya/indexing/accumulo/entity/EntityOptimizer.java
----------------------------------------------------------------------
diff --git a/extras/indexing/src/main/java/mvm/rya/indexing/accumulo/entity/EntityOptimizer.java b/extras/indexing/src/main/java/mvm/rya/indexing/accumulo/entity/EntityOptimizer.java
index 184aafe..e46c321 100644
--- a/extras/indexing/src/main/java/mvm/rya/indexing/accumulo/entity/EntityOptimizer.java
+++ b/extras/indexing/src/main/java/mvm/rya/indexing/accumulo/entity/EntityOptimizer.java
@@ -1,5 +1,25 @@
 package mvm.rya.indexing.accumulo.entity;
 
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ * 
+ *   http://www.apache.org/licenses/LICENSE-2.0
+ * 
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied.  See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+
+
 import java.util.ArrayList;
 import java.util.Collection;
 import java.util.List;

http://git-wip-us.apache.org/repos/asf/incubator-rya/blob/80faf06d/extras/indexing/src/main/java/mvm/rya/indexing/accumulo/entity/EntityTupleSet.java
----------------------------------------------------------------------
diff --git a/extras/indexing/src/main/java/mvm/rya/indexing/accumulo/entity/EntityTupleSet.java b/extras/indexing/src/main/java/mvm/rya/indexing/accumulo/entity/EntityTupleSet.java
index 71ac990..dbe7a53 100644
--- a/extras/indexing/src/main/java/mvm/rya/indexing/accumulo/entity/EntityTupleSet.java
+++ b/extras/indexing/src/main/java/mvm/rya/indexing/accumulo/entity/EntityTupleSet.java
@@ -1,5 +1,25 @@
 package mvm.rya.indexing.accumulo.entity;
 
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ * 
+ *   http://www.apache.org/licenses/LICENSE-2.0
+ * 
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied.  See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+
+
 import info.aduna.iteration.CloseableIteration;
 
 import java.util.Collection;

http://git-wip-us.apache.org/repos/asf/incubator-rya/blob/80faf06d/extras/indexing/src/main/java/mvm/rya/indexing/accumulo/entity/StarQuery.java
----------------------------------------------------------------------
diff --git a/extras/indexing/src/main/java/mvm/rya/indexing/accumulo/entity/StarQuery.java b/extras/indexing/src/main/java/mvm/rya/indexing/accumulo/entity/StarQuery.java
index ccb6c02..e9d2f85 100644
--- a/extras/indexing/src/main/java/mvm/rya/indexing/accumulo/entity/StarQuery.java
+++ b/extras/indexing/src/main/java/mvm/rya/indexing/accumulo/entity/StarQuery.java
@@ -1,5 +1,25 @@
 package mvm.rya.indexing.accumulo.entity;
 
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ * 
+ *   http://www.apache.org/licenses/LICENSE-2.0
+ * 
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied.  See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+
+
 import java.util.Collection;
 import java.util.List;
 import java.util.Map;


[21/56] [abbrv] incubator-rya git commit: RYA-7 POM and License Clean-up for Apache Move

Posted by mi...@apache.org.
http://git-wip-us.apache.org/repos/asf/incubator-rya/blob/80faf06d/partition/partition.rdf/src/main/java/mvm/mmrts/rdf/partition/utils/RdfIO.java
----------------------------------------------------------------------
diff --git a/partition/partition.rdf/src/main/java/mvm/mmrts/rdf/partition/utils/RdfIO.java b/partition/partition.rdf/src/main/java/mvm/mmrts/rdf/partition/utils/RdfIO.java
deleted file mode 100644
index b7d6ec8..0000000
--- a/partition/partition.rdf/src/main/java/mvm/mmrts/rdf/partition/utils/RdfIO.java
+++ /dev/null
@@ -1,166 +0,0 @@
-package mvm.mmrts.rdf.partition.utils;
-
-import com.google.common.io.ByteArrayDataInput;
-import com.google.common.io.ByteArrayDataOutput;
-import com.google.common.io.ByteStreams;
-import mvm.mmrts.rdf.partition.InvalidValueTypeMarkerRuntimeException;
-import org.openrdf.model.*;
-import org.openrdf.model.impl.StatementImpl;
-
-import java.io.IOException;
-import java.util.ArrayList;
-import java.util.List;
-
-import static mvm.mmrts.rdf.partition.PartitionConstants.*;
-
-/**
- * Class RdfIO
- * Date: Jul 6, 2011
- * Time: 12:13:18 PM
- */
-public class RdfIO {
-
-    public static byte[] writeStatement(Statement statement, boolean document) throws IOException {
-        if (statement == null)
-            return new byte[]{};
-        ByteArrayDataOutput dataOut = ByteStreams.newDataOutput();
-
-        if (document) {
-            writeValue(dataOut, statement.getSubject());
-            dataOut.writeByte(FAMILY_DELIM);
-            writeValue(dataOut, statement.getPredicate());
-            dataOut.writeByte(FAMILY_DELIM);
-            writeValue(dataOut, statement.getObject());
-//            dataOut.writeByte(FAMILY_DELIM);
-        } else {
-            //index
-            writeValue(dataOut, statement.getPredicate());
-            dataOut.writeByte(INDEX_DELIM);
-            writeValue(dataOut, statement.getObject());
-            dataOut.writeByte(FAMILY_DELIM);
-            writeValue(dataOut, statement.getSubject());
-//            dataOut.writeByte(FAMILY_DELIM);
-        }
-
-        return dataOut.toByteArray();
-    }
-
-    public static byte[] writeValue(Value value) throws IOException {
-        ByteArrayDataOutput output = ByteStreams.newDataOutput();
-        writeValue(output, value);
-        return output.toByteArray();
-    }
-
-    public static void writeValue(ByteArrayDataOutput dataOut, Value value) throws IOException {
-        if (value == null || dataOut == null)
-            throw new IllegalArgumentException("Arguments cannot be null");
-        if (value instanceof URI) {
-            dataOut.writeByte(URI_MARKER);
-            dataOut.write(value.toString().getBytes());
-        } else if (value instanceof BNode) {
-            dataOut.writeByte(BNODE_MARKER);
-            dataOut.write(((BNode) value).getID().getBytes());
-        } else if (value instanceof Literal) {
-            Literal lit = (Literal) value;
-
-            String label = lit.getLabel();
-            String language = lit.getLanguage();
-            URI datatype = lit.getDatatype();
-
-            if (datatype != null) {
-                dataOut.writeByte(DATATYPE_LITERAL_MARKER);
-                dataOut.write(label.getBytes());
-                dataOut.writeByte(DATATYPE_LITERAL_MARKER);
-                writeValue(dataOut, datatype);
-            } else if (language != null) {
-                dataOut.writeByte(LANG_LITERAL_MARKER);
-                dataOut.write(label.getBytes());
-                dataOut.writeByte(LANG_LITERAL_MARKER);
-                dataOut.write(language.getBytes());
-            } else {
-                dataOut.writeByte(PLAIN_LITERAL_MARKER);
-                dataOut.write(label.getBytes());
-            }
-        } else {
-            throw new IllegalArgumentException("unexpected value type: "
-                    + value.getClass());
-        }
-    }
-
-    public static Statement readStatement(ByteArrayDataInput dataIn, ValueFactory vf)
-            throws IOException {
-
-        return readStatement(dataIn, vf, true);
-    }
-
-    //TODO: This could be faster somehow, more efficient
-
-    private static byte[] readFully(ByteArrayDataInput dataIn, byte delim) {
-        ByteArrayDataOutput output = ByteStreams.newDataOutput();
-        try {
-            byte curr;
-            while ((curr = dataIn.readByte()) != delim) {
-                output.writeByte(curr);
-            }
-        } catch (IllegalStateException e) {
-            //end of array
-        }
-        return output.toByteArray();
-    }
-
-    public static Statement readStatement(ByteArrayDataInput dataIn, ValueFactory vf, boolean doc)
-            throws IOException {
-
-        //doc order: subject/0predicate/0object
-        //index order: predicate/1object/0subject
-        byte delim = (doc) ? FAMILY_DELIM : INDEX_DELIM;
-        List<Value> values = new ArrayList<Value>();
-        while (values.size() < 3) {
-            Value addThis = readValue(dataIn, vf, delim);
-            values.add(addThis);
-            delim = FAMILY_DELIM;
-        }
-
-        if (doc)
-            return new StatementImpl((Resource) values.get(0), (URI) values.get(1), values.get(2));
-        else
-            return new StatementImpl((Resource) values.get(2), (URI) values.get(0), values.get(1));
-    }
-
-    public static Value readValue(ByteArrayDataInput dataIn, ValueFactory vf, byte delim) throws IOException {
-        int valueTypeMarker;
-        try {
-            valueTypeMarker = dataIn.readByte();
-        } catch (Exception e) {
-            throw new IOException(e);
-        }
-        Value addThis = null;
-        if (valueTypeMarker == URI_MARKER) {
-            byte[] bytes = readFully(dataIn, delim);
-            addThis = vf.createURI(new String(bytes));
-        } else if (valueTypeMarker == BNODE_MARKER) {
-            byte[] bytes = readFully(dataIn, delim);
-            addThis = vf.createBNode(new String(bytes));
-        } else if (valueTypeMarker == PLAIN_LITERAL_MARKER) {
-            byte[] bytes = readFully(dataIn, delim);
-            addThis = vf.createLiteral(new String(bytes));
-        } else if (valueTypeMarker == LANG_LITERAL_MARKER) {
-            byte[] bytes = readFully(dataIn, (byte) LANG_LITERAL_MARKER);
-            String label = new String(bytes);
-            bytes = readFully(dataIn, delim);
-            addThis = vf.createLiteral(label, new String(bytes));
-        } else if (valueTypeMarker == DATATYPE_LITERAL_MARKER) {
-            byte[] bytes = readFully(dataIn, (byte) DATATYPE_LITERAL_MARKER);
-            String label_s = new String(bytes);
-            if (URI_MARKER != dataIn.readByte()) {
-                throw new IllegalArgumentException("Expected a URI datatype here");
-            }
-            bytes = readFully(dataIn, delim);
-            addThis = vf.createLiteral(label_s, vf.createURI(new String(bytes)));
-        } else {
-            throw new InvalidValueTypeMarkerRuntimeException(valueTypeMarker, "Invalid value type marker: "
-                    + valueTypeMarker);
-        }
-        return addThis;
-    }
-}
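
A note on the file removed above: RdfIO serialized each RDF value as a one-byte type marker followed by the value's bytes, with delimiter bytes separating statement components. The following minimal sketch round-trips a value with that marker-first layout; it is independent of the deleted class, and the marker values here are hypothetical stand-ins for the PartitionConstants the class imported.

import java.io.ByteArrayOutputStream;
import java.nio.charset.StandardCharsets;

public class MarkerEncodingSketch {

    // Hypothetical marker values; the real ones lived in PartitionConstants.
    static final byte URI_MARKER = 7;
    static final byte PLAIN_LITERAL_MARKER = 8;

    // Writes the type marker first, then the raw UTF-8 bytes of the value.
    static byte[] write(byte marker, String value) {
        ByteArrayOutputStream out = new ByteArrayOutputStream();
        out.write(marker);
        byte[] bytes = value.getBytes(StandardCharsets.UTF_8);
        out.write(bytes, 0, bytes.length);
        return out.toByteArray();
    }

    // Checks the marker byte, then decodes the remainder as the value.
    static String read(byte[] encoded, byte expectedMarker) {
        if (encoded.length == 0 || encoded[0] != expectedMarker) {
            throw new IllegalArgumentException("unexpected value type marker");
        }
        return new String(encoded, 1, encoded.length - 1, StandardCharsets.UTF_8);
    }

    public static void main(String[] args) {
        byte[] enc = write(URI_MARKER, "http://example.org/s");
        System.out.println(read(enc, URI_MARKER)); // prints the URI back
    }
}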

http://git-wip-us.apache.org/repos/asf/incubator-rya/blob/80faf06d/partition/partition.rdf/src/main/resources/partitionTableLoad.cbexec
----------------------------------------------------------------------
diff --git a/partition/partition.rdf/src/main/resources/partitionTableLoad.cbexec b/partition/partition.rdf/src/main/resources/partitionTableLoad.cbexec
deleted file mode 100644
index 74ddbe2..0000000
--- a/partition/partition.rdf/src/main/resources/partitionTableLoad.cbexec
+++ /dev/null
@@ -1,4 +0,0 @@
-createtable rdfShardIndex
-createtable rdfPartition
-config -t rdfPartition -s table.split.threshold=3G
-config -t rdfPartition -s table.compaction.major.ratio=1
\ No newline at end of file
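
The removed .cbexec script above created and tuned the partition tables through the Cloudbase shell. For reference, the same setup can be expressed against the Apache Accumulo Java API this commit moves the project toward; the instance name, ZooKeeper host, and credentials below are placeholders, not values from the codebase.

import org.apache.accumulo.core.client.Connector;
import org.apache.accumulo.core.client.ZooKeeperInstance;
import org.apache.accumulo.core.client.security.tokens.PasswordToken;

public class PartitionTableSetup {
    public static void main(String[] args) throws Exception {
        // Placeholder connection details.
        Connector conn = new ZooKeeperInstance("instanceName", "zoo1:2181")
                .getConnector("root", new PasswordToken("secret"));

        // Same tables the deleted script created.
        conn.tableOperations().create("rdfShardIndex");
        conn.tableOperations().create("rdfPartition");

        // Same table properties the deleted script configured.
        conn.tableOperations().setProperty("rdfPartition", "table.split.threshold", "3G");
        conn.tableOperations().setProperty("rdfPartition", "table.compaction.major.ratio", "1");
    }
}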

http://git-wip-us.apache.org/repos/asf/incubator-rya/blob/80faf06d/partition/partition.rdf/src/test/java/mvm/mmrts/rdf/partition/LoadPartitionData.java
----------------------------------------------------------------------
diff --git a/partition/partition.rdf/src/test/java/mvm/mmrts/rdf/partition/LoadPartitionData.java b/partition/partition.rdf/src/test/java/mvm/mmrts/rdf/partition/LoadPartitionData.java
deleted file mode 100644
index d5c082a..0000000
--- a/partition/partition.rdf/src/test/java/mvm/mmrts/rdf/partition/LoadPartitionData.java
+++ /dev/null
@@ -1,79 +0,0 @@
-package mvm.mmrts.rdf.partition;
-
-import cloudbase.core.client.ZooKeeperInstance;
-import cloudbase.core.security.ColumnVisibility;
-import mvm.mmrts.rdf.partition.converter.ContextColVisConverter;
-import org.openrdf.model.Resource;
-import org.openrdf.model.URI;
-import org.openrdf.model.ValueFactory;
-import org.openrdf.model.impl.StatementImpl;
-import org.openrdf.model.impl.ValueFactoryImpl;
-import org.openrdf.repository.Repository;
-import org.openrdf.repository.RepositoryConnection;
-import org.openrdf.repository.sail.SailRepository;
-
-import javax.xml.datatype.DatatypeFactory;
-
-public class LoadPartitionData {
-
-    public static final String NAMESPACE = "http://here/2010/tracked-data-provenance/ns#";//44 len
-    public static final String RDF_NS = "http://www.w3.org/1999/02/22-rdf-syntax-ns#";
-
-    static ValueFactory vf = ValueFactoryImpl.getInstance();
-
-    /**
-     * @param args
-     */
-    public static void main(String[] args) {
-        try {
-
-            final PartitionSail store = new PartitionSail(new ZooKeeperInstance("stratus", "stratus13:2181").getConnector("root", "password".getBytes()), "rdfPartition");
-            store.setContextColVisConverter(new ContextColVisConverter() {
-
-                @Override
-                public ColumnVisibility convertContexts(Resource... contexts) {
-                    if (contexts != null) {
-                        StringBuffer sb = new StringBuffer();
-                        for (int i = 0; i < contexts.length; i++) {
-                            Resource context = contexts[i];
-                            if (context instanceof URI) {
-                                URI uri = (URI) context;
-                                sb.append(uri.getLocalName());
-                                if (i != (contexts.length - 1)) {
-                                    sb.append("|");
-                                }
-                            }
-                        }
-                        return new ColumnVisibility(sb.toString());
-                    }
-                    return null;
-                }
-            });
-            Repository myRepository = new SailRepository(store);
-            myRepository.initialize();
-
-            RepositoryConnection conn = myRepository.getConnection();
-
-            URI A = vf.createURI("urn:colvis#A");
-            URI B = vf.createURI("urn:colvis#B");
-            URI C = vf.createURI("urn:colvis#C");
-
-            String uuid = "uuidAuth1";
-            conn.add(new StatementImpl(vf.createURI(NAMESPACE, uuid), vf.createURI(RDF_NS, "type"), vf.createURI(NAMESPACE, "Created")), A, B, C);
-            conn.add(new StatementImpl(vf.createURI(NAMESPACE, uuid), vf.createURI(NAMESPACE, "createdItem"), vf.createURI(NAMESPACE, "objectUuid1")), A, B);
-            conn.add(new StatementImpl(vf.createURI(NAMESPACE, uuid), vf.createURI(NAMESPACE, "performedBy"), vf.createURI("urn:system:A")), A, B);
-            conn.add(new StatementImpl(vf.createURI(NAMESPACE, uuid), vf.createURI(NAMESPACE, "stringLit"), vf.createLiteral("stringLit")), A);
-            conn.add(new StatementImpl(vf.createURI(NAMESPACE, uuid), vf.createURI(NAMESPACE, "performedAt"), vf.createLiteral(DatatypeFactory.newInstance().newXMLGregorianCalendar(2011, 7, 12, 6, 0, 0, 0, 0))), B, C);
-            conn.add(new StatementImpl(vf.createURI(NAMESPACE, uuid), vf.createURI(NAMESPACE, "reportedAt"), vf.createLiteral(DatatypeFactory.newInstance().newXMLGregorianCalendar(2011, 7, 12, 6, 1, 0, 0, 0))), C);
-            conn.add(new StatementImpl(vf.createURI(NAMESPACE, uuid), vf.createURI(NAMESPACE, "booleanLit"), vf.createLiteral(true)));
-
-            conn.commit();
-            conn.close();
-
-            myRepository.shutDown();
-        } catch (Exception e) {
-            e.printStackTrace();
-        }
-    }
-
-}
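
The ContextColVisConverter wired into the deleted test above joins the local names of the statement contexts with '|' (logical OR in a visibility expression) to build a column visibility string. A minimal sketch of that mapping, with plain strings standing in for Sesame Resources and the Cloudbase ColumnVisibility type removed by this commit:

import java.util.StringJoiner;

public class ContextVisibilitySketch {

    // Joins context local names with '|'; returns null when no contexts are
    // supplied, mirroring the deleted converter's null-array case.
    static String toVisibilityExpression(String... contextLocalNames) {
        if (contextLocalNames == null || contextLocalNames.length == 0) {
            return null;
        }
        StringJoiner joiner = new StringJoiner("|");
        for (String name : contextLocalNames) {
            joiner.add(name);
        }
        return joiner.toString();
    }

    public static void main(String[] args) {
        // Mirrors the A, B, C contexts used in the deleted test.
        System.out.println(toVisibilityExpression("A", "B", "C")); // A|B|C
    }
}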

http://git-wip-us.apache.org/repos/asf/incubator-rya/blob/80faf06d/partition/partition.rdf/src/test/java/mvm/mmrts/rdf/partition/LoadPartitionData2.java
----------------------------------------------------------------------
diff --git a/partition/partition.rdf/src/test/java/mvm/mmrts/rdf/partition/LoadPartitionData2.java b/partition/partition.rdf/src/test/java/mvm/mmrts/rdf/partition/LoadPartitionData2.java
deleted file mode 100644
index 29682ad..0000000
--- a/partition/partition.rdf/src/test/java/mvm/mmrts/rdf/partition/LoadPartitionData2.java
+++ /dev/null
@@ -1,69 +0,0 @@
-package mvm.mmrts.rdf.partition;
-
-import cloudbase.core.client.ZooKeeperInstance;
-import cloudbase.core.security.ColumnVisibility;
-import mvm.mmrts.rdf.partition.converter.ContextColVisConverter;
-import mvm.mmrts.rdf.partition.shard.DateHashModShardValueGenerator;
-import org.openrdf.model.Resource;
-import org.openrdf.model.URI;
-import org.openrdf.model.ValueFactory;
-import org.openrdf.model.impl.StatementImpl;
-import org.openrdf.model.impl.ValueFactoryImpl;
-import org.openrdf.repository.Repository;
-import org.openrdf.repository.RepositoryConnection;
-import org.openrdf.repository.sail.SailRepository;
-
-public class LoadPartitionData2 {
-
-    public static final String NAMESPACE = "http://here/2010/tracked-data-provenance/ns#";//44 len
-    public static final String RDF_NS = "http://www.w3.org/1999/02/22-rdf-syntax-ns#";
-
-    static ValueFactory vf = ValueFactoryImpl.getInstance();
-
-    /**
-     * @param args
-     */
-    public static void main(String[] args) {
-        try {
-
-            DateHashModShardValueGenerator gen = new DateHashModShardValueGenerator();
-            gen.setBaseMod(10);
-            final PartitionSail store = new PartitionSail(new ZooKeeperInstance("stratus", "stratus13:2181").getConnector("root", "password".getBytes()), "rdfPartition", gen);
-            store.setContextColVisConverter(new ContextColVisConverter() {
-
-                @Override
-                public ColumnVisibility convertContexts(Resource... contexts) {
-                    if (contexts != null) {
-                        StringBuffer sb = new StringBuffer();
-                        for (int i = 0; i < contexts.length; i++) {
-                            Resource context = contexts[i];
-                            if (context instanceof URI) {
-                                URI uri = (URI) context;
-                                sb.append(uri.getLocalName());
-                                if (i != (contexts.length - 1)) {
-                                    sb.append("|");
-                                }
-                            }
-                        }
-                        return new ColumnVisibility(sb.toString());
-                    }
-                    return null;
-                }
-            });
-            Repository myRepository = new SailRepository(store);
-            myRepository.initialize();
-
-            RepositoryConnection conn = myRepository.getConnection();
-
-            conn.add(new StatementImpl(vf.createURI("http://www.Department0.University0.edu/GraduateStudent44"), vf.createURI("urn:lubm:test#specific"), vf.createURI("urn:lubm:test#value")));
-
-            conn.commit();
-            conn.close();
-
-            myRepository.shutDown();
-        } catch (Exception e) {
-            e.printStackTrace();
-        }
-    }
-
-}
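
The deleted test above tunes a DateHashModShardValueGenerator with setBaseMod(10): statements are spread across shards by combining a date component with a hash of the value modulo the base. The generator's actual output format is not shown in this diff, so the layout below is an assumption used only to illustrate the idea.

import java.text.SimpleDateFormat;
import java.util.Date;

public class DateHashModShardSketch {

    private int baseMod = 10; // mirrors gen.setBaseMod(10) in the deleted test

    // Assumed layout: yyyyMMdd date prefix plus a hash-mod bucket.
    public String generateShardValue(long timestamp, Object obj) {
        String day = new SimpleDateFormat("yyyyMMdd").format(new Date(timestamp));
        int bucket = Math.abs(obj.hashCode() % baseMod);
        return day + "_" + bucket; // e.g. 20110701_3
    }

    public static void main(String[] args) {
        System.out.println(new DateHashModShardSketch()
                .generateShardValue(1309532965000L, "http://example.org/subject"));
    }
}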

http://git-wip-us.apache.org/repos/asf/incubator-rya/blob/80faf06d/partition/partition.rdf/src/test/java/mvm/mmrts/rdf/partition/LoadSampleData.java
----------------------------------------------------------------------
diff --git a/partition/partition.rdf/src/test/java/mvm/mmrts/rdf/partition/LoadSampleData.java b/partition/partition.rdf/src/test/java/mvm/mmrts/rdf/partition/LoadSampleData.java
deleted file mode 100644
index 4e86491..0000000
--- a/partition/partition.rdf/src/test/java/mvm/mmrts/rdf/partition/LoadSampleData.java
+++ /dev/null
@@ -1,64 +0,0 @@
-package mvm.mmrts.rdf.partition;
-
-import cloudbase.core.client.ZooKeeperInstance;
-import cloudbase.core.security.ColumnVisibility;
-import mvm.mmrts.rdf.partition.converter.ContextColVisConverter;
-import org.openrdf.model.Resource;
-import org.openrdf.model.URI;
-import org.openrdf.model.ValueFactory;
-import org.openrdf.model.impl.StatementImpl;
-import org.openrdf.model.impl.ValueFactoryImpl;
-import org.openrdf.repository.Repository;
-import org.openrdf.repository.RepositoryConnection;
-import org.openrdf.repository.sail.SailRepository;
-
-import javax.xml.datatype.DatatypeFactory;
-
-public class LoadSampleData {
-
-    public static final String NAMESPACE = "http://here/2010/tracked-data-provenance/ns#";//44 len
-    public static final String RDF_NS = "http://www.w3.org/1999/02/22-rdf-syntax-ns#";
-
-    static ValueFactory vf = ValueFactoryImpl.getInstance();
-
-    /**
-     * @param args
-     */
-    public static void main(String[] args) {
-        try {
-
-            final PartitionSail store = new PartitionSail(new ZooKeeperInstance("stratus", "stratus13:2181").getConnector("root", "password".getBytes()), "partTest", "shardIndexTest");
-            
-            Repository myRepository = new SailRepository(store);
-            myRepository.initialize();
-
-            RepositoryConnection conn = myRepository.getConnection();
-
-            String uuid = "uuidAuth1";
-            conn.add(new StatementImpl(vf.createURI(NAMESPACE, uuid), vf.createURI(RDF_NS, "type"), vf.createURI(NAMESPACE, "Created")));
-            conn.add(new StatementImpl(vf.createURI(NAMESPACE, uuid), vf.createURI(NAMESPACE, "createdItem"), vf.createURI(NAMESPACE, "objectUuid1")));
-            conn.add(new StatementImpl(vf.createURI(NAMESPACE, uuid), vf.createURI(NAMESPACE, "performedBy"), vf.createURI("urn:system:A")));
-            conn.add(new StatementImpl(vf.createURI(NAMESPACE, uuid), vf.createURI(NAMESPACE, "stringLit"), vf.createLiteral("stringLit")));
-            conn.add(new StatementImpl(vf.createURI(NAMESPACE, uuid), vf.createURI(NAMESPACE, "performedAt"), vf.createLiteral(DatatypeFactory.newInstance().newXMLGregorianCalendar(2011, 7, 12, 6, 0, 0, 0, 0))));
-            conn.add(new StatementImpl(vf.createURI(NAMESPACE, uuid), vf.createURI(NAMESPACE, "reportedAt"), vf.createLiteral(DatatypeFactory.newInstance().newXMLGregorianCalendar(2011, 7, 12, 6, 1, 0, 0, 0))));
-            conn.add(new StatementImpl(vf.createURI(NAMESPACE, uuid), vf.createURI(NAMESPACE, "booleanLit"), vf.createLiteral(true)));
-
-            uuid = "uuidAuth4";
-            conn.add(new StatementImpl(vf.createURI(NAMESPACE, uuid), vf.createURI(RDF_NS, "type"), vf.createURI(NAMESPACE, "Created")));
-            conn.add(new StatementImpl(vf.createURI(NAMESPACE, uuid), vf.createURI(NAMESPACE, "createdItem"), vf.createURI(NAMESPACE, "objectUuid1")));
-            conn.add(new StatementImpl(vf.createURI(NAMESPACE, uuid), vf.createURI(NAMESPACE, "performedBy"), vf.createURI("urn:system:A")));
-            conn.add(new StatementImpl(vf.createURI(NAMESPACE, uuid), vf.createURI(NAMESPACE, "stringLit"), vf.createLiteral("stringLit")));
-            conn.add(new StatementImpl(vf.createURI(NAMESPACE, uuid), vf.createURI(NAMESPACE, "performedAt"), vf.createLiteral(DatatypeFactory.newInstance().newXMLGregorianCalendar(2011, 7, 12, 6, 0, 0, 0, 0))));
-            conn.add(new StatementImpl(vf.createURI(NAMESPACE, uuid), vf.createURI(NAMESPACE, "reportedAt"), vf.createLiteral(DatatypeFactory.newInstance().newXMLGregorianCalendar(2011, 7, 12, 6, 1, 0, 0, 0))));
-            conn.add(new StatementImpl(vf.createURI(NAMESPACE, uuid), vf.createURI(NAMESPACE, "booleanLit"), vf.createLiteral(true)));
-
-            conn.commit();
-            conn.close();
-
-            myRepository.shutDown();
-        } catch (Exception e) {
-            e.printStackTrace();
-        }
-    }
-
-}

http://git-wip-us.apache.org/repos/asf/incubator-rya/blob/80faf06d/partition/partition.rdf/src/test/java/mvm/mmrts/rdf/partition/MemStoreTst.java
----------------------------------------------------------------------
diff --git a/partition/partition.rdf/src/test/java/mvm/mmrts/rdf/partition/MemStoreTst.java b/partition/partition.rdf/src/test/java/mvm/mmrts/rdf/partition/MemStoreTst.java
deleted file mode 100644
index c68e067..0000000
--- a/partition/partition.rdf/src/test/java/mvm/mmrts/rdf/partition/MemStoreTst.java
+++ /dev/null
@@ -1,71 +0,0 @@
-package mvm.mmrts.rdf.partition;
-
-import org.openrdf.model.URI;
-import org.openrdf.model.ValueFactory;
-import org.openrdf.model.impl.StatementImpl;
-import org.openrdf.model.impl.ValueFactoryImpl;
-import org.openrdf.query.QueryLanguage;
-import org.openrdf.query.TupleQuery;
-import org.openrdf.query.TupleQueryResultHandler;
-import org.openrdf.query.resultio.sparqlxml.SPARQLResultsXMLWriter;
-import org.openrdf.repository.Repository;
-import org.openrdf.repository.RepositoryConnection;
-import org.openrdf.repository.sail.SailRepository;
-import org.openrdf.sail.memory.MemoryStore;
-
-import javax.xml.datatype.DatatypeFactory;
-
-/**
- * Class MemStoreTst
- * Date: Aug 30, 2011
- * Time: 10:04:02 AM
- */
-public class MemStoreTst {
-    public static final String NAMESPACE = "http://here/2010/tracked-data-provenance/ns#";//44 len
-    public static final String RDF_NS = "http://www.w3.org/1999/02/22-rdf-syntax-ns#";
-
-    static ValueFactory vf = ValueFactoryImpl.getInstance();
-
-    public static void main(String[] args) {
-
-        try {
-            MemoryStore store = new MemoryStore();
-            Repository myRepository = new SailRepository(store);
-            myRepository.initialize();
-
-            RepositoryConnection conn = myRepository.getConnection();
-
-            String uuid = "uuid1";
-            conn.add(new StatementImpl(vf.createURI(NAMESPACE, uuid), vf.createURI(RDF_NS, "type"), vf.createURI(NAMESPACE, "Created")));
-            conn.add(new StatementImpl(vf.createURI(NAMESPACE, uuid), vf.createURI(NAMESPACE, "createdItem"), vf.createURI(NAMESPACE, "objectUuid1")));
-            conn.add(new StatementImpl(vf.createURI(NAMESPACE, uuid), vf.createURI(NAMESPACE, "performedBy"), vf.createURI("urn:system:A")));
-            conn.add(new StatementImpl(vf.createURI(NAMESPACE, uuid), vf.createURI(NAMESPACE, "stringLit"), vf.createLiteral("stringLit1")));
-            conn.add(new StatementImpl(vf.createURI(NAMESPACE, uuid), vf.createURI(NAMESPACE, "stringLit"), vf.createLiteral("stringLit2")));
-            conn.add(new StatementImpl(vf.createURI(NAMESPACE, uuid), vf.createURI(NAMESPACE, "performedAt"), vf.createLiteral(DatatypeFactory.newInstance().newXMLGregorianCalendar(2011, 7, 12, 6, 0, 0, 0, 0))));
-            conn.add(new StatementImpl(vf.createURI(NAMESPACE, uuid), vf.createURI(NAMESPACE, "reportedAt"), vf.createLiteral(DatatypeFactory.newInstance().newXMLGregorianCalendar(2011, 7, 12, 6, 1, 0, 0, 0))));
-            conn.add(new StatementImpl(vf.createURI(NAMESPACE, uuid), vf.createURI(NAMESPACE, "booleanLit"), vf.createLiteral(true)));
-            conn.add(new StatementImpl(vf.createURI(NAMESPACE, uuid), vf.createURI(NAMESPACE, "booleanLit"), vf.createLiteral(false)));
-
-            conn.commit();
-
-            //query
-            String query = "PREFIX tdp:<" + NAMESPACE + ">\n" +
-                    "SELECT * WHERE {\n" +
-                    "   ?id tdp:createdItem tdp:objectUuid1.\n" +
-                    "   ?id tdp:stringLit ?str.\n" +
-                    "   ?id tdp:booleanLit ?bl.\n" +
-                    "}";
-
-            TupleQuery tupleQuery = conn.prepareTupleQuery(
-                    QueryLanguage.SPARQL, query);
-            TupleQueryResultHandler writer = new SPARQLResultsXMLWriter(System.out);
-            tupleQuery.evaluate(writer);
-
-            conn.close();
-
-            myRepository.shutDown();
-        } catch (Exception e) {
-            e.printStackTrace();
-        }
-    }
-}

http://git-wip-us.apache.org/repos/asf/incubator-rya/blob/80faf06d/partition/partition.rdf/src/test/java/mvm/mmrts/rdf/partition/PartitionConnectionTest.java
----------------------------------------------------------------------
diff --git a/partition/partition.rdf/src/test/java/mvm/mmrts/rdf/partition/PartitionConnectionTest.java b/partition/partition.rdf/src/test/java/mvm/mmrts/rdf/partition/PartitionConnectionTest.java
deleted file mode 100644
index 3aaeb0d..0000000
--- a/partition/partition.rdf/src/test/java/mvm/mmrts/rdf/partition/PartitionConnectionTest.java
+++ /dev/null
@@ -1,771 +0,0 @@
-package mvm.mmrts.rdf.partition;
-
-import cloudbase.core.client.Connector;
-import cloudbase.core.client.mock.MockInstance;
-import junit.framework.TestCase;
-import mvm.mmrts.rdf.partition.shard.DateHashModShardValueGenerator;
-import org.openrdf.model.Namespace;
-import org.openrdf.model.ValueFactory;
-import org.openrdf.model.impl.StatementImpl;
-import org.openrdf.model.impl.ValueFactoryImpl;
-import org.openrdf.query.*;
-import org.openrdf.repository.RepositoryException;
-import org.openrdf.repository.RepositoryResult;
-import org.openrdf.repository.sail.SailRepository;
-import org.openrdf.repository.sail.SailRepositoryConnection;
-
-import javax.xml.datatype.DatatypeConfigurationException;
-import javax.xml.datatype.DatatypeFactory;
-import java.util.List;
-
-import static mvm.mmrts.rdf.partition.PartitionConstants.*;
-
-/**
- * Class PartitionConnectionTest
- * Date: Jul 6, 2011
- * Time: 5:24:07 PM
- */
-public class PartitionConnectionTest extends TestCase {
-    public static final String NAMESPACE = "http://here/2010/tracked-data-provenance/ns#";//44 len
-    public static final String RDF_NS = "http://www.w3.org/1999/02/22-rdf-syntax-ns#";
-    public static final String HBNAMESPACE = "http://here/2010/tracked-data-provenance/heartbeat/ns#";
-    public static final String HB_TIMESTAMP = HBNAMESPACE + "timestamp";
-
-    private SailRepository repository;
-    private SailRepositoryConnection connection;
-
-    ValueFactory vf = ValueFactoryImpl.getInstance();
-
-    private String objectUuid = "objectuuid1";
-    private static final String TABLE = "rdfPartition";
-    private static final String SHARD_TABLE = "rdfShardIndex";
-    private String ancestor = "ancestor1";
-    private String descendant = "descendant1";
-    private static final long START = 1309532965000l;
-    private static final long END = 1310566686000l;
-    private Connector connector;
-
-    @Override
-    protected void setUp() throws Exception {
-        super.setUp();
-//        connector = new ZooKeeperInstance("stratus", "stratus13:2181").getConnector("root", "password");
-        connector = new MockInstance().getConnector("", "");
-
-        PartitionSail sail = new PartitionSail(connector, TABLE, SHARD_TABLE, new DateHashModShardValueGenerator() {
-            @Override
-            public String generateShardValue(Object obj) {
-                return this.generateShardValue(START + 1000, obj);
-            }
-        });
-
-        repository = new SailRepository(sail);
-        repository.initialize();
-        connection = repository.getConnection();
-
-        loadData();
-    }
-
-    private void loadData() throws RepositoryException, DatatypeConfigurationException {
-        connection.add(new StatementImpl(vf.createURI(NAMESPACE, objectUuid), vf.createURI(NAMESPACE, "name"), vf.createLiteral("objUuid")));
-        //created
-        String uuid = "uuid1";
-        connection.add(new StatementImpl(vf.createURI(NAMESPACE, uuid), vf.createURI(RDF_NS, "type"), vf.createURI(NAMESPACE, "Created")));
-        connection.add(new StatementImpl(vf.createURI(NAMESPACE, uuid), vf.createURI(NAMESPACE, "createdItem"), vf.createURI(NAMESPACE, objectUuid)));
-        connection.add(new StatementImpl(vf.createURI(NAMESPACE, uuid), vf.createURI(NAMESPACE, "performedBy"), vf.createURI("urn:system:A")));
-        connection.add(new StatementImpl(vf.createURI(NAMESPACE, uuid), vf.createURI(NAMESPACE, "stringLit"), vf.createLiteral("stringLit1")));
-        connection.add(new StatementImpl(vf.createURI(NAMESPACE, uuid), vf.createURI(NAMESPACE, "stringLit"), vf.createLiteral("stringLit2")));
-        connection.add(new StatementImpl(vf.createURI(NAMESPACE, uuid), vf.createURI(NAMESPACE, "stringLit"), vf.createLiteral("stringLit3")));
-        connection.add(new StatementImpl(vf.createURI(NAMESPACE, uuid), vf.createURI(NAMESPACE, "stringLit"), vf.createLiteral("stringLit4")));
-        connection.add(new StatementImpl(vf.createURI(NAMESPACE, uuid), vf.createURI(NAMESPACE, "strLit1"), vf.createLiteral("strLit1")));
-        connection.add(new StatementImpl(vf.createURI(NAMESPACE, uuid), vf.createURI(NAMESPACE, "strLit1"), vf.createLiteral("strLit2")));
-        connection.add(new StatementImpl(vf.createURI(NAMESPACE, uuid), vf.createURI(NAMESPACE, "strLit1"), vf.createLiteral("strLit3")));
-        connection.add(new StatementImpl(vf.createURI(NAMESPACE, uuid), vf.createURI(NAMESPACE, "performedAt"), vf.createLiteral(DatatypeFactory.newInstance().newXMLGregorianCalendar(2011, 7, 12, 6, 0, 0, 0, 0))));
-        connection.add(new StatementImpl(vf.createURI(NAMESPACE, uuid), vf.createURI(NAMESPACE, "reportedAt"), vf.createLiteral(DatatypeFactory.newInstance().newXMLGregorianCalendar(2011, 7, 12, 6, 1, 0, 0, 0))));
-        //clicked
-        uuid = "uuid2";
-        connection.add(new StatementImpl(vf.createURI(NAMESPACE, uuid), vf.createURI(RDF_NS, "type"), vf.createURI(NAMESPACE, "Clicked")));
-        connection.add(new StatementImpl(vf.createURI(NAMESPACE, uuid), vf.createURI(NAMESPACE, "clickedItem"), vf.createURI(NAMESPACE, objectUuid)));
-        connection.add(new StatementImpl(vf.createURI(NAMESPACE, uuid), vf.createURI(NAMESPACE, "performedBy"), vf.createURI("urn:system:B")));
-        connection.add(new StatementImpl(vf.createURI(NAMESPACE, uuid), vf.createURI(NAMESPACE, "performedAt"), vf.createLiteral(DatatypeFactory.newInstance().newXMLGregorianCalendar(2011, 7, 12, 6, 2, 0, 0, 0))));
-        connection.add(new StatementImpl(vf.createURI(NAMESPACE, uuid), vf.createURI(NAMESPACE, "reportedAt"), vf.createLiteral(DatatypeFactory.newInstance().newXMLGregorianCalendar(2011, 7, 12, 6, 3, 0, 0, 0))));
-        //deleted
-        uuid = "uuid3";
-        connection.add(new StatementImpl(vf.createURI(NAMESPACE, uuid), vf.createURI(RDF_NS, "type"), vf.createURI(NAMESPACE, "Deleted")));
-        connection.add(new StatementImpl(vf.createURI(NAMESPACE, uuid), vf.createURI(NAMESPACE, "deletedItem"), vf.createURI(NAMESPACE, objectUuid)));
-        connection.add(new StatementImpl(vf.createURI(NAMESPACE, uuid), vf.createURI(NAMESPACE, "performedBy"), vf.createURI("urn:system:C")));
-        connection.add(new StatementImpl(vf.createURI(NAMESPACE, uuid), vf.createURI(NAMESPACE, "performedAt"), vf.createLiteral(DatatypeFactory.newInstance().newXMLGregorianCalendar(2011, 7, 12, 6, 4, 0, 0, 0))));
-        connection.add(new StatementImpl(vf.createURI(NAMESPACE, uuid), vf.createURI(NAMESPACE, "reportedAt"), vf.createLiteral(DatatypeFactory.newInstance().newXMLGregorianCalendar(2011, 7, 12, 6, 5, 0, 0, 0))));
-        //dropped
-        uuid = "uuid4";
-        connection.add(new StatementImpl(vf.createURI(NAMESPACE, uuid), vf.createURI(RDF_NS, "type"), vf.createURI(NAMESPACE, "Dropped")));
-        connection.add(new StatementImpl(vf.createURI(NAMESPACE, uuid), vf.createURI(NAMESPACE, "droppedItem"), vf.createURI(NAMESPACE, objectUuid)));
-        connection.add(new StatementImpl(vf.createURI(NAMESPACE, uuid), vf.createURI(NAMESPACE, "performedBy"), vf.createURI("urn:system:D")));
-        connection.add(new StatementImpl(vf.createURI(NAMESPACE, uuid), vf.createURI(NAMESPACE, "performedAt"), vf.createLiteral(DatatypeFactory.newInstance().newXMLGregorianCalendar(2011, 7, 12, 6, 6, 0, 0, 0))));
-        connection.add(new StatementImpl(vf.createURI(NAMESPACE, uuid), vf.createURI(NAMESPACE, "reportedAt"), vf.createLiteral(DatatypeFactory.newInstance().newXMLGregorianCalendar(2011, 7, 12, 6, 7, 0, 0, 0))));
-        //received
-        uuid = "uuid5";
-        connection.add(new StatementImpl(vf.createURI(NAMESPACE, uuid), vf.createURI(RDF_NS, "type"), vf.createURI(NAMESPACE, "Received")));
-        connection.add(new StatementImpl(vf.createURI(NAMESPACE, uuid), vf.createURI(NAMESPACE, "receivedItem"), vf.createURI(NAMESPACE, objectUuid)));
-        connection.add(new StatementImpl(vf.createURI(NAMESPACE, uuid), vf.createURI(NAMESPACE, "performedBy"), vf.createURI("urn:system:E")));
-        connection.add(new StatementImpl(vf.createURI(NAMESPACE, uuid), vf.createURI(NAMESPACE, "performedAt"), vf.createLiteral(DatatypeFactory.newInstance().newXMLGregorianCalendar(2011, 7, 12, 6, 8, 0, 0, 0))));
-        connection.add(new StatementImpl(vf.createURI(NAMESPACE, uuid), vf.createURI(NAMESPACE, "reportedAt"), vf.createLiteral(DatatypeFactory.newInstance().newXMLGregorianCalendar(2011, 7, 12, 6, 9, 0, 0, 0))));
-        //sent
-        uuid = "uuid6";
-        connection.add(new StatementImpl(vf.createURI(NAMESPACE, uuid), vf.createURI(RDF_NS, "type"), vf.createURI(NAMESPACE, "Sent")));
-        connection.add(new StatementImpl(vf.createURI(NAMESPACE, uuid), vf.createURI(NAMESPACE, "sentItem"), vf.createURI(NAMESPACE, objectUuid)));
-        connection.add(new StatementImpl(vf.createURI(NAMESPACE, uuid), vf.createURI(NAMESPACE, "performedBy"), vf.createURI("urn:system:F")));
-        connection.add(new StatementImpl(vf.createURI(NAMESPACE, uuid), vf.createURI(NAMESPACE, "performedAt"), vf.createLiteral(DatatypeFactory.newInstance().newXMLGregorianCalendar(2011, 7, 12, 6, 10, 0, 0, 0))));
-        connection.add(new StatementImpl(vf.createURI(NAMESPACE, uuid), vf.createURI(NAMESPACE, "reportedAt"), vf.createLiteral(DatatypeFactory.newInstance().newXMLGregorianCalendar(2011, 7, 12, 6, 11, 0, 0, 0))));
-        //stored
-        uuid = "uuid7";
-        connection.add(new StatementImpl(vf.createURI(NAMESPACE, uuid), vf.createURI(RDF_NS, "type"), vf.createURI(NAMESPACE, "Stored")));
-        connection.add(new StatementImpl(vf.createURI(NAMESPACE, uuid), vf.createURI(NAMESPACE, "storedItem"), vf.createURI(NAMESPACE, objectUuid)));
-        connection.add(new StatementImpl(vf.createURI(NAMESPACE, uuid), vf.createURI(NAMESPACE, "performedBy"), vf.createURI("urn:system:G")));
-        connection.add(new StatementImpl(vf.createURI(NAMESPACE, uuid), vf.createURI(NAMESPACE, "performedAt"), vf.createLiteral(DatatypeFactory.newInstance().newXMLGregorianCalendar(2011, 7, 12, 6, 12, 0, 0, 0))));
-        connection.add(new StatementImpl(vf.createURI(NAMESPACE, uuid), vf.createURI(NAMESPACE, "reportedAt"), vf.createLiteral(DatatypeFactory.newInstance().newXMLGregorianCalendar(2011, 7, 12, 6, 13, 0, 0, 0))));
-
-        //derivedFrom
-        connection.add(new StatementImpl(vf.createURI(NAMESPACE, descendant), vf.createURI(NAMESPACE, "derivedFrom"), vf.createURI(NAMESPACE, ancestor)));
-        connection.add(new StatementImpl(vf.createURI(NAMESPACE, descendant), vf.createURI(NAMESPACE, "name"), vf.createLiteral("descendantOne")));
-        connection.add(new StatementImpl(vf.createURI(NAMESPACE, ancestor), vf.createURI(NAMESPACE, "name"), vf.createLiteral("ancestor1")));
-
-        //heartbeats
-        String hbuuid = "hbuuid1";
-        connection.add(new StatementImpl(vf.createURI(HBNAMESPACE, hbuuid), vf.createURI(RDF_NS, "type"), vf.createURI(HBNAMESPACE, "HeartbeatMeasurement")));
-        connection.add(new StatementImpl(vf.createURI(HBNAMESPACE, hbuuid), vf.createURI(HB_TIMESTAMP), vf.createLiteral((START + 1) + "")));
-        connection.add(new StatementImpl(vf.createURI(HBNAMESPACE, hbuuid), vf.createURI(HBNAMESPACE, "count"), vf.createLiteral(1 + "")));
-        connection.add(new StatementImpl(vf.createURI(HBNAMESPACE, hbuuid), vf.createURI(HBNAMESPACE, "systemName"), vf.createURI("urn:system:A")));
-        connection.add(new StatementImpl(vf.createURI("urn:system:A"), vf.createURI(HBNAMESPACE, "heartbeat"), vf.createURI(HBNAMESPACE, hbuuid)));
-
-        hbuuid = "hbuuid2";
-        connection.add(new StatementImpl(vf.createURI(HBNAMESPACE, hbuuid), vf.createURI(RDF_NS, "type"), vf.createURI(HBNAMESPACE, "HeartbeatMeasurement")));
-        connection.add(new StatementImpl(vf.createURI(HBNAMESPACE, hbuuid), vf.createURI(HB_TIMESTAMP), vf.createLiteral((START + 2) + "")));
-        connection.add(new StatementImpl(vf.createURI(HBNAMESPACE, hbuuid), vf.createURI(HBNAMESPACE, "count"), vf.createLiteral(2 + "")));
-        connection.add(new StatementImpl(vf.createURI(HBNAMESPACE, hbuuid), vf.createURI(HBNAMESPACE, "systemName"), vf.createURI("urn:system:B")));
-        connection.add(new StatementImpl(vf.createURI("urn:system:B"), vf.createURI(HBNAMESPACE, "heartbeat"), vf.createURI(HBNAMESPACE, hbuuid)));
-
-        hbuuid = "hbuuid3";
-        connection.add(new StatementImpl(vf.createURI(HBNAMESPACE, hbuuid), vf.createURI(RDF_NS, "type"), vf.createURI(HBNAMESPACE, "HeartbeatMeasurement")));
-        connection.add(new StatementImpl(vf.createURI(HBNAMESPACE, hbuuid), vf.createURI(HB_TIMESTAMP), vf.createLiteral((START + 3) + "")));
-        connection.add(new StatementImpl(vf.createURI(HBNAMESPACE, hbuuid), vf.createURI(HBNAMESPACE, "count"), vf.createLiteral(3 + "")));
-        connection.add(new StatementImpl(vf.createURI(HBNAMESPACE, hbuuid), vf.createURI(HBNAMESPACE, "systemName"), vf.createURI("urn:system:C")));
-        connection.add(new StatementImpl(vf.createURI("urn:system:C"), vf.createURI(HBNAMESPACE, "heartbeat"), vf.createURI(HBNAMESPACE, hbuuid)));
-
-        connection.add(new StatementImpl(vf.createURI("urn:subj1"), vf.createURI("urn:pred"), vf.createLiteral("obj1")));
-        connection.add(new StatementImpl(vf.createURI("urn:subj1"), vf.createURI("urn:pred"), vf.createLiteral("obj2")));
-        connection.add(new StatementImpl(vf.createURI("urn:subj1"), vf.createURI("urn:pred"), vf.createLiteral("obj3")));
-        connection.add(new StatementImpl(vf.createURI("urn:subj1"), vf.createURI("urn:pred"), vf.createLiteral("obj4")));
-        connection.add(new StatementImpl(vf.createURI("urn:subj2"), vf.createURI("urn:pred"), vf.createLiteral("obj1")));
-        connection.add(new StatementImpl(vf.createURI("urn:subj2"), vf.createURI("urn:pred"), vf.createLiteral("obj2")));
-        connection.add(new StatementImpl(vf.createURI("urn:subj2"), vf.createURI("urn:pred"), vf.createLiteral("obj3")));
-        connection.add(new StatementImpl(vf.createURI("urn:subj2"), vf.createURI("urn:pred"), vf.createLiteral("obj4")));
-        connection.add(new StatementImpl(vf.createURI("urn:subj3"), vf.createURI("urn:pred"), vf.createLiteral("obj1")));
-        connection.add(new StatementImpl(vf.createURI("urn:subj3"), vf.createURI("urn:pred"), vf.createLiteral("obj4")));
-
-        //MMRTS-150
-        connection.add(new StatementImpl(vf.createURI("urn:mmrts150_1"), vf.createURI("urn:bool"), vf.createLiteral("true")));
-        connection.add(new StatementImpl(vf.createURI("urn:mmrts150_2"), vf.createURI("urn:bool"), vf.createLiteral("true")));
-        connection.add(new StatementImpl(vf.createURI("urn:mmrts150_3"), vf.createURI("urn:bool"), vf.createLiteral("true")));
-        connection.add(new StatementImpl(vf.createURI("urn:mmrts150_4"), vf.createURI("urn:bool"), vf.createLiteral("true")));
-        connection.add(new StatementImpl(vf.createURI("urn:mmrts150_4"), vf.createURI("urn:sentItem"), vf.createLiteral("thisItemNum")));
-        connection.add(new StatementImpl(vf.createURI("urn:mmrts150_5"), vf.createURI("urn:bool"), vf.createLiteral("true")));
-        connection.commit();
-    }
-
-    @Override
-    protected void tearDown() throws Exception {
-        super.tearDown();
-        connection.close();
-        repository.shutDown();
-    }
-
-//    public void testScanAll() throws Exception {
-//        Scanner sc = connector.createScanner(TABLE, ALL_AUTHORIZATIONS);
-//        for (Object aSc : sc) System.out.println((Map.Entry<Key, Value>) aSc);
-//
-//    }
-
-    public void testNamespace() throws Exception {
-        String namespace = "urn:testNamespace#";
-        String prefix = "pfx";
-        connection.setNamespace(prefix, namespace);
-
-        assertEquals(namespace, connection.getNamespace(prefix));
-    }
-
-    public void testGetNamespaces() throws Exception {
-        String namespace = "urn:testNamespace#";
-        String prefix = "pfx";
-        connection.setNamespace(prefix, namespace);
-
-        namespace = "urn:testNamespace2#";
-        prefix = "pfx2";
-        connection.setNamespace(prefix, namespace);
-
-        RepositoryResult<Namespace> result = connection.getNamespaces();
-        int count = 0;
-        while (result.hasNext()) {
-            result.next();
-            count++;
-        }
-
-        assertEquals(count, 2);
-    }
-
-    public void testAddCommitStatement() throws Exception {
-        StatementImpl stmt = new StatementImpl(vf.createURI("urn:namespace#subj"), vf.createURI("urn:namespace#pred"), vf.createLiteral("object"));
-        connection.add(stmt);
-        connection.commit();
-    }
-
-    public void testSelectOnlyQuery() throws Exception {
-        String query = "PREFIX ns:<" + NAMESPACE + ">\n" +
-                "select * where {\n" +
-                "ns:uuid1 ns:createdItem ?cr.\n" +
-                "ns:uuid1 ns:reportedAt ?ra.\n" +
-                "ns:uuid1 ns:performedAt ?pa.\n" +
-                "}\n";
-        TupleQuery tupleQuery = connection.prepareTupleQuery(QueryLanguage.SPARQL, query);
-//        tupleQuery.evaluate(new PrintTupleHandler());
-        CountTupleHandler tupleHandler = new CountTupleHandler();
-        tupleQuery.evaluate(tupleHandler);
-        assertEquals(1, tupleHandler.getCount());
-    }
-
-    //provenance Queries//////////////////////////////////////////////////////////////////////
-
-    public void testEventInfo() throws Exception {
-        String query = "PREFIX ns:<" + NAMESPACE + ">\n" +
-                "select * where {\n" +
-                "   ns:uuid1 ?p ?o.\n" +
-                "}\n";
-
-        TupleQuery tupleQuery = connection.prepareTupleQuery(QueryLanguage.SPARQL, query);
-//                tupleQuery.evaluate(new PrintTupleHandler());
-        CountTupleHandler tupleHandler = new CountTupleHandler();
-        tupleQuery.evaluate(tupleHandler);
-        assertEquals(12, tupleHandler.getCount());
-    }
-
-    public void testAllAncestors() throws Exception {
-        String query = "PREFIX ns:<" + NAMESPACE + ">\n" +
-                "select * where {\n" +
-                "ns:" + descendant + " ns:derivedFrom ?dr.\n" +
-                "}\n";
-        TupleQuery tupleQuery = connection.prepareTupleQuery(QueryLanguage.SPARQL, query);
-        //        tupleQuery.evaluate(new PrintTupleHandler());
-        CountTupleHandler tupleHandler = new CountTupleHandler();
-        tupleQuery.evaluate(tupleHandler);
-        assertEquals(1, tupleHandler.getCount());
-    }
-
-    public void testAllDescendants() throws Exception {
-        String query = "PREFIX ns:<" + NAMESPACE + ">\n" +
-                "select * where {\n" +
-                "?ds ns:derivedFrom ns:" + ancestor + ".\n" +
-                "}\n";
-        TupleQuery tupleQuery = connection.prepareTupleQuery(QueryLanguage.SPARQL, query);
-//        tupleQuery.evaluate(new PrintTupleHandler());
-        CountTupleHandler tupleHandler = new CountTupleHandler();
-        tupleQuery.evaluate(tupleHandler);
-        assertEquals(1, tupleHandler.getCount());
-    }
-
-    public void testEventsForUri() throws Exception {
-        String query = "PREFIX rdf: <http://www.w3.org/1999/02/22-rdf-syntax-ns#>\n" +
-                "PREFIX ns:<" + NAMESPACE + ">\n" +
-                "PREFIX mvmpart: <urn:mvm.mmrts.partition.rdf/08/2011#>\n" +
-                "select * where {\n" +
-                "{" +
-                "   ?s rdf:type ns:Created.\n" +
-                "   ?s ns:createdItem ns:objectuuid1.\n" +
-                "   ?s ns:performedBy ?pb.\n" +
-                "   FILTER(mvmpart:timeRange(?s, ns:performedAt, " + START + ", " + END + ", 'XMLDATETIME'))\n" +
-                "}\n" +
-                "UNION {" +
-                "   ?s rdf:type ns:Clicked.\n" +
-                "   ?s ns:clickedItem ns:objectuuid1.\n" +
-                "   ?s ns:performedBy ?pb.\n" +
-                "   FILTER(mvmpart:timeRange(?s, ns:performedAt, " + START + ", " + END + ", 'XMLDATETIME'))\n" +
-                "}\n" +
-                "UNION {" +
-                "   ?s rdf:type ns:Deleted.\n" +
-                "   ?s ns:deletedItem ns:objectuuid1.\n" +
-                "   ?s ns:performedBy ?pb.\n" +
-                "   FILTER(mvmpart:timeRange(?s, ns:performedAt, " + START + ", " + END + ", 'XMLDATETIME'))\n" +
-                "}\n" +
-                "UNION {" +
-                "   ?s rdf:type ns:Dropped.\n" +
-                "   ?s ns:droppedItem ns:objectuuid1.\n" +
-                "   ?s ns:performedBy ?pb.\n" +
-                "   FILTER(mvmpart:timeRange(?s, ns:performedAt, " + START + ", " + END + ", 'XMLDATETIME'))\n" +
-                "}\n" +
-                "UNION {" +
-                "   ?s rdf:type ns:Received.\n" +
-                "   ?s ns:receivedItem ns:objectuuid1.\n" +
-                "   ?s ns:performedBy ?pb.\n" +
-                "   FILTER(mvmpart:timeRange(?s, ns:performedAt, " + START + ", " + END + ", 'XMLDATETIME'))\n" +
-                "}\n" +
-                "UNION {" +
-                "   ?s rdf:type ns:Stored.\n" +
-                "   ?s ns:storedItem ns:objectuuid1.\n" +
-                "   ?s ns:performedBy ?pb.\n" +
-                "   FILTER(mvmpart:timeRange(?s, ns:performedAt, " + START + ", " + END + ", 'XMLDATETIME'))\n" +
-                "}\n" +
-                "UNION {" +
-                "   ?s rdf:type ns:Sent.\n" +
-                "   ?s ns:sentItem ns:objectuuid1.\n" +
-                "   ?s ns:performedBy ?pb.\n" +
-                "   FILTER(mvmpart:timeRange(?s, ns:performedAt, " + START + ", " + END + ", 'XMLDATETIME'))\n" +
-                "}\n" +
-                "}\n";
-        TupleQuery tupleQuery = connection.prepareTupleQuery(QueryLanguage.SPARQL, query);
-//        tupleQuery.setBinding(START_BINDING, vf.createLiteral(START));
-//        tupleQuery.setBinding(END_BINDING, vf.createLiteral(END));
-//        tupleQuery.setBinding(TIME_PREDICATE, vf.createURI(NAMESPACE, "performedAt"));
-//                tupleQuery.evaluate(new PrintTupleHandler());
-        CountTupleHandler tupleHandler = new CountTupleHandler();
-        tupleQuery.evaluate(tupleHandler);
-        assertEquals(7, tupleHandler.getCount());
-    }
-
-    public void testAllEvents() throws Exception {
-        String query = "PREFIX rdf: <http://www.w3.org/1999/02/22-rdf-syntax-ns#>\n" +
-                "PREFIX ns:<" + NAMESPACE + ">\n" +
-                "PREFIX mvmpart: <urn:mvm.mmrts.partition.rdf/08/2011#>\n" +
-                "select * where {\n" +
-                "{" +
-                "   ?s rdf:type ns:Created.\n" +
-                "   ?s ns:performedBy ?pb.\n" +
-                "   FILTER(mvmpart:timeRange(?s, ns:performedAt, " + START + ", " + END + ", 'XMLDATETIME'))\n" +
-                "}\n" +
-                "UNION {" +
-                "   ?s rdf:type ns:Clicked.\n" +
-                "   ?s ns:performedBy ?pb.\n" +
-                "   FILTER(mvmpart:timeRange(?s, ns:performedAt, " + START + ", " + END + ", 'XMLDATETIME'))\n" +
-                "}\n" +
-                "UNION {" +
-                "   ?s rdf:type ns:Deleted.\n" +
-                "   ?s ns:performedBy ?pb.\n" +
-                "   FILTER(mvmpart:timeRange(?s, ns:performedAt, " + START + ", " + END + ", 'XMLDATETIME'))\n" +
-                "}\n" +
-                "UNION {" +
-                "   ?s rdf:type ns:Dropped.\n" +
-                "   ?s ns:performedBy ?pb.\n" +
-                "   FILTER(mvmpart:timeRange(?s, ns:performedAt, " + START + ", " + END + ", 'XMLDATETIME'))\n" +
-                "}\n" +
-                "UNION {" +
-                "   ?s rdf:type ns:Received.\n" +
-                "   ?s ns:performedBy ?pb.\n" +
-                "   FILTER(mvmpart:timeRange(?s, ns:performedAt, " + START + ", " + END + ", 'XMLDATETIME'))\n" +
-                "}\n" +
-                "UNION {" +
-                "   ?s rdf:type ns:Stored.\n" +
-                "   ?s ns:performedBy ?pb.\n" +
-                "   FILTER(mvmpart:timeRange(?s, ns:performedAt, " + START + ", " + END + ", 'XMLDATETIME'))\n" +
-                "}\n" +
-                "UNION {" +
-                "   ?s rdf:type ns:Sent.\n" +
-                "   ?s ns:performedBy ?pb.\n" +
-                "   FILTER(mvmpart:timeRange(?s, ns:performedAt, " + START + ", " + END + ", 'XMLDATETIME'))\n" +
-                "}\n" +
-                "}\n";
-        TupleQuery tupleQuery = connection.prepareTupleQuery(QueryLanguage.SPARQL, query);
-//        tupleQuery.setBinding(START_BINDING, vf.createLiteral(START));
-//        tupleQuery.setBinding(END_BINDING, vf.createLiteral(END));
-//        tupleQuery.setBinding(TIME_PREDICATE, vf.createURI(NAMESPACE, "performedAt"));
-//                tupleQuery.evaluate(new PrintTupleHandler());
-        CountTupleHandler tupleHandler = new CountTupleHandler();
-        tupleQuery.evaluate(tupleHandler);
-        assertEquals(7, tupleHandler.getCount());
-//        System.out.println(tupleHandler.getCount());
-    }
-
-    public void testEventsBtwnSystems() throws Exception {
-        String query = "PREFIX rdf: <http://www.w3.org/1999/02/22-rdf-syntax-ns#>\n" +
-                "PREFIX ns:<" + NAMESPACE + ">\n" +
-                "PREFIX mvmpart: <urn:mvm.mmrts.partition.rdf/08/2011#>\n" +
-                "select * where {\n" +
-                "   ?sendEvent rdf:type ns:Sent;\n" +
-                "              ns:sentItem ?objUuid;\n" +
-                "              ns:performedBy <urn:system:F>.\n" +
-                "   ?recEvent rdf:type ns:Received;\n" +
-                "              ns:receivedItem ?objUuid;\n" +
-                "              ns:performedBy <urn:system:E>.\n" +
-                "   FILTER(mvmpart:timeRange(?sendEvent, ns:performedAt, " + START + ", " + END + ", 'XMLDATETIME'))\n" +
-                "   FILTER(mvmpart:timeRange(?recEvent, ns:performedAt, " + START + ", " + END + ", 'XMLDATETIME'))\n" +
-                "}\n";
-        TupleQuery tupleQuery = connection.prepareTupleQuery(QueryLanguage.SPARQL, query);
-//        tupleQuery.setBinding(START_BINDING, vf.createLiteral(START));
-//        tupleQuery.setBinding(END_BINDING, vf.createLiteral(END));
-//        tupleQuery.setBinding(TIME_PREDICATE, vf.createURI(NAMESPACE, "performedAt"));
-//        tupleQuery.evaluate(new PrintTupleHandler());
-        CountTupleHandler tupleHandler = new CountTupleHandler();
-        tupleQuery.evaluate(tupleHandler);
-        assertEquals(1, tupleHandler.getCount());
-    }
-
-    public void testHeartbeatCounts() throws Exception {
-        String query = "PREFIX rdf: <http://www.w3.org/1999/02/22-rdf-syntax-ns#>\n" +
-                "PREFIX hns:<" + HBNAMESPACE + ">\n" +
-                "PREFIX mvmpart: <urn:mvm.mmrts.partition.rdf/08/2011#>\n" +
-                "select * where {\n" +
-                "   ?hb rdf:type hns:HeartbeatMeasurement;\n" +
-                "              hns:count ?count;\n" +
-                "              hns:systemName ?systemName.\n" +
-                "   FILTER(mvmpart:timeRange(?hb, hns:timestamp, " + START + ", " + (START + 2) + ", 'TIMESTAMP'))\n" +
-                "}\n";
-        System.out.println(query);
-        TupleQuery tupleQuery = connection.prepareTupleQuery(QueryLanguage.SPARQL, query);
-//        tupleQuery.setBinding(START_BINDING, vf.createLiteral(START));
-//        tupleQuery.setBinding(END_BINDING, vf.createLiteral(START + 2));
-//        tupleQuery.setBinding(TIME_PREDICATE, vf.createURI(HB_TIMESTAMP));
-//        tupleQuery.setBinding(TIME_TYPE_PROP, vf.createLiteral(TimeType.TIMESTAMP.name()));
-//                tupleQuery.evaluate(new PrintTupleHandler());
-        CountTupleHandler tupleHandler = new CountTupleHandler();
-        tupleQuery.evaluate(tupleHandler);
-        assertEquals(2, tupleHandler.getCount());
-    }
-
-    //provenance Queries//////////////////////////////////////////////////////////////////////
-
-    public void testCreatedEvents() throws Exception {
-        String query = "PREFIX ns:<" + NAMESPACE + ">\n" +
-                "select * where {\n" +
-                "   ?s ns:createdItem ns:objectuuid1.\n" +
-                "   ?s ns:reportedAt ?ra.\n" +
-                "}\n";
-        TupleQuery tupleQuery = connection.prepareTupleQuery(QueryLanguage.SPARQL, query);
-        tupleQuery.setBinding(START_BINDING, vf.createLiteral(START));
-        tupleQuery.setBinding(END_BINDING, vf.createLiteral(END));
-        tupleQuery.setBinding(TIME_PREDICATE, vf.createURI(NAMESPACE, "performedAt"));
-//                tupleQuery.evaluate(new PrintTupleHandler());
-        CountTupleHandler tupleHandler = new CountTupleHandler();
-        tupleQuery.evaluate(tupleHandler);
-        assertEquals(1, tupleHandler.getCount());
-    }
-
-    public void testSelectAllAfterFilter() throws Exception {
-        String query = "PREFIX ns:<" + NAMESPACE + ">\n" +
-                "select * where {\n" +
-                "   ?s ns:createdItem ns:objectuuid1.\n" +
-                "   ?s ?p ?o.\n" +
-                "}\n";
-        TupleQuery tupleQuery = connection.prepareTupleQuery(QueryLanguage.SPARQL, query);
-        tupleQuery.setBinding(START_BINDING, vf.createLiteral(START));
-        tupleQuery.setBinding(END_BINDING, vf.createLiteral(END));
-        tupleQuery.setBinding(TIME_PREDICATE, vf.createURI(NAMESPACE, "performedAt"));
-//        tupleQuery.evaluate(new PrintTupleHandler());
-        CountTupleHandler tupleHandler = new CountTupleHandler();
-        tupleQuery.evaluate(tupleHandler);
-        assertEquals(12, tupleHandler.getCount());
-    }
-
-    public void testFilterQuery() throws Exception {
-        String query = "PREFIX ns:<" + NAMESPACE + ">\n" +
-                "select * where {\n" +
-                "ns:uuid1 ns:createdItem ?cr.\n" +
-                "ns:uuid1 ns:stringLit ?sl.\n" +
-                "FILTER regex(?sl, \"stringLit1\")" +
-                "ns:uuid1 ns:reportedAt ?ra.\n" +
-                "ns:uuid1 ns:performedAt ?pa.\n" +
-                "}\n";
-        TupleQuery tupleQuery = connection.prepareTupleQuery(QueryLanguage.SPARQL, query);
-        //        tupleQuery.evaluate(new PrintTupleHandler());
-        CountTupleHandler tupleHandler = new CountTupleHandler();
-        tupleQuery.evaluate(tupleHandler);
-        assertEquals(1, tupleHandler.getCount());
-    }
-
-    public void testMultiplePredicatesMultipleBindingSets() throws Exception {
-        //MMRTS-121
-        String query = "PREFIX ns:<" + NAMESPACE + ">\n" +
-                "select * where {\n" +
-                "?id ns:createdItem ns:objectuuid1.\n" +
-                "?id ns:stringLit ?sl.\n" +
-                "?id ns:strLit1 ?s2.\n" +
-                "}\n";
-        TupleQuery tupleQuery = connection.prepareTupleQuery(QueryLanguage.SPARQL, query);
-//        tupleQuery.evaluate(new PrintTupleHandler());
-        CountTupleHandler tupleHandler = new CountTupleHandler();
-        tupleQuery.evaluate(tupleHandler);
-        assertEquals(12, tupleHandler.getCount());
-    }
-
-    public void testMultiShardLookupTimeRange() throws Exception {
-        //MMRTS-113
-        String query = "PREFIX hb: <http://here/2010/tracked-data-provenance/heartbeat/ns#>\n" +
-                "PREFIX mvmpart: <urn:mvm.mmrts.partition.rdf/08/2011#>\n" +
-                "SELECT * WHERE\n" +
-                "{\n" +
-                "?id hb:timestamp ?timestamp.\n" +
-                "FILTER(mvmpart:timeRange(?id, hb:timestamp, " + START + " , " + (START + 2) + " , 'TIMESTAMP'))\n" +
-                "?id hb:count ?count.\n" +
-                "?system hb:heartbeat ?id.\n" +
-                "}";
-        TupleQuery tupleQuery = connection.prepareTupleQuery(QueryLanguage.SPARQL, query);
-//        tupleQuery.evaluate(new PrintTupleHandler());
-        CountTupleHandler tupleHandler = new CountTupleHandler();
-        tupleQuery.evaluate(tupleHandler);
-        assertEquals(2, tupleHandler.getCount());
-    }
-
-    public void testMultiShardLookupTimeRangeValueConst() throws Exception {
-        //MMRTS-113
-        String query = "PREFIX hb: <http://here/2010/tracked-data-provenance/heartbeat/ns#>\n" +
-                "PREFIX mvmpart: <urn:mvm.mmrts.partition.rdf/08/2011#>\n" +
-                "SELECT * WHERE\n" +
-                "{\n" +
-                "<http://here/2010/tracked-data-provenance/heartbeat/ns#hbuuid2> hb:timestamp ?timestamp.\n" +
-                "FILTER(mvmpart:timeRange(<http://here/2010/tracked-data-provenance/heartbeat/ns#hbuuid2>, hb:timestamp, " + START + " , " + END + " , 'TIMESTAMP'))\n" +
-                "<http://here/2010/tracked-data-provenance/heartbeat/ns#hbuuid2> hb:count ?count.\n" +
-                "?system hb:heartbeat <http://here/2010/tracked-data-provenance/heartbeat/ns#hbuuid2>.\n" +
-                "}";
-        TupleQuery tupleQuery = connection.prepareTupleQuery(QueryLanguage.SPARQL, query);
-//        tupleQuery.evaluate(new PrintTupleHandler());
-        CountTupleHandler tupleHandler = new CountTupleHandler();
-        tupleQuery.evaluate(tupleHandler);
-        assertEquals(1, tupleHandler.getCount());
-    }
-
-    public void testGlobalTimeRange() throws Exception {
-        //MMRTS-113
-        String query = "PREFIX hb: <http://here/2010/tracked-data-provenance/heartbeat/ns#>\n" +
-                "PREFIX mvmpart: <urn:mvm.mmrts.partition.rdf/08/2011#>\n" +
-                "SELECT * WHERE\n" +
-                "{\n" +
-                "<http://here/2010/tracked-data-provenance/heartbeat/ns#hbuuid2> hb:timestamp ?timestamp.\n" +
-                "<http://here/2010/tracked-data-provenance/heartbeat/ns#hbuuid2> hb:count ?count.\n" +
-                "?system hb:heartbeat <http://here/2010/tracked-data-provenance/heartbeat/ns#hbuuid2>.\n" +
-                "}";
-        TupleQuery tupleQuery = connection.prepareTupleQuery(QueryLanguage.SPARQL, query);
-        tupleQuery.setBinding(START_BINDING, vf.createLiteral(START));
-        tupleQuery.setBinding(END_BINDING, vf.createLiteral(END));
-        tupleQuery.setBinding(TIME_PREDICATE, vf.createURI(HBNAMESPACE, HB_TIMESTAMP));
-        tupleQuery.setBinding(TIME_TYPE_PROP, vf.createLiteral(TimeType.TIMESTAMP.name()));
-//        tupleQuery.evaluate(new PrintTupleHandler());
-        CountTupleHandler tupleHandler = new CountTupleHandler();
-        tupleQuery.evaluate(tupleHandler);
-        assertEquals(0, tupleHandler.getCount()); //because system does not have a timerange
-    }
-
-    public void testLinkQuery() throws Exception {
-        String query = "PREFIX ns:<" + NAMESPACE + ">\n" +
-                "SELECT * WHERE {\n" +
-                "     <http://here/2010/tracked-data-provenance/ns#uuid1> ns:createdItem ?o .\n" +
-                "     ?o ns:name ?n .\n" +
-                "}";
-        TupleQuery tupleQuery = connection.prepareTupleQuery(QueryLanguage.SPARQL, query);
-//        tupleQuery.evaluate(new PrintTupleHandler());
-        CountTupleHandler tupleHandler = new CountTupleHandler();
-        tupleQuery.evaluate(tupleHandler);
-        assertEquals(1, tupleHandler.getCount());
-    }
-
-    public void testRangeOverDuplicateItems() throws Exception {
-        String query = "PREFIX ns:<" + NAMESPACE + ">\n" +
-                "SELECT * WHERE {\n" +
-                "     ?subj <urn:pred> \"obj2\" .\n" +
-                "}";
-        TupleQuery tupleQuery = connection.prepareTupleQuery(QueryLanguage.SPARQL, query);
-//        tupleQuery.evaluate(new PrintTupleHandler());
-        CountTupleHandler tupleHandler = new CountTupleHandler();
-        tupleQuery.evaluate(tupleHandler);
-        assertEquals(2, tupleHandler.getCount());
-    }
-
-    public void testMMRTS147SubjectOverMultipleShards() throws Exception {
-        PartitionSail sail = new PartitionSail(connector, TABLE, SHARD_TABLE);
-
-        SailRepository tmpRepo = new SailRepository(sail);
-        tmpRepo.initialize();
-        SailRepositoryConnection tmpConn = tmpRepo.getConnection();
-        String uuid = "mmrts147subj";
-        //add for the current date shard
-        tmpConn.add(new StatementImpl(vf.createURI(NAMESPACE, uuid), vf.createURI(NAMESPACE, "currentPred1"), vf.createLiteral("currentValue1")));
-
-        //add for the old date shard
-        connection.add(new StatementImpl(vf.createURI(NAMESPACE, uuid), vf.createURI(NAMESPACE, "currentPred2"), vf.createLiteral("currentValue2")));
-
-        tmpConn.commit();
-        connection.commit();
-
-        String query = "PREFIX ns:<" + NAMESPACE + ">\n" +
-                "SELECT * WHERE {\n" +
-                "     ns:mmrts147subj ?p ?o .\n" +
-                "}";
-        TupleQuery tupleQuery = connection.prepareTupleQuery(QueryLanguage.SPARQL, query);
-        tupleQuery.evaluate(new PrintTupleHandler());
-//        CountTupleHandler tupleHandler = new CountTupleHandler();
-//        tupleQuery.evaluate(tupleHandler);
-//        assertEquals(2, tupleHandler.getCount());
-    }
-
-    public void testMMRTS147PredicatesOverMultipleShards() throws Exception {
-        PartitionSail sail = new PartitionSail(connector, TABLE, SHARD_TABLE);
-
-        SailRepository tmpRepo = new SailRepository(sail);
-        tmpRepo.initialize();
-        SailRepositoryConnection tmpConn = tmpRepo.getConnection();
-        String uuid = "mmrts147pred";
-        //add for the current date shard
-        tmpConn.add(new StatementImpl(vf.createURI(NAMESPACE, uuid), vf.createURI(NAMESPACE, "currentPred1"), vf.createLiteral("currentValue1")));
-
-        //add for the old date shard
-        connection.add(new StatementImpl(vf.createURI(NAMESPACE, uuid), vf.createURI(NAMESPACE, "currentPred2"), vf.createLiteral("currentValue2")));
-
-        tmpConn.commit();
-        connection.commit();
-
-        String query = "PREFIX ns:<" + NAMESPACE + ">\n" +
-                "SELECT * WHERE {\n" +
-                "     ?s ns:currentPred1 'currentValue1' .\n" +
-                "     ?s ns:currentPred2 'currentValue2' .\n" +
-                "}";
-        TupleQuery tupleQuery = connection.prepareTupleQuery(QueryLanguage.SPARQL, query);
-//        tupleQuery.evaluate(new PrintTupleHandler());
-        CountTupleHandler tupleHandler = new CountTupleHandler();
-        tupleQuery.evaluate(tupleHandler);
-        assertEquals(0, tupleHandler.getCount());
-        //MMRTS-147, this is a problem. The same subject has been separated across shards and
-        // cannot be queried with this join
-    }
-
-    /**
-     * Make sure that the shard does not participate in the time range given
-     *
-     * @throws Exception
-     */
-    public void testMMRTS151ShardDoesNotMatchTimeRange() throws Exception {
-        PartitionSail sail = new PartitionSail(connector, TABLE);
-
-        SailRepository tmpRepo = new SailRepository(sail);
-        tmpRepo.initialize();
-        SailRepositoryConnection tmpConn = tmpRepo.getConnection();
-        String uuid = "mmrts151";
-
-        tmpConn.add(new StatementImpl(vf.createURI(NAMESPACE, uuid), vf.createURI(RDF_NS, "type"), vf.createURI(NAMESPACE, "Clicked")));
-        tmpConn.add(new StatementImpl(vf.createURI(NAMESPACE, uuid), vf.createURI(NAMESPACE, "clickedItem"), vf.createURI(NAMESPACE, objectUuid)));
-        tmpConn.add(new StatementImpl(vf.createURI(NAMESPACE, uuid), vf.createURI(NAMESPACE, "performedBy"), vf.createURI("urn:system:B")));
-        tmpConn.add(new StatementImpl(vf.createURI(NAMESPACE, uuid), vf.createURI(NAMESPACE, "perfAt"), vf.createLiteral(DatatypeFactory.newInstance().newXMLGregorianCalendar(2011, 7, 12, 6, 2, 0, 0, 0))));
-        tmpConn.add(new StatementImpl(vf.createURI(NAMESPACE, uuid), vf.createURI(NAMESPACE, "repAt"), vf.createLiteral(DatatypeFactory.newInstance().newXMLGregorianCalendar(2011, 7, 12, 6, 3, 0, 0, 0))));
-
-        tmpConn.commit();
-
-        String query = "PREFIX rdf: <http://www.w3.org/1999/02/22-rdf-syntax-ns#>\n" +
-                "PREFIX ns:<" + NAMESPACE + ">\n" +
-                "PREFIX mvmpart: <urn:mvm.mmrts.partition.rdf/08/2011#>\n" +
-                "select * where {\n" +
-                "   ?s ns:performedBy ?pb.\n" +
-                "   FILTER(mvmpart:timeRange(?s, ns:perfAt, " + START + ", " + END + ", 'XMLDATETIME'))\n" +
-                "}\n";
-        TupleQuery tupleQuery = tmpConn.prepareTupleQuery(QueryLanguage.SPARQL, query);
-//        tupleQuery.evaluate(new PrintTupleHandler());
-        CountTupleHandler tupleHandler = new CountTupleHandler();
-        tupleQuery.evaluate(tupleHandler);
-        assertEquals(1, tupleHandler.getCount());
-
-        //now add a query with the shardRange function to make sure nothing comes back
-        query = "PREFIX rdf: <http://www.w3.org/1999/02/22-rdf-syntax-ns#>\n" +
-                "PREFIX ns:<" + NAMESPACE + ">\n" +
-                "PREFIX mvmpart: <urn:mvm.mmrts.partition.rdf/08/2011#>\n" +
-                "select * where {\n" +
-                "   ?s ns:performedBy ?pb.\n" +
-                "   FILTER(mvmpart:timeRange(?s, ns:perfAt, " + START + ", " + END + ", 'XMLDATETIME'))\n" +
-                "   FILTER(mvmpart:shardRange(?s, " + START + ", " + END + "))\n" +
-                "}\n";
-        tupleQuery = tmpConn.prepareTupleQuery(QueryLanguage.SPARQL, query);
-//        tupleQuery.evaluate(new PrintTupleHandler());
-        tupleHandler = new CountTupleHandler();
-        tupleQuery.evaluate(tupleHandler);
-        assertEquals(0, tupleHandler.getCount());
-
-        //now make sure the shard range is for the curr shard
-        long curr = System.currentTimeMillis();
-        query = "PREFIX rdf: <http://www.w3.org/1999/02/22-rdf-syntax-ns#>\n" +
-                "PREFIX ns:<" + NAMESPACE + ">\n" +
-                "PREFIX mvmpart: <urn:mvm.mmrts.partition.rdf/08/2011#>\n" +
-                "select * where {\n" +
-                "   ?s ns:performedBy ?pb.\n" +
-                "   FILTER(mvmpart:timeRange(?s, ns:perfAt, " + START + ", " + END + ", 'XMLDATETIME'))\n" +
-                "   FILTER(mvmpart:shardRange(?s, " + (curr - (10000l)/***/) + ", " + curr + "))\n" +
-                "}\n";
-//        System.out.println(query);
-        tupleQuery = tmpConn.prepareTupleQuery(QueryLanguage.SPARQL, query);
-//        tupleQuery.evaluate(new PrintTupleHandler());
-        tupleHandler = new CountTupleHandler();
-        tupleQuery.evaluate(tupleHandler);
-        assertEquals(1, tupleHandler.getCount());
-    }
-    
-    public void testMMRTS150() throws Exception {
-        //MMRTS-150
-        String query = "PREFIX ns:<" + NAMESPACE + ">\n" +
-                "select * where {\n" +
-                "?id <urn:bool> \"true\".\n" +
-                "?id <urn:sentItem> ?item.\n" +
-                "}\n";
-        TupleQuery tupleQuery = connection.prepareTupleQuery(QueryLanguage.SPARQL, query);
-//        tupleQuery.evaluate(new PrintTupleHandler());
-        CountTupleHandler tupleHandler = new CountTupleHandler();
-        tupleQuery.evaluate(tupleHandler);
-        assertEquals(1, tupleHandler.getCount());
-    }
-
-    public void testMikeQuery() throws Exception {
-        String query = "PREFIX mvmpart: <urn:mvm.mmrts.partition.rdf/08/2011#>\n" +
-                "PREFIX tdp: <" + NAMESPACE + ">\n" +
-                "PREFIX rdf: <http://www.w3.org/1999/02/22-rdf-syntax-ns#>\n" +
-                "SELECT * WHERE{\n" +
-                "?id tdp:performedAt ?timestamp.\n" +
-                "FILTER(mvmpart:timeRange(?id, tdp:performedAt, 132370075535, 1324060075534, 'XMLDATETIME')).\n" +
-                "?id tdp:performedBy ?sysname.\n" +
-//                "?id tdp:performedBy 'thesystemname'.\n" +
-                "?id rdf:type ?et\n" +
-//                "?id rdf:type tdp:EventType\n" +
-                "}\n" +
-                "Limit 10";
-        TupleQuery tupleQuery = connection.prepareTupleQuery(QueryLanguage.SPARQL, query);
-        tupleQuery.evaluate(new PrintTupleHandler());
-    }
-
-    private static class PrintTupleHandler implements TupleQueryResultHandler {
-
-        @Override
-        public void startQueryResult(List<String> strings) throws TupleQueryResultHandlerException {
-        }
-
-        @Override
-        public void endQueryResult() throws TupleQueryResultHandlerException {
-
-        }
-
-        @Override
-        public void handleSolution(BindingSet bindingSet) throws TupleQueryResultHandlerException {
-            System.out.println(bindingSet);
-        }
-    }
-
-    private static class CountTupleHandler implements TupleQueryResultHandler {
-
-        int count = 0;
-
-        @Override
-        public void startQueryResult(List<String> strings) throws TupleQueryResultHandlerException {
-        }
-
-        @Override
-        public void endQueryResult() throws TupleQueryResultHandlerException {
-        }
-
-        @Override
-        public void handleSolution(BindingSet bindingSet) throws TupleQueryResultHandlerException {
-            count++;
-        }
-
-        public int getCount() {
-            return count;
-        }
-    }
-
-}
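
Editor's note: the removed test above leans on one recurring pattern, namely evaluating a SPARQL tuple query through a counting handler instead of materializing the result set. For readers porting these tests off the deleted partition code, here is a minimal self-contained sketch of that pattern against the Sesame 2.x API used throughout this file. The class and helper names (CountingHandler, countSolutions) are illustrative, not part of the original code.

import org.openrdf.query.BindingSet;
import org.openrdf.query.QueryLanguage;
import org.openrdf.query.TupleQuery;
import org.openrdf.query.TupleQueryResultHandlerBase;
import org.openrdf.repository.RepositoryConnection;

// Sketch of the count-only evaluation pattern used by the removed test:
// stream solutions through a handler and tally them rather than buffering
// the whole result set. TupleQueryResultHandlerBase is the Sesame 2.x
// convenience base class, so only handleSolution needs overriding.
class CountingHandler extends TupleQueryResultHandlerBase {
    private int count = 0;

    @Override
    public void handleSolution(BindingSet bindingSet) {
        count++; // one call per solution
    }

    int getCount() {
        return count;
    }

    // Convenience helper: evaluate a SPARQL SELECT over an open connection
    // and return the number of solutions it produced.
    static int countSolutions(RepositoryConnection conn, String sparql) throws Exception {
        TupleQuery query = conn.prepareTupleQuery(QueryLanguage.SPARQL, sparql);
        CountingHandler handler = new CountingHandler();
        query.evaluate(handler);
        return handler.getCount();
    }
}

The same shape covers the PrintTupleHandler variant as well: swap the counter for a System.out.println of each binding set.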


[46/56] [abbrv] incubator-rya git commit: RYA-7 POM and License Clean-up for Apache Move

Posted by mi...@apache.org.
http://git-wip-us.apache.org/repos/asf/incubator-rya/blob/80faf06d/common/rya.api/src/test/java/mvm/rya/api/resolver/impl/LongRyaTypeResolverTest.java
----------------------------------------------------------------------
diff --git a/common/rya.api/src/test/java/mvm/rya/api/resolver/impl/LongRyaTypeResolverTest.java b/common/rya.api/src/test/java/mvm/rya/api/resolver/impl/LongRyaTypeResolverTest.java
index c09ccc7..3dfb9c4 100644
--- a/common/rya.api/src/test/java/mvm/rya/api/resolver/impl/LongRyaTypeResolverTest.java
+++ b/common/rya.api/src/test/java/mvm/rya/api/resolver/impl/LongRyaTypeResolverTest.java
@@ -1,25 +1,26 @@
 package mvm.rya.api.resolver.impl;
 
 /*
- * #%L
- * mvm.rya.rya.api
- * %%
- * Copyright (C) 2014 Rya
- * %%
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * You may obtain a copy of the License at
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
  * 
- *      http://www.apache.org/licenses/LICENSE-2.0
+ *   http://www.apache.org/licenses/LICENSE-2.0
  * 
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- * #L%
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied.  See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
  */
 
+
+
 import mvm.rya.api.domain.RyaType;
 import org.junit.Test;
 import org.openrdf.model.vocabulary.XMLSchema;

http://git-wip-us.apache.org/repos/asf/incubator-rya/blob/80faf06d/common/rya.api/src/test/java/mvm/rya/api/resolver/impl/RyaURIResolverTest.java
----------------------------------------------------------------------
diff --git a/common/rya.api/src/test/java/mvm/rya/api/resolver/impl/RyaURIResolverTest.java b/common/rya.api/src/test/java/mvm/rya/api/resolver/impl/RyaURIResolverTest.java
index 225ceda..f962b88 100644
--- a/common/rya.api/src/test/java/mvm/rya/api/resolver/impl/RyaURIResolverTest.java
+++ b/common/rya.api/src/test/java/mvm/rya/api/resolver/impl/RyaURIResolverTest.java
@@ -1,25 +1,26 @@
 package mvm.rya.api.resolver.impl;
 
 /*
- * #%L
- * mvm.rya.rya.api
- * %%
- * Copyright (C) 2014 Rya
- * %%
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * You may obtain a copy of the License at
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
  * 
- *      http://www.apache.org/licenses/LICENSE-2.0
+ *   http://www.apache.org/licenses/LICENSE-2.0
  * 
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- * #L%
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied.  See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
  */
 
+
+
 import junit.framework.TestCase;
 import mvm.rya.api.domain.RyaType;
 import mvm.rya.api.domain.RyaURI;

http://git-wip-us.apache.org/repos/asf/incubator-rya/blob/80faf06d/common/rya.api/src/test/java/mvm/rya/api/resolver/triple/impl/HashedWholeRowTripleResolverTest.java
----------------------------------------------------------------------
diff --git a/common/rya.api/src/test/java/mvm/rya/api/resolver/triple/impl/HashedWholeRowTripleResolverTest.java b/common/rya.api/src/test/java/mvm/rya/api/resolver/triple/impl/HashedWholeRowTripleResolverTest.java
index 014dceb..2baa92d 100644
--- a/common/rya.api/src/test/java/mvm/rya/api/resolver/triple/impl/HashedWholeRowTripleResolverTest.java
+++ b/common/rya.api/src/test/java/mvm/rya/api/resolver/triple/impl/HashedWholeRowTripleResolverTest.java
@@ -1,25 +1,26 @@
 package mvm.rya.api.resolver.triple.impl;
 
 /*
- * #%L
- * mvm.rya.rya.api
- * %%
- * Copyright (C) 2014 Rya
- * %%
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * You may obtain a copy of the License at
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
  * 
- *      http://www.apache.org/licenses/LICENSE-2.0
+ *   http://www.apache.org/licenses/LICENSE-2.0
  * 
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- * #L%
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied.  See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
  */
 
+
+
 import java.util.Map;
 
 import junit.framework.TestCase;

http://git-wip-us.apache.org/repos/asf/incubator-rya/blob/80faf06d/common/rya.api/src/test/java/mvm/rya/api/resolver/triple/impl/WholeRowTripleResolverTest.java
----------------------------------------------------------------------
diff --git a/common/rya.api/src/test/java/mvm/rya/api/resolver/triple/impl/WholeRowTripleResolverTest.java b/common/rya.api/src/test/java/mvm/rya/api/resolver/triple/impl/WholeRowTripleResolverTest.java
index b313a25..30409ff 100644
--- a/common/rya.api/src/test/java/mvm/rya/api/resolver/triple/impl/WholeRowTripleResolverTest.java
+++ b/common/rya.api/src/test/java/mvm/rya/api/resolver/triple/impl/WholeRowTripleResolverTest.java
@@ -1,25 +1,26 @@
 package mvm.rya.api.resolver.triple.impl;
 
 /*
- * #%L
- * mvm.rya.rya.api
- * %%
- * Copyright (C) 2014 Rya
- * %%
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * You may obtain a copy of the License at
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
  * 
- *      http://www.apache.org/licenses/LICENSE-2.0
+ *   http://www.apache.org/licenses/LICENSE-2.0
  * 
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- * #L%
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied.  See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
  */
 
+
+
 import junit.framework.TestCase;
 import mvm.rya.api.RdfCloudTripleStoreConstants;
 import mvm.rya.api.domain.RyaStatement;

http://git-wip-us.apache.org/repos/asf/incubator-rya/blob/80faf06d/common/rya.api/src/test/java/mvm/rya/api/utils/RdfIOTest.java
----------------------------------------------------------------------
diff --git a/common/rya.api/src/test/java/mvm/rya/api/utils/RdfIOTest.java b/common/rya.api/src/test/java/mvm/rya/api/utils/RdfIOTest.java
index 26ca01c..db377e1 100644
--- a/common/rya.api/src/test/java/mvm/rya/api/utils/RdfIOTest.java
+++ b/common/rya.api/src/test/java/mvm/rya/api/utils/RdfIOTest.java
@@ -1,24 +1,24 @@
-//package mvm.rya.api.utils;
-
 /*
- * #%L
- * mvm.rya.rya.api
- * %%
- * Copyright (C) 2014 Rya
- * %%
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * You may obtain a copy of the License at
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
  * 
- *      http://www.apache.org/licenses/LICENSE-2.0
+ *   http://www.apache.org/licenses/LICENSE-2.0
  * 
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- * #L%
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied.  See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
  */
+
+//package mvm.rya.api.utils;
+
 //
 //import junit.framework.TestCase;
 //import mvm.rya.api.RdfCloudTripleStoreUtils;

http://git-wip-us.apache.org/repos/asf/incubator-rya/blob/80faf06d/common/rya.provenance/pom.xml
----------------------------------------------------------------------
diff --git a/common/rya.provenance/pom.xml b/common/rya.provenance/pom.xml
index 4bd24cc..b9f749c 100644
--- a/common/rya.provenance/pom.xml
+++ b/common/rya.provenance/pom.xml
@@ -1,39 +1,53 @@
+<?xml version='1.0'?>
+<!--
+Licensed to the Apache Software Foundation (ASF) under one
+or more contributor license agreements.  See the NOTICE file
+distributed with this work for additional information
+regarding copyright ownership.  The ASF licenses this file
+to you under the Apache License, Version 2.0 (the
+"License"); you may not use this file except in compliance
+with the License.  You may obtain a copy of the License at
+
+  http://www.apache.org/licenses/LICENSE-2.0
+
+Unless required by applicable law or agreed to in writing,
+software distributed under the License is distributed on an
+"AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+KIND, either express or implied.  See the License for the
+specific language governing permissions and limitations
+under the License.
+-->
+
 <project xmlns="http://maven.apache.org/POM/4.0.0" xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance" xsi:schemaLocation="http://maven.apache.org/POM/4.0.0 http://maven.apache.org/maven-v4_0_0.xsd">
     <modelVersion>4.0.0</modelVersion>
-     <parent>
-        <groupId>mvm.rya</groupId>
+    <parent>
+        <groupId>org.apache.rya</groupId>
         <artifactId>rya.common</artifactId>
         <version>3.2.10-SNAPSHOT</version>
     </parent>
+
     <artifactId>rya.provenance</artifactId>
-    <name>${project.groupId}.${project.artifactId}</name>
-    <properties>
-    </properties>
-    <dependencies>
-            <dependency>
-                <groupId>org.slf4j</groupId>
-                <artifactId>slf4j-api</artifactId>
-                <version>${slf4j.version}</version>
-            </dependency>
-            <dependency>
-                <groupId>org.slf4j</groupId>
-                <artifactId>slf4j-log4j12</artifactId>
-                <version>${slf4j.version}</version>
-            </dependency>
+    <name>Apache Rya Provenance</name>
 
-         <dependency>
-            <groupId>junit</groupId>
-            <artifactId>junit</artifactId>
-            <scope>test</scope>
-        </dependency>
- 
- 
+    <dependencies>
         <dependency>
             <groupId>org.openrdf.sesame</groupId>
             <artifactId>sesame-runtime</artifactId>
         </dependency>
 
-     </dependencies>
+        <dependency>
+            <groupId>org.slf4j</groupId>
+            <artifactId>slf4j-api</artifactId>
+        </dependency>
+        <dependency>
+            <groupId>org.slf4j</groupId>
+            <artifactId>slf4j-log4j12</artifactId>
+        </dependency>
 
- 
+        <dependency>
+            <groupId>junit</groupId>
+            <artifactId>junit</artifactId>
+            <scope>test</scope>
+        </dependency>
+    </dependencies>
 </project>
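
Editor's note: the slf4j and junit dependencies above now omit explicit <version> elements, consistent with the commit's "Moved Deps to Parent POM" change; their versions are expected to resolve from dependency management in the new org.apache.rya parent. A hypothetical sketch of what such a parent-side block looks like follows — the property names are illustrative, not copied from the actual parent POM.

<!-- Illustrative dependencyManagement fragment for a parent POM; the real
     Rya parent pins its own artifact versions, typically via properties. -->
<dependencyManagement>
    <dependencies>
        <dependency>
            <groupId>org.slf4j</groupId>
            <artifactId>slf4j-api</artifactId>
            <version>${slf4j.version}</version>
        </dependency>
        <dependency>
            <groupId>junit</groupId>
            <artifactId>junit</artifactId>
            <version>${junit.version}</version>
        </dependency>
    </dependencies>
</dependencyManagement>

With this in place, child modules such as rya.provenance declare only groupId and artifactId and inherit the managed version, which is why the diff above could drop the per-module <version> tags.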

http://git-wip-us.apache.org/repos/asf/incubator-rya/blob/80faf06d/common/rya.provenance/src/main/java/mvm/rya/rdftriplestore/provenance/LoggingProvenanceCollector.java
----------------------------------------------------------------------
diff --git a/common/rya.provenance/src/main/java/mvm/rya/rdftriplestore/provenance/LoggingProvenanceCollector.java b/common/rya.provenance/src/main/java/mvm/rya/rdftriplestore/provenance/LoggingProvenanceCollector.java
index 09111cf..2ef279a 100644
--- a/common/rya.provenance/src/main/java/mvm/rya/rdftriplestore/provenance/LoggingProvenanceCollector.java
+++ b/common/rya.provenance/src/main/java/mvm/rya/rdftriplestore/provenance/LoggingProvenanceCollector.java
@@ -1,25 +1,26 @@
 package mvm.rya.rdftriplestore.provenance;
 
 /*
- * #%L
- * mvm.rya.rya.sail.impl
- * %%
- * Copyright (C) 2014 - 2015 Rya
- * %%
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * You may obtain a copy of the License at
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
  * 
- *      http://www.apache.org/licenses/LICENSE-2.0
+ *   http://www.apache.org/licenses/LICENSE-2.0
  * 
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- * #L%
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied.  See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
  */
 
+
+
 import org.apache.log4j.Logger;
 
 /**

http://git-wip-us.apache.org/repos/asf/incubator-rya/blob/80faf06d/common/rya.provenance/src/main/java/mvm/rya/rdftriplestore/provenance/ProvenanceCollectionException.java
----------------------------------------------------------------------
diff --git a/common/rya.provenance/src/main/java/mvm/rya/rdftriplestore/provenance/ProvenanceCollectionException.java b/common/rya.provenance/src/main/java/mvm/rya/rdftriplestore/provenance/ProvenanceCollectionException.java
index 136c100..a0bd896 100644
--- a/common/rya.provenance/src/main/java/mvm/rya/rdftriplestore/provenance/ProvenanceCollectionException.java
+++ b/common/rya.provenance/src/main/java/mvm/rya/rdftriplestore/provenance/ProvenanceCollectionException.java
@@ -1,5 +1,25 @@
 package mvm.rya.rdftriplestore.provenance;
 
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ * 
+ *   http://www.apache.org/licenses/LICENSE-2.0
+ * 
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied.  See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+
+
 import org.openrdf.repository.RepositoryException;
 
 /**

http://git-wip-us.apache.org/repos/asf/incubator-rya/blob/80faf06d/common/rya.provenance/src/main/java/mvm/rya/rdftriplestore/provenance/ProvenanceCollector.java
----------------------------------------------------------------------
diff --git a/common/rya.provenance/src/main/java/mvm/rya/rdftriplestore/provenance/ProvenanceCollector.java b/common/rya.provenance/src/main/java/mvm/rya/rdftriplestore/provenance/ProvenanceCollector.java
index efb974e..a4ff829 100644
--- a/common/rya.provenance/src/main/java/mvm/rya/rdftriplestore/provenance/ProvenanceCollector.java
+++ b/common/rya.provenance/src/main/java/mvm/rya/rdftriplestore/provenance/ProvenanceCollector.java
@@ -1,5 +1,25 @@
 package mvm.rya.rdftriplestore.provenance;
 
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ * 
+ *   http://www.apache.org/licenses/LICENSE-2.0
+ * 
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied.  See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+
+
 /**
  *  Collects/records provenance data
  */

http://git-wip-us.apache.org/repos/asf/incubator-rya/blob/80faf06d/common/rya.provenance/src/main/java/mvm/rya/rdftriplestore/provenance/TriplestoreProvenanceCollector.java
----------------------------------------------------------------------
diff --git a/common/rya.provenance/src/main/java/mvm/rya/rdftriplestore/provenance/TriplestoreProvenanceCollector.java b/common/rya.provenance/src/main/java/mvm/rya/rdftriplestore/provenance/TriplestoreProvenanceCollector.java
index 38f026b..e283f2c 100644
--- a/common/rya.provenance/src/main/java/mvm/rya/rdftriplestore/provenance/TriplestoreProvenanceCollector.java
+++ b/common/rya.provenance/src/main/java/mvm/rya/rdftriplestore/provenance/TriplestoreProvenanceCollector.java
@@ -1,5 +1,25 @@
 package mvm.rya.rdftriplestore.provenance;
 
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ * 
+ *   http://www.apache.org/licenses/LICENSE-2.0
+ * 
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied.  See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+
+
 import java.util.List;
 
 import mvm.rya.rdftriplestore.provenance.rdf.BaseProvenanceModel;

http://git-wip-us.apache.org/repos/asf/incubator-rya/blob/80faf06d/common/rya.provenance/src/main/java/mvm/rya/rdftriplestore/provenance/rdf/BaseProvenanceModel.java
----------------------------------------------------------------------
diff --git a/common/rya.provenance/src/main/java/mvm/rya/rdftriplestore/provenance/rdf/BaseProvenanceModel.java b/common/rya.provenance/src/main/java/mvm/rya/rdftriplestore/provenance/rdf/BaseProvenanceModel.java
index bced532..b8c5f32 100644
--- a/common/rya.provenance/src/main/java/mvm/rya/rdftriplestore/provenance/rdf/BaseProvenanceModel.java
+++ b/common/rya.provenance/src/main/java/mvm/rya/rdftriplestore/provenance/rdf/BaseProvenanceModel.java
@@ -1,5 +1,25 @@
 package mvm.rya.rdftriplestore.provenance.rdf;
 
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ * 
+ *   http://www.apache.org/licenses/LICENSE-2.0
+ * 
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied.  See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+
+
 import java.util.ArrayList;
 import java.util.Date;
 import java.util.List;

http://git-wip-us.apache.org/repos/asf/incubator-rya/blob/80faf06d/common/rya.provenance/src/main/java/mvm/rya/rdftriplestore/provenance/rdf/RDFProvenanceModel.java
----------------------------------------------------------------------
diff --git a/common/rya.provenance/src/main/java/mvm/rya/rdftriplestore/provenance/rdf/RDFProvenanceModel.java b/common/rya.provenance/src/main/java/mvm/rya/rdftriplestore/provenance/rdf/RDFProvenanceModel.java
index 189320e..c5495f2 100644
--- a/common/rya.provenance/src/main/java/mvm/rya/rdftriplestore/provenance/rdf/RDFProvenanceModel.java
+++ b/common/rya.provenance/src/main/java/mvm/rya/rdftriplestore/provenance/rdf/RDFProvenanceModel.java
@@ -1,5 +1,25 @@
 package mvm.rya.rdftriplestore.provenance.rdf;
 
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ * 
+ *   http://www.apache.org/licenses/LICENSE-2.0
+ * 
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied.  See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+
+
 import java.util.List;
 
 import org.openrdf.model.Statement;

http://git-wip-us.apache.org/repos/asf/incubator-rya/blob/80faf06d/common/rya.provenance/src/test/java/mvm/rya/rdftriplestore/provenance/TriplestoreProvenanceCollectorTest.java
----------------------------------------------------------------------
diff --git a/common/rya.provenance/src/test/java/mvm/rya/rdftriplestore/provenance/TriplestoreProvenanceCollectorTest.java b/common/rya.provenance/src/test/java/mvm/rya/rdftriplestore/provenance/TriplestoreProvenanceCollectorTest.java
index 326ef01..c431468 100644
--- a/common/rya.provenance/src/test/java/mvm/rya/rdftriplestore/provenance/TriplestoreProvenanceCollectorTest.java
+++ b/common/rya.provenance/src/test/java/mvm/rya/rdftriplestore/provenance/TriplestoreProvenanceCollectorTest.java
@@ -1,5 +1,25 @@
 package mvm.rya.rdftriplestore.provenance;
 
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ * 
+ *   http://www.apache.org/licenses/LICENSE-2.0
+ * 
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied.  See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+
+
 import static org.junit.Assert.assertTrue;
 
 import org.junit.Test;

http://git-wip-us.apache.org/repos/asf/incubator-rya/blob/80faf06d/common/rya.provenance/src/test/java/mvm/rya/rdftriplestore/provenance/rdf/BaseProvenanceModelTest.java
----------------------------------------------------------------------
diff --git a/common/rya.provenance/src/test/java/mvm/rya/rdftriplestore/provenance/rdf/BaseProvenanceModelTest.java b/common/rya.provenance/src/test/java/mvm/rya/rdftriplestore/provenance/rdf/BaseProvenanceModelTest.java
index 33f7f64..99875e2 100644
--- a/common/rya.provenance/src/test/java/mvm/rya/rdftriplestore/provenance/rdf/BaseProvenanceModelTest.java
+++ b/common/rya.provenance/src/test/java/mvm/rya/rdftriplestore/provenance/rdf/BaseProvenanceModelTest.java
@@ -1,5 +1,25 @@
 package mvm.rya.rdftriplestore.provenance.rdf;
 
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ * 
+ *   http://www.apache.org/licenses/LICENSE-2.0
+ * 
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied.  See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+
+
 import static org.junit.Assert.assertTrue;
 
 import java.util.List;

http://git-wip-us.apache.org/repos/asf/incubator-rya/blob/80faf06d/dao/accumulo.rya/pom.xml
----------------------------------------------------------------------
diff --git a/dao/accumulo.rya/pom.xml b/dao/accumulo.rya/pom.xml
index d57cb5e..5328945 100644
--- a/dao/accumulo.rya/pom.xml
+++ b/dao/accumulo.rya/pom.xml
@@ -1,109 +1,74 @@
 <?xml version="1.0" encoding="UTF-8"?>
+
+<!--
+Licensed to the Apache Software Foundation (ASF) under one
+or more contributor license agreements.  See the NOTICE file
+distributed with this work for additional information
+regarding copyright ownership.  The ASF licenses this file
+to you under the Apache License, Version 2.0 (the
+"License"); you may not use this file except in compliance
+with the License.  You may obtain a copy of the License at
+
+  http://www.apache.org/licenses/LICENSE-2.0
+
+Unless required by applicable law or agreed to in writing,
+software distributed under the License is distributed on an
+"AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+KIND, either express or implied.  See the License for the
+specific language governing permissions and limitations
+under the License.
+-->
+
 <project xmlns="http://maven.apache.org/POM/4.0.0" xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance" xsi:schemaLocation="http://maven.apache.org/POM/4.0.0 http://maven.apache.org/xsd/maven-4.0.0.xsd">
+    <modelVersion>4.0.0</modelVersion>
     <parent>
-        <groupId>mvm.rya</groupId>
+        <groupId>org.apache.rya</groupId>
         <artifactId>rya.dao</artifactId>
         <version>3.2.10-SNAPSHOT</version>
     </parent>
-    <modelVersion>4.0.0</modelVersion>
 
     <artifactId>accumulo.rya</artifactId>
-    <name>${project.groupId}.${project.artifactId}</name>
-    <properties>
-
-    </properties>
+    <name>Apache Rya Accumulo DAO</name>
 
     <dependencies>
         <dependency>
-            <groupId>mvm.rya</groupId>
+            <groupId>org.apache.rya</groupId>
             <artifactId>rya.api</artifactId>
         </dependency>
         
         <!-- Accumulo deps -->
         <dependency>
-            <groupId>org.apache.zookeeper</groupId>
-            <artifactId>zookeeper</artifactId>
-            <exclusions>
-                <!-- the log4j that comes with zookeeper 3.3.5 has some bad dependencies -->
-                <exclusion>
-                    <groupId>javax.jms</groupId>
-                    <artifactId>jms</artifactId>
-                </exclusion>
-                <exclusion>
-                    <groupId>com.sun.jdmk</groupId>
-                    <artifactId>jmxtools</artifactId>
-                </exclusion>
-                <exclusion>
-                    <groupId>com.sun.jmx</groupId>
-                    <artifactId>jmxri</artifactId>
-                </exclusion>
-            </exclusions>
-        </dependency>
-        <dependency>
-            <groupId>junit</groupId>
-            <artifactId>junit</artifactId>
-            <scope>test</scope>
+            <groupId>org.apache.accumulo</groupId>
+            <artifactId>accumulo-core</artifactId>
         </dependency>
+
         <dependency>
             <groupId>org.openrdf.sesame</groupId>
             <artifactId>sesame-rio-ntriples</artifactId>
-            <version>${openrdf.sesame.version}</version>
         </dependency>
         <dependency>
             <groupId>org.openrdf.sesame</groupId>
             <artifactId>sesame-rio-nquads</artifactId>
-            <version>${openrdf.sesame.version}</version>
         </dependency>
         <dependency>
             <groupId>org.openrdf.sesame</groupId>
             <artifactId>sesame-queryalgebra-evaluation</artifactId>
-            <version>${openrdf.sesame.version}</version>
         </dependency>
+
         <dependency>
             <groupId>org.openrdf.sesame</groupId>
             <artifactId>sesame-rio-trig</artifactId>
-            <version>${openrdf.sesame.version}</version>
+            <scope>test</scope>
+        </dependency>
+
+        <dependency>
+            <groupId>junit</groupId>
+            <artifactId>junit</artifactId>
             <scope>test</scope>
         </dependency>
     </dependencies>
     <profiles>
         <profile>
-            <id>accumulo</id>
-            <activation>
-                <activeByDefault>true</activeByDefault>
-            </activation>
-            <dependencies>
-                <dependency>
-                    <groupId>org.apache.accumulo</groupId>
-                    <artifactId>accumulo-core</artifactId>
-                    <optional>true</optional>
-                </dependency>
-                <dependency>
-                    <groupId>mvm.rya</groupId>
-                    <artifactId>accumulo.iterators</artifactId>
-                    <optional>true</optional>
-                </dependency>
-            </dependencies>
-        </profile>
-        <profile>
-            <id>cloudbase</id>
-            <activation>
-                <activeByDefault>false</activeByDefault>
-            </activation>
-            <dependencies>
-                <dependency>
-                    <groupId>com.texeltek</groupId>
-                    <artifactId>accumulo-cloudbase-shim</artifactId>
-                    <optional>true</optional>
-                </dependency>
-                <dependency>
-                    <groupId>mvm.rya</groupId>
-                    <artifactId>cloudbase.iterators</artifactId>
-                    <optional>true</optional>
-                </dependency>
-            </dependencies>
-        </profile>
-        <profile>
             <id>mr</id>
             <build>
                 <plugins>

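Note on the version elements dropped above: with the module now parented by org.apache.rya:rya.dao, the sesame dependencies can omit <version> because Maven resolves them from a dependencyManagement block higher in the parent chain. A minimal sketch of what such a block in the parent POM might look like — the artifact ids and the openrdf.sesame.version property name are taken from this diff, while the surrounding layout and the property's actual value are assumptions, not the committed parent:

    <!-- Hypothetical excerpt of a parent POM's dependencyManagement.
         Each dependency is managed once here; child modules such as
         accumulo.rya then declare it without a <version> element. -->
    <dependencyManagement>
        <dependencies>
            <dependency>
                <groupId>org.openrdf.sesame</groupId>
                <artifactId>sesame-rio-ntriples</artifactId>
                <version>${openrdf.sesame.version}</version>
            </dependency>
            <dependency>
                <groupId>org.openrdf.sesame</groupId>
                <artifactId>sesame-queryalgebra-evaluation</artifactId>
                <version>${openrdf.sesame.version}</version>
            </dependency>
            <!-- ...one managed entry per shared dependency;
                 openrdf.sesame.version itself is pinned in the
                 parent's <properties> section. -->
        </dependencies>
    </dependencyManagement>

The same mechanism explains why accumulo-core above keeps no <version> after moving out of the removed accumulo profile into the module's direct dependencies.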
http://git-wip-us.apache.org/repos/asf/incubator-rya/blob/80faf06d/dao/accumulo.rya/src/main/java/mvm/rya/accumulo/AccumuloNamespaceTableIterator.java
----------------------------------------------------------------------
diff --git a/dao/accumulo.rya/src/main/java/mvm/rya/accumulo/AccumuloNamespaceTableIterator.java b/dao/accumulo.rya/src/main/java/mvm/rya/accumulo/AccumuloNamespaceTableIterator.java
index ba3ded3..ebca6a2 100644
--- a/dao/accumulo.rya/src/main/java/mvm/rya/accumulo/AccumuloNamespaceTableIterator.java
+++ b/dao/accumulo.rya/src/main/java/mvm/rya/accumulo/AccumuloNamespaceTableIterator.java
@@ -1,25 +1,26 @@
 package mvm.rya.accumulo;
 
 /*
- * #%L
- * mvm.rya.accumulo.rya
- * %%
- * Copyright (C) 2014 Rya
- * %%
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * You may obtain a copy of the License at
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
  * 
- *      http://www.apache.org/licenses/LICENSE-2.0
+ *   http://www.apache.org/licenses/LICENSE-2.0
  * 
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- * #L%
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied.  See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
  */
 
+
+
 import com.google.common.base.Preconditions;
 import info.aduna.iteration.CloseableIteration;
 import mvm.rya.api.persist.RdfDAOException;

http://git-wip-us.apache.org/repos/asf/incubator-rya/blob/80faf06d/dao/accumulo.rya/src/main/java/mvm/rya/accumulo/AccumuloRdfConfiguration.java
----------------------------------------------------------------------
diff --git a/dao/accumulo.rya/src/main/java/mvm/rya/accumulo/AccumuloRdfConfiguration.java b/dao/accumulo.rya/src/main/java/mvm/rya/accumulo/AccumuloRdfConfiguration.java
index 61d1b04..147228b 100644
--- a/dao/accumulo.rya/src/main/java/mvm/rya/accumulo/AccumuloRdfConfiguration.java
+++ b/dao/accumulo.rya/src/main/java/mvm/rya/accumulo/AccumuloRdfConfiguration.java
@@ -1,25 +1,26 @@
 package mvm.rya.accumulo;
 
 /*
- * #%L
- * mvm.rya.accumulo.rya
- * %%
- * Copyright (C) 2014 Rya
- * %%
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * You may obtain a copy of the License at
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
  * 
- *      http://www.apache.org/licenses/LICENSE-2.0
+ *   http://www.apache.org/licenses/LICENSE-2.0
  * 
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- * #L%
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied.  See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
  */
 
+
+
 import java.util.List;
 
 import mvm.rya.accumulo.experimental.AccumuloIndexer;

http://git-wip-us.apache.org/repos/asf/incubator-rya/blob/80faf06d/dao/accumulo.rya/src/main/java/mvm/rya/accumulo/AccumuloRdfConstants.java
----------------------------------------------------------------------
diff --git a/dao/accumulo.rya/src/main/java/mvm/rya/accumulo/AccumuloRdfConstants.java b/dao/accumulo.rya/src/main/java/mvm/rya/accumulo/AccumuloRdfConstants.java
index aa5157b..1ec57a7 100644
--- a/dao/accumulo.rya/src/main/java/mvm/rya/accumulo/AccumuloRdfConstants.java
+++ b/dao/accumulo.rya/src/main/java/mvm/rya/accumulo/AccumuloRdfConstants.java
@@ -1,25 +1,26 @@
 package mvm.rya.accumulo;
 
 /*
- * #%L
- * mvm.rya.accumulo.rya
- * %%
- * Copyright (C) 2014 Rya
- * %%
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * You may obtain a copy of the License at
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
  * 
- *      http://www.apache.org/licenses/LICENSE-2.0
+ *   http://www.apache.org/licenses/LICENSE-2.0
  * 
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- * #L%
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied.  See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
  */
 
+
+
 import org.apache.accumulo.core.Constants;
 import org.apache.accumulo.core.data.Value;
 import org.apache.accumulo.core.security.Authorizations;

http://git-wip-us.apache.org/repos/asf/incubator-rya/blob/80faf06d/dao/accumulo.rya/src/main/java/mvm/rya/accumulo/AccumuloRdfEvalStatsDAO.java
----------------------------------------------------------------------
diff --git a/dao/accumulo.rya/src/main/java/mvm/rya/accumulo/AccumuloRdfEvalStatsDAO.java b/dao/accumulo.rya/src/main/java/mvm/rya/accumulo/AccumuloRdfEvalStatsDAO.java
index 65fad20..a3e0677 100644
--- a/dao/accumulo.rya/src/main/java/mvm/rya/accumulo/AccumuloRdfEvalStatsDAO.java
+++ b/dao/accumulo.rya/src/main/java/mvm/rya/accumulo/AccumuloRdfEvalStatsDAO.java
@@ -1,25 +1,26 @@
 package mvm.rya.accumulo;
 
 /*
- * #%L
- * mvm.rya.accumulo.rya
- * %%
- * Copyright (C) 2014 Rya
- * %%
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * You may obtain a copy of the License at
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
  * 
- *      http://www.apache.org/licenses/LICENSE-2.0
+ *   http://www.apache.org/licenses/LICENSE-2.0
  * 
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- * #L%
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied.  See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
  */
 
+
+
 import static com.google.common.base.Preconditions.checkNotNull;
 import static mvm.rya.api.RdfCloudTripleStoreConstants.DELIM;
 import static mvm.rya.api.RdfCloudTripleStoreConstants.EMPTY_TEXT;

http://git-wip-us.apache.org/repos/asf/incubator-rya/blob/80faf06d/dao/accumulo.rya/src/main/java/mvm/rya/accumulo/AccumuloRdfQueryIterator.java
----------------------------------------------------------------------
diff --git a/dao/accumulo.rya/src/main/java/mvm/rya/accumulo/AccumuloRdfQueryIterator.java b/dao/accumulo.rya/src/main/java/mvm/rya/accumulo/AccumuloRdfQueryIterator.java
index 13ae37f..d13f50e 100644
--- a/dao/accumulo.rya/src/main/java/mvm/rya/accumulo/AccumuloRdfQueryIterator.java
+++ b/dao/accumulo.rya/src/main/java/mvm/rya/accumulo/AccumuloRdfQueryIterator.java
@@ -1,24 +1,24 @@
-//package mvm.rya.accumulo;
-
 /*
- * #%L
- * mvm.rya.accumulo.rya
- * %%
- * Copyright (C) 2014 Rya
- * %%
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * You may obtain a copy of the License at
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
  * 
- *      http://www.apache.org/licenses/LICENSE-2.0
+ *   http://www.apache.org/licenses/LICENSE-2.0
  * 
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- * #L%
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied.  See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
  */
+
+//package mvm.rya.accumulo;
+
 //
 //import com.google.common.collect.Iterators;
 //import com.google.common.io.ByteArrayDataInput;

http://git-wip-us.apache.org/repos/asf/incubator-rya/blob/80faf06d/dao/accumulo.rya/src/main/java/mvm/rya/accumulo/AccumuloRdfUtils.java
----------------------------------------------------------------------
diff --git a/dao/accumulo.rya/src/main/java/mvm/rya/accumulo/AccumuloRdfUtils.java b/dao/accumulo.rya/src/main/java/mvm/rya/accumulo/AccumuloRdfUtils.java
index d3b651f..157fc5a 100644
--- a/dao/accumulo.rya/src/main/java/mvm/rya/accumulo/AccumuloRdfUtils.java
+++ b/dao/accumulo.rya/src/main/java/mvm/rya/accumulo/AccumuloRdfUtils.java
@@ -1,25 +1,26 @@
 package mvm.rya.accumulo;
 
 /*
- * #%L
- * mvm.rya.accumulo.rya
- * %%
- * Copyright (C) 2014 Rya
- * %%
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * You may obtain a copy of the License at
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
  * 
- *      http://www.apache.org/licenses/LICENSE-2.0
+ *   http://www.apache.org/licenses/LICENSE-2.0
  * 
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- * #L%
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied.  See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
  */
 
+
+
 import mvm.rya.api.RdfCloudTripleStoreConstants;
 import mvm.rya.api.resolver.triple.TripleRow;
 import org.apache.accumulo.core.client.AccumuloException;

http://git-wip-us.apache.org/repos/asf/incubator-rya/blob/80faf06d/dao/accumulo.rya/src/main/java/mvm/rya/accumulo/AccumuloRyaDAO.java
----------------------------------------------------------------------
diff --git a/dao/accumulo.rya/src/main/java/mvm/rya/accumulo/AccumuloRyaDAO.java b/dao/accumulo.rya/src/main/java/mvm/rya/accumulo/AccumuloRyaDAO.java
index 50744dd..764ca80 100644
--- a/dao/accumulo.rya/src/main/java/mvm/rya/accumulo/AccumuloRyaDAO.java
+++ b/dao/accumulo.rya/src/main/java/mvm/rya/accumulo/AccumuloRyaDAO.java
@@ -1,25 +1,26 @@
 package mvm.rya.accumulo;
 
 /*
- * #%L
- * mvm.rya.accumulo.rya
- * %%
- * Copyright (C) 2014 Rya
- * %%
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * You may obtain a copy of the License at
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
  * 
- *      http://www.apache.org/licenses/LICENSE-2.0
+ *   http://www.apache.org/licenses/LICENSE-2.0
  * 
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- * #L%
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied.  See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
  */
 
+
+
 import static com.google.common.base.Preconditions.checkNotNull;
 import static mvm.rya.accumulo.AccumuloRdfConstants.ALL_AUTHORIZATIONS;
 import static mvm.rya.api.RdfCloudTripleStoreConstants.EMPTY_TEXT;

http://git-wip-us.apache.org/repos/asf/incubator-rya/blob/80faf06d/dao/accumulo.rya/src/main/java/mvm/rya/accumulo/DefineTripleQueryRangeFactory.java
----------------------------------------------------------------------
diff --git a/dao/accumulo.rya/src/main/java/mvm/rya/accumulo/DefineTripleQueryRangeFactory.java b/dao/accumulo.rya/src/main/java/mvm/rya/accumulo/DefineTripleQueryRangeFactory.java
index 3e51ab5..b5a4e84 100644
--- a/dao/accumulo.rya/src/main/java/mvm/rya/accumulo/DefineTripleQueryRangeFactory.java
+++ b/dao/accumulo.rya/src/main/java/mvm/rya/accumulo/DefineTripleQueryRangeFactory.java
@@ -1,24 +1,24 @@
-//package mvm.rya.accumulo;
-
 /*
- * #%L
- * mvm.rya.accumulo.rya
- * %%
- * Copyright (C) 2014 Rya
- * %%
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * You may obtain a copy of the License at
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
  * 
- *      http://www.apache.org/licenses/LICENSE-2.0
+ *   http://www.apache.org/licenses/LICENSE-2.0
  * 
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- * #L%
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied.  See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
  */
+
+//package mvm.rya.accumulo;
+
 //
 //import com.google.common.io.ByteArrayDataOutput;
 //import com.google.common.io.ByteStreams;
@@ -149,4 +149,4 @@
 //        return new CustomEntry<TABLE_LAYOUT, Range>(tableLayout, range);
 //    }
 //
-//}
\ No newline at end of file
+//}

http://git-wip-us.apache.org/repos/asf/incubator-rya/blob/80faf06d/dao/accumulo.rya/src/main/java/mvm/rya/accumulo/RyaTableKeyValues.java
----------------------------------------------------------------------
diff --git a/dao/accumulo.rya/src/main/java/mvm/rya/accumulo/RyaTableKeyValues.java b/dao/accumulo.rya/src/main/java/mvm/rya/accumulo/RyaTableKeyValues.java
index ee217be..574029e 100644
--- a/dao/accumulo.rya/src/main/java/mvm/rya/accumulo/RyaTableKeyValues.java
+++ b/dao/accumulo.rya/src/main/java/mvm/rya/accumulo/RyaTableKeyValues.java
@@ -1,25 +1,26 @@
 package mvm.rya.accumulo;
 
 /*
- * #%L
- * mvm.rya.accumulo.rya
- * %%
- * Copyright (C) 2014 Rya
- * %%
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * You may obtain a copy of the License at
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
  * 
- *      http://www.apache.org/licenses/LICENSE-2.0
+ *   http://www.apache.org/licenses/LICENSE-2.0
  * 
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- * #L%
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied.  See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
  */
 
+
+
 import static mvm.rya.accumulo.AccumuloRdfConstants.EMPTY_VALUE;
 
 import java.io.IOException;
@@ -111,4 +112,4 @@ public class RyaTableKeyValues {
                 ", o=" + osp +
                 '}';
     }
-}
\ No newline at end of file
+}

http://git-wip-us.apache.org/repos/asf/incubator-rya/blob/80faf06d/dao/accumulo.rya/src/main/java/mvm/rya/accumulo/RyaTableMutationsFactory.java
----------------------------------------------------------------------
diff --git a/dao/accumulo.rya/src/main/java/mvm/rya/accumulo/RyaTableMutationsFactory.java b/dao/accumulo.rya/src/main/java/mvm/rya/accumulo/RyaTableMutationsFactory.java
index 094da63..0dbafc1 100644
--- a/dao/accumulo.rya/src/main/java/mvm/rya/accumulo/RyaTableMutationsFactory.java
+++ b/dao/accumulo.rya/src/main/java/mvm/rya/accumulo/RyaTableMutationsFactory.java
@@ -1,25 +1,26 @@
 package mvm.rya.accumulo;
 
 /*
- * #%L
- * mvm.rya.accumulo.rya
- * %%
- * Copyright (C) 2014 Rya
- * %%
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * You may obtain a copy of the License at
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
  * 
- *      http://www.apache.org/licenses/LICENSE-2.0
+ *   http://www.apache.org/licenses/LICENSE-2.0
  * 
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- * #L%
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied.  See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
  */
 
+
+
 import static mvm.rya.accumulo.AccumuloRdfConstants.EMPTY_CV;
 import static mvm.rya.accumulo.AccumuloRdfConstants.EMPTY_VALUE;
 import static mvm.rya.api.RdfCloudTripleStoreConstants.EMPTY_TEXT;
@@ -98,4 +99,4 @@ public class RyaTableMutationsFactory {
         mutation.put(cfText, cqText, cv, timestamp, v);
         return mutation;
     }
-}
\ No newline at end of file
+}

http://git-wip-us.apache.org/repos/asf/incubator-rya/blob/80faf06d/dao/accumulo.rya/src/main/java/mvm/rya/accumulo/experimental/AbstractAccumuloIndexer.java
----------------------------------------------------------------------
diff --git a/dao/accumulo.rya/src/main/java/mvm/rya/accumulo/experimental/AbstractAccumuloIndexer.java b/dao/accumulo.rya/src/main/java/mvm/rya/accumulo/experimental/AbstractAccumuloIndexer.java
index 2244f66..5df5da9 100644
--- a/dao/accumulo.rya/src/main/java/mvm/rya/accumulo/experimental/AbstractAccumuloIndexer.java
+++ b/dao/accumulo.rya/src/main/java/mvm/rya/accumulo/experimental/AbstractAccumuloIndexer.java
@@ -1,5 +1,25 @@
 package mvm.rya.accumulo.experimental;
 
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ * 
+ *   http://www.apache.org/licenses/LICENSE-2.0
+ * 
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied.  See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+
+
 import java.io.IOException;
 import java.util.Collection;
 

http://git-wip-us.apache.org/repos/asf/incubator-rya/blob/80faf06d/dao/accumulo.rya/src/main/java/mvm/rya/accumulo/experimental/AccumuloIndexer.java
----------------------------------------------------------------------
diff --git a/dao/accumulo.rya/src/main/java/mvm/rya/accumulo/experimental/AccumuloIndexer.java b/dao/accumulo.rya/src/main/java/mvm/rya/accumulo/experimental/AccumuloIndexer.java
index 1bd75ba..2329831 100644
--- a/dao/accumulo.rya/src/main/java/mvm/rya/accumulo/experimental/AccumuloIndexer.java
+++ b/dao/accumulo.rya/src/main/java/mvm/rya/accumulo/experimental/AccumuloIndexer.java
@@ -1,5 +1,25 @@
 package mvm.rya.accumulo.experimental;
 
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ * 
+ *   http://www.apache.org/licenses/LICENSE-2.0
+ * 
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied.  See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+
+
 import java.io.IOException;
 
 import mvm.rya.api.persist.index.RyaSecondaryIndexer;
@@ -10,4 +30,4 @@ public interface AccumuloIndexer extends RyaSecondaryIndexer {
     
     public void setMultiTableBatchWriter(MultiTableBatchWriter writer) throws IOException;
 
-}
\ No newline at end of file
+}

http://git-wip-us.apache.org/repos/asf/incubator-rya/blob/80faf06d/dao/accumulo.rya/src/main/java/mvm/rya/accumulo/mr/AbstractAccumuloMRTool.java
----------------------------------------------------------------------
diff --git a/dao/accumulo.rya/src/main/java/mvm/rya/accumulo/mr/AbstractAccumuloMRTool.java b/dao/accumulo.rya/src/main/java/mvm/rya/accumulo/mr/AbstractAccumuloMRTool.java
index a148e6b..000c08a 100644
--- a/dao/accumulo.rya/src/main/java/mvm/rya/accumulo/mr/AbstractAccumuloMRTool.java
+++ b/dao/accumulo.rya/src/main/java/mvm/rya/accumulo/mr/AbstractAccumuloMRTool.java
@@ -1,25 +1,26 @@
 package mvm.rya.accumulo.mr;
 
 /*
- * #%L
- * mvm.rya.accumulo.rya
- * %%
- * Copyright (C) 2014 Rya
- * %%
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * You may obtain a copy of the License at
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
  * 
- *      http://www.apache.org/licenses/LICENSE-2.0
+ *   http://www.apache.org/licenses/LICENSE-2.0
  * 
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- * #L%
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied.  See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
  */
 
+
+
 import mvm.rya.accumulo.AccumuloRdfConstants;
 import mvm.rya.accumulo.mr.utils.AccumuloHDFSFileInputFormat;
 import mvm.rya.accumulo.mr.utils.MRUtils;

http://git-wip-us.apache.org/repos/asf/incubator-rya/blob/80faf06d/dao/accumulo.rya/src/main/java/mvm/rya/accumulo/mr/eval/AccumuloRdfCountTool.java
----------------------------------------------------------------------
diff --git a/dao/accumulo.rya/src/main/java/mvm/rya/accumulo/mr/eval/AccumuloRdfCountTool.java b/dao/accumulo.rya/src/main/java/mvm/rya/accumulo/mr/eval/AccumuloRdfCountTool.java
index 0818b5a..ee1004d 100644
--- a/dao/accumulo.rya/src/main/java/mvm/rya/accumulo/mr/eval/AccumuloRdfCountTool.java
+++ b/dao/accumulo.rya/src/main/java/mvm/rya/accumulo/mr/eval/AccumuloRdfCountTool.java
@@ -1,25 +1,26 @@
 package mvm.rya.accumulo.mr.eval;
 
 /*
- * #%L
- * mvm.rya.accumulo.rya
- * %%
- * Copyright (C) 2014 Rya
- * %%
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * You may obtain a copy of the License at
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
  * 
- *      http://www.apache.org/licenses/LICENSE-2.0
+ *   http://www.apache.org/licenses/LICENSE-2.0
  * 
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- * #L%
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied.  See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
  */
 
+
+
 import java.io.IOException;
 import java.util.Date;
 
@@ -254,4 +255,4 @@ public class AccumuloRdfCountTool extends AbstractAccumuloMRTool implements Tool
         }
 
     }
-}
\ No newline at end of file
+}

http://git-wip-us.apache.org/repos/asf/incubator-rya/blob/80faf06d/dao/accumulo.rya/src/main/java/mvm/rya/accumulo/mr/fileinput/BulkNtripsInputTool.java
----------------------------------------------------------------------
diff --git a/dao/accumulo.rya/src/main/java/mvm/rya/accumulo/mr/fileinput/BulkNtripsInputTool.java b/dao/accumulo.rya/src/main/java/mvm/rya/accumulo/mr/fileinput/BulkNtripsInputTool.java
index 8f6184b..c3ddcfd 100644
--- a/dao/accumulo.rya/src/main/java/mvm/rya/accumulo/mr/fileinput/BulkNtripsInputTool.java
+++ b/dao/accumulo.rya/src/main/java/mvm/rya/accumulo/mr/fileinput/BulkNtripsInputTool.java
@@ -1,25 +1,26 @@
 package mvm.rya.accumulo.mr.fileinput;
 
 /*
- * #%L
- * mvm.rya.accumulo.rya
- * %%
- * Copyright (C) 2014 Rya
- * %%
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * You may obtain a copy of the License at
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
  * 
- *      http://www.apache.org/licenses/LICENSE-2.0
+ *   http://www.apache.org/licenses/LICENSE-2.0
  * 
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- * #L%
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied.  See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
  */
 
+
+
 import static com.google.common.base.Preconditions.checkNotNull;
 import static mvm.rya.accumulo.AccumuloRdfUtils.extractValue;
 import static mvm.rya.accumulo.AccumuloRdfUtils.from;

http://git-wip-us.apache.org/repos/asf/incubator-rya/blob/80faf06d/dao/accumulo.rya/src/main/java/mvm/rya/accumulo/mr/fileinput/RdfFileInputByLineTool.java
----------------------------------------------------------------------
diff --git a/dao/accumulo.rya/src/main/java/mvm/rya/accumulo/mr/fileinput/RdfFileInputByLineTool.java b/dao/accumulo.rya/src/main/java/mvm/rya/accumulo/mr/fileinput/RdfFileInputByLineTool.java
index 7916cdb..5a872a0 100644
--- a/dao/accumulo.rya/src/main/java/mvm/rya/accumulo/mr/fileinput/RdfFileInputByLineTool.java
+++ b/dao/accumulo.rya/src/main/java/mvm/rya/accumulo/mr/fileinput/RdfFileInputByLineTool.java
@@ -1,25 +1,26 @@
 package mvm.rya.accumulo.mr.fileinput;
 
 /*
- * #%L
- * mvm.rya.accumulo.rya
- * %%
- * Copyright (C) 2014 Rya
- * %%
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * You may obtain a copy of the License at
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
  * 
- *      http://www.apache.org/licenses/LICENSE-2.0
+ *   http://www.apache.org/licenses/LICENSE-2.0
  * 
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- * #L%
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied.  See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
  */
 
+
+
 import java.io.IOException;
 import java.io.StringReader;
 import java.util.Collection;

http://git-wip-us.apache.org/repos/asf/incubator-rya/blob/80faf06d/dao/accumulo.rya/src/main/java/mvm/rya/accumulo/mr/fileinput/RdfFileInputFormat.java
----------------------------------------------------------------------
diff --git a/dao/accumulo.rya/src/main/java/mvm/rya/accumulo/mr/fileinput/RdfFileInputFormat.java b/dao/accumulo.rya/src/main/java/mvm/rya/accumulo/mr/fileinput/RdfFileInputFormat.java
index ba88a7e..f20dfe3 100644
--- a/dao/accumulo.rya/src/main/java/mvm/rya/accumulo/mr/fileinput/RdfFileInputFormat.java
+++ b/dao/accumulo.rya/src/main/java/mvm/rya/accumulo/mr/fileinput/RdfFileInputFormat.java
@@ -1,25 +1,26 @@
 package mvm.rya.accumulo.mr.fileinput;
 
 /*
- * #%L
- * mvm.rya.accumulo.rya
- * %%
- * Copyright (C) 2014 Rya
- * %%
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * You may obtain a copy of the License at
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
  * 
- *      http://www.apache.org/licenses/LICENSE-2.0
+ *   http://www.apache.org/licenses/LICENSE-2.0
  * 
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- * #L%
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied.  See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
  */
 
+
+
 import java.io.IOException;
 import java.util.concurrent.BlockingQueue;
 import java.util.concurrent.LinkedBlockingQueue;

http://git-wip-us.apache.org/repos/asf/incubator-rya/blob/80faf06d/dao/accumulo.rya/src/main/java/mvm/rya/accumulo/mr/fileinput/RdfFileInputTool.java
----------------------------------------------------------------------
diff --git a/dao/accumulo.rya/src/main/java/mvm/rya/accumulo/mr/fileinput/RdfFileInputTool.java b/dao/accumulo.rya/src/main/java/mvm/rya/accumulo/mr/fileinput/RdfFileInputTool.java
index 789ea88..673d65f 100644
--- a/dao/accumulo.rya/src/main/java/mvm/rya/accumulo/mr/fileinput/RdfFileInputTool.java
+++ b/dao/accumulo.rya/src/main/java/mvm/rya/accumulo/mr/fileinput/RdfFileInputTool.java
@@ -1,25 +1,26 @@
 package mvm.rya.accumulo.mr.fileinput;
 
 /*
- * #%L
- * mvm.rya.accumulo.rya
- * %%
- * Copyright (C) 2014 Rya
- * %%
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * You may obtain a copy of the License at
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
  * 
- *      http://www.apache.org/licenses/LICENSE-2.0
+ *   http://www.apache.org/licenses/LICENSE-2.0
  * 
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- * #L%
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied.  See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
  */
 
+
+
 import static mvm.rya.accumulo.AccumuloRdfConstants.EMPTY_CV;
 
 import java.io.IOException;

http://git-wip-us.apache.org/repos/asf/incubator-rya/blob/80faf06d/dao/accumulo.rya/src/main/java/mvm/rya/accumulo/mr/upgrade/Upgrade322Tool.java
----------------------------------------------------------------------
diff --git a/dao/accumulo.rya/src/main/java/mvm/rya/accumulo/mr/upgrade/Upgrade322Tool.java b/dao/accumulo.rya/src/main/java/mvm/rya/accumulo/mr/upgrade/Upgrade322Tool.java
index 68392b5..89f0aa5 100644
--- a/dao/accumulo.rya/src/main/java/mvm/rya/accumulo/mr/upgrade/Upgrade322Tool.java
+++ b/dao/accumulo.rya/src/main/java/mvm/rya/accumulo/mr/upgrade/Upgrade322Tool.java
@@ -1,25 +1,26 @@
 package mvm.rya.accumulo.mr.upgrade;
 
 /*
- * #%L
- * mvm.rya.accumulo.rya
- * %%
- * Copyright (C) 2014 Rya
- * %%
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * You may obtain a copy of the License at
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
  * 
- *      http://www.apache.org/licenses/LICENSE-2.0
+ *   http://www.apache.org/licenses/LICENSE-2.0
  * 
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- * #L%
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied.  See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
  */
 
+
+
 import mvm.rya.accumulo.mr.AbstractAccumuloMRTool;
 import mvm.rya.accumulo.mr.utils.MRUtils;
 import org.apache.accumulo.core.client.IteratorSetting;

http://git-wip-us.apache.org/repos/asf/incubator-rya/blob/80faf06d/dao/accumulo.rya/src/main/java/mvm/rya/accumulo/mr/utils/AccumuloHDFSFileInputFormat.java
----------------------------------------------------------------------
diff --git a/dao/accumulo.rya/src/main/java/mvm/rya/accumulo/mr/utils/AccumuloHDFSFileInputFormat.java b/dao/accumulo.rya/src/main/java/mvm/rya/accumulo/mr/utils/AccumuloHDFSFileInputFormat.java
index 399cf45..c9dac6b 100644
--- a/dao/accumulo.rya/src/main/java/mvm/rya/accumulo/mr/utils/AccumuloHDFSFileInputFormat.java
+++ b/dao/accumulo.rya/src/main/java/mvm/rya/accumulo/mr/utils/AccumuloHDFSFileInputFormat.java
@@ -1,25 +1,26 @@
 package mvm.rya.accumulo.mr.utils;
 
 /*
- * #%L
- * mvm.rya.accumulo.rya
- * %%
- * Copyright (C) 2014 Rya
- * %%
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * You may obtain a copy of the License at
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
  * 
- *      http://www.apache.org/licenses/LICENSE-2.0
+ *   http://www.apache.org/licenses/LICENSE-2.0
  * 
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- * #L%
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied.  See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
  */
 
+
+
 import java.io.IOException;
 import java.util.ArrayList;
 import java.util.Collections;

http://git-wip-us.apache.org/repos/asf/incubator-rya/blob/80faf06d/dao/accumulo.rya/src/main/java/mvm/rya/accumulo/mr/utils/AccumuloProps.java
----------------------------------------------------------------------
diff --git a/dao/accumulo.rya/src/main/java/mvm/rya/accumulo/mr/utils/AccumuloProps.java b/dao/accumulo.rya/src/main/java/mvm/rya/accumulo/mr/utils/AccumuloProps.java
index f98bf7f..2b89440 100644
--- a/dao/accumulo.rya/src/main/java/mvm/rya/accumulo/mr/utils/AccumuloProps.java
+++ b/dao/accumulo.rya/src/main/java/mvm/rya/accumulo/mr/utils/AccumuloProps.java
@@ -1,25 +1,26 @@
 package mvm.rya.accumulo.mr.utils;
 
 /*
- * #%L
- * mvm.rya.accumulo.rya
- * %%
- * Copyright (C) 2014 Rya
- * %%
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * You may obtain a copy of the License at
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
  * 
- *      http://www.apache.org/licenses/LICENSE-2.0
+ *   http://www.apache.org/licenses/LICENSE-2.0
  * 
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- * #L%
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied.  See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
  */
 
+
+
 import java.io.IOException;
 
 import org.apache.accumulo.core.client.Instance;

http://git-wip-us.apache.org/repos/asf/incubator-rya/blob/80faf06d/dao/accumulo.rya/src/main/java/mvm/rya/accumulo/mr/utils/MRUtils.java
----------------------------------------------------------------------
diff --git a/dao/accumulo.rya/src/main/java/mvm/rya/accumulo/mr/utils/MRUtils.java b/dao/accumulo.rya/src/main/java/mvm/rya/accumulo/mr/utils/MRUtils.java
index 6d53172..c3003d3 100644
--- a/dao/accumulo.rya/src/main/java/mvm/rya/accumulo/mr/utils/MRUtils.java
+++ b/dao/accumulo.rya/src/main/java/mvm/rya/accumulo/mr/utils/MRUtils.java
@@ -1,25 +1,26 @@
 package mvm.rya.accumulo.mr.utils;
 
 /*
- * #%L
- * mvm.rya.accumulo.rya
- * %%
- * Copyright (C) 2014 Rya
- * %%
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * You may obtain a copy of the License at
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
  * 
- *      http://www.apache.org/licenses/LICENSE-2.0
+ *   http://www.apache.org/licenses/LICENSE-2.0
  * 
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- * #L%
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied.  See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
  */
 
+
+
 import org.apache.hadoop.conf.Configuration;
 import org.openrdf.model.URI;
 import org.openrdf.model.ValueFactory;

http://git-wip-us.apache.org/repos/asf/incubator-rya/blob/80faf06d/dao/accumulo.rya/src/main/java/mvm/rya/accumulo/query/AccumuloRyaQueryEngine.java
----------------------------------------------------------------------
diff --git a/dao/accumulo.rya/src/main/java/mvm/rya/accumulo/query/AccumuloRyaQueryEngine.java b/dao/accumulo.rya/src/main/java/mvm/rya/accumulo/query/AccumuloRyaQueryEngine.java
index 2b6f182..1d0d9c9 100644
--- a/dao/accumulo.rya/src/main/java/mvm/rya/accumulo/query/AccumuloRyaQueryEngine.java
+++ b/dao/accumulo.rya/src/main/java/mvm/rya/accumulo/query/AccumuloRyaQueryEngine.java
@@ -1,25 +1,26 @@
 package mvm.rya.accumulo.query;
 
 /*
- * #%L
- * mvm.rya.accumulo.rya
- * %%
- * Copyright (C) 2014 Rya
- * %%
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * You may obtain a copy of the License at
- *
- *      http://www.apache.org/licenses/LICENSE-2.0
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
  * 
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- * #L%
+ *   http://www.apache.org/licenses/LICENSE-2.0
+ * 
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied.  See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
  */
 
+
+
 import static mvm.rya.api.RdfCloudTripleStoreUtils.layoutToTable;
 import info.aduna.iteration.CloseableIteration;
 
@@ -50,7 +51,6 @@ import mvm.rya.api.resolver.RyaContext;
 import mvm.rya.api.resolver.RyaTripleContext;
 import mvm.rya.api.resolver.triple.TripleRowRegex;
 import mvm.rya.api.utils.CloseableIterableIteration;
-import mvm.rya.iterators.LimitingAgeOffFilter;
 
 import org.apache.accumulo.core.client.BatchScanner;
 import org.apache.accumulo.core.client.Connector;
@@ -61,6 +61,7 @@ import org.apache.accumulo.core.data.Key;
 import org.apache.accumulo.core.data.Range;
 import org.apache.accumulo.core.data.Value;
 import org.apache.accumulo.core.iterators.user.RegExFilter;
+import org.apache.accumulo.core.iterators.user.TimestampFilter;
 import org.apache.accumulo.core.security.Authorizations;
 import org.apache.hadoop.io.Text;
 import org.calrissian.mango.collect.CloseableIterable;
@@ -373,10 +374,11 @@ public class AccumuloRyaQueryEngine implements RyaQueryEngine<AccumuloRdfConfigu
             scanner.addScanIterator(setting);
         }
         if (ttl != null) {
-            IteratorSetting setting = new IteratorSetting(9, "fi", LimitingAgeOffFilter.class.getName());
-            setting.addOption(LimitingAgeOffFilter.TTL, Long.toString(ttl));
+            IteratorSetting setting = new IteratorSetting(9, "fi", TimestampFilter.class.getName());
+            TimestampFilter.setStart(setting,  System.currentTimeMillis() - ttl, true);
             if(currentTime != null){
-                setting.addOption(LimitingAgeOffFilter.CURRENT_TIME, Long.toString(currentTime));
+                TimestampFilter.setStart(setting, currentTime - ttl, true);
+                TimestampFilter.setEnd(setting, currentTime, true);
             }
             scanner.addScanIterator(setting);
         }
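
The hunk above swaps the custom LimitingAgeOffFilter for Accumulo's built-in TimestampFilter. As a minimal sketch of the same TTL logic in isolation (the class and method names here are illustrative, not part of the commit):

    import org.apache.accumulo.core.client.IteratorSetting;
    import org.apache.accumulo.core.client.ScannerBase;
    import org.apache.accumulo.core.iterators.user.TimestampFilter;

    public class TtlFilterSketch {
        // Keep only entries whose timestamp falls within ttlMillis of the
        // reference time; mirrors the IteratorSetting built in the hunk above.
        public static void addTtl(ScannerBase scanner, long ttlMillis, Long currentTime) {
            IteratorSetting setting = new IteratorSetting(9, "fi", TimestampFilter.class);
            TimestampFilter.setStart(setting, System.currentTimeMillis() - ttlMillis, true);
            if (currentTime != null) {
                // Evaluate "as of" an explicit time: bound both ends of the window.
                TimestampFilter.setStart(setting, currentTime - ttlMillis, true);
                TimestampFilter.setEnd(setting, currentTime, true);
            }
            scanner.addScanIterator(setting);
        }
    }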

http://git-wip-us.apache.org/repos/asf/incubator-rya/blob/80faf06d/dao/accumulo.rya/src/main/java/mvm/rya/accumulo/query/KeyValueToRyaStatementFunction.java
----------------------------------------------------------------------
diff --git a/dao/accumulo.rya/src/main/java/mvm/rya/accumulo/query/KeyValueToRyaStatementFunction.java b/dao/accumulo.rya/src/main/java/mvm/rya/accumulo/query/KeyValueToRyaStatementFunction.java
index 553624a..2813438 100644
--- a/dao/accumulo.rya/src/main/java/mvm/rya/accumulo/query/KeyValueToRyaStatementFunction.java
+++ b/dao/accumulo.rya/src/main/java/mvm/rya/accumulo/query/KeyValueToRyaStatementFunction.java
@@ -1,25 +1,26 @@
 package mvm.rya.accumulo.query;
 
 /*
- * #%L
- * mvm.rya.accumulo.rya
- * %%
- * Copyright (C) 2014 Rya
- * %%
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * You may obtain a copy of the License at
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
  * 
- *      http://www.apache.org/licenses/LICENSE-2.0
+ *   http://www.apache.org/licenses/LICENSE-2.0
  * 
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- * #L%
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied.  See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
  */
 
+
+
 import java.util.Map;
 
 import mvm.rya.api.RdfCloudTripleStoreConfiguration;



[52/56] [abbrv] incubator-rya git commit: RYA-14 Updated to Java 1.7; Minor geo issue

Posted by mi...@apache.org.
RYA-14 Updated to Java 1.7; Minor geo issue

Also fixed a minor geo issue with the RYA-13 commit.



Project: http://git-wip-us.apache.org/repos/asf/incubator-rya/repo
Commit: http://git-wip-us.apache.org/repos/asf/incubator-rya/commit/990f1ffe
Tree: http://git-wip-us.apache.org/repos/asf/incubator-rya/tree/990f1ffe
Diff: http://git-wip-us.apache.org/repos/asf/incubator-rya/diff/990f1ffe

Branch: refs/heads/master
Commit: 990f1ffe272fe13b99367528fb8bb661f8e29f45
Parents: e5e227c
Author: Aaron Mihalik <mi...@alum.mit.edu>
Authored: Fri Dec 4 18:08:53 2015 -0500
Committer: Aaron Mihalik <mi...@alum.mit.edu>
Committed: Fri Dec 4 18:08:53 2015 -0500

----------------------------------------------------------------------
 .../mvm/rya/indexing/accumulo/geo/GeoMesaGeoIndexer.java  |  2 +-
 .../mvm/rya/indexing/accumulo/geo/GeoIndexerTest.java     |  3 ++-
 pom.xml                                                   | 10 ++--------
 3 files changed, 5 insertions(+), 10 deletions(-)
----------------------------------------------------------------------


http://git-wip-us.apache.org/repos/asf/incubator-rya/blob/990f1ffe/extras/indexing/src/main/java/mvm/rya/indexing/accumulo/geo/GeoMesaGeoIndexer.java
----------------------------------------------------------------------
diff --git a/extras/indexing/src/main/java/mvm/rya/indexing/accumulo/geo/GeoMesaGeoIndexer.java b/extras/indexing/src/main/java/mvm/rya/indexing/accumulo/geo/GeoMesaGeoIndexer.java
index c8b5b4a..9d01751 100644
--- a/extras/indexing/src/main/java/mvm/rya/indexing/accumulo/geo/GeoMesaGeoIndexer.java
+++ b/extras/indexing/src/main/java/mvm/rya/indexing/accumulo/geo/GeoMesaGeoIndexer.java
@@ -1,4 +1,4 @@
-package mvm.rya.indexing.accumulo.geo;
+package mvm.rya.indexing.accumulo.geo;
 
 /*
  * Licensed to the Apache Software Foundation (ASF) under one

http://git-wip-us.apache.org/repos/asf/incubator-rya/blob/990f1ffe/extras/indexing/src/test/java/mvm/rya/indexing/accumulo/geo/GeoIndexerTest.java
----------------------------------------------------------------------
diff --git a/extras/indexing/src/test/java/mvm/rya/indexing/accumulo/geo/GeoIndexerTest.java b/extras/indexing/src/test/java/mvm/rya/indexing/accumulo/geo/GeoIndexerTest.java
index c204f3c..25db123 100644
--- a/extras/indexing/src/test/java/mvm/rya/indexing/accumulo/geo/GeoIndexerTest.java
+++ b/extras/indexing/src/test/java/mvm/rya/indexing/accumulo/geo/GeoIndexerTest.java
@@ -237,7 +237,8 @@ public class GeoIndexerTest {
             Assert.assertEquals(Sets.newHashSet(), getSet(f.queryWithin(pOut, EMPTY_CONSTRAINTS)));
 
             // test a ring for the whole world and make sure the point is gone
-            double[] world = { -180, 90, 180, 90, -180, 90, -180, -90, -180, 90 };
+            // GeoMesa is a little sensitive around lon 180, so we only go to 179
+            double[] world = { -180, 90, 179, 90, 179, -90, -180, -90, -180, 90 };
             LinearRing rWorld = gf.createLinearRing(new PackedCoordinateSequence.Double(world, 2));
             Polygon pWorld = gf.createPolygon(rWorld, new LinearRing[] {});
             Assert.assertEquals(Sets.newHashSet(), getSet(f.queryWithin(pWorld, EMPTY_CONSTRAINTS)));
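
Why the old ring failed: decoded as (x, y) pairs, { -180, 90, 180, 90, -180, 90, -180, -90, -180, 90 } traces the top edge out and back, then down the left edge and back, so the "whole world" polygon was self-intersecting with zero area and could never contain anything. A hedged sketch of checking that with JTS (the 1.x API under com.vividsolutions, as used by this module):

    import com.vividsolutions.jts.geom.GeometryFactory;
    import com.vividsolutions.jts.geom.LinearRing;
    import com.vividsolutions.jts.geom.Polygon;
    import com.vividsolutions.jts.geom.impl.PackedCoordinateSequence;

    public class WorldRingCheck {
        public static void main(String[] args) {
            GeometryFactory gf = new GeometryFactory();
            // The ring the test used before this commit:
            double[] old = { -180, 90, 180, 90, -180, 90, -180, -90, -180, 90 };
            Polygon pOld = gf.createPolygon(gf.createLinearRing(
                    new PackedCoordinateSequence.Double(old, 2)), new LinearRing[] {});
            System.out.println(pOld.getArea());   // 0.0 -- the ring doubles back on itself
            System.out.println(pOld.isValid());   // false -- self-intersecting
        }
    }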

http://git-wip-us.apache.org/repos/asf/incubator-rya/blob/990f1ffe/pom.xml
----------------------------------------------------------------------
diff --git a/pom.xml b/pom.xml
index 5f2164f..dce799f 100644
--- a/pom.xml
+++ b/pom.xml
@@ -500,6 +500,8 @@ under the License.
                     <artifactId>maven-compiler-plugin</artifactId>
                     <configuration>
                         <encoding>${project.build.sourceEncoding}</encoding>
+                        <source>1.7</source>
+                        <target>1.7</target>
                     </configuration>
                 </plugin>
                 <plugin>
@@ -649,14 +651,6 @@ under the License.
             </plugin>
             <plugin>
                 <groupId>org.apache.maven.plugins</groupId>
-                <artifactId>maven-compiler-plugin</artifactId>
-                <configuration>
-                    <source>1.6</source>
-                    <target>1.6</target>
-                </configuration>
-            </plugin>
-            <plugin>
-                <groupId>org.apache.maven.plugins</groupId>
                 <artifactId>maven-enforcer-plugin</artifactId>
                 <executions>
                     <execution>


[07/56] [abbrv] incubator-rya git commit: RYA-7 POM and License Clean-up for Apache Move

Posted by mi...@apache.org.
http://git-wip-us.apache.org/repos/asf/incubator-rya/blob/80faf06d/sail/src/main/java/mvm/rya/rdftriplestore/inference/InferJoin.java
----------------------------------------------------------------------
diff --git a/sail/src/main/java/mvm/rya/rdftriplestore/inference/InferJoin.java b/sail/src/main/java/mvm/rya/rdftriplestore/inference/InferJoin.java
new file mode 100644
index 0000000..87854ac
--- /dev/null
+++ b/sail/src/main/java/mvm/rya/rdftriplestore/inference/InferJoin.java
@@ -0,0 +1,50 @@
+package mvm.rya.rdftriplestore.inference;
+
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ * 
+ *   http://www.apache.org/licenses/LICENSE-2.0
+ * 
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied.  See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+
+
+
+import org.openrdf.query.algebra.Join;
+import org.openrdf.query.algebra.TupleExpr;
+
+import java.util.HashMap;
+import java.util.Map;
+
+/**
+ * Class InferJoin
+ * Date: Apr 16, 2011
+ * Time: 7:29:40 AM
+ */
+public class InferJoin extends Join {
+
+    private Map<String, String> properties = new HashMap<String, String>();
+
+    public InferJoin() {
+    }
+
+    public InferJoin(TupleExpr leftArg, TupleExpr rightArg) {
+        super(leftArg, rightArg);
+    }
+
+    public Map<String, String> getProperties() {
+        return properties;
+    }
+
+}
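
InferJoin is a plain Sesame Join plus a property bag; the visitors later in this commit use that bag to tag a join as already inferred so it is not expanded a second time. A hedged sketch of the pattern, assuming the sketch lives in the same mvm.rya.rdftriplestore.inference package:

    import org.openrdf.query.algebra.StatementPattern;

    public class InferJoinSketch {
        public static InferJoin markInferred(StatementPattern left, StatementPattern right) {
            InferJoin join = new InferJoin(left, right);
            // Visitors check this flag to skip joins they themselves produced.
            join.getProperties().put(InferConstants.INFERRED, InferConstants.TRUE);
            return join;
        }
    }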

http://git-wip-us.apache.org/repos/asf/incubator-rya/blob/80faf06d/sail/src/main/java/mvm/rya/rdftriplestore/inference/InferUnion.java
----------------------------------------------------------------------
diff --git a/sail/src/main/java/mvm/rya/rdftriplestore/inference/InferUnion.java b/sail/src/main/java/mvm/rya/rdftriplestore/inference/InferUnion.java
new file mode 100644
index 0000000..4d229d0
--- /dev/null
+++ b/sail/src/main/java/mvm/rya/rdftriplestore/inference/InferUnion.java
@@ -0,0 +1,48 @@
+package mvm.rya.rdftriplestore.inference;
+
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ * 
+ *   http://www.apache.org/licenses/LICENSE-2.0
+ * 
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied.  See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+
+
+
+import org.openrdf.query.algebra.TupleExpr;
+import org.openrdf.query.algebra.Union;
+
+import java.util.HashMap;
+import java.util.Map;
+
+/**
+ * Class InferUnion
+ * Date: Mar 14, 2012
+ * Time: 12:43:49 PM
+ */
+public class InferUnion extends Union {
+    private Map<String, String> properties = new HashMap<String, String>();
+
+    public InferUnion() {
+    }
+
+    public InferUnion(TupleExpr leftArg, TupleExpr rightArg) {
+        super(leftArg, rightArg);
+    }
+
+    public Map<String, String> getProperties() {
+        return properties;
+    }
+}

http://git-wip-us.apache.org/repos/asf/incubator-rya/blob/80faf06d/sail/src/main/java/mvm/rya/rdftriplestore/inference/InferenceEngine.java
----------------------------------------------------------------------
diff --git a/sail/src/main/java/mvm/rya/rdftriplestore/inference/InferenceEngine.java b/sail/src/main/java/mvm/rya/rdftriplestore/inference/InferenceEngine.java
new file mode 100644
index 0000000..f4ed420
--- /dev/null
+++ b/sail/src/main/java/mvm/rya/rdftriplestore/inference/InferenceEngine.java
@@ -0,0 +1,410 @@
+package mvm.rya.rdftriplestore.inference;
+
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ * 
+ *   http://www.apache.org/licenses/LICENSE-2.0
+ * 
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied.  See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+
+
+
+import com.tinkerpop.blueprints.Direction;
+import com.tinkerpop.blueprints.Edge;
+import com.tinkerpop.blueprints.Graph;
+import com.tinkerpop.blueprints.Vertex;
+import com.tinkerpop.blueprints.impls.tg.TinkerGraphFactory;
+import info.aduna.iteration.CloseableIteration;
+import mvm.rya.api.RdfCloudTripleStoreConfiguration;
+import mvm.rya.api.persist.RyaDAO;
+import mvm.rya.api.persist.RyaDAOException;
+import mvm.rya.api.persist.utils.RyaDAOHelper;
+import org.openrdf.model.Resource;
+import org.openrdf.model.Statement;
+import org.openrdf.model.URI;
+import org.openrdf.model.Value;
+import org.openrdf.model.impl.StatementImpl;
+import org.openrdf.model.vocabulary.OWL;
+import org.openrdf.model.vocabulary.RDF;
+import org.openrdf.model.vocabulary.RDFS;
+import org.openrdf.query.QueryEvaluationException;
+
+import java.util.*;
+
+import static com.google.common.base.Preconditions.checkArgument;
+import static com.google.common.base.Preconditions.checkNotNull;
+
+/**
+ * Pulls inference relationships from the DAO every refreshGraphSchedule ms (five minutes by default). <br>
+ * Infers extra relationships. <br>
+ * Caches the relationships in a Graph for later use. <br>
+ */
+public class InferenceEngine {
+
+    private Graph subClassOfGraph;
+    private Graph subPropertyOfGraph;
+    private Set<URI> symmetricPropertySet;
+    private Map<URI, URI> inverseOfMap;
+    private Set<URI> transitivePropertySet;
+
+    private RyaDAO ryaDAO;
+    private RdfCloudTripleStoreConfiguration conf;
+    private boolean initialized = false;
+    private boolean schedule = true;
+
+    private long refreshGraphSchedule = 5 * 60 * 1000; //5 min
+    private Timer timer;
+    public static final String URI_PROP = "uri";
+
+    public void init() throws InferenceEngineException {
+        try {
+            if (isInitialized()) {
+                return;
+            }
+
+            checkNotNull(conf, "Configuration is null");
+            checkNotNull(ryaDAO, "RdfDao is null");
+            checkArgument(ryaDAO.isInitialized(), "RdfDao is not initialized");
+
+            if (schedule) {
+                timer = new Timer(InferenceEngine.class.getName());
+                timer.scheduleAtFixedRate(new TimerTask() {
+
+                    @Override
+                    public void run() {
+                        try {
+                            refreshGraph();
+                        } catch (InferenceEngineException e) {
+                            throw new RuntimeException(e);
+                        }
+                    }
+
+                }, refreshGraphSchedule, refreshGraphSchedule);
+            }
+            refreshGraph();
+            setInitialized(true);
+        } catch (RyaDAOException e) {
+            throw new InferenceEngineException(e);
+        }
+    }
+
+    public void destroy() throws InferenceEngineException {
+        setInitialized(false);
+        if (timer != null) {
+            timer.cancel();
+        }
+    }
+
+    public void refreshGraph() throws InferenceEngineException {
+        try {
+            //get all subclassof
+            Graph graph = TinkerGraphFactory.createTinkerGraph();
+            CloseableIteration<Statement, QueryEvaluationException> iter = RyaDAOHelper.query(ryaDAO, null,
+                    RDFS.SUBCLASSOF, null, conf);
+            try {
+                while (iter.hasNext()) {
+                    String edgeName = RDFS.SUBCLASSOF.stringValue();
+                    Statement st = iter.next();
+                    addStatementEdge(graph, edgeName, st);
+                }
+            } finally {
+                if (iter != null) {
+                    iter.close();
+                }
+            }
+
+            subClassOfGraph = graph; //TODO: Should this be synchronized?
+
+            graph = TinkerGraphFactory.createTinkerGraph();
+
+            iter = RyaDAOHelper.query(ryaDAO, null,
+                    RDFS.SUBPROPERTYOF, null, conf);
+            try {
+                while (iter.hasNext()) {
+                    String edgeName = RDFS.SUBPROPERTYOF.stringValue();
+                    Statement st = iter.next();
+                    addStatementEdge(graph, edgeName, st);
+                }
+            } finally {
+                if (iter != null) {
+                    iter.close();
+                }
+            }
+
+            //equiv property really is the same as a subPropertyOf both ways
+            iter = RyaDAOHelper.query(ryaDAO, null, OWL.EQUIVALENTPROPERTY, null, conf);
+            try {
+                while (iter.hasNext()) {
+                    String edgeName = RDFS.SUBPROPERTYOF.stringValue();
+                    Statement st = iter.next();
+                    addStatementEdge(graph, edgeName, st);
+                    //reverse is also true
+                    addStatementEdge(graph, edgeName, new StatementImpl((Resource) st.getObject(), st.getPredicate(), st.getSubject()));
+                }
+            } finally {
+                if (iter != null) {
+                    iter.close();
+                }
+            }
+
+            subPropertyOfGraph = graph; //TODO: Should this be synchronized?
+
+            iter = RyaDAOHelper.query(ryaDAO, null, RDF.TYPE, OWL.SYMMETRICPROPERTY, conf);
+            Set<URI> symProp = new HashSet();
+            try {
+                while (iter.hasNext()) {
+                    Statement st = iter.next();
+                    symProp.add((URI) st.getSubject()); //safe to assume it is a URI?
+                }
+            } finally {
+                if (iter != null) {
+                    iter.close();
+                }
+            }
+            symmetricPropertySet = symProp;
+
+            iter = RyaDAOHelper.query(ryaDAO, null, RDF.TYPE, OWL.TRANSITIVEPROPERTY, conf);
+            Set<URI> transProp = new HashSet();
+            try {
+                while (iter.hasNext()) {
+                    Statement st = iter.next();
+                    transProp.add((URI) st.getSubject());
+                }
+            } finally {
+                if (iter != null) {
+                    iter.close();
+                }
+            }
+            transitivePropertySet = transProp;
+
+            iter = RyaDAOHelper.query(ryaDAO, null, OWL.INVERSEOF, null, conf);
+            Map<URI, URI> invProp = new HashMap();
+            try {
+                while (iter.hasNext()) {
+                    Statement st = iter.next();
+                    invProp.put((URI) st.getSubject(), (URI) st.getObject());
+                    invProp.put((URI) st.getObject(), (URI) st.getSubject());
+                }
+            } finally {
+                if (iter != null) {
+                    iter.close();
+                }
+            }
+            inverseOfMap = invProp;
+        } catch (QueryEvaluationException e) {
+            throw new InferenceEngineException(e);
+        }
+    }
+
+    protected void addStatementEdge(Graph graph, String edgeName, Statement st) {
+        Resource subj = st.getSubject();
+        Vertex a = graph.getVertex(subj);
+        if (a == null) {
+            a = graph.addVertex(subj);
+            a.setProperty(URI_PROP, subj);
+        }
+        Value obj = st.getObject();
+        Vertex b = graph.getVertex(obj);
+        if (b == null) {
+            b = graph.addVertex(obj);
+            b.setProperty(URI_PROP, obj);
+        }
+        graph.addEdge(null, a, b, edgeName);
+    }
+
+    public Set<URI> findParents(Graph graph, URI vertexId) {
+        Set<URI> parents = new HashSet();
+        if (graph == null) {
+            return parents;
+        }
+        Vertex v = graph.getVertex(vertexId);
+        if (v == null) {
+            return parents;
+        }
+        addParents(v, parents);
+        return parents;
+    }
+
+    private static void addParents(Vertex v, Set<URI> parents) {
+        for (Edge edge : v.getEdges(Direction.IN)) {
+            Vertex ov = edge.getVertex(Direction.OUT);
+            Object o = ov.getProperty(URI_PROP);
+            if (o != null && o instanceof URI) {
+                boolean contains = parents.contains(o);
+                if (!contains) {
+                    parents.add((URI) o);
+                    addParents(ov, parents);
+                }
+            }
+
+        }
+    }
+
+    public boolean isSymmetricProperty(URI prop) {
+        return (symmetricPropertySet != null) && symmetricPropertySet.contains(prop);
+    }
+
+    public URI findInverseOf(URI prop) {
+        return (inverseOfMap != null) ? inverseOfMap.get(prop) : (null);
+    }
+
+    public boolean isTransitiveProperty(URI prop) {
+        return (transitivePropertySet != null) && transitivePropertySet.contains(prop);
+    }
+
+    /**
+     * TODO: This chaining can be slow at query execution. the other option is to perform this in the query itself, but that will be constrained to how many levels we decide to go
+     */
+    public Set<Statement> findTransitiveProperty(Resource subj, URI prop, Value obj, Resource... contxts) throws InferenceEngineException {
+        if (transitivePropertySet.contains(prop)) {
+            Set<Statement> sts = new HashSet();
+            boolean goUp = subj == null;
+            chainTransitiveProperty(subj, prop, obj, (goUp) ? (obj) : (subj), sts, goUp, contxts);
+            return sts;
+        } else
+            return null;
+    }
+
+    /**
+     * TODO: This chaining can be slow at query execution. the other option is to perform this in the query itself, but that will be constrained to how many levels we decide to go
+     */
+    public Set<Resource> findSameAs(Resource value, Resource... contxts) throws InferenceEngineException{
+		Set<Resource> sameAs = new HashSet<Resource>();
+		sameAs.add(value);
+		findSameAsChaining(value, sameAs, contxts);
+		return sameAs;
+    }
+
+    /**
+     * TODO: This chaining can be slow at query execution. the other option is to perform this in the query itself, but that will be constrained to how many levels we decide to go
+     */
+    public void findSameAsChaining(Resource subj, Set<Resource> currentSameAs, Resource[] contxts) throws InferenceEngineException{
+        try {
+			CloseableIteration<Statement, QueryEvaluationException> subjIter = RyaDAOHelper.query(ryaDAO, subj, OWL.SAMEAS, null, conf, contxts);
+			while (subjIter.hasNext()){
+				Statement st = subjIter.next();
+				if (!currentSameAs.contains(st.getObject())){
+					Resource castedObj = (Resource) st.getObject();
+					currentSameAs.add(castedObj);
+					findSameAsChaining(castedObj, currentSameAs, contxts);
+				}
+			}
+			subjIter.close();
+			CloseableIteration<Statement, QueryEvaluationException> objIter = RyaDAOHelper.query(ryaDAO, null, OWL.SAMEAS, subj, conf, contxts);
+			while (objIter.hasNext()){
+				Statement st = objIter.next();
+				if (!currentSameAs.contains(st.getSubject())){
+					Resource sameAsSubj = st.getSubject();
+					currentSameAs.add(sameAsSubj);
+					findSameAsChaining(sameAsSubj, currentSameAs, contxts);
+				}
+			}
+			objIter.close();
+		} catch (QueryEvaluationException e) {
+			throw new InferenceEngineException(e);
+		}
+
+    }
+
+    protected void chainTransitiveProperty(Resource subj, URI prop, Value obj, Value core, Set<Statement> sts, boolean goUp, Resource[] contxts) throws InferenceEngineException {
+        try {
+            CloseableIteration<Statement, QueryEvaluationException> iter = RyaDAOHelper.query(ryaDAO, subj, prop, obj, conf, contxts);
+            while (iter.hasNext()) {
+                Statement st = iter.next();
+                sts.add(new StatementImpl((goUp) ? (st.getSubject()) : (Resource) (core), prop, (!goUp) ? (st.getObject()) : (core)));
+                if (goUp) {
+                    chainTransitiveProperty(null, prop, st.getSubject(), core, sts, goUp, contxts);
+                } else {
+                    chainTransitiveProperty((Resource) st.getObject(), prop, null, core, sts, goUp, contxts);
+                }
+            }
+            iter.close();
+        } catch (QueryEvaluationException e) {
+            throw new InferenceEngineException(e);
+        }
+    }
+
+    public boolean isInitialized() {
+        return initialized;
+    }
+
+    public void setInitialized(boolean initialized) {
+        this.initialized = initialized;
+    }
+
+    public RyaDAO getRyaDAO() {
+        return ryaDAO;
+    }
+
+    public void setRyaDAO(RyaDAO ryaDAO) {
+        this.ryaDAO = ryaDAO;
+    }
+
+    public RdfCloudTripleStoreConfiguration getConf() {
+        return conf;
+    }
+
+    public void setConf(RdfCloudTripleStoreConfiguration conf) {
+        this.conf = conf;
+    }
+
+    public Graph getSubClassOfGraph() {
+        return subClassOfGraph;
+    }
+
+    public Graph getSubPropertyOfGraph() {
+        return subPropertyOfGraph;
+    }
+
+    public long getRefreshGraphSchedule() {
+        return refreshGraphSchedule;
+    }
+
+    public void setRefreshGraphSchedule(long refreshGraphSchedule) {
+        this.refreshGraphSchedule = refreshGraphSchedule;
+    }
+
+    public Set<URI> getSymmetricPropertySet() {
+        return symmetricPropertySet;
+    }
+
+    public void setSymmetricPropertySet(Set<URI> symmetricPropertySet) {
+        this.symmetricPropertySet = symmetricPropertySet;
+    }
+
+    public Map<URI, URI> getInverseOfMap() {
+        return inverseOfMap;
+    }
+
+    public void setInverseOfMap(Map<URI, URI> inverseOfMap) {
+        this.inverseOfMap = inverseOfMap;
+    }
+
+    public Set<URI> getTransitivePropertySet() {
+        return transitivePropertySet;
+    }
+
+    public void setTransitivePropertySet(Set<URI> transitivePropertySet) {
+        this.transitivePropertySet = transitivePropertySet;
+    }
+
+    public boolean isSchedule() {
+        return schedule;
+    }
+
+    public void setSchedule(boolean schedule) {
+        this.schedule = schedule;
+    }
+}
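
A hedged sketch of standing the engine up outside the SAIL, using only the setters and lifecycle methods shown above; the DAO and configuration are assumed to be initialized elsewhere:

    import mvm.rya.api.RdfCloudTripleStoreConfiguration;
    import mvm.rya.api.persist.RyaDAO;

    public class InferenceEngineSketch {
        public static InferenceEngine start(RyaDAO dao, RdfCloudTripleStoreConfiguration conf)
                throws InferenceEngineException {
            InferenceEngine engine = new InferenceEngine();
            engine.setRyaDAO(dao);                       // must already be initialized
            engine.setConf(conf);
            engine.setRefreshGraphSchedule(60 * 1000L);  // set before init() so the timer uses it
            engine.init();                               // builds the graphs, starts the timer
            return engine;                               // call destroy() to stop the timer
        }
    }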

http://git-wip-us.apache.org/repos/asf/incubator-rya/blob/80faf06d/sail/src/main/java/mvm/rya/rdftriplestore/inference/InferenceEngineException.java
----------------------------------------------------------------------
diff --git a/sail/src/main/java/mvm/rya/rdftriplestore/inference/InferenceEngineException.java b/sail/src/main/java/mvm/rya/rdftriplestore/inference/InferenceEngineException.java
new file mode 100644
index 0000000..4fc94e3
--- /dev/null
+++ b/sail/src/main/java/mvm/rya/rdftriplestore/inference/InferenceEngineException.java
@@ -0,0 +1,43 @@
+package mvm.rya.rdftriplestore.inference;
+
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ * 
+ *   http://www.apache.org/licenses/LICENSE-2.0
+ * 
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied.  See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+
+
+
+/**
+ * Date: 7/20/12
+ * Time: 11:03 AM
+ */
+public class InferenceEngineException extends Exception {
+    public InferenceEngineException() {
+    }
+
+    public InferenceEngineException(String s) {
+        super(s);
+    }
+
+    public InferenceEngineException(String s, Throwable throwable) {
+        super(s, throwable);
+    }
+
+    public InferenceEngineException(Throwable throwable) {
+        super(throwable);
+    }
+}

http://git-wip-us.apache.org/repos/asf/incubator-rya/blob/80faf06d/sail/src/main/java/mvm/rya/rdftriplestore/inference/InverseOfVisitor.java
----------------------------------------------------------------------
diff --git a/sail/src/main/java/mvm/rya/rdftriplestore/inference/InverseOfVisitor.java b/sail/src/main/java/mvm/rya/rdftriplestore/inference/InverseOfVisitor.java
new file mode 100644
index 0000000..6f8004c
--- /dev/null
+++ b/sail/src/main/java/mvm/rya/rdftriplestore/inference/InverseOfVisitor.java
@@ -0,0 +1,80 @@
+package mvm.rya.rdftriplestore.inference;
+
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ * 
+ *   http://www.apache.org/licenses/LICENSE-2.0
+ * 
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied.  See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+
+
+
+import mvm.rya.api.RdfCloudTripleStoreConfiguration;
+import org.openrdf.model.URI;
+import org.openrdf.model.vocabulary.RDF;
+import org.openrdf.model.vocabulary.RDFS;
+import org.openrdf.model.vocabulary.SESAME;
+import org.openrdf.query.algebra.StatementPattern;
+import org.openrdf.query.algebra.Union;
+import org.openrdf.query.algebra.Var;
+
+/**
+ * Predicates with an owl:inverseOf mapping are expanded into a union
+ * Class InverseOfVisitor
+ * Date: Mar 29, 2011
+ * Time: 11:28:34 AM
+ */
+public class InverseOfVisitor extends AbstractInferVisitor {
+
+    public InverseOfVisitor(RdfCloudTripleStoreConfiguration conf, InferenceEngine inferenceEngine) {
+        super(conf, inferenceEngine);
+        include = conf.isInferInverseOf();
+    }
+
+    @Override
+    protected void meetSP(StatementPattern node) throws Exception {
+        StatementPattern sp = node.clone();
+        final Var predVar = sp.getPredicateVar();
+
+        URI pred = (URI) predVar.getValue();
+        String predNamespace = pred.getNamespace();
+
+        final Var objVar = sp.getObjectVar();
+        final Var cntxtVar = sp.getContextVar();
+        if (objVar != null &&
+                !RDF.NAMESPACE.equals(predNamespace) &&
+                !SESAME.NAMESPACE.equals(predNamespace) &&
+                !RDFS.NAMESPACE.equals(predNamespace)
+                && !EXPANDED.equals(cntxtVar)) {
+            /**
+             *
+             * { ?a ?pred ?b }
+             *   UNION
+             * { ?b ?pred ?a }
+             */
+
+            URI predUri = (URI) predVar.getValue();
+            URI invPropUri = inferenceEngine.findInverseOf(predUri);
+            if (invPropUri != null) {
+                Var subjVar = sp.getSubjectVar();
+                Union union = new InferUnion();
+                union.setLeftArg(sp);
+                union.setRightArg(new StatementPattern(objVar, new Var(predVar.getName(), invPropUri), subjVar, cntxtVar));
+                node.replaceWith(union);
+            }
+        }
+    }
+    
+}
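
Concretely, if the ontology asserts that ex:hasChild is the owl:inverseOf ex:hasParent, the visitor rewrites { ?a ex:hasParent ?b } into { ?a ex:hasParent ?b } UNION { ?b ex:hasChild ?a }. A hedged sketch of the union it builds, assuming the sketch lives in the same package; the example URIs are hypothetical:

    import org.openrdf.model.URI;
    import org.openrdf.model.impl.URIImpl;
    import org.openrdf.query.algebra.StatementPattern;
    import org.openrdf.query.algebra.Var;

    public class InverseOfSketch {
        public static InferUnion expand(Var subj, Var obj, Var cntxt) {
            URI pred = new URIImpl("http://example.org/hasParent");    // hypothetical
            URI inverse = new URIImpl("http://example.org/hasChild");  // hypothetical
            // Left branch keeps the original pattern; the right branch swaps
            // subject and object and binds the predicate to the inverse property.
            StatementPattern original = new StatementPattern(subj, new Var("p", pred), obj, cntxt);
            StatementPattern flipped = new StatementPattern(obj, new Var("p", inverse), subj, cntxt);
            InferUnion union = new InferUnion();
            union.setLeftArg(original);
            union.setRightArg(flipped);
            return union;
        }
    }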

http://git-wip-us.apache.org/repos/asf/incubator-rya/blob/80faf06d/sail/src/main/java/mvm/rya/rdftriplestore/inference/SameAsVisitor.java
----------------------------------------------------------------------
diff --git a/sail/src/main/java/mvm/rya/rdftriplestore/inference/SameAsVisitor.java b/sail/src/main/java/mvm/rya/rdftriplestore/inference/SameAsVisitor.java
new file mode 100644
index 0000000..d035026
--- /dev/null
+++ b/sail/src/main/java/mvm/rya/rdftriplestore/inference/SameAsVisitor.java
@@ -0,0 +1,187 @@
+package mvm.rya.rdftriplestore.inference;
+
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ * 
+ *   http://www.apache.org/licenses/LICENSE-2.0
+ * 
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied.  See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+
+
+
+import mvm.rya.api.RdfCloudTripleStoreConfiguration;
+import mvm.rya.api.utils.NullableStatementImpl;
+import mvm.rya.rdftriplestore.utils.FixedStatementPattern;
+import mvm.rya.rdftriplestore.utils.TransitivePropertySP;
+import org.openrdf.model.Resource;
+import org.openrdf.model.URI;
+import org.openrdf.model.Value;
+import org.openrdf.model.vocabulary.OWL;
+import org.openrdf.model.vocabulary.RDF;
+import org.openrdf.model.vocabulary.RDFS;
+import org.openrdf.model.vocabulary.SESAME;
+import org.openrdf.query.algebra.StatementPattern;
+import org.openrdf.query.algebra.Var;
+
+import java.util.HashSet;
+import java.util.Set;
+import java.util.UUID;
+
+/**
+ * Constant subjects and objects are expanded with their owl:sameAs closure
+ * Class SameAsVisitor
+ * Date: Mar 29, 2011
+ * Time: 11:28:34 AM
+ */
+public class SameAsVisitor extends AbstractInferVisitor {
+
+    public SameAsVisitor(RdfCloudTripleStoreConfiguration conf, InferenceEngine inferenceEngine) {
+        super(conf, inferenceEngine);
+        include = conf.isInferSubPropertyOf(); // TODO: reuses the subPropertyOf flag; a dedicated sameAs option would be clearer
+    }
+    
+    public void meet(StatementPattern sp) throws Exception {
+        if (!include) {
+            return;
+        }
+        if (sp instanceof FixedStatementPattern || sp instanceof TransitivePropertySP || sp instanceof DoNotExpandSP) {
+            return;   //already inferred somewhere else
+        }
+        final Var predVar = sp.getPredicateVar();
+        //do not know when things are null
+        if (predVar == null) {
+            return;
+        }
+        meetSP(sp);
+    }
+
+    @Override
+    protected void meetSP(StatementPattern node) throws Exception {
+        StatementPattern sp = node.clone();
+        final Var predVar = sp.getPredicateVar();
+
+        boolean shouldExpand = true;
+        if (predVar.hasValue()){
+            URI pred = (URI) predVar.getValue();
+            String predNamespace = pred.getNamespace();
+            shouldExpand = !pred.equals(OWL.SAMEAS) && 
+            !RDF.NAMESPACE.equals(predNamespace) &&
+            !SESAME.NAMESPACE.equals(predNamespace) &&
+            !RDFS.NAMESPACE.equals(predNamespace);
+        }
+
+        final Var objVar = sp.getObjectVar();
+        final Var subjVar = sp.getSubjectVar();
+        final Var cntxtVar = sp.getContextVar();
+        if (shouldExpand
+                && !EXPANDED.equals(cntxtVar) && !(objVar == null) && !(subjVar == null)){
+            if (objVar.getValue() == null) {
+            	Value subjVarValue = subjVar.getValue();
+            	if (subjVarValue instanceof Resource){
+            		Set<Resource> uris = inferenceEngine.findSameAs((Resource)subjVar.getValue(), getVarValue(cntxtVar));
+            		if (uris.size() > 1){
+            			InferJoin join = getReplaceJoin(uris, true, subjVar, objVar, predVar, cntxtVar);
+            			node.replaceWith(join);  
+            		}
+            	}
+            }
+            else if (subjVar.getValue() == null) {
+            	Value objVarValue = objVar.getValue();
+            	if (objVarValue instanceof Resource){
+            		Set<Resource> uris = inferenceEngine.findSameAs((Resource)objVar.getValue(), getVarValue(cntxtVar));
+                	if (uris.size() > 1){
+                        InferJoin join = getReplaceJoin(uris, false, subjVar, objVar, predVar, cntxtVar);
+                        node.replaceWith(join);  
+                	}
+            	}  	
+            }
+            else {
+            	// both subj and pred are set and should be expanded
+            	Set<Resource> subjURIs = new HashSet<Resource>();
+            	Set<Resource> objURIs = new HashSet<Resource>();
+            	// TODO I don't like these checks -- is there a better way to do this?
+            	Value objVarValue = objVar.getValue();
+           	    if (objVarValue instanceof Resource){
+           	    	objURIs = inferenceEngine.findSameAs((Resource)objVar.getValue(), getVarValue(cntxtVar));
+            	}
+            	Value subjVarValue = subjVar.getValue();
+            	if (subjVarValue instanceof Resource){
+            		subjURIs = inferenceEngine.findSameAs((Resource)subjVar.getValue(), getVarValue(cntxtVar));
+            	}
+            	InferJoin finalJoin = null;
+            	// expand subj first
+            	if (subjURIs.size() > 1){
+            		finalJoin = getReplaceJoin(subjURIs, true, subjVar, objVar, predVar, cntxtVar);
+            	}
+            	// now expand the obj
+            	if (objURIs.size() > 1){
+            		// if we already expanded the subj
+            		if (finalJoin != null){
+            			// we know what this is since we created it
+            			DoNotExpandSP origStatement = (DoNotExpandSP) finalJoin.getRightArg();
+            	        String s = UUID.randomUUID().toString();
+            	        Var dummyVar = new Var(s);
+            			StatementPattern origDummyStatement = new DoNotExpandSP(origStatement.getSubjectVar(), origStatement.getPredicateVar(), dummyVar, cntxtVar);
+            	        FixedStatementPattern fsp = new FixedStatementPattern(dummyVar, new Var("c-" + s, OWL.SAMEAS), objVar, cntxtVar);
+            	        for (Resource sameAs : objURIs){
+            	    		NullableStatementImpl newStatement = new NullableStatementImpl(sameAs, OWL.SAMEAS, (Resource)objVar.getValue(), getVarValue(cntxtVar));
+            	            fsp.statements.add(newStatement);        		
+            	    	}
+            	        InferJoin interimJoin = new InferJoin(fsp, origDummyStatement);
+            	        finalJoin = new InferJoin(finalJoin.getLeftArg(), interimJoin);
+            		}
+            		else {
+            			finalJoin = getReplaceJoin(objURIs, false, subjVar, objVar, predVar, cntxtVar);
+            		}
+            		
+            	}
+            	if (finalJoin != null){
+            	    node.replaceWith(finalJoin);
+            	}
+            }
+        }
+    }
+    
+    private InferJoin getReplaceJoin(Set<Resource> uris, boolean subSubj, Var subjVar, Var objVar, Var predVar, Var cntxtVar){
+        String s = UUID.randomUUID().toString();
+        Var dummyVar = new Var(s);
+        StatementPattern origStatement;
+        Var subVar;
+        if (subSubj){
+        	subVar = subjVar;
+        	origStatement = new DoNotExpandSP(dummyVar, predVar, objVar, cntxtVar);
+        }
+        else {
+        	subVar = objVar;
+        	origStatement = new DoNotExpandSP(subjVar, predVar, dummyVar, cntxtVar);
+       }
+        FixedStatementPattern fsp = new FixedStatementPattern(dummyVar, new Var("c-" + s, OWL.SAMEAS), subVar, cntxtVar);
+        for (Resource sameAs : uris){
+    		NullableStatementImpl newStatement = new NullableStatementImpl(sameAs, OWL.SAMEAS, (Resource)subVar.getValue(), getVarValue(cntxtVar));
+            fsp.statements.add(newStatement);        		
+    	}
+        InferJoin join = new InferJoin(fsp, origStatement);
+        join.getProperties().put(InferConstants.INFERRED, InferConstants.TRUE);
+       return join;
+    }
+    
+    protected Resource getVarValue(Var var) {
+        if (var == null)
+            return null;
+        else
+            return (Resource)var.getValue();
+    }
+
+}
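
The heavy lifting is delegated to InferenceEngine.findSameAs, which chains owl:sameAs links in both directions. A hedged sketch of querying that closure directly, assuming the same package; ex:alice is hypothetical:

    import java.util.Set;
    import org.openrdf.model.Resource;
    import org.openrdf.model.impl.URIImpl;

    public class SameAsSketch {
        public static void printClosure(InferenceEngine engine) throws InferenceEngineException {
            Resource alice = new URIImpl("http://example.org/alice");  // hypothetical
            // The result always contains the starting resource itself.
            Set<Resource> closure = engine.findSameAs(alice);
            for (Resource r : closure) {
                System.out.println(r + " owl:sameAs " + alice);
            }
        }
    }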

http://git-wip-us.apache.org/repos/asf/incubator-rya/blob/80faf06d/sail/src/main/java/mvm/rya/rdftriplestore/inference/SubClassOfVisitor.java
----------------------------------------------------------------------
diff --git a/sail/src/main/java/mvm/rya/rdftriplestore/inference/SubClassOfVisitor.java b/sail/src/main/java/mvm/rya/rdftriplestore/inference/SubClassOfVisitor.java
new file mode 100644
index 0000000..664b5af
--- /dev/null
+++ b/sail/src/main/java/mvm/rya/rdftriplestore/inference/SubClassOfVisitor.java
@@ -0,0 +1,108 @@
+package mvm.rya.rdftriplestore.inference;
+
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ * 
+ *   http://www.apache.org/licenses/LICENSE-2.0
+ * 
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied.  See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+
+
+
+import mvm.rya.api.RdfCloudTripleStoreConfiguration;
+import mvm.rya.api.utils.NullableStatementImpl;
+import mvm.rya.rdftriplestore.utils.FixedStatementPattern;
+import org.openrdf.model.URI;
+import org.openrdf.model.vocabulary.RDF;
+import org.openrdf.model.vocabulary.RDFS;
+import org.openrdf.query.algebra.StatementPattern;
+import org.openrdf.query.algebra.Var;
+
+import java.util.Collection;
+import java.util.UUID;
+
+/**
+ * Class SubClassOfVisitor
+ * Date: Mar 29, 2011
+ * Time: 11:28:34 AM
+ */
+public class SubClassOfVisitor extends AbstractInferVisitor {
+
+    public SubClassOfVisitor(RdfCloudTripleStoreConfiguration conf, InferenceEngine inferenceEngine) {
+        super(conf, inferenceEngine);
+        include = conf.isInferSubClassOf();
+    }
+
+    @Override
+    protected void meetSP(StatementPattern node) throws Exception {
+        StatementPattern sp = node.clone();
+        final Var predVar = sp.getPredicateVar();
+        final Var objVar = sp.getObjectVar();
+        final Var conVar = sp.getContextVar();
+        if (predVar != null && objVar != null && objVar.getValue() != null && RDF.TYPE.equals(predVar.getValue())
+                && !EXPANDED.equals(conVar)) {
+            /**
+             * ?type sesame:directSubClassOf ub:Student . ?student rdf:type ?type
+             */
+//            String s = UUID.randomUUID().toString();
+//            Var typeVar = new Var(s);
+//            StatementPattern subClassOf = new StatementPattern(typeVar, new Var("c-" + s, SESAME.DIRECTSUBCLASSOF), objVar, SUBCLASS_EXPANDED);
+//            StatementPattern rdfType = new StatementPattern(sp.getSubjectVar(), sp.getPredicateVar(), typeVar, SUBCLASS_EXPANDED);
+//            InferJoin join = new InferJoin(subClassOf, rdfType);
+//            join.getProperties().put(InferConstants.INFERRED, InferConstants.TRUE);
+//            node.replaceWith(join);
+
+            URI subclassof_uri = (URI) objVar.getValue();
+            Collection<URI> parents = inferenceEngine.findParents(inferenceEngine.getSubClassOfGraph(), subclassof_uri);
+            if (parents != null && parents.size() > 0) {
+                String s = UUID.randomUUID().toString();
+                Var typeVar = new Var(s);
+                FixedStatementPattern fsp = new FixedStatementPattern(typeVar, new Var("c-" + s, RDFS.SUBCLASSOF), objVar, conVar);
+                fsp.statements.add(new NullableStatementImpl(subclassof_uri, RDFS.SUBCLASSOF, subclassof_uri));
+                for (URI u : parents) {
+                    fsp.statements.add(new NullableStatementImpl(u, RDFS.SUBCLASSOF, subclassof_uri));
+                }
+
+                StatementPattern rdfType = new DoNotExpandSP(sp.getSubjectVar(), sp.getPredicateVar(), typeVar, conVar);
+                InferJoin join = new InferJoin(fsp, rdfType);
+                join.getProperties().put(InferConstants.INFERRED, InferConstants.TRUE);
+                node.replaceWith(join);
+            }
+
+//            if (parents != null && parents.size() > 0) {
+//                StatementPatterns statementPatterns = new StatementPatterns();
+//                statementPatterns.patterns.add(node);
+//                Var subjVar = node.getSubjectVar();
+//                for (URI u : parents) {
+//                    statementPatterns.patterns.add(new StatementPattern(subjVar, predVar, new Var(objVar.getName(), u)));
+//                }
+//                node.replaceWith(statementPatterns);
+//            }
+
+//            if (parents != null && parents.size() > 0) {
+//                VarCollection vc = new VarCollection();
+//                vc.setName(objVar.getName());
+//                vc.values.add(objVar);
+//                for (URI u : parents) {
+//                    vc.values.add(new Var(objVar.getName(), u));
+//                }
+//                Var subjVar = node.getSubjectVar();
+//                node.replaceWith(new StatementPattern(subjVar, predVar, vc, node.getContextVar()));
+//            }
+        }
+    }
+
+}
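
The FixedStatementPattern built in meetSP is seeded from the engine's cached subClassOf graph. Note that despite its name, findParents walks incoming subClassOf edges, so for rdf:type queries it collects the subclasses of the queried type. A hedged sketch of inspecting that closure, assuming the same package; the example URI is hypothetical:

    import java.util.Collection;
    import org.openrdf.model.URI;
    import org.openrdf.model.impl.URIImpl;

    public class SubClassSketch {
        public static void printClosure(InferenceEngine engine) {
            URI student = new URIImpl("http://example.org/univ#Student");  // hypothetical
            Collection<URI> subclasses =
                    engine.findParents(engine.getSubClassOfGraph(), student);
            for (URI sub : subclasses) {
                // Each of these statements is inlined into the FixedStatementPattern above.
                System.out.println(sub + " rdfs:subClassOf " + student);
            }
        }
    }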

http://git-wip-us.apache.org/repos/asf/incubator-rya/blob/80faf06d/sail/src/main/java/mvm/rya/rdftriplestore/inference/SubPropertyOfVisitor.java
----------------------------------------------------------------------
diff --git a/sail/src/main/java/mvm/rya/rdftriplestore/inference/SubPropertyOfVisitor.java b/sail/src/main/java/mvm/rya/rdftriplestore/inference/SubPropertyOfVisitor.java
new file mode 100644
index 0000000..4df45a9
--- /dev/null
+++ b/sail/src/main/java/mvm/rya/rdftriplestore/inference/SubPropertyOfVisitor.java
@@ -0,0 +1,121 @@
+package mvm.rya.rdftriplestore.inference;
+
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ * 
+ *   http://www.apache.org/licenses/LICENSE-2.0
+ * 
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied.  See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+
+
+
+import mvm.rya.api.RdfCloudTripleStoreConfiguration;
+import mvm.rya.api.utils.NullableStatementImpl;
+import mvm.rya.rdftriplestore.utils.FixedStatementPattern;
+import org.openrdf.model.URI;
+import org.openrdf.model.vocabulary.RDF;
+import org.openrdf.model.vocabulary.RDFS;
+import org.openrdf.model.vocabulary.SESAME;
+import org.openrdf.query.algebra.StatementPattern;
+import org.openrdf.query.algebra.Var;
+
+import java.util.Set;
+import java.util.UUID;
+
+/**
+ * Expands a statement pattern so it also matches any subproperty of the queried predicate.
+ * Class SubPropertyOfVisitor
+ * Date: Mar 29, 2011
+ * Time: 11:28:34 AM
+ */
+public class SubPropertyOfVisitor extends AbstractInferVisitor {
+
+    public SubPropertyOfVisitor(RdfCloudTripleStoreConfiguration conf, InferenceEngine inferenceEngine) {
+        super(conf, inferenceEngine);
+        include = conf.isInferSubPropertyOf();
+    }
+
+    @Override
+    protected void meetSP(StatementPattern node) throws Exception {
+        StatementPattern sp = node.clone();
+        final Var predVar = sp.getPredicateVar();
+
+        URI pred = (URI) predVar.getValue();
+        String predNamespace = pred.getNamespace();
+
+        final Var objVar = sp.getObjectVar();
+        final Var cntxtVar = sp.getContextVar();
+        if (objVar != null &&
+                !RDF.NAMESPACE.equals(predNamespace) &&
+                !SESAME.NAMESPACE.equals(predNamespace) &&
+                !RDFS.NAMESPACE.equals(predNamespace)
+                && !EXPANDED.equals(cntxtVar)) {
+            /**
+             *
+             * { ?subProp rdfs:subPropertyOf ub:worksFor . ?y ?subProp <http://www.Department0.University0.edu> }
+             *   UNION
+             * { ?y ub:worksFor <http://www.Department0.University0.edu> }
+             */
+//            String s = UUID.randomUUID().toString();
+//            Var subPropVar = new Var(s);
+//            StatementPattern subPropOf = new StatementPattern(subPropVar, new Var("c-" + s, SESAME.DIRECTSUBPROPERTYOF), predVar, EXPANDED);
+//            StatementPattern subPropOf2 = new StatementPattern(sp.getSubjectVar(), subPropVar, objVar, EXPANDED);
+//            InferJoin join = new InferJoin(subPropOf, subPropOf2);
+//            join.getProperties().put(InferConstants.INFERRED, InferConstants.TRUE);
+//            node.replaceWith(join);
+
+//            Collection<URI> parents = inferenceEngine.findParents(inferenceEngine.subPropertyOfGraph, (URI) predVar.getValue());
+//            if (parents != null && parents.size() > 0) {
+//                StatementPatterns statementPatterns = new StatementPatterns();
+//                statementPatterns.patterns.add(node);
+//                Var subjVar = node.getSubjectVar();
+//                for (URI u : parents) {
+//                    statementPatterns.patterns.add(new StatementPattern(subjVar, new Var(predVar.getName(), u), objVar));
+//                }
+//                node.replaceWith(statementPatterns);
+//            }
+//            if (parents != null && parents.size() > 0) {
+//                VarCollection vc = new VarCollection();
+//                vc.setName(predVar.getName());
+//                vc.values.add(predVar);
+//                for (URI u : parents) {
+//                    vc.values.add(new Var(predVar.getName(), u));
+//                }
+//                Var subjVar = node.getSubjectVar();
+//                node.replaceWith(new StatementPattern(subjVar, vc, objVar, node.getContextVar()));
+//            }
+
+            URI subprop_uri = (URI) predVar.getValue();
+            Set<URI> parents = inferenceEngine.findParents(inferenceEngine.getSubPropertyOfGraph(), subprop_uri);
+            if (parents != null && parents.size() > 0) {
+                String s = UUID.randomUUID().toString();
+                Var typeVar = new Var(s);
+                FixedStatementPattern fsp = new FixedStatementPattern(typeVar, new Var("c-" + s, RDFS.SUBPROPERTYOF), predVar, cntxtVar);
+//                fsp.statements.add(new NullableStatementImpl(subprop_uri, RDFS.SUBPROPERTYOF, subprop_uri));
+                //add self
+                parents.add(subprop_uri);
+                for (URI u : parents) {
+                    fsp.statements.add(new NullableStatementImpl(u, RDFS.SUBPROPERTYOF, subprop_uri));
+                }
+
+                StatementPattern rdfType = new DoNotExpandSP(sp.getSubjectVar(), typeVar, sp.getObjectVar(), cntxtVar);
+                InferJoin join = new InferJoin(fsp, rdfType);
+                join.getProperties().put(InferConstants.INFERRED, InferConstants.TRUE);
+                node.replaceWith(join);
+            }
+        }
+    }
+}
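
One detail worth noting above is parents.add(subprop_uri): without adding the queried property itself ("add self"), the rewritten join would match only strict subproperties and drop triples that use the predicate directly. A small sketch of the parent set being assembled, assuming a hypothetical hierarchy urn:ex#headOf rdfs:subPropertyOf urn:ex#worksFor:

    import java.util.HashSet;
    import java.util.Set;

    import org.openrdf.model.URI;
    import org.openrdf.model.impl.ValueFactoryImpl;

    public class SubPropertySelfSketch {
        public static void main(String[] args) {
            URI worksFor = ValueFactoryImpl.getInstance().createURI("urn:ex#worksFor"); // hypothetical
            URI headOf = ValueFactoryImpl.getInstance().createURI("urn:ex#headOf");     // hypothetical

            // Stand-in for inferenceEngine.findParents(getSubPropertyOfGraph(), worksFor)
            Set<URI> parents = new HashSet<URI>();
            parents.add(headOf);

            parents.add(worksFor); // "add self", exactly as the visitor does

            // Each member u becomes a fixed (u, rdfs:subPropertyOf, worksFor) statement,
            // so the join matches both ?s ex:worksFor ?o and ?s ex:headOf ?o
            System.out.println(parents);
        }
    }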

http://git-wip-us.apache.org/repos/asf/incubator-rya/blob/80faf06d/sail/src/main/java/mvm/rya/rdftriplestore/inference/SymmetricPropertyVisitor.java
----------------------------------------------------------------------
diff --git a/sail/src/main/java/mvm/rya/rdftriplestore/inference/SymmetricPropertyVisitor.java b/sail/src/main/java/mvm/rya/rdftriplestore/inference/SymmetricPropertyVisitor.java
new file mode 100644
index 0000000..63c073b
--- /dev/null
+++ b/sail/src/main/java/mvm/rya/rdftriplestore/inference/SymmetricPropertyVisitor.java
@@ -0,0 +1,78 @@
+package mvm.rya.rdftriplestore.inference;
+
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ * 
+ *   http://www.apache.org/licenses/LICENSE-2.0
+ * 
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied.  See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+
+
+
+import mvm.rya.api.RdfCloudTripleStoreConfiguration;
+import org.openrdf.model.URI;
+import org.openrdf.model.vocabulary.RDF;
+import org.openrdf.model.vocabulary.RDFS;
+import org.openrdf.model.vocabulary.SESAME;
+import org.openrdf.query.algebra.StatementPattern;
+import org.openrdf.query.algebra.Union;
+import org.openrdf.query.algebra.Var;
+
+/**
+ * Expands a statement pattern over a symmetric property into a union of both directions.
+ * Class SymmetricPropertyVisitor
+ * Date: Mar 29, 2011
+ * Time: 11:28:34 AM
+ */
+public class SymmetricPropertyVisitor extends AbstractInferVisitor {
+
+    public SymmetricPropertyVisitor(RdfCloudTripleStoreConfiguration conf, InferenceEngine inferenceEngine) {
+        super(conf, inferenceEngine);
+        include = conf.isInferSymmetricProperty();
+    }
+
+    @Override
+    protected void meetSP(StatementPattern node) throws Exception {
+        StatementPattern sp = node.clone();
+
+        final Var predVar = sp.getPredicateVar();
+        URI pred = (URI) predVar.getValue();
+        String predNamespace = pred.getNamespace();
+
+        final Var objVar = sp.getObjectVar();
+        final Var cntxtVar = sp.getContextVar();
+        if (objVar != null &&
+                !RDF.NAMESPACE.equals(predNamespace) &&
+                !SESAME.NAMESPACE.equals(predNamespace) &&
+                !RDFS.NAMESPACE.equals(predNamespace)
+                && !EXPANDED.equals(cntxtVar)) {
+            /**
+             *
+             * { ?a ?pred ?b }
+             *   UNION
+             * { ?b ?pred ?a }
+             */
+
+            URI symmPropUri = (URI) predVar.getValue();
+            if(inferenceEngine.isSymmetricProperty(symmPropUri)) {
+                Var subjVar = sp.getSubjectVar();
+                Union union = new InferUnion();
+                union.setLeftArg(sp);
+                union.setRightArg(new StatementPattern(objVar, predVar, subjVar, cntxtVar));
+                node.replaceWith(union);
+            }
+        }
+    }
+}
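
The symmetric rewrite is a plain two-armed union: the original pattern plus its mirror with subject and object swapped. A minimal sketch using Sesame's stock Union node (the visitor itself swaps in its InferUnion subclass), assuming a hypothetical symmetric predicate urn:ex#marriedTo:

    import org.openrdf.model.URI;
    import org.openrdf.model.impl.ValueFactoryImpl;
    import org.openrdf.query.algebra.StatementPattern;
    import org.openrdf.query.algebra.Union;
    import org.openrdf.query.algebra.Var;

    public class SymmetricSketch {
        public static void main(String[] args) {
            URI marriedTo = ValueFactoryImpl.getInstance().createURI("urn:ex#marriedTo"); // hypothetical
            Var subj = new Var("a");
            Var pred = new Var("p", marriedTo);
            Var obj = new Var("b");

            // { ?a ex:marriedTo ?b } UNION { ?b ex:marriedTo ?a }
            Union union = new Union(
                    new StatementPattern(subj, pred, obj),
                    new StatementPattern(obj, pred, subj));
            System.out.println(union);
        }
    }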

http://git-wip-us.apache.org/repos/asf/incubator-rya/blob/80faf06d/sail/src/main/java/mvm/rya/rdftriplestore/inference/TransitivePropertyVisitor.java
----------------------------------------------------------------------
diff --git a/sail/src/main/java/mvm/rya/rdftriplestore/inference/TransitivePropertyVisitor.java b/sail/src/main/java/mvm/rya/rdftriplestore/inference/TransitivePropertyVisitor.java
new file mode 100644
index 0000000..2f795fb
--- /dev/null
+++ b/sail/src/main/java/mvm/rya/rdftriplestore/inference/TransitivePropertyVisitor.java
@@ -0,0 +1,69 @@
+package mvm.rya.rdftriplestore.inference;
+
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ * 
+ *   http://www.apache.org/licenses/LICENSE-2.0
+ * 
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied.  See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+
+
+
+import mvm.rya.api.RdfCloudTripleStoreConfiguration;
+import mvm.rya.rdftriplestore.utils.TransitivePropertySP;
+import org.openrdf.model.URI;
+import org.openrdf.model.vocabulary.RDF;
+import org.openrdf.model.vocabulary.RDFS;
+import org.openrdf.model.vocabulary.SESAME;
+import org.openrdf.query.algebra.StatementPattern;
+import org.openrdf.query.algebra.Var;
+
+/**
+ * Marks statement patterns over transitive properties for transitive evaluation.
+ * Class TransitivePropertyVisitor
+ * Date: Mar 29, 2011
+ * Time: 11:28:34 AM
+ */
+public class TransitivePropertyVisitor extends AbstractInferVisitor {
+
+    public TransitivePropertyVisitor(RdfCloudTripleStoreConfiguration conf, InferenceEngine inferenceEngine) {
+        super(conf, inferenceEngine);
+        include = conf.isInferTransitiveProperty();
+    }
+
+    @Override
+    protected void meetSP(StatementPattern node) throws Exception {
+        StatementPattern sp = node.clone();
+        final Var predVar = sp.getPredicateVar();
+
+        URI pred = (URI) predVar.getValue();
+        String predNamespace = pred.getNamespace();
+
+        final Var objVar = sp.getObjectVar();
+        final Var cntxtVar = sp.getContextVar();
+        if (objVar != null &&
+                !RDF.NAMESPACE.equals(predNamespace) &&
+                !SESAME.NAMESPACE.equals(predNamespace) &&
+                !RDFS.NAMESPACE.equals(predNamespace)
+                && !EXPANDED.equals(cntxtVar)) {
+
+            URI transPropUri = (URI) predVar.getValue();
+            if (inferenceEngine.isTransitiveProperty(transPropUri)) {
+                node.replaceWith(new TransitivePropertySP(sp.getSubjectVar(), sp.getPredicateVar(), sp.getObjectVar(), sp.getContextVar()));
+            }
+        }
+    }
+}
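
Unlike the other visitors, this one joins and unions nothing: it swaps the pattern for a TransitivePropertySP (defined further down), and evaluation later recognizes the node type and chases the property transitively. A sketch of the swap, assuming a hypothetical transitive predicate urn:ex#ancestorOf:

    import mvm.rya.rdftriplestore.utils.TransitivePropertySP;
    import org.openrdf.model.URI;
    import org.openrdf.model.impl.ValueFactoryImpl;
    import org.openrdf.query.algebra.StatementPattern;
    import org.openrdf.query.algebra.Var;

    public class TransitiveSketch {
        public static void main(String[] args) {
            URI ancestorOf = ValueFactoryImpl.getInstance().createURI("urn:ex#ancestorOf"); // hypothetical
            StatementPattern sp = new StatementPattern(
                    new Var("x"), new Var("p", ancestorOf), new Var("y"));

            // Same subject/predicate/object vars; only the node type changes
            TransitivePropertySP marker = new TransitivePropertySP(
                    sp.getSubjectVar(), sp.getPredicateVar(), sp.getObjectVar());
            System.out.println(marker); // stands where node.replaceWith(marker) would put it
        }
    }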

http://git-wip-us.apache.org/repos/asf/incubator-rya/blob/80faf06d/sail/src/main/java/mvm/rya/rdftriplestore/namespace/NamespaceManager.java
----------------------------------------------------------------------
diff --git a/sail/src/main/java/mvm/rya/rdftriplestore/namespace/NamespaceManager.java b/sail/src/main/java/mvm/rya/rdftriplestore/namespace/NamespaceManager.java
new file mode 100644
index 0000000..dd8e4fa
--- /dev/null
+++ b/sail/src/main/java/mvm/rya/rdftriplestore/namespace/NamespaceManager.java
@@ -0,0 +1,167 @@
+package mvm.rya.rdftriplestore.namespace;
+
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ * 
+ *   http://www.apache.org/licenses/LICENSE-2.0
+ * 
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied.  See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+
+
+
+import info.aduna.iteration.CloseableIteration;
+import mvm.rya.api.RdfCloudTripleStoreConfiguration;
+import mvm.rya.api.persist.RdfDAOException;
+import mvm.rya.api.persist.RyaDAO;
+import mvm.rya.api.persist.RyaNamespaceManager;
+import net.sf.ehcache.Cache;
+import net.sf.ehcache.CacheManager;
+import net.sf.ehcache.Element;
+import net.sf.ehcache.Statistics;
+import org.openrdf.model.Namespace;
+import org.openrdf.sail.SailException;
+
+import java.io.InputStream;
+
+/**
+ * Class NamespaceManager
+ * Date: Oct 17, 2011
+ * Time: 8:25:33 AM
+ */
+public class NamespaceManager {
+    CacheManager cacheManager;
+    Cache namespaceCache;
+    public static final String NAMESPACE_CACHE_NAME = "namespace";
+    private RdfCloudTripleStoreConfiguration conf;
+    private RyaNamespaceManager namespaceManager;
+
+    public NamespaceManager(RyaDAO ryaDAO, RdfCloudTripleStoreConfiguration conf) {
+        this.conf = conf;
+        initialize(ryaDAO);
+    }
+
+    protected void initialize(RyaDAO ryaDAO) {
+        try {
+            this.namespaceManager = ryaDAO.getNamespaceManager();
+
+            InputStream cacheConfigStream = Thread.currentThread().getContextClassLoader().getResourceAsStream("ehcache.xml");
+            if (cacheConfigStream == null) {
+                this.cacheManager = CacheManager.create();
+//                throw new RuntimeException("Cache Configuration does not exist");
+            } else {
+                this.cacheManager = CacheManager.create(cacheConfigStream);
+            }
+            this.namespaceCache = cacheManager.getCache(NAMESPACE_CACHE_NAME);
+            if (namespaceCache == null) {
+                cacheManager.addCache(NAMESPACE_CACHE_NAME);
+            }
+
+
+        } catch (Exception e) {
+            throw new RuntimeException(e);
+        }
+    }
+
+    public void shutdown() {
+        if (cacheManager != null) {
+            cacheManager.shutdown();
+            cacheManager = null;
+        }
+    }
+
+    public void addNamespace(String pfx, String namespace) {
+        try {
+            String savedNamespace = getNamespace(pfx);
+            //if the saved ns is the same one being saved, don't do anything
+            if (savedNamespace != null && savedNamespace.equals(namespace)) {
+                return;
+            }
+
+            namespaceCache.put(new Element(pfx, namespace));
+            namespaceManager.addNamespace(pfx, namespace);
+        } catch (Exception e) {
+            throw new RuntimeException(e);
+        }
+    }
+
+    public String getNamespace(String pfx) {
+        //try in the cache first
+        Element element = namespaceCache.get(pfx);
+        if (element != null) {
+            return (String) element.getValue();
+        }
+
+        try {
+            String namespace = namespaceManager.getNamespace(pfx);
+            if (namespace != null) {
+                namespaceCache.put(new Element(pfx, namespace));
+                return namespace;
+            }
+        } catch (Exception e) {
+            //TODO: print or log?
+        }
+        return null;
+
+    }
+
+    public void removeNamespace(String pfx) {
+        try {
+            namespaceCache.remove(pfx);
+            namespaceManager.removeNamespace(pfx);
+        } catch (Exception e) {
+            throw new RuntimeException(e);
+        }
+    }
+
+    public CloseableIteration<? extends Namespace, SailException> iterateNamespace() {
+        try {
+            //for this one we will go directly to the store
+            final CloseableIteration<? extends Namespace, RdfDAOException> iteration = namespaceManager.iterateNamespace();
+            return new CloseableIteration<Namespace, SailException>() {
+                @Override
+                public void close() throws SailException {
+                    iteration.close();
+                }
+
+                @Override
+                public boolean hasNext() throws SailException {
+                    return iteration.hasNext();
+                }
+
+                @Override
+                public Namespace next() throws SailException {
+                    return iteration.next();
+                }
+
+                @Override
+                public void remove() throws SailException {
+                    iteration.remove();
+                }
+            };
+        } catch (Exception e) {
+            throw new RuntimeException(e);
+        }
+    }
+
+    public void printStatistics() {
+        Statistics statistics = namespaceCache.getStatistics();
+        if (statistics != null) { //TODO: use a logger please
+            System.out.println("Namespace Cache Statistics: ");
+            System.out.println("--Hits: \t" + statistics.getCacheHits());
+            System.out.println("--Misses: \t" + statistics.getCacheMisses());
+            System.out.println("--Total Count: \t" + statistics.getObjectCount());
+        }
+    }
+}
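
Usage is write-through: addNamespace updates both the Ehcache entry and the underlying RyaNamespaceManager, while getNamespace falls back to the store on a cache miss and re-caches the hit. A hedged sketch, assuming a RyaDAO and RdfCloudTripleStoreConfiguration already configured elsewhere in your Rya setup:

    import mvm.rya.api.RdfCloudTripleStoreConfiguration;
    import mvm.rya.api.persist.RyaDAO;
    import mvm.rya.rdftriplestore.namespace.NamespaceManager;

    public class NamespaceManagerSketch {
        // ryaDAO and conf are assumed inputs; they are not constructed here
        public static void demo(RyaDAO ryaDAO, RdfCloudTripleStoreConfiguration conf) {
            NamespaceManager ns = new NamespaceManager(ryaDAO, conf);
            try {
                ns.addNamespace("foaf", "http://xmlns.com/foaf/0.1/");
                System.out.println(ns.getNamespace("foaf")); // cache hit on repeat calls
            } finally {
                ns.shutdown(); // releases the CacheManager
            }
        }
    }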

http://git-wip-us.apache.org/repos/asf/incubator-rya/blob/80faf06d/sail/src/main/java/mvm/rya/rdftriplestore/utils/CombineContextsRdfInserter.java
----------------------------------------------------------------------
diff --git a/sail/src/main/java/mvm/rya/rdftriplestore/utils/CombineContextsRdfInserter.java b/sail/src/main/java/mvm/rya/rdftriplestore/utils/CombineContextsRdfInserter.java
new file mode 100644
index 0000000..b7f7623
--- /dev/null
+++ b/sail/src/main/java/mvm/rya/rdftriplestore/utils/CombineContextsRdfInserter.java
@@ -0,0 +1,165 @@
+package mvm.rya.rdftriplestore.utils;
+
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ * 
+ *   http://www.apache.org/licenses/LICENSE-2.0
+ * 
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied.  See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+
+
+
+import org.openrdf.OpenRDFUtil;
+import org.openrdf.model.*;
+import org.openrdf.repository.RepositoryConnection;
+import org.openrdf.repository.RepositoryException;
+import org.openrdf.rio.RDFHandlerException;
+import org.openrdf.rio.helpers.RDFHandlerBase;
+
+import java.util.HashMap;
+import java.util.Map;
+
+/**
+ * An RDFHandler that adds parsed statements to a RepositoryConnection,
+ * appending any enforced contexts to each statement's own context.
+ * Class CombineContextsRdfInserter
+ * Date: 3/23/12
+ * Time: 9:50 AM
+ */
+public class CombineContextsRdfInserter extends RDFHandlerBase {
+
+    private final RepositoryConnection con;
+    private Resource[] contexts = new Resource[0];
+    private boolean preserveBNodeIDs;
+    private final Map<String, String> namespaceMap;
+    private final Map<String, BNode> bNodesMap;
+
+    public CombineContextsRdfInserter(RepositoryConnection con) {
+        this.con = con;
+        preserveBNodeIDs = true;
+        namespaceMap = new HashMap<String, String>();
+        bNodesMap = new HashMap<String, BNode>();
+    }
+
+    public void setPreserveBNodeIDs(boolean preserveBNodeIDs) {
+        this.preserveBNodeIDs = preserveBNodeIDs;
+    }
+
+    public boolean preservesBNodeIDs() {
+        return preserveBNodeIDs;
+    }
+
+    public void enforceContext(Resource... contexts) {
+        OpenRDFUtil.verifyContextNotNull(contexts);
+        this.contexts = contexts;
+    }
+
+    public boolean enforcesContext() {
+        return contexts.length != 0;
+    }
+
+    public Resource[] getContexts() {
+        return contexts;
+    }
+
+    @Override
+    public void endRDF()
+            throws RDFHandlerException {
+        for (Map.Entry<String, String> entry : namespaceMap.entrySet()) {
+            String prefix = entry.getKey();
+            String name = entry.getValue();
+
+            try {
+                if (con.getNamespace(prefix) == null) {
+                    con.setNamespace(prefix, name);
+                }
+            } catch (RepositoryException e) {
+                throw new RDFHandlerException(e);
+            }
+        }
+
+        namespaceMap.clear();
+        bNodesMap.clear();
+    }
+
+    @Override
+    public void handleNamespace(String prefix, String name) {
+        // FIXME: set namespaces directly when they are properly handled wrt
+        // rollback
+        // don't replace earlier declarations
+        if (prefix != null && !namespaceMap.containsKey(prefix)) {
+            namespaceMap.put(prefix, name);
+        }
+    }
+
+    @Override
+    public void handleStatement(Statement st)
+            throws RDFHandlerException {
+        Resource subj = st.getSubject();
+        URI pred = st.getPredicate();
+        Value obj = st.getObject();
+        Resource ctxt = st.getContext();
+
+        if (!preserveBNodeIDs) {
+            if (subj instanceof BNode) {
+                subj = mapBNode((BNode) subj);
+            }
+
+            if (obj instanceof BNode) {
+                obj = mapBNode((BNode) obj);
+            }
+
+            if (!enforcesContext() && ctxt instanceof BNode) {
+                ctxt = mapBNode((BNode) ctxt);
+            }
+        }
+
+        try {
+            if (enforcesContext()) {
+                Resource[] ctxts = contexts;
+                if (ctxt != null) {
+                    ctxts = combineContexts(contexts, ctxt);
+                }
+                con.add(subj, pred, obj, ctxts);
+            } else {
+                con.add(subj, pred, obj, ctxt);
+            }
+        } catch (RepositoryException e) {
+            throw new RDFHandlerException(e);
+        }
+    }
+
+    private BNode mapBNode(BNode bNode) {
+        BNode result = bNodesMap.get(bNode.getID());
+
+        if (result == null) {
+            result = con.getRepository().getValueFactory().createBNode();
+            bNodesMap.put(bNode.getID(), result);
+        }
+
+        return result;
+    }
+
+    public static Resource[] combineContexts(Resource[] contexts, Resource ctxt) {
+        if (contexts == null || ctxt == null) {
+            throw new IllegalArgumentException("Contexts cannot be null");
+        }
+        int length = contexts.length;
+        Resource[] ret = new Resource[length + 1];
+        System.arraycopy(contexts, 0, ret, 0, length);
+        ret[length] = ctxt;
+        return ret;
+    }
+}
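
The static combineContexts helper is what separates this class from Sesame's stock RDFInserter: an enforced context array is appended with the statement's own context rather than replacing it, so the statement lands in both. A quick standalone check, using hypothetical urn:ctx context URIs:

    import mvm.rya.rdftriplestore.utils.CombineContextsRdfInserter;
    import org.openrdf.model.Resource;
    import org.openrdf.model.impl.ValueFactoryImpl;

    public class CombineContextsSketch {
        public static void main(String[] args) {
            Resource enforced = ValueFactoryImpl.getInstance().createURI("urn:ctx#enforced"); // hypothetical
            Resource fromData = ValueFactoryImpl.getInstance().createURI("urn:ctx#fromData"); // hypothetical

            Resource[] combined = CombineContextsRdfInserter.combineContexts(
                    new Resource[] { enforced }, fromData);
            System.out.println(combined.length); // 2: the statement lands in both contexts
        }
    }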

http://git-wip-us.apache.org/repos/asf/incubator-rya/blob/80faf06d/sail/src/main/java/mvm/rya/rdftriplestore/utils/DefaultStatistics.java
----------------------------------------------------------------------
diff --git a/sail/src/main/java/mvm/rya/rdftriplestore/utils/DefaultStatistics.java b/sail/src/main/java/mvm/rya/rdftriplestore/utils/DefaultStatistics.java
new file mode 100644
index 0000000..d86140b
--- /dev/null
+++ b/sail/src/main/java/mvm/rya/rdftriplestore/utils/DefaultStatistics.java
@@ -0,0 +1,58 @@
+package mvm.rya.rdftriplestore.utils;
+
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ * 
+ *   http://www.apache.org/licenses/LICENSE-2.0
+ * 
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied.  See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+
+
+
+import org.openrdf.query.algebra.StatementPattern;
+import org.openrdf.query.algebra.evaluation.impl.EvaluationStatistics;
+
+/**
+ * Class DefaultStatistics
+ * Date: Apr 12, 2011
+ * Time: 1:31:05 PM
+ */
+public class DefaultStatistics extends EvaluationStatistics {
+
+    public DefaultStatistics() {
+    }
+
+    @Override
+    protected CardinalityCalculator createCardinalityCalculator() {
+        return new DefaultCardinalityCalculator();
+    }
+
+    public class DefaultCardinalityCalculator extends CardinalityCalculator {
+
+        double count = 0.0;
+
+        @Override
+        protected double getCardinality(StatementPattern sp) {
+            //based on how many (subj, pred, obj) are set
+//            int numSet = 3;
+//            if (sp.getSubjectVar().hasValue()) numSet--;
+//            if (sp.getPredicateVar().hasValue()) numSet--;
+//            if (sp.getObjectVar().hasValue()) numSet--;
+//            return numSet;
+            return count++;
+        }
+    }
+
+}
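
The counter means each statement pattern's estimated cardinality is simply its visit order, so earlier patterns always look cheaper and the optimizer effectively preserves the order in which patterns are visited. A trivial standalone mimic of that counter idea (not the Sesame API itself):

    public class VisitOrderCardinalitySketch {
        public static void main(String[] args) {
            double count = 0.0; // same idea as DefaultCardinalityCalculator.count
            String[] patterns = { "?a ?p ?b", "?b ?q ?c", "?c ?r ?d" }; // hypothetical patterns
            for (String sp : patterns) {
                System.out.println(sp + " -> cardinality " + count++); // 0.0, 1.0, 2.0
            }
        }
    }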

http://git-wip-us.apache.org/repos/asf/incubator-rya/blob/80faf06d/sail/src/main/java/mvm/rya/rdftriplestore/utils/FixedStatementPattern.java
----------------------------------------------------------------------
diff --git a/sail/src/main/java/mvm/rya/rdftriplestore/utils/FixedStatementPattern.java b/sail/src/main/java/mvm/rya/rdftriplestore/utils/FixedStatementPattern.java
new file mode 100644
index 0000000..891e122
--- /dev/null
+++ b/sail/src/main/java/mvm/rya/rdftriplestore/utils/FixedStatementPattern.java
@@ -0,0 +1,59 @@
+package mvm.rya.rdftriplestore.utils;
+
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ * 
+ *   http://www.apache.org/licenses/LICENSE-2.0
+ * 
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied.  See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+
+
+
+import org.openrdf.model.Statement;
+import org.openrdf.query.algebra.StatementPattern;
+import org.openrdf.query.algebra.Var;
+
+import java.util.ArrayList;
+import java.util.Collection;
+
+/**
+ * A StatementPattern that returns a fixed, pre-loaded collection of statements instead of scanning the store.
+ *
+ * Class FixedStatementPattern
+ * Date: Mar 12, 2012
+ * Time: 2:42:06 PM
+ */
+public class FixedStatementPattern extends StatementPattern {
+    public Collection<Statement> statements = new ArrayList<Statement>();
+
+    public FixedStatementPattern() {
+    }
+
+    public FixedStatementPattern(Var subject, Var predicate, Var object) {
+        super(subject, predicate, object);
+    }
+
+    public FixedStatementPattern(Scope scope, Var subject, Var predicate, Var object) {
+        super(scope, subject, predicate, object);
+    }
+
+    public FixedStatementPattern(Var subject, Var predicate, Var object, Var context) {
+        super(subject, predicate, object, context);
+    }
+
+    public FixedStatementPattern(Scope scope, Var subjVar, Var predVar, Var objVar, Var conVar) {
+        super(scope, subjVar, predVar, objVar, conVar);
+    }
+}

http://git-wip-us.apache.org/repos/asf/incubator-rya/blob/80faf06d/sail/src/main/java/mvm/rya/rdftriplestore/utils/TransitivePropertySP.java
----------------------------------------------------------------------
diff --git a/sail/src/main/java/mvm/rya/rdftriplestore/utils/TransitivePropertySP.java b/sail/src/main/java/mvm/rya/rdftriplestore/utils/TransitivePropertySP.java
new file mode 100644
index 0000000..4f2e378
--- /dev/null
+++ b/sail/src/main/java/mvm/rya/rdftriplestore/utils/TransitivePropertySP.java
@@ -0,0 +1,52 @@
+package mvm.rya.rdftriplestore.utils;
+
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ * 
+ *   http://www.apache.org/licenses/LICENSE-2.0
+ * 
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied.  See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+
+
+
+import org.openrdf.query.algebra.StatementPattern;
+import org.openrdf.query.algebra.Var;
+
+/**
+ * Marker StatementPattern noting that the predicate is transitive and should be evaluated transitively.
+ * Class TransitivePropertySP
+ * Date: Mar 14, 2012
+ * Time: 5:23:10 PM
+ */
+public class TransitivePropertySP extends StatementPattern {
+
+    public TransitivePropertySP() {
+    }
+
+    public TransitivePropertySP(Var subject, Var predicate, Var object) {
+        super(subject, predicate, object);
+    }
+
+    public TransitivePropertySP(Scope scope, Var subject, Var predicate, Var object) {
+        super(scope, subject, predicate, object);
+    }
+
+    public TransitivePropertySP(Var subject, Var predicate, Var object, Var context) {
+        super(subject, predicate, object, context);
+    }
+
+    public TransitivePropertySP(Scope scope, Var subjVar, Var predVar, Var objVar, Var conVar) {
+        super(scope, subjVar, predVar, objVar, conVar);
+    }
+}

http://git-wip-us.apache.org/repos/asf/incubator-rya/blob/80faf06d/sail/src/main/resources/META-INF/org.openrdf.store.schemas
----------------------------------------------------------------------
diff --git a/sail/src/main/resources/META-INF/org.openrdf.store.schemas b/sail/src/main/resources/META-INF/org.openrdf.store.schemas
new file mode 100644
index 0000000..ad9993f
--- /dev/null
+++ b/sail/src/main/resources/META-INF/org.openrdf.store.schemas
@@ -0,0 +1 @@
+META-INF/schemas/cloudbasestore-schema.ttl
\ No newline at end of file

http://git-wip-us.apache.org/repos/asf/incubator-rya/blob/80faf06d/sail/src/main/resources/META-INF/schemas/cloudbasestore-schema.ttl
----------------------------------------------------------------------
diff --git a/sail/src/main/resources/META-INF/schemas/cloudbasestore-schema.ttl b/sail/src/main/resources/META-INF/schemas/cloudbasestore-schema.ttl
new file mode 100644
index 0000000..708a964
--- /dev/null
+++ b/sail/src/main/resources/META-INF/schemas/cloudbasestore-schema.ttl
@@ -0,0 +1,20 @@
+@prefix rdfs: <http://www.w3.org/2000/01/rdf-schema#>.
+@prefix rep: <http://www.openrdf.org/config/repository#>.
+@prefix sr: <http://www.openrdf.org/config/repository/sail#>.
+@prefix sail: <http://www.openrdf.org/config/sail#>.
+@prefix cbs: <http://www.openrdf.org/config/sail/cloudbasestore#>.
+
+[] a rep:Repository ;
+   rep:repositoryID "{%Repository ID|cloudbasestore%}" ;
+   rdfs:label "{%Repository title|Cloudbase store%}" ;
+   rep:repositoryImpl [
+      rep:repositoryType "openrdf:SailRepository" ;
+      sr:sailImpl [
+         sail:sailType "openrdf:RdfCloudTripleStore" ;
+         cbs:server "{%CBSail server|stratus13%}" ;
+         cbs:port "{%CBSail port|2181%}" ;
+         cbs:instance "{%CBSail instance|stratus%}" ;
+         cbs:user "{%CBSail user|root%}" ;
+         cbs:password "{%CBSail password|password%}" ;
+      ]
+   ].
\ No newline at end of file

http://git-wip-us.apache.org/repos/asf/incubator-rya/blob/80faf06d/sail/src/main/resources/META-INF/services/org.openrdf.sail.config.SailFactory
----------------------------------------------------------------------
diff --git a/sail/src/main/resources/META-INF/services/org.openrdf.sail.config.SailFactory b/sail/src/main/resources/META-INF/services/org.openrdf.sail.config.SailFactory
new file mode 100644
index 0000000..09a0661
--- /dev/null
+++ b/sail/src/main/resources/META-INF/services/org.openrdf.sail.config.SailFactory
@@ -0,0 +1 @@
+mvm.rya.rdftriplestore.RdfCloudTripleStoreFactory
\ No newline at end of file

http://git-wip-us.apache.org/repos/asf/incubator-rya/blob/80faf06d/sail/src/main/resources/ehcache.xml
----------------------------------------------------------------------
diff --git a/sail/src/main/resources/ehcache.xml b/sail/src/main/resources/ehcache.xml
new file mode 100644
index 0000000..7049c00
--- /dev/null
+++ b/sail/src/main/resources/ehcache.xml
@@ -0,0 +1,46 @@
+<?xml version='1.0'?>
+<!--
+Licensed to the Apache Software Foundation (ASF) under one
+or more contributor license agreements.  See the NOTICE file
+distributed with this work for additional information
+regarding copyright ownership.  The ASF licenses this file
+to you under the Apache License, Version 2.0 (the
+"License"); you may not use this file except in compliance
+with the License.  You may obtain a copy of the License at
+
+  http://www.apache.org/licenses/LICENSE-2.0
+
+Unless required by applicable law or agreed to in writing,
+software distributed under the License is distributed on an
+"AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+KIND, either express or implied.  See the License for the
+specific language governing permissions and limitations
+under the License.
+-->
+
+<ehcache xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance" xsi:noNamespaceSchemaLocation="../config/ehcache.xsd"
+         updateCheck="false">
+    <diskStore path="java.io.tmpdir"/>
+    <cache name="namespace"
+            maxElementsInMemory="1000"
+            eternal="false"
+            timeToIdleSeconds="3600"
+            timeToLiveSeconds="3600"
+            overflowToDisk="false"
+            diskPersistent="false"
+            memoryStoreEvictionPolicy="FIFO"
+            >
+    </cache>
+    <defaultCache
+            maxElementsInMemory="50000"
+            eternal="false"
+            timeToIdleSeconds="0"
+            timeToLiveSeconds="0"
+            overflowToDisk="true"
+            maxElementsOnDisk="1000000"
+            diskPersistent="true"
+            diskExpiryThreadIntervalSeconds="120"
+            memoryStoreEvictionPolicy="FIFO"
+            >
+    </defaultCache>
+</ehcache>
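
The namespace cache above is deliberately small and expiring (1000 in-memory entries, one-hour idle and live times, no disk overflow), while defaultCache spills to disk; NamespaceManager loads this file from the classpath as shown earlier. A sketch of exercising the same configuration directly through the Ehcache API, assuming ehcache.xml is on the classpath:

    import java.io.InputStream;

    import net.sf.ehcache.Cache;
    import net.sf.ehcache.CacheManager;
    import net.sf.ehcache.Element;

    public class EhcacheConfigSketch {
        public static void main(String[] args) {
            InputStream cfg = Thread.currentThread().getContextClassLoader()
                    .getResourceAsStream("ehcache.xml");
            CacheManager manager = cfg == null ? CacheManager.create() : CacheManager.create(cfg);
            Cache namespaces = manager.getCache("namespace"); // defined in the XML above
            namespaces.put(new Element("rdfs", "http://www.w3.org/2000/01/rdf-schema#"));
            System.out.println(namespaces.get("rdfs").getValue());
            manager.shutdown();
        }
    }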


[33/56] [abbrv] incubator-rya git commit: RYA-7 POM and License Clean-up for Apache Move

Posted by mi...@apache.org.
http://git-wip-us.apache.org/repos/asf/incubator-rya/blob/80faf06d/extras/indexing/src/test/java/mvm/rya/indexing/IndexPlanValidator/TupleExecutionPlanGeneratorTest.java
----------------------------------------------------------------------
diff --git a/extras/indexing/src/test/java/mvm/rya/indexing/IndexPlanValidator/TupleExecutionPlanGeneratorTest.java b/extras/indexing/src/test/java/mvm/rya/indexing/IndexPlanValidator/TupleExecutionPlanGeneratorTest.java
index 211343d..ffb7b2d 100644
--- a/extras/indexing/src/test/java/mvm/rya/indexing/IndexPlanValidator/TupleExecutionPlanGeneratorTest.java
+++ b/extras/indexing/src/test/java/mvm/rya/indexing/IndexPlanValidator/TupleExecutionPlanGeneratorTest.java
@@ -1,5 +1,25 @@
 package mvm.rya.indexing.IndexPlanValidator;
 
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ * 
+ *   http://www.apache.org/licenses/LICENSE-2.0
+ * 
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied.  See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+
+
 import static org.junit.Assert.*;
 
 import java.util.Iterator;

http://git-wip-us.apache.org/repos/asf/incubator-rya/blob/80faf06d/extras/indexing/src/test/java/mvm/rya/indexing/IndexPlanValidator/TupleReArrangerTest.java
----------------------------------------------------------------------
diff --git a/extras/indexing/src/test/java/mvm/rya/indexing/IndexPlanValidator/TupleReArrangerTest.java b/extras/indexing/src/test/java/mvm/rya/indexing/IndexPlanValidator/TupleReArrangerTest.java
index 4f37d97..96466cb 100644
--- a/extras/indexing/src/test/java/mvm/rya/indexing/IndexPlanValidator/TupleReArrangerTest.java
+++ b/extras/indexing/src/test/java/mvm/rya/indexing/IndexPlanValidator/TupleReArrangerTest.java
@@ -1,5 +1,25 @@
 package mvm.rya.indexing.IndexPlanValidator;
 
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ * 
+ *   http://www.apache.org/licenses/LICENSE-2.0
+ * 
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied.  See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+
+
 import java.util.List;
 
 import junit.framework.Assert;

http://git-wip-us.apache.org/repos/asf/incubator-rya/blob/80faf06d/extras/indexing/src/test/java/mvm/rya/indexing/IndexPlanValidator/ValidIndexCombinationGeneratorTest.java
----------------------------------------------------------------------
diff --git a/extras/indexing/src/test/java/mvm/rya/indexing/IndexPlanValidator/ValidIndexCombinationGeneratorTest.java b/extras/indexing/src/test/java/mvm/rya/indexing/IndexPlanValidator/ValidIndexCombinationGeneratorTest.java
index 830b9cd..38f7813 100644
--- a/extras/indexing/src/test/java/mvm/rya/indexing/IndexPlanValidator/ValidIndexCombinationGeneratorTest.java
+++ b/extras/indexing/src/test/java/mvm/rya/indexing/IndexPlanValidator/ValidIndexCombinationGeneratorTest.java
@@ -1,5 +1,25 @@
 package mvm.rya.indexing.IndexPlanValidator;
 
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ * 
+ *   http://www.apache.org/licenses/LICENSE-2.0
+ * 
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied.  See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+
+
 
 import java.util.ArrayList;
 import java.util.Iterator;

http://git-wip-us.apache.org/repos/asf/incubator-rya/blob/80faf06d/extras/indexing/src/test/java/mvm/rya/indexing/IndexPlanValidator/VarConstantIndexListPrunerTest.java
----------------------------------------------------------------------
diff --git a/extras/indexing/src/test/java/mvm/rya/indexing/IndexPlanValidator/VarConstantIndexListPrunerTest.java b/extras/indexing/src/test/java/mvm/rya/indexing/IndexPlanValidator/VarConstantIndexListPrunerTest.java
index 19fc84c..181d4fb 100644
--- a/extras/indexing/src/test/java/mvm/rya/indexing/IndexPlanValidator/VarConstantIndexListPrunerTest.java
+++ b/extras/indexing/src/test/java/mvm/rya/indexing/IndexPlanValidator/VarConstantIndexListPrunerTest.java
@@ -1,5 +1,25 @@
 package mvm.rya.indexing.IndexPlanValidator;
 
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ * 
+ *   http://www.apache.org/licenses/LICENSE-2.0
+ * 
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied.  See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+
+
 import static org.junit.Assert.*;
 
 import java.util.ArrayList;

http://git-wip-us.apache.org/repos/asf/incubator-rya/blob/80faf06d/extras/indexing/src/test/java/mvm/rya/indexing/accumulo/StatementSerializerTest.java
----------------------------------------------------------------------
diff --git a/extras/indexing/src/test/java/mvm/rya/indexing/accumulo/StatementSerializerTest.java b/extras/indexing/src/test/java/mvm/rya/indexing/accumulo/StatementSerializerTest.java
index 86421e7..88e2841 100644
--- a/extras/indexing/src/test/java/mvm/rya/indexing/accumulo/StatementSerializerTest.java
+++ b/extras/indexing/src/test/java/mvm/rya/indexing/accumulo/StatementSerializerTest.java
@@ -1,25 +1,26 @@
 package mvm.rya.indexing.accumulo;
 
 /*
- * #%L
- * mvm.rya.indexing.accumulo
- * %%
- * Copyright (C) 2014 Rya
- * %%
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * You may obtain a copy of the License at
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
  * 
- *      http://www.apache.org/licenses/LICENSE-2.0
+ *   http://www.apache.org/licenses/LICENSE-2.0
  * 
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- * #L%
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied.  See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
  */
 
+
+
 import junit.framework.Assert;
 import mvm.rya.indexing.accumulo.StatementSerializer;
 

http://git-wip-us.apache.org/repos/asf/incubator-rya/blob/80faf06d/extras/indexing/src/test/java/mvm/rya/indexing/accumulo/entity/AccumuloDocIndexerTest.java
----------------------------------------------------------------------
diff --git a/extras/indexing/src/test/java/mvm/rya/indexing/accumulo/entity/AccumuloDocIndexerTest.java b/extras/indexing/src/test/java/mvm/rya/indexing/accumulo/entity/AccumuloDocIndexerTest.java
index 0bf2a21..e7e06d9 100644
--- a/extras/indexing/src/test/java/mvm/rya/indexing/accumulo/entity/AccumuloDocIndexerTest.java
+++ b/extras/indexing/src/test/java/mvm/rya/indexing/accumulo/entity/AccumuloDocIndexerTest.java
@@ -1,5 +1,25 @@
 package mvm.rya.indexing.accumulo.entity;
 
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ * 
+ *   http://www.apache.org/licenses/LICENSE-2.0
+ * 
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied.  See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+
+
 import info.aduna.iteration.CloseableIteration;
 
 import java.util.Collection;

http://git-wip-us.apache.org/repos/asf/incubator-rya/blob/80faf06d/extras/indexing/src/test/java/mvm/rya/indexing/accumulo/entity/EntityOptimizerTest.java
----------------------------------------------------------------------
diff --git a/extras/indexing/src/test/java/mvm/rya/indexing/accumulo/entity/EntityOptimizerTest.java b/extras/indexing/src/test/java/mvm/rya/indexing/accumulo/entity/EntityOptimizerTest.java
index eb76b81..b6d5548 100644
--- a/extras/indexing/src/test/java/mvm/rya/indexing/accumulo/entity/EntityOptimizerTest.java
+++ b/extras/indexing/src/test/java/mvm/rya/indexing/accumulo/entity/EntityOptimizerTest.java
@@ -1,5 +1,25 @@
 package mvm.rya.indexing.accumulo.entity;
 
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ * 
+ *   http://www.apache.org/licenses/LICENSE-2.0
+ * 
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied.  See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+
+
 import java.util.ArrayList;
 import java.util.Arrays;
 import java.util.List;

http://git-wip-us.apache.org/repos/asf/incubator-rya/blob/80faf06d/extras/indexing/src/test/java/mvm/rya/indexing/accumulo/entity/StarQueryTest.java
----------------------------------------------------------------------
diff --git a/extras/indexing/src/test/java/mvm/rya/indexing/accumulo/entity/StarQueryTest.java b/extras/indexing/src/test/java/mvm/rya/indexing/accumulo/entity/StarQueryTest.java
index 7d0082c..0da484a 100644
--- a/extras/indexing/src/test/java/mvm/rya/indexing/accumulo/entity/StarQueryTest.java
+++ b/extras/indexing/src/test/java/mvm/rya/indexing/accumulo/entity/StarQueryTest.java
@@ -1,5 +1,25 @@
 package mvm.rya.indexing.accumulo.entity;
 
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ * 
+ *   http://www.apache.org/licenses/LICENSE-2.0
+ * 
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied.  See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+
+
 import static org.junit.Assert.*;
 
 import java.util.List;

http://git-wip-us.apache.org/repos/asf/incubator-rya/blob/80faf06d/extras/indexing/src/test/java/mvm/rya/indexing/accumulo/freetext/AccumuloFreeTextIndexerTest.java
----------------------------------------------------------------------
diff --git a/extras/indexing/src/test/java/mvm/rya/indexing/accumulo/freetext/AccumuloFreeTextIndexerTest.java b/extras/indexing/src/test/java/mvm/rya/indexing/accumulo/freetext/AccumuloFreeTextIndexerTest.java
index aeeb174..a0a3a03 100644
--- a/extras/indexing/src/test/java/mvm/rya/indexing/accumulo/freetext/AccumuloFreeTextIndexerTest.java
+++ b/extras/indexing/src/test/java/mvm/rya/indexing/accumulo/freetext/AccumuloFreeTextIndexerTest.java
@@ -1,25 +1,26 @@
 package mvm.rya.indexing.accumulo.freetext;
 
 /*
- * #%L
- * mvm.rya.indexing.accumulo
- * %%
- * Copyright (C) 2014 Rya
- * %%
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * You may obtain a copy of the License at
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
  * 
- *      http://www.apache.org/licenses/LICENSE-2.0
+ *   http://www.apache.org/licenses/LICENSE-2.0
  * 
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- * #L%
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied.  See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
  */
 
+
+
 import info.aduna.iteration.CloseableIteration;
 
 import java.util.HashSet;

http://git-wip-us.apache.org/repos/asf/incubator-rya/blob/80faf06d/extras/indexing/src/test/java/mvm/rya/indexing/accumulo/freetext/query/QueryParserTest.java
----------------------------------------------------------------------
diff --git a/extras/indexing/src/test/java/mvm/rya/indexing/accumulo/freetext/query/QueryParserTest.java b/extras/indexing/src/test/java/mvm/rya/indexing/accumulo/freetext/query/QueryParserTest.java
index 3152ac3..2097a02 100644
--- a/extras/indexing/src/test/java/mvm/rya/indexing/accumulo/freetext/query/QueryParserTest.java
+++ b/extras/indexing/src/test/java/mvm/rya/indexing/accumulo/freetext/query/QueryParserTest.java
@@ -1,25 +1,26 @@
 package mvm.rya.indexing.accumulo.freetext.query;
 
 /*
- * #%L
- * mvm.rya.indexing.accumulo
- * %%
- * Copyright (C) 2014 Rya
- * %%
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * You may obtain a copy of the License at
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
  * 
- *      http://www.apache.org/licenses/LICENSE-2.0
+ *   http://www.apache.org/licenses/LICENSE-2.0
  * 
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- * #L%
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied.  See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
  */
 
+
+
 import java.util.ArrayList;
 import java.util.List;
 

http://git-wip-us.apache.org/repos/asf/incubator-rya/blob/80faf06d/extras/indexing/src/test/java/mvm/rya/indexing/accumulo/geo/GeoIndexerSfTest.java
----------------------------------------------------------------------
diff --git a/extras/indexing/src/test/java/mvm/rya/indexing/accumulo/geo/GeoIndexerSfTest.java b/extras/indexing/src/test/java/mvm/rya/indexing/accumulo/geo/GeoIndexerSfTest.java
index 5149abe..4c22857 100644
--- a/extras/indexing/src/test/java/mvm/rya/indexing/accumulo/geo/GeoIndexerSfTest.java
+++ b/extras/indexing/src/test/java/mvm/rya/indexing/accumulo/geo/GeoIndexerSfTest.java
@@ -1,25 +1,26 @@
 package mvm.rya.indexing.accumulo.geo;
 
 /*
- * #%L
- * mvm.rya.indexing.accumulo
- * %%
- * Copyright (C) 2014 Rya
- * %%
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * You may obtain a copy of the License at
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
  * 
- *      http://www.apache.org/licenses/LICENSE-2.0
+ *   http://www.apache.org/licenses/LICENSE-2.0
  * 
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- * #L%
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied.  See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
  */
 
+
+
 import info.aduna.iteration.CloseableIteration;
 
 import java.util.HashSet;

http://git-wip-us.apache.org/repos/asf/incubator-rya/blob/80faf06d/extras/indexing/src/test/java/mvm/rya/indexing/accumulo/geo/GeoIndexerTest.java
----------------------------------------------------------------------
diff --git a/extras/indexing/src/test/java/mvm/rya/indexing/accumulo/geo/GeoIndexerTest.java b/extras/indexing/src/test/java/mvm/rya/indexing/accumulo/geo/GeoIndexerTest.java
index 5e18696..8ca96bc 100644
--- a/extras/indexing/src/test/java/mvm/rya/indexing/accumulo/geo/GeoIndexerTest.java
+++ b/extras/indexing/src/test/java/mvm/rya/indexing/accumulo/geo/GeoIndexerTest.java
@@ -1,25 +1,26 @@
 package mvm.rya.indexing.accumulo.geo;
 
 /*
- * #%L
- * mvm.rya.indexing.accumulo
- * %%
- * Copyright (C) 2014 Rya
- * %%
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * You may obtain a copy of the License at
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
  * 
- *      http://www.apache.org/licenses/LICENSE-2.0
+ *   http://www.apache.org/licenses/LICENSE-2.0
  * 
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- * #L%
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied.  See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
  */
 
+
+
 import static mvm.rya.api.resolver.RdfToRyaConversions.convertStatement;
 import info.aduna.iteration.CloseableIteration;
 

http://git-wip-us.apache.org/repos/asf/incubator-rya/blob/80faf06d/extras/indexing/src/test/java/mvm/rya/indexing/accumulo/temporal/AccumuloTemporalIndexerTest.java
----------------------------------------------------------------------
diff --git a/extras/indexing/src/test/java/mvm/rya/indexing/accumulo/temporal/AccumuloTemporalIndexerTest.java b/extras/indexing/src/test/java/mvm/rya/indexing/accumulo/temporal/AccumuloTemporalIndexerTest.java
index c0ba19b..60d237d 100644
--- a/extras/indexing/src/test/java/mvm/rya/indexing/accumulo/temporal/AccumuloTemporalIndexerTest.java
+++ b/extras/indexing/src/test/java/mvm/rya/indexing/accumulo/temporal/AccumuloTemporalIndexerTest.java
@@ -3,6 +3,26 @@
  */
 package mvm.rya.indexing.accumulo.temporal;
 
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ * 
+ *   http://www.apache.org/licenses/LICENSE-2.0
+ * 
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied.  See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+
+
 import static mvm.rya.api.resolver.RdfToRyaConversions.convertStatement;
 import static org.junit.Assert.assertNotNull;
 import static org.junit.Assert.assertTrue;

http://git-wip-us.apache.org/repos/asf/incubator-rya/blob/80faf06d/extras/indexing/src/test/java/mvm/rya/indexing/accumulo/temporal/TemporalInstantTest.java
----------------------------------------------------------------------
diff --git a/extras/indexing/src/test/java/mvm/rya/indexing/accumulo/temporal/TemporalInstantTest.java b/extras/indexing/src/test/java/mvm/rya/indexing/accumulo/temporal/TemporalInstantTest.java
index 06b4484..6363372 100644
--- a/extras/indexing/src/test/java/mvm/rya/indexing/accumulo/temporal/TemporalInstantTest.java
+++ b/extras/indexing/src/test/java/mvm/rya/indexing/accumulo/temporal/TemporalInstantTest.java
@@ -1,5 +1,25 @@
 package mvm.rya.indexing.accumulo.temporal;
 
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ * 
+ *   http://www.apache.org/licenses/LICENSE-2.0
+ * 
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied.  See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+
+
 import java.text.SimpleDateFormat;
 import java.util.Date;
 import java.util.TimeZone;

http://git-wip-us.apache.org/repos/asf/incubator-rya/blob/80faf06d/extras/indexing/src/test/java/mvm/rya/indexing/accumulo/temporal/TemporalIntervalTest.java
----------------------------------------------------------------------
diff --git a/extras/indexing/src/test/java/mvm/rya/indexing/accumulo/temporal/TemporalIntervalTest.java b/extras/indexing/src/test/java/mvm/rya/indexing/accumulo/temporal/TemporalIntervalTest.java
index 3901cc9..6213826 100644
--- a/extras/indexing/src/test/java/mvm/rya/indexing/accumulo/temporal/TemporalIntervalTest.java
+++ b/extras/indexing/src/test/java/mvm/rya/indexing/accumulo/temporal/TemporalIntervalTest.java
@@ -1,5 +1,25 @@
 package mvm.rya.indexing.accumulo.temporal;
 
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ * 
+ *   http://www.apache.org/licenses/LICENSE-2.0
+ * 
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied.  See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+
+
 import java.util.Arrays;
 
 import mvm.rya.indexing.TemporalInstant;

http://git-wip-us.apache.org/repos/asf/incubator-rya/blob/80faf06d/extras/indexing/src/test/java/mvm/rya/indexing/external/AccumuloConstantIndexSetTest.java
----------------------------------------------------------------------
diff --git a/extras/indexing/src/test/java/mvm/rya/indexing/external/AccumuloConstantIndexSetTest.java b/extras/indexing/src/test/java/mvm/rya/indexing/external/AccumuloConstantIndexSetTest.java
index 438775b..98acf39 100644
--- a/extras/indexing/src/test/java/mvm/rya/indexing/external/AccumuloConstantIndexSetTest.java
+++ b/extras/indexing/src/test/java/mvm/rya/indexing/external/AccumuloConstantIndexSetTest.java
@@ -1,5 +1,25 @@
 package mvm.rya.indexing.external;
 
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ * 
+ *   http://www.apache.org/licenses/LICENSE-2.0
+ * 
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied.  See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+
+
 import java.util.Iterator;
 import java.util.List;
 import java.util.Set;

http://git-wip-us.apache.org/repos/asf/incubator-rya/blob/80faf06d/extras/indexing/src/test/java/mvm/rya/indexing/external/AccumuloIndexSetTest.java
----------------------------------------------------------------------
diff --git a/extras/indexing/src/test/java/mvm/rya/indexing/external/AccumuloIndexSetTest.java b/extras/indexing/src/test/java/mvm/rya/indexing/external/AccumuloIndexSetTest.java
index 716a3a5..c8ea57d 100644
--- a/extras/indexing/src/test/java/mvm/rya/indexing/external/AccumuloIndexSetTest.java
+++ b/extras/indexing/src/test/java/mvm/rya/indexing/external/AccumuloIndexSetTest.java
@@ -1,25 +1,26 @@
 package mvm.rya.indexing.external;
 
 /*
- * #%L
- * mvm.rya.indexing.accumulo
- * %%
- * Copyright (C) 2014 Rya
- * %%
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * You may obtain a copy of the License at
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
  * 
- *      http://www.apache.org/licenses/LICENSE-2.0
+ *   http://www.apache.org/licenses/LICENSE-2.0
  * 
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- * #L%
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied.  See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
  */
 
+
+
 import java.util.List;
 import java.util.Set;
 

http://git-wip-us.apache.org/repos/asf/incubator-rya/blob/80faf06d/extras/indexing/src/test/java/mvm/rya/indexing/external/AccumuloIndexSetTest2.java
----------------------------------------------------------------------
diff --git a/extras/indexing/src/test/java/mvm/rya/indexing/external/AccumuloIndexSetTest2.java b/extras/indexing/src/test/java/mvm/rya/indexing/external/AccumuloIndexSetTest2.java
index fc3caf5..f93f58a 100644
--- a/extras/indexing/src/test/java/mvm/rya/indexing/external/AccumuloIndexSetTest2.java
+++ b/extras/indexing/src/test/java/mvm/rya/indexing/external/AccumuloIndexSetTest2.java
@@ -1,5 +1,25 @@
 package mvm.rya.indexing.external;
 
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ * 
+ *   http://www.apache.org/licenses/LICENSE-2.0
+ * 
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied.  See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+
+
 import java.util.Arrays;
 import java.util.List;
 

http://git-wip-us.apache.org/repos/asf/incubator-rya/blob/80faf06d/extras/indexing/src/test/java/mvm/rya/indexing/external/PrecompJoinOptimizerIntegrationTest.java
----------------------------------------------------------------------
diff --git a/extras/indexing/src/test/java/mvm/rya/indexing/external/PrecompJoinOptimizerIntegrationTest.java b/extras/indexing/src/test/java/mvm/rya/indexing/external/PrecompJoinOptimizerIntegrationTest.java
index 2d997af..f7f3cbf 100644
--- a/extras/indexing/src/test/java/mvm/rya/indexing/external/PrecompJoinOptimizerIntegrationTest.java
+++ b/extras/indexing/src/test/java/mvm/rya/indexing/external/PrecompJoinOptimizerIntegrationTest.java
@@ -1,5 +1,25 @@
 package mvm.rya.indexing.external;
 
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ * 
+ *   http://www.apache.org/licenses/LICENSE-2.0
+ * 
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied.  See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+
+
 import java.util.List;
 import java.util.Map;
 import java.util.Map.Entry;
@@ -527,4 +547,4 @@ public class PrecompJoinOptimizerIntegrationTest {
     
     
     
-    
\ No newline at end of file
+    

http://git-wip-us.apache.org/repos/asf/incubator-rya/blob/80faf06d/extras/indexing/src/test/java/mvm/rya/indexing/external/PrecompJoinOptimizerTest.java
----------------------------------------------------------------------
diff --git a/extras/indexing/src/test/java/mvm/rya/indexing/external/PrecompJoinOptimizerTest.java b/extras/indexing/src/test/java/mvm/rya/indexing/external/PrecompJoinOptimizerTest.java
index 9365744..396224f 100644
--- a/extras/indexing/src/test/java/mvm/rya/indexing/external/PrecompJoinOptimizerTest.java
+++ b/extras/indexing/src/test/java/mvm/rya/indexing/external/PrecompJoinOptimizerTest.java
@@ -1,5 +1,25 @@
 package mvm.rya.indexing.external;
 
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ * 
+ *   http://www.apache.org/licenses/LICENSE-2.0
+ * 
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied.  See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+
+
 import java.util.ArrayList;
 import java.util.List;
 

http://git-wip-us.apache.org/repos/asf/incubator-rya/blob/80faf06d/extras/indexing/src/test/java/mvm/rya/indexing/external/tupleSet/ExternalProcessorTest.java
----------------------------------------------------------------------
diff --git a/extras/indexing/src/test/java/mvm/rya/indexing/external/tupleSet/ExternalProcessorTest.java b/extras/indexing/src/test/java/mvm/rya/indexing/external/tupleSet/ExternalProcessorTest.java
index f4baafe..bac9871 100644
--- a/extras/indexing/src/test/java/mvm/rya/indexing/external/tupleSet/ExternalProcessorTest.java
+++ b/extras/indexing/src/test/java/mvm/rya/indexing/external/tupleSet/ExternalProcessorTest.java
@@ -1,25 +1,26 @@
 package mvm.rya.indexing.external.tupleSet;
 
 /*
- * #%L
- * mvm.rya.rya.indexing
- * %%
- * Copyright (C) 2014 - 2015 Rya
- * %%
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * You may obtain a copy of the License at
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
  * 
- *      http://www.apache.org/licenses/LICENSE-2.0
+ *   http://www.apache.org/licenses/LICENSE-2.0
  * 
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- * #L%
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied.  See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
  */
 
+
+
 import mvm.rya.indexing.external.ExternalProcessor;
 import mvm.rya.indexing.external.ExternalProcessor.BindingSetAssignmentCollector;
 import mvm.rya.indexing.external.tupleSet.ExternalTupleSet;

http://git-wip-us.apache.org/repos/asf/incubator-rya/blob/80faf06d/extras/indexing/src/test/java/mvm/rya/indexing/external/tupleSet/QueryVariableNormalizerTest.java
----------------------------------------------------------------------
diff --git a/extras/indexing/src/test/java/mvm/rya/indexing/external/tupleSet/QueryVariableNormalizerTest.java b/extras/indexing/src/test/java/mvm/rya/indexing/external/tupleSet/QueryVariableNormalizerTest.java
index 72dc017..aec959e 100644
--- a/extras/indexing/src/test/java/mvm/rya/indexing/external/tupleSet/QueryVariableNormalizerTest.java
+++ b/extras/indexing/src/test/java/mvm/rya/indexing/external/tupleSet/QueryVariableNormalizerTest.java
@@ -1,25 +1,26 @@
 package mvm.rya.indexing.external.tupleSet;
 
 /*
- * #%L
- * mvm.rya.rya.indexing
- * %%
- * Copyright (C) 2014 - 2015 Rya
- * %%
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * You may obtain a copy of the License at
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
  * 
- *      http://www.apache.org/licenses/LICENSE-2.0
+ *   http://www.apache.org/licenses/LICENSE-2.0
  * 
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- * #L%
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied.  See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
  */
 
+
+
 import java.util.List;
 import java.util.Set;
 

http://git-wip-us.apache.org/repos/asf/incubator-rya/blob/80faf06d/extras/indexing/src/test/java/mvm/rya/indexing/external/tupleSet/VarConstExternalProcessorTest.java
----------------------------------------------------------------------
diff --git a/extras/indexing/src/test/java/mvm/rya/indexing/external/tupleSet/VarConstExternalProcessorTest.java b/extras/indexing/src/test/java/mvm/rya/indexing/external/tupleSet/VarConstExternalProcessorTest.java
index b3638a5..6449486 100644
--- a/extras/indexing/src/test/java/mvm/rya/indexing/external/tupleSet/VarConstExternalProcessorTest.java
+++ b/extras/indexing/src/test/java/mvm/rya/indexing/external/tupleSet/VarConstExternalProcessorTest.java
@@ -1,5 +1,25 @@
 package mvm.rya.indexing.external.tupleSet;
 
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ * 
+ *   http://www.apache.org/licenses/LICENSE-2.0
+ * 
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied.  See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+
+
 import static org.junit.Assert.*;
 
 import java.util.ArrayList;

http://git-wip-us.apache.org/repos/asf/incubator-rya/blob/80faf06d/extras/indexing/src/test/java/mvm/rya/indexing/external/tupleSet/VarConstQueryVariableNormalizerTest.java
----------------------------------------------------------------------
diff --git a/extras/indexing/src/test/java/mvm/rya/indexing/external/tupleSet/VarConstQueryVariableNormalizerTest.java b/extras/indexing/src/test/java/mvm/rya/indexing/external/tupleSet/VarConstQueryVariableNormalizerTest.java
index 07cdb6b..002a0e1 100644
--- a/extras/indexing/src/test/java/mvm/rya/indexing/external/tupleSet/VarConstQueryVariableNormalizerTest.java
+++ b/extras/indexing/src/test/java/mvm/rya/indexing/external/tupleSet/VarConstQueryVariableNormalizerTest.java
@@ -1,5 +1,25 @@
 package mvm.rya.indexing.external.tupleSet;
 
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ * 
+ *   http://www.apache.org/licenses/LICENSE-2.0
+ * 
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied.  See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+
+
 import java.util.List;
 import java.util.Set;
 

http://git-wip-us.apache.org/repos/asf/incubator-rya/blob/80faf06d/extras/indexingExample/pom.xml
----------------------------------------------------------------------
diff --git a/extras/indexingExample/pom.xml b/extras/indexingExample/pom.xml
new file mode 100644
index 0000000..2ca3417
--- /dev/null
+++ b/extras/indexingExample/pom.xml
@@ -0,0 +1,99 @@
+<?xml version='1.0'?>
+<!--
+Licensed to the Apache Software Foundation (ASF) under one
+or more contributor license agreements.  See the NOTICE file
+distributed with this work for additional information
+regarding copyright ownership.  The ASF licenses this file
+to you under the Apache License, Version 2.0 (the
+"License"); you may not use this file except in compliance
+with the License.  You may obtain a copy of the License at
+
+  http://www.apache.org/licenses/LICENSE-2.0
+
+Unless required by applicable law or agreed to in writing,
+software distributed under the License is distributed on an
+"AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+KIND, either express or implied.  See the License for the
+specific language governing permissions and limitations
+under the License.
+-->
+
+<project xmlns="http://maven.apache.org/POM/4.0.0" xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance" xsi:schemaLocation="http://maven.apache.org/POM/4.0.0 http://maven.apache.org/xsd/maven-4.0.0.xsd">
+    <modelVersion>4.0.0</modelVersion>
+    <parent>
+        <groupId>org.apache.rya</groupId>
+        <artifactId>rya.extras</artifactId>
+        <version>3.2.10-SNAPSHOT</version>
+    </parent>
+
+    <artifactId>rya.indexing.example</artifactId>
+    <name>Apache Rya Secondary Indexing Example</name>
+
+    <dependencies>
+        <dependency>
+            <groupId>org.apache.rya</groupId>
+            <artifactId>rya.prospector</artifactId>
+        </dependency>
+
+        <dependency>
+            <groupId>org.apache.rya</groupId>
+            <artifactId>mongodb.rya</artifactId>
+        </dependency>
+        <dependency>
+            <groupId>org.apache.rya</groupId>
+            <artifactId>rya.indexing</artifactId>
+        </dependency>
+        <dependency>
+            <groupId>org.apache.rya</groupId>
+            <artifactId>rya.indexing</artifactId>
+            <classifier>accumulo-server</classifier>
+        </dependency>
+
+        <dependency>
+            <groupId>org.apache.rya</groupId>
+            <artifactId>rya.indexing</artifactId>
+            <classifier>map-reduce</classifier>
+        </dependency>
+
+        <dependency>
+            <groupId>org.apache.accumulo</groupId>
+            <artifactId>accumulo-core</artifactId>
+        </dependency>
+
+        <dependency>
+            <groupId>org.locationtech.geomesa</groupId>
+            <artifactId>geomesa-distributed-runtime</artifactId>
+        </dependency>
+    </dependencies>
+
+    <build>
+        <plugins>
+            <plugin>
+                <groupId>org.apache.rat</groupId>
+                <artifactId>apache-rat-plugin</artifactId>
+                <configuration>
+                    <excludes combine.children="append">
+                        <!--  Vagrant may leave around some files.  These are not checked in -->
+                        <exclude>**/src/main/vagrant/.vagrant/**</exclude>
+                    </excludes>
+                </configuration>
+            </plugin>
+            <plugin>
+                <artifactId>maven-assembly-plugin</artifactId>
+                <configuration>
+                    <descriptors>
+                        <descriptor>src/main/assembly/assembly.xml</descriptor>
+                    </descriptors>
+                </configuration>
+                <executions>
+                    <execution>
+                        <phase>package</phase>
+                        <goals>
+                            <goal>single</goal>
+                        </goals>
+                    </execution>
+                </executions>
+            </plugin>
+        </plugins>
+    </build>
+</project>

http://git-wip-us.apache.org/repos/asf/incubator-rya/blob/80faf06d/extras/indexingExample/src/main/assembly/assembly.xml
----------------------------------------------------------------------
diff --git a/extras/indexingExample/src/main/assembly/assembly.xml b/extras/indexingExample/src/main/assembly/assembly.xml
new file mode 100644
index 0000000..0e8fd6d
--- /dev/null
+++ b/extras/indexingExample/src/main/assembly/assembly.xml
@@ -0,0 +1,70 @@
+<?xml version='1.0'?>
+<!--
+Licensed to the Apache Software Foundation (ASF) under one
+or more contributor license agreements.  See the NOTICE file
+distributed with this work for additional information
+regarding copyright ownership.  The ASF licenses this file
+to you under the Apache License, Version 2.0 (the
+"License"); you may not use this file except in compliance
+with the License.  You may obtain a copy of the License at
+
+  http://www.apache.org/licenses/LICENSE-2.0
+
+Unless required by applicable law or agreed to in writing,
+software distributed under the License is distributed on an
+"AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+KIND, either express or implied.  See the License for the
+specific language governing permissions and limitations
+under the License.
+-->
+
+<assembly xmlns="http://maven.apache.org/plugins/maven-assembly-plugin/assembly/1.1.0" xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance"
+    xsi:schemaLocation="http://maven.apache.org/plugins/maven-assembly-plugin/assembly/1.1.0 http://maven.apache.org/xsd/assembly-1.1.0.xsd">
+
+    <id>distribution</id>
+    <formats>
+        <format>zip</format>
+    </formats>
+    <includeBaseDirectory>false</includeBaseDirectory>
+
+    <dependencySets>
+        <dependencySet>
+            <outputDirectory>accumulo/lib/ext</outputDirectory>
+            <includes>
+                <include>org.apache.rya:rya.indexing:*:accumulo-server</include>
+                <include>org.locationtech.geomesa:geomesa-distributed-runtime:*</include>
+            </includes>
+        </dependencySet>
+        <dependencySet>
+            <outputDirectory>map-reduce</outputDirectory>
+            <includes>
+                <include>org.apache.rya:rya.indexing:*:map-reduce</include>
+            </includes>
+        </dependencySet>
+        <dependencySet>
+            <outputDirectory>dist/lib</outputDirectory>
+            <includes>
+                <include>*</include>
+            </includes>
+            <excludes>
+                <!-- Do not include the example jar. Example batch script builds the example -->
+                <exclude>org.apache.rya:rya.indexing.example</exclude>
+
+                <!-- Do not include the MR or Accumulo Server builds -->
+                <exclude>org.apache.rya:rya.indexing:*:accumulo-server</exclude>
+                <exclude>org.apache.rya:rya.indexing:*:map-reduce</exclude>
+            </excludes>
+            <scope>test</scope>
+        </dependencySet>
+    </dependencySets>
+    <files>
+        <file>
+            <source>src/main/scripts/RunRyaDirectExample.bat</source>
+            <outputDirectory>dist</outputDirectory>
+        </file>
+        <file>
+            <source>src/main/java/RyaDirectExample.java</source>
+            <outputDirectory>dist</outputDirectory>
+        </file>
+    </files>
+</assembly>

http://git-wip-us.apache.org/repos/asf/incubator-rya/blob/80faf06d/extras/indexingExample/src/main/java/EntityDirectExample.java
----------------------------------------------------------------------
diff --git a/extras/indexingExample/src/main/java/EntityDirectExample.java b/extras/indexingExample/src/main/java/EntityDirectExample.java
new file mode 100644
index 0000000..ae83520
--- /dev/null
+++ b/extras/indexingExample/src/main/java/EntityDirectExample.java
@@ -0,0 +1,295 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ * 
+ *   http://www.apache.org/licenses/LICENSE-2.0
+ * 
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied.  See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+
+
+
+import java.util.List;
+
+import mvm.rya.accumulo.AccumuloRdfConfiguration;
+import mvm.rya.api.RdfCloudTripleStoreConfiguration;
+import mvm.rya.indexing.RyaSailFactory;
+import mvm.rya.indexing.accumulo.ConfigUtils;
+
+import org.apache.accumulo.core.client.AccumuloException;
+import org.apache.accumulo.core.client.AccumuloSecurityException;
+import org.apache.accumulo.core.client.TableNotFoundException;
+import org.apache.commons.lang.Validate;
+import org.apache.hadoop.conf.Configuration;
+import org.apache.log4j.Logger;
+import org.openrdf.query.BindingSet;
+import org.openrdf.query.MalformedQueryException;
+import org.openrdf.query.QueryEvaluationException;
+import org.openrdf.query.QueryLanguage;
+import org.openrdf.query.QueryResultHandlerException;
+import org.openrdf.query.TupleQuery;
+import org.openrdf.query.TupleQueryResultHandler;
+import org.openrdf.query.TupleQueryResultHandlerException;
+import org.openrdf.query.Update;
+import org.openrdf.query.UpdateExecutionException;
+import org.openrdf.repository.RepositoryException;
+import org.openrdf.repository.sail.SailRepository;
+import org.openrdf.repository.sail.SailRepositoryConnection;
+import org.openrdf.sail.Sail;
+
+public class EntityDirectExample {
+    private static final Logger log = Logger.getLogger(EntityDirectExample.class);
+
+    //
+    // Connection configuration parameters
+    //
+
+    private static final boolean USE_MOCK_INSTANCE = true;
+    private static final boolean PRINT_QUERIES = true;
+    private static final String INSTANCE = "instance";
+    private static final String RYA_TABLE_PREFIX = "x_test_triplestore_";
+    private static final String AUTHS = "U";
+    
+    public static void main(String[] args) throws Exception {
+        Configuration conf = getConf();
+        conf.setBoolean(ConfigUtils.DISPLAY_QUERY_PLAN, PRINT_QUERIES);
+        
+        log.info("Creating the tables as root.");
+        SailRepository repository = null;
+        SailRepositoryConnection conn = null;
+      
+        try {
+            log.info("Connecting to Indexing Sail Repository.");
+            
+            Sail extSail = RyaSailFactory.getInstance(conf);
+            repository = new SailRepository(extSail);
+            repository.initialize();
+            conn = repository.getConnection();
+
+            log.info("Running SPARQL Example: Add and Delete");
+            testAddAndDelete(conn);
+            log.info("Running SAIL/SPARQL Example: Add and Temporal Search");
+            testAddAndTemporalSearchWithPCJ(conn);
+            
+        } finally {
+            log.info("Shutting down");
+            closeQuietly(conn);
+            closeQuietly(repository);
+        }
+    }
+
+    private static void closeQuietly(SailRepository repository) {
+        if (repository != null) {
+            try {
+                repository.shutDown();
+            } catch (RepositoryException e) {
+                // quietly absorb this exception
+            }
+        }
+    }
+
+    private static void closeQuietly(SailRepositoryConnection conn) {
+        if (conn != null) {
+            try {
+                conn.close();
+            } catch (RepositoryException e) {
+                // quietly absorb this exception
+            }
+        }
+    }
+
+
+    public static void testAddAndDelete(SailRepositoryConnection conn) throws MalformedQueryException,
+            RepositoryException, UpdateExecutionException, QueryEvaluationException, TupleQueryResultHandlerException,
+            AccumuloException, AccumuloSecurityException, TableNotFoundException {
+
+        // Add data
+        String query = "INSERT DATA\n"//
+                + "{ GRAPH <http://updated/test> {\n"//
+                + "  <http://acme.com/people/Mike> " //
+                + "       <http://acme.com/actions/likes> \"A new book\" ;\n"//
+                + "       <http://acme.com/actions/likes> \"Avocados\" .\n" + "} }";
+
+        log.info("Performing Query");
+
+        Update update = conn.prepareUpdate(QueryLanguage.SPARQL, query);
+        update.execute();
+        
+        query = "select ?x {GRAPH <http://updated/test> {?x <http://acme.com/actions/likes> \"A new book\" . "//
+                + " ?x <http://acme.com/actions/likes> \"Avocados\" }}";
+        CountingResultHandler resultHandler = new CountingResultHandler();
+        TupleQuery tupleQuery = conn.prepareTupleQuery(QueryLanguage.SPARQL, query);
+        tupleQuery.evaluate(resultHandler);
+        log.info("Result count : " + resultHandler.getCount());
+
+        Validate.isTrue(resultHandler.getCount() == 1);
+        resultHandler.resetCount();
+
+        // TODO: delete currently not implemented in AccumuloRyaDAO
+//        // Delete Data
+//        query = "DELETE DATA\n" //
+//                + "{ GRAPH <http://updated/test> {\n"
+//                + "  <http://acme.com/people/Mike> <http://acme.com/actions/likes> \"A new book\" ;\n"
+//                + "   <http://acme.com/actions/likes> \"Avocados\" .\n" + "}}";
+//
+//        update = conn.prepareUpdate(QueryLanguage.SPARQL, query);
+//        update.execute();
+//
+//        query = "select ?x {GRAPH <http://updated/test> {?x <http://acme.com/actions/likes> \"A new book\" . "//
+//                + " ?x <http://acme.com/actions/likes> \"Avocados\" }}";
+//        tupleQuery = conn.prepareTupleQuery(QueryLanguage.SPARQL, query);
+//        tupleQuery.evaluate(resultHandler);
+//        log.info("Result count : " + resultHandler.getCount());
+//
+//        Validate.isTrue(resultHandler.getCount() == 0);
+    }
+
+    private static void testAddAndEntitySearch(SailRepositoryConnection conn) throws Exception {
+
+        // insert two entities, each with several properties, to query against
+
+        String sparqlInsert = "PREFIX pref: <http://www.model/pref#> \n"
+                + "INSERT DATA {\n" //
+                + "<urn:Bob>       a       pref:Person ;\n" //
+                + "     pref:hasProperty1 'property1' ;\n" //
+                + "     pref:hasProperty2 'property2' ;\n" //
+                + "     pref:hasProperty3 'property3' .\n" //
+                + "<urn:Fred>      a       pref:Person ; \n" //
+                + "     pref:hasProperty4 'property4' ; \n" //
+                + "     pref:hasProperty5 'property5' . \n" //
+                + "}";
+
+        Update update = conn.prepareUpdate(QueryLanguage.SPARQL, sparqlInsert);
+        update.execute();
+        
+        String queryString = "PREFIX pref: <http://www.model/pref#> \n" //
+                + "SELECT ?x ?z \n" //
+                + "WHERE { \n"
+                + "  ?x a ?z. \n"
+                + "  ?x pref:hasProperty1 'property1' . \n"//
+                + "  ?x pref:hasProperty2 'property2' . \n"//
+                + "  ?x pref:hasProperty3 'property3' . \n"//
+                + "}";//
+
+        TupleQuery tupleQuery = conn.prepareTupleQuery(QueryLanguage.SPARQL, queryString);
+        CountingResultHandler tupleHandler = new CountingResultHandler();
+        tupleQuery.evaluate(tupleHandler);
+        log.info("Result count : " + tupleHandler.getCount());
+        Validate.isTrue(tupleHandler.getCount() == 1);
+        Validate.isTrue(tupleHandler.getBsSize() == 2);
+        
+        queryString = "PREFIX pref: <http://www.model/pref#> \n" //
+                + "SELECT ?x ?w ?z \n" //
+                + "WHERE { \n"
+                + "  ?x a ?z. \n"
+                + "  ?x pref:hasProperty4 'property4' . \n"//
+                + "  ?x pref:hasProperty5 ?w . \n"//
+                + "}";//
+
+        tupleQuery = conn.prepareTupleQuery(QueryLanguage.SPARQL, queryString);
+        tupleHandler = new CountingResultHandler();
+        tupleQuery.evaluate(tupleHandler);
+        log.info("Result count : " + tupleHandler.getCount());
+        Validate.isTrue(tupleHandler.getCount() == 1);
+        Validate.isTrue(tupleHandler.getBsSize() == 3);
+
+        queryString = "PREFIX pref: <http://www.model/pref#> " 
+                + "SELECT ?v ?w ?x ?y ?z " 
+                + "WHERE { " 
+                + "  ?w a ?z  . " 
+                + "  ?w pref:hasProperty1 ?v . " 
+                + "  ?w pref:hasProperty2 'property2' . " 
+                + "  ?w pref:hasProperty3 'property3' . " 
+                + "  ?x a ?z  . "
+                + "  ?x pref:hasProperty4 'property4' . " 
+                + "  ?x pref:hasProperty5 ?y . " 
+                + "}";
+
+        tupleQuery = conn.prepareTupleQuery(QueryLanguage.SPARQL, queryString);
+        tupleHandler = new CountingResultHandler();
+        tupleQuery.evaluate(tupleHandler);
+        log.info("Result count : " + tupleHandler.getCount());
+        Validate.isTrue(tupleHandler.getCount() == 1);
+        Validate.isTrue(tupleHandler.getBsSize() == 5);
+    }
+
+    private static Configuration getConf() {
+
+        AccumuloRdfConfiguration conf = new AccumuloRdfConfiguration();
+
+        conf.setBoolean(ConfigUtils.USE_MOCK_INSTANCE, USE_MOCK_INSTANCE);
+        conf.set(ConfigUtils.USE_ENTITY, "true");
+        conf.set(RdfCloudTripleStoreConfiguration.CONF_TBL_PREFIX, RYA_TABLE_PREFIX);
+        conf.set(ConfigUtils.ENTITY_TABLENAME, RYA_TABLE_PREFIX + "entity");
+        conf.set(ConfigUtils.CLOUDBASE_USER, "root");
+        conf.set(ConfigUtils.CLOUDBASE_PASSWORD, "");
+        conf.set(ConfigUtils.CLOUDBASE_INSTANCE, INSTANCE);
+        conf.setInt(ConfigUtils.NUM_PARTITIONS, 3);
+        conf.set(ConfigUtils.CLOUDBASE_AUTHS, AUTHS);
+
+        return conf;
+    }
+
+    private static class CountingResultHandler implements TupleQueryResultHandler {
+        private int count = 0;
+        private int bindingSize = 0;
+        private boolean bsSizeSet = false;
+
+        public int getCount() {
+            return count;
+        }
+        
+        public int getBsSize() {
+            return bindingSize;
+        }
+        
+        public void resetBsSize() {
+            bindingSize = 0;
+            bsSizeSet = false;
+        }
+
+        public void resetCount() {
+            this.count = 0;
+        }
+
+        @Override
+        public void startQueryResult(List<String> arg0) throws TupleQueryResultHandlerException {
+        }
+
+        @Override
+        public void handleSolution(BindingSet arg0) throws TupleQueryResultHandlerException {
+            count++;
+            if(!bsSizeSet) {
+                bindingSize = arg0.size();
+                bsSizeSet = true;
+            }
+            System.out.println(arg0);
+        }
+
+        @Override
+        public void endQueryResult() throws TupleQueryResultHandlerException {
+        }
+
+        @Override
+        public void handleBoolean(boolean arg0) throws QueryResultHandlerException {
+            // no-op
+          
+        }
+
+        @Override
+        public void handleLinks(List<String> arg0) throws QueryResultHandlerException {
+            // no-op
+          
+        }
+    }
+}
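
The EntityDirectExample above follows the standard pattern for querying Rya
through OpenRDF: build a Configuration, obtain a Sail from RyaSailFactory,
wrap it in a SailRepository, and evaluate SPARQL against a connection. A
minimal, self-contained sketch of that pattern follows. It is illustrative
only: the class name MinimalRyaQuery and the catch-all SELECT query are made
up for this note, and it assumes the same mvm.rya and OpenRDF artifacts shown
in this commit are on the classpath.

import java.util.List;

import mvm.rya.accumulo.AccumuloRdfConfiguration;
import mvm.rya.api.RdfCloudTripleStoreConfiguration;
import mvm.rya.indexing.RyaSailFactory;
import mvm.rya.indexing.accumulo.ConfigUtils;

import org.openrdf.query.BindingSet;
import org.openrdf.query.QueryLanguage;
import org.openrdf.query.QueryResultHandlerException;
import org.openrdf.query.TupleQuery;
import org.openrdf.query.TupleQueryResultHandler;
import org.openrdf.query.TupleQueryResultHandlerException;
import org.openrdf.repository.sail.SailRepository;
import org.openrdf.repository.sail.SailRepositoryConnection;
import org.openrdf.sail.Sail;

public class MinimalRyaQuery {
    public static void main(String[] args) throws Exception {
        // Mirror the mock-instance configuration used by EntityDirectExample.
        AccumuloRdfConfiguration conf = new AccumuloRdfConfiguration();
        conf.setBoolean(ConfigUtils.USE_MOCK_INSTANCE, true);
        conf.set(ConfigUtils.CLOUDBASE_USER, "root");
        conf.set(ConfigUtils.CLOUDBASE_PASSWORD, "");
        conf.set(ConfigUtils.CLOUDBASE_INSTANCE, "instance");
        conf.set(ConfigUtils.CLOUDBASE_AUTHS, "U");
        conf.set(RdfCloudTripleStoreConfiguration.CONF_TBL_PREFIX, "x_test_triplestore_");

        Sail sail = RyaSailFactory.getInstance(conf);
        SailRepository repo = new SailRepository(sail);
        repo.initialize();
        SailRepositoryConnection conn = repo.getConnection();
        try {
            TupleQuery query = conn.prepareTupleQuery(QueryLanguage.SPARQL,
                    "SELECT ?s ?p ?o WHERE { ?s ?p ?o }");
            // Print each solution as it streams back from Rya.
            query.evaluate(new TupleQueryResultHandler() {
                @Override
                public void startQueryResult(List<String> names) throws TupleQueryResultHandlerException { }
                @Override
                public void handleSolution(BindingSet bs) throws TupleQueryResultHandlerException {
                    System.out.println(bs);
                }
                @Override
                public void endQueryResult() throws TupleQueryResultHandlerException { }
                @Override
                public void handleBoolean(boolean value) throws QueryResultHandlerException { }
                @Override
                public void handleLinks(List<String> links) throws QueryResultHandlerException { }
            });
        } finally {
            conn.close();
            repo.shutDown();
        }
    }
}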

http://git-wip-us.apache.org/repos/asf/incubator-rya/blob/80faf06d/extras/indexingExample/src/main/java/MongoRyaDirectExample.java
----------------------------------------------------------------------
diff --git a/extras/indexingExample/src/main/java/MongoRyaDirectExample.java b/extras/indexingExample/src/main/java/MongoRyaDirectExample.java
new file mode 100644
index 0000000..860df06
--- /dev/null
+++ b/extras/indexingExample/src/main/java/MongoRyaDirectExample.java
@@ -0,0 +1,305 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ * 
+ *   http://www.apache.org/licenses/LICENSE-2.0
+ * 
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied.  See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+
+import java.util.List;
+
+import mvm.rya.api.RdfCloudTripleStoreConfiguration;
+import mvm.rya.indexing.RyaSailFactory;
+import mvm.rya.indexing.accumulo.ConfigUtils;
+import mvm.rya.mongodb.MongoDBRdfConfiguration;
+
+import org.apache.commons.lang.Validate;
+import org.apache.hadoop.conf.Configuration;
+import org.apache.log4j.Logger;
+import org.openrdf.model.Namespace;
+import org.openrdf.query.BindingSet;
+import org.openrdf.query.MalformedQueryException;
+import org.openrdf.query.QueryEvaluationException;
+import org.openrdf.query.QueryLanguage;
+import org.openrdf.query.QueryResultHandlerException;
+import org.openrdf.query.TupleQuery;
+import org.openrdf.query.TupleQueryResultHandler;
+import org.openrdf.query.TupleQueryResultHandlerException;
+import org.openrdf.query.Update;
+import org.openrdf.query.UpdateExecutionException;
+import org.openrdf.repository.RepositoryException;
+import org.openrdf.repository.RepositoryResult;
+import org.openrdf.repository.sail.SailRepository;
+import org.openrdf.repository.sail.SailRepositoryConnection;
+import org.openrdf.sail.Sail;
+
+public class MongoRyaDirectExample {
+    private static final Logger log = Logger.getLogger(MongoRyaDirectExample.class);
+
+    //
+    // Connection configuration parameters
+    //
+
+    private static final boolean PRINT_QUERIES = true;
+    private static final String MONGO_DB = "rya";
+    private static final String MONGO_COLL_PREFIX = "rya_";
+
+    public static void main(String[] args) throws Exception {
+        Configuration conf = getConf();
+        conf.setBoolean(ConfigUtils.DISPLAY_QUERY_PLAN, PRINT_QUERIES);
+  
+        SailRepository repository = null;
+        SailRepositoryConnection conn = null;
+        try {
+            log.info("Connecting to Indexing Sail Repository.");
+            Sail sail = RyaSailFactory.getInstance(conf);
+            repository = new SailRepository(sail);
+            repository.initialize();
+            conn = repository.getConnection();
+
+            long start = System.currentTimeMillis();
+            log.info("Running SPARQL Example: Add and Delete");
+            testAddAndDelete(conn);
+            testAddAndDeleteNoContext(conn);
+            testAddNamespaces(conn);
+            testAddPointAndWithinSearch(conn);
+
+            log.info("TIME: " + (System.currentTimeMillis() - start) / 1000.);
+        } finally {
+            log.info("Shutting down");
+            closeQuietly(conn);
+            closeQuietly(repository);
+        }
+    }
+
+    private static void testAddPointAndWithinSearch(SailRepositoryConnection conn) throws Exception {
+
+        String update = "PREFIX geo: <http://www.opengis.net/ont/geosparql#>  "//
+                + "INSERT DATA { " //
+                + "  <urn:feature> a geo:Feature ; " //
+                + "    geo:hasGeometry [ " //
+                + "      a geo:Point ; " //
+                + "      geo:asWKT \"Point(-77.03524 38.889468)\"^^geo:wktLiteral "//
+                + "    ] . " //
+                + "}";
+
+        Update u = conn.prepareUpdate(QueryLanguage.SPARQL, update);
+        u.execute();
+
+        String queryString;
+        TupleQuery tupleQuery;
+        CountingResultHandler tupleHandler;
+
+        // ring containing point
+        queryString = "PREFIX geo: <http://www.opengis.net/ont/geosparql#>  "//
+                + "PREFIX geof: <http://www.opengis.net/def/function/geosparql/>  "//
+                + "SELECT ?feature ?point ?wkt " //
+                + "{" //
+                + "  ?feature a geo:Feature . "//
+                + "  ?feature geo:hasGeometry ?point . "//
+                + "  ?point a geo:Point . "//
+                + "  ?point geo:asWKT ?wkt . "//
+                + "  FILTER(geof:sfWithin(?wkt, \"POLYGON((-78 39, -77 39, -77 38, -78 38, -78 39))\"^^geo:wktLiteral)) " //
+                + "}";//
+        tupleQuery = conn.prepareTupleQuery(QueryLanguage.SPARQL, queryString);
+
+        tupleHandler = new CountingResultHandler();
+        tupleQuery.evaluate(tupleHandler);
+        log.info("Result count : " + tupleHandler.getCount());
+        Validate.isTrue(tupleHandler.getCount() >= 1); // may see points left over from previous runs
+
+        // ring outside point
+        queryString = "PREFIX geo: <http://www.opengis.net/ont/geosparql#>  "//
+                + "PREFIX geof: <http://www.opengis.net/def/function/geosparql/>  "//
+                + "SELECT ?feature ?point ?wkt " //
+                + "{" //
+                + "  ?feature a geo:Feature . "//
+                + "  ?feature geo:hasGeometry ?point . "//
+                + "  ?point a geo:Point . "//
+                + "  ?point geo:asWKT ?wkt . "//
+                + "  FILTER(geof:sfWithin(?wkt, \"POLYGON((-77 39, -76 39, -76 38, -77 38, -77 39))\"^^geo:wktLiteral)) " //
+                + "}";//
+        tupleQuery = conn.prepareTupleQuery(QueryLanguage.SPARQL, queryString);
+
+        tupleHandler = new CountingResultHandler();
+        tupleQuery.evaluate(tupleHandler);
+        log.info("Result count : " + tupleHandler.getCount());
+        Validate.isTrue(tupleHandler.getCount() == 0);
+    }
+
+    private static void closeQuietly(SailRepository repository) {
+        if (repository != null) {
+            try {
+                repository.shutDown();
+            } catch (RepositoryException e) {
+                // quietly absorb this exception
+            }
+        }
+    }
+
+    private static void closeQuietly(SailRepositoryConnection conn) {
+        if (conn != null) {
+            try {
+                conn.close();
+            } catch (RepositoryException e) {
+                // quietly absorb this exception
+            }
+        }
+    }
+
+    private static Configuration getConf() {
+
+        Configuration conf = new Configuration();
+        conf.set(ConfigUtils.USE_MONGO, "true");
+        conf.set(MongoDBRdfConfiguration.USE_TEST_MONGO, "true");
+        conf.set(MongoDBRdfConfiguration.MONGO_DB_NAME, MONGO_DB);
+        conf.set(MongoDBRdfConfiguration.MONGO_COLLECTION_PREFIX, MONGO_COLL_PREFIX);
+        conf.set(ConfigUtils.GEO_PREDICATES_LIST, "http://www.opengis.net/ont/geosparql#asWKT");
+        conf.set(ConfigUtils.USE_GEO, "true");
+        conf.set(RdfCloudTripleStoreConfiguration.CONF_TBL_PREFIX, MONGO_COLL_PREFIX);
+        
+        return conf;
+    }
+
+    public static void testAddAndDelete(SailRepositoryConnection conn) throws MalformedQueryException, RepositoryException,
+            UpdateExecutionException, QueryEvaluationException, TupleQueryResultHandlerException {
+
+        // Add data
+        String query = "INSERT DATA\n"//
+                + "{ GRAPH <http://updated/test> {\n"//
+                + "  <http://acme.com/people/Mike> " //
+                + "       <http://acme.com/actions/likes> \"A new book\" ;\n"//
+                + "       <http://acme.com/actions/likes> \"Avocados\" .\n" + "} }";
+
+        log.info("Performing Query");
+
+        Update update = conn.prepareUpdate(QueryLanguage.SPARQL, query);
+        update.execute();
+
+         query = "select ?p ?o { GRAPH <http://updated/test> {<http://acme.com/people/Mike> ?p ?o . }}";
+         CountingResultHandler resultHandler = new CountingResultHandler();
+         TupleQuery tupleQuery = conn.prepareTupleQuery(QueryLanguage.SPARQL, query);
+         tupleQuery.evaluate(resultHandler);
+         log.info("Result count : " + resultHandler.getCount());
+        
+         Validate.isTrue(resultHandler.getCount() == 2);
+        
+         resultHandler.resetCount();
+        
+         // Delete Data
+         query = "DELETE DATA\n" //
+         + "{ GRAPH <http://updated/test> {\n"
+         + "  <http://acme.com/people/Mike> <http://acme.com/actions/likes> \"A new book\" ;\n"
+         + "   <http://acme.com/actions/likes> \"Avocados\" .\n" + "}}";
+        
+         update = conn.prepareUpdate(QueryLanguage.SPARQL, query);
+         update.execute();
+        
+         query = "select ?p ?o { GRAPH <http://updated/test> {<http://acme.com/people/Mike> ?p ?o . }}";
+         tupleQuery = conn.prepareTupleQuery(QueryLanguage.SPARQL, query);
+         tupleQuery.evaluate(resultHandler);
+         log.info("Result count : " + resultHandler.getCount());
+        
+         Validate.isTrue(resultHandler.getCount() == 0);
+    }
+
+    public static void testAddNamespaces(SailRepositoryConnection conn) throws MalformedQueryException, RepositoryException,
+    UpdateExecutionException, QueryEvaluationException, TupleQueryResultHandlerException {
+
+    	conn.setNamespace("rya", "http://rya.com");
+    	RepositoryResult<Namespace> results = conn.getNamespaces();
+    	for (Namespace space : results.asList()){
+    		System.out.println(space.getName() + ", " + space.getPrefix());
+    	}
+      }
+
+    public static void testAddAndDeleteNoContext(SailRepositoryConnection conn) throws MalformedQueryException, RepositoryException,
+    UpdateExecutionException, QueryEvaluationException, TupleQueryResultHandlerException {
+
+    	// Add data
+    	String query = "INSERT DATA\n"//
+    			+ "{ \n"//
+    			+ "  <http://acme.com/people/Mike> " //
+    			+ "       <http://acme.com/actions/likes> \"A new book\" ;\n"//
+    			+ "       <http://acme.com/actions/likes> \"Avocados\" .\n" + " }";
+
+    	log.info("Performing Query");
+
+    	Update update = conn.prepareUpdate(QueryLanguage.SPARQL, query);
+    	update.execute();
+
+    	query = "select ?p ?o {<http://acme.com/people/Mike> ?p ?o . }";
+    	CountingResultHandler resultHandler = new CountingResultHandler();
+    	TupleQuery tupleQuery = conn.prepareTupleQuery(QueryLanguage.SPARQL, query);
+    	tupleQuery.evaluate(resultHandler);
+    	log.info("Result count : " + resultHandler.getCount());
+
+    	Validate.isTrue(resultHandler.getCount() == 2);
+
+    	resultHandler.resetCount();
+
+    	// Delete Data
+    	query = "DELETE DATA\n" //
+    			+ "{ \n"
+    			+ "  <http://acme.com/people/Mike> <http://acme.com/actions/likes> \"A new book\" ;\n"
+    			+ "   <http://acme.com/actions/likes> \"Avocados\" .\n" + "}";
+
+    	update = conn.prepareUpdate(QueryLanguage.SPARQL, query);
+    	update.execute();
+
+    	query = "select ?p ?o { {<http://acme.com/people/Mike> ?p ?o . }}";
+    	tupleQuery = conn.prepareTupleQuery(QueryLanguage.SPARQL, query);
+    	tupleQuery.evaluate(resultHandler);
+    	log.info("Result count : " + resultHandler.getCount());
+
+    	Validate.isTrue(resultHandler.getCount() == 0);
+    }
+
+    private static class CountingResultHandler implements TupleQueryResultHandler {
+        private int count = 0;
+
+        public int getCount() {
+            return count;
+        }
+
+        public void resetCount() {
+            this.count = 0;
+        }
+
+        @Override
+        public void startQueryResult(List<String> arg0) throws TupleQueryResultHandlerException {
+        }
+
+        @Override
+        public void handleSolution(BindingSet arg0) throws TupleQueryResultHandlerException {
+            count++;
+        }
+
+        @Override
+        public void endQueryResult() throws TupleQueryResultHandlerException {
+        }
+
+        @Override
+        public void handleBoolean(boolean arg0) throws QueryResultHandlerException {
+            // no-op
+          
+        }
+
+        @Override
+        public void handleLinks(List<String> arg0) throws QueryResultHandlerException {
+            // no-op
+          
+        }
+    }
+}
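
Both EntityDirectExample and MongoRyaDirectExample define their own private
CountingResultHandler with the same counting logic. A shared top-level
version is sketched below; the standalone class and its placement are a
suggestion made for this note, not part of the commit. It keeps the solution
count and the binding-set width used by the entity example.

import java.util.List;

import org.openrdf.query.BindingSet;
import org.openrdf.query.QueryResultHandlerException;
import org.openrdf.query.TupleQueryResultHandler;
import org.openrdf.query.TupleQueryResultHandlerException;

/** Counts SPARQL solutions; could replace the two private copies above. */
public class CountingResultHandler implements TupleQueryResultHandler {
    private int count = 0;
    private int bindingSize = 0;
    private boolean bsSizeSet = false;

    public int getCount() { return count; }
    public int getBsSize() { return bindingSize; }

    public void resetCount() {
        count = 0;
        bindingSize = 0;
        bsSizeSet = false;
    }

    @Override
    public void startQueryResult(List<String> bindingNames) throws TupleQueryResultHandlerException { }

    @Override
    public void handleSolution(BindingSet bindingSet) throws TupleQueryResultHandlerException {
        count++;
        if (!bsSizeSet) {
            // Record the width of the first solution, as the entity example does.
            bindingSize = bindingSet.size();
            bsSizeSet = true;
        }
    }

    @Override
    public void endQueryResult() throws TupleQueryResultHandlerException { }

    @Override
    public void handleBoolean(boolean value) throws QueryResultHandlerException { }

    @Override
    public void handleLinks(List<String> linkUrls) throws QueryResultHandlerException { }
}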


[09/56] [abbrv] incubator-rya git commit: RYA-7 POM and License Clean-up for Apache Move

Posted by mi...@apache.org.
http://git-wip-us.apache.org/repos/asf/incubator-rya/blob/80faf06d/sail/rya.sail.impl/src/test/java/mvm/rya/rdftriplestore/evaluation/RdfCloudTripleStoreSelectivityEvaluationStatisticsTest.java
----------------------------------------------------------------------
diff --git a/sail/rya.sail.impl/src/test/java/mvm/rya/rdftriplestore/evaluation/RdfCloudTripleStoreSelectivityEvaluationStatisticsTest.java b/sail/rya.sail.impl/src/test/java/mvm/rya/rdftriplestore/evaluation/RdfCloudTripleStoreSelectivityEvaluationStatisticsTest.java
deleted file mode 100644
index 14d532e..0000000
--- a/sail/rya.sail.impl/src/test/java/mvm/rya/rdftriplestore/evaluation/RdfCloudTripleStoreSelectivityEvaluationStatisticsTest.java
+++ /dev/null
@@ -1,303 +0,0 @@
-package mvm.rya.rdftriplestore.evaluation;
-
-/*
- * #%L
- * mvm.rya.rya.sail.impl
- * %%
- * Copyright (C) 2014 - 2015 Rya
- * %%
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * You may obtain a copy of the License at
- * 
- *      http://www.apache.org/licenses/LICENSE-2.0
- * 
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- * #L%
- */
-
-import java.util.ArrayList;
-import java.util.Arrays;
-import java.util.List;
-import java.util.Map;
-import java.util.concurrent.TimeUnit;
-
-import mvm.rya.accumulo.AccumuloRdfConfiguration;
-import mvm.rya.api.RdfCloudTripleStoreConfiguration;
-import mvm.rya.api.layout.TablePrefixLayoutStrategy;
-import mvm.rya.api.persist.RdfEvalStatsDAO;
-import mvm.rya.joinselect.AccumuloSelectivityEvalDAO;
-import mvm.rya.prospector.service.ProspectorServiceEvalStatsDAO;
-
-import org.apache.accumulo.core.client.AccumuloException;
-import org.apache.accumulo.core.client.AccumuloSecurityException;
-import org.apache.accumulo.core.client.BatchWriter;
-import org.apache.accumulo.core.client.BatchWriterConfig;
-import org.apache.accumulo.core.client.Connector;
-import org.apache.accumulo.core.client.Instance;
-import org.apache.accumulo.core.client.Scanner;
-import org.apache.accumulo.core.client.TableExistsException;
-import org.apache.accumulo.core.client.TableNotFoundException;
-import org.apache.accumulo.core.client.mock.MockInstance;
-import org.apache.accumulo.core.client.security.tokens.PasswordToken;
-import org.apache.accumulo.core.data.Key;
-import org.apache.accumulo.core.data.Mutation;
-import org.apache.accumulo.core.data.Range;
-import org.apache.accumulo.core.data.Value;
-import org.apache.accumulo.core.security.Authorizations;
-import org.apache.hadoop.io.Text;
-import org.junit.Assert;
-import org.junit.Before;
-import org.junit.Test;
-import org.openrdf.query.MalformedQueryException;
-import org.openrdf.query.algebra.TupleExpr;
-import org.openrdf.query.parser.ParsedQuery;
-import org.openrdf.query.parser.sparql.SPARQLParser;
-
-public class RdfCloudTripleStoreSelectivityEvaluationStatisticsTest {
-
-    // TODO fix table names!!!
-
-    private static final String DELIM = "\u0000";
-    private final byte[] EMPTY_BYTE = new byte[0];
-    private final Value EMPTY_VAL = new Value(EMPTY_BYTE);
-
-    private String q1 = ""//
-            + "SELECT ?h  " //
-            + "{" //
-            + "  ?h <http://www.w3.org/2000/01/rdf-schema#label> <uri:dog> ."//
-            + "  ?h <uri:barksAt> <uri:cat> ."//
-            + "  ?h <uri:peesOn> <uri:hydrant> . "//
-            + "}";//
-
-    private Connector conn;
-    AccumuloRdfConfiguration arc;
-    BatchWriterConfig config;
-    Instance mock;
-
-    @Before
-    public void init() throws AccumuloException, AccumuloSecurityException, TableNotFoundException, TableExistsException {
-
-        mock = new MockInstance("accumulo");
-        PasswordToken pToken = new PasswordToken("pass".getBytes());
-        conn = mock.getConnector("user", pToken);
-
-        config = new BatchWriterConfig();
-        config.setMaxMemory(1000);
-        config.setMaxLatency(1000, TimeUnit.SECONDS);
-        config.setMaxWriteThreads(10);
-
-        if (conn.tableOperations().exists("rya_prospects")) {
-            conn.tableOperations().delete("rya_prospects");
-        }
-        if (conn.tableOperations().exists("rya_selectivity")) {
-            conn.tableOperations().delete("rya_selectivity");
-        }
-
-        arc = new AccumuloRdfConfiguration();
-        arc.setTableLayoutStrategy(new TablePrefixLayoutStrategy());
-        arc.setMaxRangesForScanner(300);
-
-    }
-
-    @Test
-    public void testOptimizeQ1() throws Exception {
-
-        RdfEvalStatsDAO<RdfCloudTripleStoreConfiguration> res = new ProspectorServiceEvalStatsDAO(conn, arc);
-        AccumuloSelectivityEvalDAO accc = new AccumuloSelectivityEvalDAO();
-        accc.setConf(arc);
-        accc.setRdfEvalDAO(res);
-        accc.setConnector(conn);
-        accc.init();
-
-        BatchWriter bw1 = conn.createBatchWriter("rya_prospects", config);
-        BatchWriter bw2 = conn.createBatchWriter("rya_selectivity", config);
-
-        String s1 = "predicateobject" + DELIM + "http://www.w3.org/2000/01/rdf-schema#label" + DELIM + "uri:dog";
-        String s2 = "predicateobject" + DELIM + "uri:barksAt" + DELIM + "uri:cat";
-        String s3 = "predicateobject" + DELIM + "uri:peesOn" + DELIM + "uri:hydrant";
-        List<Mutation> mList = new ArrayList<Mutation>();
-        List<Mutation> mList2 = new ArrayList<Mutation>();
-        List<String> sList = Arrays.asList("subjectobject", "subjectpredicate", "subjectsubject", "predicateobject", "predicatepredicate",
-                "predicatesubject");
-        Mutation m1, m2, m3, m4;
-
-        m1 = new Mutation(s1 + DELIM + "1");
-        m1.put(new Text("count"), new Text(""), new Value("1".getBytes()));
-        m2 = new Mutation(s2 + DELIM + "2");
-        m2.put(new Text("count"), new Text(""), new Value("2".getBytes()));
-        m3 = new Mutation(s3 + DELIM + "3");
-        m3.put(new Text("count"), new Text(""), new Value("3".getBytes()));
-        mList.add(m1);
-        mList.add(m2);
-        mList.add(m3);
-
-        bw1.addMutations(mList);
-        bw1.close();
-
-//        Scanner scan = conn.createScanner("rya_prospects", new Authorizations());
-//        scan.setRange(new Range());
-
-//        for (Map.Entry<Key, Value> entry : scan) {
-//            System.out.println("Key row string is " + entry.getKey().getRow().toString());
-//            System.out.println("Key is " + entry.getKey());
-//            System.out.println("Value is " + (new String(entry.getValue().get())));
-//        }
-
-        m1 = new Mutation(s1);
-        m2 = new Mutation(s2);
-        m3 = new Mutation(s3);
-        m4 = new Mutation(new Text("subjectpredicateobject" + DELIM + "FullTableCardinality"));
-        m4.put(new Text("FullTableCardinality"), new Text("100"), EMPTY_VAL);
-        int i = 2;
-        int j = 3;
-        int k = 4;
-        Long count1;
-        Long count2;
-        Long count3;
-
-        for (String s : sList) {
-            count1 = (long) i;
-            count2 = (long) j;
-            count3 = (long) k;
-            m1.put(new Text(s), new Text(count1.toString()), EMPTY_VAL);
-            m2.put(new Text(s), new Text(count2.toString()), EMPTY_VAL);
-            m3.put(new Text(s), new Text(count3.toString()), EMPTY_VAL);
-            i = 2 * i;
-            j = 2 * j;
-            k = 2 * k;
-        }
-        mList2.add(m1);
-        mList2.add(m2);
-        mList2.add(m3);
-        mList2.add(m4);
-        bw2.addMutations(mList2);
-        bw2.close();
-
-//        scan = conn.createScanner("rya_selectivity", new Authorizations());
-//        scan.setRange(new Range());
-
-//        for (Map.Entry<Key, Value> entry : scan) {
-//            System.out.println("Key row string is " + entry.getKey().getRow().toString());
-//            System.out.println("Key is " + entry.getKey());
-//            System.out.println("Value is " + (new String(entry.getKey().getColumnQualifier().toString())));
-//
-//        }
-
-        TupleExpr te = getTupleExpr(q1);
-        System.out.println(te);
-
-        RdfCloudTripleStoreSelectivityEvaluationStatistics ars = new RdfCloudTripleStoreSelectivityEvaluationStatistics(arc, res, accc);
-        double card = ars.getCardinality(te);
-
-        Assert.assertEquals(6.3136, card, .0001);
-
-    }
-
-    @Test
-    public void testOptimizeQ1ZeroCard() throws Exception {
-
-        RdfEvalStatsDAO<RdfCloudTripleStoreConfiguration> res = new ProspectorServiceEvalStatsDAO(conn, arc);
-        AccumuloSelectivityEvalDAO accc = new AccumuloSelectivityEvalDAO();
-        accc.setConf(arc);
-        accc.setConnector(conn);
-        accc.setRdfEvalDAO(res);
-        accc.init();
-
-        BatchWriter bw1 = conn.createBatchWriter("rya_prospects", config);
-        BatchWriter bw2 = conn.createBatchWriter("rya_selectivity", config);
-
-        String s1 = "predicateobject" + DELIM + "http://www.w3.org/2000/01/rdf-schema#label" + DELIM + "uri:dog";
-        String s2 = "predicateobject" + DELIM + "uri:barksAt" + DELIM + "uri:cat";
-        String s3 = "predicateobject" + DELIM + "uri:peesOn" + DELIM + "uri:hydrant";
-        List<Mutation> mList = new ArrayList<Mutation>();
-        List<Mutation> mList2 = new ArrayList<Mutation>();
-        List<String> sList = Arrays.asList("subjectobject", "subjectpredicate", "subjectsubject", "predicateobject", "predicatepredicate",
-                "predicatesubject");
-        Mutation m1, m2, m3, m4;
-
-        m1 = new Mutation(s1 + DELIM + "1");
-        m1.put(new Text("count"), new Text(""), new Value("1".getBytes()));
-        m2 = new Mutation(s2 + DELIM + "2");
-        m2.put(new Text("count"), new Text(""), new Value("2".getBytes()));
-        // m3 = new Mutation(s3 + DELIM + "3");
-        // m3.put(new Text("count"), new Text(""), new Value("3".getBytes()));
-        mList.add(m1);
-        mList.add(m2);
-        // mList.add(m3);
-
-        bw1.addMutations(mList);
-        bw1.close();
-
-//        Scanner scan = conn.createScanner("rya_prospects", new Authorizations());
-//        scan.setRange(new Range());
-
-//        for (Map.Entry<Key, Value> entry : scan) {
-//            System.out.println("Key row string is " + entry.getKey().getRow().toString());
-//            System.out.println("Key is " + entry.getKey());
-//            System.out.println("Value is " + (new String(entry.getValue().get())));
-//        }
-
-        m1 = new Mutation(s1);
-        m2 = new Mutation(s2);
-        m3 = new Mutation(s3);
-        m4 = new Mutation(new Text("subjectpredicateobject" + DELIM + "FullTableCardinality"));
-        m4.put(new Text("FullTableCardinality"), new Text("100"), EMPTY_VAL);
-        int i = 2;
-        int j = 3;
-        int k = 4;
-        Long count1;
-        Long count2;
-        Long count3;
-
-        for (String s : sList) {
-            count1 = (long) i;
-            count2 = (long) j;
-            count3 = (long) k;
-            m1.put(new Text(s), new Text(count1.toString()), EMPTY_VAL);
-            m2.put(new Text(s), new Text(count2.toString()), EMPTY_VAL);
-            m3.put(new Text(s), new Text(count3.toString()), EMPTY_VAL);
-            i = 2 * i;
-            j = 2 * j;
-            k = 2 * k;
-        }
-        mList2.add(m1);
-        mList2.add(m2);
-        mList2.add(m3);
-        mList2.add(m4);
-        bw2.addMutations(mList2);
-        bw2.close();
-
-//        scan = conn.createScanner("rya_selectivity", new Authorizations());
-//        scan.setRange(new Range());
-
-//        for (Map.Entry<Key, Value> entry : scan) {
-//            System.out.println("Key row string is " + entry.getKey().getRow().toString());
-//            System.out.println("Key is " + entry.getKey());
-//            System.out.println("Value is " + (new String(entry.getKey().getColumnQualifier().toString())));
-//
-//        }
-
-        TupleExpr te = getTupleExpr(q1);
-        System.out.println(te);
-
-        RdfCloudTripleStoreSelectivityEvaluationStatistics ars = new RdfCloudTripleStoreSelectivityEvaluationStatistics(arc, res, accc);
-        double card = ars.getCardinality(te);
-
-        Assert.assertEquals(4.04, card, .0001);
-
-    }
-
-    private TupleExpr getTupleExpr(String query) throws MalformedQueryException {
-
-        SPARQLParser sp = new SPARQLParser();
-        ParsedQuery pq = sp.parseQuery(query, null);
-
-        return pq.getTupleExpr();
-    }
-
-}

http://git-wip-us.apache.org/repos/asf/incubator-rya/blob/80faf06d/sail/rya.sail.impl/src/test/java/mvm/rya/triplestore/inference/SameAsTest.java
----------------------------------------------------------------------
diff --git a/sail/rya.sail.impl/src/test/java/mvm/rya/triplestore/inference/SameAsTest.java b/sail/rya.sail.impl/src/test/java/mvm/rya/triplestore/inference/SameAsTest.java
deleted file mode 100644
index 66ba33a..0000000
--- a/sail/rya.sail.impl/src/test/java/mvm/rya/triplestore/inference/SameAsTest.java
+++ /dev/null
@@ -1,114 +0,0 @@
-package mvm.rya.triplestore.inference;
-
-/*
- * #%L
- * mvm.rya.rya.sail.impl
- * %%
- * Copyright (C) 2014 Rya
- * %%
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * You may obtain a copy of the License at
- * 
- *      http://www.apache.org/licenses/LICENSE-2.0
- * 
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- * #L%
- */
-
-import info.aduna.iteration.Iterations;
-import junit.framework.TestCase;
-import mvm.rya.accumulo.AccumuloRdfConfiguration;
-import mvm.rya.accumulo.AccumuloRyaDAO;
-import mvm.rya.api.RdfCloudTripleStoreConstants;
-import mvm.rya.api.resolver.RdfToRyaConversions;
-import mvm.rya.rdftriplestore.RdfCloudTripleStore;
-import mvm.rya.rdftriplestore.inference.InferenceEngine;
-
-import org.apache.accumulo.core.Constants;
-import org.apache.accumulo.core.client.Connector;
-import org.apache.accumulo.core.client.admin.SecurityOperations;
-import org.apache.accumulo.core.client.mock.MockInstance;
-import org.apache.accumulo.core.security.Authorizations;
-import org.apache.accumulo.core.security.TablePermission;
-import org.junit.Test;
-import org.openrdf.model.Resource;
-import org.openrdf.model.Statement;
-import org.openrdf.model.URI;
-import org.openrdf.model.ValueFactory;
-import org.openrdf.model.impl.StatementImpl;
-import org.openrdf.model.impl.ValueFactoryImpl;
-
-public class SameAsTest extends TestCase {
-    private String user = "user";
-    private String pwd = "pwd";
-    private String instance = "myinstance";
-    private String tablePrefix = "t_";
-    private Authorizations auths = Constants.NO_AUTHS;
-    private Connector connector;
-    private AccumuloRyaDAO ryaDAO;
-    private ValueFactory vf = new ValueFactoryImpl();
-    private String namespace = "urn:test#";
-    private AccumuloRdfConfiguration conf;
-
-    @Override
-    public void setUp() throws Exception {
-        super.setUp();
-        connector = new MockInstance(instance).getConnector(user, pwd.getBytes());
-        connector.tableOperations().create(tablePrefix + RdfCloudTripleStoreConstants.TBL_SPO_SUFFIX);
-        connector.tableOperations().create(tablePrefix + RdfCloudTripleStoreConstants.TBL_PO_SUFFIX);
-        connector.tableOperations().create(tablePrefix + RdfCloudTripleStoreConstants.TBL_OSP_SUFFIX);
-        connector.tableOperations().create(tablePrefix + RdfCloudTripleStoreConstants.TBL_NS_SUFFIX);
-        SecurityOperations secOps = connector.securityOperations();
-        secOps.createUser(user, pwd.getBytes(), auths);
-        secOps.grantTablePermission(user, tablePrefix + RdfCloudTripleStoreConstants.TBL_SPO_SUFFIX, TablePermission.READ);
-        secOps.grantTablePermission(user, tablePrefix + RdfCloudTripleStoreConstants.TBL_PO_SUFFIX, TablePermission.READ);
-        secOps.grantTablePermission(user, tablePrefix + RdfCloudTripleStoreConstants.TBL_OSP_SUFFIX, TablePermission.READ);
-        secOps.grantTablePermission(user, tablePrefix + RdfCloudTripleStoreConstants.TBL_NS_SUFFIX, TablePermission.READ);
-
-        conf = new AccumuloRdfConfiguration();
-        ryaDAO = new AccumuloRyaDAO();
-        ryaDAO.setConnector(connector);
-        conf.setTablePrefix(tablePrefix);
-        ryaDAO.setConf(conf);
-        ryaDAO.init();
-    }
-
-    @Override
-    public void tearDown() throws Exception {
-        super.tearDown();
-        connector.tableOperations().delete(tablePrefix + RdfCloudTripleStoreConstants.TBL_SPO_SUFFIX);
-        connector.tableOperations().delete(tablePrefix + RdfCloudTripleStoreConstants.TBL_PO_SUFFIX);
-        connector.tableOperations().delete(tablePrefix + RdfCloudTripleStoreConstants.TBL_OSP_SUFFIX);
-        connector.tableOperations().delete(tablePrefix + RdfCloudTripleStoreConstants.TBL_NS_SUFFIX);
-    }
-
-    @Test
-    //This isn't a good test.  It's simply a cut-and-paste from a test that was failing in a different package in the SameAsVisitor.
-    public void testGraphConfiguration() throws Exception {
-        URI a = vf.createURI(namespace, "a");
-        Statement statement = new StatementImpl(a, vf.createURI(namespace, "p"), vf.createLiteral("l"));
-        Statement statement2 = new StatementImpl(a, vf.createURI(namespace, "p2"), vf.createLiteral("l"));
-        ryaDAO.add(RdfToRyaConversions.convertStatement(statement));
-        ryaDAO.add(RdfToRyaConversions.convertStatement(statement2));
-        ryaDAO.add(RdfToRyaConversions.convertStatement(new StatementImpl(vf.createURI(namespace, "b"), vf.createURI(namespace, "p"), vf.createLiteral("l"))));
-        ryaDAO.add(RdfToRyaConversions.convertStatement(new StatementImpl(vf.createURI(namespace, "c"), vf.createURI(namespace, "n"), vf.createLiteral("l"))));
-
-        // build a connection
-        RdfCloudTripleStore store = new RdfCloudTripleStore();
-        store.setConf(conf);
-        store.setRyaDAO(ryaDAO);
-
-        InferenceEngine inferenceEngine = new InferenceEngine();
-        inferenceEngine.setRyaDAO(ryaDAO);
-        store.setInferenceEngine(inferenceEngine);
-        
-        store.initialize();
-
-        System.out.println(Iterations.asList(store.getConnection().getStatements(a, vf.createURI(namespace, "p"), vf.createLiteral("l"), false, new Resource[0])).size());
-    }
-}

http://git-wip-us.apache.org/repos/asf/incubator-rya/blob/80faf06d/sail/rya.sail.impl/src/test/resources/cdrdf.xml
----------------------------------------------------------------------
diff --git a/sail/rya.sail.impl/src/test/resources/cdrdf.xml b/sail/rya.sail.impl/src/test/resources/cdrdf.xml
deleted file mode 100644
index 506b017..0000000
--- a/sail/rya.sail.impl/src/test/resources/cdrdf.xml
+++ /dev/null
@@ -1,40 +0,0 @@
-<?xml version="1.0"?>
-<!--
-  #%L
-  mvm.rya.rya.sail.impl
-  %%
-  Copyright (C) 2014 Rya
-  %%
-  Licensed under the Apache License, Version 2.0 (the "License");
-  you may not use this file except in compliance with the License.
-  You may obtain a copy of the License at
-  
-       http://www.apache.org/licenses/LICENSE-2.0
-  
-  Unless required by applicable law or agreed to in writing, software
-  distributed under the License is distributed on an "AS IS" BASIS,
-  WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-  See the License for the specific language governing permissions and
-  limitations under the License.
-  #L%
-  -->
-
-<rdf:RDF xmlns:rdf="http://www.w3.org/1999/02/22-rdf-syntax-ns#"
-	xmlns:cd="http://www.recshop.fake/cd#">
-
-	<rdf:Description rdf:about="http://www.recshop.fake/cd/Empire_Burlesque">
-		<cd:artist>Bob Dylan</cd:artist>
-		<cd:country>USA</cd:country>
-		<cd:company>Columbia</cd:company>
-		<cd:price>10.90</cd:price>
-		<cd:year>1985</cd:year>
-	</rdf:Description>
-
-	<rdf:Description rdf:about="http://www.recshop.fake/cd/Hide_your_fingers">
-		<cd:artist>Bonnie Tyler</cd:artist>
-		<cd:country>UK</cd:country>
-		<cd:company>CBS Records</cd:company>
-		<cd:price>9.90</cd:price>
-		<cd:year>1993</cd:year>
-	</rdf:Description>
-</rdf:RDF>
\ No newline at end of file

http://git-wip-us.apache.org/repos/asf/incubator-rya/blob/80faf06d/sail/rya.sail.impl/src/test/resources/namedgraphs.trig
----------------------------------------------------------------------
diff --git a/sail/rya.sail.impl/src/test/resources/namedgraphs.trig b/sail/rya.sail.impl/src/test/resources/namedgraphs.trig
deleted file mode 100644
index 748d276..0000000
--- a/sail/rya.sail.impl/src/test/resources/namedgraphs.trig
+++ /dev/null
@@ -1,37 +0,0 @@
-@prefix rdf: <http://www.w3.org/1999/02/22-rdf-syntax-ns#> .
-@prefix xsd: <http://www.w3.org/2001/XMLSchema#> .
-@prefix swp: <http://www.w3.org/2004/03/trix/swp-1/> .
-@prefix dc: <http://purl.org/dc/elements/1.1/> .
-@prefix ex: <http://www.example.org/vocabulary#> .
-@prefix : <http://www.example.org/exampleDocument#> .
-:G1 { :Monica ex:name "Monica Murphy" .
-      :Monica ex:homepage <http://www.monicamurphy.org> .
-      :Monica ex:email <ma...@monicamurphy.org> .
-      :Monica ex:one <ma...@monicamurphy.org> .
-      :Monica ex:two <ma...@monicamurphy.org> .
-      :Monica ex:three <ma...@monicamurphy.org> .
-      :Monica ex:four <ma...@monicamurphy.org> .
-      :Monica ex:five <ma...@monicamurphy.org> .
-      :Monica ex:six <ma...@monicamurphy.org> .
-      :Monica ex:seven <ma...@monicamurphy.org> .
-      :Monica ex:eight <ma...@monicamurphy.org> .
-      :Monica ex:nine <ma...@monicamurphy.org> .
-      :Monica ex:ten <ma...@monicamurphy.org> .
-      :Monica ex:hasSkill ex:Management }
-
-:G2 { :Monica rdf:type ex:Person .
-      :Monica ex:hasSkill ex:Programming }
-
-:G4 { :Phobe ex:name "Phobe Buffet" }
-
-:G3 { :G1 swp:assertedBy _:w1 .
-      _:w1 swp:authority :Chris .
-      _:w1 dc:date "2003-10-02"^^xsd:date .
-      :G2 swp:quotedBy _:w2 .
-      :G4 swp:assertedBy _:w2 .
-      _:w2 dc:date "2003-09-03"^^xsd:date .
-      _:w2 swp:authority :Tom .
-      :Chris rdf:type ex:Person .
-      :Chris ex:email <ma...@bizer.de>.
-      :Tom rdf:type ex:Person .
-      :Tom ex:email <ma...@bizer.de>}
\ No newline at end of file
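
For orientation, graphs :G1 through :G4 above are what GRAPH-scoped SPARQL
evaluates against. A minimal sketch of such a query over this data, assuming a
hypothetical open repository connection `conn` and result handler `handler`:

    // `conn` and `handler` are placeholders for an OpenRDF RepositoryConnection
    // and a TupleQueryResultHandler; the query text itself is illustrative.
    String q = "PREFIX ex: <http://www.example.org/vocabulary#>\n"
            + "PREFIX : <http://www.example.org/exampleDocument#>\n"
            + "SELECT ?name WHERE { GRAPH :G1 { :Monica ex:name ?name } }";
    TupleQuery tupleQuery = conn.prepareTupleQuery(QueryLanguage.SPARQL, q);
    tupleQuery.evaluate(handler); // one solution: ?name = "Monica Murphy"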

http://git-wip-us.apache.org/repos/asf/incubator-rya/blob/80faf06d/sail/rya.sail.impl/src/test/resources/ntriples
----------------------------------------------------------------------
diff --git a/sail/rya.sail.impl/src/test/resources/ntriples b/sail/rya.sail.impl/src/test/resources/ntriples
deleted file mode 100644
index edf1190..0000000
--- a/sail/rya.sail.impl/src/test/resources/ntriples
+++ /dev/null
@@ -1 +0,0 @@
-<urn:lubm:rdfts#GraduateStudent> <http://www.w3.org/2000/01/rdf-schema#subPropertyOf> <urn:lubm:rdfts#Student> .
\ No newline at end of file

http://git-wip-us.apache.org/repos/asf/incubator-rya/blob/80faf06d/sail/rya.sail.impl/src/test/resources/reification.xml
----------------------------------------------------------------------
diff --git a/sail/rya.sail.impl/src/test/resources/reification.xml b/sail/rya.sail.impl/src/test/resources/reification.xml
deleted file mode 100644
index 414800f..0000000
--- a/sail/rya.sail.impl/src/test/resources/reification.xml
+++ /dev/null
@@ -1,35 +0,0 @@
-<?xml version="1.0"?>
-<!--
-  #%L
-  mvm.rya.rya.sail.impl
-  %%
-  Copyright (C) 2014 Rya
-  %%
-  Licensed under the Apache License, Version 2.0 (the "License");
-  you may not use this file except in compliance with the License.
-  You may obtain a copy of the License at
-  
-       http://www.apache.org/licenses/LICENSE-2.0
-  
-  Unless required by applicable law or agreed to in writing, software
-  distributed under the License is distributed on an "AS IS" BASIS,
-  WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-  See the License for the specific language governing permissions and
-  limitations under the License.
-  #L%
-  -->
-
-<rdf:RDF xmlns:rdf="http://www.w3.org/1999/02/22-rdf-syntax-ns#"
-         xmlns:cd="http://www.recshop.fake/cd#"
-        xmlns:mm="http://mvm.com/owl/2010/10/mm.owl#">
-
-    <rdf:Description rdf:nodeID="A4">
-        <rdf:subject
-                rdf:resource="http://mvm.com/owl/2010/10/mm.owl#urn:mm:mvm:root/cimv2:Linux_Processor:0:CIM_ComputerSystem:nimbus02.bullpen.net"/>
-        <rdf:predicate rdf:resource="http://mvm.com/owl/2010/10/mm.owl#loadPercentage"/>
-        <rdf:object rdf:datatype="http://www.w3.org/2001/XMLSchema#int">1</rdf:object>
-        <rdf:type rdf:resource="http://www.w3.org/1999/02/22-rdf-syntax-ns#Statement"/>
-        <mm:reportedAt rdf:datatype="http://www.w3.org/2001/XMLSchema#dateTime">2011-01-07T21:29:45.545Z</mm:reportedAt>
-    </rdf:Description>
-
-</rdf:RDF>
\ No newline at end of file

http://git-wip-us.apache.org/repos/asf/incubator-rya/blob/80faf06d/sail/rya.sail.impl/src/test/resources/univ-bench.owl
----------------------------------------------------------------------
diff --git a/sail/rya.sail.impl/src/test/resources/univ-bench.owl b/sail/rya.sail.impl/src/test/resources/univ-bench.owl
deleted file mode 100644
index 691a330..0000000
--- a/sail/rya.sail.impl/src/test/resources/univ-bench.owl
+++ /dev/null
@@ -1,466 +0,0 @@
-<?xml version="1.0" encoding="UTF-8" ?>
-<rdf:RDF
-  xmlns="urn:lubm:rdfts#"
-  xml:base="urn:lubm:rdfts#"
-  xmlns:rdf="http://www.w3.org/1999/02/22-rdf-syntax-ns#"
-  xmlns:rdfs="http://www.w3.org/2000/01/rdf-schema#"
-  xmlns:owl="http://www.w3.org/2002/07/owl#"
->
-
-<owl:Class rdf:ID="AdministrativeStaff">
-  <rdfs:label>administrative staff worker</rdfs:label>
-  <rdfs:subPropertyOf rdf:resource="#Employee" />
-</owl:Class>
-
-<owl:Class rdf:ID="Article">
-  <rdfs:label>article</rdfs:label>
-  <rdfs:subPropertyOf rdf:resource="#Publication" />
-</owl:Class>
-
-<owl:Class rdf:ID="AssistantProfessor">
-  <rdfs:label>assistant professor</rdfs:label>
-  <rdfs:subPropertyOf rdf:resource="#Professor" />
-</owl:Class>
-
-<owl:Class rdf:ID="AssociateProfessor">
-  <rdfs:label>associate professor</rdfs:label>
-  <rdfs:subPropertyOf rdf:resource="#Professor" />
-</owl:Class>
-
-<owl:Class rdf:ID="Book">
-  <rdfs:label>book</rdfs:label>
-  <rdfs:subPropertyOf rdf:resource="#Publication" />
-</owl:Class>
-
-<owl:Class rdf:ID="Chair">
-  <rdfs:label>chair</rdfs:label>
-  <owl:intersectionOf rdf:parseType="Collection">
-  <owl:Class rdf:about="#Person" />
-  <owl:Restriction>
-  <owl:onProperty rdf:resource="#headOf" />
-  <owl:someValuesFrom>
-  <owl:Class rdf:about="#Department" />
-  </owl:someValuesFrom>
-  </owl:Restriction>
-  </owl:intersectionOf>
-  <rdfs:subPropertyOf rdf:resource="#Professor" />
-</owl:Class>
-
-<owl:Class rdf:ID="ClericalStaff">
-  <rdfs:label>clerical staff worker</rdfs:label>
-  <rdfs:subPropertyOf rdf:resource="#AdministrativeStaff" />
-</owl:Class>
-
-<owl:Class rdf:ID="College">
-  <rdfs:label>school</rdfs:label>
-  <rdfs:subPropertyOf rdf:resource="#Organization" />
-</owl:Class>
-
-<owl:Class rdf:ID="ConferencePaper">
-  <rdfs:label>conference paper</rdfs:label>
-  <rdfs:subPropertyOf rdf:resource="#Article" />
-</owl:Class>
-
-<owl:Class rdf:ID="Course">
-  <rdfs:label>teaching course</rdfs:label>
-  <rdfs:subPropertyOf rdf:resource="#Work" />
-</owl:Class>
-
-<owl:Class rdf:ID="Dean">
-  <rdfs:label>dean</rdfs:label>
-  <owl:intersectionOf rdf:parseType="Collection">
-  <owl:Class rdf:about="#Person" />
-  <owl:Restriction>
-  <owl:onProperty rdf:resource="#headOf" />
-  <owl:someValuesFrom>
-  <owl:Class rdf:about="#College" />
-  </owl:someValuesFrom>
-  </owl:Restriction>
-  </owl:intersectionOf>
-  <rdfs:subPropertyOf rdf:resource="#Professor" />
-</owl:Class>
-
-<owl:Class rdf:ID="Department">
-  <rdfs:label>university department</rdfs:label>
-  <rdfs:subPropertyOf rdf:resource="#Organization" />
-</owl:Class>
-
-<owl:Class rdf:ID="Director">
-  <rdfs:label>director</rdfs:label>
-  <owl:intersectionOf rdf:parseType="Collection">
-  <owl:Class rdf:about="#Person" />
-  <owl:Restriction>
-  <owl:onProperty rdf:resource="#headOf" />
-  <owl:someValuesFrom>
-  <owl:Class rdf:about="#Program" />
-  </owl:someValuesFrom>
-  </owl:Restriction>
-  </owl:intersectionOf>
-</owl:Class>
-
-<owl:Class rdf:ID="Employee">
-  <rdfs:label>Employee</rdfs:label>
-  <owl:intersectionOf rdf:parseType="Collection">
-  <owl:Class rdf:about="#Person" />
-  <owl:Restriction>
-  <owl:onProperty rdf:resource="#worksFor" />
-  <owl:someValuesFrom>
-  <owl:Class rdf:about="#Organization" />
-  </owl:someValuesFrom>
-  </owl:Restriction>
-  </owl:intersectionOf>
-</owl:Class>
-
-<owl:Class rdf:ID="Faculty">
-  <rdfs:label>faculty member</rdfs:label>
-  <rdfs:subPropertyOf rdf:resource="#Employee" />
-</owl:Class>
-
-<owl:Class rdf:ID="FullProfessor">
-  <rdfs:label>full professor</rdfs:label>
-  <rdfs:subPropertyOf rdf:resource="#Professor" />
-</owl:Class>
-
-<owl:Class rdf:ID="GraduateCourse">
-  <rdfs:label>Graduate Level Courses</rdfs:label>
-  <rdfs:subPropertyOf rdf:resource="#Course" />
-</owl:Class>
-
-<owl:Class rdf:ID="GraduateStudent">
-  <rdfs:label>graduate student</rdfs:label>
-  <rdfs:subPropertyOf rdf:resource="#Person" />
-  <rdfs:subPropertyOf>
-  <owl:Restriction>
-  <owl:onProperty rdf:resource="#takesCourse" />
-  <owl:someValuesFrom>
-  <owl:Class rdf:about="#GraduateCourse" />
-  </owl:someValuesFrom>
-  </owl:Restriction>
-  </rdfs:subPropertyOf>
-</owl:Class>
-
-<owl:Class rdf:ID="Institute">
-  <rdfs:label>institute</rdfs:label>
-  <rdfs:subPropertyOf rdf:resource="#Organization" />
-</owl:Class>
-
-<owl:Class rdf:ID="JournalArticle">
-  <rdfs:label>journal article</rdfs:label>
-  <rdfs:subPropertyOf rdf:resource="#Article" />
-</owl:Class>
-
-<owl:Class rdf:ID="Lecturer">
-  <rdfs:label>lecturer</rdfs:label>
-  <rdfs:subPropertyOf rdf:resource="#Faculty" />
-</owl:Class>
-
-<owl:Class rdf:ID="Manual">
-  <rdfs:label>manual</rdfs:label>
-  <rdfs:subPropertyOf rdf:resource="#Publication" />
-</owl:Class>
-
-<owl:Class rdf:ID="Organization">
-  <rdfs:label>organization</rdfs:label>
-</owl:Class>
-
-<owl:Class rdf:ID="Person">
-  <rdfs:label>person</rdfs:label>
-</owl:Class>
-
-<owl:Class rdf:ID="PostDoc">
-  <rdfs:label>post doctorate</rdfs:label>
-  <rdfs:subPropertyOf rdf:resource="#Faculty" />
-</owl:Class>
-
-<owl:Class rdf:ID="Professor">
-  <rdfs:label>professor</rdfs:label>
-  <rdfs:subPropertyOf rdf:resource="#Faculty" />
-</owl:Class>
-
-<owl:Class rdf:ID="Program">
-  <rdfs:label>program</rdfs:label>
-  <rdfs:subPropertyOf rdf:resource="#Organization" />
-</owl:Class>
-
-<owl:Class rdf:ID="Publication">
-  <rdfs:label>publication</rdfs:label>
-</owl:Class>
-
-<owl:Class rdf:ID="Research">
-  <rdfs:label>research work</rdfs:label>
-  <rdfs:subPropertyOf rdf:resource="#Work" />
-</owl:Class>
-
-<owl:Class rdf:ID="ResearchAssistant">
-  <rdfs:label>university research assistant</rdfs:label>
-  <rdfs:subPropertyOf rdf:resource="#Student" />
-  <rdfs:subPropertyOf>
-  <owl:Restriction>
-  <owl:onProperty rdf:resource="#worksFor" />
-  <owl:someValuesFrom>
-  <owl:Class rdf:about="#ResearchGroup" />
-  </owl:someValuesFrom>
-  </owl:Restriction>
-  </rdfs:subPropertyOf>
-</owl:Class>
-
-<owl:Class rdf:ID="ResearchGroup">
-  <rdfs:label>research group</rdfs:label>
-  <rdfs:subPropertyOf rdf:resource="#Organization" />
-</owl:Class>
-
-<owl:Class rdf:ID="Schedule">
-  <rdfs:label>schedule</rdfs:label>
-</owl:Class>
-
-<owl:Class rdf:ID="Software">
-  <rdfs:label>software program</rdfs:label>
-  <rdfs:subPropertyOf rdf:resource="#Publication" />
-</owl:Class>
-
-<owl:Class rdf:ID="Specification">
-  <rdfs:label>published specification</rdfs:label>
-  <rdfs:subPropertyOf rdf:resource="#Publication" />
-</owl:Class>
-
-<owl:Class rdf:ID="Student">
-  <rdfs:label>student</rdfs:label>
-  <owl:intersectionOf rdf:parseType="Collection">
-  <owl:Class rdf:about="#Person" />
-  <owl:Restriction>
-  <owl:onProperty rdf:resource="#takesCourse" />
-  <owl:someValuesFrom>
-  <owl:Class rdf:about="#Course" />
-  </owl:someValuesFrom>
-  </owl:Restriction>
-  </owl:intersectionOf>
-</owl:Class>
-
-<owl:Class rdf:ID="SystemsStaff">
-  <rdfs:label>systems staff worker</rdfs:label>
-  <rdfs:subPropertyOf rdf:resource="#AdministrativeStaff" />
-</owl:Class>
-
-<owl:Class rdf:ID="TeachingAssistant">
-  <rdfs:label>university teaching assistant</rdfs:label>
-  <owl:intersectionOf rdf:parseType="Collection">
-  <owl:Class rdf:about="#Person" />
-  <owl:Restriction>
-  <owl:onProperty rdf:resource="#teachingAssistantOf" />
-  <owl:someValuesFrom>
-  <owl:Class rdf:about="#Course" />
-  </owl:someValuesFrom>
-  </owl:Restriction>
-  </owl:intersectionOf>
-</owl:Class>
-
-<owl:Class rdf:ID="TechnicalReport">
-  <rdfs:label>technical report</rdfs:label>
-  <rdfs:subPropertyOf rdf:resource="#Article" />
-</owl:Class>
-
-<owl:Class rdf:ID="UndergraduateStudent">
-  <rdfs:label>undergraduate student</rdfs:label>
-  <rdfs:subPropertyOf rdf:resource="#Student" />
-</owl:Class>
-
-<owl:Class rdf:ID="University">
-  <rdfs:label>university</rdfs:label>
-  <rdfs:subPropertyOf rdf:resource="#Organization" />
-</owl:Class>
-
-<owl:Class rdf:ID="UnofficialPublication">
-  <rdfs:label>unofficial publication</rdfs:label>
-  <rdfs:subPropertyOf rdf:resource="#Publication" />
-</owl:Class>
-
-<owl:Class rdf:ID="VisitingProfessor">
-  <rdfs:label>visiting professor</rdfs:label>
-  <rdfs:subPropertyOf rdf:resource="#Professor" />
-</owl:Class>
-
-<owl:Class rdf:ID="Work">
-  <rdfs:label>Work</rdfs:label>
-</owl:Class>
-
-<owl:ObjectProperty rdf:ID="advisor">
-  <rdfs:label>is being advised by</rdfs:label>
-  <rdfs:domain rdf:resource="#Person" />
-  <rdfs:range rdf:resource="#Professor" />
-</owl:ObjectProperty>
-
-<owl:ObjectProperty rdf:ID="affiliatedOrganizationOf">
-  <rdfs:label>is affiliated with</rdfs:label>
-  <rdfs:domain rdf:resource="#Organization" />
-  <rdfs:range rdf:resource="#Organization" />
-</owl:ObjectProperty>
-
-<owl:ObjectProperty rdf:ID="affiliateOf">
-  <rdfs:label>is affiliated with</rdfs:label>
-  <rdfs:domain rdf:resource="#Organization" />
-  <rdfs:range rdf:resource="#Person" />
-</owl:ObjectProperty>
-
-<owl:DatatypeProperty rdf:ID="age">
-  <rdfs:label>is age</rdfs:label>
-  <rdfs:domain rdf:resource="#Person" />
-</owl:DatatypeProperty>
-
-<owl:ObjectProperty rdf:ID="degreeFrom">
-  <rdfs:label>has a degree from</rdfs:label>
-  <rdfs:domain rdf:resource="#Person" />
-  <rdfs:range rdf:resource="#University" />
-  <owl:inverseOf rdf:resource="#hasAlumnus"/>
-</owl:ObjectProperty>
-
-<owl:ObjectProperty rdf:ID="doctoralDegreeFrom">
-  <rdfs:label>has a doctoral degree from</rdfs:label>
-  <rdfs:domain rdf:resource="#Person" />
-  <rdfs:range rdf:resource="#University" />
-  <rdfs:subPropertyOf rdf:resource="#degreeFrom" />
-</owl:ObjectProperty>
-
-<owl:DatatypeProperty rdf:ID="emailAddress">
-  <rdfs:label>can be reached at</rdfs:label>
-  <rdfs:domain rdf:resource="#Person" />
-</owl:DatatypeProperty>
-
-<owl:ObjectProperty rdf:ID="hasAlumnus">
-  <rdfs:label>has as an alumnus</rdfs:label>
-  <rdfs:domain rdf:resource="#University" />
-  <rdfs:range rdf:resource="#Person" />
-  <owl:inverseOf rdf:resource="#degreeFrom"/>
-</owl:ObjectProperty>
-
-<owl:ObjectProperty rdf:ID="headOf">
-  <rdfs:label>is the head of</rdfs:label>
-  <rdfs:subPropertyOf rdf:resource="#worksFor"/>
-</owl:ObjectProperty>
-
-<owl:ObjectProperty rdf:ID="listedCourse">
-  <rdfs:label>lists as a course</rdfs:label>
-  <rdfs:domain rdf:resource="#Schedule" />
-  <rdfs:range rdf:resource="#Course" />
-</owl:ObjectProperty>
-
-<owl:ObjectProperty rdf:ID="mastersDegreeFrom">
-  <rdfs:label>has a masters degree from</rdfs:label>
-  <rdfs:domain rdf:resource="#Person" />
-  <rdfs:range rdf:resource="#University" />
-  <rdfs:subPropertyOf rdf:resource="#degreeFrom"/>
-</owl:ObjectProperty>
-
-<owl:ObjectProperty rdf:ID="member">
-  <rdfs:label>has as a member</rdfs:label>
-  <rdfs:domain rdf:resource="#Organization" />
-  <rdfs:range rdf:resource="#Person" />
-</owl:ObjectProperty>
-
-<owl:ObjectProperty rdf:ID="memberOf">
-<rdfs:label>member of</rdfs:label>
-<owl:inverseOf rdf:resource="#member" />
-</owl:ObjectProperty>
-
-<owl:DatatypeProperty rdf:ID="name">
-<rdfs:label>name</rdfs:label>
-</owl:DatatypeProperty>
-
-<owl:DatatypeProperty rdf:ID="officeNumber">
-  <rdfs:label>office room No.</rdfs:label>
-</owl:DatatypeProperty>
-
-<owl:ObjectProperty rdf:ID="orgPublication">
-  <rdfs:label>publishes</rdfs:label>
-  <rdfs:domain rdf:resource="#Organization" />
-  <rdfs:range rdf:resource="#Publication" />
-</owl:ObjectProperty>
-
-<owl:ObjectProperty rdf:ID="publicationAuthor">
-  <rdfs:label>was written by</rdfs:label>
-  <rdfs:domain rdf:resource="#Publication" />
-  <rdfs:range rdf:resource="#Person" />
-</owl:ObjectProperty>
-
-<owl:ObjectProperty rdf:ID="publicationDate">
-  <rdfs:label>was written on</rdfs:label>
-  <rdfs:domain rdf:resource="#Publication" />
-</owl:ObjectProperty>
-
-<owl:ObjectProperty rdf:ID="publicationResearch">
-  <rdfs:label>is about</rdfs:label>
-  <rdfs:domain rdf:resource="#Publication" />
-  <rdfs:range rdf:resource="#Research" />
-</owl:ObjectProperty>
-
-<owl:DatatypeProperty rdf:ID="researchInterest">
-  <rdfs:label>is researching</rdfs:label>
-</owl:DatatypeProperty>
-
-<owl:ObjectProperty rdf:ID="researchProject">
-  <rdfs:label>has as a research project</rdfs:label>
-  <rdfs:domain rdf:resource="#ResearchGroup" />
-  <rdfs:range rdf:resource="#Research" />
-</owl:ObjectProperty>
-
-<owl:ObjectProperty rdf:ID="softwareDocumentation">
-  <rdfs:label>is documented in</rdfs:label>
-  <rdfs:domain rdf:resource="#Software" />
-  <rdfs:range rdf:resource="#Publication" />
-</owl:ObjectProperty>
-
-<owl:ObjectProperty rdf:ID="softwareVersion">
-  <rdfs:label>is version</rdfs:label>
-  <rdfs:domain rdf:resource="#Software" />
-</owl:ObjectProperty>
-
-<owl:TransitiveProperty rdf:ID="subOrganizationOf">
-  <rdfs:label>is part of</rdfs:label>
-  <rdfs:domain rdf:resource="#Organization" />
-  <rdfs:range rdf:resource="#Organization" />
-</owl:TransitiveProperty>
-
-<owl:ObjectProperty rdf:ID="takesCourse">
-  <rdfs:label>is taking</rdfs:label>
-</owl:ObjectProperty>
-
-<owl:ObjectProperty rdf:ID="teacherOf">
-  <rdfs:label>teaches</rdfs:label>
-  <rdfs:domain rdf:resource="#Faculty" />
-  <rdfs:range rdf:resource="#Course" />
-</owl:ObjectProperty>
-
-<owl:ObjectProperty rdf:ID="teachingAssistantOf">
-  <rdfs:label>is a teaching assistant for</rdfs:label>
-  <rdfs:domain rdf:resource="#TeachingAssistant" />
-  <rdfs:range rdf:resource="#Course" />
-</owl:ObjectProperty>
-
-<owl:DatatypeProperty rdf:ID="telephone">
-  <rdfs:label>telephone number</rdfs:label>
-  <rdfs:domain rdf:resource="#Person" />
-</owl:DatatypeProperty>
-
-<owl:ObjectProperty rdf:ID="tenured">
-  <rdfs:label>is tenured:</rdfs:label>
-  <rdfs:domain rdf:resource="#Professor" />
-</owl:ObjectProperty>
-
-<owl:DatatypeProperty rdf:ID="title">
-  <rdfs:label>title</rdfs:label>
-  <rdfs:domain rdf:resource="#Person" />
-</owl:DatatypeProperty>
-
-<owl:ObjectProperty rdf:ID="undergraduateDegreeFrom">
-  <rdfs:label>has an undergraduate degree from</rdfs:label>
-  <rdfs:domain rdf:resource="#Person" />
-  <rdfs:range rdf:resource="#University" />
-  <rdfs:subPropertyOf rdf:resource="#degreeFrom"/>
-</owl:ObjectProperty>
-
-<owl:ObjectProperty rdf:ID="worksFor">
-  <rdfs:label>Works For</rdfs:label>
-  <rdfs:subPropertyOf rdf:resource="#memberOf" />
-</owl:ObjectProperty>
-
-</rdf:RDF>
-

http://git-wip-us.apache.org/repos/asf/incubator-rya/blob/80faf06d/sail/src/main/java/mvm/rya/rdftriplestore/RdfCloudTripleStore.java
----------------------------------------------------------------------
diff --git a/sail/src/main/java/mvm/rya/rdftriplestore/RdfCloudTripleStore.java b/sail/src/main/java/mvm/rya/rdftriplestore/RdfCloudTripleStore.java
new file mode 100644
index 0000000..4fcc726
--- /dev/null
+++ b/sail/src/main/java/mvm/rya/rdftriplestore/RdfCloudTripleStore.java
@@ -0,0 +1,179 @@
+package mvm.rya.rdftriplestore;
+
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ * 
+ *   http://www.apache.org/licenses/LICENSE-2.0
+ * 
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied.  See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+
+
+
+import mvm.rya.api.RdfCloudTripleStoreConfiguration;
+import mvm.rya.api.persist.RdfEvalStatsDAO;
+import mvm.rya.api.persist.RyaDAO;
+import mvm.rya.api.persist.RyaDAOException;
+import mvm.rya.api.persist.joinselect.SelectivityEvalDAO;
+import mvm.rya.rdftriplestore.inference.InferenceEngine;
+import mvm.rya.rdftriplestore.namespace.NamespaceManager;
+import mvm.rya.rdftriplestore.provenance.ProvenanceCollector;
+
+import org.openrdf.model.ValueFactory;
+import org.openrdf.model.impl.ValueFactoryImpl;
+import org.openrdf.sail.SailConnection;
+import org.openrdf.sail.SailException;
+import org.openrdf.sail.helpers.SailBase;
+
+import static com.google.common.base.Preconditions.checkNotNull;
+
+public class RdfCloudTripleStore extends SailBase {
+
+    private RdfCloudTripleStoreConfiguration conf;
+
+    protected RyaDAO ryaDAO;
+    protected InferenceEngine inferenceEngine;
+    protected RdfEvalStatsDAO rdfEvalStatsDAO;
+    protected SelectivityEvalDAO selectEvalDAO;
+    private NamespaceManager namespaceManager;
+    protected ProvenanceCollector provenanceCollector;
+
+    private ValueFactory vf = new ValueFactoryImpl();
+
+    @Override
+    protected SailConnection getConnectionInternal() throws SailException {
+        return new RdfCloudTripleStoreConnection(this, conf, vf);
+    }
+
+    @Override
+    protected void initializeInternal() throws SailException {
+        checkNotNull(ryaDAO);
+
+        if (this.conf == null) {
+            this.conf = ryaDAO.getConf();
+        }
+
+        checkNotNull(this.conf);
+
+        try {
+            if (!ryaDAO.isInitialized()) {
+                ryaDAO.setConf(this.conf);
+                ryaDAO.init();
+            }
+        } catch (RyaDAOException e) {
+            throw new SailException(e);
+        }
+
+        if (rdfEvalStatsDAO != null && !rdfEvalStatsDAO.isInitialized()) {
+            rdfEvalStatsDAO.setConf(this.conf);
+            rdfEvalStatsDAO.init();
+        }
+
+        //TODO: Support inferencing with ryadao
+//        if (inferenceEngine != null && !inferenceEngine.isInitialized()) {
+//            inferenceEngine.setConf(this.conf);
+//            inferenceEngine.setRyaDAO(ryaDAO);
+//            inferenceEngine.init();
+//        }
+
+        if (namespaceManager == null) {
+            this.namespaceManager = new NamespaceManager(ryaDAO, this.conf);
+        }
+    }
+
+    @Override
+    protected void shutDownInternal() throws SailException {
+        try {
+            if (namespaceManager != null) {
+                namespaceManager.shutdown();
+            }
+            if (inferenceEngine != null) {
+                inferenceEngine.destroy();
+            }
+            if (rdfEvalStatsDAO != null) {
+                rdfEvalStatsDAO.destroy();
+            }
+            ryaDAO.destroy();
+        } catch (Exception e) {
+            throw new SailException(e);
+        }
+    }
+
+    @Override
+    public ValueFactory getValueFactory() {
+        return vf;
+    }
+
+    @Override
+    public boolean isWritable() throws SailException {
+        return true;
+    }
+
+    public RdfCloudTripleStoreConfiguration getConf() {
+        return conf;
+    }
+
+    public void setConf(RdfCloudTripleStoreConfiguration conf) {
+        this.conf = conf;
+    }
+
+    public RdfEvalStatsDAO getRdfEvalStatsDAO() {
+        return rdfEvalStatsDAO;
+    }
+
+    public void setRdfEvalStatsDAO(RdfEvalStatsDAO rdfEvalStatsDAO) {
+        this.rdfEvalStatsDAO = rdfEvalStatsDAO;
+    }
+    
+    public SelectivityEvalDAO getSelectEvalDAO() {
+        return selectEvalDAO;
+    }
+    
+    public void setSelectEvalDAO(SelectivityEvalDAO selectEvalDAO) {
+        this.selectEvalDAO = selectEvalDAO;
+    }
+
+    public RyaDAO getRyaDAO() {
+        return ryaDAO;
+    }
+
+    public void setRyaDAO(RyaDAO ryaDAO) {
+        this.ryaDAO = ryaDAO;
+    }
+
+    public InferenceEngine getInferenceEngine() {
+        return inferenceEngine;
+    }
+
+    public void setInferenceEngine(InferenceEngine inferenceEngine) {
+        this.inferenceEngine = inferenceEngine;
+    }
+
+    public NamespaceManager getNamespaceManager() {
+        return namespaceManager;
+    }
+
+    public void setNamespaceManager(NamespaceManager namespaceManager) {
+        this.namespaceManager = namespaceManager;
+    }
+
+    public ProvenanceCollector getProvenanceCollector() {
+        return provenanceCollector;
+    }
+
+    public void setProvenanceCollector(ProvenanceCollector provenanceCollector) {
+        this.provenanceCollector = provenanceCollector;
+    }
+
+}
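
The lifecycle above mirrors the wiring used in SameAsTest earlier in this patch:
a RyaDAO is required (initializeInternal checks for it), the inference engine is
optional, and a NamespaceManager is created during initialize() if none was
supplied. A minimal sketch under those assumptions, with `conf` and an
initialized `ryaDAO` assumed to be configured elsewhere:

    // `conf` and `ryaDAO` are placeholders for an RdfCloudTripleStoreConfiguration
    // and an initialized RyaDAO implementation (e.g. an AccumuloRyaDAO).
    RdfCloudTripleStore store = new RdfCloudTripleStore();
    store.setConf(conf);
    store.setRyaDAO(ryaDAO);

    InferenceEngine inferenceEngine = new InferenceEngine();
    inferenceEngine.setRyaDAO(ryaDAO);
    store.setInferenceEngine(inferenceEngine);

    store.initialize();                        // also creates a NamespaceManager
    SailConnection sailConn = store.getConnection();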

http://git-wip-us.apache.org/repos/asf/incubator-rya/blob/80faf06d/sail/src/main/java/mvm/rya/rdftriplestore/RdfCloudTripleStoreConnection.java
----------------------------------------------------------------------
diff --git a/sail/src/main/java/mvm/rya/rdftriplestore/RdfCloudTripleStoreConnection.java b/sail/src/main/java/mvm/rya/rdftriplestore/RdfCloudTripleStoreConnection.java
new file mode 100644
index 0000000..24ec109
--- /dev/null
+++ b/sail/src/main/java/mvm/rya/rdftriplestore/RdfCloudTripleStoreConnection.java
@@ -0,0 +1,623 @@
+package mvm.rya.rdftriplestore;
+
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ * 
+ *   http://www.apache.org/licenses/LICENSE-2.0
+ * 
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied.  See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+
+
+
+import static com.google.common.base.Preconditions.checkArgument;
+import static com.google.common.base.Preconditions.checkNotNull;
+import info.aduna.iteration.CloseableIteration;
+
+import java.lang.reflect.Constructor;
+import java.util.Collection;
+import java.util.List;
+import java.util.Map;
+import java.util.Map.Entry;
+import java.util.NoSuchElementException;
+
+import mvm.rya.api.RdfCloudTripleStoreConfiguration;
+import mvm.rya.api.RdfCloudTripleStoreConstants;
+import mvm.rya.api.domain.RyaStatement;
+import mvm.rya.api.domain.RyaURI;
+import mvm.rya.api.persist.RdfEvalStatsDAO;
+import mvm.rya.api.persist.RyaDAO;
+import mvm.rya.api.persist.RyaDAOException;
+import mvm.rya.api.persist.joinselect.SelectivityEvalDAO;
+import mvm.rya.api.persist.utils.RyaDAOHelper;
+import mvm.rya.api.resolver.RdfToRyaConversions;
+import mvm.rya.rdftriplestore.evaluation.FilterRangeVisitor;
+import mvm.rya.rdftriplestore.evaluation.ParallelEvaluationStrategyImpl;
+import mvm.rya.rdftriplestore.evaluation.QueryJoinSelectOptimizer;
+import mvm.rya.rdftriplestore.evaluation.RdfCloudTripleStoreEvaluationStatistics;
+import mvm.rya.rdftriplestore.evaluation.RdfCloudTripleStoreSelectivityEvaluationStatistics;
+import mvm.rya.rdftriplestore.evaluation.SeparateFilterJoinsVisitor;
+import mvm.rya.rdftriplestore.inference.InferenceEngine;
+import mvm.rya.rdftriplestore.inference.InverseOfVisitor;
+import mvm.rya.rdftriplestore.inference.SameAsVisitor;
+import mvm.rya.rdftriplestore.inference.SubClassOfVisitor;
+import mvm.rya.rdftriplestore.inference.SubPropertyOfVisitor;
+import mvm.rya.rdftriplestore.inference.SymmetricPropertyVisitor;
+import mvm.rya.rdftriplestore.inference.TransitivePropertyVisitor;
+import mvm.rya.rdftriplestore.namespace.NamespaceManager;
+import mvm.rya.rdftriplestore.provenance.ProvenanceCollectionException;
+import mvm.rya.rdftriplestore.provenance.ProvenanceCollector;
+import mvm.rya.rdftriplestore.utils.DefaultStatistics;
+
+import org.apache.hadoop.conf.Configurable;
+import org.openrdf.model.Namespace;
+import org.openrdf.model.Resource;
+import org.openrdf.model.Statement;
+import org.openrdf.model.URI;
+import org.openrdf.model.Value;
+import org.openrdf.model.ValueFactory;
+import org.openrdf.model.impl.ContextStatementImpl;
+import org.openrdf.model.impl.StatementImpl;
+import org.openrdf.query.Binding;
+import org.openrdf.query.BindingSet;
+import org.openrdf.query.Dataset;
+import org.openrdf.query.QueryEvaluationException;
+import org.openrdf.query.algebra.QueryRoot;
+import org.openrdf.query.algebra.StatementPattern;
+import org.openrdf.query.algebra.TupleExpr;
+import org.openrdf.query.algebra.Var;
+import org.openrdf.query.algebra.evaluation.EvaluationStrategy;
+import org.openrdf.query.algebra.evaluation.QueryBindingSet;
+import org.openrdf.query.algebra.evaluation.QueryOptimizer;
+import org.openrdf.query.algebra.evaluation.TripleSource;
+import org.openrdf.query.algebra.evaluation.impl.BindingAssigner;
+import org.openrdf.query.algebra.evaluation.impl.CompareOptimizer;
+import org.openrdf.query.algebra.evaluation.impl.ConjunctiveConstraintSplitter;
+import org.openrdf.query.algebra.evaluation.impl.ConstantOptimizer;
+import org.openrdf.query.algebra.evaluation.impl.DisjunctiveConstraintOptimizer;
+import org.openrdf.query.algebra.evaluation.impl.EvaluationStatistics;
+import org.openrdf.query.algebra.evaluation.impl.FilterOptimizer;
+import org.openrdf.query.algebra.evaluation.impl.IterativeEvaluationOptimizer;
+import org.openrdf.query.algebra.evaluation.impl.OrderLimitOptimizer;
+import org.openrdf.query.algebra.evaluation.impl.QueryModelNormalizer;
+import org.openrdf.query.algebra.evaluation.impl.SameTermFilterOptimizer;
+import org.openrdf.query.impl.EmptyBindingSet;
+import org.openrdf.sail.SailException;
+import org.openrdf.sail.helpers.SailConnectionBase;
+
+public class RdfCloudTripleStoreConnection extends SailConnectionBase {
+
+    private RdfCloudTripleStore store;
+
+    private RdfEvalStatsDAO rdfEvalStatsDAO;
+    private SelectivityEvalDAO selectEvalDAO;
+    private RyaDAO ryaDAO;
+    private InferenceEngine inferenceEngine;
+    private NamespaceManager namespaceManager;
+    private RdfCloudTripleStoreConfiguration conf;
+
+    private ProvenanceCollector provenanceCollector;
+
+    public RdfCloudTripleStoreConnection(RdfCloudTripleStore sailBase, RdfCloudTripleStoreConfiguration conf, ValueFactory vf)
+            throws SailException {
+        super(sailBase);
+        this.store = sailBase;
+        this.conf = conf;
+        initialize();
+    }
+
+    protected void initialize() throws SailException {
+        refreshConnection();
+    }
+
+    protected void refreshConnection() throws SailException {
+        try {
+            checkNotNull(store.getRyaDAO());
+            checkArgument(store.getRyaDAO().isInitialized());
+            checkNotNull(store.getNamespaceManager());
+
+            this.ryaDAO = store.getRyaDAO();
+            this.rdfEvalStatsDAO = store.getRdfEvalStatsDAO();
+            this.selectEvalDAO = store.getSelectEvalDAO();
+            this.inferenceEngine = store.getInferenceEngine();
+            this.namespaceManager = store.getNamespaceManager();
+            this.provenanceCollector = store.getProvenanceCollector();
+
+        } catch (Exception e) {
+            throw new SailException(e);
+        }
+    }
+
+    @Override
+    protected void addStatementInternal(Resource subject, URI predicate,
+                                        Value object, Resource... contexts) throws SailException {
+        try {
+            String cv_s = conf.getCv();
+            byte[] cv = cv_s == null ? null : cv_s.getBytes();
+            if (contexts != null && contexts.length > 0) {
+                for (Resource context : contexts) {
+                    RyaStatement statement = new RyaStatement(
+                            RdfToRyaConversions.convertResource(subject),
+                            RdfToRyaConversions.convertURI(predicate),
+                            RdfToRyaConversions.convertValue(object),
+                            RdfToRyaConversions.convertResource(context),
+                            null, cv);
+
+                    ryaDAO.add(statement);
+                }
+            } else {
+                RyaStatement statement = new RyaStatement(
+                        RdfToRyaConversions.convertResource(subject),
+                        RdfToRyaConversions.convertURI(predicate),
+                        RdfToRyaConversions.convertValue(object),
+                        null, null, cv);
+
+                ryaDAO.add(statement);
+            }
+        } catch (RyaDAOException e) {
+            throw new SailException(e);
+        }
+    }
+
+    @Override
+    protected void clearInternal(Resource... aresource) throws SailException {
+        try {
+            RyaURI[] graphs = new RyaURI[aresource.length];
+            for (int i = 0 ; i < graphs.length ; i++){
+                graphs[i] = RdfToRyaConversions.convertResource(aresource[i]);
+            }
+            ryaDAO.dropGraph(conf, graphs);
+        } catch (RyaDAOException e) {
+            throw new SailException(e);
+        }
+    }
+
+    @Override
+    protected void clearNamespacesInternal() throws SailException {
+        logger.error("Clear Namespace Repository method not implemented");
+    }
+
+    @Override
+    protected void closeInternal() throws SailException {
+        verifyIsOpen();
+    }
+
+    @Override
+    protected void commitInternal() throws SailException {
+        verifyIsOpen();
+        //There is no transactional layer
+    }
+
+    @Override
+    protected CloseableIteration<? extends BindingSet, QueryEvaluationException> evaluateInternal(
+            TupleExpr tupleExpr, Dataset dataset, BindingSet bindings,
+            boolean flag) throws SailException {
+        verifyIsOpen();
+        logger.trace("Incoming query model:\n{}", tupleExpr.toString());
+        if (provenanceCollector != null) {
+            try {
+                provenanceCollector.recordQuery(tupleExpr.toString());
+            } catch (ProvenanceCollectionException e) {
+                // TODO: provenance capture is best-effort; log the failure and
+                // keep evaluating the query rather than aborting it.
+                e.printStackTrace();
+            }
+        }
+        tupleExpr = tupleExpr.clone();
+
+        RdfCloudTripleStoreConfiguration queryConf = store.getConf().clone();
+        if (bindings != null) {
+            Binding dispPlan = bindings.getBinding(RdfCloudTripleStoreConfiguration.CONF_QUERYPLAN_FLAG);
+            if (dispPlan != null) {
+                queryConf.setDisplayQueryPlan(Boolean.parseBoolean(dispPlan.getValue().stringValue()));
+            }
+
+            Binding authBinding = bindings.getBinding(RdfCloudTripleStoreConfiguration.CONF_QUERY_AUTH);
+            if (authBinding != null) {
+                queryConf.setAuths(authBinding.getValue().stringValue().split(","));
+            }
+
+            Binding ttlBinding = bindings.getBinding(RdfCloudTripleStoreConfiguration.CONF_TTL);
+            if (ttlBinding != null) {
+                queryConf.setTtl(Long.valueOf(ttlBinding.getValue().stringValue()));
+            }
+
+            Binding startTimeBinding = bindings.getBinding(RdfCloudTripleStoreConfiguration.CONF_STARTTIME);
+            if (startTimeBinding != null) {
+                queryConf.setStartTime(Long.valueOf(startTimeBinding.getValue().stringValue()));
+            }
+
+            Binding performantBinding = bindings.getBinding(RdfCloudTripleStoreConfiguration.CONF_PERFORMANT);
+            if (performantBinding != null) {
+                queryConf.setBoolean(RdfCloudTripleStoreConfiguration.CONF_PERFORMANT, Boolean.parseBoolean(performantBinding.getValue().stringValue()));
+            }
+
+            Binding inferBinding = bindings.getBinding(RdfCloudTripleStoreConfiguration.CONF_INFER);
+            if (inferBinding != null) {
+                queryConf.setInfer(Boolean.parseBoolean(inferBinding.getValue().stringValue()));
+            }
+
+            Binding useStatsBinding = bindings.getBinding(RdfCloudTripleStoreConfiguration.CONF_USE_STATS);
+            if (useStatsBinding != null) {
+                queryConf.setUseStats(Boolean.parseBoolean(useStatsBinding.getValue().stringValue()));
+            }
+
+            Binding offsetBinding = bindings.getBinding(RdfCloudTripleStoreConfiguration.CONF_OFFSET);
+            if (offsetBinding != null) {
+                queryConf.setOffset(Long.parseLong(offsetBinding.getValue().stringValue()));
+            }
+
+            Binding limitBinding = bindings.getBinding(RdfCloudTripleStoreConfiguration.CONF_LIMIT);
+            if (limitBinding != null) {
+                queryConf.setLimit(Long.parseLong(limitBinding.getValue().stringValue()));
+            }
+        } else {
+            bindings = new QueryBindingSet();
+        }
+
+        if (!(tupleExpr instanceof QueryRoot)) {
+            tupleExpr = new QueryRoot(tupleExpr);
+        }
+
+        try {
+            List<Class<QueryOptimizer>> optimizers = queryConf.getOptimizers();
+            Class<QueryOptimizer> pcjOptimizer = queryConf.getPcjOptimizer();
+            
+            if (pcjOptimizer != null) {
+                QueryOptimizer opt = null;
+                try {
+                    Constructor<QueryOptimizer> construct = pcjOptimizer.getDeclaredConstructor(new Class[] {});
+                    opt = construct.newInstance();
+                } catch (Exception e) {
+                    // No usable zero-arg constructor; opt stays null and is reported below.
+                }
+                if (opt == null) {
+                    throw new NoSuchMethodException("Could not find valid constructor for " + pcjOptimizer.getName());
+                }
+                if (opt instanceof Configurable) {
+                    ((Configurable) opt).setConf(conf);
+                }
+                opt.optimize(tupleExpr, dataset, bindings);
+            }
+            
+            final ParallelEvaluationStrategyImpl strategy = new ParallelEvaluationStrategyImpl(
+                    new StoreTripleSource(queryConf), inferenceEngine, dataset, queryConf);
+            
+            (new BindingAssigner()).optimize(tupleExpr, dataset, bindings);
+            (new ConstantOptimizer(strategy)).optimize(tupleExpr, dataset, bindings);
+            (new CompareOptimizer()).optimize(tupleExpr, dataset, bindings);
+            (new ConjunctiveConstraintSplitter()).optimize(tupleExpr, dataset, bindings);
+            (new DisjunctiveConstraintOptimizer()).optimize(tupleExpr, dataset, bindings);
+            (new SameTermFilterOptimizer()).optimize(tupleExpr, dataset, bindings);
+            (new QueryModelNormalizer()).optimize(tupleExpr, dataset, bindings);
+            (new IterativeEvaluationOptimizer()).optimize(tupleExpr, dataset, bindings);
+
+            if (!optimizers.isEmpty()) {
+                for (Class<QueryOptimizer> optclz : optimizers) {
+                    QueryOptimizer result = null;
+                    try {
+                        Constructor<QueryOptimizer> meth = optclz.getDeclaredConstructor(new Class[] {});
+                        result = meth.newInstance();
+                    } catch (Exception e) {
+                        // No zero-arg constructor; fall through and try the strategy-based one.
+                    }
+                    try {
+                        Constructor<QueryOptimizer> meth = optclz.getDeclaredConstructor(EvaluationStrategy.class);
+                        result = meth.newInstance(strategy);
+                    } catch (Exception e) {
+                        // No strategy-based constructor either; result may still be null, checked below.
+                    }
+                    if (result == null) {
+                        throw new NoSuchMethodException("Could not find valid constructor for " + optclz.getName());
+                    }
+                    if (result instanceof Configurable) {
+                        ((Configurable) result).setConf(conf);
+                    }
+                    result.optimize(tupleExpr, dataset, bindings);
+                }
+            }
+
+            (new FilterOptimizer()).optimize(tupleExpr, dataset, bindings);
+            (new OrderLimitOptimizer()).optimize(tupleExpr, dataset, bindings);
+            
+            logger.trace("Optimized query model:\n{}", tupleExpr.toString());
+
+            if (queryConf.isInfer()
+                    && this.inferenceEngine != null
+                    ) {
+                try {
+                    tupleExpr.visit(new TransitivePropertyVisitor(queryConf, inferenceEngine));
+                    tupleExpr.visit(new SymmetricPropertyVisitor(queryConf, inferenceEngine));
+                    tupleExpr.visit(new InverseOfVisitor(queryConf, inferenceEngine));
+                    tupleExpr.visit(new SubPropertyOfVisitor(queryConf, inferenceEngine));
+                    tupleExpr.visit(new SubClassOfVisitor(queryConf, inferenceEngine));
+                    tupleExpr.visit(new SameAsVisitor(queryConf, inferenceEngine));
+                } catch (Exception e) {
+                    // Inference expansion is best-effort; log and evaluate the un-expanded query.
+                    logger.error("Error applying inference visitors", e);
+                }
+            }
+            if (queryConf.isPerformant()) {
+                tupleExpr.visit(new SeparateFilterJoinsVisitor());
+//                tupleExpr.visit(new FilterTimeIndexVisitor(queryConf));
+//                tupleExpr.visit(new PartitionFilterTimeIndexVisitor(queryConf));
+            }
+            FilterRangeVisitor rangeVisitor = new FilterRangeVisitor(queryConf);
+            tupleExpr.visit(rangeVisitor);
+            tupleExpr.visit(rangeVisitor); // this has to be done twice to replace the StatementPatterns with the right ranges
+            EvaluationStatistics stats = null;
+            if ((!queryConf.isUseStats() && queryConf.isPerformant()) || rdfEvalStatsDAO == null) {
+                stats = new DefaultStatistics();
+            } else if (queryConf.isUseStats()) {
+
+                if (queryConf.isUseSelectivity()) {
+                    stats = new RdfCloudTripleStoreSelectivityEvaluationStatistics(queryConf, rdfEvalStatsDAO,
+                            selectEvalDAO);
+                } else {
+                    stats = new RdfCloudTripleStoreEvaluationStatistics(queryConf, rdfEvalStatsDAO);
+                }
+            }
+            if (stats != null) {
+
+                if (stats instanceof RdfCloudTripleStoreSelectivityEvaluationStatistics) {
+
+                    (new QueryJoinSelectOptimizer((RdfCloudTripleStoreSelectivityEvaluationStatistics) stats,
+                            selectEvalDAO)).optimize(tupleExpr, dataset, bindings);
+                } else {
+
+                    (new mvm.rya.rdftriplestore.evaluation.QueryJoinOptimizer(stats)).optimize(tupleExpr, dataset,
+                            bindings); // TODO: Make pluggable
+                }
+            }
+
+            final CloseableIteration<BindingSet, QueryEvaluationException> iter = strategy
+                    .evaluate(tupleExpr, EmptyBindingSet.getInstance());
+            CloseableIteration<BindingSet, QueryEvaluationException> iterWrap = new CloseableIteration<BindingSet, QueryEvaluationException>() {
+                
+                @Override
+                public void remove() throws QueryEvaluationException {
+                    iter.remove();
+                }
+                
+                @Override
+                public BindingSet next() throws QueryEvaluationException {
+                    return iter.next();
+                }
+                
+                @Override
+                public boolean hasNext() throws QueryEvaluationException {
+                    return iter.hasNext();
+                }
+                
+                @Override
+                public void close() throws QueryEvaluationException {
+                    iter.close();
+                    strategy.shutdown();
+                }
+            };
+            return iterWrap;
+        } catch (QueryEvaluationException e) {
+            throw new SailException(e);
+        } catch (Exception e) {
+            throw new SailException(e);
+        }
+    }
+
+    @Override
+    protected CloseableIteration<? extends Resource, SailException> getContextIDsInternal()
+            throws SailException {
+        verifyIsOpen();
+
+        // Iterating over all context ids is not supported yet; return an empty
+        // iteration rather than null so callers do not NPE.
+        return new info.aduna.iteration.EmptyIteration<Resource, SailException>();
+    }
+
+    @Override
+    protected String getNamespaceInternal(String s) throws SailException {
+        return namespaceManager.getNamespace(s);
+    }
+
+    @Override
+    protected CloseableIteration<? extends Namespace, SailException> getNamespacesInternal()
+            throws SailException {
+        return namespaceManager.iterateNamespace();
+    }
+
+    @Override
+    protected CloseableIteration<? extends Statement, SailException> getStatementsInternal(
+            Resource subject, URI predicate, Value object, boolean includeInferred,
+            Resource... contexts) throws SailException {
+//        try {
+        // Evaluate as a query so that inferred statements are included.
+        // TODO: does routing through the query engine hurt performance?
+        final Var subjVar = decorateValue(subject, "s");
+        final Var predVar = decorateValue(predicate, "p");
+        final Var objVar = decorateValue(object, "o");
+        StatementPattern sp = null;
+        final boolean hasContext = contexts != null && contexts.length > 0;
+        final Resource context = (hasContext) ? contexts[0] : null;
+        final Var cntxtVar = decorateValue(context, "c");
+        //TODO: Only using one context here
+        sp = new StatementPattern(subjVar, predVar, objVar, cntxtVar);
+        //return new StoreTripleSource(store.getConf()).getStatements(resource, uri, value, contexts);
+        final CloseableIteration<? extends BindingSet, QueryEvaluationException> evaluate = evaluate(sp, null, null, false);
+        return new CloseableIteration<Statement, SailException>() {  //TODO: Use a util class to do this
+            private boolean isClosed = false;
+
+            @Override
+            public void close() throws SailException {
+                isClosed = true;
+                try {
+                    evaluate.close();
+                } catch (QueryEvaluationException e) {
+                    throw new SailException(e);
+                }
+            }
+
+            @Override
+            public boolean hasNext() throws SailException {
+                try {
+                    return evaluate.hasNext();
+                } catch (QueryEvaluationException e) {
+                    throw new SailException(e);
+                }
+            }
+
+            @Override
+            public Statement next() throws SailException {
+                if (isClosed || !hasNext()) {
+                    throw new NoSuchElementException();
+                }
+
+                try {
+                    BindingSet next = evaluate.next();
+                    Resource bs_subj = (Resource) ((subjVar.hasValue()) ? subjVar.getValue() : next.getBinding(subjVar.getName()).getValue());
+                    URI bs_pred = (URI) ((predVar.hasValue()) ? predVar.getValue() : next.getBinding(predVar.getName()).getValue());
+                    Value bs_obj = (objVar.hasValue()) ? objVar.getValue() : (Value) next.getBinding(objVar.getName()).getValue();
+                    Binding b_cntxt = next.getBinding(cntxtVar.getName());
+
+                    //convert BindingSet to Statement
+                    if (b_cntxt != null) {
+                        return new ContextStatementImpl(bs_subj, bs_pred, bs_obj, (Resource) b_cntxt.getValue());
+                    } else {
+                        return new StatementImpl(bs_subj, bs_pred, bs_obj);
+                    }
+                } catch (QueryEvaluationException e) {
+                    throw new SailException(e);
+                }
+            }
+
+            @Override
+            public void remove() throws SailException {
+                try {
+                    evaluate.remove();
+                } catch (QueryEvaluationException e) {
+                    throw new SailException(e);
+                }
+            }
+        };
+//        } catch (QueryEvaluationException e) {
+//            throw new SailException(e);
+//        }
+    }
+
+    protected Var decorateValue(Value val, String name) {
+        if (val == null) {
+            return new Var(name);
+        } else {
+            return new Var(name, val);
+        }
+    }
+
+    @Override
+    protected void removeNamespaceInternal(String s) throws SailException {
+        namespaceManager.removeNamespace(s);
+    }
+
+    @Override
+    protected void removeStatementsInternal(Resource subject, URI predicate,
+                                            Value object, Resource... contexts) throws SailException {
+        if (!(subject instanceof URI)) {
+            throw new SailException("Subject[" + subject + "] must be URI");
+        }
+
+        try {
+            if (contexts != null && contexts.length > 0) {
+                for (Resource context : contexts) {
+                    if (!(context instanceof URI)) {
+                        throw new SailException("Context[" + context + "] must be URI");
+                    }
+                    RyaStatement statement = new RyaStatement(
+                            RdfToRyaConversions.convertResource(subject),
+                            RdfToRyaConversions.convertURI(predicate),
+                            RdfToRyaConversions.convertValue(object),
+                            RdfToRyaConversions.convertResource(context));
+
+                    ryaDAO.delete(statement, conf);
+                }
+            } else {
+                RyaStatement statement = new RyaStatement(
+                        RdfToRyaConversions.convertResource(subject),
+                        RdfToRyaConversions.convertURI(predicate),
+                        RdfToRyaConversions.convertValue(object),
+                        null);
+
+                ryaDAO.delete(statement, conf);
+            }
+        } catch (RyaDAOException e) {
+            throw new SailException(e);
+        }
+    }
+
+    @Override
+    protected void rollbackInternal() throws SailException {
+        //TODO: No transactional layer as of yet
+    }
+
+    @Override
+    protected void setNamespaceInternal(String s, String s1)
+            throws SailException {
+        namespaceManager.addNamespace(s, s1);
+    }
+
+    @Override
+    protected long sizeInternal(Resource... contexts) throws SailException {
+        logger.warn("Size calculation is not supported yet; returning 0");
+
+        return 0;
+    }
+
+    @Override
+    protected void startTransactionInternal() throws SailException {
+        //TODO: ?
+    }
+
+    public class StoreTripleSource implements TripleSource {
+
+        private RdfCloudTripleStoreConfiguration conf;
+
+        public StoreTripleSource(RdfCloudTripleStoreConfiguration conf) {
+            this.conf = conf;
+        }
+
+        public CloseableIteration<Statement, QueryEvaluationException> getStatements(
+                Resource subject, URI predicate, Value object,
+                Resource... contexts) throws QueryEvaluationException {
+            return RyaDAOHelper.query(ryaDAO, subject, predicate, object, conf, contexts);
+        }
+
+        public CloseableIteration<? extends Entry<Statement, BindingSet>, QueryEvaluationException> getStatements(
+                Collection<Map.Entry<Statement, BindingSet>> statements,
+                Resource... contexts) throws QueryEvaluationException {
+
+            return RyaDAOHelper.query(ryaDAO, statements, conf);
+        }
+
+        public ValueFactory getValueFactory() {
+            return RdfCloudTripleStoreConstants.VALUE_FACTORY;
+        }
+    }
+    
+    public InferenceEngine getInferenceEngine() {
+        return inferenceEngine;
+    }
+
+    public RdfCloudTripleStoreConfiguration getConf() {
+        return conf;
+    }
+}

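Since evaluateInternal() above pulls per-query settings out of the incoming BindingSet, a caller can tune an individual query by binding the well-known configuration names before evaluating it. The sketch below is illustrative only: it assumes 'repo' is a SailRepository already wrapping an initialized RdfCloudTripleStore, and the authorization list and TTL are placeholder values.

    import org.openrdf.model.ValueFactory;
    import org.openrdf.query.QueryLanguage;
    import org.openrdf.query.TupleQuery;
    import org.openrdf.query.TupleQueryResult;
    import org.openrdf.repository.Repository;
    import org.openrdf.repository.RepositoryConnection;

    import mvm.rya.api.RdfCloudTripleStoreConfiguration;

    public class QueryConfExample {
        public static void runQuery(Repository repo) throws Exception {
            RepositoryConnection conn = repo.getConnection();
            try {
                ValueFactory vf = conn.getValueFactory();
                TupleQuery query = conn.prepareTupleQuery(QueryLanguage.SPARQL,
                        "SELECT ?p ?o WHERE { <urn:example:subject> ?p ?o }");

                // These bindings are read back out of the BindingSet in evaluateInternal().
                query.setBinding(RdfCloudTripleStoreConfiguration.CONF_QUERY_AUTH, vf.createLiteral("auth1,auth2"));
                query.setBinding(RdfCloudTripleStoreConfiguration.CONF_TTL, vf.createLiteral("86400000"));

                TupleQueryResult result = query.evaluate();
                try {
                    while (result.hasNext()) {
                        System.out.println(result.next());
                    }
                } finally {
                    result.close();
                }
            } finally {
                conn.close();
            }
        }
    }
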
http://git-wip-us.apache.org/repos/asf/incubator-rya/blob/80faf06d/sail/src/main/java/mvm/rya/rdftriplestore/RdfCloudTripleStoreFactory.java
----------------------------------------------------------------------
diff --git a/sail/src/main/java/mvm/rya/rdftriplestore/RdfCloudTripleStoreFactory.java b/sail/src/main/java/mvm/rya/rdftriplestore/RdfCloudTripleStoreFactory.java
new file mode 100644
index 0000000..42f1aa4
--- /dev/null
+++ b/sail/src/main/java/mvm/rya/rdftriplestore/RdfCloudTripleStoreFactory.java
@@ -0,0 +1,56 @@
+package mvm.rya.rdftriplestore;
+
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ * 
+ *   http://www.apache.org/licenses/LICENSE-2.0
+ * 
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied.  See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+
+
+
+import org.openrdf.sail.Sail;
+import org.openrdf.sail.config.SailConfigException;
+import org.openrdf.sail.config.SailFactory;
+import org.openrdf.sail.config.SailImplConfig;
+
+public class RdfCloudTripleStoreFactory implements SailFactory {
+
+	public static final String SAIL_TYPE = "openrdf:RdfCloudTripleStore";
+
+	@Override
+	public SailImplConfig getConfig() {
+		return new RdfCloudTripleStoreSailConfig();
+	}
+
+	@Override
+	public Sail getSail(SailImplConfig config) throws SailConfigException {
+//		RdfCloudTripleStore cbStore = new RdfCloudTripleStore();
+//		RdfCloudTripleStoreSailConfig cbconfig = (RdfCloudTripleStoreSailConfig) config;
+//		cbStore.setServer(cbconfig.getServer());
+//		cbStore.setPort(cbconfig.getPort());
+//		cbStore.setInstance(cbconfig.getInstance());
+//		cbStore.setPassword(cbconfig.getPassword());
+//		cbStore.setUser(cbconfig.getUser());
+//		return cbStore;
+        // TODO: construct an RdfCloudTripleStore from this SailImplConfig once the
+        // required DAO wiring is available; fail fast rather than returning null.
+        throw new SailConfigException("Creating a Sail from a SailImplConfig is not supported yet");
+	}
+
+	@Override
+	public String getSailType() {
+		return SAIL_TYPE;
+	}
+
+}

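For context, Sesame discovers SailFactory implementations through its SailRegistry, keyed by the SAIL_TYPE string. A minimal sketch of that lookup, assuming the factory has been registered through a META-INF/services/org.openrdf.sail.config.SailFactory entry (note that getSail() above is still a stub, so the last call fails until it is implemented):

    import org.openrdf.sail.Sail;
    import org.openrdf.sail.config.SailConfigException;
    import org.openrdf.sail.config.SailFactory;
    import org.openrdf.sail.config.SailRegistry;

    import mvm.rya.rdftriplestore.RdfCloudTripleStoreFactory;

    public class FactoryLookupExample {
        public static Sail createSail() throws SailConfigException {
            // Resolve the factory registered under "openrdf:RdfCloudTripleStore".
            SailFactory factory = SailRegistry.getInstance().get(RdfCloudTripleStoreFactory.SAIL_TYPE);
            // Will fail until getSail() supports construction from configuration.
            return factory.getSail(factory.getConfig());
        }
    }
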
http://git-wip-us.apache.org/repos/asf/incubator-rya/blob/80faf06d/sail/src/main/java/mvm/rya/rdftriplestore/RdfCloudTripleStoreSailConfig.java
----------------------------------------------------------------------
diff --git a/sail/src/main/java/mvm/rya/rdftriplestore/RdfCloudTripleStoreSailConfig.java b/sail/src/main/java/mvm/rya/rdftriplestore/RdfCloudTripleStoreSailConfig.java
new file mode 100644
index 0000000..6542b55
--- /dev/null
+++ b/sail/src/main/java/mvm/rya/rdftriplestore/RdfCloudTripleStoreSailConfig.java
@@ -0,0 +1,133 @@
+package mvm.rya.rdftriplestore;
+
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ * 
+ *   http://www.apache.org/licenses/LICENSE-2.0
+ * 
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied.  See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+
+
+
+import org.openrdf.model.*;
+import org.openrdf.model.impl.ValueFactoryImpl;
+import org.openrdf.model.util.GraphUtil;
+import org.openrdf.model.util.GraphUtilException;
+import org.openrdf.sail.config.SailConfigException;
+import org.openrdf.sail.config.SailImplConfigBase;
+
+public class RdfCloudTripleStoreSailConfig extends SailImplConfigBase {
+    
+    public static final String NAMESPACE = "http://www.openrdf.org/config/sail/cloudbasestore#";
+
+	public static final URI SERVER;
+	public static final URI PORT;
+	public static final URI INSTANCE;
+	public static final URI USER;
+	public static final URI PASSWORD;
+
+    static {
+		ValueFactory factory = ValueFactoryImpl.getInstance();
+		SERVER = factory.createURI(NAMESPACE, "server");
+		PORT = factory.createURI(NAMESPACE, "port");
+		INSTANCE = factory.createURI(NAMESPACE, "instance");
+		USER = factory.createURI(NAMESPACE, "user");
+		PASSWORD = factory.createURI(NAMESPACE, "password");
+	}
+
+	private String server = "stratus13";
+
+	private int port = 2181;
+
+	private String user = "root";
+
+	private String password = "password";
+	
+	private String instance = "stratus";
+
+	public String getServer() {
+		return server;
+	}
+
+	public void setServer(String server) {
+		this.server = server;
+	}
+
+	public int getPort() {
+		return port;
+	}
+
+	public void setPort(int port) {
+		this.port = port;
+	}
+
+	public String getUser() {
+		return user;
+	}
+
+	public void setUser(String user) {
+		this.user = user;
+	}
+
+	public String getPassword() {
+		return password;
+	}
+
+	public void setPassword(String password) {
+		this.password = password;
+	}
+
+	public String getInstance() {
+		return instance;
+	}
+
+	public void setInstance(String instance) {
+		this.instance = instance;
+	}
+
+    @Override
+	public void parse(Graph graph, Resource implNode)
+		throws SailConfigException
+	{
+		super.parse(graph, implNode);
+
+		try {
+			Literal serverLit = GraphUtil.getOptionalObjectLiteral(graph, implNode, SERVER);
+			if (serverLit != null) {
+				setServer(serverLit.getLabel());
+			}
+			Literal portLit = GraphUtil.getOptionalObjectLiteral(graph, implNode, PORT);
+			if (portLit != null) {
+				setPort(Integer.parseInt(portLit.getLabel()));
+			}
+			Literal instLit = GraphUtil.getOptionalObjectLiteral(graph, implNode, INSTANCE);
+			if (instLit != null) {
+				setInstance(instLit.getLabel());
+			}
+			Literal userLit = GraphUtil.getOptionalObjectLiteral(graph, implNode, USER);
+			if (userLit != null) {
+				setUser(userLit.getLabel());
+			}
+			Literal pwdLit = GraphUtil.getOptionalObjectLiteral(graph, implNode, PASSWORD);
+			if (pwdLit != null) {
+				setPassword(pwdLit.getLabel());
+			}
+		}
+		catch (GraphUtilException e) {
+			throw new SailConfigException(e.getMessage(), e);
+		}
+	}
+}

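parse() above expects each setting as an optional literal attached to the implementation node of a configuration graph. A minimal sketch of building such a graph by hand, with placeholder connection values:

    import org.openrdf.model.Graph;
    import org.openrdf.model.Resource;
    import org.openrdf.model.ValueFactory;
    import org.openrdf.model.impl.GraphImpl;
    import org.openrdf.model.impl.ValueFactoryImpl;
    import org.openrdf.sail.config.SailConfigException;

    import mvm.rya.rdftriplestore.RdfCloudTripleStoreSailConfig;

    public class SailConfigParseExample {
        public static void main(String[] args) throws SailConfigException {
            ValueFactory vf = ValueFactoryImpl.getInstance();
            Graph graph = new GraphImpl();
            Resource implNode = vf.createBNode();

            // Each setting hangs off the implementation node as a plain literal.
            graph.add(implNode, RdfCloudTripleStoreSailConfig.SERVER, vf.createLiteral("zoo.example.com"));
            graph.add(implNode, RdfCloudTripleStoreSailConfig.PORT, vf.createLiteral("2181"));
            graph.add(implNode, RdfCloudTripleStoreSailConfig.INSTANCE, vf.createLiteral("dev"));

            RdfCloudTripleStoreSailConfig config = new RdfCloudTripleStoreSailConfig();
            config.parse(graph, implNode);
            System.out.println(config.getServer() + ":" + config.getPort() + "/" + config.getInstance());
        }
    }
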
http://git-wip-us.apache.org/repos/asf/incubator-rya/blob/80faf06d/sail/src/main/java/mvm/rya/rdftriplestore/RyaSailRepository.java
----------------------------------------------------------------------
diff --git a/sail/src/main/java/mvm/rya/rdftriplestore/RyaSailRepository.java b/sail/src/main/java/mvm/rya/rdftriplestore/RyaSailRepository.java
new file mode 100644
index 0000000..7003398
--- /dev/null
+++ b/sail/src/main/java/mvm/rya/rdftriplestore/RyaSailRepository.java
@@ -0,0 +1,53 @@
+package mvm.rya.rdftriplestore;
+
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ * 
+ *   http://www.apache.org/licenses/LICENSE-2.0
+ * 
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied.  See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+
+
+
+import org.openrdf.repository.RepositoryException;
+import org.openrdf.repository.sail.SailRepository;
+import org.openrdf.repository.sail.SailRepositoryConnection;
+import org.openrdf.sail.Sail;
+import org.openrdf.sail.SailException;
+
+/**
+ * A {@link SailRepository} that hands out {@link RyaSailRepositoryConnection}s,
+ * so that contexts supplied to add() are combined with those found in the data.
+ */
+public class RyaSailRepository extends SailRepository{
+    public RyaSailRepository(Sail sail) {
+        super(sail);
+    }
+
+    @Override
+    public SailRepositoryConnection getConnection() throws RepositoryException {
+        try {
+            return new RyaSailRepositoryConnection(this, this.getSail().getConnection());
+        } catch (SailException e) {
+            throw new RepositoryException(e);
+        }
+    }
+}

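The subclass above exists only so that getConnection() returns a RyaSailRepositoryConnection. Typical wiring, sketched here against the Accumulo DAO with a placeholder table prefix and an already-authenticated Connector (setter names as used elsewhere in this repository):

    import org.apache.accumulo.core.client.Connector;
    import org.openrdf.repository.Repository;

    import mvm.rya.accumulo.AccumuloRdfConfiguration;
    import mvm.rya.accumulo.AccumuloRyaDAO;
    import mvm.rya.rdftriplestore.RdfCloudTripleStore;
    import mvm.rya.rdftriplestore.RyaSailRepository;

    public class RyaRepositoryExample {
        public static Repository open(Connector connector) throws Exception {
            AccumuloRdfConfiguration conf = new AccumuloRdfConfiguration();
            conf.setTablePrefix("rya_");

            AccumuloRyaDAO dao = new AccumuloRyaDAO();
            dao.setConnector(connector);
            dao.setConf(conf);

            RdfCloudTripleStore store = new RdfCloudTripleStore();
            store.setRyaDAO(dao);

            // initialize() brings up the underlying store (and its DAO).
            Repository repo = new RyaSailRepository(store);
            repo.initialize();
            return repo;
        }
    }
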
http://git-wip-us.apache.org/repos/asf/incubator-rya/blob/80faf06d/sail/src/main/java/mvm/rya/rdftriplestore/RyaSailRepositoryConnection.java
----------------------------------------------------------------------
diff --git a/sail/src/main/java/mvm/rya/rdftriplestore/RyaSailRepositoryConnection.java b/sail/src/main/java/mvm/rya/rdftriplestore/RyaSailRepositoryConnection.java
new file mode 100644
index 0000000..6261b8c
--- /dev/null
+++ b/sail/src/main/java/mvm/rya/rdftriplestore/RyaSailRepositoryConnection.java
@@ -0,0 +1,109 @@
+package mvm.rya.rdftriplestore;
+
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ * 
+ *   http://www.apache.org/licenses/LICENSE-2.0
+ * 
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied.  See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+
+
+
+import java.io.IOException;
+import java.io.InputStream;
+import java.io.Reader;
+
+import mvm.rya.rdftriplestore.utils.CombineContextsRdfInserter;
+
+import org.openrdf.OpenRDFUtil;
+import org.openrdf.model.Resource;
+import org.openrdf.repository.RepositoryException;
+import org.openrdf.repository.sail.SailRepository;
+import org.openrdf.repository.sail.SailRepositoryConnection;
+import org.openrdf.repository.util.RDFLoader;
+import org.openrdf.rio.RDFFormat;
+import org.openrdf.rio.RDFHandlerException;
+import org.openrdf.rio.RDFParseException;
+import org.openrdf.sail.SailConnection;
+
+/**
+ * Overrides the add() methods so that contexts parsed from an input stream or reader
+ * are combined with the explicitly supplied contexts, rather than replaced by them.
+ */
+public class RyaSailRepositoryConnection extends SailRepositoryConnection {
+
+    protected RyaSailRepositoryConnection(SailRepository repository, SailConnection sailConnection) {
+        super(repository, sailConnection);
+    }
+
+    @Override
+    public void add(InputStream in, String baseURI, RDFFormat dataFormat, Resource... contexts) throws IOException, RDFParseException,
+            RepositoryException {
+        OpenRDFUtil.verifyContextNotNull(contexts);
+
+        CombineContextsRdfInserter rdfInserter = new CombineContextsRdfInserter(this);
+        rdfInserter.enforceContext(contexts);
+
+        boolean localTransaction = startLocalTransaction();
+        try {
+            RDFLoader loader = new RDFLoader(getParserConfig(), getValueFactory());
+            loader.load(in, baseURI, dataFormat, rdfInserter);
+
+            conditionalCommit(localTransaction);
+        } catch (RDFHandlerException e) {
+            conditionalRollback(localTransaction);
+
+            throw ((RepositoryException) e.getCause());
+        } catch (RDFParseException e) {
+            conditionalRollback(localTransaction);
+            throw e;
+        } catch (IOException e) {
+            conditionalRollback(localTransaction);
+            throw e;
+        } catch (RuntimeException e) {
+            conditionalRollback(localTransaction);
+            throw e;
+        }
+    }
+
+    @Override
+    public void add(Reader reader, String baseURI, RDFFormat dataFormat, Resource... contexts) throws IOException, RDFParseException,
+            RepositoryException {
+        OpenRDFUtil.verifyContextNotNull(contexts);
+
+        CombineContextsRdfInserter rdfInserter = new CombineContextsRdfInserter(this);
+        rdfInserter.enforceContext(contexts);
+
+        boolean localTransaction = startLocalTransaction();
+        try {
+            RDFLoader loader = new RDFLoader(getParserConfig(), getValueFactory());
+            loader.load(reader, baseURI, dataFormat, rdfInserter);
+
+            conditionalCommit(localTransaction);
+        } catch (RDFHandlerException e) {
+            conditionalRollback(localTransaction);
+
+            throw ((RepositoryException) e.getCause());
+        } catch (RDFParseException e) {
+            conditionalRollback(localTransaction);
+            throw e;
+        } catch (IOException e) {
+            conditionalRollback(localTransaction);
+            throw e;
+        } catch (RuntimeException e) {
+            conditionalRollback(localTransaction);
+            throw e;
+        }
+    }
+}

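In practice the override means a load like the one sketched below keeps the context carried inside the TriG data and combines it with the explicitly supplied one, where the stock RDFInserter would let the supplied context win. Placeholder data and context URIs throughout:

    import java.io.ByteArrayInputStream;
    import java.io.InputStream;
    import java.nio.charset.StandardCharsets;

    import org.openrdf.model.URI;
    import org.openrdf.repository.RepositoryConnection;
    import org.openrdf.rio.RDFFormat;

    import mvm.rya.rdftriplestore.RyaSailRepository;

    public class CombineContextsExample {
        public static void load(RyaSailRepository repo) throws Exception {
            String trig = "<urn:ctx:data> { <urn:s> <urn:p> <urn:o> . }";
            InputStream in = new ByteArrayInputStream(trig.getBytes(StandardCharsets.UTF_8));

            RepositoryConnection conn = repo.getConnection();
            try {
                URI loadContext = conn.getValueFactory().createURI("urn:ctx:load");
                // add() routes through CombineContextsRdfInserter, so the data's own
                // context (urn:ctx:data) is combined with urn:ctx:load, not overwritten.
                conn.add(in, "urn:base:", RDFFormat.TRIG, loadContext);
            } finally {
                conn.close();
            }
        }
    }
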
http://git-wip-us.apache.org/repos/asf/incubator-rya/blob/80faf06d/sail/src/main/java/mvm/rya/rdftriplestore/evaluation/ExternalBatchingIterator.java
----------------------------------------------------------------------
diff --git a/sail/src/main/java/mvm/rya/rdftriplestore/evaluation/ExternalBatchingIterator.java b/sail/src/main/java/mvm/rya/rdftriplestore/evaluation/ExternalBatchingIterator.java
new file mode 100644
index 0000000..b84104a
--- /dev/null
+++ b/sail/src/main/java/mvm/rya/rdftriplestore/evaluation/ExternalBatchingIterator.java
@@ -0,0 +1,33 @@
+package mvm.rya.rdftriplestore.evaluation;
+
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ * 
+ *   http://www.apache.org/licenses/LICENSE-2.0
+ * 
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied.  See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+
+
+
+import info.aduna.iteration.CloseableIteration;
+
+import java.util.Collection;
+
+import org.openrdf.query.BindingSet;
+import org.openrdf.query.QueryEvaluationException;
+
+public interface ExternalBatchingIterator {
+    public CloseableIteration<BindingSet, QueryEvaluationException> evaluate(Collection<BindingSet> bindingset) throws QueryEvaluationException;
+}

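The interface above gives evaluation nodes a hook for evaluating a whole collection of candidate binding sets at once, e.g. as one batched scan. A hedged sketch of a naive implementation that simply evaluates one binding set at a time and concatenates the results; the abstract evaluateOne() hook and the class itself are illustrative, and a real implementation would push the entire batch into a single lookup:

    import java.util.ArrayList;
    import java.util.Collection;
    import java.util.Iterator;
    import java.util.List;

    import info.aduna.iteration.CloseableIteration;

    import org.openrdf.query.BindingSet;
    import org.openrdf.query.QueryEvaluationException;

    import mvm.rya.rdftriplestore.evaluation.ExternalBatchingIterator;

    public abstract class NaiveBatchingIterator implements ExternalBatchingIterator {

        /** Evaluates a single binding set; supplied by the concrete subclass. */
        protected abstract CloseableIteration<BindingSet, QueryEvaluationException> evaluateOne(BindingSet bs)
                throws QueryEvaluationException;

        @Override
        public CloseableIteration<BindingSet, QueryEvaluationException> evaluate(Collection<BindingSet> bindingset)
                throws QueryEvaluationException {
            // Drain each per-binding iteration up front; acceptable for small batches only.
            final List<BindingSet> results = new ArrayList<BindingSet>();
            for (BindingSet bs : bindingset) {
                CloseableIteration<BindingSet, QueryEvaluationException> it = evaluateOne(bs);
                try {
                    while (it.hasNext()) {
                        results.add(it.next());
                    }
                } finally {
                    it.close();
                }
            }
            final Iterator<BindingSet> delegate = results.iterator();
            return new CloseableIteration<BindingSet, QueryEvaluationException>() {
                public boolean hasNext() { return delegate.hasNext(); }
                public BindingSet next() { return delegate.next(); }
                public void remove() { delegate.remove(); }
                public void close() { /* nothing to release */ }
            };
        }
    }
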


[28/56] [abbrv] incubator-rya git commit: RYA-7 POM and License Clean-up for Apache Move

Posted by mi...@apache.org.
http://git-wip-us.apache.org/repos/asf/incubator-rya/blob/80faf06d/osgi/alx.rya/src/main/resources/META-INF/spring/alx.rya-spring.xml
----------------------------------------------------------------------
diff --git a/osgi/alx.rya/src/main/resources/META-INF/spring/alx.rya-spring.xml b/osgi/alx.rya/src/main/resources/META-INF/spring/alx.rya-spring.xml
index 47dced1..4ef9ac0 100644
--- a/osgi/alx.rya/src/main/resources/META-INF/spring/alx.rya-spring.xml
+++ b/osgi/alx.rya/src/main/resources/META-INF/spring/alx.rya-spring.xml
@@ -1,13 +1,23 @@
+<?xml version='1.0'?>
 
-<!-- Copyright (C) 2008 PROTEUS Technologies, LLC This program is free software: 
-	you can redistribute it and/or modify it under the terms of the GNU General 
-	Public License as published by the Free Software Foundation, either version 
-	3 of the License, or (at your option) any later version. This program is 
-	distributed in the hope that it will be useful, but WITHOUT ANY WARRANTY; 
-	without even the implied warranty of MERCHANTABILITY or FITNESS FOR A PARTICULAR 
-	PURPOSE. See the GNU General Public License for more details. You should 
-	have received a copy of the GNU General Public License along with this program. 
-	If not, see <http://www.gnu.org/licenses/>. -->
+<!--
+Licensed to the Apache Software Foundation (ASF) under one
+or more contributor license agreements.  See the NOTICE file
+distributed with this work for additional information
+regarding copyright ownership.  The ASF licenses this file
+to you under the Apache License, Version 2.0 (the
+"License"); you may not use this file except in compliance
+with the License.  You may obtain a copy of the License at
+
+  http://www.apache.org/licenses/LICENSE-2.0
+
+Unless required by applicable law or agreed to in writing,
+software distributed under the License is distributed on an
+"AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+KIND, either express or implied.  See the License for the
+specific language governing permissions and limitations
+under the License.
+-->
 
 <beans xmlns="http://www.springframework.org/schema/beans"
 	xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance"

http://git-wip-us.apache.org/repos/asf/incubator-rya/blob/80faf06d/osgi/alx.rya/src/main/resources/ROOT/crossdomain.xml
----------------------------------------------------------------------
diff --git a/osgi/alx.rya/src/main/resources/ROOT/crossdomain.xml b/osgi/alx.rya/src/main/resources/ROOT/crossdomain.xml
index c3b5339..cec91f6 100644
--- a/osgi/alx.rya/src/main/resources/ROOT/crossdomain.xml
+++ b/osgi/alx.rya/src/main/resources/ROOT/crossdomain.xml
@@ -1,5 +1,25 @@
 <?xml version="1.0"?>
+
+<!--
+Licensed to the Apache Software Foundation (ASF) under one
+or more contributor license agreements.  See the NOTICE file
+distributed with this work for additional information
+regarding copyright ownership.  The ASF licenses this file
+to you under the Apache License, Version 2.0 (the
+"License"); you may not use this file except in compliance
+with the License.  You may obtain a copy of the License at
+
+  http://www.apache.org/licenses/LICENSE-2.0
+
+Unless required by applicable law or agreed to in writing,
+software distributed under the License is distributed on an
+"AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+KIND, either express or implied.  See the License for the
+specific language governing permissions and limitations
+under the License.
+-->
+
 <!DOCTYPE cross-domain-policy SYSTEM "http://www.macromedia.com/xml/dtds/cross-domain-policy.dtd">
 <cross-domain-policy>
     <allow-access-from domain="*" secure="false"/>
-</cross-domain-policy>
\ No newline at end of file
+</cross-domain-policy>

http://git-wip-us.apache.org/repos/asf/incubator-rya/blob/80faf06d/osgi/camel.rya/pom.xml
----------------------------------------------------------------------
diff --git a/osgi/camel.rya/pom.xml b/osgi/camel.rya/pom.xml
index 951c010..ade8e03 100644
--- a/osgi/camel.rya/pom.xml
+++ b/osgi/camel.rya/pom.xml
@@ -1,35 +1,54 @@
 <?xml version="1.0" encoding="UTF-8"?>
+
+<!--
+Licensed to the Apache Software Foundation (ASF) under one
+or more contributor license agreements.  See the NOTICE file
+distributed with this work for additional information
+regarding copyright ownership.  The ASF licenses this file
+to you under the Apache License, Version 2.0 (the
+"License"); you may not use this file except in compliance
+with the License.  You may obtain a copy of the License at
+
+  http://www.apache.org/licenses/LICENSE-2.0
+
+Unless required by applicable law or agreed to in writing,
+software distributed under the License is distributed on an
+"AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+KIND, either express or implied.  See the License for the
+specific language governing permissions and limitations
+under the License.
+-->
+
 <project xmlns="http://maven.apache.org/POM/4.0.0" xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance" xsi:schemaLocation="http://maven.apache.org/POM/4.0.0 http://maven.apache.org/xsd/maven-4.0.0.xsd">
     <modelVersion>4.0.0</modelVersion>
     <parent>
-        <groupId>mvm.rya</groupId>
+        <groupId>org.apache.rya</groupId>
         <artifactId>rya.osgi</artifactId>
         <version>3.2.10-SNAPSHOT</version>
     </parent>
-    <groupId>mvm.rya</groupId>
+
     <artifactId>camel.rya</artifactId>
+    <name>Apache Rya Camel</name>
+
     <packaging>bundle</packaging>
-    <name>${project.groupId}.${project.artifactId}</name>
-    <properties>
-        <camel.version>2.7.2</camel.version>
-    </properties>
+
     <dependencies>
         <dependency>
+            <groupId>org.apache.rya</groupId>
+            <artifactId>rya.sail</artifactId>
+        </dependency>
+
+        <dependency>
             <groupId>org.apache.camel</groupId>
             <artifactId>camel-core</artifactId>
-            <version>${camel.version}</version>
         </dependency>
         <dependency>
             <groupId>org.apache.camel</groupId>
             <artifactId>camel-test</artifactId>
-            <version>${camel.version}</version>
-        </dependency>
-        <dependency>
-            <groupId>mvm.rya</groupId>
-            <artifactId>rya.sail.impl</artifactId>
         </dependency>
+
         <dependency>
-            <groupId>mvm.rya</groupId>
+            <groupId>org.apache.rya</groupId>
             <artifactId>accumulo.rya</artifactId>
             <scope>test</scope>
         </dependency>
@@ -48,35 +67,4 @@
         </plugins>
     </build>
 
-    <profiles>
-        <profile>
-            <id>accumulo</id>
-            <activation>
-                <activeByDefault>true</activeByDefault>
-            </activation>
-            <dependencies>
-                <dependency>
-                    <groupId>org.apache.accumulo</groupId>
-                    <artifactId>accumulo-core</artifactId>
-                    <optional>true</optional>
-                    <scope>test</scope>
-                </dependency>
-            </dependencies>
-        </profile>
-        <profile>
-            <id>cloudbase</id>
-            <activation>
-                <activeByDefault>false</activeByDefault>
-            </activation>
-            <dependencies>
-                <dependency>
-                    <groupId>com.texeltek</groupId>
-                    <artifactId>accumulo-cloudbase-shim</artifactId>
-                    <optional>true</optional>
-                    <scope>test</scope>
-                </dependency>
-            </dependencies>
-        </profile>
-    </profiles>
-
 </project>

http://git-wip-us.apache.org/repos/asf/incubator-rya/blob/80faf06d/osgi/camel.rya/src/main/java/mvm/rya/camel/cbsail/CbSailComponent.java
----------------------------------------------------------------------
diff --git a/osgi/camel.rya/src/main/java/mvm/rya/camel/cbsail/CbSailComponent.java b/osgi/camel.rya/src/main/java/mvm/rya/camel/cbsail/CbSailComponent.java
index 31f864c..0bbc07c 100644
--- a/osgi/camel.rya/src/main/java/mvm/rya/camel/cbsail/CbSailComponent.java
+++ b/osgi/camel.rya/src/main/java/mvm/rya/camel/cbsail/CbSailComponent.java
@@ -1,25 +1,26 @@
 package mvm.rya.camel.cbsail;
 
 /*
- * #%L
- * mvm.rya.camel.rya
- * %%
- * Copyright (C) 2014 Rya
- * %%
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * You may obtain a copy of the License at
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
  * 
- *      http://www.apache.org/licenses/LICENSE-2.0
+ *   http://www.apache.org/licenses/LICENSE-2.0
  * 
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- * #L%
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied.  See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
  */
 
+
+
 import org.apache.camel.Endpoint;
 import org.apache.camel.impl.DefaultComponent;
 import org.openrdf.model.ValueFactory;

http://git-wip-us.apache.org/repos/asf/incubator-rya/blob/80faf06d/osgi/camel.rya/src/main/java/mvm/rya/camel/cbsail/CbSailEndpoint.java
----------------------------------------------------------------------
diff --git a/osgi/camel.rya/src/main/java/mvm/rya/camel/cbsail/CbSailEndpoint.java b/osgi/camel.rya/src/main/java/mvm/rya/camel/cbsail/CbSailEndpoint.java
index 20a32d9..4a89291 100644
--- a/osgi/camel.rya/src/main/java/mvm/rya/camel/cbsail/CbSailEndpoint.java
+++ b/osgi/camel.rya/src/main/java/mvm/rya/camel/cbsail/CbSailEndpoint.java
@@ -1,25 +1,26 @@
 package mvm.rya.camel.cbsail;
 
 /*
- * #%L
- * mvm.rya.camel.rya
- * %%
- * Copyright (C) 2014 Rya
- * %%
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * You may obtain a copy of the License at
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
  * 
- *      http://www.apache.org/licenses/LICENSE-2.0
+ *   http://www.apache.org/licenses/LICENSE-2.0
  * 
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- * #L%
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied.  See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
  */
 
+
+
 import org.apache.camel.*;
 import org.apache.camel.impl.DefaultEndpoint;
 import org.openrdf.repository.Repository;

http://git-wip-us.apache.org/repos/asf/incubator-rya/blob/80faf06d/osgi/camel.rya/src/main/java/mvm/rya/camel/cbsail/CbSailProducer.java
----------------------------------------------------------------------
diff --git a/osgi/camel.rya/src/main/java/mvm/rya/camel/cbsail/CbSailProducer.java b/osgi/camel.rya/src/main/java/mvm/rya/camel/cbsail/CbSailProducer.java
index 75a39ac..ce3ff55 100644
--- a/osgi/camel.rya/src/main/java/mvm/rya/camel/cbsail/CbSailProducer.java
+++ b/osgi/camel.rya/src/main/java/mvm/rya/camel/cbsail/CbSailProducer.java
@@ -1,25 +1,26 @@
 package mvm.rya.camel.cbsail;
 
 /*
- * #%L
- * mvm.rya.camel.rya
- * %%
- * Copyright (C) 2014 Rya
- * %%
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * You may obtain a copy of the License at
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
  * 
- *      http://www.apache.org/licenses/LICENSE-2.0
+ *   http://www.apache.org/licenses/LICENSE-2.0
  * 
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- * #L%
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied.  See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
  */
 
+
+
 import org.apache.camel.Exchange;
 import org.apache.camel.impl.DefaultProducer;
 import org.openrdf.model.Statement;

http://git-wip-us.apache.org/repos/asf/incubator-rya/blob/80faf06d/osgi/camel.rya/src/test/java/mvm/rya/camel/cbsail/CbSailIntegrationTest.java
----------------------------------------------------------------------
diff --git a/osgi/camel.rya/src/test/java/mvm/rya/camel/cbsail/CbSailIntegrationTest.java b/osgi/camel.rya/src/test/java/mvm/rya/camel/cbsail/CbSailIntegrationTest.java
index 8713b3a..d4f53da 100644
--- a/osgi/camel.rya/src/test/java/mvm/rya/camel/cbsail/CbSailIntegrationTest.java
+++ b/osgi/camel.rya/src/test/java/mvm/rya/camel/cbsail/CbSailIntegrationTest.java
@@ -1,25 +1,26 @@
 package mvm.rya.camel.cbsail;
 
 /*
- * #%L
- * mvm.rya.camel.rya
- * %%
- * Copyright (C) 2014 Rya
- * %%
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * You may obtain a copy of the License at
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
  * 
- *      http://www.apache.org/licenses/LICENSE-2.0
+ *   http://www.apache.org/licenses/LICENSE-2.0
  * 
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- * #L%
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied.  See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
  */
 
+
+
 import mvm.rya.camel.cbsail.CbSailComponent;
 import org.apache.camel.EndpointInject;
 import org.apache.camel.Exchange;
@@ -113,4 +114,4 @@ public class CbSailIntegrationTest extends CamelTestSupport {
         };
     }
 
-}
\ No newline at end of file
+}

http://git-wip-us.apache.org/repos/asf/incubator-rya/blob/80faf06d/osgi/camel.rya/src/test/java/mvm/rya/camel/cbsail/CbSailPojoMain.java
----------------------------------------------------------------------
diff --git a/osgi/camel.rya/src/test/java/mvm/rya/camel/cbsail/CbSailPojoMain.java b/osgi/camel.rya/src/test/java/mvm/rya/camel/cbsail/CbSailPojoMain.java
index 83b9f65..ddb056e 100644
--- a/osgi/camel.rya/src/test/java/mvm/rya/camel/cbsail/CbSailPojoMain.java
+++ b/osgi/camel.rya/src/test/java/mvm/rya/camel/cbsail/CbSailPojoMain.java
@@ -1,25 +1,26 @@
 package mvm.rya.camel.cbsail;
 
 /*
- * #%L
- * mvm.rya.camel.rya
- * %%
- * Copyright (C) 2014 Rya
- * %%
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * You may obtain a copy of the License at
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
  * 
- *      http://www.apache.org/licenses/LICENSE-2.0
+ *   http://www.apache.org/licenses/LICENSE-2.0
  * 
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- * #L%
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied.  See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
  */
 
+
+
 import mvm.rya.camel.cbsail.CbSailComponent;
 import org.apache.camel.EndpointInject;
 import org.apache.camel.ProducerTemplate;

http://git-wip-us.apache.org/repos/asf/incubator-rya/blob/80faf06d/osgi/camel.rya/src/test/java/mvm/rya/camel/cbsail/CbSailTest.java
----------------------------------------------------------------------
diff --git a/osgi/camel.rya/src/test/java/mvm/rya/camel/cbsail/CbSailTest.java b/osgi/camel.rya/src/test/java/mvm/rya/camel/cbsail/CbSailTest.java
index d95ce25..c52d094 100644
--- a/osgi/camel.rya/src/test/java/mvm/rya/camel/cbsail/CbSailTest.java
+++ b/osgi/camel.rya/src/test/java/mvm/rya/camel/cbsail/CbSailTest.java
@@ -1,25 +1,26 @@
 package mvm.rya.camel.cbsail;
 
 /*
- * #%L
- * mvm.rya.camel.rya
- * %%
- * Copyright (C) 2014 Rya
- * %%
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * You may obtain a copy of the License at
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
  * 
- *      http://www.apache.org/licenses/LICENSE-2.0
+ *   http://www.apache.org/licenses/LICENSE-2.0
  * 
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- * #L%
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied.  See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
  */
 
+
+
 import mvm.rya.accumulo.AccumuloRdfConfiguration;
 import mvm.rya.accumulo.AccumuloRyaDAO;
 import mvm.rya.api.RdfCloudTripleStoreConfiguration;

http://git-wip-us.apache.org/repos/asf/incubator-rya/blob/80faf06d/osgi/pom.xml
----------------------------------------------------------------------
diff --git a/osgi/pom.xml b/osgi/pom.xml
index 54bc1f0..e0a8992 100644
--- a/osgi/pom.xml
+++ b/osgi/pom.xml
@@ -1,24 +1,61 @@
 <?xml version="1.0" encoding="utf-8"?>
-<project xmlns="http://maven.apache.org/POM/4.0.0" xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance" xsi:schemaLocation="http://maven.apache.org/POM/4.0.0 http://maven.apache.org/xsd/maven-4.0.0.xsd">
 
+<!--
+Licensed to the Apache Software Foundation (ASF) under one
+or more contributor license agreements.  See the NOTICE file
+distributed with this work for additional information
+regarding copyright ownership.  The ASF licenses this file
+to you under the Apache License, Version 2.0 (the
+"License"); you may not use this file except in compliance
+with the License.  You may obtain a copy of the License at
+
+  http://www.apache.org/licenses/LICENSE-2.0
+
+Unless required by applicable law or agreed to in writing,
+software distributed under the License is distributed on an
+"AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+KIND, either express or implied.  See the License for the
+specific language governing permissions and limitations
+under the License.
+-->
+
+<project xmlns="http://maven.apache.org/POM/4.0.0" xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance" xsi:schemaLocation="http://maven.apache.org/POM/4.0.0 http://maven.apache.org/xsd/maven-4.0.0.xsd">
     <modelVersion>4.0.0</modelVersion>
-    <properties>
-        <maven-bundle-plugin.version>2.1.0</maven-bundle-plugin.version>
-    </properties>
     <parent>
-        <groupId>mvm.rya</groupId>
-        <artifactId>parent</artifactId>
+        <groupId>org.apache.rya</groupId>
+        <artifactId>rya-project</artifactId>
         <version>3.2.10-SNAPSHOT</version>
     </parent>
+
     <artifactId>rya.osgi</artifactId>
+    <name>Apache Rya OSGI Bundle</name>
+
     <packaging>pom</packaging>
-    <name>${project.groupId}.${project.artifactId}</name>
+
+    <properties>
+        <maven-bundle-plugin.version>2.1.0</maven-bundle-plugin.version>
+    </properties>
+
     <modules>
         <module>alx.rya</module>
         <module>alx.rya.console</module>
         <module>camel.rya</module>
+        <!-- Disabling and documented in RYA-8 -->
+        <!--   <module>sesame-runtime-osgi</module> -->
     </modules>
     <build>
+        <plugins>
+            <!-- Move to subproject when resolving RYA-8 -->
+            <plugin>
+                <groupId>org.apache.rat</groupId>
+                <artifactId>apache-rat-plugin</artifactId>
+                <configuration>
+                    <excludes combine.children="append">
+                        <exclude>sesame-runtime-osgi/openrdf-sesame-osgi.bnd</exclude>
+                    </excludes>
+                </configuration>
+            </plugin>
+        </plugins>
         <pluginManagement>
             <plugins>
                 <plugin>

http://git-wip-us.apache.org/repos/asf/incubator-rya/blob/80faf06d/osgi/sesame-runtime-osgi/pom.xml
----------------------------------------------------------------------
diff --git a/osgi/sesame-runtime-osgi/pom.xml b/osgi/sesame-runtime-osgi/pom.xml
index db1a4bb..c454a66 100644
--- a/osgi/sesame-runtime-osgi/pom.xml
+++ b/osgi/sesame-runtime-osgi/pom.xml
@@ -1,14 +1,37 @@
+<?xml version="1.0" encoding="UTF-8"?>
+
+<!--
+Licensed to the Apache Software Foundation (ASF) under one
+or more contributor license agreements.  See the NOTICE file
+distributed with this work for additional information
+regarding copyright ownership.  The ASF licenses this file
+to you under the Apache License, Version 2.0 (the
+"License"); you may not use this file except in compliance
+with the License.  You may obtain a copy of the License at
+
+  http://www.apache.org/licenses/LICENSE-2.0
+
+Unless required by applicable law or agreed to in writing,
+software distributed under the License is distributed on an
+"AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+KIND, either express or implied.  See the License for the
+specific language governing permissions and limitations
+under the License.
+-->
+
 <project xmlns="http://maven.apache.org/POM/4.0.0" xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance" xsi:schemaLocation="http://maven.apache.org/POM/4.0.0 http://maven.apache.org/maven-v4_0_0.xsd">
     <modelVersion>4.0.0</modelVersion>
     <parent>
-		<groupId>mvm.rya</groupId>
-    	<artifactId>rya.osgi</artifactId>
-    	<version>3.0.4-SNAPSHOT</version>
-	</parent>
+        <groupId>org.apache.rya</groupId>
+        <artifactId>rya.osgi</artifactId>
+        <version>3.2.10-SNAPSHOT</version>
+    </parent>
+
     <artifactId>sesame-runtime-osgi</artifactId>
-    <version>2.6.4</version>
+    <name>Sesame Runtime for OSGI</name>
+
     <packaging>pom</packaging>
-    <name>${project.groupId}.${project.artifactId}</name>
+
     <dependencies>
         <dependency>
             <groupId>org.openrdf.sesame</groupId>

http://git-wip-us.apache.org/repos/asf/incubator-rya/blob/80faf06d/partition/common-query-ext/pom.xml
----------------------------------------------------------------------
diff --git a/partition/common-query-ext/pom.xml b/partition/common-query-ext/pom.xml
deleted file mode 100644
index 4fb0aee..0000000
--- a/partition/common-query-ext/pom.xml
+++ /dev/null
@@ -1,71 +0,0 @@
-<project xmlns="http://maven.apache.org/POM/4.0.0" xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance"
-         xsi:schemaLocation="http://maven.apache.org/POM/4.0.0 http://maven.apache.org/maven-v4_0_0.xsd">
-    <modelVersion>4.0.0</modelVersion>
-
-    <!--<parent>-->
-    <!--<groupId>sitestore</groupId>-->
-    <!--<artifactId>sitestore</artifactId>-->
-    <!--<version>2.0.0-SNAPSHOT</version>-->
-    <!--</parent>-->
-
-    <parent>
-        <groupId>mvm.rya</groupId>
-        <artifactId>parent</artifactId>
-        <version>2.0.0-SNAPSHOT</version>
-    </parent>
-
-    <groupId>sitestore.common</groupId>
-    <artifactId>common-query-ext</artifactId>
-    <name>common-query (${project.version})</name>
-    <version>1.0.0-SNAPSHOT</version>
-    <description>A set of filters and iterators for cloudbase queries</description>
-
-    <properties>
-        <skipTests>true</skipTests>
-    </properties>
-
-    <build>
-        <plugins>
-            <plugin>
-                <groupId>org.apache.maven.plugins</groupId>
-                <artifactId>maven-source-plugin</artifactId>
-                <version>2.1.2</version>
-                <executions>
-                    <execution>
-                        <id>attach-sources</id>
-                        <phase>install</phase>
-                        <goals>
-                            <goal>jar</goal>
-                        </goals>
-                    </execution>
-                </executions>
-            </plugin>
-            <plugin>
-                <groupId>org.apache.maven.plugins</groupId>
-                <artifactId>maven-surefire-plugin</artifactId>
-                <version>2.7.2</version>
-                <configuration>
-                    <skipTests>${skipTests}</skipTests>
-                </configuration>
-            </plugin>
-        </plugins>
-    </build>
-    <!--<scm>-->
-    <!--<connection>${scmLocation}/tto/ss/common/trunk/common-query</connection>-->
-    <!--</scm>-->
-    <dependencies>
-        <dependency>
-            <groupId>junit</groupId>
-            <artifactId>junit</artifactId>
-        </dependency>
-        <dependency>
-            <groupId>sitestore.common</groupId>
-            <artifactId>common-query</artifactId>
-            <version>2.0.0-SNAPSHOT</version>
-        </dependency>
-        <dependency>
-            <groupId>cloudbase</groupId>
-            <artifactId>cloudbase-core</artifactId>
-        </dependency>
-    </dependencies>
-</project>

http://git-wip-us.apache.org/repos/asf/incubator-rya/blob/80faf06d/partition/common-query-ext/src/main/java/ss/cloudbase/core/iterators/ext/EncodedSortedRangeIterator.java
----------------------------------------------------------------------
diff --git a/partition/common-query-ext/src/main/java/ss/cloudbase/core/iterators/ext/EncodedSortedRangeIterator.java b/partition/common-query-ext/src/main/java/ss/cloudbase/core/iterators/ext/EncodedSortedRangeIterator.java
deleted file mode 100644
index fb59102..0000000
--- a/partition/common-query-ext/src/main/java/ss/cloudbase/core/iterators/ext/EncodedSortedRangeIterator.java
+++ /dev/null
@@ -1,44 +0,0 @@
-package ss.cloudbase.core.iterators.ext;
-
-import cloudbase.core.data.Key;
-import cloudbase.core.data.Value;
-import cloudbase.core.iterators.IteratorEnvironment;
-import cloudbase.core.iterators.SortedKeyValueIterator;
-import org.apache.commons.codec.binary.Base64;
-import org.apache.hadoop.io.Text;
-import ss.cloudbase.core.iterators.SortedRangeIterator;
-
-import java.io.IOException;
-import java.util.Map;
-
-/**
- * Class EncodedSortedRangeIterator
- * Date: Aug 10, 2011
- * Time: 10:37:28 AM
- */
-public class EncodedSortedRangeIterator extends SortedRangeIterator {
-
-    @Override
-    public void init(SortedKeyValueIterator<Key, Value> source, Map<String, String> options, IteratorEnvironment env) throws IOException {
-        super.init(source, options, env);
-        if (options.containsKey(OPTION_LOWER_BOUND)) {
-            lower = new Text(decode(options.get(OPTION_LOWER_BOUND)));
-        } else {
-            lower = new Text("\u0000");
-        }
-
-        if (options.containsKey(OPTION_UPPER_BOUND)) {
-            upper = new Text(decode(options.get(OPTION_UPPER_BOUND)));
-        } else {
-            upper = new Text("\u0000");
-        }
-    }
-
-    public static String encode(String str) {
-        return new String(Base64.encodeBase64(str.getBytes()));
-    }
-
-    public static String decode(String str) {
-        return new String(Base64.decodeBase64(str.getBytes()));
-    }
-}
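
Note: the removed EncodedSortedRangeIterator differed from SortedRangeIterator only in Base64-decoding its bound options, which let callers pass bounds containing bytes (such as \u0000) that are unsafe in plain option strings. A minimal, self-contained round-trip sketch, assuming only commons-codec on the classpath (class and variable names here are illustrative, not from the removed code):

    import org.apache.commons.codec.binary.Base64;

    public class BoundsEncodingSketch {
        public static void main(String[] args) {
            // A raw bound containing a null byte, unsafe as a plain option value.
            String lower = "A\u0000suffix";
            // Encode before handing it to setScanIteratorOption(...).
            String encoded = new String(Base64.encodeBase64(lower.getBytes()));
            // The iterator's init(...) decoded it back to the raw bound.
            String decoded = new String(Base64.decodeBase64(encoded.getBytes()));
            System.out.println(lower.equals(decoded)); // prints: true
        }
    }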

http://git-wip-us.apache.org/repos/asf/incubator-rya/blob/80faf06d/partition/common-query-ext/src/test/java/GVDateFilterTest.java
----------------------------------------------------------------------
diff --git a/partition/common-query-ext/src/test/java/GVDateFilterTest.java b/partition/common-query-ext/src/test/java/GVDateFilterTest.java
deleted file mode 100644
index 8ea5578..0000000
--- a/partition/common-query-ext/src/test/java/GVDateFilterTest.java
+++ /dev/null
@@ -1,156 +0,0 @@
-/*
- * To change this template, choose Tools | Templates
- * and open the template in the editor.
- */
-
-import static org.junit.Assert.assertTrue;
-
-import java.io.IOException;
-import java.util.Map.Entry;
-
-import org.apache.hadoop.io.Text;
-import org.junit.Test;
-
-import ss.cloudbase.core.iterators.GMDenIntersectingIterator;
-import ss.cloudbase.core.iterators.filter.general.GVDateFilter;
-
-import cloudbase.core.client.Connector;
-import cloudbase.core.client.Scanner;
-import cloudbase.core.client.TableNotFoundException;
-import cloudbase.core.data.Key;
-import cloudbase.core.data.Range;
-import cloudbase.core.data.Value;
-import cloudbase.core.iterators.FilteringIterator;
-import cloudbase.core.security.Authorizations;
-
-/**
- *
- * @author rashah
- */
-public class GVDateFilterTest
-{
-
-  private Connector cellLevelConn;
-  private Connector serializedConn;
-  private static final String TABLE = "partition";
-  private static final Authorizations AUTHS = new Authorizations("ALPHA,BETA,GAMMA".split(","));
-
-
-
-  protected Connector getSerializedConnector()
-  {
-    if (serializedConn == null)
-    {
-      serializedConn = SampleGVData.initConnector();
-      SampleGVData.writeDenSerialized(serializedConn, SampleGVData.sampleData());
-    }
-    return serializedConn;
-  }
-
-
-
-  protected Scanner getSerializedScanner()
-  {
-    Connector c = getSerializedConnector();
-    try
-    {
-      return c.createScanner(TABLE, AUTHS);
-    }
-    catch (TableNotFoundException e)
-    {
-      return null;
-    }
-  }
-
-  protected Scanner setUpGVDFFilter(Scanner s, String timesta)
-  {
-    try
-    {
-  
-      s.setScanIterators(50, FilteringIterator.class.getName(), "gvdf");
-      s.setScanIteratorOption("gvdf", "0", GVDateFilter.class.getName());
-      s.setScanIteratorOption("gvdf", "0." + GVDateFilter.OPTIONInTimestamp, timesta);
-
-    }
-    catch (IOException e)
-    {
-      // TODO Auto-generated catch block
-      e.printStackTrace();
-    }
-    return s;
-  }
-
-  protected String checkSerialized(Scanner s)
-  {
-    StringBuilder sb = new StringBuilder();
-    boolean first = true;
-    for (Entry<Key, Value> e : s)
-    {
-
-      if (!first)
-      {
-        sb.append(",");
-      }
-      else
-      {
-        first = false;
-      }
-
-      String colq = e.getKey().getColumnQualifier().toString();
-
-      sb.append(colq);
-    }
-    return sb.toString();
-  }
-
-
-  @Test
-  public void testNoResults()
-  {
-
-    Scanner s = setUpGVDFFilter(getSerializedScanner(), "2008-03-03T20:44:28.633Z");
-    s.setRange(new Range());
-
-    assertTrue(checkSerialized(s).equals(""));
-  }
-
-
-  @Test
-  public void testOneResult()
-  {
-
-    Scanner s = setUpGVDFFilter(getSerializedScanner(), "2011-03-03T20:44:28.633Z");
-    s.setRange(new Range());
-
-    System.out.println(checkSerialized(s));
-
-    assertTrue(checkSerialized(s).equals("03"));
-  }
-
-  @Test
-  public void testTwoResults()
-  {
-
-    Scanner s = setUpGVDFFilter(getSerializedScanner(), "2009-03-03T20:44:28.633Z");
-    s.setRange(new Range());
-
-    assertTrue(checkSerialized(s).equals("04,01"));
-  }
-
-    @Test
-  public void testThreeResults()
-  {
-
-    Scanner s = setUpGVDFFilter(getSerializedScanner(), "2010-03-01T20:44:28.633Z");
-    s.setRange(new Range());
-
-    assertTrue(checkSerialized(s).equals("04,01,03"));
-  }
-
-  @Test
-  public void testDummyTest()
-  {
-    assertTrue(true);
-  }
-
-}
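
Note: setUpGVDFFilter above shows the Cloudbase FilteringIterator wiring convention used throughout these deleted tests: the option key "0" binds a filter class into slot 0 of the chain, and "0.<name>" passes an option to that filter instance. A condensed sketch of the same wiring (assumes the legacy Cloudbase and common-query jars; 50 is the scan-iterator priority):

    // s is a cloudbase.core.client.Scanner obtained from createScanner(...)
    s.setScanIterators(50, FilteringIterator.class.getName(), "gvdf");
    // Slot 0 of the chain runs GVDateFilter...
    s.setScanIteratorOption("gvdf", "0", GVDateFilter.class.getName());
    // ...configured with the timestamp to test records against.
    s.setScanIteratorOption("gvdf", "0." + GVDateFilter.OPTIONInTimestamp,
            "2010-03-01T20:44:28.633Z");
    s.setRange(new Range());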

http://git-wip-us.apache.org/repos/asf/incubator-rya/blob/80faf06d/partition/common-query-ext/src/test/java/GVFrequencyFilterTest.java
----------------------------------------------------------------------
diff --git a/partition/common-query-ext/src/test/java/GVFrequencyFilterTest.java b/partition/common-query-ext/src/test/java/GVFrequencyFilterTest.java
deleted file mode 100644
index 25c602a..0000000
--- a/partition/common-query-ext/src/test/java/GVFrequencyFilterTest.java
+++ /dev/null
@@ -1,144 +0,0 @@
-/*
- * To change this template, choose Tools | Templates
- * and open the template in the editor.
- */
-
-import static org.junit.Assert.assertTrue;
-
-import java.io.IOException;
-import java.util.Map.Entry;
-
-import org.apache.hadoop.io.Text;
-import org.junit.Test;
-
-import ss.cloudbase.core.iterators.GMDenIntersectingIterator;
-import ss.cloudbase.core.iterators.filter.general.GVFrequencyFilter;
-
-import cloudbase.core.client.Connector;
-import cloudbase.core.client.Scanner;
-import cloudbase.core.client.TableNotFoundException;
-import cloudbase.core.data.Key;
-import cloudbase.core.data.Range;
-import cloudbase.core.data.Value;
-import cloudbase.core.iterators.FilteringIterator;
-import cloudbase.core.security.Authorizations;
-
-/**
- *
- * @author rashah
- */
-public class GVFrequencyFilterTest
-{
-
-  private Connector cellLevelConn;
-  private Connector serializedConn;
-  private static final String TABLE = "partition";
-  private static final Authorizations AUTHS = new Authorizations("ALPHA,BETA,GAMMA".split(","));
-
-
-
-  protected Connector getSerializedConnector()
-  {
-    if (serializedConn == null)
-    {
-      serializedConn = SampleGVData.initConnector();
-      SampleGVData.writeDenSerialized(serializedConn, SampleGVData.sampleData());
-    }
-    return serializedConn;
-  }
-
-
-
-  protected Scanner getSerializedScanner()
-  {
-    Connector c = getSerializedConnector();
-    try
-    {
-      return c.createScanner(TABLE, AUTHS);
-    }
-    catch (TableNotFoundException e)
-    {
-      return null;
-    }
-  }
-
-  protected Scanner setUpGVDFFilter(Scanner s, String Frequency)
-  {
-    try
-    {
-      s.clearScanIterators();
-  
-      s.setScanIterators(50, FilteringIterator.class.getName(), "gvff");
-      s.setScanIteratorOption("gvff", "0", GVFrequencyFilter.class.getName());
-      s.setScanIteratorOption("gvff", "0." + GVFrequencyFilter.OPTIONFrequency, Frequency);
-
-    }
-    catch (IOException e)
-    {
-      // TODO Auto-generated catch block
-      e.printStackTrace();
-    }
-    return s;
-  }
-
-  protected String checkSerialized(Scanner s)
-  {
-    StringBuilder sb = new StringBuilder();
-    boolean first = true;
-    for (Entry<Key, Value> e : s)
-    {
-
-      if (!first)
-      {
-        sb.append(",");
-      }
-      else
-      {
-        first = false;
-      }
-
-      String colq = e.getKey().getColumnQualifier().toString();
-
-      //System.out.println(e.getKey()+"\t"+e.getValue());
-
-      sb.append(colq);
-    }
-    return sb.toString();
-  }
-
-  @Test
-  public void testNoMatch()
-  {
-
-    Scanner s = setUpGVDFFilter(getSerializedScanner(), "2000000000");
-    s.setRange(new Range());
-
-    assertTrue(checkSerialized(s).isEmpty());
-  }
-
-  @Test
-  public void testSingleMatch()
-  {
-    Scanner s = setUpGVDFFilter(getSerializedScanner(), "1500000000");
-    s.setRange(new Range());
-
-    assertTrue(checkSerialized(s).equals("01"));
-  }
-
-
-  @Test
-  public void testDoubleMatch()
-  {
-    Scanner s = setUpGVDFFilter(getSerializedScanner(), "1200000000");
-    s.setRange(new Range());
-
-    assertTrue(checkSerialized(s).equals("01,03"));
-  }
-
-  @Test
-  public void testDummyTest()
-  {
-    assertTrue(true);
-  }
-
-}

http://git-wip-us.apache.org/repos/asf/incubator-rya/blob/80faf06d/partition/common-query-ext/src/test/java/IteratorTest.java
----------------------------------------------------------------------
diff --git a/partition/common-query-ext/src/test/java/IteratorTest.java b/partition/common-query-ext/src/test/java/IteratorTest.java
deleted file mode 100644
index 1b5cf14..0000000
--- a/partition/common-query-ext/src/test/java/IteratorTest.java
+++ /dev/null
@@ -1,554 +0,0 @@
-import static org.junit.Assert.assertEquals;
-import static org.junit.Assert.assertTrue;
-
-import java.io.IOException;
-import java.util.HashMap;
-import java.util.Map;
-import java.util.Map.Entry;
-
-import org.apache.hadoop.io.Text;
-import org.junit.Test;
-
-import ss.cloudbase.core.iterators.CellLevelFilteringIterator;
-import ss.cloudbase.core.iterators.CellLevelRecordIterator;
-import ss.cloudbase.core.iterators.ConversionIterator;
-import ss.cloudbase.core.iterators.GMDenIntersectingIterator;
-import ss.cloudbase.core.iterators.SortedMinIterator;
-import ss.cloudbase.core.iterators.SortedRangeIterator;
-import ss.cloudbase.core.iterators.UniqueIterator;
-import ss.cloudbase.core.iterators.filter.CBConverter;
-import cloudbase.core.client.Connector;
-import cloudbase.core.client.Scanner;
-import cloudbase.core.client.TableNotFoundException;
-import cloudbase.core.data.Key;
-import cloudbase.core.data.PartialKey;
-import cloudbase.core.data.Range;
-import cloudbase.core.data.Value;
-import cloudbase.core.security.Authorizations;
-
-public class IteratorTest {
-	private Connector cellLevelConn;
-	private Connector serializedConn;
-	
-	private static final String TABLE = "partition";
-	private static final Authorizations AUTHS = new Authorizations("ALPHA,BETA,GAMMA".split(","));
-	
-	public IteratorTest() {
-		
-	}
-	
-	protected Connector getCellLevelConnector() {
-		if (cellLevelConn == null) {
-			cellLevelConn = SampleData.initConnector();
-			SampleData.writeDenCellLevel(cellLevelConn, SampleData.sampleData());
-		}
-		return cellLevelConn;
-	}
-	
-	protected Connector getSerializedConnector() {
-		if (serializedConn == null) {
-			serializedConn = SampleData.initConnector();
-			SampleData.writeDenSerialized(serializedConn, SampleData.sampleData());
-			SampleData.writeDenProvenance(serializedConn);
-			SampleData.writeMinIndexes(serializedConn);
-		}
-		return serializedConn;
-	}
-	
-	protected Scanner getProvenanceScanner() {
-		Connector c = getSerializedConnector();
-		try {
-			return c.createScanner("provenance", AUTHS);
-		} catch (TableNotFoundException e) {
-			return null;
-		}
-	}
-	
-	protected Scanner getCellLevelScanner() {
-		Connector c = getCellLevelConnector();
-		try {
-			return c.createScanner(TABLE, AUTHS);
-		} catch (TableNotFoundException e) {
-			return null;
-		}
-	}
-	
-	protected Scanner getSerializedScanner() {
-		Connector c = getSerializedConnector();
-		try {
-			return c.createScanner(TABLE, AUTHS);
-		} catch (TableNotFoundException e) {
-			return null;
-		}
-	}
-	
-	protected Scanner setUpIntersectingIterator(Scanner s, Text[] terms, boolean multiDoc) {
-		try {
-			s.setScanIterators(50, GMDenIntersectingIterator.class.getName(), "ii");
-		} catch (IOException e) {
-			// TODO Auto-generated catch block
-			e.printStackTrace();
-		}
-		s.setScanIteratorOption("ii", GMDenIntersectingIterator.indexFamilyOptionName, "index");
-		s.setScanIteratorOption("ii", GMDenIntersectingIterator.docFamilyOptionName, "event");
-		s.setScanIteratorOption("ii", GMDenIntersectingIterator.OPTION_MULTI_DOC, "" + multiDoc);
-		s.setScanIteratorOption("ii", GMDenIntersectingIterator.columnFamiliesOptionName, GMDenIntersectingIterator.encodeColumns(terms));
-		return s;
-	}
-	
-	protected String checkSerialized(Scanner s) {
-		StringBuilder sb = new StringBuilder();
-		boolean first = true;
-		for (Entry<Key, Value> e: s) {
-			if (!first) {
-				sb.append(",");
-			} else {
-				first = false;
-			}
-			
-			String colq = e.getKey().getColumnQualifier().toString();
-			
-			sb.append(colq);
-		}
-		return sb.toString();
-	}
-	
-	protected String checkCellLevel(Scanner s) {
-		StringBuilder sb = new StringBuilder();
-		boolean first = true;
-		for (Entry<Key, Value> e: s) {
-			String colq = e.getKey().getColumnQualifier().toString();
-			int i = colq.indexOf("\u0000");
-			if (i > -1) {
-				if (!first) {
-					sb.append(",");
-				} else {
-					first = false;
-				}
-				sb.append(colq.substring(0, i));
-				sb.append(".");
-				sb.append(colq.substring(i + 1));
-				sb.append("=");
-				sb.append(e.getValue().toString());
-			}
-		}
-		return sb.toString();
-	}
-	
-	@Test
-	public void testSerializedSingleDuplicate() {
-		Text[] terms = new Text[] {
-			new Text("A"),
-			new Text("A")
-		};
-		
-		String test = "01";
-		Scanner s = setUpIntersectingIterator(getSerializedScanner(), terms, false);
-		s.setRange(new Range());
-		assertTrue(test.equals(checkSerialized(s)));
-	}
-	
-	@Test
-	public void testCellLevelSingleDuplicate() {
-		Text[] terms = new Text[] {
-			new Text("A"),
-			new Text("A")
-		};
-		String test = "01.field0=A,01.field1=B,01.field2=C,01.field3=D,01.field4=E";
-		Scanner s = setUpIntersectingIterator(getCellLevelScanner(), terms, true);
-		s.setRange(new Range());
-		assertTrue(test.equals(checkCellLevel(s)));
-	}
-	
-	@Test
-	public void testSerializedTwoTerms() {
-		Text[] terms = new Text[] {
-			new Text("C"),
-			new Text("D")
-		};
-		// all the evens will come first
-		String test = "02,01,03";
-		Scanner s = setUpIntersectingIterator(getSerializedScanner(), terms, false);
-		s.setRange(new Range());
-		assertTrue(test.equals(checkSerialized(s)));
-	}
-	
-	@Test
-	public void testCellLevelTwoTerms() {
-		Text[] terms = new Text[] {
-			new Text("C"),
-			new Text("D")
-		};
-		
-		String test = "02.field0=B,02.field1=C,02.field2=D,02.field3=E,02.field4=F,"
-			+ "01.field0=A,01.field1=B,01.field2=C,01.field3=D,01.field4=E,"
-			+ "03.field0=C,03.field1=D,03.field2=E,03.field3=F,03.field4=G";
-		Scanner s = setUpIntersectingIterator(getCellLevelScanner(), terms, true);
-		s.setRange(new Range());
-		assertTrue(test.equals(checkCellLevel(s)));
-	}
-	
-	@Test
-	public void testSerializedTwoTermsWithRange() {
-		Text[] terms = new Text[] {
-			new Text("C"),
-			new Text("D")
-		};
-		
-		String test = "02";
-		Scanner s = setUpIntersectingIterator(getSerializedScanner(), terms, false);
-		s.setRange(new Range(new Key(new Text("0")), true, new Key(new Text("1")), false));
-		assertTrue(test.equals(checkSerialized(s)));
-	}
-	
-	@Test
-	public void testCellLevelTwoTermsWithRange() {
-		Text[] terms = new Text[] {
-			new Text("C"),
-			new Text("D")
-		};
-		
-		String test = "02.field0=B,02.field1=C,02.field2=D,02.field3=E,02.field4=F";
-		Scanner s = setUpIntersectingIterator(getCellLevelScanner(), terms, true);
-		s.setRange(new Range(new Key(new Text("0")), true, new Key(new Text("1")), false));
-		assertTrue(test.equals(checkCellLevel(s)));
-	}
-	
-	@Test
-	public void testSerializedSingleRange() {
-		Text[] terms = new Text[] {
-			new Text(GMDenIntersectingIterator.getRangeTerm("index", "A", true, "B", true)),
-			new Text(GMDenIntersectingIterator.getRangeTerm("index", "A", true, "B", true))
-		};
-		
-		String test = "02,01";
-		Scanner s = setUpIntersectingIterator(getSerializedScanner(), terms, false);
-		s.setRange(new Range());
-		assertTrue(test.equals(checkSerialized(s)));
-	}
-	
-	@Test
-	public void testSerializedMultiRange() {
-		Text[] terms = new Text[] {
-			new Text(GMDenIntersectingIterator.getRangeTerm("index", "A", true, "B", true)),
-			new Text(GMDenIntersectingIterator.getRangeTerm("index", "B", true, "C", true))
-		};
-		
-		String test = "02,01";
-		Scanner s = setUpIntersectingIterator(getSerializedScanner(), terms, false);
-		s.setRange(new Range());
-		assertTrue(test.equals(checkSerialized(s)));
-	}
-	
-	@Test
-	public void testSerializedTermAndRange() {
-		Text[] terms = new Text[] {
-			new Text("B"),
-			new Text(GMDenIntersectingIterator.getRangeTerm("index", "A", true, "E", true))
-		};
-		
-		String test = "02,01";
-		Scanner s = setUpIntersectingIterator(getSerializedScanner(), terms, false);
-		s.setRange(new Range());
-		assertTrue(test.equals(checkSerialized(s)));
-	}
-	
-	protected Scanner setUpSortedRangeIterator(Scanner s, boolean multiDoc) {
-		try {
-			s.setScanIterators(50, SortedRangeIterator.class.getName(), "ri");
-			s.setScanIteratorOption("ri", SortedRangeIterator.OPTION_COLF, "index");
-			s.setScanIteratorOption("ri", SortedRangeIterator.OPTION_DOC_COLF, "event");
-			s.setScanIteratorOption("ri", SortedRangeIterator.OPTION_LOWER_BOUND, "A");
-			s.setScanIteratorOption("ri", SortedRangeIterator.OPTION_UPPER_BOUND, "C");
-			s.setScanIteratorOption("ri", SortedRangeIterator.OPTION_START_INCLUSIVE, "true");
-			s.setScanIteratorOption("ri", SortedRangeIterator.OPTION_END_INCLUSIVE, "true");
-			s.setScanIteratorOption("ri", SortedRangeIterator.OPTION_MULTI_DOC, "" + multiDoc);
-			return s;
-		} catch (IOException e) {
-			e.printStackTrace();
-			return null;
-		}
-	}
-	
-	@Test
-	public void testSerializedSortedRangeIterator() {
-		Scanner s = setUpSortedRangeIterator(getSerializedScanner(), false);
-		String test = "02,01,03";
-		s.setRange(new Range());
-		assertTrue(test.equals(checkSerialized(s)));
-	}
-	
-	@Test
-	public void testCellLevelSortedRangeIterator() {
-		Scanner s = setUpSortedRangeIterator(getCellLevelScanner(), true);
-		String test = "02.field0=B,02.field1=C,02.field2=D,02.field3=E,02.field4=F,"
-			+ "01.field0=A,01.field1=B,01.field2=C,01.field3=D,01.field4=E,"
-			+ "03.field0=C,03.field1=D,03.field2=E,03.field3=F,03.field4=G";
-		s.setRange(new Range());
-		assertTrue(test.equals(checkCellLevel(s)));
-	}
-	
-	@Test
-	public void testUniqueIterator() {
-		Scanner s = getProvenanceScanner();
-		try {
-			s.setScanIterators(50, UniqueIterator.class.getName(), "skipper");
-			Key start = new Key(new Text("sid1"));
-			s.setRange(new Range(start, start.followingKey(PartialKey.ROW)));
-			
-			int count = 0;
-			for (Entry<Key, Value> e: s) {
-				count++;
-			}
-			
-			assertEquals(count, 3);
-		} catch (IOException e) {
-			e.printStackTrace();
-		}
-	}
-	
-	protected Scanner setUpConversionIterator(Scanner s) {
-		String[] conversions = new String[] {
-			"field0 + 10",
-			"field1 - 10",
-			"field2 * 10",
-			"field3 / 10",
-			"field4 % 10"
-		};
-		
-		try {
-			s.setScanIterators(50, ConversionIterator.class.getName(), "ci");
-		} catch (IOException e) {
-			// TODO Auto-generated catch block
-			e.printStackTrace();
-		}
-		
-		s.setScanIteratorOption("ci", ConversionIterator.OPTION_CONVERSIONS, ConversionIterator.encodeConversions(conversions));
-		Key start = new Key(new Text("1"), new Text("event"), new Text("01"));
-		s.setRange(new Range(start, true, start.followingKey(PartialKey.ROW_COLFAM_COLQUAL), false));
-		
-		return s;
-	}
-	
-	@Test
-	public void testConversionIteratorSerialized() {
-		Scanner s = getSerializedScanner();
-		s = setUpConversionIterator(s);
-		
-		CBConverter c = new CBConverter();
-		
-		boolean test = true;
-		Map<String, Double> expected = new HashMap<String, Double>();
-		
-		expected.put("field0", 20.0);
-		expected.put("field1", 1.0);
-		expected.put("field2", 120.0);
-		expected.put("field3", 1.3);
-		expected.put("field4", 4.0);
-		
-		Map<String, String> record;
-		
-		for (Entry<Key, Value> e: s) {
-			record = c.toMap(e.getKey(), e.getValue());
-			
-			for (Entry<String, String> pair: record.entrySet()) {
-				test = test && expected.get(pair.getKey()).equals(new Double(Double.parseDouble(record.get(pair.getKey()))));
-			}
-		}
-		
-		assertTrue(test);
-	}
-	
-	@Test
-	public void testConversionIteratorCellLevel() {
-		Scanner s = getCellLevelScanner();
-		s = setUpConversionIterator(s);
-		s.setScanIteratorOption("ci", ConversionIterator.OPTION_MULTI_DOC, "true");
-		
-		boolean test = true;
-		Map<String, Double> expected = new HashMap<String, Double>();
-		
-		expected.put("field0", 20.0);
-		expected.put("field1", 1.0);
-		expected.put("field2", 120.0);
-		expected.put("field3", 1.3);
-		expected.put("field4", 4.0);
-		
-		for (Entry<Key, Value> e: s) {
-			String field = getField(e.getKey());
-			if (field != null) {
-				test = test && expected.get(field).equals(new Double(Double.parseDouble(e.getValue().toString())));
-			}
-		}
-		
-		assertTrue(test);
-	}
-	
-	protected String getField(Key key) {
-		String colq = key.getColumnQualifier().toString();
-		int start = colq.indexOf("\u0000");
-		if (start == -1) {
-			return null;
-		}
-		
-		int end = colq.indexOf("\u0000", start + 1);
-		if (end == -1) {
-			end = colq.length();
-		}
-		
-		return colq.substring(start + 1, end);
-	}
-	
-	@Test
-	public void testCellLevelOGCFilter() {
-		Scanner s = getCellLevelScanner();
-		s.fetchColumnFamily(new Text("event"));
-		
-		try {
-			s.setScanIterators(60, CellLevelFilteringIterator.class.getName(), "fi");
-		} catch (IOException e) {
-			e.printStackTrace();
-		}
-		
-		s.setScanIteratorOption("fi", CellLevelFilteringIterator.OPTION_FILTER, "<PropertyIsBetween><PropertyName>field0</PropertyName>"
-			+ "<LowerBoundary><Literal>A</Literal></LowerBoundary>"
-			+ "<UpperBoundary><Literal>C</Literal></UpperBoundary>"
-			+ "</PropertyIsBetween>");
-		
-		String test = "02.field0=B,02.field1=C,02.field2=D,02.field3=E,02.field4=F,"
-			+ "01.field0=A,01.field1=B,01.field2=C,01.field3=D,01.field4=E,"
-			+ "03.field0=C,03.field1=D,03.field2=E,03.field3=F,03.field4=G";
-		assertTrue(test.equals(checkCellLevel(s)));
-	}
-	
-	@Test
-	public void testMultiLevelIterator() {
-		Scanner s = getCellLevelScanner();
-		Text[] terms = new Text[] {
-			new Text("C"),
-			new Text("D")
-		};
-		
-		s = setUpIntersectingIterator(s, terms, true);
-		
-		try {
-			s.setScanIterators(60, CellLevelFilteringIterator.class.getName(), "fi");
-		} catch (IOException e) {
-			e.printStackTrace();
-		}
-		
-		s.setScanIteratorOption("fi", CellLevelFilteringIterator.OPTION_FILTER, "<PropertyIsEqualTo><PropertyName>field0</PropertyName>"
-			+ "<Literal>A</Literal>"
-			+ "</PropertyIsEqualTo>");
-		
-		String test = "01.field0=A,01.field1=B,01.field2=C,01.field3=D,01.field4=E";
-		assertTrue(test.equals(checkCellLevel(s)));
-	}
-	
-	@Test
-	public void testMultiLevelIterator2() {
-		Scanner s = getCellLevelScanner();
-		s = setUpSortedRangeIterator(s, true);
-		try {
-			s.setScanIterators(60, CellLevelFilteringIterator.class.getName(), "fi");
-		} catch (IOException e) {
-			e.printStackTrace();
-		}
-		s.setScanIteratorOption("fi", CellLevelFilteringIterator.OPTION_FILTER, "<PropertyIsEqualTo><PropertyName>field0</PropertyName>"
-			+ "<Literal>A</Literal>"
-			+ "</PropertyIsEqualTo>");
-		
-		String test = "01.field0=A,01.field1=B,01.field2=C,01.field3=D,01.field4=E";
-		assertTrue(test.equals(checkCellLevel(s)));
-	}
-	
-	@Test
-	public void testCellLevelRecordIterator() {
-		Scanner s = getCellLevelScanner();
-		s = setUpSortedRangeIterator(s, true);
-		try {
-			s.setScanIterators(60, CellLevelRecordIterator.class.getName(), "recordItr");
-		} catch (IOException e) {
-			e.printStackTrace();
-		}
-		
-//		for (Entry<Key, Value> e: s) {
-//			String v = e.getValue().toString();
-//			v = v.replaceAll("\\u0000", ",");
-//			v = v.replaceAll("\\uFFFD", "=");
-//			System.out.println(e.getKey() + "\t" + v);
-//		}
-		String test = "02,01,03";
-		assertTrue(test.equals(checkSerialized(s)));
-	}
-	
-	@Test
-	public void testIntersectionWithoutDocLookup() {
-		Text[] terms = new Text[] {
-			new Text("C"),
-			new Text("D")
-		};
-		// all the evens will come first
-		String test = "\u000002,\u000001,\u000003";
-		Scanner s = setUpIntersectingIterator(getSerializedScanner(), terms, false);
-		s.setScanIteratorOption("ii", GMDenIntersectingIterator.OPTION_DOC_LOOKUP, "false");
-		s.setRange(new Range());
-		assertTrue(test.equals(checkSerialized(s)));
-	}
-	
-	@Test
-	public void testSimpleNot() {
-		Text[] terms = new Text[] {
-			new Text("B"),
-			new Text("F")
-		};
-		
-		boolean[] nots = new boolean[] {
-			false,
-			true
-		};
-		
-		String test="01";
-		Scanner s = setUpIntersectingIterator(getSerializedScanner(), terms, false);
-		s.setScanIteratorOption("ii", GMDenIntersectingIterator.notFlagOptionName, GMDenIntersectingIterator.encodeBooleans(nots));
-		s.setRange(new Range());
-		
-		assertTrue(test.equals(checkSerialized(s)));
-	}
-	
-	@Test
-	public void testRangeNot() {
-		Text[] terms = new Text[] {
-			new Text("B"),
-			new Text(GMDenIntersectingIterator.getRangeTerm("index", "F", true, "H", true))
-		};
-		
-		boolean[] nots = new boolean[] {
-			false,
-			true
-		};
-		
-		String test = "01";
-		Scanner s = setUpIntersectingIterator(getSerializedScanner(), terms, false);
-		s.setScanIteratorOption("ii", GMDenIntersectingIterator.notFlagOptionName, GMDenIntersectingIterator.encodeBooleans(nots));
-		s.setRange(new Range());
-		
-		assertTrue(test.equals(checkSerialized(s)));
-	}
-	
-	@Test
-	public void testMinIteratorOnLastKeys() {
-		Scanner s = getSerializedScanner();
-		try {
-			s.setScanIterators(50, SortedMinIterator.class.getName(), "min");
-		} catch (IOException e) {
-			// TODO Auto-generated catch block
-			e.printStackTrace();
-		}
-		s.setScanIteratorOption("min", SortedMinIterator.OPTION_PREFIX, "z");
-		s.setRange(new Range());
-		
-		String test = "02,04,06,08,10,01,03,05,07,09";
-		assertTrue(test.equals(checkSerialized(s)));
-	}
-}
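
Note: the tests above exercise the two term forms GMDenIntersectingIterator accepts: literal index terms and encoded range terms built with getRangeTerm(...). A condensed sketch intersecting one literal and one range term, as set up in testSerializedTermAndRange (assumes the legacy Cloudbase and common-query jars):

    Text[] terms = new Text[] {
        new Text("B"),                                   // literal index term
        new Text(GMDenIntersectingIterator.getRangeTerm("index", "A", true, "E", true))
    };
    s.setScanIterators(50, GMDenIntersectingIterator.class.getName(), "ii");
    s.setScanIteratorOption("ii", GMDenIntersectingIterator.indexFamilyOptionName, "index");
    s.setScanIteratorOption("ii", GMDenIntersectingIterator.docFamilyOptionName, "event");
    s.setScanIteratorOption("ii", GMDenIntersectingIterator.OPTION_MULTI_DOC, "false");
    s.setScanIteratorOption("ii", GMDenIntersectingIterator.columnFamiliesOptionName,
            GMDenIntersectingIterator.encodeColumns(terms));
    s.setRange(new Range());   // yields documents matching "B" AND index range [A, E]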

http://git-wip-us.apache.org/repos/asf/incubator-rya/blob/80faf06d/partition/common-query-ext/src/test/java/JTSFilterTest.java
----------------------------------------------------------------------
diff --git a/partition/common-query-ext/src/test/java/JTSFilterTest.java b/partition/common-query-ext/src/test/java/JTSFilterTest.java
deleted file mode 100644
index 8224f64..0000000
--- a/partition/common-query-ext/src/test/java/JTSFilterTest.java
+++ /dev/null
@@ -1,181 +0,0 @@
-/*
- * To change this template, choose Tools | Templates
- * and open the template in the editor.
- */
-
-import static org.junit.Assert.assertTrue;
-
-import java.io.IOException;
-import java.util.Map.Entry;
-
-import org.apache.hadoop.io.Text;
-import org.junit.Test;
-
-import ss.cloudbase.core.iterators.GMDenIntersectingIterator;
-import ss.cloudbase.core.iterators.filter.jts.JTSFilter;
-
-import cloudbase.core.client.Connector;
-import cloudbase.core.client.Scanner;
-import cloudbase.core.client.TableNotFoundException;
-import cloudbase.core.data.Key;
-import cloudbase.core.data.Range;
-import cloudbase.core.data.Value;
-import cloudbase.core.iterators.FilteringIterator;
-import cloudbase.core.security.Authorizations;
-
-/**
- *
- * @author rashah
- */
-public class JTSFilterTest
-{
-
-  private Connector cellLevelConn;
-  private Connector serializedConn;
-  private static final String TABLE = "partition";
-  private static final Authorizations AUTHS = new Authorizations("ALPHA,BETA,GAMMA".split(","));
-
-
-
-  protected Connector getSerializedConnector()
-  {
-    if (serializedConn == null)
-    {
-      serializedConn = SampleJTSData.initConnector();
-      SampleJTSData.writeDenSerialized(serializedConn, SampleJTSData.sampleData());
-    }
-    return serializedConn;
-  }
-
-
-
-  protected Scanner getSerializedScanner()
-  {
-    Connector c = getSerializedConnector();
-    try
-    {
-      return c.createScanner(TABLE, AUTHS);
-    }
-    catch (TableNotFoundException e)
-    {
-      return null;
-    }
-  }
-
-  protected Scanner setUpJTSFilter(Scanner s, String latitude, String longitude, boolean change_name)
-  {
-    try
-    {
-  
-      s.setScanIterators(50, FilteringIterator.class.getName(), "gvdf");
-      s.setScanIteratorOption("gvdf", "0", JTSFilter.class.getName());
-      s.setScanIteratorOption("gvdf", "0." + JTSFilter.OPTIONCenterPointLat, latitude);
-      s.setScanIteratorOption("gvdf", "0." + JTSFilter.OPTIONCenterPointLon, longitude);
-      if (change_name)
-          s.setScanIteratorOption("gvdf", "0." + JTSFilter.OPTIONGeometryKeyName, "beam-footprint");
-
-
-    }
-    catch (IOException e)
-    {
-      // TODO Auto-generated catch block
-      e.printStackTrace();
-    }
-    return s;
-  }
-
-  protected String checkSerialized(Scanner s)
-  {
-    StringBuilder sb = new StringBuilder();
-    boolean first = true;
-    for (Entry<Key, Value> e : s)
-    {
-
-      if (!first)
-      {
-        sb.append(",");
-      }
-      else
-      {
-        first = false;
-      }
-
-      String colq = e.getKey().getColumnQualifier().toString();
-
-      sb.append(colq);
-    }
-    return sb.toString();
-  }
-
-
-  @Test
-  public void testNoResults()
-  {
-    //London is in neither footprint - 51°30'0.00"N   0° 7'0.00"W
-    String latitude = "51.5";
-    String longitude = "0.11";
-
-    Scanner s = setUpJTSFilter(getSerializedScanner(), latitude, longitude, false);
-    s.setRange(new Range());
-
-//    System.out.println("{" + checkSerialized(s) + "}");
-    assertTrue(checkSerialized(s).isEmpty());
-  }
-
-
-  @Test
-  public void testOneResultAmerica()
-  {
-    //This is North America
-    //Points  39°44'21.00"N 104°59'3.00"W (Denver) are in the footprint
-    String latitude = "33";
-    String longitude = "-93.0";
-
-    Scanner s = setUpJTSFilter(getSerializedScanner(), latitude, longitude, false);
-    s.setRange(new Range());
-
-    System.out.println("{" + checkSerialized(s) + "}");
-    assertTrue(checkSerialized(s).equals("02"));
-  }
-
-
-  @Test
-  public void testOneResultAustralia()
-  {
-    //This is Australia
-    //Points like 22S 135E are in the beam
-    String latitude = "-9";
-    String longitude = "100.0";
-
-    Scanner s = setUpJTSFilter(getSerializedScanner(), latitude, longitude, false);
-    s.setRange(new Range());
-
-    System.out.println("{" + checkSerialized(s) + "}");
-    assertTrue(checkSerialized(s).equals("01"));
-  }
-
-  @Test
-  public void testOneResultHawaii()
-  {
-    // -164 40 - somewhere near hawaii
-
-    //This footprint is near Hawaii
-    //Points like 40N 164W are in the beam
-    String latitude = "40";
-    String longitude = "-164.0";
-
-    Scanner s = setUpJTSFilter(getSerializedScanner(), latitude, longitude, true);
-    s.setRange(new Range());
-
-    System.out.println("{" + checkSerialized(s) + "}");
-    assertTrue(checkSerialized(s).equals("03"));
-  }
-
-
-  @Test
-  public void testDummyTest()
-  {
-    assertTrue(true);
-  }
-
-}

http://git-wip-us.apache.org/repos/asf/incubator-rya/blob/80faf06d/partition/common-query-ext/src/test/java/OGCFilterTest.java
----------------------------------------------------------------------
diff --git a/partition/common-query-ext/src/test/java/OGCFilterTest.java b/partition/common-query-ext/src/test/java/OGCFilterTest.java
deleted file mode 100644
index fd54945..0000000
--- a/partition/common-query-ext/src/test/java/OGCFilterTest.java
+++ /dev/null
@@ -1,163 +0,0 @@
-import java.util.HashMap;
-import java.util.Map;
-
-import org.apache.hadoop.io.Text;
-import org.junit.Test;
-
-import cloudbase.core.data.Key;
-import cloudbase.core.data.Value;
-import ss.cloudbase.core.iterators.filter.ogc.OGCFilter;
-
-import static org.junit.Assert.*;
-
-public class OGCFilterTest {
-	private Key testKey = new Key(new Text("row"), new Text("colf"), new Text("colq"));
-	private Value testValue = new Value("uuid~event\uFFFDmy-event-hash-1\u0000date\uFFFD20100819\u0000time~dss\uFFFD212706.000\u0000frequency\uFFFD3.368248181443644E8\u0000latitude\uFFFD48.74571142707959\u0000longitude\uFFFD13.865561564126812\u0000altitude\uFFFD1047.0\u0000datetime\uFFFD2010-08-19T21:27:06.000Z\u0000test~key\uFFFD\u0000key\uFFFDa\uFFFDb".getBytes());
-
-	public OGCFilterTest() {
-
-	}
-
-	private OGCFilter getFilter(String filter) {
-		OGCFilter f = new OGCFilter();
-		Map<String, String> options = new HashMap<String, String>();
-		options.put(OGCFilter.OPTION_FILTER, filter);
-		f.init(options);
-		return f;
-	}
-
-	@Test
-	public void testBBOX() {
-		OGCFilter f = getFilter("<BBOX><gml:Envelope>"
-			+ "<gml:LowerCorner>13 48</gml:LowerCorner>"
-			+ "<gml:UpperCorner>14 49</gml:UpperCorner>"
-			+ "</gml:Envelope></BBOX>");
-		assertTrue(f.accept(testKey, testValue));
-	}
-
-	@Test
-	public void testBetweenStr() {
-		OGCFilter f = getFilter("<PropertyIsBetween><PropertyName>datetime</PropertyName>"
-			+ "<LowerBoundary><Literal>2010-08-19</Literal></LowerBoundary>"
-			+ "<UpperBoundary><Literal>2010-08-20</Literal></UpperBoundary>"
-			+ "</PropertyIsBetween>");
-
-		assertTrue(f.accept(testKey, testValue));
-	}
-
-	@Test
-	public void testBetweenNum() {
-		OGCFilter f = getFilter("<PropertyIsBetween><PropertyName>frequency</PropertyName>"
-			+ "<LowerBoundary><Literal>330000000</Literal></LowerBoundary>"
-			+ "<UpperBoundary><Literal>340000000</Literal></UpperBoundary>"
-			+ "</PropertyIsBetween>");
-
-		assertTrue(f.accept(testKey, testValue));
-	}
-
-	@Test
-	public void testEqualStr() {
-		OGCFilter f = getFilter("<PropertyIsEqualTo><PropertyName>uuid~event</PropertyName><Literal>my-event-hash-1</Literal></PropertyIsEqualTo>");
-		assertTrue(f.accept(testKey, testValue));
-	}
-
-	@Test
-	public void testEqualNum() {
-		OGCFilter f = getFilter("<PropertyIsEqualTo><PropertyName>altitude</PropertyName><Literal>1047</Literal></PropertyIsEqualTo>");
-		assertTrue(f.accept(testKey, testValue));
-	}
-
-	@Test
-	public void testGreaterThanStr() {
-		OGCFilter f = getFilter("<PropertyIsGreaterThan><PropertyName>datetime</PropertyName><Literal>2010-08-15</Literal></PropertyIsGreaterThan>");
-		assertTrue(f.accept(testKey, testValue));
-	}
-
-	@Test
-	public void testGreaterThanNum() {
-		OGCFilter f = getFilter("<PropertyIsGreaterThan><PropertyName>altitude</PropertyName><Literal>1000</Literal></PropertyIsGreaterThan>");
-		assertTrue(f.accept(testKey, testValue));
-	}
-
-	@Test
-	public void testLessThanStr() {
-		OGCFilter f = getFilter("<PropertyIsLessThan><PropertyName>datetime</PropertyName><Literal>2010-08-20</Literal></PropertyIsLessThan>");
-		assertTrue(f.accept(testKey, testValue));
-	}
-
-	@Test
-	public void testLessThanNum() {
-		OGCFilter f = getFilter("<PropertyIsLessThan><PropertyName>altitude</PropertyName><Literal>1200</Literal></PropertyIsLessThan>");
-		assertTrue(f.accept(testKey, testValue));
-	}
-
-	@Test
-	public void testLike() {
-		OGCFilter f = getFilter("<PropertyIsLike><PropertyName>uuid~event</PropertyName><Literal>*event*</Literal></PropertyIsLike>");
-		assertTrue(f.accept(testKey, testValue));
-	}
-
-	@Test
-	public void testNotEqualNum() {
-		OGCFilter f = getFilter("<PropertyIsNotEqualTo><PropertyName>altitude</PropertyName><Literal>1046</Literal></PropertyIsNotEqualTo>");
-		assertTrue(f.accept(testKey, testValue));
-	}
-
-	@Test
-	public void testNull() {
-		OGCFilter f = getFilter("<PropertyIsNull><PropertyName>test~key</PropertyName></PropertyIsNull>");
-		assertTrue(f.accept(testKey, testValue));
-	}
-
-	@Test
-	public void testNot() {
-		OGCFilter f = getFilter("<Not><PropertyIsEqualTo><PropertyName>altitude</PropertyName><Literal>1047</Literal></PropertyIsEqualTo></Not>");
-		assertFalse(f.accept(testKey, testValue));
-	}
-
-	@Test
-	public void testAnd() {
-		OGCFilter f = getFilter("<And>"
-			+ "<PropertyIsEqualTo><PropertyName>altitude</PropertyName><Literal>1047</Literal></PropertyIsEqualTo>"
-			+ "<PropertyIsNull><PropertyName>test~key</PropertyName></PropertyIsNull>"
-			+ "</And>");
-
-		assertTrue(f.accept(testKey, testValue));
-	}
-
-	@Test
-	public void testOr() {
-		OGCFilter f = getFilter("<Or>"
-			+ "<PropertyIsLike><PropertyName>uuid~event</PropertyName><Literal>*event*</Literal></PropertyIsLike>"
-			+ "<PropertyIsNull><PropertyName>uuid~event</PropertyName></PropertyIsNull>"
-			+ "</Or>");
-
-		assertTrue(f.accept(testKey, testValue));
-	}
-
-	@Test
-	public void testNand() {
-		OGCFilter f = getFilter("<Not><And>"
-			+ "<PropertyIsNull><PropertyName>uuid~event</PropertyName></PropertyIsNull>"
-			+ "<PropertyIsNull><PropertyName>test~key</PropertyName></PropertyIsNull>"
-			+ "</And></Not>");
-
-		assertTrue(f.accept(testKey, testValue));
-	}
-
-	@Test
-	public void testNor() {
-		OGCFilter f = getFilter("<Not>"
-			+ "<PropertyIsNull><PropertyName>uuid~event</PropertyName></PropertyIsNull>"
-			+ "<PropertyIsNull><PropertyName>altitude</PropertyName></PropertyIsNull>"
-			+ "</Not>");
-
-		assertTrue(f.accept(testKey, testValue));
-	}
-
-	@Test
-	public void testParse() {
-		OGCFilter f = getFilter("<PropertyIsEqualTo><PropertyName>key</PropertyName><Literal>a</Literal></PropertyIsEqualTo>");
-		assertTrue(f.accept(testKey, testValue));
-	}
-}
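
Note: the test above drives OGCFilter directly rather than through a scanner; the filter is initialized with a single option carrying the OGC filter XML, then evaluates each key/value pair. Condensed from the getFilter(...) helper and testEqualNum above:

    OGCFilter f = new OGCFilter();
    Map<String, String> options = new HashMap<String, String>();
    options.put(OGCFilter.OPTION_FILTER,
            "<PropertyIsEqualTo><PropertyName>altitude</PropertyName>"
            + "<Literal>1047</Literal></PropertyIsEqualTo>");
    f.init(options);
    // testKey/testValue as declared at the top of the test class.
    boolean keep = f.accept(testKey, testValue);   // true: the record's altitude is 1047.0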

http://git-wip-us.apache.org/repos/asf/incubator-rya/blob/80faf06d/partition/common-query-ext/src/test/java/SampleData.java
----------------------------------------------------------------------
diff --git a/partition/common-query-ext/src/test/java/SampleData.java b/partition/common-query-ext/src/test/java/SampleData.java
deleted file mode 100644
index 071076b..0000000
--- a/partition/common-query-ext/src/test/java/SampleData.java
+++ /dev/null
@@ -1,228 +0,0 @@
-import java.util.ArrayList;
-import java.util.Collection;
-import java.util.HashMap;
-import java.util.List;
-import java.util.Map;
-import java.util.Map.Entry;
-
-import org.apache.hadoop.io.Text;
-
-import cloudbase.core.client.BatchWriter;
-import cloudbase.core.client.CBException;
-import cloudbase.core.client.CBSecurityException;
-import cloudbase.core.client.Connector;
-import cloudbase.core.client.Instance;
-import cloudbase.core.client.MultiTableBatchWriter;
-import cloudbase.core.client.TableExistsException;
-import cloudbase.core.client.TableNotFoundException;
-import cloudbase.core.client.mock.MockInstance;
-import cloudbase.core.data.Mutation;
-import cloudbase.core.security.Authorizations;
-
-
-public class SampleData {
-	public static int NUM_PARTITIONS = 2;
-	public static int NUM_SAMPLES = 10;
-	
-	public static Connector initConnector() {
-		Instance instance = new MockInstance();
-		
-		try {
-			Connector connector = instance.getConnector("root", "password".getBytes());
-			
-			// set up table
-			connector.tableOperations().create("partition");
-			connector.tableOperations().create("provenance");
-			
-			// set up root's auths
-			connector.securityOperations().changeUserAuthorizations("root", new Authorizations("ALPHA,BETA,GAMMA".split(",")));
-			
-			return connector;
-		} catch (CBException e) {
-			e.printStackTrace();
-		} catch (CBSecurityException e) {
-			e.printStackTrace();
-		} catch (TableExistsException e) {
-			e.printStackTrace();
-		}
-		
-		return null;
-	}
-	
-	public static Collection<Map<String, String>> sampleData() {
-		List<Map<String, String>> list = new ArrayList<Map<String, String>>();
-		Map<String, String> item;
-		
-		for (int i = 0; i < NUM_SAMPLES; i++) {
-			item = new HashMap<String, String>();
-			for (int j = 0; j < 5; j++) {
-				item.put("field" + j , new String(new char[] {(char) ('A' + ((j + i) % 26))}));
-			}
-			list.add(item);
-		}
-		return list;
-	}
-	
-	public static void writeDenCellLevel(Connector connector, Collection<Map<String, String>> data) {
-		// write sample data
-		MultiTableBatchWriter mtbw = connector.createMultiTableBatchWriter(200000, 10000, 1);
-		try {
-			BatchWriter writer;
-			if (mtbw != null) {
-				writer = mtbw.getBatchWriter("partition");
-			} else {
-				writer = connector.createBatchWriter("partition", 200000, 10000, 1);
-			}
-			int count = 0;
-			Mutation m;
-			for (Map<String, String> object: data) {
-				count++;
-				String id = (count < 10 ? "0" + count: "" + count);
-				Text partition = new Text("" + (count % NUM_PARTITIONS));
-				
-				// write dummy record
-				m = new Mutation(partition);
-				m.put("event", id, "");
-				writer.addMutation(m);
-				
-				for (Entry<String, String> entry: object.entrySet()) {
-					// write the event mutation
-					m = new Mutation(partition);
-					m.put("event", id + "\u0000" + entry.getKey(), entry.getValue());
-					writer.addMutation(m);
-					
-					// write the general index mutation
-					m = new Mutation(partition);
-					m.put("index", entry.getValue() + "\u0000" + id, "");
-					writer.addMutation(m);
-					
-					// write the specific index mutation
-					m = new Mutation(partition);
-					m.put("index", entry.getKey() + "//" + entry.getValue() + "\u0000" + id, "");
-					writer.addMutation(m);
-				}
-			}
-			writer.close();
-		} catch (CBException e) {
-			e.printStackTrace();
-		} catch (CBSecurityException e) {
-			e.printStackTrace();
-		} catch (TableNotFoundException e) {
-			e.printStackTrace();
-		}
-	}
-	
-	public static void writeDenSerialized(Connector connector, Collection<Map<String, String>> data) {
-		// write sample data
-		MultiTableBatchWriter mtbw = connector.createMultiTableBatchWriter(200000, 10000, 1);
-		try {
-			BatchWriter writer;
-			if (mtbw != null) {
-				writer = mtbw.getBatchWriter("partition");
-			} else {
-				writer = connector.createBatchWriter("partition", 200000, 10000, 1);
-			}
-			int count = 0;
-			Mutation m;
-			for (Map<String, String> object: data) {
-				count++;
-				String id = (count < 10 ? "0" + count: "" + count);
-				Text partition = new Text("" + (count % NUM_PARTITIONS));
-				
-				StringBuilder value = new StringBuilder();
-				boolean first = true;
-				for (Entry<String, String> entry: object.entrySet()) {
-					if (!first) {
-						value.append("\u0000");
-					} else {
-						first = false;
-					}
-					value.append(entry.getKey());
-					value.append("\uFFFD");
-					value.append(entry.getValue());
-					
-					// write the general index mutation
-					m = new Mutation(partition);
-					m.put("index", entry.getValue() + "\u0000" + id, "");
-					writer.addMutation(m);
-					
-					// write the specific index mutation
-					m = new Mutation(partition);
-					m.put("index", entry.getKey() + "//" + entry.getValue() + "\u0000" + id, "");
-					writer.addMutation(m);
-				}
-				
-				// write the event mutation
-				m = new Mutation(partition);
-				m.put("event", id, value.toString());
-				writer.addMutation(m);
-			}
-			writer.close();
-		} catch (CBException e) {
-			e.printStackTrace();
-		} catch (CBSecurityException e) {
-			e.printStackTrace();
-		} catch (TableNotFoundException e) {
-			e.printStackTrace();
-		}
-	}
-	
-	public static void writeDenProvenance(Connector connector) {
-		// write sample data
-		MultiTableBatchWriter mtbw = connector.createMultiTableBatchWriter(200000, 10000, 1);
-		try {
-			BatchWriter writer;
-			if (mtbw != null) {
-				writer = mtbw.getBatchWriter("provenance");
-			} else {
-				writer = connector.createBatchWriter("provenance", 200000, 10000, 1);
-			}
-			Mutation m;
-			for (int sid = 1; sid <= 2; sid++) {
-				for (int time = 1; time <= 3; time++) {
-					for (int uuid = 1; uuid <= (6 + 2 * time); uuid++) {
-						m = new Mutation(new Text("sid" + sid));
-						m.put("time" + time, "uuid-" + Integer.toHexString(uuid), "");
-						writer.addMutation(m);
-					}
-				}
-			}
-			writer.close();
-		} catch (CBException e) {
-			e.printStackTrace();
-		} catch (CBSecurityException e) {
-			e.printStackTrace();
-		} catch (TableNotFoundException e) {
-			e.printStackTrace();
-		}
-	}
-	
-	public static void writeMinIndexes(Connector connector) {
-		// write sample data
-		MultiTableBatchWriter mtbw = connector.createMultiTableBatchWriter(200000, 10000, 1);
-		try {
-			BatchWriter writer;
-			if (mtbw != null) {
-				writer = mtbw.getBatchWriter("partition");
-			} else {
-				writer = connector.createBatchWriter("partition", 200000, 10000, 1);
-			}
-			Mutation m;
-			for (int i = 1; i <= NUM_SAMPLES; i++) {
-				m = new Mutation(new Text("" + (i % NUM_PARTITIONS)));
-				
-				String id = (i < 10 ? "0" + i: "" + i);
-				
-				m.put("index", "z_" + id + "_rdate\u0000" + id, "");
-				writer.addMutation(m);
-			}
-			writer.close();
-		} catch (CBException e) {
-			e.printStackTrace();
-		} catch (CBSecurityException e) {
-			e.printStackTrace();
-		} catch (TableNotFoundException e) {
-			e.printStackTrace();
-		}
-	}
-}
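
Note: writeDenSerialized above fixes the partition-table layout the iterator tests depend on: one serialized "event" entry per record, plus a general and a field-specific "index" entry per field, with \u0000 separating value from document id and \uFFFD separating key from value inside the serialized record. A condensed sketch of the entries written for one record (id "01" in partition row "1"), reusing the calls from the method above:

    Mutation m = new Mutation(new Text("1"));          // partition row
    m.put("index", "A" + "\u0000" + "01", "");         // general index: value -> doc id
    m.put("index", "field0//A" + "\u0000" + "01", ""); // specific index: key//value -> doc id
    m.put("event", "01", "field0\uFFFDA");             // serialized record body
    writer.addMutation(m);

SampleData issues a separate Mutation per put; they are combined into one mutation here only for brevity.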

http://git-wip-us.apache.org/repos/asf/incubator-rya/blob/80faf06d/partition/common-query-ext/src/test/java/SampleGVData.java
----------------------------------------------------------------------
diff --git a/partition/common-query-ext/src/test/java/SampleGVData.java b/partition/common-query-ext/src/test/java/SampleGVData.java
deleted file mode 100644
index d8168de..0000000
--- a/partition/common-query-ext/src/test/java/SampleGVData.java
+++ /dev/null
@@ -1,182 +0,0 @@
-
-import java.util.ArrayList;
-import java.util.Collection;
-import java.util.HashMap;
-import java.util.List;
-import java.util.Map;
-import java.util.Map.Entry;
-
-import org.apache.hadoop.io.Text;
-
-import cloudbase.core.client.BatchWriter;
-import cloudbase.core.client.CBException;
-import cloudbase.core.client.CBSecurityException;
-import cloudbase.core.client.Connector;
-import cloudbase.core.client.Instance;
-import cloudbase.core.client.MultiTableBatchWriter;
-import cloudbase.core.client.TableExistsException;
-import cloudbase.core.client.TableNotFoundException;
-import cloudbase.core.client.mock.MockInstance;
-import cloudbase.core.data.Mutation;
-import cloudbase.core.security.Authorizations;
-
-// For use in testing the Date Filter and Frequency Filter classes
-public class SampleGVData
-{
-
-  public static int NUM_PARTITIONS = 2;
-
-
-  public static Connector initConnector()
-  {
-    Instance instance = new MockInstance();
-
-    try
-    {
-      Connector connector = instance.getConnector("root", "password".getBytes());
-
-      // set up table
-      connector.tableOperations().create("partition");
-
-      // set up root's auths
-      connector.securityOperations().changeUserAuthorizations("root", new Authorizations("ALPHA,BETA,GAMMA".split(",")));
-
-      return connector;
-    }
-    catch (CBException e)
-    {
-      e.printStackTrace();
-    }
-    catch (CBSecurityException e)
-    {
-      e.printStackTrace();
-    }
-    catch (TableExistsException e)
-    {
-      e.printStackTrace();
-    }
-
-    return null;
-  }
-
-  public static Collection<Map<String, String>> sampleData()
-  {
-    List<Map<String, String>> list = new ArrayList<Map<String, String>>();
-    Map<String, String> item;
-
-    item = new HashMap<String, String>();
-    item.put("a",  "a");
-    item.put("b",  "b");
-
-    //This one is like RB
-    item.put("date-start",  "2009-01-01");
-    item.put("date-end",    "2011-02-24");
-    item.put("date-update", "2011-02-24T00:00:00Z");
-    item.put("frequency",  "1250000000");
-    item.put("bandwidth",   "500000000");
-    item.put("version",     "1");
-    list.add(item);
-
-    item = new HashMap<String, String>();
-    item.put("a",  "a");
-    item.put("b",  "b");
-    list.add(item);
-
-    //This one is like GV
-    item = new HashMap<String, String>();
-    item.put("a",  "a");
-    item.put("b",  "b");
-    item.put("date-start",  "2010-01-01");
-    item.put("date-update", "2010-01-23");
-    item.put("frequency",  "1150000000");
-    item.put("bandwidth",   "300000000");
-    list.add(item);
-
-    item = new HashMap<String, String>();
-    item.put("a",  "a");
-    item.put("b",  "b");
-    item.put("date-start",  "2009-01-01");
-    item.put("date-end",    "2011-02-24");
-    item.put("date-update", "2008-01-23");
-    list.add(item);
-
-    item = new HashMap<String, String>();
-    item.put("a",  "a");
-    item.put("b",  "b");
-    list.add(item);
-
-    return list;
-  }
-
-
-  public static void writeDenSerialized(Connector connector, Collection<Map<String, String>> data)
-  {
-    // write sample data
-    MultiTableBatchWriter mtbw = connector.createMultiTableBatchWriter(200000, 10000, 1);
-    try
-    {
-      BatchWriter writer;
-      if (mtbw != null)
-      {
-        writer = mtbw.getBatchWriter("partition");
-      }
-      else
-      {
-        writer = connector.createBatchWriter("partition", 200000, 10000, 1);
-      }
-      int count = 0;
-      Mutation m;
-      for (Map<String, String> object : data)
-      {
-        count++;
-        String id = (count < 10 ? "0" + count : "" + count);
-        Text partition = new Text("" + (count % NUM_PARTITIONS));
-
-        StringBuilder value = new StringBuilder();
-        boolean first = true;
-        for (Entry<String, String> entry : object.entrySet())
-        {
-          if (!first)
-          {
-            value.append("\u0000");
-          }
-          else
-          {
-            first = false;
-          }
-          value.append(entry.getKey());
-          value.append("\uFFFD");
-          value.append(entry.getValue());
-
-          // write the general index mutation
-          m = new Mutation(partition);
-          m.put("index", entry.getValue() + "\u0000" + id, "");
-          writer.addMutation(m);
-
-          // write the specific index mutation
-          m = new Mutation(partition);
-          m.put("index", entry.getKey() + "//" + entry.getValue() + "\u0000" + id, "");
-          writer.addMutation(m);
-        }
-
-        // write the event mutation
-        m = new Mutation(partition);
-        m.put("event", id, value.toString());
-        writer.addMutation(m);
-      }
-      writer.close();
-    }
-    catch (CBException e)
-    {
-      e.printStackTrace();
-    }
-    catch (CBSecurityException e)
-    {
-      e.printStackTrace();
-    }
-    catch (TableNotFoundException e)
-    {
-      e.printStackTrace();
-    }
-  }
-}


http://git-wip-us.apache.org/repos/asf/incubator-rya/blob/80faf06d/sail/rya.sail.impl/src/test/java/mvm/rya/ArbitraryLengthQueryTest.java
----------------------------------------------------------------------
diff --git a/sail/rya.sail.impl/src/test/java/mvm/rya/ArbitraryLengthQueryTest.java b/sail/rya.sail.impl/src/test/java/mvm/rya/ArbitraryLengthQueryTest.java
deleted file mode 100644
index 918223c..0000000
--- a/sail/rya.sail.impl/src/test/java/mvm/rya/ArbitraryLengthQueryTest.java
+++ /dev/null
@@ -1,499 +0,0 @@
-package mvm.rya;
-
-/*
- * #%L
- * mvm.rya.rya.sail.impl
- * %%
- * Copyright (C) 2014 Rya
- * %%
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * You may obtain a copy of the License at
- * 
- *      http://www.apache.org/licenses/LICENSE-2.0
- * 
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- * #L%
- */
-
-import java.io.ByteArrayInputStream;
-import java.io.IOException;
-import java.io.InputStream;
-
-import org.apache.accumulo.core.client.Connector;
-import org.apache.accumulo.core.client.Instance;
-import org.apache.accumulo.core.client.mock.MockInstance;
-import org.openrdf.model.Resource;
-import org.openrdf.query.MalformedQueryException;
-import org.openrdf.query.QueryEvaluationException;
-import org.openrdf.query.QueryLanguage;
-import org.openrdf.query.TupleQuery;
-import org.openrdf.query.TupleQueryResultHandlerException;
-import org.openrdf.query.resultio.text.tsv.SPARQLResultsTSVWriter;
-import org.openrdf.repository.Repository;
-import org.openrdf.repository.RepositoryConnection;
-import org.openrdf.repository.RepositoryException;
-import org.openrdf.rio.RDFFormat;
-import org.openrdf.rio.RDFParseException;
-
-import mvm.rya.accumulo.AccumuloRdfConfiguration;
-import mvm.rya.accumulo.AccumuloRyaDAO;
-import mvm.rya.rdftriplestore.RdfCloudTripleStore;
-import mvm.rya.rdftriplestore.RyaSailRepository;
-import mvm.rya.rdftriplestore.inference.InferenceEngine;
-import mvm.rya.rdftriplestore.namespace.NamespaceManager;
-import junit.framework.TestCase;
-
-/**
- * The purpose of this is to provide a test case that illustrates a failure that is being encountered. A working test is
- * provided as well to demonstrate that a successful query can be made.
- */
-public class ArbitraryLengthQueryTest extends TestCase {
-
-    /**
-     * The repository used for the tests.
-     */
-    private Repository repository;
-
-    @Override
-    public void setUp() throws Exception {
-        super.setUp();
-
-        final RdfCloudTripleStore store = new MockRdfCloudStore();
-
-        final NamespaceManager nm = new NamespaceManager(store.getRyaDAO(), store.getConf());
-        store.setNamespaceManager(nm);
-
-        repository = new RyaSailRepository(store);
-        repository.initialize();
-
-        load();
-    }
-
-    @Override
-    public void tearDown() throws Exception {
-        super.tearDown();
-        repository.shutDown();
-    }
-
-    /**
-     * This test works. The expected result is 6 rows ranging from "Model1Class 1" through "Model1Class 6".
-     *
-     * @throws RepositoryException
-     * @throws QueryEvaluationException
-     * @throws TupleQueryResultHandlerException
-     *
-     * @throws MalformedQueryException
-     */
-    public void testWithoutSubquery() throws RepositoryException, QueryEvaluationException, TupleQueryResultHandlerException, MalformedQueryException {
-        final String query = "SELECT ?i ?i_label ?i_class ?i_v1"
-                + "WHERE {"
-                + "?i <http://www.w3.org/2000/01/rdf-schema#label> ?i_label ."
-                + "?i a ?i_class ."
-                + "?i_class <http://www.w3.org/2000/01/rdf-schema#subClassOf>* <http://dragon-research.com/cham/model/model1#Model1Class> ."
-                + "OPTIONAL { ?i <http://dragon-research.com/cham/model/model1#name> ?i_v1 } ."
-                + "}"
-                + "ORDER BY ?i_label";
-
-        final RepositoryConnection conn = repository.getConnection();
-        final TupleQuery tupleQuery = conn.prepareTupleQuery(QueryLanguage.SPARQL, query);
-        RdfCloudTripleStoreConnectionTest.CountTupleHandler countTupleHandler = new RdfCloudTripleStoreConnectionTest.CountTupleHandler();
-        tupleQuery.evaluate(countTupleHandler);
-        assertEquals(6, countTupleHandler.getCount());
-        conn.close();
-    }
-
-    /**
-     * This test fails. The expected result is 6 rows ranging from "Model1Class 1 Event" to "Model1Class 6 Event". The
-     * current result is a RejectedExecutionException.
-     *
-     * @throws RepositoryException
-     * @throws QueryEvaluationException
-     * @throws TupleQueryResultHandlerException
-     *
-     * @throws MalformedQueryException
-     */
-    public void testWithSubquery() throws RepositoryException, QueryEvaluationException, TupleQueryResultHandlerException, MalformedQueryException {
-        final String query = "SELECT ?i ?i_label ?i_class ?i_v1 ?i_v2 ?i_v2_label ?i_v2_class ?i_v2_v1"
-                + "WHERE {"
-                + "?i <http://www.w3.org/2000/01/rdf-schema#label> ?i_label ."
-                + "?i a ?i_class ."
-                + "?i_class <http://www.w3.org/2000/01/rdf-schema#subClassOf>* <http://dragon-research.com/cham/model/model1#Event> ."
-                + "OPTIONAL { ?i <http://dragon-research.com/cham/model/model1#name> ?i_v1 } ."
-                + "?i <http://dragon-research.com/cham/model/model1#hasTemporalEntity> ?i_v2 ."
-                + "{"
-                + "SELECT ?i_v2 ?i_v2_label ?i_v2_class ?i_v2_v1"
-                + "WHERE {"
-                + "?i_v2 <http://www.w3.org/2000/01/rdf-schema#label> ?i_v2_label ."
-                + "?i_v2 a ?i_v2_class ."
-                + "?i_v2_class <http://www.w3.org/2000/01/rdf-schema#subClassOf>* <http://dragon-research.com/cham/model/model1#TemporalInstant> ."
-                + "OPTIONAL { ?i_v2 <http://dragon-research.com/cham/model/model1#dateTime> ?i_v2_v1 } ."
-                + "}"
-                + "ORDER BY ?i_v2_label"
-                + "}"
-                + "}"
-                + "ORDER BY ?i_label";
-
-        final RepositoryConnection conn = repository.getConnection();
-        final TupleQuery tupleQuery = conn.prepareTupleQuery(QueryLanguage.SPARQL, query);
-        RdfCloudTripleStoreConnectionTest.CountTupleHandler countTupleHandler = new RdfCloudTripleStoreConnectionTest.CountTupleHandler();
-        tupleQuery.evaluate(countTupleHandler);
-        assertEquals(6, countTupleHandler.getCount());
-        conn.close();
-    }
-
-    /**
-     * Load the t-box and a-box turtle from strings defined within this class.
-     *
-     * @throws RepositoryException
-     * @throws RDFParseException
-     * @throws IOException
-     */
-    private void load() throws RepositoryException, RDFParseException, IOException {
-        final RepositoryConnection conn = repository.getConnection();
-
-        // T-Box
-        String ttlString = MODEL_TTL;
-        InputStream stringInput = new ByteArrayInputStream(ttlString.getBytes());
-        conn.add(stringInput, "http://dragon-research.com/cham/model/model1", RDFFormat.TURTLE, new Resource[]{});
-
-        // A-Box
-        ttlString = BUCKET_TTL;
-        stringInput = new ByteArrayInputStream(ttlString.getBytes());
-        conn.add(stringInput, "http://dragon-research.com/cham/bucket/bucket1", RDFFormat.TURTLE, new Resource[]{});
-
-        conn.commit();
-        conn.close();
-    }
-
-    /**
-     * Mock RDF cloud store for one shot testing.
-     */
-    public class MockRdfCloudStore extends RdfCloudTripleStore {
-        public MockRdfCloudStore() {
-            super();
-            final Instance instance = new MockInstance();
-            try {
-                final AccumuloRdfConfiguration conf = new AccumuloRdfConfiguration();
-                setConf(conf);
-
-                final Connector connector = instance.getConnector("", "");
-                final AccumuloRyaDAO cdao = new AccumuloRyaDAO();
-                cdao.setConf(conf);
-                cdao.setConnector(connector);
-                setRyaDAO(cdao);
-                inferenceEngine = new InferenceEngine();
-                inferenceEngine.setRyaDAO(cdao);
-                inferenceEngine.setRefreshGraphSchedule(5000); //every 5 sec
-                inferenceEngine.setConf(conf);
-                setInferenceEngine(inferenceEngine);
-            } catch (final Exception e) {
-                e.printStackTrace();
-            }
-        }
-    }
-
-    /**
-     * The ontology t-box in turtle.
-     */
-    private static String MODEL_TTL = "@prefix :        <http://dragon-research.com/cham/model/model1#> ."
-            + "@prefix cham:    <http://dragon-research.com/cham/schema#> ."
-            + "@prefix dc:      <http://purl.org/dc/elements/1.1/> ."
-            + "@prefix owl:     <http://www.w3.org/2002/07/owl#> ."
-            + "@prefix qudt:    <http://data.nasa.gov/qudt/owl/qudt#> ."
-            + "@prefix rdf:     <http://www.w3.org/1999/02/22-rdf-syntax-ns#> ."
-            + "@prefix rdfs:    <http://www.w3.org/2000/01/rdf-schema#> ."
-            + "@prefix unit:    <http://data.nasa.gov/qudt/owl/unit#> ."
-            + "@prefix xml:     <http://www.w3.org/XML/1998/namespace> ."
-            + "@prefix xsd:     <http://www.w3.org/2001/XMLSchema#> ."
-            + ""
-            + "<http://dragon-research.com/cham/model/model1>"
-            + "      rdf:type owl:Ontology ;"
-            + "      rdfs:label \"Model1 Ontology\"^^xsd:string ;"
-            + "      :versionInfo \"0.1\"^^xsd:string ;"
-            + "      dc:title \"Model1 Ontology\"^^xsd:string ."
-            + ""
-            + ":ModelClassD"
-            + "      rdf:type owl:Class ;"
-            + "      rdfs:label \"ModelClassD\"^^xsd:string ;"
-            + "      rdfs:subClassOf"
-            + "              [ rdf:type owl:Restriction ;"
-            + "                owl:maxQualifiedCardinality"
-            + "                        \"1\"^^xsd:nonNegativeInteger ;"
-            + "                owl:onDataRange xsd:string ;"
-            + "                owl:onProperty :name"
-            + "              ] ;"
-            + "      rdfs:subClassOf"
-            + "              [ rdf:type owl:Restriction ;"
-            + "                owl:allValuesFrom :Model1ClassAssoc ;"
-            + "                owl:onProperty :hasModel1ClassAssoc"
-            + "              ] ."
-            + ""
-            + ":ModelClassC"
-            + "      rdf:type owl:Class ;"
-            + "      rdfs:label \"ModelClassC\"^^xsd:string ;"
-            + "      rdfs:subClassOf :ModelClassD ."
-            + ""
-            + ":Modle1ClassB"
-            + "      rdf:type owl:Class ;"
-            + "      rdfs:label \"Modle1ClassB\"^^xsd:string ;"
-            + "      rdfs:subClassOf :ModelClassC ."
-            + ""
-            + ":Model1ClassA"
-            + "      rdf:type owl:Class ;"
-            + "      rdfs:label \"Model1ClassA\"^^xsd:string ;"
-            + "      rdfs:subClassOf :Modle1ClassB ."
-            + ""
-            + ":Model1Class"
-            + "      rdf:type owl:Class ;"
-            + "      rdfs:label \"Model1Class\"^^xsd:string ;"
-            + "      rdfs:subClassOf :Model1ClassA ;"
-            + "      rdfs:subClassOf"
-            + "              [ rdf:type owl:Restriction ;"
-            + "                owl:maxQualifiedCardinality"
-            + "                        \"1\"^^xsd:nonNegativeInteger ;"
-            + "                owl:onDataRange xsd:string ;"
-            + "                owl:onProperty :model1ClassId"
-            + "              ] ."
-            + ""
-            + ":Model1Event"
-            + "      rdf:type owl:Class ;"
-            + "      rdfs:label \"Model1Event\"^^xsd:string ;"
-            + "      rdfs:subClassOf :Event ;"
-            + "      rdfs:subClassOf"
-            + "              [ rdf:type owl:Restriction ;"
-            + "                owl:allValuesFrom :Model1ClassA ;"
-            + "                owl:onProperty :hasModel1ClassA"
-            + "              ] ."
-            + ""
-            + ":Model1ClassAssoc"
-            + "      rdf:type owl:Class ;"
-            + "      rdfs:label \"Model1ClassAssoc\"^^xsd:string ;"
-            + "      rdfs:subClassOf owl:Thing ;"
-            + "      rdfs:subClassOf"
-            + "              [ rdf:type owl:Restriction ;"
-            + "                owl:maxQualifiedCardinality"
-            + "                        \"1\"^^xsd:nonNegativeInteger ;"
-            + "                owl:onDataRange xsd:string ;"
-            + "                owl:onProperty :name"
-            + "              ] ;"
-            + "      rdfs:subClassOf"
-            + "              [ rdf:type owl:Restriction ;"
-            + "                owl:maxQualifiedCardinality"
-            + "                        \"1\"^^xsd:nonNegativeInteger ;"
-            + "                owl:onClass :ModelClassD ;"
-            + "                owl:onProperty :hasEntity"
-            + "              ] ;"
-            + "      rdfs:subClassOf"
-            + "              [ rdf:type owl:Restriction ;"
-            + "                owl:allValuesFrom :ModelClassD ;"
-            + "                owl:onProperty :hasEntity"
-            + "              ] ."
-            + ""
-            + ":TemporalEntity"
-            + "      rdf:type owl:Class ;"
-            + "      rdfs:label \"TemporalEntity\"^^xsd:string ;"
-            + "      rdfs:subClassOf owl:Thing ."
-            + ""
-            + ":TemporalInstant"
-            + "      rdf:type owl:Class ;"
-            + "      rdfs:label \"TemporalInstant\"^^xsd:string ;"
-            + "      rdfs:subClassOf :TemporalEntity ;"
-            + "      rdfs:subClassOf"
-            + "              [ rdf:type owl:Restriction ;"
-            + "                owl:maxQualifiedCardinality"
-            + "                        \"1\"^^xsd:nonNegativeInteger ;"
-            + "                owl:onDataRange xsd:dateTime ;"
-            + "                owl:onProperty :dateTime"
-            + "              ] ."
-            + ""
-            + ":model1ClassId"
-            + "      rdf:type owl:DatatypeProperty ;"
-            + "      rdfs:domain :Model1Class ;"
-            + "      rdfs:label \"model1ClassId\"^^xsd:string ;"
-            + "      rdfs:range xsd:string ."
-            + ""
-            + ":hasModel1ClassAssoc"
-            + "      rdf:type owl:ObjectProperty ;"
-            + "      rdfs:domain :ModelClassD ;"
-            + "      rdfs:label \"hasModel1ClassAssoc\"^^xsd:string ;"
-            + "      rdfs:range :Model1ClassAssoc ."
-            + ""
-            + ":name"
-            + "      rdf:type owl:DatatypeProperty ;"
-            + "      rdfs:domain :Model1ClassAssoc , :ModelClassD ;"
-            + "      rdfs:label \"name\"^^xsd:string ;"
-            + "      rdfs:range xsd:string ."
-            + ""
-            + ":hasTemporalEntity"
-            + "      rdf:type owl:ObjectProperty ;"
-            + "      rdfs:domain :ThreatAnalysis , :Event , :TrackingData , :Threat , :Vulnerability ;"
-            + "      rdfs:label \"hasTemporalEntity\"^^xsd:string ;"
-            + "      rdfs:range :TemporalEntity ."
-            + ""
-            + ":hasEntity"
-            + "      rdf:type owl:ObjectProperty ;"
-            + "      rdfs:domain :Model1ClassAssoc ;"
-            + "      rdfs:label \"hasEntity\"^^xsd:string ;"
-            + "      rdfs:range :ModelClassD ."
-            + ""
-            + ":dateTime"
-            + "      rdf:type owl:DatatypeProperty ;"
-            + "      rdfs:domain :TemporalInstant ;"
-            + "      rdfs:label \"dateTime\"^^xsd:string ;"
-            + "      rdfs:range xsd:dateTime ."
-            + ""
-            + ":Event"
-            + "      rdf:type owl:Class ;"
-            + "      rdfs:label \"Event\"^^xsd:string ;"
-            + "      rdfs:subClassOf :ModelClassD ;"
-            + "      rdfs:subClassOf"
-            + "              [ rdf:type owl:Restriction ;"
-            + "                owl:allValuesFrom :TemporalEntity ;"
-            + "                owl:onProperty :hasTemporalEntity"
-            + "              ] ;"
-            + "      rdfs:subClassOf"
-            + "              [ rdf:type owl:Restriction ;"
-            + "                owl:maxQualifiedCardinality"
-            + "                        \"1\"^^xsd:nonNegativeInteger ;"
-            + "                owl:onClass :TemporalEntity ;"
-            + "                owl:onProperty :hasTemporalEntity"
-            + "              ] ."
-            + ""
-            + ":hasModel1ClassA"
-            + "      rdf:type owl:ObjectProperty ;"
-            + "      rdfs:domain :Model1Event ;"
-            + "      rdfs:label \"hasModel1ClassA\"^^xsd:string ;"
-            + "      rdfs:range :Model1ClassA ."
-            + ""
-            + "rdfs:label"
-            + "      rdf:type owl:AnnotationProperty ."
-            + ""
-            + "xsd:date"
-            + "      rdf:type rdfs:Datatype ."
-            + ""
-            + "xsd:time"
-            + "      rdf:type rdfs:Datatype .";
-
-    /**
-     * The ontology a-box in turtle.
-     */
-    private static String BUCKET_TTL = "@prefix :        <http://dragon-research.com/cham/bucket/bucket1#> ."
-            + "@prefix rdfs:    <http://www.w3.org/2000/01/rdf-schema#> ."
-            + "@prefix owl:     <http://www.w3.org/2002/07/owl#> ."
-            + "@prefix xsd:     <http://www.w3.org/2001/XMLSchema#> ."
-            + "@prefix rdf:     <http://www.w3.org/1999/02/22-rdf-syntax-ns#> ."
-            + "@prefix model1:   <http://dragon-research.com/cham/model/model1#> ."
-            + ""
-            + ":i1   a       model1:Model1Class ;"
-            + "      rdfs:label \"Model1Class 1\"^^xsd:string ;"
-            + "      model1:name \"Model1Class 1\"^^xsd:string ;"
-            + "      model1:hasModel1ClassAssoc :i1-assoc ;"
-            + "      model1:model1ClassId \"ID01\"^^xsd:string ."
-            + "      "
-            + ":i1-assoc a model1:Model1ClassAssoc ;"
-            + "      rdfs:label \"Model1Class 1 Assoc\"^^xsd:string ;"
-            + "      model1:hasEntity :i1-event ."
-            + "      "
-            + ":i1-event a model1:Model1Event ;"
-            + "      rdfs:label \"Model1Class 1 Event\"^^xsd:string ;"
-            + "      model1:hasTemporalEntity :i1-time ."
-            + ""
-            + ":i1-time a model1:TemporalInstant ;"
-            + "      rdfs:label \"Model1Class 1 Time\"^^xsd:string ;"
-            + "      model1:dateTime \"1994-02-07T21:47:01.000Z\"^^xsd:dateTime ."
-            + "      "
-            + ":i2   a       model1:Model1Class ;"
-            + "      rdfs:label \"Model1Class 2\"^^xsd:string ;"
-            + "      model1:name \"Model1Class 2\"^^xsd:string ;"
-            + "      model1:hasModel1ClassAssoc :i2-assoc ;"
-            + "      model1:model1ClassId \"ID02\"^^xsd:string ."
-            + ""
-            + ":i2-assoc a model1:Model1ClassAssoc ;"
-            + "      rdfs:label \"Model1Class 2 Assoc\"^^xsd:string ;"
-            + "      model1:hasEntity :i2-event ."
-            + "      "
-            + ":i2-event a model1:Model1Event ;"
-            + "      rdfs:label \"Model1Class 2 Event\"^^xsd:string ;"
-            + "      model1:hasTemporalEntity :i2-time ."
-            + ""
-            + ":i2-time a model1:TemporalInstant ;"
-            + "      rdfs:label \"Model1Class 2 Time\"^^xsd:string ;"
-            + "      model1:dateTime \"1995-11-06T05:15:01.000Z\"^^xsd:dateTime ."
-            + "      "
-            + ":i3   a       model1:Model1Class ;"
-            + "      rdfs:label \"Model1Class 3\"^^xsd:string ;"
-            + "      model1:name \"Model1Class 3\"^^xsd:string ;"
-            + "      model1:hasModel1ClassAssoc :i3-assoc ;"
-            + "      model1:model1ClassId \"ID03\"^^xsd:string ."
-            + ""
-            + ":i3-assoc a model1:Model1ClassAssoc ;"
-            + "      rdfs:label \"Model1Class 3 Assoc\"^^xsd:string ;"
-            + "      model1:hasEntity :i3-event ."
-            + "      "
-            + ":i3-event a model1:Model1Event ;"
-            + "      rdfs:label \"Model1Class 3 Event\"^^xsd:string ;"
-            + "      model1:hasTemporalEntity :i3-time ."
-            + ""
-            + ":i3-time a model1:TemporalInstant ;"
-            + "      rdfs:label \"Model1Class 3 Time\"^^xsd:string ;"
-            + "      model1:dateTime \"1999-04-30T16:30:00.000Z\"^^xsd:dateTime ."
-            + "      "
-            + ":i4   a       model1:Model1Class ;"
-            + "      rdfs:label \"Model1Class 4\"^^xsd:string ;"
-            + "      model1:name \"Model1Class 4\"^^xsd:string ;"
-            + "      model1:hasModel1ClassAssoc :i4-assoc ;"
-            + "      model1:model1ClassId \"ID04\"^^xsd:string ."
-            + ""
-            + ":i4-assoc a model1:Model1ClassAssoc ;"
-            + "      rdfs:label \"Model1Class 4 Assoc\"^^xsd:string ;"
-            + "      model1:hasEntity :i4-event ."
-            + "      "
-            + ":i4-event a model1:Model1Event ;"
-            + "      rdfs:label \"Model1Class 4 Event\"^^xsd:string ;"
-            + "      model1:hasTemporalEntity :i4-time ."
-            + ""
-            + ":i4-time a model1:TemporalInstant ;"
-            + "      rdfs:label \"Model1Class 4 Time\"^^xsd:string ;"
-            + "      model1:dateTime \"2001-02-27T21:20:00.000Z\"^^xsd:dateTime ."
-            + "      "
-            + ":i5   a       model1:Model1Class ;"
-            + "      rdfs:label \"Model1Class 5\"^^xsd:string ;"
-            + "      model1:name \"Model1Class 5\"^^xsd:string ;"
-            + "      model1:hasModel1ClassAssoc :i5-assoc ;"
-            + "      model1:model1ClassId \"ID05\"^^xsd:string ."
-            + ""
-            + ":i5-assoc a model1:Model1ClassAssoc ;"
-            + "      rdfs:label \"Model1Class 5 Assoc\"^^xsd:string ;"
-            + "      model1:hasEntity :i5-event ."
-            + "      "
-            + ":i5-event a model1:Model1Event ;"
-            + "      rdfs:label \"Model1Class 5 Event\"^^xsd:string ;"
-            + "      model1:hasTemporalEntity :i5-time ."
-            + ""
-            + ":i5-time a model1:TemporalInstant ;"
-            + "      rdfs:label \"Model1Class 5 Time\"^^xsd:string ;"
-            + "      model1:dateTime \"2002-01-16T00:30:00.000Z\"^^xsd:dateTime ."
-            + "      "
-            + ":i6   a       model1:Model1Class ;"
-            + "      rdfs:label \"Model1Class 6\"^^xsd:string ;"
-            + "      model1:name \"Model1Class 6\"^^xsd:string ;"
-            + "      model1:hasModel1ClassAssoc :i6-assoc ;"
-            + "      model1:model1ClassId \"ID06\"^^xsd:string ."
-            + ""
-            + ":i6-assoc a model1:Model1ClassAssoc ;"
-            + "      rdfs:label \"Model1Class 6 Assoc\"^^xsd:string ;"
-            + "      model1:hasEntity :i6-event ."
-            + "      "
-            + ":i6-event a model1:Model1Event ;"
-            + "      rdfs:label \"Model1Class 6 Event\"^^xsd:string ;"
-            + "      model1:hasTemporalEntity :i6-time ."
-            + ""
-            + ":i6-time a model1:TemporalInstant ;"
-            + "      rdfs:label \"Model1Class 6 Time\"^^xsd:string ;"
-            + "      model1:dateTime \"2003-04-08T13:43:00.000Z\"^^xsd:dateTime .";
-}
\ No newline at end of file
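
For reference, the MockRdfCloudStore in the deleted test above boils down to the following wiring. This is a minimal sketch using only class names that appear in the diff (the pre-move mvm.rya packages and Accumulo's MockInstance); the inference-engine setup and exception handling are left out:

// Minimal sketch: a Rya SAIL repository over an in-memory mock Accumulo.
Instance instance = new MockInstance();
Connector connector = instance.getConnector("", "");   // mock user, empty password

AccumuloRdfConfiguration conf = new AccumuloRdfConfiguration();
AccumuloRyaDAO dao = new AccumuloRyaDAO();
dao.setConf(conf);
dao.setConnector(connector);

RdfCloudTripleStore store = new RdfCloudTripleStore();
store.setConf(conf);
store.setRyaDAO(dao);
store.setNamespaceManager(new NamespaceManager(dao, conf));

Repository repository = new RyaSailRepository(store);
repository.initialize();

The deleted setUp() then loads the T-Box and A-Box turtle through an ordinary RepositoryConnection before running the two queries.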

http://git-wip-us.apache.org/repos/asf/incubator-rya/blob/80faf06d/sail/rya.sail.impl/src/test/java/mvm/rya/HashJoinTest.java
----------------------------------------------------------------------
diff --git a/sail/rya.sail.impl/src/test/java/mvm/rya/HashJoinTest.java b/sail/rya.sail.impl/src/test/java/mvm/rya/HashJoinTest.java
deleted file mode 100644
index 9f39dca..0000000
--- a/sail/rya.sail.impl/src/test/java/mvm/rya/HashJoinTest.java
+++ /dev/null
@@ -1,373 +0,0 @@
-package mvm.rya;
-
-/*
- * #%L
- * mvm.rya.rya.sail.impl
- * %%
- * Copyright (C) 2014 Rya
- * %%
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * You may obtain a copy of the License at
- * 
- *      http://www.apache.org/licenses/LICENSE-2.0
- * 
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- * #L%
- */
-
-import info.aduna.iteration.CloseableIteration;
-import junit.framework.TestCase;
-import mvm.rya.accumulo.AccumuloRdfConfiguration;
-import mvm.rya.accumulo.AccumuloRyaDAO;
-import mvm.rya.api.RdfCloudTripleStoreUtils;
-import mvm.rya.api.domain.RyaStatement;
-import mvm.rya.api.domain.RyaType;
-import mvm.rya.api.domain.RyaURI;
-import mvm.rya.api.persist.RyaDAOException;
-import mvm.rya.api.persist.query.join.HashJoin;
-import org.apache.accumulo.core.client.Connector;
-import org.apache.accumulo.core.client.mock.MockInstance;
-import org.junit.After;
-import org.junit.Before;
-import org.junit.Test;
-
-import java.util.HashSet;
-import java.util.Set;
-
-import static junit.framework.Assert.assertEquals;
-import static junit.framework.Assert.assertFalse;
-import static junit.framework.Assert.assertTrue;
-
-/**
- * Date: 7/24/12
- * Time: 5:51 PM
- */
-public class HashJoinTest {
-    private AccumuloRyaDAO dao;
-    static String litdupsNS = "urn:test:litdups#";
-    private Connector connector;
-    private AccumuloRdfConfiguration conf = new AccumuloRdfConfiguration();
-
-    @Before
-    public void init() throws Exception {
-        dao = new AccumuloRyaDAO();
-        connector = new MockInstance().getConnector("", "");
-        dao.setConnector(connector);
-        dao.setConf(conf);
-        dao.init();
-    }
-
-    @After
-    public void destroy() throws Exception {
-        dao.destroy();
-    }
-
-    @Test
-    public void testSimpleJoin() throws Exception {
-        //add data
-        RyaURI pred = new RyaURI(litdupsNS, "pred1");
-        RyaType one = new RyaType("1");
-        RyaType two = new RyaType("2");
-        RyaURI subj1 = new RyaURI(litdupsNS, "subj1");
-        RyaURI subj2 = new RyaURI(litdupsNS, "subj2");
-        RyaURI subj3 = new RyaURI(litdupsNS, "subj3");
-        RyaURI subj4 = new RyaURI(litdupsNS, "subj4");
-
-        dao.add(new RyaStatement(subj1, pred, one));
-        dao.add(new RyaStatement(subj1, pred, two));
-        dao.add(new RyaStatement(subj2, pred, one));
-        dao.add(new RyaStatement(subj2, pred, two));
-        dao.add(new RyaStatement(subj3, pred, one));
-        dao.add(new RyaStatement(subj3, pred, two));
-        dao.add(new RyaStatement(subj4, pred, one));
-        dao.add(new RyaStatement(subj4, pred, two));
-        
-
-        //1 join
-        HashJoin hjoin = new HashJoin(dao.getQueryEngine());
-        CloseableIteration<RyaURI, RyaDAOException> join = hjoin.join(null,
-                new RdfCloudTripleStoreUtils.CustomEntry<RyaURI, RyaType>(pred, one),
-                new RdfCloudTripleStoreUtils.CustomEntry<RyaURI, RyaType>(pred, two));
-
-        Set<RyaURI> uris = new HashSet<RyaURI>();
-        while (join.hasNext()) {
-            uris.add(join.next());
-        }
-        assertTrue(uris.contains(subj1));
-        assertTrue(uris.contains(subj2));
-        assertTrue(uris.contains(subj3));
-        assertTrue(uris.contains(subj4));
-        join.close();
-    }
-
-    @Test
-    public void testSimpleJoinMultiWay() throws Exception {
-        //add data
-        RyaURI pred = new RyaURI(litdupsNS, "pred1");
-        RyaType one = new RyaType("1");
-        RyaType two = new RyaType("2");
-        RyaType three = new RyaType("3");
-        RyaType four = new RyaType("4");
-        RyaURI subj1 = new RyaURI(litdupsNS, "subj1");
-        RyaURI subj2 = new RyaURI(litdupsNS, "subj2");
-        RyaURI subj3 = new RyaURI(litdupsNS, "subj3");
-        RyaURI subj4 = new RyaURI(litdupsNS, "subj4");
-
-        dao.add(new RyaStatement(subj1, pred, one));
-        dao.add(new RyaStatement(subj1, pred, two));
-        dao.add(new RyaStatement(subj1, pred, three));
-        dao.add(new RyaStatement(subj1, pred, four));
-        dao.add(new RyaStatement(subj2, pred, one));
-        dao.add(new RyaStatement(subj2, pred, two));
-        dao.add(new RyaStatement(subj2, pred, three));
-        dao.add(new RyaStatement(subj2, pred, four));
-        dao.add(new RyaStatement(subj3, pred, one));
-        dao.add(new RyaStatement(subj3, pred, two));
-        dao.add(new RyaStatement(subj3, pred, three));
-        dao.add(new RyaStatement(subj3, pred, four));
-        dao.add(new RyaStatement(subj4, pred, one));
-        dao.add(new RyaStatement(subj4, pred, two));
-        dao.add(new RyaStatement(subj4, pred, three));
-        dao.add(new RyaStatement(subj4, pred, four));
-        
-
-        //1 join
-        HashJoin hjoin = new HashJoin(dao.getQueryEngine());
-        CloseableIteration<RyaURI, RyaDAOException> join = hjoin.join(null,
-                new RdfCloudTripleStoreUtils.CustomEntry<RyaURI, RyaType>(pred, one),
-                new RdfCloudTripleStoreUtils.CustomEntry<RyaURI, RyaType>(pred, two),
-                new RdfCloudTripleStoreUtils.CustomEntry<RyaURI, RyaType>(pred, three),
-                new RdfCloudTripleStoreUtils.CustomEntry<RyaURI, RyaType>(pred, four)
-        );
-
-        Set<RyaURI> uris = new HashSet<RyaURI>();
-        while (join.hasNext()) {
-            uris.add(join.next());
-        }
-        assertTrue(uris.contains(subj1));
-        assertTrue(uris.contains(subj2));
-        assertTrue(uris.contains(subj3));
-        assertTrue(uris.contains(subj4));
-        join.close();
-    }
-
-    @Test
-    public void testHashJoinMultiWay() throws Exception {
-        //add data
-        RyaURI pred = new RyaURI(litdupsNS, "pred1");
-        RyaType zero = new RyaType("0");
-        RyaType one = new RyaType("1");
-        RyaType two = new RyaType("2");
-        RyaType three = new RyaType("3");
-        RyaType four = new RyaType("4");
-        RyaURI subj1 = new RyaURI(litdupsNS, "subj1");
-        RyaURI subj2 = new RyaURI(litdupsNS, "subj2");
-        RyaURI subj3 = new RyaURI(litdupsNS, "subj3");
-        RyaURI subj4 = new RyaURI(litdupsNS, "subj4");
-
-        dao.add(new RyaStatement(subj1, pred, one));
-        dao.add(new RyaStatement(subj1, pred, two));
-        dao.add(new RyaStatement(subj1, pred, three));
-        dao.add(new RyaStatement(subj1, pred, four));
-        dao.add(new RyaStatement(subj2, pred, zero));
-        dao.add(new RyaStatement(subj2, pred, one));
-        dao.add(new RyaStatement(subj2, pred, two));
-        dao.add(new RyaStatement(subj2, pred, three));
-        dao.add(new RyaStatement(subj2, pred, four));
-        dao.add(new RyaStatement(subj3, pred, one));
-        dao.add(new RyaStatement(subj3, pred, two));
-        dao.add(new RyaStatement(subj3, pred, four));
-        dao.add(new RyaStatement(subj4, pred, one));
-        dao.add(new RyaStatement(subj4, pred, two));
-        dao.add(new RyaStatement(subj4, pred, three));
-        dao.add(new RyaStatement(subj4, pred, four));
-        
-
-        //1 join
-        HashJoin hjoin = new HashJoin(dao.getQueryEngine());
-        CloseableIteration<RyaURI, RyaDAOException> join = hjoin.join(null,
-                new RdfCloudTripleStoreUtils.CustomEntry<RyaURI, RyaType>(pred, one),
-                new RdfCloudTripleStoreUtils.CustomEntry<RyaURI, RyaType>(pred, two),
-                new RdfCloudTripleStoreUtils.CustomEntry<RyaURI, RyaType>(pred, three),
-                new RdfCloudTripleStoreUtils.CustomEntry<RyaURI, RyaType>(pred, four)
-        );
-
-        Set<RyaURI> uris = new HashSet<RyaURI>();
-        while (join.hasNext()) {
-            uris.add(join.next());
-        }
-        assertTrue(uris.contains(subj1));
-        assertTrue(uris.contains(subj2));
-        assertTrue(uris.contains(subj4));
-        join.close();
-    }
-
-    @Test
-    public void testHashJoinMultiWayNone() throws Exception {
-        //add data
-        RyaURI pred = new RyaURI(litdupsNS, "pred1");
-        RyaType zero = new RyaType("0");
-        RyaType one = new RyaType("1");
-        RyaType two = new RyaType("2");
-        RyaType three = new RyaType("3");
-        RyaType four = new RyaType("4");
-        RyaURI subj1 = new RyaURI(litdupsNS, "subj1");
-        RyaURI subj2 = new RyaURI(litdupsNS, "subj2");
-        RyaURI subj3 = new RyaURI(litdupsNS, "subj3");
-        RyaURI subj4 = new RyaURI(litdupsNS, "subj4");
-
-        dao.add(new RyaStatement(subj1, pred, one));
-        dao.add(new RyaStatement(subj1, pred, three));
-        dao.add(new RyaStatement(subj1, pred, four));
-        dao.add(new RyaStatement(subj2, pred, zero));
-        dao.add(new RyaStatement(subj2, pred, one));
-        dao.add(new RyaStatement(subj2, pred, four));
-        dao.add(new RyaStatement(subj3, pred, two));
-        dao.add(new RyaStatement(subj3, pred, four));
-        dao.add(new RyaStatement(subj4, pred, one));
-        dao.add(new RyaStatement(subj4, pred, two));
-        dao.add(new RyaStatement(subj4, pred, three));
-        
-
-        //1 join
-        HashJoin hjoin = new HashJoin(dao.getQueryEngine());
-        CloseableIteration<RyaURI, RyaDAOException> join = hjoin.join(null,
-                new RdfCloudTripleStoreUtils.CustomEntry<RyaURI, RyaType>(pred, one),
-                new RdfCloudTripleStoreUtils.CustomEntry<RyaURI, RyaType>(pred, two),
-                new RdfCloudTripleStoreUtils.CustomEntry<RyaURI, RyaType>(pred, three),
-                new RdfCloudTripleStoreUtils.CustomEntry<RyaURI, RyaType>(pred, four)
-        );
-
-        assertFalse(join.hasNext());
-        join.close();
-    }
-
-    @Test
-    public void testHashJoinMultiWayNone2() throws Exception {
-        //add data
-        RyaURI pred = new RyaURI(litdupsNS, "pred1");
-        RyaType zero = new RyaType("0");
-        RyaType one = new RyaType("1");
-        RyaType two = new RyaType("2");
-        RyaType three = new RyaType("3");
-        RyaType four = new RyaType("4");
-        RyaURI subj1 = new RyaURI(litdupsNS, "subj1");
-        RyaURI subj2 = new RyaURI(litdupsNS, "subj2");
-        RyaURI subj3 = new RyaURI(litdupsNS, "subj3");
-        RyaURI subj4 = new RyaURI(litdupsNS, "subj4");
-
-        dao.add(new RyaStatement(subj1, pred, one));
-        dao.add(new RyaStatement(subj1, pred, four));
-        dao.add(new RyaStatement(subj2, pred, zero));
-        dao.add(new RyaStatement(subj2, pred, one));
-        dao.add(new RyaStatement(subj2, pred, four));
-        dao.add(new RyaStatement(subj3, pred, two));
-        dao.add(new RyaStatement(subj3, pred, four));
-        dao.add(new RyaStatement(subj4, pred, one));
-        dao.add(new RyaStatement(subj4, pred, two));
-        
-
-        //1 join
-        HashJoin hjoin = new HashJoin(dao.getQueryEngine());
-        CloseableIteration<RyaURI, RyaDAOException> join = hjoin.join(null,
-                new RdfCloudTripleStoreUtils.CustomEntry<RyaURI, RyaType>(pred, one),
-                new RdfCloudTripleStoreUtils.CustomEntry<RyaURI, RyaType>(pred, two),
-                new RdfCloudTripleStoreUtils.CustomEntry<RyaURI, RyaType>(pred, three),
-                new RdfCloudTripleStoreUtils.CustomEntry<RyaURI, RyaType>(pred, four)
-        );
-
-        assertFalse(join.hasNext());
-        join.close();
-    }
-
-    @Test
-    public void testSimpleHashJoinPredicateOnly() throws Exception {
-        //add data
-        RyaURI pred1 = new RyaURI(litdupsNS, "pred1");
-        RyaURI pred2 = new RyaURI(litdupsNS, "pred2");
-        RyaType one = new RyaType("1");
-        RyaURI subj1 = new RyaURI(litdupsNS, "subj1");
-        RyaURI subj2 = new RyaURI(litdupsNS, "subj2");
-        RyaURI subj3 = new RyaURI(litdupsNS, "subj3");
-        RyaURI subj4 = new RyaURI(litdupsNS, "subj4");
-
-        dao.add(new RyaStatement(subj1, pred1, one));
-        dao.add(new RyaStatement(subj1, pred2, one));
-        dao.add(new RyaStatement(subj2, pred1, one));
-        dao.add(new RyaStatement(subj2, pred2, one));
-        dao.add(new RyaStatement(subj3, pred1, one));
-        dao.add(new RyaStatement(subj3, pred2, one));
-        dao.add(new RyaStatement(subj4, pred1, one));
-        dao.add(new RyaStatement(subj4, pred2, one));
-        
-
-        //1 join
-        HashJoin ijoin = new HashJoin(dao.getQueryEngine());
-        CloseableIteration<RyaStatement, RyaDAOException> join = ijoin.join(null, pred1, pred2);
-
-        int count = 0;
-        while (join.hasNext()) {
-            RyaStatement next = join.next();
-            count++;
-        }
-        assertEquals(4, count);
-        join.close();
-    }
-
-    @Test
-    public void testSimpleHashJoinPredicateOnly2() throws Exception {
-        //add data
-        RyaURI pred1 = new RyaURI(litdupsNS, "pred1");
-        RyaURI pred2 = new RyaURI(litdupsNS, "pred2");
-        RyaType one = new RyaType("1");
-        RyaType two = new RyaType("2");
-        RyaType three = new RyaType("3");
-        RyaURI subj1 = new RyaURI(litdupsNS, "subj1");
-        RyaURI subj2 = new RyaURI(litdupsNS, "subj2");
-        RyaURI subj3 = new RyaURI(litdupsNS, "subj3");
-        RyaURI subj4 = new RyaURI(litdupsNS, "subj4");
-
-        dao.add(new RyaStatement(subj1, pred1, one));
-        dao.add(new RyaStatement(subj1, pred1, two));
-        dao.add(new RyaStatement(subj1, pred1, three));
-        dao.add(new RyaStatement(subj1, pred2, one));
-        dao.add(new RyaStatement(subj1, pred2, two));
-        dao.add(new RyaStatement(subj1, pred2, three));
-        dao.add(new RyaStatement(subj2, pred1, one));
-        dao.add(new RyaStatement(subj2, pred1, two));
-        dao.add(new RyaStatement(subj2, pred1, three));
-        dao.add(new RyaStatement(subj2, pred2, one));
-        dao.add(new RyaStatement(subj2, pred2, two));
-        dao.add(new RyaStatement(subj2, pred2, three));
-        dao.add(new RyaStatement(subj3, pred1, one));
-        dao.add(new RyaStatement(subj3, pred1, two));
-        dao.add(new RyaStatement(subj3, pred1, three));
-        dao.add(new RyaStatement(subj3, pred2, one));
-        dao.add(new RyaStatement(subj3, pred2, two));
-        dao.add(new RyaStatement(subj3, pred2, three));
-        dao.add(new RyaStatement(subj4, pred1, one));
-        dao.add(new RyaStatement(subj4, pred1, two));
-        dao.add(new RyaStatement(subj4, pred1, three));
-        dao.add(new RyaStatement(subj4, pred2, one));
-        dao.add(new RyaStatement(subj4, pred2, two));
-        dao.add(new RyaStatement(subj4, pred2, three));
-        
-
-        //1 join
-        HashJoin ijoin = new HashJoin(dao.getQueryEngine());
-        CloseableIteration<RyaStatement, RyaDAOException> join = ijoin.join(null, pred1, pred2);
-
-        int count = 0;
-        while (join.hasNext()) {
-            RyaStatement next = join.next();
-            count++;
-        }
-        assertEquals(12, count);
-        join.close();
-    }
-}
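
Stripped of the test scaffolding, the HashJoin contract exercised above is: given one or more (predicate, object) pairs, join(...) streams back every subject that holds all of them. A minimal sketch under the same assumptions as the tests (an initialized AccumuloRyaDAO named dao, plus the types and litdupsNS constant from the deleted file):

// Which subjects have both (pred1, "1") and (pred1, "2")?
// The first argument is the context; null here, as throughout the tests.
RyaURI pred = new RyaURI(litdupsNS, "pred1");
HashJoin hjoin = new HashJoin(dao.getQueryEngine());
CloseableIteration<RyaURI, RyaDAOException> join = hjoin.join(null,
        new RdfCloudTripleStoreUtils.CustomEntry<RyaURI, RyaType>(pred, new RyaType("1")),
        new RdfCloudTripleStoreUtils.CustomEntry<RyaURI, RyaType>(pred, new RyaType("2")));
try {
    Set<RyaURI> subjects = new HashSet<RyaURI>();
    while (join.hasNext()) {
        subjects.add(join.next());  // each hit matches every supplied pair
    }
} finally {
    join.close();  // CloseableIteration is closed explicitly, as in the tests
}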

http://git-wip-us.apache.org/repos/asf/incubator-rya/blob/80faf06d/sail/rya.sail.impl/src/test/java/mvm/rya/IterativeJoinTest.java
----------------------------------------------------------------------
diff --git a/sail/rya.sail.impl/src/test/java/mvm/rya/IterativeJoinTest.java b/sail/rya.sail.impl/src/test/java/mvm/rya/IterativeJoinTest.java
deleted file mode 100644
index de20d47..0000000
--- a/sail/rya.sail.impl/src/test/java/mvm/rya/IterativeJoinTest.java
+++ /dev/null
@@ -1,364 +0,0 @@
-package mvm.rya;
-
-/*
- * #%L
- * mvm.rya.rya.sail.impl
- * %%
- * Copyright (C) 2014 Rya
- * %%
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * You may obtain a copy of the License at
- * 
- *      http://www.apache.org/licenses/LICENSE-2.0
- * 
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- * #L%
- */
-
-import info.aduna.iteration.CloseableIteration;
-import junit.framework.TestCase;
-import mvm.rya.accumulo.AccumuloRdfConfiguration;
-import mvm.rya.accumulo.AccumuloRyaDAO;
-import mvm.rya.api.RdfCloudTripleStoreUtils;
-import mvm.rya.api.domain.RyaStatement;
-import mvm.rya.api.domain.RyaType;
-import mvm.rya.api.domain.RyaURI;
-import mvm.rya.api.persist.RyaDAOException;
-import mvm.rya.api.persist.query.join.IterativeJoin;
-import org.apache.accumulo.core.client.Connector;
-import org.apache.accumulo.core.client.mock.MockInstance;
-import org.junit.After;
-import org.junit.Before;
-import org.junit.Test;
-
-import java.util.HashSet;
-import java.util.Set;
-
-import static junit.framework.Assert.*;
-
-/**
- * Date: 7/24/12
- * Time: 5:51 PM
- */
-public class IterativeJoinTest {
-    private AccumuloRyaDAO dao;
-    static String litdupsNS = "urn:test:litdups#";
-    private Connector connector;
-    private AccumuloRdfConfiguration conf = new AccumuloRdfConfiguration();
-
-    @Before
-    public void init() throws Exception {
-        dao = new AccumuloRyaDAO();
-        connector = new MockInstance().getConnector("", "");
-        dao.setConnector(connector);
-        dao.setConf(conf);
-        dao.init();
-    }
-
-    @After
-    public void destroy() throws Exception {
-        dao.destroy();
-    }
-
-    @Test
-    public void testSimpleIterativeJoin() throws Exception {
-        //add data
-        RyaURI pred = new RyaURI(litdupsNS, "pred1");
-        RyaType one = new RyaType("1");
-        RyaType two = new RyaType("2");
-        RyaURI subj1 = new RyaURI(litdupsNS, "subj1");
-        RyaURI subj2 = new RyaURI(litdupsNS, "subj2");
-        RyaURI subj3 = new RyaURI(litdupsNS, "subj3");
-        RyaURI subj4 = new RyaURI(litdupsNS, "subj4");
-
-        dao.add(new RyaStatement(subj1, pred, one));
-        dao.add(new RyaStatement(subj1, pred, two));
-        dao.add(new RyaStatement(subj2, pred, one));
-        dao.add(new RyaStatement(subj2, pred, two));
-        dao.add(new RyaStatement(subj3, pred, one));
-        dao.add(new RyaStatement(subj3, pred, two));
-        dao.add(new RyaStatement(subj4, pred, one));
-        dao.add(new RyaStatement(subj4, pred, two));
-
-        //1 join
-        IterativeJoin iterJoin = new IterativeJoin(dao.getQueryEngine());
-        CloseableIteration<RyaURI, RyaDAOException> join = iterJoin.join(null, new RdfCloudTripleStoreUtils.CustomEntry<RyaURI, RyaType>(pred, one),
-                new RdfCloudTripleStoreUtils.CustomEntry<RyaURI, RyaType>(pred, two));
-
-        Set<RyaURI> uris = new HashSet<RyaURI>();
-        while (join.hasNext()) {
-            uris.add(join.next());
-        }
-        assertTrue(uris.contains(subj1));
-        assertTrue(uris.contains(subj2));
-        assertTrue(uris.contains(subj3));
-        assertTrue(uris.contains(subj4));
-        join.close();
-    }
-
-    @Test
-    public void testSimpleIterativeJoinMultiWay() throws Exception {
-        //add data
-        RyaURI pred = new RyaURI(litdupsNS, "pred1");
-        RyaType one = new RyaType("1");
-        RyaType two = new RyaType("2");
-        RyaType three = new RyaType("3");
-        RyaType four = new RyaType("4");
-        RyaURI subj1 = new RyaURI(litdupsNS, "subj1");
-        RyaURI subj2 = new RyaURI(litdupsNS, "subj2");
-        RyaURI subj3 = new RyaURI(litdupsNS, "subj3");
-        RyaURI subj4 = new RyaURI(litdupsNS, "subj4");
-
-        dao.add(new RyaStatement(subj1, pred, one));
-        dao.add(new RyaStatement(subj1, pred, two));
-        dao.add(new RyaStatement(subj1, pred, three));
-        dao.add(new RyaStatement(subj1, pred, four));
-        dao.add(new RyaStatement(subj2, pred, one));
-        dao.add(new RyaStatement(subj2, pred, two));
-        dao.add(new RyaStatement(subj2, pred, three));
-        dao.add(new RyaStatement(subj2, pred, four));
-        dao.add(new RyaStatement(subj3, pred, one));
-        dao.add(new RyaStatement(subj3, pred, two));
-        dao.add(new RyaStatement(subj3, pred, three));
-        dao.add(new RyaStatement(subj3, pred, four));
-        dao.add(new RyaStatement(subj4, pred, one));
-        dao.add(new RyaStatement(subj4, pred, two));
-        dao.add(new RyaStatement(subj4, pred, three));
-        dao.add(new RyaStatement(subj4, pred, four));
-
-        //1 join
-        IterativeJoin iterativeJoin = new IterativeJoin(dao.getQueryEngine());
-        CloseableIteration<RyaURI, RyaDAOException> join = iterativeJoin.join(null,
-                new RdfCloudTripleStoreUtils.CustomEntry<RyaURI, RyaType>(pred, one),
-                new RdfCloudTripleStoreUtils.CustomEntry<RyaURI, RyaType>(pred, two),
-                new RdfCloudTripleStoreUtils.CustomEntry<RyaURI, RyaType>(pred, three),
-                new RdfCloudTripleStoreUtils.CustomEntry<RyaURI, RyaType>(pred, four)
-        );
-
-        Set<RyaURI> uris = new HashSet<RyaURI>();
-        while (join.hasNext()) {
-            uris.add(join.next());
-        }
-        assertTrue(uris.contains(subj1));
-        assertTrue(uris.contains(subj2));
-        assertTrue(uris.contains(subj3));
-        assertTrue(uris.contains(subj4));
-        join.close();
-    }
-
-    @Test
-    public void testIterativeJoinMultiWay() throws Exception {
-        //add data
-        RyaURI pred = new RyaURI(litdupsNS, "pred1");
-        RyaType zero = new RyaType("0");
-        RyaType one = new RyaType("1");
-        RyaType two = new RyaType("2");
-        RyaType three = new RyaType("3");
-        RyaType four = new RyaType("4");
-        RyaURI subj1 = new RyaURI(litdupsNS, "subj1");
-        RyaURI subj2 = new RyaURI(litdupsNS, "subj2");
-        RyaURI subj3 = new RyaURI(litdupsNS, "subj3");
-        RyaURI subj4 = new RyaURI(litdupsNS, "subj4");
-
-        dao.add(new RyaStatement(subj1, pred, one));
-        dao.add(new RyaStatement(subj1, pred, two));
-        dao.add(new RyaStatement(subj1, pred, three));
-        dao.add(new RyaStatement(subj1, pred, four));
-        dao.add(new RyaStatement(subj2, pred, zero));
-        dao.add(new RyaStatement(subj2, pred, one));
-        dao.add(new RyaStatement(subj2, pred, two));
-        dao.add(new RyaStatement(subj2, pred, three));
-        dao.add(new RyaStatement(subj2, pred, four));
-        dao.add(new RyaStatement(subj3, pred, one));
-        dao.add(new RyaStatement(subj3, pred, two));
-        dao.add(new RyaStatement(subj3, pred, four));
-        dao.add(new RyaStatement(subj4, pred, one));
-        dao.add(new RyaStatement(subj4, pred, two));
-        dao.add(new RyaStatement(subj4, pred, three));
-        dao.add(new RyaStatement(subj4, pred, four));
-
-        //1 join
-        IterativeJoin iterativeJoin = new IterativeJoin(dao.getQueryEngine());
-        CloseableIteration<RyaURI, RyaDAOException> join = iterativeJoin.join(null,
-                new RdfCloudTripleStoreUtils.CustomEntry<RyaURI, RyaType>(pred, one),
-                new RdfCloudTripleStoreUtils.CustomEntry<RyaURI, RyaType>(pred, two),
-                new RdfCloudTripleStoreUtils.CustomEntry<RyaURI, RyaType>(pred, three),
-                new RdfCloudTripleStoreUtils.CustomEntry<RyaURI, RyaType>(pred, four)
-        );
-
-        Set<RyaURI> uris = new HashSet<RyaURI>();
-        while (join.hasNext()) {
-            uris.add(join.next());
-        }
-        assertTrue(uris.contains(subj1));
-        assertTrue(uris.contains(subj2));
-        assertTrue(uris.contains(subj4));
-        join.close();
-    }
-
-    @Test
-    public void testIterativeJoinMultiWayNone() throws Exception {
-        //add data
-        RyaURI pred = new RyaURI(litdupsNS, "pred1");
-        RyaType zero = new RyaType("0");
-        RyaType one = new RyaType("1");
-        RyaType two = new RyaType("2");
-        RyaType three = new RyaType("3");
-        RyaType four = new RyaType("4");
-        RyaURI subj1 = new RyaURI(litdupsNS, "subj1");
-        RyaURI subj2 = new RyaURI(litdupsNS, "subj2");
-        RyaURI subj3 = new RyaURI(litdupsNS, "subj3");
-        RyaURI subj4 = new RyaURI(litdupsNS, "subj4");
-
-        dao.add(new RyaStatement(subj1, pred, one));
-        dao.add(new RyaStatement(subj1, pred, three));
-        dao.add(new RyaStatement(subj1, pred, four));
-        dao.add(new RyaStatement(subj2, pred, zero));
-        dao.add(new RyaStatement(subj2, pred, one));
-        dao.add(new RyaStatement(subj2, pred, four));
-        dao.add(new RyaStatement(subj3, pred, two));
-        dao.add(new RyaStatement(subj3, pred, four));
-        dao.add(new RyaStatement(subj4, pred, one));
-        dao.add(new RyaStatement(subj4, pred, two));
-        dao.add(new RyaStatement(subj4, pred, three));
-
-        //1 join
-        IterativeJoin iterativeJoin = new IterativeJoin(dao.getQueryEngine());
-        CloseableIteration<RyaURI, RyaDAOException> join = iterativeJoin.join(null,
-                new RdfCloudTripleStoreUtils.CustomEntry<RyaURI, RyaType>(pred, one),
-                new RdfCloudTripleStoreUtils.CustomEntry<RyaURI, RyaType>(pred, two),
-                new RdfCloudTripleStoreUtils.CustomEntry<RyaURI, RyaType>(pred, three),
-                new RdfCloudTripleStoreUtils.CustomEntry<RyaURI, RyaType>(pred, four)
-        );
-
-        assertFalse(join.hasNext());
-        join.close();
-    }
-
-    @Test
-    public void testIterativeJoinMultiWayNone2() throws Exception {
-        //add data
-        RyaURI pred = new RyaURI(litdupsNS, "pred1");
-        RyaType zero = new RyaType("0");
-        RyaType one = new RyaType("1");
-        RyaType two = new RyaType("2");
-        RyaType three = new RyaType("3");
-        RyaType four = new RyaType("4");
-        RyaURI subj1 = new RyaURI(litdupsNS, "subj1");
-        RyaURI subj2 = new RyaURI(litdupsNS, "subj2");
-        RyaURI subj3 = new RyaURI(litdupsNS, "subj3");
-        RyaURI subj4 = new RyaURI(litdupsNS, "subj4");
-
-        dao.add(new RyaStatement(subj1, pred, one));
-        dao.add(new RyaStatement(subj1, pred, four));
-        dao.add(new RyaStatement(subj2, pred, zero));
-        dao.add(new RyaStatement(subj2, pred, one));
-        dao.add(new RyaStatement(subj2, pred, four));
-        dao.add(new RyaStatement(subj3, pred, two));
-        dao.add(new RyaStatement(subj3, pred, four));
-        dao.add(new RyaStatement(subj4, pred, one));
-        dao.add(new RyaStatement(subj4, pred, two));
-
-        //1 join
-        IterativeJoin iterativeJoin = new IterativeJoin(dao.getQueryEngine());
-        CloseableIteration<RyaURI, RyaDAOException> join = iterativeJoin.join(null, new RdfCloudTripleStoreUtils.CustomEntry<RyaURI, RyaType>(pred, one),
-                new RdfCloudTripleStoreUtils.CustomEntry<RyaURI, RyaType>(pred, two),
-                new RdfCloudTripleStoreUtils.CustomEntry<RyaURI, RyaType>(pred, three),
-                new RdfCloudTripleStoreUtils.CustomEntry<RyaURI, RyaType>(pred, four)
-        );
-
-        assertFalse(join.hasNext());
-        join.close();
-    }
-
-    @Test
-    public void testSimpleIterativeJoinPredicateOnly() throws Exception {
-        //add data
-        RyaURI pred1 = new RyaURI(litdupsNS, "pred1");
-        RyaURI pred2 = new RyaURI(litdupsNS, "pred2");
-        RyaType one = new RyaType("1");
-        RyaURI subj1 = new RyaURI(litdupsNS, "subj1");
-        RyaURI subj2 = new RyaURI(litdupsNS, "subj2");
-        RyaURI subj3 = new RyaURI(litdupsNS, "subj3");
-        RyaURI subj4 = new RyaURI(litdupsNS, "subj4");
-
-        dao.add(new RyaStatement(subj1, pred1, one));
-        dao.add(new RyaStatement(subj1, pred2, one));
-        dao.add(new RyaStatement(subj2, pred1, one));
-        dao.add(new RyaStatement(subj2, pred2, one));
-        dao.add(new RyaStatement(subj3, pred1, one));
-        dao.add(new RyaStatement(subj3, pred2, one));
-        dao.add(new RyaStatement(subj4, pred1, one));
-        dao.add(new RyaStatement(subj4, pred2, one));
-        
-
-        //1 join
-        IterativeJoin ijoin = new IterativeJoin(dao.getQueryEngine());
-        CloseableIteration<RyaStatement, RyaDAOException> join = ijoin.join(null, pred1, pred2);
-
-        int count = 0;
-        while (join.hasNext()) {
-            RyaStatement next = join.next();
-            count++;
-        }
-        assertEquals(4, count);
-        join.close();
-    }
-
-    @Test
-    public void testSimpleIterativeJoinPredicateOnly2() throws Exception {
-        //add data
-        RyaURI pred1 = new RyaURI(litdupsNS, "pred1");
-        RyaURI pred2 = new RyaURI(litdupsNS, "pred2");
-        RyaType one = new RyaType("1");
-        RyaType two = new RyaType("2");
-        RyaType three = new RyaType("3");
-        RyaURI subj1 = new RyaURI(litdupsNS, "subj1");
-        RyaURI subj2 = new RyaURI(litdupsNS, "subj2");
-        RyaURI subj3 = new RyaURI(litdupsNS, "subj3");
-        RyaURI subj4 = new RyaURI(litdupsNS, "subj4");
-
-        dao.add(new RyaStatement(subj1, pred1, one));
-        dao.add(new RyaStatement(subj1, pred1, two));
-        dao.add(new RyaStatement(subj1, pred1, three));
-        dao.add(new RyaStatement(subj1, pred2, one));
-        dao.add(new RyaStatement(subj1, pred2, two));
-        dao.add(new RyaStatement(subj1, pred2, three));
-        dao.add(new RyaStatement(subj2, pred1, one));
-        dao.add(new RyaStatement(subj2, pred1, two));
-        dao.add(new RyaStatement(subj2, pred1, three));
-        dao.add(new RyaStatement(subj2, pred2, one));
-        dao.add(new RyaStatement(subj2, pred2, two));
-        dao.add(new RyaStatement(subj2, pred2, three));
-        dao.add(new RyaStatement(subj3, pred1, one));
-        dao.add(new RyaStatement(subj3, pred1, two));
-        dao.add(new RyaStatement(subj3, pred1, three));
-        dao.add(new RyaStatement(subj3, pred2, one));
-        dao.add(new RyaStatement(subj3, pred2, two));
-        dao.add(new RyaStatement(subj3, pred2, three));
-        dao.add(new RyaStatement(subj4, pred1, one));
-        dao.add(new RyaStatement(subj4, pred1, two));
-        dao.add(new RyaStatement(subj4, pred1, three));
-        dao.add(new RyaStatement(subj4, pred2, one));
-        dao.add(new RyaStatement(subj4, pred2, two));
-        dao.add(new RyaStatement(subj4, pred2, three));
-        
-
-        //1 join
-        IterativeJoin ijoin = new IterativeJoin(dao.getQueryEngine());
-        CloseableIteration<RyaStatement, RyaDAOException> join = ijoin.join(null, pred1, pred2);
-
-        int count = 0;
-        while (join.hasNext()) {
-            RyaStatement next = join.next();
-            count++;
-        }
-        assertEquals(12, count);
-        join.close();
-    }
-}
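
IterativeJoin exposes the same two entry points as HashJoin; the predicate-only overload is worth isolating because its result type differs, yielding full RyaStatements rather than bare subjects. Judging from the expected counts in the deleted tests, it emits one statement per subject/object combination that occurs under every supplied predicate. A sketch under the same assumptions as above:

// Statements whose subject/object pair appears under both pred1 and pred2.
RyaURI pred1 = new RyaURI(litdupsNS, "pred1");
RyaURI pred2 = new RyaURI(litdupsNS, "pred2");
IterativeJoin ijoin = new IterativeJoin(dao.getQueryEngine());
CloseableIteration<RyaStatement, RyaDAOException> join = ijoin.join(null, pred1, pred2);
try {
    int count = 0;
    while (join.hasNext()) {
        RyaStatement match = join.next();
        count++;  // 12 in testSimpleIterativeJoinPredicateOnly2 above
    }
} finally {
    join.close();
}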

http://git-wip-us.apache.org/repos/asf/incubator-rya/blob/80faf06d/sail/rya.sail.impl/src/test/java/mvm/rya/MergeJoinTest.java
----------------------------------------------------------------------
diff --git a/sail/rya.sail.impl/src/test/java/mvm/rya/MergeJoinTest.java b/sail/rya.sail.impl/src/test/java/mvm/rya/MergeJoinTest.java
deleted file mode 100644
index 370f3fe..0000000
--- a/sail/rya.sail.impl/src/test/java/mvm/rya/MergeJoinTest.java
+++ /dev/null
@@ -1,369 +0,0 @@
-package mvm.rya;
-
-/*
- * #%L
- * mvm.rya.rya.sail.impl
- * %%
- * Copyright (C) 2014 Rya
- * %%
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * You may obtain a copy of the License at
- * 
- *      http://www.apache.org/licenses/LICENSE-2.0
- * 
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- * #L%
- */
-
-import info.aduna.iteration.CloseableIteration;
-import mvm.rya.accumulo.AccumuloRdfConfiguration;
-import mvm.rya.accumulo.AccumuloRyaDAO;
-import mvm.rya.api.domain.RyaStatement;
-import mvm.rya.api.domain.RyaType;
-import mvm.rya.api.domain.RyaURI;
-import mvm.rya.api.persist.RyaDAOException;
-import mvm.rya.api.persist.query.join.MergeJoin;
-import org.apache.accumulo.core.client.Connector;
-import org.apache.accumulo.core.client.mock.MockInstance;
-import org.junit.After;
-import org.junit.Before;
-import org.junit.Ignore;
-import org.junit.Test;
-
-import java.util.HashSet;
-import java.util.Set;
-
-import static junit.framework.Assert.*;
-import static mvm.rya.api.RdfCloudTripleStoreUtils.CustomEntry;
-
-/**
- * TODO: Move to rya.api when we have proper mock ryaDao
- *
- * Date: 7/24/12
- * Time: 9:49 AM
- */
-public class MergeJoinTest {
-
-    private AccumuloRyaDAO dao;
-    static String litdupsNS = "urn:test:litdups#";
-    private Connector connector;
-    private AccumuloRdfConfiguration conf = new AccumuloRdfConfiguration();
-
-    @Before
-    public void init() throws Exception {
-        dao = new AccumuloRyaDAO();
-        connector = new MockInstance().getConnector("", "");
-        dao.setConnector(connector);
-        dao.setConf(conf);
-        dao.init();
-    }
-
-    @After
-    public void destroy() throws Exception {
-        dao.destroy();
-    }
-
-    @Test
-    public void testSimpleMergeJoin() throws Exception {
-        //add data
-        RyaURI pred = new RyaURI(litdupsNS, "pred1");
-        RyaType one = new RyaType("1");
-        RyaType two = new RyaType("2");
-        RyaURI subj1 = new RyaURI(litdupsNS, "subj1");
-        RyaURI subj2 = new RyaURI(litdupsNS, "subj2");
-        RyaURI subj3 = new RyaURI(litdupsNS, "subj3");
-        RyaURI subj4 = new RyaURI(litdupsNS, "subj4");
-
-        dao.add(new RyaStatement(subj1, pred, one));
-        dao.add(new RyaStatement(subj1, pred, two));
-        dao.add(new RyaStatement(subj2, pred, one));
-        dao.add(new RyaStatement(subj2, pred, two));
-        dao.add(new RyaStatement(subj3, pred, one));
-        dao.add(new RyaStatement(subj3, pred, two));
-        dao.add(new RyaStatement(subj4, pred, one));
-        dao.add(new RyaStatement(subj4, pred, two));
-        
-
-        //1 join
-        MergeJoin mergeJoin = new MergeJoin(dao.getQueryEngine());
-        CloseableIteration<RyaURI, RyaDAOException> join = mergeJoin.join(null, new CustomEntry<RyaURI, RyaType>(pred, one),
-                new CustomEntry<RyaURI, RyaType>(pred, two));
-
-        Set<RyaURI> uris = new HashSet<RyaURI>();
-        while (join.hasNext()) {
-            uris.add(join.next());
-        }
-        assertTrue(uris.contains(subj1));
-        assertTrue(uris.contains(subj2));
-        assertTrue(uris.contains(subj3));
-        assertTrue(uris.contains(subj4));
-        join.close();
-    }
-
-    @Test
-    public void testSimpleMergeJoinPredicateOnly() throws Exception {
-        //add data
-        RyaURI pred1 = new RyaURI(litdupsNS, "pred1");
-        RyaURI pred2 = new RyaURI(litdupsNS, "pred2");
-        RyaType one = new RyaType("1");
-        RyaURI subj1 = new RyaURI(litdupsNS, "subj1");
-        RyaURI subj2 = new RyaURI(litdupsNS, "subj2");
-        RyaURI subj3 = new RyaURI(litdupsNS, "subj3");
-        RyaURI subj4 = new RyaURI(litdupsNS, "subj4");
-
-        dao.add(new RyaStatement(subj1, pred1, one));
-        dao.add(new RyaStatement(subj1, pred2, one));
-        dao.add(new RyaStatement(subj2, pred1, one));
-        dao.add(new RyaStatement(subj2, pred2, one));
-        dao.add(new RyaStatement(subj3, pred1, one));
-        dao.add(new RyaStatement(subj3, pred2, one));
-        dao.add(new RyaStatement(subj4, pred1, one));
-        dao.add(new RyaStatement(subj4, pred2, one));
-        
-
-        //1 join
-        MergeJoin mergeJoin = new MergeJoin(dao.getQueryEngine());
-        CloseableIteration<RyaStatement, RyaDAOException> join = mergeJoin.join(null, pred1, pred2);
-
-        int count = 0;
-        while (join.hasNext()) {
-            RyaStatement next = join.next();
-            count++;
-        }
-        assertEquals(4, count);
-        join.close();
-    }
-
-    @Test
-    public void testSimpleMergeJoinPredicateOnly2() throws Exception {
-        //add data
-        RyaURI pred1 = new RyaURI(litdupsNS, "pred1");
-        RyaURI pred2 = new RyaURI(litdupsNS, "pred2");
-        RyaType one = new RyaType("1");
-        RyaType two = new RyaType("2");
-        RyaType three = new RyaType("3");
-        RyaURI subj1 = new RyaURI(litdupsNS, "subj1");
-        RyaURI subj2 = new RyaURI(litdupsNS, "subj2");
-        RyaURI subj3 = new RyaURI(litdupsNS, "subj3");
-        RyaURI subj4 = new RyaURI(litdupsNS, "subj4");
-
-        dao.add(new RyaStatement(subj1, pred1, one));
-        dao.add(new RyaStatement(subj1, pred1, two));
-        dao.add(new RyaStatement(subj1, pred1, three));
-        dao.add(new RyaStatement(subj1, pred2, one));
-        dao.add(new RyaStatement(subj1, pred2, two));
-        dao.add(new RyaStatement(subj1, pred2, three));
-        dao.add(new RyaStatement(subj2, pred1, one));
-        dao.add(new RyaStatement(subj2, pred1, two));
-        dao.add(new RyaStatement(subj2, pred1, three));
-        dao.add(new RyaStatement(subj2, pred2, one));
-        dao.add(new RyaStatement(subj2, pred2, two));
-        dao.add(new RyaStatement(subj2, pred2, three));
-        dao.add(new RyaStatement(subj3, pred1, one));
-        dao.add(new RyaStatement(subj3, pred1, two));
-        dao.add(new RyaStatement(subj3, pred1, three));
-        dao.add(new RyaStatement(subj3, pred2, one));
-        dao.add(new RyaStatement(subj3, pred2, two));
-        dao.add(new RyaStatement(subj3, pred2, three));
-        dao.add(new RyaStatement(subj4, pred1, one));
-        dao.add(new RyaStatement(subj4, pred1, two));
-        dao.add(new RyaStatement(subj4, pred1, three));
-        dao.add(new RyaStatement(subj4, pred2, one));
-        dao.add(new RyaStatement(subj4, pred2, two));
-        dao.add(new RyaStatement(subj4, pred2, three));
-        
-
-        //1 join
-        MergeJoin mergeJoin = new MergeJoin(dao.getQueryEngine());
-        CloseableIteration<RyaStatement, RyaDAOException> join = mergeJoin.join(null, pred1, pred2);
-
-        int count = 0;
-        while (join.hasNext()) {
-            RyaStatement next = join.next();
-            count++;
-        }
-        assertEquals(12, count);
-        join.close();
-    }
-
-    @Test
-    public void testSimpleMergeJoinMultiWay() throws Exception {
-        //add data
-        RyaURI pred = new RyaURI(litdupsNS, "pred1");
-        RyaType one = new RyaType("1");
-        RyaType two = new RyaType("2");
-        RyaType three = new RyaType("3");
-        RyaType four = new RyaType("4");
-        RyaURI subj1 = new RyaURI(litdupsNS, "subj1");
-        RyaURI subj2 = new RyaURI(litdupsNS, "subj2");
-        RyaURI subj3 = new RyaURI(litdupsNS, "subj3");
-        RyaURI subj4 = new RyaURI(litdupsNS, "subj4");
-
-        dao.add(new RyaStatement(subj1, pred, one));
-        dao.add(new RyaStatement(subj1, pred, two));
-        dao.add(new RyaStatement(subj1, pred, three));
-        dao.add(new RyaStatement(subj1, pred, four));
-        dao.add(new RyaStatement(subj2, pred, one));
-        dao.add(new RyaStatement(subj2, pred, two));
-        dao.add(new RyaStatement(subj2, pred, three));
-        dao.add(new RyaStatement(subj2, pred, four));
-        dao.add(new RyaStatement(subj3, pred, one));
-        dao.add(new RyaStatement(subj3, pred, two));
-        dao.add(new RyaStatement(subj3, pred, three));
-        dao.add(new RyaStatement(subj3, pred, four));
-        dao.add(new RyaStatement(subj4, pred, one));
-        dao.add(new RyaStatement(subj4, pred, two));
-        dao.add(new RyaStatement(subj4, pred, three));
-        dao.add(new RyaStatement(subj4, pred, four));
-        
-
-        //1 join
-        MergeJoin mergeJoin = new MergeJoin(dao.getQueryEngine());
-        CloseableIteration<RyaURI, RyaDAOException> join = mergeJoin.join(null, new CustomEntry<RyaURI, RyaType>(pred, one),
-                new CustomEntry<RyaURI, RyaType>(pred, two),
-                new CustomEntry<RyaURI, RyaType>(pred, three),
-                new CustomEntry<RyaURI, RyaType>(pred, four)
-        );
-
-        Set<RyaURI> uris = new HashSet<RyaURI>();
-        while (join.hasNext()) {
-            uris.add(join.next());
-        }
-        assertTrue(uris.contains(subj1));
-        assertTrue(uris.contains(subj2));
-        assertTrue(uris.contains(subj3));
-        assertTrue(uris.contains(subj4));
-        join.close();
-    }
-
-    @Test
-    public void testMergeJoinMultiWay() throws Exception {
-        //add data
-        RyaURI pred = new RyaURI(litdupsNS, "pred1");
-        RyaType zero = new RyaType("0");
-        RyaType one = new RyaType("1");
-        RyaType two = new RyaType("2");
-        RyaType three = new RyaType("3");
-        RyaType four = new RyaType("4");
-        RyaURI subj1 = new RyaURI(litdupsNS, "subj1");
-        RyaURI subj2 = new RyaURI(litdupsNS, "subj2");
-        RyaURI subj3 = new RyaURI(litdupsNS, "subj3");
-        RyaURI subj4 = new RyaURI(litdupsNS, "subj4");
-
-        dao.add(new RyaStatement(subj1, pred, one));
-        dao.add(new RyaStatement(subj1, pred, two));
-        dao.add(new RyaStatement(subj1, pred, three));
-        dao.add(new RyaStatement(subj1, pred, four));
-        dao.add(new RyaStatement(subj2, pred, zero));
-        dao.add(new RyaStatement(subj2, pred, one));
-        dao.add(new RyaStatement(subj2, pred, two));
-        dao.add(new RyaStatement(subj2, pred, three));
-        dao.add(new RyaStatement(subj2, pred, four));
-        dao.add(new RyaStatement(subj3, pred, one));
-        dao.add(new RyaStatement(subj3, pred, two));
-        dao.add(new RyaStatement(subj3, pred, four));
-        dao.add(new RyaStatement(subj4, pred, one));
-        dao.add(new RyaStatement(subj4, pred, two));
-        dao.add(new RyaStatement(subj4, pred, three));
-        dao.add(new RyaStatement(subj4, pred, four));
-        
-
-        //1 join
-        MergeJoin mergeJoin = new MergeJoin(dao.getQueryEngine());
-        CloseableIteration<RyaURI, RyaDAOException> join = mergeJoin.join(null, new CustomEntry<RyaURI, RyaType>(pred, one),
-                new CustomEntry<RyaURI, RyaType>(pred, two),
-                new CustomEntry<RyaURI, RyaType>(pred, three),
-                new CustomEntry<RyaURI, RyaType>(pred, four)
-        );
-
-        Set<RyaURI> uris = new HashSet<RyaURI>();
-        while (join.hasNext()) {
-            uris.add(join.next());
-        }
-        assertTrue(uris.contains(subj1));
-        assertTrue(uris.contains(subj2));
-        assertTrue(uris.contains(subj4));
-        join.close();
-    }
-
-    @Test
-    public void testMergeJoinMultiWayNone() throws Exception {
-        //add data
-        RyaURI pred = new RyaURI(litdupsNS, "pred1");
-        RyaType zero = new RyaType("0");
-        RyaType one = new RyaType("1");
-        RyaType two = new RyaType("2");
-        RyaType three = new RyaType("3");
-        RyaType four = new RyaType("4");
-        RyaURI subj1 = new RyaURI(litdupsNS, "subj1");
-        RyaURI subj2 = new RyaURI(litdupsNS, "subj2");
-        RyaURI subj3 = new RyaURI(litdupsNS, "subj3");
-        RyaURI subj4 = new RyaURI(litdupsNS, "subj4");
-
-        dao.add(new RyaStatement(subj1, pred, one));
-        dao.add(new RyaStatement(subj1, pred, three));
-        dao.add(new RyaStatement(subj1, pred, four));
-        dao.add(new RyaStatement(subj2, pred, zero));
-        dao.add(new RyaStatement(subj2, pred, one));
-        dao.add(new RyaStatement(subj2, pred, four));
-        dao.add(new RyaStatement(subj3, pred, two));
-        dao.add(new RyaStatement(subj3, pred, four));
-        dao.add(new RyaStatement(subj4, pred, one));
-        dao.add(new RyaStatement(subj4, pred, two));
-        dao.add(new RyaStatement(subj4, pred, three));
-        
-
-        //1 join
-        MergeJoin mergeJoin = new MergeJoin(dao.getQueryEngine());
-        CloseableIteration<RyaURI, RyaDAOException> join = mergeJoin.join(null, new CustomEntry<RyaURI, RyaType>(pred, one),
-                new CustomEntry<RyaURI, RyaType>(pred, two),
-                new CustomEntry<RyaURI, RyaType>(pred, three),
-                new CustomEntry<RyaURI, RyaType>(pred, four)
-        );
-
-        assertFalse(join.hasNext());
-        join.close();
-    }
-
-    @Test
-    public void testMergeJoinMultiWayNone2() throws Exception {
-        //add data
-        RyaURI pred = new RyaURI(litdupsNS, "pred1");
-        RyaType zero = new RyaType("0");
-        RyaType one = new RyaType("1");
-        RyaType two = new RyaType("2");
-        RyaType three = new RyaType("3");
-        RyaType four = new RyaType("4");
-        RyaURI subj1 = new RyaURI(litdupsNS, "subj1");
-        RyaURI subj2 = new RyaURI(litdupsNS, "subj2");
-        RyaURI subj3 = new RyaURI(litdupsNS, "subj3");
-        RyaURI subj4 = new RyaURI(litdupsNS, "subj4");
-
-        dao.add(new RyaStatement(subj1, pred, one));
-        dao.add(new RyaStatement(subj1, pred, four));
-        dao.add(new RyaStatement(subj2, pred, zero));
-        dao.add(new RyaStatement(subj2, pred, one));
-        dao.add(new RyaStatement(subj2, pred, four));
-        dao.add(new RyaStatement(subj3, pred, two));
-        dao.add(new RyaStatement(subj3, pred, four));
-        dao.add(new RyaStatement(subj4, pred, one));
-        dao.add(new RyaStatement(subj4, pred, two));
-        
-
-        //1 join
-        MergeJoin mergeJoin = new MergeJoin(dao.getQueryEngine());
-        CloseableIteration<RyaURI, RyaDAOException> join = mergeJoin.join(null, new CustomEntry<RyaURI, RyaType>(pred, one),
-                new CustomEntry<RyaURI, RyaType>(pred, two),
-                new CustomEntry<RyaURI, RyaType>(pred, three),
-                new CustomEntry<RyaURI, RyaType>(pred, four)
-        );
-
-        assertFalse(join.hasNext());
-        join.close();
-    }
-}
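
For reference, a minimal sketch of the MergeJoin usage the test above exercised
(a sketch only: it assumes the same mvm.rya classes the test imports, and the
mock-instance credentials and namespace are placeholders):

    import info.aduna.iteration.CloseableIteration;
    import mvm.rya.accumulo.AccumuloRdfConfiguration;
    import mvm.rya.accumulo.AccumuloRyaDAO;
    import mvm.rya.api.domain.RyaStatement;
    import mvm.rya.api.domain.RyaType;
    import mvm.rya.api.domain.RyaURI;
    import mvm.rya.api.persist.RyaDAOException;
    import mvm.rya.api.persist.query.join.MergeJoin;
    import org.apache.accumulo.core.client.mock.MockInstance;
    import static mvm.rya.api.RdfCloudTripleStoreUtils.CustomEntry;

    public class MergeJoinSketch {
        public static void main(String[] args) throws Exception {
            AccumuloRyaDAO dao = new AccumuloRyaDAO();
            dao.setConnector(new MockInstance().getConnector("", ""));
            dao.setConf(new AccumuloRdfConfiguration());
            dao.init();

            String ns = "urn:test:litdups#";
            RyaURI pred = new RyaURI(ns, "pred1");
            RyaURI subj = new RyaURI(ns, "subj1");
            dao.add(new RyaStatement(subj, pred, new RyaType("1")));
            dao.add(new RyaStatement(subj, pred, new RyaType("2")));

            // join(...) yields every subject that carries *all* of the
            // supplied (predicate, object) pairs.
            MergeJoin mergeJoin = new MergeJoin(dao.getQueryEngine());
            CloseableIteration<RyaURI, RyaDAOException> join = mergeJoin.join(null,
                    new CustomEntry<RyaURI, RyaType>(pred, new RyaType("1")),
                    new CustomEntry<RyaURI, RyaType>(pred, new RyaType("2")));
            while (join.hasNext()) {
                System.out.println(join.next());    // urn:test:litdups#subj1
            }
            join.close();
            dao.destroy();
        }
    }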


[25/56] [abbrv] incubator-rya git commit: RYA-7 POM and License Clean-up for Apache Move

Posted by mi...@apache.org.
http://git-wip-us.apache.org/repos/asf/incubator-rya/blob/80faf06d/partition/common-query/src/test/java/IteratorTest.java
----------------------------------------------------------------------
diff --git a/partition/common-query/src/test/java/IteratorTest.java b/partition/common-query/src/test/java/IteratorTest.java
deleted file mode 100644
index 1b5cf14..0000000
--- a/partition/common-query/src/test/java/IteratorTest.java
+++ /dev/null
@@ -1,554 +0,0 @@
-import static org.junit.Assert.assertEquals;
-import static org.junit.Assert.assertTrue;
-
-import java.io.IOException;
-import java.util.HashMap;
-import java.util.Map;
-import java.util.Map.Entry;
-
-import org.apache.hadoop.io.Text;
-import org.junit.Test;
-
-import ss.cloudbase.core.iterators.CellLevelFilteringIterator;
-import ss.cloudbase.core.iterators.CellLevelRecordIterator;
-import ss.cloudbase.core.iterators.ConversionIterator;
-import ss.cloudbase.core.iterators.GMDenIntersectingIterator;
-import ss.cloudbase.core.iterators.SortedMinIterator;
-import ss.cloudbase.core.iterators.SortedRangeIterator;
-import ss.cloudbase.core.iterators.UniqueIterator;
-import ss.cloudbase.core.iterators.filter.CBConverter;
-import cloudbase.core.client.Connector;
-import cloudbase.core.client.Scanner;
-import cloudbase.core.client.TableNotFoundException;
-import cloudbase.core.data.Key;
-import cloudbase.core.data.PartialKey;
-import cloudbase.core.data.Range;
-import cloudbase.core.data.Value;
-import cloudbase.core.security.Authorizations;
-
-public class IteratorTest {
-	private Connector cellLevelConn;
-	private Connector serializedConn;
-	
-	private static final String TABLE = "partition";
-	private static final Authorizations AUTHS = new Authorizations("ALPHA,BETA,GAMMA".split(","));
-	
-	public IteratorTest() {
-		
-	}
-	
-	protected Connector getCellLevelConnector() {
-		if (cellLevelConn == null) {
-			cellLevelConn = SampleData.initConnector();
-			SampleData.writeDenCellLevel(cellLevelConn, SampleData.sampleData());
-		}
-		return cellLevelConn;
-	}
-	
-	protected Connector getSerializedConnector() {
-		if (serializedConn == null) {
-			serializedConn = SampleData.initConnector();
-			SampleData.writeDenSerialized(serializedConn, SampleData.sampleData());
-			SampleData.writeDenProvenance(serializedConn);
-			SampleData.writeMinIndexes(serializedConn);
-		}
-		return serializedConn;
-	}
-	
-	protected Scanner getProvenanceScanner() {
-		Connector c = getSerializedConnector();
-		try {
-			return c.createScanner("provenance", AUTHS);
-		} catch (TableNotFoundException e) {
-			return null;
-		}
-	}
-	
-	protected Scanner getCellLevelScanner() {
-		Connector c = getCellLevelConnector();
-		try {
-			return c.createScanner(TABLE, AUTHS);
-		} catch (TableNotFoundException e) {
-			return null;
-		}
-	}
-	
-	protected Scanner getSerializedScanner() {
-		Connector c = getSerializedConnector();
-		try {
-			return c.createScanner(TABLE, AUTHS);
-		} catch (TableNotFoundException e) {
-			return null;
-		}
-	}
-	
-	protected Scanner setUpIntersectingIterator(Scanner s, Text[] terms, boolean multiDoc) {
-		try {
-			s.setScanIterators(50, GMDenIntersectingIterator.class.getName(), "ii");
-		} catch (IOException e) {
-			// TODO Auto-generated catch block
-			e.printStackTrace();
-		}
-		s.setScanIteratorOption("ii", GMDenIntersectingIterator.indexFamilyOptionName, "index");
-		s.setScanIteratorOption("ii", GMDenIntersectingIterator.docFamilyOptionName, "event");
-		s.setScanIteratorOption("ii", GMDenIntersectingIterator.OPTION_MULTI_DOC, "" + multiDoc);
-		s.setScanIteratorOption("ii", GMDenIntersectingIterator.columnFamiliesOptionName, GMDenIntersectingIterator.encodeColumns(terms));
-		return s;
-	}
-	
-	protected String checkSerialized(Scanner s) {
-		StringBuilder sb = new StringBuilder();
-		boolean first = true;
-		for (Entry<Key, Value> e: s) {
-			if (!first) {
-				sb.append(",");
-			} else {
-				first = false;
-			}
-			
-			String colq = e.getKey().getColumnQualifier().toString();
-			
-			sb.append(colq);
-		}
-		return sb.toString();
-	}
-	
-	protected String checkCellLevel(Scanner s) {
-		StringBuilder sb = new StringBuilder();
-		boolean first = true;
-		for (Entry<Key, Value> e: s) {
-			String colq = e.getKey().getColumnQualifier().toString();
-			int i = colq.indexOf("\u0000");
-			if (i > -1) {
-				if (!first) {
-					sb.append(",");
-				} else {
-					first = false;
-				}
-				sb.append(colq.substring(0, i));
-				sb.append(".");
-				sb.append(colq.substring(i + 1));
-				sb.append("=");
-				sb.append(e.getValue().toString());
-			}
-		}
-		return sb.toString();
-	}
-	
-	@Test
-	public void testSerializedSingleDuplicate() {
-		Text[] terms = new Text[] {
-			new Text("A"),
-			new Text("A")
-		};
-		
-		String test = "01";
-		Scanner s = setUpIntersectingIterator(getSerializedScanner(), terms, false);
-		s.setRange(new Range());
-		assertTrue(test.equals(checkSerialized(s)));
-	}
-	
-	@Test
-	public void testCellLevelSingleDuplicate() {
-		Text[] terms = new Text[] {
-			new Text("A"),
-			new Text("A")
-		};
-		String test = "01.field0=A,01.field1=B,01.field2=C,01.field3=D,01.field4=E";
-		Scanner s = setUpIntersectingIterator(getCellLevelScanner(), terms, true);
-		s.setRange(new Range());
-		assertTrue(test.equals(checkCellLevel(s)));
-	}
-	
-	@Test
-	public void testSerializedTwoTerms() {
-		Text[] terms = new Text[] {
-			new Text("C"),
-			new Text("D")
-		};
-		// all the evens will come first
-		String test = "02,01,03";
-		Scanner s = setUpIntersectingIterator(getSerializedScanner(), terms, false);
-		s.setRange(new Range());
-		assertTrue(test.equals(checkSerialized(s)));
-	}
-	
-	@Test
-	public void testCellLevelTwoTerms() {
-		Text[] terms = new Text[] {
-			new Text("C"),
-			new Text("D")
-		};
-		
-		String test = "02.field0=B,02.field1=C,02.field2=D,02.field3=E,02.field4=F,"
-			+ "01.field0=A,01.field1=B,01.field2=C,01.field3=D,01.field4=E,"
-			+ "03.field0=C,03.field1=D,03.field2=E,03.field3=F,03.field4=G";
-		Scanner s = setUpIntersectingIterator(getCellLevelScanner(), terms, true);
-		s.setRange(new Range());
-		assertTrue(test.equals(checkCellLevel(s)));
-	}
-	
-	@Test
-	public void testSerializedTwoTermsWithRange() {
-		Text[] terms = new Text[] {
-			new Text("C"),
-			new Text("D")
-		};
-		
-		String test = "02";
-		Scanner s = setUpIntersectingIterator(getSerializedScanner(), terms, false);
-		s.setRange(new Range(new Key(new Text("0")), true, new Key(new Text("1")), false));
-		assertTrue(test.equals(checkSerialized(s)));
-	}
-	
-	@Test
-	public void testCellLevelTwoTermsWithRange() {
-		Text[] terms = new Text[] {
-			new Text("C"),
-			new Text("D")
-		};
-		
-		String test = "02.field0=B,02.field1=C,02.field2=D,02.field3=E,02.field4=F";
-		Scanner s = setUpIntersectingIterator(getCellLevelScanner(), terms, true);
-		s.setRange(new Range(new Key(new Text("0")), true, new Key(new Text("1")), false));
-		assertTrue(test.equals(checkCellLevel(s)));
-	}
-	
-	@Test
-	public void testSerializedSingleRange() {
-		Text[] terms = new Text[] {
-			new Text(GMDenIntersectingIterator.getRangeTerm("index", "A", true, "B", true)),
-			new Text(GMDenIntersectingIterator.getRangeTerm("index", "A", true, "B", true))
-		};
-		
-		String test = "02,01";
-		Scanner s = setUpIntersectingIterator(getSerializedScanner(), terms, false);
-		s.setRange(new Range());
-		assertTrue(test.equals(checkSerialized(s)));
-	}
-	
-	@Test
-	public void testSerializedMultiRange() {
-		Text[] terms = new Text[] {
-			new Text(GMDenIntersectingIterator.getRangeTerm("index", "A", true, "B", true)),
-			new Text(GMDenIntersectingIterator.getRangeTerm("index", "B", true, "C", true))
-		};
-		
-		String test = "02,01";
-		Scanner s = setUpIntersectingIterator(getSerializedScanner(), terms, false);
-		s.setRange(new Range());
-		assertTrue(test.equals(checkSerialized(s)));
-	}
-	
-	@Test
-	public void testSerializedTermAndRange() {
-		Text[] terms = new Text[] {
-			new Text("B"),
-			new Text(GMDenIntersectingIterator.getRangeTerm("index", "A", true, "E", true))
-		};
-		
-		String test = "02,01";
-		Scanner s = setUpIntersectingIterator(getSerializedScanner(), terms, false);
-		s.setRange(new Range());
-		assertTrue(test.equals(checkSerialized(s)));
-	}
-	
-	protected Scanner setUpSortedRangeIterator(Scanner s, boolean multiDoc) {
-		try {
-			s.setScanIterators(50, SortedRangeIterator.class.getName(), "ri");
-			s.setScanIteratorOption("ri", SortedRangeIterator.OPTION_COLF, "index");
-			s.setScanIteratorOption("ri", SortedRangeIterator.OPTION_DOC_COLF, "event");
-			s.setScanIteratorOption("ri", SortedRangeIterator.OPTION_LOWER_BOUND, "A");
-			s.setScanIteratorOption("ri", SortedRangeIterator.OPTION_UPPER_BOUND, "C");
-			s.setScanIteratorOption("ri", SortedRangeIterator.OPTION_START_INCLUSIVE, "true");
-			s.setScanIteratorOption("ri", SortedRangeIterator.OPTION_END_INCLUSIVE, "true");
-			s.setScanIteratorOption("ri", SortedRangeIterator.OPTION_MULTI_DOC, "" + multiDoc);
-			return s;
-		} catch (IOException e) {
-			e.printStackTrace();
-			return null;
-		}
-	}
-	
-	@Test
-	public void testSerializedSortedRangeIterator() {
-		Scanner s = setUpSortedRangeIterator(getSerializedScanner(), false);
-		String test = "02,01,03";
-		s.setRange(new Range());
-		assertTrue(test.equals(checkSerialized(s)));
-	}
-	
-	@Test
-	public void testCellLevelSortedRangeIterator() {
-		Scanner s = setUpSortedRangeIterator(getCellLevelScanner(), true);
-		String test = "02.field0=B,02.field1=C,02.field2=D,02.field3=E,02.field4=F,"
-			+ "01.field0=A,01.field1=B,01.field2=C,01.field3=D,01.field4=E,"
-			+ "03.field0=C,03.field1=D,03.field2=E,03.field3=F,03.field4=G";
-		s.setRange(new Range());
-		assertTrue(test.equals(checkCellLevel(s)));
-	}
-	
-	@Test
-	public void testUniqueIterator() {
-		Scanner s = getProvenanceScanner();
-		try {
-			s.setScanIterators(50, UniqueIterator.class.getName(), "skipper");
-			Key start = new Key(new Text("sid1"));
-			s.setRange(new Range(start, start.followingKey(PartialKey.ROW)));
-			
-			int count = 0;
-			for (Entry<Key, Value> e: s) {
-				count++;
-			}
-			
-			assertEquals(3, count);
-		} catch (IOException e) {
-			e.printStackTrace();
-		}
-	}
-	
-	protected Scanner setUpConversionIterator(Scanner s) {
-		String[] conversions = new String[] {
-			"field0 + 10",
-			"field1 - 10",
-			"field2 * 10",
-			"field3 / 10",
-			"field4 % 10"
-		};
-		
-		try {
-			s.setScanIterators(50, ConversionIterator.class.getName(), "ci");
-		} catch (IOException e) {
-			// TODO Auto-generated catch block
-			e.printStackTrace();
-		}
-		
-		s.setScanIteratorOption("ci", ConversionIterator.OPTION_CONVERSIONS, ConversionIterator.encodeConversions(conversions));
-		Key start = new Key(new Text("1"), new Text("event"), new Text("01"));
-		s.setRange(new Range(start, true, start.followingKey(PartialKey.ROW_COLFAM_COLQUAL), false));
-		
-		return s;
-	}
-	
-	@Test
-	public void testConversionIteratorSerialized() {
-		Scanner s = getSerializedScanner();
-		s = setUpConversionIterator(s);
-		
-		CBConverter c = new CBConverter();
-		
-		boolean test = true;
-		Map<String, Double> expected = new HashMap<String, Double>();
-		
-		expected.put("field0", 20.0);
-		expected.put("field1", 1.0);
-		expected.put("field2", 120.0);
-		expected.put("field3", 1.3);
-		expected.put("field4", 4.0);
-		
-		Map<String, String> record;
-		
-		for (Entry<Key, Value> e: s) {
-			record = c.toMap(e.getKey(), e.getValue());
-			
-			for (Entry<String, String> pair: record.entrySet()) {
-				test = test && expected.get(pair.getKey()).equals(new Double(Double.parseDouble(record.get(pair.getKey()))));
-			}
-		}
-		
-		assertTrue(test);
-	}
-	
-	@Test
-	public void testConversionIteratorCellLevel() {
-		Scanner s = getCellLevelScanner();
-		s = setUpConversionIterator(s);
-		s.setScanIteratorOption("ci", ConversionIterator.OPTION_MULTI_DOC, "true");
-		
-		boolean test = true;
-		Map<String, Double> expected = new HashMap<String, Double>();
-		
-		expected.put("field0", 20.0);
-		expected.put("field1", 1.0);
-		expected.put("field2", 120.0);
-		expected.put("field3", 1.3);
-		expected.put("field4", 4.0);
-		
-		for (Entry<Key, Value> e: s) {
-			String field = getField(e.getKey());
-			if (field != null) {
-				test = test && expected.get(field).equals(new Double(Double.parseDouble(e.getValue().toString())));
-			}
-		}
-		
-		assertTrue(test);
-	}
-	
-	protected String getField(Key key) {
-		String colq = key.getColumnQualifier().toString();
-		int start = colq.indexOf("\u0000");
-		if (start == -1) {
-			return null;
-		}
-		
-		int end = colq.indexOf("\u0000", start + 1);
-		if (end == -1) {
-			end = colq.length();
-		}
-		
-		return colq.substring(start + 1, end);
-	}
-	
-	@Test
-	public void testCellLevelOGCFilter() {
-		Scanner s = getCellLevelScanner();
-		s.fetchColumnFamily(new Text("event"));
-		
-		try {
-			s.setScanIterators(60, CellLevelFilteringIterator.class.getName(), "fi");
-		} catch (IOException e) {
-			e.printStackTrace();
-		}
-		
-		s.setScanIteratorOption("fi", CellLevelFilteringIterator.OPTION_FILTER, "<PropertyIsBetween><PropertyName>field0</PropertyName>"
-			+ "<LowerBoundary><Literal>A</Literal></LowerBoundary>"
-			+ "<UpperBoundary><Literal>C</Literal></UpperBoundary>"
-			+ "</PropertyIsBetween>");
-		
-		String test = "02.field0=B,02.field1=C,02.field2=D,02.field3=E,02.field4=F,"
-			+ "01.field0=A,01.field1=B,01.field2=C,01.field3=D,01.field4=E,"
-			+ "03.field0=C,03.field1=D,03.field2=E,03.field3=F,03.field4=G";
-		assertTrue(test.equals(checkCellLevel(s)));
-	}
-	
-	@Test
-	public void testMultiLevelIterator() {
-		Scanner s = getCellLevelScanner();
-		Text[] terms = new Text[] {
-			new Text("C"),
-			new Text("D")
-		};
-		
-		s = setUpIntersectingIterator(s, terms, true);
-		
-		try {
-			s.setScanIterators(60, CellLevelFilteringIterator.class.getName(), "fi");
-		} catch (IOException e) {
-			e.printStackTrace();
-		}
-		
-		s.setScanIteratorOption("fi", CellLevelFilteringIterator.OPTION_FILTER, "<PropertyIsEqualTo><PropertyName>field0</PropertyName>"
-			+ "<Literal>A</Literal>"
-			+ "</PropertyIsEqualTo>");
-		
-		String test = "01.field0=A,01.field1=B,01.field2=C,01.field3=D,01.field4=E";
-		assertTrue(test.equals(checkCellLevel(s)));
-	}
-	
-	@Test
-	public void testMultiLevelIterator2() {
-		Scanner s = getCellLevelScanner();
-		s = setUpSortedRangeIterator(s, true);
-		try {
-			s.setScanIterators(60, CellLevelFilteringIterator.class.getName(), "fi");
-		} catch (IOException e) {
-			e.printStackTrace();
-		}
-		s.setScanIteratorOption("fi", CellLevelFilteringIterator.OPTION_FILTER, "<PropertyIsEqualTo><PropertyName>field0</PropertyName>"
-			+ "<Literal>A</Literal>"
-			+ "</PropertyIsEqualTo>");
-		
-		String test = "01.field0=A,01.field1=B,01.field2=C,01.field3=D,01.field4=E";
-		assertTrue(test.equals(checkCellLevel(s)));
-	}
-	
-	@Test
-	public void testCellLevelRecordIterator() {
-		Scanner s = getCellLevelScanner();
-		s = setUpSortedRangeIterator(s, true);
-		try {
-			s.setScanIterators(60, CellLevelRecordIterator.class.getName(), "recordItr");
-		} catch (IOException e) {
-			e.printStackTrace();
-		}
-		
-//		for (Entry<Key, Value> e: s) {
-//			String v = e.getValue().toString();
-//			v = v.replaceAll("\\u0000", ",");
-//			v = v.replaceAll("\\uFFFD", "=");
-//			System.out.println(e.getKey() + "\t" + v);
-//		}
-		String test = "02,01,03";
-		assertTrue(test.equals(checkSerialized(s)));
-	}
-	
-	@Test
-	public void testIntersectionWithoutDocLookup() {
-		Text[] terms = new Text[] {
-			new Text("C"),
-			new Text("D")
-		};
-		// all the evens will come first
-		String test = "\u000002,\u000001,\u000003";
-		Scanner s = setUpIntersectingIterator(getSerializedScanner(), terms, false);
-		s.setScanIteratorOption("ii", GMDenIntersectingIterator.OPTION_DOC_LOOKUP, "false");
-		s.setRange(new Range());
-		assertTrue(test.equals(checkSerialized(s)));
-	}
-	
-	@Test
-	public void testSimpleNot() {
-		Text[] terms = new Text[] {
-			new Text("B"),
-			new Text("F")
-		};
-		
-		boolean[] nots = new boolean[] {
-			false,
-			true
-		};
-		
-		String test="01";
-		Scanner s = setUpIntersectingIterator(getSerializedScanner(), terms, false);
-		s.setScanIteratorOption("ii", GMDenIntersectingIterator.notFlagOptionName, GMDenIntersectingIterator.encodeBooleans(nots));
-		s.setRange(new Range());
-		
-		assertTrue(test.equals(checkSerialized(s)));
-	}
-	
-	@Test
-	public void testRangeNot() {
-		Text[] terms = new Text[] {
-			new Text("B"),
-			new Text(GMDenIntersectingIterator.getRangeTerm("index", "F", true, "H", true))
-		};
-		
-		boolean[] nots = new boolean[] {
-			false,
-			true
-		};
-		
-		String test = "01";
-		Scanner s = setUpIntersectingIterator(getSerializedScanner(), terms, false);
-		s.setScanIteratorOption("ii", GMDenIntersectingIterator.notFlagOptionName, GMDenIntersectingIterator.encodeBooleans(nots));
-		s.setRange(new Range());
-		
-		assertTrue(test.equals(checkSerialized(s)));
-	}
-	
-	@Test
-	public void testMinIteratorOnLastKeys() {
-		Scanner s = getSerializedScanner();
-		try {
-			s.setScanIterators(50, SortedMinIterator.class.getName(), "min");
-		} catch (IOException e) {
-			// TODO Auto-generated catch block
-			e.printStackTrace();
-		}
-		s.setScanIteratorOption("min", SortedMinIterator.OPTION_PREFIX, "z");
-		s.setRange(new Range());
-		
-		String test = "02,04,06,08,10,01,03,05,07,09";
-		assertTrue(test.equals(checkSerialized(s)));
-	}
-}
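
A condensed sketch of the intersecting-iterator setup the tests above repeat
(assuming the same ss.cloudbase option names; the Scanner is expected to come
from a connector such as the one SampleData.initConnector() builds):

    import java.io.IOException;
    import org.apache.hadoop.io.Text;
    import cloudbase.core.client.Scanner;
    import cloudbase.core.data.Range;
    import ss.cloudbase.core.iterators.GMDenIntersectingIterator;

    public class IntersectSketch {
        // Only documents whose "index" family holds *every* term survive;
        // matching documents are then re-read from the "event" family.
        public static void configure(Scanner s, Text[] terms) throws IOException {
            s.setScanIterators(50, GMDenIntersectingIterator.class.getName(), "ii");
            s.setScanIteratorOption("ii", GMDenIntersectingIterator.indexFamilyOptionName, "index");
            s.setScanIteratorOption("ii", GMDenIntersectingIterator.docFamilyOptionName, "event");
            s.setScanIteratorOption("ii", GMDenIntersectingIterator.OPTION_MULTI_DOC, "false");
            s.setScanIteratorOption("ii", GMDenIntersectingIterator.columnFamiliesOptionName,
                    GMDenIntersectingIterator.encodeColumns(terms));
            s.setRange(new Range());
        }
    }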

http://git-wip-us.apache.org/repos/asf/incubator-rya/blob/80faf06d/partition/common-query/src/test/java/JTSFilterTest.java
----------------------------------------------------------------------
diff --git a/partition/common-query/src/test/java/JTSFilterTest.java b/partition/common-query/src/test/java/JTSFilterTest.java
deleted file mode 100644
index 8224f64..0000000
--- a/partition/common-query/src/test/java/JTSFilterTest.java
+++ /dev/null
@@ -1,181 +0,0 @@
-/*
- * To change this template, choose Tools | Templates
- * and open the template in the editor.
- */
-
-import static org.junit.Assert.assertTrue;
-
-import java.io.IOException;
-import java.util.Map.Entry;
-
-import org.apache.hadoop.io.Text;
-import org.junit.Test;
-
-import ss.cloudbase.core.iterators.GMDenIntersectingIterator;
-import ss.cloudbase.core.iterators.filter.jts.JTSFilter;
-
-import cloudbase.core.client.Connector;
-import cloudbase.core.client.Scanner;
-import cloudbase.core.client.TableNotFoundException;
-import cloudbase.core.data.Key;
-import cloudbase.core.data.Range;
-import cloudbase.core.data.Value;
-import cloudbase.core.iterators.FilteringIterator;
-import cloudbase.core.security.Authorizations;
-
-/**
- *
- * @author rashah
- */
-public class JTSFilterTest
-{
-
-  private Connector cellLevelConn;
-  private Connector serializedConn;
-  private static final String TABLE = "partition";
-  private static final Authorizations AUTHS = new Authorizations("ALPHA,BETA,GAMMA".split(","));
-
-
-
-  protected Connector getSerializedConnector()
-  {
-    if (serializedConn == null)
-    {
-      serializedConn = SampleJTSData.initConnector();
-      SampleJTSData.writeDenSerialized(serializedConn, SampleJTSData.sampleData());
-    }
-    return serializedConn;
-  }
-
-
-
-  protected Scanner getSerializedScanner()
-  {
-    Connector c = getSerializedConnector();
-    try
-    {
-      return c.createScanner(TABLE, AUTHS);
-    }
-    catch (TableNotFoundException e)
-    {
-      return null;
-    }
-  }
-
-  protected Scanner setUpJTSFilter(Scanner s, String latitude, String longitude, boolean change_name)
-  {
-    try
-    {
-  
-      s.setScanIterators(50, FilteringIterator.class.getName(), "gvdf");
-      s.setScanIteratorOption("gvdf", "0", JTSFilter.class.getName());
-      s.setScanIteratorOption("gvdf", "0." + JTSFilter.OPTIONCenterPointLat, latitude);
-      s.setScanIteratorOption("gvdf", "0." + JTSFilter.OPTIONCenterPointLon, longitude);
-      if (change_name)
-          s.setScanIteratorOption("gvdf", "0." + JTSFilter.OPTIONGeometryKeyName, "beam-footprint");
-
-
-    }
-    catch (IOException e)
-    {
-      // TODO Auto-generated catch block
-      e.printStackTrace();
-    }
-    return s;
-  }
-
-  protected String checkSerialized(Scanner s)
-  {
-    StringBuilder sb = new StringBuilder();
-    boolean first = true;
-    for (Entry<Key, Value> e : s)
-    {
-
-      if (!first)
-      {
-        sb.append(",");
-      }
-      else
-      {
-        first = false;
-      }
-
-      String colq = e.getKey().getColumnQualifier().toString();
-
-      sb.append(colq);
-    }
-    return sb.toString();
-  }
-
-
-  @Test
-  public void testNoResults()
-  {
-    //London is in neither footprint - 51°30'0.00"N   0° 7'0.00"W
-    String latitude = "51.5";
-    String longitude = "0.11";
-
-    Scanner s = setUpJTSFilter(getSerializedScanner(), latitude, longitude, false);
-    s.setRange(new Range());
-
-//    System.out.println("{" + checkSerialized(s) + "}");
-    assertTrue(checkSerialized(s).isEmpty());
-  }
-
-
-  @Test
-  public void testOneResultAmerica()
-  {
-    //This is the North America footprint
-    //The query point 33N 93W falls inside it
-    String latitude = "33";
-    String longitude = "-93.0";
-
-    Scanner s = setUpJTSFilter(getSerializedScanner(), latitude, longitude, false);
-    s.setRange(new Range());
-
-    System.out.println("{" + checkSerialized(s) + "}");
-    assertTrue(checkSerialized(s).equals("02"));
-  }
-
-
-  @Test
-  public void testOneResultAustralia()
-  {
-    //This is the Australia footprint
-    //The query point 9S 100E falls inside it
-    String latitude = "-9";
-    String longitude = "100.0";
-
-    Scanner s = setUpJTSFilter(getSerializedScanner(), latitude, longitude, false);
-    s.setRange(new Range());
-
-    System.out.println("{" + checkSerialized(s) + "}");
-    assertTrue(checkSerialized(s).equals("01"));
-  }
-
-  @Test
-  public void testOneResultHawaii()
-  {
-    // -164 40 - somewhere near Hawaii
-    // The query point falls inside the "testa" multipolygon footprint
-    String latitude = "40";
-    String longitude = "-164.0";
-
-    Scanner s = setUpJTSFilter(getSerializedScanner(), latitude, longitude, true);
-    s.setRange(new Range());
-
-    System.out.println("{" + checkSerialized(s) + "}");
-    assertTrue(checkSerialized(s).equals("03"));
-  }
-
-
-  @Test
-  public void testDummyTest()
-  {
-    assertTrue(true);
-  }
-
-}
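
The pattern these tests drive, wrapping JTSFilter in cloudbase's generic
FilteringIterator and namespacing its options by slot index, reduced to a
sketch (the latitude/longitude values are whatever point you want to test):

    import java.io.IOException;
    import cloudbase.core.client.Scanner;
    import cloudbase.core.iterators.FilteringIterator;
    import ss.cloudbase.core.iterators.filter.jts.JTSFilter;

    public class JTSFilterSketch {
        // Slot "0" names the filter class; "0.<option>" passes options to it.
        public static void attach(Scanner s, String lat, String lon) throws IOException {
            s.setScanIterators(50, FilteringIterator.class.getName(), "gvdf");
            s.setScanIteratorOption("gvdf", "0", JTSFilter.class.getName());
            s.setScanIteratorOption("gvdf", "0." + JTSFilter.OPTIONCenterPointLat, lat);
            s.setScanIteratorOption("gvdf", "0." + JTSFilter.OPTIONCenterPointLon, lon);
        }
    }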

http://git-wip-us.apache.org/repos/asf/incubator-rya/blob/80faf06d/partition/common-query/src/test/java/OGCFilterTest.java
----------------------------------------------------------------------
diff --git a/partition/common-query/src/test/java/OGCFilterTest.java b/partition/common-query/src/test/java/OGCFilterTest.java
deleted file mode 100644
index fd54945..0000000
--- a/partition/common-query/src/test/java/OGCFilterTest.java
+++ /dev/null
@@ -1,163 +0,0 @@
-import java.util.HashMap;
-import java.util.Map;
-
-import org.apache.hadoop.io.Text;
-import org.junit.Test;
-
-import cloudbase.core.data.Key;
-import cloudbase.core.data.Value;
-import ss.cloudbase.core.iterators.filter.ogc.OGCFilter;
-
-import static org.junit.Assert.*;
-
-public class OGCFilterTest {
-	private Key testKey = new Key(new Text("row"), new Text("colf"), new Text("colq"));
-	private Value testValue = new Value("uuid~event\uFFFDmy-event-hash-1\u0000date\uFFFD20100819\u0000time~dss\uFFFD212706.000\u0000frequency\uFFFD3.368248181443644E8\u0000latitude\uFFFD48.74571142707959\u0000longitude\uFFFD13.865561564126812\u0000altitude\uFFFD1047.0\u0000datetime\uFFFD2010-08-19T21:27:06.000Z\u0000test~key\uFFFD\u0000key\uFFFDa\uFFFDb".getBytes());
-
-	public OGCFilterTest() {
-
-	}
-
-	private OGCFilter getFilter(String filter) {
-		OGCFilter f = new OGCFilter();
-		Map<String, String> options = new HashMap<String, String>();
-		options.put(OGCFilter.OPTION_FILTER, filter);
-		f.init(options);
-		return f;
-	}
-
-	@Test
-	public void testBBOX() {
-		OGCFilter f = getFilter("<BBOX><gml:Envelope>"
-			+ "<gml:LowerCorner>13 48</gml:LowerCorner>"
-			+ "<gml:UpperCorner>14 49</gml:UpperCorner>"
-			+ "</gml:Envelope></BBOX>");
-		assertTrue(f.accept(testKey, testValue));
-	}
-
-	@Test
-	public void testBetweenStr() {
-		OGCFilter f = getFilter("<PropertyIsBetween><PropertyName>datetime</PropertyName>"
-			+ "<LowerBoundary><Literal>2010-08-19</Literal></LowerBoundary>"
-			+ "<UpperBoundary><Literal>2010-08-20</Literal></UpperBoundary>"
-			+ "</PropertyIsBetween>");
-
-		assertTrue(f.accept(testKey, testValue));
-	}
-
-	@Test
-	public void testBetweenNum() {
-		OGCFilter f = getFilter("<PropertyIsBetween><PropertyName>frequency</PropertyName>"
-			+ "<LowerBoundary><Literal>330000000</Literal></LowerBoundary>"
-			+ "<UpperBoundary><Literal>340000000</Literal></UpperBoundary>"
-			+ "</PropertyIsBetween>");
-
-		assertTrue(f.accept(testKey, testValue));
-	}
-
-	@Test
-	public void testEqualStr() {
-		OGCFilter f = getFilter("<PropertyIsEqualTo><PropertyName>uuid~event</PropertyName><Literal>my-event-hash-1</Literal></PropertyIsEqualTo>");
-		assertTrue(f.accept(testKey, testValue));
-	}
-
-	@Test
-	public void testEqualNum() {
-		OGCFilter f = getFilter("<PropertyIsEqualTo><PropertyName>altitude</PropertyName><Literal>1047</Literal></PropertyIsEqualTo>");
-		assertTrue(f.accept(testKey, testValue));
-	}
-
-	@Test
-	public void testGreaterThanStr() {
-		OGCFilter f = getFilter("<PropertyIsGreaterThan><PropertyName>datetime</PropertyName><Literal>2010-08-15</Literal></PropertyIsGreaterThan>");
-		assertTrue(f.accept(testKey, testValue));
-	}
-
-	@Test
-	public void testGreaterThanNum() {
-		OGCFilter f = getFilter("<PropertyIsGreaterThan><PropertyName>altitude</PropertyName><Literal>1000</Literal></PropertyIsGreaterThan>");
-		assertTrue(f.accept(testKey, testValue));
-	}
-
-	@Test
-	public void testLessThanStr() {
-		OGCFilter f = getFilter("<PropertyIsLessThan><PropertyName>datetime</PropertyName><Literal>2010-08-20</Literal></PropertyIsLessThan>");
-		assertTrue(f.accept(testKey, testValue));
-	}
-
-	@Test
-	public void testLessThanNum() {
-		OGCFilter f = getFilter("<PropertyIsLessThan><PropertyName>altitude</PropertyName><Literal>1200</Literal></PropertyIsLessThan>");
-		assertTrue(f.accept(testKey, testValue));
-	}
-
-	@Test
-	public void testLike() {
-		OGCFilter f = getFilter("<PropertyIsLike><PropertyName>uuid~event</PropertyName><Literal>*event*</Literal></PropertyIsLike>");
-		assertTrue(f.accept(testKey, testValue));
-	}
-
-	@Test
-	public void testNotEqualNum() {
-		OGCFilter f = getFilter("<PropertyIsNotEqualTo><PropertyName>altitude</PropertyName><Literal>1046</Literal></PropertyIsNotEqualTo>");
-		assertTrue(f.accept(testKey, testValue));
-	}
-
-	@Test
-	public void testNull() {
-		OGCFilter f = getFilter("<PropertyIsNull><PropertyName>test~key</PropertyName></PropertyIsNull>");
-		assertTrue(f.accept(testKey, testValue));
-	}
-
-	@Test
-	public void testNot() {
-		OGCFilter f = getFilter("<Not><PropertyIsEqualTo><PropertyName>altitude</PropertyName><Literal>1047</Literal></PropertyIsEqualTo></Not>");
-		assertFalse(f.accept(testKey, testValue));
-	}
-
-	@Test
-	public void testAnd() {
-		OGCFilter f = getFilter("<And>"
-			+ "<PropertyIsEqualTo><PropertyName>altitude</PropertyName><Literal>1047</Literal></PropertyIsEqualTo>"
-			+ "<PropertyIsNull><PropertyName>test~key</PropertyName></PropertyIsNull>"
-			+ "</And>");
-
-		assertTrue(f.accept(testKey, testValue));
-	}
-
-	@Test
-	public void testOr() {
-		OGCFilter f = getFilter("<Or>"
-			+ "<PropertyIsLike><PropertyName>uuid~event</PropertyName><Literal>*event*</Literal></PropertyIsLike>"
-			+ "<PropertyIsNull><PropertyName>uuid~event</PropertyName></PropertyIsNull>"
-			+ "</Or>");
-
-		assertTrue(f.accept(testKey, testValue));
-	}
-
-	@Test
-	public void testNand() {
-		OGCFilter f = getFilter("<Not><And>"
-			+ "<PropertyIsNull><PropertyName>uuid~event</PropertyName></PropertyIsNull>"
-			+ "<PropertyIsNull><PropertyName>test~key</PropertyName></PropertyIsNull>"
-			+ "</And></Not>");
-
-		assertTrue(f.accept(testKey, testValue));
-	}
-
-	@Test
-	public void testNor() {
-		OGCFilter f = getFilter("<Not>"
-			+ "<PropertyIsNull><PropertyName>uuid~event</PropertyName></PropertyIsNull>"
-			+ "<PropertyIsNull><PropertyName>altitude</PropertyName></PropertyIsNull>"
-			+ "</Not>");
-
-		assertTrue(f.accept(testKey, testValue));
-	}
-
-	@Test
-	public void testParse() {
-		OGCFilter f = getFilter("<PropertyIsEqualTo><PropertyName>key</PropertyName><Literal>a</Literal></PropertyIsEqualTo>");
-		assertTrue(f.accept(testKey, testValue));
-	}
-}
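
The test value above packs a whole record into a single cell: pairs are
separated by NUL (U+0000) and each key is separated from its value by U+FFFD.
A decoding sketch of that layout (the helper name is invented; CBConverter.toMap
is the library's real decoder):

    import java.util.HashMap;
    import java.util.Map;

    public class RecordCodecSketch {
        // Split the serialized cell on NUL, then split each pair on U+FFFD.
        public static Map<String, String> decode(String serialized) {
            Map<String, String> record = new HashMap<String, String>();
            for (String pair : serialized.split("\u0000")) {
                String[] kv = pair.split("\uFFFD", 2);
                record.put(kv[0], kv.length > 1 ? kv[1] : "");
            }
            return record;
        }
    }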

http://git-wip-us.apache.org/repos/asf/incubator-rya/blob/80faf06d/partition/common-query/src/test/java/SampleData.java
----------------------------------------------------------------------
diff --git a/partition/common-query/src/test/java/SampleData.java b/partition/common-query/src/test/java/SampleData.java
deleted file mode 100644
index 071076b..0000000
--- a/partition/common-query/src/test/java/SampleData.java
+++ /dev/null
@@ -1,228 +0,0 @@
-import java.util.ArrayList;
-import java.util.Collection;
-import java.util.HashMap;
-import java.util.List;
-import java.util.Map;
-import java.util.Map.Entry;
-
-import org.apache.hadoop.io.Text;
-
-import cloudbase.core.client.BatchWriter;
-import cloudbase.core.client.CBException;
-import cloudbase.core.client.CBSecurityException;
-import cloudbase.core.client.Connector;
-import cloudbase.core.client.Instance;
-import cloudbase.core.client.MultiTableBatchWriter;
-import cloudbase.core.client.TableExistsException;
-import cloudbase.core.client.TableNotFoundException;
-import cloudbase.core.client.mock.MockInstance;
-import cloudbase.core.data.Mutation;
-import cloudbase.core.security.Authorizations;
-
-
-public class SampleData {
-	public static int NUM_PARTITIONS = 2;
-	public static int NUM_SAMPLES = 10;
-	
-	public static Connector initConnector() {
-		Instance instance = new MockInstance();
-		
-		try {
-			Connector connector = instance.getConnector("root", "password".getBytes());
-			
-			// set up table
-			connector.tableOperations().create("partition");
-			connector.tableOperations().create("provenance");
-			
-			// set up root's auths
-			connector.securityOperations().changeUserAuthorizations("root", new Authorizations("ALPHA,BETA,GAMMA".split(",")));
-			
-			return connector;
-		} catch (CBException e) {
-			e.printStackTrace();
-		} catch (CBSecurityException e) {
-			e.printStackTrace();
-		} catch (TableExistsException e) {
-			e.printStackTrace();
-		}
-		
-		return null;
-	}
-	
-	public static Collection<Map<String, String>> sampleData() {
-		List<Map<String, String>> list = new ArrayList<Map<String, String>>();
-		Map<String, String> item;
-		
-		for (int i = 0; i < NUM_SAMPLES; i++) {
-			item = new HashMap<String, String>();
-			for (int j = 0; j < 5; j++) {
-				item.put("field" + j , new String(new char[] {(char) ('A' + ((j + i) % 26))}));
-			}
-			list.add(item);
-		}
-		return list;
-	}
-	
-	public static void writeDenCellLevel(Connector connector, Collection<Map<String, String>> data) {
-		// write sample data
-		MultiTableBatchWriter mtbw = connector.createMultiTableBatchWriter(200000, 10000, 1);
-		try {
-			BatchWriter writer;
-			if (mtbw != null) {
-				writer = mtbw.getBatchWriter("partition");
-			} else {
-				writer = connector.createBatchWriter("partition", 200000, 10000, 1);
-			}
-			int count = 0;
-			Mutation m;
-			for (Map<String, String> object: data) {
-				count++;
-				String id = (count < 10 ? "0" + count: "" + count);
-				Text partition = new Text("" + (count % NUM_PARTITIONS));
-				
-				// write dummy record
-				m = new Mutation(partition);
-				m.put("event", id, "");
-				writer.addMutation(m);
-				
-				for (Entry<String, String> entry: object.entrySet()) {
-					// write the event mutation
-					m = new Mutation(partition);
-					m.put("event", id + "\u0000" + entry.getKey(), entry.getValue());
-					writer.addMutation(m);
-					
-					// write the general index mutation
-					m = new Mutation(partition);
-					m.put("index", entry.getValue() + "\u0000" + id, "");
-					writer.addMutation(m);
-					
-					// write the specific index mutation
-					m = new Mutation(partition);
-					m.put("index", entry.getKey() + "//" + entry.getValue() + "\u0000" + id, "");
-					writer.addMutation(m);
-				}
-			}
-			writer.close();
-		} catch (CBException e) {
-			e.printStackTrace();
-		} catch (CBSecurityException e) {
-			e.printStackTrace();
-		} catch (TableNotFoundException e) {
-			e.printStackTrace();
-		}
-	}
-	
-	public static void writeDenSerialized(Connector connector, Collection<Map<String, String>> data) {
-		// write sample data
-		MultiTableBatchWriter mtbw = connector.createMultiTableBatchWriter(200000, 10000, 1);
-		try {
-			BatchWriter writer;
-			if (mtbw != null) {
-				writer = mtbw.getBatchWriter("partition");
-			} else {
-				writer = connector.createBatchWriter("partition", 200000, 10000, 1);
-			}
-			int count = 0;
-			Mutation m;
-			for (Map<String, String> object: data) {
-				count++;
-				String id = (count < 10 ? "0" + count: "" + count);
-				Text partition = new Text("" + (count % NUM_PARTITIONS));
-				
-				StringBuilder value = new StringBuilder();
-				boolean first = true;
-				for (Entry<String, String> entry: object.entrySet()) {
-					if (!first) {
-						value.append("\u0000");
-					} else {
-						first = false;
-					}
-					value.append(entry.getKey());
-					value.append("\uFFFD");
-					value.append(entry.getValue());
-					
-					// write the general index mutation
-					m = new Mutation(partition);
-					m.put("index", entry.getValue() + "\u0000" + id, "");
-					writer.addMutation(m);
-					
-					// write the specific index mutation
-					m = new Mutation(partition);
-					m.put("index", entry.getKey() + "//" + entry.getValue() + "\u0000" + id, "");
-					writer.addMutation(m);
-				}
-				
-				// write the event mutation
-				m = new Mutation(partition);
-				m.put("event", id, value.toString());
-				writer.addMutation(m);
-			}
-			writer.close();
-		} catch (CBException e) {
-			e.printStackTrace();
-		} catch (CBSecurityException e) {
-			e.printStackTrace();
-		} catch (TableNotFoundException e) {
-			e.printStackTrace();
-		}
-	}
-	
-	public static void writeDenProvenance(Connector connector) {
-		// write sample data
-		MultiTableBatchWriter mtbw = connector.createMultiTableBatchWriter(200000, 10000, 1);
-		try {
-			BatchWriter writer;
-			if (mtbw != null) {
-				writer = mtbw.getBatchWriter("provenance");
-			} else {
-				writer = connector.createBatchWriter("provenance", 200000, 10000, 1);
-			}
-			Mutation m;
-			for (int sid = 1; sid <= 2; sid++) {
-				for (int time = 1; time <= 3; time++) {
-					for (int uuid = 1; uuid <= (6 + 2 * time); uuid++) {
-						m = new Mutation(new Text("sid" + sid));
-						m.put("time" + time, "uuid-" + Integer.toHexString(uuid), "");
-						writer.addMutation(m);
-					}
-				}
-			}
-			writer.close();
-		} catch (CBException e) {
-			e.printStackTrace();
-		} catch (CBSecurityException e) {
-			e.printStackTrace();
-		} catch (TableNotFoundException e) {
-			e.printStackTrace();
-		}
-	}
-	
-	public static void writeMinIndexes(Connector connector) {
-		// write sample data
-		MultiTableBatchWriter mtbw = connector.createMultiTableBatchWriter(200000, 10000, 1);
-		try {
-			BatchWriter writer;
-			if (mtbw != null) {
-				writer = mtbw.getBatchWriter("partition");
-			} else {
-				writer = connector.createBatchWriter("partition", 200000, 10000, 1);
-			}
-			Mutation m;
-			for (int i = 1; i <= NUM_SAMPLES; i++) {
-				m = new Mutation(new Text("" + (i % NUM_PARTITIONS)));
-				
-				String id = (i < 10 ? "0" + i: "" + i);
-				
-				m.put("index", "z_" + id + "_rdate\u0000" + id, "");
-				writer.addMutation(m);
-			}
-			writer.close();
-		} catch (CBException e) {
-			e.printStackTrace();
-		} catch (CBSecurityException e) {
-			e.printStackTrace();
-		} catch (TableNotFoundException e) {
-			e.printStackTrace();
-		}
-	}
-}
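
For orientation, the partition-table layout SampleData's writers produce,
reduced to a single document (a sketch: the ids and field values are
placeholders, and the three puts are batched into one mutation where the
writers above issue one mutation per put):

    import org.apache.hadoop.io.Text;
    import cloudbase.core.client.BatchWriter;
    import cloudbase.core.client.Connector;
    import cloudbase.core.data.Mutation;

    public class PartitionLayoutSketch {
        public static void writeOneDoc(Connector connector) throws Exception {
            BatchWriter writer = connector.createBatchWriter("partition", 200000, 10000, 1);
            Text partition = new Text("1");   // row = partition id (count % NUM_PARTITIONS)
            String id = "01";                 // zero-padded document id

            Mutation m = new Mutation(partition);
            // serialized event cell: colf "event", colq = doc id, value = record
            m.put("event", id, "field0\uFFFDA\u0000field1\uFFFDB");
            // general index: colq = value + NUL + doc id
            m.put("index", "A\u0000" + id, "");
            // specific index: colq = key//value + NUL + doc id
            m.put("index", "field0//A\u0000" + id, "");
            writer.addMutation(m);
            writer.close();
        }
    }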

http://git-wip-us.apache.org/repos/asf/incubator-rya/blob/80faf06d/partition/common-query/src/test/java/SampleGVData.java
----------------------------------------------------------------------
diff --git a/partition/common-query/src/test/java/SampleGVData.java b/partition/common-query/src/test/java/SampleGVData.java
deleted file mode 100644
index d8168de..0000000
--- a/partition/common-query/src/test/java/SampleGVData.java
+++ /dev/null
@@ -1,182 +0,0 @@
-
-import java.util.ArrayList;
-import java.util.Collection;
-import java.util.HashMap;
-import java.util.List;
-import java.util.Map;
-import java.util.Map.Entry;
-
-import org.apache.hadoop.io.Text;
-
-import cloudbase.core.client.BatchWriter;
-import cloudbase.core.client.CBException;
-import cloudbase.core.client.CBSecurityException;
-import cloudbase.core.client.Connector;
-import cloudbase.core.client.Instance;
-import cloudbase.core.client.MultiTableBatchWriter;
-import cloudbase.core.client.TableExistsException;
-import cloudbase.core.client.TableNotFoundException;
-import cloudbase.core.client.mock.MockInstance;
-import cloudbase.core.data.Mutation;
-import cloudbase.core.security.Authorizations;
-
-// For use in testing the Date Filter and Frequency Filter classes
-public class SampleGVData
-{
-
-  public static int NUM_PARTITIONS = 2;
-
-
-  public static Connector initConnector()
-  {
-    Instance instance = new MockInstance();
-
-    try
-    {
-      Connector connector = instance.getConnector("root", "password".getBytes());
-
-      // set up table
-      connector.tableOperations().create("partition");
-
-      // set up root's auths
-      connector.securityOperations().changeUserAuthorizations("root", new Authorizations("ALPHA,BETA,GAMMA".split(",")));
-
-      return connector;
-    }
-    catch (CBException e)
-    {
-      e.printStackTrace();
-    }
-    catch (CBSecurityException e)
-    {
-      e.printStackTrace();
-    }
-    catch (TableExistsException e)
-    {
-      e.printStackTrace();
-    }
-
-    return null;
-  }
-
-  public static Collection<Map<String, String>> sampleData()
-  {
-    List<Map<String, String>> list = new ArrayList<Map<String, String>>();
-    Map<String, String> item;
-
-    item = new HashMap<String, String>();
-    item.put("a",  "a");
-    item.put("b",  "b");
-
-    //This one is like RB
-    item.put("date-start",  "2009-01-01");
-    item.put("date-end",    "2011-02-24");
-    item.put("date-update", "2011-02-24T00:00:00Z");
-    item.put("frequency",  "1250000000");
-    item.put("bandwidth",   "500000000");
-    item.put("version",     "1");
-    list.add(item);
-
-    item = new HashMap<String, String>();
-    item.put("a",  "a");
-    item.put("b",  "b");
-    list.add(item);
-
-    //This one is like GV
-    item = new HashMap<String, String>();
-    item.put("a",  "a");
-    item.put("b",  "b");
-    item.put("date-start",  "2010-01-01");
-    item.put("date-update", "2010-01-23");
-    item.put("frequency",  "1150000000");
-    item.put("bandwidth",   "300000000");
-    list.add(item);
-
-    item = new HashMap<String, String>();
-    item.put("a",  "a");
-    item.put("b",  "b");
-    item.put("date-start",  "2009-01-01");
-    item.put("date-end",    "2011-02-24");
-    item.put("date-update", "2008-01-23");
-    list.add(item);
-
-    item = new HashMap<String, String>();
-    item.put("a",  "a");
-    item.put("b",  "b");
-    list.add(item);
-
-    return list;
-  }
-
-
-  public static void writeDenSerialized(Connector connector, Collection<Map<String, String>> data)
-  {
-    // write sample data
-    MultiTableBatchWriter mtbw = connector.createMultiTableBatchWriter(200000, 10000, 1);
-    try
-    {
-      BatchWriter writer;
-      if (mtbw != null)
-      {
-        writer = mtbw.getBatchWriter("partition");
-      }
-      else
-      {
-        writer = connector.createBatchWriter("partition", 200000, 10000, 1);
-      }
-      int count = 0;
-      Mutation m;
-      for (Map<String, String> object : data)
-      {
-        count++;
-        String id = (count < 10 ? "0" + count : "" + count);
-        Text partition = new Text("" + (count % NUM_PARTITIONS));
-
-        StringBuilder value = new StringBuilder();
-        boolean first = true;
-        for (Entry<String, String> entry : object.entrySet())
-        {
-          if (!first)
-          {
-            value.append("\u0000");
-          }
-          else
-          {
-            first = false;
-          }
-          value.append(entry.getKey());
-          value.append("\uFFFD");
-          value.append(entry.getValue());
-
-          // write the general index mutation
-          m = new Mutation(partition);
-          m.put("index", entry.getValue() + "\u0000" + id, "");
-          writer.addMutation(m);
-
-          // write the specific index mutation
-          m = new Mutation(partition);
-          m.put("index", entry.getKey() + "//" + entry.getValue() + "\u0000" + id, "");
-          writer.addMutation(m);
-        }
-
-        // write the event mutation
-        m = new Mutation(partition);
-        m.put("event", id, value.toString());
-        writer.addMutation(m);
-      }
-      writer.close();
-    }
-    catch (CBException e)
-    {
-      e.printStackTrace();
-    }
-    catch (CBSecurityException e)
-    {
-      e.printStackTrace();
-    }
-    catch (TableNotFoundException e)
-    {
-      e.printStackTrace();
-    }
-  }
-}

http://git-wip-us.apache.org/repos/asf/incubator-rya/blob/80faf06d/partition/common-query/src/test/java/SampleJTSData.java
----------------------------------------------------------------------
diff --git a/partition/common-query/src/test/java/SampleJTSData.java b/partition/common-query/src/test/java/SampleJTSData.java
deleted file mode 100644
index 41df658..0000000
--- a/partition/common-query/src/test/java/SampleJTSData.java
+++ /dev/null
@@ -1,171 +0,0 @@
-
-import java.util.ArrayList;
-import java.util.Collection;
-import java.util.HashMap;
-import java.util.List;
-import java.util.Map;
-import java.util.Map.Entry;
-
-import org.apache.hadoop.io.Text;
-
-import cloudbase.core.client.BatchWriter;
-import cloudbase.core.client.CBException;
-import cloudbase.core.client.CBSecurityException;
-import cloudbase.core.client.Connector;
-import cloudbase.core.client.Instance;
-import cloudbase.core.client.MultiTableBatchWriter;
-import cloudbase.core.client.TableExistsException;
-import cloudbase.core.client.TableNotFoundException;
-import cloudbase.core.client.mock.MockInstance;
-import cloudbase.core.data.Mutation;
-import cloudbase.core.security.Authorizations;
-
-// For use in testing the JTSFilter class
-public class SampleJTSData
-{
-
-  public static int NUM_PARTITIONS = 2;
-
-
-  public static Connector initConnector()
-  {
-    Instance instance = new MockInstance();
-
-    try
-    {
-      Connector connector = instance.getConnector("root", "password".getBytes());
-
-      // set up table
-      connector.tableOperations().create("partition");
-
-      // set up root's auths
-      connector.securityOperations().changeUserAuthorizations("root", new Authorizations("ALPHA,BETA,GAMMA".split(",")));
-
-      return connector;
-    }
-    catch (CBException e)
-    {
-      e.printStackTrace();
-    }
-    catch (CBSecurityException e)
-    {
-      e.printStackTrace();
-    }
-    catch (TableExistsException e)
-    {
-      e.printStackTrace();
-    }
-
-    return null;
-  }
-
-  public static Collection<Map<String, String>> sampleData()
-  {
-    List<Map<String, String>> list = new ArrayList<Map<String, String>>();
-    Map<String, String> item;
-
-    item = new HashMap<String, String>();
-    item.put("geometry-contour",  "SDO_GEOMETRY(2007, 8307, NULL, SDO_ELEM_INFO_ARRAY(1, 1003, 1), SDO_ORDINATE_ARRAY(91.985, -12.108, 94.657, -12.059, 98.486, -11.988, 101.385, -12.296, 102.911, -12.569, 103.93, -12.852, 105.005, -12.531, 106.37, -12.204, 108.446, -11.503, 109.585, -10.88, 110.144, -10.207, 108.609, -9.573, 106.05, -8.535, 104.145, -7.606, 102.191, -7.522, 99.522, -7.691, 97.64, -7.606, 95.482, -7.947, 94.546, -8.084, 92.465, -8.605, 90.554, -9.366, 90.197, -10.436, 89.84, -11.729, 90.554, -12.175, 91.985, -12.108))");
-    item.put("beam-name",    "OPTUS D1 Ku-BAND NATIONAL A & B AUSTRALIA Downlink");
-    list.add(item);
-    //This is Australia
-    //Points like 9S 100E are in the beam
-
-    //This one is like GV
-    item = new HashMap<String, String>();
-    item.put("beam-name",  "AMC 1 Ku-BAND ZONAL NORTH AMERICA Down HV");
-    item.put("geometry-contour",   "SDO_GEOMETRY(2007, 8307, NULL, SDO_ELEM_INFO_ARRAY(1, 1003, 1), SDO_ORDINATE_ARRAY(-70.838, 39.967, -70.506, 40.331, -70.698, 41.679, -71.179, 42.401, -71.578, 42.38, -72.994, 42.924, -74.353, 43.242, -75.715, 43.26, -77.318, 42.981, -78.684, 42.774, -80.05, 42.491, -82.005, 42.517, -83.608, 42.312, -84.977, 41.805, -86.58, 41.525, -88.127, 41.02, -89.731, 40.741, -90.905, 41.582, -92.264, 41.9, -93.861, 42.147, -95.411, 41.341, -96.257, 40.076, -97.222, 38.737, -98.011, 37.17, -98.031, 35.593, -97.691, 34.312, -96.875, 33.25, -97.307, 31.904, -97.916, 30.561, -98.702, 29.295, -99.134, 27.949, -98.14, 26.884, -97.205, 25.821, -95.842, 25.803, -94.42, 25.784, -92.876, 26.064, -91.277, 26.043, -90.085, 26.553, -88.729, 26.01, -87.38, 24.941, -86.031, 23.797, -84.616, 23.253, -83.256, 23.01, -81.887, 23.517, -80.866, 24.555, -80.254, 26.124, -79.642, 27.693, -78.444, 28.728, -77.486, 29.542, -76.463, 30.805, -76.088, 32.377, -75.656, 33.723, -76.051, 35.305, -75.442, 36.649, -74.426, 37.386, -73.228, 38.422, -72.032, 39.232, -70.838, 39.967))");
-    list.add(item);
-    //This is North America
-    //Points like 39°44'21.00"N 104°59'3.00"W (Denver) are in the footprint
-
-    item = new HashMap<String, String>();
-    item.put("beam-name",  "testa");
-    item.put("beam-footprint",   "MULTIPOLYGON (((-169.286 40.431, -164.971 39.992, -155.397 38.482, -146.566 36.233, -136.975 32.539, -128.124 27.742, -121.946 24.548, -116.849 21.339, -112.156 17.479, -109.391 14.206, -107.301 11.715, -105.274 9.477, -103.443 8.229, -102.108 7.7, -99.109 7.428, -96.681 7.745, -93.894 8.843, -89.917 11.687, -85.953 15.017, -81.148 17.266, -78.145 17.986, -75.582 17.887, -68.1 17.987, -64.696 18.493, -61.445 19.38, -60.094 20.288, -59.315 21.564, -57.026 26.51, -55.089 30.962, -53.59 33.657, -52.495 34.691, -50.468 36.204, -46.146 38.672, -41.684 40.663, -37.914 42.055, -33.806 43.082, -27.523 44.149, -21.645 44.96, -16.578 45.406, -13.807 45.771, -14.929 50.108, -16.186 53.919, -17.051 56.0, -18.388 58.824, -19.861 61.567, -21.807 64.188, -23.104 65.742, -25.28 67.904, -27.699 69.823, -28.955 70.728, -32.415 72.768, -34.968 73.998, -38.468 75.309, -48.292 73.025, -56.545 71.12, -64.023 70.474, -72.753 70.357, -78.41 70.827, -80.466 71.093, -82.412 
 71.876, -83.02 72.944, -83.175 74.04, -82.493 74.782, -82.412 75.552, -82.697 76.778, -84.041 78.398, -86.316 81.078, -104.098 80.819, -110.861 80.482, -115.73 80.17, -120.936 79.669, -125.84 79.176, -126.696 79.02, -134.316 77.732, -139.505 76.478, -144.823 74.826, -148.231 73.417, -151.517 71.687, -153.87 70.165, -154.536 69.672, -155.868 68.678, -156.482 68.098, -158.281 66.421, -159.716 64.804, -160.996 63.126, -161.878 61.786, -163.046 59.875, -164.369 57.254, -165.563 54.479, -166.73 51.089, -167.811 47.267, -168.581 44.041, -169.286 40.431)), ((-171.333 23.244, -171.523 18.894, -170.127 18.986, -161.559 18.555, -156.977 18.134, -153.574 18.116, -151.108 18.324, -149.947 18.45, -149.018 18.957, -148.515 19.822, -148.524 20.914, -149.018 21.766, -149.947 22.272, -152.185 23.054, -155.563 23.434, -158.075 23.75, -160.272 24.034, -162.184 24.008, -163.514 23.99, -164.595 23.976, -166.52 23.687, -169.159 23.18, -171.333 23.244)))");
-    list.add(item);
-// this point should be in there...
-    // -164 40 - somewhere near hawaii
-
-    item = new HashMap<String, String>();
-    item.put("beam-name",  "testb");
-    item.put("beam-footprint",   "POLYGON ((-140.153 34.772, -140.341 33.272, -137.024 33.026, -132.723 32.369, -130.947 31.916, -128.664 31.225, -125.293 29.612, -121.813 27.871, -118.699 25.892, -115.589 23.79, -112.593 21.875, -109.136 19.335, -106.939 16.701, -105.006 14.97, -104.195 14.407, -103.049 13.659, -100.363 12.717, -98.063 12.288, -94.299 11.612, -90.825 11.097, -87.997 11.584, -86.815 12.109, -86.163 12.893, -85.014 14.342, -83.804 15.788, -82.104 16.998, -80.413 17.269, -78.005 16.574, -76.181 16.531, -74.65 16.68, -73.552 17.392, -72.957 18.3, -72.917 19.651, -73.526 21.325, -74.913 23.018, -76.036 24.519, -76.159 26.428, -75.741 28.447, -74.257 30.072, -72.771 31.331, -70.517 34.328, -69.638 36.04, -68.624 39.467, -68.015 41.851, -67.607 43.501, -67.548 45.528, -67.586 47.308, -68.601 49.066, -69.868 50.07, -71.621 50.778, -73.285 50.888, -74.9 50.926, -76.994 50.975, -79.332 50.846, -81.066 50.887, -83.842 51.136, -86.569 51.016, -87.95 50.864, -90.831 50.563, -94
 .27 50.644, -98.068 50.733, -102.937 51.032, -106.455 51.484, -109.973 51.936, -114.119 52.402, -117.363 53.031, -119.899 53.276, -123.243 53.539, -127.017 54.427, -130.519 55.431, -133.643 56.058, -134.826 56.279, -135.354 55.029, -135.792 53.864, -136.168965072136 52.8279962761917, -136.169 52.828, -136.169497186166 52.8264970826432, -136.192 52.763, -136.556548517884 51.6453176911637, -136.703232746756 51.2152965828266, -136.781220290925 50.9919311116929, -136.793 50.959, -136.80274055379 50.9259886895048, -136.992 50.295, -137.200898649547 49.5808675274021, -137.202 49.581, -137.200962495599 49.5806459535167, -137.360714473458 49.0197683891632, -137.459 48.677, -137.462166719028 48.6649126473121, -137.471 48.634, -137.515105536699 48.4619710228524, -137.74710368039 47.5528216167105, -137.793718522461 47.3758260237407, -137.854 47.152, -137.977773277882 46.6610808974241, -138.044 46.403, -138.330834102374 45.1674736036557, -138.365 45.019, -138.38180854655 44.9421315900087, -138.
 449801069917 44.6389849661384, -138.485 44.484, -138.497077239724 44.4262941289417, -138.536 44.25, -138.622787032392 43.8206200438395, -138.743816168807 43.232032787661, -138.981390224617 42.0843314825185, -138.989 42.048, -138.990605533614 42.0389442888447, -138.991 42.037, -138.997785044232 41.9994454595406, -139.004 41.969, -139.035645873997 41.7890661698517, -139.061212567475 41.6462082823816, -139.428 39.584, -139.673 38.073, -139.713116752585 37.8001474769807, -139.766 37.457, -139.764942047737 37.4567768906428, -139.898 36.573, -139.897723683259 36.5729429963606, -139.986 35.994, -140.04777653037 35.5462970502163, -140.094 35.232, -140.090797568766 35.2315144621917, -140.153 34.772))");
-    list.add(item);
-
-
-
-    //London is in neither - 51°30'0.00"N   0° 7'0.00"W
-    return list;
-  }
-
-
-  public static void writeDenSerialized(Connector connector, Collection<Map<String, String>> data)
-  {
-    // write sample data
-    MultiTableBatchWriter mtbw = connector.createMultiTableBatchWriter(200000, 10000, 1);
-    try
-    {
-      BatchWriter writer;
-      if (mtbw != null)
-      {
-        writer = mtbw.getBatchWriter("partition");
-      }
-      else
-      {
-        writer = connector.createBatchWriter("partition", 200000, 10000, 1);
-      }
-      int count = 0;
-      Mutation m;
-      for (Map<String, String> object : data)
-      {
-        count++;
-        String id = (count < 10 ? "0" + count : "" + count);
-        Text partition = new Text("" + (count % NUM_PARTITIONS));
-
-        StringBuilder value = new StringBuilder();
-        boolean first = true;
-        for (Entry<String, String> entry : object.entrySet())
-        {
-          if (!first)
-          {
-            value.append("\u0000");
-          }
-          else
-          {
-            first = false;
-          }
-          value.append(entry.getKey());
-          value.append("\uFFFD");
-          value.append(entry.getValue());
-
-          // write the general index mutation
-          m = new Mutation(partition);
-          m.put("index", entry.getValue() + "\u0000" + id, "");
-          writer.addMutation(m);
-
-          // write the specific index mutation
-          m = new Mutation(partition);
-          m.put("index", entry.getKey() + "//" + entry.getValue() + "\u0000" + id, "");
-          writer.addMutation(m);
-        }
-
-        // write the event mutation
-        m = new Mutation(partition);
-        m.put("event", id, value.toString());
-        writer.addMutation(m);
-      }
-      writer.close();
-    }
-    catch (CBException e)
-    {
-      e.printStackTrace();
-    }
-    catch (CBSecurityException e)
-    {
-      e.printStackTrace();
-    }
-    catch (TableNotFoundException e)
-    {
-      e.printStackTrace();
-    }
-  }
-}

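The deleted fixture above also documents the partition-table layout this module used: each record lands under a partition row with a general index entry ("index", value\u0000id), a field-specific index entry ("index", key//value\u0000id), and an "event" cell whose value is the whole record serialized as key\uFFFDvalue pairs joined by \u0000. A minimal sketch of reading such an event value back into a map (decodeEvent is a hypothetical helper, not part of the removed code):

    import java.util.HashMap;
    import java.util.Map;

    // Decodes an "event" cell value written by writeDenSerialized above:
    // fields are separated by \u0000, key and value within a field by \uFFFD.
    public static Map<String, String> decodeEvent(String eventValue) {
        Map<String, String> record = new HashMap<String, String>();
        for (String field : eventValue.split("\u0000")) {
            String[] kv = field.split("\uFFFD", 2);
            if (kv.length == 2) {
                record.put(kv[0], kv[1]);
            }
        }
        return record;
    }
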
http://git-wip-us.apache.org/repos/asf/incubator-rya/blob/80faf06d/partition/iterator-test/filter.txt
----------------------------------------------------------------------
diff --git a/partition/iterator-test/filter.txt b/partition/iterator-test/filter.txt
deleted file mode 100644
index b53773f..0000000
--- a/partition/iterator-test/filter.txt
+++ /dev/null
@@ -1,6 +0,0 @@
-<BBOX>
-	<gml:Envelope>
-		<gml:LowerCorner>119 33</gml:LowerCorner>
-		<gml:UpperCorner>120 34</gml:UpperCorner>
-	</gml:Envelope>
-</BBOX>

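The removed filter.txt above is an OGC BBOX predicate in GML: it keeps only records whose geometry intersects the envelope spanning corner 119 33 to corner 120 34 (reading those as lon/lat pairs is an assumption; the file declares no axis order). The Main harness below reads such a file verbatim and applies it at scan time through OGCFilter.OPTION_FILTER.
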
http://git-wip-us.apache.org/repos/asf/incubator-rya/blob/80faf06d/partition/iterator-test/pom.xml
----------------------------------------------------------------------
diff --git a/partition/iterator-test/pom.xml b/partition/iterator-test/pom.xml
deleted file mode 100644
index daed27f..0000000
--- a/partition/iterator-test/pom.xml
+++ /dev/null
@@ -1,99 +0,0 @@
-<project xmlns="http://maven.apache.org/POM/4.0.0" xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance"
-	xsi:schemaLocation="http://maven.apache.org/POM/4.0.0 http://maven.apache.org/maven-v4_0_0.xsd">
-	<parent>
-		<artifactId>dss</artifactId>
-		<groupId>dss</groupId>
-		<version>2.0.0-SNAPSHOT</version>
-	</parent>
-	<modelVersion>4.0.0</modelVersion>
-	<groupId>dss.webservice</groupId>
-	<artifactId>iterator-test</artifactId>
-	<packaging>jar</packaging>
-	<name>webservice-test</name>
-	<version>0.2.0-SNAPSHOT</version>
-	<description />
-	
-	<properties>
-		<env>USER</env>
-	</properties>
-	
-	<build>
-		<plugins>
-			<plugin>
-				<groupId>org.apache.maven.plugins</groupId>
-				<artifactId>maven-jar-plugin</artifactId>
-				<configuration>
-					<archive>
-						<manifest>
-							<mainClass>dss.webservice.itr.Main</mainClass>
-						</manifest>
-					</archive>
-				</configuration>
-			</plugin>
-			<plugin>
-				<artifactId>maven-dependency-plugin</artifactId>
-				<executions>
-					<execution>	
-						<id>unpack-dependencies</id>
-						<phase>generate-resources</phase>
-						<goals>
-							<goal>unpack-dependencies</goal>
-						</goals>
-					</execution>
-				</executions>
-			</plugin>
-		</plugins>
-		<resources>
-			<resource>
-				<directory>${basedir}/target/dependency</directory>
-			</resource>
-		</resources>
-	</build>
-	<dependencies>
-		<dependency>
-			<groupId>cloudbase</groupId>
-			<artifactId>cloudbase-core</artifactId>
-			<version>1.3.1</version>
-		</dependency>
-		<dependency>
-			<groupId>cloudbase</groupId>
-			<artifactId>cloudbase-start</artifactId>
-			<version>1.3.1</version>
-		</dependency>
-		<dependency>
-			<groupId>org.apache.thrift</groupId>
-			<artifactId>thrift</artifactId>
-			<version>0.3</version>
-		</dependency>
-		<dependency>
-			<groupId>javax.servlet</groupId>
-			<artifactId>servlet-api</artifactId>
-			<scope>provided</scope>
-			<version>2.5</version>
-		</dependency>
-		<dependency>
-			<groupId>log4j</groupId>
-			<artifactId>log4j</artifactId>
-		</dependency>
-		<dependency>
-			<groupId>commons-logging</groupId>
-			<artifactId>commons-logging</artifactId>
-			<version>1.0.4</version>
-		</dependency>
-		<dependency>
-			<groupId>org.apache.hadoop</groupId>
-			<artifactId>hadoop-common</artifactId>
-			<version>0.20.1</version>
-		</dependency>
-		<dependency>
-			<groupId>org.apache</groupId>
-			<artifactId>zookeeper</artifactId>
-			<version>3.3.0</version>
-		</dependency>
-		<dependency>
-			<groupId>sitestore.common</groupId>
-			<artifactId>common-query</artifactId>
-			<version>2.0.0-SNAPSHOT</version>
-		</dependency>
-	</dependencies>
-</project>

http://git-wip-us.apache.org/repos/asf/incubator-rya/blob/80faf06d/partition/iterator-test/src/main/java/dss/webservice/itr/Main.java
----------------------------------------------------------------------
diff --git a/partition/iterator-test/src/main/java/dss/webservice/itr/Main.java b/partition/iterator-test/src/main/java/dss/webservice/itr/Main.java
deleted file mode 100644
index 6b040fc..0000000
--- a/partition/iterator-test/src/main/java/dss/webservice/itr/Main.java
+++ /dev/null
@@ -1,348 +0,0 @@
-package dss.webservice.itr;
-
-import java.io.File;
-import java.util.HashSet;
-import java.util.Map;
-import java.util.Scanner;
-import java.util.Set;
-import java.util.TreeMap;
-import java.util.Map.Entry;
-
-import org.apache.hadoop.io.Text;
-import org.apache.log4j.Level;
-import org.apache.log4j.Logger;
-
-import ss.cloudbase.core.iterators.CellLevelFilteringIterator;
-import ss.cloudbase.core.iterators.CellLevelRecordIterator;
-import ss.cloudbase.core.iterators.ConversionIterator;
-import ss.cloudbase.core.iterators.GMDenIntersectingIterator;
-import ss.cloudbase.core.iterators.SortedMinIterator;
-import ss.cloudbase.core.iterators.SortedRangeIterator;
-import ss.cloudbase.core.iterators.filter.ogc.OGCFilter;
-import cloudbase.core.client.BatchScanner;
-import cloudbase.core.client.Connector;
-import cloudbase.core.client.ZooKeeperInstance;
-import cloudbase.core.data.Key;
-import cloudbase.core.data.PartialKey;
-import cloudbase.core.data.Range;
-import cloudbase.core.data.Value;
-import cloudbase.core.iterators.FilteringIterator;
-import cloudbase.core.iterators.filter.RegExFilter;
-import cloudbase.core.security.Authorizations;
-
-public class Main {
-	private static final Logger logger = Logger.getLogger(Main.class);
-	
-	private static String CB_INSTANCE = "INSTANCENAME"; // INSERT INSTANCE NAME
-	private static String ZK_SERVERS = "r02sv22:2181,r03sv23:2181,r04sv22:2181,r05sv23:2181";
-	private static String CB_USER = "user"; // SET USERNAME
-	private static String CB_PASS = "pass"; // SET PASSWORD
-	private static String CB_AUTH = "U,FOUO";
-	private static String CB_TABLE = "partition_gi";
-	
-	public static void main(String[] args) {
-		Map<String,String> request = new TreeMap<String, String>();
-		
-		int itrLevel = 50;
-		
-		for (String pair: args) {
-			String[] parts = pair.split("[=]");
-			if (parts.length == 1) {
-				request.put(parts[0], parts[0]);
-			} else if (parts.length == 2) {
-				request.put(parts[0], parts[1]);
-			}
-		}
-		
-		BatchScanner reader = null;
-		
-		String filter = request.remove("filter");
-		String terms = request.remove("terms");
-		String ranges = request.remove("ranges");
-		String partition = request.remove("partition");
-		String rangeFamily = request.remove("rangeFamily");
-		String prefix = request.remove("prefix");
-		String index = request.remove("index");
-		String test = request.remove("test");
-		String testKey = request.remove("testKey");
-		String convert = request.remove("convert");
-		String grep = request.remove("grep");
-		int print = -1;
-		
-		try {
-			print = Integer.parseInt(request.remove("print"));
-		} catch (NumberFormatException e) {
-			print = 0;
-		}
-	
-		boolean dryRun = request.remove("dryRun") != null;
-		boolean debug = request.remove("debug") != null;
-		boolean startInclusive = request.remove("start") != null;
-		boolean endInclusive = request.remove("end") != null;
-		boolean nodoc = request.remove("nodoc") != null;
-		boolean multiDoc = request.remove("multiDoc") != null;
-		boolean aggregate = request.remove("aggregate") != null;
-		
-		int threads = 5;
-		if (request.containsKey("threads")) {
-			threads = Integer.parseInt(request.remove("threads"));
-		}
-		
-		if (partition != null) {
-			partition = partition.replace(".", "\u0000");
-		}
-		
-		if (index != null) {
-			index = index.replace(':', '=');
-		}
-		
-		if (testKey != null) {
-			testKey = testKey.replace(".", "\u0000");
-		}
-		
-		if (request.containsKey("c")) {
-			CB_INSTANCE = request.remove("c");
-		}
-		
-		if (request.containsKey("z")) {
-			ZK_SERVERS = request.remove("z");
-		}
-		
-		if (request.containsKey("u")) {
-			CB_USER = request.remove("u");
-		}
-		
-		if (request.containsKey("p")) {
-			CB_PASS = request.remove("p");
-		}
-		
-		if (request.containsKey("s")) {
-			CB_AUTH = request.remove("s");
-		}
-		
-		if (request.containsKey("t")) {
-			CB_TABLE = request.remove("t");
-		}
-		
-		logger.info("Cloudbase Connection: ");
-		logger.info("\tc (instance):\t" + CB_INSTANCE);
-		logger.info("\tz (zk servers):\t" + ZK_SERVERS);
-		logger.info("\tu (user):\t" + CB_USER);
-		logger.info("\tp (pass):\t" + CB_PASS);
-		logger.info("\ts (auths):\t" + CB_AUTH);
-		logger.info("\tt (table):\t" + CB_TABLE);
-		
-		logger.info("Query Parameters:");
-		logger.info("\tindex:\t\t" + index);
-		logger.info("\tfilter:\t\t" + filter);
-		logger.info("\tterms:\t\t" + terms);
-		logger.info("\tgrep:\t\t" + grep);
-		logger.info("\tprefix:\t\t" + prefix);
-		logger.info("\tranges:\t\t" + ranges);
-		logger.info("\trangeFamily:\t" + rangeFamily);
-		logger.info("\tpartition:\t" + partition);
-		logger.info("\tstartInc:\t" + startInclusive);
-		logger.info("\tendInc:\t\t" + endInclusive);
-		logger.info("\tthreads:\t" + threads);
-		logger.info("\tprint:\t\t" + print);
-		logger.info("\tdryRun:\t\t" + dryRun);
-		logger.info("\tdebug:\t\t" + debug);
-		logger.info("\ttestKey:\t" + testKey);
-		logger.info("\tmultiDoc:\t" + multiDoc);
-		logger.info("\taggregate:\t" + aggregate);
-		logger.info("\tconvert:\t" + convert);
-		
-		logger.info("Unknown Parameters: ");
-		for (Entry<String,String> entry: request.entrySet()) {
-			logger.info("\t" + entry.getKey() + ":\t\t" + entry.getValue());
-		}
-		
-		if (debug) {
-			// set the cloudbase logging to trace
-			Logger.getLogger("cloudbase").setLevel(Level.TRACE);
-		}
-		
-		boolean iteratorSet = false;
-		
-		try {
-			ZooKeeperInstance zk = new ZooKeeperInstance(CB_INSTANCE, ZK_SERVERS);
-			Connector connector = new Connector(zk, CB_USER, CB_PASS.getBytes());
-			if (test != null) {
-				Test t = (Test) Class.forName("dss.webservice.itr.test." + test).newInstance();
-				t.runTest(request, connector, CB_TABLE, CB_AUTH);
-				logger.info("done.");
-				System.exit(0);
-			}
-			reader = connector.createBatchScanner(CB_TABLE, new Authorizations(CB_AUTH.split(",")), threads);
-	
-			Set<Range> partitionRanges = new HashSet<Range>();
-			if (partition != null) {
-				partition = partition.replace(".", "\u0000");
-				Key startKey = null;
-				Key endKey = null;
-				if (partition.contains(",")) {
-					startKey = new Key(new Text(partition.split(",")[0]));
-					endKey = new Key(new Text(partition.split(",")[1]));
-				} else {
-					startKey = new Key(new Text(partition));
-					endKey = startKey.followingKey(PartialKey.ROW);
-				}
-				
-				Range range = new Range(startKey, true, endKey, false);
-				if (testKey != null) {
-					Key kTest = new Key(new Text(testKey));
-					if (range.contains(kTest)) {
-						logger.info("Key " + kTest + " is in the current range");
-					} else {
-						logger.info("Key " + kTest + " is not in the current range");
-					}
-				}
-				partitionRanges.add(range);
-			} else {
-				partitionRanges.add(new Range());
-			}
-	
-			if (terms != null && terms.trim().length() > 0) {
-				String[] parts = terms.trim().split(",");
-				if (parts.length == 1) {
-					logger.info("Creating range iterator from '" + parts[0] + "' to '" + parts[0] + "\\u0000'.");
-					reader.setScanIterators(itrLevel++, SortedRangeIterator.class.getName(), "ri");
-					reader.setScanIteratorOption("ri", SortedRangeIterator.OPTION_DOC_COLF, "event");
-					reader.setScanIteratorOption("ri", SortedRangeIterator.OPTION_COLF, "index");
-					reader.setScanIteratorOption("ri", SortedRangeIterator.OPTION_LOWER_BOUND, parts[0]);
-					reader.setScanIteratorOption("ri", SortedRangeIterator.OPTION_UPPER_BOUND, parts[0] + "\u0000");
-					reader.setScanIteratorOption("ri", SortedRangeIterator.OPTION_MULTI_DOC, "" + multiDoc);
-					iteratorSet = true;
-				} else if (parts.length > 1) {
-					logger.info("Creating intersecting iterator from all terms");
-					Text[] t = new Text[parts.length];
-					for (int i = 0; i < parts.length; i++) {
-						if (parts[i].startsWith("range")) {
-							parts[i] = parts[i].replace("_", "\u0000");
-						}
-						
-						t[i] = new Text(parts[i]);
-						logger.info("Adding Term: " + parts[i]);
-					}
-
-					reader.setScanIterators(itrLevel++, GMDenIntersectingIterator.class.getName(), "ii");
-					reader.setScanIteratorOption("ii", GMDenIntersectingIterator.docFamilyOptionName, "event");
-					reader.setScanIteratorOption("ii", GMDenIntersectingIterator.indexFamilyOptionName, "index");
-					reader.setScanIteratorOption("ii", GMDenIntersectingIterator.columnFamiliesOptionName, GMDenIntersectingIterator.encodeColumns(t));
-					reader.setScanIteratorOption("ii", GMDenIntersectingIterator.OPTION_MULTI_DOC, "" + multiDoc);
-					iteratorSet = true;
-				}
-			} else if (ranges != null && ranges.trim().length() > 0) {
-				// set up a range iterator
-				logger.info("Creating range iterator on " + (rangeFamily != null ? rangeFamily: "index") + " for all ranges startInclusive: " + startInclusive + " endInclusive: " + endInclusive);
-				String[] parts = ranges.trim().split(",");
-				if (parts.length > 1 && parts.length % 2 == 0) {
-//					reader.setScanIterators(itrLevel++, RangeIterator.class.getName(), "ri");
-//					reader.setScanIteratorOption("ri", RangeIterator.OPTION_INDEX_COLF, rangeFamily != null ? rangeFamily: "index");
-//					reader.setScanIteratorOption("ri", RangeIterator.OPTION_START_INCLUSIVE, "" + startInclusive);
-//					reader.setScanIteratorOption("ri", RangeIterator.OPTION_END_INCLUSIVE, "" + endInclusive);
-//					reader.setScanIteratorOption("ri", RangeIterator.OPTION_RANGES, RangeIterator.encodeRanges(parts));
-					
-					reader.setScanIterators(itrLevel++, SortedRangeIterator.class.getName(), "ir");
-					reader.setScanIteratorOption("ir", SortedRangeIterator.OPTION_COLF, rangeFamily != null ? rangeFamily: "index");
-					reader.setScanIteratorOption("ir", SortedRangeIterator.OPTION_START_INCLUSIVE, "" + startInclusive);
-					reader.setScanIteratorOption("ir", SortedRangeIterator.OPTION_END_INCLUSIVE, "" + endInclusive);
-					reader.setScanIteratorOption("ir", SortedRangeIterator.OPTION_LOWER_BOUND, parts[0]);
-					reader.setScanIteratorOption("ir", SortedRangeIterator.OPTION_UPPER_BOUND, parts[1]);
-					reader.setScanIteratorOption("ir", SortedRangeIterator.OPTION_MULTI_DOC, "" + multiDoc);
-					iteratorSet = true;
-				} else {
-					throw new RuntimeException("A start and end range must be given for each range");
-				}
-			} else if (index != null && index.trim().length() > 0 && partition != null) {
-				// look for an index on a partition
-				
-				// get out the ranges and add the index colf and term colq
-				Range r = partitionRanges.iterator().next();
-				Key start = new Key (r.getStartKey().getRow(), new Text("index"), new Text(index));
-				Key end = new Key (r.getStartKey().getRow(), new Text("index"), new Text(index + "\uFFFD"));
-				partitionRanges.clear();
-				partitionRanges.add(new Range(start, true, end, false));
-				iteratorSet = true;
-				
-			} else if (prefix != null && prefix.trim().length() > 0) {
-				logger.info("Setting a min iterator on " + prefix);
-				reader.setScanIterators(itrLevel++, SortedMinIterator.class.getName(), "mi");
-				reader.setScanIteratorOption("mi", SortedMinIterator.OPTION_PREFIX, prefix);
-				reader.setScanIteratorOption("mi", SortedMinIterator.OPTION_MULTI_DOC, "" + multiDoc);
-				iteratorSet = true;
-			}
-			
-			if (aggregate) {
-				reader.setScanIterators(itrLevel++, CellLevelRecordIterator.class.getName(), "aggregator");
-			}
-			
-			if (filter != null && filter.trim().length() > 0) {
-				logger.info("Creating filtering iterator from filter in " + filter);
-				Scanner scanner = new Scanner(new File(filter));
-				
-				filter = "";
-				while (scanner.hasNextLine()) {
-					filter += scanner.nextLine().trim(); 
-				}
-				
-				// set up a filtering iterator
-				logger.info("Filer = " + filter);
-				
-				if (multiDoc && !aggregate) {
-					reader.setScanIterators(itrLevel++, CellLevelFilteringIterator.class.getName(), "fi");
-					reader.setScanIteratorOption("fi", CellLevelFilteringIterator.OPTION_FILTER, filter);
-				} else {
-					reader.setScanIterators(itrLevel++, FilteringIterator.class.getName(), "fi");
-					reader.setScanIteratorOption("fi", "0", OGCFilter.class.getName());
-					reader.setScanIteratorOption("fi", "0." + OGCFilter.OPTION_FILTER, filter);
-//					reader.setScanIteratorOption("fi", "1", RegExFilter.class.getName());
-//					reader.setScanIteratorOption("fi", "1." + RegExFilter.ROW_REGEX, "theRegex");
-				}
-				iteratorSet = true;
-			}
-			
-			if (convert != null && convert.trim().length() > 0) {
-				convert = convert.replaceAll("_", " ");
-				String[] conversions = convert.split(",");
-				reader.setScanIterators(itrLevel++, ConversionIterator.class.getName(), "ci");
-				reader.setScanIteratorOption("ci", ConversionIterator.OPTION_CONVERSIONS, ConversionIterator.encodeConversions(conversions));
-				reader.setScanIteratorOption("ci", ConversionIterator.OPTION_MULTI_DOC, "" + (multiDoc && ! aggregate));
-			}
-			
-			logger.info("Setting range to: " + partitionRanges.iterator().next());
-			reader.setRanges(partitionRanges);
-			
-			if (!iteratorSet) {
-				reader.fetchColumnFamily(new Text("event"));
-			}
-			if (!dryRun) {
-				long start = System.currentTimeMillis();
-				int count = 0;
-				String id = null;
-				for (Entry<Key, Value> entry: reader) {
-					count++;
-					if (print == -1 || count <= print) {
-						String text = entry.getKey() + "\t" + entry.getValue();
-						
-						if ((grep != null && text.contains(grep)) || grep == null) {
-							logger.info(text);
-						}
-					}
-				}
-				reader.close();
-				logger.info("Time: " + (System.currentTimeMillis() - start) + " ms");
-				logger.info("Count: " + count);
-			} else if (!iteratorSet) {
-				logger.info("No iterator was set from the provided parameters (and I'm not doing a full table scan... so there).");
-			} else {
-				logger.info("Dry run complete.");
-			}
-			logger.info("Done");
-			System.exit(0);
-		} catch (Exception e) {
-			logger.error(e, e);
-			System.exit(1);
-		}
-	}
-}

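Main above is a command-line probe for the scan-time iterator stack: every argument is parsed as a key=value pair; a single terms= value configures a SortedRangeIterator over the "index" family bounded by [term, term\u0000), a comma-separated terms= list switches to a GMDenIntersectingIterator, and filter= layers an OGC filter on top. A representative invocation, with placeholder connection values, might look like:

    java -jar target/iterator-test-0.2.0-SNAPSHOT.jar c=myInstance z=zk1:2181 u=root p=secret t=partition terms=widget partition=0 print=10
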
http://git-wip-us.apache.org/repos/asf/incubator-rya/blob/80faf06d/partition/iterator-test/src/main/java/dss/webservice/itr/Test.java
----------------------------------------------------------------------
diff --git a/partition/iterator-test/src/main/java/dss/webservice/itr/Test.java b/partition/iterator-test/src/main/java/dss/webservice/itr/Test.java
deleted file mode 100644
index 0b036b7..0000000
--- a/partition/iterator-test/src/main/java/dss/webservice/itr/Test.java
+++ /dev/null
@@ -1,9 +0,0 @@
-package dss.webservice.itr;
-
-import java.util.Map;
-
-import cloudbase.core.client.Connector;
-
-public interface Test {
-	void runTest(Map<String, String> request, Connector connector, String table, String auths);
-}

http://git-wip-us.apache.org/repos/asf/incubator-rya/blob/80faf06d/partition/iterator-test/src/main/java/dss/webservice/itr/test/AddTestRecords.java
----------------------------------------------------------------------
diff --git a/partition/iterator-test/src/main/java/dss/webservice/itr/test/AddTestRecords.java b/partition/iterator-test/src/main/java/dss/webservice/itr/test/AddTestRecords.java
deleted file mode 100644
index 2139528..0000000
--- a/partition/iterator-test/src/main/java/dss/webservice/itr/test/AddTestRecords.java
+++ /dev/null
@@ -1,43 +0,0 @@
-package dss.webservice.itr.test;
-
-import java.util.Map;
-
-import org.apache.hadoop.io.Text;
-
-import cloudbase.core.client.BatchWriter;
-import cloudbase.core.client.CBException;
-import cloudbase.core.client.CBSecurityException;
-import cloudbase.core.client.Connector;
-import cloudbase.core.client.MultiTableBatchWriter;
-import cloudbase.core.client.TableNotFoundException;
-import cloudbase.core.data.Mutation;
-import cloudbase.core.data.Value;
-import cloudbase.core.security.ColumnVisibility;
-import dss.webservice.itr.Test;
-
-public class AddTestRecords implements Test {
-
-	@Override
-	public void runTest(Map<String, String> request, Connector connector, String table, String auths) {
-		MultiTableBatchWriter mtbw = connector.createMultiTableBatchWriter(200000000, 500, 4);
-		try {
-			BatchWriter writer = mtbw.getBatchWriter(table);
-			Mutation m = new Mutation(new Text("elint//rdate:79899179//geokey:20223312022200"));
-			m.put(new Text("event"), new Text("02eacfa1-b548-11df-b72e-002219501672"), new ColumnVisibility(new Text("U&FOUO")), new Value("uuid~event\uFFFD02eacfa1-b548-11df-b72e-002219501672\u0000date\uFFFD20100820\u0000time~dss\uFFFD010226.000\u0000technology\uFFFDelint\u0000feedName\uFFFDParserBinarySpSigFlat\u0000systemName\uFFFDSP\u0000pddg\uFFFDBJ\u0000latitude\uFFFD46.79429069085071\u0000longitude\uFFFD9.852863417535763\u0000altitude\uFFFD1841.0\u0000geoerror~semimajor\uFFFD3709.1270902747297\u0000geoerror~semiminor\uFFFD1896.9438653491684\u0000geoerror~tilt\uFFFD68.68795738630202\u0000frequency\uFFFD\u0000cenot_elnot\uFFFD008LJ\u0000datetime\uFFFD2010-08-20T01:02:26.000Z".getBytes()));
-			
-			writer.addMutation(m);
-			mtbw.flush();
-			mtbw.close();
-		} catch (CBException e) {
-			// TODO Auto-generated catch block
-			e.printStackTrace();
-		} catch (CBSecurityException e) {
-			// TODO Auto-generated catch block
-			e.printStackTrace();
-		} catch (TableNotFoundException e) {
-			// TODO Auto-generated catch block
-			e.printStackTrace();
-		}
-	}
-
-}

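The single record above is written with column visibility U&FOUO, so it only comes back on scans whose Authorizations carry both tokens; the harness default CB_AUTH = "U,FOUO" in Main satisfies this. A minimal read-back sketch in the same Cloudbase API, with placeholder instance, servers, and credentials:

    ZooKeeperInstance zk = new ZooKeeperInstance("INSTANCENAME", "localhost:2181");
    Connector connector = new Connector(zk, "user", "pass".getBytes());
    // Both tokens are required by the U&FOUO visibility expression.
    Scanner scanner = connector.createScanner("partition_gi",
            new Authorizations("U,FOUO".split(",")));
    for (Map.Entry<Key, Value> entry : scanner) {
        System.out.println(entry.getKey() + "\t" + entry.getValue());
    }
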
http://git-wip-us.apache.org/repos/asf/incubator-rya/blob/80faf06d/partition/iterator-test/src/main/java/dss/webservice/itr/test/BaseTileTest.java
----------------------------------------------------------------------
diff --git a/partition/iterator-test/src/main/java/dss/webservice/itr/test/BaseTileTest.java b/partition/iterator-test/src/main/java/dss/webservice/itr/test/BaseTileTest.java
deleted file mode 100644
index a8f01a9..0000000
--- a/partition/iterator-test/src/main/java/dss/webservice/itr/test/BaseTileTest.java
+++ /dev/null
@@ -1,132 +0,0 @@
-package dss.webservice.itr.test;
-
-import java.util.ArrayList;
-import java.util.HashSet;
-import java.util.List;
-import java.util.Map;
-import java.util.Set;
-import java.util.Map.Entry;
-
-import org.apache.hadoop.io.Text;
-import org.apache.log4j.Logger;
-
-import cloudbase.core.client.BatchScanner;
-import cloudbase.core.client.Connector;
-import cloudbase.core.data.Key;
-import cloudbase.core.data.Range;
-import cloudbase.core.data.Value;
-import cloudbase.core.security.Authorizations;
-import dss.webservice.itr.Test;
-
-public class BaseTileTest implements Test {
-	private static final Logger logger = Logger.getLogger(BaseTileTest.class);
-	
-	String comboIndexTable = "index_v2";
-	String type = "hpcp";
-	
-	@Override
-	public void runTest(Map<String, String> request, Connector connector, String table, String auths) {
-		if (!request.containsKey("dates")) {
-			logger.warn("No 'dates' parameter supplied. e.g. dates=20100720,20100721...");
-			return;
-		}
-		
-		if (request.containsKey("type")) {
-			type = request.get("type");
-		}
-		
-		String[] dates = request.get("dates").split(",");		
-		
-		List<Long> comboTimes = new ArrayList<Long>();
-		List<Long> partTimes = new ArrayList<Long>();
-		List<Long> comboCounts = new ArrayList<Long>();
-		List<Long> partCounts = new ArrayList<Long>();
-		List<String> errors = new ArrayList<String>();
-		try {
-			for (String date: dates) {
-				long rdate = 99999999 - Long.parseLong(date);
-				for (int g = 0; g < 8; g++) {
-					String begin = type + "//rdate:" + rdate + "//geokey:" + g;
-					String end   = type + "//rdate:" + rdate + "//geokey:" + (g+1);
-					long count = 0;
-					Set<Range> ranges = new HashSet<Range>();
-
-					logger.info("Running test for " + begin + " ...");
-					// run combo index test
-					BatchScanner reader = connector.createBatchScanner(table, new Authorizations(auths.split(",")), 30);
-					ranges.add(new Range(new Key(new Text(begin)), true, new Key(new Text(end)), false));
-					
-					reader.setRanges(ranges);
-					long start = System.currentTimeMillis();
-					for (Entry<Key, Value> entry: reader) {
-						count++;
-					}
-					comboTimes.add(System.currentTimeMillis() - start);
-					comboCounts.add(count);
-					
-					logger.info("\tC count=" + count + " time=" + comboTimes.get(comboTimes.size() - 1) + " ms");
-					
-					count = 0;
-					
-					// run partition index test
-//					reader = connector.createBatchScanner(table, new Authorizations(auths.split(",")), 30);
-//					
-//					reader.setScanIterators(3, SortedRangeIterator.class.getName(), "ri");
-//					reader.setScanIteratorOption("ri", SortedRangeIterator.OPTION_LOWER_BOUND, begin.replace("geokey", "geoKey"));
-//					reader.setScanIteratorOption("ri", SortedRangeIterator.OPTION_UPPER_BOUND, end.replace("geokey", "geoKey"));
-//					
-//					ranges.clear();
-//					ranges.add(new Range(new Key(new Text("date:" + date)), true, new Key(new Text("date:" + date + "z")), false));
-//					reader.setRanges(ranges);
-//					
-//					start = System.currentTimeMillis();
-//					for (Entry<Key, Value> entry: reader) {
-//						count++;
-//					}
-//					partTimes.add(System.currentTimeMillis() - start);
-//					partCounts.add(count);
-//					
-//					if (count != comboCounts.get(comboCounts.size() - 1)) {
-//						String msg = "Counts differed for " + begin + " C: " + comboCounts.get(comboCounts.size() - 1) + " P: " + count; 
-//						logger.warn(msg);
-//						errors.add(msg);
-//					}
-//					logger.info("\tP count=" + count + " time=" + partTimes.get(partTimes.size() - 1) + " ms");
-				}
-			}
-			
-			logger.info("********************* RESULTS *********************");
-			logger.info("Tested all 0 level tiles on " + type + " for " + request.get("dates"));
-			//logger.info("This is a test of SortedRangeIterator performance");
-			
-			double comboSum = 0, partSum = 0;
-			for (int i = 0; i < comboTimes.size(); i++) {
-				comboSum += comboTimes.get(i);
-				//partSum += partTimes.get(i);
-			}
-			
-			logger.info("Average C Time: " + (comboSum / comboTimes.size()) + " ms");
-			//logger.info("Average P Time: " + (partSum / partTimes.size()) + " ms");
-			
-			comboSum = 0; 
-			partSum = 0;
-			
-			for (int i = 0; i < comboCounts.size(); i++) {
-				comboSum += comboCounts.get(i);
-				//partSum += partCounts.get(i);
-			}
-			
-			logger.info("Average C Count: " + (comboSum / comboCounts.size()));
-			//logger.info("Average P Count: " + (partSum / partCounts.size()));
-			
-			if (errors.size() > 0) {
-				logger.warn("ERRORS!!!:");
-				for (String e: errors) {
-					logger.warn(e);
-				}
-			}
-		} catch (Exception e) {
-			logger.error(e, e);
-		}
-	}
-}

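BaseTileTest leans on the reversed-date row prefix visible above: rdate = 99999999 - yyyymmdd, so more recent days sort lexicographically first. As a worked example, 99999999 - 20100820 = 79899179, which is exactly the rdate embedded in the row key written by AddTestRecords above. Each day is then swept as eight half-open geokey ranges, [type//rdate:R//geokey:g, type//rdate:R//geokey:g+1) for g = 0..7.
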
http://git-wip-us.apache.org/repos/asf/incubator-rya/blob/80faf06d/partition/iterator-test/src/main/java/dss/webservice/itr/test/ConversionTest.java
----------------------------------------------------------------------
diff --git a/partition/iterator-test/src/main/java/dss/webservice/itr/test/ConversionTest.java b/partition/iterator-test/src/main/java/dss/webservice/itr/test/ConversionTest.java
deleted file mode 100644
index 01abb31..0000000
--- a/partition/iterator-test/src/main/java/dss/webservice/itr/test/ConversionTest.java
+++ /dev/null
@@ -1,159 +0,0 @@
-package dss.webservice.itr.test;
-
-import java.util.ArrayList;
-import java.util.HashSet;
-import java.util.List;
-import java.util.Map;
-import java.util.Set;
-import java.util.Map.Entry;
-
-import org.apache.hadoop.io.Text;
-import org.apache.log4j.Logger;
-
-import ss.cloudbase.core.iterators.ConversionIterator;
-import cloudbase.core.client.BatchScanner;
-import cloudbase.core.client.Connector;
-import cloudbase.core.data.Key;
-import cloudbase.core.data.Range;
-import cloudbase.core.data.Value;
-import cloudbase.core.security.Authorizations;
-import dss.webservice.itr.Test;
-
-public class ConversionTest implements Test {
-	private static final Logger logger = Logger.getLogger(ConversionTest.class);
-	
-	String comboIndexTable = "index_v3";
-	String type = "hpcp";
-	
-	@Override
-	public void runTest(Map<String, String> request, Connector connector, String table, String auths) {
-		if (!request.containsKey("dates")) {
-			logger.warn("No 'dates' parameter supplied. e.g. dates=20100720,20100721...");
-			return;
-		}
-		
-		if (request.containsKey("type")) {
-			type = request.get("type");
-		}
-		
-		int threads = 5;
-		if (request.containsKey("threads")) {
-			threads = Integer.parseInt(request.remove("threads"));
-		}
-		
-		String[] dates = request.get("dates").split(",");		
-		
-		List<Long> baseTimes = new ArrayList<Long>();
-		List<Long> convertTimes = new ArrayList<Long>();
-		List<Long> baseCounts = new ArrayList<Long>();
-		List<Long> convertCounts = new ArrayList<Long>();
-		List<String> errors = new ArrayList<String>();
-		
-		List<Value> values = new ArrayList<Value>();
-		
-		try {
-			for (String date: dates) {
-				long rdate = 99999999 - Long.parseLong(date);
-				for (int g = 0; g < 8; g++) {
-					String begin = type + "//rdate:" + rdate + "//geokey:" + g;
-					String end   = type + "//rdate:" + rdate + "//geokey:" + (g+1);
-					long count = 0;
-					Set<Range> ranges = new HashSet<Range>();
-
-					logger.info("Running test for " + begin + " ...");
-					// run combo index test
-					BatchScanner reader = connector.createBatchScanner(table, new Authorizations(auths.split(",")), threads);
-					ranges.add(new Range(new Key(new Text(begin)), true, new Key(new Text(end)), false));
-					
-					reader.setRanges(ranges);
-					values.clear();
-					long start = System.currentTimeMillis();
-					for (Entry<Key, Value> entry: reader) {
-						values.add(entry.getValue());
-						count++;
-					}
-					baseTimes.add(System.currentTimeMillis() - start);
-					baseCounts.add(count);
-					
-					logger.info("\tBase    count=" + count + " time=" + baseTimes.get(baseTimes.size() - 1) + " ms");
-					
-					count = 0;
-					for (Value value: values) {
-						logger.info("\t"  + value.toString());
-						count++;
-						if (count == 2) {
-							break;
-						}
-					}
-					
-					count = 0;
-					values.clear();
-					
-					reader = connector.createBatchScanner(table, new Authorizations(auths.split(",")), threads);
-					ranges.add(new Range(new Key(new Text(begin)), true, new Key(new Text(end)), false));
-					
-					reader.setScanIterators(50, ConversionIterator.class.getName(), "ci");
-					reader.setScanIteratorOption("ci", ConversionIterator.OPTION_CONVERSIONS, ConversionIterator.encodeConversions(new String[] {
-						"frequency / 1000000"
-					}));
-					
-					reader.setRanges(ranges);
-					values.clear();
-					start = System.currentTimeMillis();
-					for (Entry<Key, Value> entry: reader) {
-						values.add(entry.getValue());
-						count++;
-					}
-					
-					convertTimes.add(System.currentTimeMillis() - start);
-					convertCounts.add(count);
-					
-					logger.info("\tConvert count=" + count + " time=" + convertTimes.get(convertTimes.size() - 1) + " ms");
-					
-					count = 0;
-					for (Value value: values) {
-						logger.info("\t"  + value.toString());
-						count++;
-						if (count == 2) {
-							break;
-						}
-					}
-				}
-			}
-			
-			logger.info("********************* RESULTS *********************");
-			logger.info("Tested all 0 level tiles on " + type + " for " + request.get("dates"));
-			logger.info("This is a test of ConversionIterator performance");
-			
-			double baseSum = 0, convertSum = 0;
-			for (int i = 0; i < baseTimes.size(); i++) {
-				baseSum += baseTimes.get(i);
-				convertSum += convertTimes.get(i);
-			}
-			
-			logger.info("Average Base    Time: " + (baseSum / baseTimes.size()) + " ms");
-			logger.info("Average Convert Time: " + (convertSum / convertTimes.size()) + " ms");
-			
-			baseSum = 0; 
-			convertSum = 0;
-			
-			for (int i = 0; i < baseCounts.size(); i++) {
-				baseSum += baseCounts.get(i);
-				convertSum += convertCounts.get(i);
-			}
-			
-			logger.info("Average Base    Count: " + (baseSum / baseCounts.size()));
-			logger.info("Average Convert Count: " + (convertSum / convertCounts.size()));
-			
-			if (errors.size() > 0) {
-				logger.warn("ERRORS!!!:");
-				for (String e: errors) {
-					logger.warn(e);
-				}
-			}
-		} catch (Exception e) {
-			logger.error(e, e);
-		}
-	}
-
-}

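ConversionTest times the same geokey tile sweeps twice: once raw, and once with a server-side ConversionIterator applying "frequency / 1000000" to each record, i.e. rescaling the frequency field by a factor of a million (presumably Hz to MHz; a stored 95000000, to pick a hypothetical value, would come back as 95). It then reports average times and counts so the iterator's overhead can be compared against the baseline scan.
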
http://git-wip-us.apache.org/repos/asf/incubator-rya/blob/80faf06d/partition/iterator-test/test.sh
----------------------------------------------------------------------
diff --git a/partition/iterator-test/test.sh b/partition/iterator-test/test.sh
deleted file mode 100644
index 5a6cf1f..0000000
--- a/partition/iterator-test/test.sh
+++ /dev/null
@@ -1,3 +0,0 @@
-#!/bin/bash
-
-java -jar target/iterator-test-0.2.0-SNAPSHOT.jar z=localhost t=test $@

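test.sh above simply forwards its arguments to the Main harness with z=localhost and t=test pre-set, so any of the key=value options parsed there can be appended, e.g. ./test.sh u=root p=secret terms=widget print=5 (credentials here are placeholders).
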
http://git-wip-us.apache.org/repos/asf/incubator-rya/blob/80faf06d/partition/mr.partition.rdf/pom.xml
----------------------------------------------------------------------
diff --git a/partition/mr.partition.rdf/pom.xml b/partition/mr.partition.rdf/pom.xml
deleted file mode 100644
index bb1f0b2..0000000
--- a/partition/mr.partition.rdf/pom.xml
+++ /dev/null
@@ -1,79 +0,0 @@
-<?xml version="1.0" encoding="UTF-8"?>
-<project xmlns="http://maven.apache.org/POM/4.0.0"
-         xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance"
-         xsi:schemaLocation="http://maven.apache.org/POM/4.0.0 http://maven.apache.org/xsd/maven-4.0.0.xsd">
-    <modelVersion>4.0.0</modelVersion>
-    <parent>
-        <groupId>mvm.rya</groupId>
-		<artifactId>parent</artifactId>
-		<version>2.0.0-SNAPSHOT</version>
-    </parent>
-
-    <groupId>mvm.mmrts.rdf</groupId>
-    <artifactId>mr.partition.rdf</artifactId>
-    <version>1.0.0-SNAPSHOT</version>
-    <name>${project.groupId}.${project.artifactId}</name>
-    
-    <dependencies>
-        <dependency>
-            <groupId>mvm.mmrts.rdf</groupId>
-            <artifactId>partition.rdf</artifactId>
-            <version>1.0.0-SNAPSHOT</version>
-        </dependency>
-        <dependency>
-            <groupId>org.openrdf.sesame</groupId>
-            <artifactId>sesame-queryparser-sparql</artifactId>
-            <version>${openrdf.sesame.version}</version>
-        </dependency>
-        <dependency>
-            <groupId>org.apache.hadoop</groupId>
-            <artifactId>hadoop-common</artifactId>
-            <version>0.20.2</version>
-        </dependency>
-        <dependency>
-            <groupId>org.apache.mrunit</groupId>
-            <artifactId>mrunit</artifactId>
-            <version>0.5.0-incubating</version>
-            <scope>test</scope>
-        </dependency>
-    </dependencies>
-    <build>
-		<plugins>
-			<plugin>
-                <!-- NOTE: We don't need a groupId specification because the group is
-                        org.apache.maven.plugins ...which is assumed by default. -->
-                <artifactId>maven-assembly-plugin</artifactId>
-                <dependencies>
-                    <dependency>
-                        <groupId>mvm.cloud</groupId>
-                        <artifactId>hadoop-job-assembly</artifactId>
-                        <version>1.0.0-SNAPSHOT</version>
-                    </dependency>
-                </dependencies>
-                <executions>
-                    <execution>
-                        <id>make-assembly</id>
-                        <phase>package</phase>
-                        <goals>
-                            <goal>single</goal>
-                        </goals>
-                        <configuration>
-                            <attach>false</attach>
-                            <descriptors>
-                                <descriptor>assemblies/job.xml</descriptor>
-                            </descriptors>
-                        </configuration>
-                    </execution>
-                </executions>
-            </plugin>
-			<plugin>
-				<groupId>org.apache.maven.plugins</groupId>
-				<artifactId>maven-compiler-plugin</artifactId>
-				<configuration>
-					<source>1.6</source>
-					<target>1.6</target>
-				</configuration>
-			</plugin>
-		</plugins>
-	</build>
-</project>



http://git-wip-us.apache.org/repos/asf/incubator-rya/blob/80faf06d/dao/cloudbase.rya/src/main/java/mvm/rya/cloudbase/query/CloudbaseRyaQueryEngine.java
----------------------------------------------------------------------
diff --git a/dao/cloudbase.rya/src/main/java/mvm/rya/cloudbase/query/CloudbaseRyaQueryEngine.java b/dao/cloudbase.rya/src/main/java/mvm/rya/cloudbase/query/CloudbaseRyaQueryEngine.java
deleted file mode 100644
index a62aedb..0000000
--- a/dao/cloudbase.rya/src/main/java/mvm/rya/cloudbase/query/CloudbaseRyaQueryEngine.java
+++ /dev/null
@@ -1,385 +0,0 @@
-package mvm.rya.cloudbase.query;
-
-import cloudbase.core.client.BatchScanner;
-import cloudbase.core.client.Connector;
-import cloudbase.core.client.Scanner;
-import cloudbase.core.client.ScannerBase;
-import cloudbase.core.data.Key;
-import cloudbase.core.data.Range;
-import cloudbase.core.data.Value;
-import cloudbase.core.iterators.FilteringIterator;
-import cloudbase.core.iterators.RegExIterator;
-import cloudbase.core.iterators.filter.AgeOffFilter;
-import cloudbase.core.iterators.filter.RegExFilter;
-import cloudbase.core.security.Authorizations;
-import com.google.common.base.Function;
-import com.google.common.base.Preconditions;
-import com.google.common.collect.FluentIterable;
-import com.google.common.collect.Iterators;
-import mango.collect.CloseableIterable;
-import mango.collect.CloseableIterables;
-import mango.collect.FluentCloseableIterable;
-import info.aduna.iteration.CloseableIteration;
-import mvm.rya.api.RdfCloudTripleStoreConstants;
-import mvm.rya.api.RdfCloudTripleStoreUtils;
-import mvm.rya.api.domain.RyaRange;
-import mvm.rya.api.domain.RyaStatement;
-import mvm.rya.api.domain.RyaType;
-import mvm.rya.api.domain.RyaURI;
-import mvm.rya.api.layout.TableLayoutStrategy;
-import mvm.rya.api.persist.RyaDAOException;
-import mvm.rya.api.persist.query.BatchRyaQuery;
-import mvm.rya.api.persist.query.RyaQuery;
-import mvm.rya.api.persist.query.RyaQueryEngine;
-import mvm.rya.api.query.strategy.ByteRange;
-import mvm.rya.api.query.strategy.TriplePatternStrategy;
-import mvm.rya.api.resolver.RyaContext;
-import mvm.rya.api.resolver.triple.TripleRowRegex;
-import mvm.rya.api.utils.CloseableIterableIteration;
-import mvm.rya.cloudbase.CloudbaseRdfConfiguration;
-import mvm.rya.iterators.LimitingAgeOffFilter;
-import org.apache.commons.logging.Log;
-import org.apache.commons.logging.LogFactory;
-import org.apache.hadoop.io.Text;
-import org.openrdf.query.BindingSet;
-
-import java.io.IOException;
-import java.util.*;
-
-import static mvm.rya.api.RdfCloudTripleStoreConstants.TABLE_LAYOUT;
-import static mvm.rya.api.RdfCloudTripleStoreUtils.layoutToTable;
-
-/**
- * Date: 7/17/12
- * Time: 9:28 AM
- */
-public class CloudbaseRyaQueryEngine implements RyaQueryEngine<CloudbaseRdfConfiguration> {
-
-    private Log logger = LogFactory.getLog(CloudbaseRyaQueryEngine.class);
-    private CloudbaseRdfConfiguration configuration;
-    private RyaContext ryaContext = RyaContext.getInstance();
-    private Connector connector;
-    private Map<TABLE_LAYOUT, KeyValueToRyaStatementFunction> keyValueToRyaStatementFunctionMap = new HashMap<TABLE_LAYOUT, KeyValueToRyaStatementFunction>();
-
-    public CloudbaseRyaQueryEngine(Connector connector) {
-        this(connector, new CloudbaseRdfConfiguration());
-    }
-
-    public CloudbaseRyaQueryEngine(Connector connector, CloudbaseRdfConfiguration conf) {
-        this.connector = connector;
-        this.configuration = conf;
-
-        keyValueToRyaStatementFunctionMap.put(TABLE_LAYOUT.SPO, new KeyValueToRyaStatementFunction(TABLE_LAYOUT.SPO));
-        keyValueToRyaStatementFunctionMap.put(TABLE_LAYOUT.PO, new KeyValueToRyaStatementFunction(TABLE_LAYOUT.PO));
-        keyValueToRyaStatementFunctionMap.put(TABLE_LAYOUT.OSP, new KeyValueToRyaStatementFunction(TABLE_LAYOUT.OSP));
-    }
-
-    @Override
-    public CloseableIteration<RyaStatement, RyaDAOException> query(RyaStatement stmt, CloudbaseRdfConfiguration conf) throws RyaDAOException {
-        if (conf == null) {
-            conf = configuration;
-        }
-
-        RyaQuery ryaQuery = RyaQuery.builder(stmt).load(conf).build();
-        CloseableIterable<RyaStatement> results = query(ryaQuery);
-
-        return new CloseableIterableIteration<RyaStatement, RyaDAOException>(results);
-    }
-
-    protected String getData(RyaType ryaType) {
-        return (ryaType != null) ? (ryaType.getData()) : (null);
-    }
-
-    @Override
-    public CloseableIteration<? extends Map.Entry<RyaStatement, BindingSet>, RyaDAOException> queryWithBindingSet(Collection<Map.Entry<RyaStatement, BindingSet>> stmts, CloudbaseRdfConfiguration conf) throws RyaDAOException {
-        if (conf == null) {
-            conf = configuration;
-        }
-        //query configuration
-        Authorizations authorizations = conf.getAuthorizations();
-        Long ttl = conf.getTtl();
-        Long maxResults = conf.getLimit();
-        Integer maxRanges = conf.getMaxRangesForScanner();
-        Integer numThreads = conf.getNumThreads();
-
-        //TODO: cannot span multiple tables here
-        try {
-            Collection<Range> ranges = new HashSet<Range>();
-            RangeBindingSetEntries rangeMap = new RangeBindingSetEntries();
-            TABLE_LAYOUT layout = null;
-            RyaURI context = null;
-            TriplePatternStrategy strategy = null;
-            for (Map.Entry<RyaStatement, BindingSet> stmtbs : stmts) {
-                RyaStatement stmt = stmtbs.getKey();
-                context = stmt.getContext(); //TODO: This will be overwritten
-                BindingSet bs = stmtbs.getValue();
-                strategy = ryaContext.retrieveStrategy(stmt);
-                if (strategy == null) {
-                    throw new IllegalArgumentException("TriplePattern[" + stmt + "] not supported");
-                }
-
-                Map.Entry<RdfCloudTripleStoreConstants.TABLE_LAYOUT, ByteRange> entry =
-                        strategy.defineRange(stmt.getSubject(), stmt.getPredicate(), stmt.getObject(), stmt.getContext(), conf);
-
-                //use range to set scanner
-                //populate scanner based on authorizations, ttl
-                layout = entry.getKey();
-                ByteRange byteRange = entry.getValue();
-                Range range = new Range(new Text(byteRange.getStart()), new Text(byteRange.getEnd()));
-                ranges.add(range);
-                rangeMap.ranges.add(new RdfCloudTripleStoreUtils.CustomEntry<Range, BindingSet>(range, bs));
-            }
-            //no ranges
-            if (layout == null) return null;
-            String regexSubject = conf.getRegexSubject();
-            String regexPredicate = conf.getRegexPredicate();
-            String regexObject = conf.getRegexObject();
-            TripleRowRegex tripleRowRegex = strategy.buildRegex(regexSubject, regexPredicate, regexObject, null, null);
-
-            String table = layoutToTable(layout, conf);
-            boolean useBatchScanner = ranges.size() > maxRanges;
-            RyaStatementBindingSetKeyValueIterator iterator = null;
-            if (useBatchScanner) {
-                ScannerBase scanner = connector.createBatchScanner(table, authorizations, numThreads);
-                ((BatchScanner) scanner).setRanges(ranges);
-                fillScanner(scanner, context, ttl, tripleRowRegex);
-                iterator = new RyaStatementBindingSetKeyValueIterator(layout, scanner, rangeMap);
-            } else {
-                Scanner scannerBase = null;
-                Iterator<Map.Entry<Key, Value>>[] iters = new Iterator[ranges.size()];
-                int i = 0;
-                for (Range range : ranges) {
-                    scannerBase = connector.createScanner(table, authorizations);
-                    scannerBase.setRange(range);
-                    fillScanner(scannerBase, context, ttl, tripleRowRegex);
-                    iters[i] = scannerBase.iterator();
-                    i++;
-                }
-                iterator = new RyaStatementBindingSetKeyValueIterator(layout, Iterators.concat(iters), rangeMap);
-            }
-            if (maxResults != null) {
-                iterator.setMaxResults(maxResults);
-            }
-            return iterator;
-        } catch (Exception e) {
-            throw new RyaDAOException(e);
-        }
-
-    }
-
-    @Override
-    public CloseableIteration<RyaStatement, RyaDAOException> batchQuery(Collection<RyaStatement> stmts, CloudbaseRdfConfiguration conf)
-            throws RyaDAOException {
-        if (conf == null) {
-            conf = configuration;
-        }
-
-        BatchRyaQuery batchRyaQuery = BatchRyaQuery.builder(stmts).load(conf).build();
-        CloseableIterable<RyaStatement> results = query(batchRyaQuery);
-
-        return new CloseableIterableIteration<RyaStatement, RyaDAOException>(results);
-    }
-
-    @Override
-    public CloseableIterable<RyaStatement> query(RyaQuery ryaQuery) throws RyaDAOException {
-        Preconditions.checkNotNull(ryaQuery);
-        RyaStatement stmt = ryaQuery.getQuery();
-        Preconditions.checkNotNull(stmt);
-
-        //query configuration
-        String[] auths = ryaQuery.getAuths();
-        Authorizations authorizations = auths != null ? new Authorizations(auths) : configuration.getAuthorizations();
-        Long ttl = ryaQuery.getTtl();
-        Long currentTime = ryaQuery.getCurrentTime();
-        Long maxResults = ryaQuery.getMaxResults();
-        Integer batchSize = ryaQuery.getBatchSize();
-        String regexSubject = ryaQuery.getRegexSubject();
-        String regexPredicate = ryaQuery.getRegexPredicate();
-        String regexObject = ryaQuery.getRegexObject();
-        TableLayoutStrategy tableLayoutStrategy = configuration.getTableLayoutStrategy();
-
-        try {
-            //find triple pattern range
-            TriplePatternStrategy strategy = ryaContext.retrieveStrategy(stmt);
-            TABLE_LAYOUT layout;
-            Range range;
-            RyaURI subject = stmt.getSubject();
-            RyaURI predicate = stmt.getPredicate();
-            RyaType object = stmt.getObject();
-            RyaURI context = stmt.getContext();
-            String qualifier = stmt.getQualifer();
-            TripleRowRegex tripleRowRegex = null;
-            if (strategy != null) {
-                //otherwise, full table scan is supported
-                Map.Entry<RdfCloudTripleStoreConstants.TABLE_LAYOUT, ByteRange> entry =
-                        strategy.defineRange(subject, predicate, object, context, null);
-                layout = entry.getKey();
-                ByteRange byteRange = entry.getValue();
-                range = new Range(new Text(byteRange.getStart()), new Text(byteRange.getEnd()));
-
-                byte[] objectTypeInfo = null;
-                if (object != null) {
-                    //TODO: Not good to serialize this twice
-                    if (object instanceof RyaRange) {
-                        objectTypeInfo = RyaContext.getInstance().serializeType(((RyaRange) object).getStart())[1];
-                    } else {
-                        objectTypeInfo = RyaContext.getInstance().serializeType(object)[1];
-                    }
-                }
-
-                tripleRowRegex = strategy.buildRegex(regexSubject, regexPredicate, regexObject, null, objectTypeInfo);
-            } else {
-                range = new Range();
-                layout = TABLE_LAYOUT.SPO;
-            }
-
-            //use range to set scanner
-            //populate scanner based on authorizations, ttl
-            String table = layoutToTable(layout, tableLayoutStrategy);
-            Scanner scanner = connector.createScanner(table, authorizations);
-            int itrLevel = 20;
-            if (context != null && qualifier != null) {
-                scanner.fetchColumn(new Text(context.getData()), new Text(qualifier));
-            } else if (context != null) {
-                scanner.fetchColumnFamily(new Text(context.getData()));
-            } else if (qualifier != null) {
-                scanner.setScanIterators(itrLevel++, RegExIterator.class.getName(), "riq");
-                scanner.setScanIteratorOption("riq", RegExFilter.COLQ_REGEX, qualifier);
-            }
-            if (ttl != null) {
-                scanner.setScanIterators(itrLevel++, FilteringIterator.class.getName(), "fi");
-                scanner.setScanIteratorOption("fi", "0", LimitingAgeOffFilter.class.getName());
-                scanner.setScanIteratorOption("fi", "0." + LimitingAgeOffFilter.TTL, ttl.toString());
-                if (currentTime != null)
-                    scanner.setScanIteratorOption("fi", "0." + LimitingAgeOffFilter.CURRENT_TIME, currentTime.toString());
-            }
-            scanner.setRange(range);
-            if (batchSize != null) {
-                scanner.setBatchSize(batchSize);
-            }
-            //TODO: Fill in context regex
-            if (tripleRowRegex != null) {
-                scanner.setScanIterators(itrLevel++, RegExIterator.class.getName(), "ri");
-                scanner.setScanIteratorOption("ri", RegExFilter.ROW_REGEX, tripleRowRegex.getRow());
-            }
-
-            FluentCloseableIterable<RyaStatement> results = FluentCloseableIterable.from(new ScannerCloseableIterable(scanner))
-                    .transform(keyValueToRyaStatementFunctionMap.get(layout));
-            if (maxResults != null) {
-                results = results.limit(maxResults.intValue());
-            }
-
-            return results;
-        } catch (Exception e) {
-            throw new RyaDAOException(e);
-        }
-    }
-
-    @Override
-    public CloseableIterable<RyaStatement> query(BatchRyaQuery ryaQuery) throws RyaDAOException {
-        Preconditions.checkNotNull(ryaQuery);
-        Iterable<RyaStatement> stmts = ryaQuery.getQueries();
-        Preconditions.checkNotNull(stmts);
-
-        //query configuration
-        String[] auths = ryaQuery.getAuths();
-        final Authorizations authorizations = auths != null ? new Authorizations(auths) : configuration.getAuthorizations();
-        final Long ttl = ryaQuery.getTtl();
-        Long currentTime = ryaQuery.getCurrentTime();
-        Long maxResults = ryaQuery.getMaxResults();
-        Integer batchSize = ryaQuery.getBatchSize();
-        Integer numQueryThreads = ryaQuery.getNumQueryThreads();
-        String regexSubject = ryaQuery.getRegexSubject();
-        String regexPredicate = ryaQuery.getRegexPredicate();
-        String regexObject = ryaQuery.getRegexObject();
-        TableLayoutStrategy tableLayoutStrategy = configuration.getTableLayoutStrategy();
-        int maxRanges = ryaQuery.getMaxRanges();
-
-        //TODO: cannot span multiple tables here
-        try {
-            Collection<Range> ranges = new HashSet<Range>();
-            TABLE_LAYOUT layout = null;
-            RyaURI context = null;
-            TriplePatternStrategy strategy = null;
-            for (RyaStatement stmt : stmts) {
-                context = stmt.getContext(); //TODO: This will be overwritten
-                strategy = ryaContext.retrieveStrategy(stmt);
-                if (strategy == null) {
-                    throw new IllegalArgumentException("TriplePattern[" + stmt + "] not supported");
-                }
-
-                Map.Entry<RdfCloudTripleStoreConstants.TABLE_LAYOUT, ByteRange> entry =
-                        strategy.defineRange(stmt.getSubject(), stmt.getPredicate(), stmt.getObject(), stmt.getContext(), null);
-
-                //use range to set scanner
-                //populate scanner based on authorizations, ttl
-                layout = entry.getKey();
-                ByteRange byteRange = entry.getValue();
-                Range range = new Range(new Text(byteRange.getStart()), new Text(byteRange.getEnd()));
-                ranges.add(range);
-            }
-            //no ranges
-            if (layout == null) throw new IllegalArgumentException("No table layout specified");
-
-            final TripleRowRegex tripleRowRegex = strategy.buildRegex(regexSubject, regexPredicate, regexObject, null, null);
-
-            final String table = layoutToTable(layout, tableLayoutStrategy);
-            boolean useBatchScanner = ranges.size() > maxRanges;
-            FluentCloseableIterable<RyaStatement> results = null;
-            if (useBatchScanner) {
-                BatchScanner scanner = connector.createBatchScanner(table, authorizations, numQueryThreads);
-                scanner.setRanges(ranges);
-                fillScanner(scanner, context, ttl, tripleRowRegex);
-                results = FluentCloseableIterable.from(new BatchScannerCloseableIterable(scanner)).transform(keyValueToRyaStatementFunctionMap.get(layout));
-            } else {
-                final RyaURI fcontext = context;
-                FluentIterable<RyaStatement> fluent = FluentIterable.from(ranges).transformAndConcat(new Function<Range, Iterable<Map.Entry<Key, Value>>>() {
-                    @Override
-                    public Iterable<Map.Entry<Key, Value>> apply(Range range) {
-                        try {
-                            Scanner scanner = connector.createScanner(table, authorizations);
-                            scanner.setRange(range);
-                            fillScanner(scanner, fcontext, ttl, tripleRowRegex);
-                            return scanner;
-                        } catch (Exception e) {
-                            throw new RuntimeException(e);
-                        }
-                    }
-                }).transform(keyValueToRyaStatementFunctionMap.get(layout));
-
-                results = FluentCloseableIterable.from(CloseableIterables.wrap(fluent));
-            }
-            if (maxResults != null) {
-                results = results.limit(maxResults.intValue());
-            }
-            return results;
-        } catch (Exception e) {
-            throw new RyaDAOException(e);
-        }
-    }
-
-    protected void fillScanner(ScannerBase scanner, RyaURI context, Long ttl, TripleRowRegex tripleRowRegex) throws IOException {
-        if (context != null) {
-            scanner.fetchColumnFamily(new Text(context.getData()));
-        }
-        if (ttl != null) {
-            scanner.setScanIterators(9, FilteringIterator.class.getName(), "fi");
-            scanner.setScanIteratorOption("fi", "0", AgeOffFilter.class.getName());
-            scanner.setScanIteratorOption("fi", "0.ttl", ttl.toString());
-        }
-        if (tripleRowRegex != null) {
-            scanner.setScanIterators(11, RegExIterator.class.getName(), "ri");
-            scanner.setScanIteratorOption("ri", RegExFilter.ROW_REGEX, tripleRowRegex.getRow());
-        }
-    }
-
-    @Override
-    public void setConf(CloudbaseRdfConfiguration conf) {
-        this.configuration = conf;
-    }
-
-    @Override
-    public CloudbaseRdfConfiguration getConf() {
-        return configuration;
-    }
-}
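
For context on what is being removed: the engine above follows one flow for every triple pattern -- compute a row range from the pattern, open a scanner over the matching table layout, attach TTL/regex iterators, and transform the raw key/value entries into RyaStatements. The same flow survives in the retained Accumulo DAO. A minimal sketch of that flow against Accumulo's client API (the table name, row bounds, and the deserialization step are placeholders, not the retained code):

    import java.util.Map;

    import org.apache.accumulo.core.client.Connector;
    import org.apache.accumulo.core.client.Scanner;
    import org.apache.accumulo.core.data.Key;
    import org.apache.accumulo.core.data.Range;
    import org.apache.accumulo.core.data.Value;
    import org.apache.accumulo.core.security.Authorizations;
    import org.apache.hadoop.io.Text;

    public class RangeScanSketch {
        // Sketch only: "spo_table" and the row bounds stand in for the values
        // a TriplePatternStrategy.defineRange(...) call would compute.
        public static void scan(Connector connector) throws Exception {
            Scanner scanner = connector.createScanner("spo_table", new Authorizations());
            scanner.setRange(new Range(new Text("startRow"), new Text("endRow")));
            for (Map.Entry<Key, Value> entry : scanner) {
                // deserialize entry.getKey()/entry.getValue() into a RyaStatement here
            }
        }
    }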

http://git-wip-us.apache.org/repos/asf/incubator-rya/blob/80faf06d/dao/cloudbase.rya/src/main/java/mvm/rya/cloudbase/query/KeyValueToRyaStatementFunction.java
----------------------------------------------------------------------
diff --git a/dao/cloudbase.rya/src/main/java/mvm/rya/cloudbase/query/KeyValueToRyaStatementFunction.java b/dao/cloudbase.rya/src/main/java/mvm/rya/cloudbase/query/KeyValueToRyaStatementFunction.java
deleted file mode 100644
index bb92c23..0000000
--- a/dao/cloudbase.rya/src/main/java/mvm/rya/cloudbase/query/KeyValueToRyaStatementFunction.java
+++ /dev/null
@@ -1,47 +0,0 @@
-package mvm.rya.cloudbase.query;
-
-import cloudbase.core.data.Key;
-import cloudbase.core.data.Value;
-import com.google.common.base.Function;
-import mvm.rya.api.domain.RyaStatement;
-import mvm.rya.api.resolver.RyaContext;
-import mvm.rya.api.resolver.triple.TripleRow;
-import mvm.rya.api.resolver.triple.TripleRowResolverException;
-
-import java.util.Map;
-
-import static mvm.rya.api.RdfCloudTripleStoreConstants.TABLE_LAYOUT;
-
-/**
- * Date: 1/30/13
- * Time: 2:09 PM
- */
-public class KeyValueToRyaStatementFunction implements Function<Map.Entry<Key, Value>, RyaStatement> {
-
-    private TABLE_LAYOUT tableLayout;
-
-    public KeyValueToRyaStatementFunction(TABLE_LAYOUT tableLayout) {
-        this.tableLayout = tableLayout;
-    }
-
-    @Override
-    public RyaStatement apply(Map.Entry<Key, Value> input) {
-        Key key = input.getKey();
-        Value value = input.getValue();
-        RyaStatement statement = null;
-        try {
-            statement = RyaContext.getInstance().deserializeTriple(tableLayout,
-                    new TripleRow(key.getRowData().toArray(),
-                            key.getColumnFamilyData().toArray(),
-                            key.getColumnQualifierData().toArray(),
-                            key.getTimestamp(),
-                            key.getColumnVisibilityData().toArray(),
-                            (value != null) ? value.get() : null
-                    ));
-        } catch (TripleRowResolverException e) {
-            throw new RuntimeException(e);
-        }
-
-        return statement;
-    }
-}
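
The function above is how the DAO turned scan output into statements; the query code earlier in this diff chains it through Guava's FluentIterable. A hedged usage sketch (scanner setup as above is assumed, and the SPO layout is chosen arbitrarily):

    // Sketch only: mirrors the transform(...) chaining in the deleted DAO;
    // a cloudbase Scanner is an Iterable<Map.Entry<Key, Value>>.
    static Iterable<RyaStatement> toStatements(Scanner scanner) {
        return FluentIterable.from(scanner)
                .transform(new KeyValueToRyaStatementFunction(TABLE_LAYOUT.SPO));
    }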

http://git-wip-us.apache.org/repos/asf/incubator-rya/blob/80faf06d/dao/cloudbase.rya/src/main/java/mvm/rya/cloudbase/query/RangeBindingSetEntries.java
----------------------------------------------------------------------
diff --git a/dao/cloudbase.rya/src/main/java/mvm/rya/cloudbase/query/RangeBindingSetEntries.java b/dao/cloudbase.rya/src/main/java/mvm/rya/cloudbase/query/RangeBindingSetEntries.java
deleted file mode 100644
index 69c6147..0000000
--- a/dao/cloudbase.rya/src/main/java/mvm/rya/cloudbase/query/RangeBindingSetEntries.java
+++ /dev/null
@@ -1,37 +0,0 @@
-package mvm.rya.cloudbase.query;
-
-import cloudbase.core.data.Key;
-import cloudbase.core.data.Range;
-import org.openrdf.query.BindingSet;
-
-import java.util.ArrayList;
-import java.util.Collection;
-import java.util.Map;
-
-/**
- * Class RangeBindingSetCollection
- * Date: Feb 23, 2011
- * Time: 10:15:48 AM
- */
-public class RangeBindingSetEntries {
-    public Collection<Map.Entry<Range, BindingSet>> ranges;
-
-    public RangeBindingSetEntries() {
-        this(new ArrayList<Map.Entry<Range, BindingSet>>());
-    }
-
-    public RangeBindingSetEntries(Collection<Map.Entry<Range, BindingSet>> ranges) {
-        this.ranges = ranges;
-    }
-
-    public Collection<BindingSet> containsKey(Key key) {
-        //TODO: need to find a better way to sort these and pull
-        //TODO: maybe fork/join here
-        Collection<BindingSet> bss = new ArrayList<BindingSet>();
-        for (Map.Entry<Range, BindingSet> entry : ranges) {
-            if (entry.getKey().contains(key))
-                bss.add(entry.getValue());
-        }
-        return bss;
-    }
-}
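
As the TODOs note, containsKey is a linear scan: every stored range is tested against the key, and every match contributes its binding set. A short hypothetical example of that contract (rangeA, rangeB, bsA, bsB, and key are placeholders):

    // Sketch only: every stored range that contains the key contributes
    // its binding set to the result.
    static Collection<BindingSet> demo(Range rangeA, BindingSet bsA,
                                       Range rangeB, BindingSet bsB, Key key) {
        RangeBindingSetEntries entries = new RangeBindingSetEntries();
        entries.ranges.add(new RdfCloudTripleStoreUtils.CustomEntry<Range, BindingSet>(rangeA, bsA));
        entries.ranges.add(new RdfCloudTripleStoreUtils.CustomEntry<Range, BindingSet>(rangeB, bsB));
        return entries.containsKey(key); // both bsA and bsB if both ranges contain key
    }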

http://git-wip-us.apache.org/repos/asf/incubator-rya/blob/80faf06d/dao/cloudbase.rya/src/main/java/mvm/rya/cloudbase/query/RyaStatementBindingSetKeyValueIterator.java
----------------------------------------------------------------------
diff --git a/dao/cloudbase.rya/src/main/java/mvm/rya/cloudbase/query/RyaStatementBindingSetKeyValueIterator.java b/dao/cloudbase.rya/src/main/java/mvm/rya/cloudbase/query/RyaStatementBindingSetKeyValueIterator.java
deleted file mode 100644
index cef9eb6..0000000
--- a/dao/cloudbase.rya/src/main/java/mvm/rya/cloudbase/query/RyaStatementBindingSetKeyValueIterator.java
+++ /dev/null
@@ -1,129 +0,0 @@
-package mvm.rya.cloudbase.query;
-
-import cloudbase.core.client.BatchScanner;
-import cloudbase.core.client.Scanner;
-import cloudbase.core.client.ScannerBase;
-import cloudbase.core.data.Key;
-import cloudbase.core.data.Value;
-import info.aduna.iteration.CloseableIteration;
-import mvm.rya.api.RdfCloudTripleStoreUtils;
-import mvm.rya.api.domain.RyaStatement;
-import mvm.rya.api.persist.RyaDAOException;
-import mvm.rya.api.resolver.RyaContext;
-import mvm.rya.api.resolver.triple.TripleRow;
-import mvm.rya.api.resolver.triple.TripleRowResolverException;
-import mvm.rya.cloudbase.BatchScannerIterator;
-import org.openrdf.query.BindingSet;
-
-import java.util.Collection;
-import java.util.Iterator;
-import java.util.Map;
-
-import static mvm.rya.api.RdfCloudTripleStoreConstants.TABLE_LAYOUT;
-
-/**
- * Date: 7/17/12
- * Time: 11:48 AM
- */
-public class RyaStatementBindingSetKeyValueIterator implements CloseableIteration<Map.Entry<RyaStatement, BindingSet>, RyaDAOException> {
-    private Iterator<Map.Entry<Key, Value>> dataIterator;
-    private TABLE_LAYOUT tableLayout;
-    private Long maxResults = -1L;
-    private ScannerBase scanner;
-    private boolean isBatchScanner;
-    private RangeBindingSetEntries rangeMap;
-    private Iterator<BindingSet> bsIter;
-    private RyaStatement statement;
-
-    public RyaStatementBindingSetKeyValueIterator(TABLE_LAYOUT tableLayout, ScannerBase scannerBase, RangeBindingSetEntries rangeMap) {
-        this(tableLayout, ((scannerBase instanceof BatchScanner) ? new BatchScannerIterator(((BatchScanner) scannerBase).iterator()) : ((Scanner) scannerBase).iterator()), rangeMap);
-        this.scanner = scannerBase;
-        isBatchScanner = scanner instanceof BatchScanner;
-    }
-
-    public RyaStatementBindingSetKeyValueIterator(TABLE_LAYOUT tableLayout, Iterator<Map.Entry<Key, Value>> dataIterator, RangeBindingSetEntries rangeMap) {
-        this.tableLayout = tableLayout;
-        this.rangeMap = rangeMap;
-        this.dataIterator = dataIterator;
-    }
-
-    @Override
-    public void close() throws RyaDAOException {
-        dataIterator = null;
-        if (scanner != null && isBatchScanner) {
-            ((BatchScanner) scanner).close();
-        }
-    }
-
-    public boolean isClosed() throws RyaDAOException {
-        return dataIterator == null;
-    }
-
-    @Override
-    public boolean hasNext() throws RyaDAOException {
-        if (isClosed()) {
-            throw new RyaDAOException("Closed Iterator");
-        }
-        if (maxResults != 0) {
-            if (bsIter != null && bsIter.hasNext()) {
-                return true;
-            }
-            if (dataIterator.hasNext()) {
-                return true;
-            } else {
-                maxResults = 0L;
-                return false;
-            }
-        }
-        return false;
-    }
-
-    @Override
-    public Map.Entry<RyaStatement, BindingSet> next() throws RyaDAOException {
-        if (!hasNext()) {
-            return null;
-        }
-
-        try {
-            while (true) {
-                if (bsIter != null && bsIter.hasNext()) {
-                    maxResults--;
-                    return new RdfCloudTripleStoreUtils.CustomEntry<RyaStatement, BindingSet>(statement, bsIter.next());
-                }
-
-                if (dataIterator.hasNext()) {
-                    Map.Entry<Key, Value> next = dataIterator.next();
-                    Key key = next.getKey();
-                    statement = RyaContext.getInstance().deserializeTriple(tableLayout,
-                            new TripleRow(key.getRowData().toArray(), key.getColumnFamilyData().toArray(), key.getColumnQualifierData().toArray(),
-                                    key.getTimestamp(), key.getColumnVisibilityData().toArray(), next.getValue().get()));
-                    if (next.getValue() != null) {
-                        statement.setValue(next.getValue().get());
-                    }
-                    Collection<BindingSet> bindingSets = rangeMap.containsKey(key);
-                    if (!bindingSets.isEmpty()) {
-                        bsIter = bindingSets.iterator();
-                    }
-                } else {
-                    break;
-                }
-            }
-            return null;
-        } catch (TripleRowResolverException e) {
-            throw new RyaDAOException(e);
-        }
-    }
-
-    @Override
-    public void remove() throws RyaDAOException {
-        next();
-    }
-
-    public Long getMaxResults() {
-        return maxResults;
-    }
-
-    public void setMaxResults(Long maxResults) {
-        this.maxResults = maxResults;
-    }
-}

http://git-wip-us.apache.org/repos/asf/incubator-rya/blob/80faf06d/dao/cloudbase.rya/src/main/java/mvm/rya/cloudbase/query/RyaStatementKeyValueIterator.java
----------------------------------------------------------------------
diff --git a/dao/cloudbase.rya/src/main/java/mvm/rya/cloudbase/query/RyaStatementKeyValueIterator.java b/dao/cloudbase.rya/src/main/java/mvm/rya/cloudbase/query/RyaStatementKeyValueIterator.java
deleted file mode 100644
index 602affe..0000000
--- a/dao/cloudbase.rya/src/main/java/mvm/rya/cloudbase/query/RyaStatementKeyValueIterator.java
+++ /dev/null
@@ -1,82 +0,0 @@
-package mvm.rya.cloudbase.query;
-
-import cloudbase.core.data.Key;
-import cloudbase.core.data.Value;
-import info.aduna.iteration.CloseableIteration;
-import mvm.rya.api.domain.RyaStatement;
-import mvm.rya.api.persist.RyaDAOException;
-import mvm.rya.api.resolver.RyaContext;
-import mvm.rya.api.resolver.triple.TripleRow;
-import mvm.rya.api.resolver.triple.TripleRowResolverException;
-
-import java.util.Iterator;
-import java.util.Map;
-
-import static mvm.rya.api.RdfCloudTripleStoreConstants.TABLE_LAYOUT;
-
-/**
- * Date: 7/17/12
- * Time: 11:48 AM
- */
-public class RyaStatementKeyValueIterator implements CloseableIteration<RyaStatement, RyaDAOException> {
-    private Iterator<Map.Entry<Key, Value>> dataIterator;
-    private TABLE_LAYOUT tableLayout;
-    private Long maxResults = -1L;
-
-    public RyaStatementKeyValueIterator(TABLE_LAYOUT tableLayout, Iterator<Map.Entry<Key, Value>> dataIterator) {
-        this.tableLayout = tableLayout;
-        this.dataIterator = dataIterator;
-    }
-
-    @Override
-    public void close() throws RyaDAOException {
-        dataIterator = null;
-    }
-
-    public boolean isClosed() throws RyaDAOException {
-        return dataIterator == null;
-    }
-
-    @Override
-    public boolean hasNext() throws RyaDAOException {
-        if (isClosed()) {
-            throw new RyaDAOException("Closed Iterator");
-        }
-        return maxResults != 0 && dataIterator.hasNext();
-    }
-
-    @Override
-    public RyaStatement next() throws RyaDAOException {
-        if (!hasNext()) {
-            return null;
-        }
-
-        try {
-            Map.Entry<Key, Value> next = dataIterator.next();
-            Key key = next.getKey();
-            RyaStatement statement = RyaContext.getInstance().deserializeTriple(tableLayout,
-                    new TripleRow(key.getRowData().toArray(), key.getColumnFamilyData().toArray(), key.getColumnQualifierData().toArray(),
-                            key.getTimestamp(), key.getColumnVisibilityData().toArray(), next.getValue().get()));
-            if (next.getValue() != null) {
-                statement.setValue(next.getValue().get());
-            }
-            maxResults--;
-            return statement;
-        } catch (TripleRowResolverException e) {
-            throw new RyaDAOException(e);
-        }
-    }
-
-    @Override
-    public void remove() throws RyaDAOException {
-        next();
-    }
-
-    public Long getMaxResults() {
-        return maxResults;
-    }
-
-    public void setMaxResults(Long maxResults) {
-        this.maxResults = maxResults;
-    }
-}
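
Callers consume this through Sesame's CloseableIteration contract, and hasNext() throws once the iterator is closed, so close() belongs in a finally block. A minimal consumption sketch (the iteration is assumed to come from a query engine, as in the tests below):

    // Sketch only: drain and always close, since hasNext() on a closed
    // iterator throws RyaDAOException("Closed Iterator").
    static void drain(CloseableIteration<RyaStatement, RyaDAOException> iter)
            throws RyaDAOException {
        try {
            while (iter.hasNext()) {
                RyaStatement stmt = iter.next();
                // process stmt
            }
        } finally {
            iter.close();
        }
    }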

http://git-wip-us.apache.org/repos/asf/incubator-rya/blob/80faf06d/dao/cloudbase.rya/src/main/java/mvm/rya/cloudbase/query/ScannerCloseableIterable.java
----------------------------------------------------------------------
diff --git a/dao/cloudbase.rya/src/main/java/mvm/rya/cloudbase/query/ScannerCloseableIterable.java b/dao/cloudbase.rya/src/main/java/mvm/rya/cloudbase/query/ScannerCloseableIterable.java
deleted file mode 100644
index f9a51fc..0000000
--- a/dao/cloudbase.rya/src/main/java/mvm/rya/cloudbase/query/ScannerCloseableIterable.java
+++ /dev/null
@@ -1,35 +0,0 @@
-package mvm.rya.cloudbase.query;
-
-import cloudbase.core.client.Scanner;
-import cloudbase.core.data.Key;
-import cloudbase.core.data.Value;
-import com.google.common.base.Preconditions;
-import mango.collect.AbstractCloseableIterable;
-
-import java.io.IOException;
-import java.util.Iterator;
-import java.util.Map;
-
-/**
- * Date: 1/30/13
- * Time: 2:15 PM
- */
-public class ScannerCloseableIterable extends AbstractCloseableIterable<Map.Entry<Key, Value>> {
-
-    protected Scanner scanner;
-
-    public ScannerCloseableIterable(Scanner scanner) {
-        Preconditions.checkNotNull(scanner);
-        this.scanner = scanner;
-    }
-
-    @Override
-    protected void doClose() throws IOException {
-        //do nothing
-    }
-
-    @Override
-    protected Iterator<Map.Entry<Key, Value>> retrieveIterator() {
-        return scanner.iterator();
-    }
-}

http://git-wip-us.apache.org/repos/asf/incubator-rya/blob/80faf06d/dao/cloudbase.rya/src/test/java/mvm/rya/cloudbase/CloudbaseResearchMain.java
----------------------------------------------------------------------
diff --git a/dao/cloudbase.rya/src/test/java/mvm/rya/cloudbase/CloudbaseResearchMain.java b/dao/cloudbase.rya/src/test/java/mvm/rya/cloudbase/CloudbaseResearchMain.java
deleted file mode 100644
index 2c1ea55..0000000
--- a/dao/cloudbase.rya/src/test/java/mvm/rya/cloudbase/CloudbaseResearchMain.java
+++ /dev/null
@@ -1,77 +0,0 @@
-//package mvm.mmrts.cloudbase;
-//
-//import cloudbase.core.CBConstants;
-//import cloudbase.core.client.BatchScanner;
-//import cloudbase.core.client.Connector;
-//import cloudbase.core.client.Scanner;
-//import cloudbase.core.client.ZooKeeperInstance;
-//import cloudbase.core.client.impl.MasterClient;
-//import cloudbase.core.data.Key;
-//import cloudbase.core.data.Range;
-//import cloudbase.core.data.Value;
-//import cloudbase.core.master.thrift.MasterClientService;
-//import cloudbase.core.master.thrift.MasterMonitorInfo;
-//import cloudbase.core.master.thrift.TableInfo;
-//import cloudbase.core.master.thrift.TabletServerStatus;
-//import cloudbase.core.security.thrift.AuthInfo;
-//import mvm.rya.cloudbase.utils.pri.PriorityIterator;
-//import org.apache.hadoop.io.Text;
-//
-//import java.util.Collections;
-//import java.util.Iterator;
-//import java.util.List;
-//import java.util.Map;
-//
-///**
-// * Created by IntelliJ IDEA.
-// * User: RoshanP
-// * Date: 3/28/12
-// * Time: 5:32 PM
-// * To change this template use File | Settings | File Templates.
-// */
-//public class CloudbaseResearchMain {
-//
-//
-//    public static void main(String[] args) {
-//        try {
-//            ZooKeeperInstance instance = new ZooKeeperInstance("stratus", "stratus13:2181");
-//
-//            MasterClientService.Iface client = MasterClient.getConnection(instance, false);
-//            MasterMonitorInfo mmi = client.getMasterStats(null, new AuthInfo("root", "password".getBytes(), "stratus"));
-//
-//            List<TabletServerStatus> tServerInfo = mmi.getTServerInfo();
-//            for (TabletServerStatus tstatus : tServerInfo) {
-//                System.out.println(tstatus.getName());
-//                System.out.println(tstatus.getOsLoad());
-//                Map<String, TableInfo> tableMap = tstatus.getTableMap();
-//                double ingestRate = 0;
-//                double queryRate = 0;
-//                for (Map.Entry<String, TableInfo> entry : tableMap.entrySet()) {
-//                    String tableName = entry.getKey();
-//                    TableInfo tableInfo = entry.getValue();
-//                    ingestRate += tableInfo.getIngestRate();
-//                    queryRate += tableInfo.getQueryRate();
-//                }
-//                System.out.println(ingestRate);
-//                System.out.println(queryRate);
-//            }
-//
-//            Connector connector = instance.getConnector("root", "password".getBytes());
-////            BatchScanner scanner = connector.createBatchScanner("l_spo", CBConstants.NO_AUTHS, 10);
-////            scanner.setRanges(Collections.singleton(new Range(new Text("\0"), new Text("\uFFFD"))));
-//            Scanner scanner = connector.createScanner("l_spo", CBConstants.NO_AUTHS);
-//            scanner.setScanIterators(20, PriorityIterator.class.getName(), "pi");
-//            Iterator<Map.Entry<Key,Value>> iter = scanner.iterator();
-//            int count = 0;
-//            while(iter.hasNext()) {
-//                iter.next();
-//                System.out.println(count++);
-////                if(count == 100) break;
-//            }
-////            scanner.close();
-//
-//        } catch (Exception e) {
-//            e.printStackTrace();
-//        }
-//    }
-//}

http://git-wip-us.apache.org/repos/asf/incubator-rya/blob/80faf06d/dao/cloudbase.rya/src/test/java/mvm/rya/cloudbase/CloudbaseRyaDAOTest.java
----------------------------------------------------------------------
diff --git a/dao/cloudbase.rya/src/test/java/mvm/rya/cloudbase/CloudbaseRyaDAOTest.java b/dao/cloudbase.rya/src/test/java/mvm/rya/cloudbase/CloudbaseRyaDAOTest.java
deleted file mode 100644
index af3e9ab..0000000
--- a/dao/cloudbase.rya/src/test/java/mvm/rya/cloudbase/CloudbaseRyaDAOTest.java
+++ /dev/null
@@ -1,588 +0,0 @@
-package mvm.rya.cloudbase;
-
-import cloudbase.core.client.Connector;
-import cloudbase.core.client.Scanner;
-import cloudbase.core.client.TableNotFoundException;
-import cloudbase.core.client.mock.MockInstance;
-import info.aduna.iteration.CloseableIteration;
-import mvm.rya.api.RdfCloudTripleStoreUtils;
-import mvm.rya.api.domain.RyaStatement;
-import mvm.rya.api.domain.RyaType;
-import mvm.rya.api.domain.RyaURI;
-import mvm.rya.api.persist.RyaDAOException;
-import mvm.rya.api.resolver.RdfToRyaConversions;
-import mvm.rya.api.resolver.RyaContext;
-import mvm.rya.cloudbase.query.CloudbaseRyaQueryEngine;
-import org.junit.After;
-import org.junit.Before;
-import org.junit.Ignore;
-import org.junit.Test;
-import org.openrdf.model.ValueFactory;
-import org.openrdf.model.impl.ValueFactoryImpl;
-import org.openrdf.model.vocabulary.XMLSchema;
-import org.openrdf.query.BindingSet;
-
-import java.util.ArrayList;
-import java.util.Collection;
-import java.util.Map;
-import java.util.UUID;
-
-import static org.junit.Assert.*;
-
-/**
- * Class CloudbaseRyaDAOTest
- * Date: Mar 7, 2012
- * Time: 9:42:28 AM
- */
-public class CloudbaseRyaDAOTest {
-
-    private CloudbaseRyaDAO dao;
-    private ValueFactory vf = new ValueFactoryImpl();
-    static String litdupsNS = "urn:test:litdups#";
-    private Connector connector;
-    private CloudbaseRdfConfiguration conf = new CloudbaseRdfConfiguration();
-
-    @Before
-    public void setUp() throws Exception {
-        dao = new CloudbaseRyaDAO();
-        connector = new MockInstance().getConnector("", "");
-        dao.setConnector(connector);
-        dao.setConf(conf);
-        dao.init();
-    }
-
-    @After
-    public void tearDown() throws Exception {
-        //dao.purge(conf);
-        dao.destroy();
-    }
-
-    @Test
-    public void testAdd() throws Exception {
-        RyaURI cpu = new RyaURI(litdupsNS + "cpu");
-        RyaURI loadPerc = new RyaURI(litdupsNS + "loadPerc");
-        RyaURI uri1 = new RyaURI(litdupsNS + "uri1");
-        dao.add(new RyaStatement(cpu, loadPerc, uri1));
-
-        CloudbaseRyaQueryEngine queryEngine = dao.getQueryEngine();
-
-        CloseableIteration<RyaStatement, RyaDAOException> iter = queryEngine.query(new RyaStatement(cpu, loadPerc, null), conf);
-        int count = 0;
-        while (iter.hasNext()) {
-            assertTrue(uri1.equals(iter.next().getObject()));
-            count++;
-        }
-        iter.close();
-        assertEquals(1, count);
-
-        dao.delete(new RyaStatement(cpu, loadPerc, uri1), conf);
-
-        iter = queryEngine.query(new RyaStatement(cpu, loadPerc, null), conf);
-        count = 0;
-        while (iter.hasNext()) {
-            count++;
-            iter.next();
-        }
-        iter.close();
-        assertEquals(0, count);
-    }
-
-    @Test
-    public void testMaxResults() throws Exception {
-        RyaURI cpu = new RyaURI(litdupsNS + "cpu");
-        RyaURI loadPerc = new RyaURI(litdupsNS + "loadPerc");
-        dao.add(new RyaStatement(cpu, loadPerc, new RyaURI(litdupsNS + "uri1")));
-        dao.add(new RyaStatement(cpu, loadPerc, new RyaURI(litdupsNS + "uri2")));
-        dao.add(new RyaStatement(cpu, loadPerc, new RyaURI(litdupsNS + "uri3")));
-        dao.add(new RyaStatement(cpu, loadPerc, new RyaURI(litdupsNS + "uri4")));
-        dao.add(new RyaStatement(cpu, loadPerc, new RyaURI(litdupsNS + "uri5")));
-
-        CloudbaseRyaQueryEngine queryEngine = dao.getQueryEngine();
-        CloudbaseRdfConfiguration queryConf = new CloudbaseRdfConfiguration(conf);
-        long limit = 3l;
-        queryConf.setLimit(limit);
-
-        CloseableIteration<RyaStatement, RyaDAOException> iter = queryEngine.query(new RyaStatement(cpu, loadPerc, null), queryConf);
-        int count = 0;
-        while (iter.hasNext()) {
-            iter.next().getObject();
-            count++;
-        }
-        iter.close();
-        assertEquals(limit, count);
-    }
-
-    @Test
-    public void testTTL() throws Exception {
-        RyaURI cpu = new RyaURI(litdupsNS + "cpu");
-        RyaURI loadPerc = new RyaURI(litdupsNS + "loadPerc");
-        long current = System.currentTimeMillis();
-        dao.add(new RyaStatement(cpu, loadPerc, new RyaURI(litdupsNS + "uri1"), null, null, null, null, current));
-        dao.add(new RyaStatement(cpu, loadPerc, new RyaURI(litdupsNS + "uri2"), null, null, null, null, current - 1000l));
-        dao.add(new RyaStatement(cpu, loadPerc, new RyaURI(litdupsNS + "uri3"), null, null, null, null, current - 2000l));
-        dao.add(new RyaStatement(cpu, loadPerc, new RyaURI(litdupsNS + "uri4"), null, null, null, null, current - 3000l));
-        dao.add(new RyaStatement(cpu, loadPerc, new RyaURI(litdupsNS + "uri5"), null, null, null, null, current - 4000l));
-
-        CloudbaseRyaQueryEngine queryEngine = dao.getQueryEngine();
-        CloudbaseRdfConfiguration queryConf = new CloudbaseRdfConfiguration(conf);
-        queryConf.setTtl(3000l);
-
-        CloseableIteration<RyaStatement, RyaDAOException> iter = queryEngine.query(new RyaStatement(cpu, loadPerc, null), queryConf);
-        int count = 0;
-        while (iter.hasNext()) {
-            iter.next().getObject();
-            count++;
-        }
-        iter.close();
-        assertEquals(3, count);
-
-        queryConf.setStartTime(current - 3000l);
-        iter = queryEngine.query(new RyaStatement(cpu, loadPerc, null), queryConf);
-        count = 0;
-        while (iter.hasNext()) {
-            iter.next().getObject();
-            count++;
-        }
-        iter.close();
-        assertEquals(2, count);
-    }
-
-    @Test
-    public void testPredRegex() throws Exception {
-        RyaURI cpu = new RyaURI(litdupsNS + "cpu");
-        RyaURI loadPerc = new RyaURI(litdupsNS + "loadPerc");
-        RyaURI loadPerc2 = new RyaURI(litdupsNS + "loadPerc2");
-        dao.add(new RyaStatement(cpu, loadPerc, new RyaURI(litdupsNS + "uri1")));
-        dao.add(new RyaStatement(cpu, loadPerc, new RyaURI(litdupsNS + "uri2")));
-        dao.add(new RyaStatement(cpu, loadPerc2, new RyaURI(litdupsNS + "uri3")));
-        dao.add(new RyaStatement(cpu, loadPerc2, new RyaURI(litdupsNS + "uri4")));
-        dao.add(new RyaStatement(cpu, loadPerc, new RyaURI(litdupsNS + "uri5")));
-
-        CloudbaseRyaQueryEngine queryEngine = dao.getQueryEngine();
-        CloudbaseRdfConfiguration queryConf = new CloudbaseRdfConfiguration(conf);
-        queryConf.setRegexPredicate(litdupsNS + "loadPerc[2]");
-
-        CloseableIteration<RyaStatement, RyaDAOException> iter = queryEngine.query(new RyaStatement(cpu, null, null), queryConf);
-        int count = 0;
-        while (iter.hasNext()) {
-            iter.next();
-            count++;
-        }
-        iter.close();
-        assertEquals(2, count);
-    }
-
-    @Test
-    public void testSubjectRegex() throws Exception {
-        RyaURI cpu = new RyaURI(litdupsNS + "cpu");
-        RyaURI cpu2 = new RyaURI(litdupsNS + "cpu2");
-        RyaURI loadPerc = new RyaURI(litdupsNS + "loadPerc");
-        RyaURI loadPerc2 = new RyaURI(litdupsNS + "loadPerc2");
-        dao.add(new RyaStatement(cpu, loadPerc, new RyaURI(litdupsNS + "uri1")));
-        dao.add(new RyaStatement(cpu, loadPerc, new RyaURI(litdupsNS + "uri2")));
-        dao.add(new RyaStatement(cpu, loadPerc2, new RyaURI(litdupsNS + "uri3")));
-        dao.add(new RyaStatement(cpu, loadPerc2, new RyaURI(litdupsNS + "uri4")));
-        dao.add(new RyaStatement(cpu, loadPerc, new RyaURI(litdupsNS + "uri5")));
-        dao.add(new RyaStatement(cpu2, loadPerc, new RyaURI(litdupsNS + "uri5")));
-
-        CloudbaseRyaQueryEngine queryEngine = dao.getQueryEngine();
-        CloudbaseRdfConfiguration queryConf = new CloudbaseRdfConfiguration(conf);
-        queryConf.setRegexSubject(cpu.getData());
-
-        CloseableIteration<RyaStatement, RyaDAOException> iter = queryEngine.query(new RyaStatement(null, loadPerc, null), queryConf);
-        int count = 0;
-        while (iter.hasNext()) {
-            iter.next();
-            count++;
-        }
-        iter.close();
-        assertEquals(3, count);
-    }
-
-    @Test
-    public void testFullTableScan() throws Exception {
-        RyaURI cpu = new RyaURI(litdupsNS + "cpu");
-        RyaURI cpu2 = new RyaURI(litdupsNS + "cpu2");
-        RyaURI loadPerc = new RyaURI(litdupsNS + "loadPerc");
-        RyaURI loadPerc2 = new RyaURI(litdupsNS + "loadPerc2");
-        dao.add(new RyaStatement(cpu, loadPerc, new RyaURI(litdupsNS + "uri1")));
-        dao.add(new RyaStatement(cpu, loadPerc, new RyaURI(litdupsNS + "uri2")));
-        dao.add(new RyaStatement(cpu, loadPerc2, new RyaURI(litdupsNS + "uri3")));
-        dao.add(new RyaStatement(cpu, loadPerc2, new RyaURI(litdupsNS + "uri4")));
-        dao.add(new RyaStatement(cpu, loadPerc, new RyaURI(litdupsNS + "uri5")));
-        dao.add(new RyaStatement(cpu2, loadPerc, new RyaURI(litdupsNS + "uri5")));
-
-        CloudbaseRyaQueryEngine queryEngine = dao.getQueryEngine();
-        CloudbaseRdfConfiguration queryConf = new CloudbaseRdfConfiguration(conf);
-        queryConf.setRegexSubject(cpu.getData());
-
-        CloseableIteration<RyaStatement, RyaDAOException> iter = queryEngine.query(new RyaStatement(null, null, null), queryConf);
-        int count = 0;
-        while (iter.hasNext()) {
-            iter.next();
-            count++;
-        }
-        iter.close();
-        assertEquals(7, count); //includes the rts:version
-    }
-
-    @Test
-    public void testAddValue() throws Exception {
-        RyaURI cpu = new RyaURI(litdupsNS + "cpu");
-        RyaURI loadPerc = new RyaURI(litdupsNS + "loadPerc");
-        RyaURI uri1 = new RyaURI(litdupsNS + "uri1");
-        String myval = "myval";
-        dao.add(new RyaStatement(cpu, loadPerc, uri1, null, null, null, myval.getBytes()));
-
-        CloudbaseRyaQueryEngine queryEngine = dao.getQueryEngine();
-        CloseableIteration<RyaStatement, RyaDAOException> iter = queryEngine.query(new RyaStatement(cpu, loadPerc, null), conf);
-        assertTrue(iter.hasNext());
-        assertEquals(myval, new String(iter.next().getValue()));
-        iter.close();
-    }
-
-    @Test
-    public void testDeleteDiffVisibility() throws Exception {
-        RyaURI cpu = RdfToRyaConversions.convertURI(vf.createURI(litdupsNS, "cpu"));
-        RyaURI loadPerc = RdfToRyaConversions.convertURI(vf.createURI(litdupsNS, "loadPerc"));
-        RyaURI uri1 = RdfToRyaConversions.convertURI(vf.createURI(litdupsNS, "uri1"));
-        RyaStatement stmt1 = new RyaStatement(cpu, loadPerc, uri1, null, "1", "vis1".getBytes());
-        dao.add(stmt1);
-        RyaStatement stmt2 = new RyaStatement(cpu, loadPerc, uri1, null, "2", "vis2".getBytes());
-        dao.add(stmt2);
-
-        CloudbaseRdfConfiguration cloneConf = conf.clone();
-        cloneConf.setAuth("vis1,vis2");
-
-        CloseableIteration<RyaStatement, RyaDAOException> iter = dao.getQueryEngine().query(new RyaStatement(cpu, loadPerc, null), cloneConf);
-        int count = 0;
-        while (iter.hasNext()) {
-            iter.next();
-            count++;
-        }
-        iter.close();
-        assertEquals(2, count);
-
-        dao.delete(stmt1, cloneConf);
-
-        iter = dao.getQueryEngine().query(new RyaStatement(cpu, loadPerc, null), cloneConf);
-        count = 0;
-        while (iter.hasNext()) {
-            iter.next();
-            count++;
-        }
-        iter.close();
-        assertEquals(1, count);
-    }
-
-    @Test
-    public void testAddCv() throws Exception {
-        RyaURI cpu = new RyaURI(litdupsNS + "cpu");
-        RyaURI loadPerc = new RyaURI(litdupsNS + "loadPerc");
-        RyaURI uri1 = new RyaURI(litdupsNS + "uri1");
-        RyaURI uri2 = new RyaURI(litdupsNS + "uri2");
-        RyaURI uri3 = new RyaURI(litdupsNS + "uri3");
-        byte[] colVisABC = "A|B|C".getBytes();
-        byte[] colVisAB = "A|B".getBytes();
-        byte[] colVisA = "A".getBytes();
-        dao.add(new RyaStatement(cpu, loadPerc, uri1, null, null, colVisABC));
-        dao.add(new RyaStatement(cpu, loadPerc, uri2, null, null, colVisAB));
-        dao.add(new RyaStatement(cpu, loadPerc, uri3, null, null, colVisA));
-
-        CloudbaseRyaQueryEngine queryEngine = dao.getQueryEngine();
-
-        //query with no auth
-        CloseableIteration<RyaStatement, RyaDAOException> iter = queryEngine.query(new RyaStatement(cpu, loadPerc, null), conf);
-        int count = 0;
-        while (iter.hasNext()) {
-            count++;
-            iter.next();
-        }
-        assertEquals(0, count);
-        iter.close();
-
-        CloudbaseRdfConfiguration queryConf = new CloudbaseRdfConfiguration();
-        queryConf.setAuth("B");
-        iter = queryEngine.query(new RyaStatement(cpu, loadPerc, null), queryConf);
-        count = 0;
-        while (iter.hasNext()) {
-            iter.next();
-            count++;
-        }
-        iter.close();
-        assertEquals(2, count);
-
-        queryConf.setAuth("A");
-        iter = queryEngine.query(new RyaStatement(cpu, loadPerc, null), queryConf);
-        count = 0;
-        while (iter.hasNext()) {
-            iter.next();
-            count++;
-        }
-        iter.close();
-        assertEquals(3, count);
-    }
-
-    @Test
-    public void testGetNamespace() throws Exception {
-        dao.addNamespace("ns", litdupsNS);
-        assertEquals(litdupsNS, dao.getNamespace("ns"));
-        dao.removeNamespace("ns");
-        assertNull(dao.getNamespace("ns"));
-    }
-
-    //TODO: Add test for set of queries
-    @Test
-    public void testQuery() throws Exception {
-        RyaURI cpu = new RyaURI(litdupsNS + "cpu");
-        RyaURI loadPerc = new RyaURI(litdupsNS + "loadPerc");
-        RyaURI uri1 = new RyaURI(litdupsNS + "uri1");
-        RyaURI uri2 = new RyaURI(litdupsNS + "uri2");
-        RyaURI uri3 = new RyaURI(litdupsNS + "uri3");
-        RyaURI uri4 = new RyaURI(litdupsNS + "uri4");
-        RyaURI uri5 = new RyaURI(litdupsNS + "uri5");
-        RyaURI uri6 = new RyaURI(litdupsNS + "uri6");
-        dao.add(new RyaStatement(cpu, loadPerc, uri1));
-        dao.add(new RyaStatement(cpu, loadPerc, uri2));
-        dao.add(new RyaStatement(cpu, loadPerc, uri3));
-        dao.add(new RyaStatement(cpu, loadPerc, uri4));
-        dao.add(new RyaStatement(cpu, loadPerc, uri5));
-        dao.add(new RyaStatement(cpu, loadPerc, uri6));
-
-        CloudbaseRyaQueryEngine queryEngine = dao.getQueryEngine();
-
-        Collection<Map.Entry<RyaStatement, BindingSet>> coll = new ArrayList();
-        coll.add(new RdfCloudTripleStoreUtils.CustomEntry(new RyaStatement(null, loadPerc, uri1), null));
-        coll.add(new RdfCloudTripleStoreUtils.CustomEntry(new RyaStatement(null, loadPerc, uri2), null));
-        CloseableIteration<? extends Map.Entry<RyaStatement, BindingSet>, RyaDAOException> iter = queryEngine.queryWithBindingSet(coll, conf);
-        int count = 0;
-        while (iter.hasNext()) {
-            count++;
-            iter.next();
-        }
-        iter.close();
-        assertEquals(2, count);
-
-        //now use batchscanner
-        CloudbaseRdfConfiguration queryConf = new CloudbaseRdfConfiguration(conf);
-        queryConf.setMaxRangesForScanner(2);
-
-        coll = new ArrayList();
-        coll.add(new RdfCloudTripleStoreUtils.CustomEntry(new RyaStatement(null, loadPerc, uri1), null));
-        coll.add(new RdfCloudTripleStoreUtils.CustomEntry(new RyaStatement(null, loadPerc, uri2), null));
-        coll.add(new RdfCloudTripleStoreUtils.CustomEntry(new RyaStatement(null, loadPerc, uri3), null));
-        coll.add(new RdfCloudTripleStoreUtils.CustomEntry(new RyaStatement(null, loadPerc, uri4), null));
-        iter = queryEngine.queryWithBindingSet(coll, queryConf);
-        assertTrue(iter.hasNext()); //old code could not handle consecutive hasNext() calls
-        assertTrue(iter.hasNext());
-        assertTrue(iter.hasNext());
-        count = 0;
-        while (iter.hasNext()) {
-            count++;
-            assertTrue(iter.hasNext());
-            iter.next();
-        }
-        iter.close();
-        assertEquals(4, count);
-    }
-
-    @Test
-    public void testQueryCollectionRegex() throws Exception {
-        RyaURI cpu = new RyaURI(litdupsNS + "cpu");
-        RyaURI loadPerc = new RyaURI(litdupsNS + "loadPerc");
-        RyaURI uri1 = new RyaURI(litdupsNS + "uri1");
-        RyaURI uri2 = new RyaURI(litdupsNS + "uri2");
-        RyaURI uri3 = new RyaURI(litdupsNS + "uri3");
-        RyaURI uri4 = new RyaURI(litdupsNS + "uri4");
-        RyaURI uri5 = new RyaURI(litdupsNS + "uri5");
-        RyaURI uri6 = new RyaURI(litdupsNS + "uri6");
-        dao.add(new RyaStatement(cpu, loadPerc, uri1));
-        dao.add(new RyaStatement(cpu, loadPerc, uri2));
-        dao.add(new RyaStatement(cpu, loadPerc, uri3));
-        dao.add(new RyaStatement(cpu, loadPerc, uri4));
-        dao.add(new RyaStatement(cpu, loadPerc, uri5));
-        dao.add(new RyaStatement(cpu, loadPerc, uri6));
-
-        CloudbaseRyaQueryEngine queryEngine = dao.getQueryEngine();
-
-        Collection<Map.Entry<RyaStatement, BindingSet>> coll = new ArrayList();
-        coll.add(new RdfCloudTripleStoreUtils.CustomEntry(new RyaStatement(null, loadPerc, uri1), null));
-        coll.add(new RdfCloudTripleStoreUtils.CustomEntry(new RyaStatement(null, loadPerc, uri2), null));
-        conf.setRegexPredicate(loadPerc.getData());
-        CloseableIteration<? extends Map.Entry<RyaStatement, BindingSet>, RyaDAOException> iter = queryEngine.queryWithBindingSet(coll, conf);
-        int count = 0;
-        while (iter.hasNext()) {
-            count++;
-            iter.next();
-        }
-        iter.close();
-        assertEquals(2, count);
-
-        conf.setRegexPredicate("notLoadPerc");
-        iter = queryEngine.queryWithBindingSet(coll, conf);
-        count = 0;
-        while (iter.hasNext()) {
-            count++;
-            iter.next();
-        }
-        iter.close();
-        assertEquals(0, count);
-    }
-
-    @Test
-    public void testQueryCollectionRegexWBatchScanner() throws Exception {
-        RyaURI cpu = new RyaURI(litdupsNS + "cpu");
-        RyaURI loadPerc = new RyaURI(litdupsNS + "loadPerc");
-        RyaURI uri1 = new RyaURI(litdupsNS + "uri1");
-        RyaURI uri2 = new RyaURI(litdupsNS + "uri2");
-        RyaURI uri3 = new RyaURI(litdupsNS + "uri3");
-        RyaURI uri4 = new RyaURI(litdupsNS + "uri4");
-        RyaURI uri5 = new RyaURI(litdupsNS + "uri5");
-        RyaURI uri6 = new RyaURI(litdupsNS + "uri6");
-        dao.add(new RyaStatement(cpu, loadPerc, uri1));
-        dao.add(new RyaStatement(cpu, loadPerc, uri2));
-        dao.add(new RyaStatement(cpu, loadPerc, uri3));
-        dao.add(new RyaStatement(cpu, loadPerc, uri4));
-        dao.add(new RyaStatement(cpu, loadPerc, uri5));
-        dao.add(new RyaStatement(cpu, loadPerc, uri6));
-
-        CloudbaseRyaQueryEngine queryEngine = dao.getQueryEngine();
-        CloudbaseRdfConfiguration queryConf = new CloudbaseRdfConfiguration(conf);
-        queryConf.setMaxRangesForScanner(1);
-
-        Collection<Map.Entry<RyaStatement, BindingSet>> coll = new ArrayList();
-        coll.add(new RdfCloudTripleStoreUtils.CustomEntry(new RyaStatement(null, loadPerc, uri1), null));
-        coll.add(new RdfCloudTripleStoreUtils.CustomEntry(new RyaStatement(null, loadPerc, uri2), null));
-        conf.setRegexPredicate(loadPerc.getData());
-        CloseableIteration<? extends Map.Entry<RyaStatement, BindingSet>, RyaDAOException> iter = queryEngine.queryWithBindingSet(coll, queryConf);
-        int count = 0;
-        while (iter.hasNext()) {
-            count++;
-            iter.next();
-        }
-        iter.close();
-        assertEquals(2, count);
-
-        queryConf.setRegexPredicate("notLoadPerc");
-        iter = queryEngine.queryWithBindingSet(coll, queryConf);
-        count = 0;
-        while (iter.hasNext()) {
-            count++;
-            iter.next();
-        }
-        iter.close();
-        assertEquals(0, count);
-    }
-
-    @Test
-    public void testLiteralTypes() throws Exception {
-        RyaURI cpu = new RyaURI(litdupsNS + "cpu");
-        RyaURI loadPerc = new RyaURI(litdupsNS + "loadPerc");
-        RyaType longLit = new RyaType(XMLSchema.LONG, "3");
-
-        dao.add(new RyaStatement(cpu, loadPerc, longLit));
-
-        CloudbaseRyaQueryEngine queryEngine = dao.getQueryEngine();
-
-        CloseableIteration<RyaStatement, RyaDAOException> query = queryEngine.query(new RyaStatement(cpu, null, null), conf);
-        assertTrue(query.hasNext());
-        RyaStatement next = query.next();
-        assertEquals(new Long(longLit.getData()), new Long(next.getObject().getData()));
-        query.close();
-
-        RyaType doubleLit = new RyaType(XMLSchema.DOUBLE, "2.0");
-
-        dao.add(new RyaStatement(cpu, loadPerc, doubleLit));
-
-        query = queryEngine.query(new RyaStatement(cpu, loadPerc, doubleLit), conf);
-        assertTrue(query.hasNext());
-        next = query.next();
-        assertEquals(Double.parseDouble(doubleLit.getData()), Double.parseDouble(next.getObject().getData()), 0.001);
-        query.close();
-    }
-
-    @Test
-    public void testSameLiteralStringTypes() throws Exception {
-        RyaURI cpu = new RyaURI(litdupsNS + "cpu");
-        RyaURI loadPerc = new RyaURI(litdupsNS + "loadPerc");
-        RyaType longLit = new RyaType(XMLSchema.LONG, "10");
-        RyaType strLit = new RyaType(XMLSchema.STRING, new String(RyaContext.getInstance().serializeType(longLit)[0]));
-
-        RyaStatement expected = new RyaStatement(cpu, loadPerc, longLit);
-        dao.add(expected);
-        dao.add(new RyaStatement(cpu, loadPerc, strLit));
-
-        CloudbaseRyaQueryEngine queryEngine = dao.getQueryEngine();
-
-        CloseableIteration<RyaStatement, RyaDAOException> query = queryEngine.query(new RyaStatement(cpu, loadPerc, longLit), conf);
-        assertTrue(query.hasNext());
-        RyaStatement next = query.next();
-        assertEquals(new Long(longLit.getData()), new Long(next.getObject().getData()));
-        assertEquals(longLit.getDataType(), next.getObject().getDataType());
-        assertFalse(query.hasNext());
-        query.close();
-    }
-
-    @Test
-    @Ignore("Purge does not work with the batch deleter in mock cloudbase being null")
-    public void testPurge() throws RyaDAOException, TableNotFoundException {
-        dao.add(newRyaStatement());
-        assertFalse("table should not be empty", areTablesEmpty());
-
-        dao.purge(conf);
-        assertTrue("table should be empty", areTablesEmpty());
-        //assertNotNull(dao.getVersion());
-    }
-
-    @Test
-    @Ignore("Purge does not work with the batch deleter in mock cloudbase being null")
-    public void testPurgeDoesNotBreakBatchWriters() throws TableNotFoundException, RyaDAOException {
-        dao.purge(conf);
-        assertTrue("table should be empty", areTablesEmpty());
-
-        dao.add(newRyaStatement());
-        assertFalse("table should not be empty", areTablesEmpty());
-    }
-
-    @Test
-    public void testDropAndDestroy() throws RyaDAOException {
-        assertTrue(dao.isInitialized());
-        dao.dropAndDestroy();
-        for (String tableName : dao.getTables()) {
-            assertFalse(tableExists(tableName));
-        }
-        assertFalse(dao.isInitialized());
-    }
-
-    private boolean areTablesEmpty() throws TableNotFoundException {
-        for (String table : dao.getTables()) {
-            if (tableExists(table)) {
-                // TODO: filter out version
-                if (createScanner(table).iterator().hasNext()) {
-                    return false;
-                }
-            }
-        }
-        return true;
-    }
-
-    private boolean tableExists(String tableName) {
-        return connector.tableOperations().exists(tableName);
-    }
-
-    private Scanner createScanner(String tableName) throws TableNotFoundException {
-        return dao.getConnector().createScanner(tableName, conf.getAuthorizations());
-    }
-
-    private RyaStatement newRyaStatement() {
-        RyaURI subject = new RyaURI(litdupsNS + randomString());
-        RyaURI predicate = new RyaURI(litdupsNS + randomString());
-        RyaType object = new RyaType(randomString());
-
-        return new RyaStatement(subject, predicate, object);
-    }
-
-    private String randomString() {
-        return UUID.randomUUID().toString();
-    }
-}

http://git-wip-us.apache.org/repos/asf/incubator-rya/blob/80faf06d/dao/cloudbase.rya/src/test/java/mvm/rya/cloudbase/DefineTripleQueryRangeFactoryTest.java
----------------------------------------------------------------------
diff --git a/dao/cloudbase.rya/src/test/java/mvm/rya/cloudbase/DefineTripleQueryRangeFactoryTest.java b/dao/cloudbase.rya/src/test/java/mvm/rya/cloudbase/DefineTripleQueryRangeFactoryTest.java
deleted file mode 100644
index ddff532..0000000
--- a/dao/cloudbase.rya/src/test/java/mvm/rya/cloudbase/DefineTripleQueryRangeFactoryTest.java
+++ /dev/null
@@ -1,242 +0,0 @@
-//package mvm.rya.cloudbase;
-//
-//import cloudbase.core.data.Range;
-//import junit.framework.TestCase;
-//import mvm.rya.api.domain.RangeValue;
-//import mvm.rya.cloudbase.query.DefineTripleQueryRangeFactory;
-//import org.openrdf.model.URI;
-//import org.openrdf.model.Value;
-//import org.openrdf.model.ValueFactory;
-//import org.openrdf.model.impl.ValueFactoryImpl;
-//
-//import java.util.Map;
-//
-//import static mvm.rya.api.RdfCloudTripleStoreConstants.*;
-//
-///**
-// */
-//public class DefineTripleQueryRangeFactoryTest extends TestCase {
-//
-//    public static final String DELIM_BYTES_STR = new String(DELIM_BYTES);
-//    public static final String URI_MARKER_STR = "\u0007";
-//    public static final String RANGE_ENDKEY_SUFFIX = "\u0000";
-//    DefineTripleQueryRangeFactory factory = new DefineTripleQueryRangeFactory();
-//    ValueFactory vf = ValueFactoryImpl.getInstance();
-//    static String litdupsNS = "urn:test:litdups#";
-//
-//    private CloudbaseRdfConfiguration conf = new CloudbaseRdfConfiguration();
-//
-//    public void testSPOCases() throws Exception {
-//        URI cpu = vf.createURI(litdupsNS, "cpu");
-//        URI loadPerc = vf.createURI(litdupsNS, "loadPerc");
-//        URI obj = vf.createURI(litdupsNS, "uri1");
-//
-//        //spo
-//        Map.Entry<TABLE_LAYOUT, Range> entry =
-//                factory.defineRange(cpu, loadPerc, obj, conf);
-//        assertEquals(TABLE_LAYOUT.SPO, entry.getKey());
-//        String expected_start = URI_MARKER_STR + cpu.stringValue() + DELIM_BYTES_STR +
-//                URI_MARKER_STR + loadPerc.stringValue() + DELIM_BYTES_STR +
-//                URI_MARKER_STR + obj.stringValue();
-//        assertEquals(expected_start,
-//                entry.getValue().getStartKey().getRow().toString());
-//        assertEquals(expected_start + DELIM_STOP + RANGE_ENDKEY_SUFFIX,
-//                entry.getValue().getEndKey().getRow().toString());
-//
-//
-//        //sp
-//        entry = factory.defineRange(cpu, loadPerc, null, conf);
-//        assertEquals(TABLE_LAYOUT.SPO, entry.getKey());
-//        expected_start = URI_MARKER_STR + cpu.stringValue() + DELIM_BYTES_STR +
-//                URI_MARKER_STR + loadPerc.stringValue();
-//        assertEquals(expected_start,
-//                entry.getValue().getStartKey().getRow().toString());
-//        assertEquals(expected_start + DELIM_STOP + RANGE_ENDKEY_SUFFIX,
-//                entry.getValue().getEndKey().getRow().toString());
-//
-//        //s
-//        entry = factory.defineRange(cpu, null, null, conf);
-//        assertEquals(TABLE_LAYOUT.SPO, entry.getKey());
-//        expected_start = URI_MARKER_STR + cpu.stringValue();
-//        assertEquals(expected_start,
-//                entry.getValue().getStartKey().getRow().toString());
-//        assertEquals(expected_start + DELIM_STOP + RANGE_ENDKEY_SUFFIX,
-//                entry.getValue().getEndKey().getRow().toString());
-//
-//        //all
-//        entry = factory.defineRange(null, null, null, conf);
-//        assertEquals(TABLE_LAYOUT.SPO, entry.getKey());
-//        assertEquals("",
-//                entry.getValue().getStartKey().getRow().toString());
-//        assertEquals(new String(new byte[]{Byte.MAX_VALUE}) + DELIM_STOP + RANGE_ENDKEY_SUFFIX,
-//                entry.getValue().getEndKey().getRow().toString());
-//    }
-//
-//    public void testSPOCasesWithRanges() throws Exception {
-//        URI subj_start = vf.createURI(litdupsNS, "subj_start");
-//        URI subj_end = vf.createURI(litdupsNS, "subj_stop");
-//        URI pred_start = vf.createURI(litdupsNS, "pred_start");
-//        URI pred_end = vf.createURI(litdupsNS, "pred_stop");
-//        URI obj_start = vf.createURI(litdupsNS, "obj_start");
-//        URI obj_end = vf.createURI(litdupsNS, "obj_stop");
-//
-//        Value subj = new RangeValue(subj_start, subj_end);
-//        Value pred = new RangeValue(pred_start, pred_end);
-//        Value obj = new RangeValue(obj_start, obj_end);
-//
-//        //spo - o has range
-//        Map.Entry<TABLE_LAYOUT, Range> entry =
-//                factory.defineRange(subj_start, pred_start, obj, conf);
-//        assertEquals(TABLE_LAYOUT.SPO, entry.getKey());
-//        String expected_start = URI_MARKER_STR + subj_start.stringValue() + DELIM_BYTES_STR +
-//                URI_MARKER_STR + pred_start.stringValue() + DELIM_BYTES_STR +
-//                URI_MARKER_STR + obj_start.stringValue();
-//        assertEquals(expected_start,
-//                entry.getValue().getStartKey().getRow().toString());
-//        String expected_end = URI_MARKER_STR + subj_start.stringValue() + DELIM_BYTES_STR +
-//                URI_MARKER_STR + pred_start.stringValue() + DELIM_BYTES_STR +
-//                URI_MARKER_STR + obj_end.stringValue();
-//        assertEquals(expected_end + DELIM_STOP + RANGE_ENDKEY_SUFFIX,
-//                entry.getValue().getEndKey().getRow().toString());
-//
-//        //sp - p has range
-//        entry = factory.defineRange(subj_start, pred, null, conf);
-//        assertEquals(TABLE_LAYOUT.SPO, entry.getKey());
-//        expected_start = URI_MARKER_STR + subj_start.stringValue() + DELIM_BYTES_STR +
-//                URI_MARKER_STR + pred_start.stringValue();
-//        assertEquals(expected_start,
-//                entry.getValue().getStartKey().getRow().toString());
-//        expected_end = URI_MARKER_STR + subj_start.stringValue() + DELIM_BYTES_STR +
-//                URI_MARKER_STR + pred_end.stringValue();
-//        assertEquals(expected_end + DELIM_STOP + RANGE_ENDKEY_SUFFIX,
-//                entry.getValue().getEndKey().getRow().toString());
-//
-//        //s - s has range
-//        entry = factory.defineRange(subj, null, null, conf);
-//        assertEquals(TABLE_LAYOUT.SPO, entry.getKey());
-//        expected_start = URI_MARKER_STR + subj_start.stringValue();
-//        assertEquals(expected_start,
-//                entry.getValue().getStartKey().getRow().toString());
-//        expected_end = URI_MARKER_STR + subj_end.stringValue();
-//        assertEquals(expected_end + DELIM_STOP + RANGE_ENDKEY_SUFFIX,
-//                entry.getValue().getEndKey().getRow().toString());
-//    }
-//
-//    public void testPOCases() throws Exception {
-//        URI loadPerc = vf.createURI(litdupsNS, "loadPerc");
-//        URI obj = vf.createURI(litdupsNS, "uri1");
-//
-//        //po
-//        Map.Entry<TABLE_LAYOUT, Range> entry =
-//                factory.defineRange(null, loadPerc, obj, conf);
-//        assertEquals(TABLE_LAYOUT.PO, entry.getKey());
-//        String expected_start = URI_MARKER_STR + loadPerc.stringValue() + DELIM_BYTES_STR +
-//                URI_MARKER_STR + obj.stringValue();
-//        assertEquals(expected_start,
-//                entry.getValue().getStartKey().getRow().toString());
-//        assertEquals(expected_start + DELIM_STOP + RANGE_ENDKEY_SUFFIX,
-//                entry.getValue().getEndKey().getRow().toString());
-//
-//        //p
-//        entry = factory.defineRange(null, loadPerc, null, conf);
-//        assertEquals(TABLE_LAYOUT.PO, entry.getKey());
-//        expected_start = URI_MARKER_STR + loadPerc.stringValue();
-//        assertEquals(expected_start,
-//                entry.getValue().getStartKey().getRow().toString());
-//        assertEquals(expected_start + DELIM_STOP + RANGE_ENDKEY_SUFFIX,
-//                entry.getValue().getEndKey().getRow().toString());
-//    }
-//
-//    public void testPOCasesWithRanges() throws Exception {
-//        URI pred_start = vf.createURI(litdupsNS, "pred_start");
-//        URI pred_end = vf.createURI(litdupsNS, "pred_stop");
-//        URI obj_start = vf.createURI(litdupsNS, "obj_start");
-//        URI obj_end = vf.createURI(litdupsNS, "obj_stop");
-//
-//        Value pred = new RangeValue(pred_start, pred_end);
-//        Value obj = new RangeValue(obj_start, obj_end);
-//
-//        //po
-//        Map.Entry<TABLE_LAYOUT, Range> entry =
-//                factory.defineRange(null, pred_start, obj, conf);
-//        assertEquals(TABLE_LAYOUT.PO, entry.getKey());
-//        String expected_start = URI_MARKER_STR + pred_start.stringValue() + DELIM_BYTES_STR +
-//                URI_MARKER_STR + obj_start.stringValue();
-//        assertEquals(expected_start,
-//                entry.getValue().getStartKey().getRow().toString());
-//        String expected_end = URI_MARKER_STR + pred_start.stringValue() + DELIM_BYTES_STR +
-//                URI_MARKER_STR + obj_end.stringValue();
-//        assertEquals(expected_end + DELIM_STOP + RANGE_ENDKEY_SUFFIX,
-//                entry.getValue().getEndKey().getRow().toString());
-//
-//        //p
-//        entry = factory.defineRange(null, pred, null, conf);
-//        assertEquals(TABLE_LAYOUT.PO, entry.getKey());
-//        expected_start = URI_MARKER_STR + pred_start.stringValue();
-//        assertEquals(expected_start,
-//                entry.getValue().getStartKey().getRow().toString());
-//        expected_end = URI_MARKER_STR + pred_end.stringValue();
-//        assertEquals(expected_end + DELIM_STOP + RANGE_ENDKEY_SUFFIX,
-//                entry.getValue().getEndKey().getRow().toString());
-//    }
-//
-//    public void testOSPCases() throws Exception {
-//        URI cpu = vf.createURI(litdupsNS, "cpu");
-//        URI obj = vf.createURI(litdupsNS, "uri1");
-//
-//        //so
-//        Map.Entry<TABLE_LAYOUT, Range> entry =
-//                factory.defineRange(cpu, null, obj, conf);
-//        assertEquals(TABLE_LAYOUT.OSP, entry.getKey());
-//        String expected_start = URI_MARKER_STR + obj.stringValue() + DELIM_BYTES_STR +
-//                URI_MARKER_STR + cpu.stringValue();
-//        assertEquals(expected_start,
-//                entry.getValue().getStartKey().getRow().toString());
-//        assertEquals(expected_start + DELIM_STOP + RANGE_ENDKEY_SUFFIX,
-//                entry.getValue().getEndKey().getRow().toString());
-//
-//        //o
-//        entry = factory.defineRange(null, null, obj, conf);
-//        assertEquals(TABLE_LAYOUT.OSP, entry.getKey());
-//        expected_start = URI_MARKER_STR + obj.stringValue();
-//        assertEquals(expected_start,
-//                entry.getValue().getStartKey().getRow().toString());
-//        assertEquals(expected_start + DELIM_STOP + RANGE_ENDKEY_SUFFIX,
-//                entry.getValue().getEndKey().getRow().toString());
-//    }
-//
-//
-//    public void testOSPCasesWithRanges() throws Exception {
-//        URI subj_start = vf.createURI(litdupsNS, "subj_start");
-//        URI subj_end = vf.createURI(litdupsNS, "subj_stop");
-//        URI obj_start = vf.createURI(litdupsNS, "obj_start");
-//        URI obj_end = vf.createURI(litdupsNS, "obj_stop");
-//
-//        Value subj = new RangeValue(subj_start, subj_end);
-//        Value obj = new RangeValue(obj_start, obj_end);
-//
-//        //so - s should be the range
-//        Map.Entry<TABLE_LAYOUT, Range> entry =
-//                factory.defineRange(subj, null, obj_start, conf);
-//        assertEquals(TABLE_LAYOUT.OSP, entry.getKey());
-//        String expected_start = URI_MARKER_STR + obj_start.stringValue() + DELIM_BYTES_STR +
-//                URI_MARKER_STR + subj_start.stringValue();
-//        assertEquals(expected_start,
-//                entry.getValue().getStartKey().getRow().toString());
-//        String expected_end = URI_MARKER_STR + obj_start.stringValue() + DELIM_BYTES_STR +
-//                URI_MARKER_STR + subj_end.stringValue();
-//        assertEquals(expected_end + DELIM_STOP + RANGE_ENDKEY_SUFFIX,
-//                entry.getValue().getEndKey().getRow().toString());
-//
-//        //o - o is range
-//        entry = factory.defineRange(null, null, obj, conf);
-//        assertEquals(TABLE_LAYOUT.OSP, entry.getKey());
-//        expected_start = URI_MARKER_STR + obj_start.stringValue();
-//        assertEquals(expected_start,
-//                entry.getValue().getStartKey().getRow().toString());
-//        expected_end = URI_MARKER_STR + obj_end.stringValue();
-//        assertEquals(expected_end + DELIM_STOP + RANGE_ENDKEY_SUFFIX,
-//                entry.getValue().getEndKey().getRow().toString());
-//    }
-//
-//}

http://git-wip-us.apache.org/repos/asf/incubator-rya/blob/80faf06d/dao/mongodb.rya/pom.xml
----------------------------------------------------------------------
diff --git a/dao/mongodb.rya/pom.xml b/dao/mongodb.rya/pom.xml
index bda7e59..0d87fa5 100644
--- a/dao/mongodb.rya/pom.xml
+++ b/dao/mongodb.rya/pom.xml
@@ -1,30 +1,48 @@
+<?xml version='1.0'?>
+<!--
+Licensed to the Apache Software Foundation (ASF) under one
+or more contributor license agreements.  See the NOTICE file
+distributed with this work for additional information
+regarding copyright ownership.  The ASF licenses this file
+to you under the Apache License, Version 2.0 (the
+"License"); you may not use this file except in compliance
+with the License.  You may obtain a copy of the License at
+
+  http://www.apache.org/licenses/LICENSE-2.0
+
+Unless required by applicable law or agreed to in writing,
+software distributed under the License is distributed on an
+"AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+KIND, either express or implied.  See the License for the
+specific language governing permissions and limitations
+under the License.
+-->
+
 <project xmlns="http://maven.apache.org/POM/4.0.0" xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance" xsi:schemaLocation="http://maven.apache.org/POM/4.0.0 http://maven.apache.org/xsd/maven-4.0.0.xsd">
-	<modelVersion>4.0.0</modelVersion>
-	<artifactId>mongodb.rya</artifactId>
-	<name>${project.groupId}.${project.artifactId}</name>
-	<properties>
+    <modelVersion>4.0.0</modelVersion>
+    <parent>
+        <groupId>org.apache.rya</groupId>
+        <artifactId>rya.dao</artifactId>
+        <version>3.2.10-SNAPSHOT</version>
+    </parent>
+
+    <artifactId>mongodb.rya</artifactId>
+    <name>Apache Rya MongoDB DAO</name>
+
+    <dependencies>
+        <dependency>
+            <groupId>org.apache.rya</groupId>
+            <artifactId>rya.api</artifactId>
+        </dependency>
 
-	</properties>
-	<parent>
-		<groupId>mvm.rya</groupId>
-		<artifactId>rya.dao</artifactId>
-		<version>3.2.10-SNAPSHOT</version>
-	</parent>
-	<dependencies>
-		<dependency>
-			<groupId>mvm.rya</groupId>
-			<artifactId>rya.api</artifactId>
-		</dependency>
-		<dependency>
-			<groupId>org.mongodb</groupId>
-			<artifactId>mongo-java-driver</artifactId>
-			<version>2.13.0-rc0</version>
-		</dependency>
-		<dependency>
-			<groupId>de.flapdoodle.embed</groupId>
-			<artifactId>de.flapdoodle.embed.mongo</artifactId>
-			<version>1.50.0</version>
-		</dependency>
-	</dependencies>
+        <dependency>
+            <groupId>org.mongodb</groupId>
+            <artifactId>mongo-java-driver</artifactId>
+        </dependency>
+        <dependency>
+            <groupId>de.flapdoodle.embed</groupId>
+            <artifactId>de.flapdoodle.embed.mongo</artifactId>
+        </dependency>
+    </dependencies>
 
-</project>
\ No newline at end of file
+</project>

http://git-wip-us.apache.org/repos/asf/incubator-rya/blob/80faf06d/dao/mongodb.rya/src/main/java/mvm/rya/mongodb/MongoDBQueryEngine.java
----------------------------------------------------------------------
diff --git a/dao/mongodb.rya/src/main/java/mvm/rya/mongodb/MongoDBQueryEngine.java b/dao/mongodb.rya/src/main/java/mvm/rya/mongodb/MongoDBQueryEngine.java
index c215184..57548ec 100644
--- a/dao/mongodb.rya/src/main/java/mvm/rya/mongodb/MongoDBQueryEngine.java
+++ b/dao/mongodb.rya/src/main/java/mvm/rya/mongodb/MongoDBQueryEngine.java
@@ -1,5 +1,25 @@
 package mvm.rya.mongodb;
 
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ * 
+ *   http://www.apache.org/licenses/LICENSE-2.0
+ * 
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied.  See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+
+
 import info.aduna.iteration.CloseableIteration;
 
 import java.io.Closeable;

http://git-wip-us.apache.org/repos/asf/incubator-rya/blob/80faf06d/dao/mongodb.rya/src/main/java/mvm/rya/mongodb/MongoDBRdfConfiguration.java
----------------------------------------------------------------------
diff --git a/dao/mongodb.rya/src/main/java/mvm/rya/mongodb/MongoDBRdfConfiguration.java b/dao/mongodb.rya/src/main/java/mvm/rya/mongodb/MongoDBRdfConfiguration.java
index 0054847..3c5a8ef 100644
--- a/dao/mongodb.rya/src/main/java/mvm/rya/mongodb/MongoDBRdfConfiguration.java
+++ b/dao/mongodb.rya/src/main/java/mvm/rya/mongodb/MongoDBRdfConfiguration.java
@@ -1,5 +1,25 @@
 package mvm.rya.mongodb;
 
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ * 
+ *   http://www.apache.org/licenses/LICENSE-2.0
+ * 
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied.  See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+
+
 
 import java.util.List;
 
@@ -84,7 +104,7 @@ public class MongoDBRdfConfiguration extends RdfCloudTripleStoreConfiguration {
     
     public void setAdditionalIndexers(Class<? extends RyaSecondaryIndexer>... indexers) {
         List<String> strs = Lists.newArrayList();
-        for (Class ai : indexers){
+        for (Class<?> ai : indexers){
             strs.add(ai.getName());
         }
         

http://git-wip-us.apache.org/repos/asf/incubator-rya/blob/80faf06d/dao/mongodb.rya/src/main/java/mvm/rya/mongodb/MongoDBRyaDAO.java
----------------------------------------------------------------------
diff --git a/dao/mongodb.rya/src/main/java/mvm/rya/mongodb/MongoDBRyaDAO.java b/dao/mongodb.rya/src/main/java/mvm/rya/mongodb/MongoDBRyaDAO.java
index d3f9ee5..1f341dc 100644
--- a/dao/mongodb.rya/src/main/java/mvm/rya/mongodb/MongoDBRyaDAO.java
+++ b/dao/mongodb.rya/src/main/java/mvm/rya/mongodb/MongoDBRyaDAO.java
@@ -1,5 +1,25 @@
 package mvm.rya.mongodb;
 
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ * 
+ *   http://www.apache.org/licenses/LICENSE-2.0
+ * 
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied.  See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+
+
 import java.io.IOException;
 import java.net.UnknownHostException;
 import java.util.ArrayList;

http://git-wip-us.apache.org/repos/asf/incubator-rya/blob/80faf06d/dao/mongodb.rya/src/main/java/mvm/rya/mongodb/dao/MongoDBNamespaceManager.java
----------------------------------------------------------------------
diff --git a/dao/mongodb.rya/src/main/java/mvm/rya/mongodb/dao/MongoDBNamespaceManager.java b/dao/mongodb.rya/src/main/java/mvm/rya/mongodb/dao/MongoDBNamespaceManager.java
index 270b57f..fd9b659 100644
--- a/dao/mongodb.rya/src/main/java/mvm/rya/mongodb/dao/MongoDBNamespaceManager.java
+++ b/dao/mongodb.rya/src/main/java/mvm/rya/mongodb/dao/MongoDBNamespaceManager.java
@@ -1,5 +1,25 @@
 package mvm.rya.mongodb.dao;
 
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ * 
+ *   http://www.apache.org/licenses/LICENSE-2.0
+ * 
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied.  See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+
+
 import mvm.rya.api.domain.RyaStatement;
 import mvm.rya.api.persist.RyaNamespaceManager;
 import mvm.rya.api.persist.query.RyaQuery;

http://git-wip-us.apache.org/repos/asf/incubator-rya/blob/80faf06d/dao/mongodb.rya/src/main/java/mvm/rya/mongodb/dao/MongoDBStorageStrategy.java
----------------------------------------------------------------------
diff --git a/dao/mongodb.rya/src/main/java/mvm/rya/mongodb/dao/MongoDBStorageStrategy.java b/dao/mongodb.rya/src/main/java/mvm/rya/mongodb/dao/MongoDBStorageStrategy.java
index 093f2dd..8a1004f 100644
--- a/dao/mongodb.rya/src/main/java/mvm/rya/mongodb/dao/MongoDBStorageStrategy.java
+++ b/dao/mongodb.rya/src/main/java/mvm/rya/mongodb/dao/MongoDBStorageStrategy.java
@@ -1,5 +1,25 @@
 package mvm.rya.mongodb.dao;
 
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ * 
+ *   http://www.apache.org/licenses/LICENSE-2.0
+ * 
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied.  See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+
+
 import mvm.rya.api.domain.RyaStatement;
 import mvm.rya.api.persist.query.RyaQuery;
 

http://git-wip-us.apache.org/repos/asf/incubator-rya/blob/80faf06d/dao/mongodb.rya/src/main/java/mvm/rya/mongodb/dao/SimpleMongoDBNamespaceManager.java
----------------------------------------------------------------------
diff --git a/dao/mongodb.rya/src/main/java/mvm/rya/mongodb/dao/SimpleMongoDBNamespaceManager.java b/dao/mongodb.rya/src/main/java/mvm/rya/mongodb/dao/SimpleMongoDBNamespaceManager.java
index 1847b94..259420b 100644
--- a/dao/mongodb.rya/src/main/java/mvm/rya/mongodb/dao/SimpleMongoDBNamespaceManager.java
+++ b/dao/mongodb.rya/src/main/java/mvm/rya/mongodb/dao/SimpleMongoDBNamespaceManager.java
@@ -1,24 +1,36 @@
 package mvm.rya.mongodb.dao;
 
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ * 
+ *   http://www.apache.org/licenses/LICENSE-2.0
+ * 
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied.  See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+
+
 import info.aduna.iteration.CloseableIteration;
 
 import java.security.MessageDigest;
 import java.security.NoSuchAlgorithmException;
-import java.util.HashSet;
 import java.util.Map;
-import java.util.Set;
 
-import mvm.rya.api.domain.RyaStatement;
-import mvm.rya.api.domain.RyaType;
-import mvm.rya.api.domain.RyaURI;
 import mvm.rya.api.persist.RyaDAOException;
-import mvm.rya.api.persist.RyaNamespaceManager;
-import mvm.rya.api.persist.query.RyaQuery;
 import mvm.rya.mongodb.MongoDBRdfConfiguration;
 
 import org.apache.commons.codec.binary.Hex;
 import org.openrdf.model.Namespace;
-import org.openrdf.model.impl.ValueFactoryImpl;
 
 import com.mongodb.BasicDBObject;
 import com.mongodb.DBCollection;

http://git-wip-us.apache.org/repos/asf/incubator-rya/blob/80faf06d/dao/mongodb.rya/src/main/java/mvm/rya/mongodb/dao/SimpleMongoDBStorageStrategy.java
----------------------------------------------------------------------
diff --git a/dao/mongodb.rya/src/main/java/mvm/rya/mongodb/dao/SimpleMongoDBStorageStrategy.java b/dao/mongodb.rya/src/main/java/mvm/rya/mongodb/dao/SimpleMongoDBStorageStrategy.java
index 6de5b89..24d16c1 100644
--- a/dao/mongodb.rya/src/main/java/mvm/rya/mongodb/dao/SimpleMongoDBStorageStrategy.java
+++ b/dao/mongodb.rya/src/main/java/mvm/rya/mongodb/dao/SimpleMongoDBStorageStrategy.java
@@ -1,5 +1,25 @@
 package mvm.rya.mongodb.dao;
 
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ * 
+ *   http://www.apache.org/licenses/LICENSE-2.0
+ * 
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied.  See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+
+
 import java.security.MessageDigest;
 import java.security.NoSuchAlgorithmException;
 import java.util.Map;



[53/56] [abbrv] incubator-rya git commit: RYA-12 Adding support for Additional Iterators on Core Tables

Posted by mi...@apache.org.
RYA-12 Adding support for Additional Iterators on Core Tables

Note: the config file format may change in the future. This is really
just a change to the client Config API and Query Engine.

Also pulling in an orphan "Manual Flush" commit that did not make it
into the repo.


Project: http://git-wip-us.apache.org/repos/asf/incubator-rya/repo
Commit: http://git-wip-us.apache.org/repos/asf/incubator-rya/commit/e6be84a4
Tree: http://git-wip-us.apache.org/repos/asf/incubator-rya/tree/e6be84a4
Diff: http://git-wip-us.apache.org/repos/asf/incubator-rya/diff/e6be84a4

Branch: refs/heads/master
Commit: e6be84a407e05c66cb4b4b6ef225d7e07dd10fcf
Parents: 990f1ff
Author: Aaron Mihalik <mi...@alum.mit.edu>
Authored: Fri Dec 4 20:25:51 2015 -0500
Committer: Aaron Mihalik <mi...@alum.mit.edu>
Committed: Fri Dec 4 20:25:51 2015 -0500

----------------------------------------------------------------------
 .../rya/accumulo/AccumuloRdfConfiguration.java  | 74 +++++++++++++++++++-
 .../java/mvm/rya/accumulo/AccumuloRyaDAO.java   | 33 ++++-----
 .../accumulo/query/AccumuloRyaQueryEngine.java  |  6 ++
 .../accumulo/AccumuloRdfConfigurationTest.java  | 34 ++++++---
 .../mvm/rya/accumulo/AccumuloRyaDAOTest.java    | 58 +++++++++++++--
 .../accumulo/entity/AccumuloDocIndexerTest.java |  3 +-
 6 files changed, 173 insertions(+), 35 deletions(-)
----------------------------------------------------------------------
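
As a usage sketch (not part of the diff below), the new Config API can be
exercised roughly as follows; the dao, conf, and patterns arguments stand for
an already-initialized AccumuloRyaDAO, its AccumuloRdfConfiguration, and a set
of query patterns, and the iterator choice mirrors the unit test added in this
commit:

    import info.aduna.iteration.CloseableIteration;
    import java.util.Collection;
    import mvm.rya.accumulo.AccumuloRdfConfiguration;
    import mvm.rya.accumulo.AccumuloRyaDAO;
    import mvm.rya.api.domain.RyaStatement;
    import mvm.rya.api.persist.RyaDAOException;
    import org.apache.accumulo.core.client.IteratorSetting;
    import org.apache.accumulo.core.iterators.FirstEntryInRowIterator;

    public class IteratorConfigSketch {
        // Counts results with an extra server-side iterator attached to
        // every core-table scan issued through the query engine.
        static int countFirstEntries(AccumuloRyaDAO dao,
                                     AccumuloRdfConfiguration conf,
                                     Collection<RyaStatement> patterns)
                throws Exception {
            AccumuloRdfConfiguration queryConf = new AccumuloRdfConfiguration(conf);
            // Only the first entry of each row comes back from the scan.
            queryConf.setAdditionalIterators(
                    new IteratorSetting(3, FirstEntryInRowIterator.class));
            CloseableIteration<RyaStatement, RyaDAOException> iter =
                    dao.getQueryEngine().batchQuery(patterns, queryConf);
            int count = 0;
            while (iter.hasNext()) { iter.next(); count++; }
            iter.close();
            return count;
        }
    }

With the FirstEntryInRowIterator attached, a row that carries several column
qualifiers counts once instead of once per entry, which is what the new
AccumuloRyaDAOTest below asserts.
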


http://git-wip-us.apache.org/repos/asf/incubator-rya/blob/e6be84a4/dao/accumulo.rya/src/main/java/mvm/rya/accumulo/AccumuloRdfConfiguration.java
----------------------------------------------------------------------
diff --git a/dao/accumulo.rya/src/main/java/mvm/rya/accumulo/AccumuloRdfConfiguration.java b/dao/accumulo.rya/src/main/java/mvm/rya/accumulo/AccumuloRdfConfiguration.java
index 147228b..709ceb9 100644
--- a/dao/accumulo.rya/src/main/java/mvm/rya/accumulo/AccumuloRdfConfiguration.java
+++ b/dao/accumulo.rya/src/main/java/mvm/rya/accumulo/AccumuloRdfConfiguration.java
@@ -21,11 +21,17 @@ package mvm.rya.accumulo;
 
 
 
+import java.util.Collection;
+import java.util.HashMap;
+import java.util.Iterator;
 import java.util.List;
+import java.util.Map;
+import java.util.Map.Entry;
 
 import mvm.rya.accumulo.experimental.AccumuloIndexer;
 import mvm.rya.api.RdfCloudTripleStoreConfiguration;
 
+import org.apache.accumulo.core.client.IteratorSetting;
 import org.apache.accumulo.core.security.Authorizations;
 import org.apache.hadoop.conf.Configuration;
 
@@ -42,6 +48,17 @@ public class AccumuloRdfConfiguration extends RdfCloudTripleStoreConfiguration {
     public static final String MAXRANGES_SCANNER = "ac.query.maxranges";
     
     public static final String CONF_ADDITIONAL_INDEXERS = "ac.additional.indexers";
+    
+    public static final String CONF_FLUSH_EACH_UPDATE = "ac.dao.flush";
+
+    public static final String ITERATOR_SETTINGS_SIZE = "ac.iterators.size";
+    public static final String ITERATOR_SETTINGS_BASE = "ac.iterators.%d.";
+    public static final String ITERATOR_SETTINGS_NAME = ITERATOR_SETTINGS_BASE + "name";
+    public static final String ITERATOR_SETTINGS_CLASS = ITERATOR_SETTINGS_BASE + "iteratorClass";
+    public static final String ITERATOR_SETTINGS_PRIORITY = ITERATOR_SETTINGS_BASE + "priority";
+    public static final String ITERATOR_SETTINGS_OPTIONS_SIZE = ITERATOR_SETTINGS_BASE + "optionsSize";
+    public static final String ITERATOR_SETTINGS_OPTIONS_KEY = ITERATOR_SETTINGS_BASE + "option.%d.name";
+    public static final String ITERATOR_SETTINGS_OPTIONS_VALUE = ITERATOR_SETTINGS_BASE + "option.%d.value";
 
     public AccumuloRdfConfiguration() {
         super();
@@ -73,7 +90,7 @@ public class AccumuloRdfConfiguration extends RdfCloudTripleStoreConfiguration {
 
     public void setAdditionalIndexers(Class<? extends AccumuloIndexer>... indexers) {
         List<String> strs = Lists.newArrayList();
-        for (Class ai : indexers){
+        for (Class<? extends AccumuloIndexer> ai : indexers){
             strs.add(ai.getName());
         }
         
@@ -83,4 +100,59 @@ public class AccumuloRdfConfiguration extends RdfCloudTripleStoreConfiguration {
     public List<AccumuloIndexer> getAdditionalIndexers() {
         return getInstances(CONF_ADDITIONAL_INDEXERS, AccumuloIndexer.class);
     }
+    public boolean flushEachUpdate(){
+        return getBoolean(CONF_FLUSH_EACH_UPDATE, true);
+    }
+
+    public void setFlush(boolean flush){
+        setBoolean(CONF_FLUSH_EACH_UPDATE, flush);
+    }
+
+    public void setAdditionalIterators(IteratorSetting... additionalIterators){
+        //TODO do we need to worry about cleaning up
+        this.set(ITERATOR_SETTINGS_SIZE, Integer.toString(additionalIterators.length));
+        int i = 0;
+        for(IteratorSetting iterator : additionalIterators) {
+            this.set(String.format(ITERATOR_SETTINGS_NAME, i), iterator.getName());
+            this.set(String.format(ITERATOR_SETTINGS_CLASS, i), iterator.getIteratorClass());
+            this.set(String.format(ITERATOR_SETTINGS_PRIORITY, i), Integer.toString(iterator.getPriority()));
+            Map<String, String> options = iterator.getOptions();
+
+            this.set(String.format(ITERATOR_SETTINGS_OPTIONS_SIZE, i), Integer.toString(options.size()));
+            Iterator<Entry<String, String>> it = options.entrySet().iterator();
+            int j = 0;
+            while(it.hasNext()) {
+                Entry<String, String> item = it.next();
+                this.set(String.format(ITERATOR_SETTINGS_OPTIONS_KEY, i, j), item.getKey());
+                this.set(String.format(ITERATOR_SETTINGS_OPTIONS_VALUE, i, j), item.getValue());
+                j++;
+            }
+            i++;
+        }
+    }
+
+    public IteratorSetting[] getAdditionalIterators(){
+        int size = Integer.valueOf(this.get(ITERATOR_SETTINGS_SIZE, "0"));
+        if(size == 0) {
+            return new IteratorSetting[0];
+        }
+
+        IteratorSetting[] settings = new IteratorSetting[size];
+        for(int i = 0; i < size; i++) {
+            String name = this.get(String.format(ITERATOR_SETTINGS_NAME, i));
+            String iteratorClass = this.get(String.format(ITERATOR_SETTINGS_CLASS, i));
+            int priority = Integer.valueOf(this.get(String.format(ITERATOR_SETTINGS_PRIORITY, i)));
+
+            int optionsSize = Integer.valueOf(this.get(String.format(ITERATOR_SETTINGS_OPTIONS_SIZE, i)));
+            Map<String, String> options = new HashMap<String, String>(optionsSize);
+            for(int j = 0; j < optionsSize; j++) {
+                String key = this.get(String.format(ITERATOR_SETTINGS_OPTIONS_KEY, i, j));
+                String value = this.get(String.format(ITERATOR_SETTINGS_OPTIONS_VALUE, i, j));
+                options.put(key, value);
+            }
+            settings[i] = new IteratorSetting(priority, name, iteratorClass, options);
+        }
+
+        return settings;
+    }
 }
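
The constants above define a flat key scheme for persisting each
IteratorSetting inside the Hadoop Configuration. A small sketch of the round
trip (the filter class name and option are hypothetical placeholders):

    import java.util.HashMap;
    import java.util.Map;
    import mvm.rya.accumulo.AccumuloRdfConfiguration;
    import org.apache.accumulo.core.client.IteratorSetting;

    public class IteratorKeyFormatSketch {
        public static void main(String[] args) {
            AccumuloRdfConfiguration conf = new AccumuloRdfConfiguration();
            Map<String, String> opts = new HashMap<String, String>();
            opts.put("negate", "false");                       // hypothetical option
            conf.setAdditionalIterators(new IteratorSetting(
                    15, "filter1", "org.example.MyFilter", opts)); // hypothetical class
            // The call above flattens the setting into plain config entries:
            //   ac.iterators.size             = 1
            //   ac.iterators.0.name           = filter1
            //   ac.iterators.0.iteratorClass  = org.example.MyFilter
            //   ac.iterators.0.priority       = 15
            //   ac.iterators.0.optionsSize    = 1
            //   ac.iterators.0.option.0.name  = negate
            //   ac.iterators.0.option.0.value = false
            System.out.println(conf.get("ac.iterators.0.iteratorClass"));
            // getAdditionalIterators() reverses the mapping back into
            // IteratorSetting objects, as the new unit test verifies.
        }
    }
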

http://git-wip-us.apache.org/repos/asf/incubator-rya/blob/e6be84a4/dao/accumulo.rya/src/main/java/mvm/rya/accumulo/AccumuloRyaDAO.java
----------------------------------------------------------------------
diff --git a/dao/accumulo.rya/src/main/java/mvm/rya/accumulo/AccumuloRyaDAO.java b/dao/accumulo.rya/src/main/java/mvm/rya/accumulo/AccumuloRyaDAO.java
index 84fae68..8a6bd00 100644
--- a/dao/accumulo.rya/src/main/java/mvm/rya/accumulo/AccumuloRyaDAO.java
+++ b/dao/accumulo.rya/src/main/java/mvm/rya/accumulo/AccumuloRyaDAO.java
@@ -79,15 +79,11 @@ import mvm.rya.api.resolver.RyaTripleContext;
 import mvm.rya.api.resolver.triple.TripleRow;
 import mvm.rya.api.resolver.triple.TripleRowResolverException;
 
-/**
- * Class AccumuloRyaDAO
- * Date: Feb 29, 2012
- * Time: 12:37:22 PM
- */
 public class AccumuloRyaDAO implements RyaDAO<AccumuloRdfConfiguration>, RyaNamespaceManager<AccumuloRdfConfiguration> {
     private static final Log logger = LogFactory.getLog(AccumuloRyaDAO.class);
 
     private boolean initialized = false;
+    private boolean flushEachUpdate = true;
     private Connector connector;
     private BatchWriterConfig batchWriterConfig;
 
@@ -134,6 +130,8 @@ public class AccumuloRyaDAO implements RyaDAO<AccumuloRdfConfiguration>, RyaName
 
             secondaryIndexers = conf.getAdditionalIndexers();
 
+            flushEachUpdate = conf.flushEachUpdate();
+            
             TableOperations tableOperations = connector.tableOperations();
             AccumuloRdfUtils.createTableIfNotExist(tableOperations, tableLayoutStrategy.getSpo());
             AccumuloRdfUtils.createTableIfNotExist(tableOperations, tableLayoutStrategy.getPo());
@@ -151,9 +149,8 @@ public class AccumuloRyaDAO implements RyaDAO<AccumuloRdfConfiguration>, RyaName
             bw_po = mt_bw.getBatchWriter(tableLayoutStrategy.getPo());
             bw_osp = mt_bw.getBatchWriter(tableLayoutStrategy.getOsp());
 
-            bw_ns = connector.createBatchWriter(tableLayoutStrategy.getNs(), MAX_MEMORY,
-                    MAX_TIME, 1);
-
+            bw_ns = mt_bw.getBatchWriter(tableLayoutStrategy.getNs());
+            
             for (AccumuloIndexer index : secondaryIndexers) {
                 index.setMultiTableBatchWriter(mt_bw);
             }
@@ -193,7 +190,6 @@ public class AccumuloRyaDAO implements RyaDAO<AccumuloRdfConfiguration>, RyaName
     @Override
     public void delete(RyaStatement stmt, AccumuloRdfConfiguration aconf) throws RyaDAOException {
         this.delete(Iterators.singletonIterator(stmt), aconf);
-        //TODO currently all indexers do not support delete
     }
 
     @Override
@@ -211,8 +207,7 @@ public class AccumuloRyaDAO implements RyaDAO<AccumuloRdfConfiguration>, RyaName
                     index.deleteStatement(stmt);
                 }
             }
-            mt_bw.flush();
-            //TODO currently all indexers do not support delete
+            if (flushEachUpdate) { mt_bw.flush(); }
         } catch (Exception e) {
             throw new RyaDAOException(e);
         }
@@ -299,7 +294,7 @@ public class AccumuloRyaDAO implements RyaDAO<AccumuloRdfConfiguration>, RyaName
                 }
             }
 
-            mt_bw.flush();
+            if (flushEachUpdate) { mt_bw.flush(); }
         } catch (Exception e) {
             throw new RyaDAOException(e);
         }
@@ -314,10 +309,8 @@ public class AccumuloRyaDAO implements RyaDAO<AccumuloRdfConfiguration>, RyaName
         try {
             initialized = false;
             mt_bw.flush();
-            bw_ns.flush();
 
             mt_bw.close();
-            bw_ns.close();
         } catch (Exception e) {
             throw new RyaDAOException(e);
         }
@@ -329,7 +322,7 @@ public class AccumuloRyaDAO implements RyaDAO<AccumuloRdfConfiguration>, RyaName
             Mutation m = new Mutation(new Text(pfx));
             m.put(INFO_NAMESPACE_TXT, EMPTY_TEXT, new Value(namespace.getBytes()));
             bw_ns.addMutation(m);
-            bw_ns.flush();
+            if (flushEachUpdate) { mt_bw.flush(); }
         } catch (Exception e) {
             throw new RyaDAOException(e);
         }
@@ -360,7 +353,7 @@ public class AccumuloRyaDAO implements RyaDAO<AccumuloRdfConfiguration>, RyaName
             Mutation del = new Mutation(new Text(pfx));
             del.putDelete(INFO_NAMESPACE_TXT, EMPTY_TEXT);
             bw_ns.addMutation(del);
-            bw_ns.flush();
+            if (flushEachUpdate) { mt_bw.flush(); }
         } catch (Exception e) {
             throw new RyaDAOException(e);
         }
@@ -464,6 +457,14 @@ public class AccumuloRyaDAO implements RyaDAO<AccumuloRdfConfiguration>, RyaName
         this.queryEngine = queryEngine;
     }
 
+    public void flush() throws RyaDAOException {
+        try {
+            mt_bw.flush();
+        } catch (MutationsRejectedException e) {
+            throw new RyaDAOException(e);
+        }
+    }
+
     protected String[] getTables() {
         // core tables
         List<String> tableNames = Lists.newArrayList(
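
The flushEachUpdate/flush() pair above is the "Manual Flush" behavior pulled
in by this commit. A bulk-load sketch under that mode might look like the
following; the setConnector call and the surrounding wiring are assumptions
based on the DAO's fields, not something this diff shows:

    import java.util.List;
    import mvm.rya.accumulo.AccumuloRdfConfiguration;
    import mvm.rya.accumulo.AccumuloRyaDAO;
    import mvm.rya.api.domain.RyaStatement;
    import org.apache.accumulo.core.client.Connector;

    public class ManualFlushSketch {
        static void bulkLoad(Connector connector, List<RyaStatement> statements)
                throws Exception {
            AccumuloRdfConfiguration conf = new AccumuloRdfConfiguration();
            conf.setFlush(false);            // disable the per-update flush
            AccumuloRyaDAO dao = new AccumuloRyaDAO();
            dao.setConnector(connector);     // assumes an existing Connector
            dao.setConf(conf);
            dao.init();
            for (RyaStatement stmt : statements) {
                dao.add(stmt);               // buffered by the batch writer
            }
            dao.flush();                     // one explicit flush at the end
            dao.destroy();
        }
    }

Deferring the flush lets the MultiTableBatchWriter batch mutations across the
core tables instead of flushing after every add or delete.
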

http://git-wip-us.apache.org/repos/asf/incubator-rya/blob/e6be84a4/dao/accumulo.rya/src/main/java/mvm/rya/accumulo/query/AccumuloRyaQueryEngine.java
----------------------------------------------------------------------
diff --git a/dao/accumulo.rya/src/main/java/mvm/rya/accumulo/query/AccumuloRyaQueryEngine.java b/dao/accumulo.rya/src/main/java/mvm/rya/accumulo/query/AccumuloRyaQueryEngine.java
index 1d0d9c9..869a128 100644
--- a/dao/accumulo.rya/src/main/java/mvm/rya/accumulo/query/AccumuloRyaQueryEngine.java
+++ b/dao/accumulo.rya/src/main/java/mvm/rya/accumulo/query/AccumuloRyaQueryEngine.java
@@ -388,6 +388,12 @@ public class AccumuloRyaQueryEngine implements RyaQueryEngine<AccumuloRdfConfigu
             RegExFilter.setRegexs(setting, regex, null, null, null, false);
             scanner.addScanIterator(setting);
         }
+        if (conf instanceof AccumuloRdfConfiguration) {
+            //TODO should we take the iterator settings as is or should we adjust the priority based on the above?
+            for (IteratorSetting itr : ((AccumuloRdfConfiguration)conf).getAdditionalIterators()) {
+                scanner.addScanIterator(itr);
+            }
+        }
     }
 
     @Override

http://git-wip-us.apache.org/repos/asf/incubator-rya/blob/e6be84a4/dao/accumulo.rya/src/test/java/mvm/rya/accumulo/AccumuloRdfConfigurationTest.java
----------------------------------------------------------------------
diff --git a/dao/accumulo.rya/src/test/java/mvm/rya/accumulo/AccumuloRdfConfigurationTest.java b/dao/accumulo.rya/src/test/java/mvm/rya/accumulo/AccumuloRdfConfigurationTest.java
index b7c9079..ffd316e 100644
--- a/dao/accumulo.rya/src/test/java/mvm/rya/accumulo/AccumuloRdfConfigurationTest.java
+++ b/dao/accumulo.rya/src/test/java/mvm/rya/accumulo/AccumuloRdfConfigurationTest.java
@@ -21,20 +21,19 @@ package mvm.rya.accumulo;
 
 
 
+import static org.junit.Assert.assertEquals;
+import static org.junit.Assert.assertTrue;
+
+import java.util.Arrays;
+import java.util.HashMap;
+import java.util.Map;
+
+import org.apache.accumulo.core.client.IteratorSetting;
 import org.apache.accumulo.core.security.Authorizations;
 import org.junit.Test;
 import org.slf4j.Logger;
 import org.slf4j.LoggerFactory;
 
-import java.util.Arrays;
-
-import static org.junit.Assert.assertEquals;
-import static org.junit.Assert.assertTrue;
-
-/**
- * Date: 1/28/13
- * Time: 8:36 AM
- */
 public class AccumuloRdfConfigurationTest {
     private static final Logger logger = LoggerFactory.getLogger(AccumuloRdfConfigurationTest.class);
 
@@ -56,4 +55,21 @@ public class AccumuloRdfConfigurationTest {
         assertEquals(str, conf.getAuth());
         assertEquals(auths, conf.getAuthorizations());
     }
+
+    @Test
+    public void testIterators() {
+        AccumuloRdfConfiguration conf = new AccumuloRdfConfiguration();
+
+        Map<String, String> options = new HashMap<String, String>();
+        options.put("key1", "value1");
+        options.put("key2", "value2");
+        IteratorSetting setting = new IteratorSetting(1, "test", "test2", options);
+
+        conf.setAdditionalIterators(setting);
+        IteratorSetting[] iteratorSettings = conf.getAdditionalIterators();
+        assertTrue(iteratorSettings.length == 1);
+
+        assertEquals(setting, iteratorSettings[0]);
+
+    }
 }

http://git-wip-us.apache.org/repos/asf/incubator-rya/blob/e6be84a4/dao/accumulo.rya/src/test/java/mvm/rya/accumulo/AccumuloRyaDAOTest.java
----------------------------------------------------------------------
diff --git a/dao/accumulo.rya/src/test/java/mvm/rya/accumulo/AccumuloRyaDAOTest.java b/dao/accumulo.rya/src/test/java/mvm/rya/accumulo/AccumuloRyaDAOTest.java
index ab4528b..5c30e67 100644
--- a/dao/accumulo.rya/src/test/java/mvm/rya/accumulo/AccumuloRyaDAOTest.java
+++ b/dao/accumulo.rya/src/test/java/mvm/rya/accumulo/AccumuloRyaDAOTest.java
@@ -21,9 +21,18 @@ package mvm.rya.accumulo;
 
 
 
+import static org.junit.Assert.assertEquals;
+import static org.junit.Assert.assertFalse;
+import static org.junit.Assert.assertNull;
+import static org.junit.Assert.assertTrue;
 import info.aduna.iteration.CloseableIteration;
+
+import java.util.ArrayList;
+import java.util.Collection;
+import java.util.List;
+import java.util.UUID;
+
 import mvm.rya.accumulo.query.AccumuloRyaQueryEngine;
-import mvm.rya.api.RdfCloudTripleStoreUtils;
 import mvm.rya.api.domain.RyaStatement;
 import mvm.rya.api.domain.RyaType;
 import mvm.rya.api.domain.RyaURI;
@@ -33,10 +42,11 @@ import mvm.rya.api.resolver.RdfToRyaConversions;
 import mvm.rya.api.resolver.RyaContext;
 
 import org.apache.accumulo.core.client.Connector;
+import org.apache.accumulo.core.client.IteratorSetting;
 import org.apache.accumulo.core.client.Scanner;
 import org.apache.accumulo.core.client.TableNotFoundException;
 import org.apache.accumulo.core.client.mock.MockInstance;
-import org.calrissian.mango.collect.CloseableIterable;
+import org.apache.accumulo.core.iterators.FirstEntryInRowIterator;
 import org.calrissian.mango.collect.FluentCloseableIterable;
 import org.junit.After;
 import org.junit.Before;
@@ -44,11 +54,6 @@ import org.junit.Test;
 import org.openrdf.model.ValueFactory;
 import org.openrdf.model.impl.ValueFactoryImpl;
 import org.openrdf.model.vocabulary.XMLSchema;
-import org.openrdf.query.BindingSet;
-
-import java.util.*;
-
-import static org.junit.Assert.*;
 
 /**
  * Class AccumuloRdfDAOTest
@@ -631,6 +636,45 @@ public class AccumuloRyaDAOTest {
         assertFalse(dao.isInitialized());
     }
 
+    @Test
+    public void testQueryWithIterators() throws Exception {
+        RyaURI cpu = new RyaURI(litdupsNS + "cpu");
+        RyaURI loadPerc = new RyaURI(litdupsNS + "loadPerc");
+        RyaURI uri1 = new RyaURI(litdupsNS + "uri1");
+        dao.add(new RyaStatement(cpu, loadPerc, uri1, null, "qual1"));
+        dao.add(new RyaStatement(cpu, loadPerc, uri1, null, "qual2"));
+
+        AccumuloRyaQueryEngine queryEngine = dao.getQueryEngine();
+
+        AccumuloRdfConfiguration queryConf = new AccumuloRdfConfiguration(conf);
+        IteratorSetting firstEntryInRow = new IteratorSetting(3 /* correct value?? */, FirstEntryInRowIterator.class);
+        queryConf.setAdditionalIterators(firstEntryInRow);
+
+        Collection<RyaStatement> coll = new ArrayList<>();
+        coll.add(new RyaStatement(null, loadPerc, uri1));
+        CloseableIteration<RyaStatement, RyaDAOException> iter = queryEngine.batchQuery(coll, queryConf);
+        int count = 0;
+        while (iter.hasNext()) {
+            count++;
+            iter.next();
+        }
+        iter.close();
+        assertEquals(1, count);
+
+        //Assert that without the iterator we get 2
+        coll = new ArrayList<>();
+        coll.add(new RyaStatement(null, loadPerc, uri1));
+        iter = queryEngine.batchQuery(coll, conf);
+        count = 0;
+        while (iter.hasNext()) {
+            count++;
+            iter.next();
+        }
+        iter.close();
+        assertEquals(2, count);
+
+    }
+
     private boolean areTablesEmpty() throws TableNotFoundException {
         for (String table : dao.getTables()) {
             if (tableExists(table)) {

http://git-wip-us.apache.org/repos/asf/incubator-rya/blob/e6be84a4/extras/indexing/src/test/java/mvm/rya/indexing/accumulo/entity/AccumuloDocIndexerTest.java
----------------------------------------------------------------------
diff --git a/extras/indexing/src/test/java/mvm/rya/indexing/accumulo/entity/AccumuloDocIndexerTest.java b/extras/indexing/src/test/java/mvm/rya/indexing/accumulo/entity/AccumuloDocIndexerTest.java
index e7e06d9..6237697 100644
--- a/extras/indexing/src/test/java/mvm/rya/indexing/accumulo/entity/AccumuloDocIndexerTest.java
+++ b/extras/indexing/src/test/java/mvm/rya/indexing/accumulo/entity/AccumuloDocIndexerTest.java
@@ -26,14 +26,12 @@ import java.util.Collection;
 import java.util.List;
 import java.util.Map;
 
-import junit.framework.Assert;
 import mvm.rya.accumulo.AccumuloRdfConfiguration;
 import mvm.rya.accumulo.RyaTableMutationsFactory;
 import mvm.rya.api.RdfCloudTripleStoreConstants;
 import mvm.rya.api.domain.RyaStatement;
 import mvm.rya.api.domain.RyaType;
 import mvm.rya.api.domain.RyaURI;
-import mvm.rya.api.layout.TablePrefixLayoutStrategy;
 import mvm.rya.api.resolver.RyaToRdfConversions;
 import mvm.rya.api.resolver.RyaTripleContext;
 import mvm.rya.indexing.accumulo.ConfigUtils;
@@ -43,6 +41,7 @@ import org.apache.accumulo.core.client.Connector;
 import org.apache.accumulo.core.client.mock.MockInstance;
 import org.apache.accumulo.core.data.Mutation;
 import org.apache.hadoop.conf.Configuration;
+import org.junit.Assert;
 import org.junit.Before;
 import org.junit.Test;
 import org.openrdf.model.Value;


[13/56] [abbrv] incubator-rya git commit: RYA-7 POM and License Clean-up for Apache Move

Posted by mi...@apache.org.
http://git-wip-us.apache.org/repos/asf/incubator-rya/blob/80faf06d/sail/rya.sail.impl/src/main/java/mvm/rya/rdftriplestore/inference/InferUnion.java
----------------------------------------------------------------------
diff --git a/sail/rya.sail.impl/src/main/java/mvm/rya/rdftriplestore/inference/InferUnion.java b/sail/rya.sail.impl/src/main/java/mvm/rya/rdftriplestore/inference/InferUnion.java
deleted file mode 100644
index 98923c0..0000000
--- a/sail/rya.sail.impl/src/main/java/mvm/rya/rdftriplestore/inference/InferUnion.java
+++ /dev/null
@@ -1,47 +0,0 @@
-package mvm.rya.rdftriplestore.inference;
-
-/*
- * #%L
- * mvm.rya.rya.sail.impl
- * %%
- * Copyright (C) 2014 Rya
- * %%
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * You may obtain a copy of the License at
- * 
- *      http://www.apache.org/licenses/LICENSE-2.0
- * 
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- * #L%
- */
-
-import org.openrdf.query.algebra.TupleExpr;
-import org.openrdf.query.algebra.Union;
-
-import java.util.HashMap;
-import java.util.Map;
-
-/**
- * Class InferUnion
- * Date: Mar 14, 2012
- * Time: 12:43:49 PM
- */
-public class InferUnion extends Union {
-    private Map<String, String> properties = new HashMap<String, String>();
-
-    public InferUnion() {
-    }
-
-    public InferUnion(TupleExpr leftArg, TupleExpr rightArg) {
-        super(leftArg, rightArg);
-    }
-
-    public Map<String, String> getProperties() {
-        return properties;
-    }
-}

http://git-wip-us.apache.org/repos/asf/incubator-rya/blob/80faf06d/sail/rya.sail.impl/src/main/java/mvm/rya/rdftriplestore/inference/InferenceEngine.java
----------------------------------------------------------------------
diff --git a/sail/rya.sail.impl/src/main/java/mvm/rya/rdftriplestore/inference/InferenceEngine.java b/sail/rya.sail.impl/src/main/java/mvm/rya/rdftriplestore/inference/InferenceEngine.java
deleted file mode 100644
index 8bdb15f..0000000
--- a/sail/rya.sail.impl/src/main/java/mvm/rya/rdftriplestore/inference/InferenceEngine.java
+++ /dev/null
@@ -1,409 +0,0 @@
-package mvm.rya.rdftriplestore.inference;
-
-/*
- * #%L
- * mvm.rya.rya.sail.impl
- * %%
- * Copyright (C) 2014 Rya
- * %%
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * You may obtain a copy of the License at
- * 
- *      http://www.apache.org/licenses/LICENSE-2.0
- * 
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- * #L%
- */
-
-import com.tinkerpop.blueprints.Direction;
-import com.tinkerpop.blueprints.Edge;
-import com.tinkerpop.blueprints.Graph;
-import com.tinkerpop.blueprints.Vertex;
-import com.tinkerpop.blueprints.impls.tg.TinkerGraphFactory;
-import info.aduna.iteration.CloseableIteration;
-import mvm.rya.api.RdfCloudTripleStoreConfiguration;
-import mvm.rya.api.persist.RyaDAO;
-import mvm.rya.api.persist.RyaDAOException;
-import mvm.rya.api.persist.utils.RyaDAOHelper;
-import org.openrdf.model.Resource;
-import org.openrdf.model.Statement;
-import org.openrdf.model.URI;
-import org.openrdf.model.Value;
-import org.openrdf.model.impl.StatementImpl;
-import org.openrdf.model.vocabulary.OWL;
-import org.openrdf.model.vocabulary.RDF;
-import org.openrdf.model.vocabulary.RDFS;
-import org.openrdf.query.QueryEvaluationException;
-
-import java.util.*;
-
-import static com.google.common.base.Preconditions.checkArgument;
-import static com.google.common.base.Preconditions.checkNotNull;
-
-/**
- * Will pull down inference relationships from dao every x seconds. <br>
- * Will infer extra relationships. <br>
- * Will cache relationships in Graph for later use. <br>
- */
-public class InferenceEngine {
-
-    private Graph subClassOfGraph;
-    private Graph subPropertyOfGraph;
-    private Set<URI> symmetricPropertySet;
-    private Map<URI, URI> inverseOfMap;
-    private Set<URI> transitivePropertySet;
-
-    private RyaDAO ryaDAO;
-    private RdfCloudTripleStoreConfiguration conf;
-    private boolean initialized = false;
-    private boolean schedule = true;
-
-    private long refreshGraphSchedule = 5 * 60 * 1000; //5 min
-    private Timer timer;
-    public static final String URI_PROP = "uri";
-
-    public void init() throws InferenceEngineException {
-        try {
-            if (isInitialized()) {
-                return;
-            }
-
-            checkNotNull(conf, "Configuration is null");
-            checkNotNull(ryaDAO, "RdfDao is null");
-            checkArgument(ryaDAO.isInitialized(), "RdfDao is not initialized");
-
-            if (schedule) {
-                timer = new Timer(InferenceEngine.class.getName());
-                timer.scheduleAtFixedRate(new TimerTask() {
-
-                    @Override
-                    public void run() {
-                        try {
-                            refreshGraph();
-                        } catch (InferenceEngineException e) {
-                            throw new RuntimeException(e);
-                        }
-                    }
-
-                }, refreshGraphSchedule, refreshGraphSchedule);
-            }
-            refreshGraph();
-            setInitialized(true);
-        } catch (RyaDAOException e) {
-            throw new InferenceEngineException(e);
-        }
-    }
-
-    public void destroy() throws InferenceEngineException {
-        setInitialized(false);
-        if (timer != null) {
-            timer.cancel();
-        }
-    }
-
-    public void refreshGraph() throws InferenceEngineException {
-        try {
-            //get all subclassof
-            Graph graph = TinkerGraphFactory.createTinkerGraph();
-            CloseableIteration<Statement, QueryEvaluationException> iter = RyaDAOHelper.query(ryaDAO, null,
-                    RDFS.SUBCLASSOF, null, conf);
-            try {
-                while (iter.hasNext()) {
-                    String edgeName = RDFS.SUBCLASSOF.stringValue();
-                    Statement st = iter.next();
-                    addStatementEdge(graph, edgeName, st);
-                }
-            } finally {
-                if (iter != null) {
-                    iter.close();
-                }
-            }
-
-            subClassOfGraph = graph; //TODO: Should this be synchronized?
-
-            graph = TinkerGraphFactory.createTinkerGraph();
-
-            iter = RyaDAOHelper.query(ryaDAO, null,
-                    RDFS.SUBPROPERTYOF, null, conf);
-            try {
-                while (iter.hasNext()) {
-                    String edgeName = RDFS.SUBPROPERTYOF.stringValue();
-                    Statement st = iter.next();
-                    addStatementEdge(graph, edgeName, st);
-                }
-            } finally {
-                if (iter != null) {
-                    iter.close();
-                }
-            }
-
-            //equiv property really is the same as a subPropertyOf both ways
-            iter = RyaDAOHelper.query(ryaDAO, null, OWL.EQUIVALENTPROPERTY, null, conf);
-            try {
-                while (iter.hasNext()) {
-                    String edgeName = RDFS.SUBPROPERTYOF.stringValue();
-                    Statement st = iter.next();
-                    addStatementEdge(graph, edgeName, st);
-                    //reverse is also true
-                    addStatementEdge(graph, edgeName, new StatementImpl((Resource) st.getObject(), st.getPredicate(), st.getSubject()));
-                }
-            } finally {
-                if (iter != null) {
-                    iter.close();
-                }
-            }
-
-            subPropertyOfGraph = graph; //TODO: Should this be synchronized?
-
-            iter = RyaDAOHelper.query(ryaDAO, null, RDF.TYPE, OWL.SYMMETRICPROPERTY, conf);
-            Set<URI> symProp = new HashSet();
-            try {
-                while (iter.hasNext()) {
-                    Statement st = iter.next();
-                    symProp.add((URI) st.getSubject()); //safe to assume it is a URI?
-                }
-            } finally {
-                if (iter != null) {
-                    iter.close();
-                }
-            }
-            symmetricPropertySet = symProp;
-
-            iter = RyaDAOHelper.query(ryaDAO, null, RDF.TYPE, OWL.TRANSITIVEPROPERTY, conf);
-            Set<URI> transProp = new HashSet();
-            try {
-                while (iter.hasNext()) {
-                    Statement st = iter.next();
-                    transProp.add((URI) st.getSubject());
-                }
-            } finally {
-                if (iter != null) {
-                    iter.close();
-                }
-            }
-            transitivePropertySet = transProp;
-
-            iter = RyaDAOHelper.query(ryaDAO, null, OWL.INVERSEOF, null, conf);
-            Map<URI, URI> invProp = new HashMap();
-            try {
-                while (iter.hasNext()) {
-                    Statement st = iter.next();
-                    invProp.put((URI) st.getSubject(), (URI) st.getObject());
-                    invProp.put((URI) st.getObject(), (URI) st.getSubject());
-                }
-            } finally {
-                if (iter != null) {
-                    iter.close();
-                }
-            }
-            inverseOfMap = invProp;
-        } catch (QueryEvaluationException e) {
-            throw new InferenceEngineException(e);
-        }
-    }
-
-    protected void addStatementEdge(Graph graph, String edgeName, Statement st) {
-        Resource subj = st.getSubject();
-        Vertex a = graph.getVertex(subj);
-        if (a == null) {
-            a = graph.addVertex(subj);
-            a.setProperty(URI_PROP, subj);
-        }
-        Value obj = st.getObject();
-        Vertex b = graph.getVertex(obj);
-        if (b == null) {
-            b = graph.addVertex(obj);
-            b.setProperty(URI_PROP, obj);
-        }
-        graph.addEdge(null, a, b, edgeName);
-    }
-
-    public Set<URI> findParents(Graph graph, URI vertexId) {
-        Set<URI> parents = new HashSet();
-        if (graph == null) {
-            return parents;
-        }
-        Vertex v = graph.getVertex(vertexId);
-        if (v == null) {
-            return parents;
-        }
-        addParents(v, parents);
-        return parents;
-    }
-
-    private static void addParents(Vertex v, Set<URI> parents) {
-        for (Edge edge : v.getEdges(Direction.IN)) {
-            Vertex ov = edge.getVertex(Direction.OUT);
-            Object o = ov.getProperty(URI_PROP);
-            if (o != null && o instanceof URI) {
-                boolean contains = parents.contains(o);
-                if (!contains) {
-                    parents.add((URI) o);
-                    addParents(ov, parents);
-                }
-            }
-
-        }
-    }
-
-    public boolean isSymmetricProperty(URI prop) {
-        return (symmetricPropertySet != null) && symmetricPropertySet.contains(prop);
-    }
-
-    public URI findInverseOf(URI prop) {
-        return (inverseOfMap != null) ? inverseOfMap.get(prop) : (null);
-    }
-
-    public boolean isTransitiveProperty(URI prop) {
-        return (transitivePropertySet != null) && transitivePropertySet.contains(prop);
-    }
-
-    /**
-     * TODO: This chaining can be slow at query execution. the other option is to perform this in the query itself, but that will be constrained to how many levels we decide to go
-     */
-    public Set<Statement> findTransitiveProperty(Resource subj, URI prop, Value obj, Resource... contxts) throws InferenceEngineException {
-        if (transitivePropertySet.contains(prop)) {
-            Set<Statement> sts = new HashSet();
-            boolean goUp = subj == null;
-            chainTransitiveProperty(subj, prop, obj, (goUp) ? (obj) : (subj), sts, goUp, contxts);
-            return sts;
-        } else
-            return null;
-    }
-
-    /**
-     * TODO: This chaining can be slow at query execution. the other option is to perform this in the query itself, but that will be constrained to how many levels we decide to go
-     */
-    public Set<Resource> findSameAs(Resource value, Resource... contxts) throws InferenceEngineException{
-		Set<Resource> sameAs = new HashSet<Resource>();
-		sameAs.add(value);
-		findSameAsChaining(value, sameAs, contxts);
-		return sameAs;
-    }
-
-    /**
-     * TODO: This chaining can be slow at query execution. the other option is to perform this in the query itself, but that will be constrained to how many levels we decide to go
-     */
-    public void findSameAsChaining(Resource subj, Set<Resource> currentSameAs, Resource[] contxts) throws InferenceEngineException{
-        try {
-			CloseableIteration<Statement, QueryEvaluationException> subjIter = RyaDAOHelper.query(ryaDAO, subj, OWL.SAMEAS, null, conf, contxts);
-			while (subjIter.hasNext()){
-				Statement st = subjIter.next();
-				if (!currentSameAs.contains(st.getObject())){
-					Resource castedObj = (Resource) st.getObject();
-					currentSameAs.add(castedObj);
-					findSameAsChaining(castedObj, currentSameAs, contxts);
-				}
-			}
-			subjIter.close();
-			CloseableIteration<Statement, QueryEvaluationException> objIter = RyaDAOHelper.query(ryaDAO, null, OWL.SAMEAS, subj, conf, contxts);
-			while (objIter.hasNext()){
-				Statement st = objIter.next();
-				if (!currentSameAs.contains(st.getSubject())){
-					Resource sameAsSubj = st.getSubject();
-					currentSameAs.add(sameAsSubj);
-					findSameAsChaining(sameAsSubj, currentSameAs, contxts);
-				}
-			}
-			objIter.close();
-		} catch (QueryEvaluationException e) {
-			throw new InferenceEngineException(e);
-		}
-
-    }
-
-    protected void chainTransitiveProperty(Resource subj, URI prop, Value obj, Value core, Set<Statement> sts, boolean goUp, Resource[] contxts) throws InferenceEngineException {
-        try {
-            CloseableIteration<Statement, QueryEvaluationException> iter = RyaDAOHelper.query(ryaDAO, subj, prop, obj, conf, contxts);
-            while (iter.hasNext()) {
-                Statement st = iter.next();
-                sts.add(new StatementImpl((goUp) ? (st.getSubject()) : (Resource) (core), prop, (!goUp) ? (st.getObject()) : (core)));
-                if (goUp) {
-                    chainTransitiveProperty(null, prop, st.getSubject(), core, sts, goUp, contxts);
-                } else {
-                    chainTransitiveProperty((Resource) st.getObject(), prop, null, core, sts, goUp, contxts);
-                }
-            }
-            iter.close();
-        } catch (QueryEvaluationException e) {
-            throw new InferenceEngineException(e);
-        }
-    }
-
-    public boolean isInitialized() {
-        return initialized;
-    }
-
-    public void setInitialized(boolean initialized) {
-        this.initialized = initialized;
-    }
-
-    public RyaDAO getRyaDAO() {
-        return ryaDAO;
-    }
-
-    public void setRyaDAO(RyaDAO ryaDAO) {
-        this.ryaDAO = ryaDAO;
-    }
-
-    public RdfCloudTripleStoreConfiguration getConf() {
-        return conf;
-    }
-
-    public void setConf(RdfCloudTripleStoreConfiguration conf) {
-        this.conf = conf;
-    }
-
-    public Graph getSubClassOfGraph() {
-        return subClassOfGraph;
-    }
-
-    public Graph getSubPropertyOfGraph() {
-        return subPropertyOfGraph;
-    }
-
-    public long getRefreshGraphSchedule() {
-        return refreshGraphSchedule;
-    }
-
-    public void setRefreshGraphSchedule(long refreshGraphSchedule) {
-        this.refreshGraphSchedule = refreshGraphSchedule;
-    }
-
-    public Set<URI> getSymmetricPropertySet() {
-        return symmetricPropertySet;
-    }
-
-    public void setSymmetricPropertySet(Set<URI> symmetricPropertySet) {
-        this.symmetricPropertySet = symmetricPropertySet;
-    }
-
-    public Map<URI, URI> getInverseOfMap() {
-        return inverseOfMap;
-    }
-
-    public void setInverseOfMap(Map<URI, URI> inverseOfMap) {
-        this.inverseOfMap = inverseOfMap;
-    }
-
-    public Set<URI> getTransitivePropertySet() {
-        return transitivePropertySet;
-    }
-
-    public void setTransitivePropertySet(Set<URI> transitivePropertySet) {
-        this.transitivePropertySet = transitivePropertySet;
-    }
-
-    public boolean isSchedule() {
-        return schedule;
-    }
-
-    public void setSchedule(boolean schedule) {
-        this.schedule = schedule;
-    }
-}

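A side note on the recursive findSameAsChaining above: the TODO warns that the chaining can be slow, and deep recursion is also bounded by stack depth. Below is a minimal iterative sketch of the same closure computation, assuming the ryaDAO/conf fields of the class above plus java.util.Deque/ArrayDeque imports; it is an illustration of the worklist alternative, not the shipped implementation.

    // Hypothetical worklist variant of findSameAsChaining: same DAO calls, no recursion.
    public void findSameAsClosure(Resource start, Set<Resource> closure, Resource[] contxts)
            throws InferenceEngineException {
        Deque<Resource> pending = new ArrayDeque<Resource>();
        pending.push(start);
        closure.add(start);
        try {
            while (!pending.isEmpty()) {
                Resource current = pending.pop();
                // forward edges: current owl:sameAs ?o
                CloseableIteration<Statement, QueryEvaluationException> fwd =
                        RyaDAOHelper.query(ryaDAO, current, OWL.SAMEAS, null, conf, contxts);
                while (fwd.hasNext()) {
                    Value obj = fwd.next().getObject();
                    if (obj instanceof Resource && closure.add((Resource) obj)) {
                        pending.push((Resource) obj);   // newly discovered alias
                    }
                }
                fwd.close();
                // reverse edges: ?s owl:sameAs current
                CloseableIteration<Statement, QueryEvaluationException> rev =
                        RyaDAOHelper.query(ryaDAO, null, OWL.SAMEAS, current, conf, contxts);
                while (rev.hasNext()) {
                    Resource subj = rev.next().getSubject();
                    if (closure.add(subj)) {
                        pending.push(subj);
                    }
                }
                rev.close();
            }
        } catch (QueryEvaluationException e) {
            throw new InferenceEngineException(e);
        }
    }
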
http://git-wip-us.apache.org/repos/asf/incubator-rya/blob/80faf06d/sail/rya.sail.impl/src/main/java/mvm/rya/rdftriplestore/inference/InferenceEngineException.java
----------------------------------------------------------------------
diff --git a/sail/rya.sail.impl/src/main/java/mvm/rya/rdftriplestore/inference/InferenceEngineException.java b/sail/rya.sail.impl/src/main/java/mvm/rya/rdftriplestore/inference/InferenceEngineException.java
deleted file mode 100644
index c4538d2..0000000
--- a/sail/rya.sail.impl/src/main/java/mvm/rya/rdftriplestore/inference/InferenceEngineException.java
+++ /dev/null
@@ -1,42 +0,0 @@
-package mvm.rya.rdftriplestore.inference;
-
-/*
- * #%L
- * mvm.rya.rya.sail.impl
- * %%
- * Copyright (C) 2014 Rya
- * %%
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * You may obtain a copy of the License at
- * 
- *      http://www.apache.org/licenses/LICENSE-2.0
- * 
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- * #L%
- */
-
-/**
- * Date: 7/20/12
- * Time: 11:03 AM
- */
-public class InferenceEngineException extends Exception {
-    public InferenceEngineException() {
-    }
-
-    public InferenceEngineException(String s) {
-        super(s);
-    }
-
-    public InferenceEngineException(String s, Throwable throwable) {
-        super(s, throwable);
-    }
-
-    public InferenceEngineException(Throwable throwable) {
-        super(throwable);
-    }
-}

http://git-wip-us.apache.org/repos/asf/incubator-rya/blob/80faf06d/sail/rya.sail.impl/src/main/java/mvm/rya/rdftriplestore/inference/InverseOfVisitor.java
----------------------------------------------------------------------
diff --git a/sail/rya.sail.impl/src/main/java/mvm/rya/rdftriplestore/inference/InverseOfVisitor.java b/sail/rya.sail.impl/src/main/java/mvm/rya/rdftriplestore/inference/InverseOfVisitor.java
deleted file mode 100644
index 6ee060b..0000000
--- a/sail/rya.sail.impl/src/main/java/mvm/rya/rdftriplestore/inference/InverseOfVisitor.java
+++ /dev/null
@@ -1,79 +0,0 @@
-package mvm.rya.rdftriplestore.inference;
-
-/*
- * #%L
- * mvm.rya.rya.sail.impl
- * %%
- * Copyright (C) 2014 Rya
- * %%
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * You may obtain a copy of the License at
- * 
- *      http://www.apache.org/licenses/LICENSE-2.0
- * 
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- * #L%
- */
-
-import mvm.rya.api.RdfCloudTripleStoreConfiguration;
-import org.openrdf.model.URI;
-import org.openrdf.model.vocabulary.RDF;
-import org.openrdf.model.vocabulary.RDFS;
-import org.openrdf.model.vocabulary.SESAME;
-import org.openrdf.query.algebra.StatementPattern;
-import org.openrdf.query.algebra.Union;
-import org.openrdf.query.algebra.Var;
-
-/**
- * All predicates are changed
- * Class SubPropertyOfVisitor
- * Date: Mar 29, 2011
- * Time: 11:28:34 AM
- */
-public class InverseOfVisitor extends AbstractInferVisitor {
-
-    public InverseOfVisitor(RdfCloudTripleStoreConfiguration conf, InferenceEngine inferenceEngine) {
-        super(conf, inferenceEngine);
-        include = conf.isInferInverseOf();
-    }
-
-    @Override
-    protected void meetSP(StatementPattern node) throws Exception {
-        StatementPattern sp = node.clone();
-        final Var predVar = sp.getPredicateVar();
-
-        URI pred = (URI) predVar.getValue();
-        String predNamespace = pred.getNamespace();
-
-        final Var objVar = sp.getObjectVar();
-        final Var cntxtVar = sp.getContextVar();
-        if (objVar != null &&
-                !RDF.NAMESPACE.equals(predNamespace) &&
-                !SESAME.NAMESPACE.equals(predNamespace) &&
-                !RDFS.NAMESPACE.equals(predNamespace)
-                && !EXPANDED.equals(cntxtVar)) {
-            /**
-             * { ?a ?pred ?b . }
-             *   UNION
-             * { ?b ?pred ?a }
-             */
-
-            URI predUri = (URI) predVar.getValue();
-            URI invPropUri = inferenceEngine.findInverseOf(predUri);
-            if (invPropUri != null) {
-                Var subjVar = sp.getSubjectVar();
-                Union union = new InferUnion();
-                union.setLeftArg(sp);
-                union.setRightArg(new StatementPattern(objVar, new Var(predVar.getName(), invPropUri), subjVar, cntxtVar));
-                node.replaceWith(union);
-            }
-        }
-    }
-    
-}

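To make the InverseOfVisitor rewrite concrete, here is a small hand-built sketch of the algebra it produces. The predicate and its owl:inverseOf partner are hypothetical example URIs; Var, StatementPattern, and Union are the openrdf classes already imported above, with ValueFactoryImpl assumed from org.openrdf.model.impl.

    // ?a ex:hasPart ?b   becomes   { ?a ex:hasPart ?b } UNION { ?b ex:partOf ?a }
    ValueFactory vf = ValueFactoryImpl.getInstance();
    URI hasPart = vf.createURI("http://example.org/hasPart"); // assumed predicate
    URI partOf  = vf.createURI("http://example.org/partOf");  // assumed inverse property
    Var a = new Var("a");
    Var b = new Var("b");
    Var pred = new Var("p", hasPart);
    StatementPattern original = new StatementPattern(a, pred, b);
    Union union = new Union(original.clone(),
            new StatementPattern(b, new Var(pred.getName(), partOf), a));
    // The visitor then calls node.replaceWith(union) on the parsed query tree.
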
http://git-wip-us.apache.org/repos/asf/incubator-rya/blob/80faf06d/sail/rya.sail.impl/src/main/java/mvm/rya/rdftriplestore/inference/SameAsVisitor.java
----------------------------------------------------------------------
diff --git a/sail/rya.sail.impl/src/main/java/mvm/rya/rdftriplestore/inference/SameAsVisitor.java b/sail/rya.sail.impl/src/main/java/mvm/rya/rdftriplestore/inference/SameAsVisitor.java
deleted file mode 100644
index f8f9788..0000000
--- a/sail/rya.sail.impl/src/main/java/mvm/rya/rdftriplestore/inference/SameAsVisitor.java
+++ /dev/null
@@ -1,186 +0,0 @@
-package mvm.rya.rdftriplestore.inference;
-
-/*
- * #%L
- * mvm.rya.rya.sail.impl
- * %%
- * Copyright (C) 2014 Rya
- * %%
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * You may obtain a copy of the License at
- * 
- *      http://www.apache.org/licenses/LICENSE-2.0
- * 
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- * #L%
- */
-
-import mvm.rya.api.RdfCloudTripleStoreConfiguration;
-import mvm.rya.api.utils.NullableStatementImpl;
-import mvm.rya.rdftriplestore.utils.FixedStatementPattern;
-import mvm.rya.rdftriplestore.utils.TransitivePropertySP;
-import org.openrdf.model.Resource;
-import org.openrdf.model.URI;
-import org.openrdf.model.Value;
-import org.openrdf.model.vocabulary.OWL;
-import org.openrdf.model.vocabulary.RDF;
-import org.openrdf.model.vocabulary.RDFS;
-import org.openrdf.model.vocabulary.SESAME;
-import org.openrdf.query.algebra.StatementPattern;
-import org.openrdf.query.algebra.Var;
-
-import java.util.HashSet;
-import java.util.Set;
-import java.util.UUID;
-
-/**
- * All predicates are changed
- * Class SubPropertyOfVisitor
- * Date: Mar 29, 2011
- * Time: 11:28:34 AM
- */
-public class SameAsVisitor extends AbstractInferVisitor {
-
-    public SameAsVisitor(RdfCloudTripleStoreConfiguration conf, InferenceEngine inferenceEngine) {
-        super(conf, inferenceEngine);
-        include = conf.isInferSubPropertyOf(); // oops: keyed off the subPropertyOf flag rather than a sameAs-specific one
-    }
-    
-    public void meet(StatementPattern sp) throws Exception {
-        if (!include) {
-            return;
-        }
-        if (sp instanceof FixedStatementPattern || sp instanceof TransitivePropertySP || sp instanceof DoNotExpandSP) {
-            return;   //already inferred somewhere else
-        }
-        final Var predVar = sp.getPredicateVar();
-        //do not know when things are null
-        if (predVar == null) {
-            return;
-        }
-        meetSP(sp);
-    }
-
-    @Override
-    protected void meetSP(StatementPattern node) throws Exception {
-        StatementPattern sp = node.clone();
-        final Var predVar = sp.getPredicateVar();
-
-        boolean shouldExpand = true;
-        if (predVar.hasValue()){
-            URI pred = (URI) predVar.getValue();
-            String predNamespace = pred.getNamespace();
-            shouldExpand = !pred.equals(OWL.SAMEAS) && 
-            !RDF.NAMESPACE.equals(predNamespace) &&
-            !SESAME.NAMESPACE.equals(predNamespace) &&
-            !RDFS.NAMESPACE.equals(predNamespace);
-        }
-
-        final Var objVar = sp.getObjectVar();
-        final Var subjVar = sp.getSubjectVar();
-        final Var cntxtVar = sp.getContextVar();
-        if (shouldExpand
-                && !EXPANDED.equals(cntxtVar) && objVar != null && subjVar != null){
-            if (objVar.getValue() == null) {
-            	Value subjVarValue = subjVar.getValue();
-            	if (subjVarValue instanceof Resource){
-            		Set<Resource> uris = inferenceEngine.findSameAs((Resource)subjVar.getValue(), getVarValue(cntxtVar));
-            		if (uris.size() > 1){
-            			InferJoin join = getReplaceJoin(uris, true, subjVar, objVar, predVar, cntxtVar);
-            			node.replaceWith(join);  
-            		}
-            	}
-            }
-            else if (subjVar.getValue() == null) {
-            	Value objVarValue = objVar.getValue();
-            	if (objVarValue instanceof Resource){
-            		Set<Resource> uris = inferenceEngine.findSameAs((Resource)objVar.getValue(), getVarValue(cntxtVar));
-                	if (uris.size() > 1){
-                        InferJoin join = getReplaceJoin(uris, false, subjVar, objVar, predVar, cntxtVar);
-                        node.replaceWith(join);  
-                	}
-            	}  	
-            }
-            else {
-            	// both subj and obj are set and should be expanded
-            	Set<Resource> subjURIs = new HashSet<Resource>();
-            	Set<Resource> objURIs = new HashSet<Resource>();
-            	// TODO I don't like these checks -- is there a better way to do this?
-            	Value objVarValue = objVar.getValue();
-           	    if (objVarValue instanceof Resource){
-           	    	objURIs = inferenceEngine.findSameAs((Resource)objVar.getValue(), getVarValue(cntxtVar));
-            	}
-            	Value subjVarValue = subjVar.getValue();
-            	if (subjVarValue instanceof Resource){
-            		subjURIs = inferenceEngine.findSameAs((Resource)subjVar.getValue(), getVarValue(cntxtVar));
-            	}
-            	InferJoin finalJoin = null;
-            	// expand subj first
-            	if (subjURIs.size() > 1){
-            		finalJoin = getReplaceJoin(subjURIs, true, subjVar, objVar, predVar, cntxtVar);
-            	}
-            	// now expand the obj
-            	if (objURIs.size() > 1){
-            		// if we already expanded the subj
-            		if (finalJoin != null){
-            			// we know what this is since we created it
-            			DoNotExpandSP origStatement = (DoNotExpandSP) finalJoin.getRightArg();
-            	        String s = UUID.randomUUID().toString();
-            	        Var dummyVar = new Var(s);
-            			StatementPattern origDummyStatement = new DoNotExpandSP(origStatement.getSubjectVar(), origStatement.getPredicateVar(), dummyVar, cntxtVar);
-            	        FixedStatementPattern fsp = new FixedStatementPattern(dummyVar, new Var("c-" + s, OWL.SAMEAS), objVar, cntxtVar);
-            	        for (Resource sameAs : objURIs){
-            	    		NullableStatementImpl newStatement = new NullableStatementImpl(sameAs, OWL.SAMEAS, (Resource)objVar.getValue(), getVarValue(cntxtVar));
-            	            fsp.statements.add(newStatement);        		
-            	    	}
-            	        InferJoin interimJoin = new InferJoin(fsp, origDummyStatement);
-            	        finalJoin = new InferJoin(finalJoin.getLeftArg(), interimJoin);
-            		}
-            		else {
-            			finalJoin = getReplaceJoin(objURIs, false, subjVar, objVar, predVar, cntxtVar);
-            		}
-            		
-            	}
-            	if (finalJoin != null){
-            	    node.replaceWith(finalJoin);
-            	}
-            }
-        }
-    }
-    
-    private InferJoin getReplaceJoin(Set<Resource> uris, boolean subSubj, Var subjVar, Var objVar, Var predVar, Var cntxtVar){
-        String s = UUID.randomUUID().toString();
-        Var dummyVar = new Var(s);
-        StatementPattern origStatement;
-        Var subVar;
-        if (subSubj){
-        	subVar = subjVar;
-        	origStatement = new DoNotExpandSP(dummyVar, predVar, objVar, cntxtVar);
-        }
-        else {
-        	subVar = objVar;
-        	origStatement = new DoNotExpandSP(subjVar, predVar, dummyVar, cntxtVar);
-        }
-        FixedStatementPattern fsp = new FixedStatementPattern(dummyVar, new Var("c-" + s, OWL.SAMEAS), subVar, cntxtVar);
-        for (Resource sameAs : uris){
-    		NullableStatementImpl newStatement = new NullableStatementImpl(sameAs, OWL.SAMEAS, (Resource)subVar.getValue(), getVarValue(cntxtVar));
-            fsp.statements.add(newStatement);        		
-    	}
-        InferJoin join = new InferJoin(fsp, origStatement);
-        join.getProperties().put(InferConstants.INFERRED, InferConstants.TRUE);
-        return join;
-    }
-    
-    protected Resource getVarValue(Var var) {
-        if (var == null)
-            return null;
-        else
-            return (Resource)var.getValue();
-    }
-
-}

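The dummy-variable trick in getReplaceJoin is worth seeing in isolation: the constant term is swapped for a fresh variable, a FixedStatementPattern feeds that variable every known owl:sameAs alias, and DoNotExpandSP shields the rewritten pattern from being expanded a second time. A compressed, hypothetical sketch (ex:knows, ex:Alice, and the alias set are invented for illustration):

    // Rewrites  ?y ex:knows ex:Alice  when findSameAs(ex:Alice) = {ex:Alice, ex:Alice2}.
    ValueFactory vf = ValueFactoryImpl.getInstance();
    Var subjVar = new Var("y");
    Var predVar = new Var("p", vf.createURI("http://example.org/knows"));
    Var objVar  = new Var("o", vf.createURI("http://example.org/Alice"));
    Set<Resource> aliases = new HashSet<Resource>();   // stand-in for findSameAs(...)
    aliases.add(vf.createURI("http://example.org/Alice"));
    aliases.add(vf.createURI("http://example.org/Alice2"));

    String s = UUID.randomUUID().toString();
    Var dummy = new Var(s);
    // left arm: binds the dummy var to each alias of ex:Alice
    FixedStatementPattern fsp =
            new FixedStatementPattern(dummy, new Var("c-" + s, OWL.SAMEAS), objVar, null);
    for (Resource alias : aliases) {
        fsp.statements.add(new NullableStatementImpl(alias, OWL.SAMEAS,
                (Resource) objVar.getValue()));
    }
    // right arm: the original pattern with the constant replaced by the dummy var
    StatementPattern rewritten = new DoNotExpandSP(subjVar, predVar, dummy, null);
    InferJoin join = new InferJoin(fsp, rewritten);
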
http://git-wip-us.apache.org/repos/asf/incubator-rya/blob/80faf06d/sail/rya.sail.impl/src/main/java/mvm/rya/rdftriplestore/inference/SubClassOfVisitor.java
----------------------------------------------------------------------
diff --git a/sail/rya.sail.impl/src/main/java/mvm/rya/rdftriplestore/inference/SubClassOfVisitor.java b/sail/rya.sail.impl/src/main/java/mvm/rya/rdftriplestore/inference/SubClassOfVisitor.java
deleted file mode 100644
index 485224d..0000000
--- a/sail/rya.sail.impl/src/main/java/mvm/rya/rdftriplestore/inference/SubClassOfVisitor.java
+++ /dev/null
@@ -1,107 +0,0 @@
-package mvm.rya.rdftriplestore.inference;
-
-/*
- * #%L
- * mvm.rya.rya.sail.impl
- * %%
- * Copyright (C) 2014 Rya
- * %%
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * You may obtain a copy of the License at
- * 
- *      http://www.apache.org/licenses/LICENSE-2.0
- * 
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- * #L%
- */
-
-import mvm.rya.api.RdfCloudTripleStoreConfiguration;
-import mvm.rya.api.utils.NullableStatementImpl;
-import mvm.rya.rdftriplestore.utils.FixedStatementPattern;
-import org.openrdf.model.URI;
-import org.openrdf.model.vocabulary.RDF;
-import org.openrdf.model.vocabulary.RDFS;
-import org.openrdf.query.algebra.StatementPattern;
-import org.openrdf.query.algebra.Var;
-
-import java.util.Collection;
-import java.util.UUID;
-
-/**
- * Class SubClassOfVisitor
- * Date: Mar 29, 2011
- * Time: 11:28:34 AM
- */
-public class SubClassOfVisitor extends AbstractInferVisitor {
-
-    public SubClassOfVisitor(RdfCloudTripleStoreConfiguration conf, InferenceEngine inferenceEngine) {
-        super(conf, inferenceEngine);
-        include = conf.isInferSubClassOf();
-    }
-
-    @Override
-    protected void meetSP(StatementPattern node) throws Exception {
-        StatementPattern sp = node.clone();
-        final Var predVar = sp.getPredicateVar();
-        final Var objVar = sp.getObjectVar();
-        final Var conVar = sp.getContextVar();
-        if (predVar != null && objVar != null && objVar.getValue() != null && RDF.TYPE.equals(predVar.getValue())
-                && !EXPANDED.equals(conVar)) {
-            /**
-             * ?type sesame:directSubClassOf ub:Student . ?student rdf:type ?type +
-             */
-//            String s = UUID.randomUUID().toString();
-//            Var typeVar = new Var(s);
-//            StatementPattern subClassOf = new StatementPattern(typeVar, new Var("c-" + s, SESAME.DIRECTSUBCLASSOF), objVar, SUBCLASS_EXPANDED);
-//            StatementPattern rdfType = new StatementPattern(sp.getSubjectVar(), sp.getPredicateVar(), typeVar, SUBCLASS_EXPANDED);
-//            InferJoin join = new InferJoin(subClassOf, rdfType);
-//            join.getProperties().put(InferConstants.INFERRED, InferConstants.TRUE);
-//            node.replaceWith(join);
-
-            URI subclassof_uri = (URI) objVar.getValue();
-            Collection<URI> parents = inferenceEngine.findParents(inferenceEngine.getSubClassOfGraph(), subclassof_uri);
-            if (parents != null && parents.size() > 0) {
-                String s = UUID.randomUUID().toString();
-                Var typeVar = new Var(s);
-                FixedStatementPattern fsp = new FixedStatementPattern(typeVar, new Var("c-" + s, RDFS.SUBCLASSOF), objVar, conVar);
-                fsp.statements.add(new NullableStatementImpl(subclassof_uri, RDFS.SUBCLASSOF, subclassof_uri));
-                for (URI u : parents) {
-                    fsp.statements.add(new NullableStatementImpl(u, RDFS.SUBCLASSOF, subclassof_uri));
-                }
-
-                StatementPattern rdfType = new DoNotExpandSP(sp.getSubjectVar(), sp.getPredicateVar(), typeVar, conVar);
-                InferJoin join = new InferJoin(fsp, rdfType);
-                join.getProperties().put(InferConstants.INFERRED, InferConstants.TRUE);
-                node.replaceWith(join);
-            }
-
-//            if (parents != null && parents.size() > 0) {
-//                StatementPatterns statementPatterns = new StatementPatterns();
-//                statementPatterns.patterns.add(node);
-//                Var subjVar = node.getSubjectVar();
-//                for (URI u : parents) {
-//                    statementPatterns.patterns.add(new StatementPattern(subjVar, predVar, new Var(objVar.getName(), u)));
-//                }
-//                node.replaceWith(statementPatterns);
-//            }
-
-//            if (parents != null && parents.size() > 0) {
-//                VarCollection vc = new VarCollection();
-//                vc.setName(objVar.getName());
-//                vc.values.add(objVar);
-//                for (URI u : parents) {
-//                    vc.values.add(new Var(objVar.getName(), u));
-//                }
-//                Var subjVar = node.getSubjectVar();
-//                node.replaceWith(new StatementPattern(subjVar, predVar, vc, node.getContextVar()));
-//            }
-        }
-    }
-
-}

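End to end, SubClassOfVisitor turns one type pattern into a join over the subclass closure. A hedged sketch with an invented two-class ontology (ex:GradStudent rdfs:subClassOf ex:Student), reusing the same helper classes the visitor uses:

    // Query pattern:  ?student rdf:type ex:Student
    ValueFactory vf = ValueFactoryImpl.getInstance();
    URI student = vf.createURI("http://example.org/Student");     // assumed class
    URI grad    = vf.createURI("http://example.org/GradStudent"); // assumed subclass
    Var subjVar = new Var("student");
    Var predVar = new Var("p", RDF.TYPE);

    String s = UUID.randomUUID().toString();
    Var typeVar = new Var(s);
    // left arm: ?type rdfs:subClassOf ex:Student, pre-filled with the closure (self included)
    FixedStatementPattern fsp = new FixedStatementPattern(
            typeVar, new Var("c-" + s, RDFS.SUBCLASSOF), new Var("o", student), null);
    fsp.statements.add(new NullableStatementImpl(student, RDFS.SUBCLASSOF, student));
    fsp.statements.add(new NullableStatementImpl(grad, RDFS.SUBCLASSOF, student));
    // right arm: ?student rdf:type ?type, protected from re-expansion
    StatementPattern rdfType = new DoNotExpandSP(subjVar, predVar, typeVar, null);
    InferJoin join = new InferJoin(fsp, rdfType);
    // Joining ?type against {Student, GradStudent} returns instances of both classes.
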
http://git-wip-us.apache.org/repos/asf/incubator-rya/blob/80faf06d/sail/rya.sail.impl/src/main/java/mvm/rya/rdftriplestore/inference/SubPropertyOfVisitor.java
----------------------------------------------------------------------
diff --git a/sail/rya.sail.impl/src/main/java/mvm/rya/rdftriplestore/inference/SubPropertyOfVisitor.java b/sail/rya.sail.impl/src/main/java/mvm/rya/rdftriplestore/inference/SubPropertyOfVisitor.java
deleted file mode 100644
index 14d56c9..0000000
--- a/sail/rya.sail.impl/src/main/java/mvm/rya/rdftriplestore/inference/SubPropertyOfVisitor.java
+++ /dev/null
@@ -1,120 +0,0 @@
-package mvm.rya.rdftriplestore.inference;
-
-/*
- * #%L
- * mvm.rya.rya.sail.impl
- * %%
- * Copyright (C) 2014 Rya
- * %%
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * You may obtain a copy of the License at
- * 
- *      http://www.apache.org/licenses/LICENSE-2.0
- * 
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- * #L%
- */
-
-import mvm.rya.api.RdfCloudTripleStoreConfiguration;
-import mvm.rya.api.utils.NullableStatementImpl;
-import mvm.rya.rdftriplestore.utils.FixedStatementPattern;
-import org.openrdf.model.URI;
-import org.openrdf.model.vocabulary.RDF;
-import org.openrdf.model.vocabulary.RDFS;
-import org.openrdf.model.vocabulary.SESAME;
-import org.openrdf.query.algebra.StatementPattern;
-import org.openrdf.query.algebra.Var;
-
-import java.util.Set;
-import java.util.UUID;
-
-/**
- * All predicates are changed
- * Class SubPropertyOfVisitor
- * Date: Mar 29, 2011
- * Time: 11:28:34 AM
- */
-public class SubPropertyOfVisitor extends AbstractInferVisitor {
-
-    public SubPropertyOfVisitor(RdfCloudTripleStoreConfiguration conf, InferenceEngine inferenceEngine) {
-        super(conf, inferenceEngine);
-        include = conf.isInferSubPropertyOf();
-    }
-
-    @Override
-    protected void meetSP(StatementPattern node) throws Exception {
-        StatementPattern sp = node.clone();
-        final Var predVar = sp.getPredicateVar();
-
-        URI pred = (URI) predVar.getValue();
-        String predNamespace = pred.getNamespace();
-
-        final Var objVar = sp.getObjectVar();
-        final Var cntxtVar = sp.getContextVar();
-        if (objVar != null &&
-                !RDF.NAMESPACE.equals(predNamespace) &&
-                !SESAME.NAMESPACE.equals(predNamespace) &&
-                !RDFS.NAMESPACE.equals(predNamespace)
-                && !EXPANDED.equals(cntxtVar)) {
-            /**
-             * { ?subProp rdfs:subPropertyOf ub:worksFor . ?y ?subProp <http://www.Department0.University0.edu> }
-             *   UNION
-             * { ?y ub:worksFor <http://www.Department0.University0.edu> }
-             */
-//            String s = UUID.randomUUID().toString();
-//            Var subPropVar = new Var(s);
-//            StatementPattern subPropOf = new StatementPattern(subPropVar, new Var("c-" + s, SESAME.DIRECTSUBPROPERTYOF), predVar, EXPANDED);
-//            StatementPattern subPropOf2 = new StatementPattern(sp.getSubjectVar(), subPropVar, objVar, EXPANDED);
-//            InferJoin join = new InferJoin(subPropOf, subPropOf2);
-//            join.getProperties().put(InferConstants.INFERRED, InferConstants.TRUE);
-//            node.replaceWith(join);
-
-//            Collection<URI> parents = inferenceEngine.findParents(inferenceEngine.subPropertyOfGraph, (URI) predVar.getValue());
-//            if (parents != null && parents.size() > 0) {
-//                StatementPatterns statementPatterns = new StatementPatterns();
-//                statementPatterns.patterns.add(node);
-//                Var subjVar = node.getSubjectVar();
-//                for (URI u : parents) {
-//                    statementPatterns.patterns.add(new StatementPattern(subjVar, new Var(predVar.getName(), u), objVar));
-//                }
-//                node.replaceWith(statementPatterns);
-//            }
-//            if (parents != null && parents.size() > 0) {
-//                VarCollection vc = new VarCollection();
-//                vc.setName(predVar.getName());
-//                vc.values.add(predVar);
-//                for (URI u : parents) {
-//                    vc.values.add(new Var(predVar.getName(), u));
-//                }
-//                Var subjVar = node.getSubjectVar();
-//                node.replaceWith(new StatementPattern(subjVar, vc, objVar, node.getContextVar()));
-//            }
-
-            URI subprop_uri = (URI) predVar.getValue();
-            Set<URI> parents = inferenceEngine.findParents(inferenceEngine.getSubPropertyOfGraph(), subprop_uri);
-            if (parents != null && parents.size() > 0) {
-                String s = UUID.randomUUID().toString();
-                Var typeVar = new Var(s);
-                FixedStatementPattern fsp = new FixedStatementPattern(typeVar, new Var("c-" + s, RDFS.SUBPROPERTYOF), predVar, cntxtVar);
-//                fsp.statements.add(new NullableStatementImpl(subprop_uri, RDFS.SUBPROPERTYOF, subprop_uri));
-                //add self
-                parents.add(subprop_uri);
-                for (URI u : parents) {
-                    fsp.statements.add(new NullableStatementImpl(u, RDFS.SUBPROPERTYOF, subprop_uri));
-                }
-
-                StatementPattern rdfType = new DoNotExpandSP(sp.getSubjectVar(), typeVar, sp.getObjectVar(), cntxtVar);
-                InferJoin join = new InferJoin(fsp, rdfType);
-                join.getProperties().put(InferConstants.INFERRED, InferConstants.TRUE);
-                node.replaceWith(join);
-            }
-        }
-    }
-}

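One subtle contrast with SubClassOfVisitor: rather than adding a reflexive statement directly, this visitor adds the property itself to the parents set, so the self row falls out of the same loop. A short sketch of just that closure-seeding step, where ex:worksFor is an invented property and fsp is a FixedStatementPattern built as in the method above:

    URI worksFor = vf.createURI("http://example.org/worksFor");  // assumed property
    Set<URI> parents = inferenceEngine.findParents(
            inferenceEngine.getSubPropertyOfGraph(), worksFor);  // e.g. {ex:isAffiliatedWith}
    parents.add(worksFor);  // "add self" -- replaces the commented-out reflexive statement
    for (URI u : parents) {
        fsp.statements.add(new NullableStatementImpl(u, RDFS.SUBPROPERTYOF, worksFor));
    }
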
http://git-wip-us.apache.org/repos/asf/incubator-rya/blob/80faf06d/sail/rya.sail.impl/src/main/java/mvm/rya/rdftriplestore/inference/SymmetricPropertyVisitor.java
----------------------------------------------------------------------
diff --git a/sail/rya.sail.impl/src/main/java/mvm/rya/rdftriplestore/inference/SymmetricPropertyVisitor.java b/sail/rya.sail.impl/src/main/java/mvm/rya/rdftriplestore/inference/SymmetricPropertyVisitor.java
deleted file mode 100644
index 59d9f41..0000000
--- a/sail/rya.sail.impl/src/main/java/mvm/rya/rdftriplestore/inference/SymmetricPropertyVisitor.java
+++ /dev/null
@@ -1,77 +0,0 @@
-package mvm.rya.rdftriplestore.inference;
-
-/*
- * #%L
- * mvm.rya.rya.sail.impl
- * %%
- * Copyright (C) 2014 Rya
- * %%
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * You may obtain a copy of the License at
- * 
- *      http://www.apache.org/licenses/LICENSE-2.0
- * 
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- * #L%
- */
-
-import mvm.rya.api.RdfCloudTripleStoreConfiguration;
-import org.openrdf.model.URI;
-import org.openrdf.model.vocabulary.RDF;
-import org.openrdf.model.vocabulary.RDFS;
-import org.openrdf.model.vocabulary.SESAME;
-import org.openrdf.query.algebra.StatementPattern;
-import org.openrdf.query.algebra.Union;
-import org.openrdf.query.algebra.Var;
-
-/**
- * All predicates are changed
- * Class SubPropertyOfVisitor
- * Date: Mar 29, 2011
- * Time: 11:28:34 AM
- */
-public class SymmetricPropertyVisitor extends AbstractInferVisitor {
-
-    public SymmetricPropertyVisitor(RdfCloudTripleStoreConfiguration conf, InferenceEngine inferenceEngine) {
-        super(conf, inferenceEngine);
-        include = conf.isInferSymmetricProperty();
-    }
-
-    @Override
-    protected void meetSP(StatementPattern node) throws Exception {
-        StatementPattern sp = node.clone();
-
-        final Var predVar = sp.getPredicateVar();
-        URI pred = (URI) predVar.getValue();
-        String predNamespace = pred.getNamespace();
-
-        final Var objVar = sp.getObjectVar();
-        final Var cntxtVar = sp.getContextVar();
-        if (objVar != null &&
-                !RDF.NAMESPACE.equals(predNamespace) &&
-                !SESAME.NAMESPACE.equals(predNamespace) &&
-                !RDFS.NAMESPACE.equals(predNamespace)
-                && !EXPANDED.equals(cntxtVar)) {
-            /**
-             * { ?a ?pred ?b . }
-             *   UNION
-             * { ?b ?pred ?a }
-             */
-
-            URI symmPropUri = (URI) predVar.getValue();
-            if(inferenceEngine.isSymmetricProperty(symmPropUri)) {
-                Var subjVar = sp.getSubjectVar();
-                Union union = new InferUnion();
-                union.setLeftArg(sp);
-                union.setRightArg(new StatementPattern(objVar, predVar, subjVar, cntxtVar));
-                node.replaceWith(union);
-            }
-        }
-    }
-}

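The symmetric rewrite is the simpler sibling of the inverseOf rewrite above: subject and object are flipped but the predicate Var is reused unchanged, since a symmetric property is its own inverse. A minimal hypothetical sketch (ex:marriedTo is invented):

    // ?a ex:marriedTo ?b   becomes   { ?a ex:marriedTo ?b } UNION { ?b ex:marriedTo ?a }
    Var a = new Var("a");
    Var b = new Var("b");
    Var pred = new Var("p", ValueFactoryImpl.getInstance()
            .createURI("http://example.org/marriedTo"));
    StatementPattern sp = new StatementPattern(a, pred, b);
    Union union = new Union(sp.clone(),
            new StatementPattern(b, pred, a));   // same predicate Var on both arms
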
http://git-wip-us.apache.org/repos/asf/incubator-rya/blob/80faf06d/sail/rya.sail.impl/src/main/java/mvm/rya/rdftriplestore/inference/TransitivePropertyVisitor.java
----------------------------------------------------------------------
diff --git a/sail/rya.sail.impl/src/main/java/mvm/rya/rdftriplestore/inference/TransitivePropertyVisitor.java b/sail/rya.sail.impl/src/main/java/mvm/rya/rdftriplestore/inference/TransitivePropertyVisitor.java
deleted file mode 100644
index 5763c7c..0000000
--- a/sail/rya.sail.impl/src/main/java/mvm/rya/rdftriplestore/inference/TransitivePropertyVisitor.java
+++ /dev/null
@@ -1,68 +0,0 @@
-package mvm.rya.rdftriplestore.inference;
-
-/*
- * #%L
- * mvm.rya.rya.sail.impl
- * %%
- * Copyright (C) 2014 Rya
- * %%
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * You may obtain a copy of the License at
- * 
- *      http://www.apache.org/licenses/LICENSE-2.0
- * 
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- * #L%
- */
-
-import mvm.rya.api.RdfCloudTripleStoreConfiguration;
-import mvm.rya.rdftriplestore.utils.TransitivePropertySP;
-import org.openrdf.model.URI;
-import org.openrdf.model.vocabulary.RDF;
-import org.openrdf.model.vocabulary.RDFS;
-import org.openrdf.model.vocabulary.SESAME;
-import org.openrdf.query.algebra.StatementPattern;
-import org.openrdf.query.algebra.Var;
-
-/**
- * All predicates are changed
- * Class SubPropertyOfVisitor
- * Date: Mar 29, 2011
- * Time: 11:28:34 AM
- */
-public class TransitivePropertyVisitor extends AbstractInferVisitor {
-
-    public TransitivePropertyVisitor(RdfCloudTripleStoreConfiguration conf, InferenceEngine inferenceEngine) {
-        super(conf, inferenceEngine);
-        include = conf.isInferTransitiveProperty();
-    }
-
-    @Override
-    protected void meetSP(StatementPattern node) throws Exception {
-        StatementPattern sp = node.clone();
-        final Var predVar = sp.getPredicateVar();
-
-        URI pred = (URI) predVar.getValue();
-        String predNamespace = pred.getNamespace();
-
-        final Var objVar = sp.getObjectVar();
-        final Var cntxtVar = sp.getContextVar();
-        if (objVar != null &&
-                !RDF.NAMESPACE.equals(predNamespace) &&
-                !SESAME.NAMESPACE.equals(predNamespace) &&
-                !RDFS.NAMESPACE.equals(predNamespace)
-                && !EXPANDED.equals(cntxtVar)) {
-
-            URI transPropUri = (URI) predVar.getValue();
-            if (inferenceEngine.isTransitiveProperty(transPropUri)) {
-                node.replaceWith(new TransitivePropertySP(sp.getSubjectVar(), sp.getPredicateVar(), sp.getObjectVar(), sp.getContextVar()));
-            }
-        }
-    }
-}

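Unlike the other visitors, TransitivePropertyVisitor expands nothing in place; it only re-types the node so the evaluation layer knows to chain it. A hedged sketch of how that marker connects back to the chainTransitiveProperty method shown earlier in this diff; the subject and property URIs are invented, and the call assumes same-package access to the InferenceEngine instance since the method is protected:

    // A TransitivePropertySP with a bound subject triggers downward chaining at
    // evaluation time: start at ex:a and follow ex:ancestorOf hop by hop.
    ValueFactory vf = ValueFactoryImpl.getInstance();
    Resource start = vf.createURI("http://example.org/a");          // assumed subject
    URI ancestorOf = vf.createURI("http://example.org/ancestorOf"); // assumed transitive prop
    Set<Statement> results = new HashSet<Statement>();
    // goUp=false walks objects; each hop re-queries with the previous object as subject
    inferenceEngine.chainTransitiveProperty(start, ancestorOf, null, start,
            results, false, new Resource[]{});
    // results now holds (ex:a, ex:ancestorOf, x) for every reachable x.
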
http://git-wip-us.apache.org/repos/asf/incubator-rya/blob/80faf06d/sail/rya.sail.impl/src/main/java/mvm/rya/rdftriplestore/namespace/NamespaceManager.java
----------------------------------------------------------------------
diff --git a/sail/rya.sail.impl/src/main/java/mvm/rya/rdftriplestore/namespace/NamespaceManager.java b/sail/rya.sail.impl/src/main/java/mvm/rya/rdftriplestore/namespace/NamespaceManager.java
deleted file mode 100644
index 7774ce2..0000000
--- a/sail/rya.sail.impl/src/main/java/mvm/rya/rdftriplestore/namespace/NamespaceManager.java
+++ /dev/null
@@ -1,166 +0,0 @@
-package mvm.rya.rdftriplestore.namespace;
-
-/*
- * #%L
- * mvm.rya.rya.sail.impl
- * %%
- * Copyright (C) 2014 Rya
- * %%
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * You may obtain a copy of the License at
- * 
- *      http://www.apache.org/licenses/LICENSE-2.0
- * 
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- * #L%
- */
-
-import info.aduna.iteration.CloseableIteration;
-import mvm.rya.api.RdfCloudTripleStoreConfiguration;
-import mvm.rya.api.persist.RdfDAOException;
-import mvm.rya.api.persist.RyaDAO;
-import mvm.rya.api.persist.RyaNamespaceManager;
-import net.sf.ehcache.Cache;
-import net.sf.ehcache.CacheManager;
-import net.sf.ehcache.Element;
-import net.sf.ehcache.Statistics;
-import org.openrdf.model.Namespace;
-import org.openrdf.sail.SailException;
-
-import java.io.InputStream;
-
-/**
- * Class NamespaceManager
- * Date: Oct 17, 2011
- * Time: 8:25:33 AM
- */
-public class NamespaceManager {
-    CacheManager cacheManager;
-    Cache namespaceCache;
-    public static final String NAMESPACE_CACHE_NAME = "namespace";
-    private RdfCloudTripleStoreConfiguration conf;
-    private RyaNamespaceManager namespaceManager;
-
-    public NamespaceManager(RyaDAO ryaDAO, RdfCloudTripleStoreConfiguration conf) {
-        this.conf = conf;
-        initialize(ryaDAO);
-    }
-
-    protected void initialize(RyaDAO ryaDAO) {
-        try {
-            this.namespaceManager = ryaDAO.getNamespaceManager();
-
-            InputStream cacheConfigStream = Thread.currentThread().getContextClassLoader().getResourceAsStream("ehcache.xml");
-            if (cacheConfigStream == null) {
-                this.cacheManager = CacheManager.create();
-//                throw new RuntimeException("Cache Configuration does not exist");
-            } else {
-                this.cacheManager = CacheManager.create(cacheConfigStream);
-            }
-            this.namespaceCache = cacheManager.getCache(NAMESPACE_CACHE_NAME);
-            if (namespaceCache == null) {
-                cacheManager.addCache(NAMESPACE_CACHE_NAME);
-            }
-
-
-        } catch (Exception e) {
-            throw new RuntimeException(e);
-        }
-    }
-
-    public void shutdown() {
-        if (cacheManager != null) {
-            cacheManager.shutdown();
-            cacheManager = null;
-        }
-    }
-
-    public void addNamespace(String pfx, String namespace) {
-        try {
-            String savedNamespace = getNamespace(pfx);
-            //if the saved ns is the same one being saved, don't do anything
-            if (savedNamespace != null && savedNamespace.equals(namespace)) {
-                return;
-            }
-
-            namespaceCache.put(new Element(pfx, namespace));
-            namespaceManager.addNamespace(pfx, namespace);
-        } catch (Exception e) {
-            throw new RuntimeException(e);
-        }
-    }
-
-    public String getNamespace(String pfx) {
-        //try in the cache first
-        Element element = namespaceCache.get(pfx);
-        if (element != null) {
-            return (String) element.getValue();
-        }
-
-        try {
-            String namespace = namespaceManager.getNamespace(pfx);
-            if (namespace != null) {
-                namespaceCache.put(new Element(pfx, namespace));
-                return namespace;
-            }
-        } catch (Exception e) {
-            //TODO: print or log?
-        }
-        return null;
-
-    }
-
-    public void removeNamespace(String pfx) {
-        try {
-            namespaceCache.remove(pfx);
-            namespaceManager.removeNamespace(pfx);
-        } catch (Exception e) {
-            throw new RuntimeException(e);
-        }
-    }
-
-    public CloseableIteration<? extends Namespace, SailException> iterateNamespace() {
-        try {
-            //for this one we will go directly to the store
-            final CloseableIteration<? extends Namespace, RdfDAOException> iteration = namespaceManager.iterateNamespace();
-            return new CloseableIteration<Namespace, SailException>() {
-                @Override
-                public void close() throws SailException {
-                    iteration.close();
-                }
-
-                @Override
-                public boolean hasNext() throws SailException {
-                    return iteration.hasNext();
-                }
-
-                @Override
-                public Namespace next() throws SailException {
-                    return iteration.next();
-                }
-
-                @Override
-                public void remove() throws SailException {
-                    iteration.remove();
-                }
-            };
-        } catch (Exception e) {
-            throw new RuntimeException(e);
-        }
-    }
-
-    public void printStatistics() {
-        Statistics statistics = namespaceCache.getStatistics();
-        if (statistics != null) { //TODO: use a logger please
-            System.out.println("Namespace Cache Statisitics: ");
-            System.out.println("--Hits: \t" + statistics.getCacheHits());
-            System.out.println("--Misses: \t" + statistics.getCacheMisses());
-            System.out.println("--Total Count: \t" + statistics.getObjectCount());
-        }
-    }
-}

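The anonymous class in iterateNamespace is a small exception-translating adapter, and the shape generalizes to any iteration whose source exception is unchecked. A sketch of the pattern (method name hypothetical; note the original compiles without any explicit translation precisely because RdfDAOException is unchecked in Rya):

    // Adapts a namespace iteration typed for RdfDAOException into one typed
    // for SailException, exactly as iterateNamespace does above.
    static <T> CloseableIteration<T, SailException> asSailIteration(
            final CloseableIteration<T, RdfDAOException> source) {
        return new CloseableIteration<T, SailException>() {
            public void close() throws SailException { source.close(); }
            public boolean hasNext() throws SailException { return source.hasNext(); }
            public T next() throws SailException { return source.next(); }
            public void remove() throws SailException { source.remove(); }
        };
    }
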
http://git-wip-us.apache.org/repos/asf/incubator-rya/blob/80faf06d/sail/rya.sail.impl/src/main/java/mvm/rya/rdftriplestore/utils/CombineContextsRdfInserter.java
----------------------------------------------------------------------
diff --git a/sail/rya.sail.impl/src/main/java/mvm/rya/rdftriplestore/utils/CombineContextsRdfInserter.java b/sail/rya.sail.impl/src/main/java/mvm/rya/rdftriplestore/utils/CombineContextsRdfInserter.java
deleted file mode 100644
index 5e73f93..0000000
--- a/sail/rya.sail.impl/src/main/java/mvm/rya/rdftriplestore/utils/CombineContextsRdfInserter.java
+++ /dev/null
@@ -1,164 +0,0 @@
-package mvm.rya.rdftriplestore.utils;
-
-/*
- * #%L
- * mvm.rya.rya.sail.impl
- * %%
- * Copyright (C) 2014 Rya
- * %%
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * You may obtain a copy of the License at
- * 
- *      http://www.apache.org/licenses/LICENSE-2.0
- * 
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- * #L%
- */
-
-import org.openrdf.OpenRDFUtil;
-import org.openrdf.model.*;
-import org.openrdf.repository.RepositoryConnection;
-import org.openrdf.repository.RepositoryException;
-import org.openrdf.rio.RDFHandlerException;
-import org.openrdf.rio.helpers.RDFHandlerBase;
-
-import java.util.HashMap;
-import java.util.Map;
-
-/**
- * RDFHandler that adds each incoming statement to the connection, merging the
- * statement's own context with any enforced contexts.
- * User: RoshanP
- * Date: 3/23/12
- * Time: 9:50 AM
- */
-public class CombineContextsRdfInserter extends RDFHandlerBase {
-
-    private final RepositoryConnection con;
-    private Resource[] contexts = new Resource[0];
-    private boolean preserveBNodeIDs;
-    private final Map<String, String> namespaceMap;
-    private final Map<String, BNode> bNodesMap;
-
-    public CombineContextsRdfInserter(RepositoryConnection con) {
-        this.con = con;
-        preserveBNodeIDs = true;
-        namespaceMap = new HashMap<String, String>();
-        bNodesMap = new HashMap<String, BNode>();
-    }
-
-    public void setPreserveBNodeIDs(boolean preserveBNodeIDs) {
-        this.preserveBNodeIDs = preserveBNodeIDs;
-    }
-
-    public boolean preservesBNodeIDs() {
-        return preserveBNodeIDs;
-    }
-
-    public void enforceContext(Resource... contexts) {
-        OpenRDFUtil.verifyContextNotNull(contexts);
-        this.contexts = contexts;
-    }
-
-    public boolean enforcesContext() {
-        return contexts.length != 0;
-    }
-
-    public Resource[] getContexts() {
-        return contexts;
-    }
-
-    @Override
-    public void endRDF()
-            throws RDFHandlerException {
-        for (Map.Entry<String, String> entry : namespaceMap.entrySet()) {
-            String prefix = entry.getKey();
-            String name = entry.getValue();
-
-            try {
-                if (con.getNamespace(prefix) == null) {
-                    con.setNamespace(prefix, name);
-                }
-            } catch (RepositoryException e) {
-                throw new RDFHandlerException(e);
-            }
-        }
-
-        namespaceMap.clear();
-        bNodesMap.clear();
-    }
-
-    @Override
-    public void handleNamespace(String prefix, String name) {
-        // FIXME: set namespaces directly when they are properly handled wrt
-        // rollback
-        // don't replace earlier declarations
-        if (prefix != null && !namespaceMap.containsKey(prefix)) {
-            namespaceMap.put(prefix, name);
-        }
-    }
-
-    @Override
-    public void handleStatement(Statement st)
-            throws RDFHandlerException {
-        Resource subj = st.getSubject();
-        URI pred = st.getPredicate();
-        Value obj = st.getObject();
-        Resource ctxt = st.getContext();
-
-        if (!preserveBNodeIDs) {
-            if (subj instanceof BNode) {
-                subj = mapBNode((BNode) subj);
-            }
-
-            if (obj instanceof BNode) {
-                obj = mapBNode((BNode) obj);
-            }
-
-            if (!enforcesContext() && ctxt instanceof BNode) {
-                ctxt = mapBNode((BNode) ctxt);
-            }
-        }
-
-        try {
-            if (enforcesContext()) {
-                Resource[] ctxts = contexts;
-                if (ctxt != null) {
-                    ctxts = combineContexts(contexts, ctxt);
-                }
-                con.add(subj, pred, obj, ctxts);
-            } else {
-                con.add(subj, pred, obj, ctxt);
-            }
-        } catch (RepositoryException e) {
-            throw new RDFHandlerException(e);
-        }
-    }
-
-    private BNode mapBNode(BNode bNode) {
-        BNode result = bNodesMap.get(bNode.getID());
-
-        if (result == null) {
-            result = con.getRepository().getValueFactory().createBNode();
-            bNodesMap.put(bNode.getID(), result);
-        }
-
-        return result;
-    }
-
-    public static Resource[] combineContexts(Resource[] contexts, Resource ctxt) {
-        if (contexts == null || ctxt == null) {
-            throw new IllegalArgumentException("Contexts cannot be null");
-        }
-        int length = contexts.length;
-        Resource[] ret = new Resource[length + 1];
-        System.arraycopy(contexts, 0, ret, 0, length);
-        ret[length] = ctxt;
-        return ret;
-    }
-}

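combineContexts is a plain array-append helper; a short usage sketch with invented graph URIs shows how a statement's own context gets folded into the enforced ones instead of being silently dropped:

    ValueFactory vf = ValueFactoryImpl.getInstance();
    Resource g1 = vf.createURI("http://example.org/graph1");  // enforced context
    Resource g2 = vf.createURI("http://example.org/graph2");  // enforced context
    Resource stmtCtx = vf.createURI("http://example.org/fromStatement");
    Resource[] merged = CombineContextsRdfInserter.combineContexts(
            new Resource[] { g1, g2 }, stmtCtx);
    // merged == [g1, g2, stmtCtx]; handleStatement then writes to all three.
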
http://git-wip-us.apache.org/repos/asf/incubator-rya/blob/80faf06d/sail/rya.sail.impl/src/main/java/mvm/rya/rdftriplestore/utils/DefaultStatistics.java
----------------------------------------------------------------------
diff --git a/sail/rya.sail.impl/src/main/java/mvm/rya/rdftriplestore/utils/DefaultStatistics.java b/sail/rya.sail.impl/src/main/java/mvm/rya/rdftriplestore/utils/DefaultStatistics.java
deleted file mode 100644
index 613b1dd..0000000
--- a/sail/rya.sail.impl/src/main/java/mvm/rya/rdftriplestore/utils/DefaultStatistics.java
+++ /dev/null
@@ -1,57 +0,0 @@
-package mvm.rya.rdftriplestore.utils;
-
-/*
- * #%L
- * mvm.rya.rya.sail.impl
- * %%
- * Copyright (C) 2014 Rya
- * %%
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * You may obtain a copy of the License at
- * 
- *      http://www.apache.org/licenses/LICENSE-2.0
- * 
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- * #L%
- */
-
-import org.openrdf.query.algebra.StatementPattern;
-import org.openrdf.query.algebra.evaluation.impl.EvaluationStatistics;
-
-/**
- * Class DefaultStatistics
- * Date: Apr 12, 2011
- * Time: 1:31:05 PM
- */
-public class DefaultStatistics extends EvaluationStatistics {
-
-    public DefaultStatistics() {
-    }
-
-    @Override
-    protected CardinalityCalculator createCardinalityCalculator() {
-        return new DefaultCardinalityCalculator();
-    }
-
-    public class DefaultCardinalityCalculator extends CardinalityCalculator {
-
-        double count = 0.0;
-
-        @Override
-        protected double getCardinality(StatementPattern sp) {
-            //based on how many (subj, pred, obj) are set
-//            int numSet = 3;
-//            if (sp.getSubjectVar().hasValue()) numSet--;
-//            if (sp.getPredicateVar().hasValue()) numSet--;
-//            if (sp.getObjectVar().hasValue()) numSet--;
-//            return numSet;
-            return count++;
-        }
-    }
-
-}

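The count++ above is not a real cardinality estimate: each pattern visited later reports a strictly higher pseudo-cost, so a cost-based join reorderer that sorts ascending ends up preserving the order in which the patterns were written. A standalone illustration of the same trick (class name invented):

    // Each call reports a higher pseudo-cardinality than the last, so sorting
    // patterns by "cost" reproduces their original visit order.
    final class OrderPreservingCosts {
        private double count = 0.0;
        double cardinality(StatementPattern sp) {
            return count++;   // 0.0, 1.0, 2.0, ... in visit order
        }
    }
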
http://git-wip-us.apache.org/repos/asf/incubator-rya/blob/80faf06d/sail/rya.sail.impl/src/main/java/mvm/rya/rdftriplestore/utils/FixedStatementPattern.java
----------------------------------------------------------------------
diff --git a/sail/rya.sail.impl/src/main/java/mvm/rya/rdftriplestore/utils/FixedStatementPattern.java b/sail/rya.sail.impl/src/main/java/mvm/rya/rdftriplestore/utils/FixedStatementPattern.java
deleted file mode 100644
index ee73276..0000000
--- a/sail/rya.sail.impl/src/main/java/mvm/rya/rdftriplestore/utils/FixedStatementPattern.java
+++ /dev/null
@@ -1,58 +0,0 @@
-package mvm.rya.rdftriplestore.utils;
-
-/*
- * #%L
- * mvm.rya.rya.sail.impl
- * %%
- * Copyright (C) 2014 Rya
- * %%
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * You may obtain a copy of the License at
- * 
- *      http://www.apache.org/licenses/LICENSE-2.0
- * 
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- * #L%
- */
-
-import org.openrdf.model.Statement;
-import org.openrdf.query.algebra.StatementPattern;
-import org.openrdf.query.algebra.Var;
-
-import java.util.ArrayList;
-import java.util.Collection;
-
-/**
- * StatementPattern gives fixed statements back
- *
- * Class FixedStatementPattern
- * Date: Mar 12, 2012
- * Time: 2:42:06 PM
- */
-public class FixedStatementPattern extends StatementPattern {
-    public Collection<Statement> statements = new ArrayList();
-
-    public FixedStatementPattern() {
-    }
-
-    public FixedStatementPattern(Var subject, Var predicate, Var object) {
-        super(subject, predicate, object);
-    }
-
-    public FixedStatementPattern(Scope scope, Var subject, Var predicate, Var object) {
-        super(scope, subject, predicate, object);
-    }
-
-    public FixedStatementPattern(Var subject, Var predicate, Var object, Var context) {
-        super(subject, predicate, object, context);
-    }
-
-    public FixedStatementPattern(Scope scope, Var subjVar, Var predVar, Var objVar, Var conVar) {
-        super(scope, subjVar, predVar, objVar, conVar);
-    }
-}

http://git-wip-us.apache.org/repos/asf/incubator-rya/blob/80faf06d/sail/rya.sail.impl/src/main/java/mvm/rya/rdftriplestore/utils/TransitivePropertySP.java
----------------------------------------------------------------------
diff --git a/sail/rya.sail.impl/src/main/java/mvm/rya/rdftriplestore/utils/TransitivePropertySP.java b/sail/rya.sail.impl/src/main/java/mvm/rya/rdftriplestore/utils/TransitivePropertySP.java
deleted file mode 100644
index 3cd3993..0000000
--- a/sail/rya.sail.impl/src/main/java/mvm/rya/rdftriplestore/utils/TransitivePropertySP.java
+++ /dev/null
@@ -1,51 +0,0 @@
-package mvm.rya.rdftriplestore.utils;
-
-/*
- * #%L
- * mvm.rya.rya.sail.impl
- * %%
- * Copyright (C) 2014 Rya
- * %%
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * You may obtain a copy of the License at
- * 
- *      http://www.apache.org/licenses/LICENSE-2.0
- * 
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- * #L%
- */
-
-import org.openrdf.query.algebra.StatementPattern;
-import org.openrdf.query.algebra.Var;
-
-/**
- * Class TransitivePropertySP
- * Date: Mar 14, 2012
- * Time: 5:23:10 PM
- */
-public class TransitivePropertySP extends StatementPattern {
-
-    public TransitivePropertySP() {
-    }
-
-    public TransitivePropertySP(Var subject, Var predicate, Var object) {
-        super(subject, predicate, object);
-    }
-
-    public TransitivePropertySP(Scope scope, Var subject, Var predicate, Var object) {
-        super(scope, subject, predicate, object);
-    }
-
-    public TransitivePropertySP(Var subject, Var predicate, Var object, Var context) {
-        super(subject, predicate, object, context);
-    }
-
-    public TransitivePropertySP(Scope scope, Var subjVar, Var predVar, Var objVar, Var conVar) {
-        super(scope, subjVar, predVar, objVar, conVar);
-    }
-}

http://git-wip-us.apache.org/repos/asf/incubator-rya/blob/80faf06d/sail/rya.sail.impl/src/main/resources/META-INF/org.openrdf.store.schemas
----------------------------------------------------------------------
diff --git a/sail/rya.sail.impl/src/main/resources/META-INF/org.openrdf.store.schemas b/sail/rya.sail.impl/src/main/resources/META-INF/org.openrdf.store.schemas
deleted file mode 100644
index ad9993f..0000000
--- a/sail/rya.sail.impl/src/main/resources/META-INF/org.openrdf.store.schemas
+++ /dev/null
@@ -1 +0,0 @@
-META-INF/schemas/cloudbasestore-schema.ttl
\ No newline at end of file

http://git-wip-us.apache.org/repos/asf/incubator-rya/blob/80faf06d/sail/rya.sail.impl/src/main/resources/META-INF/schemas/cloudbasestore-schema.ttl
----------------------------------------------------------------------
diff --git a/sail/rya.sail.impl/src/main/resources/META-INF/schemas/cloudbasestore-schema.ttl b/sail/rya.sail.impl/src/main/resources/META-INF/schemas/cloudbasestore-schema.ttl
deleted file mode 100644
index 708a964..0000000
--- a/sail/rya.sail.impl/src/main/resources/META-INF/schemas/cloudbasestore-schema.ttl
+++ /dev/null
@@ -1,20 +0,0 @@
-@prefix rdfs: <http://www.w3.org/2000/01/rdf-schema#>.
-@prefix rep: <http://www.openrdf.org/config/repository#>.
-@prefix sr: <http://www.openrdf.org/config/repository/sail#>.
-@prefix sail: <http://www.openrdf.org/config/sail#>.
-@prefix cbs: <http://www.openrdf.org/config/sail/cloudbasestore#>.
-
-[] a rep:Repository ;
-   rep:repositoryID "{%Repository ID|cloudbasestore%}" ;
-   rdfs:label "{%Repository title|Cloudbase store%}" ;
-   rep:repositoryImpl [
-      rep:repositoryType "openrdf:SailRepository" ;
-      sr:sailImpl [
-         sail:sailType "openrdf:RdfCloudTripleStore" ;
-         cbs:server "{%CBSail server|stratus13%}" ;
-         cbs:port "{%CBSail port|2181%}" ;
-         cbs:instance "{%CBSail instance|stratus%}" ;
-         cbs:user "{%CBSail user|root%}" ;
-         cbs:password "{%CBSail password|password%}" ;
-      ]
-   ].
\ No newline at end of file

http://git-wip-us.apache.org/repos/asf/incubator-rya/blob/80faf06d/sail/rya.sail.impl/src/main/resources/META-INF/services/org.openrdf.sail.config.SailFactory
----------------------------------------------------------------------
diff --git a/sail/rya.sail.impl/src/main/resources/META-INF/services/org.openrdf.sail.config.SailFactory b/sail/rya.sail.impl/src/main/resources/META-INF/services/org.openrdf.sail.config.SailFactory
deleted file mode 100644
index 09a0661..0000000
--- a/sail/rya.sail.impl/src/main/resources/META-INF/services/org.openrdf.sail.config.SailFactory
+++ /dev/null
@@ -1 +0,0 @@
-mvm.rya.rdftriplestore.RdfCloudTripleStoreFactory
\ No newline at end of file

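That one-line file is standard Java SPI plumbing: any jar on the classpath listing implementations under META-INF/services/org.openrdf.sail.config.SailFactory becomes discoverable. Sesame historically uses its own registry for the lookup, but the plain-JDK view of the same mechanism is a reasonable sketch (java.util.ServiceLoader and org.openrdf.sail.config.SailFactory imports assumed):

    // Discovers every SailFactory registered via META-INF/services, including
    // (when this module is on the classpath) RdfCloudTripleStoreFactory.
    ServiceLoader<SailFactory> factories = ServiceLoader.load(SailFactory.class);
    for (SailFactory factory : factories) {
        System.out.println(factory.getSailType());
    }
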
http://git-wip-us.apache.org/repos/asf/incubator-rya/blob/80faf06d/sail/rya.sail.impl/src/main/resources/ehcache.xml
----------------------------------------------------------------------
diff --git a/sail/rya.sail.impl/src/main/resources/ehcache.xml b/sail/rya.sail.impl/src/main/resources/ehcache.xml
deleted file mode 100644
index 5b2bdd2..0000000
--- a/sail/rya.sail.impl/src/main/resources/ehcache.xml
+++ /dev/null
@@ -1,26 +0,0 @@
-<ehcache xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance" xsi:noNamespaceSchemaLocation="../config/ehcache.xsd"
-         updateCheck="false">
-    <diskStore path="java.io.tmpdir"/>
-    <cache name="namespace"
-            maxElementsInMemory="1000"
-            eternal="false"
-            timeToIdleSeconds="3600"
-            timeToLiveSeconds="3600"
-            overflowToDisk="false"
-            diskPersistent="false"
-            memoryStoreEvictionPolicy="FIFO"
-            >
-    </cache>
-    <defaultCache
-            maxElementsInMemory="50000"
-            eternal="false"
-            timeToIdleSeconds="0"
-            timeToLiveSeconds="0"
-            overflowToDisk="true"
-            maxElementsOnDisk="1000000"
-            diskPersistent="true"
-            diskExpiryThreadIntervalSeconds="120"
-            memoryStoreEvictionPolicy="FIFO"
-            >
-    </defaultCache>
-</ehcache>

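For reference, the namespace cache above maps onto ehcache's classic programmatic constructor roughly as follows. This is a hedged equivalent sketch using net.sf.ehcache, the same API the NamespaceManager imports; the FIFO eviction policy and the defaultCache block have no slot in this constructor and are omitted:

    // Cache(name, maxElementsInMemory, overflowToDisk, eternal, ttlSeconds, ttiSeconds)
    CacheManager manager = CacheManager.create();
    Cache namespace = new Cache("namespace", 1000, false, false, 3600, 3600);
    manager.addCache(namespace);
    namespace.put(new Element("rdfs", "http://www.w3.org/2000/01/rdf-schema#"));
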

http://git-wip-us.apache.org/repos/asf/incubator-rya/blob/80faf06d/dao/cloudbase.rya/src/main/java/mvm/rya/cloudbase/mr/fileinput/BulkNtripsInputTool.java
----------------------------------------------------------------------
diff --git a/dao/cloudbase.rya/src/main/java/mvm/rya/cloudbase/mr/fileinput/BulkNtripsInputTool.java b/dao/cloudbase.rya/src/main/java/mvm/rya/cloudbase/mr/fileinput/BulkNtripsInputTool.java
deleted file mode 100644
index c03b124..0000000
--- a/dao/cloudbase.rya/src/main/java/mvm/rya/cloudbase/mr/fileinput/BulkNtripsInputTool.java
+++ /dev/null
@@ -1,318 +0,0 @@
-package mvm.rya.cloudbase.mr.fileinput;
-
-import cloudbase.core.client.Connector;
-import cloudbase.core.client.ZooKeeperInstance;
-import cloudbase.core.client.admin.TableOperations;
-import cloudbase.core.client.mapreduce.CloudbaseFileOutputFormat;
-import cloudbase.core.client.mapreduce.lib.partition.RangePartitioner;
-import cloudbase.core.data.Key;
-import cloudbase.core.data.Value;
-import cloudbase.core.util.TextUtil;
-import com.google.common.base.Preconditions;
-import mvm.rya.api.RdfCloudTripleStoreConstants;
-import mvm.rya.api.domain.RyaStatement;
-import mvm.rya.api.domain.RyaURI;
-import mvm.rya.api.resolver.RdfToRyaConversions;
-import mvm.rya.api.resolver.RyaContext;
-import mvm.rya.api.resolver.triple.TripleRow;
-import mvm.rya.api.resolver.triple.TripleRowResolver;
-import mvm.rya.cloudbase.CloudbaseRdfConstants;
-import mvm.rya.cloudbase.mr.utils.MRUtils;
-import mvm.rya.cloudbase.utils.bulk.KeyRangePartitioner;
-import org.apache.commons.codec.binary.Base64;
-import org.apache.hadoop.conf.Configuration;
-import org.apache.hadoop.conf.Configured;
-import org.apache.hadoop.fs.FileSystem;
-import org.apache.hadoop.fs.Path;
-import org.apache.hadoop.io.LongWritable;
-import org.apache.hadoop.io.Text;
-import org.apache.hadoop.mapreduce.Job;
-import org.apache.hadoop.mapreduce.Mapper;
-import org.apache.hadoop.mapreduce.Reducer;
-import org.apache.hadoop.mapreduce.lib.input.TextInputFormat;
-import org.apache.hadoop.util.Tool;
-import org.apache.hadoop.util.ToolRunner;
-import org.openrdf.model.Statement;
-import org.openrdf.rio.*;
-
-import java.io.BufferedOutputStream;
-import java.io.IOException;
-import java.io.PrintStream;
-import java.io.StringReader;
-import java.util.ArrayList;
-import java.util.Collection;
-import java.util.Map;
-
-import static com.google.common.base.Preconditions.checkNotNull;
-import static mvm.rya.cloudbase.CloudbaseRdfUtils.extractValue;
-import static mvm.rya.cloudbase.CloudbaseRdfUtils.from;
-
-/**
- * Takes large N-Triples files and uses MapReduce with Cloudbase
- * bulk-ingest techniques to load them into the tables in our partition format.
- * <p/>
- * Input: N-Triples file
- * Map:
- * - key : shard row - Text
- * - value : stmt in doc triple format - Text
- * Partitioner: RangePartitioner
- * Reduce:
- * - key : all the entries for each triple - Cloudbase Key
- * Class BulkNtripsInputTool
- * Date: Sep 13, 2011
- * Time: 10:00:17 AM
- */
-public class BulkNtripsInputTool extends Configured implements Tool {
-
-    public static final String WORKDIR_PROP = "bulk.n3.workdir";
-
-    private String userName = "root";
-    private String pwd = "password";
-    private String instance = "stratus";
-    private String zk = "10.40.190.129:2181";
-    private String ttl = null;
-    private String workDirBase = "/temp/bulkcb/work";
-    private String format = RDFFormat.NTRIPLES.getName();
-
-    @Override
-    public int run(final String[] args) throws Exception {
-        final Configuration conf = getConf();
-        try {
-            //conf
-            zk = conf.get(MRUtils.CB_ZK_PROP, zk);
-            ttl = conf.get(MRUtils.CB_TTL_PROP, ttl);
-            instance = conf.get(MRUtils.CB_INSTANCE_PROP, instance);
-            userName = conf.get(MRUtils.CB_USERNAME_PROP, userName);
-            pwd = conf.get(MRUtils.CB_PWD_PROP, pwd);
-            workDirBase = conf.get(WORKDIR_PROP, workDirBase);
-            format = conf.get(MRUtils.FORMAT_PROP, format);
-            conf.set(MRUtils.FORMAT_PROP, format);
-            final String inputDir = args[0];
-
-            ZooKeeperInstance zooKeeperInstance = new ZooKeeperInstance(instance, zk);
-            Connector connector = zooKeeperInstance.getConnector(userName, pwd);
-            TableOperations tableOperations = connector.tableOperations();
-
-            String tablePrefix = conf.get(MRUtils.TABLE_PREFIX_PROPERTY, null);
-            if (tablePrefix != null)
-                RdfCloudTripleStoreConstants.prefixTables(tablePrefix);
-            String[] tables = {tablePrefix + RdfCloudTripleStoreConstants.TBL_OSP_SUFFIX,
-                    tablePrefix + RdfCloudTripleStoreConstants.TBL_SPO_SUFFIX,
-                    tablePrefix + RdfCloudTripleStoreConstants.TBL_PO_SUFFIX};
-            Collection<Job> jobs = new ArrayList<Job>();
-            for (final String tableName : tables) {
-                PrintStream out = null;
-                try {
-                    String workDir = workDirBase + "/" + tableName;
-                    System.out.println("Loading data into table[" + tableName + "]");
-
-                    Job job = new Job(new Configuration(conf), "Bulk Ingest load data to Generic RDF Table[" + tableName + "]");
-                    job.setJarByClass(this.getClass());
-                    //setting long job
-                    Configuration jobConf = job.getConfiguration();
-                    jobConf.setBoolean("mapred.map.tasks.speculative.execution", false);
-                    jobConf.setBoolean("mapred.reduce.tasks.speculative.execution", false);
-                    jobConf.set("io.sort.mb", jobConf.get("io.sort.mb", "256"));
-                    jobConf.setBoolean("mapred.compress.map.output", true);
-//                    jobConf.set("mapred.map.output.compression.codec", "org.apache.hadoop.io.compress.GzipCodec"); //TODO: I would like LZO compression
-
-                    job.setInputFormatClass(TextInputFormat.class);
-
-                    job.setMapperClass(ParseNtripsMapper.class);
-                    job.setMapOutputKeyClass(Key.class);
-                    job.setMapOutputValueClass(Value.class);
-
-                    job.setCombinerClass(OutStmtMutationsReducer.class);
-                    job.setReducerClass(OutStmtMutationsReducer.class);
-                    job.setOutputFormatClass(CloudbaseFileOutputFormat.class);
-                    CloudbaseFileOutputFormat.setZooKeeperInstance(job, instance, zk);
-
-                    jobConf.set(ParseNtripsMapper.TABLE_PROPERTY, tableName);
-
-                    TextInputFormat.setInputPaths(job, new Path(inputDir));
-
-                    FileSystem fs = FileSystem.get(conf);
-                    Path workPath = new Path(workDir);
-                    if (fs.exists(workPath))
-                        fs.delete(workPath, true);
-
-                    CloudbaseFileOutputFormat.setOutputPath(job, new Path(workDir + "/files"));
-
-                    out = new PrintStream(new BufferedOutputStream(fs.create(new Path(workDir + "/splits.txt"))));
-
-                    if (!tableOperations.exists(tableName))
-                        tableOperations.create(tableName);
-                    Collection<Text> splits = tableOperations.getSplits(tableName, Integer.MAX_VALUE);
-                    for (Text split : splits)
-                        out.println(new String(Base64.encodeBase64(TextUtil.getBytes(split))));
-
-                    job.setNumReduceTasks(splits.size() + 1);
-                    out.close();
-
-                    job.setPartitionerClass(KeyRangePartitioner.class);
-                    RangePartitioner.setSplitFile(job, workDir + "/splits.txt");
-
-                    jobConf.set(WORKDIR_PROP, workDir);
-
-                    job.submit();
-                    jobs.add(job);
-
-                } catch (Exception re) {
-                    throw new RuntimeException(re);
-                } finally {
-                    if (out != null)
-                        out.close();
-                }
-            }
-
-            for (Job job : jobs) {
-                while (!job.isComplete()) {
-                    Thread.sleep(1000);
-                }
-            }
-
-            for (String tableName : tables) {
-                String workDir = workDirBase + "/" + tableName;
-                tableOperations.importDirectory(
-                        tableName,
-                        workDir + "/files",
-                        workDir + "/failures",
-                        20,
-                        4,
-                        false);
-            }
-
-        } catch (Exception e) {
-            throw new RuntimeException(e);
-        }
-
-        return 0;
-    }
-
-    public static void main(String[] args) {
-        try {
-            ToolRunner.run(new Configuration(), new BulkNtripsInputTool(), args);
-        } catch (Exception e) {
-            e.printStackTrace();
-        }
-    }
-
-    /**
-     * input: ntrips format triple
-     * <p/>
-     * output: key: shard row from generator
-     * value: stmt in serialized format (document format)
-     */
-    public static class ParseNtripsMapper extends Mapper<LongWritable, Text, Key, Value> {
-        public static final String TABLE_PROPERTY = "parsentripsmapper.table";
-
-        private RDFParser parser;
-        private String rdfFormat;
-        private String namedGraph;
-        private RyaContext ryaContext = RyaContext.getInstance();
-        private TripleRowResolver rowResolver = ryaContext.getTripleResolver();
-
-        @Override
-        protected void setup(final Context context) throws IOException, InterruptedException {
-            super.setup(context);
-            Configuration conf = context.getConfiguration();
-            final String table = conf.get(TABLE_PROPERTY);
-            Preconditions.checkNotNull(table, "Set the " + TABLE_PROPERTY + " property on the map reduce job");
-
-            final String cv_s = conf.get(MRUtils.CB_CV_PROP);
-            final byte[] cv = cv_s == null ? null : cv_s.getBytes();
-            rdfFormat = conf.get(MRUtils.FORMAT_PROP);
-            checkNotNull(rdfFormat, "Rdf format cannot be null");
-
-            namedGraph = conf.get(MRUtils.NAMED_GRAPH_PROP);
-
-            parser = Rio.createParser(RDFFormat.valueOf(rdfFormat));
-    		parser.setParserConfig(new ParserConfig(true, true, true, RDFParser.DatatypeHandling.VERIFY));
-            parser.setRDFHandler(new RDFHandler() {
-
-                @Override
-                public void startRDF() throws RDFHandlerException {
-
-                }
-
-                @Override
-                public void endRDF() throws RDFHandlerException {
-
-                }
-
-                @Override
-                public void handleNamespace(String s, String s1) throws RDFHandlerException {
-
-                }
-
-                @Override
-                public void handleStatement(Statement statement) throws RDFHandlerException {
-                    try {
-                        RyaStatement rs = RdfToRyaConversions.convertStatement(statement);
-                        if(rs.getColumnVisibility() == null) {
-                            rs.setColumnVisibility(cv);
-                        }
-
-                    	// Inject the specified context into the statement.
-                        if(namedGraph != null){
-                            rs.setContext(new RyaURI(namedGraph));
-                        } 
-
-                        Map<RdfCloudTripleStoreConstants.TABLE_LAYOUT,TripleRow> serialize = rowResolver.serialize(rs);
-
-                        if (table.contains(RdfCloudTripleStoreConstants.TBL_SPO_SUFFIX)) {
-                            TripleRow tripleRow = serialize.get(RdfCloudTripleStoreConstants.TABLE_LAYOUT.SPO);
-                            context.write(
-                                    from(tripleRow),
-                                    extractValue(tripleRow)
-                            );
-                        } else if (table.contains(RdfCloudTripleStoreConstants.TBL_PO_SUFFIX)) {
-                            TripleRow tripleRow = serialize.get(RdfCloudTripleStoreConstants.TABLE_LAYOUT.PO);
-                            context.write(
-                                    from(tripleRow),
-                                    extractValue(tripleRow)
-                            );
-                        } else if (table.contains(RdfCloudTripleStoreConstants.TBL_OSP_SUFFIX)) {
-                            TripleRow tripleRow = serialize.get(RdfCloudTripleStoreConstants.TABLE_LAYOUT.OSP);
-                            context.write(
-                                    from(tripleRow),
-                                    extractValue(tripleRow)
-                            );
-                        } else
-                            throw new IllegalArgumentException("Unrecognized table[" + table + "]");
-
-                    } catch (Exception e) {
-                        throw new RDFHandlerException(e);
-                    }
-                }
-
-                @Override
-                public void handleComment(String s) throws RDFHandlerException {
-
-                }
-            });
-        }
-
-        @Override
-        public void map(LongWritable key, Text value, Context output)
-                throws IOException, InterruptedException {
-            String rdf = value.toString();
-            try {
-                parser.parse(new StringReader(rdf), "");
-            } catch (RDFParseException e) {
-                System.out.println("Line[" + rdf + "] cannot be parsed as format[" + rdfFormat + "]. Exception[" + e.getMessage() + "]");
-            } catch (Exception e) {
-                e.printStackTrace();
-                throw new IOException("Exception occurred parsing triple[" + rdf + "]");
-            }
-        }
-    }
-
-    public static class OutStmtMutationsReducer extends Reducer<Key, Value, Key, Value> {
-
-        public void reduce(Key key, Iterable<Value> values, Context output)
-                throws IOException, InterruptedException {
-            output.write(key, CloudbaseRdfConstants.EMPTY_VALUE);
-        }
-    }
-}

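The deleted BulkNtripsInputTool above drives its bulk ingest off a splits
file: it base64-encodes the table's current split points one per line, hands
that file to the RangePartitioner, and sizes the reduce phase so every range
gets its own reducer. A minimal sketch of just that step, assuming a
hypothetical SplitsFileSketch helper and only the Hadoop and commons-codec
calls shown:

    import java.io.IOException;
    import java.io.PrintStream;
    import java.util.Collection;

    import org.apache.commons.codec.binary.Base64;
    import org.apache.hadoop.fs.FileSystem;
    import org.apache.hadoop.fs.Path;
    import org.apache.hadoop.io.Text;

    // Sketch only: writes one base64-encoded split point per line so a
    // range partitioner can assign each table range to its own reducer.
    public class SplitsFileSketch {
        public static int writeSplitsFile(FileSystem fs, Path splitsFile,
                                          Collection<Text> splits) throws IOException {
            try (PrintStream out = new PrintStream(fs.create(splitsFile))) {
                for (Text split : splits) {
                    out.println(new String(Base64.encodeBase64(split.copyBytes())));
                }
            }
            return splits.size() + 1; // one reducer per range, plus the open tail range
        }
    }

Returning splits.size() + 1 matches the deleted tool's setNumReduceTasks
call: n split points divide the keyspace into n + 1 ranges.
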
http://git-wip-us.apache.org/repos/asf/incubator-rya/blob/80faf06d/dao/cloudbase.rya/src/main/java/mvm/rya/cloudbase/mr/fileinput/RdfFileInputByLineTool.java
----------------------------------------------------------------------
diff --git a/dao/cloudbase.rya/src/main/java/mvm/rya/cloudbase/mr/fileinput/RdfFileInputByLineTool.java b/dao/cloudbase.rya/src/main/java/mvm/rya/cloudbase/mr/fileinput/RdfFileInputByLineTool.java
deleted file mode 100644
index 5aed4a2..0000000
--- a/dao/cloudbase.rya/src/main/java/mvm/rya/cloudbase/mr/fileinput/RdfFileInputByLineTool.java
+++ /dev/null
@@ -1,230 +0,0 @@
-package mvm.rya.cloudbase.mr.fileinput;
-
-import cloudbase.core.client.mapreduce.CloudbaseOutputFormat;
-import cloudbase.core.data.Mutation;
-import cloudbase.core.security.ColumnVisibility;
-import mvm.rya.api.RdfCloudTripleStoreConstants;
-import mvm.rya.api.domain.RyaStatement;
-import mvm.rya.api.resolver.RdfToRyaConversions;
-import mvm.rya.cloudbase.CloudbaseRdfConstants;
-import mvm.rya.cloudbase.RyaTableMutationsFactory;
-import mvm.rya.cloudbase.mr.utils.MRUtils;
-import org.apache.hadoop.conf.Configuration;
-import org.apache.hadoop.fs.Path;
-import org.apache.hadoop.io.LongWritable;
-import org.apache.hadoop.io.Text;
-import org.apache.hadoop.mapreduce.Job;
-import org.apache.hadoop.mapreduce.Mapper;
-import org.apache.hadoop.mapreduce.lib.input.FileInputFormat;
-import org.apache.hadoop.mapreduce.lib.input.TextInputFormat;
-import org.apache.hadoop.util.Tool;
-import org.apache.hadoop.util.ToolRunner;
-import org.openrdf.model.Statement;
-import org.openrdf.rio.*;
-
-import java.io.IOException;
-import java.io.StringReader;
-import java.util.Collection;
-import java.util.Date;
-import java.util.Map;
-
-/**
- * Does a bulk import of RDF files, parsing one statement per line.
- * Class RdfFileInputByLineTool
- * Date: May 16, 2011
- * Time: 3:12:16 PM
- */
-public class RdfFileInputByLineTool implements Tool {
-
-    private Configuration conf = new Configuration();
-
-    private String userName = "root";
-    private String pwd = "password";
-    private String instance = "stratus";
-    private String zk = "10.40.190.113:2181";
-    private String tablePrefix = null;
-    private RDFFormat format = RDFFormat.NTRIPLES;
-
-    public Configuration getConf() {
-        return conf;
-    }
-
-    public void setConf(Configuration conf) {
-        this.conf = conf;
-    }
-
-    public static void main(String[] args) {
-        try {
-            ToolRunner.run(new Configuration(), new RdfFileInputByLineTool(), args);
-        } catch (Exception e) {
-            e.printStackTrace();
-        }
-    }
-
-    public long runJob(String[] args) throws IOException, ClassNotFoundException, InterruptedException {
-        conf.setBoolean("mapred.map.tasks.speculative.execution", false);
-        conf.setBoolean("mapred.reduce.tasks.speculative.execution", false);
-        conf.set("io.sort.mb", "256");
-        conf.setLong("mapred.task.timeout", 600000000);
-
-        zk = conf.get(MRUtils.CB_ZK_PROP, zk);
-        instance = conf.get(MRUtils.CB_INSTANCE_PROP, instance);
-        userName = conf.get(MRUtils.CB_USERNAME_PROP, userName);
-        pwd = conf.get(MRUtils.CB_PWD_PROP, pwd);
-        format = RDFFormat.valueOf(conf.get(MRUtils.FORMAT_PROP, RDFFormat.NTRIPLES.getName()));
-
-        String tablePrefix = conf.get(MRUtils.TABLE_PREFIX_PROPERTY, RdfCloudTripleStoreConstants.TBL_PRFX_DEF);
-
-        Job job = new Job(conf);
-        job.setJarByClass(RdfFileInputByLineTool.class);
-
-        // set up cloudbase input
-        job.setInputFormatClass(TextInputFormat.class);
-        FileInputFormat.addInputPath(job, new Path(args[0]));
-
-        // set input output of the particular job
-        job.setMapOutputKeyClass(Text.class);
-        job.setMapOutputValueClass(Mutation.class);
-//        job.setOutputKeyClass(LongWritable.class);
-//        job.setOutputValueClass(StatementWritable.class);
-
-        job.setOutputFormatClass(CloudbaseOutputFormat.class);
-        CloudbaseOutputFormat.setOutputInfo(job, userName, pwd.getBytes(), true, tablePrefix + RdfCloudTripleStoreConstants.TBL_SPO_SUFFIX);
-        CloudbaseOutputFormat.setZooKeeperInstance(job, instance, zk);
-
-        // set mapper and reducer classes
-        job.setMapperClass(TextToMutationMapper.class);
-        job.setNumReduceTasks(0);
-//        job.setReducerClass(Reducer.class);
-
-        // set output
-//        Path outputDir = new Path("/temp/sparql-out/testout");
-//        FileSystem dfs = FileSystem.get(outputDir.toUri(), conf);
-//        if (dfs.exists(outputDir))
-//            dfs.delete(outputDir, true);
-//
-//        FileOutputFormat.setOutputPath(job, outputDir);
-
-        // Submit the job
-        Date startTime = new Date();
-        System.out.println("Job started: " + startTime);
-        int exitCode = job.waitForCompletion(true) ? 0 : 1;
-
-        if (exitCode == 0) {
-            Date end_time = new Date();
-            System.out.println("Job ended: " + end_time);
-            System.out.println("The job took "
-                    + (end_time.getTime() - startTime.getTime()) / 1000
-                    + " seconds.");
-            return job
-                    .getCounters()
-                    .findCounter("org.apache.hadoop.mapred.Task$Counter",
-                            "REDUCE_OUTPUT_RECORDS").getValue();
-        } else {
-            System.out.println("Job Failed!!!");
-        }
-
-        return -1;
-    }
-
-    @Override
-    public int run(String[] args) throws Exception {
-        return (int) runJob(args);
-    }
-
-    public static class TextToMutationMapper extends Mapper<LongWritable, Text, Text, Mutation> {
-        protected RDFParser parser;
-        private String prefix;
-        private RDFFormat rdfFormat;
-        protected Text spo_table;
-        private Text po_table;
-        private Text osp_table;
-        private byte[] cv = CloudbaseRdfConstants.EMPTY_CV.getExpression();
-
-        public TextToMutationMapper() {
-        }
-
-        @Override
-        protected void setup(final Context context) throws IOException, InterruptedException {
-            super.setup(context);
-            Configuration conf = context.getConfiguration();
-            prefix = conf.get(MRUtils.TABLE_PREFIX_PROPERTY, null);
-            if (prefix != null) {
-                RdfCloudTripleStoreConstants.prefixTables(prefix);
-            }
-
-            spo_table = new Text(prefix + RdfCloudTripleStoreConstants.TBL_SPO_SUFFIX);
-            po_table = new Text(prefix + RdfCloudTripleStoreConstants.TBL_PO_SUFFIX);
-            osp_table = new Text(prefix + RdfCloudTripleStoreConstants.TBL_OSP_SUFFIX);
-
-            final String cv_s = conf.get(MRUtils.CB_CV_PROP);
-            if (cv_s != null)
-                cv = cv_s.getBytes();
-
-            rdfFormat = RDFFormat.valueOf(conf.get(MRUtils.FORMAT_PROP, RDFFormat.NTRIPLES.toString()));
-            parser = Rio.createParser(rdfFormat);
-            final RyaTableMutationsFactory mut = new RyaTableMutationsFactory();
-
-            parser.setRDFHandler(new RDFHandler() {
-
-                @Override
-                public void startRDF() throws RDFHandlerException {
-
-                }
-
-                @Override
-                public void endRDF() throws RDFHandlerException {
-
-                }
-
-                @Override
-                public void handleNamespace(String s, String s1) throws RDFHandlerException {
-
-                }
-
-                @Override
-                public void handleStatement(Statement statement) throws RDFHandlerException {
-                    try {
-                        RyaStatement ryaStatement = RdfToRyaConversions.convertStatement(statement);
-                        if(ryaStatement.getColumnVisibility() == null) {
-                            ryaStatement.setColumnVisibility(cv);
-                        }
-                        Map<RdfCloudTripleStoreConstants.TABLE_LAYOUT, Collection<Mutation>> mutationMap =
-                                mut.serialize(ryaStatement);
-                        Collection<Mutation> spo = mutationMap.get(RdfCloudTripleStoreConstants.TABLE_LAYOUT.SPO);
-                        Collection<Mutation> po = mutationMap.get(RdfCloudTripleStoreConstants.TABLE_LAYOUT.PO);
-                        Collection<Mutation> osp = mutationMap.get(RdfCloudTripleStoreConstants.TABLE_LAYOUT.OSP);
-
-                        for (Mutation m : spo) {
-                            context.write(spo_table, m);
-                        }
-                        for (Mutation m : po) {
-                            context.write(po_table, m);
-                        }
-                        for (Mutation m : osp) {
-                            context.write(osp_table, m);
-                        }
-                    } catch (Exception e) {
-                        throw new RDFHandlerException(e);
-                    }
-                }
-
-                @Override
-                public void handleComment(String s) throws RDFHandlerException {
-
-                }
-            });
-        }
-
-        @Override
-        protected void map(LongWritable key, Text value, final Context context) throws IOException, InterruptedException {
-            try {
-                parser.parse(new StringReader(value.toString()), "");
-            } catch (Exception e) {
-                throw new IOException(e);
-            }
-        }
-
-    }
-}
-

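RdfFileInputByLineTool works because N-Triples puts exactly one statement on
each line, so every map() call can parse its Text value as a complete
document; line-by-line splitting would break multi-line formats such as
RDF/XML. A sketch of that per-line parse using the Sesame Rio API the tool
already imports (LineParseSketch is a hypothetical name):

    import java.io.StringReader;
    import java.util.ArrayList;
    import java.util.Collection;

    import org.openrdf.model.Statement;
    import org.openrdf.rio.RDFFormat;
    import org.openrdf.rio.RDFParser;
    import org.openrdf.rio.Rio;
    import org.openrdf.rio.helpers.StatementCollector;

    // Sketch only: parses a single N-Triples line into Statement objects,
    // the same per-line trick the deleted mapper relies on.
    public class LineParseSketch {
        public static Collection<Statement> parseLine(String line) throws Exception {
            Collection<Statement> statements = new ArrayList<Statement>();
            RDFParser parser = Rio.createParser(RDFFormat.NTRIPLES);
            parser.setRDFHandler(new StatementCollector(statements));
            parser.parse(new StringReader(line), ""); // empty base URI, as in the tool
            return statements;
        }
    }
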
http://git-wip-us.apache.org/repos/asf/incubator-rya/blob/80faf06d/dao/cloudbase.rya/src/main/java/mvm/rya/cloudbase/mr/fileinput/RdfFileInputFormat.java
----------------------------------------------------------------------
diff --git a/dao/cloudbase.rya/src/main/java/mvm/rya/cloudbase/mr/fileinput/RdfFileInputFormat.java b/dao/cloudbase.rya/src/main/java/mvm/rya/cloudbase/mr/fileinput/RdfFileInputFormat.java
deleted file mode 100644
index 54f9a13..0000000
--- a/dao/cloudbase.rya/src/main/java/mvm/rya/cloudbase/mr/fileinput/RdfFileInputFormat.java
+++ /dev/null
@@ -1,115 +0,0 @@
-package mvm.rya.cloudbase.mr.fileinput;
-
-import mvm.rya.api.domain.utils.RyaStatementWritable;
-import mvm.rya.api.resolver.RdfToRyaConversions;
-import mvm.rya.cloudbase.mr.utils.MRUtils;
-import org.apache.hadoop.conf.Configuration;
-import org.apache.hadoop.fs.FSDataInputStream;
-import org.apache.hadoop.fs.FileSystem;
-import org.apache.hadoop.fs.Path;
-import org.apache.hadoop.io.LongWritable;
-import org.apache.hadoop.mapreduce.InputSplit;
-import org.apache.hadoop.mapreduce.RecordReader;
-import org.apache.hadoop.mapreduce.TaskAttemptContext;
-import org.apache.hadoop.mapreduce.lib.input.FileInputFormat;
-import org.apache.hadoop.mapreduce.lib.input.FileSplit;
-import org.openrdf.model.Statement;
-import org.openrdf.rio.*;
-
-import java.io.IOException;
-import java.util.concurrent.BlockingQueue;
-import java.util.concurrent.LinkedBlockingQueue;
-
-/**
- * Reads multiple RDF-formatted files and converts their contents into statements.
- * Class RdfFileInputFormat
- * Date: May 16, 2011
- * Time: 2:11:24 PM
- */
-public class RdfFileInputFormat extends FileInputFormat<LongWritable, RyaStatementWritable> {
-
-    @Override
-    public RecordReader<LongWritable, RyaStatementWritable> createRecordReader(InputSplit inputSplit,
-                                                                               TaskAttemptContext taskAttemptContext)
-            throws IOException, InterruptedException {
-        return new RdfFileRecordReader();
-    }
-
-    private class RdfFileRecordReader extends RecordReader<LongWritable, RyaStatementWritable> implements RDFHandler {
-
-        boolean closed = false;
-        long count = 0;
-        BlockingQueue<RyaStatementWritable> queue = new LinkedBlockingQueue<RyaStatementWritable>();
-        int total = 0;
-
-        @Override
-        public void initialize(InputSplit inputSplit, TaskAttemptContext taskAttemptContext) throws IOException, InterruptedException {
-            FileSplit fileSplit = (FileSplit) inputSplit;
-            Configuration conf = taskAttemptContext.getConfiguration();
-            String rdfForm_s = conf.get(MRUtils.FORMAT_PROP, RDFFormat.RDFXML.getName()); //default to RDF/XML
-            RDFFormat rdfFormat = RDFFormat.valueOf(rdfForm_s);
-
-            Path file = fileSplit.getPath();
-            FileSystem fs = file.getFileSystem(conf);
-            FSDataInputStream fileIn = fs.open(fileSplit.getPath());
-
-            RDFParser rdfParser = Rio.createParser(rdfFormat);
-            rdfParser.setRDFHandler(this);
-            try {
-                rdfParser.parse(fileIn, "");
-            } catch (Exception e) {
-                throw new IOException(e);
-            }
-            fileIn.close();
-            total = queue.size();
-            //TODO: Make this threaded so that you don't hold too many statements before sending them
-        }
-
-        @Override
-        public boolean nextKeyValue() throws IOException, InterruptedException {
-            return queue.size() > 0;
-        }
-
-        @Override
-        public LongWritable getCurrentKey() throws IOException, InterruptedException {
-            return new LongWritable(count++);
-        }
-
-        @Override
-        public RyaStatementWritable getCurrentValue() throws IOException, InterruptedException {
-            return queue.poll();
-        }
-
-        @Override
-        public float getProgress() throws IOException, InterruptedException {
-            return ((float) (total - queue.size())) / ((float) total);
-        }
-
-        @Override
-        public void close() throws IOException {
-            closed = true;
-        }
-
-        @Override
-        public void startRDF() throws RDFHandlerException {
-        }
-
-        @Override
-        public void endRDF() throws RDFHandlerException {
-        }
-
-        @Override
-        public void handleNamespace(String s, String s1) throws RDFHandlerException {
-        }
-
-        @Override
-        public void handleStatement(Statement statement) throws RDFHandlerException {
-            queue.add(new RyaStatementWritable(RdfToRyaConversions.convertStatement(statement)));
-        }
-
-        @Override
-        public void handleComment(String s) throws RDFHandlerException {
-        }
-    }
-
-}

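RdfFileInputFormat's record reader parses the entire split inside
initialize() and buffers every statement in a queue, which is the memory
hazard the inline TODO acknowledges. Note too that the deleted reader polls
the queue in getCurrentValue() while nextKeyValue() only checks the size; the
RecordReader contract expects nextKeyValue() itself to advance the cursor. A
sketch of the same queue-backed pattern with the poll moved accordingly
(QueueBackedReader is a hypothetical name):

    import java.util.ArrayDeque;
    import java.util.Queue;

    import org.apache.hadoop.io.LongWritable;
    import org.apache.hadoop.io.Text;
    import org.apache.hadoop.mapreduce.InputSplit;
    import org.apache.hadoop.mapreduce.RecordReader;
    import org.apache.hadoop.mapreduce.TaskAttemptContext;

    // Sketch only: buffer-everything reader with the advance in nextKeyValue().
    public class QueueBackedReader extends RecordReader<LongWritable, Text> {
        private final Queue<Text> buffered = new ArrayDeque<Text>();
        private long index = -1;
        private int total = 0;
        private Text current;

        @Override
        public void initialize(InputSplit split, TaskAttemptContext ctx) {
            // A real reader would parse the whole split here and enqueue one
            // record per parsed item, as the deleted class does with RDF.
            buffered.add(new Text("record"));
            total = buffered.size();
        }

        @Override
        public boolean nextKeyValue() {
            current = buffered.poll();   // advance here, not in getCurrentValue()
            if (current == null) {
                return false;
            }
            index++;
            return true;
        }

        @Override
        public LongWritable getCurrentKey() { return new LongWritable(index); }

        @Override
        public Text getCurrentValue() { return current; }

        @Override
        public float getProgress() {
            return total == 0 ? 1f : (float) (total - buffered.size()) / total;
        }

        @Override
        public void close() { }
    }
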
http://git-wip-us.apache.org/repos/asf/incubator-rya/blob/80faf06d/dao/cloudbase.rya/src/main/java/mvm/rya/cloudbase/mr/fileinput/RdfFileInputTool.java
----------------------------------------------------------------------
diff --git a/dao/cloudbase.rya/src/main/java/mvm/rya/cloudbase/mr/fileinput/RdfFileInputTool.java b/dao/cloudbase.rya/src/main/java/mvm/rya/cloudbase/mr/fileinput/RdfFileInputTool.java
deleted file mode 100644
index f48cbae..0000000
--- a/dao/cloudbase.rya/src/main/java/mvm/rya/cloudbase/mr/fileinput/RdfFileInputTool.java
+++ /dev/null
@@ -1,185 +0,0 @@
-package mvm.rya.cloudbase.mr.fileinput;
-
-import cloudbase.core.client.mapreduce.CloudbaseOutputFormat;
-import cloudbase.core.data.Mutation;
-import cloudbase.core.security.ColumnVisibility;
-import mvm.rya.api.RdfCloudTripleStoreConstants;
-import mvm.rya.api.domain.RyaStatement;
-import mvm.rya.api.domain.utils.RyaStatementWritable;
-import mvm.rya.cloudbase.CloudbaseRdfConstants;
-import mvm.rya.cloudbase.RyaTableMutationsFactory;
-import org.apache.hadoop.conf.Configuration;
-import org.apache.hadoop.fs.Path;
-import org.apache.hadoop.io.LongWritable;
-import org.apache.hadoop.io.Text;
-import org.apache.hadoop.mapreduce.Job;
-import org.apache.hadoop.mapreduce.Mapper;
-import org.apache.hadoop.util.Tool;
-import org.apache.hadoop.util.ToolRunner;
-import org.openrdf.rio.RDFFormat;
-
-import java.io.IOException;
-import java.util.Collection;
-import java.util.Date;
-import java.util.Map;
-
-import static mvm.rya.cloudbase.mr.utils.MRUtils.*;
-
-/**
- * Does a bulk import of RDF files.
- * Class RdfFileInputTool
- * Date: May 16, 2011
- * Time: 3:12:16 PM
- */
-public class RdfFileInputTool implements Tool {
-
-    private Configuration conf;
-
-    private String userName = "root";
-    private String pwd = "password";
-    private String instance = "stratus";
-    private String zk = "10.40.190.113:2181";
-    private String tablePrefix = RdfCloudTripleStoreConstants.TBL_PRFX_DEF;
-    private String format = RDFFormat.RDFXML.getName();
-
-
-    public Configuration getConf() {
-        return conf;
-    }
-
-    @Override
-    public void setConf(Configuration conf) {
-        this.conf = conf;
-    }
-
-    public static void main(String[] args) {
-        try {
-            ToolRunner.run(new Configuration(), new RdfFileInputTool(), args);
-        } catch (Exception e) {
-            e.printStackTrace();
-        }
-    }
-
-    public long runJob(String[] args) throws IOException, ClassNotFoundException, InterruptedException {
-        //faster
-        conf.setBoolean("mapred.map.tasks.speculative.execution", false);
-        conf.setBoolean("mapred.reduce.tasks.speculative.execution", false);
-
-        zk = conf.get(CB_ZK_PROP, zk);
-        instance = conf.get(CB_INSTANCE_PROP, instance);
-        userName = conf.get(CB_USERNAME_PROP, userName);
-        pwd = conf.get(CB_PWD_PROP, pwd);
-
-        tablePrefix = conf.get(TABLE_PREFIX_PROPERTY, tablePrefix);
-        format = conf.get(FORMAT_PROP, format);
-        conf.set(FORMAT_PROP, format);
-
-        Job job = new Job(conf);
-        job.setJarByClass(RdfFileInputTool.class);
-
-        // set up cloudbase input
-        job.setInputFormatClass(RdfFileInputFormat.class);
-        RdfFileInputFormat.addInputPath(job, new Path(args[0]));
-
-        // set input output of the particular job
-        job.setMapOutputKeyClass(LongWritable.class);
-        job.setMapOutputValueClass(RyaStatementWritable.class);
-//        job.setOutputKeyClass(LongWritable.class);
-//        job.setOutputValueClass(StatementWritable.class);
-
-        job.setOutputFormatClass(CloudbaseOutputFormat.class);
-        CloudbaseOutputFormat.setOutputInfo(job, userName, pwd.getBytes(), true, tablePrefix + RdfCloudTripleStoreConstants.TBL_SPO_SUFFIX);
-        CloudbaseOutputFormat.setZooKeeperInstance(job, instance, zk);
-
-        // set mapper and reducer classes
-        job.setMapperClass(StatementToMutationMapper.class);
-        job.setNumReduceTasks(0);
-//        job.setReducerClass(Reducer.class);
-
-        // set output
-//        Path outputDir = new Path("/temp/sparql-out/testout");
-//        FileSystem dfs = FileSystem.get(outputDir.toUri(), conf);
-//        if (dfs.exists(outputDir))
-//            dfs.delete(outputDir, true);
-//
-//        FileOutputFormat.setOutputPath(job, outputDir);
-
-        // Submit the job
-        Date startTime = new Date();
-        System.out.println("Job started: " + startTime);
-        int exitCode = job.waitForCompletion(true) ? 0 : 1;
-
-        if (exitCode == 0) {
-            Date end_time = new Date();
-            System.out.println("Job ended: " + end_time);
-            System.out.println("The job took "
-                    + (end_time.getTime() - startTime.getTime()) / 1000
-                    + " seconds.");
-            return job
-                    .getCounters()
-                    .findCounter("org.apache.hadoop.mapred.Task$Counter",
-                            "REDUCE_OUTPUT_RECORDS").getValue();
-        } else {
-            System.out.println("Job Failed!!!");
-        }
-
-        return -1;
-    }
-
-    @Override
-    public int run(String[] args) throws Exception {
-        runJob(args);
-        return 0;
-    }
-
-    public static class StatementToMutationMapper extends Mapper<LongWritable, RyaStatementWritable, Text, Mutation> {
-        protected String tablePrefix;
-        protected Text spo_table;
-        protected Text po_table;
-        protected Text osp_table;
-        private byte[] cv = CloudbaseRdfConstants.EMPTY_CV.getExpression();
-        RyaTableMutationsFactory mut = new RyaTableMutationsFactory();
-
-        public StatementToMutationMapper() {
-        }
-
-        @Override
-        protected void setup(Context context) throws IOException, InterruptedException {
-            super.setup(context);
-            Configuration conf = context.getConfiguration();
-            tablePrefix = conf.get(TABLE_PREFIX_PROPERTY, RdfCloudTripleStoreConstants.TBL_PRFX_DEF);
-            spo_table = new Text(tablePrefix + RdfCloudTripleStoreConstants.TBL_SPO_SUFFIX);
-            po_table = new Text(tablePrefix + RdfCloudTripleStoreConstants.TBL_PO_SUFFIX);
-            osp_table = new Text(tablePrefix + RdfCloudTripleStoreConstants.TBL_OSP_SUFFIX);
-
-            final String cv_s = conf.get(CB_CV_PROP);
-            if (cv_s != null)
-                cv = cv_s.getBytes();
-        }
-
-        @Override
-        protected void map(LongWritable key, RyaStatementWritable value, Context context) throws IOException, InterruptedException {
-            RyaStatement statement = value.getRyaStatement();
-            if (statement.getColumnVisibility() == null) {
-                statement.setColumnVisibility(cv);
-            }
-            Map<RdfCloudTripleStoreConstants.TABLE_LAYOUT, Collection<Mutation>> mutationMap =
-                    mut.serialize(statement);
-            Collection<Mutation> spo = mutationMap.get(RdfCloudTripleStoreConstants.TABLE_LAYOUT.SPO);
-            Collection<Mutation> po = mutationMap.get(RdfCloudTripleStoreConstants.TABLE_LAYOUT.PO);
-            Collection<Mutation> osp = mutationMap.get(RdfCloudTripleStoreConstants.TABLE_LAYOUT.OSP);
-
-            for (Mutation m : spo) {
-                context.write(spo_table, m);
-            }
-            for (Mutation m : po) {
-                context.write(po_table, m);
-            }
-            for (Mutation m : osp) {
-                context.write(osp_table, m);
-            }
-        }
-
-    }
-}
-

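One quirk shared by both file-input tools above: they run map-only jobs
(job.setNumReduceTasks(0)) yet report the REDUCE_OUTPUT_RECORDS counter,
which stays at zero when there is no reduce phase, so the returned count is
always 0 on success. MAP_OUTPUT_RECORDS is the counter that actually tracks
records emitted by the mappers. A small sketch of the completion-and-count
step under that assumption (JobCountSketch is a hypothetical name):

    import org.apache.hadoop.mapreduce.Job;

    // Sketch only: waits for a configured map-only job and reads the
    // counter that reflects records emitted by the mappers.
    public class JobCountSketch {
        public static long runAndCount(Job job) throws Exception {
            long start = System.currentTimeMillis();
            boolean ok = job.waitForCompletion(true);
            System.out.println("The job took "
                    + (System.currentTimeMillis() - start) / 1000 + " seconds.");
            if (!ok) {
                return -1;
            }
            return job.getCounters()
                      .findCounter("org.apache.hadoop.mapred.Task$Counter",
                              "MAP_OUTPUT_RECORDS").getValue();
        }
    }
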
http://git-wip-us.apache.org/repos/asf/incubator-rya/blob/80faf06d/dao/cloudbase.rya/src/main/java/mvm/rya/cloudbase/mr/fileinput/ShardedBulkNtripsInputTool.java
----------------------------------------------------------------------
diff --git a/dao/cloudbase.rya/src/main/java/mvm/rya/cloudbase/mr/fileinput/ShardedBulkNtripsInputTool.java b/dao/cloudbase.rya/src/main/java/mvm/rya/cloudbase/mr/fileinput/ShardedBulkNtripsInputTool.java
deleted file mode 100644
index 5d7d971..0000000
--- a/dao/cloudbase.rya/src/main/java/mvm/rya/cloudbase/mr/fileinput/ShardedBulkNtripsInputTool.java
+++ /dev/null
@@ -1,314 +0,0 @@
-//package mvm.rya.cloudbase.mr.fileinput;
-//
-//import cloudbase.core.client.Connector;
-//import cloudbase.core.client.ZooKeeperInstance;
-//import cloudbase.core.client.admin.TableOperations;
-//import cloudbase.core.client.mapreduce.CloudbaseFileOutputFormat;
-//import cloudbase.core.client.mapreduce.lib.partition.RangePartitioner;
-//import cloudbase.core.data.Key;
-//import cloudbase.core.data.Value;
-//import cloudbase.core.util.TextUtil;
-//import com.google.common.base.Preconditions;
-//import mvm.rya.api.RdfCloudTripleStoreConstants;
-//import mvm.rya.cloudbase.CloudbaseRdfConstants;
-//import mvm.rya.cloudbase.RyaTableKeyValues;
-//import mvm.rya.cloudbase.mr.utils.MRUtils;
-//import mvm.rya.cloudbase.utils.bulk.KeyRangePartitioner;
-//import mvm.rya.cloudbase.utils.shard.HashAlgorithm;
-//import mvm.rya.cloudbase.utils.shard.HashCodeHashAlgorithm;
-//import org.apache.commons.codec.binary.Base64;
-//import org.apache.hadoop.conf.Configuration;
-//import org.apache.hadoop.conf.Configured;
-//import org.apache.hadoop.fs.FileSystem;
-//import org.apache.hadoop.fs.Path;
-//import org.apache.hadoop.io.LongWritable;
-//import org.apache.hadoop.io.Text;
-//import org.apache.hadoop.mapreduce.Job;
-//import org.apache.hadoop.mapreduce.Mapper;
-//import org.apache.hadoop.mapreduce.Reducer;
-//import org.apache.hadoop.mapreduce.lib.input.TextInputFormat;
-//import org.apache.hadoop.util.Tool;
-//import org.apache.hadoop.util.ToolRunner;
-//import org.openrdf.model.Resource;
-//import org.openrdf.model.Statement;
-//import org.openrdf.rio.*;
-//
-//import java.io.BufferedOutputStream;
-//import java.io.IOException;
-//import java.io.PrintStream;
-//import java.io.StringReader;
-//import java.util.ArrayList;
-//import java.util.Collection;
-//import java.util.Map;
-//
-//import static com.google.common.base.Preconditions.checkNotNull;
-//
-///**
-//* Takes large N-Triples files and uses MapReduce with Cloudbase
-//* bulk-ingest techniques to load them into the tables in our partition format.
-//* Uses a sharded scheme.
-//* <p/>
-//* Input: N-Triples file
-//* Map:
-//* - key : shard row - Text
-//* - value : stmt in doc triple format - Text
-//* Partitioner: RangePartitioner
-//* Reduce:
-//* - key : all the entries for each triple - Cloudbase Key
-//* Class ShardedBulkNtripsInputTool
-//* Date: Sep 13, 2011
-//* Time: 10:00:17 AM
-//*/
-//public class ShardedBulkNtripsInputTool extends Configured implements Tool {
-//
-//    public static final String WORKDIR_PROP = "bulk.n3.workdir";
-//    public static final String BULK_N3_NUMSHARD = "bulk.n3.numshard";
-//
-//    private String userName = "root";
-//    private String pwd = "password";
-//    private String instance = "stratus";
-//    private String zk = "10.40.190.129:2181";
-//    private String ttl = null;
-//    private String workDirBase = "/temp/bulkcb/work";
-//    private String format = RDFFormat.NTRIPLES.getName();
-//    private int numShards;
-//
-//    @Override
-//    public int run(final String[] args) throws Exception {
-//        final Configuration conf = getConf();
-//        try {
-//            //conf
-//            zk = conf.get(MRUtils.CB_ZK_PROP, zk);
-//            ttl = conf.get(MRUtils.CB_TTL_PROP, ttl);
-//            instance = conf.get(MRUtils.CB_INSTANCE_PROP, instance);
-//            userName = conf.get(MRUtils.CB_USERNAME_PROP, userName);
-//            pwd = conf.get(MRUtils.CB_PWD_PROP, pwd);
-//            workDirBase = conf.get(WORKDIR_PROP, workDirBase);
-//            format = conf.get(MRUtils.FORMAT_PROP, format);
-//            String numShards_s = conf.get(BULK_N3_NUMSHARD);
-//            Preconditions.checkArgument(numShards_s != null);
-//            numShards = Integer.parseInt(numShards_s);
-//            conf.set(MRUtils.FORMAT_PROP, format);
-//            final String inputDir = args[0];
-//
-//            ZooKeeperInstance zooKeeperInstance = new ZooKeeperInstance(instance, zk);
-//            Connector connector = zooKeeperInstance.getConnector(userName, pwd);
-//            TableOperations tableOperations = connector.tableOperations();
-//
-//            String tablePrefix = conf.get(MRUtils.TABLE_PREFIX_PROPERTY, null);
-//            if (tablePrefix != null)
-//                RdfCloudTripleStoreConstants.prefixTables(tablePrefix);
-//            String[] tables = {tablePrefix + RdfCloudTripleStoreConstants.TBL_OSP_SUFFIX,
-//                    tablePrefix + RdfCloudTripleStoreConstants.TBL_SPO_SUFFIX,
-//                    tablePrefix + RdfCloudTripleStoreConstants.TBL_PO_SUFFIX};
-//            Collection<Job> jobs = new ArrayList<Job>();
-//            for (final String table : tables) {
-//                for (int i = 0; i < numShards; i++) {
-//                    final String tableName = table + i;
-//                    PrintStream out = null;
-//                    try {
-//                        String workDir = workDirBase + "/" + tableName;
-//                        System.out.println("Loading data into table[" + tableName + "]");
-//
-//                        Job job = new Job(new Configuration(conf), "Bulk Ingest load data to Generic RDF Table[" + tableName + "]");
-//                        job.setJarByClass(this.getClass());
-//                        //setting long job
-//                        job.getConfiguration().setBoolean("mapred.map.tasks.speculative.execution", false);
-//                        job.getConfiguration().setBoolean("mapred.reduce.tasks.speculative.execution", false);
-//                        job.getConfiguration().set("io.sort.mb", "256");
-//                        job.getConfiguration().setBoolean("mapred.compress.map.output", true);
-//                        job.getConfiguration().set("mapred.map.output.compression.codec", "org.apache.hadoop.io.compress.GzipCodec"); //TODO: I would like LZO compression
-//
-//                        job.setInputFormatClass(TextInputFormat.class);
-//
-//                        job.setMapperClass(ShardedParseNtripsMapper.class);
-//                        job.setMapOutputKeyClass(Key.class);
-//                        job.setMapOutputValueClass(Value.class);
-//
-//                        job.setCombinerClass(OutStmtMutationsReducer.class);
-//                        job.setReducerClass(OutStmtMutationsReducer.class);
-//                        job.setOutputFormatClass(CloudbaseFileOutputFormat.class);
-//                        CloudbaseFileOutputFormat.setZooKeeperInstance(job, instance, zk);
-//
-//                        job.getConfiguration().set(ShardedParseNtripsMapper.TABLE_PROPERTY, tableName);
-//                        job.getConfiguration().set(ShardedParseNtripsMapper.SHARD_PROPERTY, i + "");
-//
-//                        TextInputFormat.setInputPaths(job, new Path(inputDir));
-//
-//                        FileSystem fs = FileSystem.get(conf);
-//                        Path workPath = new Path(workDir);
-//                        if (fs.exists(workPath))
-//                            fs.delete(workPath, true);
-//
-//                        CloudbaseFileOutputFormat.setOutputPath(job, new Path(workDir + "/files"));
-//
-//                        out = new PrintStream(new BufferedOutputStream(fs.create(new Path(workDir + "/splits.txt"))));
-//
-//                        if (!tableOperations.exists(tableName))
-//                            tableOperations.create(tableName);
-//                        Collection<Text> splits = tableOperations.getSplits(tableName, Integer.MAX_VALUE);
-//                        for (Text split : splits)
-//                            out.println(new String(Base64.encodeBase64(TextUtil.getBytes(split))));
-//
-//                        job.setNumReduceTasks(splits.size() + 1);
-//                        out.close();
-//
-//                        job.setPartitionerClass(KeyRangePartitioner.class);
-//                        RangePartitioner.setSplitFile(job, workDir + "/splits.txt");
-//
-//                        job.getConfiguration().set(WORKDIR_PROP, workDir);
-//
-//                        job.submit();
-//                        jobs.add(job);
-//
-//                    } catch (Exception re) {
-//                        throw new RuntimeException(re);
-//                    } finally {
-//                        if (out != null)
-//                            out.close();
-//                    }
-//                }
-//            }
-//
-//            for (Job job : jobs) {
-//                while (!job.isComplete()) {
-//                    Thread.sleep(1000);
-//                }
-//            }
-//
-//            for (String table : tables) {
-//                for (int i = 0; i < numShards; i++) {
-//                    final String tableName = table + i;
-//                    String workDir = workDirBase + "/" + tableName;
-//                    tableOperations.importDirectory(
-//                            tableName,
-//                            workDir + "/files",
-//                            workDir + "/failures",
-//                            20,
-//                            4,
-//                            false);
-//                }
-//            }
-//
-//        } catch (Exception e) {
-//            throw new RuntimeException(e);
-//        }
-//
-//        return 0;
-//    }
-//
-//    public static void main(String[] args) {
-//        try {
-//            ToolRunner.run(new Configuration(), new ShardedBulkNtripsInputTool(), args);
-//        } catch (Exception e) {
-//            e.printStackTrace();
-//        }
-//    }
-//
-//    /**
-//     * input: ntrips format triple
-//     * <p/>
-//     * output: key: shard row from generator
-//     * value: stmt in serialized format (document format)
-//     */
-//    public static class ShardedParseNtripsMapper extends Mapper<LongWritable, Text, Key, Value> {
-//        public static final String TABLE_PROPERTY = "shardedparsentripsmapper.table";
-//        public static final String SHARD_PROPERTY = "shardedparsentripsmapper.shard";
-//
-//        private RDFParser parser;
-//        private String rdfFormat;
-//        private HashAlgorithm hashAlgorithm = new HashCodeHashAlgorithm();
-//        private int shard;
-//        private int numShards;
-//
-//        @Override
-//        protected void setup(final Context context) throws IOException, InterruptedException {
-//            super.setup(context);
-//            Configuration conf = context.getConfiguration();
-//            final String table = conf.get(TABLE_PROPERTY);
-//            Preconditions.checkNotNull(table, "Set the " + TABLE_PROPERTY + " property on the map reduce job");
-//
-//            String shard_s = conf.get(SHARD_PROPERTY);
-//            Preconditions.checkNotNull(shard_s, "Set the " + SHARD_PROPERTY + " property");
-//            shard = Integer.parseInt(shard_s);
-//
-//            numShards = Integer.parseInt(conf.get(BULK_N3_NUMSHARD));
-//
-//            final String cv_s = conf.get(MRUtils.CB_CV_PROP);
-//            rdfFormat = conf.get(MRUtils.FORMAT_PROP);
-//            checkNotNull(rdfFormat, "Rdf format cannot be null");
-//
-//            parser = Rio.createParser(RDFFormat.valueOf(rdfFormat));
-//            parser.setRDFHandler(new RDFHandler() {
-//
-//                @Override
-//                public void startRDF() throws RDFHandlerException {
-//
-//                }
-//
-//                @Override
-//                public void endRDF() throws RDFHandlerException {
-//
-//                }
-//
-//                @Override
-//                public void handleNamespace(String s, String s1) throws RDFHandlerException {
-//
-//                }
-//
-//                @Override
-//                public void handleStatement(Statement statement) throws RDFHandlerException {
-//                    try {
-//                        Resource subject = statement.getSubject();
-//                        if ((hashAlgorithm.hash(subject.stringValue()) % numShards) != shard) {
-//                            return;
-//                        }
-//                        RyaTableKeyValues rdfTableKeyValues = new RyaTableKeyValues(subject, statement.getPredicate(), statement.getObject(), cv_s, statement.getContext()).invoke();
-//                        Collection<Map.Entry<Key, Value>> entries = null;
-//                        if (table.contains(RdfCloudTripleStoreConstants.TBL_SPO_SUFFIX)) {
-//                            entries = rdfTableKeyValues.getSpo();
-//                        } else if (table.contains(RdfCloudTripleStoreConstants.TBL_PO_SUFFIX)) {
-//                            entries = rdfTableKeyValues.getPo();
-//                        } else if (table.contains(RdfCloudTripleStoreConstants.TBL_OSP_SUFFIX)) {
-//                            entries = rdfTableKeyValues.getOsp();
-//                        } else
-//                            throw new IllegalArgumentException("Unrecognized table[" + table + "]");
-//
-//                        for (Map.Entry<Key, Value> entry : entries) {
-//                            context.write(entry.getKey(), entry.getValue());
-//                        }
-//                    } catch (Exception e) {
-//                        throw new RDFHandlerException(e);
-//                    }
-//                }
-//
-//                @Override
-//                public void handleComment(String s) throws RDFHandlerException {
-//
-//                }
-//            });
-//        }
-//
-//        @Override
-//        public void map(LongWritable key, Text value, Context output)
-//                throws IOException, InterruptedException {
-//            String rdf = value.toString();
-//            try {
-//                parser.parse(new StringReader(rdf), "");
-//            } catch (RDFParseException e) {
-//                System.out.println("Line[" + rdf + "] cannot be parsed as format[" + rdfFormat + "]. Exception[" + e.getMessage() + "]");
-//            } catch (Exception e) {
-//                e.printStackTrace();
-//                throw new IOException("Exception occurred parsing triple[" + rdf + "]");
-//            }
-//        }
-//    }
-//
-//    public static class OutStmtMutationsReducer extends Reducer<Key, Value, Key, Value> {
-//
-//        public void reduce(Key key, Iterable<Value> values, Context output)
-//                throws IOException, InterruptedException {
-//            output.write(key, CloudbaseRdfConstants.EMPTY_VALUE);
-//        }
-//    }
-//}

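The commented-out sharded variant routes statements by hashing the subject: a
job configured for shard i keeps only statements where hash(subject) mod
numShards equals i, so the numShards jobs partition the input without any
coordination. A minimal sketch of that routing (ShardSketch is a hypothetical
name; Math.floorMod stands in for the tool's HashCodeHashAlgorithm so a
negative hashCode cannot produce a negative shard id):

    // Sketch only: hash-based shard routing over the statement subject.
    public class ShardSketch {
        public static int shardFor(String subject, int numShards) {
            return Math.floorMod(subject.hashCode(), numShards);
        }

        public static boolean belongsTo(String subject, int shard, int numShards) {
            return shardFor(subject, numShards) == shard;
        }
    }

For example, with numShards = 3 every statement lands in exactly one of the
jobs for shards 0, 1, and 2, and each job writes only its own table suffix.
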
http://git-wip-us.apache.org/repos/asf/incubator-rya/blob/80faf06d/dao/cloudbase.rya/src/main/java/mvm/rya/cloudbase/mr/upgrade/UpgradeCloudbaseRdfTables.java
----------------------------------------------------------------------
diff --git a/dao/cloudbase.rya/src/main/java/mvm/rya/cloudbase/mr/upgrade/UpgradeCloudbaseRdfTables.java b/dao/cloudbase.rya/src/main/java/mvm/rya/cloudbase/mr/upgrade/UpgradeCloudbaseRdfTables.java
deleted file mode 100644
index 453d6ca..0000000
--- a/dao/cloudbase.rya/src/main/java/mvm/rya/cloudbase/mr/upgrade/UpgradeCloudbaseRdfTables.java
+++ /dev/null
@@ -1,350 +0,0 @@
-//package mvm.rya.cloudbase.mr.upgrade;
-//
-//import cloudbase.core.client.Connector;
-//import cloudbase.core.client.ZooKeeperInstance;
-//import cloudbase.core.client.admin.TableOperations;
-//import cloudbase.core.client.mapreduce.CloudbaseInputFormat;
-//import cloudbase.core.client.mapreduce.CloudbaseOutputFormat;
-//import cloudbase.core.data.Key;
-//import cloudbase.core.data.Mutation;
-//import cloudbase.core.data.Range;
-//import cloudbase.core.data.Value;
-//import cloudbase.core.security.Authorizations;
-//import cloudbase.core.security.ColumnVisibility;
-//import cloudbase.core.util.Pair;
-//import com.google.common.collect.Lists;
-//import com.google.common.io.ByteArrayDataInput;
-//import com.google.common.io.ByteArrayDataOutput;
-//import com.google.common.io.ByteStreams;
-//import mvm.rya.api.InvalidValueTypeMarkerRuntimeException;
-//import mvm.rya.api.RdfCloudTripleStoreConstants;
-//import mvm.rya.cloudbase.CloudbaseRdfConfiguration;
-//import mvm.rya.cloudbase.CloudbaseRdfConstants;
-//import mvm.rya.cloudbase.CloudbaseRyaDAO;
-//import mvm.rya.cloudbase.RyaTableMutationsFactory;
-//import mvm.rya.cloudbase.mr.utils.MRUtils;
-//import org.apache.hadoop.conf.Configuration;
-//import org.apache.hadoop.conf.Configured;
-//import org.apache.hadoop.io.Text;
-//import org.apache.hadoop.mapreduce.Job;
-//import org.apache.hadoop.mapreduce.Mapper;
-//import org.apache.hadoop.util.Tool;
-//import org.apache.hadoop.util.ToolRunner;
-//import org.openrdf.model.*;
-//import org.openrdf.model.impl.StatementImpl;
-//import org.openrdf.model.impl.ValueFactoryImpl;
-//
-//import java.io.IOException;
-//import java.util.ArrayList;
-//import java.util.Collection;
-//import java.util.Date;
-//import java.util.Map;
-//
-//import static mvm.rya.api.RdfCloudTripleStoreUtils.*;
-//
-///**
-// * 1. Check version. <br/>
-// * 2. If version does not exist, apply: <br/>
-// * - DELIM => 1 -> 0
-// * - DELIM_STOP => 2 -> 1
-// * - 3 table index
-// */
-//public class UpgradeCloudbaseRdfTables extends Configured implements Tool {
-//    public static final String TMP = "_tmp";
-//    public static final String DELETE_PROP = "rdf.upgrade.deleteMutation"; //true if ok to delete old tables
-//    private String zk = "10.40.190.113:2181";
-//    private String instance = "stratus";
-//    private String userName = "root";
-//    private String pwd = "password";
-//    private String tablePrefix = RdfCloudTripleStoreConstants.TBL_PRFX_DEF;
-//    private CloudbaseRdfConfiguration conf = new CloudbaseRdfConfiguration();
-//
-//    @Override
-//    public int run(String[] strings) throws Exception {
-//        conf = new CloudbaseRdfConfiguration(getConf());
-//        //faster
-//        conf.setBoolean("mapred.map.tasks.speculative.execution", false);
-//        conf.setBoolean("mapred.reduce.tasks.speculative.execution", false);
-//        conf.set(MRUtils.JOB_NAME_PROP, "Upgrading Cloudbase Rdf Tables");
-//
-//        zk = conf.get(MRUtils.CB_ZK_PROP, zk);
-//        instance = conf.get(MRUtils.CB_INSTANCE_PROP, instance);
-//        userName = conf.get(MRUtils.CB_USERNAME_PROP, userName);
-//        pwd = conf.get(MRUtils.CB_PWD_PROP, pwd);
-//
-//        tablePrefix = conf.get(MRUtils.TABLE_PREFIX_PROPERTY, tablePrefix);
-//
-//        Authorizations authorizations = CloudbaseRdfConstants.ALL_AUTHORIZATIONS;
-//        String auth = conf.get(MRUtils.CB_AUTH_PROP);
-//        if (auth != null)
-//            authorizations = new Authorizations(auth.split(","));
-//
-//        boolean deleteTables = conf.getBoolean(DELETE_PROP, false);
-//
-//        //tables
-//        String spo = tablePrefix + RdfCloudTripleStoreConstants.TBL_SPO_SUFFIX;
-//        String po = tablePrefix + RdfCloudTripleStoreConstants.TBL_PO_SUFFIX;
-//        String osp = tablePrefix + RdfCloudTripleStoreConstants.TBL_OSP_SUFFIX;
-//        String so = tablePrefix + "so";
-//        String ops = tablePrefix + "o";
-//
-//        //check version first
-//        Connector connector = new ZooKeeperInstance(instance, zk).getConnector(userName, pwd.getBytes());
-//        CloudbaseRyaDAO rdfDAO = new CloudbaseRyaDAO();
-//        rdfDAO.setConnector(connector);
-//        conf.setTablePrefix(tablePrefix);
-//        rdfDAO.setConf(conf);
-////        rdfDAO.setSpoTable(spo);
-////        rdfDAO.setPoTable(po);
-////        rdfDAO.setOspTable(osp);
-////        rdfDAO.setNamespaceTable(tablePrefix + RdfCloudTripleStoreConstants.TBL_NS_SUFFIX);
-//        rdfDAO.init();
-//        String version = rdfDAO.getVersion();
-//        if (version != null) {
-//            //TODO: Do a version check here
-//            //version found, no need to upgrade
-//            return 0;
-//        }
-//
-//        rdfDAO.destroy();
-//
-//        //create osp table, delete so and o tables
-//        TableOperations tableOperations = connector.tableOperations();
-//        if (deleteTables) {
-//            if (tableOperations.exists(so)) {
-//                tableOperations.delete(so);
-//            }
-//            if (tableOperations.exists(ops)) {
-//                tableOperations.delete(ops);
-//            }
-//        }
-//
-//        conf.set("io.sort.mb", "256");
-//        Job job = new Job(conf);
-//        job.setJarByClass(UpgradeCloudbaseRdfTables.class);
-//
-//        //set up cloudbase input
-//        job.setInputFormatClass(CloudbaseInputFormat.class);
-//        CloudbaseInputFormat.setInputInfo(job, userName, pwd.getBytes(), spo, authorizations);
-//        CloudbaseInputFormat.setZooKeeperInstance(job, instance, zk);
-//        Collection<Pair<Text, Text>> columns = new ArrayList<Pair<Text, Text>>();
-//        final Pair pair = new Pair(RdfCloudTripleStoreConstants.INFO_TXT, RdfCloudTripleStoreConstants.INFO_TXT);
-//        columns.add(pair);
-//        CloudbaseInputFormat.fetchColumns(job, columns);
-//
-//        CloudbaseInputFormat.setRanges(job, Lists.newArrayList(new Range(new Text(new byte[]{}), new Text(new byte[]{Byte.MAX_VALUE}))));
-//
-//        // set input output of the particular job
-//        job.setMapOutputKeyClass(Text.class);
-//        job.setMapOutputValueClass(Mutation.class);
-//
-//        //no reducer needed?
-//        job.setNumReduceTasks(0);
-//        job.setMapperClass(UpgradeCloudbaseRdfTablesMapper.class);
-//
-//        CloudbaseOutputFormat.setOutputInfo(job, userName, pwd.getBytes(), true, spo + TMP);
-//        CloudbaseOutputFormat.setZooKeeperInstance(job, instance, zk);
-//        job.setOutputFormatClass(CloudbaseOutputFormat.class);
-//
-//        // Submit the job
-//        Date startTime = new Date();
-//        System.out.println("Job started: " + startTime);
-//        int exitCode = job.waitForCompletion(true) ? 0 : 1;
-//
-//        if (exitCode == 0) {
-//            Date end_time = new Date();
-//            System.out.println("Job ended: " + end_time);
-//            System.out.println("The job took "
-//                    + (end_time.getTime() - startTime.getTime()) / 1000
-//                    + " seconds.");
-//
-//            //now deleteMutation old spo table, and rename tmp one
-//            if (deleteTables) {
-//                tableOperations.deleteMutation(spo);
-//                tableOperations.rename(spo + TMP, spo);
-//                tableOperations.deleteMutation(po);
-//                tableOperations.rename(po + TMP, po);
-//                tableOperations.deleteMutation(osp);
-//                tableOperations.rename(osp + TMP, osp);
-//            }
-//
-//            return 0;
-//        } else {
-//            System.out.println("Job Failed!!!");
-//        }
-//
-//        return -1;
-//    }
-//
-//    public static void main(String[] args) {
-//        try {
-//            ToolRunner.run(new Configuration(), new UpgradeCloudbaseRdfTables(), args);
-//        } catch (Exception e) {
-//            e.printStackTrace();
-//        }
-//    }
-//
-//    public static class UpgradeCloudbaseRdfTablesMapper extends Mapper<Key, Value, Text, Mutation> {
-//        private String tablePrefix = RdfCloudTripleStoreConstants.TBL_PRFX_DEF;
-//        ValueFactoryImpl vf = new ValueFactoryImpl();
-//
-//        private Text spo_table, po_table, osp_table;
-//
-//        RyaTableMutationsFactory mut = new RyaTableMutationsFactory();
-//
-//        @Override
-//        protected void setup(Context context) throws IOException, InterruptedException {
-//            super.setup(context);
-//            Configuration conf = context.getConfiguration();
-//            tablePrefix = conf.get(MRUtils.TABLE_PREFIX_PROPERTY, tablePrefix);
-//            String spo = tablePrefix + RdfCloudTripleStoreConstants.TBL_SPO_SUFFIX + TMP;
-//            String po = tablePrefix + RdfCloudTripleStoreConstants.TBL_PO_SUFFIX + TMP;
-//            String osp = tablePrefix + RdfCloudTripleStoreConstants.TBL_OSP_SUFFIX + TMP;
-//
-//            spo_table = new Text(spo);
-//            po_table = new Text(po);
-//            osp_table = new Text(osp);
-//        }
-//
-//        @Override
-//        protected void map(Key key, Value value, Context context) throws IOException, InterruptedException {
-//            //read in old format
-//            Statement statement = null;
-//            try {
-//                statement = translateOldStatementFromRow(ByteStreams.newDataInput(key.getRow().getBytes()), "spo", vf);
-//            } catch (Exception e) {
-//                //not the right version
-//                return;
-//            }
-//
-//            //translate to new format and save in new tables
-//            Map<RdfCloudTripleStoreConstants.TABLE_LAYOUT, Mutation> mutationMap = mut.serialize(statement.getSubject(), statement.getPredicate(), statement.getObject(), new ColumnVisibility(key.getColumnVisibility()), statement.getContext());
-//            Mutation spo = mutationMap.get(RdfCloudTripleStoreConstants.TABLE_LAYOUT.SPO);
-//            Mutation po = mutationMap.get(RdfCloudTripleStoreConstants.TABLE_LAYOUT.PO);
-//            Mutation osp = mutationMap.get(RdfCloudTripleStoreConstants.TABLE_LAYOUT.OSP);
-//
-//            context.write(spo_table, spo);
-//            context.write(po_table, po);
-//            context.write(osp_table, osp);
-//
-//            //TODO: Contexts
-//        }
-//    }
-//
-//    public static org.openrdf.model.Value readOldValue(ByteArrayDataInput dataIn, ValueFactory vf)
-//            throws IOException, ClassCastException {
-//        int valueTypeMarker;
-//        try {
-//            valueTypeMarker = dataIn.readByte();
-//        } catch (Exception e) {
-//            return null;
-//        }
-//
-//        org.openrdf.model.Value ret = null;
-//        if (valueTypeMarker == RdfCloudTripleStoreConstants.URI_MARKER) {
-//            String uriString = readString(dataIn);
-//            ret = vf.createURI(uriString);
-//        } else if (valueTypeMarker == RdfCloudTripleStoreConstants.BNODE_MARKER) {
-//            String bnodeID = readString(dataIn);
-//            ret = vf.createBNode(bnodeID);
-//        } else if (valueTypeMarker == RdfCloudTripleStoreConstants.PLAIN_LITERAL_MARKER) {
-//            String label = readString(dataIn);
-//            ret = vf.createLiteral(label);
-//        } else if (valueTypeMarker == RdfCloudTripleStoreConstants.LANG_LITERAL_MARKER) {
-//            String label = readString(dataIn);
-//            String language = readString(dataIn);
-//            ret = vf.createLiteral(label, language);
-//        } else if (valueTypeMarker == RdfCloudTripleStoreConstants.DATATYPE_LITERAL_MARKER) {
-//            String label = readString(dataIn);
-//            URI datatype = (URI) readOldValue(dataIn, vf);
-//            ret = vf.createLiteral(label, datatype);
-//        } else {
-//            throw new InvalidValueTypeMarkerRuntimeException(valueTypeMarker, "Invalid value type marker: "
-//                    + valueTypeMarker);
-//        }
-//
-//        return ret;
-//    }
-//
-//    public static Statement translateOldStatementFromRow(ByteArrayDataInput input, String table, ValueFactory vf) throws IOException {
-//        Resource subject;
-//        URI predicate;
-//        org.openrdf.model.Value object;
-//        if ("spo".equals(table)) {
-//            subject = (Resource) readOldValue(input, vf);
-//            input.readByte();
-//            predicate = (URI) readOldValue(input, vf);
-//            input.readByte();
-//            object = readOldValue(input, vf);
-//        } else if ("o".equals(table)) {
-//            object = readOldValue(input, vf);
-//            input.readByte();
-//            predicate = (URI) readOldValue(input, vf);
-//            input.readByte();
-//            subject = (Resource) readOldValue(input, vf);
-//        } else if ("po".equals(table)) {
-//            predicate = (URI) readOldValue(input, vf);
-//            input.readByte();
-//            object = readOldValue(input, vf);
-//            input.readByte();
-//            subject = (Resource) readOldValue(input, vf);
-//        } else {
-//            //so
-//            subject = (Resource) readOldValue(input, vf);
-//            input.readByte();
-//            object = readOldValue(input, vf);
-//            input.readByte();
-//            predicate = (URI) readOldValue(input, vf);
-//        }
-//        return new StatementImpl(subject, predicate, object);
-//    }
-//
-//    public static byte[] writeOldValue(org.openrdf.model.Value value) throws IOException {
-//        if (value == null)
-//            return new byte[]{};
-//        ByteArrayDataOutput dataOut = ByteStreams.newDataOutput();
-//        if (value instanceof URI) {
-//            dataOut.writeByte(RdfCloudTripleStoreConstants.URI_MARKER);
-//            writeString(((URI) value).toString(), dataOut);
-//        } else if (value instanceof BNode) {
-//            dataOut.writeByte(RdfCloudTripleStoreConstants.BNODE_MARKER);
-//            writeString(((BNode) value).getID(), dataOut);
-//        } else if (value instanceof Literal) {
-//            Literal lit = (Literal) value;
-//
-//            String label = lit.getLabel();
-//            String language = lit.getLanguage();
-//            URI datatype = lit.getDatatype();
-//
-//            if (datatype != null) {
-//                dataOut.writeByte(RdfCloudTripleStoreConstants.DATATYPE_LITERAL_MARKER);
-//                writeString(label, dataOut);
-//                dataOut.write(writeOldValue(datatype));
-//            } else if (language != null) {
-//                dataOut.writeByte(RdfCloudTripleStoreConstants.LANG_LITERAL_MARKER);
-//                writeString(label, dataOut);
-//                writeString(language, dataOut);
-//            } else {
-//                dataOut.writeByte(RdfCloudTripleStoreConstants.PLAIN_LITERAL_MARKER);
-//                writeString(label, dataOut);
-//            }
-//        } else {
-//            throw new IllegalArgumentException("unexpected value type: "
-//                    + value.getClass());
-//        }
-//        return dataOut.toByteArray();
-//    }
-//
-//    private static String OLD_DELIM = "\u0001";
-//    private static byte[] OLD_DELIM_BYTES = OLD_DELIM.getBytes();
-//
-//    public static byte[] buildOldRowWith(byte[] bytes_one, byte[] bytes_two, byte[] bytes_three) throws IOException {
-//        ByteArrayDataOutput rowidout = ByteStreams.newDataOutput();
-//        rowidout.write(bytes_one);
-//        rowidout.write(OLD_DELIM_BYTES);
-//        rowidout.write(bytes_two);
-//        rowidout.write(OLD_DELIM_BYTES);
-//        rowidout.write(bytes_three);
-//        return truncateRowId(rowidout.toByteArray());
-//    }
-//}
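
For reference, the retired layout above writes each RDF value with a one-byte type marker (see writeOldValue) and joins the three row components with a single \u0001 delimiter (see buildOldRowWith). Below is a minimal, self-contained sketch of that delimiter-joined row layout; the class and method names are hypothetical and it has no Rya or Cloudbase dependencies:

    import java.io.ByteArrayOutputStream;
    import java.io.IOException;

    public class OldRowLayoutSketch {
        // Mirrors OLD_DELIM above: a single \u0001 byte between row components.
        private static final byte[] DELIM = "\u0001".getBytes();

        // Joins three serialized values into one row id, as buildOldRowWith did.
        static byte[] buildRow(byte[] first, byte[] second, byte[] third) throws IOException {
            ByteArrayOutputStream row = new ByteArrayOutputStream();
            row.write(first);
            row.write(DELIM);
            row.write(second);
            row.write(DELIM);
            row.write(third);
            return row.toByteArray();
        }

        public static void main(String[] args) throws IOException {
            byte[] row = buildRow("subj".getBytes(), "pred".getBytes(), "obj".getBytes());
            System.out.println(row.length); // 13 = 4 + 1 + 4 + 1 + 3
        }
    }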

http://git-wip-us.apache.org/repos/asf/incubator-rya/blob/80faf06d/dao/cloudbase.rya/src/main/java/mvm/rya/cloudbase/mr/utils/MRUtils.java
----------------------------------------------------------------------
diff --git a/dao/cloudbase.rya/src/main/java/mvm/rya/cloudbase/mr/utils/MRUtils.java b/dao/cloudbase.rya/src/main/java/mvm/rya/cloudbase/mr/utils/MRUtils.java
deleted file mode 100644
index 950f585..0000000
--- a/dao/cloudbase.rya/src/main/java/mvm/rya/cloudbase/mr/utils/MRUtils.java
+++ /dev/null
@@ -1,94 +0,0 @@
-package mvm.rya.cloudbase.mr.utils;
-
-import org.apache.hadoop.conf.Configuration;
-import org.openrdf.model.URI;
-import org.openrdf.model.ValueFactory;
-import org.openrdf.model.impl.ValueFactoryImpl;
-
-/**
- * Class MRUtils
- * Date: May 19, 2011
- * Time: 10:34:06 AM
- */
-public class MRUtils {
-
-    public static final String JOB_NAME_PROP = "mapred.job.name";
-
-    public static final String CB_USERNAME_PROP = "cb.username";
-    public static final String CB_PWD_PROP = "cb.pwd";
-    public static final String CB_ZK_PROP = "cb.zk";
-    public static final String CB_INSTANCE_PROP = "cb.instance";
-    public static final String CB_TTL_PROP = "cb.ttl";
-    public static final String CB_CV_PROP = "cb.cv";
-    public static final String CB_AUTH_PROP = "cb.auth";
-    public static final String CB_MOCK_PROP = "cb.mock";
-    public static final String TABLE_LAYOUT_PROP = "rdf.tablelayout";
-    public static final String FORMAT_PROP = "rdf.format";
-
-    public static final String NAMED_GRAPH_PROP = "rdf.graph";
-
-    public static final String TABLE_PREFIX_PROPERTY = "rdf.tablePrefix";
-
-    // rdf constants
-    public static final ValueFactory vf = new ValueFactoryImpl();
-    public static final URI RDF_TYPE = vf.createURI("http://www.w3.org/1999/02/22-rdf-syntax-ns#", "type");
-
-
-    // cloudbase map reduce utils
-
-//    public static Range retrieveRange(URI entry_key, URI entry_val) throws IOException {
-//        ByteArrayDataOutput startRowOut = ByteStreams.newDataOutput();
-//        startRowOut.write(RdfCloudTripleStoreUtils.writeValue(entry_key));
-//        if (entry_val != null) {
-//            startRowOut.write(RdfCloudTripleStoreConstants.DELIM_BYTES);
-//            startRowOut.write(RdfCloudTripleStoreUtils.writeValue(entry_val));
-//        }
-//        byte[] startrow = startRowOut.toByteArray();
-//        startRowOut.write(RdfCloudTripleStoreConstants.DELIM_STOP_BYTES);
-//        byte[] stoprow = startRowOut.toByteArray();
-//
-//        Range range = new Range(new Text(startrow), new Text(stoprow));
-//        return range;
-//    }
-
-
-    public static String getCBTtl(Configuration conf) {
-        return conf.get(CB_TTL_PROP);
-    }
-
-    public static String getCBUserName(Configuration conf) {
-        return conf.get(CB_USERNAME_PROP);
-    }
-
-    public static String getCBPwd(Configuration conf) {
-        return conf.get(CB_PWD_PROP);
-    }
-
-    public static String getCBZK(Configuration conf) {
-        return conf.get(CB_ZK_PROP);
-    }
-
-    public static String getCBInstance(Configuration conf) {
-        return conf.get(CB_INSTANCE_PROP);
-    }
-
-    public static void setCBUserName(Configuration conf, String str) {
-        conf.set(CB_USERNAME_PROP, str);
-    }
-
-    public static void setCBPwd(Configuration conf, String str) {
-        conf.set(CB_PWD_PROP, str);
-    }
-
-    public static void setCBZK(Configuration conf, String str) {
-        conf.set(CB_ZK_PROP, str);
-    }
-
-    public static void setCBInstance(Configuration conf, String str) {
-        conf.set(CB_INSTANCE_PROP, str);
-    }
-
-    public static void setCBTtl(Configuration conf, String str) {
-        conf.set(CB_TTL_PROP, str);
-    }
-}
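
The removed helpers are thin wrappers over Configuration keys such as cb.username and cb.zk. A minimal usage sketch, with placeholder connection values, of how a job driver would have populated them (it compiles only against the MRUtils class shown above):

    import mvm.rya.cloudbase.mr.utils.MRUtils;
    import org.apache.hadoop.conf.Configuration;

    public class MRUtilsUsageSketch {
        public static void main(String[] args) {
            Configuration conf = new Configuration();
            MRUtils.setCBUserName(conf, "root");          // placeholder credentials
            MRUtils.setCBPwd(conf, "secret");
            MRUtils.setCBZK(conf, "zkhost:2181");         // placeholder ZooKeeper quorum
            MRUtils.setCBInstance(conf, "instance");      // placeholder instance name
            // Each setter is equivalent to conf.set("cb.<key>", value).
            System.out.println(MRUtils.getCBZK(conf));    // prints zkhost:2181
        }
    }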

http://git-wip-us.apache.org/repos/asf/incubator-rya/blob/80faf06d/dao/cloudbase.rya/src/main/java/mvm/rya/cloudbase/query/BatchScannerCloseableIterable.java
----------------------------------------------------------------------
diff --git a/dao/cloudbase.rya/src/main/java/mvm/rya/cloudbase/query/BatchScannerCloseableIterable.java b/dao/cloudbase.rya/src/main/java/mvm/rya/cloudbase/query/BatchScannerCloseableIterable.java
deleted file mode 100644
index d3f8ae7..0000000
--- a/dao/cloudbase.rya/src/main/java/mvm/rya/cloudbase/query/BatchScannerCloseableIterable.java
+++ /dev/null
@@ -1,34 +0,0 @@
-package mvm.rya.cloudbase.query;
-
-import cloudbase.core.client.BatchScanner;
-import cloudbase.core.data.Key;
-import cloudbase.core.data.Value;
-import com.google.common.base.Preconditions;
-import mango.collect.AbstractCloseableIterable;
-import mvm.rya.cloudbase.BatchScannerIterator;
-
-import java.io.IOException;
-import java.util.Iterator;
-import java.util.Map;
-
-/**
- */
-public class BatchScannerCloseableIterable extends AbstractCloseableIterable<Map.Entry<Key, Value>> {
-
-    private BatchScanner scanner;
-
-    public BatchScannerCloseableIterable(BatchScanner scanner) {
-        Preconditions.checkNotNull(scanner);
-        this.scanner = scanner;
-    }
-
-    @Override
-    protected void doClose() throws IOException {
-        scanner.close();
-    }
-
-    @Override
-    protected Iterator<Map.Entry<Key, Value>> retrieveIterator() {
-        return new BatchScannerIterator(scanner.iterator());
-    }
-}
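
The removed class exists so that a BatchScanner's server-side resources are released once iteration finishes. A generic sketch of the same closeable-iterable pattern, without the Cloudbase and mango dependencies; all names here are hypothetical:

    import java.io.Closeable;
    import java.io.IOException;
    import java.util.Iterator;

    // An Iterable that owns an underlying resource and releases it on close().
    class CloseableIterableSketch<T> implements Iterable<T>, Closeable {
        private final Iterable<T> delegate;   // e.g., a scanner's entries
        private final Closeable resource;     // whatever must be released

        CloseableIterableSketch(Iterable<T> delegate, Closeable resource) {
            if (delegate == null || resource == null) {
                throw new NullPointerException(); // mirrors Preconditions.checkNotNull
            }
            this.delegate = delegate;
            this.resource = resource;
        }

        @Override
        public Iterator<T> iterator() {
            return delegate.iterator();
        }

        @Override
        public void close() throws IOException {
            resource.close();
        }
    }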


http://git-wip-us.apache.org/repos/asf/incubator-rya/blob/80faf06d/partition/mr.partition.rdf/src/main/assembly/job.xml
----------------------------------------------------------------------
diff --git a/partition/mr.partition.rdf/src/main/assembly/job.xml b/partition/mr.partition.rdf/src/main/assembly/job.xml
deleted file mode 100644
index 259b917..0000000
--- a/partition/mr.partition.rdf/src/main/assembly/job.xml
+++ /dev/null
@@ -1,38 +0,0 @@
-<assembly
-	xmlns="http://maven.apache.org/plugins/maven-assembly-plugin/assembly/1.1.0"
-	xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance"
-	xsi:schemaLocation="http://maven.apache.org/plugins/maven-assembly-plugin/assembly/1.1.0 http://maven.apache.org/xsd/assembly-1.1.0.xsd">
-	<id>job</id>
-	<formats>
-		<format>jar</format>
-	</formats>
-	<includeBaseDirectory>false</includeBaseDirectory>
-	<dependencySets>
-		<dependencySet>
-			<unpack>false</unpack>
-			<scope>runtime</scope>
-			<outputDirectory>lib</outputDirectory>
-			<excludes>
-				<exclude>org.apache.hadoop:hadoop-core</exclude>
-				<exclude>${artifact.groupId}:${artifact.artifactId}</exclude>
-			</excludes>
-		</dependencySet>
-		<dependencySet>
-			<unpack>false</unpack>
-			<scope>system</scope>
-			<outputDirectory>lib</outputDirectory>
-			<excludes>
-				<exclude>${artifact.groupId}:${artifact.artifactId}</exclude>
-			</excludes>
-		</dependencySet>
-	</dependencySets>
-	<fileSets>
-		<fileSet>
-			<directory>${basedir}/target/classes</directory>
-			<outputDirectory>/</outputDirectory>
-			<excludes>
-				<exclude>*.jar</exclude>
-			</excludes>
-		</fileSet>
-	</fileSets>
-</assembly>
\ No newline at end of file

http://git-wip-us.apache.org/repos/asf/incubator-rya/blob/80faf06d/partition/mr.partition.rdf/src/main/groovy/convertrdfdir.groovy
----------------------------------------------------------------------
diff --git a/partition/mr.partition.rdf/src/main/groovy/convertrdfdir.groovy b/partition/mr.partition.rdf/src/main/groovy/convertrdfdir.groovy
deleted file mode 100644
index e5e02ec..0000000
--- a/partition/mr.partition.rdf/src/main/groovy/convertrdfdir.groovy
+++ /dev/null
@@ -1,33 +0,0 @@
-import org.openrdf.rio.rdfxml.*
-import org.openrdf.rio.ntriples.NTriplesWriterFactory
-import org.openrdf.rio.RDFHandler
-
-@Grab(group='com.google.guava', module='guava', version='r06')
-@Grab(group='org.openrdf.sesame', module='sesame-rio-rdfxml', version='2.3.2')
-@Grab(group='org.openrdf.sesame', module='sesame-rio-ntriples', version='2.3.2')
-@Grab(group='org.slf4j', module='slf4j-simple', version='1.5.8')
-def convertDirRdfFormat(def dir, def outputFile) {
-  //read each file
-  assert dir.isDirectory()
-
-  def ntriplesWriter = NTriplesWriterFactory.newInstance().getWriter(new FileOutputStream(outputFile))
-
-  ntriplesWriter.startRDF()
-  dir.listFiles().each { it ->
-    //load file into rdfxml parser
-    def rdfxmlParser = RDFXMLParserFactory.newInstance().getParser()
-    rdfxmlParser.setRDFHandler(
-        [       startRDF: {},
-                endRDF: {},
-                handleNamespace: { def prefix, def uri -> ntriplesWriter.handleNamespace(prefix, uri)},
-                handleComment: {},
-                handleStatement: { def stmt ->  ntriplesWriter.handleStatement stmt}] as RDFHandler
-    )
-    rdfxmlParser.parse(new FileInputStream(it), "")
-  }
-  ntriplesWriter.endRDF()
-}
-
-try{
-convertDirRdfFormat(new File(args[0]), new File(args[1]))
-}catch(Exception e) {e.printStackTrace();}
\ No newline at end of file
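
The removed script chains an RDF/XML parser into an N-Triples writer through the RDFHandler interface, wrapping the writer so that startRDF()/endRDF() fire only once across many input files. For a single file the writer can be handled directly; a rough Java equivalent, assuming the same Sesame 2.x RIO API used elsewhere in this codebase:

    import java.io.FileInputStream;
    import java.io.FileOutputStream;

    import org.openrdf.rio.RDFParser;
    import org.openrdf.rio.RDFWriter;
    import org.openrdf.rio.ntriples.NTriplesWriterFactory;
    import org.openrdf.rio.rdfxml.RDFXMLParserFactory;

    public class RdfXmlToNTriplesSketch {
        public static void main(String[] args) throws Exception {
            RDFParser parser = new RDFXMLParserFactory().getParser();
            RDFWriter writer = new NTriplesWriterFactory()
                    .getWriter(new FileOutputStream(args[1]));
            // An RDFWriter is an RDFHandler, so the parser can drive it directly;
            // the parser invokes startRDF()/endRDF() itself for the single input.
            parser.setRDFHandler(writer);
            parser.parse(new FileInputStream(args[0]), "");
        }
    }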

http://git-wip-us.apache.org/repos/asf/incubator-rya/blob/80faf06d/partition/mr.partition.rdf/src/main/java/mvm/mmrts/rdf/partition/mr/MrTstBed.java
----------------------------------------------------------------------
diff --git a/partition/mr.partition.rdf/src/main/java/mvm/mmrts/rdf/partition/mr/MrTstBed.java b/partition/mr.partition.rdf/src/main/java/mvm/mmrts/rdf/partition/mr/MrTstBed.java
deleted file mode 100644
index e8b2e5a..0000000
--- a/partition/mr.partition.rdf/src/main/java/mvm/mmrts/rdf/partition/mr/MrTstBed.java
+++ /dev/null
@@ -1,104 +0,0 @@
-package mvm.mmrts.rdf.partition.mr;
-
-import com.google.common.io.ByteStreams;
-import mvm.mmrts.rdf.partition.mr.transform.SparqlCloudbaseIFJob;
-
-import java.io.FileInputStream;
-
-/**
- * Class MrTstBed
- * Date: Sep 1, 2011
- * Time: 9:18:53 AM
- */
-public class MrTstBed {
-    public static void main(String[] args) {
-        try {
-//            String query = "PREFIX tdp: <http://here/2010/tracked-data-provenance/ns#>\n" +
-//                    "PREFIX rdf: <http://www.w3.org/1999/02/22-rdf-syntax-ns#>\n" +
-//                    "PREFIX mvmpart: <urn:mvm.mmrts.partition.rdf/08/2011#>\n" +
-//                    "SELECT * WHERE\n" +
-//                    "{\n" +
-//                    "?id tdp:reportedAt ?timestamp. \n" +
-//                    "FILTER(mvmpart:timeRange(?id, tdp:reportedAt, 1314898074000 , 1314898374000 , 'XMLDATETIME')).\n" +
-//                    "?id tdp:performedBy ?system.\n" +
-//                    "?id <http://here/2010/cmv/ns#hasMarkingText> \"U\".\n" +
-//                    "?id rdf:type tdp:Sent.\n" +
-//                    "} \n";
-
-            FileInputStream fis = new FileInputStream(args[0]);
-            String query = new String(ByteStreams.toByteArray(fis));
-            fis.close();
-
-//            String query = "PREFIX tdp: <http://here/2010/tracked-data-provenance/ns#>\n" +
-//                    "PREFIX rdf: <http://www.w3.org/1999/02/22-rdf-syntax-ns#>\n" +
-//                    "PREFIX mvmpart: <urn:mvm.mmrts.partition.rdf/08/2011#>\n" +
-//                    "SELECT * WHERE\n" +
-//                    "{\n" +
-//                    "?id tdp:reportedAt ?timestamp.\n" +
-//                    "FILTER(mvmpart:timeRange(?id, tdp:reportedAt, 1314381770000 , 1314381880000 , 'XMLDATETIME')).\n" +
-//                    "?id tdp:performedBy ?system.\n" +
-//                    "}";
-
-            new SparqlCloudbaseIFJob("partitionRdf", "root", "password", "stratus", "stratus13:2181", "/temp/queryout", MrTstBed.class, query).run();
-
-//            QueryParser parser = (new SPARQLParserFactory()).getParser();
-//            TupleExpr expr = parser.parseQuery(query, "http://www.w3.org/1999/02/22-rdf-syntax-ns#").getTupleExpr();
-//            System.out.println(expr);
-//
-//            final Configuration queryConf = new Configuration();
-//            expr.visit(new FilterTimeIndexVisitor(queryConf));
-//
-//            (new SubjectGroupingOptimizer(queryConf)).optimize(expr, null, null);
-//
-//            System.out.println(expr);
-//
-//            //make sure of only one shardlookup
-//            expr.visit(new QueryModelVisitorBase<RuntimeException>() {
-//                int count = 0;
-//
-//                @Override
-//                public void meetOther(QueryModelNode node) throws RuntimeException {
-//                    super.meetOther(node);
-//                    count++;
-//                    if (count > 1)
-//                        throw new IllegalArgumentException("Query can only have one subject-star lookup");
-//                }
-//            });
-//
-//            final Job job = new Job(queryConf);
-//            job.setJarByClass(MrTstBed.class);
-//
-//            expr.visit(new QueryModelVisitorBase<RuntimeException>() {
-//                @Override
-//                public void meetOther(QueryModelNode node) throws RuntimeException {
-//                    super.meetOther(node);
-//
-//                    //set up CloudbaseBatchScannerInputFormat here
-//                    if (node instanceof ShardSubjectLookup) {
-//                        System.out.println("Lookup: " + node);
-//                        try {
-//                            new SparqlCloudbaseIFTransformer((ShardSubjectLookup) node, queryConf, job, "partitionRdf",
-//                                    "root", "password", "stratus", "stratus13:2181");
-//                        } catch (QueryEvaluationException e) {
-//                            e.printStackTrace();
-//                        }
-//                    }
-//                }
-//            });
-//
-//            Path outputDir = new Path("/temp/sparql-out/testout");
-//            FileSystem dfs = FileSystem.get(outputDir.toUri(), queryConf);
-//            if (dfs.exists(outputDir))
-//                dfs.delete(outputDir, true);
-//
-//            FileOutputFormat.setOutputPath(job, outputDir);
-//
-//            // Submit the job
-//            Date startTime = new Date();
-//            System.out.println("Job started: " + startTime);
-//            job.waitForCompletion(true);
-        } catch (Exception e) {
-            e.printStackTrace();
-        }
-    }
-}

http://git-wip-us.apache.org/repos/asf/incubator-rya/blob/80faf06d/partition/mr.partition.rdf/src/main/java/mvm/mmrts/rdf/partition/mr/SparqlPartitionStoreInputFormat.java
----------------------------------------------------------------------
diff --git a/partition/mr.partition.rdf/src/main/java/mvm/mmrts/rdf/partition/mr/SparqlPartitionStoreInputFormat.java b/partition/mr.partition.rdf/src/main/java/mvm/mmrts/rdf/partition/mr/SparqlPartitionStoreInputFormat.java
deleted file mode 100644
index 15c9c79..0000000
--- a/partition/mr.partition.rdf/src/main/java/mvm/mmrts/rdf/partition/mr/SparqlPartitionStoreInputFormat.java
+++ /dev/null
@@ -1,411 +0,0 @@
-package mvm.mmrts.rdf.partition.mr;
-
-import cloudbase.core.client.ZooKeeperInstance;
-import cloudbase.core.util.ArgumentChecker;
-import mvm.mmrts.rdf.partition.PartitionSail;
-import org.apache.commons.codec.binary.Base64;
-import org.apache.hadoop.conf.Configuration;
-import org.apache.hadoop.io.LongWritable;
-import org.apache.hadoop.io.MapWritable;
-import org.apache.hadoop.io.Text;
-import org.apache.hadoop.io.Writable;
-import org.apache.hadoop.mapreduce.*;
-import org.openrdf.model.ValueFactory;
-import org.openrdf.model.impl.ValueFactoryImpl;
-import org.openrdf.query.*;
-import org.openrdf.repository.Repository;
-import org.openrdf.repository.RepositoryConnection;
-import org.openrdf.repository.RepositoryException;
-import org.openrdf.repository.sail.SailRepository;
-
-import java.io.DataInput;
-import java.io.DataOutput;
-import java.io.IOException;
-import java.io.UnsupportedEncodingException;
-import java.net.URLDecoder;
-import java.net.URLEncoder;
-import java.util.ArrayList;
-import java.util.Collection;
-import java.util.List;
-import java.util.concurrent.BlockingQueue;
-import java.util.concurrent.LinkedBlockingQueue;
-
-import static mvm.mmrts.rdf.partition.PartitionConstants.*;
-
-/**
- * Class SparqlPartitionStoreInputFormat
- * Date: Oct 28, 2010
- * Time: 11:48:17 AM
- */
-public class SparqlPartitionStoreInputFormat extends InputFormat<LongWritable, MapWritable> {
-
-    public static final String PREFIX = "mvm.mmrts.rdf.partition.mr.sparqlinputformat";
-    public static final String INPUT_INFO_HAS_BEEN_SET = PREFIX + ".configured";
-    public static final String INSTANCE_HAS_BEEN_SET = PREFIX + ".instanceConfigured";
-    public static final String USERNAME = PREFIX + ".username";
-    public static final String PASSWORD = PREFIX + ".password";
-
-    public static final String INSTANCE_NAME = PREFIX + ".instanceName";
-    public static final String ZK = PREFIX + ".zk";
-
-    public static final String STARTTIME = PREFIX + ".starttime";
-    public static final String ENDTIME = PREFIX + ".endtime";
-    public static final String TABLE = PREFIX + ".table";
-    public static final String SHARD_TABLE = PREFIX + ".shardtable";
-    public static final String SPARQL_QUERIES_PROP = PREFIX + ".sparql";
-    public static final String MR_NUMTHREADS_PROP = PREFIX + ".numthreads";
-//    public static final String RANGE_PROP = PREFIX + ".range";
-//    public static final String NUM_RANGES_PROP = PREFIX + ".numranges";
-//    public static final String TABLE_PREFIX_PROP = PREFIX + ".tablePrefix";
-//    public static final String OFFSET_RANGE_PROP = PREFIX + ".offsetrange";
-
-//    public static final String INFER_PROP = PREFIX + ".infer";
-
-    private static final String UTF_8 = "UTF-8";
-
-    private static final ValueFactory vf = ValueFactoryImpl.getInstance();
-
-    static class SparqlInputSplit extends InputSplit implements Writable {
-
-        protected String sparql;
-        protected String startTime;
-        protected String endTime;
-        protected String table;
-//        private Long offset;
-//        private Long limit;
-
-        private SparqlInputSplit() {
-        }
-
-        private SparqlInputSplit(String sparql, String startTime, String endTime, String table) {
-            this.sparql = sparql;
-            this.startTime = startTime;
-            this.endTime = endTime;
-            this.table = table;
-//            this.offset = offset;
-//            this.limit = limit;
-        }
-
-        @Override
-        public long getLength() throws IOException, InterruptedException {
-            return 0;
-        }
-
-        @Override
-        public String[] getLocations() throws IOException, InterruptedException {
-            return new String[]{sparql};
-        }
-
-        @Override
-        public void write(DataOutput dataOutput) throws IOException {
-            boolean startTimeExists = startTime != null;
-            dataOutput.writeBoolean(startTimeExists);
-            if (startTimeExists)
-                dataOutput.writeUTF(startTime);
-
-            boolean endTimeExists = endTime != null;
-            dataOutput.writeBoolean(endTimeExists);
-            if (endTimeExists)
-                dataOutput.writeUTF(endTime);
-
-            dataOutput.writeUTF(table);
-            dataOutput.writeUTF(sparql);
-        }
-
-        @Override
-        public void readFields(DataInput dataInput) throws IOException {
-            if (dataInput.readBoolean())
-                this.startTime = dataInput.readUTF();
-            if (dataInput.readBoolean())
-                this.endTime = dataInput.readUTF();
-            this.table = dataInput.readUTF();
-            this.sparql = dataInput.readUTF();
-        }
-    }
-
-    /**
-     * Create a SparqlInputSplit for every SPARQL query.<br>
-     * A single SPARQL query may be separated into numRanges time ranges. For example,
-     * numRanges of 3, a range of one day (in ms), and one query yield 3 input splits
-     * with the same query: the first range covers now back to one day ago, the second
-     * covers one day ago back to two days ago, and the third covers everything earlier
-     * than two days ago.
-     * <br><br>
-     * If numRanges is not set, or is set to 1, the input split can only focus on a given
-     * startTime and ttl; if those are not set, all time is considered.
-     *
-     * @param job
-     * @return
-     * @throws java.io.IOException
-     * @throws InterruptedException
-     */
-    @Override
-    public List<InputSplit> getSplits(JobContext job) throws IOException, InterruptedException {
-        validateOptions(job.getConfiguration());
-        final Collection<String> queries = getSparqlQueries(job.getConfiguration());
-        if (queries == null || queries.size() == 0)
-            throw new IOException("Queries cannot be null or empty");
-
-        String startTime_s = getStartTime(job.getConfiguration());
-        String endTime_s = getEndTime(job.getConfiguration());
-
-        List<InputSplit> splits = new ArrayList<InputSplit>();
-        for (String query : queries) {
-            splits.add(new SparqlInputSplit(query, startTime_s, endTime_s, getTable(job.getConfiguration())));
-        }
-        return splits;
-    }
-
-    @Override
-    public RecordReader<LongWritable, MapWritable> createRecordReader(InputSplit inputSplit, TaskAttemptContext taskAttemptContext)
-            throws IOException, InterruptedException {
-        return new SparqlResultsRecordReader(taskAttemptContext.getConfiguration());
-    }
-
-    protected static String getUsername(Configuration conf) {
-        return conf.get(USERNAME);
-    }
-
-    /**
-     * WARNING: The password is stored in the Configuration and shared with all
-     * MapReduce tasks; It is BASE64 encoded to provide a charset safe
-     * conversion to a string, and is not intended to be secure.
-     */
-    protected static String getPassword(Configuration conf) {
-        return new String(Base64.decodeBase64(conf.get(PASSWORD, "").getBytes()));
-    }
-
-    protected static String getInstance(Configuration conf) {
-        return conf.get(INSTANCE_NAME);
-    }
-
-    public static void setSparqlQueries(JobContext job, String... queries) {
-        if (queries == null || queries.length == 0)
-            throw new IllegalArgumentException("Queries cannot be null or empty");
-
-        final Configuration conf = job.getConfiguration();
-        setSparqlQueries(conf, queries);
-    }
-
-    public static void setSparqlQueries(Configuration conf, String... queries) {
-        try {
-            Collection<String> qencs = new ArrayList<String>();
-            for (String query : queries) {
-                final String qenc = URLEncoder.encode(query, UTF_8);
-                qencs.add(qenc);
-            }
-            conf.setStrings(SPARQL_QUERIES_PROP, qencs.toArray(new String[qencs.size()]));
-        } catch (UnsupportedEncodingException e) {
-            //what to do...
-            e.printStackTrace();
-        }
-    }
-
-    public static Collection<String> getSparqlQueries(Configuration conf) {
-        Collection<String> queries = new ArrayList<String>();
-        final Collection<String> qencs = conf.getStringCollection(SPARQL_QUERIES_PROP);
-        for (String qenc : qencs) {
-            queries.add(qenc);
-        }
-        return queries;
-    }
-
-    public static void setLongJob(JobContext job, Long time) {
-        Configuration conf = job.getConfiguration();
-        //need to make the runtime longer, default 30 min
-        time = (time == null) ? 1800000 : time;
-        conf.setLong("mapreduce.tasktracker.healthchecker.script.timeout", time);
-        conf.set("mapred.child.java.opts", "-Xmx1G");
-        conf.setBoolean("mapred.map.tasks.speculative.execution", false);
-        conf.setBoolean("mapred.reduce.tasks.speculative.execution", false);
-    }
-
-    public static void setInputInfo(JobContext job, String user, byte[] passwd) {
-        Configuration conf = job.getConfiguration();
-        if (conf.getBoolean(INPUT_INFO_HAS_BEEN_SET, false))
-            throw new IllegalStateException("Input info can only be set once per job");
-        conf.setBoolean(INPUT_INFO_HAS_BEEN_SET, true);
-
-        ArgumentChecker.notNull(user, passwd);
-        conf.set(USERNAME, user);
-        conf.set(PASSWORD, new String(Base64.encodeBase64(passwd)));
-    }
-
-    public static void setEndTime(JobContext job, String endTime) {
-        Configuration conf = job.getConfiguration();
-        conf.set(ENDTIME, endTime);
-    }
-
-    public static String getEndTime(Configuration conf) {
-        return conf.get(ENDTIME);
-    }
-
-    public static void setNumThreads(JobContext job, int numThreads) {
-        Configuration conf = job.getConfiguration();
-        conf.setInt(MR_NUMTHREADS_PROP, numThreads);
-    }
-
-    public static int getNumThreads(Configuration conf) {
-        return conf.getInt(MR_NUMTHREADS_PROP, -1);
-    }
-
-    public static void setTable(JobContext job, String table) {
-        Configuration conf = job.getConfiguration();
-        conf.set(TABLE, table);
-    }
-
-    public static String getTable(Configuration conf) {
-        return conf.get(TABLE);
-    }
-
-    public static void setShardTable(JobContext job, String table) {
-        Configuration conf = job.getConfiguration();
-        conf.set(SHARD_TABLE, table);
-    }
-
-    public static String getShardTable(Configuration conf) {
-        String t = conf.get(SHARD_TABLE);
-        return (t != null) ? t : getTable(conf);
-    }
-
-    public static void setStartTime(JobContext job, String startTime) {
-        Configuration conf = job.getConfiguration();
-        conf.set(STARTTIME, startTime);
-    }
-
-    public static String getStartTime(Configuration conf) {
-        return conf.get(STARTTIME);
-    }
-
-    public static void setZooKeeperInstance(JobContext job, String instanceName, String zk) {
-        Configuration conf = job.getConfiguration();
-        if (conf.getBoolean(INSTANCE_HAS_BEEN_SET, false))
-            throw new IllegalStateException("Instance info can only be set once per job");
-        conf.setBoolean(INSTANCE_HAS_BEEN_SET, true);
-
-        ArgumentChecker.notNull(instanceName, zk);
-        conf.set(INSTANCE_NAME, instanceName);
-        conf.set(ZK, zk);
-    }
-
-    protected static void validateOptions(Configuration conf) throws IOException {
-        if (!conf.getBoolean(INPUT_INFO_HAS_BEEN_SET, false))
-            throw new IOException("Input info has not been set.");
-        if (!conf.getBoolean(INSTANCE_HAS_BEEN_SET, false))
-            throw new IOException("Instance info has not been set.");
-        if (conf.getStrings(SPARQL_QUERIES_PROP) == null)
-            throw new IOException("Sparql queries have not been set.");
-    }
-
-    private class SparqlResultsRecordReader extends RecordReader<LongWritable, MapWritable>
-//            implements TupleQueryResultWriter, Runnable
-    {
-
-        boolean closed = false;
-        long count = 0;
-        BlockingQueue<MapWritable> queue = new LinkedBlockingQueue<MapWritable>();
-        private Repository repo;
-        String query;
-
-        Configuration conf;
-        private TupleQueryResult result;
-        private RepositoryConnection conn;
-
-        public SparqlResultsRecordReader(Configuration conf) {
-            this.conf = conf;
-        }
-
-        @Override
-        public void initialize(InputSplit inputSplit, TaskAttemptContext taskAttemptContext) throws IOException, InterruptedException {
-
-            try {
-                validateOptions(conf);
-
-                SparqlInputSplit sis = (SparqlInputSplit) inputSplit;
-                this.query = sis.sparql;
-
-                // init RdfCloudTripleStore
-                final PartitionSail store = new PartitionSail(new ZooKeeperInstance(getInstance(conf),
-                        conf.get(ZK)).getConnector(getUsername(conf), getPassword(conf).getBytes()), getTable(conf), getShardTable(conf));
-
-                repo = new SailRepository(store);
-                repo.initialize();
-
-                conn = repo.getConnection();
-                query = URLDecoder.decode(query, UTF_8);
-                TupleQuery tupleQuery = conn.prepareTupleQuery(
-                        QueryLanguage.SPARQL, query);
-
-                if (sis.startTime != null && sis.endTime != null) {
-                    tupleQuery.setBinding(START_BINDING, vf.createLiteral(sis.startTime));
-                    tupleQuery.setBinding(END_BINDING, vf.createLiteral(sis.endTime));
-                }
-
-                int threads = getNumThreads(conf);
-                if (threads > 0) {
-                    tupleQuery.setBinding(NUMTHREADS_PROP, vf.createLiteral(threads));
-                }
-
-                result = tupleQuery.evaluate();
-            } catch (Exception e) {
-                throw new IOException("Exception occurred opening Repository", e);
-            }
-        }
-
-        @Override
-        public boolean nextKeyValue() throws IOException, InterruptedException {
-            try {
-                return result.hasNext();
-            } catch (QueryEvaluationException e) {
-                throw new IOException(e);
-            }
-//            return false;
-        }
-
-        @Override
-        public LongWritable getCurrentKey() throws IOException, InterruptedException {
-            return new LongWritable(count++);
-        }
-
-        @Override
-        public MapWritable getCurrentValue() throws IOException, InterruptedException {
-            try {
-                if (result.hasNext()) {
-                    BindingSet bindingSet = result.next();
-                    return transformRow(bindingSet);
-                }
-                return null;
-            } catch (QueryEvaluationException e) {
-                throw new IOException(e);
-            }
-        }
-
-        @Override
-        public float getProgress() throws IOException, InterruptedException {
-            return (closed) ? (1) : (0);
-        }
-
-        @Override
-        public void close() throws IOException {
-            closed = true;
-            try {
-                conn.close();
-                repo.shutDown();
-            } catch (RepositoryException e) {
-                throw new IOException("Exception occurred closing Repository", e);
-            }
-        }
-
-        MapWritable mw = new MapWritable();
-
-        protected MapWritable transformRow(BindingSet bindingSet) {
-            mw.clear(); //handle the case of optional bindings. -mbraun
-            for (String name : bindingSet.getBindingNames()) {
-                final Text key = new Text(name);
-                final Text value = new Text(bindingSet.getValue(name).stringValue());
-                mw.put(key, value);
-            }
-            return mw;
-        }
-    }
-}
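
As the warning on getPassword above notes, the password is only Base64-encoded for a charset-safe trip through the Configuration; it is not encrypted. A short sketch of the round trip, using the same commons-codec calls (the credential value is a placeholder):

    import org.apache.commons.codec.binary.Base64;

    public class PasswordRoundTripSketch {
        public static void main(String[] args) {
            byte[] passwd = "secret".getBytes();               // placeholder
            // What setInputInfo stores in the Configuration:
            String stored = new String(Base64.encodeBase64(passwd));
            // What getPassword recovers from it:
            String recovered = new String(Base64.decodeBase64(stored.getBytes()));
            System.out.println(recovered);                     // prints: secret
        }
    }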

http://git-wip-us.apache.org/repos/asf/incubator-rya/blob/80faf06d/partition/mr.partition.rdf/src/main/java/mvm/mmrts/rdf/partition/mr/SparqlTestDriver.java
----------------------------------------------------------------------
diff --git a/partition/mr.partition.rdf/src/main/java/mvm/mmrts/rdf/partition/mr/SparqlTestDriver.java b/partition/mr.partition.rdf/src/main/java/mvm/mmrts/rdf/partition/mr/SparqlTestDriver.java
deleted file mode 100644
index 4b369ae..0000000
--- a/partition/mr.partition.rdf/src/main/java/mvm/mmrts/rdf/partition/mr/SparqlTestDriver.java
+++ /dev/null
@@ -1,155 +0,0 @@
-package mvm.mmrts.rdf.partition.mr;
-
-import com.google.common.io.ByteStreams;
-import org.apache.hadoop.conf.Configuration;
-import org.apache.hadoop.fs.FileSystem;
-import org.apache.hadoop.fs.Path;
-import org.apache.hadoop.io.BooleanWritable;
-import org.apache.hadoop.io.LongWritable;
-import org.apache.hadoop.io.MapWritable;
-import org.apache.hadoop.io.Text;
-import org.apache.hadoop.mapreduce.Job;
-import org.apache.hadoop.mapreduce.Mapper;
-import org.apache.hadoop.mapreduce.Reducer;
-import org.apache.hadoop.mapreduce.lib.output.FileOutputFormat;
-import org.apache.hadoop.util.Tool;
-import org.apache.hadoop.util.ToolRunner;
-
-import java.io.FileInputStream;
-import java.io.IOException;
-import java.util.Date;
-
-/**
- * Class SparqlTestDriver
- * Date: Oct 28, 2010
- * Time: 2:53:39 PM
- */
-public class SparqlTestDriver implements Tool {
-
-    public static void main(String[] args) {
-        try {
-            ToolRunner.run(new Configuration(), new SparqlTestDriver(), args);
-        } catch (Exception e) {
-            e.printStackTrace();
-        }
-    }
-
-    private Configuration conf;
-
-    public Configuration getConf() {
-        return conf;
-    }
-
-    public void setConf(Configuration conf) {
-        this.conf = conf;
-    }
-
-    public int run(String[] args) throws IOException, InterruptedException,
-            ClassNotFoundException {
-
-        //query from file
-        if(args.length < 2) {
-            throw new IllegalArgumentException("Usage: hadoop jar mvm.mmrts.rdf.partition.mr.SparqlTestDriver <local query file> outputFile");
-        }
-
-        FileInputStream fis = new FileInputStream(args[0]);
-        String query = new String(ByteStreams.toByteArray(fis));
-        fis.close();
-
-        Job job = new Job(conf);
-        job.setJarByClass(SparqlTestDriver.class);
-
-        // set up cloudbase input
-        job.setInputFormatClass(SparqlPartitionStoreInputFormat.class);
-        SparqlPartitionStoreInputFormat.setInputInfo(job, "root", "password".getBytes());
-        SparqlPartitionStoreInputFormat.setZooKeeperInstance(job, "stratus", "10.40.190.113:2181");
-        SparqlPartitionStoreInputFormat.setLongJob(job, null);
-        SparqlPartitionStoreInputFormat.setTable(job, "partitionRdf");
-
-        long startTime_l = 1303811164088l;
-        long ttl = 86400000;
-
-        //set query
-//        String query = "PREFIX tdp: <http://here/2010/tracked-data-provenance/ns#>\n" +
-//                "PREFIX rdf: <http://www.w3.org/1999/02/22-rdf-syntax-ns#>\n" +
-//                "PREFIX mvmpart: <urn:mvm.mmrts.partition.rdf/08/2011#>\n" +
-//                "SELECT * WHERE\n" +
-//                "{\n" +
-//                "?id tdp:reportedAt ?timestamp. \n" +
-//                "FILTER(mvmpart:timeRange(?id, tdp:reportedAt, 1314380456900 , 1314384056900 , 'XMLDATETIME')).\n" +
-//                "?id tdp:performedBy ?system.\n" +
-//                "} \n";
-//
-//        String query2 = "PREFIX hb: <http://here/2010/tracked-data-provenance/heartbeat/ns#>\n" +
-//                "PREFIX rdf: <http://www.w3.org/1999/02/22-rdf-syntax-ns#>\n" +
-//                "PREFIX mvmpart: <urn:mvm.mmrts.partition.rdf/08/2011#>\n" +
-//                "SELECT * WHERE\n" +
-//                "{\n" +
-//                "?id hb:timeStamp ?timestamp. \n" +
-//                "FILTER(mvmpart:timeRange(?id, hb:timeStamp, 1314360009522 , 1314367209522 , 'TIMESTAMP')).\n" +
-//                "?id hb:count ?count.\n" +
-//                "?id hb:systemName ?systemName.\n" +
-//                "} ";
-
-        System.out.println(query);
-        System.out.println();
-//        System.out.println(query2);
-
-        SparqlPartitionStoreInputFormat.setSparqlQueries(job, query);
-//        SparqlCloudbaseStoreInputFormat.setStartTime(job, 1309956861000l + "");
-//        SparqlCloudbaseStoreInputFormat.setTtl(job, 86400000 + "");
-
-        // set input output of the particular job
-        job.setMapOutputKeyClass(Text.class);
-        job.setMapOutputValueClass(Text.class);
-        job.setOutputKeyClass(Text.class);
-        job.setOutputValueClass(Text.class);
-
-        //job.setOutputFormatClass(FileOutputFormat.class);
-
-
-        // set mapper and reducer classes
-        job.setMapperClass(MyTempMapper.class);
-        job.setReducerClass(Reducer.class);
-        job.setNumReduceTasks(1);
-
-        // set output
-        Path outputDir = new Path(args[1]);
-        FileSystem dfs = FileSystem.get(outputDir.toUri(), conf);
-        if (dfs.exists(outputDir))
-            dfs.delete(outputDir, true);
-
-        FileOutputFormat.setOutputPath(job, outputDir);
-
-        // Submit the job
-        Date startTime = new Date();
-        System.out.println("Job started: " + startTime);
-        int exitCode = job.waitForCompletion(true) ? 0 : 1;
-
-        if (exitCode == 0) {
-            Date end_time = new Date();
-            System.out.println("Job ended: " + end_time);
-            System.out.println("The job took "
-                    + (end_time.getTime() - startTime.getTime()) / 1000
-                    + " seconds.");
-            return (int) job
-                    .getCounters()
-                    .findCounter("org.apache.hadoop.mapred.Task$Counter",
-                            "REDUCE_OUTPUT_RECORDS").getValue();
-        } else {
-            System.out.println("Job Failed!!!");
-        }
-
-        return -1;
-    }
-
-    public static class MyTempMapper extends Mapper<LongWritable, MapWritable, Text, Text> {
-        Text outKey = new Text();
-        Text outValue = new Text("partition");
-        @Override
-        protected void map(LongWritable key, MapWritable value, Context context) throws IOException, InterruptedException {
-            outKey.set(value.values().toString());
-            context.write(outKey, outValue);
-        }
-    }
-}

http://git-wip-us.apache.org/repos/asf/incubator-rya/blob/80faf06d/partition/mr.partition.rdf/src/main/java/mvm/mmrts/rdf/partition/mr/TestDriver.java
----------------------------------------------------------------------
diff --git a/partition/mr.partition.rdf/src/main/java/mvm/mmrts/rdf/partition/mr/TestDriver.java b/partition/mr.partition.rdf/src/main/java/mvm/mmrts/rdf/partition/mr/TestDriver.java
deleted file mode 100644
index 80255ba..0000000
--- a/partition/mr.partition.rdf/src/main/java/mvm/mmrts/rdf/partition/mr/TestDriver.java
+++ /dev/null
@@ -1,154 +0,0 @@
-package mvm.mmrts.rdf.partition.mr;
-
-import com.google.common.io.ByteStreams;
-import com.google.common.primitives.Bytes;
-import mvm.mmrts.rdf.partition.PartitionConstants;
-import mvm.mmrts.rdf.partition.utils.RdfIO;
-import org.apache.hadoop.conf.Configuration;
-import org.apache.hadoop.fs.FileSystem;
-import org.apache.hadoop.fs.Path;
-import org.apache.hadoop.io.LongWritable;
-import org.apache.hadoop.io.MapWritable;
-import org.apache.hadoop.io.Text;
-import org.apache.hadoop.io.Writable;
-import org.apache.hadoop.mapreduce.Job;
-import org.apache.hadoop.mapreduce.Mapper;
-import org.apache.hadoop.mapreduce.Reducer;
-import org.apache.hadoop.mapreduce.lib.input.FileInputFormat;
-import org.apache.hadoop.mapreduce.lib.input.TextInputFormat;
-import org.apache.hadoop.mapreduce.lib.output.FileOutputFormat;
-import org.apache.hadoop.mapreduce.lib.output.TextOutputFormat;
-import org.apache.hadoop.util.Tool;
-import org.apache.hadoop.util.ToolRunner;
-import org.openrdf.model.Statement;
-
-import java.io.IOException;
-import java.util.Date;
-
-/**
- * Class TestDriver
- * Date: Oct 28, 2010
- * Time: 2:53:39 PM
- */
-public class TestDriver implements Tool {
-
-    public static void main(String[] args) {
-        try {
-            ToolRunner.run(new Configuration(), new TestDriver(), args);
-        } catch (Exception e) {
-            e.printStackTrace();
-        }
-    }
-
-    private Configuration conf;
-
-    public Configuration getConf() {
-        return conf;
-    }
-
-    public void setConf(Configuration conf) {
-        this.conf = conf;
-    }
-
-    public int run(String[] args) throws IOException, InterruptedException,
-            ClassNotFoundException {
-
-        Job job = new Job(conf);
-        job.setJarByClass(TestDriver.class);
-
-        FileInputFormat.addInputPaths(job, "/temp/rpunnoose/results.txt");
-        job.setInputFormatClass(TextInputFormat.class);
-
-        // set input output of the particular job
-        job.setMapOutputKeyClass(Text.class);
-        job.setMapOutputValueClass(MapWritable.class);
-        job.setOutputKeyClass(Text.class);
-        job.setOutputValueClass(Text.class);
-
-        job.setOutputFormatClass(TextOutputFormat.class);
-
-        // set mapper and reducer classes
-        job.setMapperClass(SubjectMapWrMapper.class);
-        job.setReducerClass(OutMapWrReducer.class);
-        job.setNumReduceTasks(1);
-//        job.setNumReduceTasks(0);
-
-        // set output
-        Path outputDir = new Path("/temp/rpunnoose/partBS");
-        FileSystem dfs = FileSystem.get(outputDir.toUri(), conf);
-        if (dfs.exists(outputDir))
-            dfs.delete(outputDir, true);
-
-        FileOutputFormat.setOutputPath(job, outputDir);
-
-        // Submit the job
-        Date startTime = new Date();
-        System.out.println("Job started: " + startTime);
-        int exitCode = job.waitForCompletion(true) ? 0 : 1;
-
-        if (exitCode == 0) {
-            Date end_time = new Date();
-            System.out.println("Job ended: " + end_time);
-            System.out.println("The job took "
-                    + (end_time.getTime() - startTime.getTime()) / 1000
-                    + " seconds.");
-            return (int) job
-                    .getCounters()
-                    .findCounter("org.apache.hadoop.mapred.Task$Counter",
-                            "REDUCE_OUTPUT_RECORDS").getValue();
-        } else {
-            System.out.println("Job Failed!!!");
-        }
-
-        return -1;
-    }
-
-    public static class SubjectMapWrMapper extends Mapper<LongWritable, Text, Text, MapWritable> {
-        Text outKey = new Text();
-        final String ID = "id";
-        final Text ID_TXT = new Text(ID);
-        final String PERF_AT = "performedBy";
-        final Text PERF_AT_TXT = new Text("system");
-        final String REPORT_AT = "reportedAt";
-        final Text REPORT_AT_TXT = new Text("timestamp");
-        final String TYPE = "type";
-        final Text TYPE_TXT = new Text(TYPE);
-
-        @Override
-        protected void map(LongWritable key, Text value, Context context) throws IOException, InterruptedException {
-            String s = value.toString();
-            int i = s.lastIndexOf("\0");
-            Statement stmt = RdfIO.readStatement(ByteStreams.newDataInput(s.substring(0, i).getBytes()), PartitionConstants.VALUE_FACTORY);
-            String predStr = stmt.getPredicate().stringValue();
-            if (!predStr.contains(PERF_AT) && !predStr.contains(REPORT_AT) && !predStr.contains(TYPE))
-                return;
-
-            outKey.set(stmt.getSubject().stringValue());
-            MapWritable mw = new MapWritable();
-            mw.put(ID_TXT, outKey);
-            if (predStr.contains(PERF_AT))
-                mw.put(PERF_AT_TXT, new Text(stmt.getObject().stringValue()));
-            else if (predStr.contains(REPORT_AT))
-                mw.put(REPORT_AT_TXT, new Text(stmt.getObject().stringValue()));
-            else if (predStr.contains(TYPE))
-                mw.put(TYPE_TXT, new Text(stmt.getObject().stringValue()));
-
-            context.write(outKey, mw);
-        }
-    }
-
-    public static class OutMapWrReducer extends Reducer<Text, MapWritable, Text, Text> {
-        final Text PART = new Text("partitionBS");
-        Text outKey = new Text();
-
-        @Override
-        protected void reduce(Text key, Iterable<MapWritable> values, Context context) throws IOException, InterruptedException {
-            MapWritable mw = new MapWritable();
-            for (MapWritable value : values) {
-                mw.putAll(value);
-            }
-            outKey.set(mw.values().toString());
-            context.write(outKey, PART);
-        }
-    }
-}

http://git-wip-us.apache.org/repos/asf/incubator-rya/blob/80faf06d/partition/mr.partition.rdf/src/main/java/mvm/mmrts/rdf/partition/mr/compat/ChangeShardDateFormatTool.java
----------------------------------------------------------------------
diff --git a/partition/mr.partition.rdf/src/main/java/mvm/mmrts/rdf/partition/mr/compat/ChangeShardDateFormatTool.java b/partition/mr.partition.rdf/src/main/java/mvm/mmrts/rdf/partition/mr/compat/ChangeShardDateFormatTool.java
deleted file mode 100644
index 2b4565f..0000000
--- a/partition/mr.partition.rdf/src/main/java/mvm/mmrts/rdf/partition/mr/compat/ChangeShardDateFormatTool.java
+++ /dev/null
@@ -1,229 +0,0 @@
-package mvm.mmrts.rdf.partition.mr.compat;
-
-import cloudbase.core.CBConstants;
-import cloudbase.core.client.mapreduce.CloudbaseInputFormat;
-import cloudbase.core.client.mapreduce.CloudbaseOutputFormat;
-import cloudbase.core.data.Key;
-import cloudbase.core.data.Mutation;
-import cloudbase.core.data.Range;
-import cloudbase.core.data.Value;
-import cloudbase.core.security.ColumnVisibility;
-import mvm.mmrts.rdf.partition.PartitionConstants;
-import org.apache.hadoop.conf.Configuration;
-import org.apache.hadoop.io.Text;
-import org.apache.hadoop.mapreduce.Job;
-import org.apache.hadoop.mapreduce.Mapper;
-import org.apache.hadoop.util.Tool;
-import org.apache.hadoop.util.ToolRunner;
-
-import java.io.IOException;
-import java.text.ParseException;
-import java.text.SimpleDateFormat;
-import java.util.Date;
-
-/**
- * MMRTS-148 Need to move the shard index from the partition table to the shardIndex table
- * Class ChangeShardDateFormatTool
- * Date: Dec 8, 2011
- * Time: 4:11:40 PM
- */
-public class ChangeShardDateFormatTool implements Tool {
-    public static final String CB_USERNAME_PROP = "cb.username";
-    public static final String CB_PWD_PROP = "cb.pwd";
-    public static final String CB_ZK_PROP = "cb.zk";
-    public static final String CB_INSTANCE_PROP = "cb.instance";
-    public static final String PARTITION_TABLE_PROP = "partition.table";
-    public static final String OLD_DATE_FORMAT_PROP = "date.format.old";
-    public static final String NEW_DATE_FORMAT_PROP = "date.format.new";
-    public static final String OLD_DATE_SHARD_DELIM = "date.shard.delim.old";
-    public static final String NEW_DATE_SHARD_DELIM = "date.shard.delim.new";
-
-
-    private Configuration conf;
-
-    private String userName = "root";
-    private String pwd = "password";
-    private String instance = "stratus";
-    private String zk = "10.40.190.113:2181";
-    private String partitionTable = "rdfPartition";
-    private String oldDateFormat = "yyyy-MM";
-    private String newDateFormat = "yyyyMMdd";
-    private String oldDateDelim = "-";
-    private String newDateDelim = "_";
-
-    public Configuration getConf() {
-        return conf;
-    }
-
-    @Override
-    public void setConf(Configuration conf) {
-        this.conf = conf;
-    }
-
-    public static void main(String[] args) {
-        try {
-            ToolRunner.run(new Configuration(), new ChangeShardDateFormatTool(), args);
-        } catch (Exception e) {
-            e.printStackTrace();
-        }
-    }
-
-    @Override
-    public int run(String[] args) throws Exception {
-        runJob(args);
-        return 0;
-    }
-
-    public long runJob(String[] args) throws Exception {
-        //faster
-        conf.setBoolean("mapred.map.tasks.speculative.execution", false);
-        conf.setBoolean("mapred.reduce.tasks.speculative.execution", false);
-        conf.set("io.sort.mb", "256");
-
-        zk = conf.get(CB_ZK_PROP, zk);
-        instance = conf.get(CB_INSTANCE_PROP, instance);
-        userName = conf.get(CB_USERNAME_PROP, userName);
-        pwd = conf.get(CB_PWD_PROP, pwd);
-        partitionTable = conf.get(PARTITION_TABLE_PROP, partitionTable);
-        oldDateFormat = conf.get(OLD_DATE_FORMAT_PROP, oldDateFormat);
-        newDateFormat = conf.get(NEW_DATE_FORMAT_PROP, newDateFormat);
-        oldDateDelim = conf.get(OLD_DATE_SHARD_DELIM, oldDateDelim);
-        newDateDelim = conf.get(NEW_DATE_SHARD_DELIM, newDateDelim);
-        conf.set(NEW_DATE_FORMAT_PROP, newDateFormat);
-        conf.set(OLD_DATE_FORMAT_PROP, oldDateFormat);
-        conf.set(PARTITION_TABLE_PROP, partitionTable);
-        conf.set(OLD_DATE_SHARD_DELIM, oldDateDelim);
-        conf.set(NEW_DATE_SHARD_DELIM, newDateDelim);
-
-        Job job = new Job(conf);
-        job.setJarByClass(ChangeShardDateFormatTool.class);
-
-        job.setInputFormatClass(CloudbaseInputFormat.class);
-        //TODO: How should I send in Auths?
-        CloudbaseInputFormat.setInputInfo(job, userName, pwd.getBytes(),
-                partitionTable, CBConstants.NO_AUTHS);
-        CloudbaseInputFormat.setZooKeeperInstance(job, instance, zk);
-
-        job.setMapperClass(ChangeDateFormatMapper.class);
-        job.setMapOutputKeyClass(Text.class);
-        job.setMapOutputValueClass(Mutation.class);
-
-        job.setOutputFormatClass(CloudbaseOutputFormat.class);
-        CloudbaseOutputFormat.setOutputInfo(job, userName, pwd.getBytes(), true, partitionTable);
-        CloudbaseOutputFormat.setZooKeeperInstance(job, instance, zk);
-
-        job.setNumReduceTasks(0);
-
-        // Submit the job
-        Date startTime = new Date();
-        System.out.println("Job started: " + startTime);
-        int exitCode = job.waitForCompletion(true) ? 0 : 1;
-
-        if (exitCode == 0) {
-            Date end_time = new Date();
-            System.out.println("Job ended: " + end_time);
-            System.out.println("The job took "
-                    + (end_time.getTime() - startTime.getTime()) / 1000
-                    + " seconds.");
-            return job
-                    .getCounters()
-                    .findCounter("org.apache.hadoop.mapred.Task$Counter",
-                            "REDUCE_OUTPUT_RECORDS").getValue();
-        } else {
-            System.out.println("Job Failed!!!");
-        }
-
-        return -1;
-    }
-
-    public static class ChangeDateFormatMapper extends Mapper<Key, Value, Text, Mutation> {
-        private SimpleDateFormat oldDateFormat_df;
-        private SimpleDateFormat newDateFormat_df;
-        private Text partTableTxt;
-        private String newDateDelim;
-        private String oldDateDelim;
-
-        @Override
-        protected void setup(Context context) throws IOException, InterruptedException {
-            super.setup(context);
-            String oldDateFormat = context.getConfiguration().get(OLD_DATE_FORMAT_PROP);
-            if (oldDateFormat == null)
-                throw new IllegalArgumentException("Old Date Format property cannot be null");
-
-            oldDateFormat_df = new SimpleDateFormat(oldDateFormat);
-
-            String newDateFormat = context.getConfiguration().get(NEW_DATE_FORMAT_PROP);
-            if (newDateFormat == null)
-                throw new IllegalArgumentException("New Date Format property cannot be null");
-
-            newDateFormat_df = new SimpleDateFormat(newDateFormat);
-
-            String partTable = context.getConfiguration().get(PARTITION_TABLE_PROP);
-            if (partTable == null)
-                throw new IllegalArgumentException("Partition Table property cannot be null");
-
-            partTableTxt = new Text(partTable);
-
-            oldDateDelim = context.getConfiguration().get(OLD_DATE_SHARD_DELIM);
-            if (oldDateDelim == null)
-                throw new IllegalArgumentException("Old Date Shard Delimiter property cannot be null");
-
-            newDateDelim = context.getConfiguration().get(NEW_DATE_SHARD_DELIM);
-            if (newDateDelim == null)
-                throw new IllegalArgumentException("New Date Shard Delimiter property cannot be null");
-
-        }
-
-        @Override
-        protected void map(Key key, Value value, Context context) throws IOException, InterruptedException {
-            try {
-                String cf = key.getColumnFamily().toString();
-                if ("event".equals(cf) || "index".equals(cf)) {
-                    String shard = key.getRow().toString();
-                    int shardIndex = shard.lastIndexOf(oldDateDelim);
-                    if (shardIndex == -1)
-                        return; //no shard?
-                    String date_s = shard.substring(0, shardIndex);
-                    String shardValue = shard.substring(shardIndex + 1, shard.length());
-
-                    Date date = oldDateFormat_df.parse(date_s);
-                    String newShard = newDateFormat_df.format(date) + newDateDelim + shardValue;
-
-                    Mutation mutation = new Mutation(new Text(newShard));
-                    mutation.put(key.getColumnFamily(), key.getColumnQualifier(),
-                            new ColumnVisibility(key.getColumnVisibility()), System.currentTimeMillis(), value);
-                    context.write(partTableTxt, mutation);
-
-                    //delete
-                    mutation = new Mutation(key.getRow());
-                    mutation.putDelete(key.getColumnFamily(), key.getColumnQualifier(), System.currentTimeMillis());
-
-                    context.write(partTableTxt, mutation);
-                } else {
-                    //shard index
-                    String shard = key.getColumnFamily().toString();
-                    int shardIndex = shard.lastIndexOf(oldDateDelim);
-                    if (shardIndex == -1)
-                        return; //no shard?
-
-                    String date_s = shard.substring(0, shardIndex);
-                    String shardValue = shard.substring(shardIndex + 1, shard.length());
-
-                    Date date = oldDateFormat_df.parse(date_s);
-                    String newShard = newDateFormat_df.format(date) + newDateDelim + shardValue;
-                    
-                    Mutation mutation = new Mutation(key.getRow());
-                    mutation.put(new Text(newShard), key.getColumnQualifier(),
-                            new ColumnVisibility(key.getColumnVisibility()), System.currentTimeMillis(), value);
-
-                    //delete
-                    mutation.putDelete(key.getColumnFamily(), key.getColumnQualifier(), System.currentTimeMillis());
-                    context.write(partTableTxt, mutation);
-                }
-            } catch (ParseException pe) {
-                //only do work for the rows that match the old date format
-                //throw new IOException(pe);
-            }
-        }
-    }
-}
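
The core of the migration above is a plain key rewrite: split the shard row
on the old delimiter, reparse the date prefix with the old format, and
re-emit it with the new format and delimiter. A minimal standalone sketch of
that transformation, using the deleted class's default formats (yyyy-MM with
"-", yyyyMMdd with "_"); the class name and sample values here are
illustrative only, not part of the commit:

    import java.text.ParseException;
    import java.text.SimpleDateFormat;
    import java.util.Date;

    public class ShardKeyRewriteSketch {
        public static void main(String[] args) throws ParseException {
            SimpleDateFormat oldFmt = new SimpleDateFormat("yyyy-MM");
            SimpleDateFormat newFmt = new SimpleDateFormat("yyyyMMdd");
            String shard = "2011-12-42";      // old-style row key: <date>-<shardValue>

            int idx = shard.lastIndexOf('-'); // old date/shard delimiter
            Date date = oldFmt.parse(shard.substring(0, idx));
            String rewritten = newFmt.format(date) + "_" + shard.substring(idx + 1);

            System.out.println(rewritten);    // prints 20111201_42
        }
    }

Rows whose prefix does not parse under the old format throw ParseException
and are skipped, which is how the mapper above restricts the rewrite to
legacy keys.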

http://git-wip-us.apache.org/repos/asf/incubator-rya/blob/80faf06d/partition/mr.partition.rdf/src/main/java/mvm/mmrts/rdf/partition/mr/compat/MoveShardIndexTool.java
----------------------------------------------------------------------
diff --git a/partition/mr.partition.rdf/src/main/java/mvm/mmrts/rdf/partition/mr/compat/MoveShardIndexTool.java b/partition/mr.partition.rdf/src/main/java/mvm/mmrts/rdf/partition/mr/compat/MoveShardIndexTool.java
deleted file mode 100644
index ba2eece..0000000
--- a/partition/mr.partition.rdf/src/main/java/mvm/mmrts/rdf/partition/mr/compat/MoveShardIndexTool.java
+++ /dev/null
@@ -1,171 +0,0 @@
-package mvm.mmrts.rdf.partition.mr.compat;
-
-import cloudbase.core.CBConstants;
-import cloudbase.core.client.mapreduce.CloudbaseInputFormat;
-import cloudbase.core.client.mapreduce.CloudbaseOutputFormat;
-import cloudbase.core.data.Key;
-import cloudbase.core.data.Mutation;
-import cloudbase.core.data.Range;
-import cloudbase.core.data.Value;
-import cloudbase.core.security.ColumnVisibility;
-import mvm.mmrts.rdf.partition.PartitionConstants;
-import org.apache.hadoop.conf.Configuration;
-import org.apache.hadoop.io.Text;
-import org.apache.hadoop.mapreduce.Job;
-import org.apache.hadoop.mapreduce.Mapper;
-import org.apache.hadoop.util.Tool;
-import org.apache.hadoop.util.ToolRunner;
-
-import java.io.IOException;
-import java.util.Collections;
-import java.util.Date;
-
-/**
- * MMRTS-148 Need to move the shard index from the partition table to the shardIndex table
- * Class MoveShardIndexTool
- * Date: Dec 8, 2011
- * Time: 4:11:40 PM
- */
-public class MoveShardIndexTool implements Tool {
-    public static final String CB_USERNAME_PROP = "cb.username";
-    public static final String CB_PWD_PROP = "cb.pwd";
-    public static final String CB_ZK_PROP = "cb.zk";
-    public static final String CB_INSTANCE_PROP = "cb.instance";
-    public static final String PARTITION_TABLE_PROP = "partition.table";
-    public static final String SHARD_INDEX_TABLE_PROP = "shard.index.table";
-    public static final String SHARD_INDEX_DELETE_PROP = "shard.index.delete";
-
-
-    private Configuration conf;
-
-    private String userName = "root";
-    private String pwd = "password";
-    private String instance = "stratus";
-    private String zk = "10.40.190.113:2181";
-    private String partitionTable = "rdfPartition";
-    private String shardIndexTable = "rdfShardIndex";
-
-    public Configuration getConf() {
-        return conf;
-    }
-
-    @Override
-    public void setConf(Configuration conf) {
-        this.conf = conf;
-    }
-
-    public static void main(String[] args) {
-        try {
-            ToolRunner.run(new Configuration(), new MoveShardIndexTool(), args);
-        } catch (Exception e) {
-            e.printStackTrace();
-        }
-    }
-
-    @Override
-    public int run(String[] args) throws Exception {
-        runJob(args);
-        return 0;
-    }
-
-    public long runJob(String[] args) throws Exception {
-        //faster
-        conf.setBoolean("mapred.map.tasks.speculative.execution", false);
-        conf.setBoolean("mapred.reduce.tasks.speculative.execution", false);
-        conf.set("io.sort.mb", "256");
-
-        zk = conf.get(CB_ZK_PROP, zk);
-        instance = conf.get(CB_INSTANCE_PROP, instance);
-        userName = conf.get(CB_USERNAME_PROP, userName);
-        pwd = conf.get(CB_PWD_PROP, pwd);
-        partitionTable = conf.get(PARTITION_TABLE_PROP, partitionTable);
-        shardIndexTable = conf.get(SHARD_INDEX_TABLE_PROP, shardIndexTable);
-        conf.set(SHARD_INDEX_TABLE_PROP, shardIndexTable);
-        conf.set(PARTITION_TABLE_PROP, partitionTable);
-
-        Job job = new Job(conf);
-        job.setJarByClass(MoveShardIndexTool.class);
-
-        job.setInputFormatClass(CloudbaseInputFormat.class);
-        //TODO: How should I send in Auths?
-        CloudbaseInputFormat.setInputInfo(job, userName, pwd.getBytes(),
-                partitionTable, CBConstants.NO_AUTHS);
-        CloudbaseInputFormat.setZooKeeperInstance(job, instance, zk);
-        CloudbaseInputFormat.setRanges(job, Collections.singleton(
-                new Range(
-                        new Text(PartitionConstants.URI_MARKER_STR),
-                        new Text(PartitionConstants.PLAIN_LITERAL_MARKER_STR))));
-
-        job.setMapperClass(ShardKeyValueToMutationMapper.class);
-        job.setMapOutputKeyClass(Text.class);
-        job.setMapOutputValueClass(Mutation.class);
-
-        job.setOutputFormatClass(CloudbaseOutputFormat.class);
-        CloudbaseOutputFormat.setOutputInfo(job, userName, pwd.getBytes(), true, shardIndexTable);
-        CloudbaseOutputFormat.setZooKeeperInstance(job, instance, zk);
-
-        job.setNumReduceTasks(0);
-
-        // Submit the job
-        Date startTime = new Date();
-        System.out.println("Job started: " + startTime);
-        int exitCode = job.waitForCompletion(true) ? 0 : 1;
-
-        if (exitCode == 0) {
-            Date end_time = new Date();
-            System.out.println("Job ended: " + end_time);
-            System.out.println("The job took "
-                    + (end_time.getTime() - startTime.getTime()) / 1000
-                    + " seconds.");
-            return job
-                    .getCounters()
-                    .findCounter("org.apache.hadoop.mapred.Task$Counter",
-                            "REDUCE_OUTPUT_RECORDS").getValue();
-        } else {
-            System.out.println("Job Failed!!!");
-        }
-
-        return -1;
-    }
-
-    public static class ShardKeyValueToMutationMapper extends Mapper<Key, Value, Text, Mutation> {
-        private Text shardTableTxt;
-        private Text partTableTxt;
-        protected boolean deletePrevShardIndex;
-
-        @Override
-        protected void setup(Context context) throws IOException, InterruptedException {
-            super.setup(context);
-            String shardTable = context.getConfiguration().get(SHARD_INDEX_TABLE_PROP);
-            if (shardTable == null)
-                throw new IllegalArgumentException("Shard Table property cannot be null");
-
-            shardTableTxt = new Text(shardTable);
-
-            String partTable = context.getConfiguration().get(PARTITION_TABLE_PROP);
-            if (partTable == null)
-                throw new IllegalArgumentException("Partition Table property cannot be null");
-
-            partTableTxt = new Text(partTable);
-
-            deletePrevShardIndex = context.getConfiguration().getBoolean(SHARD_INDEX_DELETE_PROP, false);
-            System.out.println("Deleting shard index from previous: " + deletePrevShardIndex + " Part: " + partTableTxt);
-        }
-
-        @Override
-        protected void map(Key key, Value value, Context context) throws IOException, InterruptedException {
-            Mutation mutation = new Mutation(key.getRow());
-            mutation.put(key.getColumnFamily(), key.getColumnQualifier(),
-                    new ColumnVisibility(key.getColumnVisibility()), System.currentTimeMillis(), value);
-
-            context.write(shardTableTxt, mutation);
-
-            if (deletePrevShardIndex) {
-                mutation = new Mutation(key.getRow());
-                mutation.putDelete(key.getColumnFamily(), key.getColumnQualifier(), System.currentTimeMillis());
-
-                context.write(partTableTxt, mutation);
-            }
-        }
-    }
-}
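
MoveShardIndexTool keeps this pass cheap by scanning only the slice of the
partition table that holds subject index rows, not the whole table. The range
restriction is the load-bearing line (copied from the job setup above; the
marker constants come from PartitionConstants):

    // Limit the input scan to subject index rows: everything between the
    // URI marker and the plain-literal marker in the partition table.
    CloudbaseInputFormat.setRanges(job, Collections.singleton(
            new Range(
                    new Text(PartitionConstants.URI_MARKER_STR),
                    new Text(PartitionConstants.PLAIN_LITERAL_MARKER_STR))));

Each matching entry is then copied into the shard index table and, when
shard.index.delete is true, a matching delete is written back to the
partition table in the same map call.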

http://git-wip-us.apache.org/repos/asf/incubator-rya/blob/80faf06d/partition/mr.partition.rdf/src/main/java/mvm/mmrts/rdf/partition/mr/fileinput/RdfFileInputFormat.java
----------------------------------------------------------------------
diff --git a/partition/mr.partition.rdf/src/main/java/mvm/mmrts/rdf/partition/mr/fileinput/RdfFileInputFormat.java b/partition/mr.partition.rdf/src/main/java/mvm/mmrts/rdf/partition/mr/fileinput/RdfFileInputFormat.java
deleted file mode 100644
index b347a56..0000000
--- a/partition/mr.partition.rdf/src/main/java/mvm/mmrts/rdf/partition/mr/fileinput/RdfFileInputFormat.java
+++ /dev/null
@@ -1,155 +0,0 @@
-package mvm.mmrts.rdf.partition.mr.fileinput;
-
-import mvm.mmrts.rdf.partition.utils.RdfIO;
-import org.apache.hadoop.conf.Configuration;
-import org.apache.hadoop.fs.FSDataInputStream;
-import org.apache.hadoop.fs.FileSystem;
-import org.apache.hadoop.fs.Path;
-import org.apache.hadoop.io.BytesWritable;
-import org.apache.hadoop.io.LongWritable;
-import org.apache.hadoop.mapreduce.InputSplit;
-import org.apache.hadoop.mapreduce.RecordReader;
-import org.apache.hadoop.mapreduce.TaskAttemptContext;
-import org.apache.hadoop.mapreduce.lib.input.FileInputFormat;
-import org.apache.hadoop.mapreduce.lib.input.FileSplit;
-import org.openrdf.model.Statement;
-import org.openrdf.rio.*;
-
-import java.io.IOException;
-import java.util.concurrent.BlockingQueue;
-import java.util.concurrent.LinkedBlockingQueue;
-
-/**
- * Reads files in any supported rdf serialization and converts them to statements.
- * Class RdfFileInputFormat
- * Date: May 16, 2011
- * Time: 2:11:24 PM
- */
-public class RdfFileInputFormat extends FileInputFormat<LongWritable, BytesWritable> {
-
-    public static final String RDF_FILE_FORMAT = "mvm.mmrts.rdf.cloudbase.sail.mr.fileinput.rdfformat";
-
-    @Override
-    public RecordReader<LongWritable, BytesWritable> createRecordReader(InputSplit inputSplit,
-                                                                            TaskAttemptContext taskAttemptContext)
-            throws IOException, InterruptedException {
-        return new RdfFileRecordReader();
-    }
-
-    private class RdfFileRecordReader extends RecordReader<LongWritable, BytesWritable> implements RDFHandler {
-
-        boolean closed = false;
-        long count = 0;
-        BlockingQueue<BytesWritable> queue = new LinkedBlockingQueue<BytesWritable>();
-        int total = 0;
-
-        @Override
-        public void initialize(InputSplit inputSplit, TaskAttemptContext taskAttemptContext) throws IOException, InterruptedException {
-            FileSplit fileSplit = (FileSplit) inputSplit;
-            Configuration conf = taskAttemptContext.getConfiguration();
-            String rdfForm_s = conf.get(RDF_FILE_FORMAT, RDFFormat.RDFXML.getName());
-            RDFFormat rdfFormat = RDFFormat.valueOf(rdfForm_s);
-
-            Path file = fileSplit.getPath();
-            FileSystem fs = file.getFileSystem(conf);
-            FSDataInputStream fileIn = fs.open(fileSplit.getPath());
-
-            RDFParser rdfParser = Rio.createParser(rdfFormat);
-            rdfParser.setRDFHandler(this);
-            try {
-                rdfParser.parse(fileIn, "");
-            } catch (Exception e) {
-                throw new IOException(e);
-            }
-            fileIn.close();
-            total = queue.size();
-            //TODO: Make this threaded so that you don't hold too many statements before sending them
-        }
-
-        @Override
-        public boolean nextKeyValue() throws IOException, InterruptedException {
-            return queue.size() > 0;
-        }
-
-        @Override
-        public LongWritable getCurrentKey() throws IOException, InterruptedException {
-            return new LongWritable(count++);
-        }
-
-        @Override
-        public BytesWritable getCurrentValue() throws IOException, InterruptedException {
-            return queue.poll();
-        }
-
-        @Override
-        public float getProgress() throws IOException, InterruptedException {
-            return ((float) (total - queue.size())) / ((float) total);
-        }
-
-        @Override
-        public void close() throws IOException {
-            closed = true;
-        }
-
-        @Override
-        public void startRDF() throws RDFHandlerException {
-        }
-
-        @Override
-        public void endRDF() throws RDFHandlerException {
-        }
-
-        @Override
-        public void handleNamespace(String s, String s1) throws RDFHandlerException {
-        }
-
-        @Override
-        public void handleStatement(Statement statement) throws RDFHandlerException {
-            try {
-                byte[] stmt_bytes = RdfIO.writeStatement(statement, true);
-                queue.add(new BytesWritable(stmt_bytes));
-            } catch (IOException e) {
-                throw new RDFHandlerException(e);
-            }
-        }
-
-        @Override
-        public void handleComment(String s) throws RDFHandlerException {
-        }
-    }
-//
-//    public static RDFParser createRdfParser(RDFFormat rdfFormat) {
-//        if (RDFFormat.RDFXML.equals(rdfFormat)) {
-//            return new RDFXMLParserFactory().getParser();
-//        } else if (RDFFormat.N3.equals(rdfFormat)) {
-//            return new N3ParserFactory().getParser();
-//        } else if (RDFFormat.NTRIPLES.equals(rdfFormat)) {
-//            return new NTriplesParserFactory().getParser();
-//        } else if (RDFFormat.TRIG.equals(rdfFormat)) {
-//            return new TriGParserFactory().getParser();
-//        } else if (RDFFormat.TRIX.equals(rdfFormat)) {
-//            return new TriXParserFactory().getParser();
-//        } else if (RDFFormat.TURTLE.equals(rdfFormat)) {
-//            return new TurtleParserFactory().getParser();
-//        }
-//        throw new IllegalArgumentException("Unknown RDFFormat[" + rdfFormat + "]");
-//    }
-//
-//    public static RDFWriter createRdfWriter(RDFFormat rdfFormat, OutputStream os) {
-//        if (RDFFormat.RDFXML.equals(rdfFormat)) {
-//            return new RDFXMLWriterFactory().getWriter(os);
-//        } else if (RDFFormat.N3.equals(rdfFormat)) {
-//            return new N3WriterFactory().getWriter(os);
-//        } else if (RDFFormat.NTRIPLES.equals(rdfFormat)) {
-//            return new NTriplesWriterFactory().getWriter(os);
-//        } else if (RDFFormat.TRIG.equals(rdfFormat)) {
-//            return new TriGWriterFactory().getWriter(os);
-//        } else if (RDFFormat.TRIX.equals(rdfFormat)) {
-//            return new TriXWriterFactory().getWriter(os);
-//        } else if (RDFFormat.TURTLE.equals(rdfFormat)) {
-//            return new TurtleWriterFactory().getWriter(os);
-//        }
-//        throw new IllegalArgumentException("Unknown RDFFormat[" + rdfFormat + "]");
-//    }
-
-}
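
The record reader above hands all serialization handling to Sesame's Rio:
create a parser for the requested RDFFormat, attach an RDFHandler, and
collect each Statement as it is produced. A minimal sketch of the same
pattern outside MapReduce, assuming the Sesame 2.x openrdf API used
throughout this code (RDFHandlerBase is its no-op handler base class; the
file path is illustrative):

    import java.io.FileInputStream;
    import java.io.InputStream;
    import org.openrdf.model.Statement;
    import org.openrdf.rio.RDFFormat;
    import org.openrdf.rio.RDFParser;
    import org.openrdf.rio.Rio;
    import org.openrdf.rio.helpers.RDFHandlerBase;

    public class RioParseSketch {
        public static void main(String[] args) throws Exception {
            RDFParser parser = Rio.createParser(RDFFormat.RDFXML);
            parser.setRDFHandler(new RDFHandlerBase() {
                @Override
                public void handleStatement(Statement st) {
                    // One callback per parsed triple.
                    System.out.println(st.getSubject() + " " + st.getPredicate()
                            + " " + st.getObject());
                }
            });
            try (InputStream in = new FileInputStream("input.rdf")) {
                parser.parse(in, ""); // empty base URI, as in the reader above
            }
        }
    }

Note that the reader queues every parsed statement before the first
nextKeyValue() call; the inline TODO flags this as a memory risk for large
files.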

http://git-wip-us.apache.org/repos/asf/incubator-rya/blob/80faf06d/partition/mr.partition.rdf/src/main/java/mvm/mmrts/rdf/partition/mr/fileinput/RdfFileInputToCloudbaseTool.java
----------------------------------------------------------------------
diff --git a/partition/mr.partition.rdf/src/main/java/mvm/mmrts/rdf/partition/mr/fileinput/RdfFileInputToCloudbaseTool.java b/partition/mr.partition.rdf/src/main/java/mvm/mmrts/rdf/partition/mr/fileinput/RdfFileInputToCloudbaseTool.java
deleted file mode 100644
index 12c1a4e..0000000
--- a/partition/mr.partition.rdf/src/main/java/mvm/mmrts/rdf/partition/mr/fileinput/RdfFileInputToCloudbaseTool.java
+++ /dev/null
@@ -1,210 +0,0 @@
-package mvm.mmrts.rdf.partition.mr.fileinput;
-
-import cloudbase.core.client.mapreduce.CloudbaseOutputFormat;
-import cloudbase.core.data.Mutation;
-import com.google.common.io.ByteStreams;
-import mvm.mmrts.rdf.partition.shard.DateHashModShardValueGenerator;
-import mvm.mmrts.rdf.partition.utils.RdfIO;
-import org.apache.hadoop.conf.Configuration;
-import org.apache.hadoop.fs.Path;
-import org.apache.hadoop.io.BytesWritable;
-import org.apache.hadoop.io.LongWritable;
-import org.apache.hadoop.io.Text;
-import org.apache.hadoop.mapreduce.Job;
-import org.apache.hadoop.mapreduce.Mapper;
-import org.apache.hadoop.mapreduce.Reducer;
-import org.apache.hadoop.util.Tool;
-import org.apache.hadoop.util.ToolRunner;
-import org.openrdf.model.Resource;
-import org.openrdf.model.Statement;
-import org.openrdf.model.impl.ValueFactoryImpl;
-
-import java.io.IOException;
-import java.util.Date;
-
-import static mvm.mmrts.rdf.partition.PartitionConstants.*;
-import static mvm.mmrts.rdf.partition.PartitionConstants.EMPTY_VALUE;
-import static mvm.mmrts.rdf.partition.utils.RdfIO.writeStatement;
-import static mvm.mmrts.rdf.partition.utils.RdfIO.writeValue;
-
-/**
- * Do bulk import of rdf files
- * Class RdfFileInputToCloudbaseTool
- * Date: May 16, 2011
- * Time: 3:12:16 PM
- */
-public class RdfFileInputToCloudbaseTool implements Tool {
-
-    public static final String CB_USERNAME_PROP = "cb.username";
-    public static final String CB_PWD_PROP = "cb.pwd";
-    public static final String CB_SERVER_PROP = "cb.server";
-    public static final String CB_PORT_PROP = "cb.port";
-    public static final String CB_INSTANCE_PROP = "cb.instance";
-    public static final String CB_TTL_PROP = "cb.ttl";
-    public static final String CB_TABLE_PROP = "cb.table";
-
-
-    private Configuration conf;
-
-    private String userName = "root";
-    private String pwd = "password";
-    private String instance = "stratus";
-    private String server = "10.40.190.113";
-    private String port = "2181";
-    private String table = "partitionRdf";
-
-
-    public Configuration getConf() {
-        return conf;
-    }
-
-    @Override
-    public void setConf(Configuration conf) {
-        this.conf = conf;
-    }
-
-    public static void main(String[] args) {
-        try {
-            ToolRunner.run(new Configuration(), new RdfFileInputToCloudbaseTool(), args);
-        } catch (Exception e) {
-            e.printStackTrace();
-        }
-    }
-
-    public long runJob(String[] args) throws IOException, ClassNotFoundException, InterruptedException {
-        //faster
-        conf.setBoolean("mapred.map.tasks.speculative.execution", false);
-        conf.setBoolean("mapred.reduce.tasks.speculative.execution", false);
-        conf.set("io.sort.mb", "256");
-
-        server = conf.get(CB_SERVER_PROP, server);
-        port = conf.get(CB_PORT_PROP, port);
-        instance = conf.get(CB_INSTANCE_PROP, instance);
-        userName = conf.get(CB_USERNAME_PROP, userName);
-        pwd = conf.get(CB_PWD_PROP, pwd);
-        table = conf.get(CB_TABLE_PROP, table);
-        conf.set(CB_TABLE_PROP, table);
-
-        Job job = new Job(conf);
-        job.setJarByClass(RdfFileInputToCloudbaseTool.class);
-
-        // set up rdf file input
-        job.setInputFormatClass(RdfFileInputFormat.class);
-        RdfFileInputFormat.addInputPath(job, new Path(args[0]));
-
-        // set input output of the particular job
-        job.setMapOutputKeyClass(Text.class);
-        job.setMapOutputValueClass(BytesWritable.class);
-        job.setOutputKeyClass(Text.class);
-        job.setOutputValueClass(Mutation.class);
-
-        job.setOutputFormatClass(CloudbaseOutputFormat.class);
-        CloudbaseOutputFormat.setOutputInfo(job, userName, pwd.getBytes(), true, table);
-        CloudbaseOutputFormat.setZooKeeperInstance(job, instance, server + ":" + port);
-
-        // set mapper and reducer classes
-        job.setMapperClass(OutSubjStmtMapper.class);
-        job.setReducerClass(StatementToMutationReducer.class);
-
-        // set output
-//        Path outputDir = new Path("/temp/sparql-out/testout");
-//        FileSystem dfs = FileSystem.get(outputDir.toUri(), conf);
-//        if (dfs.exists(outputDir))
-//            dfs.delete(outputDir, true);
-//
-//        FileOutputFormat.setOutputPath(job, outputDir);
-
-        // Submit the job
-        Date startTime = new Date();
-        System.out.println("Job started: " + startTime);
-        int exitCode = job.waitForCompletion(true) ? 0 : 1;
-
-        if (exitCode == 0) {
-            Date end_time = new Date();
-            System.out.println("Job ended: " + end_time);
-            System.out.println("The job took "
-                    + (end_time.getTime() - startTime.getTime()) / 1000
-                    + " seconds.");
-            return job
-                    .getCounters()
-                    .findCounter("org.apache.hadoop.mapred.Task$Counter",
-                            "REDUCE_OUTPUT_RECORDS").getValue();
-        } else {
-            System.out.println("Job Failed!!!");
-        }
-
-        return -1;
-    }
-
-    @Override
-    public int run(String[] args) throws Exception {
-        runJob(args);
-        return 0;
-    }
-
-    public static class OutSubjStmtMapper extends Mapper<LongWritable, BytesWritable, Text, BytesWritable> {
-
-        public OutSubjStmtMapper() {
-        }
-
-        @Override
-        protected void setup(Context context) throws IOException, InterruptedException {
-            super.setup(context);
-        }
-
-        @Override
-        protected void map(LongWritable key, BytesWritable value, Context context) throws IOException, InterruptedException {
-            Statement statement = RdfIO.readStatement(ByteStreams.newDataInput(value.getBytes()), ValueFactoryImpl.getInstance());
-            context.write(new Text(new String(writeValue(statement.getSubject())) + FAMILY_DELIM_STR), value);
-        }
-
-    }
-
-    public static class StatementToMutationReducer extends Reducer<Text, BytesWritable, Text, Mutation> {
-        private Text outputTable;
-        private DateHashModShardValueGenerator gen;
-
-        @Override
-        protected void setup(Context context) throws IOException, InterruptedException {
-            super.setup(context);
-            outputTable = new Text(context.getConfiguration().get(CB_TABLE_PROP, null));
-            gen = new DateHashModShardValueGenerator();
-        }
-
-        @Override
-        protected void reduce(Text key, Iterable<BytesWritable> values, Context context) throws IOException, InterruptedException {
-            Resource subject = (Resource) RdfIO.readValue(ByteStreams.newDataInput(key.getBytes()), ValueFactoryImpl.getInstance(), FAMILY_DELIM);
-            byte[] subj_bytes = writeValue(subject);
-            String shard = gen.generateShardValue(subject);
-            Text shard_txt = new Text(shard);
-
-            /**
-             * Triple ->
-             *   <subject>  <shard> :
-             *   <shard>    event : <subject>\0<predicate>\0<object>\0
-             *   <shard>    index : <predicate>\1<object>\0
-             */
-            Mutation m_subj = new Mutation(shard_txt);
-            for (BytesWritable stmt_bytes : values) {
-                Statement stmt = RdfIO.readStatement(ByteStreams.newDataInput(stmt_bytes.getBytes()), ValueFactoryImpl.getInstance());
-                m_subj.put(DOC, new Text(writeStatement(stmt, true)), EMPTY_VALUE);
-                m_subj.put(INDEX, new Text(writeStatement(stmt, false)), EMPTY_VALUE);
-            }
-
-            /**
-             * TODO: Is this right?
-             * If the subject does not have any authorizations specified, then anyone can access it.
-             * But the true authorization check will happen at the predicate/object level, which means that
-             * the set returned will only be what the person is authorized to see.  The shard lookup table has to
-             * have the lowest level authorization of all the predicate/object authorizations; otherwise,
-             * a user may not be able to see the correct document.
-             */
-            Mutation m_shard = new Mutation(new Text(subj_bytes));
-            m_shard.put(shard_txt, EMPTY_TXT, EMPTY_VALUE);
-
-            context.write(outputTable, m_subj);
-            context.write(outputTable, m_shard);
-        }
-    }
-}
-
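
Concretely, per the layout comment in the reducer above, a triple <s> <p> <o>
whose subject hashes to shard 20110516_7 produces three entries (values
illustrative; \0 and \1 are the statement field delimiters):

    row          column family : column qualifier
    ----------   ------------------------------------
    20110516_7   event : <s>\0<p>\0<o>\0
    20110516_7   index : <p>\1<o>\0
    <s>          20110516_7 : (empty)

The first two rows serve document and predicate/object lookups within the
shard; the third is the reverse pointer from subject to shard that the shard
lookup table provides.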

http://git-wip-us.apache.org/repos/asf/incubator-rya/blob/80faf06d/partition/mr.partition.rdf/src/main/java/mvm/mmrts/rdf/partition/mr/fileinput/RdfFileInputToFileTool.java
----------------------------------------------------------------------
diff --git a/partition/mr.partition.rdf/src/main/java/mvm/mmrts/rdf/partition/mr/fileinput/RdfFileInputToFileTool.java b/partition/mr.partition.rdf/src/main/java/mvm/mmrts/rdf/partition/mr/fileinput/RdfFileInputToFileTool.java
deleted file mode 100644
index e677d12..0000000
--- a/partition/mr.partition.rdf/src/main/java/mvm/mmrts/rdf/partition/mr/fileinput/RdfFileInputToFileTool.java
+++ /dev/null
@@ -1,159 +0,0 @@
-package mvm.mmrts.rdf.partition.mr.fileinput;
-
-import com.google.common.io.ByteStreams;
-import mvm.mmrts.rdf.partition.utils.RdfIO;
-import org.apache.hadoop.conf.Configuration;
-import org.apache.hadoop.fs.FileSystem;
-import org.apache.hadoop.fs.Path;
-import org.apache.hadoop.io.BytesWritable;
-import org.apache.hadoop.io.LongWritable;
-import org.apache.hadoop.io.NullWritable;
-import org.apache.hadoop.io.Text;
-import org.apache.hadoop.mapreduce.Job;
-import org.apache.hadoop.mapreduce.Mapper;
-import org.apache.hadoop.mapreduce.Reducer;
-import org.apache.hadoop.mapreduce.lib.output.FileOutputFormat;
-import org.apache.hadoop.mapreduce.lib.output.TextOutputFormat;
-import org.apache.hadoop.util.Tool;
-import org.apache.hadoop.util.ToolRunner;
-import org.openrdf.model.Statement;
-import org.openrdf.model.impl.ValueFactoryImpl;
-
-import java.io.IOException;
-import java.util.Date;
-
-import static mvm.mmrts.rdf.partition.PartitionConstants.FAMILY_DELIM_STR;
-import static mvm.mmrts.rdf.partition.utils.RdfIO.writeValue;
-
-/**
- * Bulk-convert rdf files into serialized statement files
- * Class RdfFileInputToFileTool
- * Date: May 16, 2011
- * Time: 3:12:16 PM
- */
-public class RdfFileInputToFileTool implements Tool {
-
-    private Configuration conf;
-
-    public Configuration getConf() {
-        return conf;
-    }
-
-    @Override
-    public void setConf(Configuration conf) {
-        this.conf = conf;
-    }
-
-    public static void main(String[] args) {
-        try {
-            ToolRunner.run(new Configuration(), new RdfFileInputToFileTool(), args);
-        } catch (Exception e) {
-            e.printStackTrace();
-        }
-    }
-
-    public long runJob(String[] args) throws IOException, ClassNotFoundException, InterruptedException {
-        if (args.length < 2)
-            throw new IllegalArgumentException("Usage: RdfFileInputToFileTool <input directory> <output directory>");
-
-        //faster
-        conf.setBoolean("mapred.map.tasks.speculative.execution", false);
-        conf.setBoolean("mapred.reduce.tasks.speculative.execution", false);
-        conf.set("io.sort.mb", "256");
-
-        Job job = new Job(conf);
-        job.setJarByClass(RdfFileInputToFileTool.class);
-
-        // set up rdf file input
-        job.setInputFormatClass(RdfFileInputFormat.class);
-        RdfFileInputFormat.addInputPath(job, new Path(args[0]));
-
-        // set input output of the particular job
-        job.setMapOutputKeyClass(Text.class);
-        job.setMapOutputValueClass(Text.class);
-        job.setOutputKeyClass(NullWritable.class);
-        job.setOutputValueClass(Text.class);
-
-
-        // set mapper and reducer classes
-        job.setMapperClass(StmtToBytesMapper.class);
-        job.setReducerClass(StmtBytesReducer.class);
-
-        // set output
-        job.setOutputFormatClass(TextOutputFormat.class);
-        Path outputDir = new Path(args[1]);
-        FileSystem dfs = FileSystem.get(outputDir.toUri(), conf);
-        if (dfs.exists(outputDir))
-            dfs.delete(outputDir, true);
-
-        FileOutputFormat.setOutputPath(job, outputDir);
-
-        // Submit the job
-        Date startTime = new Date();
-        System.out.println("Job started: " + startTime);
-        int exitCode = job.waitForCompletion(true) ? 0 : 1;
-
-        if (exitCode == 0) {
-            Date end_time = new Date();
-            System.out.println("Job ended: " + end_time);
-            System.out.println("The job took "
-                    + (end_time.getTime() - startTime.getTime()) / 1000
-                    + " seconds.");
-            return job
-                    .getCounters()
-                    .findCounter("org.apache.hadoop.mapred.Task$Counter",
-                            "REDUCE_OUTPUT_RECORDS").getValue();
-        } else {
-            System.out.println("Job Failed!!!");
-        }
-
-        return -1;
-    }
-
-    @Override
-    public int run(String[] args) throws Exception {
-        runJob(args);
-        return 0;
-    }
-
-    public static class StmtToBytesMapper extends Mapper<LongWritable, BytesWritable, Text, Text> {
-
-        Text outKey = new Text();
-        Text outValue = new Text();
-
-        public StmtToBytesMapper() {
-        }
-
-        @Override
-        protected void setup(Context context) throws IOException, InterruptedException {
-            super.setup(context);
-        }
-
-        @Override
-        protected void map(LongWritable key, BytesWritable value, Context context) throws IOException, InterruptedException {
-            Statement statement = RdfIO.readStatement(ByteStreams.newDataInput(value.getBytes()), ValueFactoryImpl.getInstance());
-            outKey.set(new String(writeValue(statement.getSubject())) + FAMILY_DELIM_STR);
-            outValue.set(value.getBytes());
-            context.write(outKey, outValue);
-        }
-
-    }
-
-    public static class StmtBytesReducer extends Reducer<Text, Text, NullWritable, Text> {
-
-        NullWritable outKey = NullWritable.get();
-
-        @Override
-        protected void setup(Context context) throws IOException, InterruptedException {
-            super.setup(context);
-        }
-
-        @Override
-        protected void reduce(Text key, Iterable<Text> values, Context context) throws IOException, InterruptedException {
-            for (Text stmt_txt : values) {
-                context.write(outKey, stmt_txt);
-            }
-        }
-    }
-}
-


[37/56] [abbrv] incubator-rya git commit: RYA-7 POM and License Clean-up for Apache Move

Posted by mi...@apache.org.
http://git-wip-us.apache.org/repos/asf/incubator-rya/blob/80faf06d/extras/indexing/src/main/java/mvm/rya/indexing/accumulo/freetext/query/QueryParserConstants.java
----------------------------------------------------------------------
diff --git a/extras/indexing/src/main/java/mvm/rya/indexing/accumulo/freetext/query/QueryParserConstants.java b/extras/indexing/src/main/java/mvm/rya/indexing/accumulo/freetext/query/QueryParserConstants.java
index 5403763..caac3a9 100644
--- a/extras/indexing/src/main/java/mvm/rya/indexing/accumulo/freetext/query/QueryParserConstants.java
+++ b/extras/indexing/src/main/java/mvm/rya/indexing/accumulo/freetext/query/QueryParserConstants.java
@@ -2,26 +2,907 @@
 package mvm.rya.indexing.accumulo.freetext.query;
 
 /*
- * #%L
- * mvm.rya.indexing.accumulo
- * %%
- * Copyright (C) 2014 Rya
- * %%
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * You may obtain a copy of the License at
- * 
- *      http://www.apache.org/licenses/LICENSE-2.0
- * 
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- * #L%
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ * 
+ *   http://www.apache.org/licenses/LICENSE-2.0
+ * 
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied.  See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
  */
 
 
+ * with the License.  You may obtain a copy of the License at
+ * 
+ *   http://www.apache.org/licenses/LICENSE-2.0
+ * 
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied.  See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+
+
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ * 
+ *   http://www.apache.org/licenses/LICENSE-2.0
+ * 
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied.  See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+
+
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ * 
+ *   http://www.apache.org/licenses/LICENSE-2.0
+ * 
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied.  See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+
+
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ * 
+ *   http://www.apache.org/licenses/LICENSE-2.0
+ * 
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied.  See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+
+
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ * 
+ *   http://www.apache.org/licenses/LICENSE-2.0
+ * 
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied.  See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+
+
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ * 
+ *   http://www.apache.org/licenses/LICENSE-2.0
+ * 
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied.  See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+
+
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ * 
+ *   http://www.apache.org/licenses/LICENSE-2.0
+ * 
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied.  See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+
+
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ * 
+ *   http://www.apache.org/licenses/LICENSE-2.0
+ * 
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied.  See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+
+
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ * 
+ *   http://www.apache.org/licenses/LICENSE-2.0
+ * 
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied.  See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+
+
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ * 
+ *   http://www.apache.org/licenses/LICENSE-2.0
+ * 
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied.  See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+
+
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ * 
+ *   http://www.apache.org/licenses/LICENSE-2.0
+ * 
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied.  See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+
+
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ * 
+ *   http://www.apache.org/licenses/LICENSE-2.0
+ * 
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied.  See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+
+
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ * 
+ *   http://www.apache.org/licenses/LICENSE-2.0
+ * 
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied.  See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+
+
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ * 
+ *   http://www.apache.org/licenses/LICENSE-2.0
+ * 
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied.  See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+
+
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ * 
+ *   http://www.apache.org/licenses/LICENSE-2.0
+ * 
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied.  See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+
+
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ * 
+ *   http://www.apache.org/licenses/LICENSE-2.0
+ * 
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied.  See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+
+
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ * 
+ *   http://www.apache.org/licenses/LICENSE-2.0
+ * 
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied.  See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+
+
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ * 
+ *   http://www.apache.org/licenses/LICENSE-2.0
+ * 
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied.  See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+
+
+
+
 /**
  * Token literal values and constants.
  * Generated by org.javacc.parser.OtherFilesGen#start()
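
(For readers unfamiliar with JavaCC output: a generated *Constants interface like this one carries an int kind for every token plus a String[] tokenImage table mapping kinds back to printable names. A hypothetical lookup, assuming only the stock JavaCC members:)

    // tokenImage[kind] yields the display form of a token kind;
    // EOF is always kind 0 in JavaCC-generated constants interfaces.
    String eofName = QueryParserConstants.tokenImage[QueryParserConstants.EOF]; // "<EOF>"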

http://git-wip-us.apache.org/repos/asf/incubator-rya/blob/80faf06d/extras/indexing/src/main/java/mvm/rya/indexing/accumulo/freetext/query/QueryParserTokenManager.java
----------------------------------------------------------------------
diff --git a/extras/indexing/src/main/java/mvm/rya/indexing/accumulo/freetext/query/QueryParserTokenManager.java b/extras/indexing/src/main/java/mvm/rya/indexing/accumulo/freetext/query/QueryParserTokenManager.java
index 68409cc..5c92c7b 100644
--- a/extras/indexing/src/main/java/mvm/rya/indexing/accumulo/freetext/query/QueryParserTokenManager.java
+++ b/extras/indexing/src/main/java/mvm/rya/indexing/accumulo/freetext/query/QueryParserTokenManager.java
@@ -2,24 +2,23 @@
 package mvm.rya.indexing.accumulo.freetext.query;
 
 /*
- * #%L
- * mvm.rya.indexing.accumulo
- * %%
- * Copyright (C) 2014 Rya
- * %%
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * You may obtain a copy of the License at
- * 
- *      http://www.apache.org/licenses/LICENSE-2.0
- * 
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- * #L%
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ * 
+ *   http://www.apache.org/licenses/LICENSE-2.0
+ * 
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied.  See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
  */

 import java.io.StringReader;
 
 /** Token Manager. */


[15/56] [abbrv] incubator-rya git commit: RYA-7 POM and License Clean-up for Apache Move

Posted by mi...@apache.org.
http://git-wip-us.apache.org/repos/asf/incubator-rya/blob/80faf06d/pom.xml
----------------------------------------------------------------------
diff --git a/pom.xml b/pom.xml
index 70cfb60..5f2164f 100644
--- a/pom.xml
+++ b/pom.xml
@@ -1,122 +1,203 @@
 <?xml version="1.0" encoding="utf-8"?>
-<project xmlns="http://maven.apache.org/POM/4.0.0" xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance" xsi:schemaLocation="http://maven.apache.org/POM/4.0.0 http://maven.apache.org/xsd/maven-4.0.0.xsd">
+<!--
+Licensed to the Apache Software Foundation (ASF) under one
+or more contributor license agreements.  See the NOTICE file
+distributed with this work for additional information
+regarding copyright ownership.  The ASF licenses this file
+to you under the Apache License, Version 2.0 (the
+"License"); you may not use this file except in compliance
+with the License.  You may obtain a copy of the License at
+
+  http://www.apache.org/licenses/LICENSE-2.0
 
+Unless required by applicable law or agreed to in writing,
+software distributed under the License is distributed on an
+"AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+KIND, either express or implied.  See the License for the
+specific language governing permissions and limitations
+under the License.
+-->
+<project xmlns="http://maven.apache.org/POM/4.0.0" xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance" xsi:schemaLocation="http://maven.apache.org/POM/4.0.0 http://maven.apache.org/xsd/maven-4.0.0.xsd">
     <modelVersion>4.0.0</modelVersion>
-    <groupId>mvm.rya</groupId>
-    <artifactId>parent</artifactId>
+    <parent>
+        <groupId>org.apache</groupId>
+        <artifactId>apache</artifactId>
+        <version>17</version>
+    </parent>
+
+    <groupId>org.apache.rya</groupId>
+    <artifactId>rya-project</artifactId>
     <version>3.2.10-SNAPSHOT</version>
+
     <packaging>pom</packaging>
-    <name>${project.groupId}.${project.artifactId}</name>
+
+    <name>Apache Rya Project</name>
+    <!-- this is the year of inception at ASF -->
+    <inceptionYear>2015</inceptionYear>
+    <organization>
+        <name>The Apache Software Foundation</name>
+        <url>http://www.apache.org/</url>
+    </organization>
     <licenses>
-      <license>
-        <name>Apache License, Version 2.0</name>
-        <url>http://www.apache.org/licenses/LICENSE-2.0</url>
-      </license>
+        <license>
+            <name>Apache License, Version 2.0</name>
+            <url>http://www.apache.org/licenses/LICENSE-2.0</url>
+        </license>
     </licenses>
+    <mailingLists>
+        <mailingList>
+            <name>Dev</name>
+            <subscribe>dev-subscribe@rya.incubator.apache.org</subscribe>
+            <unsubscribe>dev-unsubscribe@rya.incubator.apache.org</unsubscribe>
+            <post>dev@rya.incubator.apache.org</post>
+            <archive>http://mail-archives.apache.org/mod_mbox/rya-dev</archive>
+        </mailingList>
+    </mailingLists>
     <prerequisites>
-      <maven>${maven.min-version}</maven>
+        <maven>${maven.min-version}</maven>
     </prerequisites>
     <modules>
         <module>common</module>
-        <module>iterators</module>
         <module>dao</module>
         <module>extras</module>
         <module>osgi</module>
         <module>pig</module>
         <module>sail</module>
-        <module>utils</module>
         <module>web</module>
     </modules>
     <properties>
-        <accumulo.version>1.6.2</accumulo.version>
-        <blueprints.version>2.5.0</blueprints.version>
-        <commons.lang.version>2.5</commons.lang.version>
-        <gmaven.version>1.3</gmaven.version>
-        <groovy.version>2.3.4</groovy.version>
-        <guava.version>13.0</guava.version>
-        <hadoop.version>2.5.0-cdh5.3.3</hadoop.version>
-        <hadoop.core.version>2.5.0-mr1-cdh5.3.3</hadoop.core.version>
-        <junit.version>4.8.1</junit.version>
-        <karaf.version>2.2.4</karaf.version>
-        <mango.version>1.2.0</mango.version>
-        <!-- the maven-release-plugin makes this recommendation, due to plugin bugs -->
-        <maven.min-version>3.0.4</maven.min-version>
-        <openrdf.sesame.version>2.7.6</openrdf.sesame.version>
+        <openrdf.sesame.version>2.7.6</openrdf.sesame.version> <!-- Newest: 4.0.0 -->
+        <!-- Cannot upgrade openrdf.sesame beyond 2.7.6 until RYA-9 is resolved -->
+
+        <accumulo.version>1.6.4</accumulo.version> <!-- Newest: 1.7.0 -->
+        <hadoop.version>2.5.2</hadoop.version> <!-- Newest: 2.7.1 -->
+
+        <pig.version>0.9.2</pig.version> <!-- Newest: 0.15.0 -->
+
+        <geomesa.version>1.1.0-rc.6</geomesa.version> <!-- Newest: 1.1.0-rc.6 -->
+        <lucene.version>3.6.2</lucene.version> <!-- Newest: 5.3.1 -->
+        <joda-time.version>2.1</joda-time.version> <!-- Newest: 2.9.1 -->
+
+        <mongodb.version>2.13.3</mongodb.version> <!-- Newest: 3.1.1 -->
+        <embed.mongo.version>1.50.0</embed.mongo.version> <!-- Newest: 1.50.0 -->
+
+        <blueprints.version>2.5.0</blueprints.version> <!-- Newest: Apache 3.1.0-incubating -->
+
+        <commons.lang.version>2.6</commons.lang.version> <!-- Newest: 2.6 -->
+        <commons.codec.version>1.10</commons.codec.version> <!-- Newest: 1.10 -->
+        <commons.pool.version>1.6</commons.pool.version> <!-- Newest: 1.6 -->
+
+        <gmaven.version>1.3</gmaven.version> <!-- Newest: 1.5 -->
+        <groovy.version>2.3.11</groovy.version> <!-- Newest: 2.4.5 -->
+
+        <guava.version>18.0</guava.version> <!-- Newest: 18.0 -->
+
+        <karaf.version>2.2.11</karaf.version> <!-- Newest: 4.0.3 -->
+        <mango.version>1.2.0</mango.version> <!-- Newest: 1.4.0 -->
+        <antlr-runtime.version>3.4</antlr-runtime.version> <!-- Newest: 3.5.2 -->
+        <ehcache.version>1.7.2</ehcache.version> <!-- Newest: 2.6.11 -->
+        <hamcrest.version>1.3</hamcrest.version> <!-- Newest: 1.3 -->
+        <camel.version>2.7.5</camel.version> <!-- Newest: 2.16.1 -->
+
+        <spring.version>3.2.15.RELEASE</spring.version> <!-- Newest: 4.2.3.RELEASE -->
+        <spring.hadoop.version>1.0.2.RELEASE</spring.hadoop.version> <!-- Newest: 2.2.1.RELEASE -->
+        <spring.shell.version>1.1.0.RELEASE</spring.shell.version> <!-- Newest: 1.1.0.RELEASE -->
+
+        <junit.version>4.12</junit.version> <!-- Newest: 4.12 -->
+        <mockito.version>1.10.19</mockito.version> <!-- Newest: 1.10.19 -->
+        <mrunit.version>1.1.0</mrunit.version> <!-- Newest: 1.1.0 -->
+        <slf4j.version>1.6.6</slf4j.version> <!-- Newest: 1.7.13 -->
+
         <project.build.sourceEncoding>UTF-8</project.build.sourceEncoding>
         <project.reporting.outputEncoding>UTF-8</project.reporting.outputEncoding>
-        <slf4j.version>1.6.4</slf4j.version>
-        <zookeeper.version>3.4.5-cdh5.3.3</zookeeper.version>
-        <joda-time.version>2.1</joda-time.version>
-        <pig.version>0.9.2</pig.version>
-        <antlr-runtime.version>3.4-beta4</antlr-runtime.version>
-        <ehcache.version>1.7.1</ehcache.version>
-        <geomesa.version>1.1.0-rc.4</geomesa.version>
+
+        <!-- the maven-release-plugin makes this recommendation, due to plugin bugs -->
+        <maven.min-version>3.0.4</maven.min-version>
     </properties>
     <dependencyManagement>
         <dependencies>
             <dependency>
-                <groupId>mvm.rya</groupId>
+                <groupId>org.apache.rya</groupId>
                 <artifactId>rya.api</artifactId>
                 <version>${project.version}</version>
             </dependency>
             <dependency>
-                <groupId>mvm.rya</groupId>
-                <artifactId>rya.sail.impl</artifactId>
+                <groupId>org.apache.rya</groupId>
+                <artifactId>rya.sail</artifactId>
                 <version>${project.version}</version>
             </dependency>
             <dependency>
-                <groupId>mvm.rya</groupId>
+                <groupId>org.apache.rya</groupId>
                 <artifactId>accumulo.rya</artifactId>
                 <version>${project.version}</version>
             </dependency>
-           <dependency>
-                <groupId>mvm.rya</groupId>
+            <dependency>
+                <groupId>org.apache.rya</groupId>
                 <artifactId>mongodb.rya</artifactId>
                 <version>${project.version}</version>
             </dependency>
             <dependency>
-                <groupId>mvm.rya</groupId>
+                <groupId>org.apache.rya</groupId>
                 <artifactId>accumulo.utils</artifactId>
                 <version>${project.version}</version>
             </dependency>
             <dependency>
-                <groupId>mvm.rya</groupId>
-                <artifactId>accumulo.iterators</artifactId>
+                <groupId>org.apache.rya</groupId>
+                <artifactId>rya.prospector</artifactId>
                 <version>${project.version}</version>
             </dependency>
             <dependency>
-                <groupId>mvm.rya</groupId>
-                <artifactId>rya.prospector</artifactId>
+                <groupId>org.apache.rya</groupId>
+                <artifactId>rya.provenance</artifactId>
                 <version>${project.version}</version>
             </dependency>
             <dependency>
-                <groupId>org.apache.accumulo</groupId>
-                <artifactId>cloudtrace</artifactId>
-                <version>${accumulo.version}</version>
+                <groupId>org.apache.rya</groupId>
+                <artifactId>rya.indexing</artifactId>
+                <version>${project.version}</version>
+            </dependency>
+            <dependency>
+                <groupId>org.apache.rya</groupId>
+                <artifactId>rya.indexing</artifactId>
+                <classifier>accumulo-server</classifier>
+                <version>${project.version}</version>
+            </dependency>
+            <dependency>
+                <groupId>org.apache.rya</groupId>
+                <artifactId>rya.indexing</artifactId>
+                <classifier>map-reduce</classifier>
+                <version>${project.version}</version>
             </dependency>
+
             <dependency>
                 <groupId>org.apache.accumulo</groupId>
                 <artifactId>accumulo-core</artifactId>
                 <version>${accumulo.version}</version>
             </dependency>
+
             <dependency>
-                <groupId>org.apache.accumulo</groupId>
-                <artifactId>accumulo-start</artifactId>
-                <version>${accumulo.version}</version>
+                <groupId>org.apache.rya</groupId>
+                <artifactId>sesame-runtime-osgi</artifactId>
+                <version>${openrdf.sesame.version}</version>
             </dependency>
             <dependency>
-                <groupId>org.apache.thrift</groupId>
-                <artifactId>libthrift</artifactId>
-                <version>0.9.0</version>
+                <groupId>org.openrdf.sesame</groupId>
+                <artifactId>sesame-runtime</artifactId>
+                <version>${openrdf.sesame.version}</version>
             </dependency>
             <dependency>
-                <groupId>mvm.rya</groupId>
-                <artifactId>sesame-runtime-osgi</artifactId>
+                <groupId>org.openrdf.sesame</groupId>
+                <artifactId>sesame-model</artifactId>
                 <version>${openrdf.sesame.version}</version>
             </dependency>
             <dependency>
                 <groupId>org.openrdf.sesame</groupId>
-                <artifactId>sesame-runtime</artifactId>
+                <artifactId>sesame-query</artifactId>
+                <version>${openrdf.sesame.version}</version>
+            </dependency>
+            <dependency>
+                <groupId>org.openrdf.sesame</groupId>
+                <artifactId>sesame-queryalgebra-model</artifactId>
                 <version>${openrdf.sesame.version}</version>
             </dependency>
             <dependency>
@@ -125,7 +206,7 @@
                 <version>${openrdf.sesame.version}</version>
             </dependency>
             <dependency>
-                <groupId>org.op2.6.4enrdf.sesame</groupId>
+                <groupId>org.openrdf.sesame</groupId>
                 <artifactId>sesame-queryresultio-sparqlxml</artifactId>
                 <version>${openrdf.sesame.version}</version>
             </dependency>
@@ -150,60 +231,73 @@
                 </exclusions>
             </dependency>
             <dependency>
-                <groupId>com.google.guava</groupId>
-                <artifactId>guava</artifactId>
-                <version>${guava.version}</version>
+                <groupId>org.openrdf.sesame</groupId>
+                <artifactId>sesame-rio-ntriples</artifactId>
+                <version>${openrdf.sesame.version}</version>
             </dependency>
             <dependency>
-                <groupId>org.slf4j</groupId>
-                <artifactId>slf4j-api</artifactId>
-                <version>${slf4j.version}</version>
+                <groupId>org.openrdf.sesame</groupId>
+                <artifactId>sesame-rio-nquads</artifactId>
+                <version>${openrdf.sesame.version}</version>
             </dependency>
             <dependency>
-                <groupId>org.slf4j</groupId>
-                <artifactId>slf4j-log4j12</artifactId>
-                <version>${slf4j.version}</version>
+                <groupId>org.openrdf.sesame</groupId>
+                <artifactId>sesame-rio-trig</artifactId>
+                <version>${openrdf.sesame.version}</version>
             </dependency>
             <dependency>
-                <groupId>cloudtrace</groupId>
-                <artifactId>cloudtrace</artifactId>
-                <version>0.1.3</version>
+                <groupId>org.openrdf.sesame</groupId>
+                <artifactId>sesame-queryalgebra-evaluation</artifactId>
+                <version>${openrdf.sesame.version}</version>
             </dependency>
             <dependency>
-                <groupId>org.apache.thrift</groupId>
-                <artifactId>thrift</artifactId>
-                <version>0.3</version>
+                <groupId>org.openrdf.sesame</groupId>
+                <artifactId>sesame-queryresultio-sparqljson</artifactId>
+                <version>${openrdf.sesame.version}</version>
             </dependency>
             <dependency>
-                <groupId>org.apache.zookeeper</groupId>
-                <artifactId>zookeeper</artifactId>
-                <version>${zookeeper.version}</version>
+                <groupId>org.openrdf.sesame</groupId>
+                <artifactId>sesame-repository-api</artifactId>
+                <version>${openrdf.sesame.version}</version>
             </dependency>
+
             <dependency>
-                <groupId>org.apache.hadoop</groupId>
-                <artifactId>hadoop-common</artifactId>
-                <version>${hadoop.version}</version>
-                <!--<scope>provided</scope>-->
+                <groupId>com.google.guava</groupId>
+                <artifactId>guava</artifactId>
+                <version>${guava.version}</version>
             </dependency>
             <dependency>
-                <groupId>org.apache.hadoop</groupId>
-                <artifactId>hadoop-core</artifactId>
-                <version>${hadoop.core.version}</version>
-                <!--<scope>provided</scope>-->
+                <groupId>org.hamcrest</groupId>
+                <artifactId>hamcrest-all</artifactId>
+                <version>${hamcrest.version}</version>
             </dependency>
 
-            <!-- Test -->
             <dependency>
-                <groupId>org.mockito</groupId>
-                <artifactId>mockito-all</artifactId>
-                <version>1.8.0-rc2</version>
-                <scope>test</scope>
+                <groupId>org.apache.camel</groupId>
+                <artifactId>camel-core</artifactId>
+                <version>${camel.version}</version>
             </dependency>
             <dependency>
-                <groupId>junit</groupId>
-                <artifactId>junit</artifactId>
-                <version>${junit.version}</version>
-                <scope>test</scope>
+                <groupId>org.apache.camel</groupId>
+                <artifactId>camel-test</artifactId>
+                <version>${camel.version}</version>
+            </dependency>
+
+            <dependency>
+                <groupId>org.slf4j</groupId>
+                <artifactId>slf4j-api</artifactId>
+                <version>${slf4j.version}</version>
+            </dependency>
+            <dependency>
+                <groupId>org.slf4j</groupId>
+                <artifactId>slf4j-log4j12</artifactId>
+                <version>${slf4j.version}</version>
+            </dependency>
+
+            <dependency>
+                <groupId>org.apache.hadoop</groupId>
+                <artifactId>hadoop-common</artifactId>
+                <version>${hadoop.version}</version>
             </dependency>
 
             <dependency>
@@ -221,6 +315,22 @@
                 <artifactId>gremlin-groovy</artifactId>
                 <version>${blueprints.version}</version>
             </dependency>
+            <dependency>
+                <groupId>com.tinkerpop.rexster</groupId>
+                <artifactId>rexster-server</artifactId>
+                <version>${blueprints.version}</version>
+            </dependency>
+
+            <dependency>
+                <groupId>org.mongodb</groupId>
+                <artifactId>mongo-java-driver</artifactId>
+                <version>${mongodb.version}</version>
+            </dependency>
+            <dependency>
+                <groupId>de.flapdoodle.embed</groupId>
+                <artifactId>de.flapdoodle.embed.mongo</artifactId>
+                <version>${embed.mongo.version}</version>
+            </dependency>
 
             <!-- Groovy -->
             <dependency>
@@ -240,12 +350,95 @@
                 </exclusions>
             </dependency>
 
+            <!-- Spring -->
+            <dependency>
+                <groupId>org.springframework.shell</groupId>
+                <artifactId>spring-shell</artifactId>
+                <version>${spring.shell.version}</version>
+            </dependency>
+            <dependency>
+                <groupId>org.springframework</groupId>
+                <artifactId>spring-context</artifactId>
+                <version>${spring.version}</version>
+            </dependency>
+            <dependency>
+                <groupId>org.springframework</groupId>
+                <artifactId>spring-core</artifactId>
+                <version>${spring.version}</version>
+            </dependency>
+            <dependency>
+                <groupId>org.springframework</groupId>
+                <artifactId>spring-web</artifactId>
+                <version>${spring.version}</version>
+            </dependency>
+            <dependency>
+                <groupId>org.springframework</groupId>
+                <artifactId>spring-webmvc</artifactId>
+                <version>${spring.version}</version>
+            </dependency>
+            <dependency>
+                <groupId>org.springframework</groupId>
+                <artifactId>spring-beans</artifactId>
+                <version>${spring.version}</version>
+            </dependency>
+            <dependency>
+                <groupId>org.springframework</groupId>
+                <artifactId>spring-test</artifactId>
+                <version>${spring.version}</version>
+            </dependency>
+            <dependency>
+                <groupId>org.springframework.data</groupId>
+                <artifactId>spring-data-hadoop</artifactId>
+                <version>${spring.hadoop.version}</version>
+            </dependency>
+
+            <dependency>
+                <groupId>org.apache.karaf.shell</groupId>
+                <artifactId>org.apache.karaf.shell.console</artifactId>
+                <version>${karaf.version}</version>
+                <scope>provided</scope>
+            </dependency>
+
             <!-- Commons -->
             <dependency>
                 <groupId>commons-lang</groupId>
                 <artifactId>commons-lang</artifactId>
                 <version>${commons.lang.version}</version>
             </dependency>
+            <dependency>
+                <groupId>commons-codec</groupId>
+                <artifactId>commons-codec</artifactId>
+                <version>${commons.codec.version}</version>
+            </dependency>
+            <dependency>
+                <groupId>commons-pool</groupId>
+                <artifactId>commons-pool</artifactId>
+                <version>${commons.pool.version}</version>
+            </dependency>
+
+            <!-- Free Text Indexing -->
+            <dependency>
+                <groupId>org.apache.lucene</groupId>
+                <artifactId>lucene-core</artifactId>
+                <version>${lucene.version}</version>
+            </dependency>
+            <dependency>
+                <groupId>org.apache.lucene</groupId>
+                <artifactId>lucene-analyzers</artifactId>
+                <version>${lucene.version}</version>
+            </dependency>
+
+            <!-- Geo Indexing -->
+            <dependency>
+                <groupId>org.locationtech.geomesa</groupId>
+                <artifactId>geomesa-accumulo-datastore</artifactId>
+                <version>${geomesa.version}</version>
+            </dependency>
+            <dependency>
+                <groupId>org.locationtech.geomesa</groupId>
+                <artifactId>geomesa-distributed-runtime</artifactId>
+                <version>${geomesa.version}</version>
+            </dependency>
 
             <dependency>
                 <groupId>joda-time</groupId>
@@ -276,6 +469,26 @@
                 <artifactId>mango-core</artifactId>
                 <version>${mango.version}</version>
             </dependency>
+
+            <dependency>
+                <groupId>org.mockito</groupId>
+                <artifactId>mockito-all</artifactId>
+                <version>${mockito.version}</version>
+                <scope>test</scope>
+            </dependency>
+            <dependency>
+                <groupId>junit</groupId>
+                <artifactId>junit</artifactId>
+                <version>${junit.version}</version>
+                <scope>test</scope>
+            </dependency>
+            <dependency>
+                <groupId>org.apache.mrunit</groupId>
+                <artifactId>mrunit</artifactId>
+                <version>${mrunit.version}</version>
+                <classifier>hadoop2</classifier>
+                <scope>test</scope>
+            </dependency>
         </dependencies>
     </dependencyManagement>
 
@@ -285,7 +498,6 @@
                 <plugin>
                     <groupId>org.apache.maven.plugins</groupId>
                     <artifactId>maven-compiler-plugin</artifactId>
-                    <version>2.5.1</version>
                     <configuration>
                         <encoding>${project.build.sourceEncoding}</encoding>
                     </configuration>
@@ -300,7 +512,6 @@
                 <plugin>
                     <groupId>org.apache.maven.plugins</groupId>
                     <artifactId>maven-surefire-plugin</artifactId>
-                    <version>2.5</version>
                     <configuration>
                         <argLine>-Dfile.encoding=${project.build.sourceEncoding}</argLine>
                     </configuration>
@@ -308,17 +519,14 @@
                 <plugin>
                     <groupId>org.apache.maven.plugins</groupId>
                     <artifactId>maven-checkstyle-plugin</artifactId>
-                    <version>2.5</version>
                 </plugin>
                 <plugin>
                     <groupId>org.apache.maven.plugins</groupId>
                     <artifactId>maven-jar-plugin</artifactId>
-                    <version>2.2</version>
                 </plugin>
                 <plugin>
                     <groupId>org.apache.maven.plugins</groupId>
                     <artifactId>maven-javadoc-plugin</artifactId>
-                    <version>2.10</version>
                     <configuration>
                         <skip>true</skip>
                     </configuration>
@@ -326,12 +534,10 @@
                 <plugin>
                     <groupId>org.apache.maven.plugins</groupId>
                     <artifactId>maven-dependency-plugin</artifactId>
-                    <version>2.0</version>
                 </plugin>
                 <plugin>
                     <groupId>org.apache.maven.plugins</groupId>
                     <artifactId>maven-enforcer-plugin</artifactId>
-                    <version>1.3.1</version>
                     <configuration>
                         <rules>
                             <requireMavenVersion>
@@ -343,7 +549,6 @@
                 <plugin>
                     <groupId>org.apache.maven.plugins</groupId>
                     <artifactId>maven-release-plugin</artifactId>
-                    <version>2.5</version>
                     <configuration>
                         <allowTimestampedSnapshots>true</allowTimestampedSnapshots>
                         <autoVersionSubmodules>true</autoVersionSubmodules>
@@ -389,7 +594,6 @@
                 <plugin>
                     <groupId>org.apache.maven.plugins</groupId>
                     <artifactId>maven-shade-plugin</artifactId>
-                    <version>1.6</version>
                     <configuration>
                         <shadedArtifactAttached>true</shadedArtifactAttached>
                     </configuration>
@@ -403,41 +607,44 @@
                     </executions>
                 </plugin>
                 <plugin>
-                  <groupId>org.codehaus.mojo</groupId>
-                  <artifactId>license-maven-plugin</artifactId>
-                  <version>1.8</version>
-                  <configuration>
-                    <licenseName>apache_v2</licenseName>
-                    <inceptionYear>2014</inceptionYear>
-                    <organizationName>Rya</organizationName>
-                    <roots>
-                      <root>src/main/java</root>
-                      <root>src/test</root>
-                    </roots>
-                  </configuration>
-                  <executions>
-                    <execution>
-                      <id>first</id>
-                      <goals>
-                        <goal>update-file-header</goal>
-                      </goals>
-                      <phase>process-sources</phase>
-                    </execution>
-                  </executions>
+                    <!-- Apache Release Audit Tool - reports missing license headers and other issues. -->
+                    <!-- mvn apache-rat:rat -->
+                    <!-- mvn apache-rat:check -->
+                    <groupId>org.apache.rat</groupId>
+                    <artifactId>apache-rat-plugin</artifactId>
+                    <configuration>
+                        <excludes>
+                            <!-- RDF data Files -->
+                            <exclude>**/*.ntriples</exclude>
+                            <exclude>**/*.trig</exclude>
+                            <exclude>**/*.ttl</exclude>
+                            <exclude>**/*.owl</exclude>
+                            <exclude>**/*.nt</exclude>
+
+                            <!-- Services Files -->
+                            <exclude>**/resources/META-INF/services/**</exclude>
+                        </excludes>
+                    </configuration>
                 </plugin>
             </plugins>
         </pluginManagement>
+
         <plugins>
-           <plugin>
-               <artifactId>maven-source-plugin</artifactId>
-               <executions>
-                   <execution>
-                       <id>attach-sources</id>
-                       <phase>package</phase>
-                       <goals>
-                           <goal>jar-no-fork</goal>
-                       </goals>
-                   </execution>
+            <plugin>
+                <groupId>org.apache.maven.plugins</groupId>
+                <artifactId>maven-shade-plugin</artifactId>
+                <version>2.3</version>
+            </plugin>
+            <plugin>
+                <artifactId>maven-source-plugin</artifactId>
+                <executions>
+                    <execution>
+                        <id>attach-sources</id>
+                        <phase>package</phase>
+                        <goals>
+                            <goal>jar-no-fork</goal>
+                        </goals>
+                    </execution>
                 </executions>
             </plugin>
             <plugin>
@@ -472,35 +679,9 @@
             </plugin>
         </plugins>
     </build>
-    <dependencies>
-    </dependencies>
+
     <repositories>
         <repository>
-            <id>cloudera</id>
-            <url>https://repository.cloudera.com/artifactory/cloudera-repos/</url>
-        </repository>
-        <repository>
-            <releases>
-                <enabled>true</enabled>
-            </releases>
-            <snapshots>
-                <enabled>false</enabled>
-            </snapshots>
-            <id>aduna-opensource.releases</id>
-            <name>Aduna Open Source - Maven releases</name>
-            <url>http://maven.ontotext.com/content/repositories/aduna</url>
-        </repository>
-        <repository>
-            <releases>
-                <enabled>true</enabled>
-            </releases>
-            <snapshots>
-                <enabled>false</enabled>
-            </snapshots>
-            <id>fortytwo</id>
-            <url>http://fortytwo.net/maven2</url>
-        </repository>
-        <repository>
             <id>LocationTech - SNAPSHOT</id>
             <url>https://repo.locationtech.org/content/repositories/snapshots/</url>
         </repository>
@@ -509,10 +690,11 @@
             <url>https://repo.locationtech.org/content/repositories/releases/</url>
         </repository>
     </repositories>
+
     <scm>
-        <connection>scm:git:git@github.com:LAS-NCSU/rya.git</connection>
-        <developerConnection>scm:git:git@github.com:LAS-NCSU/rya.git</developerConnection>
-        <url>scm:git:git@github.com:LAS-NCSU/rya.git</url>
-      <tag>parent-3.2.9</tag>
-  </scm>
+        <connection>scm:git:git://git.apache.org/incubator-rya.git</connection>
+        <developerConnection>scm:git:https://git-wip-us.apache.org/repos/asf/incubator-rya.git</developerConnection>
+        <tag>v3.2.10-SNAPSHOT</tag>
+        <url>https://git-wip-us.apache.org/repos/asf?p=incubator-rya.git</url>
+    </scm>
 </project>

http://git-wip-us.apache.org/repos/asf/incubator-rya/blob/80faf06d/sail/pom.xml
----------------------------------------------------------------------
diff --git a/sail/pom.xml b/sail/pom.xml
index 6147d07..dfd2811 100644
--- a/sail/pom.xml
+++ b/sail/pom.xml
@@ -1,16 +1,98 @@
-<?xml version="1.0" encoding="utf-8"?>
-<project xmlns="http://maven.apache.org/POM/4.0.0" xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance" xsi:schemaLocation="http://maven.apache.org/POM/4.0.0 http://maven.apache.org/xsd/maven-4.0.0.xsd">
+<?xml version='1.0'?>
+<!--
+Licensed to the Apache Software Foundation (ASF) under one
+or more contributor license agreements.  See the NOTICE file
+distributed with this work for additional information
+regarding copyright ownership.  The ASF licenses this file
+to you under the Apache License, Version 2.0 (the
+"License"); you may not use this file except in compliance
+with the License.  You may obtain a copy of the License at
 
+  http://www.apache.org/licenses/LICENSE-2.0
+
+Unless required by applicable law or agreed to in writing,
+software distributed under the License is distributed on an
+"AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+KIND, either express or implied.  See the License for the
+specific language governing permissions and limitations
+under the License.
+-->
+
+<project xmlns="http://maven.apache.org/POM/4.0.0" xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance" xsi:schemaLocation="http://maven.apache.org/POM/4.0.0 http://maven.apache.org/maven-v4_0_0.xsd">
     <modelVersion>4.0.0</modelVersion>
     <parent>
-        <groupId>mvm.rya</groupId>
-        <artifactId>parent</artifactId>
+        <groupId>org.apache.rya</groupId>
+        <artifactId>rya-project</artifactId>
         <version>3.2.10-SNAPSHOT</version>
     </parent>
+
     <artifactId>rya.sail</artifactId>
-    <packaging>pom</packaging>
-    <name>${project.groupId}.${project.artifactId}</name>
-    <modules>
-        <module>rya.sail.impl</module>
-    </modules>
+    <name>Apache Rya SAIL</name>
+
+    <dependencies>
+        <dependency>
+            <groupId>org.apache.rya</groupId>
+            <artifactId>rya.api</artifactId>
+        </dependency>
+        <dependency>
+            <groupId>org.apache.rya</groupId>
+            <artifactId>rya.provenance</artifactId>
+        </dependency>
+        <dependency>
+            <groupId>org.apache.rya</groupId>
+            <artifactId>rya.prospector</artifactId>
+        </dependency>
+
+        <dependency>
+            <groupId>net.sf.ehcache</groupId>
+            <artifactId>ehcache-core</artifactId>
+        </dependency>
+
+        <dependency>
+            <groupId>org.apache.hadoop</groupId>
+            <artifactId>hadoop-common</artifactId>
+        </dependency>
+
+        <dependency>
+            <groupId>com.tinkerpop.blueprints</groupId>
+            <artifactId>blueprints-core</artifactId>
+        </dependency>
+
+        <dependency>
+            <groupId>org.openrdf.sesame</groupId>
+            <artifactId>sesame-runtime</artifactId>
+        </dependency>
+
+        <!-- Test -->
+        <dependency>
+            <groupId>org.mockito</groupId>
+            <artifactId>mockito-all</artifactId>
+            <scope>test</scope>
+        </dependency>
+        <dependency>
+            <groupId>junit</groupId>
+            <artifactId>junit</artifactId>
+            <scope>test</scope>
+        </dependency>
+        <dependency>
+            <groupId>org.apache.rya</groupId>
+            <artifactId>accumulo.rya</artifactId>
+            <scope>test</scope>
+        </dependency>
+
+    </dependencies>
+
+    <build>
+        <plugins>
+            <plugin>
+                <groupId>org.apache.rat</groupId>
+                <artifactId>apache-rat-plugin</artifactId>
+                <configuration>
+                    <excludes combine.children="append">
+                        <exclude>**/resources/META-INF/org.openrdf.store.schemas</exclude>
+                    </excludes>
+                </configuration>
+            </plugin>
+        </plugins>
+    </build>
 </project>

http://git-wip-us.apache.org/repos/asf/incubator-rya/blob/80faf06d/sail/rya.sail.impl/pom.xml
----------------------------------------------------------------------
diff --git a/sail/rya.sail.impl/pom.xml b/sail/rya.sail.impl/pom.xml
deleted file mode 100644
index 5e6b88a..0000000
--- a/sail/rya.sail.impl/pom.xml
+++ /dev/null
@@ -1,126 +0,0 @@
-<project xmlns="http://maven.apache.org/POM/4.0.0" xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance" xsi:schemaLocation="http://maven.apache.org/POM/4.0.0 http://maven.apache.org/maven-v4_0_0.xsd">
-    <modelVersion>4.0.0</modelVersion>
-    <parent>
-        <groupId>mvm.rya</groupId>
-        <artifactId>rya.sail</artifactId>
-        <version>3.2.10-SNAPSHOT</version>
-    </parent>
-    <artifactId>rya.sail.impl</artifactId>
-    <name>${project.groupId}.${project.artifactId}</name>
-    <properties>
-    </properties>
-    <dependencies>
-        <dependency>
-            <groupId>mvm.rya</groupId>
-            <artifactId>rya.api</artifactId>
-        </dependency>
-      <dependency>
-        	<groupId>mvm.rya</groupId>
-        	<artifactId>rya.provenance</artifactId>
-            <version>${project.version}</version>
-        </dependency>
-        <!-- Ehcache -->
-        <dependency>
-            <groupId>net.sf.ehcache</groupId>
-            <artifactId>ehcache-core</artifactId>
-        </dependency>
-
-        <dependency>
-            <groupId>org.apache.hadoop</groupId>
-            <artifactId>hadoop-common</artifactId>
-            <!--<scope>provided</scope> -->
-        </dependency>
-
-        <!-- Test -->
-        <dependency>
-            <groupId>org.mockito</groupId>
-            <artifactId>mockito-all</artifactId>
-            <scope>test</scope>
-        </dependency>
-        <dependency>
-            <groupId>junit</groupId>
-            <artifactId>junit</artifactId>
-            <scope>test</scope>
-        </dependency>
-        <dependency>
-            <groupId>mvm.rya</groupId>
-            <artifactId>accumulo.rya</artifactId>
-            <scope>test</scope>
-        </dependency>
-
-        <dependency>
-            <groupId>com.tinkerpop.blueprints</groupId>
-            <artifactId>blueprints-core</artifactId>
-        </dependency>
-
-        <dependency>
-            <groupId>org.openrdf.sesame</groupId>
-            <artifactId>sesame-runtime</artifactId>
-        </dependency>
-
-        <dependency>
-            <groupId>org.apache.zookeeper</groupId>
-            <artifactId>zookeeper</artifactId>
-            <exclusions>
-                <!-- the log4j that comes with zookeeper 3.3.5 has some bad dependencies -->
-                <exclusion>
-                    <groupId>javax.jms</groupId>
-                    <artifactId>jms</artifactId>
-                </exclusion>
-                <exclusion>
-                    <groupId>com.sun.jdmk</groupId>
-                    <artifactId>jmxtools</artifactId>
-                </exclusion>
-                <exclusion>
-                    <groupId>com.sun.jmx</groupId>
-                    <artifactId>jmxri</artifactId>
-                </exclusion>
-            </exclusions>
-        </dependency>
-        <dependency>
-        	<groupId>mvm.rya</groupId>
-        	<artifactId>rya.prospector</artifactId>
-        </dependency>
-    </dependencies>
-
-    <profiles>
-        <profile>
-            <id>accumulo</id>
-            <activation>
-                <activeByDefault>true</activeByDefault>
-            </activation>
-            <dependencies>
-                <dependency>
-                    <groupId>org.apache.accumulo</groupId>
-                    <artifactId>accumulo-core</artifactId>
-                    <optional>true</optional>
-                    <scope>test</scope>
-                </dependency>
-            </dependencies>
-        </profile>
-        <profile>
-            <id>cloudbase</id>
-            <activation>
-                <activeByDefault>false</activeByDefault>
-            </activation>
-            <dependencies>
-                <dependency>
-                    <groupId>com.texeltek</groupId>
-                    <artifactId>accumulo-cloudbase-shim</artifactId>
-                </dependency>
-            </dependencies>
-        </profile>
-    </profiles>
-
-    <build>
-      <plugins>
-        <plugin>
-          <artifactId>maven-surefire-plugin</artifactId>
-          <configuration>
-            <argLine>-Dfile.encoding=UTF-8</argLine>
-          </configuration>
-        </plugin>
-      </plugins>
-    </build>
-
-</project>

http://git-wip-us.apache.org/repos/asf/incubator-rya/blob/80faf06d/sail/rya.sail.impl/src/main/java/mvm/rya/rdftriplestore/RdfCloudTripleStore.java
----------------------------------------------------------------------
diff --git a/sail/rya.sail.impl/src/main/java/mvm/rya/rdftriplestore/RdfCloudTripleStore.java b/sail/rya.sail.impl/src/main/java/mvm/rya/rdftriplestore/RdfCloudTripleStore.java
deleted file mode 100644
index 9cbb763..0000000
--- a/sail/rya.sail.impl/src/main/java/mvm/rya/rdftriplestore/RdfCloudTripleStore.java
+++ /dev/null
@@ -1,178 +0,0 @@
-package mvm.rya.rdftriplestore;
-
-/*
- * #%L
- * mvm.rya.rya.sail.impl
- * %%
- * Copyright (C) 2014 Rya
- * %%
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * You may obtain a copy of the License at
- * 
- *      http://www.apache.org/licenses/LICENSE-2.0
- * 
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- * #L%
- */
-
-import mvm.rya.api.RdfCloudTripleStoreConfiguration;
-import mvm.rya.api.persist.RdfEvalStatsDAO;
-import mvm.rya.api.persist.RyaDAO;
-import mvm.rya.api.persist.RyaDAOException;
-import mvm.rya.api.persist.joinselect.SelectivityEvalDAO;
-import mvm.rya.rdftriplestore.inference.InferenceEngine;
-import mvm.rya.rdftriplestore.namespace.NamespaceManager;
-import mvm.rya.rdftriplestore.provenance.ProvenanceCollector;
-
-import org.openrdf.model.ValueFactory;
-import org.openrdf.model.impl.ValueFactoryImpl;
-import org.openrdf.sail.SailConnection;
-import org.openrdf.sail.SailException;
-import org.openrdf.sail.helpers.SailBase;
-
-import static com.google.common.base.Preconditions.checkNotNull;
-
-public class RdfCloudTripleStore extends SailBase {
-
-    private RdfCloudTripleStoreConfiguration conf;
-
-    protected RyaDAO ryaDAO;
-    protected InferenceEngine inferenceEngine;
-    protected RdfEvalStatsDAO rdfEvalStatsDAO;
-    protected SelectivityEvalDAO selectEvalDAO;
-    private NamespaceManager namespaceManager;
-    protected ProvenanceCollector provenanceCollector;
-
-    private ValueFactory vf = new ValueFactoryImpl();
-
-    @Override
-    protected SailConnection getConnectionInternal() throws SailException {
-        return new RdfCloudTripleStoreConnection(this, conf, vf);
-    }
-
-    @Override
-    protected void initializeInternal() throws SailException {
-        checkNotNull(ryaDAO);
-
-        if (this.conf == null) {
-            this.conf = ryaDAO.getConf();
-        }
-
-        checkNotNull(this.conf);
-
-        try {
-            if (!ryaDAO.isInitialized()) {
-                ryaDAO.setConf(this.conf);
-                ryaDAO.init();
-            }
-        } catch (RyaDAOException e) {
-            throw new SailException(e);
-        }
-
-        if (rdfEvalStatsDAO != null && !rdfEvalStatsDAO.isInitialized()) {
-            rdfEvalStatsDAO.setConf(this.conf);
-            rdfEvalStatsDAO.init();
-        }
-
-        //TODO: Support inferencing with ryadao
-//        if (inferenceEngine != null && !inferenceEngine.isInitialized()) {
-//            inferenceEngine.setConf(this.conf);
-//            inferenceEngine.setRyaDAO(ryaDAO);
-//            inferenceEngine.init();
-//        }
-
-        if (namespaceManager == null) {
-            this.namespaceManager = new NamespaceManager(ryaDAO, this.conf);
-        }
-    }
-
-    @Override
-    protected void shutDownInternal() throws SailException {
-        try {
-            if (namespaceManager != null) {
-                namespaceManager.shutdown();
-            }
-            if (inferenceEngine != null) {
-                inferenceEngine.destroy();
-            }
-            if (rdfEvalStatsDAO != null) {
-                rdfEvalStatsDAO.destroy();
-            }
-            ryaDAO.destroy();
-        } catch (Exception e) {
-            throw new SailException(e);
-        }
-    }
-
-    @Override
-    public ValueFactory getValueFactory() {
-        return vf;
-    }
-
-    @Override
-    public boolean isWritable() throws SailException {
-        return true;
-    }
-
-    public RdfCloudTripleStoreConfiguration getConf() {
-        return conf;
-    }
-
-    public void setConf(RdfCloudTripleStoreConfiguration conf) {
-        this.conf = conf;
-    }
-
-    public RdfEvalStatsDAO getRdfEvalStatsDAO() {
-        return rdfEvalStatsDAO;
-    }
-
-    public void setRdfEvalStatsDAO(RdfEvalStatsDAO rdfEvalStatsDAO) {
-        this.rdfEvalStatsDAO = rdfEvalStatsDAO;
-    }
-    
-    public SelectivityEvalDAO getSelectEvalDAO() {
-        return selectEvalDAO;
-    }
-    
-    public void setSelectEvalDAO(SelectivityEvalDAO selectEvalDAO) {
-        this.selectEvalDAO = selectEvalDAO;
-    }
-
-    public RyaDAO getRyaDAO() {
-        return ryaDAO;
-    }
-
-    public void setRyaDAO(RyaDAO ryaDAO) {
-        this.ryaDAO = ryaDAO;
-    }
-
-    public InferenceEngine getInferenceEngine() {
-        return inferenceEngine;
-    }
-
-    public void setInferenceEngine(InferenceEngine inferenceEngine) {
-        this.inferenceEngine = inferenceEngine;
-    }
-
-    public NamespaceManager getNamespaceManager() {
-        return namespaceManager;
-    }
-
-    public void setNamespaceManager(NamespaceManager namespaceManager) {
-        this.namespaceManager = namespaceManager;
-    }
-
-    public ProvenanceCollector getProvenanceCollector() {
-		return provenanceCollector;
-	}
-
-	public void setProvenanceCollector(ProvenanceCollector provenanceCollector) {
-		this.provenanceCollector = provenanceCollector;
-	}
-
-}
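
A minimal usage sketch, for orientation: RdfCloudTripleStore (relocated by
this commit from sail/rya.sail.impl into the consolidated rya.sail module)
is an OpenRDF Sail, so it is normally handed a RyaDAO and wrapped in a
SailRepository. The snippet below assumes the AccumuloRyaDAO and
AccumuloRdfConfiguration classes from the accumulo.rya module and a
caller-supplied Accumulo Connector; the table prefix and query text are
illustrative, not part of this commit.

    import mvm.rya.accumulo.AccumuloRdfConfiguration; // assumed: accumulo.rya module
    import mvm.rya.accumulo.AccumuloRyaDAO;           // assumed: accumulo.rya module
    import mvm.rya.rdftriplestore.RdfCloudTripleStore;
    import org.apache.accumulo.core.client.Connector;
    import org.openrdf.query.QueryLanguage;
    import org.openrdf.query.TupleQuery;
    import org.openrdf.query.TupleQueryResult;
    import org.openrdf.repository.sail.SailRepository;
    import org.openrdf.repository.sail.SailRepositoryConnection;

    public class RyaSailSketch {
        public static void runSampleQuery(Connector connector) throws Exception {
            AccumuloRdfConfiguration conf = new AccumuloRdfConfiguration();
            conf.setTablePrefix("rya_");              // illustrative prefix

            AccumuloRyaDAO ryaDAO = new AccumuloRyaDAO();
            ryaDAO.setConnector(connector);

            // initializeInternal() (shown above) pushes the conf into the DAO
            // and calls init() on it when the repository is initialized.
            RdfCloudTripleStore store = new RdfCloudTripleStore();
            store.setConf(conf);
            store.setRyaDAO(ryaDAO);

            SailRepository repo = new SailRepository(store);
            repo.initialize();

            SailRepositoryConnection con = repo.getConnection();
            try {
                TupleQuery query = con.prepareTupleQuery(QueryLanguage.SPARQL,
                        "SELECT ?s ?p ?o WHERE { ?s ?p ?o } LIMIT 10");
                // Query-time overrides travel as special bindings; see the
                // CONF_* handling in RdfCloudTripleStoreConnection.evaluateInternal:
                // query.setBinding(RdfCloudTripleStoreConfiguration.CONF_QUERY_AUTH,
                //         repo.getValueFactory().createLiteral("U"));
                TupleQueryResult result = query.evaluate();
                while (result.hasNext()) {
                    System.out.println(result.next());
                }
                result.close();
            } finally {
                con.close();
                repo.shutDown();
            }
        }
    }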

http://git-wip-us.apache.org/repos/asf/incubator-rya/blob/80faf06d/sail/rya.sail.impl/src/main/java/mvm/rya/rdftriplestore/RdfCloudTripleStoreConnection.java
----------------------------------------------------------------------
diff --git a/sail/rya.sail.impl/src/main/java/mvm/rya/rdftriplestore/RdfCloudTripleStoreConnection.java b/sail/rya.sail.impl/src/main/java/mvm/rya/rdftriplestore/RdfCloudTripleStoreConnection.java
deleted file mode 100644
index e19cfba..0000000
--- a/sail/rya.sail.impl/src/main/java/mvm/rya/rdftriplestore/RdfCloudTripleStoreConnection.java
+++ /dev/null
@@ -1,622 +0,0 @@
-package mvm.rya.rdftriplestore;
-
-/*
- * #%L
- * mvm.rya.rya.sail.impl
- * %%
- * Copyright (C) 2014 Rya
- * %%
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * You may obtain a copy of the License at
- * 
- *      http://www.apache.org/licenses/LICENSE-2.0
- * 
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- * #L%
- */
-
-import static com.google.common.base.Preconditions.checkArgument;
-import static com.google.common.base.Preconditions.checkNotNull;
-import info.aduna.iteration.CloseableIteration;
-
-import java.lang.reflect.Constructor;
-import java.util.Collection;
-import java.util.List;
-import java.util.Map;
-import java.util.Map.Entry;
-import java.util.NoSuchElementException;
-
-import mvm.rya.api.RdfCloudTripleStoreConfiguration;
-import mvm.rya.api.RdfCloudTripleStoreConstants;
-import mvm.rya.api.domain.RyaStatement;
-import mvm.rya.api.domain.RyaURI;
-import mvm.rya.api.persist.RdfEvalStatsDAO;
-import mvm.rya.api.persist.RyaDAO;
-import mvm.rya.api.persist.RyaDAOException;
-import mvm.rya.api.persist.joinselect.SelectivityEvalDAO;
-import mvm.rya.api.persist.utils.RyaDAOHelper;
-import mvm.rya.api.resolver.RdfToRyaConversions;
-import mvm.rya.rdftriplestore.evaluation.FilterRangeVisitor;
-import mvm.rya.rdftriplestore.evaluation.ParallelEvaluationStrategyImpl;
-import mvm.rya.rdftriplestore.evaluation.QueryJoinSelectOptimizer;
-import mvm.rya.rdftriplestore.evaluation.RdfCloudTripleStoreEvaluationStatistics;
-import mvm.rya.rdftriplestore.evaluation.RdfCloudTripleStoreSelectivityEvaluationStatistics;
-import mvm.rya.rdftriplestore.evaluation.SeparateFilterJoinsVisitor;
-import mvm.rya.rdftriplestore.inference.InferenceEngine;
-import mvm.rya.rdftriplestore.inference.InverseOfVisitor;
-import mvm.rya.rdftriplestore.inference.SameAsVisitor;
-import mvm.rya.rdftriplestore.inference.SubClassOfVisitor;
-import mvm.rya.rdftriplestore.inference.SubPropertyOfVisitor;
-import mvm.rya.rdftriplestore.inference.SymmetricPropertyVisitor;
-import mvm.rya.rdftriplestore.inference.TransitivePropertyVisitor;
-import mvm.rya.rdftriplestore.namespace.NamespaceManager;
-import mvm.rya.rdftriplestore.provenance.ProvenanceCollectionException;
-import mvm.rya.rdftriplestore.provenance.ProvenanceCollector;
-import mvm.rya.rdftriplestore.utils.DefaultStatistics;
-
-import org.apache.hadoop.conf.Configurable;
-import org.openrdf.model.Namespace;
-import org.openrdf.model.Resource;
-import org.openrdf.model.Statement;
-import org.openrdf.model.URI;
-import org.openrdf.model.Value;
-import org.openrdf.model.ValueFactory;
-import org.openrdf.model.impl.ContextStatementImpl;
-import org.openrdf.model.impl.StatementImpl;
-import org.openrdf.query.Binding;
-import org.openrdf.query.BindingSet;
-import org.openrdf.query.Dataset;
-import org.openrdf.query.QueryEvaluationException;
-import org.openrdf.query.algebra.QueryRoot;
-import org.openrdf.query.algebra.StatementPattern;
-import org.openrdf.query.algebra.TupleExpr;
-import org.openrdf.query.algebra.Var;
-import org.openrdf.query.algebra.evaluation.EvaluationStrategy;
-import org.openrdf.query.algebra.evaluation.QueryBindingSet;
-import org.openrdf.query.algebra.evaluation.QueryOptimizer;
-import org.openrdf.query.algebra.evaluation.TripleSource;
-import org.openrdf.query.algebra.evaluation.impl.BindingAssigner;
-import org.openrdf.query.algebra.evaluation.impl.CompareOptimizer;
-import org.openrdf.query.algebra.evaluation.impl.ConjunctiveConstraintSplitter;
-import org.openrdf.query.algebra.evaluation.impl.ConstantOptimizer;
-import org.openrdf.query.algebra.evaluation.impl.DisjunctiveConstraintOptimizer;
-import org.openrdf.query.algebra.evaluation.impl.EvaluationStatistics;
-import org.openrdf.query.algebra.evaluation.impl.FilterOptimizer;
-import org.openrdf.query.algebra.evaluation.impl.IterativeEvaluationOptimizer;
-import org.openrdf.query.algebra.evaluation.impl.OrderLimitOptimizer;
-import org.openrdf.query.algebra.evaluation.impl.QueryModelNormalizer;
-import org.openrdf.query.algebra.evaluation.impl.SameTermFilterOptimizer;
-import org.openrdf.query.impl.EmptyBindingSet;
-import org.openrdf.sail.SailException;
-import org.openrdf.sail.helpers.SailConnectionBase;
-
-public class RdfCloudTripleStoreConnection extends SailConnectionBase {
-
-    private RdfCloudTripleStore store;
-
-    private RdfEvalStatsDAO rdfEvalStatsDAO;
-    private SelectivityEvalDAO selectEvalDAO;
-    private RyaDAO ryaDAO;
-    private InferenceEngine inferenceEngine;
-    private NamespaceManager namespaceManager;
-    private RdfCloudTripleStoreConfiguration conf;
-    
-
-	private ProvenanceCollector provenanceCollector;
-
-    public RdfCloudTripleStoreConnection(RdfCloudTripleStore sailBase, RdfCloudTripleStoreConfiguration conf, ValueFactory vf)
-            throws SailException {
-        super(sailBase);
-        this.store = sailBase;
-        this.conf = conf;
-        initialize();
-    }
-
-    protected void initialize() throws SailException {
-        refreshConnection();
-    }
-
-    protected void refreshConnection() throws SailException {
-        try {
-            checkNotNull(store.getRyaDAO());
-            checkArgument(store.getRyaDAO().isInitialized());
-            checkNotNull(store.getNamespaceManager());
-
-            this.ryaDAO = store.getRyaDAO();
-            this.rdfEvalStatsDAO = store.getRdfEvalStatsDAO();
-            this.selectEvalDAO = store.getSelectEvalDAO();
-            this.inferenceEngine = store.getInferenceEngine();
-            this.namespaceManager = store.getNamespaceManager();
-            this.provenanceCollector = store.getProvenanceCollector();
-
-        } catch (Exception e) {
-            throw new SailException(e);
-        }
-    }
-
-    @Override
-    protected void addStatementInternal(Resource subject, URI predicate,
-                                        Value object, Resource... contexts) throws SailException {
-        try {
-            String cv_s = conf.getCv();
-            byte[] cv = cv_s == null ? null : cv_s.getBytes();
-            if (contexts != null && contexts.length > 0) {
-                for (Resource context : contexts) {
-                    RyaStatement statement = new RyaStatement(
-                            RdfToRyaConversions.convertResource(subject),
-                            RdfToRyaConversions.convertURI(predicate),
-                            RdfToRyaConversions.convertValue(object),
-                            RdfToRyaConversions.convertResource(context),
-                            null, cv);
-
-                    ryaDAO.add(statement);
-                }
-            } else {
-                RyaStatement statement = new RyaStatement(
-                        RdfToRyaConversions.convertResource(subject),
-                        RdfToRyaConversions.convertURI(predicate),
-                        RdfToRyaConversions.convertValue(object),
-                        null, null, cv);
-
-                ryaDAO.add(statement);
-            }
-        } catch (RyaDAOException e) {
-            throw new SailException(e);
-        }
-    }
-
-    
-    
-    
-    @Override
-    protected void clearInternal(Resource... aresource) throws SailException {
-        try {
-            RyaURI[] graphs = new RyaURI[aresource.length];
-            for (int i = 0 ; i < graphs.length ; i++){
-                graphs[i] = RdfToRyaConversions.convertResource(aresource[i]);
-            }
-            ryaDAO.dropGraph(conf, graphs);
-        } catch (RyaDAOException e) {
-            throw new SailException(e);
-        }
-    }
-
-    @Override
-    protected void clearNamespacesInternal() throws SailException {
-        logger.error("Clear Namespace Repository method not implemented");
-    }
-
-    @Override
-    protected void closeInternal() throws SailException {
-        verifyIsOpen();
-    }
-
-    @Override
-    protected void commitInternal() throws SailException {
-        verifyIsOpen();
-        //There is no transactional layer
-    }
-
-    @Override
-    protected CloseableIteration<? extends BindingSet, QueryEvaluationException> evaluateInternal(
-            TupleExpr tupleExpr, Dataset dataset, BindingSet bindings,
-            boolean flag) throws SailException {
-        verifyIsOpen();
-        logger.trace("Incoming query model:\n{}", tupleExpr.toString());
-        if (provenanceCollector != null){
-        	try {
-				provenanceCollector.recordQuery(tupleExpr.toString());
-			} catch (ProvenanceCollectionException e) {
-				// TODO silent fail
-				e.printStackTrace();
-			}
-        }
-        tupleExpr = tupleExpr.clone();
-
-        RdfCloudTripleStoreConfiguration queryConf = store.getConf().clone();
-        if (bindings != null) {
-            Binding dispPlan = bindings.getBinding(RdfCloudTripleStoreConfiguration.CONF_QUERYPLAN_FLAG);
-            if (dispPlan != null) {
-                queryConf.setDisplayQueryPlan(Boolean.parseBoolean(dispPlan.getValue().stringValue()));
-            }
-
-            Binding authBinding = bindings.getBinding(RdfCloudTripleStoreConfiguration.CONF_QUERY_AUTH);
-            if (authBinding != null) {
-                queryConf.setAuths(authBinding.getValue().stringValue().split(","));
-            }
-
-            Binding ttlBinding = bindings.getBinding(RdfCloudTripleStoreConfiguration.CONF_TTL);
-            if (ttlBinding != null) {
-                queryConf.setTtl(Long.valueOf(ttlBinding.getValue().stringValue()));
-            }
-
-            Binding startTimeBinding = bindings.getBinding(RdfCloudTripleStoreConfiguration.CONF_STARTTIME);
-            if (startTimeBinding != null) {
-                queryConf.setStartTime(Long.valueOf(startTimeBinding.getValue().stringValue()));
-            }
-
-            Binding performantBinding = bindings.getBinding(RdfCloudTripleStoreConfiguration.CONF_PERFORMANT);
-            if (performantBinding != null) {
-                queryConf.setBoolean(RdfCloudTripleStoreConfiguration.CONF_PERFORMANT, Boolean.parseBoolean(performantBinding.getValue().stringValue()));
-            }
-
-            Binding inferBinding = bindings.getBinding(RdfCloudTripleStoreConfiguration.CONF_INFER);
-            if (inferBinding != null) {
-                queryConf.setInfer(Boolean.parseBoolean(inferBinding.getValue().stringValue()));
-            }
-
-            Binding useStatsBinding = bindings.getBinding(RdfCloudTripleStoreConfiguration.CONF_USE_STATS);
-            if (useStatsBinding != null) {
-                queryConf.setUseStats(Boolean.parseBoolean(useStatsBinding.getValue().stringValue()));
-            }
-
-            Binding offsetBinding = bindings.getBinding(RdfCloudTripleStoreConfiguration.CONF_OFFSET);
-            if (offsetBinding != null) {
-                queryConf.setOffset(Long.parseLong(offsetBinding.getValue().stringValue()));
-            }
-
-            Binding limitBinding = bindings.getBinding(RdfCloudTripleStoreConfiguration.CONF_LIMIT);
-            if (limitBinding != null) {
-                queryConf.setLimit(Long.parseLong(limitBinding.getValue().stringValue()));
-            }
-        } else {
-            bindings = new QueryBindingSet();
-        }
-
-        if (!(tupleExpr instanceof QueryRoot)) {
-            tupleExpr = new QueryRoot(tupleExpr);
-        }
-
-        try {
-            List<Class<QueryOptimizer>> optimizers = queryConf.getOptimizers();
-            Class<QueryOptimizer> pcjOptimizer = queryConf.getPcjOptimizer();
-            
-            if(pcjOptimizer != null) {
-                QueryOptimizer opt = null;
-                try {
-                    Constructor<QueryOptimizer> construct = pcjOptimizer.getDeclaredConstructor(new Class[] {});
-                    opt = construct.newInstance();
-                } catch (Exception e) {
-                }
-                if (opt == null) {
-                    throw new NoSuchMethodException("Could not find valid constructor for " + pcjOptimizer.getName());
-                }
-                if (opt instanceof Configurable) {
-                    ((Configurable) opt).setConf(conf);
-                }
-                opt.optimize(tupleExpr, dataset, bindings);
-            }
-            
-            final ParallelEvaluationStrategyImpl strategy = new ParallelEvaluationStrategyImpl(
-                    new StoreTripleSource(queryConf), inferenceEngine, dataset, queryConf);
-            
-                (new BindingAssigner()).optimize(tupleExpr, dataset, bindings);
-                (new ConstantOptimizer(strategy)).optimize(tupleExpr, dataset,
-                        bindings);
-                (new CompareOptimizer()).optimize(tupleExpr, dataset, bindings);
-                (new ConjunctiveConstraintSplitter()).optimize(tupleExpr, dataset,
-                        bindings);
-                (new DisjunctiveConstraintOptimizer()).optimize(tupleExpr, dataset,
-                        bindings);
-                (new SameTermFilterOptimizer()).optimize(tupleExpr, dataset,
-                        bindings);
-                (new QueryModelNormalizer()).optimize(tupleExpr, dataset, bindings);
-    
-                (new IterativeEvaluationOptimizer()).optimize(tupleExpr, dataset,
-                        bindings);
-
-            if (!optimizers.isEmpty()) {
-                for (Class<QueryOptimizer> optclz : optimizers) {
-                    QueryOptimizer result = null;
-                    try {
-                        Constructor<QueryOptimizer> meth = optclz.getDeclaredConstructor(new Class[] {});
-                        result = meth.newInstance();
-                    } catch (Exception e) {
-                    }
-                    try {
-                        Constructor<QueryOptimizer> meth = optclz.getDeclaredConstructor(EvaluationStrategy.class);
-                        result = meth.newInstance(strategy);
-                    } catch (Exception e) {
-                    }
-                    if (result == null) {
-                        throw new NoSuchMethodException("Could not find valid constructor for " + optclz.getName());
-                    }
-                    if (result instanceof Configurable) {
-                        ((Configurable) result).setConf(conf);
-                    }
-                    result.optimize(tupleExpr, dataset, bindings);
-                }
-            }
-
-            (new FilterOptimizer()).optimize(tupleExpr, dataset, bindings);
-            (new OrderLimitOptimizer()).optimize(tupleExpr, dataset, bindings);
-            
-            logger.trace("Optimized query model:\n{}", tupleExpr.toString());
-
-            if (queryConf.isInfer()
-                    && this.inferenceEngine != null
-                    ) {
-                try {
-                    tupleExpr.visit(new TransitivePropertyVisitor(queryConf, inferenceEngine));
-                    tupleExpr.visit(new SymmetricPropertyVisitor(queryConf, inferenceEngine));
-                    tupleExpr.visit(new InverseOfVisitor(queryConf, inferenceEngine));
-                    tupleExpr.visit(new SubPropertyOfVisitor(queryConf, inferenceEngine));
-                    tupleExpr.visit(new SubClassOfVisitor(queryConf, inferenceEngine));
-                    tupleExpr.visit(new SameAsVisitor(queryConf, inferenceEngine));
-                } catch (Exception e) {
-                    e.printStackTrace();
-                }
-            }
-            if (queryConf.isPerformant()) {
-                tupleExpr.visit(new SeparateFilterJoinsVisitor());
-//                tupleExpr.visit(new FilterTimeIndexVisitor(queryConf));
-//                tupleExpr.visit(new PartitionFilterTimeIndexVisitor(queryConf));
-            }
-            FilterRangeVisitor rangeVisitor = new FilterRangeVisitor(queryConf);
-            tupleExpr.visit(rangeVisitor);
-            tupleExpr.visit(rangeVisitor); //this has to be done twice to replace the StatementPatterns with the right ranges
-            EvaluationStatistics stats = null;
-            if (!queryConf.isUseStats() && queryConf.isPerformant() || rdfEvalStatsDAO == null) {
-                stats = new DefaultStatistics();
-            } else if (queryConf.isUseStats()) {
-
-                if (queryConf.isUseSelectivity()) {
-                    stats = new RdfCloudTripleStoreSelectivityEvaluationStatistics(queryConf, rdfEvalStatsDAO,
-                            selectEvalDAO);
-                } else {
-                    stats = new RdfCloudTripleStoreEvaluationStatistics(queryConf, rdfEvalStatsDAO);
-                }
-            }
-            if (stats != null) {
-
-                if (stats instanceof RdfCloudTripleStoreSelectivityEvaluationStatistics) {
-
-                    (new QueryJoinSelectOptimizer((RdfCloudTripleStoreSelectivityEvaluationStatistics) stats,
-                            selectEvalDAO)).optimize(tupleExpr, dataset, bindings);
-                } else {
-
-                    (new mvm.rya.rdftriplestore.evaluation.QueryJoinOptimizer(stats)).optimize(tupleExpr, dataset,
-                            bindings); // TODO: Make pluggable
-                }
-            }
-
-            final CloseableIteration<BindingSet, QueryEvaluationException> iter = strategy
-                    .evaluate(tupleExpr, EmptyBindingSet.getInstance());
-            CloseableIteration<BindingSet, QueryEvaluationException> iterWrap = new CloseableIteration<BindingSet, QueryEvaluationException>() {
-                
-                @Override
-                public void remove() throws QueryEvaluationException {
-                  iter.remove();
-                }
-                
-                @Override
-                public BindingSet next() throws QueryEvaluationException {
-                    return iter.next();
-                }
-                
-                @Override
-                public boolean hasNext() throws QueryEvaluationException {
-                    return iter.hasNext();
-                }
-                
-                @Override
-                public void close() throws QueryEvaluationException {
-                    iter.close();
-                    strategy.shutdown();
-                }
-            };
-            return iterWrap;
-        } catch (QueryEvaluationException e) {
-            throw new SailException(e);
-        } catch (Exception e) {
-            throw new SailException(e);
-        }
-    }
-
-    @Override
-    protected CloseableIteration<? extends Resource, SailException> getContextIDsInternal()
-            throws SailException {
-        verifyIsOpen();
-
-        // iterate through all contextids
-        return null;
-    }
-
-    @Override
-    protected String getNamespaceInternal(String s) throws SailException {
-        return namespaceManager.getNamespace(s);
-    }
-
-    @Override
-    protected CloseableIteration<? extends Namespace, SailException> getNamespacesInternal()
-            throws SailException {
-        return namespaceManager.iterateNamespace();
-    }
-
-    @Override
-    protected CloseableIteration<? extends Statement, SailException> getStatementsInternal(
-            Resource subject, URI predicate, Value object, boolean flag,
-            Resource... contexts) throws SailException {
-//        try {
-        //have to do this to get the inferred values
-        //TODO: Will this method reduce performance?
-        final Var subjVar = decorateValue(subject, "s");
-        final Var predVar = decorateValue(predicate, "p");
-        final Var objVar = decorateValue(object, "o");
-        StatementPattern sp = null;
-        final boolean hasContext = contexts != null && contexts.length > 0;
-        final Resource context = (hasContext) ? contexts[0] : null;
-        final Var cntxtVar = decorateValue(context, "c");
-        //TODO: Only using one context here
-        sp = new StatementPattern(subjVar, predVar, objVar, cntxtVar);
-        //return new StoreTripleSource(store.getConf()).getStatements(resource, uri, value, contexts);
-        final CloseableIteration<? extends BindingSet, QueryEvaluationException> evaluate = evaluate(sp, null, null, false);
-        return new CloseableIteration<Statement, SailException>() {  //TODO: Use a util class to do this
-            private boolean isClosed = false;
-
-            @Override
-            public void close() throws SailException {
-            isClosed = true;
-                try {
-                    evaluate.close();
-                } catch (QueryEvaluationException e) {
-                    throw new SailException(e);
-                }
-            }
-
-            @Override
-            public boolean hasNext() throws SailException {
-                try {
-                    return evaluate.hasNext();
-                } catch (QueryEvaluationException e) {
-                    throw new SailException(e);
-                }
-            }
-
-            @Override
-            public Statement next() throws SailException {
-                if (!hasNext() || isClosed) {
-                    throw new NoSuchElementException();
-                }
-
-                try {
-                    BindingSet next = evaluate.next();
-                    Resource bs_subj = (Resource) ((subjVar.hasValue()) ? subjVar.getValue() : next.getBinding(subjVar.getName()).getValue());
-                    URI bs_pred = (URI) ((predVar.hasValue()) ? predVar.getValue() : next.getBinding(predVar.getName()).getValue());
-                    Value bs_obj = (objVar.hasValue()) ? objVar.getValue() : (Value) next.getBinding(objVar.getName()).getValue();
-                    Binding b_cntxt = next.getBinding(cntxtVar.getName());
-
-                    //convert BindingSet to Statement
-                    if (b_cntxt != null) {
-                        return new ContextStatementImpl(bs_subj, bs_pred, bs_obj, (Resource) b_cntxt.getValue());
-                    } else {
-                        return new StatementImpl(bs_subj, bs_pred, bs_obj);
-                    }
-                } catch (QueryEvaluationException e) {
-                    throw new SailException(e);
-                }
-            }
-
-            @Override
-            public void remove() throws SailException {
-                try {
-                    evaluate.remove();
-                } catch (QueryEvaluationException e) {
-                    throw new SailException(e);
-                }
-            }
-        };
-//        } catch (QueryEvaluationException e) {
-//            throw new SailException(e);
-//        }
-    }
-
-    protected Var decorateValue(Value val, String name) {
-        if (val == null) {
-            return new Var(name);
-        } else {
-            return new Var(name, val);
-        }
-    }
-
-    @Override
-    protected void removeNamespaceInternal(String s) throws SailException {
-        namespaceManager.removeNamespace(s);
-    }
-
-    @Override
-    protected void removeStatementsInternal(Resource subject, URI predicate,
-                                            Value object, Resource... contexts) throws SailException {
-        if (!(subject instanceof URI)) {
-            throw new SailException("Subject[" + subject + "] must be URI");
-        }
-
-        try {
-            if (contexts != null && contexts.length > 0) {
-                for (Resource context : contexts) {
-                    if (!(context instanceof URI)) {
-                        throw new SailException("Context[" + context + "] must be URI");
-                    }
-                    RyaStatement statement = new RyaStatement(
-                            RdfToRyaConversions.convertResource(subject),
-                            RdfToRyaConversions.convertURI(predicate),
-                            RdfToRyaConversions.convertValue(object),
-                            RdfToRyaConversions.convertResource(context));
-
-                    ryaDAO.delete(statement, conf);
-                }
-            } else {
-                RyaStatement statement = new RyaStatement(
-                        RdfToRyaConversions.convertResource(subject),
-                        RdfToRyaConversions.convertURI(predicate),
-                        RdfToRyaConversions.convertValue(object),
-                        null);
-
-                ryaDAO.delete(statement, conf);
-            }
-        } catch (RyaDAOException e) {
-            throw new SailException(e);
-        }
-    }
-
-    @Override
-    protected void rollbackInternal() throws SailException {
-        //TODO: No transactional layer as of yet
-    }
-
-    @Override
-    protected void setNamespaceInternal(String s, String s1)
-            throws SailException {
-        namespaceManager.addNamespace(s, s1);
-    }
-
-    @Override
-    protected long sizeInternal(Resource... contexts) throws SailException {
-        logger.error("Cannot determine size as of yet");
-
-        return 0;
-    }
-
-    @Override
-    protected void startTransactionInternal() throws SailException {
-        //TODO: ?
-    }
-
-    public class StoreTripleSource implements TripleSource {
-
-        private RdfCloudTripleStoreConfiguration conf;
-
-        public StoreTripleSource(RdfCloudTripleStoreConfiguration conf) {
-            this.conf = conf;
-        }
-
-        public CloseableIteration<Statement, QueryEvaluationException> getStatements(
-                Resource subject, URI predicate, Value object,
-                Resource... contexts) throws QueryEvaluationException {
-            return RyaDAOHelper.query(ryaDAO, subject, predicate, object, conf, contexts);
-        }
-
-        public CloseableIteration<? extends Entry<Statement, BindingSet>, QueryEvaluationException> getStatements(
-                Collection<Map.Entry<Statement, BindingSet>> statements,
-                Resource... contexts) throws QueryEvaluationException {
-
-            return RyaDAOHelper.query(ryaDAO, statements, conf);
-        }
-
-        public ValueFactory getValueFactory() {
-            return RdfCloudTripleStoreConstants.VALUE_FACTORY;
-        }
-    }
-    
-    public InferenceEngine getInferenceEngine() {
-        return inferenceEngine;
-    }
-    public RdfCloudTripleStoreConfiguration getConf() {
-        return conf;
-    }
-}
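
The evaluateInternal method above shows that several per-query settings (auths, TTL, inference, offset/limit) travel inside the BindingSet rather than in the Sail configuration. A minimal usage sketch, assuming an already-configured RdfCloudTripleStore named "store"; the import packages follow the module paths in this commit, and the SPARQL string and auth values are illustrative:

    import mvm.rya.api.RdfCloudTripleStoreConfiguration;
    import mvm.rya.rdftriplestore.RdfCloudTripleStore;

    import org.openrdf.query.QueryLanguage;
    import org.openrdf.query.TupleQuery;
    import org.openrdf.query.TupleQueryResult;
    import org.openrdf.repository.sail.SailRepository;
    import org.openrdf.repository.sail.SailRepositoryConnection;

    public class QueryAuthSketch {
        public static void run(RdfCloudTripleStore store) throws Exception {
            SailRepository repo = new SailRepository(store);
            repo.initialize();
            SailRepositoryConnection conn = repo.getConnection();
            try {
                TupleQuery query = conn.prepareTupleQuery(QueryLanguage.SPARQL,
                        "SELECT ?s WHERE { ?s ?p ?o }");
                // Read by evaluateInternal above; applies to this query only
                query.setBinding(RdfCloudTripleStoreConfiguration.CONF_QUERY_AUTH,
                        repo.getValueFactory().createLiteral("U,FOUO"));
                TupleQueryResult result = query.evaluate();
                try {
                    while (result.hasNext()) {
                        System.out.println(result.next());
                    }
                } finally {
                    result.close();
                }
            } finally {
                conn.close();
            }
        }
    }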

http://git-wip-us.apache.org/repos/asf/incubator-rya/blob/80faf06d/sail/rya.sail.impl/src/main/java/mvm/rya/rdftriplestore/RdfCloudTripleStoreFactory.java
----------------------------------------------------------------------
diff --git a/sail/rya.sail.impl/src/main/java/mvm/rya/rdftriplestore/RdfCloudTripleStoreFactory.java b/sail/rya.sail.impl/src/main/java/mvm/rya/rdftriplestore/RdfCloudTripleStoreFactory.java
deleted file mode 100644
index ae13832..0000000
--- a/sail/rya.sail.impl/src/main/java/mvm/rya/rdftriplestore/RdfCloudTripleStoreFactory.java
+++ /dev/null
@@ -1,55 +0,0 @@
-package mvm.rya.rdftriplestore;
-
-/*
- * #%L
- * mvm.rya.rya.sail.impl
- * %%
- * Copyright (C) 2014 Rya
- * %%
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * You may obtain a copy of the License at
- * 
- *      http://www.apache.org/licenses/LICENSE-2.0
- * 
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- * #L%
- */
-
-import org.openrdf.sail.Sail;
-import org.openrdf.sail.config.SailConfigException;
-import org.openrdf.sail.config.SailFactory;
-import org.openrdf.sail.config.SailImplConfig;
-
-public class RdfCloudTripleStoreFactory implements SailFactory {
-
-	public static final String SAIL_TYPE = "openrdf:RdfCloudTripleStore";
-
-	@Override
-	public SailImplConfig getConfig() {
-		return new RdfCloudTripleStoreSailConfig();
-	}
-
-	@Override
-	public Sail getSail(SailImplConfig config) throws SailConfigException {
-//		RdfCloudTripleStore cbStore = new RdfCloudTripleStore();
-//		RdfCloudTripleStoreSailConfig cbconfig = (RdfCloudTripleStoreSailConfig) config;
-//		cbStore.setServer(cbconfig.getServer());
-//		cbStore.setPort(cbconfig.getPort());
-//		cbStore.setInstance(cbconfig.getInstance());
-//		cbStore.setPassword(cbconfig.getPassword());
-//		cbStore.setUser(cbconfig.getUser());
-//		return cbStore;
-        return null; //TODO: How?
-	}
-
-	@Override
-	public String getSailType() {
-		return SAIL_TYPE;
-	}
-
-}
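
For reference, Sesame resolves a SailFactory by its type string through SailRegistry, which is backed by META-INF/services. Below is a hedged sketch of the lookup path this factory was written for, assuming Sesame 2.x registry behavior (note that the getSail implementation above returned null, so actual construction happened elsewhere):

    import org.openrdf.sail.Sail;
    import org.openrdf.sail.config.SailConfigException;
    import org.openrdf.sail.config.SailFactory;
    import org.openrdf.sail.config.SailRegistry;

    public class SailLookupSketch {
        public static Sail lookup() throws SailConfigException {
            // "openrdf:RdfCloudTripleStore" is SAIL_TYPE from the factory above
            SailFactory factory = SailRegistry.getInstance().get("openrdf:RdfCloudTripleStore");
            if (factory == null) {
                throw new SailConfigException("No SailFactory registered for SAIL_TYPE");
            }
            return factory.getSail(factory.getConfig());
        }
    }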

http://git-wip-us.apache.org/repos/asf/incubator-rya/blob/80faf06d/sail/rya.sail.impl/src/main/java/mvm/rya/rdftriplestore/RdfCloudTripleStoreSailConfig.java
----------------------------------------------------------------------
diff --git a/sail/rya.sail.impl/src/main/java/mvm/rya/rdftriplestore/RdfCloudTripleStoreSailConfig.java b/sail/rya.sail.impl/src/main/java/mvm/rya/rdftriplestore/RdfCloudTripleStoreSailConfig.java
deleted file mode 100644
index d9b6f25..0000000
--- a/sail/rya.sail.impl/src/main/java/mvm/rya/rdftriplestore/RdfCloudTripleStoreSailConfig.java
+++ /dev/null
@@ -1,132 +0,0 @@
-package mvm.rya.rdftriplestore;
-
-/*
- * #%L
- * mvm.rya.rya.sail.impl
- * %%
- * Copyright (C) 2014 Rya
- * %%
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * You may obtain a copy of the License at
- * 
- *      http://www.apache.org/licenses/LICENSE-2.0
- * 
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- * #L%
- */
-
-import org.openrdf.model.*;
-import org.openrdf.model.impl.ValueFactoryImpl;
-import org.openrdf.model.util.GraphUtil;
-import org.openrdf.model.util.GraphUtilException;
-import org.openrdf.sail.config.SailConfigException;
-import org.openrdf.sail.config.SailImplConfigBase;
-
-public class RdfCloudTripleStoreSailConfig extends SailImplConfigBase {
-    
-    public static final String NAMESPACE = "http://www.openrdf.org/config/sail/cloudbasestore#";
-
-	public static final URI SERVER;
-	public static final URI PORT;
-	public static final URI INSTANCE;
-	public static final URI USER;
-	public static final URI PASSWORD;
-
-    static {
-		ValueFactory factory = ValueFactoryImpl.getInstance();
-		SERVER = factory.createURI(NAMESPACE, "server");
-		PORT = factory.createURI(NAMESPACE, "port");
-		INSTANCE = factory.createURI(NAMESPACE, "instance");
-		USER = factory.createURI(NAMESPACE, "user");
-		PASSWORD = factory.createURI(NAMESPACE, "password");
-	}
-
-	private String server = "stratus13";
-
-	private int port = 2181;
-
-	private String user = "root";
-
-	private String password = "password";
-	
-	private String instance = "stratus";
-
-	public String getServer() {
-		return server;
-	}
-
-	public void setServer(String server) {
-		this.server = server;
-	}
-
-	public int getPort() {
-		return port;
-	}
-
-	public void setPort(int port) {
-		this.port = port;
-	}
-
-	public String getUser() {
-		return user;
-	}
-
-	public void setUser(String user) {
-		this.user = user;
-	}
-
-	public String getPassword() {
-		return password;
-	}
-
-	public void setPassword(String password) {
-		this.password = password;
-	}
-
-	public String getInstance() {
-		return instance;
-	}
-
-	public void setInstance(String instance) {
-		this.instance = instance;
-	}
-
-    @Override
-	public void parse(Graph graph, Resource implNode)
-		throws SailConfigException
-	{
-		super.parse(graph, implNode);
-        System.out.println("parsing");
-
-		try {
-			Literal serverLit = GraphUtil.getOptionalObjectLiteral(graph, implNode, SERVER);
-			if (serverLit != null) {
-				setServer(serverLit.getLabel());
-			}
-			Literal portLit = GraphUtil.getOptionalObjectLiteral(graph, implNode, PORT);
-			if (portLit != null) {
-				setPort(Integer.parseInt(portLit.getLabel()));
-			}
-			Literal instList = GraphUtil.getOptionalObjectLiteral(graph, implNode, INSTANCE);
-			if (instList != null) {
-				setInstance(instList.getLabel());
-			}
-			Literal userLit = GraphUtil.getOptionalObjectLiteral(graph, implNode, USER);
-			if (userLit != null) {
-				setUser(userLit.getLabel());
-			}
-			Literal pwdLit = GraphUtil.getOptionalObjectLiteral(graph, implNode, PASSWORD);
-			if (pwdLit != null) {
-				setPassword(pwdLit.getLabel());
-			}
-		}
-		catch (GraphUtilException e) {
-			throw new SailConfigException(e.getMessage(), e);
-		}
-	}
-}
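
The parse method above pulls optional literals out of an RDF configuration graph, keyed by the static URIs declared at the top of the class. A minimal sketch of feeding it one, assuming the class is still on the classpath; the host and port values are illustrative:

    import mvm.rya.rdftriplestore.RdfCloudTripleStoreSailConfig;

    import org.openrdf.model.Graph;
    import org.openrdf.model.Resource;
    import org.openrdf.model.ValueFactory;
    import org.openrdf.model.impl.GraphImpl;
    import org.openrdf.model.impl.ValueFactoryImpl;
    import org.openrdf.sail.config.SailConfigException;

    public class SailConfigParseSketch {
        public static void main(String[] args) throws SailConfigException {
            ValueFactory vf = ValueFactoryImpl.getInstance();
            Graph graph = new GraphImpl();
            Resource implNode = vf.createBNode();
            // Populate the optional literals that parse() looks for
            graph.add(implNode, RdfCloudTripleStoreSailConfig.SERVER, vf.createLiteral("zoo1"));
            graph.add(implNode, RdfCloudTripleStoreSailConfig.PORT, vf.createLiteral("2181"));

            RdfCloudTripleStoreSailConfig config = new RdfCloudTripleStoreSailConfig();
            config.parse(graph, implNode);
            System.out.println(config.getServer() + ":" + config.getPort()); // zoo1:2181
        }
    }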

http://git-wip-us.apache.org/repos/asf/incubator-rya/blob/80faf06d/sail/rya.sail.impl/src/main/java/mvm/rya/rdftriplestore/RyaSailRepository.java
----------------------------------------------------------------------
diff --git a/sail/rya.sail.impl/src/main/java/mvm/rya/rdftriplestore/RyaSailRepository.java b/sail/rya.sail.impl/src/main/java/mvm/rya/rdftriplestore/RyaSailRepository.java
deleted file mode 100644
index a2df04d..0000000
--- a/sail/rya.sail.impl/src/main/java/mvm/rya/rdftriplestore/RyaSailRepository.java
+++ /dev/null
@@ -1,52 +0,0 @@
-package mvm.rya.rdftriplestore;
-
-/*
- * #%L
- * mvm.rya.rya.sail.impl
- * %%
- * Copyright (C) 2014 Rya
- * %%
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * You may obtain a copy of the License at
- * 
- *      http://www.apache.org/licenses/LICENSE-2.0
- * 
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- * #L%
- */
-
-import org.openrdf.repository.RepositoryException;
-import org.openrdf.repository.sail.SailRepository;
-import org.openrdf.repository.sail.SailRepositoryConnection;
-import org.openrdf.sail.Sail;
-import org.openrdf.sail.SailException;
-
-/**
- * Created by IntelliJ IDEA.
- * User: RoshanP
- * Date: 3/23/12
- * Time: 10:05 AM
- * To change this template use File | Settings | File Templates.
- */
-public class RyaSailRepository extends SailRepository{
-    public RyaSailRepository(Sail sail) {
-        super(sail);
-    }
-
-    @Override
-    public SailRepositoryConnection getConnection() throws RepositoryException {
-        try
-        {
-            return new RyaSailRepositoryConnection(this, this.getSail().getConnection());
-        }
-        catch(SailException e)
-        {
-            throw new RepositoryException(e);
-        }
-    }
-}

http://git-wip-us.apache.org/repos/asf/incubator-rya/blob/80faf06d/sail/rya.sail.impl/src/main/java/mvm/rya/rdftriplestore/RyaSailRepositoryConnection.java
----------------------------------------------------------------------
diff --git a/sail/rya.sail.impl/src/main/java/mvm/rya/rdftriplestore/RyaSailRepositoryConnection.java b/sail/rya.sail.impl/src/main/java/mvm/rya/rdftriplestore/RyaSailRepositoryConnection.java
deleted file mode 100644
index fd5e4d4..0000000
--- a/sail/rya.sail.impl/src/main/java/mvm/rya/rdftriplestore/RyaSailRepositoryConnection.java
+++ /dev/null
@@ -1,108 +0,0 @@
-package mvm.rya.rdftriplestore;
-
-/*
- * #%L
- * mvm.rya.rya.sail.impl
- * %%
- * Copyright (C) 2014 Rya
- * %%
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * You may obtain a copy of the License at
- * 
- *      http://www.apache.org/licenses/LICENSE-2.0
- * 
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- * #L%
- */
-
-import java.io.IOException;
-import java.io.InputStream;
-import java.io.Reader;
-
-import mvm.rya.rdftriplestore.utils.CombineContextsRdfInserter;
-
-import org.openrdf.OpenRDFUtil;
-import org.openrdf.model.Resource;
-import org.openrdf.repository.RepositoryException;
-import org.openrdf.repository.sail.SailRepository;
-import org.openrdf.repository.sail.SailRepositoryConnection;
-import org.openrdf.repository.util.RDFLoader;
-import org.openrdf.rio.RDFFormat;
-import org.openrdf.rio.RDFHandlerException;
-import org.openrdf.rio.RDFParseException;
-import org.openrdf.sail.SailConnection;
-
-/**
- * The real reason for this is so that we can combine contexts from an input stream/reader with the given contexts in the add function
- */
-public class RyaSailRepositoryConnection extends SailRepositoryConnection {
-
-    protected RyaSailRepositoryConnection(SailRepository repository, SailConnection sailConnection) {
-        super(repository, sailConnection);
-    }
-
-    @Override
-    public void add(InputStream in, String baseURI, RDFFormat dataFormat, Resource... contexts) throws IOException, RDFParseException,
-            RepositoryException {
-        OpenRDFUtil.verifyContextNotNull(contexts);
-
-        CombineContextsRdfInserter rdfInserter = new CombineContextsRdfInserter(this);
-        rdfInserter.enforceContext(contexts);
-
-        boolean localTransaction = startLocalTransaction();
-        try {
-            RDFLoader loader = new RDFLoader(getParserConfig(), getValueFactory());
-            loader.load(in, baseURI, dataFormat, rdfInserter);
-
-            conditionalCommit(localTransaction);
-        } catch (RDFHandlerException e) {
-            conditionalRollback(localTransaction);
-
-            throw ((RepositoryException) e.getCause());
-        } catch (RDFParseException e) {
-            conditionalRollback(localTransaction);
-            throw e;
-        } catch (IOException e) {
-            conditionalRollback(localTransaction);
-            throw e;
-        } catch (RuntimeException e) {
-            conditionalRollback(localTransaction);
-            throw e;
-        }
-    }
-
-    @Override
-    public void add(Reader reader, String baseURI, RDFFormat dataFormat, Resource... contexts) throws IOException, RDFParseException,
-            RepositoryException {
-        OpenRDFUtil.verifyContextNotNull(contexts);
-
-        CombineContextsRdfInserter rdfInserter = new CombineContextsRdfInserter(this);
-        rdfInserter.enforceContext(contexts);
-
-        boolean localTransaction = startLocalTransaction();
-        try {
-            RDFLoader loader = new RDFLoader(getParserConfig(), getValueFactory());
-            loader.load(reader, baseURI, dataFormat, rdfInserter);
-
-            conditionalCommit(localTransaction);
-        } catch (RDFHandlerException e) {
-            conditionalRollback(localTransaction);
-
-            throw ((RepositoryException) e.getCause());
-        } catch (RDFParseException e) {
-            conditionalRollback(localTransaction);
-            throw e;
-        } catch (IOException e) {
-            conditionalRollback(localTransaction);
-            throw e;
-        } catch (RuntimeException e) {
-            conditionalRollback(localTransaction);
-            throw e;
-        }
-    }
-}
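
In practice the override matters when loading a quad format such as TriG: named graphs already present in the data are kept and combined with any contexts passed to add(...), instead of being replaced by them. A hedged usage sketch; the file name, base URI, and graph URI are illustrative:

    import java.io.File;

    import mvm.rya.rdftriplestore.RyaSailRepository;

    import org.openrdf.model.URI;
    import org.openrdf.repository.sail.SailRepositoryConnection;
    import org.openrdf.rio.RDFFormat;

    public class CombineContextsSketch {
        public static void load(RyaSailRepository repo) throws Exception {
            // Actually a RyaSailRepositoryConnection, per the override above
            SailRepositoryConnection conn = repo.getConnection();
            try {
                URI extraGraph = repo.getValueFactory().createURI("urn:example:graph1");
                // Statements in data.trig keep their own contexts; extraGraph is added as well
                conn.add(new File("data.trig"), "urn:example:base", RDFFormat.TRIG, extraGraph);
                conn.commit();
            } finally {
                conn.close();
            }
        }
    }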

http://git-wip-us.apache.org/repos/asf/incubator-rya/blob/80faf06d/sail/rya.sail.impl/src/main/java/mvm/rya/rdftriplestore/evaluation/ExternalBatchingIterator.java
----------------------------------------------------------------------
diff --git a/sail/rya.sail.impl/src/main/java/mvm/rya/rdftriplestore/evaluation/ExternalBatchingIterator.java b/sail/rya.sail.impl/src/main/java/mvm/rya/rdftriplestore/evaluation/ExternalBatchingIterator.java
deleted file mode 100644
index f3ea4b8..0000000
--- a/sail/rya.sail.impl/src/main/java/mvm/rya/rdftriplestore/evaluation/ExternalBatchingIterator.java
+++ /dev/null
@@ -1,32 +0,0 @@
-package mvm.rya.rdftriplestore.evaluation;
-
-/*
- * #%L
- * mvm.rya.rya.sail.impl
- * %%
- * Copyright (C) 2014 Rya
- * %%
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * You may obtain a copy of the License at
- * 
- *      http://www.apache.org/licenses/LICENSE-2.0
- * 
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- * #L%
- */
-
-import info.aduna.iteration.CloseableIteration;
-
-import java.util.Collection;
-
-import org.openrdf.query.BindingSet;
-import org.openrdf.query.QueryEvaluationException;
-
-public interface ExternalBatchingIterator {
-    public CloseableIteration<BindingSet, QueryEvaluationException> evaluate(Collection<BindingSet> bindingset) throws QueryEvaluationException;
-}
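
ExternalBatchingIterator gives evaluation nodes a way to hand an entire batch of binding sets to an external source in a single call, rather than probing once per binding set. A minimal, purely illustrative implementor sketch:

    import info.aduna.iteration.CloseableIteration;
    import info.aduna.iteration.EmptyIteration;

    import java.util.Collection;

    import mvm.rya.rdftriplestore.evaluation.ExternalBatchingIterator;

    import org.openrdf.query.BindingSet;
    import org.openrdf.query.QueryEvaluationException;

    public class NoOpBatchingIterator implements ExternalBatchingIterator {
        @Override
        public CloseableIteration<BindingSet, QueryEvaluationException> evaluate(
                Collection<BindingSet> bindingset) throws QueryEvaluationException {
            // A real implementation would issue one scan for the whole batch
            // and stream back the joined results.
            return new EmptyIteration<BindingSet, QueryEvaluationException>();
        }
    }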


[35/56] [abbrv] incubator-rya git commit: RYA-7 POM and License Clean-up for Apache Move

Posted by mi...@apache.org.
http://git-wip-us.apache.org/repos/asf/incubator-rya/blob/80faf06d/extras/indexing/src/main/java/mvm/rya/indexing/accumulo/freetext/query/SimpleNode.java
----------------------------------------------------------------------
diff --git a/extras/indexing/src/main/java/mvm/rya/indexing/accumulo/freetext/query/SimpleNode.java b/extras/indexing/src/main/java/mvm/rya/indexing/accumulo/freetext/query/SimpleNode.java
index a530ddf..d372615 100644
--- a/extras/indexing/src/main/java/mvm/rya/indexing/accumulo/freetext/query/SimpleNode.java
+++ b/extras/indexing/src/main/java/mvm/rya/indexing/accumulo/freetext/query/SimpleNode.java
@@ -3,25 +3,906 @@
 package mvm.rya.indexing.accumulo.freetext.query;
 
 /*
- * #%L
- * mvm.rya.indexing.accumulo
- * %%
- * Copyright (C) 2014 Rya
- * %%
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * You may obtain a copy of the License at
- * 
- *      http://www.apache.org/licenses/LICENSE-2.0
- * 
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- * #L%
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ * 
+ *   http://www.apache.org/licenses/LICENSE-2.0
+ * 
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied.  See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
  */
 
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ * 
+ *   http://www.apache.org/licenses/LICENSE-2.0
+ * 
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied.  See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+
+
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ * 
+ *   http://www.apache.org/licenses/LICENSE-2.0
+ * 
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied.  See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+
+
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ * 
+ *   http://www.apache.org/licenses/LICENSE-2.0
+ * 
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied.  See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+
+
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ * 
+ *   http://www.apache.org/licenses/LICENSE-2.0
+ * 
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied.  See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+
+
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ * 
+ *   http://www.apache.org/licenses/LICENSE-2.0
+ * 
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied.  See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+
+
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ * 
+ *   http://www.apache.org/licenses/LICENSE-2.0
+ * 
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied.  See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+
+
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ * 
+ *   http://www.apache.org/licenses/LICENSE-2.0
+ * 
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied.  See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+
+
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ * 
+ *   http://www.apache.org/licenses/LICENSE-2.0
+ * 
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied.  See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+
+
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ * 
+ *   http://www.apache.org/licenses/LICENSE-2.0
+ * 
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied.  See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+
+
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ * 
+ *   http://www.apache.org/licenses/LICENSE-2.0
+ * 
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied.  See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+
+
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ * 
+ *   http://www.apache.org/licenses/LICENSE-2.0
+ * 
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied.  See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+
+
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ * 
+ *   http://www.apache.org/licenses/LICENSE-2.0
+ * 
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied.  See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+
+
+
 public
 class SimpleNode implements Node {
 

http://git-wip-us.apache.org/repos/asf/incubator-rya/blob/80faf06d/extras/indexing/src/main/java/mvm/rya/indexing/accumulo/freetext/query/Token.java
----------------------------------------------------------------------
diff --git a/extras/indexing/src/main/java/mvm/rya/indexing/accumulo/freetext/query/Token.java b/extras/indexing/src/main/java/mvm/rya/indexing/accumulo/freetext/query/Token.java
index 1ef8ec9..95292d3 100644
--- a/extras/indexing/src/main/java/mvm/rya/indexing/accumulo/freetext/query/Token.java
+++ b/extras/indexing/src/main/java/mvm/rya/indexing/accumulo/freetext/query/Token.java
@@ -3,25 +3,906 @@
 package mvm.rya.indexing.accumulo.freetext.query;
 
 /*
- * #%L
- * mvm.rya.indexing.accumulo
- * %%
- * Copyright (C) 2014 Rya
- * %%
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * You may obtain a copy of the License at
- * 
- *      http://www.apache.org/licenses/LICENSE-2.0
- * 
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- * #L%
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ * 
+ *   http://www.apache.org/licenses/LICENSE-2.0
+ * 
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied.  See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
  */
 
+
+
 /**
  * Describes the input token stream.
  */
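
Token.java here is the token class for the freetext query parser; its body is elided in
this excerpt. For orientation only, a minimal sketch follows — it assumes the standard
JavaCC-generated template, so every name below (kind, image, next, specialToken) comes
from that template rather than from this diff:

    public class Token {
      /** The kind of this token, an index into the parser's token-type constants. */
      public int kind;

      /** Position of the matched text in the input stream. */
      public int beginLine, beginColumn, endLine, endColumn;

      /** The literal text that was matched for this token. */
      public String image;

      /** The next regular token from the input stream, or null at end of input. */
      public Token next;

      /** Skipped tokens (e.g. whitespace) preceding this one, if the grammar tracks them. */
      public Token specialToken;

      /** Returns the matched text, the most useful string form of a token. */
      public String toString() {
        return image;
      }
    }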


[29/56] [abbrv] incubator-rya git commit: RYA-7 POM and License Clean-up for Apache Move

Posted by mi...@apache.org.
http://git-wip-us.apache.org/repos/asf/incubator-rya/blob/80faf06d/extras/rya.prospector/src/test/java/mvm/rya/joinselect/mr/CardinalityIdentityReducerTest.java
----------------------------------------------------------------------
diff --git a/extras/rya.prospector/src/test/java/mvm/rya/joinselect/mr/CardinalityIdentityReducerTest.java b/extras/rya.prospector/src/test/java/mvm/rya/joinselect/mr/CardinalityIdentityReducerTest.java
index eb480ad..4a57f31 100644
--- a/extras/rya.prospector/src/test/java/mvm/rya/joinselect/mr/CardinalityIdentityReducerTest.java
+++ b/extras/rya.prospector/src/test/java/mvm/rya/joinselect/mr/CardinalityIdentityReducerTest.java
@@ -1,25 +1,26 @@
 package mvm.rya.joinselect.mr;
 
 /*
- * #%L
- * mvm.rya.rya.prospector
- * %%
- * Copyright (C) 2014 - 2015 Rya
- * %%
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * You may obtain a copy of the License at
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
  * 
- *      http://www.apache.org/licenses/LICENSE-2.0
+ *   http://www.apache.org/licenses/LICENSE-2.0
  * 
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- * #L%
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied.  See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
  */
 
+
+
 import java.io.IOException;
 import java.util.ArrayList;
 import java.util.List;

http://git-wip-us.apache.org/repos/asf/incubator-rya/blob/80faf06d/extras/rya.prospector/src/test/java/mvm/rya/joinselect/mr/CardinalityMapperTest.java
----------------------------------------------------------------------
diff --git a/extras/rya.prospector/src/test/java/mvm/rya/joinselect/mr/CardinalityMapperTest.java b/extras/rya.prospector/src/test/java/mvm/rya/joinselect/mr/CardinalityMapperTest.java
index 8647294..3818300 100644
--- a/extras/rya.prospector/src/test/java/mvm/rya/joinselect/mr/CardinalityMapperTest.java
+++ b/extras/rya.prospector/src/test/java/mvm/rya/joinselect/mr/CardinalityMapperTest.java
@@ -1,25 +1,26 @@
 package mvm.rya.joinselect.mr;
 
 /*
- * #%L
- * mvm.rya.rya.prospector
- * %%
- * Copyright (C) 2014 - 2015 Rya
- * %%
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * You may obtain a copy of the License at
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
  * 
- *      http://www.apache.org/licenses/LICENSE-2.0
+ *   http://www.apache.org/licenses/LICENSE-2.0
  * 
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- * #L%
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied.  See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
  */
 
+
+
 import java.io.IOException;
 
 import mvm.rya.joinselect.mr.JoinSelectProspectOutput;

http://git-wip-us.apache.org/repos/asf/incubator-rya/blob/80faf06d/extras/rya.prospector/src/test/java/mvm/rya/joinselect/mr/FullTableSizeTest.java
----------------------------------------------------------------------
diff --git a/extras/rya.prospector/src/test/java/mvm/rya/joinselect/mr/FullTableSizeTest.java b/extras/rya.prospector/src/test/java/mvm/rya/joinselect/mr/FullTableSizeTest.java
index c070488..705edb1 100644
--- a/extras/rya.prospector/src/test/java/mvm/rya/joinselect/mr/FullTableSizeTest.java
+++ b/extras/rya.prospector/src/test/java/mvm/rya/joinselect/mr/FullTableSizeTest.java
@@ -1,25 +1,26 @@
 package mvm.rya.joinselect.mr;
 
 /*
- * #%L
- * mvm.rya.rya.prospector
- * %%
- * Copyright (C) 2014 - 2015 Rya
- * %%
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * You may obtain a copy of the License at
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
  * 
- *      http://www.apache.org/licenses/LICENSE-2.0
+ *   http://www.apache.org/licenses/LICENSE-2.0
  * 
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- * #L%
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied.  See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
  */
 
+
+
 import java.io.IOException;
 
 import mvm.rya.joinselect.mr.FullTableSize;

http://git-wip-us.apache.org/repos/asf/incubator-rya/blob/80faf06d/extras/rya.prospector/src/test/java/mvm/rya/joinselect/mr/JoinReducerTest.java
----------------------------------------------------------------------
diff --git a/extras/rya.prospector/src/test/java/mvm/rya/joinselect/mr/JoinReducerTest.java b/extras/rya.prospector/src/test/java/mvm/rya/joinselect/mr/JoinReducerTest.java
index 5d2acb7..be03565 100644
--- a/extras/rya.prospector/src/test/java/mvm/rya/joinselect/mr/JoinReducerTest.java
+++ b/extras/rya.prospector/src/test/java/mvm/rya/joinselect/mr/JoinReducerTest.java
@@ -1,25 +1,26 @@
 package mvm.rya.joinselect.mr;
 
 /*
- * #%L
- * mvm.rya.rya.prospector
- * %%
- * Copyright (C) 2014 - 2015 Rya
- * %%
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * You may obtain a copy of the License at
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
  * 
- *      http://www.apache.org/licenses/LICENSE-2.0
+ *   http://www.apache.org/licenses/LICENSE-2.0
  * 
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- * #L%
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied.  See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
  */
 
+
+
 import java.io.IOException;
 import java.util.ArrayList;
 import java.util.List;

http://git-wip-us.apache.org/repos/asf/incubator-rya/blob/80faf06d/extras/rya.prospector/src/test/java/mvm/rya/joinselect/mr/JoinSelectMapperTest.java
----------------------------------------------------------------------
diff --git a/extras/rya.prospector/src/test/java/mvm/rya/joinselect/mr/JoinSelectMapperTest.java b/extras/rya.prospector/src/test/java/mvm/rya/joinselect/mr/JoinSelectMapperTest.java
index e9281da..0d53b90 100644
--- a/extras/rya.prospector/src/test/java/mvm/rya/joinselect/mr/JoinSelectMapperTest.java
+++ b/extras/rya.prospector/src/test/java/mvm/rya/joinselect/mr/JoinSelectMapperTest.java
@@ -1,25 +1,26 @@
 package mvm.rya.joinselect.mr;
 
 /*
- * #%L
- * mvm.rya.rya.prospector
- * %%
- * Copyright (C) 2014 - 2015 Rya
- * %%
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * You may obtain a copy of the License at
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
  * 
- *      http://www.apache.org/licenses/LICENSE-2.0
+ *   http://www.apache.org/licenses/LICENSE-2.0
  * 
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- * #L%
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied.  See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
  */
 
+
+
 import java.io.IOException;
 import java.util.Map;
 

http://git-wip-us.apache.org/repos/asf/incubator-rya/blob/80faf06d/extras/rya.prospector/src/test/java/mvm/rya/joinselect/mr/JoinSelectProspectOutputTest.java
----------------------------------------------------------------------
diff --git a/extras/rya.prospector/src/test/java/mvm/rya/joinselect/mr/JoinSelectProspectOutputTest.java b/extras/rya.prospector/src/test/java/mvm/rya/joinselect/mr/JoinSelectProspectOutputTest.java
index c00f8ff..19c90a3 100644
--- a/extras/rya.prospector/src/test/java/mvm/rya/joinselect/mr/JoinSelectProspectOutputTest.java
+++ b/extras/rya.prospector/src/test/java/mvm/rya/joinselect/mr/JoinSelectProspectOutputTest.java
@@ -1,5 +1,25 @@
 package mvm.rya.joinselect.mr;
 
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ * 
+ *   http://www.apache.org/licenses/LICENSE-2.0
+ * 
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied.  See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+
+
 import static org.junit.Assert.*;
 
 import org.junit.Test;

http://git-wip-us.apache.org/repos/asf/incubator-rya/blob/80faf06d/extras/rya.prospector/src/test/java/mvm/rya/joinselect/mr/JoinSelectStatisticsSumTest.java
----------------------------------------------------------------------
diff --git a/extras/rya.prospector/src/test/java/mvm/rya/joinselect/mr/JoinSelectStatisticsSumTest.java b/extras/rya.prospector/src/test/java/mvm/rya/joinselect/mr/JoinSelectStatisticsSumTest.java
index 525cd94..98236d3 100644
--- a/extras/rya.prospector/src/test/java/mvm/rya/joinselect/mr/JoinSelectStatisticsSumTest.java
+++ b/extras/rya.prospector/src/test/java/mvm/rya/joinselect/mr/JoinSelectStatisticsSumTest.java
@@ -1,25 +1,26 @@
 package mvm.rya.joinselect.mr;
 
 /*
- * #%L
- * mvm.rya.rya.prospector
- * %%
- * Copyright (C) 2014 - 2015 Rya
- * %%
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * You may obtain a copy of the License at
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
  * 
- *      http://www.apache.org/licenses/LICENSE-2.0
+ *   http://www.apache.org/licenses/LICENSE-2.0
  * 
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- * #L%
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied.  See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
  */
 
+
+
 import java.io.IOException;
 
 import mvm.rya.joinselect.mr.JoinSelectStatisticsSum;

http://git-wip-us.apache.org/repos/asf/incubator-rya/blob/80faf06d/extras/rya.prospector/src/test/java/mvm/rya/joinselect/mr/JoinSelectStatisticsTest.java
----------------------------------------------------------------------
diff --git a/extras/rya.prospector/src/test/java/mvm/rya/joinselect/mr/JoinSelectStatisticsTest.java b/extras/rya.prospector/src/test/java/mvm/rya/joinselect/mr/JoinSelectStatisticsTest.java
index c9a04a9..7061a2c 100644
--- a/extras/rya.prospector/src/test/java/mvm/rya/joinselect/mr/JoinSelectStatisticsTest.java
+++ b/extras/rya.prospector/src/test/java/mvm/rya/joinselect/mr/JoinSelectStatisticsTest.java
@@ -1,25 +1,26 @@
 package mvm.rya.joinselect.mr;
 
 /*
- * #%L
- * mvm.rya.rya.prospector
- * %%
- * Copyright (C) 2014 - 2015 Rya
- * %%
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * You may obtain a copy of the License at
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
  * 
- *      http://www.apache.org/licenses/LICENSE-2.0
+ *   http://www.apache.org/licenses/LICENSE-2.0
  * 
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- * #L%
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied.  See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
  */
 
+
+
 import static mvm.rya.joinselect.mr.utils.JoinSelectConstants.INPUTPATH;
 import static mvm.rya.joinselect.mr.utils.JoinSelectConstants.INSTANCE;
 import static mvm.rya.joinselect.mr.utils.JoinSelectConstants.OUTPUTPATH;
@@ -83,6 +84,7 @@ import org.apache.hadoop.fs.Path;
 import org.apache.hadoop.io.IntWritable;
 import org.apache.hadoop.io.Text;
 import org.apache.hadoop.mapreduce.Job;
+import org.apache.hadoop.mapreduce.MRJobConfig;
 import org.apache.hadoop.mapreduce.lib.input.MultipleInputs;
 import org.apache.hadoop.mapreduce.lib.input.SequenceFileInputFormat;
 import org.apache.hadoop.mapreduce.lib.output.SequenceFileOutputFormat;
@@ -124,7 +126,7 @@ public class JoinSelectStatisticsTest {
 
             Job job = new Job(conf, this.getClass().getSimpleName() + "_" + System.currentTimeMillis());
             job.setJarByClass(this.getClass());
-            job.setUserClassesTakesPrecedence(true);
+            conf.setBoolean(MRJobConfig.MAPREDUCE_JOB_USER_CLASSPATH_FIRST, true);
             
             initTabToSeqFileJob(job, inTable, outPath);
             job.setMapperClass(JoinSelectMapper.class);
@@ -153,7 +155,7 @@ public class JoinSelectStatisticsTest {
 
             Job job = new Job(conf, this.getClass().getSimpleName() + "_" + System.currentTimeMillis());
             job.setJarByClass(this.getClass());
-            job.setUserClassesTakesPrecedence(true);
+            conf.setBoolean(MRJobConfig.MAPREDUCE_JOB_USER_CLASSPATH_FIRST, true);
             
             initTabToSeqFileJob(job, inTable, outPath);
             job.setMapperClass(CardinalityMapper.class);
@@ -175,9 +177,9 @@ public class JoinSelectStatisticsTest {
             Configuration conf = getConf();
             String outpath = conf.get(OUTPUTPATH);
     
-            Job job = new Job(getConf(), this.getClass().getSimpleName() + "_" + System.currentTimeMillis());
+            Job job = new Job(conf, this.getClass().getSimpleName() + "_" + System.currentTimeMillis());
             job.setJarByClass(this.getClass());
-            job.setUserClassesTakesPrecedence(true);
+            conf.setBoolean(MRJobConfig.MAPREDUCE_JOB_USER_CLASSPATH_FIRST, true);
             
             MultipleInputs.addInputPath(job, new Path(PROSPECTSOUT.getAbsolutePath()), 
                     SequenceFileInputFormat.class, JoinSelectAggregateMapper.class);
@@ -867,4 +869,4 @@ public class JoinSelectStatisticsTest {
     
     
     
-   
\ No newline at end of file
+   
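
[Editor's note on the hunks above] The three changes in JoinSelectStatisticsTest swap the old `job.setUserClassesTakesPrecedence(true)` call for the Hadoop 2 configuration key exposed as `MRJobConfig.MAPREDUCE_JOB_USER_CLASSPATH_FIRST` ("mapreduce.job.user.classpath.first"). One caveat worth noting: `Job` copies the `Configuration` it is handed, so a flag set on `conf` after the `Job` has been constructed (as these hunks do) may not reach the submitted job. A minimal sketch that sets the flag first, using the non-deprecated `Job.getInstance` factory and a hypothetical class name (not code from this patch):

    import org.apache.hadoop.conf.Configuration;
    import org.apache.hadoop.mapreduce.Job;
    import org.apache.hadoop.mapreduce.MRJobConfig;

    public class UserClasspathFirstSketch {  // hypothetical class, for illustration only
        public static void main(String[] args) throws Exception {
            Configuration conf = new Configuration();
            // "mapreduce.job.user.classpath.first": make user-supplied jars win
            // over Hadoop's bundled copies on the task classpath.
            conf.setBoolean(MRJobConfig.MAPREDUCE_JOB_USER_CLASSPATH_FIRST, true);
            // Set the flag *before* creating the Job: Job copies the
            // Configuration it receives, so later changes to `conf` do not
            // propagate to the job.
            Job job = Job.getInstance(conf, "user-classpath-first-demo");
            job.setJarByClass(UserClasspathFirstSketch.class);
        }
    }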

http://git-wip-us.apache.org/repos/asf/incubator-rya/blob/80faf06d/extras/rya.prospector/src/test/resources/stats_cluster_config.xml
----------------------------------------------------------------------
diff --git a/extras/rya.prospector/src/test/resources/stats_cluster_config.xml b/extras/rya.prospector/src/test/resources/stats_cluster_config.xml
index 7287813..5c96044 100644
--- a/extras/rya.prospector/src/test/resources/stats_cluster_config.xml
+++ b/extras/rya.prospector/src/test/resources/stats_cluster_config.xml
@@ -1,4 +1,25 @@
 <?xml version="1.0"?>
+
+
+<!--
+Licensed to the Apache Software Foundation (ASF) under one
+or more contributor license agreements.  See the NOTICE file
+distributed with this work for additional information
+regarding copyright ownership.  The ASF licenses this file
+to you under the Apache License, Version 2.0 (the
+"License"); you may not use this file except in compliance
+with the License.  You may obtain a copy of the License at
+
+  http://www.apache.org/licenses/LICENSE-2.0
+
+Unless required by applicable law or agreed to in writing,
+software distributed under the License is distributed on an
+"AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+KIND, either express or implied.  See the License for the
+specific language governing permissions and limitations
+under the License.
+-->
+
 <?xml-stylesheet type="text/xsl" href="configuration.xsl"?>
 <configuration>
    <!-- Accumulo info -->

http://git-wip-us.apache.org/repos/asf/incubator-rya/blob/80faf06d/extras/tinkerpop.rya/pom.xml
----------------------------------------------------------------------
diff --git a/extras/tinkerpop.rya/pom.xml b/extras/tinkerpop.rya/pom.xml
index 51e21eb..88d7a09 100644
--- a/extras/tinkerpop.rya/pom.xml
+++ b/extras/tinkerpop.rya/pom.xml
@@ -1,42 +1,64 @@
 <?xml version="1.0" encoding="UTF-8"?>
+
+<!--
+Licensed to the Apache Software Foundation (ASF) under one
+or more contributor license agreements.  See the NOTICE file
+distributed with this work for additional information
+regarding copyright ownership.  The ASF licenses this file
+to you under the Apache License, Version 2.0 (the
+"License"); you may not use this file except in compliance
+with the License.  You may obtain a copy of the License at
+
+  http://www.apache.org/licenses/LICENSE-2.0
+
+Unless required by applicable law or agreed to in writing,
+software distributed under the License is distributed on an
+"AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+KIND, either express or implied.  See the License for the
+specific language governing permissions and limitations
+under the License.
+-->
+
 <project xmlns="http://maven.apache.org/POM/4.0.0" xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance" xsi:schemaLocation="http://maven.apache.org/POM/4.0.0 http://maven.apache.org/xsd/maven-4.0.0.xsd">
+    <modelVersion>4.0.0</modelVersion>
     <parent>
-        <groupId>mvm.rya</groupId>
+        <groupId>org.apache.rya</groupId>
         <artifactId>rya.extras</artifactId>
         <version>3.2.10-SNAPSHOT</version>
     </parent>
-    <modelVersion>4.0.0</modelVersion>
-    <name>${project.groupId}.${project.artifactId}</name>
 
     <artifactId>tinkerpop.rya</artifactId>
+    <name>Apache Rya Tinkerpop</name>
 
     <dependencies>
         <dependency>
-            <groupId>junit</groupId>
-            <artifactId>junit</artifactId>
-            <scope>test</scope>
-        </dependency>
-        <dependency>
-            <groupId>mvm.rya</groupId>
-            <artifactId>rya.sail.impl</artifactId>
+            <groupId>org.apache.rya</groupId>
+            <artifactId>rya.sail</artifactId>
         </dependency>
         <dependency>
-            <groupId>mvm.rya</groupId>
+            <groupId>org.apache.rya</groupId>
             <artifactId>accumulo.rya</artifactId>
         </dependency>
+        
         <dependency>
             <groupId>com.tinkerpop.gremlin</groupId>
             <artifactId>gremlin-groovy</artifactId>
         </dependency>
+
         <dependency>
             <groupId>com.tinkerpop.rexster</groupId>
             <artifactId>rexster-server</artifactId>
-            <version>${blueprints.version}</version>
         </dependency>
         <dependency>
             <groupId>com.tinkerpop.blueprints</groupId>
             <artifactId>blueprints-sail-graph</artifactId>
         </dependency>
+
+        <dependency>
+            <groupId>junit</groupId>
+            <artifactId>junit</artifactId>
+            <scope>test</scope>
+        </dependency>
     </dependencies>
 
     <build>
@@ -44,7 +66,6 @@
             <plugin>
                 <groupId>org.codehaus.gmaven</groupId>
                 <artifactId>gmaven-plugin</artifactId>
-                <version>1.3</version>
                 <dependencies>
                     <dependency>
                         <groupId>org.codehaus.groovy</groupId>
@@ -80,57 +101,4 @@
         </plugins>
     </build>
 
-    <profiles>
-        <profile>
-            <id>accumulo</id>
-            <activation>
-                <activeByDefault>true</activeByDefault>
-            </activation>
-            <dependencies>
-                <dependency>
-                    <groupId>org.apache.accumulo</groupId>
-                    <artifactId>accumulo-core</artifactId>
-                    <optional>true</optional>
-                </dependency>
-            </dependencies>
-        </profile>
-        <profile>
-            <id>cloudbase</id>
-            <activation>
-                <activeByDefault>false</activeByDefault>
-            </activation>
-            <dependencies>
-                <dependency>
-                    <groupId>com.texeltek</groupId>
-                    <artifactId>accumulo-cloudbase-shim</artifactId>
-                    <optional>true</optional>
-                </dependency>
-            </dependencies>
-        </profile>
-        <profile>
-            <id>uberjar</id>
-            <build>
-                <plugins>
-                    <plugin>
-                        <artifactId>maven-assembly-plugin</artifactId>
-                        <configuration>
-                            <descriptorRefs>
-                                <descriptorRef>jar-with-dependencies</descriptorRef>
-                            </descriptorRefs>
-                        </configuration>
-                        <executions>
-                            <execution>
-                                <id>make-assembly</id>
-                                <phase>package</phase>
-                                <goals>
-                                    <goal>single</goal>
-                                </goals>
-                            </execution>
-                        </executions>
-                    </plugin>
-                </plugins>
-            </build>
-        </profile>
-    </profiles>
-
 </project>

http://git-wip-us.apache.org/repos/asf/incubator-rya/blob/80faf06d/extras/tinkerpop.rya/src/main/groovy/mvm/rya/blueprints/config/RyaGraphConfiguration.groovy
----------------------------------------------------------------------
diff --git a/extras/tinkerpop.rya/src/main/groovy/mvm/rya/blueprints/config/RyaGraphConfiguration.groovy b/extras/tinkerpop.rya/src/main/groovy/mvm/rya/blueprints/config/RyaGraphConfiguration.groovy
index 6559370..fc3419d 100644
--- a/extras/tinkerpop.rya/src/main/groovy/mvm/rya/blueprints/config/RyaGraphConfiguration.groovy
+++ b/extras/tinkerpop.rya/src/main/groovy/mvm/rya/blueprints/config/RyaGraphConfiguration.groovy
@@ -1,3 +1,22 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ * 
+ *   http://www.apache.org/licenses/LICENSE-2.0
+ * 
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied.  See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+
 package mvm.rya.blueprints.config
 
 import com.tinkerpop.blueprints.Direction;

http://git-wip-us.apache.org/repos/asf/incubator-rya/blob/80faf06d/extras/tinkerpop.rya/src/main/groovy/mvm/rya/blueprints/sail/RyaSailEdge.groovy
----------------------------------------------------------------------
diff --git a/extras/tinkerpop.rya/src/main/groovy/mvm/rya/blueprints/sail/RyaSailEdge.groovy b/extras/tinkerpop.rya/src/main/groovy/mvm/rya/blueprints/sail/RyaSailEdge.groovy
index 13a6470..73c4fc1 100644
--- a/extras/tinkerpop.rya/src/main/groovy/mvm/rya/blueprints/sail/RyaSailEdge.groovy
+++ b/extras/tinkerpop.rya/src/main/groovy/mvm/rya/blueprints/sail/RyaSailEdge.groovy
@@ -1,3 +1,22 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ * 
+ *   http://www.apache.org/licenses/LICENSE-2.0
+ * 
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied.  See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+
 //package mvm.rya.blueprints.sail
 //
 //import com.tinkerpop.blueprints.pgm.impls.sail.SailEdge

http://git-wip-us.apache.org/repos/asf/incubator-rya/blob/80faf06d/extras/tinkerpop.rya/src/main/groovy/mvm/rya/blueprints/sail/RyaSailEdgeSequence.groovy
----------------------------------------------------------------------
diff --git a/extras/tinkerpop.rya/src/main/groovy/mvm/rya/blueprints/sail/RyaSailEdgeSequence.groovy b/extras/tinkerpop.rya/src/main/groovy/mvm/rya/blueprints/sail/RyaSailEdgeSequence.groovy
index 2bf26af..8d04c75 100644
--- a/extras/tinkerpop.rya/src/main/groovy/mvm/rya/blueprints/sail/RyaSailEdgeSequence.groovy
+++ b/extras/tinkerpop.rya/src/main/groovy/mvm/rya/blueprints/sail/RyaSailEdgeSequence.groovy
@@ -1,3 +1,22 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ * 
+ *   http://www.apache.org/licenses/LICENSE-2.0
+ * 
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied.  See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+
 package mvm.rya.blueprints.sail
 
 import com.tinkerpop.blueprints.Edge

http://git-wip-us.apache.org/repos/asf/incubator-rya/blob/80faf06d/extras/tinkerpop.rya/src/main/groovy/mvm/rya/blueprints/sail/RyaSailGraph.groovy
----------------------------------------------------------------------
diff --git a/extras/tinkerpop.rya/src/main/groovy/mvm/rya/blueprints/sail/RyaSailGraph.groovy b/extras/tinkerpop.rya/src/main/groovy/mvm/rya/blueprints/sail/RyaSailGraph.groovy
index e5783e2..7c78e31 100644
--- a/extras/tinkerpop.rya/src/main/groovy/mvm/rya/blueprints/sail/RyaSailGraph.groovy
+++ b/extras/tinkerpop.rya/src/main/groovy/mvm/rya/blueprints/sail/RyaSailGraph.groovy
@@ -1,3 +1,22 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ * 
+ *   http://www.apache.org/licenses/LICENSE-2.0
+ * 
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied.  See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+
 package mvm.rya.blueprints.sail
 
 import com.tinkerpop.blueprints.impls.sail.SailGraph

http://git-wip-us.apache.org/repos/asf/incubator-rya/blob/80faf06d/extras/tinkerpop.rya/src/main/groovy/mvm/rya/blueprints/sail/RyaSailVertex.groovy
----------------------------------------------------------------------
diff --git a/extras/tinkerpop.rya/src/main/groovy/mvm/rya/blueprints/sail/RyaSailVertex.groovy b/extras/tinkerpop.rya/src/main/groovy/mvm/rya/blueprints/sail/RyaSailVertex.groovy
index 3f6ec64..96ef08e 100644
--- a/extras/tinkerpop.rya/src/main/groovy/mvm/rya/blueprints/sail/RyaSailVertex.groovy
+++ b/extras/tinkerpop.rya/src/main/groovy/mvm/rya/blueprints/sail/RyaSailVertex.groovy
@@ -1,3 +1,22 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ * 
+ *   http://www.apache.org/licenses/LICENSE-2.0
+ * 
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied.  See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+
 //package mvm.rya.blueprints.sail
 //
 //import com.tinkerpop.blueprints.pgm.impls.MultiIterable

http://git-wip-us.apache.org/repos/asf/incubator-rya/blob/80faf06d/extras/tinkerpop.rya/src/main/groovy/mvm/rya/blueprints/sail/RyaSailVertexSequence.groovy
----------------------------------------------------------------------
diff --git a/extras/tinkerpop.rya/src/main/groovy/mvm/rya/blueprints/sail/RyaSailVertexSequence.groovy b/extras/tinkerpop.rya/src/main/groovy/mvm/rya/blueprints/sail/RyaSailVertexSequence.groovy
index e0d7479..451955d 100644
--- a/extras/tinkerpop.rya/src/main/groovy/mvm/rya/blueprints/sail/RyaSailVertexSequence.groovy
+++ b/extras/tinkerpop.rya/src/main/groovy/mvm/rya/blueprints/sail/RyaSailVertexSequence.groovy
@@ -1,3 +1,22 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ * 
+ *   http://www.apache.org/licenses/LICENSE-2.0
+ * 
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied.  See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+
 package mvm.rya.blueprints.sail
 
 import com.google.common.collect.Iterators

http://git-wip-us.apache.org/repos/asf/incubator-rya/blob/80faf06d/extras/tinkerpop.rya/src/main/java/mvm/rya/blueprints/sail/RyaSailEdge.java
----------------------------------------------------------------------
diff --git a/extras/tinkerpop.rya/src/main/java/mvm/rya/blueprints/sail/RyaSailEdge.java b/extras/tinkerpop.rya/src/main/java/mvm/rya/blueprints/sail/RyaSailEdge.java
index 4345283..22eff06 100644
--- a/extras/tinkerpop.rya/src/main/java/mvm/rya/blueprints/sail/RyaSailEdge.java
+++ b/extras/tinkerpop.rya/src/main/java/mvm/rya/blueprints/sail/RyaSailEdge.java
@@ -1,25 +1,26 @@
 package mvm.rya.blueprints.sail;
 
 /*
- * #%L
- * mvm.rya.tinkerpop.rya
- * %%
- * Copyright (C) 2014 Rya
- * %%
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * You may obtain a copy of the License at
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
  * 
- *      http://www.apache.org/licenses/LICENSE-2.0
+ *   http://www.apache.org/licenses/LICENSE-2.0
  * 
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- * #L%
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied.  See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
  */
 
+
+
 import com.tinkerpop.blueprints.impls.sail.SailEdge;
 import org.openrdf.model.Resource;
 import org.openrdf.model.Statement;

http://git-wip-us.apache.org/repos/asf/incubator-rya/blob/80faf06d/extras/tinkerpop.rya/src/main/java/mvm/rya/blueprints/sail/RyaSailVertex.java
----------------------------------------------------------------------
diff --git a/extras/tinkerpop.rya/src/main/java/mvm/rya/blueprints/sail/RyaSailVertex.java b/extras/tinkerpop.rya/src/main/java/mvm/rya/blueprints/sail/RyaSailVertex.java
index c02a376..2bb8f3f 100644
--- a/extras/tinkerpop.rya/src/main/java/mvm/rya/blueprints/sail/RyaSailVertex.java
+++ b/extras/tinkerpop.rya/src/main/java/mvm/rya/blueprints/sail/RyaSailVertex.java
@@ -1,25 +1,26 @@
 package mvm.rya.blueprints.sail;
 
 /*
- * #%L
- * mvm.rya.tinkerpop.rya
- * %%
- * Copyright (C) 2014 Rya
- * %%
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * You may obtain a copy of the License at
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
  * 
- *      http://www.apache.org/licenses/LICENSE-2.0
+ *   http://www.apache.org/licenses/LICENSE-2.0
  * 
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- * #L%
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied.  See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
  */
 
+
+
 import com.tinkerpop.blueprints.Direction;
 import com.tinkerpop.blueprints.Edge;
 import com.tinkerpop.blueprints.util.MultiIterable;

http://git-wip-us.apache.org/repos/asf/incubator-rya/blob/80faf06d/extras/tinkerpop.rya/src/test/groovy/mvm/rya/blueprints/TstGremlinRya.groovy
----------------------------------------------------------------------
diff --git a/extras/tinkerpop.rya/src/test/groovy/mvm/rya/blueprints/TstGremlinRya.groovy b/extras/tinkerpop.rya/src/test/groovy/mvm/rya/blueprints/TstGremlinRya.groovy
index 2914ac1..fe0f4e0 100644
--- a/extras/tinkerpop.rya/src/test/groovy/mvm/rya/blueprints/TstGremlinRya.groovy
+++ b/extras/tinkerpop.rya/src/test/groovy/mvm/rya/blueprints/TstGremlinRya.groovy
@@ -1,3 +1,22 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ * 
+ *   http://www.apache.org/licenses/LICENSE-2.0
+ * 
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied.  See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+
 //package mvm.rya.blueprints
 //
 //import com.tinkerpop.blueprints.pgm.impls.sail.SailGraph
@@ -24,25 +43,6 @@
 //
 //        def conf = new AccumuloRdfConfiguration();
 
-/*
- * #%L
- * mvm.rya.tinkerpop.rya
- * %%
- * Copyright (C) 2014 Rya
- * %%
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * You may obtain a copy of the License at
- * 
- *      http://www.apache.org/licenses/LICENSE-2.0
- * 
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- * #L%
- */
 //        conf.setDisplayQueryPlan(true);
 //        def store = new RdfCloudTripleStore();
 //        store.setConf(conf);

http://git-wip-us.apache.org/repos/asf/incubator-rya/blob/80faf06d/extras/tinkerpop.rya/src/test/groovy/mvm/rya/blueprints/config/RyaGraphConfigurationTest.groovy
----------------------------------------------------------------------
diff --git a/extras/tinkerpop.rya/src/test/groovy/mvm/rya/blueprints/config/RyaGraphConfigurationTest.groovy b/extras/tinkerpop.rya/src/test/groovy/mvm/rya/blueprints/config/RyaGraphConfigurationTest.groovy
index 1de38f6..9dd0627 100644
--- a/extras/tinkerpop.rya/src/test/groovy/mvm/rya/blueprints/config/RyaGraphConfigurationTest.groovy
+++ b/extras/tinkerpop.rya/src/test/groovy/mvm/rya/blueprints/config/RyaGraphConfigurationTest.groovy
@@ -1,3 +1,22 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ * 
+ *   http://www.apache.org/licenses/LICENSE-2.0
+ * 
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied.  See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+
 package mvm.rya.blueprints.config
 
 import com.tinkerpop.blueprints.Vertex
@@ -28,25 +47,6 @@ import org.apache.accumulo.core.security.TablePermission
 class RyaGraphConfigurationTest extends TestCase {
     private String user = "user";
 
-/*
- * #%L
- * mvm.rya.tinkerpop.rya
- * %%
- * Copyright (C) 2014 Rya
- * %%
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * You may obtain a copy of the License at
- * 
- *      http://www.apache.org/licenses/LICENSE-2.0
- * 
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- * #L%
- */
     private String pwd = "pwd";
     private String instance = "myinstance";
     private String tablePrefix = "t_";

http://git-wip-us.apache.org/repos/asf/incubator-rya/blob/80faf06d/extras/tinkerpop.rya/src/test/groovy/mvm/rya/blueprints/sail/RyaSailVertexSequenceTest.groovy
----------------------------------------------------------------------
diff --git a/extras/tinkerpop.rya/src/test/groovy/mvm/rya/blueprints/sail/RyaSailVertexSequenceTest.groovy b/extras/tinkerpop.rya/src/test/groovy/mvm/rya/blueprints/sail/RyaSailVertexSequenceTest.groovy
index 9196eb7..c661350 100644
--- a/extras/tinkerpop.rya/src/test/groovy/mvm/rya/blueprints/sail/RyaSailVertexSequenceTest.groovy
+++ b/extras/tinkerpop.rya/src/test/groovy/mvm/rya/blueprints/sail/RyaSailVertexSequenceTest.groovy
@@ -1,3 +1,22 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ * 
+ *   http://www.apache.org/licenses/LICENSE-2.0
+ * 
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied.  See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+
 package mvm.rya.blueprints.sail
 
 import mvm.rya.api.utils.IteratorWrapper
@@ -24,25 +43,6 @@ class RyaSailVertexSequenceTest extends TestCase {
                 ]
         );
 
-/*
- * #%L
- * mvm.rya.tinkerpop.rya
- * %%
- * Copyright (C) 2014 Rya
- * %%
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * You may obtain a copy of the License at
- * 
- *      http://www.apache.org/licenses/LICENSE-2.0
- * 
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- * #L%
- */
         def a = vf.createURI(namespace, "a")
         def b = vf.createURI(namespace, "b")
         def c = vf.createURI(namespace, "c")

http://git-wip-us.apache.org/repos/asf/incubator-rya/blob/80faf06d/extras/tinkerpop.rya/src/test/resources/log4j.properties
----------------------------------------------------------------------
diff --git a/extras/tinkerpop.rya/src/test/resources/log4j.properties b/extras/tinkerpop.rya/src/test/resources/log4j.properties
index 8ed05c6..598d7b5 100644
--- a/extras/tinkerpop.rya/src/test/resources/log4j.properties
+++ b/extras/tinkerpop.rya/src/test/resources/log4j.properties
@@ -1,19 +1,19 @@
-###
-# #%L
-# mvm.rya.tinkerpop.rya
-# %%
-# Copyright (C) 2014 Rya
-# %%
-# Licensed under the Apache License, Version 2.0 (the "License");
-# you may not use this file except in compliance with the License.
-# You may obtain a copy of the License at
+# Licensed to the Apache Software Foundation (ASF) under one
+# or more contributor license agreements.  See the NOTICE file
+# distributed with this work for additional information
+# regarding copyright ownership.  The ASF licenses this file
+# to you under the Apache License, Version 2.0 (the
+# "License"); you may not use this file except in compliance
+# with the License.  You may obtain a copy of the License at
 # 
-#      http://www.apache.org/licenses/LICENSE-2.0
+#   http://www.apache.org/licenses/LICENSE-2.0
 # 
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-# #L%
-###
+# Unless required by applicable law or agreed to in writing,
+# software distributed under the License is distributed on an
+# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+# KIND, either express or implied.  See the License for the
+# specific language governing permissions and limitations
+# under the License.
+
+
+

http://git-wip-us.apache.org/repos/asf/incubator-rya/blob/80faf06d/iterators/accumulo.iterators/pom.xml
----------------------------------------------------------------------
diff --git a/iterators/accumulo.iterators/pom.xml b/iterators/accumulo.iterators/pom.xml
deleted file mode 100644
index fe36f39..0000000
--- a/iterators/accumulo.iterators/pom.xml
+++ /dev/null
@@ -1,34 +0,0 @@
-<?xml version="1.0" encoding="UTF-8"?>
-<project xmlns="http://maven.apache.org/POM/4.0.0" xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance" xsi:schemaLocation="http://maven.apache.org/POM/4.0.0 http://maven.apache.org/xsd/maven-4.0.0.xsd">
-    <parent>
-        <groupId>mvm.rya</groupId>
-        <artifactId>rya.iterators</artifactId>
-        <version>3.2.10-SNAPSHOT</version>
-    </parent>
-    <modelVersion>4.0.0</modelVersion>
-
-    <artifactId>accumulo.iterators</artifactId>
-    <name>${project.groupId}.${project.artifactId}</name>
-    <properties>
-
-    </properties>
-
-    <dependencies>
-        <dependency>
-            <groupId>org.apache.accumulo</groupId>
-            <artifactId>accumulo-core</artifactId>
-            <scope>provided</scope>
-        </dependency>
-        <dependency>
-            <groupId>org.apache.hadoop</groupId>
-            <artifactId>hadoop-common</artifactId>
-            <scope>provided</scope>
-        </dependency>
-        <dependency>
-            <groupId>junit</groupId>
-            <artifactId>junit</artifactId>
-            <scope>test</scope>
-        </dependency>
-    </dependencies>
-
-</project>

http://git-wip-us.apache.org/repos/asf/incubator-rya/blob/80faf06d/iterators/accumulo.iterators/src/main/java/mvm/rya/iterators/LimitingAgeOffFilter.java
----------------------------------------------------------------------
diff --git a/iterators/accumulo.iterators/src/main/java/mvm/rya/iterators/LimitingAgeOffFilter.java b/iterators/accumulo.iterators/src/main/java/mvm/rya/iterators/LimitingAgeOffFilter.java
deleted file mode 100644
index 33d5b98..0000000
--- a/iterators/accumulo.iterators/src/main/java/mvm/rya/iterators/LimitingAgeOffFilter.java
+++ /dev/null
@@ -1,138 +0,0 @@
-package mvm.rya.iterators;
-
-/*
- * #%L
- * mvm.rya.accumulo.iterators
- * %%
- * Copyright (C) 2014 Rya
- * %%
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * You may obtain a copy of the License at
- * 
- *      http://www.apache.org/licenses/LICENSE-2.0
- * 
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- * #L%
- */
-
-import java.io.IOException;
-import java.util.Map;
-
-import org.apache.accumulo.core.client.IteratorSetting;
-import org.apache.accumulo.core.data.Key;
-import org.apache.accumulo.core.data.Value;
-import org.apache.accumulo.core.iterators.Filter;
-import org.apache.accumulo.core.iterators.IteratorEnvironment;
-import org.apache.accumulo.core.iterators.SortedKeyValueIterator;
-
-/**
- * A small modification of the age off filter that ships with Accumulo which ages off key/value pairs based on the
- * Key's timestamp. It removes an entry if its timestamp is less than currentTime - threshold.
- *
- * The modification will now allow rows with timestamp > currentTime to pass through.
- *
- * This filter requires a "ttl" option, in milliseconds, to determine the age off threshold.
- */
-public class LimitingAgeOffFilter extends Filter {
-
-    public static final String TTL = "ttl";
-    public static final String CURRENT_TIME = "currentTime";
-
-    protected long threshold;
-
-    /**
-     * The use of private for this member in the original AgeOffFilter wouldn't allow me to extend it. Setting to protected.
-     */
-    protected long currentTime;
-
-    /**
-     * Accepts entries whose timestamps are less than currentTime - threshold.
-     *
-     * @see org.apache.accumulo.core.iterators.Filter#accept(org.apache.accumulo.core.data.Key, org.apache.accumulo.core.data.Value)
-     */
-    @Override
-    public boolean accept(Key k, Value v) {
-        long diff = currentTime - k.getTimestamp();
-        return !(diff > threshold || diff < 0);
-    }
-
-    @Override
-    public void init(SortedKeyValueIterator<Key,Value> source, Map<String,String> options, IteratorEnvironment env) throws IOException {
-        super.init(source, options, env);
-        threshold = -1;
-        if (options == null)
-            throw new IllegalArgumentException(TTL + " must be set for LimitingAgeOffFilter");
-
-        String ttl = options.get(TTL);
-        if (ttl == null)
-            throw new IllegalArgumentException(TTL + " must be set for LimitingAgeOffFilter");
-
-        threshold = Long.parseLong(ttl);
-
-        String time = options.get(CURRENT_TIME);
-        if (time != null)
-            currentTime = Long.parseLong(time);
-        else
-            currentTime = System.currentTimeMillis();
-
-        // add sanity checks for threshold and currentTime?
-    }
-
-    @Override
-    public SortedKeyValueIterator<Key,Value> deepCopy(IteratorEnvironment env) {
-        LimitingAgeOffFilter copy = (LimitingAgeOffFilter) super.deepCopy(env);
-        copy.currentTime = currentTime;
-        copy.threshold = threshold;
-        return copy;
-    }
-
-    @Override
-    public IteratorOptions describeOptions() {
-        IteratorOptions io = super.describeOptions();
-        io.addNamedOption(TTL, "time to live (milliseconds)");
-        io.addNamedOption(CURRENT_TIME, "if set, use the given value as the absolute time in milliseconds as the current time of day");
-        io.setName("ageoff");
-        io.setDescription("LimitingAgeOffFilter removes entries with timestamps more than <ttl> milliseconds old & timestamps newer than currentTime");
-        return io;
-    }
-
-    @Override
-    public boolean validateOptions(Map<String,String> options) {
-        super.validateOptions(options);
-        try {
-            Long.parseLong(options.get(TTL));
-        } catch (NumberFormatException e) {
-            return false;
-        }
-        return true;
-    }
-
-    /**
-     * A convenience method for setting the age off threshold.
-     *
-     * @param is
-     *          IteratorSetting object to configure.
-     * @param ttl
-     *          age off threshold in milliseconds.
-     */
-    public static void setTTL(IteratorSetting is, Long ttl) {
-        is.addOption(TTL, Long.toString(ttl));
-    }
-
-    /**
-     * A convenience method for setting the current time (from which to measure the age off threshold).
-     *
-     * @param is
-     *          IteratorSetting object to configure.
-     * @param currentTime
-     *          time in milliseconds.
-     */
-    public static void setCurrentTime(IteratorSetting is, Long currentTime) {
-        is.addOption(CURRENT_TIME, Long.toString(currentTime));
-    }
-}
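
[Editor's note on the removal above] For reference, the removed LimitingAgeOffFilter accepted a key exactly when 0 <= currentTime - key.getTimestamp() <= ttl, i.e. timestamps inside the window [currentTime - ttl, currentTime]; with ttl=10000 and currentTime=1010001 (the values used in the deleted test below), timestamps 1000001 through 1010001 pass and anything older, or in the future, is dropped. A minimal sketch of how the filter was typically attached to a scanner, compiling only against the pre-removal tree and using the static helpers the class itself provided; the helper class and scanner wiring here are assumptions, not code from this patch:

    import org.apache.accumulo.core.client.IteratorSetting;
    import org.apache.accumulo.core.client.ScannerBase;

    public class AgeOffSketch {  // hypothetical helper, for illustration only
        /**
         * Attach the (now removed) filter at scan time so only entries whose
         * timestamps fall in [now - ttlMillis, now] are returned.
         */
        static void limitToRecent(ScannerBase scanner, long ttlMillis) {
            IteratorSetting is =
                new IteratorSetting(30, "limitingAgeOff", LimitingAgeOffFilter.class);
            LimitingAgeOffFilter.setTTL(is, ttlMillis);
            LimitingAgeOffFilter.setCurrentTime(is, System.currentTimeMillis());
            scanner.addScanIterator(is);
        }
    }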

http://git-wip-us.apache.org/repos/asf/incubator-rya/blob/80faf06d/iterators/accumulo.iterators/src/test/java/mvm/rya/iterators/LimitingAgeOffFilterTest.java
----------------------------------------------------------------------
diff --git a/iterators/accumulo.iterators/src/test/java/mvm/rya/iterators/LimitingAgeOffFilterTest.java b/iterators/accumulo.iterators/src/test/java/mvm/rya/iterators/LimitingAgeOffFilterTest.java
deleted file mode 100644
index 5e8e2ee..0000000
--- a/iterators/accumulo.iterators/src/test/java/mvm/rya/iterators/LimitingAgeOffFilterTest.java
+++ /dev/null
@@ -1,80 +0,0 @@
-package mvm.rya.iterators;
-
-/*
- * #%L
- * mvm.rya.accumulo.iterators
- * %%
- * Copyright (C) 2014 Rya
- * %%
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * You may obtain a copy of the License at
- * 
- *      http://www.apache.org/licenses/LICENSE-2.0
- * 
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- * #L%
- */
-
-import org.apache.accumulo.core.data.Key;
-import org.apache.accumulo.core.data.Value;
-import org.apache.accumulo.core.iterators.SortedMapIterator;
-import org.apache.hadoop.io.Text;
-import org.junit.Test;
-
-import java.util.HashMap;
-import java.util.Map;
-import java.util.TreeMap;
-
-import static org.junit.Assert.*;
-
-/**
- * Date: 1/11/13
- * Time: 10:18 AM
- */
-public class LimitingAgeOffFilterTest {
-
-    @Test
-    public void testTimeRange() throws Exception {
-        LimitingAgeOffFilter filter = new LimitingAgeOffFilter();
-        Map<String, String> map = new HashMap<String, String>();
-        map.put(LimitingAgeOffFilter.TTL, "10000");
-        map.put(LimitingAgeOffFilter.CURRENT_TIME, "1010001");
-        filter.init(new SortedMapIterator(new TreeMap<Key, Value>()), map, null);
-
-        assertFalse(filter.accept(new Key(new Text("row1"), 1000000), null));
-        assertTrue(filter.accept(new Key(new Text("row1"), 1000001), null));
-        assertTrue(filter.accept(new Key(new Text("row1"), 1000011), null));
-        assertTrue(filter.accept(new Key(new Text("row1"), 1010001), null));
-        assertFalse(filter.accept(new Key(new Text("row1"), 1010002), null));
-        assertFalse(filter.accept(new Key(new Text("row1"), 1010012), null));
-    }
-
-    @Test
-    public void testTimeRangeSetOptions() throws Exception {
-        try {
-            LimitingAgeOffFilter filter = new LimitingAgeOffFilter();
-            Map<String, String> map = new HashMap<String, String>();
-            filter.init(new SortedMapIterator(new TreeMap<Key, Value>()), map, null);
-            fail();
-        } catch (Exception e) {
-        }
-    }
-
-    @Test
-    public void testTimeRangeCurrentTime() throws Exception {
-        long currentTime = System.currentTimeMillis();
-        LimitingAgeOffFilter filter = new LimitingAgeOffFilter();
-        Map<String, String> map = new HashMap<String, String>();
-        map.put(LimitingAgeOffFilter.TTL, "10000");
-        filter.init(new SortedMapIterator(new TreeMap<Key, Value>()), map, null);
-
-        assertFalse(filter.accept(new Key(new Text("row1"), currentTime - 15000), null));
-        assertTrue(filter.accept(new Key(new Text("row1"), currentTime - 5000), null));
-        assertFalse(filter.accept(new Key(new Text("row1"), currentTime + 5000), null));
-    }
-}

http://git-wip-us.apache.org/repos/asf/incubator-rya/blob/80faf06d/iterators/cloudbase.iterators/pom.xml
----------------------------------------------------------------------
diff --git a/iterators/cloudbase.iterators/pom.xml b/iterators/cloudbase.iterators/pom.xml
deleted file mode 100644
index bc3c81e..0000000
--- a/iterators/cloudbase.iterators/pom.xml
+++ /dev/null
@@ -1,29 +0,0 @@
-<?xml version="1.0" encoding="UTF-8"?>
-<project xmlns="http://maven.apache.org/POM/4.0.0" xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance" xsi:schemaLocation="http://maven.apache.org/POM/4.0.0 http://maven.apache.org/xsd/maven-4.0.0.xsd">
-    <parent>
-        <groupId>mvm.rya</groupId>
-        <artifactId>rya.iterators</artifactId>
-        <version>3.2.10-SNAPSHOT</version>
-    </parent>
-    <modelVersion>4.0.0</modelVersion>
-
-    <artifactId>cloudbase.iterators</artifactId>
-    <name>${project.groupId}.${project.artifactId}</name>
-    <properties>
-
-    </properties>
-
-    <dependencies>
-        <dependency>
-            <groupId>cloudbase</groupId>
-            <artifactId>cloudbase-core</artifactId>
-            <scope>provided</scope>
-        </dependency>
-        <dependency>
-            <groupId>org.apache.hadoop</groupId>
-            <artifactId>hadoop-common</artifactId>
-            <scope>provided</scope>
-        </dependency>
-    </dependencies>
-
-</project>

http://git-wip-us.apache.org/repos/asf/incubator-rya/blob/80faf06d/iterators/cloudbase.iterators/src/main/java/mvm/rya/iterators/LimitingAgeOffFilter.java
----------------------------------------------------------------------
diff --git a/iterators/cloudbase.iterators/src/main/java/mvm/rya/iterators/LimitingAgeOffFilter.java b/iterators/cloudbase.iterators/src/main/java/mvm/rya/iterators/LimitingAgeOffFilter.java
deleted file mode 100644
index 44fe945..0000000
--- a/iterators/cloudbase.iterators/src/main/java/mvm/rya/iterators/LimitingAgeOffFilter.java
+++ /dev/null
@@ -1,76 +0,0 @@
-package mvm.rya.iterators;
-
-import cloudbase.core.data.Key;
-import cloudbase.core.data.Value;
-import cloudbase.core.iterators.OptionDescriber;
-import cloudbase.core.iterators.filter.Filter;
-
-import java.util.Map;
-import java.util.TreeMap;
-
-/**
- * A small modification of the age off filter that ships with Accumulo which ages off key/value pairs based on the
- * Key's timestamp. It removes an entry if its timestamp is less than currentTime - threshold.
- * <p/>
- * The modification will now allow rows with timestamp > currentTime to pass through.
- * <p/>
- * This filter requires a "ttl" option, in milliseconds, to determine the age off threshold.
- */
-public class LimitingAgeOffFilter implements Filter, OptionDescriber {
-
-    public static final String TTL = "ttl";
-    public static final String CURRENT_TIME = "currentTime";
-
-    protected long threshold;
-
-    /**
-     * The use of private for this member in the original AgeOffFilter wouldn't allow me to extend it. Setting to protected.
-     */
-    protected long currentTime;
-
-    @Override
-    public boolean accept(Key k, Value v) {
-        long diff = currentTime - k.getTimestamp();
-        return !(diff > threshold || diff < 0);
-    }
-
-    @Override
-    public void init(Map<String, String> options) {
-        threshold = -1;
-        if (options == null)
-            throw new IllegalArgumentException(TTL + " must be set for LimitingAgeOffFilter");
-
-        String ttl = options.get(TTL);
-        if (ttl == null)
-            throw new IllegalArgumentException(TTL + " must be set for LimitingAgeOffFilter");
-
-        threshold = Long.parseLong(ttl);
-
-        String time = options.get(CURRENT_TIME);
-        if (time != null)
-            currentTime = Long.parseLong(time);
-        else
-            currentTime = System.currentTimeMillis();
-
-        // add sanity checks for threshold and currentTime?
-    }
-
-    @Override
-    public IteratorOptions describeOptions() {
-        Map<String, String> options = new TreeMap<String, String>();
-        options.put(TTL, "time to live (milliseconds)");
-        options.put(CURRENT_TIME, "if set, use the given value as the absolute time in milliseconds as the current time of day");
-        return new OptionDescriber.IteratorOptions("limitingAgeOff", "LimitingAgeOffFilter removes entries with timestamps more than <ttl> milliseconds old & timestamps newer than currentTime",
-                options, null);
-    }
-
-    @Override
-    public boolean validateOptions(Map<String, String> options) {
-        try {
-            Long.parseLong(options.get(TTL));
-        } catch (NumberFormatException e) {
-            return false;
-        }
-        return true;
-    }
-}
\ No newline at end of file
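
Both the Cloudbase filter deleted here and its Accumulo counterpart (whose test opens this diff) take the same TTL and CURRENT_TIME options; the Accumulo flavor follows the SortedKeyValueIterator init(source, options, env) contract, which is how the test at the top wires it to a SortedMapIterator. For orientation only, a hedged sketch of attaching such a filter to an Accumulo scan; the LimitingAgeOffFilter class is assumed to live in a module like the one removed by this commit and to extend Accumulo's Filter (a SortedKeyValueIterator), neither of which is part of this patch:

    import org.apache.accumulo.core.client.IteratorSetting;
    import org.apache.accumulo.core.client.ScannerBase;

    public class AgeOffScanConfig {
        // Attach the age-off filter to a scan; ttlMillis is the window size.
        // LimitingAgeOffFilter itself is assumed importable (see note above).
        static void applyAgeOff(ScannerBase scanner, long ttlMillis) {
            IteratorSetting setting =
                    new IteratorSetting(15, "limitingAgeOff", LimitingAgeOffFilter.class);
            setting.addOption(LimitingAgeOffFilter.TTL, Long.toString(ttlMillis));
            // CURRENT_TIME is optional; when absent the filter falls back to
            // System.currentTimeMillis(), matching init() in the source above.
            scanner.addScanIterator(setting);
        }
    }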

http://git-wip-us.apache.org/repos/asf/incubator-rya/blob/80faf06d/iterators/cloudbase.iterators/src/test/java/mvm/rya/iterators/LimitingAgeOffFilterTest.java
----------------------------------------------------------------------
diff --git a/iterators/cloudbase.iterators/src/test/java/mvm/rya/iterators/LimitingAgeOffFilterTest.java b/iterators/cloudbase.iterators/src/test/java/mvm/rya/iterators/LimitingAgeOffFilterTest.java
deleted file mode 100644
index 83b2f2c..0000000
--- a/iterators/cloudbase.iterators/src/test/java/mvm/rya/iterators/LimitingAgeOffFilterTest.java
+++ /dev/null
@@ -1,59 +0,0 @@
-package mvm.rya.iterators;
-
-import cloudbase.core.data.Key;
-import org.apache.hadoop.io.Text;
-import org.junit.Test;
-
-import java.util.HashMap;
-import java.util.Map;
-
-import static org.junit.Assert.assertFalse;
-import static org.junit.Assert.assertTrue;
-import static org.junit.Assert.fail;
-
-/**
- * Date: Mar 23, 2011
- * Time: 10:08:58 AM
- */
-public class LimitingAgeOffFilterTest {
-
-    @Test
-    public void testTimeRange() throws Exception {
-        LimitingAgeOffFilter filter = new LimitingAgeOffFilter();
-        Map<String, String> map = new HashMap<String, String>();
-        map.put(LimitingAgeOffFilter.TTL, "10000");
-        map.put(LimitingAgeOffFilter.CURRENT_TIME, "1010001");
-        filter.init(map);
-
-        assertFalse(filter.accept(new Key(new Text("row1"), 1000000), null));
-        assertTrue(filter.accept(new Key(new Text("row1"), 1000001), null));
-        assertTrue(filter.accept(new Key(new Text("row1"), 1000011), null));
-        assertTrue(filter.accept(new Key(new Text("row1"), 1010001), null));
-        assertFalse(filter.accept(new Key(new Text("row1"), 1010002), null));
-        assertFalse(filter.accept(new Key(new Text("row1"), 1010012), null));
-    }
-
-    @Test
-    public void testTimeRangeSetOptions() throws Exception {
-        try {
-            LimitingAgeOffFilter filter = new LimitingAgeOffFilter();
-            Map<String, String> map = new HashMap<String, String>();
-            filter.init(map);
-            fail();
-        } catch (Exception e) {
-        }
-    }
-
-    @Test
-    public void testTimeRangeCurrentTime() throws Exception {
-        long currentTime = System.currentTimeMillis();
-        LimitingAgeOffFilter filter = new LimitingAgeOffFilter();
-        Map<String, String> map = new HashMap<String, String>();
-        map.put(LimitingAgeOffFilter.TTL, "10000");
-        filter.init(map);
-
-        assertFalse(filter.accept(new Key(new Text("row1"), currentTime - 15000), null));
-        assertTrue(filter.accept(new Key(new Text("row1"), currentTime - 5000), null));
-        assertFalse(filter.accept(new Key(new Text("row1"), currentTime + 5000), null));
-    }
-}

http://git-wip-us.apache.org/repos/asf/incubator-rya/blob/80faf06d/iterators/pom.xml
----------------------------------------------------------------------
diff --git a/iterators/pom.xml b/iterators/pom.xml
deleted file mode 100644
index 5d88b63..0000000
--- a/iterators/pom.xml
+++ /dev/null
@@ -1,25 +0,0 @@
-<?xml version="1.0" encoding="utf-8"?>
-<project xmlns="http://maven.apache.org/POM/4.0.0" xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance" xsi:schemaLocation="http://maven.apache.org/POM/4.0.0 http://maven.apache.org/xsd/maven-4.0.0.xsd">
-    <modelVersion>4.0.0</modelVersion>
-    <parent>
-        <groupId>mvm.rya</groupId>
-        <artifactId>parent</artifactId>
-        <version>3.2.10-SNAPSHOT</version>
-    </parent>
-    <artifactId>rya.iterators</artifactId>
-    <packaging>pom</packaging>
-    <name>${project.groupId}.${project.artifactId}</name>
-
-    <modules>
-        <module>accumulo.iterators</module>
-    </modules>
-
-    <profiles>
-        <profile>
-            <id>cloudbase</id>
-            <modules>
-                <module>cloudbase.iterators</module>
-            </modules>
-        </profile>
-    </profiles>
-</project>

http://git-wip-us.apache.org/repos/asf/incubator-rya/blob/80faf06d/osgi/alx.rya.console/pom.xml
----------------------------------------------------------------------
diff --git a/osgi/alx.rya.console/pom.xml b/osgi/alx.rya.console/pom.xml
index 8dfeed3..9f5020c 100644
--- a/osgi/alx.rya.console/pom.xml
+++ b/osgi/alx.rya.console/pom.xml
@@ -1,30 +1,50 @@
 <?xml version="1.0" encoding="UTF-8"?>
+
+<!--
+Licensed to the Apache Software Foundation (ASF) under one
+or more contributor license agreements.  See the NOTICE file
+distributed with this work for additional information
+regarding copyright ownership.  The ASF licenses this file
+to you under the Apache License, Version 2.0 (the
+"License"); you may not use this file except in compliance
+with the License.  You may obtain a copy of the License at
+
+  http://www.apache.org/licenses/LICENSE-2.0
+
+Unless required by applicable law or agreed to in writing,
+software distributed under the License is distributed on an
+"AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+KIND, either express or implied.  See the License for the
+specific language governing permissions and limitations
+under the License.
+-->
+
 <project xmlns="http://maven.apache.org/POM/4.0.0" xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance" xsi:schemaLocation="http://maven.apache.org/POM/4.0.0 http://maven.apache.org/xsd/maven-4.0.0.xsd">
+    <modelVersion>4.0.0</modelVersion>
     <parent>
-        <groupId>mvm.rya</groupId>
+        <groupId>org.apache.rya</groupId>
         <artifactId>rya.osgi</artifactId>
         <version>3.2.10-SNAPSHOT</version>
     </parent>
-    <modelVersion>4.0.0</modelVersion>
 
-    <packaging>bundle</packaging>
     <artifactId>alx.rya.console</artifactId>
-    <name>${project.groupId}.${project.artifactId}</name>
+    <name>Apache Rya ALX Console</name>
+
+    <packaging>bundle</packaging>
 
     <dependencies>
         <dependency>
-            <groupId>mvm.rya</groupId>
+            <groupId>org.apache.rya</groupId>
             <artifactId>rya.api</artifactId>
         </dependency>
+
         <dependency>
             <groupId>org.openrdf.sesame</groupId>
             <artifactId>sesame-repository-api</artifactId>
-            <version>${openrdf.sesame.version}</version>
         </dependency>
         <dependency>
             <groupId>org.apache.karaf.shell</groupId>
             <artifactId>org.apache.karaf.shell.console</artifactId>
-            <version>${karaf.version}</version>
             <scope>provided</scope>
         </dependency>
     </dependencies>
@@ -38,4 +58,4 @@
         </plugins>
     </build>
 
-</project>
\ No newline at end of file
+</project>

http://git-wip-us.apache.org/repos/asf/incubator-rya/blob/80faf06d/osgi/alx.rya.console/src/main/java/mvm/rya/alx/command/AbstractRyaCommand.java
----------------------------------------------------------------------
diff --git a/osgi/alx.rya.console/src/main/java/mvm/rya/alx/command/AbstractRyaCommand.java b/osgi/alx.rya.console/src/main/java/mvm/rya/alx/command/AbstractRyaCommand.java
index 2aff2f3..7fada66 100644
--- a/osgi/alx.rya.console/src/main/java/mvm/rya/alx/command/AbstractRyaCommand.java
+++ b/osgi/alx.rya.console/src/main/java/mvm/rya/alx/command/AbstractRyaCommand.java
@@ -1,25 +1,26 @@
 package mvm.rya.alx.command;
 
 /*
- * #%L
- * mvm.rya.alx.rya.console
- * %%
- * Copyright (C) 2014 Rya
- * %%
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * You may obtain a copy of the License at
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
  * 
- *      http://www.apache.org/licenses/LICENSE-2.0
+ *   http://www.apache.org/licenses/LICENSE-2.0
  * 
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- * #L%
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied.  See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
  */
 
+
+
 import mvm.rya.api.persist.RyaDAO;
 import org.apache.karaf.shell.console.OsgiCommandSupport;
 import org.openrdf.repository.Repository;
@@ -54,4 +55,4 @@ public abstract class AbstractRyaCommand extends OsgiCommandSupport {
     }
 
     protected abstract Object doRyaExecute() throws Exception;
-}
\ No newline at end of file
+}

http://git-wip-us.apache.org/repos/asf/incubator-rya/blob/80faf06d/osgi/alx.rya.console/src/main/java/mvm/rya/alx/command/GetStatementsRyaCommand.java
----------------------------------------------------------------------
diff --git a/osgi/alx.rya.console/src/main/java/mvm/rya/alx/command/GetStatementsRyaCommand.java b/osgi/alx.rya.console/src/main/java/mvm/rya/alx/command/GetStatementsRyaCommand.java
index 9ae293e..658f3fc 100644
--- a/osgi/alx.rya.console/src/main/java/mvm/rya/alx/command/GetStatementsRyaCommand.java
+++ b/osgi/alx.rya.console/src/main/java/mvm/rya/alx/command/GetStatementsRyaCommand.java
@@ -1,25 +1,26 @@
 package mvm.rya.alx.command;
 
 /*
- * #%L
- * mvm.rya.alx.rya.console
- * %%
- * Copyright (C) 2014 Rya
- * %%
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * You may obtain a copy of the License at
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
  * 
- *      http://www.apache.org/licenses/LICENSE-2.0
+ *   http://www.apache.org/licenses/LICENSE-2.0
  * 
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- * #L%
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied.  See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
  */
 
+
+
 import org.apache.felix.gogo.commands.Command;
 import org.apache.felix.gogo.commands.Option;
 import org.openrdf.model.Resource;

http://git-wip-us.apache.org/repos/asf/incubator-rya/blob/80faf06d/osgi/alx.rya.console/src/main/java/mvm/rya/alx/command/InfoRyaCommand.java
----------------------------------------------------------------------
diff --git a/osgi/alx.rya.console/src/main/java/mvm/rya/alx/command/InfoRyaCommand.java b/osgi/alx.rya.console/src/main/java/mvm/rya/alx/command/InfoRyaCommand.java
index 15ee550..19b002f 100644
--- a/osgi/alx.rya.console/src/main/java/mvm/rya/alx/command/InfoRyaCommand.java
+++ b/osgi/alx.rya.console/src/main/java/mvm/rya/alx/command/InfoRyaCommand.java
@@ -1,25 +1,26 @@
 package mvm.rya.alx.command;
 
 /*
- * #%L
- * mvm.rya.alx.rya.console
- * %%
- * Copyright (C) 2014 Rya
- * %%
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * You may obtain a copy of the License at
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
  * 
- *      http://www.apache.org/licenses/LICENSE-2.0
+ *   http://www.apache.org/licenses/LICENSE-2.0
  * 
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- * #L%
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied.  See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
  */
 
+
+
 import mvm.rya.api.RdfCloudTripleStoreConfiguration;
 import org.apache.felix.gogo.commands.Command;
 

http://git-wip-us.apache.org/repos/asf/incubator-rya/blob/80faf06d/osgi/alx.rya.console/src/main/resources/OSGI-INF/blueprint/alx.rya.console-blueprint.xml
----------------------------------------------------------------------
diff --git a/osgi/alx.rya.console/src/main/resources/OSGI-INF/blueprint/alx.rya.console-blueprint.xml b/osgi/alx.rya.console/src/main/resources/OSGI-INF/blueprint/alx.rya.console-blueprint.xml
index 0914832..129e9c7 100644
--- a/osgi/alx.rya.console/src/main/resources/OSGI-INF/blueprint/alx.rya.console-blueprint.xml
+++ b/osgi/alx.rya.console/src/main/resources/OSGI-INF/blueprint/alx.rya.console-blueprint.xml
@@ -1,4 +1,24 @@
 <?xml version="1.0" encoding="UTF-8"?>
+
+<!--
+Licensed to the Apache Software Foundation (ASF) under one
+or more contributor license agreements.  See the NOTICE file
+distributed with this work for additional information
+regarding copyright ownership.  The ASF licenses this file
+to you under the Apache License, Version 2.0 (the
+"License"); you may not use this file except in compliance
+with the License.  You may obtain a copy of the License at
+
+  http://www.apache.org/licenses/LICENSE-2.0
+
+Unless required by applicable law or agreed to in writing,
+software distributed under the License is distributed on an
+"AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+KIND, either express or implied.  See the License for the
+specific language governing permissions and limitations
+under the License.
+-->
+
 <blueprint xmlns="http://www.osgi.org/xmlns/blueprint/v1.0.0"
            xmlns:ext="http://aries.apache.org/blueprint/xmlns/blueprint-ext/v1.0.0">
 

http://git-wip-us.apache.org/repos/asf/incubator-rya/blob/80faf06d/osgi/alx.rya/pom.xml
----------------------------------------------------------------------
diff --git a/osgi/alx.rya/pom.xml b/osgi/alx.rya/pom.xml
index 7ce4684..e2ca105 100644
--- a/osgi/alx.rya/pom.xml
+++ b/osgi/alx.rya/pom.xml
@@ -1,36 +1,47 @@
+<?xml version='1.0'?>
+<!--
+Licensed to the Apache Software Foundation (ASF) under one
+or more contributor license agreements.  See the NOTICE file
+distributed with this work for additional information
+regarding copyright ownership.  The ASF licenses this file
+to you under the Apache License, Version 2.0 (the
+"License"); you may not use this file except in compliance
+with the License.  You may obtain a copy of the License at
+
+  http://www.apache.org/licenses/LICENSE-2.0
+
+Unless required by applicable law or agreed to in writing,
+software distributed under the License is distributed on an
+"AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+KIND, either express or implied.  See the License for the
+specific language governing permissions and limitations
+under the License.
+-->
+
 <project xmlns="http://maven.apache.org/POM/4.0.0" xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance" xsi:schemaLocation="http://maven.apache.org/POM/4.0.0 http://maven.apache.org/maven-v4_0_0.xsd">
     <modelVersion>4.0.0</modelVersion>
     <parent>
-        <groupId>mvm.rya</groupId>
+        <groupId>org.apache.rya</groupId>
         <artifactId>rya.osgi</artifactId>
         <version>3.2.10-SNAPSHOT</version>
     </parent>
+
     <artifactId>alx.rya</artifactId>
+    <name>Apache Rya ALX</name>
+
     <packaging>bundle</packaging>
-    <name>${project.groupId}.${project.artifactId}</name>
+
     <dependencies>
         <dependency>
-            <groupId>mvm.rya</groupId>
+            <groupId>org.apache.rya</groupId>
             <artifactId>accumulo.rya</artifactId>
         </dependency>
         <dependency>
             <groupId>junit</groupId>
             <artifactId>junit</artifactId>
+            <scope>test</scope>
         </dependency>
     </dependencies>
-    <repositories>
-        <repository>
-            <releases>
-                <enabled>true</enabled>
-            </releases>
-            <snapshots>
-                <enabled>false</enabled>
-            </snapshots>
-            <id>aduna-opensource.releases</id>
-            <name>Aduna Open Source - Maven releases</name>
-            <url>http://repo.aduna-software.org/maven2/releases</url>
-        </repository>
-    </repositories>
     <build>
         <plugins>
             <plugin>
@@ -47,8 +58,7 @@
             </plugin>
             <plugin>
                 <groupId>org.codehaus.mojo</groupId>
-                <artifactId>build-helper-maven-plugin
-                </artifactId>
+                <artifactId>build-helper-maven-plugin</artifactId>
                 <version>1.7</version>
                 <executions>
                     <execution>

http://git-wip-us.apache.org/repos/asf/incubator-rya/blob/80faf06d/osgi/alx.rya/src/main/features/alx.rya-features.xml
----------------------------------------------------------------------
diff --git a/osgi/alx.rya/src/main/features/alx.rya-features.xml b/osgi/alx.rya/src/main/features/alx.rya-features.xml
index efc6917..9e36c33 100644
--- a/osgi/alx.rya/src/main/features/alx.rya-features.xml
+++ b/osgi/alx.rya/src/main/features/alx.rya-features.xml
@@ -1,4 +1,24 @@
 <?xml version="1.0" encoding="UTF-8"?>
+
+<!--
+Licensed to the Apache Software Foundation (ASF) under one
+or more contributor license agreements.  See the NOTICE file
+distributed with this work for additional information
+regarding copyright ownership.  The ASF licenses this file
+to you under the Apache License, Version 2.0 (the
+"License"); you may not use this file except in compliance
+with the License.  You may obtain a copy of the License at
+
+  http://www.apache.org/licenses/LICENSE-2.0
+
+Unless required by applicable law or agreed to in writing,
+software distributed under the License is distributed on an
+"AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+KIND, either express or implied.  See the License for the
+specific language governing permissions and limitations
+under the License.
+-->
+
 <features name="alx.rya">
     <feature name='org.openrdf.sesame.runtime' version="2.6.4">
         <bundle>wrap:mvn:org.openrdf.sesame/sesame-model/2.6.4</bundle>
@@ -81,4 +101,4 @@
         <feature version="3.0.4">alx.rya</feature>
         <bundle>mvn:mvm.rya/alx.rya.console/3.0.4-SNAPSHOT</bundle>
     </feature>
-</features>
\ No newline at end of file
+</features>

http://git-wip-us.apache.org/repos/asf/incubator-rya/blob/80faf06d/osgi/alx.rya/src/main/java/mvm/rya/alx/util/ConfigurationFactory.java
----------------------------------------------------------------------
diff --git a/osgi/alx.rya/src/main/java/mvm/rya/alx/util/ConfigurationFactory.java b/osgi/alx.rya/src/main/java/mvm/rya/alx/util/ConfigurationFactory.java
index 4cf4edb..849cf60 100644
--- a/osgi/alx.rya/src/main/java/mvm/rya/alx/util/ConfigurationFactory.java
+++ b/osgi/alx.rya/src/main/java/mvm/rya/alx/util/ConfigurationFactory.java
@@ -1,25 +1,26 @@
 package mvm.rya.alx.util;
 
 /*
- * #%L
- * mvm.rya.alx.rya
- * %%
- * Copyright (C) 2014 Rya
- * %%
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * You may obtain a copy of the License at
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
  * 
- *      http://www.apache.org/licenses/LICENSE-2.0
+ *   http://www.apache.org/licenses/LICENSE-2.0
  * 
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- * #L%
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied.  See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
  */
 
+
+
 import mvm.rya.accumulo.AccumuloRdfConfiguration;
 import mvm.rya.api.RdfCloudTripleStoreConfiguration;
 import org.apache.hadoop.conf.Configuration;

http://git-wip-us.apache.org/repos/asf/incubator-rya/blob/80faf06d/osgi/alx.rya/src/main/resources/META-INF/spring/alx.rya-spring-osgi.xml
----------------------------------------------------------------------
diff --git a/osgi/alx.rya/src/main/resources/META-INF/spring/alx.rya-spring-osgi.xml b/osgi/alx.rya/src/main/resources/META-INF/spring/alx.rya-spring-osgi.xml
index 4c11e59..76f1bd3 100644
--- a/osgi/alx.rya/src/main/resources/META-INF/spring/alx.rya-spring-osgi.xml
+++ b/osgi/alx.rya/src/main/resources/META-INF/spring/alx.rya-spring-osgi.xml
@@ -1,12 +1,23 @@
-<!-- Copyright (C) 2008 PROTEUS Technologies, LLC This program is free software:
-    you can redistribute it and/or modify it under the terms of the GNU General
-    Public License as published by the Free Software Foundation, either version
-    3 of the License, or (at your option) any later version. This program is
-    distributed in the hope that it will be useful, but WITHOUT ANY WARRANTY;
-    without even the implied warranty of MERCHANTABILITY or FITNESS FOR A PARTICULAR
-    PURPOSE. See the GNU General Public License for more details. You should
-    have received a copy of the GNU General Public License along with this program.
-    If not, see <http://www.gnu.org/licenses/>. -->
+<?xml version='1.0'?>
+
+<!--
+Licensed to the Apache Software Foundation (ASF) under one
+or more contributor license agreements.  See the NOTICE file
+distributed with this work for additional information
+regarding copyright ownership.  The ASF licenses this file
+to you under the Apache License, Version 2.0 (the
+"License"); you may not use this file except in compliance
+with the License.  You may obtain a copy of the License at
+
+  http://www.apache.org/licenses/LICENSE-2.0
+
+Unless required by applicable law or agreed to in writing,
+software distributed under the License is distributed on an
+"AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+KIND, either express or implied.  See the License for the
+specific language governing permissions and limitations
+under the License.
+-->
 
 <beans xmlns="http://www.springframework.org/schema/beans"
        xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance" xmlns:osgi="http://www.springframework.org/schema/osgi"
@@ -39,4 +50,4 @@
     <osgi:service ref="sailRepo" auto-export="interfaces"/>
     <osgi:service ref="ryaDAO" auto-export="interfaces"/>
 
-</beans>
\ No newline at end of file
+</beans>



http://git-wip-us.apache.org/repos/asf/incubator-rya/blob/80faf06d/partition/web.partition.rdf/cimRdf.xml
----------------------------------------------------------------------
diff --git a/partition/web.partition.rdf/cimRdf.xml b/partition/web.partition.rdf/cimRdf.xml
deleted file mode 100644
index 3246ed5..0000000
--- a/partition/web.partition.rdf/cimRdf.xml
+++ /dev/null
@@ -1,15740 +0,0 @@
-<rdf:RDF
-    xmlns:rdf="http://www.w3.org/1999/02/22-rdf-syntax-ns#"
-    xmlns:owl="http://www.w3.org/2002/07/owl#"
-    xmlns:xsd="http://www.w3.org/2001/XMLSchema#"
-    xmlns:mm="http://mvm.com/owl/2010/10/mm.owl#"
-    xmlns:rdfs="http://www.w3.org/2000/01/rdf-schema#" > 
-  <rdf:Description rdf:about="http://mvm.com/owl/2010/10/mm.owl#urn:mm:mvm:root/cimv2:perl-HTML-Tagset:perl-HTML-Tagset:2:36:3.10">
-    <mm:version rdf:datatype="http://www.w3.org/2001/XMLSchema#string">3.10</mm:version>
-    <mm:targetOperatingSystem rdf:datatype="http://www.w3.org/2001/XMLSchema#int">36</mm:targetOperatingSystem>
-    <mm:softwareElementState rdf:datatype="http://www.w3.org/2001/XMLSchema#int">2</mm:softwareElementState>
-    <mm:softwareElementID rdf:datatype="http://www.w3.org/2001/XMLSchema#string">perl-HTML-Tagset</mm:softwareElementID>
-    <mm:name rdf:datatype="http://www.w3.org/2001/XMLSchema#string">perl-HTML-Tagset</mm:name>
-    <mm:manufacturer rdf:datatype="http://www.w3.org/2001/XMLSchema#string">Red Hat, Inc. &lt;http://bugzilla.redhat.com/bugzilla&gt;</mm:manufacturer>
-    <mm:caption rdf:datatype="http://www.w3.org/2001/XMLSchema#string">HTML::Tagset - data tables useful in parsing HTML</mm:caption>
-    <rdf:type rdf:resource="http://mvm.com/owl/2010/10/mm.owl#SoftwareElement"/>
-  </rdf:Description>
-  <rdf:Description rdf:about="http://mvm.com/owl/2010/10/mm.owl#urn:mm:mvm:root/cimv2:bluez-utils:bluez-utils:2:36:3.7">
-    <mm:version rdf:datatype="http://www.w3.org/2001/XMLSchema#string">3.7</mm:version>
-    <mm:targetOperatingSystem rdf:datatype="http://www.w3.org/2001/XMLSchema#int">36</mm:targetOperatingSystem>
-    <mm:softwareElementState rdf:datatype="http://www.w3.org/2001/XMLSchema#int">2</mm:softwareElementState>
-    <mm:softwareElementID rdf:datatype="http://www.w3.org/2001/XMLSchema#string">bluez-utils</mm:softwareElementID>
-    <mm:name rdf:datatype="http://www.w3.org/2001/XMLSchema#string">bluez-utils</mm:name>
-    <mm:manufacturer rdf:datatype="http://www.w3.org/2001/XMLSchema#string">Red Hat, Inc. &lt;http://bugzilla.redhat.com/bugzilla&gt;</mm:manufacturer>
-    <mm:caption rdf:datatype="http://www.w3.org/2001/XMLSchema#string">Bluetooth utilities</mm:caption>
-    <rdf:type rdf:resource="http://mvm.com/owl/2010/10/mm.owl#SoftwareElement"/>
-  </rdf:Description>
-  <rdf:Description rdf:about="http://mvm.com/owl/2010/10/mm.owl#urn:mm:mvm:root/cimv2:xorg-x11-server-Xnest:xorg-x11-server-Xnest:2:36:1.1.1">
-    <mm:version rdf:datatype="http://www.w3.org/2001/XMLSchema#string">1.1.1</mm:version>
-    <mm:targetOperatingSystem rdf:datatype="http://www.w3.org/2001/XMLSchema#int">36</mm:targetOperatingSystem>
-    <mm:softwareElementState rdf:datatype="http://www.w3.org/2001/XMLSchema#int">2</mm:softwareElementState>
-    <mm:softwareElementID rdf:datatype="http://www.w3.org/2001/XMLSchema#string">xorg-x11-server-Xnest</mm:softwareElementID>
-    <mm:name rdf:datatype="http://www.w3.org/2001/XMLSchema#string">xorg-x11-server-Xnest</mm:name>
-    <mm:manufacturer rdf:datatype="http://www.w3.org/2001/XMLSchema#string">Red Hat, Inc. &lt;http://bugzilla.redhat.com/bugzilla&gt;</mm:manufacturer>
-    <mm:caption rdf:datatype="http://www.w3.org/2001/XMLSchema#string">A nested server.</mm:caption>
-    <rdf:type rdf:resource="http://mvm.com/owl/2010/10/mm.owl#SoftwareElement"/>
-  </rdf:Description>
-  <rdf:Description rdf:about="http://mvm.com/owl/2010/10/mm.owl#urn:mm:mvm:root/cimv2:OpenIPMI:OpenIPMI:2:36:2.0.16">
-    <mm:version rdf:datatype="http://www.w3.org/2001/XMLSchema#string">2.0.16</mm:version>
-    <mm:targetOperatingSystem rdf:datatype="http://www.w3.org/2001/XMLSchema#int">36</mm:targetOperatingSystem>
-    <mm:softwareElementState rdf:datatype="http://www.w3.org/2001/XMLSchema#int">2</mm:softwareElementState>
-    <mm:softwareElementID rdf:datatype="http://www.w3.org/2001/XMLSchema#string">OpenIPMI</mm:softwareElementID>
-    <mm:name rdf:datatype="http://www.w3.org/2001/XMLSchema#string">OpenIPMI</mm:name>
-    <mm:manufacturer rdf:datatype="http://www.w3.org/2001/XMLSchema#string">Red Hat, Inc. &lt;http://bugzilla.redhat.com/bugzilla&gt;</mm:manufacturer>
-    <mm:caption rdf:datatype="http://www.w3.org/2001/XMLSchema#string">OpenIPMI (Intelligent Platform Management Interface) library and tools</mm:caption>
-    <rdf:type rdf:resource="http://mvm.com/owl/2010/10/mm.owl#SoftwareElement"/>
-  </rdf:Description>
-  <rdf:Description rdf:about="http://mvm.com/owl/2010/10/mm.owl#urn:mm:mvm:root/cimv2:glibc:glibc:2:36:2.5">
-    <mm:version rdf:datatype="http://www.w3.org/2001/XMLSchema#string">2.5</mm:version>
-    <mm:softwareElementState rdf:datatype="http://www.w3.org/2001/XMLSchema#int">2</mm:softwareElementState>
-    <mm:softwareElementID rdf:datatype="http://www.w3.org/2001/XMLSchema#string">glibc</mm:softwareElementID>
-    <mm:name rdf:datatype="http://www.w3.org/2001/XMLSchema#string">glibc</mm:name>
-    <mm:manufacturer rdf:datatype="http://www.w3.org/2001/XMLSchema#string">Red Hat, Inc. &lt;http://bugzilla.redhat.com/bugzilla&gt;</mm:manufacturer>
-    <mm:caption rdf:datatype="http://www.w3.org/2001/XMLSchema#string">The GNU libc libraries.</mm:caption>
-    <mm:targetOperatingSystem rdf:datatype="http://www.w3.org/2001/XMLSchema#int">36</mm:targetOperatingSystem>
-    <rdf:type rdf:resource="http://mvm.com/owl/2010/10/mm.owl#SoftwareElement"/>
-  </rdf:Description>
-  <rdf:Description rdf:about="http://mvm.com/owl/2010/10/mm.owl#urn:mm:mvm:root/cimv2:libXfont:libXfont:2:36:1.2.2">
-    <mm:version rdf:datatype="http://www.w3.org/2001/XMLSchema#string">1.2.2</mm:version>
-    <mm:softwareElementState rdf:datatype="http://www.w3.org/2001/XMLSchema#int">2</mm:softwareElementState>
-    <mm:softwareElementID rdf:datatype="http://www.w3.org/2001/XMLSchema#string">libXfont</mm:softwareElementID>
-    <mm:name rdf:datatype="http://www.w3.org/2001/XMLSchema#string">libXfont</mm:name>
-    <mm:manufacturer rdf:datatype="http://www.w3.org/2001/XMLSchema#string">Red Hat, Inc. &lt;http://bugzilla.redhat.com/bugzilla&gt;</mm:manufacturer>
-    <mm:caption rdf:datatype="http://www.w3.org/2001/XMLSchema#string">X.Org X11 libXfont runtime library</mm:caption>
-    <mm:targetOperatingSystem rdf:datatype="http://www.w3.org/2001/XMLSchema#int">36</mm:targetOperatingSystem>
-    <rdf:type rdf:resource="http://mvm.com/owl/2010/10/mm.owl#SoftwareElement"/>
-  </rdf:Description>
-  <rdf:Description rdf:about="http://mvm.com/owl/2010/10/mm.owl#urn:mm:mvm:root/cimv2:python-sqlite:python-sqlite:2:36:1.1.7">
-    <mm:version rdf:datatype="http://www.w3.org/2001/XMLSchema#string">1.1.7</mm:version>
-    <mm:targetOperatingSystem rdf:datatype="http://www.w3.org/2001/XMLSchema#int">36</mm:targetOperatingSystem>
-    <mm:softwareElementState rdf:datatype="http://www.w3.org/2001/XMLSchema#int">2</mm:softwareElementState>
-    <mm:softwareElementID rdf:datatype="http://www.w3.org/2001/XMLSchema#string">python-sqlite</mm:softwareElementID>
-    <mm:name rdf:datatype="http://www.w3.org/2001/XMLSchema#string">python-sqlite</mm:name>
-    <mm:manufacturer rdf:datatype="http://www.w3.org/2001/XMLSchema#string">Red Hat, Inc. &lt;http://bugzilla.redhat.com/bugzilla&gt;</mm:manufacturer>
-    <mm:caption rdf:datatype="http://www.w3.org/2001/XMLSchema#string">Python bindings for sqlite.</mm:caption>
-    <rdf:type rdf:resource="http://mvm.com/owl/2010/10/mm.owl#SoftwareElement"/>
-  </rdf:Description>
-  <rdf:Description rdf:about="http://mvm.com/owl/2010/10/mm.owl#urn:mm:mvm:root/cimv2:libtiff:libtiff:2:36:3.8.2">
-    <mm:version rdf:datatype="http://www.w3.org/2001/XMLSchema#string">3.8.2</mm:version>
-    <mm:softwareElementState rdf:datatype="http://www.w3.org/2001/XMLSchema#int">2</mm:softwareElementState>
-    <mm:softwareElementID rdf:datatype="http://www.w3.org/2001/XMLSchema#string">libtiff</mm:softwareElementID>
-    <mm:name rdf:datatype="http://www.w3.org/2001/XMLSchema#string">libtiff</mm:name>
-    <mm:manufacturer rdf:datatype="http://www.w3.org/2001/XMLSchema#string">Red Hat, Inc. &lt;http://bugzilla.redhat.com/bugzilla&gt;</mm:manufacturer>
-    <mm:caption rdf:datatype="http://www.w3.org/2001/XMLSchema#string">Library of functions for manipulating TIFF format image files</mm:caption>
-    <mm:targetOperatingSystem rdf:datatype="http://www.w3.org/2001/XMLSchema#int">36</mm:targetOperatingSystem>
-    <rdf:type rdf:resource="http://mvm.com/owl/2010/10/mm.owl#SoftwareElement"/>
-  </rdf:Description>
-  <rdf:Description rdf:about="http://mvm.com/owl/2010/10/mm.owl#urn:mm:mvm:root/cimv2:libXTrap:libXTrap:2:36:1.0.0">
-    <mm:version rdf:datatype="http://www.w3.org/2001/XMLSchema#string">1.0.0</mm:version>
-    <mm:softwareElementState rdf:datatype="http://www.w3.org/2001/XMLSchema#int">2</mm:softwareElementState>
-    <mm:softwareElementID rdf:datatype="http://www.w3.org/2001/XMLSchema#string">libXTrap</mm:softwareElementID>
-    <mm:name rdf:datatype="http://www.w3.org/2001/XMLSchema#string">libXTrap</mm:name>
-    <mm:manufacturer rdf:datatype="http://www.w3.org/2001/XMLSchema#string">Red Hat, Inc. &lt;http://bugzilla.redhat.com/bugzilla&gt;</mm:manufacturer>
-    <mm:caption rdf:datatype="http://www.w3.org/2001/XMLSchema#string">X.Org X11 libXTrap runtime library</mm:caption>
-    <mm:targetOperatingSystem rdf:datatype="http://www.w3.org/2001/XMLSchema#int">36</mm:targetOperatingSystem>
-    <rdf:type rdf:resource="http://mvm.com/owl/2010/10/mm.owl#SoftwareElement"/>
-  </rdf:Description>
-  <rdf:Description rdf:about="http://mvm.com/owl/2010/10/mm.owl#urn:mm:mvm:root/cimv2:libXdmcp-devel:libXdmcp-devel:2:36:1.0.1">
-    <mm:version rdf:datatype="http://www.w3.org/2001/XMLSchema#string">1.0.1</mm:version>
-    <mm:softwareElementState rdf:datatype="http://www.w3.org/2001/XMLSchema#int">2</mm:softwareElementState>
-    <mm:softwareElementID rdf:datatype="http://www.w3.org/2001/XMLSchema#string">libXdmcp-devel</mm:softwareElementID>
-    <mm:name rdf:datatype="http://www.w3.org/2001/XMLSchema#string">libXdmcp-devel</mm:name>
-    <mm:manufacturer rdf:datatype="http://www.w3.org/2001/XMLSchema#string">Red Hat, Inc. &lt;http://bugzilla.redhat.com/bugzilla&gt;</mm:manufacturer>
-    <mm:caption rdf:datatype="http://www.w3.org/2001/XMLSchema#string">X.Org X11 libXdmcp development package</mm:caption>
-    <mm:targetOperatingSystem rdf:datatype="http://www.w3.org/2001/XMLSchema#int">36</mm:targetOperatingSystem>
-    <rdf:type rdf:resource="http://mvm.com/owl/2010/10/mm.owl#SoftwareElement"/>
-  </rdf:Description>
-  <rdf:Description rdf:about="http://mvm.com/owl/2010/10/mm.owl#urn:mm:mvm:root/cimv2:psmisc:psmisc:2:36:22.2">
-    <mm:version rdf:datatype="http://www.w3.org/2001/XMLSchema#string">22.2</mm:version>
-    <mm:targetOperatingSystem rdf:datatype="http://www.w3.org/2001/XMLSchema#int">36</mm:targetOperatingSystem>
-    <mm:softwareElementState rdf:datatype="http://www.w3.org/2001/XMLSchema#int">2</mm:softwareElementState>
-    <mm:softwareElementID rdf:datatype="http://www.w3.org/2001/XMLSchema#string">psmisc</mm:softwareElementID>
-    <mm:name rdf:datatype="http://www.w3.org/2001/XMLSchema#string">psmisc</mm:name>
-    <mm:manufacturer rdf:datatype="http://www.w3.org/2001/XMLSchema#string">Red Hat, Inc. &lt;http://bugzilla.redhat.com/bugzilla&gt;</mm:manufacturer>
-    <mm:caption rdf:datatype="http://www.w3.org/2001/XMLSchema#string">Utilities for managing processes on your system.</mm:caption>
-    <rdf:type rdf:resource="http://mvm.com/owl/2010/10/mm.owl#SoftwareElement"/>
-  </rdf:Description>
-  <rdf:Description rdf:about="http://mvm.com/owl/2010/10/mm.owl#urn:mm:mvm:root/cimv2:sblim-cmpi-network-test:sblim-cmpi-network-test:2:36:1.4.0">
-    <mm:version rdf:datatype="http://www.w3.org/2001/XMLSchema#string">1.4.0</mm:version>
-    <mm:targetOperatingSystem rdf:datatype="http://www.w3.org/2001/XMLSchema#int">36</mm:targetOperatingSystem>
-    <mm:softwareElementState rdf:datatype="http://www.w3.org/2001/XMLSchema#int">2</mm:softwareElementState>
-    <mm:softwareElementID rdf:datatype="http://www.w3.org/2001/XMLSchema#string">sblim-cmpi-network-test</mm:softwareElementID>
-    <mm:name rdf:datatype="http://www.w3.org/2001/XMLSchema#string">sblim-cmpi-network-test</mm:name>
-    <mm:manufacturer rdf:datatype="http://www.w3.org/2001/XMLSchema#string">(none)</mm:manufacturer>
-    <mm:caption rdf:datatype="http://www.w3.org/2001/XMLSchema#string">SBLIM Network Instrumentation Testcases</mm:caption>
-    <rdf:type rdf:resource="http://mvm.com/owl/2010/10/mm.owl#SoftwareElement"/>
-  </rdf:Description>
-  <rdf:Description rdf:about="http://mvm.com/owl/2010/10/mm.owl#urn:mm:mvm:root/cimv2:Linux_UnixProcess:CIM_ComputerSystem:nimbus01.bullpen.net:2044:Linux_OperatingSystem:nimbus01.bullpen.net">
-    <mm:name rdf:datatype="http://www.w3.org/2001/XMLSchema#string">gpm</mm:name>
-    <mm:modulePath rdf:datatype="http://www.w3.org/2001/XMLSchema#string">/usr/sbin/gpm</mm:modulePath>
-    <mm:kernelModeTime rdf:datatype="http://www.w3.org/2001/XMLSchema#float">0.0</mm:kernelModeTime>
-    <mm:caption rdf:datatype="http://www.w3.org/2001/XMLSchema#string">Linux (Unix) Process</mm:caption>
-    <mm:parentProcessID rdf:datatype="http://www.w3.org/2001/XMLSchema#string">1</mm:parentProcessID>
-    <mm:enabledState rdf:datatype="http://www.w3.org/2001/XMLSchema#int">2</mm:enabledState>
-    <mm:otherEnabledState rdf:datatype="http://www.w3.org/2001/XMLSchema#string">NULL</mm:otherEnabledState>
-    <mm:creationClassName rdf:datatype="http://www.w3.org/2001/XMLSchema#string">Linux_UnixProcess</mm:creationClassName>
-    <mm:transitioningToState rdf:datatype="http://www.w3.org/2001/XMLSchema#int">12</mm:transitioningToState>
-    <mm:processTTY rdf:datatype="http://www.w3.org/2001/XMLSchema#string">?</mm:processTTY>
-    <mm:requestedState rdf:datatype="http://www.w3.org/2001/XMLSchema#int">2</mm:requestedState>
-    <mm:processGroupID rdf:datatype="http://www.w3.org/2001/XMLSchema#float">0.0</mm:processGroupID>
-    <mm:enabledDefault rdf:datatype="http://www.w3.org/2001/XMLSchema#int">2</mm:enabledDefault>
-    <mm:OSName rdf:datatype="http://www.w3.org/2001/XMLSchema#string">nimbus01.bullpen.net</mm:OSName>
-    <mm:elementName rdf:datatype="http://www.w3.org/2001/XMLSchema#string">gpm</mm:elementName>
-    <mm:OSCreationClassName rdf:datatype="http://www.w3.org/2001/XMLSchema#string">Linux_OperatingSystem</mm:OSCreationClassName>
-    <mm:description rdf:datatype="http://www.w3.org/2001/XMLSchema#string">This class represents instances of currently running programms.</mm:description>
-    <mm:CSName rdf:datatype="http://www.w3.org/2001/XMLSchema#string">nimbus01.bullpen.net</mm:CSName>
-    <mm:handle rdf:datatype="http://www.w3.org/2001/XMLSchema#string">2044</mm:handle>
-    <mm:status rdf:datatype="http://www.w3.org/2001/XMLSchema#string">NULL</mm:status>
-    <mm:realUserID rdf:datatype="http://www.w3.org/2001/XMLSchema#float">0.0</mm:realUserID>
-    <mm:processSessionID rdf:datatype="http://www.w3.org/2001/XMLSchema#float">2044.0</mm:processSessionID>
-    <mm:processNiceValue rdf:datatype="http://www.w3.org/2001/XMLSchema#float">0.0</mm:processNiceValue>
-    <mm:priority rdf:datatype="http://www.w3.org/2001/XMLSchema#float">24.0</mm:priority>
-    <mm:CSCreationClassName rdf:datatype="http://www.w3.org/2001/XMLSchema#string">Linux_ComputerSystem</mm:CSCreationClassName>
-    <mm:userModeTime rdf:datatype="http://www.w3.org/2001/XMLSchema#float">217110.0</mm:userModeTime>
-    <rdf:type rdf:resource="http://mvm.com/owl/2010/10/mm.owl#Process"/>
-    <mm:creationDate rdf:datatype="http://www.w3.org/2001/XMLSchema#dateTime">2010-09-26T07:44:26.000+00:00</mm:creationDate>
-    <mm:parameters rdf:datatype="http://www.w3.org/2001/XMLSchema#string">exps2</mm:parameters>
-    <mm:executionState rdf:datatype="http://www.w3.org/2001/XMLSchema#int">6</mm:executionState>
-  </rdf:Description>
-  <rdf:Description rdf:about="http://mvm.com/owl/2010/10/mm.owl#urn:mm:mvm:root/cimv2:coreutils:coreutils:2:36:5.97">
-    <mm:version rdf:datatype="http://www.w3.org/2001/XMLSchema#string">5.97</mm:version>
-    <mm:targetOperatingSystem rdf:datatype="http://www.w3.org/2001/XMLSchema#int">36</mm:targetOperatingSystem>
-    <mm:softwareElementState rdf:datatype="http://www.w3.org/2001/XMLSchema#int">2</mm:softwareElementState>
-    <mm:softwareElementID rdf:datatype="http://www.w3.org/2001/XMLSchema#string">coreutils</mm:softwareElementID>
-    <mm:name rdf:datatype="http://www.w3.org/2001/XMLSchema#string">coreutils</mm:name>
-    <mm:manufacturer rdf:datatype="http://www.w3.org/2001/XMLSchema#string">Red Hat, Inc. &lt;http://bugzilla.redhat.com/bugzilla&gt;</mm:manufacturer>
-    <mm:caption rdf:datatype="http://www.w3.org/2001/XMLSchema#string">The GNU core utilities: a set of tools commonly used in shell scripts</mm:caption>
-    <rdf:type rdf:resource="http://mvm.com/owl/2010/10/mm.owl#SoftwareElement"/>
-  </rdf:Description>
-  <rdf:Description rdf:about="http://mvm.com/owl/2010/10/mm.owl#urn:mm:mvm:root/cimv2:fontconfig-devel:fontconfig-devel:2:36:2.4.1">
-    <mm:version rdf:datatype="http://www.w3.org/2001/XMLSchema#string">2.4.1</mm:version>
-    <mm:targetOperatingSystem rdf:datatype="http://www.w3.org/2001/XMLSchema#int">36</mm:targetOperatingSystem>
-    <mm:softwareElementState rdf:datatype="http://www.w3.org/2001/XMLSchema#int">2</mm:softwareElementState>
-    <mm:softwareElementID rdf:datatype="http://www.w3.org/2001/XMLSchema#string">fontconfig-devel</mm:softwareElementID>
-    <mm:name rdf:datatype="http://www.w3.org/2001/XMLSchema#string">fontconfig-devel</mm:name>
-    <mm:manufacturer rdf:datatype="http://www.w3.org/2001/XMLSchema#string">Red Hat, Inc. &lt;http://bugzilla.redhat.com/bugzilla&gt;</mm:manufacturer>
-    <mm:caption rdf:datatype="http://www.w3.org/2001/XMLSchema#string">Font configuration and customization library</mm:caption>
-    <rdf:type rdf:resource="http://mvm.com/owl/2010/10/mm.owl#SoftwareElement"/>
-  </rdf:Description>
-  <rdf:Description rdf:about="http://mvm.com/owl/2010/10/mm.owl#urn:mm:mvm:root/cimv2:cracklib:cracklib:2:36:2.8.9">
-    <mm:version rdf:datatype="http://www.w3.org/2001/XMLSchema#string">2.8.9</mm:version>
-    <mm:softwareElementState rdf:datatype="http://www.w3.org/2001/XMLSchema#int">2</mm:softwareElementState>
-    <mm:softwareElementID rdf:datatype="http://www.w3.org/2001/XMLSchema#string">cracklib</mm:softwareElementID>
-    <mm:name rdf:datatype="http://www.w3.org/2001/XMLSchema#string">cracklib</mm:name>
-    <mm:manufacturer rdf:datatype="http://www.w3.org/2001/XMLSchema#string">Red Hat, Inc. &lt;http://bugzilla.redhat.com/bugzilla&gt;</mm:manufacturer>
-    <mm:caption rdf:datatype="http://www.w3.org/2001/XMLSchema#string">A password-checking library.</mm:caption>
-    <mm:targetOperatingSystem rdf:datatype="http://www.w3.org/2001/XMLSchema#int">36</mm:targetOperatingSystem>
-    <rdf:type rdf:resource="http://mvm.com/owl/2010/10/mm.owl#SoftwareElement"/>
-  </rdf:Description>
-  <rdf:Description rdf:about="http://mvm.com/owl/2010/10/mm.owl#urn:mm:mvm:root/cimv2:ruby-irb:ruby-irb:2:36:1.8.5">
-    <mm:version rdf:datatype="http://www.w3.org/2001/XMLSchema#string">1.8.5</mm:version>
-    <mm:targetOperatingSystem rdf:datatype="http://www.w3.org/2001/XMLSchema#int">36</mm:targetOperatingSystem>
-    <mm:softwareElementState rdf:datatype="http://www.w3.org/2001/XMLSchema#int">2</mm:softwareElementState>
-    <mm:softwareElementID rdf:datatype="http://www.w3.org/2001/XMLSchema#string">ruby-irb</mm:softwareElementID>
-    <mm:name rdf:datatype="http://www.w3.org/2001/XMLSchema#string">ruby-irb</mm:name>
-    <mm:manufacturer rdf:datatype="http://www.w3.org/2001/XMLSchema#string">(none)</mm:manufacturer>
-    <mm:caption rdf:datatype="http://www.w3.org/2001/XMLSchema#string">The Interactive Ruby.</mm:caption>
-    <rdf:type rdf:resource="http://mvm.com/owl/2010/10/mm.owl#SoftwareElement"/>
-  </rdf:Description>
-  <rdf:Description rdf:about="http://mvm.com/owl/2010/10/mm.owl#urn:mm:mvm:root/cimv2:boost-devel:boost-devel:2:36:1.33.1">
-    <mm:version rdf:datatype="http://www.w3.org/2001/XMLSchema#string">1.33.1</mm:version>
-    <mm:softwareElementState rdf:datatype="http://www.w3.org/2001/XMLSchema#int">2</mm:softwareElementState>
-    <mm:softwareElementID rdf:datatype="http://www.w3.org/2001/XMLSchema#string">boost-devel</mm:softwareElementID>
-    <mm:name rdf:datatype="http://www.w3.org/2001/XMLSchema#string">boost-devel</mm:name>
-    <mm:manufacturer rdf:datatype="http://www.w3.org/2001/XMLSchema#string">Red Hat, Inc. &lt;http://bugzilla.redhat.com/bugzilla&gt;</mm:manufacturer>
-    <mm:caption rdf:datatype="http://www.w3.org/2001/XMLSchema#string">The Boost C++ headers and development libraries</mm:caption>
-    <mm:targetOperatingSystem rdf:datatype="http://www.w3.org/2001/XMLSchema#int">36</mm:targetOperatingSystem>
-    <rdf:type rdf:resource="http://mvm.com/owl/2010/10/mm.owl#SoftwareElement"/>
-  </rdf:Description>
-  <rdf:Description rdf:about="http://mvm.com/owl/2010/10/mm.owl#urn:mm:mvm:root/cimv2:Linux_UnixProcess:CIM_ComputerSystem:nimbus01.bullpen.net:1683:Linux_OperatingSystem:nimbus01.bullpen.net">
-    <mm:processGroupID rdf:datatype="http://www.w3.org/2001/XMLSchema#float">0.0</mm:processGroupID>
-    <mm:elementName rdf:datatype="http://www.w3.org/2001/XMLSchema#string">syslogd</mm:elementName>
-    <mm:parameters rdf:datatype="http://www.w3.org/2001/XMLSchema#string">0</mm:parameters>
-    <mm:enabledState rdf:datatype="http://www.w3.org/2001/XMLSchema#int">2</mm:enabledState>
-    <mm:enabledDefault rdf:datatype="http://www.w3.org/2001/XMLSchema#int">2</mm:enabledDefault>
-    <mm:creationDate rdf:datatype="http://www.w3.org/2001/XMLSchema#dateTime">2010-09-26T07:41:20.000+00:00</mm:creationDate>
-    <mm:OSCreationClassName rdf:datatype="http://www.w3.org/2001/XMLSchema#string">Linux_OperatingSystem</mm:OSCreationClassName>
-    <mm:requestedState rdf:datatype="http://www.w3.org/2001/XMLSchema#int">2</mm:requestedState>
-    <mm:otherEnabledState rdf:datatype="http://www.w3.org/2001/XMLSchema#string">NULL</mm:otherEnabledState>
-    <mm:handle rdf:datatype="http://www.w3.org/2001/XMLSchema#string">1683</mm:handle>
-    <mm:CSName rdf:datatype="http://www.w3.org/2001/XMLSchema#string">nimbus01.bullpen.net</mm:CSName>
-    <mm:creationClassName rdf:datatype="http://www.w3.org/2001/XMLSchema#string">Linux_UnixProcess</mm:creationClassName>
-    <mm:executionState rdf:datatype="http://www.w3.org/2001/XMLSchema#int">6</mm:executionState>
-    <mm:parentProcessID rdf:datatype="http://www.w3.org/2001/XMLSchema#string">1</mm:parentProcessID>
-    <mm:description rdf:datatype="http://www.w3.org/2001/XMLSchema#string">This class represents instances of currently running programms.</mm:description>
-    <mm:userModeTime rdf:datatype="http://www.w3.org/2001/XMLSchema#float">2900.0</mm:userModeTime>
-    <mm:transitioningToState rdf:datatype="http://www.w3.org/2001/XMLSchema#int">12</mm:transitioningToState>
-    <mm:processNiceValue rdf:datatype="http://www.w3.org/2001/XMLSchema#float">0.0</mm:processNiceValue>
-    <mm:kernelModeTime rdf:datatype="http://www.w3.org/2001/XMLSchema#float">0.0</mm:kernelModeTime>
-    <mm:name rdf:datatype="http://www.w3.org/2001/XMLSchema#string">syslogd</mm:name>
-    <mm:modulePath rdf:datatype="http://www.w3.org/2001/XMLSchema#string">/sbin/syslogd</mm:modulePath>
-    <mm:caption rdf:datatype="http://www.w3.org/2001/XMLSchema#string">Linux (Unix) Process</mm:caption>
-    <mm:CSCreationClassName rdf:datatype="http://www.w3.org/2001/XMLSchema#string">Linux_ComputerSystem</mm:CSCreationClassName>
-    <mm:processTTY rdf:datatype="http://www.w3.org/2001/XMLSchema#string">?</mm:processTTY>
-    <mm:status rdf:datatype="http://www.w3.org/2001/XMLSchema#string">NULL</mm:status>
-    <mm:priority rdf:datatype="http://www.w3.org/2001/XMLSchema#float">23.0</mm:priority>
-    <mm:OSName rdf:datatype="http://www.w3.org/2001/XMLSchema#string">nimbus01.bullpen.net</mm:OSName>
-    <mm:realUserID rdf:datatype="http://www.w3.org/2001/XMLSchema#float">0.0</mm:realUserID>
-    <rdf:type rdf:resource="http://mvm.com/owl/2010/10/mm.owl#Process"/>
-    <mm:processSessionID rdf:datatype="http://www.w3.org/2001/XMLSchema#float">1683.0</mm:processSessionID>
-  </rdf:Description>
-  <rdf:Description rdf:about="http://mvm.com/owl/2010/10/mm.owl#urn:mm:mvm:root/cimv2:minicom:minicom:2:36:2.1">
-    <mm:version rdf:datatype="http://www.w3.org/2001/XMLSchema#string">2.1</mm:version>
-    <mm:targetOperatingSystem rdf:datatype="http://www.w3.org/2001/XMLSchema#int">36</mm:targetOperatingSystem>
-    <mm:softwareElementState rdf:datatype="http://www.w3.org/2001/XMLSchema#int">2</mm:softwareElementState>
-    <mm:softwareElementID rdf:datatype="http://www.w3.org/2001/XMLSchema#string">minicom</mm:softwareElementID>
-    <mm:name rdf:datatype="http://www.w3.org/2001/XMLSchema#string">minicom</mm:name>
-    <mm:manufacturer rdf:datatype="http://www.w3.org/2001/XMLSchema#string">Red Hat, Inc. &lt;http://bugzilla.redhat.com/bugzilla&gt;</mm:manufacturer>
-    <mm:caption rdf:datatype="http://www.w3.org/2001/XMLSchema#string">A text-based modem control and terminal emulation program.</mm:caption>
-    <rdf:type rdf:resource="http://mvm.com/owl/2010/10/mm.owl#SoftwareElement"/>
-  </rdf:Description>
-  <rdf:Description rdf:about="http://mvm.com/owl/2010/10/mm.owl#urn:mm:mvm:root/cimv2:libXinerama:libXinerama:2:36:1.0.1">
-    <mm:version rdf:datatype="http://www.w3.org/2001/XMLSchema#string">1.0.1</mm:version>
-    <mm:softwareElementState rdf:datatype="http://www.w3.org/2001/XMLSchema#int">2</mm:softwareElementState>
-    <mm:softwareElementID rdf:datatype="http://www.w3.org/2001/XMLSchema#string">libXinerama</mm:softwareElementID>
-    <mm:name rdf:datatype="http://www.w3.org/2001/XMLSchema#string">libXinerama</mm:name>
-    <mm:manufacturer rdf:datatype="http://www.w3.org/2001/XMLSchema#string">Red Hat, Inc. &lt;http://bugzilla.redhat.com/bugzilla&gt;</mm:manufacturer>
-    <mm:caption rdf:datatype="http://www.w3.org/2001/XMLSchema#string">X.Org X11 libXinerama runtime library</mm:caption>
-    <mm:targetOperatingSystem rdf:datatype="http://www.w3.org/2001/XMLSchema#int">36</mm:targetOperatingSystem>
-    <rdf:type rdf:resource="http://mvm.com/owl/2010/10/mm.owl#SoftwareElement"/>
-  </rdf:Description>
-  <rdf:Description rdf:about="http://mvm.com/owl/2010/10/mm.owl#urn:mm:mvm:root/cimv2:vixie-cron:vixie-cron:2:36:4.1">
-    <mm:version rdf:datatype="http://www.w3.org/2001/XMLSchema#string">4.1</mm:version>
-    <mm:targetOperatingSystem rdf:datatype="http://www.w3.org/2001/XMLSchema#int">36</mm:targetOperatingSystem>
-    <mm:softwareElementState rdf:datatype="http://www.w3.org/2001/XMLSchema#int">2</mm:softwareElementState>
-    <mm:softwareElementID rdf:datatype="http://www.w3.org/2001/XMLSchema#string">vixie-cron</mm:softwareElementID>
-    <mm:name rdf:datatype="http://www.w3.org/2001/XMLSchema#string">vixie-cron</mm:name>
-    <mm:manufacturer rdf:datatype="http://www.w3.org/2001/XMLSchema#string">Red Hat, Inc. &lt;http://bugzilla.redhat.com/bugzilla&gt;</mm:manufacturer>
-    <mm:caption rdf:datatype="http://www.w3.org/2001/XMLSchema#string">The Vixie cron daemon for executing specified programs at set times.</mm:caption>
-    <rdf:type rdf:resource="http://mvm.com/owl/2010/10/mm.owl#SoftwareElement"/>
-  </rdf:Description>
-  <rdf:Description rdf:about="http://mvm.com/owl/2010/10/mm.owl#urn:mm:mvm:root/cimv2:mcstrans:mcstrans:2:36:0.2.11">
-    <mm:version rdf:datatype="http://www.w3.org/2001/XMLSchema#string">0.2.11</mm:version>
-    <mm:targetOperatingSystem rdf:datatype="http://www.w3.org/2001/XMLSchema#int">36</mm:targetOperatingSystem>
-    <mm:softwareElementState rdf:datatype="http://www.w3.org/2001/XMLSchema#int">2</mm:softwareElementState>
-    <mm:softwareElementID rdf:datatype="http://www.w3.org/2001/XMLSchema#string">mcstrans</mm:softwareElementID>
-    <mm:name rdf:datatype="http://www.w3.org/2001/XMLSchema#string">mcstrans</mm:name>
-    <mm:manufacturer rdf:datatype="http://www.w3.org/2001/XMLSchema#string">Red Hat, Inc. &lt;http://bugzilla.redhat.com/bugzilla&gt;</mm:manufacturer>
-    <mm:caption rdf:datatype="http://www.w3.org/2001/XMLSchema#string">SELinux Translation Daemon</mm:caption>
-    <rdf:type rdf:resource="http://mvm.com/owl/2010/10/mm.owl#SoftwareElement"/>
-  </rdf:Description>
-  <rdf:Description rdf:about="http://mvm.com/owl/2010/10/mm.owl#urn:mm:mvm:root/cimv2:gnome-python2-gnomeprint:gnome-python2-gnomeprint:2:36:2.16.0">
-    <mm:version rdf:datatype="http://www.w3.org/2001/XMLSchema#string">2.16.0</mm:version>
-    <mm:targetOperatingSystem rdf:datatype="http://www.w3.org/2001/XMLSchema#int">36</mm:targetOperatingSystem>
-    <mm:softwareElementState rdf:datatype="http://www.w3.org/2001/XMLSchema#int">2</mm:softwareElementState>
-    <mm:softwareElementID rdf:datatype="http://www.w3.org/2001/XMLSchema#string">gnome-python2-gnomeprint</mm:softwareElementID>
-    <mm:name rdf:datatype="http://www.w3.org/2001/XMLSchema#string">gnome-python2-gnomeprint</mm:name>
-    <mm:manufacturer rdf:datatype="http://www.w3.org/2001/XMLSchema#string">Red Hat, Inc. &lt;http://bugzilla.redhat.com/bugzilla&gt;</mm:manufacturer>
-    <mm:caption rdf:datatype="http://www.w3.org/2001/XMLSchema#string">Python bindings for interacting with libgnomeprint</mm:caption>
-    <rdf:type rdf:resource="http://mvm.com/owl/2010/10/mm.owl#SoftwareElement"/>
-  </rdf:Description>
-  <rdf:Description rdf:about="http://mvm.com/owl/2010/10/mm.owl#urn:mm:mvm:root/cimv2:Linux_UnixProcess:CIM_ComputerSystem:nimbus01.bullpen.net:23297:Linux_OperatingSystem:nimbus01.bullpen.net">
-    <mm:CSCreationClassName rdf:datatype="http://www.w3.org/2001/XMLSchema#string">Linux_ComputerSystem</mm:CSCreationClassName>
-    <mm:transitioningToState rdf:datatype="http://www.w3.org/2001/XMLSchema#int">12</mm:transitioningToState>
-    <mm:OSName rdf:datatype="http://www.w3.org/2001/XMLSchema#string">nimbus01.bullpen.net</mm:OSName>
-    <mm:creationDate rdf:datatype="http://www.w3.org/2001/XMLSchema#dateTime">2011-02-08T10:02:22.000+00:00</mm:creationDate>
-    <mm:CSName rdf:datatype="http://www.w3.org/2001/XMLSchema#string">nimbus01.bullpen.net</mm:CSName>
-    <mm:processSessionID rdf:datatype="http://www.w3.org/2001/XMLSchema#float">23297.0</mm:processSessionID>
-    <mm:parameters rdf:datatype="http://www.w3.org/2001/XMLSchema#string">cmpiOSBase_NFSProvider</mm:parameters>
-    <mm:description rdf:datatype="http://www.w3.org/2001/XMLSchema#string">This class represents instances of currently running programms.</mm:description>
-    <mm:enabledState rdf:datatype="http://www.w3.org/2001/XMLSchema#int">2</mm:enabledState>
-    <rdf:type rdf:resource="http://mvm.com/owl/2010/10/mm.owl#Process"/>
-    <mm:creationClassName rdf:datatype="http://www.w3.org/2001/XMLSchema#string">Linux_UnixProcess</mm:creationClassName>
-    <mm:modulePath rdf:datatype="http://www.w3.org/2001/XMLSchema#string">/usr/sbin/cimprovagt</mm:modulePath>
-    <mm:processGroupID rdf:datatype="http://www.w3.org/2001/XMLSchema#float">0.0</mm:processGroupID>
-    <mm:caption rdf:datatype="http://www.w3.org/2001/XMLSchema#string">Linux (Unix) Process</mm:caption>
-    <mm:executionState rdf:datatype="http://www.w3.org/2001/XMLSchema#int">6</mm:executionState>
-    <mm:OSCreationClassName rdf:datatype="http://www.w3.org/2001/XMLSchema#string">Linux_OperatingSystem</mm:OSCreationClassName>
-    <mm:requestedState rdf:datatype="http://www.w3.org/2001/XMLSchema#int">2</mm:requestedState>
-    <mm:elementName rdf:datatype="http://www.w3.org/2001/XMLSchema#string">cimprovagt</mm:elementName>
-    <mm:processNiceValue rdf:datatype="http://www.w3.org/2001/XMLSchema#float">0.0</mm:processNiceValue>
-    <mm:status rdf:datatype="http://www.w3.org/2001/XMLSchema#string">NULL</mm:status>
-    <mm:parentProcessID rdf:datatype="http://www.w3.org/2001/XMLSchema#string">1</mm:parentProcessID>
-    <mm:name rdf:datatype="http://www.w3.org/2001/XMLSchema#string">cimprovagt</mm:name>
-    <mm:handle rdf:datatype="http://www.w3.org/2001/XMLSchema#string">23297</mm:handle>
-    <mm:enabledDefault rdf:datatype="http://www.w3.org/2001/XMLSchema#int">2</mm:enabledDefault>
-    <mm:userModeTime rdf:datatype="http://www.w3.org/2001/XMLSchema#float">30.0</mm:userModeTime>
-    <mm:processTTY rdf:datatype="http://www.w3.org/2001/XMLSchema#string">?</mm:processTTY>
-    <mm:otherEnabledState rdf:datatype="http://www.w3.org/2001/XMLSchema#string">NULL</mm:otherEnabledState>
-    <mm:kernelModeTime rdf:datatype="http://www.w3.org/2001/XMLSchema#float">10.0</mm:kernelModeTime>
-    <mm:priority rdf:datatype="http://www.w3.org/2001/XMLSchema#float">21.0</mm:priority>
-    <mm:realUserID rdf:datatype="http://www.w3.org/2001/XMLSchema#float">0.0</mm:realUserID>
-  </rdf:Description>
-  <rdf:Description rdf:about="http://mvm.com/owl/2010/10/mm.owl#urn:mm:mvm:root/cimv2:libgail-gnome:libgail-gnome:2:36:1.1.3">
-    <mm:version rdf:datatype="http://www.w3.org/2001/XMLSchema#string">1.1.3</mm:version>
-    <mm:targetOperatingSystem rdf:datatype="http://www.w3.org/2001/XMLSchema#int">36</mm:targetOperatingSystem>
-    <mm:softwareElementState rdf:datatype="http://www.w3.org/2001/XMLSchema#int">2</mm:softwareElementState>
-    <mm:softwareElementID rdf:datatype="http://www.w3.org/2001/XMLSchema#string">libgail-gnome</mm:softwareElementID>
-    <mm:name rdf:datatype="http://www.w3.org/2001/XMLSchema#string">libgail-gnome</mm:name>
-    <mm:manufacturer rdf:datatype="http://www.w3.org/2001/XMLSchema#string">Red Hat, Inc. &lt;http://bugzilla.redhat.com/bugzilla&gt;</mm:manufacturer>
-    <mm:caption rdf:datatype="http://www.w3.org/2001/XMLSchema#string">Accessibility implementation for GTK+ and GNOME libraries</mm:caption>
-    <rdf:type rdf:resource="http://mvm.com/owl/2010/10/mm.owl#SoftwareElement"/>
-  </rdf:Description>
-  <rdf:Description rdf:about="http://mvm.com/owl/2010/10/mm.owl#urn:mm:mvm:root/cimv2:libXcursor-devel:libXcursor-devel:2:36:1.1.7">
-    <mm:version rdf:datatype="http://www.w3.org/2001/XMLSchema#string">1.1.7</mm:version>
-    <mm:softwareElementState rdf:datatype="http://www.w3.org/2001/XMLSchema#int">2</mm:softwareElementState>
-    <mm:softwareElementID rdf:datatype="http://www.w3.org/2001/XMLSchema#string">libXcursor-devel</mm:softwareElementID>
-    <mm:name rdf:datatype="http://www.w3.org/2001/XMLSchema#string">libXcursor-devel</mm:name>
-    <mm:manufacturer rdf:datatype="http://www.w3.org/2001/XMLSchema#string">Red Hat, Inc. &lt;http://bugzilla.redhat.com/bugzilla&gt;</mm:manufacturer>
-    <mm:caption rdf:datatype="http://www.w3.org/2001/XMLSchema#string">X.Org X11 libXcursor development package</mm:caption>
-    <mm:targetOperatingSystem rdf:datatype="http://www.w3.org/2001/XMLSchema#int">36</mm:targetOperatingSystem>
-    <rdf:type rdf:resource="http://mvm.com/owl/2010/10/mm.owl#SoftwareElement"/>
-  </rdf:Description>
-  <rdf:Description rdf:about="http://mvm.com/owl/2010/10/mm.owl#urn:mm:mvm:root/cimv2:libtiff-devel:libtiff-devel:2:36:3.8.2">
-    <mm:version rdf:datatype="http://www.w3.org/2001/XMLSchema#string">3.8.2</mm:version>
-    <mm:softwareElementState rdf:datatype="http://www.w3.org/2001/XMLSchema#int">2</mm:softwareElementState>
-    <mm:softwareElementID rdf:datatype="http://www.w3.org/2001/XMLSchema#string">libtiff-devel</mm:softwareElementID>
-    <mm:name rdf:datatype="http://www.w3.org/2001/XMLSchema#string">libtiff-devel</mm:name>
-    <mm:manufacturer rdf:datatype="http://www.w3.org/2001/XMLSchema#string">Red Hat, Inc. &lt;http://bugzilla.redhat.com/bugzilla&gt;</mm:manufacturer>
-    <mm:caption rdf:datatype="http://www.w3.org/2001/XMLSchema#string">Development tools for programs which will use the libtiff library</mm:caption>
-    <mm:targetOperatingSystem rdf:datatype="http://www.w3.org/2001/XMLSchema#int">36</mm:targetOperatingSystem>
-    <rdf:type rdf:resource="http://mvm.com/owl/2010/10/mm.owl#SoftwareElement"/>
-  </rdf:Description>
-  <rdf:Description rdf:about="http://mvm.com/owl/2010/10/mm.owl#urn:mm:mvm:root/cimv2:libgsf:libgsf:2:36:1.14.1">
-    <mm:version rdf:datatype="http://www.w3.org/2001/XMLSchema#string">1.14.1</mm:version>
-    <mm:softwareElementState rdf:datatype="http://www.w3.org/2001/XMLSchema#int">2</mm:softwareElementState>
-    <mm:softwareElementID rdf:datatype="http://www.w3.org/2001/XMLSchema#string">libgsf</mm:softwareElementID>
-    <mm:name rdf:datatype="http://www.w3.org/2001/XMLSchema#string">libgsf</mm:name>
-    <mm:manufacturer rdf:datatype="http://www.w3.org/2001/XMLSchema#string">Red Hat, Inc. &lt;http://bugzilla.redhat.com/bugzilla&gt;</mm:manufacturer>
-    <mm:caption rdf:datatype="http://www.w3.org/2001/XMLSchema#string">GNOME Structured File library</mm:caption>
-    <mm:targetOperatingSystem rdf:datatype="http://www.w3.org/2001/XMLSchema#int">36</mm:targetOperatingSystem>
-    <rdf:type rdf:resource="http://mvm.com/owl/2010/10/mm.owl#SoftwareElement"/>
-  </rdf:Description>
-  <rdf:Description rdf:about="http://mvm.com/owl/2010/10/mm.owl#urn:mm:mvm:root/cimv2:libmusicbrainz:libmusicbrainz:2:36:2.1.1">
-    <mm:version rdf:datatype="http://www.w3.org/2001/XMLSchema#string">2.1.1</mm:version>
-    <mm:targetOperatingSystem rdf:datatype="http://www.w3.org/2001/XMLSchema#int">36</mm:targetOperatingSystem>
-    <mm:softwareElementState rdf:datatype="http://www.w3.org/2001/XMLSchema#int">2</mm:softwareElementState>
-    <mm:softwareElementID rdf:datatype="http://www.w3.org/2001/XMLSchema#string">libmusicbrainz</mm:softwareElementID>
-    <mm:name rdf:datatype="http://www.w3.org/2001/XMLSchema#string">libmusicbrainz</mm:name>
-    <mm:manufacturer rdf:datatype="http://www.w3.org/2001/XMLSchema#string">Red Hat, Inc. &lt;http://bugzilla.redhat.com/bugzilla&gt;</mm:manufacturer>
-    <mm:caption rdf:datatype="http://www.w3.org/2001/XMLSchema#string">Library for accessing MusicBrainz servers</mm:caption>
-    <rdf:type rdf:resource="http://mvm.com/owl/2010/10/mm.owl#SoftwareElement"/>
-  </rdf:Description>
-  <rdf:Description rdf:about="http://mvm.com/owl/2010/10/mm.owl#urn:mm:mvm:root/cimv2:util-linux:util-linux:2:36:2.13">
-    <mm:version rdf:datatype="http://www.w3.org/2001/XMLSchema#string">2.13</mm:version>
-    <mm:targetOperatingSystem rdf:datatype="http://www.w3.org/2001/XMLSchema#int">36</mm:targetOperatingSystem>
-    <mm:softwareElementState rdf:datatype="http://www.w3.org/2001/XMLSchema#int">2</mm:softwareElementState>
-    <mm:softwareElementID rdf:datatype="http://www.w3.org/2001/XMLSchema#string">util-linux</mm:softwareElementID>
-    <mm:name rdf:datatype="http://www.w3.org/2001/XMLSchema#string">util-linux</mm:name>
-    <mm:manufacturer rdf:datatype="http://www.w3.org/2001/XMLSchema#string">Red Hat, Inc. &lt;http://bugzilla.redhat.com/bugzilla&gt;</mm:manufacturer>
-    <mm:caption rdf:datatype="http://www.w3.org/2001/XMLSchema#string">A collection of basic system utilities.</mm:caption>
-    <rdf:type rdf:resource="http://mvm.com/owl/2010/10/mm.owl#SoftwareElement"/>
-  </rdf:Description>
-  <rdf:Description rdf:about="http://mvm.com/owl/2010/10/mm.owl#urn:mm:mvm:root/cimv2:Linux_UnixProcess:CIM_ComputerSystem:nimbus01.bullpen.net:1732:Linux_OperatingSystem:nimbus01.bullpen.net">
-    <mm:OSCreationClassName rdf:datatype="http://www.w3.org/2001/XMLSchema#string">Linux_OperatingSystem</mm:OSCreationClassName>
-    <mm:caption rdf:datatype="http://www.w3.org/2001/XMLSchema#string">Linux (Unix) Process</mm:caption>
-    <mm:creationDate rdf:datatype="http://www.w3.org/2001/XMLSchema#dateTime">2010-09-26T07:41:20.000+00:00</mm:creationDate>
-    <mm:elementName rdf:datatype="http://www.w3.org/2001/XMLSchema#string">rpciod/0</mm:elementName>
-    <mm:otherEnabledState rdf:datatype="http://www.w3.org/2001/XMLSchema#string">NULL</mm:otherEnabledState>
-    <mm:processNiceValue rdf:datatype="http://www.w3.org/2001/XMLSchema#float">4.2949673E9</mm:processNiceValue>
-    <mm:requestedState rdf:datatype="http://www.w3.org/2001/XMLSchema#int">2</mm:requestedState>
-    <mm:status rdf:datatype="http://www.w3.org/2001/XMLSchema#string">NULL</mm:status>
-    <mm:parentProcessID rdf:datatype="http://www.w3.org/2001/XMLSchema#string">14</mm:parentProcessID>
-    <mm:executionState rdf:datatype="http://www.w3.org/2001/XMLSchema#int">6</mm:executionState>
-    <mm:processGroupID rdf:datatype="http://www.w3.org/2001/XMLSchema#float">0.0</mm:processGroupID>
-    <mm:userModeTime rdf:datatype="http://www.w3.org/2001/XMLSchema#float">80.0</mm:userModeTime>
-    <mm:enabledState rdf:datatype="http://www.w3.org/2001/XMLSchema#int">2</mm:enabledState>
-    <mm:realUserID rdf:datatype="http://www.w3.org/2001/XMLSchema#float">0.0</mm:realUserID>
-    <mm:parameters rdf:datatype="http://www.w3.org/2001/XMLSchema#string">[rpciod/0]</mm:parameters>
-    <mm:name rdf:datatype="http://www.w3.org/2001/XMLSchema#string">rpciod/0</mm:name>
-    <mm:CSName rdf:datatype="http://www.w3.org/2001/XMLSchema#string">nimbus01.bullpen.net</mm:CSName>
-    <mm:CSCreationClassName rdf:datatype="http://www.w3.org/2001/XMLSchema#string">Linux_ComputerSystem</mm:CSCreationClassName>
-    <mm:creationClassName rdf:datatype="http://www.w3.org/2001/XMLSchema#string">Linux_UnixProcess</mm:creationClassName>
-    <mm:transitioningToState rdf:datatype="http://www.w3.org/2001/XMLSchema#int">12</mm:transitioningToState>
-    <rdf:type rdf:resource="http://mvm.com/owl/2010/10/mm.owl#Process"/>
-    <mm:OSName rdf:datatype="http://www.w3.org/2001/XMLSchema#string">nimbus01.bullpen.net</mm:OSName>
-    <mm:description rdf:datatype="http://www.w3.org/2001/XMLSchema#string">This class represents instances of currently running programms.</mm:description>
-    <mm:modulePath rdf:datatype="http://www.w3.org/2001/XMLSchema#string">rpciod/0</mm:modulePath>
-    <mm:processTTY rdf:datatype="http://www.w3.org/2001/XMLSchema#string">?</mm:processTTY>
-    <mm:enabledDefault rdf:datatype="http://www.w3.org/2001/XMLSchema#int">2</mm:enabledDefault>
-    <mm:priority rdf:datatype="http://www.w3.org/2001/XMLSchema#float">29.0</mm:priority>
-    <mm:processSessionID rdf:datatype="http://www.w3.org/2001/XMLSchema#float">1.0</mm:processSessionID>
-    <mm:handle rdf:datatype="http://www.w3.org/2001/XMLSchema#string">1732</mm:handle>
-    <mm:kernelModeTime rdf:datatype="http://www.w3.org/2001/XMLSchema#float">0.0</mm:kernelModeTime>
-  </rdf:Description>
-  <rdf:Description rdf:about="http://mvm.com/owl/2010/10/mm.owl#urn:mm:mvm:root/cimv2:paps:paps:2:36:0.6.6">
-    <mm:version rdf:datatype="http://www.w3.org/2001/XMLSchema#string">0.6.6</mm:version>
-    <mm:targetOperatingSystem rdf:datatype="http://www.w3.org/2001/XMLSchema#int">36</mm:targetOperatingSystem>
-    <mm:softwareElementState rdf:datatype="http://www.w3.org/2001/XMLSchema#int">2</mm:softwareElementState>
-    <mm:softwareElementID rdf:datatype="http://www.w3.org/2001/XMLSchema#string">paps</mm:softwareElementID>
-    <mm:name rdf:datatype="http://www.w3.org/2001/XMLSchema#string">paps</mm:name>
-    <mm:manufacturer rdf:datatype="http://www.w3.org/2001/XMLSchema#string">Red Hat, Inc. &lt;http://bugzilla.redhat.com/bugzilla&gt;</mm:manufacturer>
-    <mm:caption rdf:datatype="http://www.w3.org/2001/XMLSchema#string">Plain Text to PostScript converter</mm:caption>
-    <rdf:type rdf:resource="http://mvm.com/owl/2010/10/mm.owl#SoftwareElement"/>
-  </rdf:Description>
-  <rdf:Description rdf:about="http://mvm.com/owl/2010/10/mm.owl#urn:mm:mvm:root/cimv2:authconfig-gtk:authconfig-gtk:2:36:5.3.21">
-    <mm:version rdf:datatype="http://www.w3.org/2001/XMLSchema#string">5.3.21</mm:version>
-    <mm:targetOperatingSystem rdf:datatype="http://www.w3.org/2001/XMLSchema#int">36</mm:targetOperatingSystem>
-    <mm:softwareElementState rdf:datatype="http://www.w3.org/2001/XMLSchema#int">2</mm:softwareElementState>
-    <mm:softwareElementID rdf:datatype="http://www.w3.org/2001/XMLSchema#string">authconfig-gtk</mm:softwareElementID>
-    <mm:name rdf:datatype="http://www.w3.org/2001/XMLSchema#string">authconfig-gtk</mm:name>
-    <mm:manufacturer rdf:datatype="http://www.w3.org/2001/XMLSchema#string">Red Hat, Inc. &lt;http://bugzilla.redhat.com/bugzilla&gt;</mm:manufacturer>
-    <mm:caption rdf:datatype="http://www.w3.org/2001/XMLSchema#string">Graphical tool for setting up authentication from network services</mm:caption>
-    <rdf:type rdf:resource="http://mvm.com/owl/2010/10/mm.owl#SoftwareElement"/>
-  </rdf:Description>
-  <rdf:Description rdf:about="http://mvm.com/owl/2010/10/mm.owl#urn:mm:mvm:root/cimv2:libXaw:libXaw:2:36:1.0.2">
-    <mm:version rdf:datatype="http://www.w3.org/2001/XMLSchema#string">1.0.2</mm:version>
-    <mm:softwareElementState rdf:datatype="http://www.w3.org/2001/XMLSchema#int">2</mm:softwareElementState>
-    <mm:softwareElementID rdf:datatype="http://www.w3.org/2001/XMLSchema#string">libXaw</mm:softwareElementID>
-    <mm:name rdf:datatype="http://www.w3.org/2001/XMLSchema#string">libXaw</mm:name>
-    <mm:manufacturer rdf:datatype="http://www.w3.org/2001/XMLSchema#string">Red Hat, Inc. &lt;http://bugzilla.redhat.com/bugzilla&gt;</mm:manufacturer>
-    <mm:caption rdf:datatype="http://www.w3.org/2001/XMLSchema#string">X.Org X11 libXaw runtime library</mm:caption>
-    <mm:targetOperatingSystem rdf:datatype="http://www.w3.org/2001/XMLSchema#int">36</mm:targetOperatingSystem>
-    <rdf:type rdf:resource="http://mvm.com/owl/2010/10/mm.owl#SoftwareElement"/>
-  </rdf:Description>
-  <rdf:Description rdf:about="http://mvm.com/owl/2010/10/mm.owl#urn:mm:mvm:root/cimv2:jwhois:jwhois:2:36:3.2.3">
-    <mm:version rdf:datatype="http://www.w3.org/2001/XMLSchema#string">3.2.3</mm:version>
-    <mm:targetOperatingSystem rdf:datatype="http://www.w3.org/2001/XMLSchema#int">36</mm:targetOperatingSystem>
-    <mm:softwareElementState rdf:datatype="http://www.w3.org/2001/XMLSchema#int">2</mm:softwareElementState>
-    <mm:softwareElementID rdf:datatype="http://www.w3.org/2001/XMLSchema#string">jwhois</mm:softwareElementID>
-    <mm:name rdf:datatype="http://www.w3.org/2001/XMLSchema#string">jwhois</mm:name>
-    <mm:manufacturer rdf:datatype="http://www.w3.org/2001/XMLSchema#string">Red Hat, Inc. &lt;http://bugzilla.redhat.com/bugzilla&gt;</mm:manufacturer>
-    <mm:caption rdf:datatype="http://www.w3.org/2001/XMLSchema#string">Internet whois/nicname client.</mm:caption>
-    <rdf:type rdf:resource="http://mvm.com/owl/2010/10/mm.owl#SoftwareElement"/>
-  </rdf:Description>
-  <rdf:Description rdf:about="http://mvm.com/owl/2010/10/mm.owl#urn:mm:mvm:root/cimv2:gettext:gettext:2:36:0.14.6">
-    <mm:version rdf:datatype="http://www.w3.org/2001/XMLSchema#string">0.14.6</mm:version>
-    <mm:softwareElementState rdf:datatype="http://www.w3.org/2001/XMLSchema#int">2</mm:softwareElementState>
-    <mm:softwareElementID rdf:datatype="http://www.w3.org/2001/XMLSchema#string">gettext</mm:softwareElementID>
-    <mm:name rdf:datatype="http://www.w3.org/2001/XMLSchema#string">gettext</mm:name>
-    <mm:manufacturer rdf:datatype="http://www.w3.org/2001/XMLSchema#string">Red Hat, Inc. &lt;http://bugzilla.redhat.com/bugzilla&gt;</mm:manufacturer>
-    <mm:caption rdf:datatype="http://www.w3.org/2001/XMLSchema#string">GNU libraries and utilities for producing multi-lingual messages.</mm:caption>
-    <mm:targetOperatingSystem rdf:datatype="http://www.w3.org/2001/XMLSchema#int">36</mm:targetOperatingSystem>
-    <rdf:type rdf:resource="http://mvm.com/owl/2010/10/mm.owl#SoftwareElement"/>
-  </rdf:Description>
-  <rdf:Description rdf:about="http://mvm.com/owl/2010/10/mm.owl#urn:mm:mvm:root/cimv2:file:file:2:36:4.17">
-    <mm:version rdf:datatype="http://www.w3.org/2001/XMLSchema#string">4.17</mm:version>
-    <mm:targetOperatingSystem rdf:datatype="http://www.w3.org/2001/XMLSchema#int">36</mm:targetOperatingSystem>
-    <mm:softwareElementState rdf:datatype="http://www.w3.org/2001/XMLSchema#int">2</mm:softwareElementState>
-    <mm:softwareElementID rdf:datatype="http://www.w3.org/2001/XMLSchema#string">file</mm:softwareElementID>
-    <mm:name rdf:datatype="http://www.w3.org/2001/XMLSchema#string">file</mm:name>
-    <mm:manufacturer rdf:datatype="http://www.w3.org/2001/XMLSchema#string">Red Hat, Inc. &lt;http://bugzilla.redhat.com/bugzilla&gt;</mm:manufacturer>
-    <mm:caption rdf:datatype="http://www.w3.org/2001/XMLSchema#string">A utility for determining file types.</mm:caption>
-    <rdf:type rdf:resource="http://mvm.com/owl/2010/10/mm.owl#SoftwareElement"/>
-  </rdf:Description>
-  <rdf:Description rdf:about="http://mvm.com/owl/2010/10/mm.owl#urn:mm:mvm:root/cimv2:freeglut:freeglut:2:36:2.4.0">
-    <mm:version rdf:datatype="http://www.w3.org/2001/XMLSchema#string">2.4.0</mm:version>
-    <mm:softwareElementState rdf:datatype="http://www.w3.org/2001/XMLSchema#int">2</mm:softwareElementState>
-    <mm:softwareElementID rdf:datatype="http://www.w3.org/2001/XMLSchema#string">freeglut</mm:softwareElementID>
-    <mm:name rdf:datatype="http://www.w3.org/2001/XMLSchema#string">freeglut</mm:name>
-    <mm:manufacturer rdf:datatype="http://www.w3.org/2001/XMLSchema#string">Red Hat, Inc. &lt;http://bugzilla.redhat.com/bugzilla&gt;</mm:manufacturer>
-    <mm:caption rdf:datatype="http://www.w3.org/2001/XMLSchema#string">A freely licensed alternative to the GLUT library</mm:caption>
-    <mm:targetOperatingSystem rdf:datatype="http://www.w3.org/2001/XMLSchema#int">36</mm:targetOperatingSystem>
-    <rdf:type rdf:resource="http://mvm.com/owl/2010/10/mm.owl#SoftwareElement"/>
-  </rdf:Description>
-  <rdf:Description rdf:about="http://mvm.com/owl/2010/10/mm.owl#urn:mm:mvm:root/cimv2:automake:automake:2:36:1.9.6">
-    <mm:version rdf:datatype="http://www.w3.org/2001/XMLSchema#string">1.9.6</mm:version>
-    <mm:targetOperatingSystem rdf:datatype="http://www.w3.org/2001/XMLSchema#int">36</mm:targetOperatingSystem>
-    <mm:softwareElementState rdf:datatype="http://www.w3.org/2001/XMLSchema#int">2</mm:softwareElementState>
-    <mm:softwareElementID rdf:datatype="http://www.w3.org/2001/XMLSchema#string">automake</mm:softwareElementID>
-    <mm:name rdf:datatype="http://www.w3.org/2001/XMLSchema#string">automake</mm:name>
-    <mm:manufacturer rdf:datatype="http://www.w3.org/2001/XMLSchema#string">Red Hat, Inc. &lt;http://bugzilla.redhat.com/bugzilla&gt;</mm:manufacturer>
-    <mm:caption rdf:datatype="http://www.w3.org/2001/XMLSchema#string">A GNU tool for automatically creating Makefiles.</mm:caption>
-    <rdf:type rdf:resource="http://mvm.com/owl/2010/10/mm.owl#SoftwareElement"/>
-  </rdf:Description>
-  <rdf:Description rdf:about="http://mvm.com/owl/2010/10/mm.owl#urn:mm:mvm:root/cimv2:Linux_UnixProcess:CIM_ComputerSystem:nimbus01.bullpen.net:14396:Linux_OperatingSystem:nimbus01.bullpen.net">
-    <mm:creationDate rdf:datatype="http://www.w3.org/2001/XMLSchema#dateTime">2010-10-01T07:12:45.000+00:00</mm:creationDate>
-    <mm:elementName rdf:datatype="http://www.w3.org/2001/XMLSchema#string">nautilus</mm:elementName>
-    <mm:requestedState rdf:datatype="http://www.w3.org/2001/XMLSchema#int">2</mm:requestedState>
-    <mm:caption rdf:datatype="http://www.w3.org/2001/XMLSchema#string">Linux (Unix) Process</mm:caption>
-    <mm:parameters rdf:datatype="http://www.w3.org/2001/XMLSchema#string">default3</mm:parameters>
-    <mm:transitioningToState rdf:datatype="http://www.w3.org/2001/XMLSchema#int">12</mm:transitioningToState>
-    <mm:userModeTime rdf:datatype="http://www.w3.org/2001/XMLSchema#float">19480.0</mm:userModeTime>
-    <mm:modulePath rdf:datatype="http://www.w3.org/2001/XMLSchema#string">nautilus</mm:modulePath>
-    <mm:description rdf:datatype="http://www.w3.org/2001/XMLSchema#string">This class represents instances of currently running programms.</mm:description>
-    <mm:OSName rdf:datatype="http://www.w3.org/2001/XMLSchema#string">nimbus01.bullpen.net</mm:OSName>
-    <mm:priority rdf:datatype="http://www.w3.org/2001/XMLSchema#float">24.0</mm:priority>
-    <mm:CSCreationClassName rdf:datatype="http://www.w3.org/2001/XMLSchema#string">Linux_ComputerSystem</mm:CSCreationClassName>
-    <mm:enabledState rdf:datatype="http://www.w3.org/2001/XMLSchema#int">2</mm:enabledState>
-    <rdf:type rdf:resource="http://mvm.com/owl/2010/10/mm.owl#Process"/>
-    <mm:CSName rdf:datatype="http://www.w3.org/2001/XMLSchema#string">nimbus01.bullpen.net</mm:CSName>
-    <mm:handle rdf:datatype="http://www.w3.org/2001/XMLSchema#string">14396</mm:handle>
-    <mm:name rdf:datatype="http://www.w3.org/2001/XMLSchema#string">nautilus</mm:name>
-    <mm:processTTY rdf:datatype="http://www.w3.org/2001/XMLSchema#string">?</mm:processTTY>
-    <mm:executionState rdf:datatype="http://www.w3.org/2001/XMLSchema#int">6</mm:executionState>
-    <mm:enabledDefault rdf:datatype="http://www.w3.org/2001/XMLSchema#int">2</mm:enabledDefault>
-    <mm:processGroupID rdf:datatype="http://www.w3.org/2001/XMLSchema#float">0.0</mm:processGroupID>
-    <mm:processNiceValue rdf:datatype="http://www.w3.org/2001/XMLSchema#float">0.0</mm:processNiceValue>
-    <mm:status rdf:datatype="http://www.w3.org/2001/XMLSchema#string">NULL</mm:status>
-    <mm:kernelModeTime rdf:datatype="http://www.w3.org/2001/XMLSchema#float">0.0</mm:kernelModeTime>
-    <mm:processSessionID rdf:datatype="http://www.w3.org/2001/XMLSchema#float">14396.0</mm:processSessionID>
-    <mm:realUserID rdf:datatype="http://www.w3.org/2001/XMLSchema#float">0.0</mm:realUserID>
-    <mm:parentProcessID rdf:datatype="http://www.w3.org/2001/XMLSchema#string">1</mm:parentProcessID>
-    <mm:creationClassName rdf:datatype="http://www.w3.org/2001/XMLSchema#string">Linux_UnixProcess</mm:creationClassName>
-    <mm:OSCreationClassName rdf:datatype="http://www.w3.org/2001/XMLSchema#string">Linux_OperatingSystem</mm:OSCreationClassName>
-    <mm:otherEnabledState rdf:datatype="http://www.w3.org/2001/XMLSchema#string">NULL</mm:otherEnabledState>
-  </rdf:Description>
-  <rdf:Description rdf:about="http://mvm.com/owl/2010/10/mm.owl#urn:mm:mvm:root/cimv2:Linux_UnixProcess:CIM_ComputerSystem:nimbus01.bullpen.net:321:Linux_OperatingSystem:nimbus01.bullpen.net">
-    <mm:status rdf:datatype="http://www.w3.org/2001/XMLSchema#string">NULL</mm:status>
-    <mm:transitioningToState rdf:datatype="http://www.w3.org/2001/XMLSchema#int">12</mm:transitioningToState>
-    <mm:otherEnabledState rdf:datatype="http://www.w3.org/2001/XMLSchema#string">NULL</mm:otherEnabledState>
-    <mm:description rdf:datatype="http://www.w3.org/2001/XMLSchema#string">This class represents instances of currently running programms.</mm:description>
-    <mm:priority rdf:datatype="http://www.w3.org/2001/XMLSchema#float">19.0</mm:priority>
-    <mm:CSName rdf:datatype="http://www.w3.org/2001/XMLSchema#string">nimbus01.bullpen.net</mm:CSName>
-    <mm:OSName rdf:datatype="http://www.w3.org/2001/XMLSchema#string">nimbus01.bullpen.net</mm:OSName>
-    <rdf:type rdf:resource="http://mvm.com/owl/2010/10/mm.owl#Process"/>
-    <mm:name rdf:datatype="http://www.w3.org/2001/XMLSchema#string">scsi_eh_0</mm:name>
-    <mm:parentProcessID rdf:datatype="http://www.w3.org/2001/XMLSchema#string">14</mm:parentProcessID>
-    <mm:processTTY rdf:datatype="http://www.w3.org/2001/XMLSchema#string">?</mm:processTTY>
-    <mm:caption rdf:datatype="http://www.w3.org/2001/XMLSchema#string">Linux (Unix) Process</mm:caption>
-    <mm:processSessionID rdf:datatype="http://www.w3.org/2001/XMLSchema#float">1.0</mm:processSessionID>
-    <mm:handle rdf:datatype="http://www.w3.org/2001/XMLSchema#string">321</mm:handle>
-    <mm:creationClassName rdf:datatype="http://www.w3.org/2001/XMLSchema#string">Linux_UnixProcess</mm:creationClassName>
-    <mm:OSCreationClassName rdf:datatype="http://www.w3.org/2001/XMLSchema#string">Linux_OperatingSystem</mm:OSCreationClassName>
-    <mm:processGroupID rdf:datatype="http://www.w3.org/2001/XMLSchema#float">0.0</mm:processGroupID>
-    <mm:parameters rdf:datatype="http://www.w3.org/2001/XMLSchema#string">[scsi_eh_0]</mm:parameters>
-    <mm:enabledDefault rdf:datatype="http://www.w3.org/2001/XMLSchema#int">2</mm:enabledDefault>
-    <mm:CSCreationClassName rdf:datatype="http://www.w3.org/2001/XMLSchema#string">Linux_ComputerSystem</mm:CSCreationClassName>
-    <mm:kernelModeTime rdf:datatype="http://www.w3.org/2001/XMLSchema#float">0.0</mm:kernelModeTime>
-    <mm:executionState rdf:datatype="http://www.w3.org/2001/XMLSchema#int">6</mm:executionState>
-    <mm:userModeTime rdf:datatype="http://www.w3.org/2001/XMLSchema#float">0.0</mm:userModeTime>
-    <mm:requestedState rdf:datatype="http://www.w3.org/2001/XMLSchema#int">2</mm:requestedState>
-    <mm:enabledState rdf:datatype="http://www.w3.org/2001/XMLSchema#int">2</mm:enabledState>
-    <mm:creationDate rdf:datatype="http://www.w3.org/2001/XMLSchema#dateTime">2010-09-26T07:40:34.000+00:00</mm:creationDate>
-    <mm:elementName rdf:datatype="http://www.w3.org/2001/XMLSchema#string">scsi_eh_0</mm:elementName>
-    <mm:modulePath rdf:datatype="http://www.w3.org/2001/XMLSchema#string">scsi_eh_0</mm:modulePath>
-    <mm:processNiceValue rdf:datatype="http://www.w3.org/2001/XMLSchema#float">4.2949673E9</mm:processNiceValue>
-    <mm:realUserID rdf:datatype="http://www.w3.org/2001/XMLSchema#float">0.0</mm:realUserID>
-  </rdf:Description>
-  <rdf:Description rdf:about="http://mvm.com/owl/2010/10/mm.owl#urn:mm:mvm:root/cimv2:Linux_UnixProcess:CIM_ComputerSystem:nimbus01.bullpen.net:1906:Linux_OperatingSystem:nimbus01.bullpen.net">
-    <mm:caption rdf:datatype="http://www.w3.org/2001/XMLSchema#string">Linux (Unix) Process</mm:caption>
-    <mm:userModeTime rdf:datatype="http://www.w3.org/2001/XMLSchema#float">2833780.0</mm:userModeTime>
-    <mm:status rdf:datatype="http://www.w3.org/2001/XMLSchema#string">NULL</mm:status>
-    <mm:parameters rdf:datatype="http://www.w3.org/2001/XMLSchema#string">/dev/hda</mm:parameters>
-    <mm:OSCreationClassName rdf:datatype="http://www.w3.org/2001/XMLSchema#string">Linux_OperatingSystem</mm:OSCreationClassName>
-    <mm:CSCreationClassName rdf:datatype="http://www.w3.org/2001/XMLSchema#string">Linux_ComputerSystem</mm:CSCreationClassName>
-    <mm:transitioningToState rdf:datatype="http://www.w3.org/2001/XMLSchema#int">12</mm:transitioningToState>
-    <mm:executionState rdf:datatype="http://www.w3.org/2001/XMLSchema#int">6</mm:executionState>
-    <mm:otherEnabledState rdf:datatype="http://www.w3.org/2001/XMLSchema#string">NULL</mm:otherEnabledState>
-    <mm:creationClassName rdf:datatype="http://www.w3.org/2001/XMLSchema#string">Linux_UnixProcess</mm:creationClassName>
-    <mm:handle rdf:datatype="http://www.w3.org/2001/XMLSchema#string">1906</mm:handle>
-    <mm:creationDate rdf:datatype="http://www.w3.org/2001/XMLSchema#dateTime">2010-09-26T07:41:23.000+00:00</mm:creationDate>
-    <mm:description rdf:datatype="http://www.w3.org/2001/XMLSchema#string">This class represents instances of currently running programms.</mm:description>
-    <rdf:type rdf:resource="http://mvm.com/owl/2010/10/mm.owl#Process"/>
-    <mm:processSessionID rdf:datatype="http://www.w3.org/2001/XMLSchema#float">1880.0</mm:processSessionID>
-    <mm:realUserID rdf:datatype="http://www.w3.org/2001/XMLSchema#float">0.0</mm:realUserID>
-    <mm:CSName rdf:datatype="http://www.w3.org/2001/XMLSchema#string">nimbus01.bullpen.net</mm:CSName>
-    <mm:processTTY rdf:datatype="http://www.w3.org/2001/XMLSchema#string">?</mm:processTTY>
-    <mm:processGroupID rdf:datatype="http://www.w3.org/2001/XMLSchema#float">0.0</mm:processGroupID>
-    <mm:elementName rdf:datatype="http://www.w3.org/2001/XMLSchema#string">hald-addon-storage:</mm:elementName>
-    <mm:modulePath rdf:datatype="http://www.w3.org/2001/XMLSchema#string">/usr/libexec/hald-addon-storage</mm:modulePath>
-    <mm:requestedState rdf:datatype="http://www.w3.org/2001/XMLSchema#int">2</mm:requestedState>
-    <mm:enabledState rdf:datatype="http://www.w3.org/2001/XMLSchema#int">2</mm:enabledState>
-    <mm:parentProcessID rdf:datatype="http://www.w3.org/2001/XMLSchema#string">1881</mm:parentProcessID>
-    <mm:enabledDefault rdf:datatype="http://www.w3.org/2001/XMLSchema#int">2</mm:enabledDefault>
-    <mm:kernelModeTime rdf:datatype="http://www.w3.org/2001/XMLSchema#float">0.0</mm:kernelModeTime>
-    <mm:OSName rdf:datatype="http://www.w3.org/2001/XMLSchema#string">nimbus01.bullpen.net</mm:OSName>
-    <mm:processNiceValue rdf:datatype="http://www.w3.org/2001/XMLSchema#float">0.0</mm:processNiceValue>
-    <mm:priority rdf:datatype="http://www.w3.org/2001/XMLSchema#float">21.0</mm:priority>
-    <mm:name rdf:datatype="http://www.w3.org/2001/XMLSchema#string">hald-addon-storage:</mm:name>
-  </rdf:Description>
-  <rdf:Description rdf:about="http://mvm.com/owl/2010/10/mm.owl#urn:mm:mvm:root/cimv2:slang-devel:slang-devel:2:36:2.0.6">
-    <mm:version rdf:datatype="http://www.w3.org/2001/XMLSchema#string">2.0.6</mm:version>
-    <mm:softwareElementState rdf:datatype="http://www.w3.org/2001/XMLSchema#int">2</mm:softwareElementState>
-    <mm:softwareElementID rdf:datatype="http://www.w3.org/2001/XMLSchema#string">slang-devel</mm:softwareElementID>
-    <mm:name rdf:datatype="http://www.w3.org/2001/XMLSchema#string">slang-devel</mm:name>
-    <mm:manufacturer rdf:datatype="http://www.w3.org/2001/XMLSchema#string">Red Hat, Inc. &lt;http://bugzilla.redhat.com/bugzilla&gt;</mm:manufacturer>
-    <mm:caption rdf:datatype="http://www.w3.org/2001/XMLSchema#string">The static library and header files for development using S-Lang.</mm:caption>
-    <mm:targetOperatingSystem rdf:datatype="http://www.w3.org/2001/XMLSchema#int">36</mm:targetOperatingSystem>
-    <rdf:type rdf:resource="http://mvm.com/owl/2010/10/mm.owl#SoftwareElement"/>
-  </rdf:Description>
-  <rdf:Description rdf:about="http://mvm.com/owl/2010/10/mm.owl#urn:mm:mvm:root/cimv2:poppler:poppler:2:36:0.5.4">
-    <mm:version rdf:datatype="http://www.w3.org/2001/XMLSchema#string">0.5.4</mm:version>
-    <mm:targetOperatingSystem rdf:datatype="http://www.w3.org/2001/XMLSchema#int">36</mm:targetOperatingSystem>
-    <mm:softwareElementState rdf:datatype="http://www.w3.org/2001/XMLSchema#int">2</mm:softwareElementState>
-    <mm:softwareElementID rdf:datatype="http://www.w3.org/2001/XMLSchema#string">poppler</mm:softwareElementID>
-    <mm:name rdf:datatype="http://www.w3.org/2001/XMLSchema#string">poppler</mm:name>
-    <mm:manufacturer rdf:datatype="http://www.w3.org/2001/XMLSchema#string">Red Hat, Inc. &lt;http://bugzilla.redhat.com/bugzilla&gt;</mm:manufacturer>
-    <mm:caption rdf:datatype="http://www.w3.org/2001/XMLSchema#string">PDF rendering library</mm:caption>
-    <rdf:type rdf:resource="http://mvm.com/owl/2010/10/mm.owl#SoftwareElement"/>
-  </rdf:Description>
-  <rdf:Description rdf:about="http://mvm.com/owl/2010/10/mm.owl#urn:mm:mvm:root/cimv2:python-elementtree:python-elementtree:2:36:1.2.6">
-    <mm:version rdf:datatype="http://www.w3.org/2001/XMLSchema#string">1.2.6</mm:version>
-    <mm:targetOperatingSystem rdf:datatype="http://www.w3.org/2001/XMLSchema#int">36</mm:targetOperatingSystem>
-    <mm:softwareElementState rdf:datatype="http://www.w3.org/2001/XMLSchema#int">2</mm:softwareElementState>
-    <mm:softwareElementID rdf:datatype="http://www.w3.org/2001/XMLSchema#string">python-elementtree</mm:softwareElementID>
-    <mm:name rdf:datatype="http://www.w3.org/2001/XMLSchema#string">python-elementtree</mm:name>
-    <mm:manufacturer rdf:datatype="http://www.w3.org/2001/XMLSchema#string">Red Hat, Inc. &lt;http://bugzilla.redhat.com/bugzilla&gt;</mm:manufacturer>
-    <mm:caption rdf:datatype="http://www.w3.org/2001/XMLSchema#string">Fast XML parser and writer</mm:caption>
-    <rdf:type rdf:resource="http://mvm.com/owl/2010/10/mm.owl#SoftwareElement"/>
-  </rdf:Description>
-  <rdf:Description rdf:about="http://mvm.com/owl/2010/10/mm.owl#urn:mm:mvm:root/cimv2:bluez-gnome:bluez-gnome:2:36:0.5">
-    <mm:version rdf:datatype="http://www.w3.org/2001/XMLSchema#string">0.5</mm:version>
-    <mm:targetOperatingSystem rdf:datatype="http://www.w3.org/2001/XMLSchema#int">36</mm:targetOperatingSystem>
-    <mm:softwareElementState rdf:datatype="http://www.w3.org/2001/XMLSchema#int">2</mm:softwareElementState>
-    <mm:softwareElementID rdf:datatype="http://www.w3.org/2001/XMLSchema#string">bluez-gnome</mm:softwareElementID>
-    <mm:name rdf:datatype="http://www.w3.org/2001/XMLSchema#string">bluez-gnome</mm:name>
-    <mm:manufacturer rdf:datatype="http://www.w3.org/2001/XMLSchema#string">Red Hat, Inc. &lt;http://bugzilla.redhat.com/bugzilla&gt;</mm:manufacturer>
-    <mm:caption rdf:datatype="http://www.w3.org/2001/XMLSchema#string">Bluetooth pairing and control applet</mm:caption>
-    <rdf:type rdf:resource="http://mvm.com/owl/2010/10/mm.owl#SoftwareElement"/>
-  </rdf:Description>
-  <rdf:Description rdf:about="http://mvm.com/owl/2010/10/mm.owl#urn:mm:mvm:root/cimv2:groff:groff:2:36:1.18.1.1">
-    <mm:version rdf:datatype="http://www.w3.org/2001/XMLSchema#string">1.18.1.1</mm:version>
-    <mm:targetOperatingSystem rdf:datatype="http://www.w3.org/2001/XMLSchema#int">36</mm:targetOperatingSystem>
-    <mm:softwareElementState rdf:datatype="http://www.w3.org/2001/XMLSchema#int">2</mm:softwareElementState>
-    <mm:softwareElementID rdf:datatype="http://www.w3.org/2001/XMLSchema#string">groff</mm:softwareElementID>
-    <mm:name rdf:datatype="http://www.w3.org/2001/XMLSchema#string">groff</mm:name>
-    <mm:manufacturer rdf:datatype="http://www.w3.org/2001/XMLSchema#string">Red Hat, Inc. &lt;http://bugzilla.redhat.com/bugzilla&gt;</mm:manufacturer>
-    <mm:caption rdf:datatype="http://www.w3.org/2001/XMLSchema#string">A document formatting system.</mm:caption>
-    <rdf:type rdf:resource="http://mvm.com/owl/2010/10/mm.owl#SoftwareElement"/>
-  </rdf:Description>
-  <rdf:Description rdf:about="http://mvm.com/owl/2010/10/mm.owl#urn:mm:mvm:root/cimv2:esound:esound:2:36:0.2.36">
-    <mm:version rdf:datatype="http://www.w3.org/2001/XMLSchema#string">0.2.36</mm:version>
-    <mm:softwareElementState rdf:datatype="http://www.w3.org/2001/XMLSchema#int">2</mm:softwareElementState>
-    <mm:softwareElementID rdf:datatype="http://www.w3.org/2001/XMLSchema#string">esound</mm:softwareElementID>
-    <mm:name rdf:datatype="http://www.w3.org/2001/XMLSchema#string">esound</mm:name>
-    <mm:manufacturer rdf:datatype="http://www.w3.org/2001/XMLSchema#string">Red Hat, Inc. &lt;http://bugzilla.redhat.com/bugzilla&gt;</mm:manufacturer>
-    <mm:caption rdf:datatype="http://www.w3.org/2001/XMLSchema#string">Allows several audio streams to play on a single audio device.</mm:caption>
-    <mm:targetOperatingSystem rdf:datatype="http://www.w3.org/2001/XMLSchema#int">36</mm:targetOperatingSystem>
-    <rdf:type rdf:resource="http://mvm.com/owl/2010/10/mm.owl#SoftwareElement"/>
-  </rdf:Description>
-  <rdf:Description rdf:about="http://mvm.com/owl/2010/10/mm.owl#urn:mm:mvm:root/cimv2:xorg-x11-drv-aiptek:xorg-x11-drv-aiptek:2:36:1.0.1">
-    <mm:version rdf:datatype="http://www.w3.org/2001/XMLSchema#string">1.0.1</mm:version>
-    <mm:targetOperatingSystem rdf:datatype="http://www.w3.org/2001/XMLSchema#int">36</mm:targetOperatingSystem>
-    <mm:softwareElementState rdf:datatype="http://www.w3.org/2001/XMLSchema#int">2</mm:softwareElementState>
-    <mm:softwareElementID rdf:datatype="http://www.w3.org/2001/XMLSchema#string">xorg-x11-drv-aiptek</mm:softwareElementID>
-    <mm:name rdf:datatype="http://www.w3.org/2001/XMLSchema#string">xorg-x11-drv-aiptek</mm:name>
-    <mm:manufacturer rdf:datatype="http://www.w3.org/2001/XMLSchema#string">Red Hat, Inc. &lt;http://bugzilla.redhat.com/bugzilla&gt;</mm:manufacturer>
-    <mm:caption rdf:datatype="http://www.w3.org/2001/XMLSchema#string">Xorg X11 aiptek input driver</mm:caption>
-    <rdf:type rdf:resource="http://mvm.com/owl/2010/10/mm.owl#SoftwareElement"/>
-  </rdf:Description>
-  <rdf:Description rdf:about="http://mvm.com/owl/2010/10/mm.owl#urn:mm:mvm:root/cimv2:nss_db:nss_db:2:36:2.2">
-    <mm:version rdf:datatype="http://www.w3.org/2001/XMLSchema#string">2.2</mm:version>
-    <mm:softwareElementState rdf:datatype="http://www.w3.org/2001/XMLSchema#int">2</mm:softwareElementState>
-    <mm:softwareElementID rdf:datatype="http://www.w3.org/2001/XMLSchema#string">nss_db</mm:softwareElementID>
-    <mm:name rdf:datatype="http://www.w3.org/2001/XMLSchema#string">nss_db</mm:name>
-    <mm:manufacturer rdf:datatype="http://www.w3.org/2001/XMLSchema#string">Red Hat, Inc. &lt;http://bugzilla.redhat.com/bugzilla&gt;</mm:manufacturer>
-    <mm:caption rdf:datatype="http://www.w3.org/2001/XMLSchema#string">An NSS library for the Berkeley DB.</mm:caption>
-    <mm:targetOperatingSystem rdf:datatype="http://www.w3.org/2001/XMLSchema#int">36</mm:targetOperatingSystem>
-    <rdf:type rdf:resource="http://mvm.com/owl/2010/10/mm.owl#SoftwareElement"/>
-  </rdf:Description>
-  <rdf:Description rdf:about="http://mvm.com/owl/2010/10/mm.owl#urn:mm:mvm:root/cimv2:control-center:control-center:2:36:2.16.0">
-    <mm:version rdf:datatype="http://www.w3.org/2001/XMLSchema#string">2.16.0</mm:version>
-    <mm:softwareElementState rdf:datatype="http://www.w3.org/2001/XMLSchema#int">2</mm:softwareElementState>
-    <mm:softwareElementID rdf:datatype="http://www.w3.org/2001/XMLSchema#string">control-center</mm:softwareElementID>
-    <mm:name rdf:datatype="http://www.w3.org/2001/XMLSchema#string">control-center</mm:name>
-    <mm:manufacturer rdf:datatype="http://www.w3.org/2001/XMLSchema#string">Red Hat, Inc. &lt;http://bugzilla.redhat.com/bugzilla&gt;</mm:manufacturer>
-    <mm:caption rdf:datatype="http://www.w3.org/2001/XMLSchema#string">GNOME Control Center</mm:caption>
-    <mm:targetOperatingSystem rdf:datatype="http://www.w3.org/2001/XMLSchema#int">36</mm:targetOperatingSystem>
-    <rdf:type rdf:resource="http://mvm.com/owl/2010/10/mm.owl#SoftwareElement"/>
-  </rdf:Description>
-  <rdf:Description rdf:about="http://mvm.com/owl/2010/10/mm.owl#urn:mm:mvm:root/cimv2:libselinux-devel:libselinux-devel:2:36:1.33.4">
-    <mm:version rdf:datatype="http://www.w3.org/2001/XMLSchema#string">1.33.4</mm:version>
-    <mm:softwareElementState rdf:datatype="http://www.w3.org/2001/XMLSchema#int">2</mm:softwareElementState>
-    <mm:softwareElementID rdf:datatype="http://www.w3.org/2001/XMLSchema#string">libselinux-devel</mm:softwareElementID>
-    <mm:name rdf:datatype="http://www.w3.org/2001/XMLSchema#string">libselinux-devel</mm:name>
-    <mm:manufacturer rdf:datatype="http://www.w3.org/2001/XMLSchema#string">Red Hat, Inc. &lt;http://bugzilla.redhat.com/bugzilla&gt;</mm:manufacturer>
-    <mm:caption rdf:datatype="http://www.w3.org/2001/XMLSchema#string">Header files and libraries used to build SELinux</mm:caption>
-    <mm:targetOperatingSystem rdf:datatype="http://www.w3.org/2001/XMLSchema#int">36</mm:targetOperatingSystem>
-    <rdf:type rdf:resource="http://mvm.com/owl/2010/10/mm.owl#SoftwareElement"/>
-  </rdf:Description>
-  <rdf:Description rdf:about="http://mvm.com/owl/2010/10/mm.owl#urn:mm:mvm:root/cimv2:libXxf86vm:libXxf86vm:2:36:1.0.1">
-    <mm:version rdf:datatype="http://www.w3.org/2001/XMLSchema#string">1.0.1</mm:version>
-    <mm:softwareElementState rdf:datatype="http://www.w3.org/2001/XMLSchema#int">2</mm:softwareElementState>
-    <mm:softwareElementID rdf:datatype="http://www.w3.org/2001/XMLSchema#string">libXxf86vm</mm:softwareElementID>
-    <mm:name rdf:datatype="http://www.w3.org/2001/XMLSchema#string">libXxf86vm</mm:name>
-    <mm:manufacturer rdf:datatype="http://www.w3.org/2001/XMLSchema#string">Red Hat, Inc. &lt;http://bugzilla.redhat.com/bugzilla&gt;</mm:manufacturer>
-    <mm:caption rdf:datatype="http://www.w3.org/2001/XMLSchema#string">X.Org X11 libXxf86vm runtime library</mm:caption>
-    <mm:targetOperatingSystem rdf:datatype="http://www.w3.org/2001/XMLSchema#int">36</mm:targetOperatingSystem>
-    <rdf:type rdf:resource="http://mvm.com/owl/2010/10/mm.owl#SoftwareElement"/>
-  </rdf:Description>
-  <rdf:Description rdf:about="http://mvm.com/owl/2010/10/mm.owl#urn:mm:mvm:root/cimv2:CIM_NetworkPort:eth0:CIM_ComputerSystem:nimbus01.bullpen.net">
-    <mm:enabledState rdf:datatype="http://www.w3.org/2001/XMLSchema#int">2</mm:enabledState>
-    <mm:status rdf:datatype="http://www.w3.org/2001/XMLSchema#string">OK</mm:status>
-    <mm:elementName rdf:datatype="http://www.w3.org/2001/XMLSchema#string">eth0</mm:elementName>
-    <mm:description rdf:datatype="http://www.w3.org/2001/XMLSchema#string">This class represents instances of available Ethernet Ports.</mm:description>
-    <mm:deviceID rdf:datatype="http://www.w3.org/2001/XMLSchema#string">eth0</mm:deviceID>
-    <mm:requestedState rdf:datatype="http://www.w3.org/2001/XMLSchema#int">2</mm:requestedState>
-    <mm:enabledDefault rdf:datatype="http://www.w3.org/2001/XMLSchema#int">2</mm:enabledDefault>
-    <mm:speed rdf:datatype="http://www.w3.org/2001/XMLSchema#float">0.0</mm:speed>
-    <mm:transitioningToState rdf:datatype="http://www.w3.org/2001/XMLSchema#int">12</mm:transitioningToState>
-    <mm:creationClassName rdf:datatype="http://www.w3.org/2001/XMLSchema#string">Linux_EthernetPort</mm:creationClassName>
-    <rdf:type rdf:resource="http://mvm.com/owl/2010/10/mm.owl#NetworkPort"/>
-    <mm:linkTechnology rdf:datatype="http://www.w3.org/2001/XMLSchema#int">2</mm:linkTechnology>
-    <mm:name rdf:datatype="http://www.w3.org/2001/XMLSchema#string">eth0</mm:name>
-    <mm:otherLinkTechnology rdf:datatype="http://www.w3.org/2001/XMLSchema#string">NULL</mm:otherLinkTechnology>
-    <mm:maxSpeed rdf:datatype="http://www.w3.org/2001/XMLSchema#float">0.0</mm:maxSpeed>
-    <mm:caption rdf:datatype="http://www.w3.org/2001/XMLSchema#string">Linux EthernetPort</mm:caption>
-    <mm:systemCreationClassName rdf:datatype="http://www.w3.org/2001/XMLSchema#string">Linux_ComputerSystem</mm:systemCreationClassName>
-    <mm:systemName rdf:datatype="http://www.w3.org/2001/XMLSchema#string">nimbus01.bullpen.net</mm:systemName>
-    <mm:otherEnabledState rdf:datatype="http://www.w3.org/2001/XMLSchema#string">NULL</mm:otherEnabledState>
-  </rdf:Description>
-  <rdf:Description rdf:about="http://mvm.com/owl/2010/10/mm.owl#urn:mm:mvm:root/cimv2:elinks:elinks:2:36:0.11.1">
-    <mm:version rdf:datatype="http://www.w3.org/2001/XMLSchema#string">0.11.1</mm:version>
-    <mm:targetOperatingSystem rdf:datatype="http://www.w3.org/2001/XMLSchema#int">36</mm:targetOperatingSystem>
-    <mm:softwareElementState rdf:datatype="http://www.w3.org/2001/XMLSchema#int">2</mm:softwareElementState>
-    <mm:softwareElementID rdf:datatype="http://www.w3.org/2001/XMLSchema#string">elinks</mm:softwareElementID>
-    <mm:name rdf:datatype="http://www.w3.org/2001/XMLSchema#string">elinks</mm:name>
-    <mm:manufacturer rdf:datatype="http://www.w3.org/2001/XMLSchema#string">Red Hat, Inc. &lt;http://bugzilla.redhat.com/bugzilla&gt;</mm:manufacturer>
-    <mm:caption rdf:datatype="http://www.w3.org/2001/XMLSchema#string">A text-mode Web browser.</mm:caption>
-    <rdf:type rdf:resource="http://mvm.com/owl/2010/10/mm.owl#SoftwareElement"/>
-  </rdf:Description>
-  <rdf:Description rdf:about="http://mvm.com/owl/2010/10/mm.owl#urn:mm:mvm:root/cimv2:Linux_UnixProcess:CIM_ComputerSystem:nimbus01.bullpen.net:14328:Linux_OperatingSystem:nimbus01.bullpen.net">
-    <mm:creationClassName rdf:datatype="http://www.w3.org/2001/XMLSchema#string">Linux_UnixProcess</mm:creationClassName>
-    <mm:description rdf:datatype="http://www.w3.org/2001/XMLSchema#string">This class represents instances of currently running programms.</mm:description>
-    <mm:otherEnabledState rdf:datatype="http://www.w3.org/2001/XMLSchema#string">NULL</mm:otherEnabledState>
-    <mm:priority rdf:datatype="http://www.w3.org/2001/XMLSchema#float">21.0</mm:priority>
-    <mm:enabledState rdf:datatype="http://www.w3.org/2001/XMLSchema#int">2</mm:enabledState>
-    <mm:handle rdf:datatype="http://www.w3.org/2001/XMLSchema#string">14328</mm:handle>
-    <mm:parameters rdf:datatype="http://www.w3.org/2001/XMLSchema#string">/etc/X11/xinit/Xclients"</mm:parameters>
-    <mm:processSessionID rdf:datatype="http://www.w3.org/2001/XMLSchema#float">14328.0</mm:processSessionID>
-    <mm:parentProcessID rdf:datatype="http://www.w3.org/2001/XMLSchema#string">14292</mm:parentProcessID>
-    <mm:kernelModeTime rdf:datatype="http://www.w3.org/2001/XMLSchema#float">0.0</mm:kernelModeTime>
-    <mm:processNiceValue rdf:datatype="http://www.w3.org/2001/XMLSchema#float">0.0</mm:processNiceValue>
-    <mm:enabledDefault rdf:datatype="http://www.w3.org/2001/XMLSchema#int">2</mm:enabledDefault>
-    <mm:processGroupID rdf:datatype="http://www.w3.org/2001/XMLSchema#float">0.0</mm:processGroupID>
-    <mm:processTTY rdf:datatype="http://www.w3.org/2001/XMLSchema#string">?</mm:processTTY>
-    <rdf:type rdf:resource="http://mvm.com/owl/2010/10/mm.owl#Process"/>
-    <mm:modulePath rdf:datatype="http://www.w3.org/2001/XMLSchema#string">ssh-agent</mm:modulePath>
-    <mm:OSName rdf:datatype="http://www.w3.org/2001/XMLSchema#string">nimbus01.bullpen.net</mm:OSName>
-    <mm:CSName rdf:datatype="http://www.w3.org/2001/XMLSchema#string">nimbus01.bullpen.net</mm:CSName>
-    <mm:requestedState rdf:datatype="http://www.w3.org/2001/XMLSchema#int">2</mm:requestedState>
-    <mm:realUserID rdf:datatype="http://www.w3.org/2001/XMLSchema#float">0.0</mm:realUserID>
-    <mm:OSCreationClassName rdf:datatype="http://www.w3.org/2001/XMLSchema#string">Linux_OperatingSystem</mm:OSCreationClassName>
-    <mm:name rdf:datatype="http://www.w3.org/2001/XMLSchema#string">ssh-agent</mm:name>
-    <mm:CSCreationClassName rdf:datatype="http://www.w3.org/2001/XMLSchema#string">Linux_ComputerSystem</mm:CSCreationClassName>
-    <mm:executionState rdf:datatype="http://www.w3.org/2001/XMLSchema#int">6</mm:executionState>
-    <mm:status rdf:datatype="http://www.w3.org/2001/XMLSchema#string">NULL</mm:status>
-    <mm:creationDate rdf:datatype="http://www.w3.org/2001/XMLSchema#dateTime">2010-10-01T07:12:44.000+00:00</mm:creationDate>
-    <mm:elementName rdf:datatype="http://www.w3.org/2001/XMLSchema#string">ssh-agent</mm:elementName>
-    <mm:transitioningToState rdf:datatype="http://www.w3.org/2001/XMLSchema#int">12</mm:transitioningToState>
-    <mm:caption rdf:datatype="http://www.w3.org/2001/XMLSchema#string">Linux (Unix) Process</mm:caption>
-    <mm:userModeTime rdf:datatype="http://www.w3.org/2001/XMLSchema#float">80.0</mm:userModeTime>
-  </rdf:Description>
-  <rdf:Description rdf:about="http://mvm.com/owl/2010/10/mm.owl#urn:mm:mvm:root/cimv2:sqlite:sqlite:2:36:3.3.6">
-    <mm:version rdf:datatype="http://www.w3.org/2001/XMLSchema#string">3.3.6</mm:version>
-    <mm:softwareElementState rdf:datatype="http://www.w3.org/2001/XMLSchema#int">2</mm:softwareElementState>
-    <mm:softwareElementID rdf:datatype="http://www.w3.org/2001/XMLSchema#string">sqlite</mm:softwareElementID>
-    <mm:name rdf:datatype="http://www.w3.org/2001/XMLSchema#string">sqlite</mm:name>
-    <mm:manufacturer rdf:datatype="http://www.w3.org/2001/XMLSchema#string">Red Hat, Inc. &lt;http://bugzilla.redhat.com/bugzilla&gt;</mm:manufacturer>
-    <mm:caption rdf:datatype="http://www.w3.org/2001/XMLSchema#string">Library that implements an embeddable SQL database engine</mm:caption>
-    <mm:targetOperatingSystem rdf:datatype="http://www.w3.org/2001/XMLSchema#int">36</mm:targetOperatingSystem>
-    <rdf:type rdf:resource="http://mvm.com/owl/2010/10/mm.owl#SoftwareElement"/>
-  </rdf:Description>
-  <rdf:Description rdf:about="http://mvm.com/owl/2010/10/mm.owl#urn:mm:mvm:root/cimv2:gstreamer:gstreamer:2:36:0.10.20">
-    <mm:version rdf:datatype="http://www.w3.org/2001/XMLSchema#string">0.10.20</mm:version>
-    <mm:softwareElementState rdf:datatype="http://www.w3.org/2001/XMLSchema#int">2</mm:softwareElementState>
-    <mm:softwareElementID rdf:datatype="http://www.w3.org/2001/XMLSchema#string">gstreamer</mm:softwareElementID>
-    <mm:name rdf:datatype="http://www.w3.org/2001/XMLSchema#string">gstreamer</mm:name>
-    <mm:manufacturer rdf:datatype="http://www.w3.org/2001/XMLSchema#string">Red Hat, Inc. &lt;http://bugzilla.redhat.com/bugzilla&gt;</mm:manufacturer>
-    <mm:caption rdf:datatype="http://www.w3.org/2001/XMLSchema#string">GStreamer streaming media framework runtime</mm:caption>
-    <mm:targetOperatingSystem rdf:datatype="http://www.w3.org/2001/XMLSchema#int">36</mm:targetOperatingSystem>
-    <rdf:type rdf:resource="http://mvm.com/owl/2010/10/mm.owl#SoftwareElement"/>
-  </rdf:Description>
-  <rdf:Description rdf:about="http://mvm.com/owl/2010/10/mm.owl#urn:mm:mvm:root/cimv2:alsa-utils:alsa-utils:2:36:1.0.17">
-    <mm:version rdf:datatype="http://www.w3.org/2001/XMLSchema#string">1.0.17</mm:version>
-    <mm:targetOperatingSystem rdf:datatype="http://www.w3.org/2001/XMLSchema#int">36</mm:targetOperatingSystem>
-    <mm:softwareElementState rdf:datatype="http://www.w3.org/2001/XMLSchema#int">2</mm:softwareElementState>
-    <mm:softwareElementID rdf:datatype="http://www.w3.org/2001/XMLSchema#string">alsa-utils</mm:softwareElementID>
-    <mm:name rdf:datatype="http://www.w3.org/2001/XMLSchema#string">alsa-utils</mm:name>
-    <mm:manufacturer rdf:datatype="http://www.w3.org/2001/XMLSchema#string">Red Hat, Inc. &lt;http://bugzilla.redhat.com/bugzilla&gt;</mm:manufacturer>
-    <mm:caption rdf:datatype="http://www.w3.org/2001/XMLSchema#string">Advanced Linux Sound Architecture (ALSA) utilities</mm:caption>
-    <rdf:type rdf:resource="http://mvm.com/owl/2010/10/mm.owl#SoftwareElement"/>
-  </rdf:Description>
-  <rdf:Description rdf:about="http://mvm.com/owl/2010/10/mm.owl#urn:mm:mvm:root/cimv2:qt:qt:2:36:3.3.6">
-    <mm:version rdf:datatype="http://www.w3.org/2001/XMLSchema#string">3.3.6</mm:version>
-    <mm:targetOperatingSystem rdf:datatype="http://www.w3.org/2001/XMLSchema#int">36</mm:targetOperatingSystem>
-    <mm:softwareElementState rdf:datatype="http://www.w3.org/2001/XMLSchema#int">2</mm:softwareElementState>
-    <mm:softwareElementID rdf:datatype="http://www.w3.org/2001/XMLSchema#string">qt</mm:softwareElementID>
-    <mm:name rdf:datatype="http://www.w3.org/2001/XMLSchema#string">qt</mm:name>
-    <mm:manufacturer rdf:datatype="http://www.w3.org/2001/XMLSchema#string">Red Hat, Inc. &lt;http://bugzilla.redhat.com/bugzilla&gt;</mm:manufacturer>
-    <mm:caption rdf:datatype="http://www.w3.org/2001/XMLSchema#string">The shared library for the Qt GUI toolkit.</mm:caption>
-    <rdf:type rdf:resource="http://mvm.com/owl/2010/10/mm.owl#SoftwareElement"/>
-  </rdf:Description>
-  <rdf:Description rdf:about="http://mvm.com/owl/2010/10/mm.owl#urn:mm:mvm:root/cimv2:words:words:2:36:3.0">
-    <mm:version rdf:datatype="http://www.w3.org/2001/XMLSchema#string">3.0</mm:version>
-    <mm:targetOperatingSystem rdf:datatype="http://www.w3.org/2001/XMLSchema#int">36</mm:targetOperatingSystem>
-    <mm:softwareElementState rdf:datatype="http://www.w3.org/2001/XMLSchema#int">2</mm:softwareElementState>
-    <mm:softwareElementID rdf:datatype="http://www.w3.org/2001/XMLSchema#string">words</mm:softwareElementID>
-    <mm:name rdf:datatype="http://www.w3.org/2001/XMLSchema#string">words</mm:name>
-    <mm:manufacturer rdf:datatype="http://www.w3.org/2001/XMLSchema#string">Red Hat, Inc. &lt;http://bugzilla.redhat.com/bugzilla&gt;</mm:manufacturer>
-    <mm:caption rdf:datatype="http://www.w3.org/2001/XMLSchema#string">A dictionary of English words for the /usr/share/dict directory.</mm:caption>
-    <rdf:type rdf:resource="http://mvm.com/owl/2010/10/mm.owl#SoftwareElement"/>
-  </rdf:Description>
-  <rdf:Description rdf:about="http://mvm.com/owl/2010/10/mm.owl#urn:mm:mvm:root/cimv2:Linux_UnixProcess:CIM_ComputerSystem:nimbus01.bullpen.net:14431:Linux_OperatingSystem:nimbus01.bullpen.net">
-    <mm:processGroupID rdf:datatype="http://www.w3.org/2001/XMLSchema#float">0.0</mm:processGroupID>
-    <mm:enabledState rdf:datatype="http://www.w3.org/2001/XMLSchema#int">2</mm:enabledState>
-    <mm:priority rdf:datatype="http://www.w3.org/2001/XMLSchema#float">21.0</mm:priority>
-    <mm:caption rdf:datatype="http://www.w3.org/2001/XMLSchema#string">Linux (Unix) Process</mm:caption>
-    <mm:handle rdf:datatype="http://www.w3.org/2001/XMLSchema#string">14431</mm:handle>
-    <mm:status rdf:datatype="http://www.w3.org/2001/XMLSchema#string">NULL</mm:status>
-    <mm:creationClassName rdf:datatype="http://www.w3.org/2001/XMLSchema#string">Linux_UnixProcess</mm:creationClassName>
-    <mm:userModeTime rdf:datatype="http://www.w3.org/2001/XMLSchema#float">590.0</mm:userModeTime>
-    <rdf:type rdf:resource="http://mvm.com/owl/2010/10/mm.owl#Process"/>
-    <mm:CSName rdf:datatype="http://www.w3.org/2001/XMLSchema#string">nimbus01.bullpen.net</mm:CSName>
-    <mm:processSessionID rdf:datatype="http://www.w3.org/2001/XMLSchema#float">14431.0</mm:processSessionID>
-    <mm:name rdf:datatype="http://www.w3.org/2001/XMLSchema#string">python</mm:name>
-    <mm:elementName rdf:datatype="http://www.w3.org/2001/XMLSchema#string">python</mm:elementName>
-    <mm:enabledDefault rdf:datatype="http://www.w3.org/2001/XMLSchema#int">2</mm:enabledDefault>
-    <mm:modulePath rdf:datatype="http://www.w3.org/2001/XMLSchema#string">python</mm:modulePath>
-    <mm:realUserID rdf:datatype="http://www.w3.org/2001/XMLSchema#float">0.0</mm:realUserID>
-    <mm:otherEnabledState rdf:datatype="http://www.w3.org/2001/XMLSchema#string">NULL</mm:otherEnabledState>
-    <mm:parameters rdf:datatype="http://www.w3.org/2001/XMLSchema#string">/usr/bin/puplet</mm:parameters>
-    <mm:OSCreationClassName rdf:datatype="http://www.w3.org/2001/XMLSchema#string">Linux_OperatingSystem</mm:OSCreationClassName>
-    <mm:creationDate rdf:datatype="http://www.w3.org/2001/XMLSchema#dateTime">2010-10-01T07:12:46.000+00:00</mm:creationDate>
-    <mm:processTTY rdf:datatype="http://www.w3.org/2001/XMLSchema#string">?</mm:processTTY>
-    <mm:executionState rdf:datatype="http://www.w3.org/2001/XMLSchema#int">6</mm:executionState>
-    <mm:kernelModeTime rdf:datatype="http://www.w3.org/2001/XMLSchema#float">0.0</mm:kernelModeTime>
-    <mm:description rdf:datatype="http://www.w3.org/2001/XMLSchema#string">This class represents instances of currently running programs.</mm:description>
-    <mm:transitioningToState rdf:datatype="http://www.w3.org/2001/XMLSchema#int">12</mm:transitioningToState>
-    <mm:processNiceValue rdf:datatype="http://www.w3.org/2001/XMLSchema#float">0.0</mm:processNiceValue>
-    <mm:CSCreationClassName rdf:datatype="http://www.w3.org/2001/XMLSchema#string">Linux_ComputerSystem</mm:CSCreationClassName>
-    <mm:OSName rdf:datatype="http://www.w3.org/2001/XMLSchema#string">nimbus01.bullpen.net</mm:OSName>
-    <mm:parentProcessID rdf:datatype="http://www.w3.org/2001/XMLSchema#string">1</mm:parentProcessID>
-    <mm:requestedState rdf:datatype="http://www.w3.org/2001/XMLSchema#int">2</mm:requestedState>
-  </rdf:Description>
-  <rdf:Description rdf:about="http://mvm.com/owl/2010/10/mm.owl#urn:mm:mvm:root/cimv2:desktop-file-utils:desktop-file-utils:2:36:0.10">
-    <mm:version rdf:datatype="http://www.w3.org/2001/XMLSchema#string">0.10</mm:version>
-    <mm:targetOperatingSystem rdf:datatype="http://www.w3.org/2001/XMLSchema#int">36</mm:targetOperatingSystem>
-    <mm:softwareElementState rdf:datatype="http://www.w3.org/2001/XMLSchema#int">2</mm:softwareElementState>
-    <mm:softwareElementID rdf:datatype="http://www.w3.org/2001/XMLSchema#string">desktop-file-utils</mm:softwareElementID>
-    <mm:name rdf:datatype="http://www.w3.org/2001/XMLSchema#string">desktop-file-utils</mm:name>
-    <mm:manufacturer rdf:datatype="http://www.w3.org/2001/XMLSchema#string">Red Hat, Inc. &lt;http://bugzilla.redhat.com/bugzilla&gt;</mm:manufacturer>
-    <mm:caption rdf:datatype="http://www.w3.org/2001/XMLSchema#string">Utilities for manipulating .desktop files</mm:caption>
-    <rdf:type rdf:resource="http://mvm.com/owl/2010/10/mm.owl#SoftwareElement"/>
-  </rdf:Description>
-  <rdf:Description rdf:about="http://mvm.com/owl/2010/10/mm.owl#urn:mm:mvm:root/cimv2:tog-pegasus:tog-pegasus:2:36:2.9.1">
-    <mm:version rdf:datatype="http://www.w3.org/2001/XMLSchema#string">2.9.1</mm:version>
-    <mm:targetOperatingSystem rdf:datatype="http://www.w3.org/2001/XMLSchema#int">36</mm:targetOperatingSystem>
-    <mm:softwareElementState rdf:datatype="http://www.w3.org/2001/XMLSchema#int">2</mm:softwareElementState>
-    <mm:softwareElementID rdf:datatype="http://www.w3.org/2001/XMLSchema#string">tog-pegasus</mm:softwareElementID>
-    <mm:name rdf:datatype="http://www.w3.org/2001/XMLSchema#string">tog-pegasus</mm:name>
-    <mm:manufacturer rdf:datatype="http://www.w3.org/2001/XMLSchema#string">(none)</mm:manufacturer>
-    <mm:caption rdf:datatype="http://www.w3.org/2001/XMLSchema#string">OpenPegasus WBEM Services for Linux</mm:caption>
-    <rdf:type rdf:resource="http://mvm.com/owl/2010/10/mm.owl#SoftwareElement"/>
-  </rdf:Description>
-  <rdf:Descri

<TRUNCATED>


[56/56] [abbrv] incubator-rya git commit: RYA-10 reusing existing collection for geo queries

Posted by mi...@apache.org.
RYA-10 reusing existing collection for geo queries


Project: http://git-wip-us.apache.org/repos/asf/incubator-rya/repo
Commit: http://git-wip-us.apache.org/repos/asf/incubator-rya/commit/7743a42a
Tree: http://git-wip-us.apache.org/repos/asf/incubator-rya/tree/7743a42a
Diff: http://git-wip-us.apache.org/repos/asf/incubator-rya/diff/7743a42a

Branch: refs/heads/master
Commit: 7743a42a5febe440107e037aadc3753643708e83
Parents: 1007611
Author: pujav65 <pu...@gmail.com>
Authored: Mon Dec 21 22:23:34 2015 -0500
Committer: Aaron Mihalik <mi...@alum.mit.edu>
Committed: Tue Dec 22 11:48:32 2015 -0500

----------------------------------------------------------------------
 .../dao/SimpleMongoDBStorageStrategy.java       |  18 +-
 .../java/mvm/rya/mongodb/MongoDBRyaDAOTest.java |  18 +
 .../indexing/mongodb/AbstractMongoIndexer.java  |   5 -
 .../mongodb/GeoMongoDBStorageStrategy.java      |  51 +--
 .../rya/indexing/mongodb/MongoGeoIndexer.java   | 423 ++++++++++---------
 .../indexing/mongo/MongoGeoIndexerSfTest.java   | 306 ++++++++++++++
 .../rya/indexing/mongo/MongoGeoIndexerTest.java | 390 +++++++++++++++++
 7 files changed, 958 insertions(+), 253 deletions(-)
----------------------------------------------------------------------
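
In short: the geo documents no longer go to a dedicated "<prefix>_geo" collection; they are written into the same collection the base Mongo storage strategy uses, keyed so that repeated stores upsert rather than duplicate, and deletes are now supported. A minimal sketch of the collection wiring (2.x Mongo Java driver, config keys as used in the diffs below):

    DB db = mongoClient.getDB(conf.get(MongoDBRdfConfiguration.MONGO_DB_NAME));
    // before: db.getCollection(conf.get(MongoDBRdfConfiguration.MONGO_COLLECTION_PREFIX, "rya") + "_geo");
    DBCollection coll = db.getCollection(
            conf.get(MongoDBRdfConfiguration.MONGO_COLLECTION_PREFIX, "rya"));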


http://git-wip-us.apache.org/repos/asf/incubator-rya/blob/7743a42a/dao/mongodb.rya/src/main/java/mvm/rya/mongodb/dao/SimpleMongoDBStorageStrategy.java
----------------------------------------------------------------------
diff --git a/dao/mongodb.rya/src/main/java/mvm/rya/mongodb/dao/SimpleMongoDBStorageStrategy.java b/dao/mongodb.rya/src/main/java/mvm/rya/mongodb/dao/SimpleMongoDBStorageStrategy.java
index 3ecc0dc..74c8366 100644
--- a/dao/mongodb.rya/src/main/java/mvm/rya/mongodb/dao/SimpleMongoDBStorageStrategy.java
+++ b/dao/mongodb.rya/src/main/java/mvm/rya/mongodb/dao/SimpleMongoDBStorageStrategy.java
@@ -38,13 +38,13 @@ import com.mongodb.DBObject;
 
 public class SimpleMongoDBStorageStrategy implements MongoDBStorageStrategy {
 
-	private static final String ID = "_id";
-	private static final String OBJECT_TYPE = "objectType";
-	private static final String CONTEXT = "context";
-	private static final String PREDICATE = "predicate";
-	private static final String OBJECT = "object";
-	private static final String SUBJECT = "subject";
-	private ValueFactoryImpl factory = new ValueFactoryImpl();
+	protected static final String ID = "_id";
+	protected static final String OBJECT_TYPE = "objectType";
+	protected static final String CONTEXT = "context";
+	protected static final String PREDICATE = "predicate";
+	protected static final String OBJECT = "object";
+	protected static final String SUBJECT = "subject";
+	protected ValueFactoryImpl factory = new ValueFactoryImpl();
 
 
 	public SimpleMongoDBStorageStrategy() {
@@ -115,6 +115,10 @@ public class SimpleMongoDBStorageStrategy implements MongoDBStorageStrategy {
 
 	@Override
 	public DBObject serialize(RyaStatement statement){
+		return serializeInternal(statement);		
+	}
+
+	public BasicDBObject serializeInternal(RyaStatement statement){
 		String context = "";
 		if (statement.getContext() != null){
 			context = statement.getContext().getData();
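
The point of widening these fields to protected and exposing serializeInternal as a BasicDBObject is extension: a subclass can reuse the base document and append its own fields before writing. A minimal sketch (hypothetical subclass, not part of this commit):

    import com.mongodb.BasicDBObject;
    import com.mongodb.DBObject;
    import mvm.rya.api.domain.RyaStatement;
    import mvm.rya.mongodb.dao.SimpleMongoDBStorageStrategy;

    public class ExampleDecoratingStrategy extends SimpleMongoDBStorageStrategy {
        @Override
        public DBObject serialize(RyaStatement statement) {
            // base fields: _id, subject, predicate, object, objectType, context
            BasicDBObject doc = serializeInternal(statement);
            // append() returns the same BasicDBObject, so this chains cleanly
            return doc.append("exampleField", "exampleValue");
        }
    }

GeoMongoDBStorageStrategy below does exactly this with a "location" field.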

http://git-wip-us.apache.org/repos/asf/incubator-rya/blob/7743a42a/dao/mongodb.rya/src/test/java/mvm/rya/mongodb/MongoDBRyaDAOTest.java
----------------------------------------------------------------------
diff --git a/dao/mongodb.rya/src/test/java/mvm/rya/mongodb/MongoDBRyaDAOTest.java b/dao/mongodb.rya/src/test/java/mvm/rya/mongodb/MongoDBRyaDAOTest.java
index 3d900b0..86c01e2 100644
--- a/dao/mongodb.rya/src/test/java/mvm/rya/mongodb/MongoDBRyaDAOTest.java
+++ b/dao/mongodb.rya/src/test/java/mvm/rya/mongodb/MongoDBRyaDAOTest.java
@@ -1,4 +1,22 @@
 package mvm.rya.mongodb;
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ * 
+ *   http://www.apache.org/licenses/LICENSE-2.0
+ * 
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied.  See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
 
 import static org.junit.Assert.assertEquals;
 

http://git-wip-us.apache.org/repos/asf/incubator-rya/blob/7743a42a/extras/indexing/src/main/java/mvm/rya/indexing/mongodb/AbstractMongoIndexer.java
----------------------------------------------------------------------
diff --git a/extras/indexing/src/main/java/mvm/rya/indexing/mongodb/AbstractMongoIndexer.java b/extras/indexing/src/main/java/mvm/rya/indexing/mongodb/AbstractMongoIndexer.java
index 4a708ab..0f8202c 100644
--- a/extras/indexing/src/main/java/mvm/rya/indexing/mongodb/AbstractMongoIndexer.java
+++ b/extras/indexing/src/main/java/mvm/rya/indexing/mongodb/AbstractMongoIndexer.java
@@ -61,11 +61,6 @@ public abstract class AbstractMongoIndexer implements RyaSecondaryIndexer {
     }
 
     @Override
-    public void deleteStatement(RyaStatement stmt) throws IOException {
-        throw new UnsupportedOperationException();
-    }
-
-    @Override
     public void dropGraph(RyaURI... graphs) {
         throw new UnsupportedOperationException();
     }
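
With the blanket UnsupportedOperationException gone from the abstract class, each concrete Mongo indexer is now responsible for deleteStatement itself. The MongoGeoIndexer diff further down supplies the override; repeated here as a sketch for readability:

    @Override
    public void deleteStatement(RyaStatement stmt) throws IOException {
        DBObject obj = storageStrategy.getQuery(stmt); // query identifying this statement's document
        coll.remove(obj);                              // remove the matching geo document
    }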

http://git-wip-us.apache.org/repos/asf/incubator-rya/blob/7743a42a/extras/indexing/src/main/java/mvm/rya/indexing/mongodb/GeoMongoDBStorageStrategy.java
----------------------------------------------------------------------
diff --git a/extras/indexing/src/main/java/mvm/rya/indexing/mongodb/GeoMongoDBStorageStrategy.java b/extras/indexing/src/main/java/mvm/rya/indexing/mongodb/GeoMongoDBStorageStrategy.java
index 0355225..c21f574 100644
--- a/extras/indexing/src/main/java/mvm/rya/indexing/mongodb/GeoMongoDBStorageStrategy.java
+++ b/extras/indexing/src/main/java/mvm/rya/indexing/mongodb/GeoMongoDBStorageStrategy.java
@@ -27,9 +27,12 @@ import java.util.List;
 import java.util.Map;
 import java.util.Set;
 
+import mvm.rya.api.domain.RyaStatement;
+import mvm.rya.api.resolver.RdfToRyaConversions;
 import mvm.rya.indexing.StatementContraints;
 import mvm.rya.indexing.accumulo.StatementSerializer;
 import mvm.rya.indexing.accumulo.geo.GeoParseUtils;
+import mvm.rya.mongodb.dao.SimpleMongoDBStorageStrategy;
 
 import org.apache.commons.codec.binary.Hex;
 import org.openrdf.model.Statement;
@@ -44,14 +47,9 @@ import com.vividsolutions.jts.geom.Geometry;
 import com.vividsolutions.jts.io.ParseException;
 import com.vividsolutions.jts.io.WKTReader;
 
-public class GeoMongoDBStorageStrategy {
+public class GeoMongoDBStorageStrategy extends SimpleMongoDBStorageStrategy{
 
-	private static final String ID = "_id";
 	private static final String GEO = "location";
-	private static final String CONTEXT = "context";
-	private static final String PREDICATE = "predicate";
-	private static final String OBJECT = "object";
-	private static final String SUBJECT = "subject";
 	public enum GeoQueryType {
 		INTERSECTS {
 			public String getKeyword() {
@@ -124,21 +122,6 @@ public class GeoMongoDBStorageStrategy {
 		return query;
 	}
 
-
-	public Statement deserializeDBObject(DBObject queryResult) {
-		Map result = queryResult.toMap();
-		String subject = (String) result.get(SUBJECT);
-		String object = (String) result.get(OBJECT);
-		String predicate = (String) result.get(PREDICATE);
-		String context = (String) result.get(CONTEXT);
-		if (!context.isEmpty()){
-			return StatementSerializer.readStatement(subject, predicate, object, context);			
-		}
-		return StatementSerializer.readStatement(subject, predicate, object);
-	}
-	
-	
-
 	public DBObject serialize(Statement statement) throws ParseException{
 		// if the object is wkt, then try to index it
         // write the statement data to the fields
@@ -146,28 +129,10 @@ public class GeoMongoDBStorageStrategy {
         if(geo == null || geo.isEmpty() || !geo.isValid()) {
             throw new ParseException("Could not create geometry for statement " + statement);
         }
- 		
-		String context = "";
-		if (statement.getContext() != null){
-			context = StatementSerializer.writeContext(statement);
-		}
-		String id = StatementSerializer.writeSubject(statement) + " " + 
-				StatementSerializer.writePredicate(statement) + " " +  StatementSerializer.writeObject(statement) + " " + context;
-		byte[] bytes = id.getBytes();
-		try {
-			MessageDigest digest = MessageDigest.getInstance("SHA-1");
-			bytes = digest.digest(bytes);
-		} catch (NoSuchAlgorithmException e) {
-			// TODO Auto-generated catch block
-			e.printStackTrace();
-		}
-		BasicDBObject doc = new BasicDBObject(ID, new String(Hex.encodeHex(bytes)))
-		.append(GEO, getCorrespondingPoints(geo))
-		.append(SUBJECT, StatementSerializer.writeSubject(statement))
-	    .append(PREDICATE, StatementSerializer.writePredicate(statement))
-	    .append(OBJECT,  StatementSerializer.writeObject(statement))
-	    .append(CONTEXT, context);
-		return doc;
+ 		RyaStatement ryaStatement = RdfToRyaConversions.convertStatement(statement);
+ 		BasicDBObject base = (BasicDBObject) super.serialize(ryaStatement);
+ 		base.append(GEO, getCorrespondingPoints(geo));	
+		return base;
 		
 	}
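
Net effect of this hunk: the hand-rolled SHA-1 _id and field assembly are gone, and serialize now delegates to the base strategy and appends the single geo-specific field. Restated as a sketch (the shared-_id point assumes the base strategy kept the SHA-1 of "subject predicate object context" id scheme the removed code used, which is what lets a geo document land on the same _id as its base triple):

    RyaStatement ryaStatement = RdfToRyaConversions.convertStatement(statement);
    BasicDBObject base = (BasicDBObject) super.serialize(ryaStatement); // _id, subject, predicate, object, objectType, context
    base.append(GEO, getCorrespondingPoints(geo)); // "location": the only geo-specific field
    return base;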
 	

http://git-wip-us.apache.org/repos/asf/incubator-rya/blob/7743a42a/extras/indexing/src/main/java/mvm/rya/indexing/mongodb/MongoGeoIndexer.java
----------------------------------------------------------------------
diff --git a/extras/indexing/src/main/java/mvm/rya/indexing/mongodb/MongoGeoIndexer.java b/extras/indexing/src/main/java/mvm/rya/indexing/mongodb/MongoGeoIndexer.java
index c36b125..199883f 100644
--- a/extras/indexing/src/main/java/mvm/rya/indexing/mongodb/MongoGeoIndexer.java
+++ b/extras/indexing/src/main/java/mvm/rya/indexing/mongodb/MongoGeoIndexer.java
@@ -19,7 +19,6 @@ package mvm.rya.indexing.mongodb;
  * under the License.
  */
 
-
 import info.aduna.iteration.CloseableIteration;
 
 import java.io.IOException;
@@ -28,6 +27,7 @@ import java.util.Arrays;
 import java.util.Set;
 
 import mvm.rya.api.domain.RyaStatement;
+import mvm.rya.api.resolver.RdfToRyaConversions;
 import mvm.rya.api.resolver.RyaToRdfConversions;
 import mvm.rya.indexing.GeoIndexer;
 import mvm.rya.indexing.StatementContraints;
@@ -51,209 +51,236 @@ import com.mongodb.MongoCredential;
 import com.mongodb.ServerAddress;
 import com.vividsolutions.jts.geom.Geometry;
 
-public class MongoGeoIndexer extends AbstractMongoIndexer implements GeoIndexer{
-    
-    private static final Logger logger = Logger.getLogger(MongoGeoIndexer.class);
-    
-    private GeoMongoDBStorageStrategy storageStrategy;
-    private MongoClient mongoClient;
-    private DB db;
-    private DBCollection coll;
-    private Set<URI> predicates;
-    private Configuration conf;
-    private boolean isInit = false;
-    private String tableName = "";
-
-    
-    
-    private void init() throws NumberFormatException, UnknownHostException{
-        ServerAddress server = new ServerAddress(conf.get(MongoDBRdfConfiguration.MONGO_INSTANCE),
-                Integer.valueOf(conf.get(MongoDBRdfConfiguration.MONGO_INSTANCE_PORT)));
-        this.conf = conf;
-        if (conf.get(MongoDBRdfConfiguration.MONGO_USER) != null){
-            MongoCredential cred = MongoCredential.createCredential(conf.get(MongoDBRdfConfiguration.MONGO_USER), conf.get(MongoDBRdfConfiguration.MONGO_USER_PASSWORD),
-                    conf.get(MongoDBRdfConfiguration.MONGO_DB_NAME).toCharArray());
-            mongoClient = new MongoClient(server, Arrays.asList(cred));
-        }
-        else {
-            mongoClient = new MongoClient(server);
+import de.flapdoodle.embed.mongo.distribution.Version;
+import de.flapdoodle.embed.mongo.tests.MongodForTestsFactory;
+
+public class MongoGeoIndexer extends AbstractMongoIndexer implements GeoIndexer {
+
+	private static final Logger logger = Logger
+			.getLogger(MongoGeoIndexer.class);
+
+	private GeoMongoDBStorageStrategy storageStrategy;
+	private MongoClient mongoClient;
+	private DB db;
+	private DBCollection coll;
+	private Set<URI> predicates;
+	private Configuration conf;
+	private boolean isInit = false;
+	private String tableName = "";
+
+	private MongodForTestsFactory testsFactory;
+
+	private void init() throws NumberFormatException, IOException{
+        boolean useMongoTest = conf.getBoolean(MongoDBRdfConfiguration.USE_TEST_MONGO, false);
+        if (useMongoTest) {
+            testsFactory = MongodForTestsFactory.with(Version.Main.PRODUCTION);
+            mongoClient = testsFactory.newMongo();
+            int port = mongoClient.getServerAddressList().get(0).getPort();
+            conf.set(MongoDBRdfConfiguration.MONGO_INSTANCE_PORT, Integer.toString(port));
+        } else {
+            ServerAddress server = new ServerAddress(conf.get(MongoDBRdfConfiguration.MONGO_INSTANCE),
+                    Integer.valueOf(conf.get(MongoDBRdfConfiguration.MONGO_INSTANCE_PORT)));
+            if (conf.get(MongoDBRdfConfiguration.MONGO_USER) != null) {
+                MongoCredential cred = MongoCredential.createCredential(
+                        conf.get(MongoDBRdfConfiguration.MONGO_USER),
+                        conf.get(MongoDBRdfConfiguration.MONGO_DB_NAME),
+                        conf.get(MongoDBRdfConfiguration.MONGO_USER_PASSWORD).toCharArray());
+                mongoClient = new MongoClient(server, Arrays.asList(cred));
+            } else {
+                mongoClient = new MongoClient(server);
+            }
         }
         predicates = ConfigUtils.getGeoPredicates(conf);
         tableName = conf.get(MongoDBRdfConfiguration.MONGO_DB_NAME);
         db = mongoClient.getDB(tableName);
-        coll = db.getCollection(conf.get(MongoDBRdfConfiguration.MONGO_COLLECTION_PREFIX, "rya") + "_geo");
+        coll = db.getCollection(conf.get(MongoDBRdfConfiguration.MONGO_COLLECTION_PREFIX, "rya"));
         storageStrategy = new GeoMongoDBStorageStrategy(Double.valueOf(conf.get(MongoDBRdfConfiguration.MONGO_GEO_MAXDISTANCE, "1e-10")));
     }
-    
-
-    @Override
-    public String getTableName() {
-        return tableName;
-    }
-    
-    @Override
-    public Configuration getConf() {
-        return conf;
-    }
-    
-    //setConf initializes because index is created via reflection
-    @Override
-    public void setConf(Configuration conf) {
-        this.conf = conf;
-        if (!isInit) {
-            try {
-                init();
-                isInit = true;
-            } catch (NumberFormatException e) {
-                logger.warn("Unable to initialize index.  Throwing Runtime Exception. ", e);
-                throw new RuntimeException(e);
-            } catch (UnknownHostException e) {
-                logger.warn("Unable to initialize index.  Throwing Runtime Exception. ", e);
-                throw new RuntimeException(e);
-            }
-        }
-    }
-    
-
-    
-    private void storeStatement(Statement statement) throws IOException {
-        // if this is a valid predicate and a valid geometry
-        boolean isValidPredicate = predicates.isEmpty() || predicates.contains(statement.getPredicate());
-
-        if (isValidPredicate && (statement.getObject() instanceof Literal)) {
-            
-            // add it to the collection
-            try {
-                DBObject obj = storageStrategy.serialize(statement);
-                if (obj != null){
-                    coll.insert(obj);
-                }
-            }
-            catch (com.mongodb.MongoException.DuplicateKey exception){
-                // ignore
-            }
-            catch (com.mongodb.DuplicateKeyException exception){
-                // ignore
-            }
-            catch (Exception ex){
-                // ignore single exceptions
-                ex.printStackTrace();
-            }
-        }
-    }
-    
-    
-    @Override
-    public void storeStatement(RyaStatement statement) throws IOException {
-        storeStatement(RyaToRdfConversions.convertStatement(statement));
-    }
-    
-    
-
-
-    @Override
-    public CloseableIteration<Statement, QueryEvaluationException> queryEquals(
-            Geometry query, StatementContraints contraints) {
-        DBObject queryObj = storageStrategy.getQuery(contraints, query, GeoQueryType.EQUALS);
-        return getIteratorWrapper(queryObj, coll, storageStrategy);
-    }
-
-    @Override
-    public CloseableIteration<Statement, QueryEvaluationException> queryDisjoint(
-            Geometry query, StatementContraints contraints) {
-        throw new UnsupportedOperationException("Disjoint queries are not supported in Mongo DB.");
-    }
-
-    @Override
-    public CloseableIteration<Statement, QueryEvaluationException> queryIntersects(
-                Geometry query, StatementContraints contraints) {
-        DBObject queryObj = storageStrategy.getQuery(contraints, query, GeoQueryType.INTERSECTS);
-        return getIteratorWrapper(queryObj, coll, storageStrategy);
-    }
-
-    @Override
-    public CloseableIteration<Statement, QueryEvaluationException> queryTouches(
-            Geometry query, StatementContraints contraints) {
-        throw new UnsupportedOperationException("Touches queries are not supported in Mongo DB.");
-    }
-
-    @Override
-    public CloseableIteration<Statement, QueryEvaluationException> queryCrosses(
-            Geometry query, StatementContraints contraints) {
-        throw new UnsupportedOperationException("Crosses queries are not supported in Mongo DB.");
-    }
-
-    @Override
-    public CloseableIteration<Statement, QueryEvaluationException> queryWithin(
-            Geometry query, StatementContraints contraints) {
-        DBObject queryObj = storageStrategy.getQuery(contraints, query, GeoQueryType.WITHIN);
-        return getIteratorWrapper(queryObj, coll, storageStrategy);
-    }
-    
-    
-    private CloseableIteration<Statement, QueryEvaluationException> getIteratorWrapper(final DBObject query, final DBCollection coll, final GeoMongoDBStorageStrategy storageStrategy) {
-
-        return new CloseableIteration<Statement, QueryEvaluationException>() {
-
-            private DBCursor cursor = null;
-
-            private DBCursor getIterator() throws QueryEvaluationException {
-                if (cursor == null){
-                    cursor = coll.find(query);
-                }
-                return cursor;
-            }
-
-            @Override
-            public boolean hasNext() throws QueryEvaluationException {
-                return getIterator().hasNext();
-            }
-
-            @Override
-            public Statement next() throws QueryEvaluationException {
-                DBObject feature = getIterator().next();
-                return storageStrategy.deserializeDBObject(feature);
-            }
-
-            @Override
-            public void remove() {
-                throw new UnsupportedOperationException("Remove not implemented");
-            }
-
-            @Override
-            public void close() throws QueryEvaluationException {
-                getIterator().close();
-            }
-        };
-    }
-
-    @Override
-    public CloseableIteration<Statement, QueryEvaluationException> queryContains(
-            Geometry query, StatementContraints contraints) {
-        throw new UnsupportedOperationException("Contains queries are not supported in Mongo DB.");
-    }
-
-    @Override
-    public CloseableIteration<Statement, QueryEvaluationException> queryOverlaps(
-            Geometry query, StatementContraints contraints) {
-        throw new UnsupportedOperationException("Overlaps queries are not supported in Mongo DB.");
-    }
 
-    @Override
-    public Set<URI> getIndexablePredicates() {
-        return predicates;
-    }
-
-    @Override
-    public void flush() throws IOException {
-        // TODO Auto-generated method stub
-
-    }
-
-    @Override
-    public void close() throws IOException {
-        mongoClient.close();
-    }
+	@Override
+	public String getTableName() {
+		return tableName;
+	}
+
+	@Override
+	public Configuration getConf() {
+		return conf;
+	}
+
+	// setConf initializes because index is created via reflection
+	@Override
+	public void setConf(Configuration conf) {
+		this.conf = conf;
+		if (!isInit) {
+			try {
+				init();
+				isInit = true;
+			} catch (NumberFormatException e) {
+				logger.warn(
+						"Unable to initialize index.  Throwing Runtime Exception. ",
+						e);
+				throw new RuntimeException(e);
+			} catch (IOException e) {
+				logger.warn(
+						"Unable to initialize index.  Throwing Runtime Exception. ",
+						e);
+				throw new RuntimeException(e);
+			}
+		}
+	}
+
+	private void storeStatement(Statement statement) throws IOException {
+		// if this is a valid predicate and a valid geometry
+		boolean isValidPredicate = predicates.isEmpty()
+				|| predicates.contains(statement.getPredicate());
+
+		if (isValidPredicate && (statement.getObject() instanceof Literal)) {
+
+			// add it to the collection
+			try {
+				DBObject obj = storageStrategy.serialize(statement);
+				if (obj != null) {
+					DBObject query = storageStrategy
+							.getQuery(RdfToRyaConversions
+									.convertStatement(statement));
+					coll.update(query, obj, true, false);
+				}
+			} catch (com.mongodb.MongoException.DuplicateKey exception) {
+				// ignore
+			} catch (com.mongodb.DuplicateKeyException exception) {
+				// ignore
+			} catch (Exception ex) {
+				// ignore single exceptions
+				ex.printStackTrace();
+			}
+		}
+	}
+
+	@Override
+	public void storeStatement(RyaStatement statement) throws IOException {
+		storeStatement(RyaToRdfConversions.convertStatement(statement));
+	}
+
+	@Override
+	public CloseableIteration<Statement, QueryEvaluationException> queryEquals(
+			Geometry query, StatementContraints contraints) {
+		DBObject queryObj = storageStrategy.getQuery(contraints, query,
+				GeoQueryType.EQUALS);
+		return getIteratorWrapper(queryObj, coll, storageStrategy);
+	}
+
+	@Override
+	public CloseableIteration<Statement, QueryEvaluationException> queryDisjoint(
+			Geometry query, StatementContraints contraints) {
+		throw new UnsupportedOperationException(
+				"Disjoint queries are not supported in Mongo DB.");
+	}
+
+	@Override
+	public CloseableIteration<Statement, QueryEvaluationException> queryIntersects(
+			Geometry query, StatementContraints contraints) {
+		DBObject queryObj = storageStrategy.getQuery(contraints, query,
+				GeoQueryType.INTERSECTS);
+		return getIteratorWrapper(queryObj, coll, storageStrategy);
+	}
+
+	@Override
+	public CloseableIteration<Statement, QueryEvaluationException> queryTouches(
+			Geometry query, StatementContraints contraints) {
+		throw new UnsupportedOperationException(
+				"Touches queries are not supported in Mongo DB.");
+	}
+
+	@Override
+	public CloseableIteration<Statement, QueryEvaluationException> queryCrosses(
+			Geometry query, StatementContraints contraints) {
+		throw new UnsupportedOperationException(
+				"Crosses queries are not supported in Mongo DB.");
+	}
+
+	@Override
+	public CloseableIteration<Statement, QueryEvaluationException> queryWithin(
+			Geometry query, StatementContraints contraints) {
+		DBObject queryObj = storageStrategy.getQuery(contraints, query,
+				GeoQueryType.WITHIN);
+		return getIteratorWrapper(queryObj, coll, storageStrategy);
+	}
+
+	private CloseableIteration<Statement, QueryEvaluationException> getIteratorWrapper(
+			final DBObject query, final DBCollection coll,
+			final GeoMongoDBStorageStrategy storageStrategy) {
+
+		return new CloseableIteration<Statement, QueryEvaluationException>() {
+
+			private DBCursor cursor = null;
+
+			private DBCursor getIterator() throws QueryEvaluationException {
+				if (cursor == null) {
+					cursor = coll.find(query);
+				}
+				return cursor;
+			}
+
+			@Override
+			public boolean hasNext() throws QueryEvaluationException {
+				return getIterator().hasNext();
+			}
+
+			@Override
+			public Statement next() throws QueryEvaluationException {
+				DBObject feature = getIterator().next();
+				RyaStatement statement = storageStrategy
+						.deserializeDBObject(feature);
+				return RyaToRdfConversions.convertStatement(statement);
+			}
+
+			@Override
+			public void remove() {
+				throw new UnsupportedOperationException(
+						"Remove not implemented");
+			}
+
+			@Override
+			public void close() throws QueryEvaluationException {
+				getIterator().close();
+			}
+		};
+	}
+
+	@Override
+	public CloseableIteration<Statement, QueryEvaluationException> queryContains(
+			Geometry query, StatementContraints contraints) {
+		throw new UnsupportedOperationException(
+				"Contains queries are not supported in Mongo DB.");
+	}
+
+	@Override
+	public CloseableIteration<Statement, QueryEvaluationException> queryOverlaps(
+			Geometry query, StatementContraints contraints) {
+		throw new UnsupportedOperationException(
+				"Overlaps queries are not supported in Mongo DB.");
+	}
+
+	@Override
+	public Set<URI> getIndexablePredicates() {
+		return predicates;
+	}
+
+	@Override
+	public void flush() throws IOException {
+		// TODO Auto-generated method stub
+
+	}
+
+	@Override
+	public void close() throws IOException {
+		mongoClient.close();
+	}
+
+	@Override
+	public void deleteStatement(RyaStatement stmt) throws IOException {
+	   DBObject obj = storageStrategy.getQuery(stmt);
+	   coll.remove(obj);
+	}
 
-    
 }
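
Two semantics changes in this file carry the collection reuse: storeStatement now upserts instead of blindly inserting, so re-storing a statement is idempotent, and deleteStatement removes by the same identifying query. A minimal sketch of the 2.x driver calls (the four-argument overload is update(query, object, upsert, multi)):

    DBObject query = storageStrategy.getQuery(RdfToRyaConversions.convertStatement(statement));
    DBObject doc = storageStrategy.serialize(statement); // full document, including "location"
    coll.update(query, doc, true, false);                // upsert=true, multi=false
    // ... and on delete:
    coll.remove(query);                                  // drops the matching document

Separately, init() now honors USE_TEST_MONGO and spins up an embedded instance via MongodForTestsFactory, which is what the new tests below rely on.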

http://git-wip-us.apache.org/repos/asf/incubator-rya/blob/7743a42a/extras/indexing/src/test/java/mvm/rya/indexing/mongo/MongoGeoIndexerSfTest.java
----------------------------------------------------------------------
diff --git a/extras/indexing/src/test/java/mvm/rya/indexing/mongo/MongoGeoIndexerSfTest.java b/extras/indexing/src/test/java/mvm/rya/indexing/mongo/MongoGeoIndexerSfTest.java
new file mode 100644
index 0000000..7e1eaec
--- /dev/null
+++ b/extras/indexing/src/test/java/mvm/rya/indexing/mongo/MongoGeoIndexerSfTest.java
@@ -0,0 +1,306 @@
+package mvm.rya.indexing.mongo;
+
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ * 
+ *   http://www.apache.org/licenses/LICENSE-2.0
+ * 
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied.  See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+
+
+
+import info.aduna.iteration.CloseableIteration;
+
+import java.util.HashSet;
+import java.util.Map;
+import java.util.Set;
+import java.util.UUID;
+
+import mvm.rya.api.RdfCloudTripleStoreConfiguration;
+import mvm.rya.api.domain.RyaStatement;
+import mvm.rya.api.resolver.RdfToRyaConversions;
+import mvm.rya.api.resolver.RyaToRdfConversions;
+import mvm.rya.indexing.StatementContraints;
+import mvm.rya.indexing.accumulo.ConfigUtils;
+import mvm.rya.indexing.accumulo.geo.GeoConstants;
+import mvm.rya.indexing.accumulo.geo.GeoMesaGeoIndexer;
+import mvm.rya.indexing.mongodb.MongoGeoIndexer;
+import mvm.rya.mongodb.MongoDBRdfConfiguration;
+
+import org.apache.accumulo.core.client.admin.TableOperations;
+import org.apache.hadoop.conf.Configuration;
+import org.junit.Assert;
+import org.junit.Before;
+import org.junit.Test;
+import org.openrdf.model.Resource;
+import org.openrdf.model.Statement;
+import org.openrdf.model.URI;
+import org.openrdf.model.Value;
+import org.openrdf.model.ValueFactory;
+import org.openrdf.model.impl.StatementImpl;
+import org.openrdf.model.impl.ValueFactoryImpl;
+
+import com.google.common.collect.Maps;
+import com.google.common.collect.Sets;
+import com.vividsolutions.jts.geom.Coordinate;
+import com.vividsolutions.jts.geom.Geometry;
+import com.vividsolutions.jts.geom.GeometryFactory;
+import com.vividsolutions.jts.geom.LineString;
+import com.vividsolutions.jts.geom.LinearRing;
+import com.vividsolutions.jts.geom.Point;
+import com.vividsolutions.jts.geom.Polygon;
+import com.vividsolutions.jts.geom.PrecisionModel;
+import com.vividsolutions.jts.geom.impl.PackedCoordinateSequence;
+
+/**
+ * Tests all of the "simple functions" of the geoindexer.
+ */
+public class MongoGeoIndexerSfTest {
+    private static Configuration conf;
+    private static GeometryFactory gf = new GeometryFactory(new PrecisionModel(), 4326);
+    private static MongoGeoIndexer g;
+
+    private static final StatementContraints EMPTY_CONSTRAINTS = new StatementContraints();
+
+    // Here is the landscape:
+    /**
+     * <pre>
+     * 	 +---+---+---+---+---+---+---+
+     * 	 |        F          |       |
+     * 	 +  A    +           +   C   +
+     * 	 |                   |       |
+     * 	 +---+---+    E      +---+---+
+     * 	 |       |   /       |
+     * 	 +   B   +  /+---+---+
+     * 	 |       | / |       |
+     * 	 +---+---+/--+---+---+
+     * 	         /   |     D |
+     * 	        /    +---+---+
+     * </pre>
+     **/
+
+    private static final Polygon A = poly(bbox(0, 1, 4, 5));
+    private static final Polygon B = poly(bbox(0, 1, 2, 3));
+    private static final Polygon C = poly(bbox(4, 3, 6, 5));
+    private static final Polygon D = poly(bbox(3, 0, 5, 2));
+
+    private static final Point F = point(2, 4);
+
+    private static final LineString E = line(2, 0, 3, 3);
+
+    private static final Map<Geometry, String> names = Maps.newHashMap();
+    static {
+        names.put(A, "A");
+        names.put(B, "B");
+        names.put(C, "C");
+        names.put(D, "D");
+        names.put(E, "E");
+        names.put(F, "F");
+    }
+
+    @Before
+    public void before() throws Exception {
+        System.out.println(UUID.randomUUID().toString());
+        conf = new Configuration();
+        conf.set(ConfigUtils.USE_MONGO, "true");
+        conf.set(MongoDBRdfConfiguration.USE_TEST_MONGO, "true");
+        conf.set(MongoDBRdfConfiguration.MONGO_DB_NAME, "test");
+        conf.set(MongoDBRdfConfiguration.MONGO_COLLECTION_PREFIX, "rya_");
+        conf.set(ConfigUtils.GEO_PREDICATES_LIST, "http://www.opengis.net/ont/geosparql#asWKT");
+        conf.set(ConfigUtils.USE_GEO, "true");
+        conf.set(RdfCloudTripleStoreConfiguration.CONF_TBL_PREFIX, "rya_");
+
+        g = new MongoGeoIndexer();
+        g.setConf(conf);
+        g.storeStatement(statement(A));
+        g.storeStatement(statement(B));
+        g.storeStatement(statement(C));
+        g.storeStatement(statement(D));
+        g.storeStatement(statement(F));
+        g.storeStatement(statement(E));
+    }
+
+    private static RyaStatement statement(Geometry geo) {
+        ValueFactory vf = new ValueFactoryImpl();
+        Resource subject = vf.createURI("uri:" + names.get(geo));
+        URI predicate = GeoConstants.GEO_AS_WKT;
+        Value object = vf.createLiteral(geo.toString(), GeoConstants.XMLSCHEMA_OGC_WKT);
+        return RdfToRyaConversions.convertStatement(new StatementImpl(subject, predicate, object));
+
+    }
+
+    private static Point point(double x, double y) {
+        return gf.createPoint(new Coordinate(x, y));
+    }
+
+    private static LineString line(double x1, double y1, double x2, double y2) {
+        return new LineString(new PackedCoordinateSequence.Double(new double[] { x1, y1, x2, y2 }, 2), gf);
+    }
+
+    private static Polygon poly(double[] arr) {
+        LinearRing r1 = gf.createLinearRing(new PackedCoordinateSequence.Double(arr, 2));
+        Polygon p1 = gf.createPolygon(r1, new LinearRing[] {});
+        return p1;
+    }
+
+    private static double[] bbox(double x1, double y1, double x2, double y2) {
+        return new double[] { x1, y1, x1, y2, x2, y2, x2, y1, x1, y1 };
+    }
+
+    public void compare(CloseableIteration<Statement, ?> actual, Geometry... expected) throws Exception {
+        Set<Statement> expectedSet = Sets.newHashSet();
+        for (Geometry geo : expected) {
+            expectedSet.add(RyaToRdfConversions.convertStatement(statement(geo)));
+        }
+
+        Assert.assertEquals(expectedSet, getSet(actual));
+    }
+
+    private static <X> Set<X> getSet(CloseableIteration<X, ?> iter) throws Exception {
+        Set<X> set = new HashSet<X>();
+        while (iter.hasNext()) {
+            set.add(iter.next());
+        }
+        return set;
+    }
+
+    private static Geometry[] EMPTY_RESULTS = {};
+
+    @Test
+    public void testEquals() throws Exception {
+        // point
+        compare(g.queryEquals(F, EMPTY_CONSTRAINTS), F);
+        compare(g.queryEquals(point(2, 2), EMPTY_CONSTRAINTS), EMPTY_RESULTS);
+
+        // line
+        compare(g.queryEquals(E, EMPTY_CONSTRAINTS), E);
+        compare(g.queryEquals(line(2, 2, 3, 3), EMPTY_CONSTRAINTS), EMPTY_RESULTS);
+
+        // poly
+        compare(g.queryEquals(A, EMPTY_CONSTRAINTS), A);
+        compare(g.queryEquals(poly(bbox(1, 1, 4, 5)), EMPTY_CONSTRAINTS), EMPTY_RESULTS);
+
+    }
+
+//    @Test
+//    public void testDisjoint() throws Exception {
+//        // point
+//        compare(g.queryDisjoint(F, EMPTY_CONSTRAINTS), B, C, D, E);
+//
+//        // line
+//        compare(g.queryDisjoint(E, EMPTY_CONSTRAINTS), B, C, D, F);
+//
+//        // poly
+//        compare(g.queryDisjoint(A, EMPTY_CONSTRAINTS), EMPTY_RESULTS);
+//        compare(g.queryDisjoint(B, EMPTY_CONSTRAINTS), C, D, F, E);
+//    }
+
+    @Test
+    public void testIntersectsPoint() throws Exception {
+        // This seems like a bug
+        // compare(g.queryIntersects(F, EMPTY_CONSTRAINTS), A, F);
+        // compare(g.queryIntersects(F, EMPTY_CONSTRAINTS), EMPTY_RESULTS);
+    }
+
+    @Test
+    public void testIntersectsLine() throws Exception {
+        // This seems like a bug
+        // compare(g.queryIntersects(E, EMPTY_CONSTRAINTS), A, E);
+        // compare(g.queryIntersects(E, EMPTY_CONSTRAINTS), EMPTY_RESULTS);
+    }
+
+//    @Test
+//    public void testIntersectsPoly() throws Exception {
+//        compare(g.queryIntersects(A, EMPTY_CONSTRAINTS), A, B, C, D, F, E);
+//    }
+
+//    @Test
+//    public void testTouchesPoint() throws Exception {
+//        compare(g.queryTouches(F, EMPTY_CONSTRAINTS), EMPTY_RESULTS);
+//    }
+//
+//    @Test
+//    public void testTouchesLine() throws Exception {
+//        compare(g.queryTouches(E, EMPTY_CONSTRAINTS), EMPTY_RESULTS);
+//    }
+
+//    @Test
+//    public void testTouchesPoly() throws Exception {
+//        compare(g.queryTouches(A, EMPTY_CONSTRAINTS), C);
+//    }
+
+//    @Test
+//    public void testCrossesPoint() throws Exception {
+//        compare(g.queryCrosses(F, EMPTY_CONSTRAINTS), EMPTY_RESULTS);
+//    }
+
+    @Test
+    public void testCrossesLine() throws Exception {
+        // compare(g.queryCrosses(E, EMPTY_CONSTRAINTS), A);
+    }
+
+//    @Test
+//    public void testCrossesPoly() throws Exception {
+//        compare(g.queryCrosses(A, EMPTY_CONSTRAINTS), E);
+//    }
+
+//    @Test
+//    public void testWithin() throws Exception {
+//        // point
+//  //      compare(g.queryWithin(F, EMPTY_CONSTRAINTS), F);
+//
+//        // line
+////        compare(g.queryWithin(E, EMPTY_CONSTRAINTS), E);
+//
+//        // poly
+//        compare(g.queryWithin(A, EMPTY_CONSTRAINTS), A, B, F);
+//    }
+
+//    @Test
+//    public void testContainsPoint() throws Exception {
+//        compare(g.queryContains(F, EMPTY_CONSTRAINTS), A, F);
+//    }
+
+    @Test
+    public void testContainsLine() throws Exception {
+        // compare(g.queryContains(E, EMPTY_CONSTRAINTS), E);
+    }
+
+//    @Test
+//    public void testContainsPoly() throws Exception {
+//        compare(g.queryContains(A, EMPTY_CONSTRAINTS), A);
+//        compare(g.queryContains(B, EMPTY_CONSTRAINTS), A, B);
+//    }
+
+    @Test
+    public void testOverlapsPoint() throws Exception {
+        // compare(g.queryOverlaps(F, EMPTY_CONSTRAINTS), F);
+        // You cannot have overlapping points
+        // compare(g.queryOverlaps(F, EMPTY_CONSTRAINTS), EMPTY_RESULTS);
+    }
+
+    @Test
+    public void testOverlapsLine() throws Exception {
+        // compare(g.queryOverlaps(E, EMPTY_CONSTRAINTS), A, E);
+        // You cannot have overlapping lines
+        // compare(g.queryOverlaps(E, EMPTY_CONSTRAINTS), EMPTY_RESULTS);
+    }
+
+//    @Test
+//    public void testOverlapsPoly() throws Exception {
+//        compare(g.queryOverlaps(A, EMPTY_CONSTRAINTS), D);
+//    }
+
+}
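
For readers tracing the landscape diagram against the helpers: bbox(x1, y1, x2, y2) emits a closed five-point ring and poly() wraps it in a shell-only polygon. A worked example for region A (values follow directly from the code above):

    // bbox(0, 1, 4, 5) -> { x1,y1, x1,y2, x2,y2, x2,y1, x1,y1 }
    double[] ringA = { 0, 1,  0, 5,  4, 5,  4, 1,  0, 1 };
    LinearRing shell = gf.createLinearRing(new PackedCoordinateSequence.Double(ringA, 2)); // 2 = coordinate dimension
    Polygon a = gf.createPolygon(shell, new LinearRing[] {}); // no holes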

http://git-wip-us.apache.org/repos/asf/incubator-rya/blob/7743a42a/extras/indexing/src/test/java/mvm/rya/indexing/mongo/MongoGeoIndexerTest.java
----------------------------------------------------------------------
diff --git a/extras/indexing/src/test/java/mvm/rya/indexing/mongo/MongoGeoIndexerTest.java b/extras/indexing/src/test/java/mvm/rya/indexing/mongo/MongoGeoIndexerTest.java
new file mode 100644
index 0000000..4075b29
--- /dev/null
+++ b/extras/indexing/src/test/java/mvm/rya/indexing/mongo/MongoGeoIndexerTest.java
@@ -0,0 +1,390 @@
+package mvm.rya.indexing.mongo;
+
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ * 
+ *   http://www.apache.org/licenses/LICENSE-2.0
+ * 
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied.  See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+
+
+
+import static mvm.rya.api.resolver.RdfToRyaConversions.convertStatement;
+import info.aduna.iteration.CloseableIteration;
+
+import java.util.Collections;
+import java.util.HashSet;
+import java.util.Set;
+
+import mvm.rya.api.RdfCloudTripleStoreConfiguration;
+import mvm.rya.indexing.StatementContraints;
+import mvm.rya.indexing.accumulo.ConfigUtils;
+import mvm.rya.indexing.accumulo.geo.GeoConstants;
+import mvm.rya.indexing.mongodb.MongoGeoIndexer;
+import mvm.rya.mongodb.MongoDBRdfConfiguration;
+
+import org.apache.hadoop.conf.Configuration;
+import org.junit.Assert;
+import org.junit.Before;
+import org.junit.Test;
+import org.openrdf.model.Resource;
+import org.openrdf.model.Statement;
+import org.openrdf.model.URI;
+import org.openrdf.model.Value;
+import org.openrdf.model.ValueFactory;
+import org.openrdf.model.impl.ContextStatementImpl;
+import org.openrdf.model.impl.StatementImpl;
+import org.openrdf.model.impl.ValueFactoryImpl;
+
+import com.google.common.collect.Sets;
+import com.vividsolutions.jts.geom.Coordinate;
+import com.vividsolutions.jts.geom.GeometryFactory;
+import com.vividsolutions.jts.geom.LinearRing;
+import com.vividsolutions.jts.geom.Point;
+import com.vividsolutions.jts.geom.Polygon;
+import com.vividsolutions.jts.geom.PrecisionModel;
+import com.vividsolutions.jts.geom.impl.PackedCoordinateSequence;
+
+public class MongoGeoIndexerTest {
+
+    private static final StatementContraints EMPTY_CONSTRAINTS = new StatementContraints();
+
+    Configuration conf;
+    GeometryFactory gf = new GeometryFactory(new PrecisionModel(), 4326);
+
+    @Before
+    public void before() throws Exception {
+        conf = new Configuration();
+        conf.set(ConfigUtils.USE_MONGO, "true");
+        conf.set(MongoDBRdfConfiguration.USE_TEST_MONGO, "true");
+        conf.set(MongoDBRdfConfiguration.MONGO_DB_NAME, "test");
+        conf.set(MongoDBRdfConfiguration.MONGO_COLLECTION_PREFIX, "rya_");
+        conf.set(ConfigUtils.GEO_PREDICATES_LIST, "http://www.opengis.net/ont/geosparql#asWKT");
+        conf.set(ConfigUtils.USE_GEO, "true");
+        conf.set(RdfCloudTripleStoreConfiguration.CONF_TBL_PREFIX, "rya_");
+    }
+
+    @Test
+    public void testRestrictPredicatesSearch() throws Exception {
+        conf.setStrings(ConfigUtils.GEO_PREDICATES_LIST, "pred:1,pred:2");
+        try (MongoGeoIndexer f = new MongoGeoIndexer()) {
+            f.setConf(conf);
+
+            ValueFactory vf = new ValueFactoryImpl();
+
+            Point point = gf.createPoint(new Coordinate(10, 10));
+            Value pointValue = vf.createLiteral("Point(10 10)", GeoConstants.XMLSCHEMA_OGC_WKT);
+            URI invalidPredicate = GeoConstants.GEO_AS_WKT;
+
+            // These should not be stored because they are not in the predicate list
+            f.storeStatement(convertStatement(new StatementImpl(vf.createURI("foo:subj1"), invalidPredicate, pointValue)));
+            f.storeStatement(convertStatement(new StatementImpl(vf.createURI("foo:subj2"), invalidPredicate, pointValue)));
+
+            URI pred1 = vf.createURI("pred:1");
+            URI pred2 = vf.createURI("pred:2");
+
+            // These should be stored because they are in the predicate list
+            Statement s3 = new StatementImpl(vf.createURI("foo:subj3"), pred1, pointValue);
+            Statement s4 = new StatementImpl(vf.createURI("foo:subj4"), pred2, pointValue);
+            f.storeStatement(convertStatement(s3));
+            f.storeStatement(convertStatement(s4));
+
+            // This should not be stored because the object is not valid wkt
+            f.storeStatement(convertStatement(new StatementImpl(vf.createURI("foo:subj5"), pred1, vf.createLiteral("soint(10 10)"))));
+
+            // This should not be stored because the object is not a literal
+            f.storeStatement(convertStatement(new StatementImpl(vf.createURI("foo:subj6"), pred1, vf.createURI("p:Point(10 10)"))));
+
+            f.flush();
+
+            Set<Statement> actual = getSet(f.queryEquals(point, EMPTY_CONSTRAINTS));
+            Assert.assertEquals(2, actual.size());
+            Assert.assertTrue(actual.contains(s3));
+            Assert.assertTrue(actual.contains(s4));
+        }
+    }
+
+    private static <X> Set<X> getSet(CloseableIteration<X, ?> iter) throws Exception {
+        Set<X> set = new HashSet<X>();
+        while (iter.hasNext()) {
+            set.add(iter.next());
+        }
+        return set;
+    }
+
+    @Test
+    public void testPrimeMeridianSearch() throws Exception {
+        try (MongoGeoIndexer f = new MongoGeoIndexer()) {
+            f.setConf(conf);
+
+            ValueFactory vf = new ValueFactoryImpl();
+            Resource subject = vf.createURI("foo:subj");
+            URI predicate = GeoConstants.GEO_AS_WKT;
+            Value object = vf.createLiteral("Point(0 0)", GeoConstants.XMLSCHEMA_OGC_WKT);
+            Resource context = vf.createURI("foo:context");
+
+            Statement statement = new ContextStatementImpl(subject, predicate, object, context);
+            f.storeStatement(convertStatement(statement));
+            f.flush();
+
+            double[] ONE = { 1, 1, -1, 1, -1, -1, 1, -1, 1, 1 };
+            double[] TWO = { 2, 2, -2, 2, -2, -2, 2, -2, 2, 2 };
+            double[] THREE = { 3, 3, -3, 3, -3, -3, 3, -3, 3, 3 };
+
+            LinearRing r1 = gf.createLinearRing(new PackedCoordinateSequence.Double(ONE, 2));
+            LinearRing r2 = gf.createLinearRing(new PackedCoordinateSequence.Double(TWO, 2));
+            LinearRing r3 = gf.createLinearRing(new PackedCoordinateSequence.Double(THREE, 2));
+
+            Polygon p1 = gf.createPolygon(r1, new LinearRing[] {});
+            Polygon p2 = gf.createPolygon(r2, new LinearRing[] {});
+            Polygon p3 = gf.createPolygon(r3, new LinearRing[] {});
+
+            Assert.assertEquals(Sets.newHashSet(statement), getSet(f.queryWithin(p1, EMPTY_CONSTRAINTS)));
+            Assert.assertEquals(Sets.newHashSet(statement), getSet(f.queryWithin(p2, EMPTY_CONSTRAINTS)));
+            Assert.assertEquals(Sets.newHashSet(statement), getSet(f.queryWithin(p3, EMPTY_CONSTRAINTS)));
+
+            // Test a ring with a hole in it
+            Polygon p3m2 = gf.createPolygon(r3, new LinearRing[] { r2 });
+            Assert.assertEquals(Sets.newHashSet(), getSet(f.queryWithin(p3m2, EMPTY_CONSTRAINTS)));
+
+            // test a ring outside the point
+            double[] OUT = { 3, 3, 1, 3, 1, 1, 3, 1, 3, 3 };
+            LinearRing rOut = gf.createLinearRing(new PackedCoordinateSequence.Double(OUT, 2));
+            Polygon pOut = gf.createPolygon(rOut, new LinearRing[] {});
+            Assert.assertEquals(Sets.newHashSet(), getSet(f.queryWithin(pOut, EMPTY_CONSTRAINTS)));
+        }
+    }
+
+    @Test
+    public void testDcSearch() throws Exception {
+        // test a ring around dc
+        try (MongoGeoIndexer f = new MongoGeoIndexer()) {
+            f.setConf(conf);
+
+            ValueFactory vf = new ValueFactoryImpl();
+            Resource subject = vf.createURI("foo:subj");
+            URI predicate = GeoConstants.GEO_AS_WKT;
+            Value object = vf.createLiteral("Point(-77.03524 38.889468)", GeoConstants.XMLSCHEMA_OGC_WKT);
+            Resource context = vf.createURI("foo:context");
+
+            Statement statement = new ContextStatementImpl(subject, predicate, object, context);
+            f.storeStatement(convertStatement(statement));
+            f.flush();
+
+            double[] IN = { -78, 39, -77, 39, -77, 38, -78, 38, -78, 39 };
+            LinearRing r1 = gf.createLinearRing(new PackedCoordinateSequence.Double(IN, 2));
+            Polygon p1 = gf.createPolygon(r1, new LinearRing[] {});
+            Assert.assertEquals(Sets.newHashSet(statement), getSet(f.queryWithin(p1, EMPTY_CONSTRAINTS)));
+
+            // test a ring outside the point
+            double[] OUT = { -77, 39, -76, 39, -76, 38, -77, 38, -77, 39 };
+            LinearRing rOut = gf.createLinearRing(new PackedCoordinateSequence.Double(OUT, 2));
+            Polygon pOut = gf.createPolygon(rOut, new LinearRing[] {});
+            Assert.assertEquals(Sets.newHashSet(), getSet(f.queryWithin(pOut, EMPTY_CONSTRAINTS)));
+        }
+    }
+
+    @Test
+    public void testDeleteSearch() throws Exception {
+        // test a ring around dc
+        try (MongoGeoIndexer f = new MongoGeoIndexer()) {
+            f.setConf(conf);
+
+            ValueFactory vf = new ValueFactoryImpl();
+            Resource subject = vf.createURI("foo:subj");
+            URI predicate = GeoConstants.GEO_AS_WKT;
+            Value object = vf.createLiteral("Point(-77.03524 38.889468)", GeoConstants.XMLSCHEMA_OGC_WKT);
+            Resource context = vf.createURI("foo:context");
+
+            Statement statement = new ContextStatementImpl(subject, predicate, object, context);
+            f.storeStatement(convertStatement(statement));
+            f.flush();
+
+            f.deleteStatement(convertStatement(statement));
+
+            // test a ring that the point would be inside of if not deleted
+            double[] in = { -78, 39, -77, 39, -77, 38, -78, 38, -78, 39 };
+            LinearRing r1 = gf.createLinearRing(new PackedCoordinateSequence.Double(in, 2));
+            Polygon p1 = gf.createPolygon(r1, new LinearRing[] {});
+            Assert.assertEquals(Sets.newHashSet(), getSet(f.queryWithin(p1, EMPTY_CONSTRAINTS)));
+
+            // test a ring that the point would be outside of if not deleted
+            double[] out = { -77, 39, -76, 39, -76, 38, -77, 38, -77, 39 };
+            LinearRing rOut = gf.createLinearRing(new PackedCoordinateSequence.Double(out, 2));
+            Polygon pOut = gf.createPolygon(rOut, new LinearRing[] {});
+            Assert.assertEquals(Sets.newHashSet(), getSet(f.queryWithin(pOut, EMPTY_CONSTRAINTS)));
+
+            // test a ring for the whole world and make sure the point is gone
+            // Geomesa is a little sensitive around lon 180, so we only go to 179
+            double[] world = { -180, 90, 179, 90, 179, -90, -180, -90, -180, 90 };
+            LinearRing rWorld = gf.createLinearRing(new PackedCoordinateSequence.Double(world, 2));
+            Polygon pWorld = gf.createPolygon(rWorld, new LinearRing[] {});
+            Assert.assertEquals(Sets.newHashSet(), getSet(f.queryWithin(pWorld, EMPTY_CONSTRAINTS)));
+        }
+    }
+
+    @Test
+    public void testDcSearchWithContext() throws Exception {
+        // test a ring around dc
+        try (MongoGeoIndexer f = new MongoGeoIndexer()) {
+            f.setConf(conf);
+
+            ValueFactory vf = new ValueFactoryImpl();
+            Resource subject = vf.createURI("foo:subj");
+            URI predicate = GeoConstants.GEO_AS_WKT;
+            Value object = vf.createLiteral("Point(-77.03524 38.889468)", GeoConstants.XMLSCHEMA_OGC_WKT);
+            Resource context = vf.createURI("foo:context");
+
+            Statement statement = new ContextStatementImpl(subject, predicate, object, context);
+            f.storeStatement(convertStatement(statement));
+            f.flush();
+
+            double[] IN = { -78, 39, -77, 39, -77, 38, -78, 38, -78, 39 };
+            LinearRing r1 = gf.createLinearRing(new PackedCoordinateSequence.Double(IN, 2));
+            Polygon p1 = gf.createPolygon(r1, new LinearRing[] {});
+
+            // query with correct context
+            Assert.assertEquals(Sets.newHashSet(statement), getSet(f.queryWithin(p1, new StatementContraints().setContext(context))));
+
+            // query with wrong context
+            Assert.assertEquals(Sets.newHashSet(),
+                    getSet(f.queryWithin(p1, new StatementContraints().setContext(vf.createURI("foo:context2")))));
+        }
+    }
+
+    @Test
+    public void testDcSearchWithSubject() throws Exception {
+        // test a ring around dc
+        try (MongoGeoIndexer f = new MongoGeoIndexer()) {
+            f.setConf(conf);
+
+            ValueFactory vf = new ValueFactoryImpl();
+            Resource subject = vf.createURI("foo:subj");
+            URI predicate = GeoConstants.GEO_AS_WKT;
+            Value object = vf.createLiteral("Point(-77.03524 38.889468)", GeoConstants.XMLSCHEMA_OGC_WKT);
+            Resource context = vf.createURI("foo:context");
+
+            Statement statement = new ContextStatementImpl(subject, predicate, object, context);
+            f.storeStatement(convertStatement(statement));
+            f.flush();
+
+            double[] IN = { -78, 39, -77, 39, -77, 38, -78, 38, -78, 39 };
+            LinearRing r1 = gf.createLinearRing(new PackedCoordinateSequence.Double(IN, 2));
+            Polygon p1 = gf.createPolygon(r1, new LinearRing[] {});
+
+            // query with correct subject
+            Assert.assertEquals(Sets.newHashSet(statement), getSet(f.queryWithin(p1, new StatementContraints().setSubject(subject))));
+
+            // query with wrong subject
+            Assert.assertEquals(Sets.newHashSet(), getSet(f.queryWithin(p1, new StatementContraints().setSubject(vf.createURI("foo:subj2")))));
+        }
+    }
+
+    @Test
+    public void testDcSearchWithSubjectAndContext() throws Exception {
+        // test a ring around dc
+        try (MongoGeoIndexer f = new MongoGeoIndexer()) {
+            f.setConf(conf);
+
+            ValueFactory vf = new ValueFactoryImpl();
+            Resource subject = vf.createURI("foo:subj");
+            URI predicate = GeoConstants.GEO_AS_WKT;
+            Value object = vf.createLiteral("Point(-77.03524 38.889468)", GeoConstants.XMLSCHEMA_OGC_WKT);
+            Resource context = vf.createURI("foo:context");
+
+            Statement statement = new ContextStatementImpl(subject, predicate, object, context);
+            f.storeStatement(convertStatement(statement));
+            f.flush();
+
+            double[] IN = { -78, 39, -77, 39, -77, 38, -78, 38, -78, 39 };
+            LinearRing r1 = gf.createLinearRing(new PackedCoordinateSequence.Double(IN, 2));
+            Polygon p1 = gf.createPolygon(r1, new LinearRing[] {});
+
+            // query with correct context and subject
+            Assert.assertEquals(Sets.newHashSet(statement),
+                    getSet(f.queryWithin(p1, new StatementContraints().setContext(context).setSubject(subject))));
+
+            // query with wrong context
+            Assert.assertEquals(Sets.newHashSet(),
+                    getSet(f.queryWithin(p1, new StatementContraints().setContext(vf.createURI("foo:context2")))));
+
+            // query with wrong subject
+            Assert.assertEquals(Sets.newHashSet(), getSet(f.queryWithin(p1, new StatementContraints().setSubject(vf.createURI("foo:subj2")))));
+        }
+    }
+
+    @Test
+    public void testDcSearchWithPredicate() throws Exception {
+        // test a ring around dc
+        try (MongoGeoIndexer f = new MongoGeoIndexer()) {
+            f.setConf(conf);
+
+            ValueFactory vf = new ValueFactoryImpl();
+            Resource subject = vf.createURI("foo:subj");
+            URI predicate = GeoConstants.GEO_AS_WKT;
+            Value object = vf.createLiteral("Point(-77.03524 38.889468)", GeoConstants.XMLSCHEMA_OGC_WKT);
+            Resource context = vf.createURI("foo:context");
+
+            Statement statement = new ContextStatementImpl(subject, predicate, object, context);
+            f.storeStatement(convertStatement(statement));
+            f.flush();
+
+            double[] IN = { -78, 39, -77, 39, -77, 38, -78, 38, -78, 39 };
+            LinearRing r1 = gf.createLinearRing(new PackedCoordinateSequence.Double(IN, 2));
+            Polygon p1 = gf.createPolygon(r1, new LinearRing[] {});
+
+            // query with correct Predicate
+            Assert.assertEquals(Sets.newHashSet(statement),
+                    getSet(f.queryWithin(p1, new StatementContraints().setPredicates(Collections.singleton(predicate)))));
+
+            // query with wrong predicate
+            Assert.assertEquals(Sets.newHashSet(),
+                    getSet(f.queryWithin(p1, new StatementContraints().setPredicates(Collections.singleton(vf.createURI("other:pred"))))));
+        }
+    }
+
+    // @Test
+    public void testAntiMeridianSearch() throws Exception {
+        // verify that a search works if the bounding box crosses the anti-meridian
+        try (MongoGeoIndexer f = new MongoGeoIndexer()) {
+            f.setConf(conf);
+
+            ValueFactory vf = new ValueFactoryImpl();
+            Resource context = vf.createURI("foo:context");
+
+            Resource subjectEast = vf.createURI("foo:subj:east");
+            URI predicateEast = GeoConstants.GEO_AS_WKT;
+            Value objectEast = vf.createLiteral("Point(179 0)", GeoConstants.XMLSCHEMA_OGC_WKT);
+            Statement statementEast = new ContextStatementImpl(subjectEast, predicateEast, objectEast, context);
+            f.storeStatement(convertStatement(statementEast));
+
+            Resource subjectWest = vf.createURI("foo:subj:west");
+            URI predicateWest = GeoConstants.GEO_AS_WKT;
+            Value objectWest = vf.createLiteral("Point(-179 0)", GeoConstants.XMLSCHEMA_OGC_WKT);
+            Statement statementWest = new ContextStatementImpl(subjectWest, predicateWest, objectWest, context);
+            f.storeStatement(convertStatement(statementWest));
+
+            f.flush();
+
+            double[] ONE = { 178.1, 1, -178, 1, -178, -1, 178.1, -1, 178.1, 1 };
+
+            LinearRing r1 = gf.createLinearRing(new PackedCoordinateSequence.Double(ONE, 2));
+
+            Polygon p1 = gf.createPolygon(r1, new LinearRing[] {});
+
+            Assert.assertEquals(Sets.newHashSet(statementEast, statementWest), getSet(f.queryWithin(p1, EMPTY_CONSTRAINTS)));
+        }
+    }
+}
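
Every test in this file builds its search region the same way: pack (x, y) pairs into a JTS packed coordinate sequence, close the ring by repeating the first point, and wrap the ring in a polygon. A minimal standalone sketch of that recipe (hypothetical class name; assumes the pre-LocationTech com.vividsolutions JTS packages Rya used at the time):

import com.vividsolutions.jts.geom.GeometryFactory;
import com.vividsolutions.jts.geom.LinearRing;
import com.vividsolutions.jts.geom.Polygon;
import com.vividsolutions.jts.geom.impl.PackedCoordinateSequence;

public class SearchRingSketch {
    // Builds the one-degree ring around DC used by the tests above.
    public static Polygon dcRing() {
        GeometryFactory gf = new GeometryFactory();
        // Five (x, y) pairs, x = longitude, y = latitude; first == last closes the ring.
        double[] coords = { -78, 39, -77, 39, -77, 38, -78, 38, -78, 39 };
        LinearRing ring = gf.createLinearRing(new PackedCoordinateSequence.Double(coords, 2));
        return gf.createPolygon(ring, new LinearRing[] {});
    }
}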


[08/56] [abbrv] incubator-rya git commit: RYA-7 POM and License Clean-up for Apache Move

Posted by mi...@apache.org.
http://git-wip-us.apache.org/repos/asf/incubator-rya/blob/80faf06d/sail/src/main/java/mvm/rya/rdftriplestore/evaluation/ExternalMultipleBindingSetsIterator.java
----------------------------------------------------------------------
diff --git a/sail/src/main/java/mvm/rya/rdftriplestore/evaluation/ExternalMultipleBindingSetsIterator.java b/sail/src/main/java/mvm/rya/rdftriplestore/evaluation/ExternalMultipleBindingSetsIterator.java
new file mode 100644
index 0000000..16ef588
--- /dev/null
+++ b/sail/src/main/java/mvm/rya/rdftriplestore/evaluation/ExternalMultipleBindingSetsIterator.java
@@ -0,0 +1,109 @@
+package mvm.rya.rdftriplestore.evaluation;
+
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ * 
+ *   http://www.apache.org/licenses/LICENSE-2.0
+ * 
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied.  See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+
+
+
+import info.aduna.iteration.CloseableIteration;
+import info.aduna.iteration.LookAheadIteration;
+
+import java.util.ArrayList;
+import java.util.Collection;
+
+import org.openrdf.query.BindingSet;
+import org.openrdf.query.QueryEvaluationException;
+import org.openrdf.query.algebra.StatementPattern;
+import org.openrdf.query.algebra.TupleExpr;
+
+/**
+ * Batches binding sets produced by the left-hand side of a join and evaluates
+ * an {@link ExternalBatchingIterator} against each batch.
+ */
+public class ExternalMultipleBindingSetsIterator extends LookAheadIteration<BindingSet, QueryEvaluationException> {
+
+    private final ParallelEvaluationStrategyImpl strategy;
+    private final CloseableIteration leftIter;
+    private ExternalBatchingIterator stmtPtrn;
+    private CloseableIteration<BindingSet, QueryEvaluationException> iter;
+    //TODO: configurable
+    private int batchSize = 1000;
+
+    public ExternalMultipleBindingSetsIterator(ParallelEvaluationStrategyImpl strategy, TupleExpr leftArg, ExternalBatchingIterator stmtPattern, BindingSet bindings)
+            throws QueryEvaluationException {
+        this.strategy = strategy;
+        leftIter = strategy.evaluate(leftArg, bindings);
+        this.stmtPtrn = stmtPattern;
+        initIter();
+    }
+
+    public ExternalMultipleBindingSetsIterator(ParallelEvaluationStrategyImpl strategy, CloseableIteration leftIter, ExternalBatchingIterator stmtPattern, BindingSet bindings)
+            throws QueryEvaluationException {
+        this.strategy = strategy;
+        this.leftIter = leftIter;
+        this.stmtPtrn = stmtPattern;
+        initIter();
+    }
+
+    protected void initIter() throws QueryEvaluationException {
+        try {
+            Collection<BindingSet> sets = new ArrayList<BindingSet>();
+            int i = 0;
+            while (leftIter.hasNext()) {
+                //default to 1K for the batch size
+                if (i >= batchSize) {
+                    break;
+                }
+                sets.add((BindingSet) leftIter.next());
+                i++;
+            }
+            if (iter != null) iter.close();
+            iter = stmtPtrn.evaluate(sets);
+        } catch (Exception e) {
+            throw new QueryEvaluationException(e);
+        }
+    }
+
+    protected BindingSet getNextElement()
+            throws QueryEvaluationException {
+        try {
+            while (true) {
+                if (iter.hasNext()) {
+                    return iter.next();
+                }
+
+                if (leftIter.hasNext()) {
+                    initIter();
+                } else
+                    return null;
+            }
+        } catch (Exception e) {
+            throw new QueryEvaluationException(e);
+        }
+    }
+
+    protected void handleClose()
+            throws QueryEvaluationException {
+        try {
+            super.handleClose();
+            leftIter.close();
+            iter.close();
+        } catch (Exception e) {
+            throw new QueryEvaluationException(e);
+        }
+    }
+}
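
This iterator, MultipleBindingSetsIterator below, and ParallelJoinIterator further down all rely on the same LookAheadIteration contract: getNextElement() returns the next element, or null once the iteration is exhausted, and handleClose() is the cleanup hook. A minimal, self-contained instance of that contract (a hypothetical class, not part of this commit):

import info.aduna.iteration.LookAheadIteration;

import java.util.Iterator;
import java.util.List;

public class ListIteration<E> extends LookAheadIteration<E, RuntimeException> {
    private final Iterator<E> it;

    public ListIteration(List<E> list) {
        this.it = list.iterator();
    }

    @Override
    protected E getNextElement() {
        // Returning null tells LookAheadIteration the iteration is done.
        return it.hasNext() ? it.next() : null;
    }

    @Override
    protected void handleClose() {
        super.handleClose();
        // Nothing to release for an in-memory list.
    }
}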

http://git-wip-us.apache.org/repos/asf/incubator-rya/blob/80faf06d/sail/src/main/java/mvm/rya/rdftriplestore/evaluation/FilterRangeVisitor.java
----------------------------------------------------------------------
diff --git a/sail/src/main/java/mvm/rya/rdftriplestore/evaluation/FilterRangeVisitor.java b/sail/src/main/java/mvm/rya/rdftriplestore/evaluation/FilterRangeVisitor.java
new file mode 100644
index 0000000..24e2527
--- /dev/null
+++ b/sail/src/main/java/mvm/rya/rdftriplestore/evaluation/FilterRangeVisitor.java
@@ -0,0 +1,97 @@
+package mvm.rya.rdftriplestore.evaluation;
+
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ * 
+ *   http://www.apache.org/licenses/LICENSE-2.0
+ * 
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied.  See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+
+
+
+import mvm.rya.api.RdfCloudTripleStoreConfiguration;
+import mvm.rya.api.domain.RangeURI;
+import mvm.rya.api.domain.RangeValue;
+import org.openrdf.model.Value;
+import org.openrdf.model.impl.BooleanLiteralImpl;
+import org.openrdf.query.QueryEvaluationException;
+import org.openrdf.query.algebra.*;
+import org.openrdf.query.algebra.helpers.QueryModelVisitorBase;
+
+import java.util.HashMap;
+import java.util.List;
+import java.util.Map;
+
+import static mvm.rya.api.RdfCloudTripleStoreConstants.RANGE;
+
+/**
+ * Class FilterRangeVisitor
+ * Date: Apr 11, 2011
+ * Time: 10:16:15 PM
+ */
+public class FilterRangeVisitor extends QueryModelVisitorBase {
+
+    private RdfCloudTripleStoreConfiguration conf;
+    private Map<Var, RangeValue> rangeValues = new HashMap<Var, RangeValue>();
+
+    public FilterRangeVisitor(RdfCloudTripleStoreConfiguration conf) {
+        this.conf = conf;
+    }
+
+    @Override
+    public void meet(Filter node) throws Exception {
+        super.meet(node);
+
+        ValueExpr arg = node.getCondition();
+        if (arg instanceof FunctionCall) {
+            FunctionCall fc = (FunctionCall) arg;
+            if (RANGE.stringValue().equals(fc.getURI())) {
+                //range(?var, start, end)
+                List<ValueExpr> valueExprs = fc.getArgs();
+                if (valueExprs.size() != 3) {
+                    throw new QueryEvaluationException("mvm:range must have 3 parameters: variable, start, end");
+                }
+                Var var = (Var) valueExprs.get(0);
+                ValueConstant startVc = (ValueConstant) valueExprs.get(1);
+                ValueConstant endVc = (ValueConstant) valueExprs.get(2);
+                Value start = startVc.getValue();
+                Value end = endVc.getValue();
+                rangeValues.put(var, new RangeValue(start, end));
+                node.setCondition(new ValueConstant(BooleanLiteralImpl.TRUE));
+            }
+        }
+    }
+
+    @Override
+    public void meet(StatementPattern node) throws Exception {
+        super.meet(node);
+
+        Var subjectVar = node.getSubjectVar();
+        RangeValue subjRange = rangeValues.get(subjectVar);
+        Var predVar = node.getPredicateVar();
+        RangeValue predRange = rangeValues.get(predVar);
+        Var objVar = node.getObjectVar();
+        RangeValue objRange = rangeValues.get(objVar);
+        if(subjRange != null) {
+            subjectVar.setValue(new RangeURI(subjRange));//Assumes no blank nodes can be ranges
+        }
+        if(predRange != null) {
+            predVar.setValue(new RangeURI(predRange));
+        }
+        if(objRange != null) {
+            objVar.setValue(objRange);
+        }
+    }
+}
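
What FilterRangeVisitor rewrites is a FILTER call to Rya's range function: it records (variable, start, end), short-circuits the filter condition to true, and pushes a RangeURI/RangeValue onto the matching statement-pattern variable so the layers below can treat the pattern as a range scan rather than a post-filter. A hedged sketch of such a query (the function IRI must equal RdfCloudTripleStoreConstants.RANGE.stringValue(); <urn:rya:range> below is a placeholder, not the real constant):

public class RangeQuerySketch {
    public static final String QUERY =
        "SELECT ?s ?date WHERE {\n"
      + "  ?s <urn:pred:hasDate> ?date .\n"
      + "  FILTER(<urn:rya:range>(?date, '2011-01-01', '2011-12-31'))\n"
      + "}";
    // After the visitor runs, the FILTER condition becomes true and the ?date
    // var on the statement pattern carries a RangeValue('2011-01-01', '2011-12-31').
}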

http://git-wip-us.apache.org/repos/asf/incubator-rya/blob/80faf06d/sail/src/main/java/mvm/rya/rdftriplestore/evaluation/MultipleBindingSetsIterator.java
----------------------------------------------------------------------
diff --git a/sail/src/main/java/mvm/rya/rdftriplestore/evaluation/MultipleBindingSetsIterator.java b/sail/src/main/java/mvm/rya/rdftriplestore/evaluation/MultipleBindingSetsIterator.java
new file mode 100644
index 0000000..01f3d27
--- /dev/null
+++ b/sail/src/main/java/mvm/rya/rdftriplestore/evaluation/MultipleBindingSetsIterator.java
@@ -0,0 +1,108 @@
+package mvm.rya.rdftriplestore.evaluation;
+
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ * 
+ *   http://www.apache.org/licenses/LICENSE-2.0
+ * 
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied.  See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+
+
+
+import info.aduna.iteration.CloseableIteration;
+import info.aduna.iteration.LookAheadIteration;
+import org.openrdf.query.BindingSet;
+import org.openrdf.query.QueryEvaluationException;
+import org.openrdf.query.algebra.StatementPattern;
+import org.openrdf.query.algebra.TupleExpr;
+
+import java.util.ArrayList;
+import java.util.Collection;
+
+/**
+ * Batches binding sets produced by the left-hand side of a join and evaluates
+ * a {@link StatementPattern} against each batch.
+ */
+public class MultipleBindingSetsIterator extends LookAheadIteration<BindingSet, QueryEvaluationException> {
+
+    private final ParallelEvaluationStrategyImpl strategy;
+    private final CloseableIteration leftIter;
+    private StatementPattern stmtPtrn;
+    private CloseableIteration<BindingSet, QueryEvaluationException> iter;
+    //TODO: configurable
+    private int batchSize = 1000;
+
+    public MultipleBindingSetsIterator(ParallelEvaluationStrategyImpl strategy, TupleExpr leftArg, StatementPattern stmtPattern, BindingSet bindings)
+            throws QueryEvaluationException {
+        this.strategy = strategy;
+        leftIter = strategy.evaluate(leftArg, bindings);
+        this.stmtPtrn = stmtPattern;
+        initIter();
+    }
+
+    public MultipleBindingSetsIterator(ParallelEvaluationStrategyImpl strategy, CloseableIteration leftIter, StatementPattern stmtPattern, BindingSet bindings)
+            throws QueryEvaluationException {
+        this.strategy = strategy;
+        this.leftIter = leftIter;
+        this.stmtPtrn = stmtPattern;
+        initIter();
+    }
+
+    protected void initIter() throws QueryEvaluationException {
+        try {
+            Collection<BindingSet> sets = new ArrayList<BindingSet>();
+            int i = 0;
+            while (leftIter.hasNext()) {
+                //default to 1K for the batch size
+                if (i >= batchSize) {
+                    break;
+                }
+                sets.add((BindingSet) leftIter.next());
+                i++;
+            }
+            if (iter != null) iter.close();
+            iter = strategy.evaluate(stmtPtrn, sets);
+        } catch (Exception e) {
+            throw new QueryEvaluationException(e);
+        }
+    }
+
+    protected BindingSet getNextElement()
+            throws QueryEvaluationException {
+        try {
+            while (true) {
+                if (iter.hasNext()) {
+                    return iter.next();
+                }
+
+                if (leftIter.hasNext()) {
+                    initIter();
+                } else
+                    return null;
+            }
+        } catch (Exception e) {
+            throw new QueryEvaluationException(e);
+        }
+    }
+
+    protected void handleClose()
+            throws QueryEvaluationException {
+        try {
+            super.handleClose();
+            leftIter.close();
+            iter.close();
+        } catch (Exception e) {
+            throw new QueryEvaluationException(e);
+        }
+    }
+}
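
The only difference between this and an ordinary nested-loop join is the batching in initIter(): up to batchSize (1000) left-hand binding sets are drained into one collection and handed to the right side in a single call, and getNextElement() triggers another drain once that batch's results run dry. The drain step in isolation (a hypothetical helper using plain java.util types in place of the Sesame iterations):

import java.util.ArrayList;
import java.util.Iterator;
import java.util.List;

public final class BatchDrain {
    // Pulls at most batchSize elements from source into one batch; the caller
    // invokes this again once the results of the previous batch are exhausted.
    public static <T> List<T> nextBatch(Iterator<T> source, int batchSize) {
        List<T> batch = new ArrayList<T>(batchSize);
        while (source.hasNext() && batch.size() < batchSize) {
            batch.add(source.next());
        }
        return batch;
    }
}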

http://git-wip-us.apache.org/repos/asf/incubator-rya/blob/80faf06d/sail/src/main/java/mvm/rya/rdftriplestore/evaluation/ParallelEvaluationStrategyImpl.java
----------------------------------------------------------------------
diff --git a/sail/src/main/java/mvm/rya/rdftriplestore/evaluation/ParallelEvaluationStrategyImpl.java b/sail/src/main/java/mvm/rya/rdftriplestore/evaluation/ParallelEvaluationStrategyImpl.java
new file mode 100644
index 0000000..30dc966
--- /dev/null
+++ b/sail/src/main/java/mvm/rya/rdftriplestore/evaluation/ParallelEvaluationStrategyImpl.java
@@ -0,0 +1,281 @@
+package mvm.rya.rdftriplestore.evaluation;
+
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ * 
+ *   http://www.apache.org/licenses/LICENSE-2.0
+ * 
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied.  See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+
+
+
+import info.aduna.iteration.CloseableIteration;
+import info.aduna.iteration.ConvertingIteration;
+import info.aduna.iteration.EmptyIteration;
+import info.aduna.iteration.Iteration;
+import info.aduna.iteration.IteratorIteration;
+import info.aduna.iteration.LimitIteration;
+import info.aduna.iteration.OffsetIteration;
+
+import java.util.ArrayList;
+import java.util.Collection;
+import java.util.Collections;
+import java.util.List;
+import java.util.Map;
+import java.util.Set;
+import java.util.concurrent.ExecutorService;
+import java.util.concurrent.Executors;
+
+import mvm.rya.api.RdfCloudTripleStoreConfiguration;
+import mvm.rya.api.RdfCloudTripleStoreUtils;
+import mvm.rya.api.utils.NullableStatementImpl;
+import mvm.rya.rdftriplestore.RdfCloudTripleStoreConnection;
+import mvm.rya.rdftriplestore.inference.InferenceEngine;
+import mvm.rya.rdftriplestore.inference.InferenceEngineException;
+import mvm.rya.rdftriplestore.utils.FixedStatementPattern;
+import mvm.rya.rdftriplestore.utils.TransitivePropertySP;
+
+import org.apache.log4j.Logger;
+import org.openrdf.model.Resource;
+import org.openrdf.model.Statement;
+import org.openrdf.model.URI;
+import org.openrdf.model.Value;
+import org.openrdf.query.BindingSet;
+import org.openrdf.query.Dataset;
+import org.openrdf.query.QueryEvaluationException;
+import org.openrdf.query.algebra.Filter;
+import org.openrdf.query.algebra.Join;
+import org.openrdf.query.algebra.QueryRoot;
+import org.openrdf.query.algebra.Slice;
+import org.openrdf.query.algebra.StatementPattern;
+import org.openrdf.query.algebra.TupleExpr;
+import org.openrdf.query.algebra.ValueExpr;
+import org.openrdf.query.algebra.Var;
+import org.openrdf.query.algebra.evaluation.QueryBindingSet;
+import org.openrdf.query.algebra.evaluation.ValueExprEvaluationException;
+import org.openrdf.query.algebra.evaluation.impl.EvaluationStrategyImpl;
+import org.openrdf.query.algebra.evaluation.iterator.FilterIterator;
+import org.openrdf.query.algebra.evaluation.iterator.JoinIterator;
+import org.openrdf.query.algebra.evaluation.util.QueryEvaluationUtil;
+
+import com.google.common.collect.Lists;
+
+/**
+ * Evaluation strategy that evaluates joins in batches and can log the query
+ * plan; holds a thread pool sized by the configured number of threads.
+ */
+public class ParallelEvaluationStrategyImpl extends EvaluationStrategyImpl {
+    private static Logger logger = Logger.getLogger(ParallelEvaluationStrategyImpl.class);
+    
+    private int numOfThreads = 10;
+    private boolean performant = true;
+    private boolean displayQueryPlan = false;
+    private ExecutorService executorService;
+    private InferenceEngine inferenceEngine;
+
+    public ParallelEvaluationStrategyImpl(RdfCloudTripleStoreConnection.StoreTripleSource tripleSource, InferenceEngine inferenceEngine,
+                                          Dataset dataset, RdfCloudTripleStoreConfiguration conf) {
+        super(tripleSource, dataset);
+        Integer nthreads = conf.getNumThreads();
+        this.numOfThreads = (nthreads != null) ? nthreads : this.numOfThreads;
+        Boolean val = conf.isPerformant();
+        this.performant = (val != null) ? val : this.performant;
+        val = conf.isDisplayQueryPlan();
+        this.displayQueryPlan = (val != null) ? val : this.displayQueryPlan;
+        this.executorService = Executors.newFixedThreadPool(this.numOfThreads);
+        this.inferenceEngine = inferenceEngine;
+    }
+
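+    // Join dispatch, summarized (see the branches below):
+    //   right arg = StatementPattern          -> MultipleBindingSetsIterator (batched)
+    //   right arg = ExternalBatchingIterator  -> ExternalMultipleBindingSetsIterator
+    //   right arg = Filter(StatementPattern)  -> FilterIterator over a batched iterator
+    //   right arg = Filter(Join(sp1, sp2))    -> FilterIterator, batching sp2 against Join(left, sp1)
+    //   anything else, or performant == false -> plain JoinIterator / super.evaluate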
+    @Override
+    public CloseableIteration<BindingSet, QueryEvaluationException> evaluate(Join join, BindingSet bindings) throws QueryEvaluationException {
+        if (performant) {
+            TupleExpr buffer = join.getLeftArg();
+            if (join.getRightArg() instanceof StatementPattern) {
+                TupleExpr stmtPat = join.getRightArg();
+//                if(buffer instanceof StatementPattern && !(stmtPat instanceof StatementPattern)){
+//                    buffer = stmtPat;
+//                    stmtPat = join.getLeftArg();
+//                }
+
+                return new MultipleBindingSetsIterator(this, buffer, (StatementPattern) stmtPat, bindings);
+            } else if (join.getRightArg() instanceof ExternalBatchingIterator) {
+                    TupleExpr stmtPat = join.getRightArg();
+
+                    return new ExternalMultipleBindingSetsIterator(this, buffer, (ExternalBatchingIterator) stmtPat, bindings);
+            } else if (join.getRightArg() instanceof Filter) {
+                //add performance for the filter too
+                Filter filter = (Filter) join.getRightArg();
+                TupleExpr filterChild = filter.getArg();
+                if (filterChild instanceof StatementPattern) {
+                    return new FilterIterator(filter, new MultipleBindingSetsIterator(this, buffer, (StatementPattern) filterChild, bindings), this);
+                } else if (filterChild instanceof Join) {
+                    Join filterChildJoin = (Join) filterChild;
+                    TupleExpr fcj_left = filterChildJoin.getLeftArg();
+                    TupleExpr fcj_right = filterChildJoin.getRightArg();
+                    //TODO: Should be a better way, maybe reorder the filter?
+                    //very particular case filter(join(stmtPat, stmtPat))
+                    if (fcj_left instanceof StatementPattern && fcj_right instanceof StatementPattern) {
+                        return new FilterIterator(filter, new MultipleBindingSetsIterator(this, new Join(buffer, fcj_left), (StatementPattern) fcj_right, bindings), this);
+                    }
+                }
+                //TODO: add a configuration flag for ParallelJoinIterator
+                return new JoinIterator(this, join, bindings);
+            } else {
+                //TODO: add a configuration flag for ParallelJoinIterator
+                return new JoinIterator(this, join, bindings);
+            }
+        } else {
+            return super.evaluate(join, bindings);
+        }
+    }
+
+    @Override
+    public CloseableIteration<BindingSet, QueryEvaluationException> evaluate(StatementPattern sp, BindingSet bindings) throws QueryEvaluationException {
+        //TODO: Wonder if creating a Collection here hurts performance
+        Set<BindingSet> bs = Collections.singleton(bindings);
+        return this.evaluate(sp, bs);
+    }
+
+    public CloseableIteration<BindingSet, QueryEvaluationException> evaluate(final StatementPattern sp, Collection<BindingSet> bindings)
+            throws QueryEvaluationException {
+
+        final Var subjVar = sp.getSubjectVar();
+        final Var predVar = sp.getPredicateVar();
+        final Var objVar = sp.getObjectVar();
+        final Var cntxtVar = sp.getContextVar();
+
+        List<Map.Entry<Statement, BindingSet>> stmts = new ArrayList<Map.Entry<Statement, BindingSet>>();
+
+        Iteration<? extends Map.Entry<Statement, BindingSet>, QueryEvaluationException> iter;
+        if (sp instanceof FixedStatementPattern) {
+            Collection<Map.Entry<Statement, BindingSet>> coll = Lists.newArrayList();
+            for (BindingSet binding : bindings) {
+                Value subjValue = getVarValue(subjVar, binding);
+                Value predValue = getVarValue(predVar, binding);
+                Value objValue = getVarValue(objVar, binding);
+                Resource contxtValue = (Resource) getVarValue(cntxtVar, binding);
+                for (Statement st : ((FixedStatementPattern) sp).statements) {
+                    if (!((subjValue != null && !subjValue.equals(st.getSubject())) ||
+                            (predValue != null && !predValue.equals(st.getPredicate())) ||
+                            (objValue != null && !objValue.equals(st.getObject())))) {
+                        coll.add(new RdfCloudTripleStoreUtils.CustomEntry<Statement, BindingSet>(st, binding));
+                    }
+                }
+            }
+            iter = new IteratorIteration(coll.iterator());
+        } else if (sp instanceof TransitivePropertySP &&
+                ((subjVar != null && subjVar.getValue() != null) ||
+                        (objVar != null && objVar.getValue() != null)) &&
+                sp.getPredicateVar() != null) {
+            //if this is a transitive prop ref, we need to make sure that either the subj or obj is not null
+            //TODO: Cannot handle an open-ended transitive property where both subj and obj are null
+            //TODO: Should one day fill in the subj or obj from the incoming bindings and work from there
+            //TODO: This makes a lot of assumptions, and a large returned set could cause an OutOfMemoryError
+            Set<Statement> sts = null;
+            try {
+                sts = inferenceEngine.findTransitiveProperty((Resource) getVarValue(subjVar),
+                        (URI) getVarValue(predVar), getVarValue(objVar), (Resource) getVarValue(cntxtVar));
+            } catch (InferenceEngineException e) {
+                throw new QueryEvaluationException(e);
+            }
+            Collection<Map.Entry<Statement, BindingSet>> coll = new ArrayList<Map.Entry<Statement, BindingSet>>();
+            for (BindingSet binding : bindings) {
+                for (Statement st : sts) {
+                    coll.add(new RdfCloudTripleStoreUtils.CustomEntry<Statement, BindingSet>(st, binding));
+                }
+            }
+            iter = new IteratorIteration(coll.iterator());
+        } else {
+            for (BindingSet binding : bindings) {
+                Value subjValue = getVarValue(subjVar, binding);
+                Value predValue = getVarValue(predVar, binding);
+                Value objValue = getVarValue(objVar, binding);
+                Resource contxtValue = (Resource) getVarValue(cntxtVar, binding);
+                if ((subjValue != null && !(subjValue instanceof Resource)) ||
+                        (predValue != null && !(predValue instanceof URI))) {
+                    continue;
+                }
+                stmts.add(new RdfCloudTripleStoreUtils.CustomEntry<Statement, BindingSet>(
+                        new NullableStatementImpl((Resource) subjValue, (URI) predValue, objValue, contxtValue), binding));
+            }
+            if (stmts.size() == 0) {
+                return new EmptyIteration();
+            }
+
+            iter = ((RdfCloudTripleStoreConnection.StoreTripleSource) tripleSource).getStatements(stmts);
+        }
+        return new ConvertingIteration<Map.Entry<Statement, BindingSet>, BindingSet, QueryEvaluationException>(iter) {
+
+            @Override
+            protected BindingSet convert(Map.Entry<Statement, BindingSet> stbs) throws QueryEvaluationException {
+                Statement st = stbs.getKey();
+                BindingSet bs = stbs.getValue();
+                QueryBindingSet result = new QueryBindingSet(bs);
+                if (subjVar != null && !result.hasBinding(subjVar.getName())) {
+                    result.addBinding(subjVar.getName(), st.getSubject());
+                }
+                if (predVar != null && !result.hasBinding(predVar.getName())) {
+                    result.addBinding(predVar.getName(), st.getPredicate());
+                }
+                if (objVar != null && !result.hasBinding(objVar.getName())) {
+                    result.addBinding(objVar.getName(), st.getObject());
+                }
+                if (cntxtVar != null && !result.hasBinding(cntxtVar.getName()) && st.getContext() != null) {
+                    result.addBinding(cntxtVar.getName(), st.getContext());
+                }
+                return result;
+            }
+        };
+    }
+
+    @Override
+    public CloseableIteration<BindingSet, QueryEvaluationException> evaluate(TupleExpr expr, BindingSet bindings) throws QueryEvaluationException {
+        if (expr instanceof QueryRoot) {
+            if (displayQueryPlan) {
+//                System.out.println("Tables: ");
+//                System.out.println("--SPO: \t" + RdfCloudTripleStoreConstants.TBL_SPO);
+//                System.out.println("--PO: \t" + RdfCloudTripleStoreConstants.TBL_PO);
+//                System.out.println("--OSP: \t" + RdfCloudTripleStoreConstants.TBL_OSP);
+                logger.info("=================== Rya Query ===================");
+                for (String str : expr.toString().split("\\r?\\n")) {
+                    logger.info(str);
+                }
+                logger.info("================= End Rya Query =================");
+            }
+        }
+        return super.evaluate(expr, bindings);
+    }
+
+    public CloseableIteration evaluate(Slice slice, BindingSet bindings)
+            throws QueryEvaluationException {
+        CloseableIteration result = evaluate(slice.getArg(), bindings);
+        if (slice.hasOffset()) {
+            result = new OffsetIteration(result, slice.getOffset());
+        }
+        if (slice.hasLimit()) {
+            result = new LimitIteration(result, slice.getLimit());
+        }
+        return result;
+    }
+
+    protected Value getVarValue(Var var) {
+        if (var == null)
+            return null;
+        else
+            return var.getValue();
+    }
+
+    public void shutdown() {
+        executorService.shutdownNow();
+    }
+}

http://git-wip-us.apache.org/repos/asf/incubator-rya/blob/80faf06d/sail/src/main/java/mvm/rya/rdftriplestore/evaluation/ParallelJoinIterator.java
----------------------------------------------------------------------
diff --git a/sail/src/main/java/mvm/rya/rdftriplestore/evaluation/ParallelJoinIterator.java b/sail/src/main/java/mvm/rya/rdftriplestore/evaluation/ParallelJoinIterator.java
new file mode 100644
index 0000000..1d5c982
--- /dev/null
+++ b/sail/src/main/java/mvm/rya/rdftriplestore/evaluation/ParallelJoinIterator.java
@@ -0,0 +1,139 @@
+package mvm.rya.rdftriplestore.evaluation;
+
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ * 
+ *   http://www.apache.org/licenses/LICENSE-2.0
+ * 
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied.  See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+
+
+
+import info.aduna.iteration.CloseableIteration;
+import info.aduna.iteration.LookAheadIteration;
+
+import java.util.NoSuchElementException;
+import java.util.Queue;
+import java.util.concurrent.ExecutorService;
+import java.util.concurrent.LinkedBlockingQueue;
+
+import org.openrdf.query.BindingSet;
+import org.openrdf.query.QueryEvaluationException;
+import org.openrdf.query.algebra.Join;
+import org.openrdf.query.algebra.TupleExpr;
+import org.openrdf.query.algebra.evaluation.EvaluationStrategy;
+import org.openrdf.query.impl.EmptyBindingSet;
+
+/**
+ * Join iterator that evaluates the right-hand side of a join on a thread pool,
+ * one work item per left-hand binding set.
+ */
+public class ParallelJoinIterator extends LookAheadIteration<BindingSet, QueryEvaluationException> {
+
+    public static final EmptyBindingSet EMPTY_BINDING_SET = new EmptyBindingSet();
+
+    private final EvaluationStrategy strategy;
+    private final Join join;
+    private final CloseableIteration<BindingSet, QueryEvaluationException> leftIter;
+
+    private ExecutorService executorService;
+    private Queue<ParallelIteratorWork> workQueue = new LinkedBlockingQueue<ParallelIteratorWork>();
+    private ParallelIteratorWork currentWork;
+    private int batch;
+
+    public ParallelJoinIterator(EvaluationStrategy strategy, Join join, BindingSet bindings, ExecutorService executorService, int batch)
+            throws QueryEvaluationException {
+        this.strategy = strategy;
+        this.join = join;
+        leftIter = strategy.evaluate(join.getLeftArg(), bindings);
+
+        this.executorService = executorService;
+        this.batch = batch;
+    }
+
+
+    @Override
+    protected BindingSet getNextElement() throws QueryEvaluationException {
+
+        try {
+            while (leftIter.hasNext() || !workQueue.isEmpty() || currentWork != null) {
+                if (!workQueue.isEmpty() && currentWork == null) {
+                    currentWork = workQueue.poll();
+                }
+
+                if (currentWork != null) {
+                    BindingSet bindingSet = currentWork.queue.poll();
+                    if (EMPTY_BINDING_SET.equals(bindingSet)) {
+                        currentWork = null;
+                        continue;
+                    } else if (bindingSet == null) {
+                        continue;
+                    }
+                    return bindingSet;
+                }
+
+                try {
+                    for (int i = 0; i < batch; i++) {
+                        if (leftIter.hasNext()) {
+                            ParallelIteratorWork work = new ParallelIteratorWork((BindingSet) leftIter.next(), join.getRightArg());
+                            workQueue.add(work);
+                            executorService.execute(work);
+                        } else
+                            break;
+                    }
+                } catch (NoSuchElementException ignore) {
+                }
+            }
+        } catch (Exception e) {
+            throw new QueryEvaluationException(e);
+        }
+        return null;
+    }
+
+    @Override
+    protected void handleClose() throws QueryEvaluationException {
+        try {
+            super.handleClose();
+            leftIter.close();
+//           rightIter.close();
+        } catch (Exception e) {
+            throw new QueryEvaluationException(e);
+        }
+    }
+
+    private class ParallelIteratorWork implements Runnable {
+
+        private BindingSet leftBindingSet;
+        private TupleExpr rightTupleExpr;
+        public LinkedBlockingQueue<BindingSet> queue = new LinkedBlockingQueue<BindingSet>();
+
+        private ParallelIteratorWork(BindingSet leftBindingSet, TupleExpr rightTupleExpr) {
+            this.leftBindingSet = leftBindingSet;
+            this.rightTupleExpr = rightTupleExpr;
+        }
+
+        @Override
+        public void run() {
+            try {
+                CloseableIteration<BindingSet, QueryEvaluationException> iter = strategy.evaluate(rightTupleExpr, leftBindingSet);
+                while (iter.hasNext()) {
+                    queue.add(iter.next());
+                }
+                queue.add(EMPTY_BINDING_SET);
+                iter.close();
+            } catch (QueryEvaluationException e) {
+                throw new RuntimeException(e);
+            }
+        }
+    }
+}
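
ParallelIteratorWork above uses EMPTY_BINDING_SET as an end-of-stream sentinel on its LinkedBlockingQueue; that is how getNextElement() distinguishes "this work item is finished" from "no result available yet". The pattern in isolation (a hypothetical demo; a String stands in for the sentinel binding set):

import java.util.concurrent.ExecutorService;
import java.util.concurrent.Executors;
import java.util.concurrent.LinkedBlockingQueue;

public class SentinelQueueDemo {
    private static final String SENTINEL = "<end-of-stream>";

    public static void main(String[] args) throws InterruptedException {
        final LinkedBlockingQueue<String> queue = new LinkedBlockingQueue<String>();
        ExecutorService pool = Executors.newSingleThreadExecutor();
        // Producer: emit results, then the sentinel to mark completion.
        pool.execute(new Runnable() {
            public void run() {
                queue.add("result-1");
                queue.add("result-2");
                queue.add(SENTINEL);
            }
        });
        // Consumer: drain until the sentinel appears.
        String next;
        while (!SENTINEL.equals(next = queue.take())) {
            System.out.println(next);
        }
        pool.shutdown();
    }
}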

http://git-wip-us.apache.org/repos/asf/incubator-rya/blob/80faf06d/sail/src/main/java/mvm/rya/rdftriplestore/evaluation/PushJoinDownVisitor.java
----------------------------------------------------------------------
diff --git a/sail/src/main/java/mvm/rya/rdftriplestore/evaluation/PushJoinDownVisitor.java b/sail/src/main/java/mvm/rya/rdftriplestore/evaluation/PushJoinDownVisitor.java
new file mode 100644
index 0000000..342f98d
--- /dev/null
+++ b/sail/src/main/java/mvm/rya/rdftriplestore/evaluation/PushJoinDownVisitor.java
@@ -0,0 +1,57 @@
+package mvm.rya.rdftriplestore.evaluation;
+
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ * 
+ *   http://www.apache.org/licenses/LICENSE-2.0
+ * 
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied.  See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+
+
+
+import org.openrdf.query.algebra.Join;
+import org.openrdf.query.algebra.TupleExpr;
+import org.openrdf.query.algebra.helpers.QueryModelVisitorBase;
+
+/**
+ * Class PushJoinDownVisitor
+ * Date: Apr 11, 2011
+ * Time: 10:16:15 PM
+ */
+public class PushJoinDownVisitor extends QueryModelVisitorBase {
+    @Override
+    public void meet(Join node) throws Exception {
+        super.meet(node);
+
+        TupleExpr leftArg = node.getLeftArg();
+        TupleExpr rightArg = node.getRightArg();
+
+        /**
+         * if join(join(1, 2), join(3,4))
+         * should be:
+         * join(join(join(1,2), 3), 4)
+         */
+        if (leftArg instanceof Join && rightArg instanceof Join) {
+            Join leftJoin = (Join) leftArg;
+            Join rightJoin = (Join) rightArg;
+            TupleExpr right_LeftArg = rightJoin.getLeftArg();
+            TupleExpr right_rightArg = rightJoin.getRightArg();
+            Join inner = new Join(leftJoin, right_LeftArg);
+            Join outer = new Join(inner, right_rightArg);
+            node.replaceWith(outer);
+        }
+
+    }
+}

http://git-wip-us.apache.org/repos/asf/incubator-rya/blob/80faf06d/sail/src/main/java/mvm/rya/rdftriplestore/evaluation/QueryJoinOptimizer.java
----------------------------------------------------------------------
diff --git a/sail/src/main/java/mvm/rya/rdftriplestore/evaluation/QueryJoinOptimizer.java b/sail/src/main/java/mvm/rya/rdftriplestore/evaluation/QueryJoinOptimizer.java
new file mode 100644
index 0000000..940e46e
--- /dev/null
+++ b/sail/src/main/java/mvm/rya/rdftriplestore/evaluation/QueryJoinOptimizer.java
@@ -0,0 +1,284 @@
+package mvm.rya.rdftriplestore.evaluation;
+
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ * 
+ *   http://www.apache.org/licenses/LICENSE-2.0
+ * 
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied.  See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+
+
+
+import org.openrdf.query.BindingSet;
+import org.openrdf.query.Dataset;
+import org.openrdf.query.algebra.*;
+import org.openrdf.query.algebra.evaluation.QueryOptimizer;
+import org.openrdf.query.algebra.evaluation.impl.EvaluationStatistics;
+import org.openrdf.query.algebra.helpers.QueryModelVisitorBase;
+import org.openrdf.query.algebra.helpers.StatementPatternCollector;
+
+import java.util.*;
+
+/**
+ * A query optimizer that re-orders nested Joins.
+ *
+ * @author Arjohn Kampman
+ * @author James Leigh
+ */
+public class QueryJoinOptimizer implements QueryOptimizer {
+
+    protected final EvaluationStatistics statistics;
+
+    public QueryJoinOptimizer() {
+        this(new EvaluationStatistics());
+    }
+
+    public QueryJoinOptimizer(EvaluationStatistics statistics) {
+        this.statistics = statistics;
+    }
+
+    /**
+     * Applies generally applicable optimizations: path expressions are sorted
+     * from more to less specific.
+     *
+     * @param tupleExpr
+     */
+    public void optimize(TupleExpr tupleExpr, Dataset dataset, BindingSet bindings) {
+        tupleExpr.visit(new JoinVisitor());
+    }
+
+    protected class JoinVisitor extends QueryModelVisitorBase<RuntimeException> {
+
+        Set<String> boundVars = new HashSet<String>();
+
+        @Override
+        public void meet(LeftJoin leftJoin) {
+            leftJoin.getLeftArg().visit(this);
+
+            Set<String> origBoundVars = boundVars;
+            try {
+                boundVars = new HashSet<String>(boundVars);
+                boundVars.addAll(leftJoin.getLeftArg().getBindingNames());
+
+                leftJoin.getRightArg().visit(this);
+            } finally {
+                boundVars = origBoundVars;
+            }
+        }
+
+        @Override
+        public void meet(Join node) {
+            Set<String> origBoundVars = boundVars;
+            try {
+                boundVars = new HashSet<String>(boundVars);
+
+                // Recursively get the join arguments
+                List<TupleExpr> joinArgs = getJoinArgs(node, new ArrayList<TupleExpr>());
+
+                // Build maps of cardinalities and vars per tuple expression
+                Map<TupleExpr, Double> cardinalityMap = new HashMap<TupleExpr, Double>();
+//                Map<TupleExpr, List<Var>> varsMap = new HashMap<TupleExpr, List<Var>>();
+//                Map<Var, Double> varCardinalityMap = new HashMap<Var, Double>();
+
+                for (TupleExpr tupleExpr : joinArgs) {
+                    double cardinality = statistics.getCardinality(tupleExpr);
+//                    List<Var> statementPatternVars = getStatementPatternVars(tupleExpr);
+
+                    cardinalityMap.put(tupleExpr, cardinality);
+//                    varsMap.put(tupleExpr, statementPatternVars);
+                }
+
+                // Build map of var frequencies
+//                Map<Var, Integer> varFreqMap = new HashMap<Var, Integer>();
+//                for (List<Var> varList : varsMap.values()) {
+//                    getVarFreqMap(varList, varFreqMap);
+//                }
+
+                // Reorder the (recursive) join arguments to a more optimal sequence
+                List<TupleExpr> orderedJoinArgs = new ArrayList<TupleExpr>(joinArgs.size());
+                while (!joinArgs.isEmpty()) {
+                    TupleExpr tupleExpr = selectNextTupleExpr(joinArgs, cardinalityMap);
+                    if (tupleExpr == null) {
+                        break;
+                    }
+
+                    joinArgs.remove(tupleExpr);
+                    orderedJoinArgs.add(tupleExpr);
+
+                    // Recursively optimize join arguments
+                    tupleExpr.visit(this);
+
+                    boundVars.addAll(tupleExpr.getBindingNames());
+                }
+
+                // Build new join hierarchy
+                // Note: generated hierarchy is right-recursive to help the
+                // IterativeEvaluationOptimizer to factor out the left-most join
+                // argument
+                int i = 0;
+                TupleExpr replacement = orderedJoinArgs.get(i);
+                for (i++; i < orderedJoinArgs.size(); i++) {
+                    replacement = new Join(replacement, orderedJoinArgs.get(i));
+                }
+
+                // Replace old join hierarchy
+                node.replaceWith(replacement);
+            } finally {
+                boundVars = origBoundVars;
+            }
+        }
+
+        protected <L extends List<TupleExpr>> L getJoinArgs(TupleExpr tupleExpr, L joinArgs) {
+            if (tupleExpr instanceof Join) {
+                Join join = (Join) tupleExpr;
+                getJoinArgs(join.getLeftArg(), joinArgs);
+                getJoinArgs(join.getRightArg(), joinArgs);
+            } else {
+                joinArgs.add(tupleExpr);
+            }
+
+            return joinArgs;
+        }
+
+        protected List<Var> getStatementPatternVars(TupleExpr tupleExpr) {
+            List<StatementPattern> stPatterns = StatementPatternCollector.process(tupleExpr);
+            List<Var> varList = new ArrayList<Var>(stPatterns.size() * 4);
+            for (StatementPattern sp : stPatterns) {
+                sp.getVars(varList);
+            }
+            return varList;
+        }
+
+        protected <M extends Map<Var, Integer>> M getVarFreqMap(List<Var> varList, M varFreqMap) {
+            for (Var var : varList) {
+                Integer freq = varFreqMap.get(var);
+                freq = (freq == null) ? 1 : freq + 1;
+                varFreqMap.put(var, freq);
+            }
+            return varFreqMap;
+        }
+
+        /**
+         * Selects from a list of tuple expressions the next tuple expression that
+         * should be evaluated. This method selects the tuple expression with the
+         * lowest estimated cardinality, so the most selective expression is
+         * evaluated first.
+         */
+        protected TupleExpr selectNextTupleExpr(List<TupleExpr> expressions,
+                                                Map<TupleExpr, Double> cardinalityMap
+//                                                ,Map<TupleExpr, List<Var>> varsMap,
+//                                                Map<Var, Integer> varFreqMap, Set<String> boundVars
+        ) {
+            double lowestCardinality = Double.MAX_VALUE;
+            TupleExpr result = expressions.get(0);
+
+            for (TupleExpr tupleExpr : expressions) {
+                // Calculate a score for this tuple expression
+//                double cardinality = getTupleExprCardinality(tupleExpr, cardinalityMap, varsMap, varFreqMap, boundVars);
+                double cardinality = cardinalityMap.get(tupleExpr);
+//                List<Var> vars = varsMap.get(tupleExpr);
+//                List<Var> distinctUnboundVars = getUnboundVars(vars);
+//                if (distinctUnboundVars.size() >= 2) {
+//                    cardinality *= (distinctUnboundVars.size() + 1);
+//                }
+
+                if (cardinality < lowestCardinality) {
+                    // More specific path expression found
+                    lowestCardinality = cardinality;
+                    result = tupleExpr;
+                }
+            }
+
+            return result;
+        }
+
+        protected double getTupleExprCardinality(TupleExpr tupleExpr, Map<TupleExpr, Double> cardinalityMap,
+                                                 Map<TupleExpr, List<Var>> varsMap, Map<Var, Integer> varFreqMap, Set<String> boundVars) {
+            double cardinality = cardinalityMap.get(tupleExpr);
+
+            List<Var> vars = varsMap.get(tupleExpr);
+
+            // Compensate for variables that are bound earlier in the evaluation
+            List<Var> unboundVars = getUnboundVars(vars);
+            List<Var> constantVars = getConstantVars(vars);
+            int nonConstantVarCount = vars.size() - constantVars.size();
+            if (nonConstantVarCount > 0) {
+                double exp = (double) unboundVars.size() / nonConstantVarCount;
+                cardinality = Math.pow(cardinality, exp);
+            }
+
+            if (unboundVars.isEmpty()) {
+                // Prefer patterns with more bound vars
+                if (nonConstantVarCount > 0) {
+                    cardinality /= nonConstantVarCount;
+                }
+            } else {
+                // Prefer patterns that bind variables from other tuple expressions
+                int foreignVarFreq = getForeignVarFreq(unboundVars, varFreqMap);
+                if (foreignVarFreq > 0) {
+                    cardinality /= foreignVarFreq;
+                }
+            }
+
+            // Prefer patterns that bind more variables
+            List<Var> distinctUnboundVars = getUnboundVars(new HashSet<Var>(vars));
+            if (distinctUnboundVars.size() >= 2) {
+                cardinality /= distinctUnboundVars.size();
+            }
+
+            return cardinality;
+        }
+
+        protected List<Var> getConstantVars(Iterable<Var> vars) {
+            List<Var> constantVars = new ArrayList<Var>();
+
+            for (Var var : vars) {
+                if (var.hasValue()) {
+                    constantVars.add(var);
+                }
+            }
+
+            return constantVars;
+        }
+
+        protected List<Var> getUnboundVars(Iterable<Var> vars) {
+            List<Var> unboundVars = new ArrayList<Var>();
+
+            for (Var var : vars) {
+                if (!var.hasValue() && !this.boundVars.contains(var.getName())) {
+                    unboundVars.add(var);
+                }
+            }
+
+            return unboundVars;
+        }
+
+        protected int getForeignVarFreq(List<Var> ownUnboundVars, Map<Var, Integer> varFreqMap) {
+            int result = 0;
+
+            Map<Var, Integer> ownFreqMap = getVarFreqMap(ownUnboundVars, new HashMap<Var, Integer>());
+
+            for (Map.Entry<Var, Integer> entry : ownFreqMap.entrySet()) {
+                Var var = entry.getKey();
+                int ownFreq = entry.getValue();
+                result += varFreqMap.get(var) - ownFreq;
+            }
+
+            return result;
+        }
+    }
+}
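
The net effect of meet(Join) above: estimate a cardinality for every join argument, repeatedly pick the cheapest remaining one, and fold the ordered list into a left-deep join tree. A toy version of that selection loop (hypothetical cardinalities; strings stand in for TupleExprs):

import java.util.ArrayList;
import java.util.HashMap;
import java.util.List;
import java.util.Map;

public class GreedyJoinOrderDemo {
    public static void main(String[] args) {
        Map<String, Double> cardinality = new HashMap<String, Double>();
        cardinality.put("A", 1000.0);
        cardinality.put("B", 10.0);
        cardinality.put("C", 100.0);

        List<String> joinArgs = new ArrayList<String>(cardinality.keySet());
        List<String> ordered = new ArrayList<String>();
        while (!joinArgs.isEmpty()) {
            String best = joinArgs.get(0);
            for (String arg : joinArgs) {
                if (cardinality.get(arg) < cardinality.get(best)) {
                    best = arg; // lowest estimated cardinality wins
                }
            }
            joinArgs.remove(best);
            ordered.add(best);
        }
        // ordered == [B, C, A]; folding left-deep gives Join(Join(B, C), A).
        System.out.println(ordered);
    }
}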

http://git-wip-us.apache.org/repos/asf/incubator-rya/blob/80faf06d/sail/src/main/java/mvm/rya/rdftriplestore/evaluation/QueryJoinSelectOptimizer.java
----------------------------------------------------------------------
diff --git a/sail/src/main/java/mvm/rya/rdftriplestore/evaluation/QueryJoinSelectOptimizer.java b/sail/src/main/java/mvm/rya/rdftriplestore/evaluation/QueryJoinSelectOptimizer.java
new file mode 100644
index 0000000..643446a
--- /dev/null
+++ b/sail/src/main/java/mvm/rya/rdftriplestore/evaluation/QueryJoinSelectOptimizer.java
@@ -0,0 +1,260 @@
+package mvm.rya.rdftriplestore.evaluation;
+
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ * 
+ *   http://www.apache.org/licenses/LICENSE-2.0
+ * 
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied.  See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+
+
+
+import java.util.ArrayList;
+import java.util.HashMap;
+import java.util.List;
+import java.util.Map;
+
+import mvm.rya.api.RdfCloudTripleStoreConfiguration;
+import mvm.rya.api.persist.joinselect.SelectivityEvalDAO;
+import mvm.rya.rdftriplestore.inference.DoNotExpandSP;
+import mvm.rya.rdftriplestore.utils.FixedStatementPattern;
+
+import org.openrdf.query.BindingSet;
+import org.openrdf.query.Dataset;
+import org.openrdf.query.algebra.Join;
+import org.openrdf.query.algebra.TupleExpr;
+import org.openrdf.query.algebra.evaluation.QueryOptimizer;
+import org.openrdf.query.algebra.evaluation.impl.EvaluationStatistics;
+import org.openrdf.query.algebra.helpers.QueryModelVisitorBase;
+
+public class QueryJoinSelectOptimizer implements QueryOptimizer {
+
+  private final EvaluationStatistics statistics;
+  private final SelectivityEvalDAO eval;
+  private final RdfCloudTripleStoreConfiguration config;
+
+  public QueryJoinSelectOptimizer(EvaluationStatistics statistics, SelectivityEvalDAO eval) {
+    System.out.println("Entering join optimizer!");
+    this.statistics = statistics;
+    this.eval = eval;
+    this.config = eval.getConf();
+  }
+
+  /**
+   * Applies generally applicable optimizations: path expressions are sorted from more to less specific.
+   *
+   * @param tupleExpr
+   */
+  public void optimize(TupleExpr tupleExpr, Dataset dataset, BindingSet bindings) {
+    tupleExpr.visit(new JoinVisitor());
+  }
+
+  protected class JoinVisitor extends QueryModelVisitorBase<RuntimeException> {
+
+    @Override
+    public void meet(Join node) {
+
+      try {
+        if (node.getLeftArg() instanceof FixedStatementPattern && node.getRightArg() instanceof DoNotExpandSP) {
+          return;
+        }
+
+        TupleExpr partialQuery = null;
+        List<TupleExpr> joinArgs = getJoinArgs(node, new ArrayList<TupleExpr>());
+        Map<TupleExpr,Double> cardinalityMap = new HashMap<TupleExpr,Double>();
+
+        for (TupleExpr tupleExpr : joinArgs) {
+          double cardinality = statistics.getCardinality(tupleExpr);
+          cardinalityMap.put(tupleExpr, cardinality);
+
+        }
+
+        while (!joinArgs.isEmpty()) {
+          TePairCost tpc = getBestTupleJoin(partialQuery, joinArgs);
+          List<TupleExpr> tePair = tpc.getTePair();
+          if (partialQuery == null) {
+            if (tePair.size() != 2) {
+              throw new IllegalStateException("expected exactly two tuple expressions to seed the join");
+            }
+            if (!(tePair.get(0) instanceof Join)) {
+              tePair.get(0).visit(this);
+            }
+            if (!(tePair.get(1) instanceof Join)) {
+              tePair.get(1).visit(this);
+            }
+            if (tePair.get(1) instanceof Join) {
+              partialQuery = new Join(tePair.get(0), ((Join) tePair.get(1)).getLeftArg());
+              partialQuery = new Join(partialQuery, ((Join) tePair.get(1)).getRightArg());
+              joinArgs.remove(tePair.get(0));
+              joinArgs.remove(tePair.get(1));
+            } else {
+              partialQuery = new Join(tePair.get(0), tePair.get(1));
+              joinArgs.remove(tePair.get(0));
+              joinArgs.remove(tePair.get(1));
+            }
+          } else {
+            if (tePair.size() != 1) {
+              throw new IllegalStateException("expected exactly one tuple expression to extend the join");
+            }
+            if (!(tePair.get(0) instanceof Join)) {
+              tePair.get(0).visit(this);
+            }
+
+            if (tePair.get(0) instanceof Join) {
+              partialQuery = new Join(partialQuery, ((Join) tePair.get(0)).getLeftArg());
+              partialQuery = new Join(partialQuery, ((Join) tePair.get(0)).getRightArg());
+              joinArgs.remove(tePair.get(0));
+
+            } else {
+              partialQuery = new Join(partialQuery, tePair.get(0));
+              joinArgs.remove(tePair.get(0));
+            }
+          }
+
+        }
+
+        // Replace old join hierarchy
+        node.replaceWith(partialQuery);
+
+      } catch (Exception e) {
+        e.printStackTrace();
+      }
+    }
+
+    protected <L extends List<TupleExpr>> L getJoinArgs(TupleExpr tupleExpr, L joinArgs) {
+      if (tupleExpr instanceof Join) {
+        if (!(((Join) tupleExpr).getLeftArg() instanceof FixedStatementPattern) && !(((Join) tupleExpr).getRightArg() instanceof DoNotExpandSP)) {
+          Join join = (Join) tupleExpr;
+          getJoinArgs(join.getLeftArg(), joinArgs);
+          getJoinArgs(join.getRightArg(), joinArgs);
+        } else {
+          joinArgs.add(tupleExpr);
+        }
+      } else {
+        joinArgs.add(tupleExpr);
+      }
+
+      return joinArgs;
+    }
+
+    public TePairCost getBestTupleJoin(TupleExpr partialQuery, List<TupleExpr> teList) throws Exception {
+
+      double tempCost = 0;
+      double bestCost = Double.MAX_VALUE;
+      List<TupleExpr> bestJoinNodes = new ArrayList<TupleExpr>();
+
+      if (partialQuery == null) {
+
+        double jSelect = 0;
+        double card1 = 0;
+        double card2 = 0;
+        TupleExpr teMin1 = null;
+        TupleExpr teMin2 = null;
+        double bestCard1 = 0;
+        double bestCard2 = 0;
+
+        for (int i = 0; i < teList.size(); i++) {
+          for (int j = i + 1; j < teList.size(); j++) {
+            jSelect = eval.getJoinSelect(config, teList.get(i), teList.get(j));
+            card1 = statistics.getCardinality(teList.get(i));
+            card2 = statistics.getCardinality(teList.get(j));
+            tempCost = card1 + card2 + card1 * card2 * jSelect;
+//             System.out.println("Optimizer: TempCost is " + tempCost + " cards are " + card1 + ", " + card2 + ", selectivity is "
+//             + jSelect + ", and nodes are "
+//             + teList.get(i) + " and " + teList.get(j));
+
+            // TODO if every candidate cost is Double.MAX_VALUE, teMin1/teMin2 stay null and a NullPointerException follows when the pair is used
+            if (bestCost > tempCost) {
+
+              teMin1 = teList.get(i);
+              teMin2 = teList.get(j);
+              bestCard1 = card1;
+              bestCard2 = card2;
+              bestCost = tempCost;
+
+              if (bestCost == 0) {
+                bestJoinNodes.add(teMin1);
+                bestJoinNodes.add(teMin2);
+                return new TePairCost(0.0, bestJoinNodes);
+              }
+            }
+          }
+        }
+
+        if (bestCard1 < bestCard2) {
+
+          bestJoinNodes.add(teMin1);
+          bestJoinNodes.add(teMin2);
+
+        } else {
+          bestJoinNodes.add(teMin2);
+          bestJoinNodes.add(teMin1);
+        }
+        //System.out.println("Optimizer: Card1 is " + card1 + ", card2 is " + card2 + ", selectivity is " + jSelect + ", and best cost is" + bestCost);
+        return new TePairCost(bestCost, bestJoinNodes);
+
+      } else {
+        double card1 = statistics.getCardinality(partialQuery);
+        TupleExpr bestTe = null;
+        double card2 = 0;
+        double select = 0;
+
+        for (TupleExpr te : teList) {
+          select = eval.getJoinSelect(config, partialQuery, te);
+          card2 = statistics.getCardinality(te);
+          tempCost = card1 + card2 + card1 * card2 * select;
+//          System.out.println("Optimizer: TempCost is " + tempCost + " cards are " + card1 + ", " + card2 + ", selectivity is "
+//                  + select + ", and nodes are "
+//                  + partialQuery + " and " + te);
+
+
+          if (bestCost > tempCost) {
+            bestTe = te;
+            bestCost = tempCost;
+          }
+
+        }
+        List<TupleExpr> teList2 = new ArrayList<TupleExpr>();
+        teList2.add(bestTe);
+        //System.out.println("Optimizer: Card1 is " + card1 + ", card2 is " + card2 + ", selectivity is " + select + ", and best cost is" + bestCost);
+        return new TePairCost(bestCost, teList2);
+      }
+
+    }
+
+    // **************************************************************************************
+    public class TePairCost {
+
+      private double cost;
+      private List<TupleExpr> tePair;
+
+      public TePairCost(double cost, List<TupleExpr> tePair) {
+        this.cost = cost;
+        this.tePair = tePair;
+
+      }
+
+      public double getCost() {
+        return cost;
+      }
+
+      public List<TupleExpr> getTePair() {
+        return tePair;
+      }
+
+    }
+
+  }
+}

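For a concrete feel of the cost model in getBestTupleJoin, here is a minimal,
self-contained sketch (hypothetical cardinalities and selectivities, no Rya or
openrdf types) of the greedy pair selection: every candidate pair is priced as
card1 + card2 + card1 * card2 * selectivity, and the cheapest pair is joined first.

    // Sketch only: illustrates the optimizer's cost formula with made-up numbers.
    public class JoinCostSketch {
        public static void main(String[] args) {
            double[] card = {1000, 50, 200};   // assumed pattern cardinalities
            double[][] sel = {                 // assumed pairwise join selectivities
                {0, 0.01, 0.5},
                {0.01, 0, 0.02},
                {0.5, 0.02, 0}
            };
            double bestCost = Double.MAX_VALUE;
            int bi = -1, bj = -1;
            for (int i = 0; i < card.length; i++) {
                for (int j = i + 1; j < card.length; j++) {
                    // same shape as the optimizer: scan both inputs plus the join output
                    double cost = card[i] + card[j] + card[i] * card[j] * sel[i][j];
                    if (cost < bestCost) {
                        bestCost = cost;
                        bi = i;
                        bj = j;
                    }
                }
            }
            System.out.println("join patterns " + bi + " and " + bj + " first, cost " + bestCost);
        }
    }

With these numbers patterns 1 and 2 win (cost 450 versus 1550 and 101200); the
optimizer then places the lower-cardinality operand on the left, mirroring the
bestCard1 < bestCard2 check above.
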
http://git-wip-us.apache.org/repos/asf/incubator-rya/blob/80faf06d/sail/src/main/java/mvm/rya/rdftriplestore/evaluation/RdfCloudTripleStoreEvaluationStatistics.java
----------------------------------------------------------------------
diff --git a/sail/src/main/java/mvm/rya/rdftriplestore/evaluation/RdfCloudTripleStoreEvaluationStatistics.java b/sail/src/main/java/mvm/rya/rdftriplestore/evaluation/RdfCloudTripleStoreEvaluationStatistics.java
new file mode 100644
index 0000000..b0fa46c
--- /dev/null
+++ b/sail/src/main/java/mvm/rya/rdftriplestore/evaluation/RdfCloudTripleStoreEvaluationStatistics.java
@@ -0,0 +1,281 @@
+package mvm.rya.rdftriplestore.evaluation;
+
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ * 
+ *   http://www.apache.org/licenses/LICENSE-2.0
+ * 
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied.  See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+
+
+
+import static com.google.common.base.Preconditions.checkNotNull;
+
+import java.util.ArrayList;
+import java.util.Collection;
+import java.util.HashMap;
+import java.util.List;
+import java.util.Map;
+
+import mvm.rya.api.RdfCloudTripleStoreConfiguration;
+import mvm.rya.api.persist.RdfEvalStatsDAO;
+import mvm.rya.api.persist.RdfEvalStatsDAO.CARDINALITY_OF;
+import mvm.rya.rdftriplestore.inference.DoNotExpandSP;
+import mvm.rya.rdftriplestore.utils.FixedStatementPattern;
+
+import org.openrdf.model.Resource;
+import org.openrdf.model.Statement;
+import org.openrdf.model.URI;
+import org.openrdf.model.Value;
+import org.openrdf.model.vocabulary.RDF;
+import org.openrdf.query.algebra.BinaryTupleOperator;
+import org.openrdf.query.algebra.Filter;
+import org.openrdf.query.algebra.Join;
+import org.openrdf.query.algebra.Projection;
+import org.openrdf.query.algebra.Slice;
+import org.openrdf.query.algebra.StatementPattern;
+import org.openrdf.query.algebra.TupleExpr;
+import org.openrdf.query.algebra.UnaryTupleOperator;
+import org.openrdf.query.algebra.Var;
+import org.openrdf.query.algebra.evaluation.impl.EvaluationStatistics;
+
+/**
+ * Class RdfCloudTripleStoreEvaluationStatistics
+ * Date: Apr 12, 2011
+ * Time: 1:31:05 PM
+ */
+public class RdfCloudTripleStoreEvaluationStatistics extends EvaluationStatistics {
+
+    private RdfCloudTripleStoreConfiguration conf;
+    private RdfEvalStatsDAO rdfEvalStatsDAO;
+    protected boolean pushEmptyRdfTypeDown = true;
+    protected boolean useCompositeCardinalities = true;
+
+    public RdfCloudTripleStoreEvaluationStatistics(RdfCloudTripleStoreConfiguration conf, RdfEvalStatsDAO rdfEvalStatsDAO) {
+        checkNotNull(conf);
+        checkNotNull(rdfEvalStatsDAO);
+        try {
+            this.conf = conf;
+            this.rdfEvalStatsDAO = rdfEvalStatsDAO;
+            pushEmptyRdfTypeDown = conf.isStatsPushEmptyRdftypeDown();
+            useCompositeCardinalities = conf.isUseCompositeCardinality();
+        } catch (Exception e) {
+            throw new RuntimeException(e);
+        }
+    }
+
+    @Override
+    public double getCardinality(TupleExpr expr) {
+        if (expr instanceof Filter) {
+            Filter f = (Filter) expr;
+            // filters must make sets smaller
+            return super.getCardinality(f.getArg()) / 10;
+        }
+        return super.getCardinality(expr);
+    }
+
+    @Override
+    protected CardinalityCalculator createCardinalityCalculator() {
+        return new RdfCloudTripleStoreCardinalityCalculator(this);
+    }
+
+    public RdfEvalStatsDAO getRdfEvalStatsDAO() {
+        return rdfEvalStatsDAO;
+    }
+
+    public void setRdfEvalStatsDAO(RdfEvalStatsDAO rdfEvalStatsDAO) {
+        this.rdfEvalStatsDAO = rdfEvalStatsDAO;
+    }
+
+    public class RdfCloudTripleStoreCardinalityCalculator extends CardinalityCalculator {
+        private RdfCloudTripleStoreEvaluationStatistics statistics;
+        protected Map<Var, Collection<Statement>> fspMap;
+
+        public RdfCloudTripleStoreCardinalityCalculator(RdfCloudTripleStoreEvaluationStatistics statistics) {
+            this.statistics = statistics;
+        }
+
+        @Override
+        protected double getCardinality(StatementPattern sp) {
+            Var subjectVar = sp.getSubjectVar();
+            Resource subj = (Resource) getConstantValue(subjectVar);
+            Var predicateVar = sp.getPredicateVar();
+            URI pred = (URI) getConstantValue(predicateVar);
+            Var objectVar = sp.getObjectVar();
+            Value obj = getConstantValue(objectVar);
+            Resource context = (Resource) getConstantValue(sp.getContextVar());
+
+            // give an unconstrained rdf:type pattern (no subject or object bound) the
+            // maximum cardinality so it is pushed to the end of the join order
+            if (pred != null && statistics.pushEmptyRdfTypeDown && RDF.TYPE.equals(pred) && subj == null && obj == null) {
+                return Double.MAX_VALUE;
+            }
+
+            // a FixedStatementPattern indicates that backward-chaining reasoning is being used
+            if (sp instanceof FixedStatementPattern) {
+                //no query here
+                FixedStatementPattern fsp = (FixedStatementPattern) sp;
+                //TODO: assume that only the subject is open ended here
+                Var fspSubjectVar = fsp.getSubjectVar();
+                if (fspSubjectVar != null && fspSubjectVar.getValue() == null) {
+                    if (fspMap == null) {
+                        fspMap = new HashMap<Var, Collection<Statement>>();
+                    }
+                    fspMap.put(fspSubjectVar, fsp.statements);
+                }
+                return fsp.statements.size();
+            }
+
+            /**
+             * Use the output of the FixedStatementPattern to determine more information
+             */
+            if (fspMap != null && sp instanceof DoNotExpandSP) {
+                //TODO: Might be a better way than 3 map pulls
+                RdfEvalStatsDAO.CARDINALITY_OF cardinality_of = null;
+                Collection<Statement> statements = null;
+                // TODO unsure of how to incorporate additional cardinalities here
+                if (objectVar != null && objectVar.getValue() == null) {
+                    statements = fspMap.get(objectVar);
+                    cardinality_of = RdfEvalStatsDAO.CARDINALITY_OF.OBJECT;
+                }
+                if (statements == null && predicateVar != null && predicateVar.getValue() == null) {
+                    statements = fspMap.get(predicateVar);
+                    cardinality_of = RdfEvalStatsDAO.CARDINALITY_OF.PREDICATE;
+                }
+                if (statements == null && subjectVar != null && subjectVar.getValue() == null) {
+                    statements = fspMap.get(subjectVar);
+                    cardinality_of = RdfEvalStatsDAO.CARDINALITY_OF.SUBJECT;
+                }
+                if (statements != null) {
+                    double fspCard = 0;
+                    for (Statement statement : statements) {
+                    	List<Value> values = new ArrayList<Value>();
+                    	values.add(statement.getSubject());
+                    	fspCard  += rdfEvalStatsDAO.getCardinality(conf, cardinality_of, values, context);
+                    }
+                    return fspCard;
+                }
+            }
+
+            /**
+             * We put full triple scans before rdf:type because more often than not
+             * the triple scan is being joined with something else that is better than
+             * asking the full rdf:type of everything.
+             */
+            double cardinality = Double.MAX_VALUE - 1;
+            try {
+                if (subj != null) {
+                	List<Value> values = new ArrayList<Value>();
+                	CARDINALITY_OF card = RdfEvalStatsDAO.CARDINALITY_OF.SUBJECT;
+            		values.add(subj);
+            		if (useCompositeCardinalities){
+                   	    if (pred != null){
+                    		values.add(pred);
+                    		card = RdfEvalStatsDAO.CARDINALITY_OF.SUBJECTPREDICATE;
+                    	}
+                   	    else if (obj != null){
+                    		values.add(obj);
+                    		card = RdfEvalStatsDAO.CARDINALITY_OF.SUBJECTOBJECT;
+                   	    }
+            		}
+                    double evalCard = rdfEvalStatsDAO.getCardinality(conf, card, values, context);
+                	// the cardinality will be -1 if there was no value found (if the index does not exist)
+                    if (evalCard >= 0) {
+                        cardinality = Math.min(cardinality, evalCard);
+                    } else {
+                        cardinality = 1;
+                    }
+                }
+                else if (pred != null) {
+                	List<Value> values = new ArrayList<Value>();
+                	CARDINALITY_OF card = RdfEvalStatsDAO.CARDINALITY_OF.PREDICATE;
+            		values.add(pred);
+            		if (useCompositeCardinalities){
+                   	    if (obj != null){
+                    		values.add(obj);
+                    		card = RdfEvalStatsDAO.CARDINALITY_OF.PREDICATEOBJECT;
+                   	    }
+            		}
+                    double evalCard = rdfEvalStatsDAO.getCardinality(conf, card, values, context);
+                    if (evalCard >= 0) {
+                        cardinality = Math.min(cardinality, evalCard);
+                    } else {
+                        cardinality = 1;
+                    }
+                }
+                else if (obj != null) {
+                	List<Value> values = new ArrayList<Value>();
+            		values.add(obj);
+                    double evalCard = rdfEvalStatsDAO.getCardinality(conf, RdfEvalStatsDAO.CARDINALITY_OF.OBJECT, values, context);
+                    if (evalCard >= 0) {
+                        cardinality = Math.min(cardinality, evalCard);
+                    } else {
+                        cardinality = 1;
+                    }
+                }
+            } catch (Exception e) {
+                throw new RuntimeException(e);
+            }
+
+            return cardinality;
+        }
+
+        @Override
+        protected void meetUnaryTupleOperator(UnaryTupleOperator node) {
+            if (node instanceof Projection) {
+                cardinality += -1.0;
+            }
+            super.meetUnaryTupleOperator(node);
+        }
+
+        @Override
+        protected void meetBinaryTupleOperator(BinaryTupleOperator node) {
+            node.getLeftArg().visit(this);
+            double leftArgCost = cardinality;
+            node.getRightArg().visit(this);
+            cardinality += leftArgCost;
+        }
+
+        // TODO is setting the cardinality to the limit sufficient to support the Slice node?
+        @Override
+        public void meet(Slice node) {
+            cardinality = node.getLimit();
+        }
+        
+
+        @Override
+        public void meet(Join node) {
+            node.getLeftArg().visit(this);
+            double leftArgCost = cardinality;
+            node.getRightArg().visit(this);
+            if (leftArgCost > cardinality) {
+                cardinality = leftArgCost;    //TODO: Is this ok?
+            }
+        }
+
+        protected Value getConstantValue(Var var) {
+            if (var != null)
+                return var.getValue();
+            else
+                return null;
+        }
+    }
+
+}

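The composite-cardinality branching above reduces to picking the most specific
statistic available for the bound positions of the pattern. A minimal sketch of
that decision, assuming useCompositeCardinalities is enabled; the returned
strings simply name the corresponding RdfEvalStatsDAO.CARDINALITY_OF constants:

    // Sketch only: mirrors the index choice made in getCardinality(StatementPattern).
    public class CardinalityIndexSketch {
        static String chooseIndex(boolean subjBound, boolean predBound, boolean objBound) {
            if (subjBound) {
                if (predBound) return "SUBJECTPREDICATE"; // subject and predicate bound
                if (objBound)  return "SUBJECTOBJECT";    // subject and object bound
                return "SUBJECT";
            }
            if (predBound) {
                return objBound ? "PREDICATEOBJECT" : "PREDICATE";
            }
            return objBound ? "OBJECT" : "FULL SCAN (Double.MAX_VALUE - 1 above)";
        }

        public static void main(String[] args) {
            // e.g. { ?s rdf:type :Person } binds the predicate and the object
            System.out.println(chooseIndex(false, true, true)); // PREDICATEOBJECT
        }
    }
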
http://git-wip-us.apache.org/repos/asf/incubator-rya/blob/80faf06d/sail/src/main/java/mvm/rya/rdftriplestore/evaluation/RdfCloudTripleStoreSelectivityEvaluationStatistics.java
----------------------------------------------------------------------
diff --git a/sail/src/main/java/mvm/rya/rdftriplestore/evaluation/RdfCloudTripleStoreSelectivityEvaluationStatistics.java b/sail/src/main/java/mvm/rya/rdftriplestore/evaluation/RdfCloudTripleStoreSelectivityEvaluationStatistics.java
new file mode 100644
index 0000000..7c88640
--- /dev/null
+++ b/sail/src/main/java/mvm/rya/rdftriplestore/evaluation/RdfCloudTripleStoreSelectivityEvaluationStatistics.java
@@ -0,0 +1,128 @@
+package mvm.rya.rdftriplestore.evaluation;
+
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ * 
+ *   http://www.apache.org/licenses/LICENSE-2.0
+ * 
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied.  See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+
+
+
+import static com.google.common.base.Preconditions.checkNotNull;
+import mvm.rya.api.RdfCloudTripleStoreConfiguration;
+import mvm.rya.api.persist.RdfEvalStatsDAO;
+import mvm.rya.api.persist.joinselect.SelectivityEvalDAO;
+import mvm.rya.rdftriplestore.inference.DoNotExpandSP;
+import mvm.rya.rdftriplestore.utils.FixedStatementPattern;
+
+import org.openrdf.query.algebra.Join;
+import org.openrdf.query.algebra.StatementPattern;
+
+public class RdfCloudTripleStoreSelectivityEvaluationStatistics extends RdfCloudTripleStoreEvaluationStatistics {
+
+  // allows access to join selectivity; extending RdfCloudTripleStoreEvaluationStatistics also allows use of the prospector
+  private SelectivityEvalDAO selectEvalStatsDAO; // TODO redundancy here as RdfCloudTripleStoreEvalStats object contains
+                                                 // RdfEvalStatsDAO object
+
+  protected double filterCard;
+  RdfCloudTripleStoreConfiguration config; // TODO redundancy here as RdfCloudTripleStoreEvalStats object contains conf as well
+
+  public RdfCloudTripleStoreSelectivityEvaluationStatistics(RdfCloudTripleStoreConfiguration conf,
+      RdfEvalStatsDAO<RdfCloudTripleStoreConfiguration> prospector, SelectivityEvalDAO selectEvalStatsDAO) {
+
+    super(conf, prospector);
+    checkNotNull(selectEvalStatsDAO);
+
+    try {
+      this.selectEvalStatsDAO = selectEvalStatsDAO;
+      this.config = conf; // TODO fix this!
+    } catch (Exception e) {
+      throw new RuntimeException(e);
+    }
+  }
+
+  @Override
+  protected CardinalityCalculator createCardinalityCalculator() {
+    try {
+      return new SelectivityCardinalityCalculator(this);
+    } catch (Exception e) {
+      throw new RuntimeException(e);
+    }
+  }
+
+  public class SelectivityCardinalityCalculator extends RdfCloudTripleStoreCardinalityCalculator {
+
+    public SelectivityCardinalityCalculator(RdfCloudTripleStoreSelectivityEvaluationStatistics statistics) {
+      super(statistics);
+    }
+
+    @Override
+    public void meet(Join node) {
+      node.getLeftArg().visit(this);
+      double leftArgCost = cardinality;
+      // System.out.println("Left cardinality is " + cardinality);
+      node.getRightArg().visit(this);
+
+      if (node.getLeftArg() instanceof FixedStatementPattern && node.getRightArg() instanceof DoNotExpandSP) {
+        return;
+      }
+
+      try {
+        double selectivity = selectEvalStatsDAO.getJoinSelect(config, node.getLeftArg(), node.getRightArg());
+//        System.out.println("CardCalc: left cost of " + node.getLeftArg() + " is " + leftArgCost + " right cost of "
+//        + node.getRightArg() + " is " + cardinality);
+//         System.out.println("Right cardinality is " + cardinality);
+        cardinality += leftArgCost + leftArgCost * cardinality * selectivity;
+//        System.out.println("CardCalc: Cardinality is " + cardinality);
+//        System.out.println("CardCalc: Selectivity is " + selectivity);
+        // System.out.println("Join cardinality is " + cardinality);
+
+      } catch (Exception e) {
+        e.printStackTrace();
+      }
+
+    }
+
+    @Override
+    public double getCardinality(StatementPattern node) {
+
+      cardinality = super.getCardinality(node);
+
+      // if the pattern contains only variables or is an unconstrained rdf:type,
+      // assign a cardinality equal to the table size
+      if (cardinality == Double.MAX_VALUE || cardinality == Double.MAX_VALUE - 1) {
+        try {
+          cardinality = selectEvalStatsDAO.getTableSize(config);
+        } catch (Exception e) {
+          e.printStackTrace();
+        }
+      }
+
+      return cardinality;
+    }
+
+  }
+
+}

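The join estimate in meet(Join) composes as the tree is walked: the running
estimate acts as the left cost, and each right argument contributes its own
cardinality plus the expected join output. A sketch with hypothetical numbers,
applying the recurrence over a left-deep join of three patterns:

    // Sketch only: the recurrence from SelectivityCardinalityCalculator.meet(Join),
    // evaluated with made-up cardinalities and a constant selectivity.
    public class LeftDeepEstimateSketch {
        public static void main(String[] args) {
            double[] card = {50, 200, 1000};   // assumed pattern cardinalities, in join order
            double sel = 0.01;                 // assumed selectivity for every join
            double acc = card[0];
            for (int i = 1; i < card.length; i++) {
                // cardinality += leftArgCost + leftArgCost * cardinality * selectivity
                acc = card[i] + acc + acc * card[i] * sel;
            }
            System.out.println(acc);           // 4850.0 for join(join(p0, p1), p2)
        }
    }
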
http://git-wip-us.apache.org/repos/asf/incubator-rya/blob/80faf06d/sail/src/main/java/mvm/rya/rdftriplestore/evaluation/ReorderJoinVisitor.java
----------------------------------------------------------------------
diff --git a/sail/src/main/java/mvm/rya/rdftriplestore/evaluation/ReorderJoinVisitor.java b/sail/src/main/java/mvm/rya/rdftriplestore/evaluation/ReorderJoinVisitor.java
new file mode 100644
index 0000000..f825921
--- /dev/null
+++ b/sail/src/main/java/mvm/rya/rdftriplestore/evaluation/ReorderJoinVisitor.java
@@ -0,0 +1,70 @@
+package mvm.rya.rdftriplestore.evaluation;
+
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ * 
+ *   http://www.apache.org/licenses/LICENSE-2.0
+ * 
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied.  See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+
+
+
+import org.openrdf.query.algebra.Join;
+import org.openrdf.query.algebra.StatementPattern;
+import org.openrdf.query.algebra.TupleExpr;
+import org.openrdf.query.algebra.helpers.QueryModelVisitorBase;
+
+/**
+ * Class ReorderJoinVisitor
+ * Date: Apr 11, 2011
+ * Time: 10:16:15 PM
+ */
+public class ReorderJoinVisitor extends QueryModelVisitorBase<Exception> {
+    @Override
+    public void meet(Join node) throws Exception {
+        super.meet(node);
+        
+        TupleExpr leftArg = node.getLeftArg();
+        TupleExpr rightArg = node.getRightArg();
+
+        /**
+         * if join(stmtPattern1, join(stmtPattern2, anything)
+         * Should be
+         * join(join(stmtPattern1, stmtPattern2), anything)
+         */
+        if (leftArg instanceof StatementPattern && rightArg instanceof Join) {
+            Join rightJoin = (Join) rightArg;
+            //find the stmtPattern in the right side
+            TupleExpr right_LeftArg = rightJoin.getLeftArg();
+            TupleExpr right_rightArg = rightJoin.getRightArg();
+            if (right_LeftArg instanceof StatementPattern || right_rightArg instanceof StatementPattern) {
+                StatementPattern stmtPattern = null;
+                TupleExpr anything = null;
+                if (right_LeftArg instanceof StatementPattern) {
+                    stmtPattern = (StatementPattern) right_LeftArg;
+                    anything = right_rightArg;
+                } else {
+                    stmtPattern = (StatementPattern) right_rightArg;
+                    anything = right_LeftArg;
+                }
+
+                Join inner = new Join(leftArg, stmtPattern);
+                Join outer = new Join(inner, anything);
+                node.replaceWith(outer);
+            }
+        }
+
+    }
+}

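The rewrite above is easiest to see as two concrete trees. A sketch that builds,
with the same openrdf algebra classes, the right-deep shape the visitor matches
and the left-deep shape it produces (variable names are made up; the visitor is
not run here because replaceWith requires the matched node to have a parent):

    import org.openrdf.query.algebra.Join;
    import org.openrdf.query.algebra.StatementPattern;
    import org.openrdf.query.algebra.TupleExpr;
    import org.openrdf.query.algebra.Var;

    // Sketch only: before/after shapes of ReorderJoinVisitor's rewrite.
    public class ReorderShapeSketch {
        public static void main(String[] args) {
            StatementPattern sp1 = new StatementPattern(new Var("s"), new Var("p1"), new Var("o1"));
            StatementPattern sp2 = new StatementPattern(new Var("s"), new Var("p2"), new Var("o2"));
            TupleExpr anything = new StatementPattern(new Var("o2"), new Var("p3"), new Var("o3"));

            // before: join(sp1, join(sp2, anything)) -- right-deep
            Join before = new Join(sp1.clone(), new Join(sp2.clone(), anything.clone()));

            // after: join(join(sp1, sp2), anything) -- left-deep, as the visitor produces
            Join after = new Join(new Join(sp1, sp2), anything);

            System.out.println(before);
            System.out.println(after);
        }
    }
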
http://git-wip-us.apache.org/repos/asf/incubator-rya/blob/80faf06d/sail/src/main/java/mvm/rya/rdftriplestore/evaluation/SeparateFilterJoinsVisitor.java
----------------------------------------------------------------------
diff --git a/sail/src/main/java/mvm/rya/rdftriplestore/evaluation/SeparateFilterJoinsVisitor.java b/sail/src/main/java/mvm/rya/rdftriplestore/evaluation/SeparateFilterJoinsVisitor.java
new file mode 100644
index 0000000..002b804
--- /dev/null
+++ b/sail/src/main/java/mvm/rya/rdftriplestore/evaluation/SeparateFilterJoinsVisitor.java
@@ -0,0 +1,55 @@
+package mvm.rya.rdftriplestore.evaluation;
+
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ * 
+ *   http://www.apache.org/licenses/LICENSE-2.0
+ * 
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied.  See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+
+
+
+import org.openrdf.query.algebra.Filter;
+import org.openrdf.query.algebra.Join;
+import org.openrdf.query.algebra.StatementPattern;
+import org.openrdf.query.algebra.TupleExpr;
+import org.openrdf.query.algebra.ValueExpr;
+import org.openrdf.query.algebra.helpers.QueryModelVisitorBase;
+
+/**
+ * TODO: This might be a very bad thing: it may force all conditions to be ANDed and disallow ORs, depending on how the bindings are handled.
+ * Class SeparateFilterJoinsVisitor
+ * Date: Apr 11, 2011
+ * Time: 10:16:15 PM
+ */
+public class SeparateFilterJoinsVisitor extends QueryModelVisitorBase<Exception> {
+    @Override
+    public void meet(Filter node) throws Exception {
+        super.meet(node);
+
+        ValueExpr condition = node.getCondition();
+        TupleExpr arg = node.getArg();
+        if (!(arg instanceof Join)) {
+            return;
+        }
+
+        Join join = (Join) arg;
+        TupleExpr leftArg = join.getLeftArg();
+        TupleExpr rightArg = join.getRightArg();
+
+        if (leftArg instanceof StatementPattern && rightArg instanceof StatementPattern) {
+            Filter left = new Filter(leftArg, condition);
+            Filter right = new Filter(rightArg, condition);
+            node.replaceWith(new Join(left, right));
+        }
+
+    }
+}

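A sketch of the shapes involved, built by hand with the same algebra classes
(the comparison condition and the variable names are made up): a filter over a
join of two statement patterns becomes a join of two identically-conditioned
filters, which is exactly why the TODO above flags OR semantics.

    import org.openrdf.query.algebra.Compare;
    import org.openrdf.query.algebra.Filter;
    import org.openrdf.query.algebra.Join;
    import org.openrdf.query.algebra.StatementPattern;
    import org.openrdf.query.algebra.ValueExpr;
    import org.openrdf.query.algebra.Var;

    // Sketch only: before/after shapes of SeparateFilterJoinsVisitor's rewrite.
    public class SeparateFilterShapeSketch {
        public static void main(String[] args) {
            StatementPattern sp1 = new StatementPattern(new Var("s"), new Var("p1"), new Var("o1"));
            StatementPattern sp2 = new StatementPattern(new Var("s"), new Var("p2"), new Var("o2"));
            ValueExpr cond = new Compare(new Var("o1"), new Var("o2"), Compare.CompareOp.EQ);

            // before: filter(cond, join(sp1, sp2))
            Filter before = new Filter(new Join(sp1.clone(), sp2.clone()), cond.clone());

            // after: join(filter(sp1, cond), filter(sp2, cond)) -- the condition is duplicated
            Join after = new Join(new Filter(sp1, cond.clone()), new Filter(sp2, cond.clone()));

            System.out.println(before);
            System.out.println(after);
        }
    }
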
http://git-wip-us.apache.org/repos/asf/incubator-rya/blob/80faf06d/sail/src/main/java/mvm/rya/rdftriplestore/inference/AbstractInferVisitor.java
----------------------------------------------------------------------
diff --git a/sail/src/main/java/mvm/rya/rdftriplestore/inference/AbstractInferVisitor.java b/sail/src/main/java/mvm/rya/rdftriplestore/inference/AbstractInferVisitor.java
new file mode 100644
index 0000000..f6d3ff0
--- /dev/null
+++ b/sail/src/main/java/mvm/rya/rdftriplestore/inference/AbstractInferVisitor.java
@@ -0,0 +1,108 @@
+package mvm.rya.rdftriplestore.inference;
+
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ * 
+ *   http://www.apache.org/licenses/LICENSE-2.0
+ * 
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied.  See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+
+
+
+import mvm.rya.api.RdfCloudTripleStoreConfiguration;
+import mvm.rya.rdftriplestore.utils.FixedStatementPattern;
+import mvm.rya.rdftriplestore.utils.TransitivePropertySP;
+import org.openrdf.query.algebra.Join;
+import org.openrdf.query.algebra.StatementPattern;
+import org.openrdf.query.algebra.Union;
+import org.openrdf.query.algebra.Var;
+import org.openrdf.query.algebra.helpers.QueryModelVisitorBase;
+
+import static com.google.common.base.Preconditions.checkNotNull;
+
+/**
+ * Class AbstractInferVisitor
+ * Date: Mar 14, 2012
+ * Time: 5:33:01 PM
+ */
+public class AbstractInferVisitor extends QueryModelVisitorBase<Exception> {
+
+    static Var EXPANDED = new Var("infer-expanded");
+
+    boolean include = true;
+
+    RdfCloudTripleStoreConfiguration conf;
+    InferenceEngine inferenceEngine;
+
+    public AbstractInferVisitor(RdfCloudTripleStoreConfiguration conf, InferenceEngine inferenceEngine) {
+        checkNotNull(conf, "Configuration cannot be null");
+        checkNotNull(inferenceEngine, "Inference Engine cannot be null");
+        this.conf = conf;
+        this.inferenceEngine = inferenceEngine;
+    }
+
+    @Override
+    public void meet(StatementPattern sp) throws Exception {
+        if (!include) {
+            return;
+        }
+        if (sp instanceof FixedStatementPattern || sp instanceof TransitivePropertySP || sp instanceof DoNotExpandSP) {
+            return;   //already inferred somewhere else
+        }
+        final Var predVar = sp.getPredicateVar();
+        // we do not let timeRange predicates be inferred
+        if (predVar == null || predVar.getValue() == null
+//                || RdfCloudTripleStoreUtils.getTtlValueConverter(conf, (URI) predVar.getValue()) != null
+                ) {
+            return;
+        }
+        meetSP(sp);
+    }
+
+    protected void meetSP(StatementPattern sp) throws Exception {
+
+    }
+
+    @Override
+    public void meet(Union node) throws Exception {
+//        if (!(node instanceof InferUnion))
+        super.meet(node);
+    }
+
+    @Override
+    public void meet(Join node) throws Exception {
+        if (!(node instanceof InferJoin)) {
+            super.meet(node);
+        }
+    }
+
+    public RdfCloudTripleStoreConfiguration getConf() {
+        return conf;
+    }
+
+    public void setConf(RdfCloudTripleStoreConfiguration conf) {
+        this.conf = conf;
+    }
+
+    public InferenceEngine getInferenceEngine() {
+        return inferenceEngine;
+    }
+
+    public void setInferenceEngine(InferenceEngine inferenceEngine) {
+        this.inferenceEngine = inferenceEngine;
+    }
+}

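Concrete inference visitors plug in by overriding meetSP, which only receives
statement patterns that survive the guards in meet(StatementPattern) above. A
minimal, do-nothing subclass to show the extension point (the class name is
hypothetical; a real visitor would consult the inference engine and expand or
replace the pattern):

    import mvm.rya.api.RdfCloudTripleStoreConfiguration;
    import mvm.rya.rdftriplestore.inference.AbstractInferVisitor;
    import mvm.rya.rdftriplestore.inference.InferenceEngine;

    import org.openrdf.query.algebra.StatementPattern;

    // Sketch only: shows where a subclass hooks into the expansion machinery.
    public class LoggingInferVisitor extends AbstractInferVisitor {

        public LoggingInferVisitor(RdfCloudTripleStoreConfiguration conf, InferenceEngine engine) {
            super(conf, engine);
        }

        @Override
        protected void meetSP(StatementPattern sp) throws Exception {
            // a concrete visitor would expand sp here, e.g. via sp.replaceWith(...)
            System.out.println("candidate for inference expansion: " + sp);
        }
    }
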
http://git-wip-us.apache.org/repos/asf/incubator-rya/blob/80faf06d/sail/src/main/java/mvm/rya/rdftriplestore/inference/DoNotExpandSP.java
----------------------------------------------------------------------
diff --git a/sail/src/main/java/mvm/rya/rdftriplestore/inference/DoNotExpandSP.java b/sail/src/main/java/mvm/rya/rdftriplestore/inference/DoNotExpandSP.java
new file mode 100644
index 0000000..aed7ed0
--- /dev/null
+++ b/sail/src/main/java/mvm/rya/rdftriplestore/inference/DoNotExpandSP.java
@@ -0,0 +1,51 @@
+package mvm.rya.rdftriplestore.inference;
+
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ * 
+ *   http://www.apache.org/licenses/LICENSE-2.0
+ * 
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied.  See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+
+
+
+import org.openrdf.query.algebra.StatementPattern;
+import org.openrdf.query.algebra.Var;
+
+/**
+ * Class DoNotExpandSP
+ * Date: Mar 15, 2012
+ * Time: 9:39:45 AM
+ */
+public class DoNotExpandSP extends StatementPattern {
+    public DoNotExpandSP() {
+    }
+
+    public DoNotExpandSP(Var subject, Var predicate, Var object) {
+        super(subject, predicate, object);
+    }
+
+    public DoNotExpandSP(Scope scope, Var subject, Var predicate, Var object) {
+        super(scope, subject, predicate, object);
+    }
+
+    public DoNotExpandSP(Var subject, Var predicate, Var object, Var context) {
+        super(subject, predicate, object, context);
+    }
+
+    public DoNotExpandSP(Scope scope, Var subjVar, Var predVar, Var objVar, Var conVar) {
+        super(scope, subjVar, predVar, objVar, conVar);
+    }
+}

http://git-wip-us.apache.org/repos/asf/incubator-rya/blob/80faf06d/sail/src/main/java/mvm/rya/rdftriplestore/inference/InferConstants.java
----------------------------------------------------------------------
diff --git a/sail/src/main/java/mvm/rya/rdftriplestore/inference/InferConstants.java b/sail/src/main/java/mvm/rya/rdftriplestore/inference/InferConstants.java
new file mode 100644
index 0000000..aa0b99b
--- /dev/null
+++ b/sail/src/main/java/mvm/rya/rdftriplestore/inference/InferConstants.java
@@ -0,0 +1,34 @@
+package mvm.rya.rdftriplestore.inference;
+
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ * 
+ *   http://www.apache.org/licenses/LICENSE-2.0
+ * 
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied.  See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+
+
+
+/**
+ * Interface InferConstants
+ * Date: Apr 16, 2011
+ * Time: 7:30:47 AM
+ */
+public interface InferConstants {
+
+    public static final String INFERRED = "inferred";
+    public static final String TRUE = "true";
+    public static final String FALSE = "false";
+}



[06/56] [abbrv] incubator-rya git commit: RYA-7 POM and License Clean-up for Apache Move

Posted by mi...@apache.org.
http://git-wip-us.apache.org/repos/asf/incubator-rya/blob/80faf06d/sail/src/test/java/mvm/rya/ArbitraryLengthQueryTest.java
----------------------------------------------------------------------
diff --git a/sail/src/test/java/mvm/rya/ArbitraryLengthQueryTest.java b/sail/src/test/java/mvm/rya/ArbitraryLengthQueryTest.java
new file mode 100644
index 0000000..4a5d871
--- /dev/null
+++ b/sail/src/test/java/mvm/rya/ArbitraryLengthQueryTest.java
@@ -0,0 +1,500 @@
+package mvm.rya;
+
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ * 
+ *   http://www.apache.org/licenses/LICENSE-2.0
+ * 
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied.  See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+
+
+
+import java.io.ByteArrayInputStream;
+import java.io.IOException;
+import java.io.InputStream;
+
+import org.apache.accumulo.core.client.Connector;
+import org.apache.accumulo.core.client.Instance;
+import org.apache.accumulo.core.client.mock.MockInstance;
+import org.openrdf.model.Resource;
+import org.openrdf.query.MalformedQueryException;
+import org.openrdf.query.QueryEvaluationException;
+import org.openrdf.query.QueryLanguage;
+import org.openrdf.query.TupleQuery;
+import org.openrdf.query.TupleQueryResultHandlerException;
+import org.openrdf.query.resultio.text.tsv.SPARQLResultsTSVWriter;
+import org.openrdf.repository.Repository;
+import org.openrdf.repository.RepositoryConnection;
+import org.openrdf.repository.RepositoryException;
+import org.openrdf.rio.RDFFormat;
+import org.openrdf.rio.RDFParseException;
+
+import mvm.rya.accumulo.AccumuloRdfConfiguration;
+import mvm.rya.accumulo.AccumuloRyaDAO;
+import mvm.rya.rdftriplestore.RdfCloudTripleStore;
+import mvm.rya.rdftriplestore.RyaSailRepository;
+import mvm.rya.rdftriplestore.inference.InferenceEngine;
+import mvm.rya.rdftriplestore.namespace.NamespaceManager;
+import junit.framework.TestCase;
+
+/**
+ * Provides a test case that illustrates a failure currently being encountered, along with a
+ * working test that demonstrates a successful query.
+ */
+public class ArbitraryLengthQueryTest extends TestCase {
+
+    /**
+     * The repository used for the tests.
+     */
+    private Repository repository;
+
+    @Override
+    public void setUp() throws Exception {
+        super.setUp();
+
+        final RdfCloudTripleStore store = new MockRdfCloudStore();
+
+        final NamespaceManager nm = new NamespaceManager(store.getRyaDAO(), store.getConf());
+        store.setNamespaceManager(nm);
+
+        repository = new RyaSailRepository(store);
+        repository.initialize();
+
+        load();
+    }
+
+    @Override
+    public void tearDown() throws Exception {
+        super.tearDown();
+        repository.shutDown();
+    }
+
+    /**
+     * This test works. The expected result is 6 rows ranging from "Model1Class 1" through "Model1Class 6".
+     *
+     * @throws RepositoryException
+     * @throws QueryEvaluationException
+     * @throws TupleQueryResultHandlerException
+     *
+     * @throws MalformedQueryException
+     */
+    public void testWithoutSubquery() throws RepositoryException, QueryEvaluationException, TupleQueryResultHandlerException, MalformedQueryException {
+        final String query = "SELECT ?i ?i_label ?i_class ?i_v1"
+                + "WHERE {"
+                + "?i <http://www.w3.org/2000/01/rdf-schema#label> ?i_label ."
+                + "?i a ?i_class ."
+                + "?i_class <http://www.w3.org/2000/01/rdf-schema#subClassOf>* <http://dragon-research.com/cham/model/model1#Model1Class> ."
+                + "OPTIONAL { ?i <http://dragon-research.com/cham/model/model1#name> ?i_v1 } ."
+                + "}"
+                + "ORDER BY ?i_label";
+
+        final RepositoryConnection conn = repository.getConnection();
+        final TupleQuery tupleQuery = conn.prepareTupleQuery(QueryLanguage.SPARQL, query);
+        RdfCloudTripleStoreConnectionTest.CountTupleHandler countTupleHandler = new RdfCloudTripleStoreConnectionTest.CountTupleHandler();
+        tupleQuery.evaluate(countTupleHandler);
+        assertEquals(6, countTupleHandler.getCount());
+        conn.close();
+    }
+
+    /**
+     * This test fails. The expected result is 6 rows ranging from "Model1Class 1 Event" to "Model1Class 6 Event". The
+     * current result is a RejectedExecutionException.
+     *
+     * @throws RepositoryException
+     * @throws QueryEvaluationException
+     * @throws TupleQueryResultHandlerException
+     *
+     * @throws MalformedQueryException
+     */
+    public void testWithSubquery() throws RepositoryException, QueryEvaluationException, TupleQueryResultHandlerException, MalformedQueryException {
+        final String query = "SELECT ?i ?i_label ?i_class ?i_v1 ?i_v2 ?i_v2_label ?i_v2_class ?i_v2_v1"
+                + "WHERE {"
+                + "?i <http://www.w3.org/2000/01/rdf-schema#label> ?i_label ."
+                + "?i a ?i_class ."
+                + "?i_class <http://www.w3.org/2000/01/rdf-schema#subClassOf>* <http://dragon-research.com/cham/model/model1#Event> ."
+                + "OPTIONAL { ?i <http://dragon-research.com/cham/model/model1#name> ?i_v1 } ."
+                + "?i <http://dragon-research.com/cham/model/model1#hasTemporalEntity> ?i_v2 ."
+                + "{"
+                + "SELECT ?i_v2 ?i_v2_label ?i_v2_class ?i_v2_v1"
+                + "WHERE {"
+                + "?i_v2 <http://www.w3.org/2000/01/rdf-schema#label> ?i_v2_label ."
+                + "?i_v2 a ?i_v2_class ."
+                + "?i_v2_class <http://www.w3.org/2000/01/rdf-schema#subClassOf>* <http://dragon-research.com/cham/model/model1#TemporalInstant> ."
+                + "OPTIONAL { ?i_v2 <http://dragon-research.com/cham/model/model1#dateTime> ?i_v2_v1 } ."
+                + "}"
+                + "ORDER BY ?i_v2_label"
+                + "}"
+                + "}"
+                + "ORDER BY ?i_label";
+
+        final RepositoryConnection conn = repository.getConnection();
+        final TupleQuery tupleQuery = conn.prepareTupleQuery(QueryLanguage.SPARQL, query);
+        RdfCloudTripleStoreConnectionTest.CountTupleHandler countTupleHandler = new RdfCloudTripleStoreConnectionTest.CountTupleHandler();
+        tupleQuery.evaluate(countTupleHandler);
+        assertEquals(6, countTupleHandler.getCount());
+        conn.close();
+    }
+
+    /**
+     * Load the t-box and a-box turtle from strings defined within this class.
+     *
+     * @throws RepositoryException
+     * @throws RDFParseException
+     * @throws IOException
+     */
+    private void load() throws RepositoryException, RDFParseException, IOException {
+        final RepositoryConnection conn = repository.getConnection();
+
+        // T-Box
+        String ttlString = MODEL_TTL;
+        InputStream stringInput = new ByteArrayInputStream(ttlString.getBytes());
+        conn.add(stringInput, "http://dragon-research.com/cham/model/model1", RDFFormat.TURTLE, new Resource[]{});
+
+        // A-Box
+        ttlString = BUCKET_TTL;
+        stringInput = new ByteArrayInputStream(ttlString.getBytes());
+        conn.add(stringInput, "http://dragon-research.com/cham/bucket/bucket1", RDFFormat.TURTLE, new Resource[]{});
+
+        conn.commit();
+        conn.close();
+    }
+
+    /**
+     * Mock RDF cloud store for one shot testing.
+     */
+    public class MockRdfCloudStore extends RdfCloudTripleStore {
+        public MockRdfCloudStore() {
+            super();
+            final Instance instance = new MockInstance();
+            try {
+                final AccumuloRdfConfiguration conf = new AccumuloRdfConfiguration();
+                setConf(conf);
+
+                final Connector connector = instance.getConnector("", "");
+                final AccumuloRyaDAO cdao = new AccumuloRyaDAO();
+                cdao.setConf(conf);
+                cdao.setConnector(connector);
+                setRyaDAO(cdao);
+                inferenceEngine = new InferenceEngine();
+                inferenceEngine.setRyaDAO(cdao);
+                inferenceEngine.setRefreshGraphSchedule(5000); //every 5 sec
+                inferenceEngine.setConf(conf);
+                setInferenceEngine(inferenceEngine);
+            } catch (final Exception e) {
+                e.printStackTrace();
+            }
+        }
+    }
+
+    /**
+     * The ontology t-box in turtle.
+     */
+    private static String MODEL_TTL = "@prefix :        <http://dragon-research.com/cham/model/model1#> ."
+            + "@prefix cham:    <http://dragon-research.com/cham/schema#> ."
+            + "@prefix dc:      <http://purl.org/dc/elements/1.1/> ."
+            + "@prefix owl:     <http://www.w3.org/2002/07/owl#> ."
+            + "@prefix qudt:    <http://data.nasa.gov/qudt/owl/qudt#> ."
+            + "@prefix rdf:     <http://www.w3.org/1999/02/22-rdf-syntax-ns#> ."
+            + "@prefix rdfs:    <http://www.w3.org/2000/01/rdf-schema#> ."
+            + "@prefix unit:    <http://data.nasa.gov/qudt/owl/unit#> ."
+            + "@prefix xml:     <http://www.w3.org/XML/1998/namespace> ."
+            + "@prefix xsd:     <http://www.w3.org/2001/XMLSchema#> ."
+            + ""
+            + "<http://dragon-research.com/cham/model/model1>"
+            + "      rdf:type owl:Ontology ;"
+            + "      rdfs:label \"Model1 Ontology\"^^xsd:string ;"
+            + "      :versionInfo \"0.1\"^^xsd:string ;"
+            + "      dc:title \"Model1 Ontology\"^^xsd:string ."
+            + ""
+            + ":ModelClassD"
+            + "      rdf:type owl:Class ;"
+            + "      rdfs:label \"ModelClassD\"^^xsd:string ;"
+            + "      rdfs:subClassOf"
+            + "              [ rdf:type owl:Restriction ;"
+            + "                owl:maxQualifiedCardinality"
+            + "                        \"1\"^^xsd:nonNegativeInteger ;"
+            + "                owl:onDataRange xsd:string ;"
+            + "                owl:onProperty :name"
+            + "              ] ;"
+            + "      rdfs:subClassOf"
+            + "              [ rdf:type owl:Restriction ;"
+            + "                owl:allValuesFrom :Model1ClassAssoc ;"
+            + "                owl:onProperty :hasModel1ClassAssoc"
+            + "              ] ."
+            + ""
+            + ":ModelClassC"
+            + "      rdf:type owl:Class ;"
+            + "      rdfs:label \"ModelClassC\"^^xsd:string ;"
+            + "      rdfs:subClassOf :ModelClassD ."
+            + ""
+            + ":Modle1ClassB"
+            + "      rdf:type owl:Class ;"
+            + "      rdfs:label \"Modle1ClassB\"^^xsd:string ;"
+            + "      rdfs:subClassOf :ModelClassC ."
+            + ""
+            + ":Model1ClassA"
+            + "      rdf:type owl:Class ;"
+            + "      rdfs:label \"Model1ClassA\"^^xsd:string ;"
+            + "      rdfs:subClassOf :Modle1ClassB ."
+            + ""
+            + ":Model1Class"
+            + "      rdf:type owl:Class ;"
+            + "      rdfs:label \"Model1Class\"^^xsd:string ;"
+            + "      rdfs:subClassOf :Model1ClassA ;"
+            + "      rdfs:subClassOf"
+            + "              [ rdf:type owl:Restriction ;"
+            + "                owl:maxQualifiedCardinality"
+            + "                        \"1\"^^xsd:nonNegativeInteger ;"
+            + "                owl:onDataRange xsd:string ;"
+            + "                owl:onProperty :model1ClassId"
+            + "              ] ."
+            + ""
+            + ":Model1Event"
+            + "      rdf:type owl:Class ;"
+            + "      rdfs:label \"Model1Event\"^^xsd:string ;"
+            + "      rdfs:subClassOf :Event ;"
+            + "      rdfs:subClassOf"
+            + "              [ rdf:type owl:Restriction ;"
+            + "                owl:allValuesFrom :Model1ClassA ;"
+            + "                owl:onProperty :hasModel1ClassA"
+            + "              ] ."
+            + ""
+            + ":Model1ClassAssoc"
+            + "      rdf:type owl:Class ;"
+            + "      rdfs:label \"Model1ClassAssoc\"^^xsd:string ;"
+            + "      rdfs:subClassOf owl:Thing ;"
+            + "      rdfs:subClassOf"
+            + "              [ rdf:type owl:Restriction ;"
+            + "                owl:maxQualifiedCardinality"
+            + "                        \"1\"^^xsd:nonNegativeInteger ;"
+            + "                owl:onDataRange xsd:string ;"
+            + "                owl:onProperty :name"
+            + "              ] ;"
+            + "      rdfs:subClassOf"
+            + "              [ rdf:type owl:Restriction ;"
+            + "                owl:maxQualifiedCardinality"
+            + "                        \"1\"^^xsd:nonNegativeInteger ;"
+            + "                owl:onClass :ModelClassD ;"
+            + "                owl:onProperty :hasEntity"
+            + "              ] ;"
+            + "      rdfs:subClassOf"
+            + "              [ rdf:type owl:Restriction ;"
+            + "                owl:allValuesFrom :ModelClassD ;"
+            + "                owl:onProperty :hasEntity"
+            + "              ] ."
+            + ""
+            + ":TemporalEntity"
+            + "      rdf:type owl:Class ;"
+            + "      rdfs:label \"TemporalEntity\"^^xsd:string ;"
+            + "      rdfs:subClassOf owl:Thing ."
+            + ""
+            + ":TemporalInstant"
+            + "      rdf:type owl:Class ;"
+            + "      rdfs:label \"TemporalInstant\"^^xsd:string ;"
+            + "      rdfs:subClassOf :TemporalEntity ;"
+            + "      rdfs:subClassOf"
+            + "              [ rdf:type owl:Restriction ;"
+            + "                owl:maxQualifiedCardinality"
+            + "                        \"1\"^^xsd:nonNegativeInteger ;"
+            + "                owl:onDataRange xsd:dateTime ;"
+            + "                owl:onProperty :dateTime"
+            + "              ] ."
+            + ""
+            + ":model1ClassId"
+            + "      rdf:type owl:DatatypeProperty ;"
+            + "      rdfs:domain :Model1Class ;"
+            + "      rdfs:label \"model1ClassId\"^^xsd:string ;"
+            + "      rdfs:range xsd:string ."
+            + ""
+            + ":hasModel1ClassAssoc"
+            + "      rdf:type owl:ObjectProperty ;"
+            + "      rdfs:domain :ModelClassD ;"
+            + "      rdfs:label \"hasModel1ClassAssoc\"^^xsd:string ;"
+            + "      rdfs:range :Model1ClassAssoc ."
+            + ""
+            + ":name"
+            + "      rdf:type owl:DatatypeProperty ;"
+            + "      rdfs:domain :Model1ClassAssoc , :ModelClassD ;"
+            + "      rdfs:label \"name\"^^xsd:string ;"
+            + "      rdfs:range xsd:string ."
+            + ""
+            + ":hasTemporalEntity"
+            + "      rdf:type owl:ObjectProperty ;"
+            + "      rdfs:domain :ThreatAnalysis , :Event , :TrackingData , :Threat , :Vulnerability ;"
+            + "      rdfs:label \"hasTemporalEntity\"^^xsd:string ;"
+            + "      rdfs:range :TemporalEntity ."
+            + ""
+            + ":hasEntity"
+            + "      rdf:type owl:ObjectProperty ;"
+            + "      rdfs:domain :Model1ClassAssoc ;"
+            + "      rdfs:label \"hasEntity\"^^xsd:string ;"
+            + "      rdfs:range :ModelClassD ."
+            + ""
+            + ":dateTime"
+            + "      rdf:type owl:DatatypeProperty ;"
+            + "      rdfs:domain :TemporalInstant ;"
+            + "      rdfs:label \"dateTime\"^^xsd:string ;"
+            + "      rdfs:range xsd:dateTime ."
+            + ""
+            + ":Event"
+            + "      rdf:type owl:Class ;"
+            + "      rdfs:label \"Event\"^^xsd:string ;"
+            + "      rdfs:subClassOf :ModelClassD ;"
+            + "      rdfs:subClassOf"
+            + "              [ rdf:type owl:Restriction ;"
+            + "                owl:allValuesFrom :TemporalEntity ;"
+            + "                owl:onProperty :hasTemporalEntity"
+            + "              ] ;"
+            + "      rdfs:subClassOf"
+            + "              [ rdf:type owl:Restriction ;"
+            + "                owl:maxQualifiedCardinality"
+            + "                        \"1\"^^xsd:nonNegativeInteger ;"
+            + "                owl:onClass :TemporalEntity ;"
+            + "                owl:onProperty :hasTemporalEntity"
+            + "              ] ."
+            + ""
+            + ":hasModel1ClassA"
+            + "      rdf:type owl:ObjectProperty ;"
+            + "      rdfs:domain :Model1Event ;"
+            + "      rdfs:label \"hasModel1ClassA\"^^xsd:string ;"
+            + "      rdfs:range :Model1ClassA ."
+            + ""
+            + "rdfs:label"
+            + "      rdf:type owl:AnnotationProperty ."
+            + ""
+            + "xsd:date"
+            + "      rdf:type rdfs:Datatype ."
+            + ""
+            + "xsd:time"
+            + "      rdf:type rdfs:Datatype .";
+
+    /**
+     * The ontology a-box in turtle.
+     */
+    private static String BUCKET_TTL = "@prefix :        <http://dragon-research.com/cham/bucket/bucket1#> ."
+            + "@prefix rdfs:    <http://www.w3.org/2000/01/rdf-schema#> ."
+            + "@prefix owl:     <http://www.w3.org/2002/07/owl#> ."
+            + "@prefix xsd:     <http://www.w3.org/2001/XMLSchema#> ."
+            + "@prefix rdf:     <http://www.w3.org/1999/02/22-rdf-syntax-ns#> ."
+            + "@prefix model1:   <http://dragon-research.com/cham/model/model1#> ."
+            + ""
+            + ":i1   a       model1:Model1Class ;"
+            + "      rdfs:label \"Model1Class 1\"^^xsd:string ;"
+            + "      model1:name \"Model1Class 1\"^^xsd:string ;"
+            + "      model1:hasModel1ClassAssoc :i1-assoc ;"
+            + "      model1:model1ClassId \"ID01\"^^xsd:string ."
+            + "      "
+            + ":i1-assoc a model1:Model1ClassAssoc ;"
+            + "      rdfs:label \"Model1Class 1 Assoc\"^^xsd:string ;"
+            + "      model1:hasEntity :i1-event ."
+            + "      "
+            + ":i1-event a model1:Model1Event ;"
+            + "      rdfs:label \"Model1Class 1 Event\"^^xsd:string ;"
+            + "      model1:hasTemporalEntity :i1-time ."
+            + ""
+            + ":i1-time a model1:TemporalInstant ;"
+            + "      rdfs:label \"Model1Class 1 Time\"^^xsd:string ;"
+            + "      model1:dateTime \"1994-02-07T21:47:01.000Z\"^^xsd:dateTime ."
+            + "      "
+            + ":i2   a       model1:Model1Class ;"
+            + "      rdfs:label \"Model1Class 2\"^^xsd:string ;"
+            + "      model1:name \"Model1Class 2\"^^xsd:string ;"
+            + "      model1:hasModel1ClassAssoc :i2-assoc ;"
+            + "      model1:model1ClassId \"ID02\"^^xsd:string ."
+            + ""
+            + ":i2-assoc a model1:Model1ClassAssoc ;"
+            + "      rdfs:label \"Model1Class 2 Assoc\"^^xsd:string ;"
+            + "      model1:hasEntity :i2-event ."
+            + "      "
+            + ":i2-event a model1:Model1Event ;"
+            + "      rdfs:label \"Model1Class 2 Event\"^^xsd:string ;"
+            + "      model1:hasTemporalEntity :i2-time ."
+            + ""
+            + ":i2-time a model1:TemporalInstant ;"
+            + "      rdfs:label \"Model1Class 2 Time\"^^xsd:string ;"
+            + "      model1:dateTime \"1995-11-06T05:15:01.000Z\"^^xsd:dateTime ."
+            + "      "
+            + ":i3   a       model1:Model1Class ;"
+            + "      rdfs:label \"Model1Class 3\"^^xsd:string ;"
+            + "      model1:name \"Model1Class 3\"^^xsd:string ;"
+            + "      model1:hasModel1ClassAssoc :i3-assoc ;"
+            + "      model1:model1ClassId \"ID03\"^^xsd:string ."
+            + ""
+            + ":i3-assoc a model1:Model1ClassAssoc ;"
+            + "      rdfs:label \"Model1Class 3 Assoc\"^^xsd:string ;"
+            + "      model1:hasEntity :i3-event ."
+            + "      "
+            + ":i3-event a model1:Model1Event ;"
+            + "      rdfs:label \"Model1Class 3 Event\"^^xsd:string ;"
+            + "      model1:hasTemporalEntity :i3-time ."
+            + ""
+            + ":i3-time a model1:TemporalInstant ;"
+            + "      rdfs:label \"Model1Class 3 Time\"^^xsd:string ;"
+            + "      model1:dateTime \"1999-04-30T16:30:00.000Z\"^^xsd:dateTime ."
+            + "      "
+            + ":i4   a       model1:Model1Class ;"
+            + "      rdfs:label \"Model1Class 4\"^^xsd:string ;"
+            + "      model1:name \"Model1Class 4\"^^xsd:string ;"
+            + "      model1:hasModel1ClassAssoc :i4-assoc ;"
+            + "      model1:model1ClassId \"ID04\"^^xsd:string ."
+            + ""
+            + ":i4-assoc a model1:Model1ClassAssoc ;"
+            + "      rdfs:label \"Model1Class 4 Assoc\"^^xsd:string ;"
+            + "      model1:hasEntity :i4-event ."
+            + "      "
+            + ":i4-event a model1:Model1Event ;"
+            + "      rdfs:label \"Model1Class 4 Event\"^^xsd:string ;"
+            + "      model1:hasTemporalEntity :i4-time ."
+            + ""
+            + ":i4-time a model1:TemporalInstant ;"
+            + "      rdfs:label \"Model1Class 4 Time\"^^xsd:string ;"
+            + "      model1:dateTime \"2001-02-27T21:20:00.000Z\"^^xsd:dateTime ."
+            + "      "
+            + ":i5   a       model1:Model1Class ;"
+            + "      rdfs:label \"Model1Class 5\"^^xsd:string ;"
+            + "      model1:name \"Model1Class 5\"^^xsd:string ;"
+            + "      model1:hasModel1ClassAssoc :i5-assoc ;"
+            + "      model1:model1ClassId \"ID05\"^^xsd:string ."
+            + ""
+            + ":i5-assoc a model1:Model1ClassAssoc ;"
+            + "      rdfs:label \"Model1Class 5 Assoc\"^^xsd:string ;"
+            + "      model1:hasEntity :i5-event ."
+            + "      "
+            + ":i5-event a model1:Model1Event ;"
+            + "      rdfs:label \"Model1Class 5 Event\"^^xsd:string ;"
+            + "      model1:hasTemporalEntity :i5-time ."
+            + ""
+            + ":i5-time a model1:TemporalInstant ;"
+            + "      rdfs:label \"Model1Class 5 Time\"^^xsd:string ;"
+            + "      model1:dateTime \"2002-01-16T00:30:00.000Z\"^^xsd:dateTime ."
+            + "      "
+            + ":i6   a       model1:Model1Class ;"
+            + "      rdfs:label \"Model1Class 6\"^^xsd:string ;"
+            + "      model1:name \"Model1Class 6\"^^xsd:string ;"
+            + "      model1:hasModel1ClassAssoc :i6-assoc ;"
+            + "      model1:model1ClassId \"ID06\"^^xsd:string ."
+            + ""
+            + ":i6-assoc a model1:Model1ClassAssoc ;"
+            + "      rdfs:label \"Model1Class 6 Assoc\"^^xsd:string ;"
+            + "      model1:hasEntity :i6-event ."
+            + "      "
+            + ":i6-event a model1:Model1Event ;"
+            + "      rdfs:label \"Model1Class 6 Event\"^^xsd:string ;"
+            + "      model1:hasTemporalEntity :i6-time ."
+            + ""
+            + ":i6-time a model1:TemporalInstant ;"
+            + "      rdfs:label \"Model1Class 6 Time\"^^xsd:string ;"
+            + "      model1:dateTime \"2003-04-08T13:43:00.000Z\"^^xsd:dateTime .";
+}

http://git-wip-us.apache.org/repos/asf/incubator-rya/blob/80faf06d/sail/src/test/java/mvm/rya/HashJoinTest.java
----------------------------------------------------------------------
diff --git a/sail/src/test/java/mvm/rya/HashJoinTest.java b/sail/src/test/java/mvm/rya/HashJoinTest.java
new file mode 100644
index 0000000..bbcdbcd
--- /dev/null
+++ b/sail/src/test/java/mvm/rya/HashJoinTest.java
@@ -0,0 +1,374 @@
+package mvm.rya;
+
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ * 
+ *   http://www.apache.org/licenses/LICENSE-2.0
+ * 
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied.  See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+
+
+
+import info.aduna.iteration.CloseableIteration;
+import mvm.rya.accumulo.AccumuloRdfConfiguration;
+import mvm.rya.accumulo.AccumuloRyaDAO;
+import mvm.rya.api.RdfCloudTripleStoreUtils;
+import mvm.rya.api.domain.RyaStatement;
+import mvm.rya.api.domain.RyaType;
+import mvm.rya.api.domain.RyaURI;
+import mvm.rya.api.persist.RyaDAOException;
+import mvm.rya.api.persist.query.join.HashJoin;
+import org.apache.accumulo.core.client.Connector;
+import org.apache.accumulo.core.client.mock.MockInstance;
+import org.junit.After;
+import org.junit.Before;
+import org.junit.Test;
+
+import java.util.HashSet;
+import java.util.Set;
+
+import static junit.framework.Assert.assertEquals;
+import static junit.framework.Assert.assertFalse;
+import static junit.framework.Assert.assertTrue;
+
+/**
+ * Tests {@link HashJoin} against an {@link AccumuloRyaDAO} backed by a
+ * mock Accumulo instance.
+ *
+ * Date: 7/24/12
+ * Time: 5:51 PM
+ */
+public class HashJoinTest {
+    private AccumuloRyaDAO dao;
+    static String litdupsNS = "urn:test:litdups#";
+    private Connector connector;
+    private AccumuloRdfConfiguration conf = new AccumuloRdfConfiguration();
+
+    @Before
+    public void init() throws Exception {
+        dao = new AccumuloRyaDAO();
+        connector = new MockInstance().getConnector("", "");
+        dao.setConnector(connector);
+        dao.setConf(conf);
+        dao.init();
+    }
+
+    @After
+    public void destroy() throws Exception {
+        dao.destroy();
+    }
+
+    @Test
+    public void testSimpleJoin() throws Exception {
+        //add data
+        RyaURI pred = new RyaURI(litdupsNS, "pred1");
+        RyaType one = new RyaType("1");
+        RyaType two = new RyaType("2");
+        RyaURI subj1 = new RyaURI(litdupsNS, "subj1");
+        RyaURI subj2 = new RyaURI(litdupsNS, "subj2");
+        RyaURI subj3 = new RyaURI(litdupsNS, "subj3");
+        RyaURI subj4 = new RyaURI(litdupsNS, "subj4");
+
+        dao.add(new RyaStatement(subj1, pred, one));
+        dao.add(new RyaStatement(subj1, pred, two));
+        dao.add(new RyaStatement(subj2, pred, one));
+        dao.add(new RyaStatement(subj2, pred, two));
+        dao.add(new RyaStatement(subj3, pred, one));
+        dao.add(new RyaStatement(subj3, pred, two));
+        dao.add(new RyaStatement(subj4, pred, one));
+        dao.add(new RyaStatement(subj4, pred, two));
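+        // Every subject has both "1" and "2" for pred1, so the join should return all four subjects.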
+        
+
+        //1 join
+        HashJoin hjoin = new HashJoin(dao.getQueryEngine());
+        CloseableIteration<RyaURI, RyaDAOException> join = hjoin.join(null,
+                new RdfCloudTripleStoreUtils.CustomEntry<RyaURI, RyaType>(pred, one),
+                new RdfCloudTripleStoreUtils.CustomEntry<RyaURI, RyaType>(pred, two));
+
+        Set<RyaURI> uris = new HashSet<RyaURI>();
+        while (join.hasNext()) {
+            uris.add(join.next());
+        }
+        assertTrue(uris.contains(subj1));
+        assertTrue(uris.contains(subj2));
+        assertTrue(uris.contains(subj3));
+        assertTrue(uris.contains(subj4));
+        join.close();
+    }
+
+    @Test
+    public void testSimpleJoinMultiWay() throws Exception {
+        //add data
+        RyaURI pred = new RyaURI(litdupsNS, "pred1");
+        RyaType one = new RyaType("1");
+        RyaType two = new RyaType("2");
+        RyaType three = new RyaType("3");
+        RyaType four = new RyaType("4");
+        RyaURI subj1 = new RyaURI(litdupsNS, "subj1");
+        RyaURI subj2 = new RyaURI(litdupsNS, "subj2");
+        RyaURI subj3 = new RyaURI(litdupsNS, "subj3");
+        RyaURI subj4 = new RyaURI(litdupsNS, "subj4");
+
+        dao.add(new RyaStatement(subj1, pred, one));
+        dao.add(new RyaStatement(subj1, pred, two));
+        dao.add(new RyaStatement(subj1, pred, three));
+        dao.add(new RyaStatement(subj1, pred, four));
+        dao.add(new RyaStatement(subj2, pred, one));
+        dao.add(new RyaStatement(subj2, pred, two));
+        dao.add(new RyaStatement(subj2, pred, three));
+        dao.add(new RyaStatement(subj2, pred, four));
+        dao.add(new RyaStatement(subj3, pred, one));
+        dao.add(new RyaStatement(subj3, pred, two));
+        dao.add(new RyaStatement(subj3, pred, three));
+        dao.add(new RyaStatement(subj3, pred, four));
+        dao.add(new RyaStatement(subj4, pred, one));
+        dao.add(new RyaStatement(subj4, pred, two));
+        dao.add(new RyaStatement(subj4, pred, three));
+        dao.add(new RyaStatement(subj4, pred, four));
+        
+
+        //1 join
+        HashJoin hjoin = new HashJoin(dao.getQueryEngine());
+        CloseableIteration<RyaURI, RyaDAOException> join = hjoin.join(null,
+                new RdfCloudTripleStoreUtils.CustomEntry<RyaURI, RyaType>(pred, one),
+                new RdfCloudTripleStoreUtils.CustomEntry<RyaURI, RyaType>(pred, two),
+                new RdfCloudTripleStoreUtils.CustomEntry<RyaURI, RyaType>(pred, three),
+                new RdfCloudTripleStoreUtils.CustomEntry<RyaURI, RyaType>(pred, four)
+        );
+
+        Set<RyaURI> uris = new HashSet<RyaURI>();
+        while (join.hasNext()) {
+            uris.add(join.next());
+        }
+        assertTrue(uris.contains(subj1));
+        assertTrue(uris.contains(subj2));
+        assertTrue(uris.contains(subj3));
+        assertTrue(uris.contains(subj4));
+        join.close();
+    }
+
+    @Test
+    public void testHashJoinMultiWay() throws Exception {
+        //add data
+        RyaURI pred = new RyaURI(litdupsNS, "pred1");
+        RyaType zero = new RyaType("0");
+        RyaType one = new RyaType("1");
+        RyaType two = new RyaType("2");
+        RyaType three = new RyaType("3");
+        RyaType four = new RyaType("4");
+        RyaURI subj1 = new RyaURI(litdupsNS, "subj1");
+        RyaURI subj2 = new RyaURI(litdupsNS, "subj2");
+        RyaURI subj3 = new RyaURI(litdupsNS, "subj3");
+        RyaURI subj4 = new RyaURI(litdupsNS, "subj4");
+
+        dao.add(new RyaStatement(subj1, pred, one));
+        dao.add(new RyaStatement(subj1, pred, two));
+        dao.add(new RyaStatement(subj1, pred, three));
+        dao.add(new RyaStatement(subj1, pred, four));
+        dao.add(new RyaStatement(subj2, pred, zero));
+        dao.add(new RyaStatement(subj2, pred, one));
+        dao.add(new RyaStatement(subj2, pred, two));
+        dao.add(new RyaStatement(subj2, pred, three));
+        dao.add(new RyaStatement(subj2, pred, four));
+        dao.add(new RyaStatement(subj3, pred, one));
+        dao.add(new RyaStatement(subj3, pred, two));
+        dao.add(new RyaStatement(subj3, pred, four));
+        dao.add(new RyaStatement(subj4, pred, one));
+        dao.add(new RyaStatement(subj4, pred, two));
+        dao.add(new RyaStatement(subj4, pred, three));
+        dao.add(new RyaStatement(subj4, pred, four));
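+        // subj3 lacks "3", so the four-way hash join should drop it and keep subj1, subj2, and subj4.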
+        
+
+        //1 join
+        HashJoin hjoin = new HashJoin(dao.getQueryEngine());
+        CloseableIteration<RyaURI, RyaDAOException> join = hjoin.join(null,
+                new RdfCloudTripleStoreUtils.CustomEntry<RyaURI, RyaType>(pred, one),
+                new RdfCloudTripleStoreUtils.CustomEntry<RyaURI, RyaType>(pred, two),
+                new RdfCloudTripleStoreUtils.CustomEntry<RyaURI, RyaType>(pred, three),
+                new RdfCloudTripleStoreUtils.CustomEntry<RyaURI, RyaType>(pred, four)
+        );
+
+        Set<RyaURI> uris = new HashSet<RyaURI>();
+        while (join.hasNext()) {
+            uris.add(join.next());
+        }
+        assertTrue(uris.contains(subj1));
+        assertTrue(uris.contains(subj2));
+        assertTrue(uris.contains(subj4));
+        join.close();
+    }
+
+    @Test
+    public void testHashJoinMultiWayNone() throws Exception {
+        //add data
+        RyaURI pred = new RyaURI(litdupsNS, "pred1");
+        RyaType zero = new RyaType("0");
+        RyaType one = new RyaType("1");
+        RyaType two = new RyaType("2");
+        RyaType three = new RyaType("3");
+        RyaType four = new RyaType("4");
+        RyaURI subj1 = new RyaURI(litdupsNS, "subj1");
+        RyaURI subj2 = new RyaURI(litdupsNS, "subj2");
+        RyaURI subj3 = new RyaURI(litdupsNS, "subj3");
+        RyaURI subj4 = new RyaURI(litdupsNS, "subj4");
+
+        dao.add(new RyaStatement(subj1, pred, one));
+        dao.add(new RyaStatement(subj1, pred, three));
+        dao.add(new RyaStatement(subj1, pred, four));
+        dao.add(new RyaStatement(subj2, pred, zero));
+        dao.add(new RyaStatement(subj2, pred, one));
+        dao.add(new RyaStatement(subj2, pred, four));
+        dao.add(new RyaStatement(subj3, pred, two));
+        dao.add(new RyaStatement(subj3, pred, four));
+        dao.add(new RyaStatement(subj4, pred, one));
+        dao.add(new RyaStatement(subj4, pred, two));
+        dao.add(new RyaStatement(subj4, pred, three));
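+        // No subject holds all four values, so the join must come back empty.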
+        
+
+        //1 join
+        HashJoin hjoin = new HashJoin(dao.getQueryEngine());
+        CloseableIteration<RyaURI, RyaDAOException> join = hjoin.join(null,
+                new RdfCloudTripleStoreUtils.CustomEntry<RyaURI, RyaType>(pred, one),
+                new RdfCloudTripleStoreUtils.CustomEntry<RyaURI, RyaType>(pred, two),
+                new RdfCloudTripleStoreUtils.CustomEntry<RyaURI, RyaType>(pred, three),
+                new RdfCloudTripleStoreUtils.CustomEntry<RyaURI, RyaType>(pred, four)
+        );
+
+        assertFalse(join.hasNext());
+        join.close();
+    }
+
+    @Test
+    public void testHashJoinMultiWayNone2() throws Exception {
+        //add data
+        RyaURI pred = new RyaURI(litdupsNS, "pred1");
+        RyaType zero = new RyaType("0");
+        RyaType one = new RyaType("1");
+        RyaType two = new RyaType("2");
+        RyaType three = new RyaType("3");
+        RyaType four = new RyaType("4");
+        RyaURI subj1 = new RyaURI(litdupsNS, "subj1");
+        RyaURI subj2 = new RyaURI(litdupsNS, "subj2");
+        RyaURI subj3 = new RyaURI(litdupsNS, "subj3");
+        RyaURI subj4 = new RyaURI(litdupsNS, "subj4");
+
+        dao.add(new RyaStatement(subj1, pred, one));
+        dao.add(new RyaStatement(subj1, pred, four));
+        dao.add(new RyaStatement(subj2, pred, zero));
+        dao.add(new RyaStatement(subj2, pred, one));
+        dao.add(new RyaStatement(subj2, pred, four));
+        dao.add(new RyaStatement(subj3, pred, two));
+        dao.add(new RyaStatement(subj3, pred, four));
+        dao.add(new RyaStatement(subj4, pred, one));
+        dao.add(new RyaStatement(subj4, pred, two));
+        
+
+        //1 join
+        HashJoin hjoin = new HashJoin(dao.getQueryEngine());
+        CloseableIteration<RyaURI, RyaDAOException> join = hjoin.join(null,
+                new RdfCloudTripleStoreUtils.CustomEntry<RyaURI, RyaType>(pred, one),
+                new RdfCloudTripleStoreUtils.CustomEntry<RyaURI, RyaType>(pred, two),
+                new RdfCloudTripleStoreUtils.CustomEntry<RyaURI, RyaType>(pred, three),
+                new RdfCloudTripleStoreUtils.CustomEntry<RyaURI, RyaType>(pred, four)
+        );
+
+        assertFalse(join.hasNext());
+        join.close();
+    }
+
+    @Test
+    public void testSimpleHashJoinPredicateOnly() throws Exception {
+        //add data
+        RyaURI pred1 = new RyaURI(litdupsNS, "pred1");
+        RyaURI pred2 = new RyaURI(litdupsNS, "pred2");
+        RyaType one = new RyaType("1");
+        RyaURI subj1 = new RyaURI(litdupsNS, "subj1");
+        RyaURI subj2 = new RyaURI(litdupsNS, "subj2");
+        RyaURI subj3 = new RyaURI(litdupsNS, "subj3");
+        RyaURI subj4 = new RyaURI(litdupsNS, "subj4");
+
+        dao.add(new RyaStatement(subj1, pred1, one));
+        dao.add(new RyaStatement(subj1, pred2, one));
+        dao.add(new RyaStatement(subj2, pred1, one));
+        dao.add(new RyaStatement(subj2, pred2, one));
+        dao.add(new RyaStatement(subj3, pred1, one));
+        dao.add(new RyaStatement(subj3, pred2, one));
+        dao.add(new RyaStatement(subj4, pred1, one));
+        dao.add(new RyaStatement(subj4, pred2, one));
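+        // Each subject carries a matching object under both predicates, giving four joined statements.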
+        
+
+        //1 join
+        HashJoin ijoin = new HashJoin(dao.getQueryEngine());
+        CloseableIteration<RyaStatement, RyaDAOException> join = ijoin.join(null, pred1, pred2);
+
+        int count = 0;
+        while (join.hasNext()) {
+            RyaStatement next = join.next();
+            count++;
+        }
+        assertEquals(4, count);
+        join.close();
+    }
+
+    @Test
+    public void testSimpleHashJoinPredicateOnly2() throws Exception {
+        //add data
+        RyaURI pred1 = new RyaURI(litdupsNS, "pred1");
+        RyaURI pred2 = new RyaURI(litdupsNS, "pred2");
+        RyaType one = new RyaType("1");
+        RyaType two = new RyaType("2");
+        RyaType three = new RyaType("3");
+        RyaURI subj1 = new RyaURI(litdupsNS, "subj1");
+        RyaURI subj2 = new RyaURI(litdupsNS, "subj2");
+        RyaURI subj3 = new RyaURI(litdupsNS, "subj3");
+        RyaURI subj4 = new RyaURI(litdupsNS, "subj4");
+
+        dao.add(new RyaStatement(subj1, pred1, one));
+        dao.add(new RyaStatement(subj1, pred1, two));
+        dao.add(new RyaStatement(subj1, pred1, three));
+        dao.add(new RyaStatement(subj1, pred2, one));
+        dao.add(new RyaStatement(subj1, pred2, two));
+        dao.add(new RyaStatement(subj1, pred2, three));
+        dao.add(new RyaStatement(subj2, pred1, one));
+        dao.add(new RyaStatement(subj2, pred1, two));
+        dao.add(new RyaStatement(subj2, pred1, three));
+        dao.add(new RyaStatement(subj2, pred2, one));
+        dao.add(new RyaStatement(subj2, pred2, two));
+        dao.add(new RyaStatement(subj2, pred2, three));
+        dao.add(new RyaStatement(subj3, pred1, one));
+        dao.add(new RyaStatement(subj3, pred1, two));
+        dao.add(new RyaStatement(subj3, pred1, three));
+        dao.add(new RyaStatement(subj3, pred2, one));
+        dao.add(new RyaStatement(subj3, pred2, two));
+        dao.add(new RyaStatement(subj3, pred2, three));
+        dao.add(new RyaStatement(subj4, pred1, one));
+        dao.add(new RyaStatement(subj4, pred1, two));
+        dao.add(new RyaStatement(subj4, pred1, three));
+        dao.add(new RyaStatement(subj4, pred2, one));
+        dao.add(new RyaStatement(subj4, pred2, two));
+        dao.add(new RyaStatement(subj4, pred2, three));
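+        // Four subjects with three shared objects apiece: the predicate join should yield 12 statements.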
+        
+
+        //1 join
+        HashJoin ijoin = new HashJoin(dao.getQueryEngine());
+        CloseableIteration<RyaStatement, RyaDAOException> join = ijoin.join(null, pred1, pred2);
+
+        int count = 0;
+        while (join.hasNext()) {
+            RyaStatement next = join.next();
+            count++;
+        }
+        assertEquals(12, count);
+        join.close();
+    }
+}

http://git-wip-us.apache.org/repos/asf/incubator-rya/blob/80faf06d/sail/src/test/java/mvm/rya/IterativeJoinTest.java
----------------------------------------------------------------------
diff --git a/sail/src/test/java/mvm/rya/IterativeJoinTest.java b/sail/src/test/java/mvm/rya/IterativeJoinTest.java
new file mode 100644
index 0000000..610b8eb
--- /dev/null
+++ b/sail/src/test/java/mvm/rya/IterativeJoinTest.java
@@ -0,0 +1,365 @@
+package mvm.rya;
+
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ * 
+ *   http://www.apache.org/licenses/LICENSE-2.0
+ * 
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied.  See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+
+
+
+import info.aduna.iteration.CloseableIteration;
+import mvm.rya.accumulo.AccumuloRdfConfiguration;
+import mvm.rya.accumulo.AccumuloRyaDAO;
+import mvm.rya.api.RdfCloudTripleStoreUtils;
+import mvm.rya.api.domain.RyaStatement;
+import mvm.rya.api.domain.RyaType;
+import mvm.rya.api.domain.RyaURI;
+import mvm.rya.api.persist.RyaDAOException;
+import mvm.rya.api.persist.query.join.IterativeJoin;
+import org.apache.accumulo.core.client.Connector;
+import org.apache.accumulo.core.client.mock.MockInstance;
+import org.junit.After;
+import org.junit.Before;
+import org.junit.Test;
+
+import java.util.HashSet;
+import java.util.Set;
+
+import static junit.framework.Assert.*;
+
+/**
+ * Tests {@link IterativeJoin} against an {@link AccumuloRyaDAO} backed by a
+ * mock Accumulo instance.
+ *
+ * Date: 7/24/12
+ * Time: 5:51 PM
+ */
+public class IterativeJoinTest {
+    private AccumuloRyaDAO dao;
+    static String litdupsNS = "urn:test:litdups#";
+    private Connector connector;
+    private AccumuloRdfConfiguration conf = new AccumuloRdfConfiguration();
+
+    @Before
+    public void init() throws Exception {
+        dao = new AccumuloRyaDAO();
+        connector = new MockInstance().getConnector("", "");
+        dao.setConnector(connector);
+        dao.setConf(conf);
+        dao.init();
+    }
+
+    @After
+    public void destroy() throws Exception {
+        dao.destroy();
+    }
+
+    @Test
+    public void testSimpleIterativeJoin() throws Exception {
+        //add data
+        RyaURI pred = new RyaURI(litdupsNS, "pred1");
+        RyaType one = new RyaType("1");
+        RyaType two = new RyaType("2");
+        RyaURI subj1 = new RyaURI(litdupsNS, "subj1");
+        RyaURI subj2 = new RyaURI(litdupsNS, "subj2");
+        RyaURI subj3 = new RyaURI(litdupsNS, "subj3");
+        RyaURI subj4 = new RyaURI(litdupsNS, "subj4");
+
+        dao.add(new RyaStatement(subj1, pred, one));
+        dao.add(new RyaStatement(subj1, pred, two));
+        dao.add(new RyaStatement(subj2, pred, one));
+        dao.add(new RyaStatement(subj2, pred, two));
+        dao.add(new RyaStatement(subj3, pred, one));
+        dao.add(new RyaStatement(subj3, pred, two));
+        dao.add(new RyaStatement(subj4, pred, one));
+        dao.add(new RyaStatement(subj4, pred, two));
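+        // All four subjects have both "1" and "2", so each should survive the iterative join.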
+
+        //1 join
+        IterativeJoin iterJoin = new IterativeJoin(dao.getQueryEngine());
+        CloseableIteration<RyaURI, RyaDAOException> join = iterJoin.join(null, new RdfCloudTripleStoreUtils.CustomEntry<RyaURI, RyaType>(pred, one),
+                new RdfCloudTripleStoreUtils.CustomEntry<RyaURI, RyaType>(pred, two));
+
+        Set<RyaURI> uris = new HashSet<RyaURI>();
+        while (join.hasNext()) {
+            uris.add(join.next());
+        }
+        assertTrue(uris.contains(subj1));
+        assertTrue(uris.contains(subj2));
+        assertTrue(uris.contains(subj3));
+        assertTrue(uris.contains(subj4));
+        join.close();
+    }
+
+    @Test
+    public void testSimpleIterativeJoinMultiWay() throws Exception {
+        //add data
+        RyaURI pred = new RyaURI(litdupsNS, "pred1");
+        RyaType one = new RyaType("1");
+        RyaType two = new RyaType("2");
+        RyaType three = new RyaType("3");
+        RyaType four = new RyaType("4");
+        RyaURI subj1 = new RyaURI(litdupsNS, "subj1");
+        RyaURI subj2 = new RyaURI(litdupsNS, "subj2");
+        RyaURI subj3 = new RyaURI(litdupsNS, "subj3");
+        RyaURI subj4 = new RyaURI(litdupsNS, "subj4");
+
+        dao.add(new RyaStatement(subj1, pred, one));
+        dao.add(new RyaStatement(subj1, pred, two));
+        dao.add(new RyaStatement(subj1, pred, three));
+        dao.add(new RyaStatement(subj1, pred, four));
+        dao.add(new RyaStatement(subj2, pred, one));
+        dao.add(new RyaStatement(subj2, pred, two));
+        dao.add(new RyaStatement(subj2, pred, three));
+        dao.add(new RyaStatement(subj2, pred, four));
+        dao.add(new RyaStatement(subj3, pred, one));
+        dao.add(new RyaStatement(subj3, pred, two));
+        dao.add(new RyaStatement(subj3, pred, three));
+        dao.add(new RyaStatement(subj3, pred, four));
+        dao.add(new RyaStatement(subj4, pred, one));
+        dao.add(new RyaStatement(subj4, pred, two));
+        dao.add(new RyaStatement(subj4, pred, three));
+        dao.add(new RyaStatement(subj4, pred, four));
+
+        //1 join
+        IterativeJoin iterativeJoin = new IterativeJoin(dao.getQueryEngine());
+        CloseableIteration<RyaURI, RyaDAOException> join = iterativeJoin.join(null,
+                new RdfCloudTripleStoreUtils.CustomEntry<RyaURI, RyaType>(pred, one),
+                new RdfCloudTripleStoreUtils.CustomEntry<RyaURI, RyaType>(pred, two),
+                new RdfCloudTripleStoreUtils.CustomEntry<RyaURI, RyaType>(pred, three),
+                new RdfCloudTripleStoreUtils.CustomEntry<RyaURI, RyaType>(pred, four)
+        );
+
+        Set<RyaURI> uris = new HashSet<RyaURI>();
+        while (join.hasNext()) {
+            uris.add(join.next());
+        }
+        assertTrue(uris.contains(subj1));
+        assertTrue(uris.contains(subj2));
+        assertTrue(uris.contains(subj3));
+        assertTrue(uris.contains(subj4));
+        join.close();
+    }
+
+    @Test
+    public void testIterativeJoinMultiWay() throws Exception {
+        //add data
+        RyaURI pred = new RyaURI(litdupsNS, "pred1");
+        RyaType zero = new RyaType("0");
+        RyaType one = new RyaType("1");
+        RyaType two = new RyaType("2");
+        RyaType three = new RyaType("3");
+        RyaType four = new RyaType("4");
+        RyaURI subj1 = new RyaURI(litdupsNS, "subj1");
+        RyaURI subj2 = new RyaURI(litdupsNS, "subj2");
+        RyaURI subj3 = new RyaURI(litdupsNS, "subj3");
+        RyaURI subj4 = new RyaURI(litdupsNS, "subj4");
+
+        dao.add(new RyaStatement(subj1, pred, one));
+        dao.add(new RyaStatement(subj1, pred, two));
+        dao.add(new RyaStatement(subj1, pred, three));
+        dao.add(new RyaStatement(subj1, pred, four));
+        dao.add(new RyaStatement(subj2, pred, zero));
+        dao.add(new RyaStatement(subj2, pred, one));
+        dao.add(new RyaStatement(subj2, pred, two));
+        dao.add(new RyaStatement(subj2, pred, three));
+        dao.add(new RyaStatement(subj2, pred, four));
+        dao.add(new RyaStatement(subj3, pred, one));
+        dao.add(new RyaStatement(subj3, pred, two));
+        dao.add(new RyaStatement(subj3, pred, four));
+        dao.add(new RyaStatement(subj4, pred, one));
+        dao.add(new RyaStatement(subj4, pred, two));
+        dao.add(new RyaStatement(subj4, pred, three));
+        dao.add(new RyaStatement(subj4, pred, four));
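+        // subj3 is missing "3", so only subj1, subj2, and subj4 should appear in the result.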
+
+        //1 join
+        IterativeJoin iterativeJoin = new IterativeJoin(dao.getQueryEngine());
+        CloseableIteration<RyaURI, RyaDAOException> join = iterativeJoin.join(null,
+                new RdfCloudTripleStoreUtils.CustomEntry<RyaURI, RyaType>(pred, one),
+                new RdfCloudTripleStoreUtils.CustomEntry<RyaURI, RyaType>(pred, two),
+                new RdfCloudTripleStoreUtils.CustomEntry<RyaURI, RyaType>(pred, three),
+                new RdfCloudTripleStoreUtils.CustomEntry<RyaURI, RyaType>(pred, four)
+        );
+
+        Set<RyaURI> uris = new HashSet<RyaURI>();
+        while (join.hasNext()) {
+            uris.add(join.next());
+        }
+        assertTrue(uris.contains(subj1));
+        assertTrue(uris.contains(subj2));
+        assertTrue(uris.contains(subj4));
+        join.close();
+    }
+
+    @Test
+    public void testIterativeJoinMultiWayNone() throws Exception {
+        //add data
+        RyaURI pred = new RyaURI(litdupsNS, "pred1");
+        RyaType zero = new RyaType("0");
+        RyaType one = new RyaType("1");
+        RyaType two = new RyaType("2");
+        RyaType three = new RyaType("3");
+        RyaType four = new RyaType("4");
+        RyaURI subj1 = new RyaURI(litdupsNS, "subj1");
+        RyaURI subj2 = new RyaURI(litdupsNS, "subj2");
+        RyaURI subj3 = new RyaURI(litdupsNS, "subj3");
+        RyaURI subj4 = new RyaURI(litdupsNS, "subj4");
+
+        dao.add(new RyaStatement(subj1, pred, one));
+        dao.add(new RyaStatement(subj1, pred, three));
+        dao.add(new RyaStatement(subj1, pred, four));
+        dao.add(new RyaStatement(subj2, pred, zero));
+        dao.add(new RyaStatement(subj2, pred, one));
+        dao.add(new RyaStatement(subj2, pred, four));
+        dao.add(new RyaStatement(subj3, pred, two));
+        dao.add(new RyaStatement(subj3, pred, four));
+        dao.add(new RyaStatement(subj4, pred, one));
+        dao.add(new RyaStatement(subj4, pred, two));
+        dao.add(new RyaStatement(subj4, pred, three));
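+        // No subject has all four values, so the iterative join should produce nothing.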
+
+        //1 join
+        IterativeJoin iterativeJoin = new IterativeJoin(dao.getQueryEngine());
+        CloseableIteration<RyaURI, RyaDAOException> join = iterativeJoin.join(null,
+                new RdfCloudTripleStoreUtils.CustomEntry<RyaURI, RyaType>(pred, one),
+                new RdfCloudTripleStoreUtils.CustomEntry<RyaURI, RyaType>(pred, two),
+                new RdfCloudTripleStoreUtils.CustomEntry<RyaURI, RyaType>(pred, three),
+                new RdfCloudTripleStoreUtils.CustomEntry<RyaURI, RyaType>(pred, four)
+        );
+
+        assertFalse(join.hasNext());
+        join.close();
+    }
+
+    @Test
+    public void testIterativeJoinMultiWayNone2() throws Exception {
+        //add data
+        RyaURI pred = new RyaURI(litdupsNS, "pred1");
+        RyaType zero = new RyaType("0");
+        RyaType one = new RyaType("1");
+        RyaType two = new RyaType("2");
+        RyaType three = new RyaType("3");
+        RyaType four = new RyaType("4");
+        RyaURI subj1 = new RyaURI(litdupsNS, "subj1");
+        RyaURI subj2 = new RyaURI(litdupsNS, "subj2");
+        RyaURI subj3 = new RyaURI(litdupsNS, "subj3");
+        RyaURI subj4 = new RyaURI(litdupsNS, "subj4");
+
+        dao.add(new RyaStatement(subj1, pred, one));
+        dao.add(new RyaStatement(subj1, pred, four));
+        dao.add(new RyaStatement(subj2, pred, zero));
+        dao.add(new RyaStatement(subj2, pred, one));
+        dao.add(new RyaStatement(subj2, pred, four));
+        dao.add(new RyaStatement(subj3, pred, two));
+        dao.add(new RyaStatement(subj3, pred, four));
+        dao.add(new RyaStatement(subj4, pred, one));
+        dao.add(new RyaStatement(subj4, pred, two));
+
+        //1 join
+        IterativeJoin iterativeJoin = new IterativeJoin(dao.getQueryEngine());
+        CloseableIteration<RyaURI, RyaDAOException> join = iterativeJoin.join(null, new RdfCloudTripleStoreUtils.CustomEntry<RyaURI, RyaType>(pred, one),
+                new RdfCloudTripleStoreUtils.CustomEntry<RyaURI, RyaType>(pred, two),
+                new RdfCloudTripleStoreUtils.CustomEntry<RyaURI, RyaType>(pred, three),
+                new RdfCloudTripleStoreUtils.CustomEntry<RyaURI, RyaType>(pred, four)
+        );
+
+        assertFalse(join.hasNext());
+        join.close();
+    }
+
+    @Test
+    public void testSimpleIterativeJoinPredicateOnly() throws Exception {
+        //add data
+        RyaURI pred1 = new RyaURI(litdupsNS, "pred1");
+        RyaURI pred2 = new RyaURI(litdupsNS, "pred2");
+        RyaType one = new RyaType("1");
+        RyaURI subj1 = new RyaURI(litdupsNS, "subj1");
+        RyaURI subj2 = new RyaURI(litdupsNS, "subj2");
+        RyaURI subj3 = new RyaURI(litdupsNS, "subj3");
+        RyaURI subj4 = new RyaURI(litdupsNS, "subj4");
+
+        dao.add(new RyaStatement(subj1, pred1, one));
+        dao.add(new RyaStatement(subj1, pred2, one));
+        dao.add(new RyaStatement(subj2, pred1, one));
+        dao.add(new RyaStatement(subj2, pred2, one));
+        dao.add(new RyaStatement(subj3, pred1, one));
+        dao.add(new RyaStatement(subj3, pred2, one));
+        dao.add(new RyaStatement(subj4, pred1, one));
+        dao.add(new RyaStatement(subj4, pred2, one));
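+        // One matching object per subject across both predicates: expect four joined statements.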
+        
+
+        //1 join
+        IterativeJoin ijoin = new IterativeJoin(dao.getQueryEngine());
+        CloseableIteration<RyaStatement, RyaDAOException> join = ijoin.join(null, pred1, pred2);
+
+        int count = 0;
+        while (join.hasNext()) {
+            RyaStatement next = join.next();
+            count++;
+        }
+        assertEquals(4, count);
+        join.close();
+    }
+
+    @Test
+    public void testSimpleIterativeJoinPredicateOnly2() throws Exception {
+        //add data
+        RyaURI pred1 = new RyaURI(litdupsNS, "pred1");
+        RyaURI pred2 = new RyaURI(litdupsNS, "pred2");
+        RyaType one = new RyaType("1");
+        RyaType two = new RyaType("2");
+        RyaType three = new RyaType("3");
+        RyaURI subj1 = new RyaURI(litdupsNS, "subj1");
+        RyaURI subj2 = new RyaURI(litdupsNS, "subj2");
+        RyaURI subj3 = new RyaURI(litdupsNS, "subj3");
+        RyaURI subj4 = new RyaURI(litdupsNS, "subj4");
+
+        dao.add(new RyaStatement(subj1, pred1, one));
+        dao.add(new RyaStatement(subj1, pred1, two));
+        dao.add(new RyaStatement(subj1, pred1, three));
+        dao.add(new RyaStatement(subj1, pred2, one));
+        dao.add(new RyaStatement(subj1, pred2, two));
+        dao.add(new RyaStatement(subj1, pred2, three));
+        dao.add(new RyaStatement(subj2, pred1, one));
+        dao.add(new RyaStatement(subj2, pred1, two));
+        dao.add(new RyaStatement(subj2, pred1, three));
+        dao.add(new RyaStatement(subj2, pred2, one));
+        dao.add(new RyaStatement(subj2, pred2, two));
+        dao.add(new RyaStatement(subj2, pred2, three));
+        dao.add(new RyaStatement(subj3, pred1, one));
+        dao.add(new RyaStatement(subj3, pred1, two));
+        dao.add(new RyaStatement(subj3, pred1, three));
+        dao.add(new RyaStatement(subj3, pred2, one));
+        dao.add(new RyaStatement(subj3, pred2, two));
+        dao.add(new RyaStatement(subj3, pred2, three));
+        dao.add(new RyaStatement(subj4, pred1, one));
+        dao.add(new RyaStatement(subj4, pred1, two));
+        dao.add(new RyaStatement(subj4, pred1, three));
+        dao.add(new RyaStatement(subj4, pred2, one));
+        dao.add(new RyaStatement(subj4, pred2, two));
+        dao.add(new RyaStatement(subj4, pred2, three));
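+        // 4 subjects x 3 shared objects = 12 statements expected from the predicate join.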
+        
+
+        //1 join
+        IterativeJoin ijoin = new IterativeJoin(dao.getQueryEngine());
+        CloseableIteration<RyaStatement, RyaDAOException> join = ijoin.join(null, pred1, pred2);
+
+        int count = 0;
+        while (join.hasNext()) {
+            RyaStatement next = join.next();
+            count++;
+        }
+        assertEquals(12, count);
+        join.close();
+    }
+}

http://git-wip-us.apache.org/repos/asf/incubator-rya/blob/80faf06d/sail/src/test/java/mvm/rya/MergeJoinTest.java
----------------------------------------------------------------------
diff --git a/sail/src/test/java/mvm/rya/MergeJoinTest.java b/sail/src/test/java/mvm/rya/MergeJoinTest.java
new file mode 100644
index 0000000..e4f07c4
--- /dev/null
+++ b/sail/src/test/java/mvm/rya/MergeJoinTest.java
@@ -0,0 +1,370 @@
+package mvm.rya;
+
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ * 
+ *   http://www.apache.org/licenses/LICENSE-2.0
+ * 
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied.  See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+
+
+
+import info.aduna.iteration.CloseableIteration;
+import mvm.rya.accumulo.AccumuloRdfConfiguration;
+import mvm.rya.accumulo.AccumuloRyaDAO;
+import mvm.rya.api.domain.RyaStatement;
+import mvm.rya.api.domain.RyaType;
+import mvm.rya.api.domain.RyaURI;
+import mvm.rya.api.persist.RyaDAOException;
+import mvm.rya.api.persist.query.join.MergeJoin;
+import org.apache.accumulo.core.client.Connector;
+import org.apache.accumulo.core.client.mock.MockInstance;
+import org.junit.After;
+import org.junit.Before;
+import org.junit.Test;
+
+import java.util.HashSet;
+import java.util.Set;
+
+import static junit.framework.Assert.*;
+import static mvm.rya.api.RdfCloudTripleStoreUtils.CustomEntry;
+
+/**
+ * Tests {@link MergeJoin} against an {@link AccumuloRyaDAO} backed by a
+ * mock Accumulo instance.
+ *
+ * TODO: Move to rya.api when we have proper mock ryaDao
+ *
+ * Date: 7/24/12
+ * Time: 9:49 AM
+ */
+public class MergeJoinTest {
+
+    private AccumuloRyaDAO dao;
+    static String litdupsNS = "urn:test:litdups#";
+    private Connector connector;
+    private AccumuloRdfConfiguration conf = new AccumuloRdfConfiguration();
+
+    @Before
+    public void init() throws Exception {
+        dao = new AccumuloRyaDAO();
+        connector = new MockInstance().getConnector("", "");
+        dao.setConnector(connector);
+        dao.setConf(conf);
+        dao.init();
+    }
+
+    @After
+    public void destroy() throws Exception {
+        dao.destroy();
+    }
+
+    @Test
+    public void testSimpleMergeJoin() throws Exception {
+        //add data
+        RyaURI pred = new RyaURI(litdupsNS, "pred1");
+        RyaType one = new RyaType("1");
+        RyaType two = new RyaType("2");
+        RyaURI subj1 = new RyaURI(litdupsNS, "subj1");
+        RyaURI subj2 = new RyaURI(litdupsNS, "subj2");
+        RyaURI subj3 = new RyaURI(litdupsNS, "subj3");
+        RyaURI subj4 = new RyaURI(litdupsNS, "subj4");
+
+        dao.add(new RyaStatement(subj1, pred, one));
+        dao.add(new RyaStatement(subj1, pred, two));
+        dao.add(new RyaStatement(subj2, pred, one));
+        dao.add(new RyaStatement(subj2, pred, two));
+        dao.add(new RyaStatement(subj3, pred, one));
+        dao.add(new RyaStatement(subj3, pred, two));
+        dao.add(new RyaStatement(subj4, pred, one));
+        dao.add(new RyaStatement(subj4, pred, two));
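+        // Every subject has both values for pred1, so the merge join should return all four subjects.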
+        
+
+        //1 join
+        MergeJoin mergeJoin = new MergeJoin(dao.getQueryEngine());
+        CloseableIteration<RyaURI, RyaDAOException> join = mergeJoin.join(null, new CustomEntry<RyaURI, RyaType>(pred, one),
+                new CustomEntry<RyaURI, RyaType>(pred, two));
+
+        Set<RyaURI> uris = new HashSet<RyaURI>();
+        while (join.hasNext()) {
+            uris.add(join.next());
+        }
+        assertTrue(uris.contains(subj1));
+        assertTrue(uris.contains(subj2));
+        assertTrue(uris.contains(subj3));
+        assertTrue(uris.contains(subj4));
+        join.close();
+    }
+
+    @Test
+    public void testSimpleMergeJoinPredicateOnly() throws Exception {
+        //add data
+        RyaURI pred1 = new RyaURI(litdupsNS, "pred1");
+        RyaURI pred2 = new RyaURI(litdupsNS, "pred2");
+        RyaType one = new RyaType("1");
+        RyaURI subj1 = new RyaURI(litdupsNS, "subj1");
+        RyaURI subj2 = new RyaURI(litdupsNS, "subj2");
+        RyaURI subj3 = new RyaURI(litdupsNS, "subj3");
+        RyaURI subj4 = new RyaURI(litdupsNS, "subj4");
+
+        dao.add(new RyaStatement(subj1, pred1, one));
+        dao.add(new RyaStatement(subj1, pred2, one));
+        dao.add(new RyaStatement(subj2, pred1, one));
+        dao.add(new RyaStatement(subj2, pred2, one));
+        dao.add(new RyaStatement(subj3, pred1, one));
+        dao.add(new RyaStatement(subj3, pred2, one));
+        dao.add(new RyaStatement(subj4, pred1, one));
+        dao.add(new RyaStatement(subj4, pred2, one));
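+        // Both predicates share object "1" for each subject, so the join should yield four statements.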
+        
+
+        //1 join
+        MergeJoin mergeJoin = new MergeJoin(dao.getQueryEngine());
+        CloseableIteration<RyaStatement, RyaDAOException> join = mergeJoin.join(null, pred1, pred2);
+
+        int count = 0;
+        while (join.hasNext()) {
+            RyaStatement next = join.next();
+            count++;
+        }
+        assertEquals(4, count);
+        join.close();
+    }
+
+    @Test
+    public void testSimpleMergeJoinPredicateOnly2() throws Exception {
+        //add data
+        RyaURI pred1 = new RyaURI(litdupsNS, "pred1");
+        RyaURI pred2 = new RyaURI(litdupsNS, "pred2");
+        RyaType one = new RyaType("1");
+        RyaType two = new RyaType("2");
+        RyaType three = new RyaType("3");
+        RyaURI subj1 = new RyaURI(litdupsNS, "subj1");
+        RyaURI subj2 = new RyaURI(litdupsNS, "subj2");
+        RyaURI subj3 = new RyaURI(litdupsNS, "subj3");
+        RyaURI subj4 = new RyaURI(litdupsNS, "subj4");
+
+        dao.add(new RyaStatement(subj1, pred1, one));
+        dao.add(new RyaStatement(subj1, pred1, two));
+        dao.add(new RyaStatement(subj1, pred1, three));
+        dao.add(new RyaStatement(subj1, pred2, one));
+        dao.add(new RyaStatement(subj1, pred2, two));
+        dao.add(new RyaStatement(subj1, pred2, three));
+        dao.add(new RyaStatement(subj2, pred1, one));
+        dao.add(new RyaStatement(subj2, pred1, two));
+        dao.add(new RyaStatement(subj2, pred1, three));
+        dao.add(new RyaStatement(subj2, pred2, one));
+        dao.add(new RyaStatement(subj2, pred2, two));
+        dao.add(new RyaStatement(subj2, pred2, three));
+        dao.add(new RyaStatement(subj3, pred1, one));
+        dao.add(new RyaStatement(subj3, pred1, two));
+        dao.add(new RyaStatement(subj3, pred1, three));
+        dao.add(new RyaStatement(subj3, pred2, one));
+        dao.add(new RyaStatement(subj3, pred2, two));
+        dao.add(new RyaStatement(subj3, pred2, three));
+        dao.add(new RyaStatement(subj4, pred1, one));
+        dao.add(new RyaStatement(subj4, pred1, two));
+        dao.add(new RyaStatement(subj4, pred1, three));
+        dao.add(new RyaStatement(subj4, pred2, one));
+        dao.add(new RyaStatement(subj4, pred2, two));
+        dao.add(new RyaStatement(subj4, pred2, three));
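+        // Each of the four subjects shares three objects across both predicates: expect 12 statements.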
+        
+
+        //1 join
+        MergeJoin mergeJoin = new MergeJoin(dao.getQueryEngine());
+        CloseableIteration<RyaStatement, RyaDAOException> join = mergeJoin.join(null, pred1, pred2);
+
+        int count = 0;
+        while (join.hasNext()) {
+            RyaStatement next = join.next();
+            count++;
+        }
+        assertEquals(12, count);
+        join.close();
+    }
+
+    @Test
+    public void testSimpleMergeJoinMultiWay() throws Exception {
+        //add data
+        RyaURI pred = new RyaURI(litdupsNS, "pred1");
+        RyaType one = new RyaType("1");
+        RyaType two = new RyaType("2");
+        RyaType three = new RyaType("3");
+        RyaType four = new RyaType("4");
+        RyaURI subj1 = new RyaURI(litdupsNS, "subj1");
+        RyaURI subj2 = new RyaURI(litdupsNS, "subj2");
+        RyaURI subj3 = new RyaURI(litdupsNS, "subj3");
+        RyaURI subj4 = new RyaURI(litdupsNS, "subj4");
+
+        dao.add(new RyaStatement(subj1, pred, one));
+        dao.add(new RyaStatement(subj1, pred, two));
+        dao.add(new RyaStatement(subj1, pred, three));
+        dao.add(new RyaStatement(subj1, pred, four));
+        dao.add(new RyaStatement(subj2, pred, one));
+        dao.add(new RyaStatement(subj2, pred, two));
+        dao.add(new RyaStatement(subj2, pred, three));
+        dao.add(new RyaStatement(subj2, pred, four));
+        dao.add(new RyaStatement(subj3, pred, one));
+        dao.add(new RyaStatement(subj3, pred, two));
+        dao.add(new RyaStatement(subj3, pred, three));
+        dao.add(new RyaStatement(subj3, pred, four));
+        dao.add(new RyaStatement(subj4, pred, one));
+        dao.add(new RyaStatement(subj4, pred, two));
+        dao.add(new RyaStatement(subj4, pred, three));
+        dao.add(new RyaStatement(subj4, pred, four));
+        
+
+        //1 join
+        MergeJoin mergeJoin = new MergeJoin(dao.getQueryEngine());
+        CloseableIteration<RyaURI, RyaDAOException> join = mergeJoin.join(null, new CustomEntry<RyaURI, RyaType>(pred, one),
+                new CustomEntry<RyaURI, RyaType>(pred, two),
+                new CustomEntry<RyaURI, RyaType>(pred, three),
+                new CustomEntry<RyaURI, RyaType>(pred, four)
+        );
+
+        Set<RyaURI> uris = new HashSet<RyaURI>();
+        while (join.hasNext()) {
+            uris.add(join.next());
+        }
+        assertTrue(uris.contains(subj1));
+        assertTrue(uris.contains(subj2));
+        assertTrue(uris.contains(subj3));
+        assertTrue(uris.contains(subj4));
+        join.close();
+    }
+
+    @Test
+    public void testMergeJoinMultiWay() throws Exception {
+        //add data
+        RyaURI pred = new RyaURI(litdupsNS, "pred1");
+        RyaType zero = new RyaType("0");
+        RyaType one = new RyaType("1");
+        RyaType two = new RyaType("2");
+        RyaType three = new RyaType("3");
+        RyaType four = new RyaType("4");
+        RyaURI subj1 = new RyaURI(litdupsNS, "subj1");
+        RyaURI subj2 = new RyaURI(litdupsNS, "subj2");
+        RyaURI subj3 = new RyaURI(litdupsNS, "subj3");
+        RyaURI subj4 = new RyaURI(litdupsNS, "subj4");
+
+        dao.add(new RyaStatement(subj1, pred, one));
+        dao.add(new RyaStatement(subj1, pred, two));
+        dao.add(new RyaStatement(subj1, pred, three));
+        dao.add(new RyaStatement(subj1, pred, four));
+        dao.add(new RyaStatement(subj2, pred, zero));
+        dao.add(new RyaStatement(subj2, pred, one));
+        dao.add(new RyaStatement(subj2, pred, two));
+        dao.add(new RyaStatement(subj2, pred, three));
+        dao.add(new RyaStatement(subj2, pred, four));
+        dao.add(new RyaStatement(subj3, pred, one));
+        dao.add(new RyaStatement(subj3, pred, two));
+        dao.add(new RyaStatement(subj3, pred, four));
+        dao.add(new RyaStatement(subj4, pred, one));
+        dao.add(new RyaStatement(subj4, pred, two));
+        dao.add(new RyaStatement(subj4, pred, three));
+        dao.add(new RyaStatement(subj4, pred, four));
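+        // subj3 never gets "3", so the merge join should return only subj1, subj2, and subj4.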
+        
+
+        //1 join
+        MergeJoin mergeJoin = new MergeJoin(dao.getQueryEngine());
+        CloseableIteration<RyaURI, RyaDAOException> join = mergeJoin.join(null, new CustomEntry<RyaURI, RyaType>(pred, one),
+                new CustomEntry<RyaURI, RyaType>(pred, two),
+                new CustomEntry<RyaURI, RyaType>(pred, three),
+                new CustomEntry<RyaURI, RyaType>(pred, four)
+        );
+
+        Set<RyaURI> uris = new HashSet<RyaURI>();
+        while (join.hasNext()) {
+            uris.add(join.next());
+        }
+        assertTrue(uris.contains(subj1));
+        assertTrue(uris.contains(subj2));
+        assertTrue(uris.contains(subj4));
+        join.close();
+    }
+
+    @Test
+    public void testMergeJoinMultiWayNone() throws Exception {
+        //add data
+        RyaURI pred = new RyaURI(litdupsNS, "pred1");
+        RyaType zero = new RyaType("0");
+        RyaType one = new RyaType("1");
+        RyaType two = new RyaType("2");
+        RyaType three = new RyaType("3");
+        RyaType four = new RyaType("4");
+        RyaURI subj1 = new RyaURI(litdupsNS, "subj1");
+        RyaURI subj2 = new RyaURI(litdupsNS, "subj2");
+        RyaURI subj3 = new RyaURI(litdupsNS, "subj3");
+        RyaURI subj4 = new RyaURI(litdupsNS, "subj4");
+
+        dao.add(new RyaStatement(subj1, pred, one));
+        dao.add(new RyaStatement(subj1, pred, three));
+        dao.add(new RyaStatement(subj1, pred, four));
+        dao.add(new RyaStatement(subj2, pred, zero));
+        dao.add(new RyaStatement(subj2, pred, one));
+        dao.add(new RyaStatement(subj2, pred, four));
+        dao.add(new RyaStatement(subj3, pred, two));
+        dao.add(new RyaStatement(subj3, pred, four));
+        dao.add(new RyaStatement(subj4, pred, one));
+        dao.add(new RyaStatement(subj4, pred, two));
+        dao.add(new RyaStatement(subj4, pred, three));
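+        // No single subject has all four values, so the join result should be empty.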
+        
+
+        //1 join
+        MergeJoin mergeJoin = new MergeJoin(dao.getQueryEngine());
+        CloseableIteration<RyaURI, RyaDAOException> join = mergeJoin.join(null, new CustomEntry<RyaURI, RyaType>(pred, one),
+                new CustomEntry<RyaURI, RyaType>(pred, two),
+                new CustomEntry<RyaURI, RyaType>(pred, three),
+                new CustomEntry<RyaURI, RyaType>(pred, four)
+        );
+
+        assertFalse(join.hasNext());
+        join.close();
+    }
+
+    @Test
+    public void testMergeJoinMultiWayNone2() throws Exception {
+        //add data
+        RyaURI pred = new RyaURI(litdupsNS, "pred1");
+        RyaType zero = new RyaType("0");
+        RyaType one = new RyaType("1");
+        RyaType two = new RyaType("2");
+        RyaType three = new RyaType("3");
+        RyaType four = new RyaType("4");
+        RyaURI subj1 = new RyaURI(litdupsNS, "subj1");
+        RyaURI subj2 = new RyaURI(litdupsNS, "subj2");
+        RyaURI subj3 = new RyaURI(litdupsNS, "subj3");
+        RyaURI subj4 = new RyaURI(litdupsNS, "subj4");
+
+        dao.add(new RyaStatement(subj1, pred, one));
+        dao.add(new RyaStatement(subj1, pred, four));
+        dao.add(new RyaStatement(subj2, pred, zero));
+        dao.add(new RyaStatement(subj2, pred, one));
+        dao.add(new RyaStatement(subj2, pred, four));
+        dao.add(new RyaStatement(subj3, pred, two));
+        dao.add(new RyaStatement(subj3, pred, four));
+        dao.add(new RyaStatement(subj4, pred, one));
+        dao.add(new RyaStatement(subj4, pred, two));
+        
+
+        //1 join
+        MergeJoin mergeJoin = new MergeJoin(dao.getQueryEngine());
+        CloseableIteration<RyaURI, RyaDAOException> join = mergeJoin.join(null, new CustomEntry<RyaURI, RyaType>(pred, one),
+                new CustomEntry<RyaURI, RyaType>(pred, two),
+                new CustomEntry<RyaURI, RyaType>(pred, three),
+                new CustomEntry<RyaURI, RyaType>(pred, four)
+        );
+
+        assertFalse(join.hasNext());
+        join.close();
+    }
+}


http://git-wip-us.apache.org/repos/asf/incubator-rya/blob/80faf06d/sail/src/test/java/mvm/rya/RdfCloudTripleStoreConnectionTest.java
----------------------------------------------------------------------
diff --git a/sail/src/test/java/mvm/rya/RdfCloudTripleStoreConnectionTest.java b/sail/src/test/java/mvm/rya/RdfCloudTripleStoreConnectionTest.java
new file mode 100644
index 0000000..31efa3a
--- /dev/null
+++ b/sail/src/test/java/mvm/rya/RdfCloudTripleStoreConnectionTest.java
@@ -0,0 +1,1363 @@
+package mvm.rya;
+
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ * 
+ *   http://www.apache.org/licenses/LICENSE-2.0
+ * 
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied.  See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+
+
+
+import static mvm.rya.api.RdfCloudTripleStoreConstants.NAMESPACE;
+
+import java.io.InputStream;
+import java.util.List;
+
+import junit.framework.TestCase;
+import mvm.rya.accumulo.AccumuloRdfConfiguration;
+import mvm.rya.accumulo.AccumuloRyaDAO;
+import mvm.rya.api.RdfCloudTripleStoreConfiguration;
+import mvm.rya.api.RdfCloudTripleStoreConstants;
+import mvm.rya.rdftriplestore.RdfCloudTripleStore;
+import mvm.rya.rdftriplestore.RyaSailRepository;
+import mvm.rya.rdftriplestore.inference.InferenceEngine;
+import mvm.rya.rdftriplestore.namespace.NamespaceManager;
+
+import org.apache.accumulo.core.client.Connector;
+import org.apache.accumulo.core.client.Instance;
+import org.apache.accumulo.core.client.mock.MockInstance;
+import org.junit.Ignore;
+import org.openrdf.model.Literal;
+import org.openrdf.model.Resource;
+import org.openrdf.model.Statement;
+import org.openrdf.model.URI;
+import org.openrdf.model.impl.StatementImpl;
+import org.openrdf.model.impl.URIImpl;
+import org.openrdf.model.impl.ValueFactoryImpl;
+import org.openrdf.model.vocabulary.OWL;
+import org.openrdf.model.vocabulary.RDF;
+import org.openrdf.model.vocabulary.RDFS;
+import org.openrdf.query.BindingSet;
+import org.openrdf.query.QueryLanguage;
+import org.openrdf.query.QueryResultHandlerException;
+import org.openrdf.query.TupleQuery;
+import org.openrdf.query.TupleQueryResultHandler;
+import org.openrdf.query.TupleQueryResultHandlerException;
+import org.openrdf.query.Update;
+import org.openrdf.repository.Repository;
+import org.openrdf.repository.RepositoryConnection;
+import org.openrdf.repository.RepositoryResult;
+import org.openrdf.repository.sail.SailRepository;
+import org.openrdf.rio.RDFFormat;
+
+/**
+ * Repository-level tests that exercise a {@link RdfCloudTripleStore}
+ * connection through {@link RyaSailRepository} over a mock Accumulo instance.
+ *
+ * Date: Mar 3, 2011
+ * Time: 12:03:29 PM
+ */
+public class RdfCloudTripleStoreConnectionTest extends TestCase {
+    private Repository repository;
+    ValueFactoryImpl vf = new ValueFactoryImpl();
+    private InferenceEngine internalInferenceEngine;
+
+    static String litdupsNS = "urn:test:litdups#";
+    URI cpu = vf.createURI(litdupsNS, "cpu");
+    protected RdfCloudTripleStore store;
+
+    public void setUp() throws Exception {
+        super.setUp();
+        store = new MockRdfCloudStore();
+//        store.setDisplayQueryPlan(true);
+//        store.setInferencing(false);
+        NamespaceManager nm = new NamespaceManager(store.getRyaDAO(), store.getConf());
+        store.setNamespaceManager(nm);
+        repository = new RyaSailRepository(store);
+        repository.initialize();
+    }
+
+    public void tearDown() throws Exception {
+        super.tearDown();
+        repository.shutDown();
+    }
+
+    public void testAddStatement() throws Exception {
+        RepositoryConnection conn = repository.getConnection();
+
+        URI loadPerc = vf.createURI(litdupsNS, "loadPerc");
+        URI uri1 = vf.createURI(litdupsNS, "uri1");
+        conn.add(cpu, loadPerc, uri1);
+        conn.commit();
+
+        RepositoryResult<Statement> result = conn.getStatements(cpu, loadPerc, null, true);
+        int count = 0;
+        while (result.hasNext()) {
+            count++;
+            result.next();
+        }
+        result.close();
+        assertEquals(1, count);
+
+        //clean up
+        conn.remove(cpu, loadPerc, uri1);
+
+        // test removal
+        result = conn.getStatements(cpu, loadPerc, null, true, new Resource[0]);
+        count = 0;
+        while (result.hasNext()) {
+            count++;
+            result.next();
+        }
+        result.close();
+        assertEquals(0, count);
+
+        conn.close();
+    }
+
+//    public void testAddAuth() throws Exception {
+//        RepositoryConnection conn = repository.getConnection();
+//        URI cpu = vf.createURI(litdupsNS, "cpu");
+//        URI loadPerc = vf.createURI(litdupsNS, "loadPerc");
+//        URI uri1 = vf.createURI(litdupsNS, "uri1");
+//        URI uri2 = vf.createURI(litdupsNS, "uri2");
+//        URI uri3 = vf.createURI(litdupsNS, "uri3");
+//        URI auth1 = vf.createURI(RdfCloudTripleStoreConstants.AUTH_NAMESPACE, "1");
+//        URI auth2 = vf.createURI(RdfCloudTripleStoreConstants.AUTH_NAMESPACE, "2");
+//        URI auth3 = vf.createURI(RdfCloudTripleStoreConstants.AUTH_NAMESPACE, "3");
+//        conn.add(cpu, loadPerc, uri1, auth1, auth2, auth3);
+//        conn.add(cpu, loadPerc, uri2, auth2, auth3);
+//        conn.add(cpu, loadPerc, uri3, auth3);
+//        conn.commit();
+//
+//        //query with no auth
+//        RepositoryResult<Statement> result = conn.getStatements(cpu, loadPerc, null, true);
+//        int count = 0;
+//        while (result.hasNext()) {
+//            count++;
+//            result.next();
+//        }
+//        assertEquals(0, count);
+//        result.close();
+//
+//        String query = "select * where {" +
+//                "<" + cpu.toString() + "> ?p ?o1." +
+//                "}";
+//        TupleQuery tupleQuery = conn.prepareTupleQuery(QueryLanguage.SPARQL, query);
+//        tupleQuery.setBinding(RdfCloudTripleStoreConfiguration.CONF_QUERY_AUTH, vf.createLiteral("2"));
+//        CountTupleHandler cth = new CountTupleHandler();
+//        tupleQuery.evaluate(cth);
+//        assertEquals(2, cth.getCount());
+//
+//        conn.close();
+//    }
+
+    public void testEvaluate() throws Exception {
+        RepositoryConnection conn = repository.getConnection();
+        URI loadPerc = vf.createURI(litdupsNS, "loadPerc");
+        URI uri1 = vf.createURI(litdupsNS, "uri1");
+        conn.add(cpu, loadPerc, uri1);
+        conn.commit();
+
+        String query = "select * where {" +
+                "?x <" + loadPerc.stringValue() + "> ?o1." +
+                "}";
+        TupleQuery tupleQuery = conn.prepareTupleQuery(QueryLanguage.SPARQL, query);
+        CountTupleHandler cth = new CountTupleHandler();
+        tupleQuery.evaluate(cth);
+        assertEquals(1, cth.getCount());
+        conn.close();
+    }
+
+    public void testEvaluateMultiLine() throws Exception {
+        RepositoryConnection conn = repository.getConnection();
+        URI loadPerc = vf.createURI(litdupsNS, "loadPerc");
+        URI uri1 = vf.createURI(litdupsNS, "uri1");
+        URI pred2 = vf.createURI(litdupsNS, "pred2");
+        URI uri2 = vf.createURI(litdupsNS, "uri2");
+        conn.add(cpu, loadPerc, uri1);
+        conn.add(cpu, pred2, uri2);
+        conn.commit();
+
+        String query = "select * where {" +
+                "?x <" + loadPerc.stringValue() + "> ?o1." +
+                "?x <" + pred2.stringValue() + "> ?o2." +
+                "}";
+        TupleQuery tupleQuery = conn.prepareTupleQuery(QueryLanguage.SPARQL, query);
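+        // binding the query-plan flag to true turns on the store's query-plan output for this query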
+        tupleQuery.setBinding(RdfCloudTripleStoreConfiguration.CONF_QUERYPLAN_FLAG, RdfCloudTripleStoreConstants.VALUE_FACTORY.createLiteral(true));
+        CountTupleHandler cth = new CountTupleHandler();
+        tupleQuery.evaluate(cth);
+        conn.close();
+        assertEquals(1, cth.getCount());
+    }
+
+    public void testPOObjRange() throws Exception {
+        RepositoryConnection conn = repository.getConnection();
+        URI loadPerc = vf.createURI(litdupsNS, "loadPerc");
+        Literal six = vf.createLiteral("6");
+        Literal sev = vf.createLiteral("7");
+        Literal ten = vf.createLiteral("10");
+        conn.add(cpu, loadPerc, six);
+        conn.add(cpu, loadPerc, sev);
+        conn.add(cpu, loadPerc, ten);
+        conn.commit();
+
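+        // mvm:range is Rya's custom SPARQL filter function: keep solutions where ?o falls in the range ['6', '8']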
+        String query = "PREFIX mvm: <" + NAMESPACE + ">\n" +
+                "select * where {" +
+                "?x <" + loadPerc.stringValue() + "> ?o.\n" +
+                "FILTER(mvm:range(?o, '6', '8'))." +
+                "}";
+        TupleQuery tupleQuery = conn.prepareTupleQuery(QueryLanguage.SPARQL, query);
+        CountTupleHandler cth = new CountTupleHandler();
+        tupleQuery.evaluate(cth);
+        conn.close();
+        assertEquals(2, cth.getCount());
+    }
+
+    public void testPOPredRange() throws Exception {
+        RepositoryConnection conn = repository.getConnection();
+        URI loadPerc = vf.createURI(litdupsNS, "loadPerc1");
+        URI loadPerc2 = vf.createURI(litdupsNS, "loadPerc2");
+        URI loadPerc3 = vf.createURI(litdupsNS, "loadPerc3");
+        URI loadPerc4 = vf.createURI(litdupsNS, "loadPerc4");
+        Literal six = vf.createLiteral("6");
+        Literal sev = vf.createLiteral("7");
+        Literal ten = vf.createLiteral("10");
+        conn.add(cpu, loadPerc, six);
+        conn.add(cpu, loadPerc2, sev);
+        conn.add(cpu, loadPerc4, ten);
+        conn.commit();
+
+        String query = "PREFIX mvm: <" + NAMESPACE + ">\n" +
+                "select * where {" +
+                "?x ?p ?o.\n" +
+                "FILTER(mvm:range(?p, <" + loadPerc.stringValue() + ">, <" + loadPerc3.stringValue() + ">))." +
+                "}";
+        TupleQuery tupleQuery = conn.prepareTupleQuery(QueryLanguage.SPARQL, query);
+        CountTupleHandler cth = new CountTupleHandler();
+        tupleQuery.evaluate(cth);
+        conn.close();
+        assertEquals(2, cth.getCount());
+    }
+
+    public void testSPOPredRange() throws Exception {
+        RepositoryConnection conn = repository.getConnection();
+        URI loadPerc = vf.createURI(litdupsNS, "loadPerc1");
+        URI loadPerc2 = vf.createURI(litdupsNS, "loadPerc2");
+        URI loadPerc3 = vf.createURI(litdupsNS, "loadPerc3");
+        URI loadPerc4 = vf.createURI(litdupsNS, "loadPerc4");
+        Literal six = vf.createLiteral("6");
+        Literal sev = vf.createLiteral("7");
+        Literal ten = vf.createLiteral("10");
+        conn.add(cpu, loadPerc, six);
+        conn.add(cpu, loadPerc2, sev);
+        conn.add(cpu, loadPerc4, ten);
+        conn.commit();
+
+        String query = "PREFIX mvm: <" + NAMESPACE + ">\n" +
+                "select * where {" +
+                "<" + cpu.stringValue() + "> ?p ?o.\n" +
+                "FILTER(mvm:range(?p, <" + loadPerc.stringValue() + ">, <" + loadPerc3.stringValue() + ">))." +
+                "}";
+        TupleQuery tupleQuery = conn.prepareTupleQuery(QueryLanguage.SPARQL, query);
+        CountTupleHandler cth = new CountTupleHandler();
+        tupleQuery.evaluate(cth);
+        conn.close();
+        assertEquals(2, cth.getCount());
+    }
+
+    public void testSPOSubjRange() throws Exception {
+        RepositoryConnection conn = repository.getConnection();
+        URI cpu2 = vf.createURI(litdupsNS, "cpu2");
+        URI cpu3 = vf.createURI(litdupsNS, "cpu3");
+        URI loadPerc = vf.createURI(litdupsNS, "loadPerc");
+        Literal six = vf.createLiteral("6");
+        Literal sev = vf.createLiteral("7");
+        Literal ten = vf.createLiteral("10");
+        conn.add(cpu, loadPerc, six);
+        conn.add(cpu2, loadPerc, sev);
+        conn.add(cpu3, loadPerc, ten);
+        conn.commit();
+
+        String query = "PREFIX mvm: <" + NAMESPACE + ">\n" +
+                "select * where {" +
+                "?s ?p ?o.\n" +
+                "FILTER(mvm:range(?s, <" + cpu.stringValue() + ">, <" + cpu2.stringValue() + ">))." +
+                "}";
+        TupleQuery tupleQuery = conn.prepareTupleQuery(QueryLanguage.SPARQL, query);
+        CountTupleHandler cth = new CountTupleHandler();
+        tupleQuery.evaluate(cth);
+        conn.close();
+        assertEquals(2, cth.getCount());
+    }
+
+    public void testSPOObjRange() throws Exception {
+        RepositoryConnection conn = repository.getConnection();
+        URI loadPerc = vf.createURI(litdupsNS, "loadPerc");
+        Literal six = vf.createLiteral("6");
+        Literal sev = vf.createLiteral("7");
+        Literal ten = vf.createLiteral("10");
+        conn.add(cpu, loadPerc, six);
+        conn.add(cpu, loadPerc, sev);
+        conn.add(cpu, loadPerc, ten);
+        conn.commit();
+
+        String query = "PREFIX mvm: <" + NAMESPACE + ">\n" +
+                "select * where {" +
+                "<" + cpu.stringValue() + "> <" + loadPerc.stringValue() + "> ?o.\n" +
+                "FILTER(mvm:range(?o, '6', '8'))." +
+                "}";
+        TupleQuery tupleQuery = conn.prepareTupleQuery(QueryLanguage.SPARQL, query);
+        CountTupleHandler cth = new CountTupleHandler();
+        tupleQuery.evaluate(cth);
+        conn.close();
+        assertEquals(2, cth.getCount());
+    }
+
+    public void testOSPObjRange() throws Exception {
+        RepositoryConnection conn = repository.getConnection();
+        URI loadPerc = vf.createURI(litdupsNS, "loadPerc");
+        Literal six = vf.createLiteral("6");
+        Literal sev = vf.createLiteral("7");
+        Literal ten = vf.createLiteral("10");
+        conn.add(cpu, loadPerc, six);
+        conn.add(cpu, loadPerc, sev);
+        conn.add(cpu, loadPerc, ten);
+        conn.commit();
+
+        String query = "PREFIX mvm: <" + NAMESPACE + ">\n" +
+                "select * where {" +
+                "?s ?p ?o.\n" +
+                "FILTER(mvm:range(?o, '6', '8'))." +
+                "}";
+        TupleQuery tupleQuery = conn.prepareTupleQuery(QueryLanguage.SPARQL, query);
+        CountTupleHandler cth = new CountTupleHandler();
+        tupleQuery.evaluate(cth);
+        conn.close();
+        assertEquals(2, cth.getCount());
+    }
+    
+    public void testRegexFilter() throws Exception {
+        RepositoryConnection conn = repository.getConnection();
+        URI loadPerc = vf.createURI(litdupsNS, "loadPerc");
+        URI testClass = vf.createURI(litdupsNS, "test");
+        Literal six = vf.createLiteral("6");
+        Literal sev = vf.createLiteral("7");
+        Literal ten = vf.createLiteral("10");
+        conn.add(cpu, loadPerc, six);
+        conn.add(cpu, loadPerc, sev);
+        conn.add(cpu, loadPerc, ten);
+        conn.add(cpu, RDF.TYPE, testClass);
+        conn.commit();
+
+        String query = "PREFIX mvm: <" + NAMESPACE + ">\n" +
+                "select * where {" +
+                String.format("<%s> ?p ?o.\n", cpu.stringValue()) +
+                "FILTER(regex(?o, '^1'))." +
+                "}";
+        TupleQuery tupleQuery = conn.prepareTupleQuery(QueryLanguage.SPARQL, query);
+        CountTupleHandler cth = new CountTupleHandler();
+        tupleQuery.evaluate(cth);
+        conn.close();
+        assertEquals(1, cth.getCount());
+    }
+
+    public void testMMRTS152() throws Exception {
+        RepositoryConnection conn = repository.getConnection();
+        URI loadPerc = vf.createURI(litdupsNS, "testPred");
+        URI uri1 = vf.createURI(litdupsNS, "uri1");
+        conn.add(cpu, loadPerc, uri1);
+        conn.commit();
+
+        RepositoryResult<Statement> result = conn.getStatements(cpu, loadPerc, null, false, new Resource[0]);
+//        RdfCloudTripleStoreCollectionStatementsIterator iterator = new RdfCloudTripleStoreCollectionStatementsIterator(
+//                cpu, loadPerc, null, store.connector,
+//                vf, new Configuration(), null);
+
+        while (result.hasNext()) {
+            assertTrue(result.hasNext());
+            assertNotNull(result.next());
+        }
+
+        conn.close();
+    }
+
+    public void testDuplicateLiterals() throws Exception {
+        RepositoryConnection conn = repository.getConnection();
+
+        URI loadPerc = vf.createURI(litdupsNS, "loadPerc");
+        Literal lit1 = vf.createLiteral(0.0);
+        Literal lit2 = vf.createLiteral(0.0);
+        Literal lit3 = vf.createLiteral(0.0);
+
+        conn.add(cpu, loadPerc, lit1);
+        conn.add(cpu, loadPerc, lit2);
+        conn.add(cpu, loadPerc, lit3);
+        conn.commit();
+
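+        // all three literals are identical, so the store should hold just one statement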
+        RepositoryResult<Statement> result = conn.getStatements(cpu, loadPerc, null, true, new Resource[0]);
+        int count = 0;
+        while (result.hasNext()) {
+            count++;
+            result.next();
+        }
+        result.close();
+        assertEquals(1, count);
+
+        //clean up
+        conn.remove(cpu, loadPerc, lit1);
+        conn.close();
+    }
+
+    public void testNotDuplicateUris() throws Exception {
+        RepositoryConnection conn = repository.getConnection();
+
+        URI loadPerc = vf.createURI(litdupsNS, "loadPerc");
+        URI uri1 = vf.createURI(litdupsNS, "uri1");
+        URI uri2 = vf.createURI(litdupsNS, "uri1");
+        URI uri3 = vf.createURI(litdupsNS, "uri1");
+
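+        // uri1, uri2, and uri3 all name the same URI, so only one distinct statement is written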
+        conn.add(cpu, loadPerc, uri1);
+        conn.add(cpu, loadPerc, uri2);
+        conn.add(cpu, loadPerc, uri3);
+        conn.commit();
+
+        RepositoryResult<Statement> result = conn.getStatements(cpu, loadPerc, null, true, new Resource[0]);
+        int count = 0;
+        while (result.hasNext()) {
+            count++;
+            result.next();
+        }
+        result.close();
+        assertEquals(1, count);
+
+        //clean up
+        conn.remove(cpu, loadPerc, uri1);
+        conn.close();
+    }
+
+    public void testNamespaceUsage() throws Exception {
+        RepositoryConnection conn = repository.getConnection();
+        conn.setNamespace("lit", litdupsNS);
+        URI loadPerc = vf.createURI(litdupsNS, "loadPerc");
+        final URI uri1 = vf.createURI(litdupsNS, "uri1");
+        conn.add(cpu, loadPerc, uri1);
+        conn.commit();
+
+        String query = "PREFIX lit: <" + litdupsNS + ">\n" +
+                "select * where {lit:cpu lit:loadPerc ?o.}";
+        TupleQuery tupleQuery = conn.prepareTupleQuery(QueryLanguage.SPARQL, query);
+        tupleQuery.evaluate(new TupleQueryResultHandler() {
+
+            @Override
+            public void startQueryResult(List<String> strings) throws TupleQueryResultHandlerException {
+            }
+
+            @Override
+            public void endQueryResult() throws TupleQueryResultHandlerException {
+
+            }
+
+            @Override
+            public void handleSolution(BindingSet bindingSet) throws TupleQueryResultHandlerException {
+                assertEquals(uri1.toString(), bindingSet.getBinding("o").getValue().stringValue());
+            }
+
+            @Override
+            public void handleBoolean(boolean paramBoolean) throws QueryResultHandlerException {
+            }
+
+            @Override
+            public void handleLinks(List<String> paramList) throws QueryResultHandlerException {
+            }
+        });
+        conn.close();
+    }
+
+    public void testSubPropertyOf() throws Exception {
+        if(internalInferenceEngine == null) return; //infer not supported;
+
+        RepositoryConnection conn = repository.getConnection();
+        conn.add(new StatementImpl(vf.createURI(litdupsNS, "undergradDegreeFrom"), RDFS.SUBPROPERTYOF, vf.createURI(litdupsNS, "degreeFrom")));
+        conn.add(new StatementImpl(vf.createURI(litdupsNS, "gradDegreeFrom"), RDFS.SUBPROPERTYOF, vf.createURI(litdupsNS, "degreeFrom")));
+        conn.add(new StatementImpl(vf.createURI(litdupsNS, "degreeFrom"), RDFS.SUBPROPERTYOF, vf.createURI(litdupsNS, "memberOf")));
+        conn.add(new StatementImpl(vf.createURI(litdupsNS, "memberOf"), RDFS.SUBPROPERTYOF, vf.createURI(litdupsNS, "associatedWith")));
+        conn.add(new StatementImpl(vf.createURI(litdupsNS, "UgradA"), vf.createURI(litdupsNS, "undergradDegreeFrom"), vf.createURI(litdupsNS, "Harvard")));
+        conn.add(new StatementImpl(vf.createURI(litdupsNS, "GradB"), vf.createURI(litdupsNS, "gradDegreeFrom"), vf.createURI(litdupsNS, "Yale")));
+        conn.add(new StatementImpl(vf.createURI(litdupsNS, "ProfessorC"), vf.createURI(litdupsNS, "memberOf"), vf.createURI(litdupsNS, "Harvard")));
+        conn.commit();
+        conn.close();
+
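+        // let the inference engine rebuild its graph from the schema statements just committed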
+        internalInferenceEngine.refreshGraph();
+
+        conn = repository.getConnection();
+
+        String query = "PREFIX rdfs: <" + RDFS.NAMESPACE + ">\n" +
+                "PREFIX rdf: <" + RDF.NAMESPACE + ">\n" +
+                "PREFIX lit: <" + litdupsNS + ">\n" +
+                "select * where {?s lit:degreeFrom lit:Harvard.}";
+
+        TupleQuery tupleQuery = conn.prepareTupleQuery(QueryLanguage.SPARQL, query);
+        CountTupleHandler tupleHandler = new CountTupleHandler();
+        tupleQuery.evaluate(tupleHandler);
+        assertEquals(1, tupleHandler.getCount());
+
+        query = "PREFIX rdfs: <" + RDFS.NAMESPACE + ">\n" +
+                "PREFIX rdf: <" + RDF.NAMESPACE + ">\n" +
+                "PREFIX lit: <" + litdupsNS + ">\n" +
+                "select * where {?s lit:memberOf lit:Harvard.}";
+
+        tupleQuery = conn.prepareTupleQuery(QueryLanguage.SPARQL, query);
+        tupleHandler = new CountTupleHandler();
+        tupleQuery.evaluate(tupleHandler);
+        assertEquals(2, tupleHandler.getCount());
+
+        query = "PREFIX rdfs: <" + RDFS.NAMESPACE + ">\n" +
+                "PREFIX rdf: <" + RDF.NAMESPACE + ">\n" +
+                "PREFIX lit: <" + litdupsNS + ">\n" +
+                "select * where {?s lit:associatedWith ?o.}";
+
+        tupleQuery = conn.prepareTupleQuery(QueryLanguage.SPARQL, query);
+        tupleHandler = new CountTupleHandler();
+        tupleQuery.evaluate(tupleHandler);
+        assertEquals(3, tupleHandler.getCount());
+
+        query = "PREFIX rdfs: <" + RDFS.NAMESPACE + ">\n" +
+                "PREFIX rdf: <" + RDF.NAMESPACE + ">\n" +
+                "PREFIX lit: <" + litdupsNS + ">\n" +
+                "select * where {?s lit:gradDegreeFrom lit:Yale.}";
+
+        tupleQuery = conn.prepareTupleQuery(QueryLanguage.SPARQL, query);
+        tupleHandler = new CountTupleHandler();
+        tupleQuery.evaluate(tupleHandler);
+        assertEquals(1, tupleHandler.getCount());
+
+        conn.close();
+    }
+
+    public void testEquivPropOf() throws Exception {
+        if(internalInferenceEngine == null) return; //infer not supported;
+
+        RepositoryConnection conn = repository.getConnection();
+        conn.add(new StatementImpl(vf.createURI(litdupsNS, "undergradDegreeFrom"), OWL.EQUIVALENTPROPERTY, vf.createURI(litdupsNS, "ugradDegreeFrom")));
+        conn.add(new StatementImpl(vf.createURI(litdupsNS, "UgradA"), vf.createURI(litdupsNS, "undergradDegreeFrom"), vf.createURI(litdupsNS, "Harvard")));
+        conn.add(new StatementImpl(vf.createURI(litdupsNS, "GradB"), vf.createURI(litdupsNS, "ugradDegreeFrom"), vf.createURI(litdupsNS, "Harvard")));
+        conn.add(new StatementImpl(vf.createURI(litdupsNS, "GradC"), vf.createURI(litdupsNS, "ugraduateDegreeFrom"), vf.createURI(litdupsNS, "Harvard")));
+        conn.commit();
+        conn.close();
+
+        internalInferenceEngine.refreshGraph();
+
+        conn = repository.getConnection();
+
+        String query = "PREFIX rdfs: <" + RDFS.NAMESPACE + ">\n" +
+                "PREFIX rdf: <" + RDF.NAMESPACE + ">\n" +
+                "PREFIX lit: <" + litdupsNS + ">\n" +
+                "select * where {?s lit:ugradDegreeFrom lit:Harvard.}";
+
+        TupleQuery tupleQuery = conn.prepareTupleQuery(QueryLanguage.SPARQL, query);
+        CountTupleHandler tupleHandler = new CountTupleHandler();
+        tupleQuery.evaluate(tupleHandler);
+        assertEquals(2, tupleHandler.getCount());
+
+        conn.close();
+    }
+
+    public void testSymmPropOf() throws Exception {
+        if(internalInferenceEngine == null) return; //infer not supported;
+
+        RepositoryConnection conn = repository.getConnection();
+        conn.add(new StatementImpl(vf.createURI(litdupsNS, "friendOf"), RDF.TYPE, OWL.SYMMETRICPROPERTY));
+        conn.add(new StatementImpl(vf.createURI(litdupsNS, "Bob"), vf.createURI(litdupsNS, "friendOf"), vf.createURI(litdupsNS, "Jeff")));
+        conn.add(new StatementImpl(vf.createURI(litdupsNS, "James"), vf.createURI(litdupsNS, "friendOf"), vf.createURI(litdupsNS, "Jeff")));
+        conn.commit();
+        conn.close();
+
+        internalInferenceEngine.refreshGraph();
+
+        conn = repository.getConnection();
+
+        String query = "PREFIX rdfs: <" + RDFS.NAMESPACE + ">\n" +
+                "PREFIX rdf: <" + RDF.NAMESPACE + ">\n" +
+                "PREFIX lit: <" + litdupsNS + ">\n" +
+                "select * where {?s lit:friendOf lit:Bob.}";
+
+        TupleQuery tupleQuery = conn.prepareTupleQuery(QueryLanguage.SPARQL, query);
+        CountTupleHandler tupleHandler = new CountTupleHandler();
+        tupleQuery.evaluate(tupleHandler);
+        assertEquals(1, tupleHandler.getCount());
+
+        query = "PREFIX rdfs: <" + RDFS.NAMESPACE + ">\n" +
+                "PREFIX rdf: <" + RDF.NAMESPACE + ">\n" +
+                "PREFIX lit: <" + litdupsNS + ">\n" +
+                "select * where {?s lit:friendOf lit:James.}";
+
+        tupleQuery = conn.prepareTupleQuery(QueryLanguage.SPARQL, query);
+        tupleHandler = new CountTupleHandler();
+        tupleQuery.evaluate(tupleHandler);
+        assertEquals(1, tupleHandler.getCount());
+
+        query = "PREFIX rdfs: <" + RDFS.NAMESPACE + ">\n" +
+                "PREFIX rdf: <" + RDF.NAMESPACE + ">\n" +
+                "PREFIX lit: <" + litdupsNS + ">\n" +
+                "select * where {?s lit:friendOf lit:Jeff.}";
+
+        tupleQuery = conn.prepareTupleQuery(QueryLanguage.SPARQL, query);
+        tupleHandler = new CountTupleHandler();
+        tupleQuery.evaluate(tupleHandler);
+        assertEquals(2, tupleHandler.getCount());
+
+        conn.close();
+    }
+
+    public void testTransitiveProp() throws Exception {
+        if(internalInferenceEngine == null) return; //infer not supported;
+
+        RepositoryConnection conn = repository.getConnection();
+        conn.add(new StatementImpl(vf.createURI(litdupsNS, "subRegionOf"), RDF.TYPE, OWL.TRANSITIVEPROPERTY));
+        conn.add(new StatementImpl(vf.createURI(litdupsNS, "Queens"), vf.createURI(litdupsNS, "subRegionOf"), vf.createURI(litdupsNS, "NYC")));
+        conn.add(new StatementImpl(vf.createURI(litdupsNS, "NYC"), vf.createURI(litdupsNS, "subRegionOf"), vf.createURI(litdupsNS, "NY")));
+        conn.add(new StatementImpl(vf.createURI(litdupsNS, "NY"), vf.createURI(litdupsNS, "subRegionOf"), vf.createURI(litdupsNS, "US")));
+        conn.add(new StatementImpl(vf.createURI(litdupsNS, "US"), vf.createURI(litdupsNS, "subRegionOf"), vf.createURI(litdupsNS, "NorthAmerica")));
+        conn.add(new StatementImpl(vf.createURI(litdupsNS, "NorthAmerica"), vf.createURI(litdupsNS, "subRegionOf"), vf.createURI(litdupsNS, "World")));
+        conn.commit();
+        conn.close();
+
+        internalInferenceEngine.refreshGraph();
+
+        conn = repository.getConnection();
+
+        String query = "PREFIX rdfs: <" + RDFS.NAMESPACE + ">\n" +
+                "PREFIX rdf: <" + RDF.NAMESPACE + ">\n" +
+                "PREFIX lit: <" + litdupsNS + ">\n" +
+                "select * where {?s lit:subRegionOf lit:NorthAmerica.}";
+
+        TupleQuery tupleQuery = conn.prepareTupleQuery(QueryLanguage.SPARQL, query);
+        CountTupleHandler tupleHandler = new CountTupleHandler();
+        tupleQuery.evaluate(tupleHandler);
+        assertEquals(4, tupleHandler.getCount());
+
+        query = "PREFIX rdfs: <" + RDFS.NAMESPACE + ">\n" +
+                "PREFIX rdf: <" + RDF.NAMESPACE + ">\n" +
+                "PREFIX lit: <" + litdupsNS + ">\n" +
+                "select * where {?s lit:subRegionOf lit:NY.}";
+
+        tupleQuery = conn.prepareTupleQuery(QueryLanguage.SPARQL, query);
+        tupleHandler = new CountTupleHandler();
+        tupleQuery.evaluate(tupleHandler);
+        assertEquals(2, tupleHandler.getCount());
+
+        query = "PREFIX rdfs: <" + RDFS.NAMESPACE + ">\n" +
+                "PREFIX rdf: <" + RDF.NAMESPACE + ">\n" +
+                "PREFIX lit: <" + litdupsNS + ">\n" +
+                "select * where {lit:Queens lit:subRegionOf ?s.}";
+
+        tupleQuery = conn.prepareTupleQuery(QueryLanguage.SPARQL, query);
+        tupleHandler = new CountTupleHandler();
+        tupleQuery.evaluate(tupleHandler);
+        assertEquals(5, tupleHandler.getCount());
+
+        query = "PREFIX rdfs: <" + RDFS.NAMESPACE + ">\n" +
+                "PREFIX rdf: <" + RDF.NAMESPACE + ">\n" +
+                "PREFIX lit: <" + litdupsNS + ">\n" +
+                "select * where {lit:NY lit:subRegionOf ?s.}";
+
+        tupleQuery = conn.prepareTupleQuery(QueryLanguage.SPARQL, query);
+        tupleHandler = new CountTupleHandler();
+        tupleQuery.evaluate(tupleHandler);
+        assertEquals(3, tupleHandler.getCount());
+
+        conn.close();
+    }
+
+    public void testInverseOf() throws Exception {
+        if(internalInferenceEngine == null) return; //infer not supported;
+
+        RepositoryConnection conn = repository.getConnection();
+        conn.add(new StatementImpl(vf.createURI(litdupsNS, "degreeFrom"), OWL.INVERSEOF, vf.createURI(litdupsNS, "hasAlumnus")));
+        conn.add(new StatementImpl(vf.createURI(litdupsNS, "UgradA"), vf.createURI(litdupsNS, "degreeFrom"), vf.createURI(litdupsNS, "Harvard")));
+        conn.add(new StatementImpl(vf.createURI(litdupsNS, "GradB"), vf.createURI(litdupsNS, "degreeFrom"), vf.createURI(litdupsNS, "Harvard")));
+        conn.add(new StatementImpl(vf.createURI(litdupsNS, "Harvard"), vf.createURI(litdupsNS, "hasAlumnus"), vf.createURI(litdupsNS, "AlumC")));
+        conn.commit();
+        conn.close();
+
+        internalInferenceEngine.refreshGraph();
+
+        conn = repository.getConnection();
+
+        String query = "PREFIX rdfs: <" + RDFS.NAMESPACE + ">\n" +
+                "PREFIX rdf: <" + RDF.NAMESPACE + ">\n" +
+                "PREFIX lit: <" + litdupsNS + ">\n" +
+                "select * where {lit:Harvard lit:hasAlumnus ?s.}";
+
+        TupleQuery tupleQuery = conn.prepareTupleQuery(QueryLanguage.SPARQL, query);
+        CountTupleHandler tupleHandler = new CountTupleHandler();
+        tupleQuery.evaluate(tupleHandler);
+        assertEquals(3, tupleHandler.getCount());
+
+        query = "PREFIX rdfs: <" + RDFS.NAMESPACE + ">\n" +
+                "PREFIX rdf: <" + RDF.NAMESPACE + ">\n" +
+                "PREFIX lit: <" + litdupsNS + ">\n" +
+                "select * where {?s lit:degreeFrom lit:Harvard.}";
+
+        tupleQuery = conn.prepareTupleQuery(QueryLanguage.SPARQL, query);
+        tupleHandler = new CountTupleHandler();
+        tupleQuery.evaluate(tupleHandler);
+        assertEquals(3, tupleHandler.getCount());
+
+        conn.close();
+    }
+
+    public void testSubClassOf() throws Exception {
+        if(internalInferenceEngine == null) return; //infer not supported;
+
+        RepositoryConnection conn = repository.getConnection();
+        conn.add(new StatementImpl(vf.createURI(litdupsNS, "UndergraduateStudent"), RDFS.SUBCLASSOF, vf.createURI(litdupsNS, "Student")));
+        conn.add(new StatementImpl(vf.createURI(litdupsNS, "Student"), RDFS.SUBCLASSOF, vf.createURI(litdupsNS, "Person")));
+        conn.add(new StatementImpl(vf.createURI(litdupsNS, "UgradA"), RDF.TYPE, vf.createURI(litdupsNS, "UndergraduateStudent")));
+        conn.add(new StatementImpl(vf.createURI(litdupsNS, "StudentB"), RDF.TYPE, vf.createURI(litdupsNS, "Student")));
+        conn.add(new StatementImpl(vf.createURI(litdupsNS, "PersonC"), RDF.TYPE, vf.createURI(litdupsNS, "Person")));
+        conn.commit();
+        conn.close();
+
+        internalInferenceEngine.refreshGraph();
+
+        conn = repository.getConnection();
+
+        //simple api first
+        RepositoryResult<Statement> person = conn.getStatements(null, RDF.TYPE, vf.createURI(litdupsNS, "Person"), true);
+        int count = 0;
+        while (person.hasNext()) {
+            count++;
+            person.next();
+        }
+        person.close();
+        assertEquals(3, count);
+
+        String query = "PREFIX rdfs: <" + RDFS.NAMESPACE + ">\n" +
+                "PREFIX rdf: <" + RDF.NAMESPACE + ">\n" +
+                "PREFIX lit: <" + litdupsNS + ">\n" +
+                "select * where {?s rdf:type lit:Person.}";
+
+        TupleQuery tupleQuery = conn.prepareTupleQuery(QueryLanguage.SPARQL, query);
+        CountTupleHandler tupleHandler = new CountTupleHandler();
+        tupleQuery.evaluate(tupleHandler);
+        assertEquals(3, tupleHandler.getCount());
+
+        query = "PREFIX rdfs: <" + RDFS.NAMESPACE + ">\n" +
+                "PREFIX rdf: <" + RDF.NAMESPACE + ">\n" +
+                "PREFIX lit: <" + litdupsNS + ">\n" +
+                "select * where {?s rdf:type lit:Student.}";
+
+        tupleQuery = conn.prepareTupleQuery(QueryLanguage.SPARQL, query);
+        tupleHandler = new CountTupleHandler();
+        tupleQuery.evaluate(tupleHandler);
+        assertEquals(2, tupleHandler.getCount());
+
+        query = "PREFIX rdfs: <" + RDFS.NAMESPACE + ">\n" +
+                "PREFIX rdf: <" + RDF.NAMESPACE + ">\n" +
+                "PREFIX lit: <" + litdupsNS + ">\n" +
+                "select * where {?s rdf:type lit:UndergraduateStudent.}";
+
+        tupleQuery = conn.prepareTupleQuery(QueryLanguage.SPARQL, query);
+        tupleHandler = new CountTupleHandler();
+        tupleQuery.evaluate(tupleHandler);
+        assertEquals(1, tupleHandler.getCount());
+
+        conn.close();
+    }
+
+    public void testSameAs() throws Exception {
+        if(internalInferenceEngine == null) return; //infer not supported;
+
+        RepositoryConnection conn = repository.getConnection();
+        conn.add(new StatementImpl(vf.createURI(litdupsNS, "StudentA1"), OWL.SAMEAS, vf.createURI(litdupsNS, "StudentA2")));
+        conn.add(new StatementImpl(vf.createURI(litdupsNS, "StudentA2"), OWL.SAMEAS, vf.createURI(litdupsNS, "StudentA3")));
+        conn.add(new StatementImpl(vf.createURI(litdupsNS, "StudentB1"), OWL.SAMEAS, vf.createURI(litdupsNS, "StudentB2")));
+        conn.add(new StatementImpl(vf.createURI(litdupsNS, "StudentB2"), OWL.SAMEAS, vf.createURI(litdupsNS, "StudentB3")));
+        conn.add(new StatementImpl(vf.createURI(litdupsNS, "StudentA1"), vf.createURI(litdupsNS, "pred1"), vf.createURI(litdupsNS, "StudentB3")));
+        conn.add(new StatementImpl(vf.createURI(litdupsNS, "StudentB1"), vf.createURI(litdupsNS, "pred2"), vf.createURI(litdupsNS, "StudentA3")));
+        conn.commit();
+        conn.close();
+
+        internalInferenceEngine.refreshGraph();
+
+        conn = repository.getConnection();
+
+        // query where finds sameAs for obj, pred specified
+        String query = "PREFIX rdfs: <" + RDFS.NAMESPACE + ">\n" +
+                "PREFIX rdf: <" + RDF.NAMESPACE + ">\n" +
+                "PREFIX lit: <" + litdupsNS + ">\n" +
+                "select ?s where {?s lit:pred1 lit:StudentB2.}";
+
+        TupleQuery tupleQuery = conn.prepareTupleQuery(QueryLanguage.SPARQL, query);
+        CountTupleHandler tupleHandler = new CountTupleHandler();
+        tupleQuery.evaluate(tupleHandler);
+        assertEquals(1, tupleHandler.getCount());
+
+        // query where finds sameAs for obj only specified
+        query = "PREFIX rdfs: <" + RDFS.NAMESPACE + ">\n" +
+                "PREFIX rdf: <" + RDF.NAMESPACE + ">\n" +
+                "PREFIX lit: <" + litdupsNS + ">\n" +
+                "select ?s where {?s ?p lit:StudentB2.}";
+
+        tupleQuery = conn.prepareTupleQuery(QueryLanguage.SPARQL, query);
+        tupleHandler = new CountTupleHandler();
+        tupleQuery.evaluate(tupleHandler);
+        assertEquals(3, tupleHandler.getCount()); // including sameAs assertions
+
+        // query where finds sameAs for subj, pred specified
+        query = "PREFIX rdfs: <" + RDFS.NAMESPACE + ">\n" +
+                "PREFIX rdf: <" + RDF.NAMESPACE + ">\n" +
+                "PREFIX lit: <" + litdupsNS + ">\n" +
+                "select ?s where {lit:StudentB2 lit:pred2 ?s.}";
+
+        tupleQuery = conn.prepareTupleQuery(QueryLanguage.SPARQL, query);
+        tupleHandler = new CountTupleHandler();
+        tupleQuery.evaluate(tupleHandler);
+        assertEquals(1, tupleHandler.getCount()); // including sameAs assertions
+
+        // query where finds sameAs for subj only specified
+        query = "PREFIX rdfs: <" + RDFS.NAMESPACE + ">\n" +
+                "PREFIX rdf: <" + RDF.NAMESPACE + ">\n" +
+                "PREFIX lit: <" + litdupsNS + ">\n" +
+                "select ?s where {lit:StudentB2 ?p ?s.}";
+
+        tupleQuery = conn.prepareTupleQuery(QueryLanguage.SPARQL, query);
+        tupleHandler = new CountTupleHandler();
+        tupleQuery.evaluate(tupleHandler);
+        assertEquals(3, tupleHandler.getCount()); // including sameAs assertions
+
+        // query where finds sameAs for subj, obj specified
+        query = "PREFIX rdfs: <" + RDFS.NAMESPACE + ">\n" +
+                "PREFIX rdf: <" + RDF.NAMESPACE + ">\n" +
+                "PREFIX lit: <" + litdupsNS + ">\n" +
+                "select ?s where {lit:StudentB2 ?s lit:StudentA2.}";
+
+        tupleQuery = conn.prepareTupleQuery(QueryLanguage.SPARQL, query);
+        tupleHandler = new CountTupleHandler();
+        tupleQuery.evaluate(tupleHandler);
+        assertEquals(1, tupleHandler.getCount()); 
+
+        conn.close();
+    }
+
+    public void testNamedGraphLoad() throws Exception {
+        InputStream stream = Thread.currentThread().getContextClassLoader().getResourceAsStream("namedgraphs.trig");
+        assertNotNull(stream);
+        RepositoryConnection conn = repository.getConnection();
+        conn.add(stream, "", RDFFormat.TRIG);
+        conn.commit();
+
+        String query = "PREFIX  ex:  <http://www.example.org/exampleDocument#>\n" +
+                "PREFIX  voc:  <http://www.example.org/vocabulary#>\n" +
+                "PREFIX  foaf:  <http://xmlns.com/foaf/0.1/>\n" +
+                "PREFIX  rdfs:  <http://www.w3.org/2000/01/rdf-schema#>\n" +
+                "\n" +
+                "SELECT * \n" +
+//                "FROM NAMED <http://www.example.org/exampleDocument#G1>\n" +
+                "WHERE\n" +
+                "{\n" +
+                "  GRAPH ex:G1\n" +
+                "  {\n" +
+                "    ?m voc:name ?name ;\n" +
+                "           voc:homepage ?hp .\n" +
+                "  } .\n" +
+                " GRAPH ex:G2\n" +
+                "  {\n" +
+                "    ?m voc:hasSkill ?skill .\n" +
+                "  } .\n" +
+                "}";
+        TupleQuery tupleQuery = conn.prepareTupleQuery(QueryLanguage.SPARQL, query);
+        CountTupleHandler tupleHandler = new CountTupleHandler();
+        tupleQuery.evaluate(tupleHandler);
+//        tupleQuery.evaluate(new PrintTupleHandler());
+        assertEquals(1, tupleHandler.getCount());
+
+        query = "PREFIX  ex:  <http://www.example.org/exampleDocument#>\n" +
+                "PREFIX  voc:  <http://www.example.org/vocabulary#>\n" +
+                "PREFIX  swp:  <http://www.w3.org/2004/03/trix/swp-1/>\n" +
+                "PREFIX  foaf:  <http://xmlns.com/foaf/0.1/>\n" +
+                "PREFIX  rdfs:  <http://www.w3.org/2000/01/rdf-schema#>\n" +
+                "\n" +
+                "SELECT * \n" +
+                "WHERE\n" +
+                "{\n" +
+                "  GRAPH ex:G3\n" +
+                "  {\n" +
+                "    ?g swp:assertedBy ?w .\n" +
+                "    ?w swp:authority ex:Tom .\n" +
+                "  } .\n" +
+                "  GRAPH ?g\n" +
+                "  {\n" +
+                "    ?m voc:name ?name .\n" +
+                "  } .\n" +
+                "}";
+
+        tupleQuery = conn.prepareTupleQuery(QueryLanguage.SPARQL, query);
+        tupleHandler = new CountTupleHandler();
+        tupleQuery.evaluate(tupleHandler);
+        assertEquals(1, tupleHandler.getCount());
+
+        query = "PREFIX  ex:  <http://www.example.org/exampleDocument#>\n" +
+                "PREFIX  voc:  <http://www.example.org/vocabulary#>\n" +
+                "PREFIX  swp:  <http://www.w3.org/2004/03/trix/swp-1/>\n" +
+                "PREFIX  foaf:  <http://xmlns.com/foaf/0.1/>\n" +
+                "PREFIX  rdfs:  <http://www.w3.org/2000/01/rdf-schema#>\n" +
+                "\n" +
+                "SELECT * \n" +
+                "WHERE\n" +
+                "{\n" +
+                "  GRAPH ?g\n" +
+                "  {\n" +
+                "    ?m voc:name ?name .\n" +
+                "  } .\n" +
+                "}";
+
+        tupleQuery = conn.prepareTupleQuery(QueryLanguage.SPARQL, query);
+//        tupleQuery.setBinding(BINDING_DISP_QUERYPLAN, VALUE_FACTORY.createLiteral(true));
+        tupleHandler = new CountTupleHandler();
+        tupleQuery.evaluate(tupleHandler);
+        assertEquals(2, tupleHandler.getCount());
+
+        conn.close();
+    }
+
+    public void testNamedGraphLoad2() throws Exception {
+        InputStream stream = Thread.currentThread().getContextClassLoader().getResourceAsStream("namedgraphs.trig");
+        assertNotNull(stream);
+        RepositoryConnection conn = repository.getConnection();
+        conn.add(stream, "", RDFFormat.TRIG);
+        conn.commit();
+
+        RepositoryResult<Statement> statements = conn.getStatements(null, vf.createURI("http://www.example.org/vocabulary#name"), null, true, vf.createURI("http://www.example.org/exampleDocument#G1"));
+        int count = 0;
+        while (statements.hasNext()) {
+            statements.next();
+            count++;
+        }
+        statements.close();
+        assertEquals(1, count);
+
+        conn.close();
+    }
+
+//    public void testNamedGraphLoadWInlineAuth() throws Exception {
+//        InputStream stream = Thread.currentThread().getContextClassLoader().getResourceAsStream("namedgraphs.trig");
+//        assertNotNull(stream);
+//        URI auth1 = vf.createURI(RdfCloudTripleStoreConstants.AUTH_NAMESPACE, "1");
+//        RepositoryConnection conn = repository.getConnection();
+//        conn.add(stream, "", RDFFormat.TRIG, auth1);
+//        conn.commit();
+//
+//        String query = "PREFIX  ex:  <http://www.example.org/exampleDocument#>\n" +
+//                "PREFIX  voc:  <http://www.example.org/vocabulary#>\n" +
+//                "PREFIX  foaf:  <http://xmlns.com/foaf/0.1/>\n" +
+//                "PREFIX  rdfs:  <http://www.w3.org/2000/01/rdf-schema#>\n" +
+//                "\n" +
+//                "SELECT * \n" +
+//                "WHERE\n" +
+//                "{\n" +
+//                "  GRAPH ex:G1\n" +
+//                "  {\n" +
+//                "    ?m voc:name ?name ;\n" +
+//                "           voc:homepage ?hp .\n" +
+//                "  } .\n" +
+//                " GRAPH ex:G2\n" +
+//                "  {\n" +
+//                "    ?m voc:hasSkill ?skill .\n" +
+//                "  } .\n" +
+//                "}";
+//        TupleQuery tupleQuery = conn.prepareTupleQuery(QueryLanguage.SPARQL, query);
+//        tupleQuery.setBinding(RdfCloudTripleStoreConfiguration.CONF_QUERY_AUTH, vf.createLiteral("1"));
+//        CountTupleHandler tupleHandler = new CountTupleHandler();
+//        tupleQuery.evaluate(tupleHandler);
+//        assertEquals(1, tupleHandler.getCount());
+//
+//        query = "PREFIX  ex:  <http://www.example.org/exampleDocument#>\n" +
+//                "PREFIX  voc:  <http://www.example.org/vocabulary#>\n" +
+//                "PREFIX  swp:  <http://www.w3.org/2004/03/trix/swp-1/>\n" +
+//                "PREFIX  foaf:  <http://xmlns.com/foaf/0.1/>\n" +
+//                "PREFIX  rdfs:  <http://www.w3.org/2000/01/rdf-schema#>\n" +
+//                "\n" +
+//                "SELECT * \n" +
+//                "WHERE\n" +
+//                "{\n" +
+//                "  GRAPH ex:G3\n" +
+//                "  {\n" +
+//                "    ?g swp:assertedBy ?w .\n" +
+//                "    ?w swp:authority ex:Tom .\n" +
+//                "  } .\n" +
+//                "  GRAPH ?g\n" +
+//                "  {\n" +
+//                "    ?m voc:name ?name .\n" +
+//                "  } .\n" +
+//                "}";
+//
+//        tupleQuery = conn.prepareTupleQuery(QueryLanguage.SPARQL, query);
+//        tupleHandler = new CountTupleHandler();
+//        tupleQuery.evaluate(tupleHandler);
+//        assertEquals(0, tupleHandler.getCount());
+//
+//        conn.close();
+//    }
+
+    public void testNamedGraphLoadWAuth() throws Exception {
+        InputStream stream = Thread.currentThread().getContextClassLoader().getResourceAsStream("namedgraphs.trig");
+        assertNotNull(stream);
+
+        RdfCloudTripleStore tstore = new MockRdfCloudStore();
+        NamespaceManager nm = new NamespaceManager(tstore.getRyaDAO(), tstore.getConf());
+        tstore.setNamespaceManager(nm);
+        SailRepository repo = new SailRepository(tstore);
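+        // write all loaded statements with column visibility "1|2"; queries must present a matching auth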
+        tstore.getRyaDAO().getConf().setCv("1|2");
+        repo.initialize();
+
+        RepositoryConnection conn = repo.getConnection();
+        conn.add(stream, "", RDFFormat.TRIG);
+        conn.commit();
+
+        String query = "PREFIX  ex:  <http://www.example.org/exampleDocument#>\n" +
+                "PREFIX  voc:  <http://www.example.org/vocabulary#>\n" +
+                "PREFIX  foaf:  <http://xmlns.com/foaf/0.1/>\n" +
+                "PREFIX  rdfs:  <http://www.w3.org/2000/01/rdf-schema#>\n" +
+                "\n" +
+                "SELECT * \n" +
+//                "FROM NAMED <http://www.example.org/exampleDocument#G1>\n" +
+                "WHERE\n" +
+                "{\n" +
+                "  GRAPH ex:G1\n" +
+                "  {\n" +
+                "    ?m voc:name ?name ;\n" +
+                "           voc:homepage ?hp .\n" +
+                "  } .\n" +
+                " GRAPH ex:G2\n" +
+                "  {\n" +
+                "    ?m voc:hasSkill ?skill .\n" +
+                "  } .\n" +
+                "}";
+        TupleQuery tupleQuery = conn.prepareTupleQuery(QueryLanguage.SPARQL, query);
+        tupleQuery.setBinding(RdfCloudTripleStoreConfiguration.CONF_QUERY_AUTH, vf.createLiteral("2"));
+        CountTupleHandler tupleHandler = new CountTupleHandler();
+        tupleQuery.evaluate(tupleHandler);
+        assertEquals(1, tupleHandler.getCount());
+
+        tupleQuery = conn.prepareTupleQuery(QueryLanguage.SPARQL, query); //no auth
+        tupleHandler = new CountTupleHandler();
+        tupleQuery.evaluate(tupleHandler);
+        assertEquals(0, tupleHandler.getCount());
+
+        conn.close();
+
+        repo.shutDown();
+    }
+
+    public void testInsertDeleteData() throws Exception {
+        RepositoryConnection conn = repository.getConnection();
+
+        String insert = "PREFIX dc: <http://purl.org/dc/elements/1.1/>\n" +
+                "INSERT DATA\n" +
+                "{ <http://example/book3> dc:title    \"A new book\" ;\n" +
+                "                         dc:creator  \"A.N.Other\" .\n" +
+                "}";
+        Update update = conn.prepareUpdate(QueryLanguage.SPARQL, insert);
+        update.execute();
+
+        String query = "PREFIX dc: <http://purl.org/dc/elements/1.1/>\n" +
+                "select * where { <http://example/book3> ?p ?o. }";
+        TupleQuery tupleQuery = conn.prepareTupleQuery(QueryLanguage.SPARQL, query);
+        CountTupleHandler tupleHandler = new CountTupleHandler();
+        tupleQuery.evaluate(tupleHandler);
+        assertEquals(2, tupleHandler.getCount());
+
+        String delete = "PREFIX dc: <http://purl.org/dc/elements/1.1/>\n" +
+                "\n" +
+                "DELETE DATA\n" +
+                "{ <http://example/book3> dc:title    \"A new book\" ;\n" +
+                "                         dc:creator  \"A.N.Other\" .\n" +
+                "}";
+        update = conn.prepareUpdate(QueryLanguage.SPARQL, delete);
+        update.execute();
+
+        query = "PREFIX dc: <http://purl.org/dc/elements/1.1/>\n" +
+                "select * where { <http://example/book3> ?p ?o. }";
+        tupleQuery = conn.prepareTupleQuery(QueryLanguage.SPARQL, query);
+        tupleHandler = new CountTupleHandler();
+        tupleQuery.evaluate(tupleHandler);
+        assertEquals(0, tupleHandler.getCount());
+
+        conn.close();
+    }
+
+    public void testUpdateData() throws Exception {
+        RepositoryConnection conn = repository.getConnection();
+
+        String insert = "PREFIX dc: <http://purl.org/dc/elements/1.1/>\n" +
+                "PREFIX ex: <http://example/addresses#>\n" +
+                "INSERT DATA\n" +
+                "{ GRAPH ex:G1 {\n" +
+                "<http://example/book3> dc:title    \"A new book\" ;\n" +
+                "                         dc:creator  \"A.N.Other\" .\n" +
+                "}\n" +
+                "}";
+        Update update = conn.prepareUpdate(QueryLanguage.SPARQL, insert);
+        update.execute();
+
+        String query = "PREFIX dc: <http://purl.org/dc/elements/1.1/>\n" +
+                "select * where { <http://example/book3> ?p ?o. }";
+        TupleQuery tupleQuery = conn.prepareTupleQuery(QueryLanguage.SPARQL, query);
+        CountTupleHandler tupleHandler = new CountTupleHandler();
+        tupleQuery.evaluate(tupleHandler);
+        assertEquals(2, tupleHandler.getCount());
+
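+        // WITH scopes the DELETE template, the INSERT template, and the WHERE clause to graph ex:G1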
+        String insdel = "PREFIX dc: <http://purl.org/dc/elements/1.1/>\n" +
+                "\n" +
+                "WITH <http://example/addresses#G1>\n" +
+                "DELETE { ?book dc:title ?title }\n" +
+                "INSERT { ?book dc:title \"A newer book\"." +
+                "         ?book dc:add \"Additional Info\" }\n" +
+                "WHERE\n" +
+                "  { ?book dc:creator \"A.N.Other\" ;\n" +
+                "        dc:title ?title .\n" +
+                "  }";
+        update = conn.prepareUpdate(QueryLanguage.SPARQL, insdel);
+        update.execute();
+
+        query = "PREFIX dc: <http://purl.org/dc/elements/1.1/>\n" +
+                "PREFIX ex: <http://example/addresses#>\n" +
+                "select * where { GRAPH ex:G1 {<http://example/book3> ?p ?o. } }";
+        tupleQuery = conn.prepareTupleQuery(QueryLanguage.SPARQL, query);
+        tupleHandler = new CountTupleHandler();
+        tupleQuery.evaluate(tupleHandler);
+        assertEquals(3, tupleHandler.getCount());
+
+        conn.close();
+    }
+    
+    public void testClearGraph() throws Exception {
+        RepositoryConnection conn = repository.getConnection();
+
+        String insert = "PREFIX dc: <http://purl.org/dc/elements/1.1/>\n" +
+                "PREFIX ex: <http://example/addresses#>\n" +
+                "INSERT DATA\n" +
+                "{ GRAPH ex:G1 {\n" +
+                "<http://example/book3> dc:title    \"A new book\" ;\n" +
+                "                         dc:creator  \"A.N.Other\" .\n" +
+                "}\n" +
+                "}";
+        Update update = conn.prepareUpdate(QueryLanguage.SPARQL, insert);
+        update.execute();
+
+        insert = "PREFIX dc: <http://purl.org/dc/elements/1.1/>\n" +
+                "PREFIX ex: <http://example/addresses#>\n" +
+                "INSERT DATA\n" +
+                "{ GRAPH ex:G2 {\n" +
+                "<http://example/book3> dc:title    \"A new book\" ;\n" +
+                "                         dc:creator  \"A.N.Other\" .\n" +
+                "}\n" +
+                "}";
+        update = conn.prepareUpdate(QueryLanguage.SPARQL, insert);
+        update.execute();
+
+        String query = "PREFIX dc: <http://purl.org/dc/elements/1.1/>\n" +
+                "select * where { <http://example/book3> ?p ?o. }";
+        TupleQuery tupleQuery = conn.prepareTupleQuery(QueryLanguage.SPARQL, query);
+        CountTupleHandler tupleHandler = new CountTupleHandler();
+        tupleQuery.evaluate(tupleHandler);
+        assertEquals(4, tupleHandler.getCount());
+        
+        tupleHandler = new CountTupleHandler();
+        conn.clear(new URIImpl("http://example/addresses#G2"));
+        tupleQuery.evaluate(tupleHandler);
+        assertEquals(2, tupleHandler.getCount());
+
+        tupleHandler = new CountTupleHandler();
+        conn.clear(new URIImpl("http://example/addresses#G1"));
+        tupleQuery.evaluate(tupleHandler);
+        assertEquals(0, tupleHandler.getCount());
+
+        conn.close();
+    }
+    
+    public void testClearAllGraph() throws Exception {
+        RepositoryConnection conn = repository.getConnection();
+
+        String insert = "PREFIX dc: <http://purl.org/dc/elements/1.1/>\n" +
+                "PREFIX ex: <http://example/addresses#>\n" +
+                "INSERT DATA\n" +
+                "{ GRAPH ex:G1 {\n" +
+                "<http://example/book3> dc:title    \"A new book\" ;\n" +
+                "                         dc:creator  \"A.N.Other\" .\n" +
+                "}\n" +
+                "}";
+        Update update = conn.prepareUpdate(QueryLanguage.SPARQL, insert);
+        update.execute();
+
+        insert = "PREFIX dc: <http://purl.org/dc/elements/1.1/>\n" +
+                "PREFIX ex: <http://example/addresses#>\n" +
+                "INSERT DATA\n" +
+                "{ GRAPH ex:G2 {\n" +
+                "<http://example/book3> dc:title    \"A new book\" ;\n" +
+                "                         dc:creator  \"A.N.Other\" .\n" +
+                "}\n" +
+                "}";
+        update = conn.prepareUpdate(QueryLanguage.SPARQL, insert);
+        update.execute();
+
+        String query = "PREFIX dc: <http://purl.org/dc/elements/1.1/>\n" +
+                "select * where { <http://example/book3> ?p ?o. }";
+        TupleQuery tupleQuery = conn.prepareTupleQuery(QueryLanguage.SPARQL, query);
+        CountTupleHandler tupleHandler = new CountTupleHandler();
+        tupleQuery.evaluate(tupleHandler);
+        assertEquals(4, tupleHandler.getCount());
+        
+        tupleHandler = new CountTupleHandler();
+        conn.clear();
+        tupleQuery.evaluate(tupleHandler);
+        assertEquals(0, tupleHandler.getCount());
+
+        conn.close();
+    }
+    
+    public void testDropGraph() throws Exception {
+        RepositoryConnection conn = repository.getConnection();
+
+        String insert = "PREFIX dc: <http://purl.org/dc/elements/1.1/>\n" +
+                "PREFIX ex: <http://example/addresses#>\n" +
+                "INSERT DATA\n" +
+                "{ GRAPH ex:G1 {\n" +
+                "<http://example/book3> dc:title    \"A new book\" ;\n" +
+                "                         dc:creator  \"A.N.Other\" .\n" +
+                "}\n" +
+                "}";
+        Update update = conn.prepareUpdate(QueryLanguage.SPARQL, insert);
+        update.execute();
+
+        insert = "PREFIX dc: <http://purl.org/dc/elements/1.1/>\n" +
+                "PREFIX ex: <http://example/addresses#>\n" +
+                "INSERT DATA\n" +
+                "{ GRAPH ex:G2 {\n" +
+                "<http://example/book3> dc:title    \"A new book\" ;\n" +
+                "                         dc:creator  \"A.N.Other\" .\n" +
+                "}\n" +
+                "}";
+        update = conn.prepareUpdate(QueryLanguage.SPARQL, insert);
+        update.execute();
+
+        String query = "PREFIX dc: <http://purl.org/dc/elements/1.1/>\n" +
+                "select * where { <http://example/book3> ?p ?o. }";
+        TupleQuery tupleQuery = conn.prepareTupleQuery(QueryLanguage.SPARQL, query);
+        CountTupleHandler tupleHandler = new CountTupleHandler();
+        tupleQuery.evaluate(tupleHandler);
+        assertEquals(4, tupleHandler.getCount());
+        
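+        // DROP GRAPH via SPARQL Update is the declarative counterpart of conn.clear(...) in testClearGraph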
+        tupleHandler = new CountTupleHandler();
+        String drop = "PREFIX ex: <http://example/addresses#>\n" +
+                "DROP GRAPH ex:G2 ";
+        update = conn.prepareUpdate(QueryLanguage.SPARQL, drop);
+        update.execute();
+        tupleQuery.evaluate(tupleHandler);
+        assertEquals(2, tupleHandler.getCount());
+
+        tupleHandler = new CountTupleHandler();
+        drop = "PREFIX ex: <http://example/addresses#>\n" +
+                "DROP GRAPH ex:G1 ";
+        update = conn.prepareUpdate(QueryLanguage.SPARQL, drop);
+        update.execute();
+        tupleQuery.evaluate(tupleHandler);
+        assertEquals(0, tupleHandler.getCount());
+
+        conn.close();
+    }
+
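+    /** Counts the solutions a query produces; the tests assert against this count. */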
+    public static class CountTupleHandler implements TupleQueryResultHandler {
+
+        int count = 0;
+
+        @Override
+        public void startQueryResult(List<String> strings) throws TupleQueryResultHandlerException {
+        }
+
+        @Override
+        public void endQueryResult() throws TupleQueryResultHandlerException {
+        }
+
+        @Override
+        public void handleSolution(BindingSet bindingSet) throws TupleQueryResultHandlerException {
+            count++;
+        }
+
+        public int getCount() {
+            return count;
+        }
+
+        @Override
+        public void handleBoolean(boolean arg0) throws QueryResultHandlerException {
+        }
+
+        @Override
+        public void handleLinks(List<String> arg0) throws QueryResultHandlerException {
+        }
+    }
+
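+    /** Debugging aid that prints each solution; referenced only from commented-out code above. */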
+    private static class PrintTupleHandler implements TupleQueryResultHandler {
+
+        @Override
+        public void startQueryResult(List<String> strings) throws TupleQueryResultHandlerException {
+        }
+
+        @Override
+        public void endQueryResult() throws TupleQueryResultHandlerException {
+        }
+
+        @Override
+        public void handleSolution(BindingSet bindingSet) throws TupleQueryResultHandlerException {
+            System.out.println(bindingSet);
+        }
+
+        @Override
+        public void handleBoolean(boolean arg0) throws QueryResultHandlerException {
+        }
+
+        @Override
+        public void handleLinks(List<String> arg0) throws QueryResultHandlerException {
+        }
+    }
+
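+    /** RdfCloudTripleStore wired to Accumulo's in-memory MockInstance, so tests need no live cluster. */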
+    public class MockRdfCloudStore extends RdfCloudTripleStore {
+
+        public MockRdfCloudStore() {
+            super();
+            Instance instance = new MockInstance();
+            try {
+                AccumuloRdfConfiguration conf = new AccumuloRdfConfiguration();
+                setConf(conf);
+                Connector connector = instance.getConnector("", "");
+                AccumuloRyaDAO cdao = new AccumuloRyaDAO();
+                cdao.setConf(conf);
+                cdao.setConnector(connector);
+                setRyaDAO(cdao);
+                inferenceEngine = new InferenceEngine();
+                inferenceEngine.setRyaDAO(cdao);
+                inferenceEngine.setRefreshGraphSchedule(5000); //every 5 sec
+                inferenceEngine.setConf(conf);
+                setInferenceEngine(inferenceEngine);
+                internalInferenceEngine = inferenceEngine;
+            } catch (Exception e) {
+                e.printStackTrace();
+            }
+        }
+    }
+}


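The tests above all drive Rya through the standard OpenRDF Sail API: build an
RdfCloudTripleStore over an AccumuloRyaDAO, wrap it in a repository, then add
statements and evaluate SPARQL over a connection. A minimal sketch of that
recurring pattern, assuming the same mvm.rya packages and the in-memory
MockInstance used by MockRdfCloudStore above (identifiers below are
illustrative, and the snippet belongs inside a method declared
"throws Exception"):

    RdfCloudTripleStore store = new RdfCloudTripleStore();
    AccumuloRdfConfiguration conf = new AccumuloRdfConfiguration();
    store.setConf(conf);

    AccumuloRyaDAO dao = new AccumuloRyaDAO();                  // persists triples to Accumulo
    dao.setConf(conf);
    dao.setConnector(new MockInstance().getConnector("", ""));  // in-memory instance, empty user/pass
    store.setRyaDAO(dao);

    Repository repo = new RyaSailRepository(store);
    repo.initialize();
    RepositoryConnection conn = repo.getConnection();
    try {
        ValueFactory vf = ValueFactoryImpl.getInstance();
        conn.add(vf.createURI("urn:test#s"), vf.createURI("urn:test#p"), vf.createLiteral("o"));
        conn.commit();

        TupleQuery q = conn.prepareTupleQuery(QueryLanguage.SPARQL,
                "select * where { ?s ?p ?o }");
        RdfCloudTripleStoreConnectionTest.CountTupleHandler handler =
                new RdfCloudTripleStoreConnectionTest.CountTupleHandler();
        q.evaluate(handler);                                    // handler.getCount() == 1
    } finally {
        conn.close();
        repo.shutDown();
    }

Each test method is a variation on this skeleton, differing only in the
statements loaded and the SPARQL evaluated.
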
[10/56] [abbrv] incubator-rya git commit: RYA-7 POM and License Clean-up for Apache Move

Posted by mi...@apache.org.
http://git-wip-us.apache.org/repos/asf/incubator-rya/blob/80faf06d/sail/rya.sail.impl/src/test/java/mvm/rya/RdfCloudTripleStoreTest.java
----------------------------------------------------------------------
diff --git a/sail/rya.sail.impl/src/test/java/mvm/rya/RdfCloudTripleStoreTest.java b/sail/rya.sail.impl/src/test/java/mvm/rya/RdfCloudTripleStoreTest.java
deleted file mode 100644
index da9bdf6..0000000
--- a/sail/rya.sail.impl/src/test/java/mvm/rya/RdfCloudTripleStoreTest.java
+++ /dev/null
@@ -1,698 +0,0 @@
-package mvm.rya;
-
-/*
- * #%L
- * mvm.rya.rya.sail.impl
- * %%
- * Copyright (C) 2014 Rya
- * %%
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * You may obtain a copy of the License at
- * 
- *      http://www.apache.org/licenses/LICENSE-2.0
- * 
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- * #L%
- */
-
-import junit.framework.TestCase;
-import mvm.rya.accumulo.AccumuloRdfConfiguration;
-import mvm.rya.accumulo.AccumuloRyaDAO;
-import mvm.rya.api.RdfCloudTripleStoreConstants;
-import mvm.rya.rdftriplestore.RdfCloudTripleStore;
-import org.apache.accumulo.core.client.Connector;
-import org.apache.accumulo.core.client.mock.MockInstance;
-import org.openrdf.model.Namespace;
-import org.openrdf.model.ValueFactory;
-import org.openrdf.model.impl.StatementImpl;
-import org.openrdf.model.impl.ValueFactoryImpl;
-import org.openrdf.query.*;
-import org.openrdf.repository.RepositoryException;
-import org.openrdf.repository.RepositoryResult;
-import org.openrdf.repository.sail.SailRepository;
-import org.openrdf.repository.sail.SailRepositoryConnection;
-
-import javax.xml.datatype.DatatypeConfigurationException;
-import javax.xml.datatype.DatatypeFactory;
-import java.util.GregorianCalendar;
-import java.util.List;
-import java.util.Map;
-import java.util.Set;
-
-/**
- * Class PartitionConnectionTest
- * Date: Jul 6, 2011
- * Time: 5:24:07 PM
- */
-public class RdfCloudTripleStoreTest extends TestCase {
-    public static final String NAMESPACE = "http://here/2010/tracked-data-provenance/ns#";//44 len
-    public static final String RDF_NS = "http://www.w3.org/1999/02/22-rdf-syntax-ns#";
-    public static final String HBNAMESPACE = "http://here/2010/tracked-data-provenance/heartbeat/ns#";
-    public static final String HB_TIMESTAMP = HBNAMESPACE + "timestamp";
-
-    private SailRepository repository;
-    private SailRepositoryConnection connection;
-
-    ValueFactory vf = ValueFactoryImpl.getInstance();
-
-    private String objectUuid = "objectuuid1";
-    private String ancestor = "ancestor1";
-    private String descendant = "descendant1";
-    private static final long START = 1309532965000L;
-    private static final long END = 1310566686000L;
-    private Connector connector;
-
-    @Override
-    protected void setUp() throws Exception {
-        super.setUp();
-        connector = new MockInstance().getConnector("", "");
-
-        RdfCloudTripleStore sail = new RdfCloudTripleStore();
-        AccumuloRdfConfiguration conf = new AccumuloRdfConfiguration();
-        conf.setTablePrefix("lubm_");
-        sail.setConf(conf);
-        AccumuloRyaDAO crdfdao = new AccumuloRyaDAO();
-        crdfdao.setConnector(connector);
-        crdfdao.setConf(conf);
-        sail.setRyaDAO(crdfdao);
-
-        repository = new SailRepository(sail);
-        repository.initialize();
-        connection = repository.getConnection();
-
-        loadData();
-    }
-
-    private void loadData() throws RepositoryException, DatatypeConfigurationException {
-        connection.add(new StatementImpl(vf.createURI(NAMESPACE, objectUuid), vf.createURI(NAMESPACE, "name"), vf.createLiteral("objUuid")));
-        //created
-        String uuid = "uuid1";
-        connection.add(new StatementImpl(vf.createURI(NAMESPACE, uuid), vf.createURI(RDF_NS, "type"), vf.createURI(NAMESPACE, "Created")));
-        connection.add(new StatementImpl(vf.createURI(NAMESPACE, uuid), vf.createURI(NAMESPACE, "createdItem"), vf.createURI(NAMESPACE, objectUuid)));
-        connection.add(new StatementImpl(vf.createURI(NAMESPACE, uuid), vf.createURI(NAMESPACE, "performedBy"), vf.createURI("urn:system:A")));
-        connection.add(new StatementImpl(vf.createURI(NAMESPACE, uuid), vf.createURI(NAMESPACE, "stringLit"), vf.createLiteral("stringLit1")));
-        connection.add(new StatementImpl(vf.createURI(NAMESPACE, uuid), vf.createURI(NAMESPACE, "stringLit"), vf.createLiteral("stringLit2")));
-        connection.add(new StatementImpl(vf.createURI(NAMESPACE, uuid), vf.createURI(NAMESPACE, "stringLit"), vf.createLiteral("stringLit3")));
-        connection.add(new StatementImpl(vf.createURI(NAMESPACE, uuid), vf.createURI(NAMESPACE, "stringLit"), vf.createLiteral("stringLit4")));
-        connection.add(new StatementImpl(vf.createURI(NAMESPACE, uuid), vf.createURI(NAMESPACE, "strLit1"), vf.createLiteral("strLit1")));
-        connection.add(new StatementImpl(vf.createURI(NAMESPACE, uuid), vf.createURI(NAMESPACE, "strLit1"), vf.createLiteral("strLit2")));
-        connection.add(new StatementImpl(vf.createURI(NAMESPACE, uuid), vf.createURI(NAMESPACE, "strLit1"), vf.createLiteral("strLit3")));
-        connection.add(new StatementImpl(vf.createURI(NAMESPACE, uuid), vf.createURI(NAMESPACE, "performedAt"), vf.createLiteral(DatatypeFactory.newInstance().newXMLGregorianCalendar(2011, 7, 12, 6, 0, 0, 0, 0))));
-        connection.add(new StatementImpl(vf.createURI(NAMESPACE, uuid), vf.createURI(NAMESPACE, "reportedAt"), vf.createLiteral(DatatypeFactory.newInstance().newXMLGregorianCalendar(2011, 7, 12, 6, 1, 0, 0, 0))));
-        //clicked
-        uuid = "uuid2";
-        connection.add(new StatementImpl(vf.createURI(NAMESPACE, uuid), vf.createURI(RDF_NS, "type"), vf.createURI(NAMESPACE, "Clicked")));
-        connection.add(new StatementImpl(vf.createURI(NAMESPACE, uuid), vf.createURI(NAMESPACE, "clickedItem"), vf.createURI(NAMESPACE, objectUuid)));
-        connection.add(new StatementImpl(vf.createURI(NAMESPACE, uuid), vf.createURI(NAMESPACE, "performedBy"), vf.createURI("urn:system:B")));
-        connection.add(new StatementImpl(vf.createURI(NAMESPACE, uuid), vf.createURI(NAMESPACE, "performedAt"), vf.createLiteral(DatatypeFactory.newInstance().newXMLGregorianCalendar(2011, 7, 12, 6, 2, 0, 0, 0))));
-        connection.add(new StatementImpl(vf.createURI(NAMESPACE, uuid), vf.createURI(NAMESPACE, "reportedAt"), vf.createLiteral(DatatypeFactory.newInstance().newXMLGregorianCalendar(2011, 7, 12, 6, 3, 0, 0, 0))));
-        //deleted
-        uuid = "uuid3";
-        connection.add(new StatementImpl(vf.createURI(NAMESPACE, uuid), vf.createURI(RDF_NS, "type"), vf.createURI(NAMESPACE, "Deleted")));
-        connection.add(new StatementImpl(vf.createURI(NAMESPACE, uuid), vf.createURI(NAMESPACE, "deletedItem"), vf.createURI(NAMESPACE, objectUuid)));
-        connection.add(new StatementImpl(vf.createURI(NAMESPACE, uuid), vf.createURI(NAMESPACE, "performedBy"), vf.createURI("urn:system:C")));
-        connection.add(new StatementImpl(vf.createURI(NAMESPACE, uuid), vf.createURI(NAMESPACE, "performedAt"), vf.createLiteral(DatatypeFactory.newInstance().newXMLGregorianCalendar(2011, 7, 12, 6, 4, 0, 0, 0))));
-        connection.add(new StatementImpl(vf.createURI(NAMESPACE, uuid), vf.createURI(NAMESPACE, "reportedAt"), vf.createLiteral(DatatypeFactory.newInstance().newXMLGregorianCalendar(2011, 7, 12, 6, 5, 0, 0, 0))));
-        //dropped
-        uuid = "uuid4";
-        connection.add(new StatementImpl(vf.createURI(NAMESPACE, uuid), vf.createURI(RDF_NS, "type"), vf.createURI(NAMESPACE, "Dropped")));
-        connection.add(new StatementImpl(vf.createURI(NAMESPACE, uuid), vf.createURI(NAMESPACE, "droppedItem"), vf.createURI(NAMESPACE, objectUuid)));
-        connection.add(new StatementImpl(vf.createURI(NAMESPACE, uuid), vf.createURI(NAMESPACE, "performedBy"), vf.createURI("urn:system:D")));
-        connection.add(new StatementImpl(vf.createURI(NAMESPACE, uuid), vf.createURI(NAMESPACE, "performedAt"), vf.createLiteral(DatatypeFactory.newInstance().newXMLGregorianCalendar(2011, 7, 12, 6, 6, 0, 0, 0))));
-        connection.add(new StatementImpl(vf.createURI(NAMESPACE, uuid), vf.createURI(NAMESPACE, "reportedAt"), vf.createLiteral(DatatypeFactory.newInstance().newXMLGregorianCalendar(2011, 7, 12, 6, 7, 0, 0, 0))));
-        //received
-        uuid = "uuid5";
-        connection.add(new StatementImpl(vf.createURI(NAMESPACE, uuid), vf.createURI(RDF_NS, "type"), vf.createURI(NAMESPACE, "Received")));
-        connection.add(new StatementImpl(vf.createURI(NAMESPACE, uuid), vf.createURI(NAMESPACE, "receivedItem"), vf.createURI(NAMESPACE, objectUuid)));
-        connection.add(new StatementImpl(vf.createURI(NAMESPACE, uuid), vf.createURI(NAMESPACE, "performedBy"), vf.createURI("urn:system:E")));
-        connection.add(new StatementImpl(vf.createURI(NAMESPACE, uuid), vf.createURI(NAMESPACE, "performedAt"), vf.createLiteral(DatatypeFactory.newInstance().newXMLGregorianCalendar(2011, 7, 12, 6, 8, 0, 0, 0))));
-        connection.add(new StatementImpl(vf.createURI(NAMESPACE, uuid), vf.createURI(NAMESPACE, "reportedAt"), vf.createLiteral(DatatypeFactory.newInstance().newXMLGregorianCalendar(2011, 7, 12, 6, 9, 0, 0, 0))));
-        //sent
-        uuid = "uuid6";
-        connection.add(new StatementImpl(vf.createURI(NAMESPACE, uuid), vf.createURI(RDF_NS, "type"), vf.createURI(NAMESPACE, "Sent")));
-        connection.add(new StatementImpl(vf.createURI(NAMESPACE, uuid), vf.createURI(NAMESPACE, "sentItem"), vf.createURI(NAMESPACE, objectUuid)));
-        connection.add(new StatementImpl(vf.createURI(NAMESPACE, uuid), vf.createURI(NAMESPACE, "performedBy"), vf.createURI("urn:system:F")));
-        connection.add(new StatementImpl(vf.createURI(NAMESPACE, uuid), vf.createURI(NAMESPACE, "performedAt"), vf.createLiteral(DatatypeFactory.newInstance().newXMLGregorianCalendar(2011, 7, 12, 6, 10, 0, 0, 0))));
-        connection.add(new StatementImpl(vf.createURI(NAMESPACE, uuid), vf.createURI(NAMESPACE, "reportedAt"), vf.createLiteral(DatatypeFactory.newInstance().newXMLGregorianCalendar(2011, 7, 12, 6, 11, 0, 0, 0))));
-        //stored
-        uuid = "uuid7";
-        connection.add(new StatementImpl(vf.createURI(NAMESPACE, uuid), vf.createURI(RDF_NS, "type"), vf.createURI(NAMESPACE, "Stored")));
-        connection.add(new StatementImpl(vf.createURI(NAMESPACE, uuid), vf.createURI(NAMESPACE, "storedItem"), vf.createURI(NAMESPACE, objectUuid)));
-        connection.add(new StatementImpl(vf.createURI(NAMESPACE, uuid), vf.createURI(NAMESPACE, "performedBy"), vf.createURI("urn:system:G")));
-        connection.add(new StatementImpl(vf.createURI(NAMESPACE, uuid), vf.createURI(NAMESPACE, "performedAt"), vf.createLiteral(DatatypeFactory.newInstance().newXMLGregorianCalendar(2011, 7, 12, 6, 12, 0, 0, 0))));
-        connection.add(new StatementImpl(vf.createURI(NAMESPACE, uuid), vf.createURI(NAMESPACE, "reportedAt"), vf.createLiteral(DatatypeFactory.newInstance().newXMLGregorianCalendar(2011, 7, 12, 6, 13, 0, 0, 0))));
-
-        //derivedFrom
-        connection.add(new StatementImpl(vf.createURI(NAMESPACE, descendant), vf.createURI(NAMESPACE, "derivedFrom"), vf.createURI(NAMESPACE, ancestor)));
-        connection.add(new StatementImpl(vf.createURI(NAMESPACE, descendant), vf.createURI(NAMESPACE, "name"), vf.createLiteral("descendantOne")));
-        connection.add(new StatementImpl(vf.createURI(NAMESPACE, ancestor), vf.createURI(NAMESPACE, "name"), vf.createLiteral("ancestor1")));
-
-        //heartbeats
-        String hbuuid = "hbuuid1";
-        connection.add(new StatementImpl(vf.createURI(HBNAMESPACE, hbuuid), vf.createURI(RDF_NS, "type"), vf.createURI(HBNAMESPACE, "HeartbeatMeasurement")));
-        connection.add(new StatementImpl(vf.createURI(HBNAMESPACE, hbuuid), vf.createURI(HB_TIMESTAMP), vf.createLiteral((START + 1) + "")));
-        connection.add(new StatementImpl(vf.createURI(HBNAMESPACE, hbuuid), vf.createURI(HBNAMESPACE, "count"), vf.createLiteral(1 + "")));
-        connection.add(new StatementImpl(vf.createURI(HBNAMESPACE, hbuuid), vf.createURI(HBNAMESPACE, "systemName"), vf.createURI("urn:system:A")));
-        connection.add(new StatementImpl(vf.createURI("urn:system:A"), vf.createURI(HBNAMESPACE, "heartbeat"), vf.createURI(HBNAMESPACE, hbuuid)));
-
-        hbuuid = "hbuuid2";
-        connection.add(new StatementImpl(vf.createURI(HBNAMESPACE, hbuuid), vf.createURI(RDF_NS, "type"), vf.createURI(HBNAMESPACE, "HeartbeatMeasurement")));
-        connection.add(new StatementImpl(vf.createURI(HBNAMESPACE, hbuuid), vf.createURI(HB_TIMESTAMP), vf.createLiteral((START + 2) + "")));
-        connection.add(new StatementImpl(vf.createURI(HBNAMESPACE, hbuuid), vf.createURI(HBNAMESPACE, "count"), vf.createLiteral(2 + "")));
-        connection.add(new StatementImpl(vf.createURI(HBNAMESPACE, hbuuid), vf.createURI(HBNAMESPACE, "systemName"), vf.createURI("urn:system:B")));
-        connection.add(new StatementImpl(vf.createURI("urn:system:B"), vf.createURI(HBNAMESPACE, "heartbeat"), vf.createURI(HBNAMESPACE, hbuuid)));
-
-        hbuuid = "hbuuid3";
-        connection.add(new StatementImpl(vf.createURI(HBNAMESPACE, hbuuid), vf.createURI(RDF_NS, "type"), vf.createURI(HBNAMESPACE, "HeartbeatMeasurement")));
-        connection.add(new StatementImpl(vf.createURI(HBNAMESPACE, hbuuid), vf.createURI(HB_TIMESTAMP), vf.createLiteral((START + 3) + "")));
-        connection.add(new StatementImpl(vf.createURI(HBNAMESPACE, hbuuid), vf.createURI(HBNAMESPACE, "count"), vf.createLiteral(3 + "")));
-        connection.add(new StatementImpl(vf.createURI(HBNAMESPACE, hbuuid), vf.createURI(HBNAMESPACE, "systemName"), vf.createURI("urn:system:C")));
-        connection.add(new StatementImpl(vf.createURI("urn:system:C"), vf.createURI(HBNAMESPACE, "heartbeat"), vf.createURI(HBNAMESPACE, hbuuid)));
-
-        connection.add(new StatementImpl(vf.createURI("urn:subj1"), vf.createURI("urn:pred"), vf.createLiteral("obj1")));
-        connection.add(new StatementImpl(vf.createURI("urn:subj1"), vf.createURI("urn:pred"), vf.createLiteral("obj2")));
-        connection.add(new StatementImpl(vf.createURI("urn:subj1"), vf.createURI("urn:pred"), vf.createLiteral("obj3")));
-        connection.add(new StatementImpl(vf.createURI("urn:subj1"), vf.createURI("urn:pred"), vf.createLiteral("obj4")));
-        connection.add(new StatementImpl(vf.createURI("urn:subj2"), vf.createURI("urn:pred"), vf.createLiteral("obj1")));
-        connection.add(new StatementImpl(vf.createURI("urn:subj2"), vf.createURI("urn:pred"), vf.createLiteral("obj2")));
-        connection.add(new StatementImpl(vf.createURI("urn:subj2"), vf.createURI("urn:pred"), vf.createLiteral("obj3")));
-        connection.add(new StatementImpl(vf.createURI("urn:subj2"), vf.createURI("urn:pred"), vf.createLiteral("obj4")));
-        connection.add(new StatementImpl(vf.createURI("urn:subj3"), vf.createURI("urn:pred"), vf.createLiteral("obj1")));
-        connection.add(new StatementImpl(vf.createURI("urn:subj3"), vf.createURI("urn:pred"), vf.createLiteral("obj4")));
-
-        //Foreign Chars
-        connection.add(new StatementImpl(vf.createURI("urn:subj1"), vf.createURI("urn:pred"), vf.createLiteral(FAN_CH_SIM))); 
-        connection.add(new StatementImpl(vf.createURI("urn:subj1"), vf.createURI("urn:pred"), vf.createLiteral(FAN_CH_TRAD))); 
-        connection.add(new StatementImpl(vf.createURI("urn:subj1"), vf.createURI("urn:pred"), vf.createLiteral(FAN_TH))); 
-        connection.add(new StatementImpl(vf.createURI("urn:subj1"), vf.createURI("urn:pred"), vf.createLiteral(FAN_RN))); 
-        connection.add(new StatementImpl(vf.createURI("urn:subj2"), vf.createURI("urn:pred"), vf.createLiteral(FAN_CH_SIM)));
-        connection.add(new StatementImpl(vf.createURI("urn:subj2"), vf.createURI("urn:pred"), vf.createLiteral(FAN_CH_TRAD)));
-        connection.add(new StatementImpl(vf.createURI("urn:subj2"), vf.createURI("urn:pred"), vf.createLiteral(FAN_TH)));
-        connection.add(new StatementImpl(vf.createURI("urn:subj2"), vf.createURI("urn:pred"), vf.createLiteral(FAN_RN)));
-        connection.add(new StatementImpl(vf.createURI("urn:subj3"), vf.createURI("urn:pred"), vf.createLiteral(FAN_CH_SIM)));
-        connection.add(new StatementImpl(vf.createURI("urn:subj3"), vf.createURI("urn:pred"), vf.createLiteral(FAN_CH_TRAD)));
-        
-        connection.commit();
-    }
-
-    private static final String FAN_CH_SIM = "风扇";
-    private static final String FAN_CH_TRAD = "風扇";
-    private static final String FAN_TH = "แฟน";
-    private static final String FAN_RN = "вентилятор";
-    
-    @Override
-    protected void tearDown() throws Exception {
-        super.tearDown();
-        connection.close();
-        repository.shutDown();
-    }
-
-    protected String getXmlDate(long ts) throws DatatypeConfigurationException {
-        GregorianCalendar gregorianCalendar = new GregorianCalendar();
-        gregorianCalendar.setTimeInMillis(ts);
-        //"2011-07-12T05:12:00.000Z"^^xsd:dateTime
-        return "\"" + vf.createLiteral(DatatypeFactory.newInstance().newXMLGregorianCalendar(gregorianCalendar)).stringValue() + "\"^^xsd:dateTime";
-    }
-
-//    public void testScanAll() throws Exception {
-//        Scanner sc = connector.createScanner("lubm_spo", Constants.NO_AUTHS);
-//        for (Map.Entry<Key, Value> aSc : sc) System.out.println(aSc.getKey().getRow());
-//    }
-
-    public void testNamespace() throws Exception {
-        String namespace = "urn:testNamespace#";
-        String prefix = "pfx";
-        connection.setNamespace(prefix, namespace);
-
-        assertEquals(namespace, connection.getNamespace(prefix));
-    }
-
-    public void testValues() throws Exception {
-      String query = "SELECT DISTINCT ?entity WHERE {"
-              + "VALUES (?entity) { (<http://test/entity>) }" 
-              + "}";
-        TupleQuery tupleQuery = connection.prepareTupleQuery(QueryLanguage.SPARQL, query);
-        CountTupleHandler tupleHandler = new CountTupleHandler();
-        tupleQuery.evaluate(tupleHandler);
-        assertEquals(1, tupleHandler.getCount());
-    }
-    
-    public void testGetNamespaces() throws Exception {
-        String namespace = "urn:testNamespace#";
-        String prefix = "pfx";
-        connection.setNamespace(prefix, namespace);
-
-        namespace = "urn:testNamespace2#";
-        prefix = "pfx2";
-        connection.setNamespace(prefix, namespace);
-
-        RepositoryResult<Namespace> result = connection.getNamespaces();
-        int count = 0;
-        while (result.hasNext()) {
-            result.next();
-            count++;
-        }
-
-        assertEquals(2, count);
-    }
-
-    public void testAddCommitStatement() throws Exception {
-        StatementImpl stmt = new StatementImpl(vf.createURI("urn:namespace#subj"), vf.createURI("urn:namespace#pred"), vf.createLiteral("object"));
-        connection.add(stmt);
-        connection.commit();
-    }
-
-    public void testSelectOnlyQuery() throws Exception {
-        String query = "PREFIX ns:<" + NAMESPACE + ">\n" +
-                "select * where {\n" +
-                "ns:uuid1 ns:createdItem ?cr.\n" +
-                "ns:uuid1 ns:reportedAt ?ra.\n" +
-                "ns:uuid1 ns:performedAt ?pa.\n" +
-                "}\n";
-        TupleQuery tupleQuery = connection.prepareTupleQuery(QueryLanguage.SPARQL, query);
-//        tupleQuery.evaluate(new PrintTupleHandler());
-        CountTupleHandler tupleHandler = new CountTupleHandler();
-        tupleQuery.evaluate(tupleHandler);
-        assertEquals(1, tupleHandler.getCount());
-    }
-
-    public void testForeignSelectOnlyQuery() throws Exception {
-        String query;
-        query = "select * where { ?s <urn:pred> ?o }"; // hits po
-        TupleQuery tupleQuery = connection.prepareTupleQuery(QueryLanguage.SPARQL, query);
-        CountTupleHandler tupleHandler = new CountTupleHandler();
-        tupleQuery.evaluate(tupleHandler);
-        assertEquals(20, tupleHandler.getCount());
-
-        query = "select * where { <urn:subj1> <urn:pred> ?o }"; //hits spo
-        tupleQuery = connection.prepareTupleQuery(QueryLanguage.SPARQL, query);
-        tupleHandler = new CountTupleHandler();
-        tupleQuery.evaluate(tupleHandler);
-        assertEquals(8, tupleHandler.getCount());
-
-        query = "select * where { ?s ?p '"+FAN_CH_SIM+"' }"; //hits osp
-        tupleQuery = connection.prepareTupleQuery(QueryLanguage.SPARQL, query);
-        tupleHandler = new CountTupleHandler();
-        tupleQuery.evaluate(tupleHandler);
-        assertEquals(3, tupleHandler.getCount());
-    }
-
-
-
-    //provenance Queries//////////////////////////////////////////////////////////////////////
-
-    public void testEventInfo() throws Exception {
-        String query = "PREFIX ns:<" + NAMESPACE + ">\n" +
-                "select * where {\n" +
-                "   ns:uuid1 ?p ?o.\n" +
-                "}\n";
-
-        TupleQuery tupleQuery = connection.prepareTupleQuery(QueryLanguage.SPARQL, query);
-//                tupleQuery.evaluate(new PrintTupleHandler());
-        CountTupleHandler tupleHandler = new CountTupleHandler();
-        tupleQuery.evaluate(tupleHandler);
-        assertEquals(12, tupleHandler.getCount());
-    }
-
-    public void testAllAncestors() throws Exception {
-        String query = "PREFIX ns:<" + NAMESPACE + ">\n" +
-                "select * where {\n" +
-                "ns:" + descendant + " ns:derivedFrom ?dr.\n" +
-                "}\n";
-        TupleQuery tupleQuery = connection.prepareTupleQuery(QueryLanguage.SPARQL, query);
-        //        tupleQuery.evaluate(new PrintTupleHandler());
-        CountTupleHandler tupleHandler = new CountTupleHandler();
-        tupleQuery.evaluate(tupleHandler);
-        assertEquals(1, tupleHandler.getCount());
-    }
-
-    public void testAllDescendants() throws Exception {
-        String query = "PREFIX ns:<" + NAMESPACE + ">\n" +
-                "select * where {\n" +
-                "?ds ns:derivedFrom ns:" + ancestor + ".\n" +
-                "}\n";
-        TupleQuery tupleQuery = connection.prepareTupleQuery(QueryLanguage.SPARQL, query);
-//        tupleQuery.evaluate(new PrintTupleHandler());
-        CountTupleHandler tupleHandler = new CountTupleHandler();
-        tupleQuery.evaluate(tupleHandler);
-        assertEquals(1, tupleHandler.getCount());
-    }
-
-    public void testEventsForUri() throws Exception {
-        String query = "PREFIX rdf: <http://www.w3.org/1999/02/22-rdf-syntax-ns#>\n" +
-                "PREFIX ns:<" + NAMESPACE + ">\n" +
-                "PREFIX mvmpart: <urn:mvm.mmrts.partition.rdf/08/2011#>\n" +
-                "PREFIX mvm: <" + RdfCloudTripleStoreConstants.NAMESPACE + ">\n" +
-                "PREFIX xsd: <http://www.w3.org/2001/XMLSchema#>\n" +
-                "select * where {\n" +
-                "{" +
-                "   ?s rdf:type ns:Created.\n" +
-                "   ?s ns:createdItem ns:objectuuid1.\n" +
-                "   ?s ns:performedBy ?pb.\n" +
-                "   ?s ns:performedAt ?pa.\n" +
-                "   FILTER(mvm:range(?pa, " + getXmlDate(START) + ", " + getXmlDate(END) + "))\n" +
-                "}\n" +
-                "UNION {" +
-                "   ?s rdf:type ns:Clicked.\n" +
-                "   ?s ns:clickedItem ns:objectuuid1.\n" +
-                "   ?s ns:performedBy ?pb.\n" +
-                "   ?s ns:performedAt ?pa.\n" +
-                "   FILTER(mvm:range(?pa, " + getXmlDate(START) + ", " + getXmlDate(END) + "))\n" +
-                "}\n" +
-                "UNION {" +
-                "   ?s rdf:type ns:Deleted.\n" +
-                "   ?s ns:deletedItem ns:objectuuid1.\n" +
-                "   ?s ns:performedBy ?pb.\n" +
-                "   ?s ns:performedAt ?pa.\n" +
-                "   FILTER(mvm:range(?pa, " + getXmlDate(START) + ", " + getXmlDate(END) + "))\n" +
-                "}\n" +
-                "UNION {" +
-                "   ?s rdf:type ns:Dropped.\n" +
-                "   ?s ns:droppedItem ns:objectuuid1.\n" +
-                "   ?s ns:performedBy ?pb.\n" +
-                "   ?s ns:performedAt ?pa.\n" +
-                "   FILTER(mvm:range(?pa, " + getXmlDate(START) + ", " + getXmlDate(END) + "))\n" +
-                "}\n" +
-                "UNION {" +
-                "   ?s rdf:type ns:Received.\n" +
-                "   ?s ns:receivedItem ns:objectuuid1.\n" +
-                "   ?s ns:performedBy ?pb.\n" +
-                "   ?s ns:performedAt ?pa.\n" +
-                "   FILTER(mvm:range(?pa, " + getXmlDate(START) + ", " + getXmlDate(END) + "))\n" +
-                "}\n" +
-                "UNION {" +
-                "   ?s rdf:type ns:Stored.\n" +
-                "   ?s ns:storedItem ns:objectuuid1.\n" +
-                "   ?s ns:performedBy ?pb.\n" +
-                "   ?s ns:performedAt ?pa.\n" +
-                "   FILTER(mvm:range(?pa, " + getXmlDate(START) + ", " + getXmlDate(END) + "))\n" +
-                "}\n" +
-                "UNION {" +
-                "   ?s rdf:type ns:Sent.\n" +
-                "   ?s ns:sentItem ns:objectuuid1.\n" +
-                "   ?s ns:performedBy ?pb.\n" +
-                "   ?s ns:performedAt ?pa.\n" +
-                "   FILTER(mvm:range(?pa, " + getXmlDate(START) + ", " + getXmlDate(END) + "))\n" +
-                "}\n" +
-                "}\n";
-        TupleQuery tupleQuery = connection.prepareTupleQuery(QueryLanguage.SPARQL, query);
-//        tupleQuery.setBinding(START_BINDING, vf.createLiteral(START));
-//        tupleQuery.setBinding(END_BINDING, vf.createLiteral(END));
-//        tupleQuery.setBinding(TIME_PREDICATE, vf.createURI(NAMESPACE, "performedAt"));
-//                tupleQuery.evaluate(new PrintTupleHandler());
-        CountTupleHandler tupleHandler = new CountTupleHandler();
-        tupleQuery.evaluate(tupleHandler);
-        assertEquals(7, tupleHandler.getCount());
-    }
-
-    public void testAllEvents() throws Exception {
-        String query = "PREFIX rdf: <http://www.w3.org/1999/02/22-rdf-syntax-ns#>\n" +
-                "PREFIX ns:<" + NAMESPACE + ">\n" +
-                "PREFIX mvmpart: <urn:mvm.mmrts.partition.rdf/08/2011#>\n" +
-                "PREFIX mvm: <" + RdfCloudTripleStoreConstants.NAMESPACE + ">\n" +
-                "PREFIX xsd: <http://www.w3.org/2001/XMLSchema#>\n" +
-                "select * where {\n" +
-                "{" +
-                "   ?s rdf:type ns:Created.\n" +
-                "   ?s ns:performedBy ?pb.\n" +
-                "   ?s ns:performedAt ?pa.\n" +
-                "   FILTER(mvm:range(?pa, " + getXmlDate(START) + ", " + getXmlDate(END) + "))\n" +
-                "}\n" +
-                "UNION {" +
-                "   ?s rdf:type ns:Clicked.\n" +
-                "   ?s ns:performedBy ?pb.\n" +
-                "   ?s ns:performedAt ?pa.\n" +
-                "   FILTER(mvm:range(?pa, " + getXmlDate(START) + ", " + getXmlDate(END) + "))\n" +
-                "}\n" +
-                "UNION {" +
-                "   ?s rdf:type ns:Deleted.\n" +
-                "   ?s ns:performedBy ?pb.\n" +
-                "   ?s ns:performedAt ?pa.\n" +
-                "   FILTER(mvm:range(?pa, " + getXmlDate(START) + ", " + getXmlDate(END) + "))\n" +
-                "}\n" +
-                "UNION {" +
-                "   ?s rdf:type ns:Dropped.\n" +
-                "   ?s ns:performedBy ?pb.\n" +
-                "   ?s ns:performedAt ?pa.\n" +
-                "   FILTER(mvm:range(?pa, " + getXmlDate(START) + ", " + getXmlDate(END) + "))\n" +
-                "}\n" +
-                "UNION {" +
-                "   ?s rdf:type ns:Received.\n" +
-                "   ?s ns:performedBy ?pb.\n" +
-                "   ?s ns:performedAt ?pa.\n" +
-                "   FILTER(mvm:range(?pa, " + getXmlDate(START) + ", " + getXmlDate(END) + "))\n" +
-                "}\n" +
-                "UNION {" +
-                "   ?s rdf:type ns:Stored.\n" +
-                "   ?s ns:performedBy ?pb.\n" +
-                "   ?s ns:performedAt ?pa.\n" +
-                "   FILTER(mvm:range(?pa, " + getXmlDate(START) + ", " + getXmlDate(END) + "))\n" +
-                "}\n" +
-                "UNION {" +
-                "   ?s rdf:type ns:Sent.\n" +
-                "   ?s ns:performedBy ?pb.\n" +
-                "   ?s ns:performedAt ?pa.\n" +
-                "   FILTER(mvm:range(?pa, " + getXmlDate(START) + ", " + getXmlDate(END) + "))\n" +
-                "}\n" +
-                "}\n";
-        TupleQuery tupleQuery = connection.prepareTupleQuery(QueryLanguage.SPARQL, query);
-//        tupleQuery.setBinding(START_BINDING, vf.createLiteral(START));
-//        tupleQuery.setBinding(END_BINDING, vf.createLiteral(END));
-//        tupleQuery.setBinding(TIME_PREDICATE, vf.createURI(NAMESPACE, "performedAt"));
-//                tupleQuery.evaluate(new PrintTupleHandler());
-        CountTupleHandler tupleHandler = new CountTupleHandler();
-        tupleQuery.evaluate(tupleHandler);
-        assertEquals(7, tupleHandler.getCount());
-//        System.out.println(tupleHandler.getCount());
-    }
-
-    public void testEventsBtwnSystems() throws Exception {  //TODO: How to do XMLDateTime ranges
-        String query = "PREFIX rdf: <http://www.w3.org/1999/02/22-rdf-syntax-ns#>\n" +
-                "PREFIX ns:<" + NAMESPACE + ">\n" +
-                "PREFIX mvmpart: <urn:mvm.mmrts.partition.rdf/08/2011#>\n" +
-                "PREFIX mvm: <" + RdfCloudTripleStoreConstants.NAMESPACE + ">\n" +
-                "PREFIX xsd: <http://www.w3.org/2001/XMLSchema#>\n" +
-                "select * where {\n" +
-                "   ?sendEvent rdf:type ns:Sent;\n" +
-                "              ns:sentItem ?objUuid;\n" +
-                "              ns:performedBy <urn:system:F>;\n" +
-                "              ns:performedAt ?spa.\n" +
-                "   ?recEvent rdf:type ns:Received;\n" +
-                "              ns:receivedItem ?objUuid;\n" +
-                "              ns:performedBy <urn:system:E>;\n" +
-                "              ns:performedAt ?rpa.\n" +
-//                "   FILTER(mvm:range(?spa, \"2011-07-12T05:12:00.000Z\"^^xsd:dateTime, \"2011-07-12T07:12:00.000Z\"^^xsd:dateTime))\n" +
-                "   FILTER(mvm:range(?spa, " + getXmlDate(START) + ", " + getXmlDate(END) + "))\n" +
-                "   FILTER(mvm:range(?rpa, " + getXmlDate(START) + ", " + getXmlDate(END) + "))\n" +
-                "}\n";
-        TupleQuery tupleQuery = connection.prepareTupleQuery(QueryLanguage.SPARQL, query);
-//        tupleQuery.setBinding(START_BINDING, vf.createLiteral(START));
-//        tupleQuery.setBinding(END_BINDING, vf.createLiteral(END));
-//        tupleQuery.setBinding(TIME_PREDICATE, vf.createURI(NAMESPACE, "performedAt"));
-//        tupleQuery.evaluate(new PrintTupleHandler());
-        CountTupleHandler tupleHandler = new CountTupleHandler();
-        tupleQuery.evaluate(tupleHandler);
-        assertEquals(1, tupleHandler.getCount());
-    }
-
-    public void testHeartbeatCounts() throws Exception {
-        String query = "PREFIX rdf: <http://www.w3.org/1999/02/22-rdf-syntax-ns#>\n" +
-                "PREFIX hns:<" + HBNAMESPACE + ">\n" +
-                "PREFIX mvmpart: <urn:mvm.mmrts.partition.rdf/08/2011#>\n" +
-                "PREFIX mvm: <" + RdfCloudTripleStoreConstants.NAMESPACE + ">\n" +
-                "PREFIX xsd: <http://www.w3.org/2001/XMLSchema#>\n" +
-                "select * where {\n" +
-                "   ?hb rdf:type hns:HeartbeatMeasurement;\n" +
-                "              hns:count ?count;\n" +
-                "              hns:timestamp ?ts;\n" +
-                "              hns:systemName ?systemName.\n" +
-                "   FILTER(mvm:range(?ts, \"" + START + "\", \"" + (START + 3) + "\"))\n" +
-                "}\n";
-//        System.out.println(query);
-        TupleQuery tupleQuery = connection.prepareTupleQuery(QueryLanguage.SPARQL, query);
-//        tupleQuery.setBinding(RdfCloudTripleStoreConfiguration.CONF_QUERYPLAN_FLAG, vf.createLiteral(true));
-//        tupleQuery.evaluate(new PrintTupleHandler());
-        CountTupleHandler tupleHandler = new CountTupleHandler();
-        tupleQuery.evaluate(tupleHandler);
-        assertEquals(2, tupleHandler.getCount());
-    }
-
-    //provenance Queries//////////////////////////////////////////////////////////////////////
-
-    public void testCreatedEvents() throws Exception {
-        String query = "PREFIX ns:<" + NAMESPACE + ">\n" +
-                "select * where {\n" +
-                "   ?s ns:createdItem ns:objectuuid1.\n" +
-                "   ?s ns:reportedAt ?ra.\n" +
-                "}\n";
-        TupleQuery tupleQuery = connection.prepareTupleQuery(QueryLanguage.SPARQL, query);
-        CountTupleHandler tupleHandler = new CountTupleHandler();
-        tupleQuery.evaluate(tupleHandler);
-        assertEquals(1, tupleHandler.getCount());
-    }
-
-    public void testSelectAllAfterFilter() throws Exception {
-        String query = "PREFIX ns:<" + NAMESPACE + ">\n" +
-                "select * where {\n" +
-                "   ?s ns:createdItem ns:objectuuid1.\n" +
-                "   ?s ?p ?o.\n" +
-                "}\n";
-        TupleQuery tupleQuery = connection.prepareTupleQuery(QueryLanguage.SPARQL, query);
-        CountTupleHandler tupleHandler = new CountTupleHandler();
-        tupleQuery.evaluate(tupleHandler);
-        assertEquals(12, tupleHandler.getCount());
-    }
-
-    public void testFilterQuery() throws Exception {
-        String query = "PREFIX ns:<" + NAMESPACE + ">\n" +
-                "select * where {\n" +
-                "ns:uuid1 ns:createdItem ?cr.\n" +
-                "ns:uuid1 ns:stringLit ?sl.\n" +
-                "FILTER regex(?sl, \"stringLit1\")" +
-                "ns:uuid1 ns:reportedAt ?ra.\n" +
-                "ns:uuid1 ns:performedAt ?pa.\n" +
-                "}\n";
-        TupleQuery tupleQuery = connection.prepareTupleQuery(QueryLanguage.SPARQL, query);
-        //        tupleQuery.evaluate(new PrintTupleHandler());
-        CountTupleHandler tupleHandler = new CountTupleHandler();
-        tupleQuery.evaluate(tupleHandler);
-        assertEquals(1, tupleHandler.getCount());
-    }
-
-    public void testMultiplePredicatesMultipleBindingSets() throws Exception {
-        //MMRTS-121
-        String query = "PREFIX ns:<" + NAMESPACE + ">\n" +
-                "select * where {\n" +
-                "?id ns:createdItem ns:objectuuid1.\n" +
-                "?id ns:stringLit ?sl.\n" +
-                "?id ns:strLit1 ?s2.\n" +
-                "}\n";
-        TupleQuery tupleQuery = connection.prepareTupleQuery(QueryLanguage.SPARQL, query);
-//        tupleQuery.evaluate(new PrintTupleHandler());
-        CountTupleHandler tupleHandler = new CountTupleHandler();
-        tupleQuery.evaluate(tupleHandler);
-        assertEquals(12, tupleHandler.getCount());
-    }
-
-    public void testMultiShardLookupTimeRange() throws Exception {
-        //MMRTS-113
-        String query = "PREFIX hb: <http://here/2010/tracked-data-provenance/heartbeat/ns#>\n" +
-                "PREFIX mvmpart: <urn:mvm.mmrts.partition.rdf/08/2011#>\n" +
-                "SELECT * WHERE\n" +
-                "{\n" +
-                "?id hb:timestamp ?timestamp.\n" +
-//                "FILTER(mvmpart:timeRange(?id, hb:timestamp, " + START + " , " + (START + 2) + " , 'TIMESTAMP'))\n" +
-                "?id hb:count ?count.\n" +
-                "?system hb:heartbeat ?id.\n" +
-                "}";
-        TupleQuery tupleQuery = connection.prepareTupleQuery(QueryLanguage.SPARQL, query);
-//        tupleQuery.evaluate(new PrintTupleHandler());
-        CountTupleHandler tupleHandler = new CountTupleHandler();
-        tupleQuery.evaluate(tupleHandler);
-        assertEquals(3, tupleHandler.getCount());
-    }
-
-    public void testMultiShardLookupTimeRangeValueConst() throws Exception {
-        //MMRTS-113
-        String query = "PREFIX hb: <http://here/2010/tracked-data-provenance/heartbeat/ns#>\n" +
-                "PREFIX mvmpart: <urn:mvm.mmrts.partition.rdf/08/2011#>\n" +
-                "SELECT * WHERE\n" +
-                "{\n" +
-                "<http://here/2010/tracked-data-provenance/heartbeat/ns#hbuuid2> hb:timestamp ?timestamp.\n" +
-//                "FILTER(mvmpart:timeRange(<http://here/2010/tracked-data-provenance/heartbeat/ns#hbuuid2>, hb:timestamp, " + START + " , " + END + " , 'TIMESTAMP'))\n" +
-                "<http://here/2010/tracked-data-provenance/heartbeat/ns#hbuuid2> hb:count ?count.\n" +
-                "?system hb:heartbeat <http://here/2010/tracked-data-provenance/heartbeat/ns#hbuuid2>.\n" +
-                "}";
-        TupleQuery tupleQuery = connection.prepareTupleQuery(QueryLanguage.SPARQL, query);
-//        tupleQuery.evaluate(new PrintTupleHandler());
-        CountTupleHandler tupleHandler = new CountTupleHandler();
-        tupleQuery.evaluate(tupleHandler);
-        assertEquals(1, tupleHandler.getCount());
-    }
-
-    public void testLinkQuery() throws Exception {
-        String query = "PREFIX ns:<" + NAMESPACE + ">\n" +
-                "SELECT * WHERE {\n" +
-                "     <http://here/2010/tracked-data-provenance/ns#uuid1> ns:createdItem ?o .\n" +
-                "     ?o ns:name ?n .\n" +
-                "}";
-        TupleQuery tupleQuery = connection.prepareTupleQuery(QueryLanguage.SPARQL, query);
-//        tupleQuery.evaluate(new PrintTupleHandler());
-        CountTupleHandler tupleHandler = new CountTupleHandler();
-        tupleQuery.evaluate(tupleHandler);
-        assertEquals(1, tupleHandler.getCount());
-    }
-
-    public void testRangeOverDuplicateItems() throws Exception {
-        String query = "PREFIX ns:<" + NAMESPACE + ">\n" +
-                "SELECT * WHERE {\n" +
-                "     ?subj <urn:pred> \"obj2\" .\n" +
-                "}";
-        TupleQuery tupleQuery = connection.prepareTupleQuery(QueryLanguage.SPARQL, query);
-//        tupleQuery.evaluate(new PrintTupleHandler());
-        CountTupleHandler tupleHandler = new CountTupleHandler();
-        tupleQuery.evaluate(tupleHandler);
-        assertEquals(2, tupleHandler.getCount());
-    }
-
-    private static class PrintTupleHandler implements TupleQueryResultHandler {
-
-        @Override
-        public void startQueryResult(List<String> strings) throws TupleQueryResultHandlerException {
-        }
-
-        @Override
-        public void endQueryResult() throws TupleQueryResultHandlerException {
-
-        }
-
-        @Override
-        public void handleSolution(BindingSet bindingSet) throws TupleQueryResultHandlerException {
-            System.out.println(bindingSet);
-        }
-
-        @Override
-        public void handleBoolean(boolean paramBoolean) throws QueryResultHandlerException {
-        }
-
-        @Override
-        public void handleLinks(List<String> paramList) throws QueryResultHandlerException {
-        }
-    }
-
-    private static class CountTupleHandler implements TupleQueryResultHandler {
-
-        int count = 0;
-
-        @Override
-        public void startQueryResult(List<String> strings) throws TupleQueryResultHandlerException {
-        }
-
-        @Override
-        public void endQueryResult() throws TupleQueryResultHandlerException {
-        }
-
-        @Override
-        public void handleSolution(BindingSet bindingSet) throws TupleQueryResultHandlerException {
-            count++;
-        }
-
-        public int getCount() {
-            return count;
-        }
-
-        @Override
-        public void handleBoolean(boolean paramBoolean) throws QueryResultHandlerException {
-        }
-
-        @Override
-        public void handleLinks(List<String> paramList) throws QueryResultHandlerException {
-        }
-    }
-
-}
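
The deleted test above relies throughout on a small counting TupleQueryResultHandler. For reference, a self-contained sketch of that pattern against any openrdf repository connection (the countSolutions helper and its SPARQL string are illustrative, not part of the original test):

import java.util.List;

import org.openrdf.query.BindingSet;
import org.openrdf.query.QueryLanguage;
import org.openrdf.query.TupleQuery;
import org.openrdf.query.TupleQueryResultHandler;
import org.openrdf.repository.sail.SailRepositoryConnection;

public class CountingHandlerSketch {

    /** Counts solutions, mirroring CountTupleHandler in the test above. */
    static class CountingHandler implements TupleQueryResultHandler {
        private int count = 0;
        public int getCount() { return count; }
        @Override public void startQueryResult(List<String> bindingNames) { }
        @Override public void endQueryResult() { }
        @Override public void handleSolution(BindingSet bindingSet) { count++; }
        @Override public void handleBoolean(boolean value) { }
        @Override public void handleLinks(List<String> linkUrls) { }
    }

    static int countSolutions(SailRepositoryConnection connection, String sparql) throws Exception {
        TupleQuery query = connection.prepareTupleQuery(QueryLanguage.SPARQL, sparql);
        CountingHandler handler = new CountingHandler();
        query.evaluate(handler); // streams each result row through handleSolution(...)
        return handler.getCount();
    }
}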

http://git-wip-us.apache.org/repos/asf/incubator-rya/blob/80faf06d/sail/rya.sail.impl/src/test/java/mvm/rya/RdfCloudTripleStoreUtilsTest.java
----------------------------------------------------------------------
diff --git a/sail/rya.sail.impl/src/test/java/mvm/rya/RdfCloudTripleStoreUtilsTest.java b/sail/rya.sail.impl/src/test/java/mvm/rya/RdfCloudTripleStoreUtilsTest.java
deleted file mode 100644
index c693090..0000000
--- a/sail/rya.sail.impl/src/test/java/mvm/rya/RdfCloudTripleStoreUtilsTest.java
+++ /dev/null
@@ -1,86 +0,0 @@
-//package mvm.rya;
-
-/*
- * #%L
- * mvm.rya.rya.sail.impl
- * %%
- * Copyright (C) 2014 Rya
- * %%
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * You may obtain a copy of the License at
- * 
- *      http://www.apache.org/licenses/LICENSE-2.0
- * 
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- * #L%
- */
-//
-//import java.util.List;
-//
-//import junit.framework.TestCase;
-//
-//import org.openrdf.model.BNode;
-//import org.openrdf.model.Resource;
-//import org.openrdf.model.URI;
-//import org.openrdf.model.Value;
-//import org.openrdf.model.impl.ValueFactoryImpl;
-//
-//import com.google.common.io.ByteStreams;
-//
-//import static mvm.rya.api.RdfCloudTripleStoreUtils.*;
-//
-//public class RdfCloudTripleStoreUtilsTest extends TestCase {
-//
-//	public void testWriteReadURI() throws Exception {
-//		final ValueFactoryImpl vf = new ValueFactoryImpl();
-//		URI uri = vf.createURI("http://www.example.org/test/rel");
-//		byte[] value = writeValue(uri);
-//
-//		Value readValue = readValue(ByteStreams
-//				.newDataInput(value), vf);
-//		assertEquals(uri, readValue);
-//	}
-//
-//	public void testWriteReadBNode() throws Exception {
-//		final ValueFactoryImpl vf = new ValueFactoryImpl();
-//		Value val = vf.createBNode("bnodeid");
-//		byte[] value = writeValue(val);
-//
-//		Value readValue = readValue(ByteStreams
-//				.newDataInput(value), vf);
-//		assertEquals(val, readValue);
-//	}
-//
-//	public void testWriteReadLiteral() throws Exception {
-//		final ValueFactoryImpl vf = new ValueFactoryImpl();
-//		Value val = vf.createLiteral("myliteral");
-//		byte[] value = writeValue(val);
-//
-//		Value readValue = readValue(ByteStreams
-//				.newDataInput(value), vf);
-//		assertEquals(val, readValue);
-//	}
-//
-//	public void testContexts() throws Exception {
-//		final ValueFactoryImpl vf = new ValueFactoryImpl();
-//		BNode cont1 = vf.createBNode("cont1");
-//		BNode cont2 = vf.createBNode("cont2");
-//		BNode cont3 = vf.createBNode("cont3");
-//
-//		byte[] cont_bytes = writeContexts(cont1, cont2,
-//				cont3);
-//		final String cont = new String(cont_bytes);
-//		System.out.println(cont);
-//
-//		List<Resource> contexts = readContexts(cont_bytes,
-//				vf);
-//		for (Resource resource : contexts) {
-//			System.out.println(resource);
-//		}
-//	}
-//}
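
Although the utils test above was entirely commented out, it documents the Value serialization round trip. A minimal sketch of that round trip, assuming writeValue/readValue keep the static signatures shown in the commented code:

import com.google.common.io.ByteStreams;

import org.openrdf.model.URI;
import org.openrdf.model.Value;
import org.openrdf.model.impl.ValueFactoryImpl;

import static mvm.rya.api.RdfCloudTripleStoreUtils.readValue;
import static mvm.rya.api.RdfCloudTripleStoreUtils.writeValue;

public class ValueRoundTripSketch {
    public static void main(String[] args) throws Exception {
        ValueFactoryImpl vf = new ValueFactoryImpl();
        URI uri = vf.createURI("http://www.example.org/test/rel");

        byte[] bytes = writeValue(uri);                              // serialize to bytes
        Value back = readValue(ByteStreams.newDataInput(bytes), vf); // deserialize

        if (!uri.equals(back)) {
            throw new AssertionError("round trip failed: " + back);
        }
    }
}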

http://git-wip-us.apache.org/repos/asf/incubator-rya/blob/80faf06d/sail/rya.sail.impl/src/test/java/mvm/rya/rdftriplestore/evaluation/QueryJoinSelectOptimizerTest.java
----------------------------------------------------------------------
diff --git a/sail/rya.sail.impl/src/test/java/mvm/rya/rdftriplestore/evaluation/QueryJoinSelectOptimizerTest.java b/sail/rya.sail.impl/src/test/java/mvm/rya/rdftriplestore/evaluation/QueryJoinSelectOptimizerTest.java
deleted file mode 100644
index e92277c..0000000
--- a/sail/rya.sail.impl/src/test/java/mvm/rya/rdftriplestore/evaluation/QueryJoinSelectOptimizerTest.java
+++ /dev/null
@@ -1,991 +0,0 @@
-package mvm.rya.rdftriplestore.evaluation;
-
-/*
- * #%L
- * mvm.rya.rya.sail.impl
- * %%
- * Copyright (C) 2014 - 2015 Rya
- * %%
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * You may obtain a copy of the License at
- * 
- *      http://www.apache.org/licenses/LICENSE-2.0
- * 
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- * #L%
- */
-
-import java.util.ArrayList;
-import java.util.Arrays;
-import java.util.List;
-import java.util.Map;
-import java.util.concurrent.TimeUnit;
-
-import mvm.rya.accumulo.AccumuloRdfConfiguration;
-import mvm.rya.api.RdfCloudTripleStoreConfiguration;
-import mvm.rya.api.layout.TablePrefixLayoutStrategy;
-import mvm.rya.api.persist.RdfEvalStatsDAO;
-import mvm.rya.joinselect.AccumuloSelectivityEvalDAO;
-import mvm.rya.prospector.service.ProspectorServiceEvalStatsDAO;
-import mvm.rya.rdftriplestore.evaluation.QueryJoinSelectOptimizer;
-import mvm.rya.rdftriplestore.evaluation.RdfCloudTripleStoreSelectivityEvaluationStatistics;
-
-import org.apache.accumulo.core.client.AccumuloException;
-import org.apache.accumulo.core.client.AccumuloSecurityException;
-import org.apache.accumulo.core.client.BatchWriter;
-import org.apache.accumulo.core.client.BatchWriterConfig;
-import org.apache.accumulo.core.client.Connector;
-import org.apache.accumulo.core.client.Instance;
-import org.apache.accumulo.core.client.Scanner;
-import org.apache.accumulo.core.client.TableExistsException;
-import org.apache.accumulo.core.client.TableNotFoundException;
-import org.apache.accumulo.core.client.mock.MockInstance;
-import org.apache.accumulo.core.client.security.tokens.PasswordToken;
-import org.apache.accumulo.core.data.Key;
-import org.apache.accumulo.core.data.Mutation;
-import org.apache.accumulo.core.data.Range;
-import org.apache.accumulo.core.data.Value;
-import org.apache.accumulo.core.security.Authorizations;
-import org.apache.hadoop.io.Text;
-import org.junit.Assert;
-import org.junit.Before;
-import org.junit.Test;
-import org.openrdf.query.MalformedQueryException;
-import org.openrdf.query.algebra.TupleExpr;
-import org.openrdf.query.algebra.evaluation.impl.FilterOptimizer;
-import org.openrdf.query.parser.ParsedQuery;
-import org.openrdf.query.parser.sparql.SPARQLParser;
-
-public class QueryJoinSelectOptimizerTest {
-
-  private static final String DELIM = "\u0000";
-  private final byte[] EMPTY_BYTE = new byte[0];
-  private final Value EMPTY_VAL = new Value(EMPTY_BYTE);
-
-  private String q1 = ""//
-      + "SELECT ?h  " //
-      + "{" //
-      + "  ?h <http://www.w3.org/2000/01/rdf-schema#label> <uri:dog> ."//
-      + "  ?h <uri:barksAt> <uri:cat> ."//
-      + "  ?h <uri:peesOn> <uri:hydrant> . "//
-      + "}";//
-
-  private String Q1 = ""//
-      + "SELECT ?h  " //
-      + "{" //
-      + "  ?h <uri:peesOn> <uri:hydrant> . "//
-      + "  ?h <uri:barksAt> <uri:cat> ."//
-      + "  ?h <http://www.w3.org/2000/01/rdf-schema#label> <uri:dog> ."//
-      + "}";//
-
-  private String q2 = ""//
-      + "SELECT ?h ?l ?m" //
-      + "{" //
-      + "  ?h <http://www.w3.org/2000/01/rdf-schema#label> <uri:dog> ."//
-      + "  ?h <uri:barksAt> <uri:cat> ."//
-      + "  ?h <uri:peesOn> <uri:hydrant> . "//
-      + "  ?m <uri:eats>  <uri:chickens>. " //
-      + "  ?m <uri:scratches> <uri:ears>. " //
-      + "}";//
-
-  private String Q2 = ""//
-      + "SELECT ?h ?l ?m" //
-      + "{" //
-      + "  ?h <uri:peesOn> <uri:hydrant> . "//
-      + "  ?h <uri:barksAt> <uri:cat> ."//
-      + "  ?h <http://www.w3.org/2000/01/rdf-schema#label> <uri:dog> ."//
-      + "  ?m <uri:eats>  <uri:chickens>. " //
-      + "  ?m <uri:scratches> <uri:ears>. " //
-      + "}";//
-
-  private String q3 = ""//
-      + "SELECT ?h ?l ?m" //
-      + "{" //
-      + "  ?h <http://www.w3.org/2000/01/rdf-schema#label> <uri:dog> ."//
-      + "  ?h <uri:barksAt> <uri:cat> ."//
-      + "  ?h <uri:peesOn> <uri:hydrant> . "//
-      + "  {?m <uri:eats>  <uri:chickens>} OPTIONAL {?m <uri:scratches> <uri:ears>}. " //
-      + "  {?m <uri:eats>  <uri:kibble>. ?m <uri:watches> <uri:television>.} UNION {?m <uri:rollsIn> <uri:mud>}. " //
-      + "  ?l <uri:runsIn> <uri:field> ."//
-      + "  ?l <uri:smells> <uri:butt> ."//
-      + "  ?l <uri:eats> <uri:sticks> ."//
-      + "}";//
-
-  private String Q4 = ""//
-      + "SELECT ?h ?l ?m" //
-      + "{" //
-      + "  ?h <uri:barksAt> <uri:cat> ."//
-      + "  ?m <uri:scratches> <uri:ears>. " //
-      + "  ?m <uri:eats>  <uri:chickens>. " //
-      + "  ?h <http://www.w3.org/2000/01/rdf-schema#label> <uri:dog> ."//
-      + "  ?h <uri:peesOn> <uri:hydrant> . "//
-      + "}";//
-
-  private String q5 = ""//
-      + "SELECT ?h ?l ?m" //
-      + "{" //
-      + "  ?h <http://www.w3.org/2000/01/rdf-schema#label> <uri:dog> ."//
-      + "  ?h <uri:barksAt> <uri:cat> ."//
-      + "  ?h <uri:peesOn> <uri:hydrant> . "//
-      + "  {?m <uri:eats>  <uri:kibble>. ?m <uri:watches> <uri:television>.?m <uri:eats>  <uri:chickens>} " + "  UNION {?m <uri:rollsIn> <uri:mud>}. " //
-      + "  ?l <uri:runsIn> <uri:field> ."//
-      + "  ?l <uri:smells> <uri:butt> ."//
-      + "  ?l <uri:eats> <uri:sticks> ."//
-      + "}";//
-  
-  
-  private String q6 = ""//
-          + "SELECT ?h ?l ?m" //
-          + "{" //
-          + "  ?h <http://www.w3.org/2000/01/rdf-schema#label> <uri:dog> ."//
-          + "  ?h <uri:barksAt> <uri:cat> ."//
-          + "  ?h <uri:peesOn> <uri:hydrant> . "//
-           + "  FILTER(?l = <uri:grover>) ." //
-          + "  {?m <uri:eats>  <uri:kibble>. ?m <uri:watches> <uri:television>.?m <uri:eats>  <uri:chickens>} " + "  UNION {?m <uri:rollsIn> <uri:mud>}. " //
-          + "  ?l <uri:runsIn> <uri:field> ."//
-          + "  ?l <uri:smells> <uri:butt> ."//
-          + "  ?l <uri:eats> <uri:sticks> ."//
-          + "}";//
-
-  private Connector conn;
-  AccumuloRdfConfiguration arc;
-  BatchWriterConfig config;
-  RdfEvalStatsDAO<RdfCloudTripleStoreConfiguration> res;
-  Instance mock;
-
-  @Before
-  public void init() throws AccumuloException, AccumuloSecurityException, TableNotFoundException, TableExistsException {
-
-    mock = new MockInstance("accumulo");
-    PasswordToken pToken = new PasswordToken("pass".getBytes());
-    conn = mock.getConnector("user", pToken);
-
-    config = new BatchWriterConfig();
-    config.setMaxMemory(1000);
-    config.setMaxLatency(1000, TimeUnit.SECONDS);
-    config.setMaxWriteThreads(10);
-
-    if (conn.tableOperations().exists("rya_prospects")) {
-      conn.tableOperations().delete("rya_prospects");
-    }
-    if (conn.tableOperations().exists("rya_selectivity")) {
-      conn.tableOperations().delete("rya_selectivity");
-    }
-
-    arc = new AccumuloRdfConfiguration();
-    arc.setTableLayoutStrategy(new TablePrefixLayoutStrategy());
-    arc.setMaxRangesForScanner(300);
-    res = new ProspectorServiceEvalStatsDAO(conn, arc);
-
-  }
-
-  @Test
-  public void testOptimizeQ1() throws Exception {
-
-    RdfEvalStatsDAO<RdfCloudTripleStoreConfiguration> res = new ProspectorServiceEvalStatsDAO(conn, arc);
-    AccumuloSelectivityEvalDAO accc = new AccumuloSelectivityEvalDAO();
-    accc.setConf(arc);
-    accc.setConnector(conn);
-    accc.setRdfEvalDAO(res);
-    accc.init();
-
-    BatchWriter bw1 = conn.createBatchWriter("rya_prospects", config);
-    BatchWriter bw2 = conn.createBatchWriter("rya_selectivity", config);
-
-    String s1 = "predicateobject" + DELIM + "http://www.w3.org/2000/01/rdf-schema#label" + DELIM + "uri:dog";
-    String s2 = "predicateobject" + DELIM + "uri:barksAt" + DELIM + "uri:cat";
-    String s3 = "predicateobject" + DELIM + "uri:peesOn" + DELIM + "uri:hydrant";
-    List<Mutation> mList = new ArrayList<Mutation>();
-    List<Mutation> mList2 = new ArrayList<Mutation>();
-    List<String> sList = Arrays.asList("subjectobject", "subjectpredicate", "subjectsubject", "predicateobject", "predicatepredicate", "predicatesubject");
-    Mutation m1, m2, m3, m4;
-
-    m1 = new Mutation(s1 + DELIM + "3");
-    m1.put(new Text("count"), new Text(""), new Value("3".getBytes()));
-    m2 = new Mutation(s2 + DELIM + "2");
-    m2.put(new Text("count"), new Text(""), new Value("2".getBytes()));
-    m3 = new Mutation(s3 + DELIM + "1");
-    m3.put(new Text("count"), new Text(""), new Value("1".getBytes()));
-    mList.add(m1);
-    mList.add(m2);
-    mList.add(m3);
-
-    bw1.addMutations(mList);
-    bw1.close();
-
-    Scanner scan = conn.createScanner("rya_prospects", new Authorizations());
-    scan.setRange(new Range());
-
-    for (Map.Entry<Key,Value> entry : scan) {
-      System.out.println("Key row string is " + entry.getKey().getRow().toString());
-      System.out.println("Key is " + entry.getKey());
-      System.out.println("Value is " + (new String(entry.getValue().get())));
-    }
-
-    m1 = new Mutation(s1);
-    m2 = new Mutation(s2);
-    m3 = new Mutation(s3);
-    m4 = new Mutation(new Text("subjectpredicateobject" + DELIM + "FullTableCardinality"));
-    m4.put(new Text("FullTableCardinality"), new Text("100"), EMPTY_VAL);
-    int i = 2;
-    int j = 3;
-    int k = 4;
-    Long count1;
-    Long count2;
-    Long count3;
-
-    for (String s : sList) {
-      count1 = (long) i;
-      count2 = (long) j;
-      count3 = (long) k;
-      m1.put(new Text(s), new Text(count1.toString()), EMPTY_VAL);
-      m2.put(new Text(s), new Text(count2.toString()), EMPTY_VAL);
-      m3.put(new Text(s), new Text(count3.toString()), EMPTY_VAL);
-      i = 2 * i;
-      j = 2 * j;
-      k = 2 * k;
-    }
-    mList2.add(m1);
-    mList2.add(m2);
-    mList2.add(m3);
-    mList2.add(m4);
-    bw2.addMutations(mList2);
-    bw2.close();
-
-    scan = conn.createScanner("rya_selectivity", new Authorizations());
-    scan.setRange(new Range());
-
-    for (Map.Entry<Key,Value> entry : scan) {
-      System.out.println("Key row string is " + entry.getKey().getRow().toString());
-      System.out.println("Key is " + entry.getKey());
-      System.out.println("Value is " + (new String(entry.getKey().getColumnQualifier().toString())));
-
-    }
-
-    TupleExpr te = getTupleExpr(q1);
-
-    RdfCloudTripleStoreSelectivityEvaluationStatistics ars = new RdfCloudTripleStoreSelectivityEvaluationStatistics(arc, res, accc);
-    QueryJoinSelectOptimizer qjs = new QueryJoinSelectOptimizer(ars, accc);
-    System.out.println("Originial query is " + te);
-    qjs.optimize(te, null, null);
-    Assert.assertTrue(te.equals(getTupleExpr(Q1)));
-
-  }
-
-  @Test
-  public void testOptimizeQ2() throws Exception {
-
-    System.out.println("*********************QUERY2********************");
-
-    RdfEvalStatsDAO<RdfCloudTripleStoreConfiguration> res = new ProspectorServiceEvalStatsDAO(conn, arc);
-    AccumuloSelectivityEvalDAO accc = new AccumuloSelectivityEvalDAO();
-    accc.setConf(arc);
-    accc.setConnector(conn);
-    accc.setRdfEvalDAO(res);
-    accc.init();
-
-    BatchWriter bw1 = conn.createBatchWriter("rya_prospects", config);
-    BatchWriter bw2 = conn.createBatchWriter("rya_selectivity", config);
-
-    String s1 = "predicateobject" + DELIM + "http://www.w3.org/2000/01/rdf-schema#label" + DELIM + "uri:dog";
-    String s2 = "predicateobject" + DELIM + "uri:barksAt" + DELIM + "uri:cat";
-    String s3 = "predicateobject" + DELIM + "uri:peesOn" + DELIM + "uri:hydrant";
-    String s5 = "predicateobject" + DELIM + "uri:scratches" + DELIM + "uri:ears";
-    String s4 = "predicateobject" + DELIM + "uri:eats" + DELIM + "uri:chickens";
-    List<Mutation> mList = new ArrayList<Mutation>();
-    List<Mutation> mList2 = new ArrayList<Mutation>();
-    List<String> sList = Arrays.asList("subjectobject", "subjectpredicate", "subjectsubject", "predicateobject", "predicatepredicate", "predicatesubject");
-    Mutation m1, m2, m3, m4, m5, m6;
-
-    m1 = new Mutation(s1 + DELIM + "3");
-    m1.put(new Text("count"), new Text(""), new Value("4".getBytes()));
-    m2 = new Mutation(s2 + DELIM + "2");
-    m2.put(new Text("count"), new Text(""), new Value("3".getBytes()));
-    m3 = new Mutation(s3 + DELIM + "1");
-    m3.put(new Text("count"), new Text(""), new Value("2".getBytes()));
-    m4 = new Mutation(s4 + DELIM + "1");
-    m4.put(new Text("count"), new Text(""), new Value("3".getBytes()));
-    m5 = new Mutation(s5 + DELIM + "1");
-    m5.put(new Text("count"), new Text(""), new Value("5".getBytes()));
-    mList.add(m1);
-    mList.add(m2);
-    mList.add(m3);
-    mList.add(m4);
-    mList.add(m5);
-
-    bw1.addMutations(mList);
-    bw1.close();
-
-    Scanner scan = conn.createScanner("rya_prospects", new Authorizations());
-    scan.setRange(new Range());
-
-    for (Map.Entry<Key,Value> entry : scan) {
-      System.out.println("Key row string is " + entry.getKey().getRow().toString());
-      System.out.println("Key is " + entry.getKey());
-      System.out.println("Value is " + (new String(entry.getValue().get())));
-    }
-
-    m1 = new Mutation(s1);
-    m2 = new Mutation(s2);
-    m3 = new Mutation(s3);
-    m4 = new Mutation(s4);
-    m5 = new Mutation(s5);
-    m6 = new Mutation(new Text("subjectpredicateobject" + DELIM + "FullTableCardinality"));
-    m6.put(new Text("FullTableCardinality"), new Text("100"), EMPTY_VAL);
-    int i = 2;
-    int j = 3;
-    int k = 4;
-    Long count1;
-    Long count2;
-    Long count3;
-
-    for (String s : sList) {
-      count1 = (long) i;
-      count2 = (long) j;
-      count3 = (long) k;
-      m1.put(new Text(s), new Text(count1.toString()), EMPTY_VAL);
-      m2.put(new Text(s), new Text(count2.toString()), EMPTY_VAL);
-      m3.put(new Text(s), new Text(count1.toString()), EMPTY_VAL);
-      m4.put(new Text(s), new Text(count3.toString()), EMPTY_VAL);
-      m5.put(new Text(s), new Text(count1.toString()), EMPTY_VAL);
-
-      i = 2 * i;
-      j = 2 * j;
-      k = 2 * k;
-    }
-    mList2.add(m1);
-    mList2.add(m2);
-    mList2.add(m3);
-    mList2.add(m5);
-    mList2.add(m4);
-    mList2.add(m6);
-    bw2.addMutations(mList2);
-    bw2.close();
-
-    // scan = conn.createScanner("rya_selectivity" , new Authorizations());
-    // scan.setRange(new Range());
-    //
-    // for (Map.Entry<Key, Value> entry : scan) {
-    // System.out.println("Key row string is " + entry.getKey().getRow().toString());
-    // System.out.println("Key is " + entry.getKey());
-    // System.out.println("Value is " + (new String(entry.getKey().getColumnQualifier().toString())));
-    //
-    // }
-
-    TupleExpr te = getTupleExpr(q2);
-    System.out.println("Bindings are " + te.getBindingNames());
-    RdfCloudTripleStoreSelectivityEvaluationStatistics ars = new RdfCloudTripleStoreSelectivityEvaluationStatistics(arc, res, accc);
-    QueryJoinSelectOptimizer qjs = new QueryJoinSelectOptimizer(ars, accc);
-    System.out.println("Originial query is " + te);
-    qjs.optimize(te, null, null);
-    System.out.println("Optimized query is " + te);
-    // System.out.println("Bindings are " + te.getBindingNames());
-    Assert.assertTrue(te.equals(getTupleExpr(Q2)));
-
-  }
-
-  @Test
-  public void testOptimizeQ3() throws Exception {
-
-    RdfEvalStatsDAO<RdfCloudTripleStoreConfiguration> res = new ProspectorServiceEvalStatsDAO(conn, arc);
-    AccumuloSelectivityEvalDAO accc = new AccumuloSelectivityEvalDAO();
-    accc.setConf(arc);
-    accc.setConnector(conn);
-    accc.setRdfEvalDAO(res);
-    accc.init();
-
-    BatchWriter bw1 = conn.createBatchWriter("rya_prospects", config);
-    BatchWriter bw2 = conn.createBatchWriter("rya_selectivity", config);
-
-    String s1 = "predicateobject" + DELIM + "http://www.w3.org/2000/01/rdf-schema#label" + DELIM + "uri:dog";
-    String s2 = "predicateobject" + DELIM + "uri:barksAt" + DELIM + "uri:cat";
-    String s3 = "predicateobject" + DELIM + "uri:peesOn" + DELIM + "uri:hydrant";
-    String s5 = "predicateobject" + DELIM + "uri:scratches" + DELIM + "uri:ears";
-    String s4 = "predicateobject" + DELIM + "uri:eats" + DELIM + "uri:chickens";
-    String s6 = "predicateobject" + DELIM + "uri:eats" + DELIM + "uri:kibble";
-    String s7 = "predicateobject" + DELIM + "uri:rollsIn" + DELIM + "uri:mud";
-    String s8 = "predicateobject" + DELIM + "uri:runsIn" + DELIM + "uri:field";
-    String s9 = "predicateobject" + DELIM + "uri:smells" + DELIM + "uri:butt";
-    String s10 = "predicateobject" + DELIM + "uri:eats" + DELIM + "uri:sticks";
-
-    List<Mutation> mList = new ArrayList<Mutation>();
-    List<Mutation> mList2 = new ArrayList<Mutation>();
-    List<String> sList = Arrays.asList("subjectobject", "subjectpredicate", "subjectsubject", "predicateobject", "predicatepredicate", "predicatesubject");
-    Mutation m1, m2, m3, m4, m5, m6, m7, m8, m9, m10, m11;
-
-    m1 = new Mutation(s1 + DELIM + "3");
-    m1.put(new Text("count"), new Text(""), new Value("5".getBytes()));
-    m2 = new Mutation(s2 + DELIM + "2");
-    m2.put(new Text("count"), new Text(""), new Value("3".getBytes()));
-    m3 = new Mutation(s3 + DELIM + "1");
-    m3.put(new Text("count"), new Text(""), new Value("2".getBytes()));
-    m4 = new Mutation(s4 + DELIM + "1");
-    m4.put(new Text("count"), new Text(""), new Value("3".getBytes()));
-    m5 = new Mutation(s5 + DELIM + "1");
-    m5.put(new Text("count"), new Text(""), new Value("5".getBytes()));
-    m6 = new Mutation(s6 + DELIM + "1");
-    m6.put(new Text("count"), new Text(""), new Value("3".getBytes()));
-    m7 = new Mutation(s7 + DELIM + "1");
-    m7.put(new Text("count"), new Text(""), new Value("2".getBytes()));
-    m8 = new Mutation(s8 + DELIM + "1");
-    m8.put(new Text("count"), new Text(""), new Value("3".getBytes()));
-    m9 = new Mutation(s9 + DELIM + "1");
-    m9.put(new Text("count"), new Text(""), new Value("1".getBytes()));
-    m10 = new Mutation(s10 + DELIM + "1");
-    m10.put(new Text("count"), new Text(""), new Value("1".getBytes()));
-
-    mList.add(m1);
-    mList.add(m2);
-    mList.add(m3);
-    mList.add(m4);
-    mList.add(m5);
-    mList.add(m6);
-    mList.add(m7);
-    mList.add(m8);
-    mList.add(m9);
-    mList.add(m10);
-
-    bw1.addMutations(mList);
-    bw1.close();
-
-    Scanner scan = conn.createScanner("rya_prospects", new Authorizations());
-    scan.setRange(new Range());
-
-    for (Map.Entry<Key,Value> entry : scan) {
-      System.out.println("Key row string is " + entry.getKey().getRow().toString());
-      System.out.println("Key is " + entry.getKey());
-      System.out.println("Value is " + (new String(entry.getValue().get())));
-    }
-
-    m1 = new Mutation(s1);
-    m2 = new Mutation(s2);
-    m3 = new Mutation(s3);
-    m4 = new Mutation(s4);
-    m5 = new Mutation(s5);
-    m6 = new Mutation(s6);
-    m7 = new Mutation(s7);
-    m8 = new Mutation(s8);
-    m9 = new Mutation(s9);
-    m10 = new Mutation(s10);
-    m11 = new Mutation(new Text("subjectpredicateobject" + DELIM + "FullTableCardinality"));
-    m11.put(new Text("FullTableCardinality"), new Text("100"), EMPTY_VAL);
-    int i = 2;
-    int j = 3;
-    int k = 4;
-    int l = 5;
-    Long count1;
-    Long count2;
-    Long count3;
-    Long count4;
-
-    for (String s : sList) {
-      count1 = (long) i;
-      count2 = (long) j;
-      count3 = (long) k;
-      count4 = (long) l;
-      m1.put(new Text(s), new Text(count4.toString()), EMPTY_VAL);
-      m2.put(new Text(s), new Text(count2.toString()), EMPTY_VAL);
-      m3.put(new Text(s), new Text(count1.toString()), EMPTY_VAL);
-      m4.put(new Text(s), new Text(count3.toString()), EMPTY_VAL);
-      m5.put(new Text(s), new Text(count1.toString()), EMPTY_VAL);
-      m6.put(new Text(s), new Text(count2.toString()), EMPTY_VAL);
-      m7.put(new Text(s), new Text(count1.toString()), EMPTY_VAL);
-      m8.put(new Text(s), new Text(count4.toString()), EMPTY_VAL);
-      m9.put(new Text(s), new Text(count3.toString()), EMPTY_VAL);
-      m10.put(new Text(s), new Text(count1.toString()), EMPTY_VAL);
-
-      i = 2 * i;
-      j = 2 * j;
-      k = 2 * k;
-      l = 2 * l;
-    }
-    mList2.add(m1);
-    mList2.add(m2);
-    mList2.add(m3);
-    mList2.add(m5);
-    mList2.add(m4);
-    mList2.add(m6);
-    mList2.add(m7);
-    mList2.add(m8);
-    mList2.add(m9);
-    mList2.add(m10);
-    mList2.add(m11);
-    bw2.addMutations(mList2);
-    bw2.close();
-
-    scan = conn.createScanner("rya_selectivity", new Authorizations());
-    scan.setRange(new Range());
-
-    for (Map.Entry<Key,Value> entry : scan) {
-      System.out.println("Key row string is " + entry.getKey().getRow().toString());
-      System.out.println("Key is " + entry.getKey());
-      System.out.println("Value is " + (new String(entry.getKey().getColumnQualifier().toString())));
-
-    }
-
-    TupleExpr te = getTupleExpr(q3);
-    RdfCloudTripleStoreSelectivityEvaluationStatistics ars = new RdfCloudTripleStoreSelectivityEvaluationStatistics(arc, res, accc);
-    QueryJoinSelectOptimizer qjs = new QueryJoinSelectOptimizer(ars, accc);
-    System.out.println("Originial query is " + te);
-    qjs.optimize(te, null, null);
-
-    System.out.print("Optimized query is " + te);
-
-  }
-
-  @Test
-  public void testOptimizeQ4() throws Exception {
-
-    RdfEvalStatsDAO<RdfCloudTripleStoreConfiguration> res = new ProspectorServiceEvalStatsDAO(conn, arc);
-    AccumuloSelectivityEvalDAO accc = new AccumuloSelectivityEvalDAO();
-    accc.setConf(arc);
-    accc.setConnector(conn);
-    accc.setRdfEvalDAO(res);
-    accc.init();
-
-    BatchWriter bw1 = conn.createBatchWriter("rya_prospects", config);
-    BatchWriter bw2 = conn.createBatchWriter("rya_selectivity", config);
-
-    String s1 = "predicateobject" + DELIM + "http://www.w3.org/2000/01/rdf-schema#label" + DELIM + "uri:dog";
-    String s2 = "predicateobject" + DELIM + "uri:barksAt" + DELIM + "uri:cat";
-    String s3 = "predicateobject" + DELIM + "uri:peesOn" + DELIM + "uri:hydrant";
-    String s5 = "predicateobject" + DELIM + "uri:scratches" + DELIM + "uri:ears";
-    String s4 = "predicateobject" + DELIM + "uri:eats" + DELIM + "uri:chickens";
-    List<Mutation> mList = new ArrayList<Mutation>();
-    List<Mutation> mList2 = new ArrayList<Mutation>();
-    List<String> sList = Arrays.asList("subjectobject", "subjectpredicate", "subjectsubject", "predicateobject", "predicatepredicate", "predicatesubject");
-    Mutation m1, m2, m3, m4, m5, m6;
-
-    m1 = new Mutation(s1 + DELIM + "3");
-    m1.put(new Text("count"), new Text(""), new Value("4".getBytes()));
-    m2 = new Mutation(s2 + DELIM + "2");
-    m2.put(new Text("count"), new Text(""), new Value("0".getBytes()));
-    m3 = new Mutation(s3 + DELIM + "1");
-    m3.put(new Text("count"), new Text(""), new Value("8".getBytes()));
-    m4 = new Mutation(s4 + DELIM + "1");
-    m4.put(new Text("count"), new Text(""), new Value("3".getBytes()));
-    m5 = new Mutation(s5 + DELIM + "1");
-    m5.put(new Text("count"), new Text(""), new Value("0".getBytes()));
-    mList.add(m1);
-    mList.add(m2);
-    mList.add(m3);
-    mList.add(m4);
-    mList.add(m5);
-
-    bw1.addMutations(mList);
-    bw1.close();
-
-    Scanner scan = conn.createScanner("rya_prospects", new Authorizations());
-    scan.setRange(new Range());
-
-    for (Map.Entry<Key,Value> entry : scan) {
-      System.out.println("Key row string is " + entry.getKey().getRow().toString());
-      System.out.println("Key is " + entry.getKey());
-      System.out.println("Value is " + (new String(entry.getValue().get())));
-    }
-
-    m1 = new Mutation(s1);
-    m2 = new Mutation(s2);
-    m3 = new Mutation(s3);
-    m4 = new Mutation(s4);
-    m5 = new Mutation(s5);
-    m6 = new Mutation(new Text("subjectpredicateobject" + DELIM + "FullTableCardinality"));
-    m6.put(new Text("FullTableCardinality"), new Text("100"), EMPTY_VAL);
-    int i = 2;
-    int j = 3;
-    int k = 4;
-    Long count1;
-    Long count2;
-    Long count3;
-
-    for (String s : sList) {
-      count1 = (long) i;
-      count2 = (long) j;
-      count3 = (long) k;
-      m1.put(new Text(s), new Text(count1.toString()), EMPTY_VAL);
-      m2.put(new Text(s), new Text(count2.toString()), EMPTY_VAL);
-      m3.put(new Text(s), new Text(count1.toString()), EMPTY_VAL);
-      m4.put(new Text(s), new Text(count3.toString()), EMPTY_VAL);
-      m5.put(new Text(s), new Text(count1.toString()), EMPTY_VAL);
-
-      i = 2 * i;
-      j = 2 * j;
-      k = 2 * k;
-    }
-    mList2.add(m1);
-    mList2.add(m2);
-    mList2.add(m3);
-    mList2.add(m5);
-    mList2.add(m4);
-    mList2.add(m6);
-    bw2.addMutations(mList2);
-    bw2.close();
-
-    scan = conn.createScanner("rya_selectivity", new Authorizations());
-    scan.setRange(new Range());
-
-    for (Map.Entry<Key,Value> entry : scan) {
-      System.out.println("Key row string is " + entry.getKey().getRow().toString());
-      System.out.println("Key is " + entry.getKey());
-      System.out.println("Value is " + (new String(entry.getKey().getColumnQualifier().toString())));
-
-    }
-
-    TupleExpr te = getTupleExpr(q2);
-    RdfCloudTripleStoreSelectivityEvaluationStatistics ars = new RdfCloudTripleStoreSelectivityEvaluationStatistics(arc, res, accc);
-    QueryJoinSelectOptimizer qjs = new QueryJoinSelectOptimizer(ars, accc);
-    System.out.println("Originial query is " + te);
-    qjs.optimize(te, null, null);
-    Assert.assertTrue(te.equals(getTupleExpr(Q4)));
-
-    System.out.print("Optimized query is " + te);
-
-  }
-
-  @Test
-  public void testOptimizeQ5() throws Exception {
-
-    RdfEvalStatsDAO<RdfCloudTripleStoreConfiguration> res = new ProspectorServiceEvalStatsDAO(conn, arc);
-    AccumuloSelectivityEvalDAO accc = new AccumuloSelectivityEvalDAO();
-    accc.setConf(arc);
-    accc.setConnector(conn);
-    accc.setRdfEvalDAO(res);
-    accc.init();
-
-    BatchWriter bw1 = conn.createBatchWriter("rya_prospects", config);
-    BatchWriter bw2 = conn.createBatchWriter("rya_selectivity", config);
-
-    String s1 = "predicateobject" + DELIM + "http://www.w3.org/2000/01/rdf-schema#label" + DELIM + "uri:dog";
-    String s2 = "predicateobject" + DELIM + "uri:barksAt" + DELIM + "uri:cat";
-    String s3 = "predicateobject" + DELIM + "uri:peesOn" + DELIM + "uri:hydrant";
-    String s5 = "predicateobject" + DELIM + "uri:watches" + DELIM + "uri:television";
-    String s4 = "predicateobject" + DELIM + "uri:eats" + DELIM + "uri:chickens";
-    String s6 = "predicateobject" + DELIM + "uri:eats" + DELIM + "uri:kibble";
-    String s7 = "predicateobject" + DELIM + "uri:rollsIn" + DELIM + "uri:mud";
-    String s8 = "predicateobject" + DELIM + "uri:runsIn" + DELIM + "uri:field";
-    String s9 = "predicateobject" + DELIM + "uri:smells" + DELIM + "uri:butt";
-    String s10 = "predicateobject" + DELIM + "uri:eats" + DELIM + "uri:sticks";
-
-    List<Mutation> mList = new ArrayList<Mutation>();
-    List<Mutation> mList2 = new ArrayList<Mutation>();
-    List<String> sList = Arrays.asList("subjectobject", "subjectpredicate", "subjectsubject", "predicateobject", "predicatepredicate", "predicatesubject");
-    Mutation m1, m2, m3, m4, m5, m6, m7, m8, m9, m10, m11;
-
-    m1 = new Mutation(s1 + DELIM + "3");
-    m1.put(new Text("count"), new Text(""), new Value("5".getBytes()));
-    m2 = new Mutation(s2 + DELIM + "2");
-    m2.put(new Text("count"), new Text(""), new Value("3".getBytes()));
-    m3 = new Mutation(s3 + DELIM + "1");
-    m3.put(new Text("count"), new Text(""), new Value("2".getBytes()));
-    m4 = new Mutation(s4 + DELIM + "1");
-    m4.put(new Text("count"), new Text(""), new Value("0".getBytes()));
-    m5 = new Mutation(s5 + DELIM + "1");
-    m5.put(new Text("count"), new Text(""), new Value("1".getBytes()));
-    m6 = new Mutation(s6 + DELIM + "1");
-    m6.put(new Text("count"), new Text(""), new Value("3".getBytes()));
-    m7 = new Mutation(s7 + DELIM + "1");
-    m7.put(new Text("count"), new Text(""), new Value("2".getBytes()));
-    m8 = new Mutation(s8 + DELIM + "1");
-    m8.put(new Text("count"), new Text(""), new Value("3".getBytes()));
-    m9 = new Mutation(s9 + DELIM + "1");
-    m9.put(new Text("count"), new Text(""), new Value("1".getBytes()));
-    m10 = new Mutation(s10 + DELIM + "1");
-    m10.put(new Text("count"), new Text(""), new Value("1".getBytes()));
-
-    mList.add(m1);
-    mList.add(m2);
-    mList.add(m3);
-    mList.add(m4);
-    mList.add(m5);
-    mList.add(m6);
-    mList.add(m7);
-    mList.add(m8);
-    mList.add(m9);
-    mList.add(m10);
-
-    bw1.addMutations(mList);
-    bw1.close();
-
-    Scanner scan = conn.createScanner("rya_prospects", new Authorizations());
-    scan.setRange(new Range());
-
-    for (Map.Entry<Key,Value> entry : scan) {
-      System.out.println("Key row string is " + entry.getKey().getRow().toString());
-      System.out.println("Key is " + entry.getKey());
-      System.out.println("Value is " + (new String(entry.getValue().get())));
-    }
-
-    m1 = new Mutation(s1);
-    m2 = new Mutation(s2);
-    m3 = new Mutation(s3);
-    m4 = new Mutation(s4);
-    m5 = new Mutation(s5);
-    m6 = new Mutation(s6);
-    m7 = new Mutation(s7);
-    m8 = new Mutation(s8);
-    m9 = new Mutation(s9);
-    m10 = new Mutation(s10);
-    m11 = new Mutation(new Text("subjectpredicateobject" + DELIM + "FullTableCardinality"));
-    m11.put(new Text("FullTableCardinality"), new Text("100"), EMPTY_VAL);
-    int i = 2;
-    int j = 3;
-    int k = 4;
-    int l = 5;
-    Long count1;
-    Long count2;
-    Long count3;
-    Long count4;
-
-    for (String s : sList) {
-      count1 = (long) i;
-      count2 = (long) j;
-      count3 = (long) k;
-      count4 = (long) l;
-      m1.put(new Text(s), new Text(count4.toString()), EMPTY_VAL);
-      m2.put(new Text(s), new Text(count2.toString()), EMPTY_VAL);
-      m3.put(new Text(s), new Text(count1.toString()), EMPTY_VAL);
-      m4.put(new Text(s), new Text(count3.toString()), EMPTY_VAL);
-      m5.put(new Text(s), new Text(count1.toString()), EMPTY_VAL);
-      m6.put(new Text(s), new Text(count2.toString()), EMPTY_VAL);
-      m7.put(new Text(s), new Text(count1.toString()), EMPTY_VAL);
-      m8.put(new Text(s), new Text(count4.toString()), EMPTY_VAL);
-      m9.put(new Text(s), new Text(count3.toString()), EMPTY_VAL);
-      m10.put(new Text(s), new Text(count1.toString()), EMPTY_VAL);
-
-      i = 2 * i;
-      j = 2 * j;
-      k = 2 * k;
-      l = 2 * l;
-    }
-    mList2.add(m1);
-    mList2.add(m2);
-    mList2.add(m3);
-    mList2.add(m5);
-    mList2.add(m4);
-    mList2.add(m6);
-    mList2.add(m7);
-    mList2.add(m8);
-    mList2.add(m9);
-    mList2.add(m10);
-    mList2.add(m11);
-    bw2.addMutations(mList2);
-    bw2.close();
-
-    scan = conn.createScanner("rya_selectivity", new Authorizations());
-    scan.setRange(new Range());
-
-    for (Map.Entry<Key,Value> entry : scan) {
-      System.out.println("Key row string is " + entry.getKey().getRow().toString());
-      System.out.println("Key is " + entry.getKey());
-      System.out.println("Value is " + (new String(entry.getKey().getColumnQualifier().toString())));
-
-    }
-
-    TupleExpr te = getTupleExpr(q5);
-    System.out.println("Bindings are " + te.getBindingNames());
-    RdfCloudTripleStoreSelectivityEvaluationStatistics ars = new RdfCloudTripleStoreSelectivityEvaluationStatistics(arc, res, accc);
-    QueryJoinSelectOptimizer qjs = new QueryJoinSelectOptimizer(ars, accc);
-    System.out.println("Originial query is " + te);
-    qjs.optimize(te, null, null);
-    System.out.println("Bindings are " + te.getBindingNames());
-
-    System.out.print("Optimized query is " + te);
-
-  }
-
-  @Test
-  public void testOptimizeQ6() throws Exception {
-
-    RdfEvalStatsDAO<RdfCloudTripleStoreConfiguration> res = new ProspectorServiceEvalStatsDAO(conn, arc);
-    AccumuloSelectivityEvalDAO accc = new AccumuloSelectivityEvalDAO();
-    accc.setConf(arc);
-    accc.setConnector(conn);
-    accc.setRdfEvalDAO(res);
-    accc.init();
-
-    BatchWriter bw1 = conn.createBatchWriter("rya_prospects", config);
-    BatchWriter bw2 = conn.createBatchWriter("rya_selectivity", config);
-
-    String s1 = "predicateobject" + DELIM + "http://www.w3.org/2000/01/rdf-schema#label" + DELIM + "uri:dog";
-    String s2 = "predicateobject" + DELIM + "uri:barksAt" + DELIM + "uri:cat";
-    String s3 = "predicateobject" + DELIM + "uri:peesOn" + DELIM + "uri:hydrant";
-    String s5 = "predicateobject" + DELIM + "uri:watches" + DELIM + "uri:television";
-    String s4 = "predicateobject" + DELIM + "uri:eats" + DELIM + "uri:chickens";
-    String s6 = "predicateobject" + DELIM + "uri:eats" + DELIM + "uri:kibble";
-    String s7 = "predicateobject" + DELIM + "uri:rollsIn" + DELIM + "uri:mud";
-    String s8 = "predicateobject" + DELIM + "uri:runsIn" + DELIM + "uri:field";
-    String s9 = "predicateobject" + DELIM + "uri:smells" + DELIM + "uri:butt";
-    String s10 = "predicateobject" + DELIM + "uri:eats" + DELIM + "uri:sticks";
-
-    List<Mutation> mList = new ArrayList<Mutation>();
-    List<Mutation> mList2 = new ArrayList<Mutation>();
-    List<String> sList = Arrays.asList("subjectobject", "subjectpredicate", "subjectsubject", "predicateobject", "predicatepredicate", "predicatesubject");
-    Mutation m1, m2, m3, m4, m5, m6, m7, m8, m9, m10, m11;
-
-    m1 = new Mutation(s1 + DELIM + "3");
-    m1.put(new Text("count"), new Text(""), new Value("5".getBytes()));
-    m2 = new Mutation(s2 + DELIM + "2");
-    m2.put(new Text("count"), new Text(""), new Value("3".getBytes()));
-    m3 = new Mutation(s3 + DELIM + "1");
-    m3.put(new Text("count"), new Text(""), new Value("2".getBytes()));
-    m4 = new Mutation(s4 + DELIM + "1");
-    m4.put(new Text("count"), new Text(""), new Value("0".getBytes()));
-    m5 = new Mutation(s5 + DELIM + "1");
-    m5.put(new Text("count"), new Text(""), new Value("1".getBytes()));
-    m6 = new Mutation(s6 + DELIM + "1");
-    m6.put(new Text("count"), new Text(""), new Value("3".getBytes()));
-    m7 = new Mutation(s7 + DELIM + "1");
-    m7.put(new Text("count"), new Text(""), new Value("2".getBytes()));
-    m8 = new Mutation(s8 + DELIM + "1");
-    m8.put(new Text("count"), new Text(""), new Value("3".getBytes()));
-    m9 = new Mutation(s9 + DELIM + "1");
-    m9.put(new Text("count"), new Text(""), new Value("1".getBytes()));
-    m10 = new Mutation(s10 + DELIM + "1");
-    m10.put(new Text("count"), new Text(""), new Value("1".getBytes()));
-
-    mList.add(m1);
-    mList.add(m2);
-    mList.add(m3);
-    mList.add(m4);
-    mList.add(m5);
-    mList.add(m6);
-    mList.add(m7);
-    mList.add(m8);
-    mList.add(m9);
-    mList.add(m10);
-
-    bw1.addMutations(mList);
-    bw1.close();
-
-    Scanner scan = conn.createScanner("rya_prospects", new Authorizations());
-    scan.setRange(new Range());
-
-    for (Map.Entry<Key,Value> entry : scan) {
-      System.out.println("Key row string is " + entry.getKey().getRow().toString());
-      System.out.println("Key is " + entry.getKey());
-      System.out.println("Value is " + (new String(entry.getValue().get())));
-    }
-
-    m1 = new Mutation(s1);
-    m2 = new Mutation(s2);
-    m3 = new Mutation(s3);
-    m4 = new Mutation(s4);
-    m5 = new Mutation(s5);
-    m6 = new Mutation(s6);
-    m7 = new Mutation(s7);
-    m8 = new Mutation(s8);
-    m9 = new Mutation(s9);
-    m10 = new Mutation(s10);
-    m11 = new Mutation(new Text("subjectpredicateobject" + DELIM + "FullTableCardinality"));
-    m11.put(new Text("FullTableCardinality"), new Text("100"), EMPTY_VAL);
-    int i = 2;
-    int j = 3;
-    int k = 4;
-    int l = 5;
-    Long count1;
-    Long count2;
-    Long count3;
-    Long count4;
-
-    for (String s : sList) {
-      count1 = (long) i;
-      count2 = (long) j;
-      count3 = (long) k;
-      count4 = (long) l;
-      m1.put(new Text(s), new Text(count4.toString()), EMPTY_VAL);
-      m2.put(new Text(s), new Text(count2.toString()), EMPTY_VAL);
-      m3.put(new Text(s), new Text(count1.toString()), EMPTY_VAL);
-      m4.put(new Text(s), new Text(count3.toString()), EMPTY_VAL);
-      m5.put(new Text(s), new Text(count1.toString()), EMPTY_VAL);
-      m6.put(new Text(s), new Text(count2.toString()), EMPTY_VAL);
-      m7.put(new Text(s), new Text(count1.toString()), EMPTY_VAL);
-      m8.put(new Text(s), new Text(count4.toString()), EMPTY_VAL);
-      m9.put(new Text(s), new Text(count3.toString()), EMPTY_VAL);
-      m10.put(new Text(s), new Text(count1.toString()), EMPTY_VAL);
-
-      i = 2 * i;
-      j = 2 * j;
-      k = 2 * k;
-      l = 2 * l;
-    }
-    mList2.add(m1);
-    mList2.add(m2);
-    mList2.add(m3);
-    mList2.add(m5);
-    mList2.add(m4);
-    mList2.add(m6);
-    mList2.add(m7);
-    mList2.add(m8);
-    mList2.add(m9);
-    mList2.add(m10);
-    mList2.add(m11);
-    bw2.addMutations(mList2);
-    bw2.close();
-
-    scan = conn.createScanner("rya_selectivity", new Authorizations());
-    scan.setRange(new Range());
-
-    for (Map.Entry<Key,Value> entry : scan) {
-      System.out.println("Key row string is " + entry.getKey().getRow().toString());
-      System.out.println("Key is " + entry.getKey());
-      System.out.println("Value is " + (new String(entry.getKey().getColumnQualifier().toString())));
-
-    }
-
-    TupleExpr te = getTupleExpr(q6);
-    TupleExpr te2 = (TupleExpr) te.clone();
-    System.out.println("Bindings are " + te.getBindingNames());
-    RdfCloudTripleStoreSelectivityEvaluationStatistics ars = new RdfCloudTripleStoreSelectivityEvaluationStatistics(arc, res, accc);
-    QueryJoinSelectOptimizer qjs = new QueryJoinSelectOptimizer(ars, accc);
-    System.out.println("Originial query is " + te);
-    qjs.optimize(te, null, null);
-
-    FilterOptimizer fo = new FilterOptimizer();
-    fo.optimize(te2, null, null);
-    System.out.print("filter optimized query before js opt is " + te2);
-    qjs.optimize(te2, null, null);
-
-    System.out.println("join selectivity opt query before filter opt is " + te);
-    fo.optimize(te, null, null);
-    
-    System.out.println("join selectivity opt query is " + te);
-    System.out.print("filter optimized query is " + te2);
-
-  }
-
-  private TupleExpr getTupleExpr(String query) throws MalformedQueryException {
-
-    SPARQLParser sp = new SPARQLParser();
-    ParsedQuery pq = sp.parseQuery(query, null);
-
-    return pq.getTupleExpr();
-  }
-
-}
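
The removed test above exercised Rya's selectivity-based join optimizer directly. A minimal sketch of that usage pattern, assuming the Rya classes (RdfCloudTripleStoreConfiguration, RdfEvalStatsDAO, AccumuloSelectivityEvalDAO, RdfCloudTripleStoreSelectivityEvaluationStatistics, QueryJoinSelectOptimizer) are on the classpath; their imports follow the project's own packages and are omitted here:

    import org.openrdf.query.MalformedQueryException;
    import org.openrdf.query.algebra.TupleExpr;
    import org.openrdf.query.parser.ParsedQuery;
    import org.openrdf.query.parser.sparql.SPARQLParser;

    // Parse a SPARQL string and let the optimizer re-order its joins in place
    // by estimated selectivity. The DAO arguments are assumed to be configured
    // and initialized exactly as in the test's setup.
    public static TupleExpr optimizeJoins(String sparql,
            RdfCloudTripleStoreConfiguration arc,
            RdfEvalStatsDAO<RdfCloudTripleStoreConfiguration> res,
            AccumuloSelectivityEvalDAO accc) throws MalformedQueryException {
        ParsedQuery pq = new SPARQLParser().parseQuery(sparql, null);
        TupleExpr te = pq.getTupleExpr();
        RdfCloudTripleStoreSelectivityEvaluationStatistics stats =
                new RdfCloudTripleStoreSelectivityEvaluationStatistics(arc, res, accc);
        new QueryJoinSelectOptimizer(stats, accc).optimize(te, null, null);
        return te;
    }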


[18/56] [abbrv] incubator-rya git commit: RYA-7 POM and License Clean-up for Apache Move

Posted by mi...@apache.org.
http://git-wip-us.apache.org/repos/asf/incubator-rya/blob/80faf06d/partition/web.partition.rdf/pom.xml
----------------------------------------------------------------------
diff --git a/partition/web.partition.rdf/pom.xml b/partition/web.partition.rdf/pom.xml
deleted file mode 100644
index 1c81587..0000000
--- a/partition/web.partition.rdf/pom.xml
+++ /dev/null
@@ -1,63 +0,0 @@
-<project xmlns="http://maven.apache.org/POM/4.0.0" xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance"
-         xsi:schemaLocation="http://maven.apache.org/POM/4.0.0 http://maven.apache.org/maven-v4_0_0.xsd">
-    <modelVersion>4.0.0</modelVersion>
-    <parent>
-        <groupId>mvm.rya</groupId>
-		<artifactId>parent</artifactId>
-		<version>3.2.10-SNAPSHOT</version>
-    </parent>
-    <groupId>mvm.mmrts.rdf</groupId>
-    <artifactId>web.partition.rdf</artifactId>
-    <packaging>war</packaging>
-    <version>1.0.0-SNAPSHOT</version>
-    <name>web.partition.rdf Maven Webapp</name>
-    <url>http://maven.apache.org</url>
-    <dependencies>
-        <dependency>
-            <groupId>mvm.mmrts.rdf</groupId>
-            <artifactId>partition.rdf</artifactId>
-            <version>1.0.0-SNAPSHOT</version>
-        </dependency>
-        <dependency>
-            <groupId>commons-pool</groupId>
-            <artifactId>commons-pool</artifactId>
-            <version>1.5.1</version>
-        </dependency>
-        <dependency>
-            <groupId>junit</groupId>
-            <artifactId>junit</artifactId>
-            <version>4.8.1</version>
-            <scope>test</scope>
-        </dependency>
-    </dependencies>
-    <build>
-        <finalName>partitionRdf</finalName>
-        <plugins>
-            <plugin>
-                <groupId>org.mortbay.jetty</groupId>
-                <artifactId>maven-jetty-plugin</artifactId>
-                <version>6.1.10</version>
-                <configuration>
-                    <contextPath>/partitionRdf</contextPath>
-                    <scanIntervalSeconds>10</scanIntervalSeconds>
-                    <stopKey>stopKey</stopKey>
-                    <stopPort>9081</stopPort>
-                    <connectors>
-                        <connector implementation="org.mortbay.jetty.nio.SelectChannelConnector">
-                            <port>9080</port>
-                            <maxIdleTime>60000</maxIdleTime>
-                        </connector>
-                    </connectors>
-                </configuration>
-            </plugin>
-            <plugin>
-                <groupId>org.apache.maven.plugins</groupId>
-                <artifactId>maven-compiler-plugin</artifactId>
-                <configuration>
-                    <source>1.6</source>
-                    <target>1.6</target>
-                </configuration>
-            </plugin>
-        </plugins>
-    </build>
-</project>

http://git-wip-us.apache.org/repos/asf/incubator-rya/blob/80faf06d/partition/web.partition.rdf/src/main/java/mvm/cloud/rdf/web/partition/AbstractRDFWebServlet.java
----------------------------------------------------------------------
diff --git a/partition/web.partition.rdf/src/main/java/mvm/cloud/rdf/web/partition/AbstractRDFWebServlet.java b/partition/web.partition.rdf/src/main/java/mvm/cloud/rdf/web/partition/AbstractRDFWebServlet.java
deleted file mode 100644
index b6732fd..0000000
--- a/partition/web.partition.rdf/src/main/java/mvm/cloud/rdf/web/partition/AbstractRDFWebServlet.java
+++ /dev/null
@@ -1,66 +0,0 @@
-package mvm.cloud.rdf.web.partition;
-
-import cloudbase.core.client.Connector;
-import cloudbase.core.client.ZooKeeperInstance;
-import mvm.mmrts.rdf.partition.PartitionSail;
-import org.openrdf.repository.Repository;
-import org.openrdf.repository.RepositoryException;
-import org.openrdf.repository.sail.SailRepository;
-
-import javax.servlet.ServletConfig;
-import javax.servlet.ServletException;
-import javax.servlet.http.HttpServlet;
-
-/**
- * Class AbstractRDFWebServlet
- * Date: Dec 13, 2010
- * Time: 9:44:08 AM
- */
-public class AbstractRDFWebServlet extends HttpServlet implements RDFWebConstants {
-
-    protected Repository repository;
-
-    @Override
-    public void init(ServletConfig config) throws ServletException {
-        super.init(config);
-
-        try {
-            String instance = config.getInitParameter(INSTANCE_PARAM);
-            String zk = config.getInitParameter(ZK_PARAM);
-            String user = config.getInitParameter(USER_PARAM);
-            String password = config.getInitParameter(PASSWORD_PARAM);
-            String table = config.getInitParameter(TABLE_PARAM);
-            String shardtable = config.getInitParameter(SHARDTABLE_PARAM);
-            if (shardtable == null)
-                shardtable = table;
-
-            if (zk == null || instance == null || user == null || password == null || table == null)
-                throw new ServletException("Configuration not correct");
-
-            PartitionSail psail = new PartitionSail(instance, zk, user, password, table, shardtable);
-
-            repository = new SailRepository(psail);
-            repository.initialize();
-        } catch (Exception e) {
-            throw new ServletException(e);
-        }
-    }
-
-    @Override
-    public void destroy() {
-        try {
-            repository.shutDown();
-        } catch (RepositoryException e) {
-            e.printStackTrace();
-        }
-    }
-
-
-    public Repository getRepository() {
-        return repository;
-    }
-
-    public void setRepository(Repository repository) {
-        this.repository = repository;
-    }
-}
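
The servlet above amounted to a Sesame repository lifecycle wrapped around a PartitionSail. A minimal sketch of that lifecycle, with a generic Sail standing in for the PartitionSail the servlet built from its config parameters:

    import org.openrdf.repository.Repository;
    import org.openrdf.repository.RepositoryException;
    import org.openrdf.repository.sail.SailRepository;
    import org.openrdf.sail.Sail;

    class RepositoryLifecycle {
        private Repository repository;

        // Mirrors init(): wrap the sail in a repository and initialize it.
        void start(Sail sail) throws RepositoryException {
            repository = new SailRepository(sail);
            repository.initialize();
        }

        // Mirrors destroy(): release the repository's resources.
        void stop() {
            try {
                repository.shutDown();
            } catch (RepositoryException e) {
                e.printStackTrace();
            }
        }
    }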

http://git-wip-us.apache.org/repos/asf/incubator-rya/blob/80faf06d/partition/web.partition.rdf/src/main/java/mvm/cloud/rdf/web/partition/DeleteDataServlet.java
----------------------------------------------------------------------
diff --git a/partition/web.partition.rdf/src/main/java/mvm/cloud/rdf/web/partition/DeleteDataServlet.java b/partition/web.partition.rdf/src/main/java/mvm/cloud/rdf/web/partition/DeleteDataServlet.java
deleted file mode 100644
index 7e1296a..0000000
--- a/partition/web.partition.rdf/src/main/java/mvm/cloud/rdf/web/partition/DeleteDataServlet.java
+++ /dev/null
@@ -1,46 +0,0 @@
-//package mvm.cloud.rdf.web.partition;
-//
-//import org.openrdf.query.QueryLanguage;
-//import org.openrdf.query.TupleQuery;
-//import org.openrdf.query.resultio.TupleQueryResultWriter;
-//import org.openrdf.repository.RepositoryConnection;
-//import org.openrdf.repository.RepositoryException;
-//
-//import javax.servlet.ServletException;
-//import javax.servlet.http.HttpServletRequest;
-//import javax.servlet.http.HttpServletResponse;
-//import java.io.IOException;
-//
-//public class DeleteDataServlet extends AbstractRDFWebServlet {
-//
-//    @Override
-//    protected void doGet(HttpServletRequest req, HttpServletResponse resp)
-//            throws ServletException, IOException {
-//        if (req == null || req.getInputStream() == null)
-//            return;
-//
-//        String query_s = req.getParameter("query");
-//
-//        RepositoryConnection conn = null;
-//        try {
-//            conn = repository.getConnection();
-//            // query data
-//            TupleQuery tupleQuery = conn.prepareTupleQuery(
-//                    QueryLanguage.SPARQL, query_s);
-//            TupleQueryResultWriter deleter = new mvm.mmrts.rdftriplestore.cloudbase.QueryResultsDeleter(conn);
-//            tupleQuery.evaluate(deleter);
-//
-//        } catch (Exception e) {
-//            throw new ServletException(e);
-//        } finally {
-//            if (conn != null) {
-//                try {
-//                    conn.close();
-//                } catch (RepositoryException e) {
-//
-//                }
-//            }
-//        }
-//    }
-//
-//}

http://git-wip-us.apache.org/repos/asf/incubator-rya/blob/80faf06d/partition/web.partition.rdf/src/main/java/mvm/cloud/rdf/web/partition/LoadDataServlet.java
----------------------------------------------------------------------
diff --git a/partition/web.partition.rdf/src/main/java/mvm/cloud/rdf/web/partition/LoadDataServlet.java b/partition/web.partition.rdf/src/main/java/mvm/cloud/rdf/web/partition/LoadDataServlet.java
deleted file mode 100644
index 46fe0e7..0000000
--- a/partition/web.partition.rdf/src/main/java/mvm/cloud/rdf/web/partition/LoadDataServlet.java
+++ /dev/null
@@ -1,56 +0,0 @@
-package mvm.cloud.rdf.web.partition;
-
-import org.openrdf.model.Resource;
-import org.openrdf.repository.RepositoryConnection;
-import org.openrdf.repository.RepositoryException;
-import org.openrdf.rio.RDFFormat;
-import org.openrdf.rio.RDFParseException;
-
-import javax.servlet.ServletException;
-import javax.servlet.ServletInputStream;
-import javax.servlet.http.HttpServletRequest;
-import javax.servlet.http.HttpServletResponse;
-import java.io.IOException;
-
-public class LoadDataServlet extends AbstractRDFWebServlet {
-
-    @Override
-    protected void doPost(HttpServletRequest req, HttpServletResponse resp)
-            throws ServletException, IOException {
-        if (req == null || req.getInputStream() == null)
-            return;
-
-        String format_s = req.getParameter("format");
-        RDFFormat format = RDFFormat.RDFXML;
-        if (format_s != null) {
-            format = RDFFormat.valueOf(format_s);
-            if (format == null)
-                throw new ServletException("RDFFormat[" + format_s + "] not found");
-        }
-        ServletInputStream stream = req.getInputStream();
-
-        RepositoryConnection conn = null;
-        try {
-            conn = repository.getConnection();
-
-            // generate data
-            conn.add(stream, "", format, new Resource[]{});
-            conn.commit();
-
-            conn.close();
-        } catch (RepositoryException e) {
-            throw new ServletException(e);
-        } catch (RDFParseException e) {
-            throw new ServletException(e);
-        } finally {
-            if (conn != null) {
-                try {
-                    conn.close();
-                } catch (RepositoryException e) {
-
-                }
-            }
-        }
-    }
-
-}
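
Stripped of the servlet plumbing, the load path is a single Sesame call: parse the incoming stream as RDF and add it to the repository. A minimal sketch, with "repo" assumed to be an initialized Repository:

    import java.io.InputStream;
    import org.openrdf.repository.Repository;
    import org.openrdf.repository.RepositoryConnection;
    import org.openrdf.rio.RDFFormat;

    // Adds the statements parsed from "in" (RDF/XML by default, as above).
    static void load(Repository repo, InputStream in, RDFFormat format) throws Exception {
        RepositoryConnection conn = repo.getConnection();
        try {
            conn.add(in, "", format);   // empty base URI, default context
            conn.commit();
        } finally {
            conn.close();
        }
    }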

http://git-wip-us.apache.org/repos/asf/incubator-rya/blob/80faf06d/partition/web.partition.rdf/src/main/java/mvm/cloud/rdf/web/partition/QueryDataServlet.java
----------------------------------------------------------------------
diff --git a/partition/web.partition.rdf/src/main/java/mvm/cloud/rdf/web/partition/QueryDataServlet.java b/partition/web.partition.rdf/src/main/java/mvm/cloud/rdf/web/partition/QueryDataServlet.java
deleted file mode 100644
index d456d02..0000000
--- a/partition/web.partition.rdf/src/main/java/mvm/cloud/rdf/web/partition/QueryDataServlet.java
+++ /dev/null
@@ -1,158 +0,0 @@
-package mvm.cloud.rdf.web.partition;
-
-import org.openrdf.model.ValueFactory;
-import org.openrdf.model.impl.ValueFactoryImpl;
-import org.openrdf.query.GraphQuery;
-import org.openrdf.query.QueryLanguage;
-import org.openrdf.query.TupleQuery;
-import org.openrdf.query.resultio.sparqlxml.SPARQLResultsXMLWriter;
-import org.openrdf.repository.Repository;
-import org.openrdf.repository.RepositoryConnection;
-import org.openrdf.repository.RepositoryException;
-import org.openrdf.rio.rdfxml.RDFXMLWriter;
-
-import javax.servlet.ServletException;
-import javax.servlet.ServletOutputStream;
-import javax.servlet.http.HttpServletRequest;
-import javax.servlet.http.HttpServletResponse;
-import java.io.IOException;
-import java.io.PrintStream;
-
-import static mvm.mmrts.rdf.partition.PartitionConstants.*;
-
-public class QueryDataServlet extends AbstractRDFWebServlet {
-
-    private ValueFactory vf = new ValueFactoryImpl();
-
-    @Override
-    protected void doGet(HttpServletRequest req, HttpServletResponse resp)
-            throws ServletException, IOException {
-        if (req == null || req.getInputStream() == null)
-            return;
-
-        String query = req.getParameter("query");
-        String start = req.getParameter(START_BINDING);
-        String end = req.getParameter(END_BINDING);
-        String infer = req.getParameter("infer");
-        String performant = req.getParameter("performant");
-        String useStats = req.getParameter("useStats");
-        String timeUris = req.getParameter("timeUris");
-
-        System.out.println("Start[" + start + "] and End[" + end + "]");
-
-        //validate infer, performant
-        if (infer != null) {
-            Boolean.parseBoolean(infer);
-        } else if (performant != null) {
-            Boolean.parseBoolean(performant);
-        }
-
-        if (query == null) {
-            throw new ServletException("Please set a query");
-        }
-        if (query.toLowerCase().contains("select")) {
-            try {
-                performSelect(query, start, end, infer, performant, useStats, timeUris, resp);
-            } catch (Exception e) {
-                throw new ServletException(e);
-            }
-        } else if (query.toLowerCase().contains("construct")) {
-            try {
-                performConstruct(query, start, end, infer, performant, useStats, timeUris, resp);
-            } catch (Exception e) {
-                throw new ServletException(e);
-            }
-        } else {
-            throw new ServletException("Invalid SPARQL query: " + query);
-        }
-
-    }
-
-    private void performConstruct(String query, String start, String end, String infer, String performant, String useStats, String timeUris, HttpServletResponse resp)
-            throws Exception {
-        RepositoryConnection conn = null;
-        try {
-            ServletOutputStream os = resp.getOutputStream();
-            conn = repository.getConnection();
-
-            // query data
-            GraphQuery graphQuery = conn.prepareGraphQuery(
-                    QueryLanguage.SPARQL, query);
-            if (start != null && start.length() > 0)
-                graphQuery.setBinding(START_BINDING, vf.createLiteral(Long.parseLong(start)));
-            if (end != null && end.length() > 0)
-                graphQuery.setBinding(END_BINDING, vf.createLiteral(Long.parseLong(end)));
-            if (performant != null && performant.length() > 0)
-                graphQuery.setBinding("performant", vf.createLiteral(Boolean.parseBoolean(performant)));
-            if (infer != null && infer.length() > 0)
-                graphQuery.setBinding("infer", vf.createLiteral(Boolean.parseBoolean(infer)));
-            if (useStats != null && useStats.length() > 0)
-                graphQuery.setBinding("useStats", vf.createLiteral(Boolean.parseBoolean(useStats)));
-            if (timeUris != null && timeUris.length() > 0)
-                graphQuery.setBinding("timeUris", vf.createURI(timeUris));
-            RDFXMLWriter rdfWriter = new RDFXMLWriter(os);
-            graphQuery.evaluate(rdfWriter);
-
-        } catch (Exception e) {
-            resp.setStatus(500);
-            e.printStackTrace(new PrintStream(resp.getOutputStream()));
-            throw new ServletException(e);
-        } finally {
-            if (conn != null) {
-                try {
-                    conn.close();
-                } catch (RepositoryException e) {
-
-                }
-            }
-        }
-    }
-
-    private void performSelect(String query, String start, String end, String infer, String performant, String useStats, String timeUris, HttpServletResponse resp)
-            throws Exception {
-        RepositoryConnection conn = null;
-        try {
-            ServletOutputStream os = resp.getOutputStream();
-            conn = repository.getConnection();
-
-            // query data
-            TupleQuery tupleQuery = conn.prepareTupleQuery(
-                    QueryLanguage.SPARQL, query);
-            if (start != null && start.length() > 0)
-                tupleQuery.setBinding(START_BINDING, vf.createLiteral(Long.parseLong(start)));
-            if (end != null && end.length() > 0)
-                tupleQuery.setBinding(END_BINDING, vf.createLiteral(Long.parseLong(end)));
-            if (performant != null && performant.length() > 0)
-                tupleQuery.setBinding("performant", vf.createLiteral(Boolean.parseBoolean(performant)));
-            if (infer != null && infer.length() > 0)
-                tupleQuery.setBinding("infer", vf.createLiteral(Boolean.parseBoolean(infer)));
-            if (useStats != null && useStats.length() > 0)
-                tupleQuery.setBinding("useStats", vf.createLiteral(Boolean.parseBoolean(useStats)));
-            if (timeUris != null && timeUris.length() > 0)
-                tupleQuery.setBinding("timeUris", vf.createURI(timeUris));
-            SPARQLResultsXMLWriter sparqlWriter = new SPARQLResultsXMLWriter(os);
-            tupleQuery.evaluate(sparqlWriter);
-
-        } catch (Exception e) {
-            resp.setStatus(500);
-            e.printStackTrace(new PrintStream(resp.getOutputStream()));
-            throw new ServletException(e);
-        } finally {
-            if (conn != null) {
-                try {
-                    conn.close();
-                } catch (RepositoryException e) {
-
-                }
-            }
-        }
-    }
-
-    public Repository getRepository() {
-        return repository;
-    }
-
-    public void setRepository(Repository repository) {
-        this.repository = repository;
-    }
-}
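
The SELECT path in the servlet above reduces to: prepare the tuple query, bind the optional time-range parameters, and stream SPARQL/XML results. A minimal sketch; the binding names "binding.start" and "binding.end" are taken from the query JSPs further below, and "repo" is assumed to be an initialized Repository:

    import java.io.OutputStream;
    import org.openrdf.model.ValueFactory;
    import org.openrdf.query.QueryLanguage;
    import org.openrdf.query.TupleQuery;
    import org.openrdf.query.resultio.sparqlxml.SPARQLResultsXMLWriter;
    import org.openrdf.repository.Repository;
    import org.openrdf.repository.RepositoryConnection;

    static void select(Repository repo, String sparql, Long start, Long end,
            OutputStream os) throws Exception {
        RepositoryConnection conn = repo.getConnection();
        try {
            TupleQuery q = conn.prepareTupleQuery(QueryLanguage.SPARQL, sparql);
            ValueFactory vf = repo.getValueFactory();
            if (start != null) q.setBinding("binding.start", vf.createLiteral(start.longValue()));
            if (end != null)   q.setBinding("binding.end", vf.createLiteral(end.longValue()));
            q.evaluate(new SPARQLResultsXMLWriter(os));   // streams results as SPARQL/XML
        } finally {
            conn.close();
        }
    }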

http://git-wip-us.apache.org/repos/asf/incubator-rya/blob/80faf06d/partition/web.partition.rdf/src/main/java/mvm/cloud/rdf/web/partition/QuerySerqlDataServlet.java
----------------------------------------------------------------------
diff --git a/partition/web.partition.rdf/src/main/java/mvm/cloud/rdf/web/partition/QuerySerqlDataServlet.java b/partition/web.partition.rdf/src/main/java/mvm/cloud/rdf/web/partition/QuerySerqlDataServlet.java
deleted file mode 100644
index d2bdc29..0000000
--- a/partition/web.partition.rdf/src/main/java/mvm/cloud/rdf/web/partition/QuerySerqlDataServlet.java
+++ /dev/null
@@ -1,116 +0,0 @@
-//package mvm.cloud.rdf.web.partition;
-//
-//import org.openrdf.query.GraphQuery;
-//import org.openrdf.query.QueryLanguage;
-//import org.openrdf.query.TupleQuery;
-//import org.openrdf.query.resultio.sparqlxml.SPARQLResultsXMLWriter;
-//import org.openrdf.repository.Repository;
-//import org.openrdf.repository.RepositoryConnection;
-//import org.openrdf.repository.RepositoryException;
-//import org.openrdf.rio.rdfxml.RDFXMLWriter;
-//
-//import javax.servlet.ServletException;
-//import javax.servlet.ServletOutputStream;
-//import javax.servlet.http.HttpServletRequest;
-//import javax.servlet.http.HttpServletResponse;
-//import java.io.IOException;
-//import java.io.PrintStream;
-//
-//public class QuerySerqlDataServlet extends AbstractRDFWebServlet {
-//
-//    @Override
-//    protected void doGet(HttpServletRequest req, HttpServletResponse resp)
-//            throws ServletException, IOException {
-//        if (req == null || req.getInputStream() == null)
-//            return;
-//
-//        String query = req.getParameter("query");
-//
-//        if (query == null) {
-//            throw new ServletException("Please set a query");
-//        }
-//
-//        if (query.toLowerCase().contains("select")) {
-//            try {
-//                performSelect(query, resp);
-//            } catch (Exception e) {
-//                throw new ServletException(e);
-//            }
-//        } else if (query.toLowerCase().contains("construct")) {
-//            try {
-//                performConstruct(query, resp);
-//            } catch (Exception e) {
-//                throw new ServletException(e);
-//            }
-//        } else {
-//            throw new ServletException("Invalid SERQL query: " + query);
-//        }
-//
-//    }
-//
-//    private void performConstruct(String query, HttpServletResponse resp)
-//            throws Exception {
-//        RepositoryConnection conn = null;
-//        try {
-//            ServletOutputStream os = resp.getOutputStream();
-//            conn = repository.getConnection();
-//
-//            // query data
-//            GraphQuery graphQuery = conn.prepareGraphQuery(
-//                    QueryLanguage.SERQL, query);
-//            RDFXMLWriter rdfWriter = new RDFXMLWriter(os);
-//            graphQuery.evaluate(rdfWriter);
-//
-//            conn.close();
-//        } catch (Exception e) {
-//            resp.setStatus(500);
-//            e.printStackTrace(new PrintStream(resp.getOutputStream()));
-//            throw new ServletException(e);
-//        } finally {
-//            if (conn != null) {
-//                try {
-//                    conn.close();
-//                } catch (RepositoryException e) {
-//
-//                }
-//            }
-//        }
-//    }
-//
-//    private void performSelect(String query, HttpServletResponse resp)
-//            throws Exception {
-//        RepositoryConnection conn = null;
-//        try {
-//            ServletOutputStream os = resp.getOutputStream();
-//            conn = repository.getConnection();
-//
-//            // query data
-//            TupleQuery tupleQuery = conn.prepareTupleQuery(
-//                    QueryLanguage.SERQL, query);
-//            SPARQLResultsXMLWriter sparqlWriter = new SPARQLResultsXMLWriter(os);
-//            tupleQuery.evaluate(sparqlWriter);
-//
-//            conn.close();
-//        } catch (Exception e) {
-//            resp.setStatus(500);
-//            e.printStackTrace(new PrintStream(resp.getOutputStream()));
-//            throw new ServletException(e);
-//        } finally {
-//            if (conn != null) {
-//                try {
-//                    conn.close();
-//                } catch (RepositoryException e) {
-//
-//                }
-//            }
-//        }
-//    }
-//
-//    public Repository getRepository() {
-//        return repository;
-//    }
-//
-//    public void setRepository(Repository repository) {
-//        this.repository = repository;
-//    }
-//}

http://git-wip-us.apache.org/repos/asf/incubator-rya/blob/80faf06d/partition/web.partition.rdf/src/main/java/mvm/cloud/rdf/web/partition/RDFWebConstants.java
----------------------------------------------------------------------
diff --git a/partition/web.partition.rdf/src/main/java/mvm/cloud/rdf/web/partition/RDFWebConstants.java b/partition/web.partition.rdf/src/main/java/mvm/cloud/rdf/web/partition/RDFWebConstants.java
deleted file mode 100644
index 154db3a..0000000
--- a/partition/web.partition.rdf/src/main/java/mvm/cloud/rdf/web/partition/RDFWebConstants.java
+++ /dev/null
@@ -1,15 +0,0 @@
-package mvm.cloud.rdf.web.partition;
-
-/**
- * Interface RDFWebConstants
- * Date: Dec 13, 2010
- * Time: 9:39:45 AM
- */
-public interface RDFWebConstants {
-    public static final String INSTANCE_PARAM = "rts.instance";
-    public static final String ZK_PARAM = "rts.zk";
-    public static final String USER_PARAM = "rts.user";
-    public static final String PASSWORD_PARAM = "rts.password";
-    public static final String TABLE_PARAM = "rts.table";
-    public static final String SHARDTABLE_PARAM = "rts.shardtable";
-}

http://git-wip-us.apache.org/repos/asf/incubator-rya/blob/80faf06d/partition/web.partition.rdf/src/main/webapp/WEB-INF/web.xml
----------------------------------------------------------------------
diff --git a/partition/web.partition.rdf/src/main/webapp/WEB-INF/web.xml b/partition/web.partition.rdf/src/main/webapp/WEB-INF/web.xml
deleted file mode 100644
index 853401c..0000000
--- a/partition/web.partition.rdf/src/main/webapp/WEB-INF/web.xml
+++ /dev/null
@@ -1,77 +0,0 @@
-<!DOCTYPE web-app PUBLIC
- "-//Sun Microsystems, Inc.//DTD Web Application 2.3//EN"
- "http://java.sun.com/dtd/web-app_2_3.dtd" >
-
-<web-app>
-  <display-name>RDF Cloud Triple Store Web Access</display-name>
-  <servlet>
-		<servlet-name>LoadDataServlet</servlet-name>
-		<servlet-class>mvm.cloud.rdf.web.partition.LoadDataServlet</servlet-class>
-
-		<init-param>
-			<param-name>rts.instance</param-name>
-			<param-value>stratus</param-value>
-		</init-param>
-		<init-param>
-			<param-name>rts.zk</param-name>
-			<param-value>10.40.190.113:2181</param-value>
-		</init-param>
-		<init-param>
-			<param-name>rts.table</param-name>
-			<param-value>partitionRdf</param-value>
-		</init-param>
-		<init-param>
-			<param-name>rts.shardtable</param-name>
-			<param-value>partitionRdf</param-value>
-		</init-param>
-		<init-param>
-			<param-name>rts.user</param-name>
-			<param-value>root</param-value>
-		</init-param>
-		<init-param>
-			<param-name>rts.password</param-name>
-			<param-value>password</param-value>
-		</init-param>
-	</servlet>
-
-	<servlet-mapping>
-		<servlet-name>LoadDataServlet</servlet-name>
-		<url-pattern>/loadrdf</url-pattern>
-	</servlet-mapping>
-
-  <servlet>
-		<servlet-name>QueryDataServlet</servlet-name>
-		<servlet-class>mvm.cloud.rdf.web.partition.QueryDataServlet</servlet-class>
-
-		<init-param>
-			<param-name>rts.instance</param-name>
-			<param-value>stratus</param-value>
-		</init-param>
-		<init-param>
-			<param-name>rts.zk</param-name>
-			<param-value>10.40.190.113:2181</param-value>
-		</init-param>
-		<init-param>
-			<param-name>rts.table</param-name>
-			<param-value>partitionRdf</param-value>
-		</init-param>
-		<init-param>
-			<param-name>rts.shardtable</param-name>
-			<param-value>partitionRdf</param-value>
-		</init-param>
-		<init-param>
-			<param-name>rts.user</param-name>
-			<param-value>root</param-value>
-		</init-param>
-		<init-param>
-			<param-name>rts.password</param-name>
-			<param-value>password</param-value>
-		</init-param>
-	</servlet>
-
-	<servlet-mapping>
-		<servlet-name>QueryDataServlet</servlet-name>
-		<url-pattern>/queryrdf</url-pattern>
-	</servlet-mapping>
-
-</web-app>

http://git-wip-us.apache.org/repos/asf/incubator-rya/blob/80faf06d/partition/web.partition.rdf/src/main/webapp/crossdomain.xml
----------------------------------------------------------------------
diff --git a/partition/web.partition.rdf/src/main/webapp/crossdomain.xml b/partition/web.partition.rdf/src/main/webapp/crossdomain.xml
deleted file mode 100644
index c3b5339..0000000
--- a/partition/web.partition.rdf/src/main/webapp/crossdomain.xml
+++ /dev/null
@@ -1,5 +0,0 @@
-<?xml version="1.0"?>
-<!DOCTYPE cross-domain-policy SYSTEM "http://www.macromedia.com/xml/dtds/cross-domain-policy.dtd">
-<cross-domain-policy>
-    <allow-access-from domain="*" secure="false"/>
-</cross-domain-policy>
\ No newline at end of file

http://git-wip-us.apache.org/repos/asf/incubator-rya/blob/80faf06d/partition/web.partition.rdf/src/main/webapp/serqlQuery.jsp
----------------------------------------------------------------------
diff --git a/partition/web.partition.rdf/src/main/webapp/serqlQuery.jsp b/partition/web.partition.rdf/src/main/webapp/serqlQuery.jsp
deleted file mode 100644
index 0e49d7a..0000000
--- a/partition/web.partition.rdf/src/main/webapp/serqlQuery.jsp
+++ /dev/null
@@ -1,36 +0,0 @@
-<%@ page contentType="text/html; charset=iso-8859-1" language="java" %>
-<%@ page import="java.net.*" %>
-<%
-    String serql=request.getParameter("serql");
-    if(serql != null){
-        String serqlEnc = URLEncoder.encode(serql,"UTF-8");
-        String urlTo = "queryserql?query=" + serqlEnc;
-        response.sendRedirect(urlTo);
-    }
-%>
-<html>
-<body>
-<form name="serqlQuery" method="post" action="serqlQuery.jsp">
-<table width="100%" border="0" cellspacing="0" cellpadding="0">
-  <tr>
-    <td width="22%">&nbsp;</td>
-    <td width="78%">&nbsp;</td>
-    </tr>
-  <tr>
-    <td>SERQL Query: </td>
-    <td><textarea cols="150" rows="40" name="serql">
-Enter Serql query here
-    </textarea></td>
-  </tr>
-  <tr>
-    <td>&nbsp;</td>
-    <td><input type="submit" name="submit" value="Submit"></td>
-    </tr>
-  <tr>
-    <td>&nbsp;</td>
-    <td>&nbsp;</td>
-    </tr>
-</table>
-</form>
-</body>
-</html>
\ No newline at end of file

http://git-wip-us.apache.org/repos/asf/incubator-rya/blob/80faf06d/partition/web.partition.rdf/src/main/webapp/sparqlQuery.jsp
----------------------------------------------------------------------
diff --git a/partition/web.partition.rdf/src/main/webapp/sparqlQuery.jsp b/partition/web.partition.rdf/src/main/webapp/sparqlQuery.jsp
deleted file mode 100644
index 090d5e2..0000000
--- a/partition/web.partition.rdf/src/main/webapp/sparqlQuery.jsp
+++ /dev/null
@@ -1,47 +0,0 @@
-<%@ page contentType="text/html; charset=iso-8859-1" language="java" %>
-<%@ page import="java.net.*" %>
-<%
-    String sparql=request.getParameter("sparql");
-    String endTime=request.getParameter("endTime");
-    String startTime=request.getParameter("startTime");
-    
-    if(sparql != null){
-        String sparqlEnc = URLEncoder.encode(sparql,"UTF-8");
-        String urlTo = "queryrdf?binding.start="+startTime+"&binding.end="+endTime+"&query=" + sparqlEnc;
-        response.sendRedirect(urlTo);
-    }
-%>
-<html>
-<body>
-<form name="sparqlQuery" method="post" action="sparqlQuery.jsp">
-<table width="100%" border="0" cellspacing="0" cellpadding="0">
-  <tr>
-    <td width="22%">&nbsp;</td>
-    <td width="78%">&nbsp;</td>
-    </tr>
-  <tr>
-    <td>SPARQL Query: </td>
-    <td><textarea cols="200" rows="60" name="sparql">
-Enter Sparql query here
-    </textarea></td>
-  </tr>
-  <tr>
-    <td>Start Time</td>
-    <td><INPUT TYPE=TEXT NAME="startTime" SIZE="20"></td>
-  </tr>
-  <tr>
-    <td>End Time</td>
-    <td><INPUT TYPE=TEXT NAME="endTime" SIZE="20"></td>
-  </tr>
-  <tr>
-    <td>&nbsp;</td>
-    <td><input type="submit" name="submit" value="Submit"></td>
-    </tr>
-  <tr>
-    <td>&nbsp;</td>
-    <td>&nbsp;</td>
-    </tr>
-</table>
-</form>
-</body>
-</html>
\ No newline at end of file

http://git-wip-us.apache.org/repos/asf/incubator-rya/blob/80faf06d/partition/web.partition.rdf/src/test/java/mvm/cloud/rdf/web/cloudbase/sail/DeleteDataServletRun.java
----------------------------------------------------------------------
diff --git a/partition/web.partition.rdf/src/test/java/mvm/cloud/rdf/web/cloudbase/sail/DeleteDataServletRun.java b/partition/web.partition.rdf/src/test/java/mvm/cloud/rdf/web/cloudbase/sail/DeleteDataServletRun.java
deleted file mode 100644
index 5e84d34..0000000
--- a/partition/web.partition.rdf/src/test/java/mvm/cloud/rdf/web/cloudbase/sail/DeleteDataServletRun.java
+++ /dev/null
@@ -1,455 +0,0 @@
-package mvm.cloud.rdf.web.cloudbase.sail;
-
-import java.io.BufferedReader;
-import java.io.InputStreamReader;
-import java.net.URL;
-import java.net.URLConnection;
-import java.net.URLEncoder;
-
-public class DeleteDataServletRun {
-
-    public static void main(String[] args) {
-        try {
-//		String query = "SELECT ?artist WHERE { ?abt <http://www.recshop.fake/cd#year> \"1988\"." +
-//				" }";
-            String artistQuery = "SELECT ?artist WHERE { "
-                    + " ?abt <http://www.recshop.fake/cd#artist> ?artist . "
-                    + " ?abt <http://www.recshop.fake/cd#year> \"1993\" . "
-                    + "}";
-//		String query = "SELECT ?pred ?obj WHERE { <http://www.recshop.fake/cd/Empire_Burlesque> ?pred ?obj }";
-//		String query = "SELECT ?pred ?label ?obj WHERE { <http://purl.org/swag/sbp/tab#A5> ?pred ?obj ." +
-//				" ?obj <http://www.w3.org/2000/01/rdf-schema#label> ?label }";
-            long dayBefore = System.currentTimeMillis() - 86400000;
-            System.out.println(dayBefore);
-//        String query = "SELECT DISTINCT ?obj WHERE { ?serv <http://mvm.com/rdf/mm/relatesTo> <http://mvm.com/rdf/mm/LTS::stratus30> . " +
-//				" ?serv <http://mvm.com/rdf/mm/relatesTo> ?obj ." +
-//                " ?serv <http://mvm.com/rdf/mm/timestamp> ?ts ." +
-////                " FILTER (?ts >= '"+dayBefore+"') " +
-//                " }" +
-//                " ORDER BY ?obj ";
-
-            String giveAllClusters = "SELECT DISTINCT ?uu WHERE { ?uu <http://mvm.com/rdf/mm/relatesTo> ?obj . " +
-                    " }" +
-                    " ORDER BY ?uu ";
-
-//        String query = "SELECT DISTINCT ?obj WHERE { <http://mvm.com/rdf/mm/1a4eaa7c-842c-456a-94c0-6547de6be841> <http://mvm.com/rdf/mm/relatesTo> ?obj . " +
-//                " }" +
-//                " ORDER BY ?obj ";
-
-            //hasfunction query
-            String hasFunctionQuery = "SELECT DISTINCT ?obj WHERE { ?uu <http://mvm.com/rdf/mm/hasFunction> <http://mvm.com/rdf/mm/america> . " +
-                    " ?uu <http://mvm.com/rdf/mm/relatesTo> ?obj" +
-                    " }" +
-                    " ORDER BY ?obj ";
-
-            String allFunctions = "SELECT DISTINCT ?func ?obj WHERE { ?uu <http://mvm.com/rdf/mm/hasFunction> ?func . " +
-                    " ?uu <http://mvm.com/rdf/mm/relatesTo> ?obj" +
-                    " }" +
-                    " ORDER BY ?func ";
-
-            String allFunctionsThresh = "SELECT DISTINCT ?func ?obj ?thresh WHERE { ?uu <http://mvm.com/rdf/mm/hasFunction> ?func . " +
-                    " ?uu <http://mvm.com/rdf/mm/relatesTo> ?obj ." +
-                    " ?uu <http://mvm.com/rdf/mm/threshold> ?thresh" +
-                    " }" +
-                    " ORDER BY ?func ";
-
-
-            String cwdQuery = "SELECT DISTINCT ?obj ?packname WHERE { ?subj <urn:mvm.cwd/2.0/man/uuid> ?obj . " +
-                    " ?subj <urn:mvm.cwd/2.0/man/installedPackages> ?instPacks ." +
-                    " ?instPacks <urn:mvm.cwd/2.0/man/package> ?packid ." +
-                    " ?packid <urn:mvm.cwd/2.0/man/name> ?packname } ";
-
-            String cwdAllServersQuery = "SELECT DISTINCT ?obj WHERE { ?subj <urn:mvm.cwd/2.0/man/uuid> ?obj } ";
-
-            // rearrange for better filter
-            // 0.124s
-            String lubm1 = "PREFIX rdf: <http://www.w3.org/1999/02/22-rdf-syntax-ns#>\n" +
-                    " PREFIX ub: <urn:test:onto:univ#>\n" +
-                    " SELECT * WHERE\n" +
-                    " {\n" +
-                    "     ?x ub:takesCourse <http://www.Department0.University0.edu/GraduateCourse0> .\n" +
-//                "     ?x rdf:type ub:GraduateStudent .\n" +
-                    " }";
-
-            // 142s
-            // not sure why it is so long will have to do some more tests
-            String lubm2 = "PREFIX rdf: <http://www.w3.org/1999/02/22-rdf-syntax-ns#>\n" +
-                    " PREFIX ub: <urn:edu.lubm#>\n" +
-                    " SELECT * WHERE\n" +
-                    " {\n" +
-                    "      ?z ub:subOrganizationOf ?y .\n" +
-                    "      ?y rdf:type ub:University .\n" +
-                    "      ?z rdf:type ub:Department .\n" +
-                    "      ?x ub:memberOf ?z .\n" +
-                    "      ?x rdf:type ub:GraduateStudent .\n" +
-                    "      ?x ub:undergraduateDegreeFrom ?y .\n" +
-                    " }";
-
-            String lubm2_a = "PREFIX rdf: <http://www.w3.org/1999/02/22-rdf-syntax-ns#>\n" +
-                    " PREFIX ub: <http://test.univ.onto.org#>\n" +
-                    " SELECT * WHERE\n" +
-                    " {\n" +
-                    "      ?x rdf:type ub:GraduateStudent .\n" +
-                    "      ?x ub:memberOf ?z .\n" +
-                    "      ?z ub:subOrganizationOf ?y .\n" +
-                    "      ?z rdf:type ub:Department .\n" +
-                    "      ?y rdf:type ub:University .\n" +
-//                "      ?x ub:undergraduateDegreeFrom ?y .\n" +
-                    " }";
-
-            // 0.127s
-            // Rearranged to put the assistant professor first, better filtering
-            String lubm3 = "PREFIX rdf: <http://www.w3.org/1999/02/22-rdf-syntax-ns#>\n" +
-                    " PREFIX ub: <urn:edu.lubm#>\n" +
-                    " SELECT * WHERE\n" +
-                    " {\n" +
-                    "      ?x ub:publicationAuthor <http://www.Department0.University0.edu/AssistantProfessor0> .\n" +
-                    "      ?x rdf:type ub:Publication .\n" +
-                    " }";
-
-//        had to infer relationships myself
-//        0.671s
-            String lubm4 = "PREFIX rdf: <http://www.w3.org/1999/02/22-rdf-syntax-ns#>\n" +
-                    " PREFIX rdfs: <http://www.w3.org/2000/01/rdf-schema#>\n" +
-                    " PREFIX ub: <urn:edu.lubm#>\n" +
-                    " SELECT * WHERE\n" +
-                    " {\n" +
-                    "      ?y ub:worksFor <http://www.Department0.University5.edu> .\n" +
-                    "      ?x rdfs:subClassOf ub:Professor .\n" +
-                    "      ?y rdf:type ?x .\n" +
-                    "      ?y ub:name ?y1 .\n" +
-                    "      ?y ub:emailAddress ?y2 .\n" +
-                    "      ?y ub:telephone ?y3 .\n" +
-                    " }";
-
-            //lubm5, we cannot do inferring for more than one level now. Person is too difficult
-
-            //lubm6, we cannot do the implicit inference between Student and GraduateStudent
-
-            //lubm14
-            //0.1s
-            String lubm14 = "PREFIX rdf: <http://www.w3.org/1999/02/22-rdf-syntax-ns#>\n" +
-                    " PREFIX ub: <urn:edu.lubm#>\n" +
-                    " SELECT * WHERE\n" +
-                    " {\n" +
-                    "     ?x rdf:type ub:UndergraduateStudent .\n" +
-                    " }";
-
-            String bongoAllCollections = "PREFIX rdf: <http://www.w3.org/1999/02/22-rdf-syntax-ns#>\n" +
-                    " PREFIX bg: <http://mvm.com/rdf/bongo/bongo.owl#>\n" +
-                    " SELECT * WHERE\n" +
-                    " {\n" +
-                    "     ?x rdf:type bg:Collection .\n" +
-                    "     ?x bg:uniqueid ?uid .\n" +
-                    "     ?x bg:title ?title .\n" +
-                    "     ?x bg:hasAuthor ?author .\n" +
-                    "     ?x bg:marking ?marking .\n" +
-                    " }";
-
-            String bongoEntriesForCategory = "PREFIX rdf: <http://www.w3.org/1999/02/22-rdf-syntax-ns#>\n" +
-                    " PREFIX bg: <http://mvm.com/rdf/bongo/bongo.owl#>\n" +
-                    " SELECT ?uniqueid WHERE\n" +
-                    " {\n" +
-                    "     ?entryid bg:inCollection bg:CollA .\n" +
-                    "     ?entryid rdf:type bg:Entry .\n" +
-                    "     ?entryid bg:uniqueid ?uniqueid .\n" +
-                    "     ?entryid bg:hasCategory ?category .\n" +
-                    "     FILTER (?category = \"cat1\") \n" +
-                    " }";
-
-            String bongoEntriesForAuthor = "PREFIX rdf: <http://www.w3.org/1999/02/22-rdf-syntax-ns#>\n" +
-                    " PREFIX bg: <http://mvm.com/rdf/bongo/bongo.owl#>\n" +
-                    " SELECT ?uniqueid WHERE\n" +
-                    " {\n" +
-                    "     ?entryid bg:inCollection bg:CollA .\n" +
-                    "     ?entryid rdf:type bg:Entry .\n" +
-                    "     ?entryid bg:uniqueid ?uniqueid .\n" +
-                    "     ?entryid bg:hasAuthor ?author .\n" +
-                    "     FILTER (?author = \"andrew2\") \n" +
-                    " }";
-
-            String bongoEntriesForModifiedTime = "PREFIX rdf: <http://www.w3.org/1999/02/22-rdf-syntax-ns#>\n" +
-                    " PREFIX bg: <http://mvm.com/rdf/bongo/bongo.owl#>\n" +
-                    " PREFIX xsd: <http://www.w3.org/2001/XMLSchema#>\n" +
-                    " SELECT DISTINCT ?entryid WHERE\n" +
-                    " {\n" +
-                    "     ?entryid bg:inCollection bg:CollA .\n" +
-                    "     ?entryid rdf:type bg:Entry .\n" +
-                    "     ?entryid bg:uniqueid ?uniqueid .\n" +
-                    "     ?entryid bg:modifiedTime ?modifiedTime .\n" +
-                    "     FILTER (xsd:dateTime(?modifiedTime) >= \"2011-10-21T13:18:30\"^^xsd:dateTime) \n" +
-                    " }";
-            String bongoEntriesSortTitle = "PREFIX rdf: <http://www.w3.org/1999/02/22-rdf-syntax-ns#>\n" +
-                    " PREFIX bg: <http://mvm.com/rdf/bongo/bongo.owl#>\n" +
-                    " PREFIX xsd: <http://www.w3.org/2001/XMLSchema#>\n" +
-                    " SELECT DISTINCT ?uniqueid WHERE\n" +
-                    " {\n" +
-                    "     ?entryid bg:inCollection bg:CollA .\n" +
-                    "     ?entryid rdf:type bg:Entry .\n" +
-                    "     ?entryid bg:uniqueid ?uniqueid .\n" +
-                    "     ?entryid bg:title ?title .\n" +
-                    " } ORDER BY ?title";
-
-            String bongoEntriesForTitle = "PREFIX rdf: <http://www.w3.org/1999/02/22-rdf-syntax-ns#>\n" +
-                    " PREFIX bg: <http://mvm.com/rdf/bongo/bongo.owl#>\n" +
-                    " PREFIX xsd: <http://www.w3.org/2001/XMLSchema#>\n" +
-                    " SELECT DISTINCT ?uniqueid WHERE\n" +
-                    " {\n" +
-                    "     ?entryid bg:inCollection bg:CollA .\n" +
-                    "     ?entryid rdf:type bg:Entry .\n" +
-                    "     ?entryid bg:uniqueid ?uniqueid .\n" +
-                    "     ?entryid bg:title ?title .\n" +
-                    "     FILTER (regex(?title,\"Entry1Title\")) }";
-
-            String bongoQuery = "PREFIX rdf: <http://www.w3.org/1999/02/22-rdf-syntax-ns#>\n" +
-                    " PREFIX bg: <http://mvm.com/rdf/bongo/bongo.owl#>\n" +
-                    " SELECT * WHERE\n" +
-                    " {\n" +
-                    "     ?col rdf:type bg:Collection .\n" +
-//                "     OPTIONAL{ bg:latency_mixture2_perSupplier_norm2\\/S\\/P\\/Stock\\/Google_simple\\/6 bg:uniqueid ?uniqueid} .\n" +
-//                "     OPTIONAL{ bg:'latency_mixture2_perSupplier_norm2/S/P/Stock/Google_simple/6' bg:title ?title} .\n" +
-//                "     OPTIONAL{ bg:latency_mixture2_perSupplier_norm2/S/P/Stock/Google_simple/6 bg:name ?name} .\n" +
-//                "     OPTIONAL{ bg:latency_mixture2_perSupplier_norm2/S/P/Stock/Google_simple/6 bg:marking ?marking} .\n" +
-//                "     OPTIONAL{ bg:latency_mixture2_perSupplier_norm2/S/P/Stock/Google_simple/6 bg:hasAuthor ?author} .\n" +
-                    " }";
-
-            String bongoAllEntriesInCollection = "PREFIX rdf: <http://www.w3.org/1999/02/22-rdf-syntax-ns#>\n" +
-                    " PREFIX bg: <http://mvm.com/rdf/bongo/bongo.owl#>\n" +
-                    " SELECT * WHERE\n" +
-                    " {\n" +
-                    "     ?y bg:inCollection bg:CollA .\n" +
-                    "     ?y rdf:type bg:Entry .\n" +
-                    "     ?y bg:uniqueid ?uid .\n" +
-                    "     ?y bg:title ?title .\n" +
-                    "     ?y bg:etag ?etag .\n" +
-                    " }";
-
-            String bongoAllForEntry1 = "PREFIX rdf: <http://www.w3.org/1999/02/22-rdf-syntax-ns#>\n" +
-                    " PREFIX bg: <http://mvm.com/rdf/bongo/bongo.owl#>\n" +
-                    " SELECT * WHERE\n" +
-                    " {\n" +
-                    "     bg:EntryM rdf:type bg:Entry .\n" +
-//                "     bg:EntryN bg:inCollection bg:CollectionN .\n" +
-                    "     bg:EntryM bg:mimeType ?mimeType .\n" +
-                    "     bg:EntryM bg:etag ?etag .\n" +
-                    "     OPTIONAL { bg:EntryM bg:slug ?slug}.\n" +
-                    "     bg:EntryM bg:uniqueid ?uniqueid .\n" +
-//                "     bg:EntryN bg:title ?title .\n" +
-//                "     bg:EntryN bg:marking ?marking .\n" +
-//                "     bg:EntryN bg:mediaMarking ?mediaMarking .\n" +
-//                "     bg:EntryN bg:editedTime ?editedTime .\n" +
-//                "     bg:EntryN bg:modifiedTime ?modifiedTime .\n" +
-//                "     bg:EntryN bg:publishedTime ?publishedTime .\n" +
-//                "     bg:EntryN bg:mediaStorageId ?mediaStorageId .\n" +
-//                "     bg:EntryN bg:mediaModifiedTime ?mediaModifiedTime .\n" +
-//                "     bg:EntryN bg:entryStorageId ?entryStorageId .\n" +
-                    " }";
-
-            String bongoEntryAllAuthors = "PREFIX rdf: <http://www.w3.org/1999/02/22-rdf-syntax-ns#>\n" +
-                    " PREFIX bg: <http://mvm.com/rdf/bongo/bongo.owl#>\n" +
-                    " SELECT * WHERE\n" +
-                    " {\n" +
-                    "     bg:Entry1 bg:hasAuthor ?y .\n" +
-                    " }";
-
-            String bongoEntriesModAfter = "PREFIX rdf: <http://www.w3.org/1999/02/22-rdf-syntax-ns#>\n" +
-                    " PREFIX bg: <http://mvm.com/rdf/bongo/bongo.owl#>\n" +
-                    " PREFIX xsd: <http://www.w3.org/2001/XMLSchema#>\n" +
-                    " SELECT * WHERE\n" +
-                    " {\n" +
-                    "     ?x bg:editedTime ?edTime .\n" +
-                    "     FILTER (xsd:dateTime(?edTime) >= \"2010-01-01T00:00:00\"^^xsd:dateTime)\n" +
-                    " }";
-
-            String cimData = "PREFIX rdf: <http://www.w3.org/1999/02/22-rdf-syntax-ns#>\n" +
-                    " PREFIX mm: <http://mvm.com/owl/mm.owl#>\n" +
-                    " PREFIX xsd: <http://www.w3.org/2001/XMLSchema#>\n" +
-                    " SELECT * WHERE\n" +
-                    " {\n" +
-                    "     ?x rdf:type mm:ComputerSystem .\n" +
-                    "     ?x mm:hasRunningOS ?y .\n" +
-                    "     ?y mm:name ?z .\n" +
-                    " }";
-
-            String cimData2 = "PREFIX rdf: <http://www.w3.org/1999/02/22-rdf-syntax-ns#>\n" +
-                    "  PREFIX mm: <http://mvm.com/owl/mm.owl#>\n" +
-                    "  PREFIX mmcs: <http://mvm.com/owl/mm.owl#urn:uuid:some:>\n" +
-                    "  SELECT  ?pred ?obj WHERE {\n" +
-                    "       mmcs:computersystem ?pred ?obj\n" +
-                    "  }";
-
-            String cimData3 = "PREFIX rdf: <http://www.w3.org/1999/02/22-rdf-syntax-ns#>\n" +
-                    "SELECT ?pred ?obj WHERE {\n" +
-                    "<http://mvm.com/owl/mm.owl#urn:mm:mvm:lts:root/cimv2:PG_OperatingSystem.CreationClassName=CIM_OperatingSystem,CSCreationClassName=CIM_UnitaryComputerSystem,CSName=nimbus02.bullpen.net,Name=Red_Hat_Enterprise_Linux_Server> ?pred ?obj\n" +
-                    "}";
-
-            String cimHasInstalledSoftware = "PREFIX rdf: <http://www.w3.org/1999/02/22-rdf-syntax-ns#>\n" +
-                    "PREFIX mm: <http://mvm.com/owl/2010/10/mm.owl#>\n" +
-                    "SELECT DISTINCT ?obj ?name ?caption WHERE {\n" +
-//                "     <http://mvm.com/owl/2010/10/mm.owl#urn:mm:mvm:LTS:root/cimv2:PG_OperatingSystem:CIM_ComputerSystem:stratus06.bullpen.net:Red_Hat_Enterprise_Linux_Server> mm:hasInstalledSoftware ?obj .\n" +
-                    "     ?serv mm:hasInstalledSoftware ?obj .\n" +
-                    "      ?obj mm:name ?name ;\n" +
-                    "           mm:caption ?caption .\n" +
-                    "}";
-
-            String cimHasRunningSoftware = "PREFIX rdf: <http://www.w3.org/1999/02/22-rdf-syntax-ns#>\n" +
-                    "PREFIX mm: <http://mvm.com/owl/2010/10/mm.owl#>\n" +
-                    "SELECT * WHERE {\n" +
-                    "     <http://mvm.com/owl/2010/10/mm.owl#urn:mm:mvm:LTS:root/cimv2:PG_OperatingSystem:CIM_ComputerSystem:stratus10:Red_Hat_Enterprise_Linux_Server> mm:hasRunningProcess ?obj .\n" +
-                    "     ?obj mm:name ?name ; \n" +
-                    "          mm:handle ?handle ; \n" +
-                    "          mm:description ?description ; \n" +
-                    "          mm:caption ?caption ; \n" +
-                    "          mm:parameters ?params . \n" +
-                    "}";
-
-            String cimCpu = "PREFIX rdf: <http://www.w3.org/1999/02/22-rdf-syntax-ns#>\n" +
-                    "PREFIX mm: <http://mvm.com/owl/2010/10/mm.owl#>\n" +
-                    "SELECT * \n" +
-                    "WHERE {\n" +
-                    "     <http://mvm.com/owl/2010/10/mm.owl#urn:mm:mvm:LTS:root/cimv2:CIM_ComputerSystem:stratus10> mm:hasProcessor ?obj .\n" +
-                    "     ?obj mm:maxClockSpeed ?speed .\n" +
-                    "     ?obj mm:loadPercentage ?load .\n" +
-                    "     ?obj mm:elementName ?type ." +
-                    "}";
-
-            String cimCpuLoad = "PREFIX rdf: <http://www.w3.org/1999/02/22-rdf-syntax-ns#>\n" +
-                    "PREFIX mm: <http://mvm.com/owl/2010/10/mm.owl#>\n" +
-                    "SELECT * \n" +
-                    "WHERE {\n" +
-                    "     <http://mvm.com/owl/2010/10/mm.owl#urn:mm:mvm:LTS:root/cimv2:CIM_ComputerSystem:stratus10> mm:hasProcessor ?obj .\n" +
-                    "     ?obj mm:loadPercentage ?load ." +
-                    "}";
-
-
-            String cimHasFileSystem = "PREFIX rdf: <http://www.w3.org/1999/02/22-rdf-syntax-ns#>\n" +
-                    "PREFIX mm: <http://mvm.com/owl/2010/10/mm.owl#>\n" +
-                    "SELECT * WHERE {\n" +
-//                "     <http://mvm.com/owl/2010/10/mm.owl#urn:mm:mvm:LTS:root/cimv2:PG_OperatingSystem:CIM_ComputerSystem:stratus10:Red_Hat_Enterprise_Linux_Server> mm:hasFileSystem ?obj ." +
-                    "     ?serv mm:hasFileSystem ?obj ." +
-                    "     ?obj mm:availableSpace ?available .\n" +
-                    "     ?obj mm:fileSystemSize ?size .\n" +
-                    "     ?obj mm:percentageSpaceUse ?use ." +
-                    "}";
-
-            String clusterKolm = "PREFIX rdf: <http://www.w3.org/1999/02/22-rdf-syntax-ns#>\n" +
-                    "PREFIX mm: <http://mvm.com/owl/2010/10/mm.owl#>\n" +
-                    "PREFIX kolm: <http://mvm.com/lrn/2010/11/kolm#>\n" +
-                    "SELECT ?name ?cluster ?srv ?ncd ?thresh ?ts WHERE {\n" +
-                    "     ?cluster kolm:relatesTo ?pt ;\n" +
-                    "              kolm:threshold ?thresh .\n" +
-                    "     ?pt kolm:serverRef ?srv ;\n" +
-                    "         kolm:ncd ?ncd ;\n" +
-                    "         kolm:timestamp ?ts .\n" +
-                    "     ?srv mm:CSName ?name .\n" +
-                    "} \n" +
-                    " ORDER BY ?cluster ?srv ?ncd";
-
-            String clusterKolm2 = "PREFIX rdf: <http://www.w3.org/1999/02/22-rdf-syntax-ns#>\n" +
-                    "PREFIX mm: <http://mvm.com/owl/2010/10/mm.owl#>\n" +
-                    "PREFIX kolm: <http://mvm.com/lrn/2010/11/kolm#>\n" +
-                    "SELECT ?cserv ?srv ?ncd ?thresh ?ts WHERE {\n" +
-                    "     ?cpt kolm:ncd \"0.0\" .\n" +
-                    "     ?cpt kolm:serverRef ?cserv .\n" +
-                    "     ?cluster kolm:relatesTo ?cpt ;\n" +
-                    "              kolm:relatesTo ?pt ;\n" +
-                    "              kolm:timestamp ?cts ;\n" +
-                    "              kolm:threshold ?thresh .\n" +
-                    "     ?pt kolm:serverRef ?srv ;\n" +
-                    "         kolm:ncd ?ncd ;\n" +
-                    "         kolm:timestamp ?ts .\n" +
-//                "     ?srv mm:CSName ?name .\n" +
-                    " FILTER (?cts >= \"1290616617624\")" +
-                    "} \n" +
-                    " ORDER BY ?cserv ?ncd ?srv";
-
-            String clusterKolmOtherClusters = "PREFIX rdf: <http://www.w3.org/1999/02/22-rdf-syntax-ns#>\n" +
-                    "PREFIX mm: <http://mvm.com/owl/2010/10/mm.owl#>\n" +
-                    "PREFIX kolm: <http://mvm.com/lrn/2010/11/kolm#>\n" +
-                    "SELECT ?cserv ?srv ?ncd WHERE {\n" +
-                    "     ?cpt kolm:ncd \"0.0\" .\n" +
-                    "     ?cpt kolm:serverRef ?cserv .\n" +
-                    "     ?cluster kolm:relatesTo ?cpt .\n" +
-                    "     ?cluster kolm:distanceTo ?pt .\n" +
-                    "     ?cluster kolm:timestamp ?cts .\n" +
-//                "              kolm:threshold ?thresh .\n" +
-                    "     ?pt kolm:serverRef ?srv ;\n" +
-                    "         kolm:ncd ?ncd ;\n" +
-                    "         kolm:timestamp ?ts .\n" +
-//                "     ?srv mm:CSName ?name .\n" +
-                    " FILTER (?cts >= \"1290616617624\")" +
-                    "} \n" +
-                    " ORDER BY ?cserv ?srv ?ncd";
-
-            String clusterKolmStratus13 = "PREFIX rdf: <http://www.w3.org/1999/02/22-rdf-syntax-ns#>\n" +
-                    "PREFIX mm: <http://mvm.com/owl/2010/10/mm.owl#>\n" +
-                    "PREFIX kolm: <http://mvm.com/lrn/2010/11/kolm#>\n" +
-                    "SELECT DISTINCT ?srv ?ncd WHERE {\n" +
-                    "     ?pt kolm:serverRef <http://mvm.com/owl/2010/10/mm.owl#urn:mm:mvm:LTS:root/cimv2:PG_OperatingSystem:CIM_ComputerSystem:stratus13:Red_Hat_Enterprise_Linux_Server> .\n" +
-                    "     ?cluster kolm:relatesTo ?pt .\n" +
-                    "     ?cluster kolm:relatesTo ?pt2 .\n" +
-                    "     ?pt2 kolm:serverRef ?srv .\n" +
-//                "     ?cluster kolm:relatesTo ?pt ;\n" +
-//                "              kolm:threshold ?thresh .\n" +
-//                "     ?pt kolm:serverRef <http://mvm.com/owl/2010/10/mm.owl#urn:mm:mvm:LTS:root/cimv2:PG_OperatingSystem:CIM_ComputerSystem:stratus10:Red_Hat_Enterprise_Linux_Server> ;\n" +
-                    "       ?pt2  kolm:ncd ?ncd .\n" +
-                    "       ?cluster kolm:timestamp ?ts .\n" +
-//                "     <http://mvm.com/owl/2010/10/mm.owl#urn:mm:mvm:LTS:root/cimv2:PG_OperatingSystem:CIM_ComputerSystem:stratus10:Red_Hat_Enterprise_Linux_Server> mm:CSName ?name .\n" +
-                    "} \n" +
-                    " ORDER BY ?ncd";
-
-            String cimLatestMeasure = "PREFIX rdf: <http://www.w3.org/1999/02/22-rdf-syntax-ns#>\n" +
-                    "PREFIX mm: <http://mvm.com/owl/2010/10/mm.owl#>\n" +
-                    "SELECT ?proc ?val ?time WHERE {\n" +
-                    "     ?proc mm:loadPercentage ?val .\n" +
-                    "     ?subj rdf:subject ?proc .\n" +
-                    "     ?subj rdf:object ?val2 .\n" +
-                    "     ?subj  rdf:type rdf:Statement ;\n" +
-                    "     \t    mm:reportedAt ?time .\n" +
-                    " FILTER (?val2 = ?val) }\n" +
-                    "ORDER BY DESC(?time)\n" +
-                    "LIMIT 250";
-
-            String deleteBlankNodesCim = "prefix Base: <http://mvm.com/base/2011/05/base.owl#>\n" +
-                    "prefix Core: <http://mvm.com/2011/05/core#>\n" +
-                    "\n" +
-                    "select *\n" +
-                    "{ \n" +
-                    "\n" +
-                    "?subj a Core:UnitaryComputerSystem .\n" +
-                    "?subj ?pred ?obj .\n" +
-                    "FILTER isBlank(?server).\n" +
-                    "}";
-
-//        String query = "DELETE {?subj <http://mvm.com/rdf/mm/relatesTo> <http://mvm.com/rdf/mm/LTS::stratus30>} WHERE { ?subj <http://mvm.com/rdf/mm/relatesTo> <http://mvm.com/rdf/mm/LTS::stratus30>}";
-//
-            String query = deleteBlankNodesCim;
-            System.out.println(query);
-            System.out.println(System.currentTimeMillis());
-
-            /**
-             * Create url object to POST to the running container
-             */
-
-            String queryenc = URLEncoder.encode(query, "UTF-8");
-
-            URL url = new URL("http://10.40.190.113:8080/rdfTripleStore/deletequery?query=" + queryenc);
-            URLConnection urlConnection = url.openConnection();
-            urlConnection.setDoOutput(true);
-
-            /**
-             * Get the corresponding response from server, if any
-             */
-            BufferedReader rd = new BufferedReader(new InputStreamReader(
-                    urlConnection.getInputStream()));
-            String line;
-            while ((line = rd.readLine()) != null) {
-                System.out.println(line);
-            }
-            rd.close();
-        } catch (Exception e) {
-            e.printStackTrace();
-        }
-    }
-
-}
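
The runner deleted above drives the /deletequery endpoint through a raw URLConnection, but it hard-codes a lab IP and only closes the reader on the happy path. A minimal sketch of the same call, assuming only that the servlet is reachable on localhost (the host, port, and placeholder query below are illustrative, not from the commit):

import java.io.BufferedReader;
import java.io.InputStreamReader;
import java.net.URL;
import java.net.URLConnection;
import java.net.URLEncoder;
import java.nio.charset.StandardCharsets;

public class DeleteQueryClient {
    public static void main(String[] args) throws Exception {
        // The SPARQL rides in a single URL parameter, so it must be form-encoded.
        String query = "DELETE { ?s ?p ?o } WHERE { ?s ?p ?o }"; // placeholder query
        String enc = URLEncoder.encode(query, StandardCharsets.UTF_8.name());
        URL url = new URL("http://localhost:8080/rdfTripleStore/deletequery?query=" + enc);
        URLConnection conn = url.openConnection();
        conn.setDoOutput(true); // mirrors the original runner, which forces a POST
        // try-with-resources closes the reader even when an exception is thrown
        try (BufferedReader rd = new BufferedReader(
                new InputStreamReader(conn.getInputStream(), StandardCharsets.UTF_8))) {
            String line;
            while ((line = rd.readLine()) != null) {
                System.out.println(line);
            }
        }
    }
}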

http://git-wip-us.apache.org/repos/asf/incubator-rya/blob/80faf06d/partition/web.partition.rdf/src/test/java/mvm/cloud/rdf/web/cloudbase/sail/LoadDataServletRun.java
----------------------------------------------------------------------
diff --git a/partition/web.partition.rdf/src/test/java/mvm/cloud/rdf/web/cloudbase/sail/LoadDataServletRun.java b/partition/web.partition.rdf/src/test/java/mvm/cloud/rdf/web/cloudbase/sail/LoadDataServletRun.java
deleted file mode 100644
index 4ffd517..0000000
--- a/partition/web.partition.rdf/src/test/java/mvm/cloud/rdf/web/cloudbase/sail/LoadDataServletRun.java
+++ /dev/null
@@ -1,51 +0,0 @@
-package mvm.cloud.rdf.web.cloudbase.sail;
-
-import com.google.common.io.ByteStreams;
-import junit.framework.TestCase;
-
-import java.io.BufferedReader;
-import java.io.InputStream;
-import java.io.InputStreamReader;
-import java.io.OutputStream;
-import java.net.URL;
-import java.net.URLConnection;
-
-public class LoadDataServletRun extends TestCase {
-
-    public static void main(String[] args) {
-        try {
-            /**
-             * Create url object to POST to the running container
-             */
-
-            final InputStream resourceAsStream = Thread.currentThread().getContextClassLoader()
-                    .getResourceAsStream("n3trips.txt");
-            URL url = new URL("http://10.41.1.95:8080/rdfTripleStoreInfer/loadrdf" +
-                    "?format=N-Triples" +
-                    "");
-            URLConnection urlConnection = url.openConnection();
-            urlConnection.setDoOutput(true);
-
-            final OutputStream os = urlConnection.getOutputStream();
-
-            System.out.println(resourceAsStream);
-            ByteStreams.copy(resourceAsStream, os);
-            os.flush();
-
-            /**
-             * Get the corresponding response from server, if any
-             */
-            BufferedReader rd = new BufferedReader(new InputStreamReader(
-                    urlConnection.getInputStream()));
-            String line;
-            while ((line = rd.readLine()) != null) {
-                System.out.println(line);
-            }
-            rd.close();
-            os.close();
-        } catch (Exception e) {
-            e.printStackTrace();
-        }
-    }
-
-}
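
LoadDataServletRun makes the inverse trip: it streams an N-Triples classpath resource into the request body of the /loadrdf servlet. The Guava ByteStreams.copy call can be replaced by a plain copy loop plus try-with-resources; a sketch under the same assumptions (localhost endpoint, n3trips.txt on the classpath):

import java.io.InputStream;
import java.io.OutputStream;
import java.net.URL;
import java.net.URLConnection;

public class LoadRdfClient {
    public static void main(String[] args) throws Exception {
        URL url = new URL("http://localhost:8080/rdfTripleStoreInfer/loadrdf?format=N-Triples");
        URLConnection conn = url.openConnection();
        conn.setDoOutput(true); // the triples travel in the POST body
        try (InputStream in = LoadRdfClient.class.getResourceAsStream("/n3trips.txt");
             OutputStream out = conn.getOutputStream()) {
            byte[] buf = new byte[8192];
            int n;
            while ((n = in.read(buf)) != -1) {
                out.write(buf, 0, n); // stream the file instead of buffering it whole
            }
        }
        conn.getInputStream().close(); // complete the request, discard the response
    }
}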

http://git-wip-us.apache.org/repos/asf/incubator-rya/blob/80faf06d/partition/web.partition.rdf/src/test/java/mvm/cloud/rdf/web/cloudbase/sail/QueryDataServletRun.java
----------------------------------------------------------------------
diff --git a/partition/web.partition.rdf/src/test/java/mvm/cloud/rdf/web/cloudbase/sail/QueryDataServletRun.java b/partition/web.partition.rdf/src/test/java/mvm/cloud/rdf/web/cloudbase/sail/QueryDataServletRun.java
deleted file mode 100644
index 42513c3..0000000
--- a/partition/web.partition.rdf/src/test/java/mvm/cloud/rdf/web/cloudbase/sail/QueryDataServletRun.java
+++ /dev/null
@@ -1,444 +0,0 @@
-package mvm.cloud.rdf.web.cloudbase.sail;
-
-import java.io.BufferedReader;
-import java.io.InputStreamReader;
-import java.net.URL;
-import java.net.URLConnection;
-import java.net.URLEncoder;
-
-public class QueryDataServletRun {
-
-    public static void main(String[] args) {
-        try {
-//		String query = "SELECT ?artist WHERE { ?abt <http://www.recshop.fake/cd#year> \"1988\"." +
-//				" }";
-            String artistQuery = "SELECT ?artist WHERE { "
-                    + " ?abt <http://www.recshop.fake/cd#artist> ?artist . "
-                    + " ?abt <http://www.recshop.fake/cd#year> \"1993\" . "
-                    + "}";
-//		String query = "SELECT ?pred ?obj WHERE { <http://www.recshop.fake/cd/Empire_Burlesque> ?pred ?obj }";
-//		String query = "SELECT ?pred ?label ?obj WHERE { <http://purl.org/swag/sbp/tab#A5> ?pred ?obj ." +
-//				" ?obj <http://www.w3.org/2000/01/rdf-schema#label> ?label }";
-            long dayBefore = System.currentTimeMillis() - 86400000;
-            System.out.println(dayBefore);
-//        String query = "SELECT DISTINCT ?obj WHERE { ?serv <http://mvm.com/rdf/mm/relatesTo> <http://mvm.com/rdf/mm/LTS::stratus30> . " +
-//				" ?serv <http://mvm.com/rdf/mm/relatesTo> ?obj ." +
-//                " ?serv <http://mvm.com/rdf/mm/timestamp> ?ts ." +
-////                " FILTER (?ts >= '"+dayBefore+"') " +
-//                " }" +
-//                " ORDER BY ?obj ";
-
-            String giveAllClusters = "SELECT DISTINCT ?uu WHERE { ?uu <http://mvm.com/rdf/mm/relatesTo> ?obj . " +
-                    " }" +
-                    " ORDER BY ?uu ";
-
-//        String query = "SELECT DISTINCT ?obj WHERE { <http://mvm.com/rdf/mm/1a4eaa7c-842c-456a-94c0-6547de6be841> <http://mvm.com/rdf/mm/relatesTo> ?obj . " +
-//                " }" +
-//                " ORDER BY ?obj ";
-
-            //hasfunction query
-            String hasFunctionQuery = "SELECT DISTINCT ?obj WHERE { ?uu <http://mvm.com/rdf/mm/hasFunction> <http://mvm.com/rdf/mm/america> . " +
-                    " ?uu <http://mvm.com/rdf/mm/relatesTo> ?obj" +
-                    " }" +
-                    " ORDER BY ?obj ";
-
-            String allFunctions = "SELECT DISTINCT ?func ?obj WHERE { ?uu <http://mvm.com/rdf/mm/hasFunction> ?func . " +
-                    " ?uu <http://mvm.com/rdf/mm/relatesTo> ?obj" +
-                    " }" +
-                    " ORDER BY ?func ";
-
-            String allFunctionsThresh = "SELECT DISTINCT ?func ?obj ?thresh WHERE { ?uu <http://mvm.com/rdf/mm/hasFunction> ?func . " +
-                    " ?uu <http://mvm.com/rdf/mm/relatesTo> ?obj ." +
-                    " ?uu <http://mvm.com/rdf/mm/threshold> ?thresh" +
-                    " }" +
-                    " ORDER BY ?func ";
-
-
-            String cwdQuery = "SELECT DISTINCT ?obj ?packname WHERE { ?subj <urn:mvm.cwd/2.0/man/uuid> ?obj . " +
-                    " ?subj <urn:mvm.cwd/2.0/man/installedPackages> ?instPacks ." +
-                    " ?instPacks <urn:mvm.cwd/2.0/man/package> ?packid ." +
-                    " ?packid <urn:mvm.cwd/2.0/man/name> ?packname } ";
-
-            String cwdAllServersQuery = "SELECT DISTINCT ?obj WHERE { ?subj <urn:mvm.cwd/2.0/man/uuid> ?obj } ";
-
-            // rearrange for better filter
-            // 0.124s
-            String lubm1 = "PREFIX rdf: <http://www.w3.org/1999/02/22-rdf-syntax-ns#>\n" +
-                    " PREFIX ub: <urn:test:onto:univ#>\n" +
-                    " SELECT * WHERE\n" +
-                    " {\n" +
-                    "     ?x ub:takesCourse <http://www.Department0.University0.edu/GraduateCourse0> .\n" +
-//                "     ?x rdf:type ub:GraduateStudent .\n" +
-                    " }";
-
-            // 142s
-            // not sure why it is so long will have to do some more tests
-            String lubm2 = "PREFIX rdf: <http://www.w3.org/1999/02/22-rdf-syntax-ns#>\n" +
-                    " PREFIX ub: <urn:edu.lubm#>\n" +
-                    " SELECT * WHERE\n" +
-                    " {\n" +
-                    "      ?z ub:subOrganizationOf ?y .\n" +
-                    "      ?y rdf:type ub:University .\n" +
-                    "      ?z rdf:type ub:Department .\n" +
-                    "      ?x ub:memberOf ?z .\n" +
-                    "      ?x rdf:type ub:GraduateStudent .\n" +
-                    "      ?x ub:undergraduateDegreeFrom ?y .\n" +
-                    " }";
-
-            String lubm2_a = "PREFIX rdf: <http://www.w3.org/1999/02/22-rdf-syntax-ns#>\n" +
-                    " PREFIX ub: <http://test.univ.onto.org#>\n" +
-                    " SELECT * WHERE\n" +
-                    " {\n" +
-                    "      ?x rdf:type ub:GraduateStudent .\n" +
-                    "      ?x ub:memberOf ?z .\n" +
-                    "      ?z ub:subOrganizationOf ?y .\n" +
-                    "      ?z rdf:type ub:Department .\n" +
-                    "      ?y rdf:type ub:University .\n" +
-//                "      ?x ub:undergraduateDegreeFrom ?y .\n" +
-                    " }";
-
-            // 0.127s
-            // Rearranged to put the assistant professor first, better filtering
-            String lubm3 = "PREFIX rdf: <http://www.w3.org/1999/02/22-rdf-syntax-ns#>\n" +
-                    " PREFIX ub: <urn:edu.lubm#>\n" +
-                    " SELECT * WHERE\n" +
-                    " {\n" +
-                    "      ?x ub:publicationAuthor <http://www.Department0.University0.edu/AssistantProfessor0> .\n" +
-                    "      ?x rdf:type ub:Publication .\n" +
-                    " }";
-
-//        had to infer relationships myself
-//        0.671s
-            String lubm4 = "PREFIX rdf: <http://www.w3.org/1999/02/22-rdf-syntax-ns#>\n" +
-                    " PREFIX rdfs: <http://www.w3.org/2000/01/rdf-schema#>\n" +
-                    " PREFIX ub: <urn:edu.lubm#>\n" +
-                    " SELECT * WHERE\n" +
-                    " {\n" +
-                    "      ?y ub:worksFor <http://www.Department0.University5.edu> .\n" +
-                    "      ?x rdfs:subClassOf ub:Professor .\n" +
-                    "      ?y rdf:type ?x .\n" +
-                    "      ?y ub:name ?y1 .\n" +
-                    "      ?y ub:emailAddress ?y2 .\n" +
-                    "      ?y ub:telephone ?y3 .\n" +
-                    " }";
-
-            //lubm5, we cannot do inferring for more than one level now. Person is too difficult
-
-            //lubm6, we cannot do the implicit inference between Student and GraduateStudent
-
-            //lubm14
-            //0.1s
-            String lubm14 = "PREFIX rdf: <http://www.w3.org/1999/02/22-rdf-syntax-ns#>\n" +
-                    " PREFIX ub: <urn:edu.lubm#>\n" +
-                    " SELECT * WHERE\n" +
-                    " {\n" +
-                    "     ?x rdf:type ub:UndergraduateStudent .\n" +
-                    " }";
-
-            String bongoAllCollections = "PREFIX rdf: <http://www.w3.org/1999/02/22-rdf-syntax-ns#>\n" +
-                    " PREFIX bg: <http://mvm.com/rdf/bongo/bongo.owl#>\n" +
-                    " SELECT * WHERE\n" +
-                    " {\n" +
-                    "     ?x rdf:type bg:Collection .\n" +
-                    "     ?x bg:uniqueid ?uid .\n" +
-                    "     ?x bg:title ?title .\n" +
-                    "     ?x bg:hasAuthor ?author .\n" +
-                    "     ?x bg:marking ?marking .\n" +
-                    " }";
-
-            String bongoEntriesForCategory = "PREFIX rdf: <http://www.w3.org/1999/02/22-rdf-syntax-ns#>\n" +
-                    " PREFIX bg: <http://mvm.com/rdf/bongo/bongo.owl#>\n" +
-                    " SELECT ?uniqueid WHERE\n" +
-                    " {\n" +
-                    "     ?entryid bg:inCollection bg:CollA .\n" +
-                    "     ?entryid rdf:type bg:Entry .\n" +
-                    "     ?entryid bg:uniqueid ?uniqueid .\n" +
-                    "     ?entryid bg:hasCategory ?category .\n" +
-                    "     FILTER (?category = \"cat1\") \n" +
-                    " }";
-
-            String bongoEntriesForAuthor = "PREFIX rdf: <http://www.w3.org/1999/02/22-rdf-syntax-ns#>\n" +
-                    " PREFIX bg: <http://mvm.com/rdf/bongo/bongo.owl#>\n" +
-                    " SELECT ?uniqueid WHERE\n" +
-                    " {\n" +
-                    "     ?entryid bg:inCollection bg:CollA .\n" +
-                    "     ?entryid rdf:type bg:Entry .\n" +
-                    "     ?entryid bg:uniqueid ?uniqueid .\n" +
-                    "     ?entryid bg:hasAuthor ?author .\n" +
-                    "     FILTER (?author = \"andrew2\") \n" +
-                    " }";
-
-            String bongoEntriesForModifiedTime = "PREFIX rdf: <http://www.w3.org/1999/02/22-rdf-syntax-ns#>\n" +
-                    " PREFIX bg: <http://mvm.com/rdf/bongo/bongo.owl#>\n" +
-                    " PREFIX xsd: <http://www.w3.org/2001/XMLSchema#>\n" +
-                    " SELECT DISTINCT ?entryid WHERE\n" +
-                    " {\n" +
-                    "     ?entryid bg:inCollection bg:CollA .\n" +
-                    "     ?entryid rdf:type bg:Entry .\n" +
-                    "     ?entryid bg:uniqueid ?uniqueid .\n" +
-                    "     ?entryid bg:modifiedTime ?modifiedTime .\n" +
-                    "     FILTER (xsd:dateTime(?modifiedTime) >= \"2011-10-21T13:18:30\"^^xsd:dateTime) \n" +
-                    " }";
-            String bongoEntriesSortTitle = "PREFIX rdf: <http://www.w3.org/1999/02/22-rdf-syntax-ns#>\n" +
-                    " PREFIX bg: <http://mvm.com/rdf/bongo/bongo.owl#>\n" +
-                    " PREFIX xsd: <http://www.w3.org/2001/XMLSchema#>\n" +
-                    " SELECT DISTINCT ?uniqueid WHERE\n" +
-                    " {\n" +
-                    "     ?entryid bg:inCollection bg:CollA .\n" +
-                    "     ?entryid rdf:type bg:Entry .\n" +
-                    "     ?entryid bg:uniqueid ?uniqueid .\n" +
-                    "     ?entryid bg:title ?title .\n" +
-                    " } ORDER BY ?title";
-
-            String bongoEntriesForTitle = "PREFIX rdf: <http://www.w3.org/1999/02/22-rdf-syntax-ns#>\n" +
-                    " PREFIX bg: <http://mvm.com/rdf/bongo/bongo.owl#>\n" +
-                    " PREFIX xsd: <http://www.w3.org/2001/XMLSchema#>\n" +
-                    " SELECT DISTINCT ?uniqueid WHERE\n" +
-                    " {\n" +
-                    "     ?entryid bg:inCollection bg:CollA .\n" +
-                    "     ?entryid rdf:type bg:Entry .\n" +
-                    "     ?entryid bg:uniqueid ?uniqueid .\n" +
-                    "     ?entryid bg:title ?title .\n" +
-                    "     FILTER (regex(?title,\"Entry1Title\")) }";
-
-            String bongoQuery = "PREFIX rdf: <http://www.w3.org/1999/02/22-rdf-syntax-ns#>\n" +
-                    " PREFIX bg: <http://mvm.com/rdf/bongo/bongo.owl#>\n" +
-                    " SELECT * WHERE\n" +
-                    " {\n" +
-                    "     ?col rdf:type bg:Collection .\n" +
-//                "     OPTIONAL{ bg:latency_mixture2_perSupplier_norm2\\/S\\/P\\/Stock\\/Google_simple\\/6 bg:uniqueid ?uniqueid} .\n" +
-//                "     OPTIONAL{ bg:'latency_mixture2_perSupplier_norm2/S/P/Stock/Google_simple/6' bg:title ?title} .\n" +
-//                "     OPTIONAL{ bg:latency_mixture2_perSupplier_norm2/S/P/Stock/Google_simple/6 bg:name ?name} .\n" +
-//                "     OPTIONAL{ bg:latency_mixture2_perSupplier_norm2/S/P/Stock/Google_simple/6 bg:marking ?marking} .\n" +
-//                "     OPTIONAL{ bg:latency_mixture2_perSupplier_norm2/S/P/Stock/Google_simple/6 bg:hasAuthor ?author} .\n" +
-                    " }";
-
-            String bongoAllEntriesInCollection = "PREFIX rdf: <http://www.w3.org/1999/02/22-rdf-syntax-ns#>\n" +
-                    " PREFIX bg: <http://mvm.com/rdf/bongo/bongo.owl#>\n" +
-                    " SELECT * WHERE\n" +
-                    " {\n" +
-                    "     ?y bg:inCollection bg:CollA .\n" +
-                    "     ?y rdf:type bg:Entry .\n" +
-                    "     ?y bg:uniqueid ?uid .\n" +
-                    "     ?y bg:title ?title .\n" +
-                    "     ?y bg:etag ?etag .\n" +
-                    " }";
-
-            String bongoAllForEntry1 = "PREFIX rdf: <http://www.w3.org/1999/02/22-rdf-syntax-ns#>\n" +
-                    " PREFIX bg: <http://mvm.com/rdf/bongo/bongo.owl#>\n" +
-                    " SELECT * WHERE\n" +
-                    " {\n" +
-                    "     bg:EntryM rdf:type bg:Entry .\n" +
-//                "     bg:EntryN bg:inCollection bg:CollectionN .\n" +
-                    "     bg:EntryM bg:mimeType ?mimeType .\n" +
-                    "     bg:EntryM bg:etag ?etag .\n" +
-                    "     OPTIONAL { bg:EntryM bg:slug ?slug}.\n" +
-                    "     bg:EntryM bg:uniqueid ?uniqueid .\n" +
-//                "     bg:EntryN bg:title ?title .\n" +
-//                "     bg:EntryN bg:marking ?marking .\n" +
-//                "     bg:EntryN bg:mediaMarking ?mediaMarking .\n" +
-//                "     bg:EntryN bg:editedTime ?editedTime .\n" +
-//                "     bg:EntryN bg:modifiedTime ?modifiedTime .\n" +
-//                "     bg:EntryN bg:publishedTime ?publishedTime .\n" +
-//                "     bg:EntryN bg:mediaStorageId ?mediaStorageId .\n" +
-//                "     bg:EntryN bg:mediaModifiedTime ?mediaModifiedTime .\n" +
-//                "     bg:EntryN bg:entryStorageId ?entryStorageId .\n" +
-                    " }";
-
-            String bongoEntryAllAuthors = "PREFIX rdf: <http://www.w3.org/1999/02/22-rdf-syntax-ns#>\n" +
-                    " PREFIX bg: <http://mvm.com/rdf/bongo/bongo.owl#>\n" +
-                    " SELECT * WHERE\n" +
-                    " {\n" +
-                    "     bg:Entry1 bg:hasAuthor ?y .\n" +
-                    " }";
-
-            String bongoEntriesModAfter = "PREFIX rdf: <http://www.w3.org/1999/02/22-rdf-syntax-ns#>\n" +
-                    " PREFIX bg: <http://mvm.com/rdf/bongo/bongo.owl#>\n" +
-                    " PREFIX xsd: <http://www.w3.org/2001/XMLSchema#>\n" +
-                    " SELECT * WHERE\n" +
-                    " {\n" +
-                    "     ?x bg:editedTime ?edTime .\n" +
-                    "     FILTER (xsd:dateTime(?edTime) >= \"2010-01-01T00:00:00\"^^xsd:dateTime)\n" +
-                    " }";
-
-            String cimData = "PREFIX rdf: <http://www.w3.org/1999/02/22-rdf-syntax-ns#>\n" +
-                    " PREFIX mm: <http://mvm.com/owl/mm.owl#>\n" +
-                    " PREFIX xsd: <http://www.w3.org/2001/XMLSchema#>\n" +
-                    " SELECT * WHERE\n" +
-                    " {\n" +
-                    "     ?x rdf:type mm:ComputerSystem .\n" +
-                    "     ?x mm:hasRunningOS ?y .\n" +
-                    "     ?y mm:name ?z .\n" +
-                    " }";
-
-            String cimData2 = "PREFIX rdf: <http://www.w3.org/1999/02/22-rdf-syntax-ns#>\n" +
-                    "  PREFIX mm: <http://mvm.com/owl/mm.owl#>\n" +
-                    "  PREFIX mmcs: <http://mvm.com/owl/mm.owl#urn:uuid:some:>\n" +
-                    "  SELECT  ?pred ?obj WHERE {\n" +
-                    "       mmcs:computersystem ?pred ?obj\n" +
-                    "  }";
-
-            String cimData3 = "PREFIX rdf: <http://www.w3.org/1999/02/22-rdf-syntax-ns#>\n" +
-                    "SELECT ?pred ?obj WHERE {\n" +
-                    "<http://mvm.com/owl/mm.owl#urn:mm:mvm:lts:root/cimv2:PG_OperatingSystem.CreationClassName=CIM_OperatingSystem,CSCreationClassName=CIM_UnitaryComputerSystem,CSName=nimbus02.bullpen.net,Name=Red_Hat_Enterprise_Linux_Server> ?pred ?obj\n" +
-                    "}";
-
-            String cimHasInstalledSoftware = "PREFIX rdf: <http://www.w3.org/1999/02/22-rdf-syntax-ns#>\n" +
-                    "PREFIX mm: <http://mvm.com/owl/2010/10/mm.owl#>\n" +
-                    "SELECT DISTINCT ?obj ?name ?caption WHERE {\n" +
-//                "     <http://mvm.com/owl/2010/10/mm.owl#urn:mm:mvm:LTS:root/cimv2:PG_OperatingSystem:CIM_ComputerSystem:stratus06.bullpen.net:Red_Hat_Enterprise_Linux_Server> mm:hasInstalledSoftware ?obj .\n" +
-                    "     ?serv mm:hasInstalledSoftware ?obj .\n" +
-                    "      ?obj mm:name ?name ;\n" +
-                    "           mm:caption ?caption .\n" +
-                    "}";
-
-            String cimHasRunningSoftware = "PREFIX rdf: <http://www.w3.org/1999/02/22-rdf-syntax-ns#>\n" +
-                    "PREFIX mm: <http://mvm.com/owl/2010/10/mm.owl#>\n" +
-                    "SELECT * WHERE {\n" +
-                    "     <http://mvm.com/owl/2010/10/mm.owl#urn:mm:mvm:LTS:root/cimv2:PG_OperatingSystem:CIM_ComputerSystem:stratus10:Red_Hat_Enterprise_Linux_Server> mm:hasRunningProcess ?obj .\n" +
-                    "     ?obj mm:name ?name ; \n" +
-                    "          mm:handle ?handle ; \n" +
-                    "          mm:description ?description ; \n" +
-                    "          mm:caption ?caption ; \n" +
-                    "          mm:parameters ?params . \n" +
-                    "}";
-
-            String cimCpu = "PREFIX rdf: <http://www.w3.org/1999/02/22-rdf-syntax-ns#>\n" +
-                    "PREFIX mm: <http://mvm.com/owl/2010/10/mm.owl#>\n" +
-                    "SELECT * \n" +
-                    "WHERE {\n" +
-                    "     <http://mvm.com/owl/2010/10/mm.owl#urn:mm:mvm:LTS:root/cimv2:CIM_ComputerSystem:stratus10> mm:hasProcessor ?obj .\n" +
-                    "     ?obj mm:maxClockSpeed ?speed .\n" +
-                    "     ?obj mm:loadPercentage ?load .\n" +
-                    "     ?obj mm:elementName ?type ." +
-                    "}";
-
-            String cimCpuLoad = "PREFIX rdf: <http://www.w3.org/1999/02/22-rdf-syntax-ns#>\n" +
-                    "PREFIX mm: <http://mvm.com/owl/2010/10/mm.owl#>\n" +
-                    "SELECT * \n" +
-                    "WHERE {\n" +
-                    "     <http://mvm.com/owl/2010/10/mm.owl#urn:mm:mvm:LTS:root/cimv2:CIM_ComputerSystem:stratus10> mm:hasProcessor ?obj .\n" +
-                    "     ?obj mm:loadPercentage ?load ." +
-                    "}";
-
-
-            String cimHasFileSystem = "PREFIX rdf: <http://www.w3.org/1999/02/22-rdf-syntax-ns#>\n" +
-                    "PREFIX mm: <http://mvm.com/owl/2010/10/mm.owl#>\n" +
-                    "SELECT * WHERE {\n" +
-//                "     <http://mvm.com/owl/2010/10/mm.owl#urn:mm:mvm:LTS:root/cimv2:PG_OperatingSystem:CIM_ComputerSystem:stratus10:Red_Hat_Enterprise_Linux_Server> mm:hasFileSystem ?obj ." +
-                    "     ?serv mm:hasFileSystem ?obj ." +
-                    "     ?obj mm:availableSpace ?available .\n" +
-                    "     ?obj mm:fileSystemSize ?size .\n" +
-                    "     ?obj mm:percentageSpaceUse ?use ." +
-                    "}";
-
-            String clusterKolm = "PREFIX rdf: <http://www.w3.org/1999/02/22-rdf-syntax-ns#>\n" +
-                    "PREFIX mm: <http://mvm.com/owl/2010/10/mm.owl#>\n" +
-                    "PREFIX kolm: <http://mvm.com/lrn/2010/11/kolm#>\n" +
-                    "SELECT ?name ?cluster ?srv ?ncd ?thresh ?ts WHERE {\n" +
-                    "     ?cluster kolm:relatesTo ?pt ;\n" +
-                    "              kolm:threshold ?thresh .\n" +
-                    "     ?pt kolm:serverRef ?srv ;\n" +
-                    "         kolm:ncd ?ncd ;\n" +
-                    "         kolm:timestamp ?ts .\n" +
-                    "     ?srv mm:CSName ?name .\n" +
-                    "} \n" +
-                    " ORDER BY ?cluster ?srv ?ncd";
-
-            String clusterKolm2 = "PREFIX rdf: <http://www.w3.org/1999/02/22-rdf-syntax-ns#>\n" +
-                    "PREFIX mm: <http://mvm.com/owl/2010/10/mm.owl#>\n" +
-                    "PREFIX kolm: <http://mvm.com/lrn/2010/11/kolm#>\n" +
-                    "SELECT ?cserv ?srv ?ncd ?thresh ?ts WHERE {\n" +
-                    "     ?cpt kolm:ncd \"0.0\" .\n" +
-                    "     ?cpt kolm:serverRef ?cserv .\n" +
-                    "     ?cluster kolm:relatesTo ?cpt ;\n" +
-                    "              kolm:relatesTo ?pt ;\n" +
-                    "              kolm:timestamp ?cts ;\n" +
-                    "              kolm:threshold ?thresh .\n" +
-                    "     ?pt kolm:serverRef ?srv ;\n" +
-                    "         kolm:ncd ?ncd ;\n" +
-                    "         kolm:timestamp ?ts .\n" +
-//                "     ?srv mm:CSName ?name .\n" +
-                    " FILTER (?cts >= \"1290616617624\")" +
-                    "} \n" +
-                    " ORDER BY ?cserv ?ncd ?srv";
-
-            String clusterKolmOtherClusters = "PREFIX rdf: <http://www.w3.org/1999/02/22-rdf-syntax-ns#>\n" +
-                    "PREFIX mm: <http://mvm.com/owl/2010/10/mm.owl#>\n" +
-                    "PREFIX kolm: <http://mvm.com/lrn/2010/11/kolm#>\n" +
-                    "SELECT ?cserv ?srv ?ncd WHERE {\n" +
-                    "     ?cpt kolm:ncd \"0.0\" .\n" +
-                    "     ?cpt kolm:serverRef ?cserv .\n" +
-                    "     ?cluster kolm:relatesTo ?cpt .\n" +
-                    "     ?cluster kolm:distanceTo ?pt .\n" +
-                    "     ?cluster kolm:timestamp ?cts .\n" +
-//                "              kolm:threshold ?thresh .\n" +
-                    "     ?pt kolm:serverRef ?srv ;\n" +
-                    "         kolm:ncd ?ncd ;\n" +
-                    "         kolm:timestamp ?ts .\n" +
-//                "     ?srv mm:CSName ?name .\n" +
-                    " FILTER (?cts >= \"1290616617624\")" +
-                    "} \n" +
-                    " ORDER BY ?cserv ?srv ?ncd";
-
-            String clusterKolmStratus13 = "PREFIX rdf: <http://www.w3.org/1999/02/22-rdf-syntax-ns#>\n" +
-                    "PREFIX mm: <http://mvm.com/owl/2010/10/mm.owl#>\n" +
-                    "PREFIX kolm: <http://mvm.com/lrn/2010/11/kolm#>\n" +
-                    "SELECT DISTINCT ?srv ?ncd WHERE {\n" +
-                    "     ?pt kolm:serverRef <http://mvm.com/owl/2010/10/mm.owl#urn:mm:mvm:LTS:root/cimv2:PG_OperatingSystem:CIM_ComputerSystem:stratus13:Red_Hat_Enterprise_Linux_Server> .\n" +
-                    "     ?cluster kolm:relatesTo ?pt .\n" +
-                    "     ?cluster kolm:relatesTo ?pt2 .\n" +
-                    "     ?pt2 kolm:serverRef ?srv .\n" +
-//                "     ?cluster kolm:relatesTo ?pt ;\n" +
-//                "              kolm:threshold ?thresh .\n" +
-//                "     ?pt kolm:serverRef <http://mvm.com/owl/2010/10/mm.owl#urn:mm:mvm:LTS:root/cimv2:PG_OperatingSystem:CIM_ComputerSystem:stratus10:Red_Hat_Enterprise_Linux_Server> ;\n" +
-                    "       ?pt2  kolm:ncd ?ncd .\n" +
-                    "       ?cluster kolm:timestamp ?ts .\n" +
-//                "     <http://mvm.com/owl/2010/10/mm.owl#urn:mm:mvm:LTS:root/cimv2:PG_OperatingSystem:CIM_ComputerSystem:stratus10:Red_Hat_Enterprise_Linux_Server> mm:CSName ?name .\n" +
-                    "} \n" +
-                    " ORDER BY ?ncd";
-
-            String cimLatestMeasure = "PREFIX rdf: <http://www.w3.org/1999/02/22-rdf-syntax-ns#>\n" +
-                    "PREFIX mm: <http://mvm.com/owl/2010/10/mm.owl#>\n" +
-                    "SELECT ?proc ?val ?time WHERE {\n" +
-                    "     ?proc mm:loadPercentage ?val .\n" +
-                    "     ?subj rdf:subject ?proc .\n" +
-                    "     ?subj rdf:object ?val2 .\n" +
-                    "     ?subj  rdf:type rdf:Statement ;\n" +
-                    "     \t    mm:reportedAt ?time .\n" +
-                    " FILTER (?val2 = ?val) }\n" +
-                    "ORDER BY DESC(?time)\n" +
-                    "LIMIT 250";
-
-//        String query = "DELETE {?subj <http://mvm.com/rdf/mm/relatesTo> <http://mvm.com/rdf/mm/LTS::stratus30>} WHERE { ?subj <http://mvm.com/rdf/mm/relatesTo> <http://mvm.com/rdf/mm/LTS::stratus30>}";
-//
-            String query = artistQuery;
-            System.out.println(query);
-            System.out.println(System.currentTimeMillis());
-
-            /**
-             * Create url object to POST to the running container
-             */
-
-            String queryenc = URLEncoder.encode(query, "UTF-8");
-
-            URL url = new URL("http://10.41.1.109:8080/rdfTripleStore/queryrdf?query=" + queryenc);
-            URLConnection urlConnection = url.openConnection();
-            urlConnection.setDoOutput(true);
-
-            /**
-             * Get the corresponding response from server, if any
-             */
-            BufferedReader rd = new BufferedReader(new InputStreamReader(
-                    urlConnection.getInputStream()));
-            String line;
-            while ((line = rd.readLine()) != null) {
-                System.out.println(line);
-            }
-            rd.close();
-        } catch (Exception e) {
-            e.printStackTrace();
-        }
-    }
-
-}
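
QueryDataServletRun differs from the delete runner only in its endpoint (/queryrdf) and the query it finally selects; everything else, including the catalogue of commented-out experiments, is duplicated. For comparison, the same round trip written against the JDK 11 java.net.http client — a modernization sketch, not code from this commit — assuming the same local endpoint:

import java.net.URI;
import java.net.URLEncoder;
import java.net.http.HttpClient;
import java.net.http.HttpRequest;
import java.net.http.HttpResponse;
import java.nio.charset.StandardCharsets;

public class QueryRdfClient {
    public static void main(String[] args) throws Exception {
        // artistQuery from the deleted runner: artists with a cd:year of "1993"
        String query = "SELECT ?artist WHERE { "
                + "?abt <http://www.recshop.fake/cd#artist> ?artist . "
                + "?abt <http://www.recshop.fake/cd#year> \"1993\" . }";
        String enc = URLEncoder.encode(query, StandardCharsets.UTF_8);
        HttpRequest request = HttpRequest.newBuilder()
                .uri(URI.create("http://localhost:8080/rdfTripleStore/queryrdf?query=" + enc))
                .GET()
                .build();
        HttpResponse<String> response = HttpClient.newHttpClient()
                .send(request, HttpResponse.BodyHandlers.ofString());
        System.out.println(response.body()); // SPARQL results as the servlet returns them
    }
}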

http://git-wip-us.apache.org/repos/asf/incubator-rya/blob/80faf06d/partition/web.partition.rdf/src/test/resources/cdrdf.xml
----------------------------------------------------------------------
diff --git a/partition/web.partition.rdf/src/test/resources/cdrdf.xml b/partition/web.partition.rdf/src/test/resources/cdrdf.xml
deleted file mode 100644
index 888b60a..0000000
--- a/partition/web.partition.rdf/src/test/resources/cdrdf.xml
+++ /dev/null
@@ -1,20 +0,0 @@
-<?xml version="1.0"?>
-<rdf:RDF xmlns:rdf="http://www.w3.org/1999/02/22-rdf-syntax-ns#"
-	xmlns:cd="http://www.recshop.fake/cd#">
-
-	<rdf:Description rdf:about="http://www.recshop.fake/cd/Empire_Burlesque">
-		<cd:artist>Bob Dylan</cd:artist>
-		<cd:country>USA</cd:country>
-		<cd:company>Columbia</cd:company>
-		<cd:price>10.90</cd:price>
-		<cd:year>1985</cd:year>
-	</rdf:Description>
-
-	<rdf:Description rdf:about="http://www.recshop.fake/cd/Hide_your_heart3">
-		<cd:artist>Bonnie Tyler3</cd:artist>
-		<cd:country>UK</cd:country>
-		<cd:company>CBS Records</cd:company>
-		<cd:price>9.90</cd:price>
-		<cd:year>1993</cd:year>
-	</rdf:Description>
-</rdf:RDF>
\ No newline at end of file
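
cdrdf.xml is the fixture the artistQuery in the deleted runners was written against: only Hide_your_heart3 carries cd:year "1993", so the query should bind ?artist to "Bonnie Tyler3". A sketch that checks this locally against an in-memory Sesame store (the MemoryStore choice and the classpath location are assumptions; the query and data come from the deleted files):

import java.io.InputStream;
import org.openrdf.query.QueryLanguage;
import org.openrdf.query.TupleQueryResult;
import org.openrdf.repository.RepositoryConnection;
import org.openrdf.repository.sail.SailRepository;
import org.openrdf.rio.RDFFormat;
import org.openrdf.sail.memory.MemoryStore;

public class CdRdfCheck {
    public static void main(String[] args) throws Exception {
        SailRepository repo = new SailRepository(new MemoryStore());
        repo.initialize();
        RepositoryConnection conn = repo.getConnection();
        try {
            InputStream in = CdRdfCheck.class.getResourceAsStream("/cdrdf.xml");
            conn.add(in, "http://www.recshop.fake/", RDFFormat.RDFXML);
            String artistQuery = "SELECT ?artist WHERE { "
                    + "?abt <http://www.recshop.fake/cd#artist> ?artist . "
                    + "?abt <http://www.recshop.fake/cd#year> \"1993\" . }";
            TupleQueryResult result =
                    conn.prepareTupleQuery(QueryLanguage.SPARQL, artistQuery).evaluate();
            while (result.hasNext()) {
                // Only Hide_your_heart3 has cd:year "1993" -> prints "Bonnie Tyler3"
                System.out.println(result.next().getValue("artist"));
            }
            result.close();
        } finally {
            conn.close();
        }
    }
}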

http://git-wip-us.apache.org/repos/asf/incubator-rya/blob/80faf06d/partition/web.partition.rdf/src/test/resources/n3trips.txt
----------------------------------------------------------------------
diff --git a/partition/web.partition.rdf/src/test/resources/n3trips.txt b/partition/web.partition.rdf/src/test/resources/n3trips.txt
deleted file mode 100644
index dfb2ded..0000000
--- a/partition/web.partition.rdf/src/test/resources/n3trips.txt
+++ /dev/null
@@ -1,4 +0,0 @@
-<http://www4.wiwiss.fu-berlin.de/bizer/bsbm/v01/instances/ProductType1> <http://www.w3.org/1999/02/22-rdf-syntax-ns#type> <http://www4.wiwiss.fu-berlin.de/bizer/bsbm/v01/vocabulary/ProductType> .
-<http://www4.wiwiss.fu-berlin.de/bizer/bsbm/v01/instances/ProductType1> <http://www.w3.org/2000/01/rdf-schema#label> "Thing" .
-<http://www4.wiwiss.fu-berlin.de/bizer/bsbm/v01/instances/ProductType1> <http://www.w3.org/2000/01/rdf-schema#comment> "The Product Type of all Products" .
-<http://www4.wiwiss.fu-berlin.de/bizer/bsbm/v01/instances/ProductType1> <http://purl.org/dc/elements/1.1/publisher> <http://www4.wiwiss.fu-berlin.de/bizer/bsbm/v01/instances/StandardizationInstitution1> .
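
n3trips.txt holds four N-Triples statements about a single BSBM subject; it is the payload LoadDataServletRun posts. A quick way to sanity-check such a file before uploading it, assuming Sesame's Rio parser (already a Rya dependency) in a version that provides the static Rio.parse helper:

import java.io.InputStream;
import org.openrdf.model.Model;
import org.openrdf.model.Statement;
import org.openrdf.rio.RDFFormat;
import org.openrdf.rio.Rio;

public class NTriplesPeek {
    public static void main(String[] args) throws Exception {
        InputStream in = NTriplesPeek.class.getResourceAsStream("/n3trips.txt");
        // Parse into an in-memory Model; malformed lines fail fast here,
        // long before anything is posted to the servlet.
        Model model = Rio.parse(in, "", RDFFormat.NTRIPLES);
        System.out.println(model.size() + " statements"); // 4 for the file above
        for (Statement st : model) {
            System.out.println(st.getPredicate());
        }
    }
}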


[39/56] [abbrv] incubator-rya git commit: RYA-7 POM and License Clean-up for Apache Move

Posted by mi...@apache.org.
http://git-wip-us.apache.org/repos/asf/incubator-rya/blob/80faf06d/extras/indexing/src/main/java/mvm/rya/indexing/accumulo/freetext/query/JJTQueryParserState.java
----------------------------------------------------------------------
diff --git a/extras/indexing/src/main/java/mvm/rya/indexing/accumulo/freetext/query/JJTQueryParserState.java b/extras/indexing/src/main/java/mvm/rya/indexing/accumulo/freetext/query/JJTQueryParserState.java
index e413842..dfcc429 100644
--- a/extras/indexing/src/main/java/mvm/rya/indexing/accumulo/freetext/query/JJTQueryParserState.java
+++ b/extras/indexing/src/main/java/mvm/rya/indexing/accumulo/freetext/query/JJTQueryParserState.java
@@ -2,25 +2,906 @@
 package mvm.rya.indexing.accumulo.freetext.query;
 
 /*
- * #%L
- * mvm.rya.indexing.accumulo
- * %%
- * Copyright (C) 2014 Rya
- * %%
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * You may obtain a copy of the License at
- * 
- *      http://www.apache.org/licenses/LICENSE-2.0
- * 
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- * #L%
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ * 
+ *   http://www.apache.org/licenses/LICENSE-2.0
+ * 
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied.  See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
  */
 
 public class JJTQueryParserState {
   private java.util.List<Node> nodes;
   private java.util.List<Integer> marks;

http://git-wip-us.apache.org/repos/asf/incubator-rya/blob/80faf06d/extras/indexing/src/main/java/mvm/rya/indexing/accumulo/freetext/query/Node.java
----------------------------------------------------------------------
diff --git a/extras/indexing/src/main/java/mvm/rya/indexing/accumulo/freetext/query/Node.java b/extras/indexing/src/main/java/mvm/rya/indexing/accumulo/freetext/query/Node.java
index 13d1649..1ef0bad 100644
--- a/extras/indexing/src/main/java/mvm/rya/indexing/accumulo/freetext/query/Node.java
+++ b/extras/indexing/src/main/java/mvm/rya/indexing/accumulo/freetext/query/Node.java
@@ -3,25 +3,906 @@
 package mvm.rya.indexing.accumulo.freetext.query;
 
 /*
- * #%L
- * mvm.rya.indexing.accumulo
- * %%
- * Copyright (C) 2014 Rya
- * %%
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * You may obtain a copy of the License at
- * 
- *      http://www.apache.org/licenses/LICENSE-2.0
- * 
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- * #L%
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ * 
+ *   http://www.apache.org/licenses/LICENSE-2.0
+ * 
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied.  See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
  */
 
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ * 
+ *   http://www.apache.org/licenses/LICENSE-2.0
+ * 
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied.  See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+
+
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ * 
+ *   http://www.apache.org/licenses/LICENSE-2.0
+ * 
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied.  See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+
+
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ * 
+ *   http://www.apache.org/licenses/LICENSE-2.0
+ * 
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied.  See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+
+
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ * 
+ *   http://www.apache.org/licenses/LICENSE-2.0
+ * 
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied.  See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+
+
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ * 
+ *   http://www.apache.org/licenses/LICENSE-2.0
+ * 
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied.  See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+
+
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ * 
+ *   http://www.apache.org/licenses/LICENSE-2.0
+ * 
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied.  See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+
+
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ * 
+ *   http://www.apache.org/licenses/LICENSE-2.0
+ * 
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied.  See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+
+
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ * 
+ *   http://www.apache.org/licenses/LICENSE-2.0
+ * 
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied.  See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+
+
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ * 
+ *   http://www.apache.org/licenses/LICENSE-2.0
+ * 
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied.  See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+
+
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ * 
+ *   http://www.apache.org/licenses/LICENSE-2.0
+ * 
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied.  See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+
+
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ * 
+ *   http://www.apache.org/licenses/LICENSE-2.0
+ * 
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied.  See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+
+
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ * 
+ *   http://www.apache.org/licenses/LICENSE-2.0
+ * 
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied.  See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+
+
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ * 
+ *   http://www.apache.org/licenses/LICENSE-2.0
+ * 
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied.  See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+
+
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ * 
+ *   http://www.apache.org/licenses/LICENSE-2.0
+ * 
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied.  See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+
+
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ * 
+ *   http://www.apache.org/licenses/LICENSE-2.0
+ * 
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied.  See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+
+
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ * 
+ *   http://www.apache.org/licenses/LICENSE-2.0
+ * 
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied.  See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+
+
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ * 
+ *   http://www.apache.org/licenses/LICENSE-2.0
+ * 
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied.  See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+
+
+
 /* All AST nodes must implement this interface.  It provides basic
    machinery for constructing the parent and child relationships
    between nodes. */
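
The comment above is the standard preamble of a JJTree-generated Node.java. The generated interface itself is not visible in this fragment; a minimal sketch of the contract that preamble introduces, assuming stock JJTree output:

    // Sketch only: the stock JJTree Node contract (method names follow
    // JJTree's generated code; not copied from this commit).
    public interface Node {
        void jjtOpen();                   // called when the node is opened during parsing
        void jjtClose();                  // called once all children have been added
        void jjtSetParent(Node n);        // the parent/child wiring the comment refers to
        Node jjtGetParent();
        void jjtAddChild(Node n, int i);  // attach child n at index i
        Node jjtGetChild(int i);
        int jjtGetNumChildren();
    }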


[11/56] [abbrv] incubator-rya git commit: RYA-7 POM and License Clean-up for Apache Move

Posted by mi...@apache.org.
http://git-wip-us.apache.org/repos/asf/incubator-rya/blob/80faf06d/sail/rya.sail.impl/src/test/java/mvm/rya/RdfCloudTripleStoreConnectionTest.java
----------------------------------------------------------------------
diff --git a/sail/rya.sail.impl/src/test/java/mvm/rya/RdfCloudTripleStoreConnectionTest.java b/sail/rya.sail.impl/src/test/java/mvm/rya/RdfCloudTripleStoreConnectionTest.java
deleted file mode 100644
index a40b770..0000000
--- a/sail/rya.sail.impl/src/test/java/mvm/rya/RdfCloudTripleStoreConnectionTest.java
+++ /dev/null
@@ -1,1362 +0,0 @@
-package mvm.rya;
-
-/*
- * #%L
- * mvm.rya.rya.sail.impl
- * %%
- * Copyright (C) 2014 Rya
- * %%
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * You may obtain a copy of the License at
- * 
- *      http://www.apache.org/licenses/LICENSE-2.0
- * 
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- * #L%
- */
-
-import static mvm.rya.api.RdfCloudTripleStoreConstants.NAMESPACE;
-
-import java.io.InputStream;
-import java.util.List;
-
-import junit.framework.TestCase;
-import mvm.rya.accumulo.AccumuloRdfConfiguration;
-import mvm.rya.accumulo.AccumuloRyaDAO;
-import mvm.rya.api.RdfCloudTripleStoreConfiguration;
-import mvm.rya.api.RdfCloudTripleStoreConstants;
-import mvm.rya.rdftriplestore.RdfCloudTripleStore;
-import mvm.rya.rdftriplestore.RyaSailRepository;
-import mvm.rya.rdftriplestore.inference.InferenceEngine;
-import mvm.rya.rdftriplestore.namespace.NamespaceManager;
-
-import org.apache.accumulo.core.client.Connector;
-import org.apache.accumulo.core.client.Instance;
-import org.apache.accumulo.core.client.mock.MockInstance;
-import org.junit.Ignore;
-import org.openrdf.model.Literal;
-import org.openrdf.model.Resource;
-import org.openrdf.model.Statement;
-import org.openrdf.model.URI;
-import org.openrdf.model.impl.StatementImpl;
-import org.openrdf.model.impl.URIImpl;
-import org.openrdf.model.impl.ValueFactoryImpl;
-import org.openrdf.model.vocabulary.OWL;
-import org.openrdf.model.vocabulary.RDF;
-import org.openrdf.model.vocabulary.RDFS;
-import org.openrdf.query.BindingSet;
-import org.openrdf.query.QueryLanguage;
-import org.openrdf.query.QueryResultHandlerException;
-import org.openrdf.query.TupleQuery;
-import org.openrdf.query.TupleQueryResultHandler;
-import org.openrdf.query.TupleQueryResultHandlerException;
-import org.openrdf.query.Update;
-import org.openrdf.repository.Repository;
-import org.openrdf.repository.RepositoryConnection;
-import org.openrdf.repository.RepositoryResult;
-import org.openrdf.repository.sail.SailRepository;
-import org.openrdf.rio.RDFFormat;
-
-/**
- * Class RdfCloudTripleStoreConnectionTest
- * Date: Mar 3, 2011
- * Time: 12:03:29 PM
- */
-public class RdfCloudTripleStoreConnectionTest extends TestCase {
-    private Repository repository;
-    ValueFactoryImpl vf = new ValueFactoryImpl();
-    private InferenceEngine internalInferenceEngine;
-
-    static String litdupsNS = "urn:test:litdups#";
-    URI cpu = vf.createURI(litdupsNS, "cpu");
-    protected RdfCloudTripleStore store;
-
-    public void setUp() throws Exception {
-        super.setUp();
-        store = new MockRdfCloudStore();
-//        store.setDisplayQueryPlan(true);
-//        store.setInferencing(false);
-        NamespaceManager nm = new NamespaceManager(store.getRyaDAO(), store.getConf());
-        store.setNamespaceManager(nm);
-        repository = new RyaSailRepository(store);
-        repository.initialize();
-    }
-
-    public void tearDown() throws Exception {
-        super.tearDown();
-        repository.shutDown();
-    }
-
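// Sketch, not part of the original commit: MockRdfCloudStore is defined later
// in this file, outside the visible hunk. Judging from the imports above
// (MockInstance, AccumuloRyaDAO, AccumuloRdfConfiguration), it plausibly wires
// an in-memory Accumulo instance into the store, roughly:
//
//     public class MockRdfCloudStore extends RdfCloudTripleStore {
//         public MockRdfCloudStore() {
//             try {
//                 Instance instance = new MockInstance();              // in-memory Accumulo
//                 Connector connector = instance.getConnector("", ""); // mock credentials
//                 AccumuloRyaDAO dao = new AccumuloRyaDAO();
//                 dao.setConnector(connector);
//                 setConf(new AccumuloRdfConfiguration());
//                 setRyaDAO(dao);
//                 // the real class also exposes the InferenceEngine it creates,
//                 // which these tests capture as internalInferenceEngine
//             } catch (Exception e) {
//                 throw new RuntimeException(e);
//             }
//         }
//     }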
-    public void testAddStatement() throws Exception {
-        RepositoryConnection conn = repository.getConnection();
-
-        URI loadPerc = vf.createURI(litdupsNS, "loadPerc");
-        URI uri1 = vf.createURI(litdupsNS, "uri1");
-        conn.add(cpu, loadPerc, uri1);
-        conn.commit();
-
-        RepositoryResult<Statement> result = conn.getStatements(cpu, loadPerc, null, true);
-        int count = 0;
-        while (result.hasNext()) {
-            count++;
-            result.next();
-        }
-        result.close();
-        assertEquals(1, count);
-
-        //clean up
-        conn.remove(cpu, loadPerc, uri1);
-
-//        //test removal
-        result = conn.getStatements(cpu, loadPerc, null, true, new Resource[0]);
-        count = 0;
-        while (result.hasNext()) {
-            count++;
-            result.next();
-        }
-        result.close();
-        assertEquals(0, count);
-
-        conn.close();
-    }
-
-//    public void testAddAuth() throws Exception {
-//        RepositoryConnection conn = repository.getConnection();
-//        URI cpu = vf.createURI(litdupsNS, "cpu");
-//        URI loadPerc = vf.createURI(litdupsNS, "loadPerc");
-//        URI uri1 = vf.createURI(litdupsNS, "uri1");
-//        URI uri2 = vf.createURI(litdupsNS, "uri2");
-//        URI uri3 = vf.createURI(litdupsNS, "uri3");
-//        URI auth1 = vf.createURI(RdfCloudTripleStoreConstants.AUTH_NAMESPACE, "1");
-//        URI auth2 = vf.createURI(RdfCloudTripleStoreConstants.AUTH_NAMESPACE, "2");
-//        URI auth3 = vf.createURI(RdfCloudTripleStoreConstants.AUTH_NAMESPACE, "3");
-//        conn.add(cpu, loadPerc, uri1, auth1, auth2, auth3);
-//        conn.add(cpu, loadPerc, uri2, auth2, auth3);
-//        conn.add(cpu, loadPerc, uri3, auth3);
-//        conn.commit();
-//
-//        //query with no auth
-//        RepositoryResult<Statement> result = conn.getStatements(cpu, loadPerc, null, true);
-//        int count = 0;
-//        while (result.hasNext()) {
-//            count++;
-//            result.next();
-//        }
-//        assertEquals(0, count);
-//        result.close();
-//
-//        String query = "select * where {" +
-//                "<" + cpu.toString() + "> ?p ?o1." +
-//                "}";
-//        TupleQuery tupleQuery = conn.prepareTupleQuery(QueryLanguage.SPARQL, query);
-//        tupleQuery.setBinding(RdfCloudTripleStoreConfiguration.CONF_QUERY_AUTH, vf.createLiteral("2"));
-//        CountTupleHandler cth = new CountTupleHandler();
-//        tupleQuery.evaluate(cth);
-//        assertEquals(2, cth.getCount());
-//
-//        conn.close();
-//    }
-
-    public void testEvaluate() throws Exception {
-        RepositoryConnection conn = repository.getConnection();
-        URI loadPerc = vf.createURI(litdupsNS, "loadPerc");
-        URI uri1 = vf.createURI(litdupsNS, "uri1");
-        conn.add(cpu, loadPerc, uri1);
-        conn.commit();
-
-        String query = "select * where {" +
-                "?x <" + loadPerc.stringValue() + "> ?o1." +
-                "}";
-        TupleQuery tupleQuery = conn.prepareTupleQuery(QueryLanguage.SPARQL, query);
-        CountTupleHandler cth = new CountTupleHandler();
-        tupleQuery.evaluate(cth);
-        assertEquals(cth.getCount(), 1);
-        conn.close();
-    }
-
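// Sketch, not part of the original commit: CountTupleHandler is a helper
// defined later in this file, outside the visible hunk. Its usage above --
// evaluate() followed by getCount() -- implies it simply counts solutions.
// A minimal version consistent with the TupleQueryResultHandler methods
// implemented elsewhere in these tests:
//
//     private static class CountTupleHandler implements TupleQueryResultHandler {
//         private int count = 0;
//         public void startQueryResult(List<String> bindingNames) { }
//         public void endQueryResult() { }
//         public void handleSolution(BindingSet bindingSet) {
//             count++;                          // one solution per binding set handled
//         }
//         public void handleBoolean(boolean value) { }
//         public void handleLinks(List<String> linkUrls) { }
//         public int getCount() {
//             return count;
//         }
//     }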
-    public void testEvaluateMultiLine() throws Exception {
-        RepositoryConnection conn = repository.getConnection();
-        URI loadPerc = vf.createURI(litdupsNS, "loadPerc");
-        URI uri1 = vf.createURI(litdupsNS, "uri1");
-        URI pred2 = vf.createURI(litdupsNS, "pred2");
-        URI uri2 = vf.createURI(litdupsNS, "uri2");
-        conn.add(cpu, loadPerc, uri1);
-        conn.add(cpu, pred2, uri2);
-        conn.commit();
-
-        String query = "select * where {" +
-                "?x <" + loadPerc.stringValue() + "> ?o1." +
-                "?x <" + pred2.stringValue() + "> ?o2." +
-                "}";
-        TupleQuery tupleQuery = conn.prepareTupleQuery(QueryLanguage.SPARQL, query);
-        tupleQuery.setBinding(RdfCloudTripleStoreConfiguration.CONF_QUERYPLAN_FLAG, RdfCloudTripleStoreConstants.VALUE_FACTORY.createLiteral(true));
-        CountTupleHandler cth = new CountTupleHandler();
-        tupleQuery.evaluate(cth);
-        conn.close();
-        assertEquals(cth.getCount(), 1);
-    }
-
-    public void testPOObjRange() throws Exception {
-        RepositoryConnection conn = repository.getConnection();
-        URI loadPerc = vf.createURI(litdupsNS, "loadPerc");
-        Literal six = vf.createLiteral("6");
-        Literal sev = vf.createLiteral("7");
-        Literal ten = vf.createLiteral("10");
-        conn.add(cpu, loadPerc, six);
-        conn.add(cpu, loadPerc, sev);
-        conn.add(cpu, loadPerc, ten);
-        conn.commit();
-
-        String query = "PREFIX mvm: <" + NAMESPACE + ">\n" +
-                "select * where {" +
-                "?x <" + loadPerc.stringValue() + "> ?o.\n" +
-                "FILTER(mvm:range(?o, '6', '8'))." +
-                "}";
-        TupleQuery tupleQuery = conn.prepareTupleQuery(QueryLanguage.SPARQL, query);
-        CountTupleHandler cth = new CountTupleHandler();
-        tupleQuery.evaluate(cth);
-        conn.close();
-        assertEquals(2, cth.getCount());
-    }
-
-    public void testPOPredRange() throws Exception {
-        RepositoryConnection conn = repository.getConnection();
-        URI loadPerc = vf.createURI(litdupsNS, "loadPerc1");
-        URI loadPerc2 = vf.createURI(litdupsNS, "loadPerc2");
-        URI loadPerc3 = vf.createURI(litdupsNS, "loadPerc3");
-        URI loadPerc4 = vf.createURI(litdupsNS, "loadPerc4");
-        Literal six = vf.createLiteral("6");
-        Literal sev = vf.createLiteral("7");
-        Literal ten = vf.createLiteral("10");
-        conn.add(cpu, loadPerc, six);
-        conn.add(cpu, loadPerc2, sev);
-        conn.add(cpu, loadPerc4, ten);
-        conn.commit();
-
-        String query = "PREFIX mvm: <" + NAMESPACE + ">\n" +
-                "select * where {" +
-                "?x ?p ?o.\n" +
-                "FILTER(mvm:range(?p, <" + loadPerc.stringValue() + ">, <" + loadPerc3.stringValue() + ">))." +
-                "}";
-        TupleQuery tupleQuery = conn.prepareTupleQuery(QueryLanguage.SPARQL, query);
-        CountTupleHandler cth = new CountTupleHandler();
-        tupleQuery.evaluate(cth);
-        conn.close();
-        assertEquals(cth.getCount(), 2);
-    }
-
-    public void testSPOPredRange() throws Exception {
-        RepositoryConnection conn = repository.getConnection();
-        URI loadPerc = vf.createURI(litdupsNS, "loadPerc1");
-        URI loadPerc2 = vf.createURI(litdupsNS, "loadPerc2");
-        URI loadPerc3 = vf.createURI(litdupsNS, "loadPerc3");
-        URI loadPerc4 = vf.createURI(litdupsNS, "loadPerc4");
-        Literal six = vf.createLiteral("6");
-        Literal sev = vf.createLiteral("7");
-        Literal ten = vf.createLiteral("10");
-        conn.add(cpu, loadPerc, six);
-        conn.add(cpu, loadPerc2, sev);
-        conn.add(cpu, loadPerc4, ten);
-        conn.commit();
-
-        String query = "PREFIX mvm: <" + NAMESPACE + ">\n" +
-                "select * where {" +
-                "<" + cpu.stringValue() + "> ?p ?o.\n" +
-                "FILTER(mvm:range(?p, <" + loadPerc.stringValue() + ">, <" + loadPerc3.stringValue() + ">))." +
-                "}";
-        TupleQuery tupleQuery = conn.prepareTupleQuery(QueryLanguage.SPARQL, query);
-        CountTupleHandler cth = new CountTupleHandler();
-        tupleQuery.evaluate(cth);
-        conn.close();
-        assertEquals(2, cth.getCount());
-    }
-
-    public void testSPOSubjRange() throws Exception {
-        RepositoryConnection conn = repository.getConnection();
-        URI cpu2 = vf.createURI(litdupsNS, "cpu2");
-        URI cpu3 = vf.createURI(litdupsNS, "cpu3");
-        URI loadPerc = vf.createURI(litdupsNS, "loadPerc");
-        Literal six = vf.createLiteral("6");
-        Literal sev = vf.createLiteral("7");
-        Literal ten = vf.createLiteral("10");
-        conn.add(cpu, loadPerc, six);
-        conn.add(cpu2, loadPerc, sev);
-        conn.add(cpu3, loadPerc, ten);
-        conn.commit();
-
-        String query = "PREFIX mvm: <" + NAMESPACE + ">\n" +
-                "select * where {" +
-                "?s ?p ?o.\n" +
-                "FILTER(mvm:range(?s, <" + cpu.stringValue() + ">, <" + cpu2.stringValue() + ">))." +
-                "}";
-        TupleQuery tupleQuery = conn.prepareTupleQuery(QueryLanguage.SPARQL, query);
-        CountTupleHandler cth = new CountTupleHandler();
-        tupleQuery.evaluate(cth);
-        conn.close();
-        assertEquals(cth.getCount(), 2);
-    }
-
-    public void testSPOObjRange() throws Exception {
-        RepositoryConnection conn = repository.getConnection();
-        URI loadPerc = vf.createURI(litdupsNS, "loadPerc");
-        Literal six = vf.createLiteral("6");
-        Literal sev = vf.createLiteral("7");
-        Literal ten = vf.createLiteral("10");
-        conn.add(cpu, loadPerc, six);
-        conn.add(cpu, loadPerc, sev);
-        conn.add(cpu, loadPerc, ten);
-        conn.commit();
-
-        String query = "PREFIX mvm: <" + NAMESPACE + ">\n" +
-                "select * where {" +
-                "<" + cpu.stringValue() + "> <" + loadPerc.stringValue() + "> ?o.\n" +
-                "FILTER(mvm:range(?o, '6', '8'))." +
-                "}";
-        TupleQuery tupleQuery = conn.prepareTupleQuery(QueryLanguage.SPARQL, query);
-        CountTupleHandler cth = new CountTupleHandler();
-        tupleQuery.evaluate(cth);
-        conn.close();
-        assertEquals(cth.getCount(), 2);
-    }
-
-    public void testOSPObjRange() throws Exception {
-        RepositoryConnection conn = repository.getConnection();
-        URI loadPerc = vf.createURI(litdupsNS, "loadPerc");
-        Literal six = vf.createLiteral("6");
-        Literal sev = vf.createLiteral("7");
-        Literal ten = vf.createLiteral("10");
-        conn.add(cpu, loadPerc, six);
-        conn.add(cpu, loadPerc, sev);
-        conn.add(cpu, loadPerc, ten);
-        conn.commit();
-
-        String query = "PREFIX mvm: <" + NAMESPACE + ">\n" +
-                "select * where {" +
-                "?s ?p ?o.\n" +
-                "FILTER(mvm:range(?o, '6', '8'))." +
-                "}";
-        TupleQuery tupleQuery = conn.prepareTupleQuery(QueryLanguage.SPARQL, query);
-        CountTupleHandler cth = new CountTupleHandler();
-        tupleQuery.evaluate(cth);
-        conn.close();
-        assertEquals(cth.getCount(), 2);
-    }
-    
-    public void testRegexFilter() throws Exception {
-        RepositoryConnection conn = repository.getConnection();
-        URI loadPerc = vf.createURI(litdupsNS, "loadPerc");
-        URI testClass = vf.createURI(litdupsNS, "test");
-        Literal six = vf.createLiteral("6");
-        Literal sev = vf.createLiteral("7");
-        Literal ten = vf.createLiteral("10");
-        conn.add(cpu, loadPerc, six);
-        conn.add(cpu, loadPerc, sev);
-        conn.add(cpu, loadPerc, ten);
-        conn.add(cpu, RDF.TYPE, testClass);
-        conn.commit();
-
-        String query = "PREFIX mvm: <" + NAMESPACE + ">\n" +
-                "select * where {" +
-                String.format("<%s> ?p ?o.\n", cpu.stringValue()) +
-                "FILTER(regex(?o, '^1'))." +
-                "}";
-        TupleQuery tupleQuery = conn.prepareTupleQuery(QueryLanguage.SPARQL, query);
-        CountTupleHandler cth = new CountTupleHandler();
-        tupleQuery.evaluate(cth);
-        conn.close();
-        assertEquals(cth.getCount(), 1);
-    }
-
-    public void testMMRTS152() throws Exception {
-        RepositoryConnection conn = repository.getConnection();
-        URI loadPerc = vf.createURI(litdupsNS, "testPred");
-        URI uri1 = vf.createURI(litdupsNS, "uri1");
-        conn.add(cpu, loadPerc, uri1);
-        conn.commit();
-
-        RepositoryResult<Statement> result = conn.getStatements(cpu, loadPerc, null, false, new Resource[0]);
-//        RdfCloudTripleStoreCollectionStatementsIterator iterator = new RdfCloudTripleStoreCollectionStatementsIterator(
-//                cpu, loadPerc, null, store.connector,
-//                vf, new Configuration(), null);
-
-        while (result.hasNext()) {
-            assertTrue(result.hasNext());
-            assertNotNull(result.next());
-        }
-
-        conn.close();
-    }
-
-    public void testDuplicateLiterals() throws Exception {
-        RepositoryConnection conn = repository.getConnection();
-
-        URI loadPerc = vf.createURI(litdupsNS, "loadPerc");
-        Literal lit1 = vf.createLiteral(0.0);
-        Literal lit2 = vf.createLiteral(0.0);
-        Literal lit3 = vf.createLiteral(0.0);
-
-        conn.add(cpu, loadPerc, lit1);
-        conn.add(cpu, loadPerc, lit2);
-        conn.add(cpu, loadPerc, lit3);
-        conn.commit();
-
-        RepositoryResult<Statement> result = conn.getStatements(cpu, loadPerc, null, true, new Resource[0]);
-        int count = 0;
-        while (result.hasNext()) {
-            count++;
-            result.next();
-        }
-        result.close();
-        assertEquals(1, count);
-
-        //clean up
-        conn.remove(cpu, loadPerc, lit1);
-        conn.close();
-    }
-
-    public void testNotDuplicateUris() throws Exception {
-        RepositoryConnection conn = repository.getConnection();
-
-        URI loadPerc = vf.createURI(litdupsNS, "loadPerc");
-        URI uri1 = vf.createURI(litdupsNS, "uri1");
-        URI uri2 = vf.createURI(litdupsNS, "uri1");
-        URI uri3 = vf.createURI(litdupsNS, "uri1");
-
-        conn.add(cpu, loadPerc, uri1);
-        conn.add(cpu, loadPerc, uri2);
-        conn.add(cpu, loadPerc, uri3);
-        conn.commit();
-
-        RepositoryResult<Statement> result = conn.getStatements(cpu, loadPerc, null, true, new Resource[0]);
-        int count = 0;
-        while (result.hasNext()) {
-            count++;
-            result.next();
-        }
-        result.close();
-        assertEquals(1, count);
-
-        //clean up
-        conn.remove(cpu, loadPerc, uri1);
-        conn.close();
-    }
-
-    public void testNamespaceUsage() throws Exception {
-        RepositoryConnection conn = repository.getConnection();
-        conn.setNamespace("lit", litdupsNS);
-        URI loadPerc = vf.createURI(litdupsNS, "loadPerc");
-        final URI uri1 = vf.createURI(litdupsNS, "uri1");
-        conn.add(cpu, loadPerc, uri1);
-        conn.commit();
-
-        String query = "PREFIX lit: <" + litdupsNS + ">\n" +
-                "select * where {lit:cpu lit:loadPerc ?o.}";
-        TupleQuery tupleQuery = conn.prepareTupleQuery(QueryLanguage.SPARQL, query);
-        tupleQuery.evaluate(new TupleQueryResultHandler() {
-
-            @Override
-            public void startQueryResult(List<String> strings) throws TupleQueryResultHandlerException {
-            }
-
-            @Override
-            public void endQueryResult() throws TupleQueryResultHandlerException {
-
-            }
-
-            @Override
-            public void handleSolution(BindingSet bindingSet) throws TupleQueryResultHandlerException {
-                assertTrue(uri1.toString().equals(bindingSet.getBinding("o").getValue().stringValue()));
-            }
-
-            @Override
-            public void handleBoolean(boolean paramBoolean) throws QueryResultHandlerException {
-            }
-
-            @Override
-            public void handleLinks(List<String> paramList) throws QueryResultHandlerException {
-            }
-        });
-        conn.close();
-    }
-
-    public void testSubPropertyOf() throws Exception {
-        if(internalInferenceEngine == null) return; //infer not supported;
-
-        RepositoryConnection conn = repository.getConnection();
-        conn.add(new StatementImpl(vf.createURI(litdupsNS, "undergradDegreeFrom"), RDFS.SUBPROPERTYOF, vf.createURI(litdupsNS, "degreeFrom")));
-        conn.add(new StatementImpl(vf.createURI(litdupsNS, "gradDegreeFrom"), RDFS.SUBPROPERTYOF, vf.createURI(litdupsNS, "degreeFrom")));
-        conn.add(new StatementImpl(vf.createURI(litdupsNS, "degreeFrom"), RDFS.SUBPROPERTYOF, vf.createURI(litdupsNS, "memberOf")));
-        conn.add(new StatementImpl(vf.createURI(litdupsNS, "memberOf"), RDFS.SUBPROPERTYOF, vf.createURI(litdupsNS, "associatedWith")));
-        conn.add(new StatementImpl(vf.createURI(litdupsNS, "UgradA"), vf.createURI(litdupsNS, "undergradDegreeFrom"), vf.createURI(litdupsNS, "Harvard")));
-        conn.add(new StatementImpl(vf.createURI(litdupsNS, "GradB"), vf.createURI(litdupsNS, "gradDegreeFrom"), vf.createURI(litdupsNS, "Yale")));
-        conn.add(new StatementImpl(vf.createURI(litdupsNS, "ProfessorC"), vf.createURI(litdupsNS, "memberOf"), vf.createURI(litdupsNS, "Harvard")));
-        conn.commit();
-        conn.close();
-
-        internalInferenceEngine.refreshGraph();
-
-        conn = repository.getConnection();
-
-        String query = "PREFIX rdfs: <" + RDFS.NAMESPACE + ">\n" +
-                "PREFIX rdf: <" + RDF.NAMESPACE + ">\n" +
-                "PREFIX lit: <" + litdupsNS + ">\n" +
-                "select * where {?s lit:degreeFrom lit:Harvard.}";
-
-        TupleQuery tupleQuery = conn.prepareTupleQuery(QueryLanguage.SPARQL, query);
-        CountTupleHandler tupleHandler = new CountTupleHandler();
-        tupleQuery.evaluate(tupleHandler);
-        assertEquals(1, tupleHandler.getCount());
-
-        query = "PREFIX rdfs: <" + RDFS.NAMESPACE + ">\n" +
-                "PREFIX rdf: <" + RDF.NAMESPACE + ">\n" +
-                "PREFIX lit: <" + litdupsNS + ">\n" +
-                "select * where {?s lit:memberOf lit:Harvard.}";
-
-        tupleQuery = conn.prepareTupleQuery(QueryLanguage.SPARQL, query);
-        tupleHandler = new CountTupleHandler();
-        tupleQuery.evaluate(tupleHandler);
-        assertEquals(2, tupleHandler.getCount());
-
-        query = "PREFIX rdfs: <" + RDFS.NAMESPACE + ">\n" +
-                "PREFIX rdf: <" + RDF.NAMESPACE + ">\n" +
-                "PREFIX lit: <" + litdupsNS + ">\n" +
-                "select * where {?s lit:associatedWith ?o.}";
-
-        tupleQuery = conn.prepareTupleQuery(QueryLanguage.SPARQL, query);
-        tupleHandler = new CountTupleHandler();
-        tupleQuery.evaluate(tupleHandler);
-        assertEquals(3, tupleHandler.getCount());
-
-        query = "PREFIX rdfs: <" + RDFS.NAMESPACE + ">\n" +
-                "PREFIX rdf: <" + RDF.NAMESPACE + ">\n" +
-                "PREFIX lit: <" + litdupsNS + ">\n" +
-                "select * where {?s lit:gradDegreeFrom lit:Yale.}";
-
-        tupleQuery = conn.prepareTupleQuery(QueryLanguage.SPARQL, query);
-        tupleHandler = new CountTupleHandler();
-        tupleQuery.evaluate(tupleHandler);
-        assertEquals(1, tupleHandler.getCount());
-
-        conn.close();
-    }
-
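// Sketch, not part of the original commit: Rya answers the queries above
// without materializing inferred triples. After refreshGraph(), the
// InferenceEngine rewrites the query tree so a pattern over lit:degreeFrom
// also matches its sub-properties. Conceptually, the first query above is
// evaluated as if it had been written:
//
//     String expanded = "PREFIX lit: <" + litdupsNS + ">\n" +
//             "select * where {\n" +
//             "  { ?s lit:degreeFrom lit:Harvard. }\n" +
//             "  UNION { ?s lit:undergradDegreeFrom lit:Harvard. }\n" +
//             "  UNION { ?s lit:gradDegreeFrom lit:Harvard. }\n" +
//             "}";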
-    public void testEquivPropOf() throws Exception {
-        if(internalInferenceEngine == null) return; //infer not supported;
-
-        RepositoryConnection conn = repository.getConnection();
-        conn.add(new StatementImpl(vf.createURI(litdupsNS, "undergradDegreeFrom"), OWL.EQUIVALENTPROPERTY, vf.createURI(litdupsNS, "ugradDegreeFrom")));
-        conn.add(new StatementImpl(vf.createURI(litdupsNS, "UgradA"), vf.createURI(litdupsNS, "undergradDegreeFrom"), vf.createURI(litdupsNS, "Harvard")));
-        conn.add(new StatementImpl(vf.createURI(litdupsNS, "GradB"), vf.createURI(litdupsNS, "ugradDegreeFrom"), vf.createURI(litdupsNS, "Harvard")));
-        conn.add(new StatementImpl(vf.createURI(litdupsNS, "GradC"), vf.createURI(litdupsNS, "ugraduateDegreeFrom"), vf.createURI(litdupsNS, "Harvard")));
-        conn.commit();
-        conn.close();
-
-        internalInferenceEngine.refreshGraph();
-
-        conn = repository.getConnection();
-
-        String query = "PREFIX rdfs: <" + RDFS.NAMESPACE + ">\n" +
-                "PREFIX rdf: <" + RDF.NAMESPACE + ">\n" +
-                "PREFIX lit: <" + litdupsNS + ">\n" +
-                "select * where {?s lit:ugradDegreeFrom lit:Harvard.}";
-
-        TupleQuery tupleQuery = conn.prepareTupleQuery(QueryLanguage.SPARQL, query);
-        CountTupleHandler tupleHandler = new CountTupleHandler();
-        tupleQuery.evaluate(tupleHandler);
-        assertEquals(2, tupleHandler.getCount());
-
-        conn.close();
-    }
-
-    public void testSymmPropOf() throws Exception {
-        if(internalInferenceEngine == null) return; //infer not supported;
-
-        RepositoryConnection conn = repository.getConnection();
-        conn.add(new StatementImpl(vf.createURI(litdupsNS, "friendOf"), RDF.TYPE, OWL.SYMMETRICPROPERTY));
-        conn.add(new StatementImpl(vf.createURI(litdupsNS, "Bob"), vf.createURI(litdupsNS, "friendOf"), vf.createURI(litdupsNS, "Jeff")));
-        conn.add(new StatementImpl(vf.createURI(litdupsNS, "James"), vf.createURI(litdupsNS, "friendOf"), vf.createURI(litdupsNS, "Jeff")));
-        conn.commit();
-        conn.close();
-
-        internalInferenceEngine.refreshGraph();
-
-        conn = repository.getConnection();
-
-        String query = "PREFIX rdfs: <" + RDFS.NAMESPACE + ">\n" +
-                "PREFIX rdf: <" + RDF.NAMESPACE + ">\n" +
-                "PREFIX lit: <" + litdupsNS + ">\n" +
-                "select * where {?s lit:friendOf lit:Bob.}";
-
-        TupleQuery tupleQuery = conn.prepareTupleQuery(QueryLanguage.SPARQL, query);
-        CountTupleHandler tupleHandler = new CountTupleHandler();
-        tupleQuery.evaluate(tupleHandler);
-        assertEquals(1, tupleHandler.getCount());
-
-        query = "PREFIX rdfs: <" + RDFS.NAMESPACE + ">\n" +
-                "PREFIX rdf: <" + RDF.NAMESPACE + ">\n" +
-                "PREFIX lit: <" + litdupsNS + ">\n" +
-                "select * where {?s lit:friendOf lit:James.}";
-
-        tupleQuery = conn.prepareTupleQuery(QueryLanguage.SPARQL, query);
-        tupleHandler = new CountTupleHandler();
-        tupleQuery.evaluate(tupleHandler);
-        assertEquals(1, tupleHandler.getCount());
-
-        query = "PREFIX rdfs: <" + RDFS.NAMESPACE + ">\n" +
-                "PREFIX rdf: <" + RDF.NAMESPACE + ">\n" +
-                "PREFIX lit: <" + litdupsNS + ">\n" +
-                "select * where {?s lit:friendOf lit:Jeff.}";
-
-        tupleQuery = conn.prepareTupleQuery(QueryLanguage.SPARQL, query);
-        tupleHandler = new CountTupleHandler();
-        tupleQuery.evaluate(tupleHandler);
-        assertEquals(2, tupleHandler.getCount());
-
-        conn.close();
-    }
-
-    public void testTransitiveProp() throws Exception {
-        if(internalInferenceEngine == null) return; //infer not supported;
-
-        RepositoryConnection conn = repository.getConnection();
-        conn.add(new StatementImpl(vf.createURI(litdupsNS, "subRegionOf"), RDF.TYPE, OWL.TRANSITIVEPROPERTY));
-        conn.add(new StatementImpl(vf.createURI(litdupsNS, "Queens"), vf.createURI(litdupsNS, "subRegionOf"), vf.createURI(litdupsNS, "NYC")));
-        conn.add(new StatementImpl(vf.createURI(litdupsNS, "NYC"), vf.createURI(litdupsNS, "subRegionOf"), vf.createURI(litdupsNS, "NY")));
-        conn.add(new StatementImpl(vf.createURI(litdupsNS, "NY"), vf.createURI(litdupsNS, "subRegionOf"), vf.createURI(litdupsNS, "US")));
-        conn.add(new StatementImpl(vf.createURI(litdupsNS, "US"), vf.createURI(litdupsNS, "subRegionOf"), vf.createURI(litdupsNS, "NorthAmerica")));
-        conn.add(new StatementImpl(vf.createURI(litdupsNS, "NorthAmerica"), vf.createURI(litdupsNS, "subRegionOf"), vf.createURI(litdupsNS, "World")));
-        conn.commit();
-        conn.close();
-
-        internalInferenceEngine.refreshGraph();
-
-        conn = repository.getConnection();
-
-        String query = "PREFIX rdfs: <" + RDFS.NAMESPACE + ">\n" +
-                "PREFIX rdf: <" + RDF.NAMESPACE + ">\n" +
-                "PREFIX lit: <" + litdupsNS + ">\n" +
-                "select * where {?s lit:subRegionOf lit:NorthAmerica.}";
-
-        TupleQuery tupleQuery = conn.prepareTupleQuery(QueryLanguage.SPARQL, query);
-        CountTupleHandler tupleHandler = new CountTupleHandler();
-        tupleQuery.evaluate(tupleHandler);
-        assertEquals(4, tupleHandler.getCount());
-
-        query = "PREFIX rdfs: <" + RDFS.NAMESPACE + ">\n" +
-                "PREFIX rdf: <" + RDF.NAMESPACE + ">\n" +
-                "PREFIX lit: <" + litdupsNS + ">\n" +
-                "select * where {?s lit:subRegionOf lit:NY.}";
-
-        tupleQuery = conn.prepareTupleQuery(QueryLanguage.SPARQL, query);
-        tupleHandler = new CountTupleHandler();
-        tupleQuery.evaluate(tupleHandler);
-        assertEquals(2, tupleHandler.getCount());
-
-        query = "PREFIX rdfs: <" + RDFS.NAMESPACE + ">\n" +
-                "PREFIX rdf: <" + RDF.NAMESPACE + ">\n" +
-                "PREFIX lit: <" + litdupsNS + ">\n" +
-                "select * where {lit:Queens lit:subRegionOf ?s.}";
-
-        tupleQuery = conn.prepareTupleQuery(QueryLanguage.SPARQL, query);
-        tupleHandler = new CountTupleHandler();
-        tupleQuery.evaluate(tupleHandler);
-        assertEquals(5, tupleHandler.getCount());
-
-        query = "PREFIX rdfs: <" + RDFS.NAMESPACE + ">\n" +
-                "PREFIX rdf: <" + RDF.NAMESPACE + ">\n" +
-                "PREFIX lit: <" + litdupsNS + ">\n" +
-                "select * where {lit:NY lit:subRegionOf ?s.}";
-
-        tupleQuery = conn.prepareTupleQuery(QueryLanguage.SPARQL, query);
-        tupleHandler = new CountTupleHandler();
-        tupleQuery.evaluate(tupleHandler);
-        assertEquals(3, tupleHandler.getCount());
-
-        conn.close();
-    }
-
-    public void testInverseOf() throws Exception {
-        if(internalInferenceEngine == null) return; //infer not supported;
-
-        RepositoryConnection conn = repository.getConnection();
-        conn.add(new StatementImpl(vf.createURI(litdupsNS, "degreeFrom"), OWL.INVERSEOF, vf.createURI(litdupsNS, "hasAlumnus")));
-        conn.add(new StatementImpl(vf.createURI(litdupsNS, "UgradA"), vf.createURI(litdupsNS, "degreeFrom"), vf.createURI(litdupsNS, "Harvard")));
-        conn.add(new StatementImpl(vf.createURI(litdupsNS, "GradB"), vf.createURI(litdupsNS, "degreeFrom"), vf.createURI(litdupsNS, "Harvard")));
-        conn.add(new StatementImpl(vf.createURI(litdupsNS, "Harvard"), vf.createURI(litdupsNS, "hasAlumnus"), vf.createURI(litdupsNS, "AlumC")));
-        conn.commit();
-        conn.close();
-
-        internalInferenceEngine.refreshGraph();
-
-        conn = repository.getConnection();
-
-        String query = "PREFIX rdfs: <" + RDFS.NAMESPACE + ">\n" +
-                "PREFIX rdf: <" + RDF.NAMESPACE + ">\n" +
-                "PREFIX lit: <" + litdupsNS + ">\n" +
-                "select * where {lit:Harvard lit:hasAlumnus ?s.}";
-
-        TupleQuery tupleQuery = conn.prepareTupleQuery(QueryLanguage.SPARQL, query);
-        CountTupleHandler tupleHandler = new CountTupleHandler();
-        tupleQuery.evaluate(tupleHandler);
-        assertEquals(3, tupleHandler.getCount());
-
-        query = "PREFIX rdfs: <" + RDFS.NAMESPACE + ">\n" +
-                "PREFIX rdf: <" + RDF.NAMESPACE + ">\n" +
-                "PREFIX lit: <" + litdupsNS + ">\n" +
-                "select * where {?s lit:degreeFrom lit:Harvard.}";
-
-        tupleQuery = conn.prepareTupleQuery(QueryLanguage.SPARQL, query);
-        tupleHandler = new CountTupleHandler();
-        tupleQuery.evaluate(tupleHandler);
-        assertEquals(3, tupleHandler.getCount());
-
-        conn.close();
-    }
-
-    public void testSubClassOf() throws Exception {
-        if(internalInferenceEngine == null) return; //infer not supported;
-
-        RepositoryConnection conn = repository.getConnection();
-        conn.add(new StatementImpl(vf.createURI(litdupsNS, "UndergraduateStudent"), RDFS.SUBCLASSOF, vf.createURI(litdupsNS, "Student")));
-        conn.add(new StatementImpl(vf.createURI(litdupsNS, "Student"), RDFS.SUBCLASSOF, vf.createURI(litdupsNS, "Person")));
-        conn.add(new StatementImpl(vf.createURI(litdupsNS, "UgradA"), RDF.TYPE, vf.createURI(litdupsNS, "UndergraduateStudent")));
-        conn.add(new StatementImpl(vf.createURI(litdupsNS, "StudentB"), RDF.TYPE, vf.createURI(litdupsNS, "Student")));
-        conn.add(new StatementImpl(vf.createURI(litdupsNS, "PersonC"), RDF.TYPE, vf.createURI(litdupsNS, "Person")));
-        conn.commit();
-        conn.close();
-
-        internalInferenceEngine.refreshGraph();
-
-        conn = repository.getConnection();
-
-        //simple api first
-        RepositoryResult<Statement> person = conn.getStatements(null, RDF.TYPE, vf.createURI(litdupsNS, "Person"), true);
-        int count = 0;
-        while (person.hasNext()) {
-            count++;
-            person.next();
-        }
-        person.close();
-        assertEquals(3, count);
-
-        String query = "PREFIX rdfs: <" + RDFS.NAMESPACE + ">\n" +
-                "PREFIX rdf: <" + RDF.NAMESPACE + ">\n" +
-                "PREFIX lit: <" + litdupsNS + ">\n" +
-                "select * where {?s rdf:type lit:Person.}";
-
-        TupleQuery tupleQuery = conn.prepareTupleQuery(QueryLanguage.SPARQL, query);
-        CountTupleHandler tupleHandler = new CountTupleHandler();
-        tupleQuery.evaluate(tupleHandler);
-        assertEquals(3, tupleHandler.getCount());
-
-        query = "PREFIX rdfs: <" + RDFS.NAMESPACE + ">\n" +
-                "PREFIX rdf: <" + RDF.NAMESPACE + ">\n" +
-                "PREFIX lit: <" + litdupsNS + ">\n" +
-                "select * where {?s rdf:type lit:Student.}";
-
-        tupleQuery = conn.prepareTupleQuery(QueryLanguage.SPARQL, query);
-        tupleHandler = new CountTupleHandler();
-        tupleQuery.evaluate(tupleHandler);
-        assertEquals(2, tupleHandler.getCount());
-
-        query = "PREFIX rdfs: <" + RDFS.NAMESPACE + ">\n" +
-                "PREFIX rdf: <" + RDF.NAMESPACE + ">\n" +
-                "PREFIX lit: <" + litdupsNS + ">\n" +
-                "select * where {?s rdf:type lit:UndergraduateStudent.}";
-
-        tupleQuery = conn.prepareTupleQuery(QueryLanguage.SPARQL, query);
-        tupleHandler = new CountTupleHandler();
-        tupleQuery.evaluate(tupleHandler);
-        assertEquals(1, tupleHandler.getCount());
-
-        conn.close();
-    }
-
-    public void testSameAs() throws Exception {
-        if(internalInferenceEngine == null) return; //infer not supported;
-
-        RepositoryConnection conn = repository.getConnection();
-        conn.add(new StatementImpl(vf.createURI(litdupsNS, "StudentA1"), OWL.SAMEAS, vf.createURI(litdupsNS, "StudentA2")));
-        conn.add(new StatementImpl(vf.createURI(litdupsNS, "StudentA2"), OWL.SAMEAS, vf.createURI(litdupsNS, "StudentA3")));
-        conn.add(new StatementImpl(vf.createURI(litdupsNS, "StudentB1"), OWL.SAMEAS, vf.createURI(litdupsNS, "StudentB2")));
-        conn.add(new StatementImpl(vf.createURI(litdupsNS, "StudentB2"), OWL.SAMEAS, vf.createURI(litdupsNS, "StudentB3")));
-        conn.add(new StatementImpl(vf.createURI(litdupsNS, "StudentA1"), vf.createURI(litdupsNS, "pred1"), vf.createURI(litdupsNS, "StudentB3")));
-        conn.add(new StatementImpl(vf.createURI(litdupsNS, "StudentB1"), vf.createURI(litdupsNS, "pred2"), vf.createURI(litdupsNS, "StudentA3")));
-        conn.commit();
-        conn.close();
-
-        internalInferenceEngine.refreshGraph();
-
-        conn = repository.getConnection();
-
-        // query where finds sameAs for obj, pred specified
-        String query = "PREFIX rdfs: <" + RDFS.NAMESPACE + ">\n" +
-                "PREFIX rdf: <" + RDF.NAMESPACE + ">\n" +
-                "PREFIX lit: <" + litdupsNS + ">\n" +
-                "select ?s where {?s lit:pred1 lit:StudentB2.}";
-
-        TupleQuery tupleQuery = conn.prepareTupleQuery(QueryLanguage.SPARQL, query);
-        CountTupleHandler tupleHandler = new CountTupleHandler();
-        tupleQuery.evaluate(tupleHandler);
-        assertEquals(1, tupleHandler.getCount());
-
-        // query where finds sameAs for obj only specified
-        query = "PREFIX rdfs: <" + RDFS.NAMESPACE + ">\n" +
-                "PREFIX rdf: <" + RDF.NAMESPACE + ">\n" +
-                "PREFIX lit: <" + litdupsNS + ">\n" +
-                "select ?s where {?s ?p lit:StudentB2.}";
-
-        tupleQuery = conn.prepareTupleQuery(QueryLanguage.SPARQL, query);
-        tupleHandler = new CountTupleHandler();
-        tupleQuery.evaluate(tupleHandler);
-        assertEquals(3, tupleHandler.getCount()); // including sameAs assertions
-
-        // query where finds sameAs for subj, pred specified
-        query = "PREFIX rdfs: <" + RDFS.NAMESPACE + ">\n" +
-                "PREFIX rdf: <" + RDF.NAMESPACE + ">\n" +
-                "PREFIX lit: <" + litdupsNS + ">\n" +
-                "select ?s where {lit:StudentB2 lit:pred2 ?s.}";
-
-        tupleQuery = conn.prepareTupleQuery(QueryLanguage.SPARQL, query);
-        tupleHandler = new CountTupleHandler();
-        tupleQuery.evaluate(tupleHandler);
-        assertEquals(1, tupleHandler.getCount()); // including sameAs assertions
-
-        // query where finds sameAs for subj only specified
-        query = "PREFIX rdfs: <" + RDFS.NAMESPACE + ">\n" +
-                "PREFIX rdf: <" + RDF.NAMESPACE + ">\n" +
-                "PREFIX lit: <" + litdupsNS + ">\n" +
-                "select ?s where {lit:StudentB2 ?p ?s.}";
-
-        tupleQuery = conn.prepareTupleQuery(QueryLanguage.SPARQL, query);
-        tupleHandler = new CountTupleHandler();
-        tupleQuery.evaluate(tupleHandler);
-        assertEquals(3, tupleHandler.getCount()); // including sameAs assertions
-
-        // query where finds sameAs for subj, obj specified
-        query = "PREFIX rdfs: <" + RDFS.NAMESPACE + ">\n" +
-                "PREFIX rdf: <" + RDF.NAMESPACE + ">\n" +
-                "PREFIX lit: <" + litdupsNS + ">\n" +
-                "select ?s where {lit:StudentB2 ?s lit:StudentA2.}";
-
-        tupleQuery = conn.prepareTupleQuery(QueryLanguage.SPARQL, query);
-        tupleHandler = new CountTupleHandler();
-        tupleQuery.evaluate(tupleHandler);
-        assertEquals(1, tupleHandler.getCount()); 
-
-        conn.close();
-    }
-
-    public void testNamedGraphLoad() throws Exception {
-        InputStream stream = Thread.currentThread().getContextClassLoader().getResourceAsStream("namedgraphs.trig");
-        assertNotNull(stream);
-        RepositoryConnection conn = repository.getConnection();
-        conn.add(stream, "", RDFFormat.TRIG);
-        conn.commit();
-
-        String query = "PREFIX  ex:  <http://www.example.org/exampleDocument#>\n" +
-                "PREFIX  voc:  <http://www.example.org/vocabulary#>\n" +
-                "PREFIX  foaf:  <http://xmlns.com/foaf/0.1/>\n" +
-                "PREFIX  rdfs:  <http://www.w3.org/2000/01/rdf-schema#>\n" +
-                "\n" +
-                "SELECT * \n" +
-//                "FROM NAMED <http://www.example.org/exampleDocument#G1>\n" +
-                "WHERE\n" +
-                "{\n" +
-                "  GRAPH ex:G1\n" +
-                "  {\n" +
-                "    ?m voc:name ?name ;\n" +
-                "           voc:homepage ?hp .\n" +
-                "  } .\n" +
-                " GRAPH ex:G2\n" +
-                "  {\n" +
-                "    ?m voc:hasSkill ?skill .\n" +
-                "  } .\n" +
-                "}";
-        TupleQuery tupleQuery = conn.prepareTupleQuery(QueryLanguage.SPARQL, query);
-        CountTupleHandler tupleHandler = new CountTupleHandler();
-        tupleQuery.evaluate(tupleHandler);
-//        tupleQuery.evaluate(new PrintTupleHandler());
-        assertEquals(1, tupleHandler.getCount());
-
-        query = "PREFIX  ex:  <http://www.example.org/exampleDocument#>\n" +
-                "PREFIX  voc:  <http://www.example.org/vocabulary#>\n" +
-                "PREFIX  swp:  <http://www.w3.org/2004/03/trix/swp-1/>\n" +
-                "PREFIX  foaf:  <http://xmlns.com/foaf/0.1/>\n" +
-                "PREFIX  rdfs:  <http://www.w3.org/2000/01/rdf-schema#>\n" +
-                "\n" +
-                "SELECT * \n" +
-                "WHERE\n" +
-                "{\n" +
-                "  GRAPH ex:G3\n" +
-                "  {\n" +
-                "    ?g swp:assertedBy ?w .\n" +
-                "    ?w swp:authority ex:Tom .\n" +
-                "  } .\n" +
-                "  GRAPH ?g\n" +
-                "  {\n" +
-                "    ?m voc:name ?name .\n" +
-                "  } .\n" +
-                "}";
-
-        tupleQuery = conn.prepareTupleQuery(QueryLanguage.SPARQL, query);
-        tupleHandler = new CountTupleHandler();
-        tupleQuery.evaluate(tupleHandler);
-        assertEquals(1, tupleHandler.getCount());
-
-        query = "PREFIX  ex:  <http://www.example.org/exampleDocument#>\n" +
-                "PREFIX  voc:  <http://www.example.org/vocabulary#>\n" +
-                "PREFIX  swp:  <http://www.w3.org/2004/03/trix/swp-1/>\n" +
-                "PREFIX  foaf:  <http://xmlns.com/foaf/0.1/>\n" +
-                "PREFIX  rdfs:  <http://www.w3.org/2000/01/rdf-schema#>\n" +
-                "\n" +
-                "SELECT * \n" +
-                "WHERE\n" +
-                "{\n" +
-                "  GRAPH ?g\n" +
-                "  {\n" +
-                "    ?m voc:name ?name .\n" +
-                "  } .\n" +
-                "}";
-
-        tupleQuery = conn.prepareTupleQuery(QueryLanguage.SPARQL, query);
-//        tupleQuery.setBinding(BINDING_DISP_QUERYPLAN, VALUE_FACTORY.createLiteral(true));
-        tupleHandler = new CountTupleHandler();
-        tupleQuery.evaluate(tupleHandler);
-        assertEquals(2, tupleHandler.getCount());
-
-        conn.close();
-    }
-
-    public void testNamedGraphLoad2() throws Exception {
-        InputStream stream = Thread.currentThread().getContextClassLoader().getResourceAsStream("namedgraphs.trig");
-        assertNotNull(stream);
-        RepositoryConnection conn = repository.getConnection();
-        conn.add(stream, "", RDFFormat.TRIG);
-        conn.commit();
-
-        RepositoryResult<Statement> statements = conn.getStatements(null, vf.createURI("http://www.example.org/vocabulary#name"), null, true, vf.createURI("http://www.example.org/exampleDocument#G1"));
-        int count = 0;
-        while (statements.hasNext()) {
-            statements.next();
-            count++;
-        }
-        statements.close();
-        assertEquals(1, count);
-
-        conn.close();
-    }
-
-//    public void testNamedGraphLoadWInlineAuth() throws Exception {
-//        InputStream stream = Thread.currentThread().getContextClassLoader().getResourceAsStream("namedgraphs.trig");
-//        assertNotNull(stream);
-//        URI auth1 = vf.createURI(RdfCloudTripleStoreConstants.AUTH_NAMESPACE, "1");
-//        RepositoryConnection conn = repository.getConnection();
-//        conn.add(stream, "", RDFFormat.TRIG, auth1);
-//        conn.commit();
-//
-//        String query = "PREFIX  ex:  <http://www.example.org/exampleDocument#>\n" +
-//                "PREFIX  voc:  <http://www.example.org/vocabulary#>\n" +
-//                "PREFIX  foaf:  <http://xmlns.com/foaf/0.1/>\n" +
-//                "PREFIX  rdfs:  <http://www.w3.org/2000/01/rdf-schema#>\n" +
-//                "\n" +
-//                "SELECT * \n" +
-//                "WHERE\n" +
-//                "{\n" +
-//                "  GRAPH ex:G1\n" +
-//                "  {\n" +
-//                "    ?m voc:name ?name ;\n" +
-//                "           voc:homepage ?hp .\n" +
-//                "  } .\n" +
-//                " GRAPH ex:G2\n" +
-//                "  {\n" +
-//                "    ?m voc:hasSkill ?skill .\n" +
-//                "  } .\n" +
-//                "}";
-//        TupleQuery tupleQuery = conn.prepareTupleQuery(QueryLanguage.SPARQL, query);
-//        tupleQuery.setBinding(RdfCloudTripleStoreConfiguration.CONF_QUERY_AUTH, vf.createLiteral("1"));
-//        CountTupleHandler tupleHandler = new CountTupleHandler();
-//        tupleQuery.evaluate(tupleHandler);
-//        assertEquals(1, tupleHandler.getCount());
-//
-//        query = "PREFIX  ex:  <http://www.example.org/exampleDocument#>\n" +
-//                "PREFIX  voc:  <http://www.example.org/vocabulary#>\n" +
-//                "PREFIX  swp:  <http://www.w3.org/2004/03/trix/swp-1/>\n" +
-//                "PREFIX  foaf:  <http://xmlns.com/foaf/0.1/>\n" +
-//                "PREFIX  rdfs:  <http://www.w3.org/2000/01/rdf-schema#>\n" +
-//                "\n" +
-//                "SELECT * \n" +
-//                "WHERE\n" +
-//                "{\n" +
-//                "  GRAPH ex:G3\n" +
-//                "  {\n" +
-//                "    ?g swp:assertedBy ?w .\n" +
-//                "    ?w swp:authority ex:Tom .\n" +
-//                "  } .\n" +
-//                "  GRAPH ?g\n" +
-//                "  {\n" +
-//                "    ?m voc:name ?name .\n" +
-//                "  } .\n" +
-//                "}";
-//
-//        tupleQuery = conn.prepareTupleQuery(QueryLanguage.SPARQL, query);
-//        tupleHandler = new CountTupleHandler();
-//        tupleQuery.evaluate(tupleHandler);
-//        assertEquals(0, tupleHandler.getCount());
-//
-//        conn.close();
-//    }
-
-    public void testNamedGraphLoadWAuth() throws Exception {
-        InputStream stream = Thread.currentThread().getContextClassLoader().getResourceAsStream("namedgraphs.trig");
-        assertNotNull(stream);
-
-        RdfCloudTripleStore tstore = new MockRdfCloudStore();
-        NamespaceManager nm = new NamespaceManager(tstore.getRyaDAO(), tstore.getConf());
-        tstore.setNamespaceManager(nm);
-        SailRepository repo = new SailRepository(tstore);
-        tstore.getRyaDAO().getConf().setCv("1|2");
-        repo.initialize();
-
-        RepositoryConnection conn = repo.getConnection();
-        conn.add(stream, "", RDFFormat.TRIG);
-        conn.commit();
-
-        String query = "PREFIX  ex:  <http://www.example.org/exampleDocument#>\n" +
-                "PREFIX  voc:  <http://www.example.org/vocabulary#>\n" +
-                "PREFIX  foaf:  <http://xmlns.com/foaf/0.1/>\n" +
-                "PREFIX  rdfs:  <http://www.w3.org/2000/01/rdf-schema#>\n" +
-                "\n" +
-                "SELECT * \n" +
-//                "FROM NAMED <http://www.example.org/exampleDocument#G1>\n" +
-                "WHERE\n" +
-                "{\n" +
-                "  GRAPH ex:G1\n" +
-                "  {\n" +
-                "    ?m voc:name ?name ;\n" +
-                "           voc:homepage ?hp .\n" +
-                "  } .\n" +
-                " GRAPH ex:G2\n" +
-                "  {\n" +
-                "    ?m voc:hasSkill ?skill .\n" +
-                "  } .\n" +
-                "}";
-        TupleQuery tupleQuery = conn.prepareTupleQuery(QueryLanguage.SPARQL, query);
-        tupleQuery.setBinding(RdfCloudTripleStoreConfiguration.CONF_QUERY_AUTH, vf.createLiteral("2"));
-        CountTupleHandler tupleHandler = new CountTupleHandler();
-        tupleQuery.evaluate(tupleHandler);
-        assertEquals(1, tupleHandler.getCount());
-
-        tupleQuery = conn.prepareTupleQuery(QueryLanguage.SPARQL, query); //no auth
-        tupleHandler = new CountTupleHandler();
-        tupleQuery.evaluate(tupleHandler);
-        assertEquals(0, tupleHandler.getCount());
-
-        conn.close();
-
-        repo.shutDown();
-    }
-
-    public void testInsertDeleteData() throws Exception {
-        RepositoryConnection conn = repository.getConnection();
-
-        String insert = "PREFIX dc: <http://purl.org/dc/elements/1.1/>\n" +
-                "INSERT DATA\n" +
-                "{ <http://example/book3> dc:title    \"A new book\" ;\n" +
-                "                         dc:creator  \"A.N.Other\" .\n" +
-                "}";
-        Update update = conn.prepareUpdate(QueryLanguage.SPARQL, insert);
-        update.execute();
-
-        String query = "PREFIX dc: <http://purl.org/dc/elements/1.1/>\n" +
-                "select * where { <http://example/book3> ?p ?o. }";
-        TupleQuery tupleQuery = conn.prepareTupleQuery(QueryLanguage.SPARQL, query);
-        CountTupleHandler tupleHandler = new CountTupleHandler();
-        tupleQuery.evaluate(tupleHandler);
-        assertEquals(2, tupleHandler.getCount());
-
-        String delete = "PREFIX dc: <http://purl.org/dc/elements/1.1/>\n" +
-                "\n" +
-                "DELETE DATA\n" +
-                "{ <http://example/book3> dc:title    \"A new book\" ;\n" +
-                "                         dc:creator  \"A.N.Other\" .\n" +
-                "}";
-        update = conn.prepareUpdate(QueryLanguage.SPARQL, delete);
-        update.execute();
-
-        query = "PREFIX dc: <http://purl.org/dc/elements/1.1/>\n" +
-                "select * where { <http://example/book3> ?p ?o. }";
-        tupleQuery = conn.prepareTupleQuery(QueryLanguage.SPARQL, query);
-        tupleHandler = new CountTupleHandler();
-        tupleQuery.evaluate(tupleHandler);
-        assertEquals(0, tupleHandler.getCount());
-
-        conn.close();
-    }
-
-    public void testUpdateData() throws Exception {
-        RepositoryConnection conn = repository.getConnection();
-
-        String insert = "PREFIX dc: <http://purl.org/dc/elements/1.1/>\n" +
-                "PREFIX ex: <http://example/addresses#>\n" +
-                "INSERT DATA\n" +
-                "{ GRAPH ex:G1 {\n" +
-                "<http://example/book3> dc:title    \"A new book\" ;\n" +
-                "                         dc:creator  \"A.N.Other\" .\n" +
-                "}\n" +
-                "}";
-        Update update = conn.prepareUpdate(QueryLanguage.SPARQL, insert);
-        update.execute();
-
-        String query = "PREFIX dc: <http://purl.org/dc/elements/1.1/>\n" +
-                "select * where { <http://example/book3> ?p ?o. }";
-        TupleQuery tupleQuery = conn.prepareTupleQuery(QueryLanguage.SPARQL, query);
-        CountTupleHandler tupleHandler = new CountTupleHandler();
-        tupleQuery.evaluate(tupleHandler);
-        assertEquals(2, tupleHandler.getCount());
-
-        String insdel = "PREFIX dc: <http://purl.org/dc/elements/1.1/>\n" +
-                "\n" +
-                "WITH <http://example/addresses#G1>\n" +
-                "DELETE { ?book dc:title ?title }\n" +
-                "INSERT { ?book dc:title \"A newer book\"." +
-                "         ?book dc:add \"Additional Info\" }\n" +
-                "WHERE\n" +
-                "  { ?book dc:creator \"A.N.Other\" ;\n" +
-                "        dc:title ?title .\n" +
-                "  }";
-        update = conn.prepareUpdate(QueryLanguage.SPARQL, insdel);
-        update.execute();
-
-        query = "PREFIX dc: <http://purl.org/dc/elements/1.1/>\n" +
-                "PREFIX ex: <http://example/addresses#>\n" +
-                "select * where { GRAPH ex:G1 {<http://example/book3> ?p ?o. } }";
-        tupleQuery = conn.prepareTupleQuery(QueryLanguage.SPARQL, query);
-        tupleHandler = new CountTupleHandler();
-        tupleQuery.evaluate(tupleHandler);
-        assertEquals(3, tupleHandler.getCount());
-
-        conn.close();
-    }
-    
-    public void testClearGraph() throws Exception {
-        RepositoryConnection conn = repository.getConnection();
-
-        String insert = "PREFIX dc: <http://purl.org/dc/elements/1.1/>\n" +
-                "PREFIX ex: <http://example/addresses#>\n" +
-                "INSERT DATA\n" +
-                "{ GRAPH ex:G1 {\n" +
-                "<http://example/book3> dc:title    \"A new book\" ;\n" +
-                "                         dc:creator  \"A.N.Other\" .\n" +
-                "}\n" +
-                "}";
-        Update update = conn.prepareUpdate(QueryLanguage.SPARQL, insert);
-        update.execute();
-
-        insert = "PREFIX dc: <http://purl.org/dc/elements/1.1/>\n" +
-                "PREFIX ex: <http://example/addresses#>\n" +
-                "INSERT DATA\n" +
-                "{ GRAPH ex:G2 {\n" +
-                "<http://example/book3> dc:title    \"A new book\" ;\n" +
-                "                         dc:creator  \"A.N.Other\" .\n" +
-                "}\n" +
-                "}";
-        update = conn.prepareUpdate(QueryLanguage.SPARQL, insert);
-        update.execute();
-
-        String query = "PREFIX dc: <http://purl.org/dc/elements/1.1/>\n" +
-                "select * where { <http://example/book3> ?p ?o. }";
-        TupleQuery tupleQuery = conn.prepareTupleQuery(QueryLanguage.SPARQL, query);
-        CountTupleHandler tupleHandler = new CountTupleHandler();
-        tupleQuery.evaluate(tupleHandler);
-        assertEquals(4, tupleHandler.getCount());
-        
-        tupleHandler = new CountTupleHandler();
-        conn.clear(new URIImpl("http://example/addresses#G2"));
-        tupleQuery.evaluate(tupleHandler);
-        assertEquals(2, tupleHandler.getCount());
-
-        tupleHandler = new CountTupleHandler();
-        conn.clear(new URIImpl("http://example/addresses#G1"));
-        tupleQuery.evaluate(tupleHandler);
-        assertEquals(0, tupleHandler.getCount());
-
-        conn.close();
-    }
-    
-    public void testClearAllGraph() throws Exception {
-        RepositoryConnection conn = repository.getConnection();
-
-        String insert = "PREFIX dc: <http://purl.org/dc/elements/1.1/>\n" +
-                "PREFIX ex: <http://example/addresses#>\n" +
-                "INSERT DATA\n" +
-                "{ GRAPH ex:G1 {\n" +
-                "<http://example/book3> dc:title    \"A new book\" ;\n" +
-                "                         dc:creator  \"A.N.Other\" .\n" +
-                "}\n" +
-                "}";
-        Update update = conn.prepareUpdate(QueryLanguage.SPARQL, insert);
-        update.execute();
-
-        insert = "PREFIX dc: <http://purl.org/dc/elements/1.1/>\n" +
-                "PREFIX ex: <http://example/addresses#>\n" +
-                "INSERT DATA\n" +
-                "{ GRAPH ex:G2 {\n" +
-                "<http://example/book3> dc:title    \"A new book\" ;\n" +
-                "                         dc:creator  \"A.N.Other\" .\n" +
-                "}\n" +
-                "}";
-        update = conn.prepareUpdate(QueryLanguage.SPARQL, insert);
-        update.execute();
-
-        String query = "PREFIX dc: <http://purl.org/dc/elements/1.1/>\n" +
-                "select * where { <http://example/book3> ?p ?o. }";
-        TupleQuery tupleQuery = conn.prepareTupleQuery(QueryLanguage.SPARQL, query);
-        CountTupleHandler tupleHandler = new CountTupleHandler();
-        tupleQuery.evaluate(tupleHandler);
-        assertEquals(4, tupleHandler.getCount());
-        
-        tupleHandler = new CountTupleHandler();
-        conn.clear();
-        tupleQuery.evaluate(tupleHandler);
-        assertEquals(0, tupleHandler.getCount());
-
-        conn.close();
-    }
-    
-    public void testDropGraph() throws Exception {
-        RepositoryConnection conn = repository.getConnection();
-
-        String insert = "PREFIX dc: <http://purl.org/dc/elements/1.1/>\n" +
-                "PREFIX ex: <http://example/addresses#>\n" +
-                "INSERT DATA\n" +
-                "{ GRAPH ex:G1 {\n" +
-                "<http://example/book3> dc:title    \"A new book\" ;\n" +
-                "                         dc:creator  \"A.N.Other\" .\n" +
-                "}\n" +
-                "}";
-        Update update = conn.prepareUpdate(QueryLanguage.SPARQL, insert);
-        update.execute();
-
-        insert = "PREFIX dc: <http://purl.org/dc/elements/1.1/>\n" +
-                "PREFIX ex: <http://example/addresses#>\n" +
-                "INSERT DATA\n" +
-                "{ GRAPH ex:G2 {\n" +
-                "<http://example/book3> dc:title    \"A new book\" ;\n" +
-                "                         dc:creator  \"A.N.Other\" .\n" +
-                "}\n" +
-                "}";
-        update = conn.prepareUpdate(QueryLanguage.SPARQL, insert);
-        update.execute();
-
-        String query = "PREFIX dc: <http://purl.org/dc/elements/1.1/>\n" +
-                "select * where { <http://example/book3> ?p ?o. }";
-        TupleQuery tupleQuery = conn.prepareTupleQuery(QueryLanguage.SPARQL, query);
-        CountTupleHandler tupleHandler = new CountTupleHandler();
-        tupleQuery.evaluate(tupleHandler);
-        assertEquals(4, tupleHandler.getCount());
-        
-        tupleHandler = new CountTupleHandler();
-        String drop = "PREFIX ex: <http://example/addresses#>\n" +
-                "DROP GRAPH ex:G2 ";
-        update = conn.prepareUpdate(QueryLanguage.SPARQL, drop);
-        update.execute();
-        tupleQuery.evaluate(tupleHandler);
-        assertEquals(2, tupleHandler.getCount());
-
-        tupleHandler = new CountTupleHandler();
-        drop = "PREFIX ex: <http://example/addresses#>\n" +
-                "DROP GRAPH ex:G1 ";
-        update = conn.prepareUpdate(QueryLanguage.SPARQL, drop);
-        update.execute();
-        tupleQuery.evaluate(tupleHandler);
-        assertEquals(0, tupleHandler.getCount());
-
-        conn.close();
-    }
-
-    public static class CountTupleHandler implements TupleQueryResultHandler {
-
-        int count = 0;
-
-        @Override
-        public void startQueryResult(List<String> strings) throws TupleQueryResultHandlerException {
-        }
-
-        @Override
-        public void endQueryResult() throws TupleQueryResultHandlerException {
-        }
-
-        @Override
-        public void handleSolution(BindingSet bindingSet) throws TupleQueryResultHandlerException {
-            count++;
-        }
-
-        public int getCount() {
-            return count;
-        }
-
-        @Override
-        public void handleBoolean(boolean arg0) throws QueryResultHandlerException {
-        }
-
-        @Override
-        public void handleLinks(List<String> arg0) throws QueryResultHandlerException {
-        }
-    }
-
-    private static class PrintTupleHandler implements TupleQueryResultHandler {
-        
-
-        @Override
-        public void startQueryResult(List<String> strings) throws TupleQueryResultHandlerException {
-        }
-
-        @Override
-        public void endQueryResult() throws TupleQueryResultHandlerException {
-        }
-
-        @Override
-        public void handleSolution(BindingSet bindingSet) throws TupleQueryResultHandlerException {
-            System.out.println(bindingSet);
-        }
-
-        @Override
-        public void handleBoolean(boolean arg0) throws QueryResultHandlerException {
-        }
-
-        @Override
-        public void handleLinks(List<String> arg0) throws QueryResultHandlerException {
-        }
-    }
-
-    public class MockRdfCloudStore extends RdfCloudTripleStore {
-
-        public MockRdfCloudStore() {
-            super();
-            Instance instance = new MockInstance();
-            try {
-                AccumuloRdfConfiguration conf = new AccumuloRdfConfiguration();
-                setConf(conf);
-                Connector connector = instance.getConnector("", "");
-                AccumuloRyaDAO cdao = new AccumuloRyaDAO();
-                cdao.setConf(conf);
-                cdao.setConnector(connector);
-                setRyaDAO(cdao);
-                inferenceEngine = new InferenceEngine();
-                inferenceEngine.setRyaDAO(cdao);
-                inferenceEngine.setRefreshGraphSchedule(5000); //every 5 sec
-                inferenceEngine.setConf(conf);
-                setInferenceEngine(inferenceEngine);
-                internalInferenceEngine = inferenceEngine;
-            } catch (Exception e) {
-                e.printStackTrace();
-            }
-        }
-    }
-}


[02/56] [abbrv] incubator-rya git commit: RYA-7 POM and License Clean-up for Apache Move

Posted by mi...@apache.org.
http://git-wip-us.apache.org/repos/asf/incubator-rya/blob/80faf06d/utils/cloudbase.utils/src/main/java/mvm/rya/cloudbase/utils/scanner/RangesScanner.java
----------------------------------------------------------------------
diff --git a/utils/cloudbase.utils/src/main/java/mvm/rya/cloudbase/utils/scanner/RangesScanner.java b/utils/cloudbase.utils/src/main/java/mvm/rya/cloudbase/utils/scanner/RangesScanner.java
deleted file mode 100644
index 9de3893..0000000
--- a/utils/cloudbase.utils/src/main/java/mvm/rya/cloudbase/utils/scanner/RangesScanner.java
+++ /dev/null
@@ -1,236 +0,0 @@
-package mvm.rya.cloudbase.utils.scanner;
-
-import cloudbase.core.client.BatchScanner;
-import cloudbase.core.client.Connector;
-import cloudbase.core.client.Scanner;
-import cloudbase.core.data.Key;
-import cloudbase.core.data.Range;
-import cloudbase.core.data.Value;
-import cloudbase.core.security.Authorizations;
-import com.google.common.collect.Iterators;
-import org.apache.hadoop.io.Text;
-
-import java.io.IOException;
-import java.util.*;
-
-/**
- * This class will decorate a List of Scanners and treat it as one BatchScanner.
- * Each Scanner in the List corresponds to a separate Range.
- * The reason we are doing this and not just using BatchScanner is because,
- * the Scanner class will return information sorted and is more performant on
- * larger amounts of data.
- */
-public class RangesScanner implements BatchScanner, Scanner {
-
-    private List<Scanner> scanners = new ArrayList<Scanner>();
-    private Connector connector;
-    private String table;
-    private Authorizations authorizations;
-
-    public RangesScanner(Connector connector, String table, Authorizations authorizations) {
-        this.connector = connector;
-        this.table = table;
-        this.authorizations = authorizations;
-    }
-
-    @Override
-    public void setRanges(Collection<Range> ranges) {
-        try {
-            scanners.clear(); //no need to close them since Scanners do their own cleanup
-            for (Range range : ranges) {
-                Scanner scanner = connector.createScanner(table, authorizations);
-                scanner.setRange(range);
-                scanners.add(scanner); // keep the scanner; without this the list stays empty and iterator() has nothing to read
-            }
-        } catch (Exception e) {
-            throw new RuntimeException(e); //TODO: Better exception handling
-        }
-    }
-
-    @Override
-    public void setTimeOut(int i) {
-        if(scanners.size() == 0) {
-            throw new IllegalArgumentException("Set Ranges first to initialize the underlying scanners"); //TODO: if we save this info we don't need this check
-        }
-        for(Scanner scanner: scanners) {
-            scanner.setTimeOut(i);
-        }
-    }
-
-    @Override
-    public int getTimeOut() {
-        return 0;
-    }
-
-    @Override
-    public void setRange(Range range) {
-        //TODO: How to set only one range
-    }
-
-    @Override
-    public Range getRange() {
-        return null; //TODO: How to get only one range
-    }
-
-    @Override
-    public void setBatchSize(int i) {
-        if(scanners.size() == 0) {
-            throw new IllegalArgumentException("Set Ranges first to initialize the underlying scanners"); //TODO: if we save this info we don't need this check
-        }
-        for(Scanner scanner: scanners) {
-            scanner.setBatchSize(i);
-        }
-    }
-
-    @Override
-    public int getBatchSize() {
-        return 0; //TODO: What does this mean with multiple scanners?
-    }
-
-    @Override
-    public void enableIsolation() {
-        if(scanners.size() == 0) {
-            throw new IllegalArgumentException("Set Ranges first to initialize the underlying scanners"); //TODO: if we save this info we don't need this check
-        }
-        for(Scanner scanner: scanners) {
-            scanner.enableIsolation();
-        }
-    }
-
-    @Override
-    public void disableIsolation() {
-        if(scanners.size() == 0) {
-            throw new IllegalArgumentException("Set Ranges first to initialize the underlying scanners"); //TODO: if we save this info we don't need this check
-        }
-        for(Scanner scanner: scanners) {
-            scanner.disableIsolation();
-        }
-    }
-
-    @Override
-    public Iterator<Map.Entry<Key, Value>> iterator() {
-        //TODO: Lazy load iterator to only open the next scanner iterator after the first one is done
-        if(scanners.size() == 0) {
-            throw new IllegalArgumentException("Set Ranges first to initialize the underlying scanners"); //TODO: if we save this info we don't need this check
-        }
-        List<Iterator<Map.Entry<Key,Value>>> iterators = new ArrayList<Iterator<Map.Entry<Key, Value>>>();
-        for(Scanner scanner: scanners) {
-            iterators.add(scanner.iterator());
-        }
-        return Iterators.concat(iterators.toArray(new Iterator[]{}));
-    }
-
-    @Override
-    public void close() {
-        //scanners do not close
-    }
-
-    @Override
-    public void setScanIterators(int i, String s, String s1) throws IOException {
-        if(scanners.size() == 0) {
-            throw new IllegalArgumentException("Set Ranges first to initialize the underlying scanners"); //TODO: if we save this info we don't need this check
-        }
-        for(Scanner scanner: scanners) {
-            scanner.setScanIterators(i, s, s1);
-        }
-    }
-
-    @Override
-    public void setScanIteratorOption(String s, String s1, String s2) {
-        if(scanners.size() == 0) {
-            throw new IllegalArgumentException("Set Ranges first to initialize the underlying scanners"); //TODO: if we save this info we don't need this check
-        }
-        for(Scanner scanner: scanners) {
-            scanner.setScanIteratorOption(s, s1, s2);
-        }
-    }
-
-    @Override
-    public void setupRegex(String s, int i) throws IOException {
-        if(scanners.size() == 0) {
-            throw new IllegalArgumentException("Set Ranges first to initialize the underlying scanners"); //TODO: if we save this info we don't need this check
-        }
-        for(Scanner scanner: scanners) {
-            scanner.setupRegex(s, i);
-        }
-    }
-
-    @Override
-    public void setRowRegex(String s) {
-        if(scanners.size() == 0) {
-            throw new IllegalArgumentException("Set Ranges first to initialize the underlying scanners"); //TODO: if we save this info we don't need this check
-        }
-        for(Scanner scanner: scanners) {
-            scanner.setRowRegex(s);
-        }
-    }
-
-    @Override
-    public void setColumnFamilyRegex(String s) {
-        if(scanners.size() == 0) {
-            throw new IllegalArgumentException("Set Ranges first to initialize the underlying scanners"); //TODO: if we save this info we don't need this check
-        }
-        for(Scanner scanner: scanners) {
-            scanner.setColumnFamilyRegex(s);
-        }
-    }
-
-    @Override
-    public void setColumnQualifierRegex(String s) {
-        if(scanners.size() == 0) {
-            throw new IllegalArgumentException("Set Ranges first to initialize the underlying scanners"); //TODO: if we save this info we don't need this check
-        }
-        for(Scanner scanner: scanners) {
-            scanner.setColumnQualifierRegex(s);
-        }
-    }
-
-    @Override
-    public void setValueRegex(String s) {
-        if(scanners.size() == 0) {
-            throw new IllegalArgumentException("Set Ranges first to initialize the underlying scanners"); //TODO: if we save this info we don't need this check
-        }
-        for(Scanner scanner: scanners) {
-            scanner.setValueRegex(s);
-        }
-    }
-
-    @Override
-    public void fetchColumnFamily(Text text) {
-        if(scanners.size() == 0) {
-            throw new IllegalArgumentException("Set Ranges first to initialize the underlying scanners"); //TODO: if we save this info we don't need this check
-        }
-        for(Scanner scanner: scanners) {
-            scanner.fetchColumnFamily(text);
-        }
-    }
-
-    @Override
-    public void fetchColumn(Text text, Text text1) {
-        if(scanners.size() == 0) {
-            throw new IllegalArgumentException("Set Ranges first to initialize the underlying scanners"); //TODO: if we save this info we don't need this check
-        }
-        for(Scanner scanner: scanners) {
-            scanner.fetchColumn(text, text1);
-        }
-    }
-
-    @Override
-    public void clearColumns() {
-        if(scanners.size() == 0) {
-            throw new IllegalArgumentException("Set Ranges first to initialize the underlying scanners"); //TODO: if we save this info we don't need this check
-        }
-        for(Scanner scanner: scanners) {
-            scanner.clearColumns();
-        }
-    }
-
-    @Override
-    public void clearScanIterators() {
-        if(scanners.size() == 0) {
-            throw new IllegalArgumentException("Set Ranges first to initialize the underlying scanners"); //TODO: if we save this info we don't need this check
-        }
-        for(Scanner scanner: scanners) {
-            scanner.clearScanIterators();
-        }
-    }
-}

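The TODO in iterator() above asks for lazy loading, so that the next Scanner is opened only after the previous one is exhausted (the current code materializes every scanner's iterator up front). A minimal plain-Java sketch of that pattern follows; the class name LazyConcatIterator is an illustrative assumption, not code from this repository, and it leans only on Scanner being an Iterable over Key/Value entries:

    import java.util.Collections;
    import java.util.Iterator;
    import java.util.List;
    import java.util.Map;
    import java.util.NoSuchElementException;

    public class LazyConcatIterator<K, V> implements Iterator<Map.Entry<K, V>> {

        private final Iterator<? extends Iterable<Map.Entry<K, V>>> sources;
        private Iterator<Map.Entry<K, V>> current =
                Collections.<Map.Entry<K, V>>emptyList().iterator();

        public LazyConcatIterator(List<? extends Iterable<Map.Entry<K, V>>> scanners) {
            this.sources = scanners.iterator();
        }

        @Override
        public boolean hasNext() {
            // open the next source only once the current one is drained
            while (!current.hasNext() && sources.hasNext()) {
                current = sources.next().iterator();
            }
            return current.hasNext();
        }

        @Override
        public Map.Entry<K, V> next() {
            if (!hasNext()) {
                throw new NoSuchElementException();
            }
            return current.next();
        }

        @Override
        public void remove() {
            throw new UnsupportedOperationException();
        }
    }

RangesScanner.iterator() could then return a new LazyConcatIterator over its scanners instead of concatenating eagerly.
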
http://git-wip-us.apache.org/repos/asf/incubator-rya/blob/80faf06d/utils/cloudbase.utils/src/main/java/mvm/rya/cloudbase/utils/shard/HashAlgorithm.java
----------------------------------------------------------------------
diff --git a/utils/cloudbase.utils/src/main/java/mvm/rya/cloudbase/utils/shard/HashAlgorithm.java b/utils/cloudbase.utils/src/main/java/mvm/rya/cloudbase/utils/shard/HashAlgorithm.java
deleted file mode 100644
index e4afe27..0000000
--- a/utils/cloudbase.utils/src/main/java/mvm/rya/cloudbase/utils/shard/HashAlgorithm.java
+++ /dev/null
@@ -1,9 +0,0 @@
-package mvm.rya.cloudbase.utils.shard;
-
-public interface HashAlgorithm {
-
-  /**
-   * @return a positive integer hash
-   */
-  long hash(final String k);
-}
\ No newline at end of file

http://git-wip-us.apache.org/repos/asf/incubator-rya/blob/80faf06d/utils/cloudbase.utils/src/main/java/mvm/rya/cloudbase/utils/shard/HashCodeHashAlgorithm.java
----------------------------------------------------------------------
diff --git a/utils/cloudbase.utils/src/main/java/mvm/rya/cloudbase/utils/shard/HashCodeHashAlgorithm.java b/utils/cloudbase.utils/src/main/java/mvm/rya/cloudbase/utils/shard/HashCodeHashAlgorithm.java
deleted file mode 100644
index a43ea90..0000000
--- a/utils/cloudbase.utils/src/main/java/mvm/rya/cloudbase/utils/shard/HashCodeHashAlgorithm.java
+++ /dev/null
@@ -1,14 +0,0 @@
-package mvm.rya.cloudbase.utils.shard;
-
-/**
- * HashAlgorithm backed by String.hashCode(). Note that
- * Math.abs(Integer.MIN_VALUE) is itself negative, so the "positive integer
- * hash" contract has one pathological edge case.
- */
-public class HashCodeHashAlgorithm implements HashAlgorithm{
-    @Override
-    public long hash(String k) {
-        return Math.abs(k.hashCode());
-    }
-}

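One caveat with the deleted HashCodeHashAlgorithm: Math.abs(Integer.MIN_VALUE) is itself negative, so Math.abs(k.hashCode()) does not quite guarantee the positive result the HashAlgorithm contract advertises. A sketch of an overflow-safe variant against the same interface (the class name is hypothetical):

    public class SignMaskHashAlgorithm implements HashAlgorithm {
        @Override
        public long hash(String k) {
            // widening to long and masking the sign bit is non-negative for
            // every input, including strings whose hashCode() is Integer.MIN_VALUE
            return k.hashCode() & 0x7fffffffL;
        }
    }
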
http://git-wip-us.apache.org/repos/asf/incubator-rya/blob/80faf06d/utils/cloudbase.utils/src/main/java/mvm/rya/cloudbase/utils/shard/ShardedBatchWriter.java
----------------------------------------------------------------------
diff --git a/utils/cloudbase.utils/src/main/java/mvm/rya/cloudbase/utils/shard/ShardedBatchWriter.java b/utils/cloudbase.utils/src/main/java/mvm/rya/cloudbase/utils/shard/ShardedBatchWriter.java
deleted file mode 100644
index d1c0ec1..0000000
--- a/utils/cloudbase.utils/src/main/java/mvm/rya/cloudbase/utils/shard/ShardedBatchWriter.java
+++ /dev/null
@@ -1,51 +0,0 @@
-package mvm.rya.cloudbase.utils.shard;
-
-import cloudbase.core.client.MutationsRejectedException;
-import cloudbase.core.data.Mutation;
-import cloudbase.core.util.ArgumentChecker;
-
-/**
- * Batch writer that routes each mutation to the shard table chosen by its key.
- * TODO: Think about what exception to use here
- */
-public class ShardedBatchWriter {
-    private ShardedConnector shardedConnector;
-
-    public ShardedBatchWriter(ShardedConnector shardedConnector) {
-        ArgumentChecker.notNull(shardedConnector);
-        this.shardedConnector = shardedConnector;
-    }
-
-    //addMutation
-    public void addMutation(Mutation mutation, String key) throws MutationsRejectedException {
-        shardedConnector.addMutation(mutation, key);
-    }
-
-    public void addMutations(Iterable<Mutation> mutations, String key) throws MutationsRejectedException {
-        shardedConnector.addMutations(mutations, key);
-    }
-    //flush
-    public void flush() throws MutationsRejectedException {
-        shardedConnector.commitWriters();
-    }
-    public void flush(String key) throws MutationsRejectedException {
-        shardedConnector.retrieveBatchWriter(key).flush();
-    }
-    //close
-    public void close() throws MutationsRejectedException {
-        //commit?
-        flush();
-        //maybe do nothing here because the writers are in the connector
-    }
-
-    public ShardedConnector getShardedConnector() {
-        return shardedConnector;
-    }
-
-    public void setShardedConnector(ShardedConnector shardedConnector) {
-        this.shardedConnector = shardedConnector;
-    }
-}

http://git-wip-us.apache.org/repos/asf/incubator-rya/blob/80faf06d/utils/cloudbase.utils/src/main/java/mvm/rya/cloudbase/utils/shard/ShardedConnector.java
----------------------------------------------------------------------
diff --git a/utils/cloudbase.utils/src/main/java/mvm/rya/cloudbase/utils/shard/ShardedConnector.java b/utils/cloudbase.utils/src/main/java/mvm/rya/cloudbase/utils/shard/ShardedConnector.java
deleted file mode 100644
index 40d513e..0000000
--- a/utils/cloudbase.utils/src/main/java/mvm/rya/cloudbase/utils/shard/ShardedConnector.java
+++ /dev/null
@@ -1,158 +0,0 @@
-package mvm.rya.cloudbase.utils.shard;
-
-import cloudbase.core.client.*;
-import cloudbase.core.client.admin.TableOperations;
-import cloudbase.core.data.Mutation;
-import cloudbase.core.security.Authorizations;
-import cloudbase.core.util.ArgumentChecker;
-import mvm.rya.cloudbase.utils.scanner.BatchScannerList;
-
-import java.util.ArrayList;
-import java.util.HashMap;
-import java.util.List;
-import java.util.Map;
-
-/**
- * Spreads data across numShards tables named tablePrefix + shardId and
- * routes each key to its shard with a pluggable HashAlgorithm.
- */
-public class ShardedConnector {
-    //TODO: Use Ketama and perform proper Consistent Hashing
-    private Connector connector;
-    private int numShards;
-    private String tablePrefix;
-    private HashAlgorithm hashAlgorithm;
-
-    private Map<String, BatchWriter> writers = new HashMap<String, BatchWriter>();
-    private boolean initialized = false;
-
-    public ShardedConnector(Connector connector, int numShards, String tablePrefix, HashAlgorithm hashAlgorithm) {
-        ArgumentChecker.notNull(connector);
-        ArgumentChecker.notNull(tablePrefix);
-        if (numShards <= 0) {
-            throw new IllegalArgumentException("Number of shards must be positive");
-        }
-        if (hashAlgorithm == null) {
-            this.hashAlgorithm = new HashCodeHashAlgorithm(); // default to String.hashCode()-based hashing
-        } else {
-            this.hashAlgorithm = hashAlgorithm;
-        }
-        this.connector = connector;
-        this.numShards = numShards;
-        this.tablePrefix = tablePrefix;
-    }
-
-    //createShardedBatchScanner
-    public BatchScanner createBatchScanner(String key, Authorizations authorizations, int numQueryThreads) throws TableNotFoundException {
-        List<BatchScanner> scanners = new ArrayList<BatchScanner>();
-        if (key != null) {
-            String shardTableName = buildShardTablename(key);
-            scanners.add(connector.createBatchScanner(shardTableName, authorizations, numQueryThreads));
-        } else {
-            //TODO: Use Ketama to do proper Consistent Hashing
-            for (int i = 0; i < numShards; i++) {
-                String shardTablename = buildShardTablename(i);
-                scanners.add(connector.createBatchScanner(shardTablename, authorizations, numQueryThreads)); //TODO: this opens scanners.size() * numQueryThreads threads in total.
-            }
-        }
-        return new BatchScannerList(scanners);
-    }
-    //createShardedScanner
-
-    public ShardedBatchWriter createBatchWriter() throws TableNotFoundException {
-        return new ShardedBatchWriter(this);
-    }
-
-
-    protected void addMutation(Mutation mutation, String key) throws MutationsRejectedException {
-        retrieveBatchWriter(key).addMutation(mutation);
-    }
-
-    protected void addMutations(Iterable<Mutation> mutations, String key) throws MutationsRejectedException {
-        retrieveBatchWriter(key).addMutations(mutations);
-    }
-
-    public void init() throws Exception {
-        if (isInitialized()) {
-            throw new UnsupportedOperationException("ShardedConnector already initialized");
-        }
-        //init tables
-        TableOperations tableOperations = connector.tableOperations();
-        //create writers
-        //TODO: Use Ketama to do proper Consistent Hashing
-        for (int i = 0; i < numShards; i++) {
-            String shardTablename = buildShardTablename(i);
-            if (!tableOperations.exists(shardTablename)) {
-                tableOperations.create(shardTablename);
-            }
-            writers.put(shardTablename, connector.createBatchWriter(shardTablename, 1000000l, 60000l, 2)); //TODO: configurable
-        }
-
-        initialized = true;
-    }
-
-    public void close() throws Exception {
-        //close writers
-        for (Map.Entry<String, BatchWriter> entry : writers.entrySet()) {
-            entry.getValue().close();
-        }
-        initialized = false;
-    }
-
-    protected BatchWriter retrieveBatchWriter(String key) {
-        String tableName = buildShardTablename(key);
-        return writers.get(tableName);
-    }
-
-    protected void commitWriters() throws MutationsRejectedException {
-        for (Map.Entry<String, BatchWriter> entry : writers.entrySet()) {
-            entry.getValue().flush();
-        }
-    }
-
-    protected String buildShardTablename(String key) {
-        long shard = hashAlgorithm.hash(key) % numShards;
-        return buildShardTablename(shard);
-    }
-
-    protected String buildShardTablename(long shardId) {
-        return tablePrefix + shardId;
-    }
-
-
-    public Connector getConnector() {
-        return connector;
-    }
-
-    public void setConnector(Connector connector) {
-        this.connector = connector;
-    }
-
-    public int getNumShards() {
-        return numShards;
-    }
-
-    public void setNumShards(int numShards) {
-        this.numShards = numShards;
-    }
-
-    public String getTablePrefix() {
-        return tablePrefix;
-    }
-
-    public void setTablePrefix(String tablePrefix) {
-        this.tablePrefix = tablePrefix;
-    }
-
-    public HashAlgorithm getHashAlgorithm() {
-        return hashAlgorithm;
-    }
-
-    public void setHashAlgorithm(HashAlgorithm hashAlgorithm) {
-        this.hashAlgorithm = hashAlgorithm;
-    }
-
-    public boolean isInitialized() {
-        return initialized;
-    }
-}

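The TODOs scattered through ShardedConnector all point at Ketama-style consistent hashing, and the reason is worth spelling out: with hashAlgorithm.hash(key) % numShards, changing the shard count remaps almost every key to a different table, whereas a hash ring moves only roughly 1/numShards of the keys. A self-contained sketch of such a ring; ConsistentHashRing, virtualNodesPerShard, and the MD5-based positioning are illustrative assumptions, not project code:

    import java.nio.charset.StandardCharsets;
    import java.security.MessageDigest;
    import java.security.NoSuchAlgorithmException;
    import java.util.SortedMap;
    import java.util.TreeMap;

    public class ConsistentHashRing {
        private final TreeMap<Long, String> ring = new TreeMap<Long, String>();

        public ConsistentHashRing(int numShards, String tablePrefix, int virtualNodesPerShard) {
            // several virtual nodes per shard smooth out the key distribution
            for (int shard = 0; shard < numShards; shard++) {
                for (int v = 0; v < virtualNodesPerShard; v++) {
                    ring.put(hash(tablePrefix + shard + "#" + v), tablePrefix + shard);
                }
            }
        }

        /** Walk clockwise from the key's position to the first shard point. */
        public String shardFor(String key) {
            SortedMap<Long, String> tail = ring.tailMap(hash(key));
            return tail.isEmpty() ? ring.firstEntry().getValue() : tail.get(tail.firstKey());
        }

        private static long hash(String s) {
            try {
                byte[] d = MessageDigest.getInstance("MD5").digest(s.getBytes(StandardCharsets.UTF_8));
                // fold the first 8 digest bytes into a long position on the ring
                long h = 0;
                for (int i = 0; i < 8; i++) {
                    h = (h << 8) | (d[i] & 0xff);
                }
                return h;
            } catch (NoSuchAlgorithmException e) {
                throw new IllegalStateException(e);
            }
        }
    }

Swapping buildShardTablename(key) for ring.shardFor(key) is the shape of change the TODO implies; the per-shard BatchWriter map would stay as is.
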
http://git-wip-us.apache.org/repos/asf/incubator-rya/blob/80faf06d/utils/cloudbase.utils/src/test/java/cloudbase/core/client/impl/DocumentTabletServerBatchReaderIteratorTest.java
----------------------------------------------------------------------
diff --git a/utils/cloudbase.utils/src/test/java/cloudbase/core/client/impl/DocumentTabletServerBatchReaderIteratorTest.java b/utils/cloudbase.utils/src/test/java/cloudbase/core/client/impl/DocumentTabletServerBatchReaderIteratorTest.java
deleted file mode 100644
index a795929..0000000
--- a/utils/cloudbase.utils/src/test/java/cloudbase/core/client/impl/DocumentTabletServerBatchReaderIteratorTest.java
+++ /dev/null
@@ -1,15 +0,0 @@
-package cloudbase.core.client.impl;
-
-import junit.framework.TestCase;
-
-/**
- * Class DocumentTabletServerBatchReaderIteratorTest
- * Date: Sep 8, 2011
- * Time: 9:11:00 AM
- */
-public class DocumentTabletServerBatchReaderIteratorTest extends TestCase {
-
-    public void testSomething() {
-        
-    }
-}

http://git-wip-us.apache.org/repos/asf/incubator-rya/blob/80faf06d/utils/cloudbase.utils/src/test/java/mvm/rya/cloudbase/utils/filters/TimeRangeFilterTest.java
----------------------------------------------------------------------
diff --git a/utils/cloudbase.utils/src/test/java/mvm/rya/cloudbase/utils/filters/TimeRangeFilterTest.java b/utils/cloudbase.utils/src/test/java/mvm/rya/cloudbase/utils/filters/TimeRangeFilterTest.java
deleted file mode 100644
index 15cf731..0000000
--- a/utils/cloudbase.utils/src/test/java/mvm/rya/cloudbase/utils/filters/TimeRangeFilterTest.java
+++ /dev/null
@@ -1,53 +0,0 @@
-package mvm.rya.cloudbase.utils.filters;
-
-import cloudbase.core.data.Key;
-import junit.framework.TestCase;
-import org.apache.hadoop.io.Text;
-
-import java.util.HashMap;
-import java.util.Map;
-
-/**
- * Class TimeRangeFilterTest
- * Date: Mar 23, 2011
- * Time: 10:08:58 AM
- */
-public class TimeRangeFilterTest extends TestCase {
-
-    public void testTimeRange() throws Exception {
-        TimeRangeFilter filter = new TimeRangeFilter();
-        Map<String, String> map = new HashMap<String, String>();
-        map.put(TimeRangeFilter.TIME_RANGE_PROP, "10000");
-        map.put(TimeRangeFilter.START_TIME_PROP, "1010001");
-        filter.init(map);
-
-        assertFalse(filter.accept(new Key(new Text("row1"), 1000000), null));
-        assertTrue(filter.accept(new Key(new Text("row1"), 1000001), null));
-        assertTrue(filter.accept(new Key(new Text("row1"), 1000011), null));
-        assertTrue(filter.accept(new Key(new Text("row1"), 1010001), null));
-        assertFalse(filter.accept(new Key(new Text("row1"), 1010002), null));
-        assertFalse(filter.accept(new Key(new Text("row1"), 1010012), null));
-    }
-
-    public void testTimeRangeSetOptions() throws Exception {
-        try {
-            TimeRangeFilter filter = new TimeRangeFilter();
-            Map<String, String> map = new HashMap<String, String>();
-            filter.init(map);
-            fail();
-        } catch (Exception e) {
-        }
-    }
-
-    public void testTimeRangeCurrentTime() throws Exception {
-        long currentTime = System.currentTimeMillis();
-        TimeRangeFilter filter = new TimeRangeFilter();
-        Map<String, String> map = new HashMap<String, String>();
-        map.put(TimeRangeFilter.TIME_RANGE_PROP, "10000");
-        filter.init(map);
-
-        assertFalse(filter.accept(new Key(new Text("row1"), currentTime - 15000), null));
-        assertTrue(filter.accept(new Key(new Text("row1"), currentTime - 5000), null));
-        assertFalse(filter.accept(new Key(new Text("row1"), currentTime + 5000), null));
-    }
-}

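Read together, the assertions above pin down TimeRangeFilter's acceptance window: with startTime defaulting to the current time when START_TIME_PROP is absent, a key timestamp ts is accepted iff startTime - timeRange <= ts <= startTime. A one-method restatement of that inferred predicate (a reading of the test, not the filter's source):

    static boolean inWindow(long ts, long startTime, long timeRange) {
        // e.g. startTime = 1010001, timeRange = 10000 accepts 1000001..1010001,
        // matching the rejects at 1000000 and 1010002 in the test above
        return ts >= startTime - timeRange && ts <= startTime;
    }
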
http://git-wip-us.apache.org/repos/asf/incubator-rya/blob/80faf06d/utils/cloudbase.utils/src/test/java/mvm/rya/cloudbase/utils/shard/ShardedConnectorTest.java
----------------------------------------------------------------------
diff --git a/utils/cloudbase.utils/src/test/java/mvm/rya/cloudbase/utils/shard/ShardedConnectorTest.java b/utils/cloudbase.utils/src/test/java/mvm/rya/cloudbase/utils/shard/ShardedConnectorTest.java
deleted file mode 100644
index 96afedb..0000000
--- a/utils/cloudbase.utils/src/test/java/mvm/rya/cloudbase/utils/shard/ShardedConnectorTest.java
+++ /dev/null
@@ -1,76 +0,0 @@
-package mvm.rya.cloudbase.utils.shard;
-
-import cloudbase.core.CBConstants;
-import cloudbase.core.client.BatchScanner;
-import cloudbase.core.client.Connector;
-import cloudbase.core.client.admin.TableOperations;
-import cloudbase.core.client.mock.MockInstance;
-import cloudbase.core.data.Key;
-import cloudbase.core.data.Mutation;
-import cloudbase.core.data.Range;
-import cloudbase.core.data.Value;
-import com.google.common.collect.Iterators;
-import junit.framework.TestCase;
-import org.apache.hadoop.io.Text;
-
-import java.util.*;
-
-/**
- * Tests shard table creation and key-routed reads and writes through ShardedConnector.
- */
-public class ShardedConnectorTest extends TestCase {
-    public static final Text CF = new Text("cf");
-    public static final Text CQ = new Text("cq");
-    public static final Value EMPTY_VALUE = new Value(new byte[0]);
-    private ShardedConnector shardedConnector;
-    private Connector connector;
-
-    @Override
-    public void setUp() throws Exception {
-        super.setUp();
-        connector = new MockInstance("shardConnector").getConnector("", "".getBytes());
-        shardedConnector = new ShardedConnector(connector, 10, "tst_", null);
-        shardedConnector.init();
-    }
-
-    @Override
-    public void tearDown() throws Exception {
-        super.tearDown();
-        shardedConnector.close();
-    }
-
-    public void testTablesCreated() throws Exception {
-        TableOperations tableOperations = connector.tableOperations();
-        SortedSet<String> list = tableOperations.list();
-        assertTrue(list.containsAll(Arrays.asList("tst_0","tst_1","tst_2","tst_3","tst_4","tst_5","tst_6","tst_7","tst_8","tst_9")));
-    }
-    
-    public void testAddMutationByKey() throws Exception {
-        Mutation mutation = new Mutation(new Text("a"));
-        mutation.put(CF, CQ, EMPTY_VALUE);
-        ShardedBatchWriter batchWriter = shardedConnector.createBatchWriter();
-        batchWriter.addMutation(mutation, "1");
-        batchWriter.flush();
-
-        BatchScanner batchScanner = shardedConnector.createBatchScanner("1", CBConstants.NO_AUTHS, 1);
-        batchScanner.setRanges(Collections.singleton(new Range()));
-        Iterator<Map.Entry<Key,Value>> iterator = batchScanner.iterator();
-        assertEquals(1, Iterators.size(iterator));
-        batchScanner.close();
-
-        batchScanner = shardedConnector.createBatchScanner(null, CBConstants.NO_AUTHS, 1);
-        batchScanner.setRanges(Collections.singleton(new Range()));
-        iterator = batchScanner.iterator();
-        assertEquals(1, Iterators.size(iterator));
-        batchScanner.close();
-
-        batchScanner = shardedConnector.createBatchScanner("2", CBConstants.NO_AUTHS, 1);
-        batchScanner.setRanges(Collections.singleton(new Range()));
-        iterator = batchScanner.iterator();
-        assertEquals(0, Iterators.size(iterator));
-        batchScanner.close();
-    }
-}

http://git-wip-us.apache.org/repos/asf/incubator-rya/blob/80faf06d/utils/pom.xml
----------------------------------------------------------------------
diff --git a/utils/pom.xml b/utils/pom.xml
deleted file mode 100644
index a10cbdb..0000000
--- a/utils/pom.xml
+++ /dev/null
@@ -1,22 +0,0 @@
-<?xml version="1.0" encoding="utf-8"?>
-<project xmlns="http://maven.apache.org/POM/4.0.0" xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance" xsi:schemaLocation="http://maven.apache.org/POM/4.0.0 http://maven.apache.org/xsd/maven-4.0.0.xsd">
-
-    <modelVersion>4.0.0</modelVersion>
-    <parent>
-        <groupId>mvm.rya</groupId>
-        <artifactId>parent</artifactId>
-        <version>3.2.10-SNAPSHOT</version>
-    </parent>
-    <artifactId>rya.utils</artifactId>
-    <packaging>pom</packaging>
-    <name>${project.groupId}.${project.artifactId}</name>
-
-    <profiles>
-        <profile>
-            <id>cloudbase</id>
-            <modules>
-                <module>cloudbase.utils</module>
-            </modules>
-        </profile>
-    </profiles>
-</project>

http://git-wip-us.apache.org/repos/asf/incubator-rya/blob/80faf06d/web/pom.xml
----------------------------------------------------------------------
diff --git a/web/pom.xml b/web/pom.xml
index 1ef8e90..bcc7f7a 100644
--- a/web/pom.xml
+++ b/web/pom.xml
@@ -1,15 +1,37 @@
 <?xml version="1.0" encoding="utf-8"?>
-<project xmlns="http://maven.apache.org/POM/4.0.0" xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance" xsi:schemaLocation="http://maven.apache.org/POM/4.0.0 http://maven.apache.org/xsd/maven-4.0.0.xsd">
 
+<!--
+Licensed to the Apache Software Foundation (ASF) under one
+or more contributor license agreements.  See the NOTICE file
+distributed with this work for additional information
+regarding copyright ownership.  The ASF licenses this file
+to you under the Apache License, Version 2.0 (the
+"License"); you may not use this file except in compliance
+with the License.  You may obtain a copy of the License at
+
+  http://www.apache.org/licenses/LICENSE-2.0
+
+Unless required by applicable law or agreed to in writing,
+software distributed under the License is distributed on an
+"AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+KIND, either express or implied.  See the License for the
+specific language governing permissions and limitations
+under the License.
+-->
+
+<project xmlns="http://maven.apache.org/POM/4.0.0" xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance" xsi:schemaLocation="http://maven.apache.org/POM/4.0.0 http://maven.apache.org/xsd/maven-4.0.0.xsd">
     <modelVersion>4.0.0</modelVersion>
     <parent>
-        <groupId>mvm.rya</groupId>
-        <artifactId>parent</artifactId>
+        <groupId>org.apache.rya</groupId>
+        <artifactId>rya-project</artifactId>
         <version>3.2.10-SNAPSHOT</version>
     </parent>
+
     <artifactId>rya.web</artifactId>
+    <name>Apache Rya Web Projects</name>
+
     <packaging>pom</packaging>
-    <name>${project.groupId}.${project.artifactId}</name>
+
     <modules>
         <module>web.rya</module>
     </modules>

http://git-wip-us.apache.org/repos/asf/incubator-rya/blob/80faf06d/web/web.rya/pom.xml
----------------------------------------------------------------------
diff --git a/web/web.rya/pom.xml b/web/web.rya/pom.xml
index fdba91e..204bbef 100644
--- a/web/web.rya/pom.xml
+++ b/web/web.rya/pom.xml
@@ -1,66 +1,102 @@
+<?xml version='1.0'?>
+<!--
+Licensed to the Apache Software Foundation (ASF) under one
+or more contributor license agreements.  See the NOTICE file
+distributed with this work for additional information
+regarding copyright ownership.  The ASF licenses this file
+to you under the Apache License, Version 2.0 (the
+"License"); you may not use this file except in compliance
+with the License.  You may obtain a copy of the License at
+
+  http://www.apache.org/licenses/LICENSE-2.0
+
+Unless required by applicable law or agreed to in writing,
+software distributed under the License is distributed on an
+"AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+KIND, either express or implied.  See the License for the
+specific language governing permissions and limitations
+under the License.
+-->
+
 <project xmlns="http://maven.apache.org/POM/4.0.0" xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance" xsi:schemaLocation="http://maven.apache.org/POM/4.0.0 http://maven.apache.org/maven-v4_0_0.xsd">
     <modelVersion>4.0.0</modelVersion>
     <parent>
-        <groupId>mvm.rya</groupId>
+        <groupId>org.apache.rya</groupId>
         <artifactId>rya.web</artifactId>
         <version>3.2.10-SNAPSHOT</version>
     </parent>
+
     <artifactId>web.rya</artifactId>
-    <name>${project.groupId}.${project.artifactId}</name>
+    <name>Apache Rya Web Implementation</name>
+
     <packaging>war</packaging>
-    <properties>
-        <spring.version>3.2.6.RELEASE</spring.version>
-    </properties>
+
     <dependencies>
         <dependency>
-            <groupId>junit</groupId>
-            <artifactId>junit</artifactId>
-            <scope>test</scope>
+            <groupId>org.apache.rya</groupId>
+            <artifactId>rya.api</artifactId>
         </dependency>
         <dependency>
-            <groupId>org.springframework.data</groupId>
-            <artifactId>spring-data-hadoop</artifactId>
-            <version>1.0.2.RELEASE</version>
+            <groupId>org.apache.rya</groupId>
+            <artifactId>rya.sail</artifactId>
+        </dependency>
+        <dependency>
+            <groupId>org.apache.rya</groupId>
+            <artifactId>accumulo.rya</artifactId>
+        </dependency>
+        <dependency>
+            <groupId>org.apache.rya</groupId>
+            <artifactId>rya.prospector</artifactId>
+        </dependency>
+        <dependency>
+            <groupId>org.apache.rya</groupId>
+            <artifactId>rya.indexing</artifactId>
         </dependency>
 
         <dependency>
-            <groupId>mvm.rya</groupId>
-            <artifactId>rya.api</artifactId>
+            <groupId>org.openrdf.sesame</groupId>
+            <artifactId>sesame-rio-rdfxml</artifactId>
         </dependency>
         <dependency>
-            <groupId>mvm.rya</groupId>
-            <artifactId>rya.sail.impl</artifactId>
+            <groupId>org.openrdf.sesame</groupId>
+            <artifactId>sesame-queryresultio-sparqljson</artifactId>
+        </dependency>
+
+        <dependency>
+            <groupId>org.springframework.data</groupId>
+            <artifactId>spring-data-hadoop</artifactId>
         </dependency>
+
         <dependency>
             <groupId>org.springframework</groupId>
             <artifactId>spring-context</artifactId>
-            <version>${spring.version}</version>
         </dependency>
         <dependency>
             <groupId>org.springframework</groupId>
             <artifactId>spring-core</artifactId>
-            <version>${spring.version}</version>
         </dependency>
         <dependency>
             <groupId>org.springframework</groupId>
             <artifactId>spring-web</artifactId>
-            <version>${spring.version}</version>
         </dependency>
         <dependency>
             <groupId>org.springframework</groupId>
             <artifactId>spring-webmvc</artifactId>
-            <version>${spring.version}</version>
         </dependency>
         <dependency>
             <groupId>org.springframework</groupId>
             <artifactId>spring-beans</artifactId>
-            <version>${spring.version}</version>
         </dependency>
         <dependency>
             <groupId>org.springframework</groupId>
             <artifactId>spring-test</artifactId>
-            <version>${spring.version}</version>
         </dependency>
+
+        <dependency>
+            <groupId>org.hamcrest</groupId>
+            <artifactId>hamcrest-all</artifactId>
+        </dependency>
+
         <dependency>
             <groupId>org.slf4j</groupId>
             <artifactId>slf4j-log4j12</artifactId>
@@ -68,40 +104,17 @@
         <dependency>
             <groupId>commons-pool</groupId>
             <artifactId>commons-pool</artifactId>
-            <version>1.5.1</version>
-        </dependency>
-        <dependency>
-            <groupId>mvm.rya</groupId>
-            <artifactId>accumulo.rya</artifactId>
-        </dependency>
-        <dependency>
-            <groupId>mvm.rya</groupId>
-            <artifactId>rya.prospector</artifactId>
-            <version>${project.version}</version>
-        </dependency>
-       <dependency>
-            <groupId>org.openrdf.sesame</groupId>
-            <artifactId>sesame-rio-rdfxml</artifactId>
- 			<version>${openrdf.sesame.version}</version>
         </dependency>
+
         <dependency>
             <groupId>junit</groupId>
             <artifactId>junit</artifactId>
+            <scope>test</scope>
         </dependency>
-		<dependency>
-			<groupId>org.openrdf.sesame</groupId>
-			<artifactId>sesame-queryresultio-sparqljson</artifactId>
-			<version>${openrdf.sesame.version}</version>
-		</dependency>
         <dependency>
             <groupId>org.mockito</groupId>
-            <artifactId>mockito-core</artifactId>
-            <version>1.9.5</version>
-        </dependency>
-        <dependency>
-            <groupId>org.hamcrest</groupId>
-            <artifactId>hamcrest-all</artifactId>
-            <version>1.3</version>
+            <artifactId>mockito-all</artifactId>
+            <scope>test</scope>
         </dependency>
     </dependencies>
     <build>
@@ -118,60 +131,6 @@
                     </webAppConfig>
                 </configuration>
             </plugin>
-            <plugin>
-                <groupId>org.apache.maven.plugins</groupId>
-                <artifactId>maven-compiler-plugin</artifactId>
-                <configuration>
-                    <source>1.6</source>
-                    <target>1.6</target>
-                </configuration>
-            </plugin>
         </plugins>
     </build>
-
-    <profiles>
-        <profile>
-            <id>accumulo</id>
-            <activation>
-                <activeByDefault>true</activeByDefault>
-            </activation>
-            <dependencies>
-                <dependency>
-                    <groupId>org.apache.accumulo</groupId>
-                    <artifactId>accumulo-core</artifactId>
-                </dependency>
-                <dependency>
-                    <groupId>mvm.rya</groupId>
-                    <artifactId>accumulo.iterators</artifactId>
-                </dependency>
-            </dependencies>
-        </profile>
-        <profile>
-            <id>cloudbase</id>
-            <activation>
-                <activeByDefault>false</activeByDefault>
-            </activation>
-            <dependencies>
-                <dependency>
-                    <groupId>com.texeltek</groupId>
-                    <artifactId>accumulo-cloudbase-shim</artifactId>
-                </dependency>
-                <dependency>
-                    <groupId>mvm.rya</groupId>
-                    <artifactId>cloudbase.iterators</artifactId>
-                </dependency>
-            </dependencies>
-        </profile>
-        <profile>
-            <id>indexing</id>
-            <dependencies>
-                <dependency>
-                    <groupId>mvm.rya</groupId>
-                    <artifactId>rya.indexing</artifactId>
-                    <version>${project.version}</version>
-                </dependency>
-            </dependencies>
-        </profile>
-    </profiles>
-
 </project>
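
The <version> tags dropped above (Spring, commons-pool, Sesame, Mockito, Hamcrest) work because versions are now resolved from a <dependencyManagement> block in the new Apache parent POM. A minimal sketch of the managing side, assuming property names like ${spring.version} carry over (the exact entries and properties in the real parent POM may differ):

    <!-- hypothetical excerpt from the parent pom.xml -->
    <dependencyManagement>
        <dependencies>
            <dependency>
                <groupId>org.springframework</groupId>
                <artifactId>spring-webmvc</artifactId>
                <version>${spring.version}</version>
            </dependency>
            <dependency>
                <groupId>org.mockito</groupId>
                <artifactId>mockito-all</artifactId>
                <!-- ${mockito.version} is an assumed property name -->
                <version>${mockito.version}</version>
            </dependency>
        </dependencies>
    </dependencyManagement>

Child modules such as web.rya then declare only groupId/artifactId (plus scope where needed, as with the junit and mockito-all test dependencies above), and version bumps happen in one place.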

http://git-wip-us.apache.org/repos/asf/incubator-rya/blob/80faf06d/web/web.rya/resources/environment.properties
----------------------------------------------------------------------
diff --git a/web/web.rya/resources/environment.properties b/web/web.rya/resources/environment.properties
index 405f285..7848a4e 100644
--- a/web/web.rya/resources/environment.properties
+++ b/web/web.rya/resources/environment.properties
@@ -1,3 +1,20 @@
+# Licensed to the Apache Software Foundation (ASF) under one
+# or more contributor license agreements.  See the NOTICE file
+# distributed with this work for additional information
+# regarding copyright ownership.  The ASF licenses this file
+# to you under the Apache License, Version 2.0 (the
+# "License"); you may not use this file except in compliance
+# with the License.  You may obtain a copy of the License at
+# 
+#   http://www.apache.org/licenses/LICENSE-2.0
+# 
+# Unless required by applicable law or agreed to in writing,
+# software distributed under the License is distributed on an
+# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+# KIND, either express or implied.  See the License for the
+# specific language governing permissions and limitations
+# under the License.
+
 instance.name=cloudbase
 instance.zk=localhost:2181
 instance.username=root
@@ -7,4 +24,4 @@ rya.displayqueryplan=true
 mongo.db.collectionprefix=rya_
 mongo.db.instance=localhost
 mongo.db.name=rya
-mongo.db.port=21017
\ No newline at end of file
+mongo.db.port=21017
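
For context, keys in environment.properties are typically surfaced to the Spring XML files that follow through a property placeholder; a minimal sketch, assuming the file sits on the classpath and the context namespace is declared (this bean wiring is illustrative, not copied from the Rya configs):

    <!-- illustrative only: exposes environment.properties keys as ${...} placeholders -->
    <context:property-placeholder location="classpath:environment.properties"/>

    <bean id="mongoClient" class="com.mongodb.MongoClient">
        <constructor-arg value="${mongo.db.instance}"/>
        <constructor-arg value="${mongo.db.port}" type="int"/>
    </bean>

Note that mongo.db.port is set to 21017 above, while MongoDB's default port is 27017, so the value is worth double-checking before use.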

http://git-wip-us.apache.org/repos/asf/incubator-rya/blob/80faf06d/web/web.rya/src/main/java/mvm/cloud/rdf/web/cloudbase/sail/AbstractRDFWebServlet.java
----------------------------------------------------------------------
diff --git a/web/web.rya/src/main/java/mvm/cloud/rdf/web/cloudbase/sail/AbstractRDFWebServlet.java b/web/web.rya/src/main/java/mvm/cloud/rdf/web/cloudbase/sail/AbstractRDFWebServlet.java
index 70dfc5b..313a3c3 100644
--- a/web/web.rya/src/main/java/mvm/cloud/rdf/web/cloudbase/sail/AbstractRDFWebServlet.java
+++ b/web/web.rya/src/main/java/mvm/cloud/rdf/web/cloudbase/sail/AbstractRDFWebServlet.java
@@ -1,24 +1,24 @@
-//package mvm.cloud.rdf.web.cloudbase.sail;
-
 /*
- * #%L
- * mvm.rya.web.rya
- * %%
- * Copyright (C) 2014 Rya
- * %%
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * You may obtain a copy of the License at
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
  * 
- *      http://www.apache.org/licenses/LICENSE-2.0
+ *   http://www.apache.org/licenses/LICENSE-2.0
  * 
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- * #L%
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied.  See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
  */
+
+//package mvm.cloud.rdf.web.cloudbase.sail;
+
 //
 //import cloudbase.core.client.Connector;
 //import cloudbase.core.client.ZooKeeperInstance;

http://git-wip-us.apache.org/repos/asf/incubator-rya/blob/80faf06d/web/web.rya/src/main/java/mvm/cloud/rdf/web/cloudbase/sail/DeleteDataServlet.java
----------------------------------------------------------------------
diff --git a/web/web.rya/src/main/java/mvm/cloud/rdf/web/cloudbase/sail/DeleteDataServlet.java b/web/web.rya/src/main/java/mvm/cloud/rdf/web/cloudbase/sail/DeleteDataServlet.java
index 5b922f7..661fe38 100644
--- a/web/web.rya/src/main/java/mvm/cloud/rdf/web/cloudbase/sail/DeleteDataServlet.java
+++ b/web/web.rya/src/main/java/mvm/cloud/rdf/web/cloudbase/sail/DeleteDataServlet.java
@@ -1,24 +1,24 @@
-//package mvm.cloud.rdf.web.cloudbase.sail;
-
 /*
- * #%L
- * mvm.rya.web.rya
- * %%
- * Copyright (C) 2014 Rya
- * %%
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * You may obtain a copy of the License at
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
  * 
- *      http://www.apache.org/licenses/LICENSE-2.0
+ *   http://www.apache.org/licenses/LICENSE-2.0
  * 
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- * #L%
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied.  See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
  */
+
+//package mvm.cloud.rdf.web.cloudbase.sail;
+
 //
 //import org.openrdf.query.QueryLanguage;
 //import org.openrdf.query.TupleQuery;

http://git-wip-us.apache.org/repos/asf/incubator-rya/blob/80faf06d/web/web.rya/src/main/java/mvm/cloud/rdf/web/cloudbase/sail/LoadDataServlet.java
----------------------------------------------------------------------
diff --git a/web/web.rya/src/main/java/mvm/cloud/rdf/web/cloudbase/sail/LoadDataServlet.java b/web/web.rya/src/main/java/mvm/cloud/rdf/web/cloudbase/sail/LoadDataServlet.java
index a557d57..175ef2a 100644
--- a/web/web.rya/src/main/java/mvm/cloud/rdf/web/cloudbase/sail/LoadDataServlet.java
+++ b/web/web.rya/src/main/java/mvm/cloud/rdf/web/cloudbase/sail/LoadDataServlet.java
@@ -1,24 +1,24 @@
-//package mvm.cloud.rdf.web.cloudbase.sail;
-
 /*
- * #%L
- * mvm.rya.web.rya
- * %%
- * Copyright (C) 2014 Rya
- * %%
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * You may obtain a copy of the License at
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
  * 
- *      http://www.apache.org/licenses/LICENSE-2.0
+ *   http://www.apache.org/licenses/LICENSE-2.0
  * 
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- * #L%
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied.  See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
  */
+
+//package mvm.cloud.rdf.web.cloudbase.sail;
+
 //
 //import org.openrdf.model.Resource;
 //import org.openrdf.repository.RepositoryConnection;

http://git-wip-us.apache.org/repos/asf/incubator-rya/blob/80faf06d/web/web.rya/src/main/java/mvm/cloud/rdf/web/cloudbase/sail/QueryDataServlet.java
----------------------------------------------------------------------
diff --git a/web/web.rya/src/main/java/mvm/cloud/rdf/web/cloudbase/sail/QueryDataServlet.java b/web/web.rya/src/main/java/mvm/cloud/rdf/web/cloudbase/sail/QueryDataServlet.java
index 8e4962c..dfcd035 100644
--- a/web/web.rya/src/main/java/mvm/cloud/rdf/web/cloudbase/sail/QueryDataServlet.java
+++ b/web/web.rya/src/main/java/mvm/cloud/rdf/web/cloudbase/sail/QueryDataServlet.java
@@ -1,24 +1,24 @@
-//package mvm.cloud.rdf.web.cloudbase.sail;
-
 /*
- * #%L
- * mvm.rya.web.rya
- * %%
- * Copyright (C) 2014 Rya
- * %%
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * You may obtain a copy of the License at
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
  * 
- *      http://www.apache.org/licenses/LICENSE-2.0
+ *   http://www.apache.org/licenses/LICENSE-2.0
  * 
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- * #L%
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied.  See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
  */
+
+//package mvm.cloud.rdf.web.cloudbase.sail;
+
 //
 //import RdfCloudTripleStoreConstants;
 //import RdfCloudTripleStoreConstants;

http://git-wip-us.apache.org/repos/asf/incubator-rya/blob/80faf06d/web/web.rya/src/main/java/mvm/cloud/rdf/web/cloudbase/sail/QuerySerqlDataServlet.java
----------------------------------------------------------------------
diff --git a/web/web.rya/src/main/java/mvm/cloud/rdf/web/cloudbase/sail/QuerySerqlDataServlet.java b/web/web.rya/src/main/java/mvm/cloud/rdf/web/cloudbase/sail/QuerySerqlDataServlet.java
index 11db0d6..b1eb5e3 100644
--- a/web/web.rya/src/main/java/mvm/cloud/rdf/web/cloudbase/sail/QuerySerqlDataServlet.java
+++ b/web/web.rya/src/main/java/mvm/cloud/rdf/web/cloudbase/sail/QuerySerqlDataServlet.java
@@ -1,24 +1,24 @@
-//package mvm.cloud.rdf.web.cloudbase.sail;
-
 /*
- * #%L
- * mvm.rya.web.rya
- * %%
- * Copyright (C) 2014 Rya
- * %%
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * You may obtain a copy of the License at
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
  * 
- *      http://www.apache.org/licenses/LICENSE-2.0
+ *   http://www.apache.org/licenses/LICENSE-2.0
  * 
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- * #L%
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied.  See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
  */
+
+//package mvm.cloud.rdf.web.cloudbase.sail;
+
 //
 //import org.openrdf.query.GraphQuery;
 //import org.openrdf.query.QueryLanguage;

http://git-wip-us.apache.org/repos/asf/incubator-rya/blob/80faf06d/web/web.rya/src/main/java/mvm/cloud/rdf/web/cloudbase/sail/RDFWebConstants.java
----------------------------------------------------------------------
diff --git a/web/web.rya/src/main/java/mvm/cloud/rdf/web/cloudbase/sail/RDFWebConstants.java b/web/web.rya/src/main/java/mvm/cloud/rdf/web/cloudbase/sail/RDFWebConstants.java
index 1bed3a7..16cfe71 100644
--- a/web/web.rya/src/main/java/mvm/cloud/rdf/web/cloudbase/sail/RDFWebConstants.java
+++ b/web/web.rya/src/main/java/mvm/cloud/rdf/web/cloudbase/sail/RDFWebConstants.java
@@ -1,24 +1,24 @@
-//package mvm.cloud.rdf.web.cloudbase.sail;
-
 /*
- * #%L
- * mvm.rya.web.rya
- * %%
- * Copyright (C) 2014 Rya
- * %%
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * You may obtain a copy of the License at
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
  * 
- *      http://www.apache.org/licenses/LICENSE-2.0
+ *   http://www.apache.org/licenses/LICENSE-2.0
  * 
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- * #L%
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied.  See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
  */
+
+//package mvm.cloud.rdf.web.cloudbase.sail;
+
 //
 ///**
 // * Interface RDFWebConstants

http://git-wip-us.apache.org/repos/asf/incubator-rya/blob/80faf06d/web/web.rya/src/main/java/mvm/cloud/rdf/web/sail/RdfController.java
----------------------------------------------------------------------
diff --git a/web/web.rya/src/main/java/mvm/cloud/rdf/web/sail/RdfController.java b/web/web.rya/src/main/java/mvm/cloud/rdf/web/sail/RdfController.java
index b3ff85f..bc6272a 100644
--- a/web/web.rya/src/main/java/mvm/cloud/rdf/web/sail/RdfController.java
+++ b/web/web.rya/src/main/java/mvm/cloud/rdf/web/sail/RdfController.java
@@ -1,25 +1,26 @@
 package mvm.cloud.rdf.web.sail;
 
 /*
- * #%L
- * mvm.rya.web.rya
- * %%
- * Copyright (C) 2014 Rya
- * %%
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * You may obtain a copy of the License at
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
  * 
- *      http://www.apache.org/licenses/LICENSE-2.0
+ *   http://www.apache.org/licenses/LICENSE-2.0
  * 
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- * #L%
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied.  See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
  */
 
+
+
 import static mvm.rya.api.RdfCloudTripleStoreConstants.AUTH_NAMESPACE;
 import static mvm.rya.api.RdfCloudTripleStoreConstants.VALUE_FACTORY;
 

http://git-wip-us.apache.org/repos/asf/incubator-rya/blob/80faf06d/web/web.rya/src/main/java/mvm/cloud/rdf/web/sail/ResultFormat.java
----------------------------------------------------------------------
diff --git a/web/web.rya/src/main/java/mvm/cloud/rdf/web/sail/ResultFormat.java b/web/web.rya/src/main/java/mvm/cloud/rdf/web/sail/ResultFormat.java
index 2be1ec3..7e763d5 100644
--- a/web/web.rya/src/main/java/mvm/cloud/rdf/web/sail/ResultFormat.java
+++ b/web/web.rya/src/main/java/mvm/cloud/rdf/web/sail/ResultFormat.java
@@ -1,25 +1,26 @@
 package mvm.cloud.rdf.web.sail;
 
 /*
- * #%L
- * mvm.rya.web.rya
- * %%
- * Copyright (C) 2014 Rya
- * %%
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * You may obtain a copy of the License at
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
  * 
- *      http://www.apache.org/licenses/LICENSE-2.0
+ *   http://www.apache.org/licenses/LICENSE-2.0
  * 
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- * #L%
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied.  See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
  */
 
+
+
 public enum ResultFormat {
     XML, JSON, JSONP
 

http://git-wip-us.apache.org/repos/asf/incubator-rya/blob/80faf06d/web/web.rya/src/main/java/mvm/cloud/rdf/web/sail/SecurityProviderImpl.java
----------------------------------------------------------------------
diff --git a/web/web.rya/src/main/java/mvm/cloud/rdf/web/sail/SecurityProviderImpl.java b/web/web.rya/src/main/java/mvm/cloud/rdf/web/sail/SecurityProviderImpl.java
index ab775d4..58fc0f4 100644
--- a/web/web.rya/src/main/java/mvm/cloud/rdf/web/sail/SecurityProviderImpl.java
+++ b/web/web.rya/src/main/java/mvm/cloud/rdf/web/sail/SecurityProviderImpl.java
@@ -1,5 +1,25 @@
 package mvm.cloud.rdf.web.sail;
 
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ * 
+ *   http://www.apache.org/licenses/LICENSE-2.0
+ * 
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied.  See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+
+
 import javax.servlet.http.HttpServletRequest;
 
 import mvm.rya.api.security.SecurityProvider;

http://git-wip-us.apache.org/repos/asf/incubator-rya/blob/80faf06d/web/web.rya/src/main/webapp/WEB-INF/spring/spring-accumulo.xml
----------------------------------------------------------------------
diff --git a/web/web.rya/src/main/webapp/WEB-INF/spring/spring-accumulo.xml b/web/web.rya/src/main/webapp/WEB-INF/spring/spring-accumulo.xml
index 0792e81..7f9caaf 100644
--- a/web/web.rya/src/main/webapp/WEB-INF/spring/spring-accumulo.xml
+++ b/web/web.rya/src/main/webapp/WEB-INF/spring/spring-accumulo.xml
@@ -1,4 +1,24 @@
 <?xml version="1.0" encoding="UTF-8"?>
+
+<!--
+Licensed to the Apache Software Foundation (ASF) under one
+or more contributor license agreements.  See the NOTICE file
+distributed with this work for additional information
+regarding copyright ownership.  The ASF licenses this file
+to you under the Apache License, Version 2.0 (the
+"License"); you may not use this file except in compliance
+with the License.  You may obtain a copy of the License at
+
+  http://www.apache.org/licenses/LICENSE-2.0
+
+Unless required by applicable law or agreed to in writing,
+software distributed under the License is distributed on an
+"AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+KIND, either express or implied.  See the License for the
+specific language governing permissions and limitations
+under the License.
+-->
+
 <beans xmlns="http://www.springframework.org/schema/beans"
 	xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance" xmlns:p="http://www.springframework.org/schema/p"
 	xmlns:context="http://www.springframework.org/schema/context"

http://git-wip-us.apache.org/repos/asf/incubator-rya/blob/80faf06d/web/web.rya/src/main/webapp/WEB-INF/spring/spring-cloudbase.xml
----------------------------------------------------------------------
diff --git a/web/web.rya/src/main/webapp/WEB-INF/spring/spring-cloudbase.xml b/web/web.rya/src/main/webapp/WEB-INF/spring/spring-cloudbase.xml
index e59b5f8..8d5ee69 100644
--- a/web/web.rya/src/main/webapp/WEB-INF/spring/spring-cloudbase.xml
+++ b/web/web.rya/src/main/webapp/WEB-INF/spring/spring-cloudbase.xml
@@ -1,4 +1,24 @@
 <?xml version="1.0" encoding="UTF-8"?>
+
+<!--
+Licensed to the Apache Software Foundation (ASF) under one
+or more contributor license agreements.  See the NOTICE file
+distributed with this work for additional information
+regarding copyright ownership.  The ASF licenses this file
+to you under the Apache License, Version 2.0 (the
+"License"); you may not use this file except in compliance
+with the License.  You may obtain a copy of the License at
+
+  http://www.apache.org/licenses/LICENSE-2.0
+
+Unless required by applicable law or agreed to in writing,
+software distributed under the License is distributed on an
+"AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+KIND, either express or implied.  See the License for the
+specific language governing permissions and limitations
+under the License.
+-->
+
 <beans xmlns="http://www.springframework.org/schema/beans"
        xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance" xmlns:p="http://www.springframework.org/schema/p"
        xmlns:context="http://www.springframework.org/schema/context"

http://git-wip-us.apache.org/repos/asf/incubator-rya/blob/80faf06d/web/web.rya/src/main/webapp/WEB-INF/spring/spring-mongodb.xml
----------------------------------------------------------------------
diff --git a/web/web.rya/src/main/webapp/WEB-INF/spring/spring-mongodb.xml b/web/web.rya/src/main/webapp/WEB-INF/spring/spring-mongodb.xml
index 88094fe..85ea26e 100644
--- a/web/web.rya/src/main/webapp/WEB-INF/spring/spring-mongodb.xml
+++ b/web/web.rya/src/main/webapp/WEB-INF/spring/spring-mongodb.xml
@@ -1,4 +1,24 @@
 <?xml version="1.0" encoding="UTF-8"?>
+
+<!--
+Licensed to the Apache Software Foundation (ASF) under one
+or more contributor license agreements.  See the NOTICE file
+distributed with this work for additional information
+regarding copyright ownership.  The ASF licenses this file
+to you under the Apache License, Version 2.0 (the
+"License"); you may not use this file except in compliance
+with the License.  You may obtain a copy of the License at
+
+  http://www.apache.org/licenses/LICENSE-2.0
+
+Unless required by applicable law or agreed to in writing,
+software distributed under the License is distributed on an
+"AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+KIND, either express or implied.  See the License for the
+specific language governing permissions and limitations
+under the License.
+-->
+
 <beans xmlns="http://www.springframework.org/schema/beans"
 	xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance" xmlns:p="http://www.springframework.org/schema/p"
 	xmlns:context="http://www.springframework.org/schema/context"

http://git-wip-us.apache.org/repos/asf/incubator-rya/blob/80faf06d/web/web.rya/src/main/webapp/WEB-INF/spring/spring-root-extensions.xml
----------------------------------------------------------------------
diff --git a/web/web.rya/src/main/webapp/WEB-INF/spring/spring-root-extensions.xml b/web/web.rya/src/main/webapp/WEB-INF/spring/spring-root-extensions.xml
index fa31e21..67feae8 100644
--- a/web/web.rya/src/main/webapp/WEB-INF/spring/spring-root-extensions.xml
+++ b/web/web.rya/src/main/webapp/WEB-INF/spring/spring-root-extensions.xml
@@ -1,4 +1,24 @@
 <?xml version="1.0" encoding="UTF-8"?>
+
+<!--
+Licensed to the Apache Software Foundation (ASF) under one
+or more contributor license agreements.  See the NOTICE file
+distributed with this work for additional information
+regarding copyright ownership.  The ASF licenses this file
+to you under the Apache License, Version 2.0 (the
+"License"); you may not use this file except in compliance
+with the License.  You may obtain a copy of the License at
+
+  http://www.apache.org/licenses/LICENSE-2.0
+
+Unless required by applicable law or agreed to in writing,
+software distributed under the License is distributed on an
+"AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+KIND, either express or implied.  See the License for the
+specific language governing permissions and limitations
+under the License.
+-->
+
 <beans xmlns="http://www.springframework.org/schema/beans"
 	xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance" xmlns:p="http://www.springframework.org/schema/p"
 	xmlns:context="http://www.springframework.org/schema/context"

http://git-wip-us.apache.org/repos/asf/incubator-rya/blob/80faf06d/web/web.rya/src/main/webapp/WEB-INF/spring/spring-root.xml
----------------------------------------------------------------------
diff --git a/web/web.rya/src/main/webapp/WEB-INF/spring/spring-root.xml b/web/web.rya/src/main/webapp/WEB-INF/spring/spring-root.xml
index c354f99..b42a222 100644
--- a/web/web.rya/src/main/webapp/WEB-INF/spring/spring-root.xml
+++ b/web/web.rya/src/main/webapp/WEB-INF/spring/spring-root.xml
@@ -1,4 +1,24 @@
 <?xml version="1.0" encoding="UTF-8"?>
+
+<!--
+Licensed to the Apache Software Foundation (ASF) under one
+or more contributor license agreements.  See the NOTICE file
+distributed with this work for additional information
+regarding copyright ownership.  The ASF licenses this file
+to you under the Apache License, Version 2.0 (the
+"License"); you may not use this file except in compliance
+with the License.  You may obtain a copy of the License at
+
+  http://www.apache.org/licenses/LICENSE-2.0
+
+Unless required by applicable law or agreed to in writing,
+software distributed under the License is distributed on an
+"AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+KIND, either express or implied.  See the License for the
+specific language governing permissions and limitations
+under the License.
+-->
+
 <beans xmlns="http://www.springframework.org/schema/beans"
 	xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance" xmlns:p="http://www.springframework.org/schema/p"
 	xmlns:context="http://www.springframework.org/schema/context"

http://git-wip-us.apache.org/repos/asf/incubator-rya/blob/80faf06d/web/web.rya/src/main/webapp/WEB-INF/spring/spring-security.xml
----------------------------------------------------------------------
diff --git a/web/web.rya/src/main/webapp/WEB-INF/spring/spring-security.xml b/web/web.rya/src/main/webapp/WEB-INF/spring/spring-security.xml
index bd086f0..c15e9d3 100644
--- a/web/web.rya/src/main/webapp/WEB-INF/spring/spring-security.xml
+++ b/web/web.rya/src/main/webapp/WEB-INF/spring/spring-security.xml
@@ -1,4 +1,24 @@
 <?xml version="1.0" encoding="UTF-8"?>
+
+<!--
+Licensed to the Apache Software Foundation (ASF) under one
+or more contributor license agreements.  See the NOTICE file
+distributed with this work for additional information
+regarding copyright ownership.  The ASF licenses this file
+to you under the Apache License, Version 2.0 (the
+"License"); you may not use this file except in compliance
+with the License.  You may obtain a copy of the License at
+
+  http://www.apache.org/licenses/LICENSE-2.0
+
+Unless required by applicable law or agreed to in writing,
+software distributed under the License is distributed on an
+"AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+KIND, either express or implied.  See the License for the
+specific language governing permissions and limitations
+under the License.
+-->
+
 <beans xmlns="http://www.springframework.org/schema/beans"
 	xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance" xmlns:p="http://www.springframework.org/schema/p"
 	xmlns:context="http://www.springframework.org/schema/context"

http://git-wip-us.apache.org/repos/asf/incubator-rya/blob/80faf06d/web/web.rya/src/main/webapp/WEB-INF/web.xml
----------------------------------------------------------------------
diff --git a/web/web.rya/src/main/webapp/WEB-INF/web.xml b/web/web.rya/src/main/webapp/WEB-INF/web.xml
index 54e36ab..5f53e4b 100644
--- a/web/web.rya/src/main/webapp/WEB-INF/web.xml
+++ b/web/web.rya/src/main/webapp/WEB-INF/web.xml
@@ -1,3 +1,23 @@
+<?xml version='1.0'?>
+<!--
+Licensed to the Apache Software Foundation (ASF) under one
+or more contributor license agreements.  See the NOTICE file
+distributed with this work for additional information
+regarding copyright ownership.  The ASF licenses this file
+to you under the Apache License, Version 2.0 (the
+"License"); you may not use this file except in compliance
+with the License.  You may obtain a copy of the License at
+
+  http://www.apache.org/licenses/LICENSE-2.0
+
+Unless required by applicable law or agreed to in writing,
+software distributed under the License is distributed on an
+"AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+KIND, either express or implied.  See the License for the
+specific language governing permissions and limitations
+under the License.
+-->
+
 <!DOCTYPE web-app PUBLIC
         "-//Sun Microsystems, Inc.//DTD Web Application 2.3//EN"
         "http://java.sun.com/dtd/web-app_2_3.dtd" >

http://git-wip-us.apache.org/repos/asf/incubator-rya/blob/80faf06d/web/web.rya/src/main/webapp/crossdomain.xml
----------------------------------------------------------------------
diff --git a/web/web.rya/src/main/webapp/crossdomain.xml b/web/web.rya/src/main/webapp/crossdomain.xml
index c3b5339..cec91f6 100644
--- a/web/web.rya/src/main/webapp/crossdomain.xml
+++ b/web/web.rya/src/main/webapp/crossdomain.xml
@@ -1,5 +1,25 @@
 <?xml version="1.0"?>
+
+<!--
+Licensed to the Apache Software Foundation (ASF) under one
+or more contributor license agreements.  See the NOTICE file
+distributed with this work for additional information
+regarding copyright ownership.  The ASF licenses this file
+to you under the Apache License, Version 2.0 (the
+"License"); you may not use this file except in compliance
+with the License.  You may obtain a copy of the License at
+
+  http://www.apache.org/licenses/LICENSE-2.0
+
+Unless required by applicable law or agreed to in writing,
+software distributed under the License is distributed on an
+"AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+KIND, either express or implied.  See the License for the
+specific language governing permissions and limitations
+under the License.
+-->
+
 <!DOCTYPE cross-domain-policy SYSTEM "http://www.macromedia.com/xml/dtds/cross-domain-policy.dtd">
 <cross-domain-policy>
     <allow-access-from domain="*" secure="false"/>
-</cross-domain-policy>
\ No newline at end of file
+</cross-domain-policy>

http://git-wip-us.apache.org/repos/asf/incubator-rya/blob/80faf06d/web/web.rya/src/main/webapp/sparqlQuery.jsp
----------------------------------------------------------------------
diff --git a/web/web.rya/src/main/webapp/sparqlQuery.jsp b/web/web.rya/src/main/webapp/sparqlQuery.jsp
index 03a3c43..d026a50 100644
--- a/web/web.rya/src/main/webapp/sparqlQuery.jsp
+++ b/web/web.rya/src/main/webapp/sparqlQuery.jsp
@@ -1,3 +1,22 @@
+<!--
+Licensed to the Apache Software Foundation (ASF) under one
+or more contributor license agreements.  See the NOTICE file
+distributed with this work for additional information
+regarding copyright ownership.  The ASF licenses this file
+to you under the Apache License, Version 2.0 (the
+"License"); you may not use this file except in compliance
+with the License.  You may obtain a copy of the License at
+
+  http://www.apache.org/licenses/LICENSE-2.0
+
+Unless required by applicable law or agreed to in writing,
+software distributed under the License is distributed on an
+"AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+KIND, either express or implied.  See the License for the
+specific language governing permissions and limitations
+under the License.
+-->
+
 <%@ page contentType="text/html; charset=iso-8859-1" language="java" %>
 <%@ page import="java.net.*" %>
 <%
@@ -57,4 +76,4 @@ Enter Sparql query here
 </table>
 </form>
 </body>
-</html>
\ No newline at end of file
+</html>

http://git-wip-us.apache.org/repos/asf/incubator-rya/blob/80faf06d/web/web.rya/src/test/java/mvm/cloud/rdf/web/cloudbase/sail/DeleteDataServletRun.java
----------------------------------------------------------------------
diff --git a/web/web.rya/src/test/java/mvm/cloud/rdf/web/cloudbase/sail/DeleteDataServletRun.java b/web/web.rya/src/test/java/mvm/cloud/rdf/web/cloudbase/sail/DeleteDataServletRun.java
index 4520bd0..061bda9 100644
--- a/web/web.rya/src/test/java/mvm/cloud/rdf/web/cloudbase/sail/DeleteDataServletRun.java
+++ b/web/web.rya/src/test/java/mvm/cloud/rdf/web/cloudbase/sail/DeleteDataServletRun.java
@@ -1,25 +1,26 @@
 package mvm.cloud.rdf.web.cloudbase.sail;
 
 /*
- * #%L
- * mvm.rya.web.rya
- * %%
- * Copyright (C) 2014 Rya
- * %%
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * You may obtain a copy of the License at
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
  * 
- *      http://www.apache.org/licenses/LICENSE-2.0
+ *   http://www.apache.org/licenses/LICENSE-2.0
  * 
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- * #L%
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied.  See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
  */
 
+
+
 import java.io.BufferedReader;
 import java.io.InputStreamReader;
 import java.net.URL;

http://git-wip-us.apache.org/repos/asf/incubator-rya/blob/80faf06d/web/web.rya/src/test/java/mvm/cloud/rdf/web/cloudbase/sail/LoadDataServletRun.java
----------------------------------------------------------------------
diff --git a/web/web.rya/src/test/java/mvm/cloud/rdf/web/cloudbase/sail/LoadDataServletRun.java b/web/web.rya/src/test/java/mvm/cloud/rdf/web/cloudbase/sail/LoadDataServletRun.java
index 13bd9ec..1bfc278 100644
--- a/web/web.rya/src/test/java/mvm/cloud/rdf/web/cloudbase/sail/LoadDataServletRun.java
+++ b/web/web.rya/src/test/java/mvm/cloud/rdf/web/cloudbase/sail/LoadDataServletRun.java
@@ -1,25 +1,26 @@
 package mvm.cloud.rdf.web.cloudbase.sail;
 
 /*
- * #%L
- * mvm.rya.web.rya
- * %%
- * Copyright (C) 2014 Rya
- * %%
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * You may obtain a copy of the License at
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
  * 
- *      http://www.apache.org/licenses/LICENSE-2.0
+ *   http://www.apache.org/licenses/LICENSE-2.0
  * 
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- * #L%
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied.  See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
  */
 
+
+
 import java.io.BufferedReader;
 import java.io.InputStream;
 import java.io.InputStreamReader;

http://git-wip-us.apache.org/repos/asf/incubator-rya/blob/80faf06d/web/web.rya/src/test/java/mvm/cloud/rdf/web/cloudbase/sail/QueryDataServletRun.java
----------------------------------------------------------------------
diff --git a/web/web.rya/src/test/java/mvm/cloud/rdf/web/cloudbase/sail/QueryDataServletRun.java b/web/web.rya/src/test/java/mvm/cloud/rdf/web/cloudbase/sail/QueryDataServletRun.java
index 3e0049e..e9d918a 100644
--- a/web/web.rya/src/test/java/mvm/cloud/rdf/web/cloudbase/sail/QueryDataServletRun.java
+++ b/web/web.rya/src/test/java/mvm/cloud/rdf/web/cloudbase/sail/QueryDataServletRun.java
@@ -1,25 +1,26 @@
 package mvm.cloud.rdf.web.cloudbase.sail;
 
 /*
- * #%L
- * mvm.rya.web.rya
- * %%
- * Copyright (C) 2014 Rya
- * %%
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * You may obtain a copy of the License at
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
  * 
- *      http://www.apache.org/licenses/LICENSE-2.0
+ *   http://www.apache.org/licenses/LICENSE-2.0
  * 
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- * #L%
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied.  See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
  */
 
+
+
 import java.io.BufferedReader;
 import java.io.InputStreamReader;
 import java.net.URL;

http://git-wip-us.apache.org/repos/asf/incubator-rya/blob/80faf06d/web/web.rya/src/test/java/mvm/cloud/rdf/web/sail/RdfControllerIntegrationTest.java
----------------------------------------------------------------------
diff --git a/web/web.rya/src/test/java/mvm/cloud/rdf/web/sail/RdfControllerIntegrationTest.java b/web/web.rya/src/test/java/mvm/cloud/rdf/web/sail/RdfControllerIntegrationTest.java
index 5265484..eea0bad 100644
--- a/web/web.rya/src/test/java/mvm/cloud/rdf/web/sail/RdfControllerIntegrationTest.java
+++ b/web/web.rya/src/test/java/mvm/cloud/rdf/web/sail/RdfControllerIntegrationTest.java
@@ -1,5 +1,25 @@
 package mvm.cloud.rdf.web.sail;
 
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ * 
+ *   http://www.apache.org/licenses/LICENSE-2.0
+ * 
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied.  See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+
+
 import org.junit.Before;
 import org.junit.Rule;
 import org.junit.Test;

http://git-wip-us.apache.org/repos/asf/incubator-rya/blob/80faf06d/web/web.rya/src/test/java/mvm/cloud/rdf/web/sail/RdfControllerTest.java
----------------------------------------------------------------------
diff --git a/web/web.rya/src/test/java/mvm/cloud/rdf/web/sail/RdfControllerTest.java b/web/web.rya/src/test/java/mvm/cloud/rdf/web/sail/RdfControllerTest.java
index 613bcfb..7888457 100644
--- a/web/web.rya/src/test/java/mvm/cloud/rdf/web/sail/RdfControllerTest.java
+++ b/web/web.rya/src/test/java/mvm/cloud/rdf/web/sail/RdfControllerTest.java
@@ -1,25 +1,26 @@
 package mvm.cloud.rdf.web.sail;
 
 /*
- * #%L
- * mvm.rya.web.rya
- * %%
- * Copyright (C) 2014 Rya
- * %%
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * You may obtain a copy of the License at
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
  * 
- *      http://www.apache.org/licenses/LICENSE-2.0
+ *   http://www.apache.org/licenses/LICENSE-2.0
  * 
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- * #L%
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied.  See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
  */
 
+
+
 import static org.hamcrest.Matchers.equalToIgnoringWhiteSpace;
 import static org.junit.Assert.assertTrue;
 import static org.springframework.test.web.servlet.request.MockMvcRequestBuilders.get;

http://git-wip-us.apache.org/repos/asf/incubator-rya/blob/80faf06d/web/web.rya/src/test/resources/cdrdf.xml
----------------------------------------------------------------------
diff --git a/web/web.rya/src/test/resources/cdrdf.xml b/web/web.rya/src/test/resources/cdrdf.xml
index 0dbe6c2..96829f0 100644
--- a/web/web.rya/src/test/resources/cdrdf.xml
+++ b/web/web.rya/src/test/resources/cdrdf.xml
@@ -1,23 +1,24 @@
 <?xml version="1.0"?>
+
 <!--
-  #%L
-  mvm.rya.web.rya
-  %%
-  Copyright (C) 2014 Rya
-  %%
-  Licensed under the Apache License, Version 2.0 (the "License");
-  you may not use this file except in compliance with the License.
-  You may obtain a copy of the License at
-  
-       http://www.apache.org/licenses/LICENSE-2.0
-  
-  Unless required by applicable law or agreed to in writing, software
-  distributed under the License is distributed on an "AS IS" BASIS,
-  WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-  See the License for the specific language governing permissions and
-  limitations under the License.
-  #L%
-  -->
+Licensed to the Apache Software Foundation (ASF) under one
+or more contributor license agreements.  See the NOTICE file
+distributed with this work for additional information
+regarding copyright ownership.  The ASF licenses this file
+to you under the Apache License, Version 2.0 (the
+"License"); you may not use this file except in compliance
+with the License.  You may obtain a copy of the License at
+
+  http://www.apache.org/licenses/LICENSE-2.0
+
+Unless required by applicable law or agreed to in writing,
+software distributed under the License is distributed on an
+"AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+KIND, either express or implied.  See the License for the
+specific language governing permissions and limitations
+under the License.
+-->
+
 
 <rdf:RDF xmlns:rdf="http://www.w3.org/1999/02/22-rdf-syntax-ns#"
 	xmlns:cd="http://www.recshop.fake/cd#">
@@ -37,4 +38,4 @@
 		<cd:price>9.90</cd:price>
 		<cd:year>1993</cd:year>
 	</rdf:Description>
-</rdf:RDF>
\ No newline at end of file
+</rdf:RDF>

http://git-wip-us.apache.org/repos/asf/incubator-rya/blob/80faf06d/web/web.rya/src/test/resources/controllerIntegrationTest-accumulo.xml
----------------------------------------------------------------------
diff --git a/web/web.rya/src/test/resources/controllerIntegrationTest-accumulo.xml b/web/web.rya/src/test/resources/controllerIntegrationTest-accumulo.xml
index 20120db..5b20d57 100644
--- a/web/web.rya/src/test/resources/controllerIntegrationTest-accumulo.xml
+++ b/web/web.rya/src/test/resources/controllerIntegrationTest-accumulo.xml
@@ -1,22 +1,23 @@
+<?xml version='1.0'?>
 <!--
-  #%L
-  mvm.rya.web.rya
-  %%
-  Copyright (C) 2014 Rya
-  %%
-  Licensed under the Apache License, Version 2.0 (the "License");
-  you may not use this file except in compliance with the License.
-  You may obtain a copy of the License at
-  
-       http://www.apache.org/licenses/LICENSE-2.0
-  
-  Unless required by applicable law or agreed to in writing, software
-  distributed under the License is distributed on an "AS IS" BASIS,
-  WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-  See the License for the specific language governing permissions and
-  limitations under the License.
-  #L%
-  -->
+Licensed to the Apache Software Foundation (ASF) under one
+or more contributor license agreements.  See the NOTICE file
+distributed with this work for additional information
+regarding copyright ownership.  The ASF licenses this file
+to you under the Apache License, Version 2.0 (the
+"License"); you may not use this file except in compliance
+with the License.  You may obtain a copy of the License at
+
+  http://www.apache.org/licenses/LICENSE-2.0
+
+Unless required by applicable law or agreed to in writing,
+software distributed under the License is distributed on an
+"AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+KIND, either express or implied.  See the License for the
+specific language governing permissions and limitations
+under the License.
+-->
+
 <beans xmlns="http://www.springframework.org/schema/beans"
        xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance"
        xsi:schemaLocation="

http://git-wip-us.apache.org/repos/asf/incubator-rya/blob/80faf06d/web/web.rya/src/test/resources/controllerIntegrationTest-root.xml
----------------------------------------------------------------------
diff --git a/web/web.rya/src/test/resources/controllerIntegrationTest-root.xml b/web/web.rya/src/test/resources/controllerIntegrationTest-root.xml
index e49edb1..7fab80d 100644
--- a/web/web.rya/src/test/resources/controllerIntegrationTest-root.xml
+++ b/web/web.rya/src/test/resources/controllerIntegrationTest-root.xml
@@ -1,4 +1,25 @@
 <?xml version="1.0" encoding="UTF-8"?>
+
+
+<!--
+Licensed to the Apache Software Foundation (ASF) under one
+or more contributor license agreements.  See the NOTICE file
+distributed with this work for additional information
+regarding copyright ownership.  The ASF licenses this file
+to you under the Apache License, Version 2.0 (the
+"License"); you may not use this file except in compliance
+with the License.  You may obtain a copy of the License at
+
+  http://www.apache.org/licenses/LICENSE-2.0
+
+Unless required by applicable law or agreed to in writing,
+software distributed under the License is distributed on an
+"AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+KIND, either express or implied.  See the License for the
+specific language governing permissions and limitations
+under the License.
+-->
+
 <beans xmlns="http://www.springframework.org/schema/beans"
        xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance"
        xmlns:context="http://www.springframework.org/schema/context"



[34/56] [abbrv] incubator-rya git commit: RYA-7 POM and License Clean-up for Apache Move

Posted by mi...@apache.org.
http://git-wip-us.apache.org/repos/asf/incubator-rya/blob/80faf06d/extras/indexing/src/main/java/mvm/rya/indexing/accumulo/freetext/query/TokenMgrError.java
----------------------------------------------------------------------
diff --git a/extras/indexing/src/main/java/mvm/rya/indexing/accumulo/freetext/query/TokenMgrError.java b/extras/indexing/src/main/java/mvm/rya/indexing/accumulo/freetext/query/TokenMgrError.java
index f58542d..ab3b0f1 100644
--- a/extras/indexing/src/main/java/mvm/rya/indexing/accumulo/freetext/query/TokenMgrError.java
+++ b/extras/indexing/src/main/java/mvm/rya/indexing/accumulo/freetext/query/TokenMgrError.java
@@ -3,25 +3,906 @@
 package mvm.rya.indexing.accumulo.freetext.query;
 
 /*
- * #%L
- * mvm.rya.indexing.accumulo
- * %%
- * Copyright (C) 2014 Rya
- * %%
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * You may obtain a copy of the License at
- * 
- *      http://www.apache.org/licenses/LICENSE-2.0
- * 
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- * #L%
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ * 
+ *   http://www.apache.org/licenses/LICENSE-2.0
+ * 
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied.  See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
  */
 
+
+
 /** Token Manager Error. */
 public class TokenMgrError extends Error
 {

http://git-wip-us.apache.org/repos/asf/incubator-rya/blob/80faf06d/extras/indexing/src/main/java/mvm/rya/indexing/accumulo/geo/GeoConstants.java
----------------------------------------------------------------------
diff --git a/extras/indexing/src/main/java/mvm/rya/indexing/accumulo/geo/GeoConstants.java b/extras/indexing/src/main/java/mvm/rya/indexing/accumulo/geo/GeoConstants.java
index 350ccc6..6cb01e1 100644
--- a/extras/indexing/src/main/java/mvm/rya/indexing/accumulo/geo/GeoConstants.java
+++ b/extras/indexing/src/main/java/mvm/rya/indexing/accumulo/geo/GeoConstants.java
@@ -1,25 +1,26 @@
 package mvm.rya.indexing.accumulo.geo;
 
 /*
- * #%L
- * mvm.rya.indexing.accumulo
- * %%
- * Copyright (C) 2014 Rya
- * %%
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * You may obtain a copy of the License at
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
  * 
- *      http://www.apache.org/licenses/LICENSE-2.0
+ *   http://www.apache.org/licenses/LICENSE-2.0
  * 
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- * #L%
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied.  See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
  */
 
+
+
 import org.openrdf.model.URI;
 import org.openrdf.model.impl.URIImpl;
 

http://git-wip-us.apache.org/repos/asf/incubator-rya/blob/80faf06d/extras/indexing/src/main/java/mvm/rya/indexing/accumulo/geo/GeoMesaGeoIndexer.java
----------------------------------------------------------------------
diff --git a/extras/indexing/src/main/java/mvm/rya/indexing/accumulo/geo/GeoMesaGeoIndexer.java b/extras/indexing/src/main/java/mvm/rya/indexing/accumulo/geo/GeoMesaGeoIndexer.java
index e012a7f..37acf89 100644
--- a/extras/indexing/src/main/java/mvm/rya/indexing/accumulo/geo/GeoMesaGeoIndexer.java
+++ b/extras/indexing/src/main/java/mvm/rya/indexing/accumulo/geo/GeoMesaGeoIndexer.java
@@ -1,25 +1,26 @@
 package mvm.rya.indexing.accumulo.geo;
 
 /*
- * #%L
- * mvm.rya.indexing.accumulo
- * %%
- * Copyright (C) 2014 Rya
- * %%
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * You may obtain a copy of the License at
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
  * 
- *      http://www.apache.org/licenses/LICENSE-2.0
+ *   http://www.apache.org/licenses/LICENSE-2.0
  * 
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- * #L%
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied.  See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
  */
 
+
+
 import info.aduna.iteration.CloseableIteration;
 
 import java.io.IOException;

http://git-wip-us.apache.org/repos/asf/incubator-rya/blob/80faf06d/extras/indexing/src/main/java/mvm/rya/indexing/accumulo/geo/GeoParseUtils.java
----------------------------------------------------------------------
diff --git a/extras/indexing/src/main/java/mvm/rya/indexing/accumulo/geo/GeoParseUtils.java b/extras/indexing/src/main/java/mvm/rya/indexing/accumulo/geo/GeoParseUtils.java
index 5015534..e5c3adf 100644
--- a/extras/indexing/src/main/java/mvm/rya/indexing/accumulo/geo/GeoParseUtils.java
+++ b/extras/indexing/src/main/java/mvm/rya/indexing/accumulo/geo/GeoParseUtils.java
@@ -1,5 +1,25 @@
 package mvm.rya.indexing.accumulo.geo;
 
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ * 
+ *   http://www.apache.org/licenses/LICENSE-2.0
+ * 
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied.  See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+
+
 import org.apache.log4j.Logger;
 import org.openrdf.model.Literal;
 import org.openrdf.model.Statement;

http://git-wip-us.apache.org/repos/asf/incubator-rya/blob/80faf06d/extras/indexing/src/main/java/mvm/rya/indexing/accumulo/geo/GeoTupleSet.java
----------------------------------------------------------------------
diff --git a/extras/indexing/src/main/java/mvm/rya/indexing/accumulo/geo/GeoTupleSet.java b/extras/indexing/src/main/java/mvm/rya/indexing/accumulo/geo/GeoTupleSet.java
index 237b73d..e7a5d68 100644
--- a/extras/indexing/src/main/java/mvm/rya/indexing/accumulo/geo/GeoTupleSet.java
+++ b/extras/indexing/src/main/java/mvm/rya/indexing/accumulo/geo/GeoTupleSet.java
@@ -1,5 +1,25 @@
 package mvm.rya.indexing.accumulo.geo;
 
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ * 
+ *   http://www.apache.org/licenses/LICENSE-2.0
+ * 
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied.  See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+
+
 import info.aduna.iteration.CloseableIteration;
 
 import java.util.Map;

http://git-wip-us.apache.org/repos/asf/incubator-rya/blob/80faf06d/extras/indexing/src/main/java/mvm/rya/indexing/accumulo/temporal/AccumuloTemporalIndexer.java
----------------------------------------------------------------------
diff --git a/extras/indexing/src/main/java/mvm/rya/indexing/accumulo/temporal/AccumuloTemporalIndexer.java b/extras/indexing/src/main/java/mvm/rya/indexing/accumulo/temporal/AccumuloTemporalIndexer.java
index 027a3b1..e2f98b3 100644
--- a/extras/indexing/src/main/java/mvm/rya/indexing/accumulo/temporal/AccumuloTemporalIndexer.java
+++ b/extras/indexing/src/main/java/mvm/rya/indexing/accumulo/temporal/AccumuloTemporalIndexer.java
@@ -1,5 +1,25 @@
 package mvm.rya.indexing.accumulo.temporal;
 
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ * 
+ *   http://www.apache.org/licenses/LICENSE-2.0
+ * 
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied.  See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+
+
 import info.aduna.iteration.CloseableIteration;
 
 import java.io.IOException;

http://git-wip-us.apache.org/repos/asf/incubator-rya/blob/80faf06d/extras/indexing/src/main/java/mvm/rya/indexing/accumulo/temporal/TemporalInstantRfc3339.java
----------------------------------------------------------------------
diff --git a/extras/indexing/src/main/java/mvm/rya/indexing/accumulo/temporal/TemporalInstantRfc3339.java b/extras/indexing/src/main/java/mvm/rya/indexing/accumulo/temporal/TemporalInstantRfc3339.java
index 917095b..a69a79f 100644
--- a/extras/indexing/src/main/java/mvm/rya/indexing/accumulo/temporal/TemporalInstantRfc3339.java
+++ b/extras/indexing/src/main/java/mvm/rya/indexing/accumulo/temporal/TemporalInstantRfc3339.java
@@ -3,6 +3,26 @@
  */
 package mvm.rya.indexing.accumulo.temporal;
 
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ * 
+ *   http://www.apache.org/licenses/LICENSE-2.0
+ * 
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied.  See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+
+
 import java.util.regex.Matcher;
 import java.util.regex.Pattern;
 

http://git-wip-us.apache.org/repos/asf/incubator-rya/blob/80faf06d/extras/indexing/src/main/java/mvm/rya/indexing/accumulo/temporal/TemporalTupleSet.java
----------------------------------------------------------------------
diff --git a/extras/indexing/src/main/java/mvm/rya/indexing/accumulo/temporal/TemporalTupleSet.java b/extras/indexing/src/main/java/mvm/rya/indexing/accumulo/temporal/TemporalTupleSet.java
index 66bedce..f2ed8c4 100644
--- a/extras/indexing/src/main/java/mvm/rya/indexing/accumulo/temporal/TemporalTupleSet.java
+++ b/extras/indexing/src/main/java/mvm/rya/indexing/accumulo/temporal/TemporalTupleSet.java
@@ -1,5 +1,25 @@
 package mvm.rya.indexing.accumulo.temporal;
 
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ * 
+ *   http://www.apache.org/licenses/LICENSE-2.0
+ * 
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied.  See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+
+
 import info.aduna.iteration.CloseableIteration;
 
 import java.util.Map;

http://git-wip-us.apache.org/repos/asf/incubator-rya/blob/80faf06d/extras/indexing/src/main/java/mvm/rya/indexing/external/ExternalIndexMain.java
----------------------------------------------------------------------
diff --git a/extras/indexing/src/main/java/mvm/rya/indexing/external/ExternalIndexMain.java b/extras/indexing/src/main/java/mvm/rya/indexing/external/ExternalIndexMain.java
index 61627cf..c4e55be 100644
--- a/extras/indexing/src/main/java/mvm/rya/indexing/external/ExternalIndexMain.java
+++ b/extras/indexing/src/main/java/mvm/rya/indexing/external/ExternalIndexMain.java
@@ -1,25 +1,26 @@
 package mvm.rya.indexing.external;
 
 /*
- * #%L
- * mvm.rya.indexing.accumulo
- * %%
- * Copyright (C) 2014 Rya
- * %%
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * You may obtain a copy of the License at
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
  * 
- *      http://www.apache.org/licenses/LICENSE-2.0
+ *   http://www.apache.org/licenses/LICENSE-2.0
  * 
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- * #L%
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied.  See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
  */
 
+
+
 import java.io.File;
 import java.util.List;
 import java.util.Map;

http://git-wip-us.apache.org/repos/asf/incubator-rya/blob/80faf06d/extras/indexing/src/main/java/mvm/rya/indexing/external/ExternalProcessor.java
----------------------------------------------------------------------
diff --git a/extras/indexing/src/main/java/mvm/rya/indexing/external/ExternalProcessor.java b/extras/indexing/src/main/java/mvm/rya/indexing/external/ExternalProcessor.java
index b387806..2c6d924 100644
--- a/extras/indexing/src/main/java/mvm/rya/indexing/external/ExternalProcessor.java
+++ b/extras/indexing/src/main/java/mvm/rya/indexing/external/ExternalProcessor.java
@@ -1,25 +1,26 @@
 package mvm.rya.indexing.external;
 
 /*
- * #%L
- * mvm.rya.rya.indexing
- * %%
- * Copyright (C) 2014 - 2015 Rya
- * %%
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * You may obtain a copy of the License at
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
  * 
- *      http://www.apache.org/licenses/LICENSE-2.0
+ *   http://www.apache.org/licenses/LICENSE-2.0
  * 
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- * #L%
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied.  See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
  */
 
+
+
 import java.util.ArrayList;
 import java.util.Collection;
 import java.util.HashSet;

http://git-wip-us.apache.org/repos/asf/incubator-rya/blob/80faf06d/extras/indexing/src/main/java/mvm/rya/indexing/external/ExternalSail.java
----------------------------------------------------------------------
diff --git a/extras/indexing/src/main/java/mvm/rya/indexing/external/ExternalSail.java b/extras/indexing/src/main/java/mvm/rya/indexing/external/ExternalSail.java
index 7219c6d..772ffa4 100644
--- a/extras/indexing/src/main/java/mvm/rya/indexing/external/ExternalSail.java
+++ b/extras/indexing/src/main/java/mvm/rya/indexing/external/ExternalSail.java
@@ -1,25 +1,26 @@
 package mvm.rya.indexing.external;
 
 /*
- * #%L
- * mvm.rya.indexing.accumulo
- * %%
- * Copyright (C) 2014 Rya
- * %%
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * You may obtain a copy of the License at
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
  * 
- *      http://www.apache.org/licenses/LICENSE-2.0
+ *   http://www.apache.org/licenses/LICENSE-2.0
  * 
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- * #L%
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied.  See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
  */
 
+
+
 import info.aduna.iteration.CloseableIteration;
 
 import org.openrdf.model.ValueFactory;

http://git-wip-us.apache.org/repos/asf/incubator-rya/blob/80faf06d/extras/indexing/src/main/java/mvm/rya/indexing/external/ExternalSailExample.java
----------------------------------------------------------------------
diff --git a/extras/indexing/src/main/java/mvm/rya/indexing/external/ExternalSailExample.java b/extras/indexing/src/main/java/mvm/rya/indexing/external/ExternalSailExample.java
index 0bf7299..082dd99 100644
--- a/extras/indexing/src/main/java/mvm/rya/indexing/external/ExternalSailExample.java
+++ b/extras/indexing/src/main/java/mvm/rya/indexing/external/ExternalSailExample.java
@@ -1,25 +1,26 @@
 package mvm.rya.indexing.external;
 
 /*
- * #%L
- * mvm.rya.indexing.accumulo
- * %%
- * Copyright (C) 2014 Rya
- * %%
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * You may obtain a copy of the License at
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
  * 
- *      http://www.apache.org/licenses/LICENSE-2.0
+ *   http://www.apache.org/licenses/LICENSE-2.0
  * 
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- * #L%
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied.  See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
  */
 
+
+
 import java.util.List;
 
 import mvm.rya.indexing.external.tupleSet.AccumuloIndexSet;

http://git-wip-us.apache.org/repos/asf/incubator-rya/blob/80faf06d/extras/indexing/src/main/java/mvm/rya/indexing/external/PrecompJoinOptimizer.java
----------------------------------------------------------------------
diff --git a/extras/indexing/src/main/java/mvm/rya/indexing/external/PrecompJoinOptimizer.java b/extras/indexing/src/main/java/mvm/rya/indexing/external/PrecompJoinOptimizer.java
index 0d28f84..65a775f 100644
--- a/extras/indexing/src/main/java/mvm/rya/indexing/external/PrecompJoinOptimizer.java
+++ b/extras/indexing/src/main/java/mvm/rya/indexing/external/PrecompJoinOptimizer.java
@@ -1,5 +1,25 @@
 package mvm.rya.indexing.external;
 
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ * 
+ *   http://www.apache.org/licenses/LICENSE-2.0
+ * 
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied.  See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+
+
 import java.util.ArrayList;
 import java.util.Collection;
 import java.util.Iterator;

http://git-wip-us.apache.org/repos/asf/incubator-rya/blob/80faf06d/extras/indexing/src/main/java/mvm/rya/indexing/external/QueryVariableNormalizer.java
----------------------------------------------------------------------
diff --git a/extras/indexing/src/main/java/mvm/rya/indexing/external/QueryVariableNormalizer.java b/extras/indexing/src/main/java/mvm/rya/indexing/external/QueryVariableNormalizer.java
index aa1519a..d19c511 100644
--- a/extras/indexing/src/main/java/mvm/rya/indexing/external/QueryVariableNormalizer.java
+++ b/extras/indexing/src/main/java/mvm/rya/indexing/external/QueryVariableNormalizer.java
@@ -1,5 +1,25 @@
 package mvm.rya.indexing.external;
 
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ * 
+ *   http://www.apache.org/licenses/LICENSE-2.0
+ * 
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied.  See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+
+
 import java.util.ArrayList;
 import java.util.Collections;
 import java.util.Comparator;

http://git-wip-us.apache.org/repos/asf/incubator-rya/blob/80faf06d/extras/indexing/src/main/java/mvm/rya/indexing/external/tupleSet/AccumuloIndexSet.java
----------------------------------------------------------------------
diff --git a/extras/indexing/src/main/java/mvm/rya/indexing/external/tupleSet/AccumuloIndexSet.java b/extras/indexing/src/main/java/mvm/rya/indexing/external/tupleSet/AccumuloIndexSet.java
index 47a8502..dda452d 100644
--- a/extras/indexing/src/main/java/mvm/rya/indexing/external/tupleSet/AccumuloIndexSet.java
+++ b/extras/indexing/src/main/java/mvm/rya/indexing/external/tupleSet/AccumuloIndexSet.java
@@ -1,25 +1,26 @@
 package mvm.rya.indexing.external.tupleSet;
 
 /*
- * #%L
- * mvm.rya.indexing.accumulo
- * %%
- * Copyright (C) 2014 Rya
- * %%
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * You may obtain a copy of the License at
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
  * 
- *      http://www.apache.org/licenses/LICENSE-2.0
+ *   http://www.apache.org/licenses/LICENSE-2.0
  * 
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- * #L%
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied.  See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
  */
 
+
+
 import info.aduna.iteration.CloseableIteration;
 
 import java.util.ArrayList;

http://git-wip-us.apache.org/repos/asf/incubator-rya/blob/80faf06d/extras/indexing/src/main/java/mvm/rya/indexing/external/tupleSet/ExternalTupleSet.java
----------------------------------------------------------------------
diff --git a/extras/indexing/src/main/java/mvm/rya/indexing/external/tupleSet/ExternalTupleSet.java b/extras/indexing/src/main/java/mvm/rya/indexing/external/tupleSet/ExternalTupleSet.java
index 7061cac..0e2096d 100644
--- a/extras/indexing/src/main/java/mvm/rya/indexing/external/tupleSet/ExternalTupleSet.java
+++ b/extras/indexing/src/main/java/mvm/rya/indexing/external/tupleSet/ExternalTupleSet.java
@@ -1,44 +1,41 @@
 package mvm.rya.indexing.external.tupleSet;
 
 /*
- * #%L
- * mvm.rya.indexing.accumulo
- * %%
- * Copyright (C) 2014 Rya
- * %%
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * You may obtain a copy of the License at
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
  * 
- *      http://www.apache.org/licenses/LICENSE-2.0
+ *   http://www.apache.org/licenses/LICENSE-2.0
  * 
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- * #L%
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied.  See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
  */
 
+
+
 import info.aduna.iteration.CloseableIteration;
 
 import java.util.HashSet;
-import java.util.List;
 import java.util.Map;
 import java.util.Set;
 
 import org.openrdf.query.BindingSet;
 import org.openrdf.query.QueryEvaluationException;
 import org.openrdf.query.algebra.Projection;
-import org.openrdf.query.algebra.QueryModelNode;
-import org.openrdf.query.algebra.QueryModelVisitor;
 import org.openrdf.query.algebra.Var;
 import org.openrdf.query.algebra.evaluation.impl.ExternalSet;
 import org.openrdf.query.algebra.helpers.QueryModelVisitorBase;
 
 import com.beust.jcommander.internal.Sets;
 import com.google.common.base.Joiner;
-import com.google.common.collect.Lists;
 import com.google.common.collect.Maps;
 
 /**

http://git-wip-us.apache.org/repos/asf/incubator-rya/blob/80faf06d/extras/indexing/src/main/java/mvm/rya/indexing/external/tupleSet/SimpleExternalTupleSet.java
----------------------------------------------------------------------
diff --git a/extras/indexing/src/main/java/mvm/rya/indexing/external/tupleSet/SimpleExternalTupleSet.java b/extras/indexing/src/main/java/mvm/rya/indexing/external/tupleSet/SimpleExternalTupleSet.java
index f98ddc8..44925ca 100644
--- a/extras/indexing/src/main/java/mvm/rya/indexing/external/tupleSet/SimpleExternalTupleSet.java
+++ b/extras/indexing/src/main/java/mvm/rya/indexing/external/tupleSet/SimpleExternalTupleSet.java
@@ -1,25 +1,26 @@
 package mvm.rya.indexing.external.tupleSet;
 
 /*
- * #%L
- * mvm.rya.rya.indexing
- * %%
- * Copyright (C) 2014 - 2015 Rya
- * %%
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * You may obtain a copy of the License at
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
  * 
- *      http://www.apache.org/licenses/LICENSE-2.0
+ *   http://www.apache.org/licenses/LICENSE-2.0
  * 
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- * #L%
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied.  See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
  */
 
+
+
 import info.aduna.iteration.CloseableIteration;
 
 import org.openrdf.query.BindingSet;

http://git-wip-us.apache.org/repos/asf/incubator-rya/blob/80faf06d/extras/indexing/src/main/java/mvm/rya/indexing/mongodb/AbstractMongoIndexer.java
----------------------------------------------------------------------
diff --git a/extras/indexing/src/main/java/mvm/rya/indexing/mongodb/AbstractMongoIndexer.java b/extras/indexing/src/main/java/mvm/rya/indexing/mongodb/AbstractMongoIndexer.java
index dd61edc..4a708ab 100644
--- a/extras/indexing/src/main/java/mvm/rya/indexing/mongodb/AbstractMongoIndexer.java
+++ b/extras/indexing/src/main/java/mvm/rya/indexing/mongodb/AbstractMongoIndexer.java
@@ -1,15 +1,34 @@
 package mvm.rya.indexing.mongodb;
 
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ * 
+ *   http://www.apache.org/licenses/LICENSE-2.0
+ * 
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied.  See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+
+
 import java.io.IOException;
 import java.util.Collection;
 
-import org.apache.hadoop.conf.Configuration;
-import org.openrdf.model.Statement;
-
 import mvm.rya.api.domain.RyaStatement;
 import mvm.rya.api.domain.RyaURI;
 import mvm.rya.api.persist.index.RyaSecondaryIndexer;
 
+import org.apache.hadoop.conf.Configuration;
+
 public abstract class AbstractMongoIndexer implements RyaSecondaryIndexer {
 
     @Override

http://git-wip-us.apache.org/repos/asf/incubator-rya/blob/80faf06d/extras/indexing/src/main/java/mvm/rya/indexing/mongodb/GeoMongoDBStorageStrategy.java
----------------------------------------------------------------------
diff --git a/extras/indexing/src/main/java/mvm/rya/indexing/mongodb/GeoMongoDBStorageStrategy.java b/extras/indexing/src/main/java/mvm/rya/indexing/mongodb/GeoMongoDBStorageStrategy.java
index d945ba3..0355225 100644
--- a/extras/indexing/src/main/java/mvm/rya/indexing/mongodb/GeoMongoDBStorageStrategy.java
+++ b/extras/indexing/src/main/java/mvm/rya/indexing/mongodb/GeoMongoDBStorageStrategy.java
@@ -1,5 +1,25 @@
 package mvm.rya.indexing.mongodb;
 
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ * 
+ *   http://www.apache.org/licenses/LICENSE-2.0
+ * 
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied.  See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+
+
 import java.security.MessageDigest;
 import java.security.NoSuchAlgorithmException;
 import java.util.ArrayList;
@@ -14,7 +34,6 @@ import mvm.rya.indexing.accumulo.geo.GeoParseUtils;
 import org.apache.commons.codec.binary.Hex;
 import org.openrdf.model.Statement;
 import org.openrdf.model.URI;
-import org.openrdf.model.impl.ValueFactoryImpl;
 
 import com.mongodb.BasicDBList;
 import com.mongodb.BasicDBObject;

http://git-wip-us.apache.org/repos/asf/incubator-rya/blob/80faf06d/extras/indexing/src/main/java/mvm/rya/indexing/mongodb/MongoGeoIndexer.java
----------------------------------------------------------------------
diff --git a/extras/indexing/src/main/java/mvm/rya/indexing/mongodb/MongoGeoIndexer.java b/extras/indexing/src/main/java/mvm/rya/indexing/mongodb/MongoGeoIndexer.java
index 35e69b1..c36b125 100644
--- a/extras/indexing/src/main/java/mvm/rya/indexing/mongodb/MongoGeoIndexer.java
+++ b/extras/indexing/src/main/java/mvm/rya/indexing/mongodb/MongoGeoIndexer.java
@@ -1,11 +1,30 @@
 package mvm.rya.indexing.mongodb;
 
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ * 
+ *   http://www.apache.org/licenses/LICENSE-2.0
+ * 
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied.  See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+
+
 import info.aduna.iteration.CloseableIteration;
 
 import java.io.IOException;
 import java.net.UnknownHostException;
 import java.util.Arrays;
-import java.util.Collection;
 import java.util.Set;
 
 import mvm.rya.api.domain.RyaStatement;
@@ -13,13 +32,11 @@ import mvm.rya.api.resolver.RyaToRdfConversions;
 import mvm.rya.indexing.GeoIndexer;
 import mvm.rya.indexing.StatementContraints;
 import mvm.rya.indexing.accumulo.ConfigUtils;
-import mvm.rya.indexing.accumulo.temporal.AccumuloTemporalIndexer;
 import mvm.rya.indexing.mongodb.GeoMongoDBStorageStrategy.GeoQueryType;
 import mvm.rya.mongodb.MongoDBRdfConfiguration;
 
 import org.apache.hadoop.conf.Configuration;
 import org.apache.log4j.Logger;
-import org.opengis.feature.simple.SimpleFeature;
 import org.openrdf.model.Literal;
 import org.openrdf.model.Statement;
 import org.openrdf.model.URI;
@@ -33,7 +50,6 @@ import com.mongodb.MongoClient;
 import com.mongodb.MongoCredential;
 import com.mongodb.ServerAddress;
 import com.vividsolutions.jts.geom.Geometry;
-import com.vividsolutions.jts.io.ParseException;
 
 public class MongoGeoIndexer extends AbstractMongoIndexer implements GeoIndexer{
     

http://git-wip-us.apache.org/repos/asf/incubator-rya/blob/80faf06d/extras/indexing/src/main/java/mvm/rya/indexing/mongodb/MongoGeoTupleSet.java
----------------------------------------------------------------------
diff --git a/extras/indexing/src/main/java/mvm/rya/indexing/mongodb/MongoGeoTupleSet.java b/extras/indexing/src/main/java/mvm/rya/indexing/mongodb/MongoGeoTupleSet.java
index a325b06..da49904 100644
--- a/extras/indexing/src/main/java/mvm/rya/indexing/mongodb/MongoGeoTupleSet.java
+++ b/extras/indexing/src/main/java/mvm/rya/indexing/mongodb/MongoGeoTupleSet.java
@@ -1,5 +1,25 @@
 package mvm.rya.indexing.mongodb;
 
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ * 
+ *   http://www.apache.org/licenses/LICENSE-2.0
+ * 
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied.  See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+
+
 import info.aduna.iteration.CloseableIteration;
 
 import java.util.Map;

http://git-wip-us.apache.org/repos/asf/incubator-rya/blob/80faf06d/extras/indexing/src/test/java/ValidIndexCombinationGeneratorTest.java
----------------------------------------------------------------------
diff --git a/extras/indexing/src/test/java/ValidIndexCombinationGeneratorTest.java b/extras/indexing/src/test/java/ValidIndexCombinationGeneratorTest.java
index 1e295b4..1515118 100644
--- a/extras/indexing/src/test/java/ValidIndexCombinationGeneratorTest.java
+++ b/extras/indexing/src/test/java/ValidIndexCombinationGeneratorTest.java
@@ -1,3 +1,22 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ * 
+ *   http://www.apache.org/licenses/LICENSE-2.0
+ * 
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied.  See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+
 import static org.junit.Assert.*;
 
 import java.util.ArrayList;

http://git-wip-us.apache.org/repos/asf/incubator-rya/blob/80faf06d/extras/indexing/src/test/java/mvm/rya/accumulo/documentIndex/DocumentIndexIntersectingIteratorTest.java
----------------------------------------------------------------------
diff --git a/extras/indexing/src/test/java/mvm/rya/accumulo/documentIndex/DocumentIndexIntersectingIteratorTest.java b/extras/indexing/src/test/java/mvm/rya/accumulo/documentIndex/DocumentIndexIntersectingIteratorTest.java
index 075a111..8b76f7f 100644
--- a/extras/indexing/src/test/java/mvm/rya/accumulo/documentIndex/DocumentIndexIntersectingIteratorTest.java
+++ b/extras/indexing/src/test/java/mvm/rya/accumulo/documentIndex/DocumentIndexIntersectingIteratorTest.java
@@ -1,4 +1,24 @@
 package mvm.rya.accumulo.documentIndex;
+
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ * 
+ *   http://www.apache.org/licenses/LICENSE-2.0
+ * 
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied.  See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+
 import java.util.Collection;
 import java.util.List;
 import java.util.Map;

http://git-wip-us.apache.org/repos/asf/incubator-rya/blob/80faf06d/extras/indexing/src/test/java/mvm/rya/indexing/IndexPlanValidator/GeneralizedExternalProcessorTest.java
----------------------------------------------------------------------
diff --git a/extras/indexing/src/test/java/mvm/rya/indexing/IndexPlanValidator/GeneralizedExternalProcessorTest.java b/extras/indexing/src/test/java/mvm/rya/indexing/IndexPlanValidator/GeneralizedExternalProcessorTest.java
index edf5d57..bfea0bd 100644
--- a/extras/indexing/src/test/java/mvm/rya/indexing/IndexPlanValidator/GeneralizedExternalProcessorTest.java
+++ b/extras/indexing/src/test/java/mvm/rya/indexing/IndexPlanValidator/GeneralizedExternalProcessorTest.java
@@ -1,5 +1,25 @@
 package mvm.rya.indexing.IndexPlanValidator;
 
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ * 
+ *   http://www.apache.org/licenses/LICENSE-2.0
+ * 
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied.  See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+
+
 import static org.junit.Assert.*;
 
 import java.util.ArrayList;

http://git-wip-us.apache.org/repos/asf/incubator-rya/blob/80faf06d/extras/indexing/src/test/java/mvm/rya/indexing/IndexPlanValidator/IndexPlanValidatorTest.java
----------------------------------------------------------------------
diff --git a/extras/indexing/src/test/java/mvm/rya/indexing/IndexPlanValidator/IndexPlanValidatorTest.java b/extras/indexing/src/test/java/mvm/rya/indexing/IndexPlanValidator/IndexPlanValidatorTest.java
index b83ef10..eea5b95 100644
--- a/extras/indexing/src/test/java/mvm/rya/indexing/IndexPlanValidator/IndexPlanValidatorTest.java
+++ b/extras/indexing/src/test/java/mvm/rya/indexing/IndexPlanValidator/IndexPlanValidatorTest.java
@@ -1,5 +1,25 @@
 package mvm.rya.indexing.IndexPlanValidator;
 
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ * 
+ *   http://www.apache.org/licenses/LICENSE-2.0
+ * 
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied.  See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+
+
 import java.util.ArrayList;
 import java.util.Iterator;
 import java.util.List;

http://git-wip-us.apache.org/repos/asf/incubator-rya/blob/80faf06d/extras/indexing/src/test/java/mvm/rya/indexing/IndexPlanValidator/IndexedExecutionPlanGeneratorTest.java
----------------------------------------------------------------------
diff --git a/extras/indexing/src/test/java/mvm/rya/indexing/IndexPlanValidator/IndexedExecutionPlanGeneratorTest.java b/extras/indexing/src/test/java/mvm/rya/indexing/IndexPlanValidator/IndexedExecutionPlanGeneratorTest.java
index ead7330..79a6656 100644
--- a/extras/indexing/src/test/java/mvm/rya/indexing/IndexPlanValidator/IndexedExecutionPlanGeneratorTest.java
+++ b/extras/indexing/src/test/java/mvm/rya/indexing/IndexPlanValidator/IndexedExecutionPlanGeneratorTest.java
@@ -1,5 +1,25 @@
 package mvm.rya.indexing.IndexPlanValidator;
 
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ * 
+ *   http://www.apache.org/licenses/LICENSE-2.0
+ * 
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied.  See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+
+
 
 
 import java.util.ArrayList;

http://git-wip-us.apache.org/repos/asf/incubator-rya/blob/80faf06d/extras/indexing/src/test/java/mvm/rya/indexing/IndexPlanValidator/ThreshholdPlanSelectorTest.java
----------------------------------------------------------------------
diff --git a/extras/indexing/src/test/java/mvm/rya/indexing/IndexPlanValidator/ThreshholdPlanSelectorTest.java b/extras/indexing/src/test/java/mvm/rya/indexing/IndexPlanValidator/ThreshholdPlanSelectorTest.java
index 3292fed..f8da365 100644
--- a/extras/indexing/src/test/java/mvm/rya/indexing/IndexPlanValidator/ThreshholdPlanSelectorTest.java
+++ b/extras/indexing/src/test/java/mvm/rya/indexing/IndexPlanValidator/ThreshholdPlanSelectorTest.java
@@ -1,5 +1,25 @@
 package mvm.rya.indexing.IndexPlanValidator;
 
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ * 
+ *   http://www.apache.org/licenses/LICENSE-2.0
+ * 
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied.  See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+
+
 import java.util.ArrayList;
 import java.util.Iterator;
 import java.util.List;



http://git-wip-us.apache.org/repos/asf/incubator-rya/blob/80faf06d/partition/partition.rdf/src/test/java/mvm/mmrts/rdf/partition/QueryPartitionData.java
----------------------------------------------------------------------
diff --git a/partition/partition.rdf/src/test/java/mvm/mmrts/rdf/partition/QueryPartitionData.java b/partition/partition.rdf/src/test/java/mvm/mmrts/rdf/partition/QueryPartitionData.java
deleted file mode 100644
index 0f9e7e0..0000000
--- a/partition/partition.rdf/src/test/java/mvm/mmrts/rdf/partition/QueryPartitionData.java
+++ /dev/null
@@ -1,675 +0,0 @@
-package mvm.mmrts.rdf.partition;
-
-import org.openrdf.model.ValueFactory;
-import org.openrdf.model.impl.ValueFactoryImpl;
-import org.openrdf.query.*;
-import org.openrdf.query.resultio.sparqlxml.SPARQLResultsXMLWriter;
-import org.openrdf.repository.Repository;
-import org.openrdf.repository.RepositoryConnection;
-import org.openrdf.repository.sail.SailRepository;
-
-import java.util.Calendar;
-import java.util.List;
-
-public class QueryPartitionData {
-
-    public void run() throws Exception {
-        try {
-            // if (args.length == 0) {
-            // throw new IllegalArgumentException("Specify query file");
-            // }
-            // String fileLoc = args[0];
-            // File queryFile = new File(fileLoc);
-            // final ByteArrayOutputStream baos = new ByteArrayOutputStream();
-            // ByteStreams.copy(new FileInputStream(queryFile), baos);
-            // String query = new String(baos.toByteArray());
-
-            final PartitionSail store = new PartitionSail("stratus", "stratus13:2181", "root", "password",
-                    "partTest", "shardIndexTest");
-//            store.setTablePrefix("str_");
-//            store.getTimeUris().put(new URIImpl("http://here/2010/tracked-data-provenance/ns#performedAt"), DateTimeTtlValueConverter.class);
-//            store.getTimeUris().add(new URIImpl("http://mvm.com/rdf/2011/02/model#timestamp"));
-//            store.setPerformant(true);
-//            store.setUseStatistics(false);
-//            store.setInferencing(false);
-//            store.setDisplayQueryPlan(false);
-
-//            store.setStartTime("1302811169088");
-//            store.setTtl("86400000");
-//            store.setPerformant(false);
-//            store.setInstance("nimbus");
-//            store.setServer("10.40.189.123");
-            Repository myRepository = new SailRepository(store);
-            myRepository.initialize();
-
-//			BufferedOutputStream os = new BufferedOutputStream(
-//					new FileOutputStream("query.out"));
-
-            RepositoryConnection conn = myRepository.getConnection();
-
-            // String query =
-            // "SELECT ?pred ?obj WHERE { <http://mvm-model/mvm#mm_7afb494d-dddc-4f1c-8b5b-0413b3ebe783> ?pred ?obj }";
-            // String query =
-            // "SELECT ?pred ?obj WHERE { <http://mvm-model/mvm#mm_b10c2c18-80c3-41ef-8069-9df1a6e8879c> ?pred ?obj }";
-            // String query =
-            // "SELECT ?pred ?obj WHERE { <http://mvm-model/mvm#mm_001ca4e0-521f-440e-9e72-9b59bb14fd2c> ?pred ?obj }";
-
-            String cimHasInstalledSoftware = "PREFIX rdf: <http://www.w3.org/1999/02/22-rdf-syntax-ns#>\n" +
-                    "PREFIX mm: <http://mvm.com/owl/2010/10/mm.owl#>\n" +
-                    "SELECT * WHERE {\n" +
-                    "     ?serv mm:hasRunningOS ?obj .\n" +
-                    "      ?obj mm:name ?name ;\n" +
-//                    "           mm:caption ?caption .\n" +
-                    "}";
-
-            String cimHasRunningSoftware = "PREFIX rdf: <http://www.w3.org/1999/02/22-rdf-syntax-ns#>\n" +
-                    "PREFIX mm: <http://mvm.com/owl/mm.owl#>\n" +
-                    "SELECT * WHERE {\n" +
-                    "     <http://mvm.com/owl/mm.owl#urn:mm:mvm:LTS:root/cimv2:PG_OperatingSystem:CIM_ComputerSystem:stratus06.bullpen.net:Red_Hat_Enterprise_Linux_Server> mm:hasRunningOS ?obj .\n" +
-                    "     ?obj mm:name ?name ; \n" +
-                    "          mm:handle ?handle ; \n" +
-                    "          mm:description ?description ; \n" +
-                    "          mm:caption ?caption . \n" +
-                    "}";
-
-            String artistQuery = "SELECT * WHERE { "
-                    + " ?subj ?pred \"Bonnie Tyler\" . "
-                    + "}";
-            String lubm1 = "PREFIX rdf: <http://www.w3.org/1999/02/22-rdf-syntax-ns#>\n" +
-                    " PREFIX ub: <urn:lubm:test#>\n" +
-                    " SELECT * WHERE\n" +
-                    " {\n" +
-                    "     ?x ub:takesCourse <http://www.Department0.University0.edu/GraduateCourse0> .\n" +
-                    "     ?x rdf:type ub:GraduateStudent .\n" +
-                    "     ?x ub:name ?name .\n" +
-//                    "     FILTER regex(?name, \"GraduateStudent44\", \"i\") .\n" +
-                    " }";
-
-            String gradStudent44 = "SELECT * WHERE { <http://www.Department0.University0.edu/GraduateStudent44> ?p ?o.}";
-
-            String lubm4 = "PREFIX rdf: <http://www.w3.org/1999/02/22-rdf-syntax-ns#>\n" +
-                    " PREFIX rdfs: <http://www.w3.org/2000/01/rdf-schema#>\n" +
-                    " PREFIX ub: <urn:test:onto:univ#>\n" +
-                    " SELECT * WHERE\n" +
-                    " {\n" +
-                    "      { ?pred rdfs:subPropertyOf ub:worksFor . ?y ?pred <http://www.Department0.University0.edu> }\n" +
-                    "       UNION " +
-                    "      { ?y ub:worksFor <http://www.Department0.University0.edu> }\n" +
-                    "      { ?x rdfs:subClassOf ub:Professor . ?y rdf:type ?x }\n" +
-                    "       UNION " +
-                    "      { ?y rdf:type ub:Professor }\n" +
-                    "      ?y ub:name ?y1 .\n" +
-                    "      ?y ub:emailAddress ?y2 .\n" +
-                    "      ?y ub:telephone ?y3 .\n" +
-                    " }";
-
-            String lubm4_clean = "PREFIX rdf: <http://www.w3.org/1999/02/22-rdf-syntax-ns#>\n" +
-                    " PREFIX rdfs: <http://www.w3.org/2000/01/rdf-schema#>\n" +
-                    " PREFIX ub: <urn:test:onto:univ#>\n" +
-                    " SELECT * WHERE\n" +
-                    " {\n" +
-                    "      ?y ub:worksFor <http://www.Department0.University0.edu>.\n" +
-                    "      ?y rdf:type ub:FullProfessor.\n" +
-                    "      ?y ub:name ?y1 .\n" +
-                    " } ORDER BY ?y";
-
-            String cimLatestMeasure = "PREFIX rdf: <http://www.w3.org/1999/02/22-rdf-syntax-ns#>\n" +
-                    "PREFIX mm: <http://mvm.com/owl/2010/10/mm.owl#>\n" +
-                    "SELECT ?proc ?val ?time WHERE {\n" +
-                    "     ?proc mm:loadPercentage ?val .\n" +
-                    "     ?subj rdf:subject ?proc .\n" +
-                    "     ?subj rdf:object ?val2 .\n" +
-                    "     ?subj  rdf:type rdf:Statement ;\n" +
-                    "     \t    mm:reportedAt ?time .\n" +
-                    " FILTER (?val2 = ?val) }\n" +
-                    "ORDER BY DESC(?time)\n" +
-                    "LIMIT 25";
-
-            String cimHasFileSystemSpecific = "PREFIX rdf: <http://www.w3.org/1999/02/22-rdf-syntax-ns#>\n" +
-                    "PREFIX mm: <http://mvm.com/owl/mm.owl#>\n" +
-                    "SELECT * WHERE {\n" +
-                    "     <http://mvm.com/owl/mm.owl#urn:mm:mvm:lts:root/cimv2:PG_OperatingSystem.CreationClassName=CIM_OperatingSystem,CSCreationClassName=CIM_UnitaryComputerSystem,CSName=roshan.bullpen.net,Name=Red_Hat_Enterprise_Linux_Server> mm:hasFileSystem ?obj ." +
-//                "     ?serv mm:hasFileSystem ?obj ." +
-                    "}";
-
-            String allObjects = "PREFIX rdf: <http://www.w3.org/1999/02/22-rdf-syntax-ns#>\n" +
-                    "PREFIX mm: <http://mvm.com/owl/2010/10/mm.owl#>\n" +
-                    "SELECT * WHERE {\n" +
-                    "   {?s <http://www.w3.org/1999/02/22-rdf-syntax-ns#object> ?o.} }";
-
-            String deletePkgTrkr = "PREFIX rdf: <http://www.w3.org/1999/02/22-rdf-syntax-ns#>\n" +
-                    " PREFIX mm: <http://mvm.com/owl/2010/10/mm.owl#>\n" +
-                    " PREFIX xsd: <http://www.w3.org/2001/XMLSchema#>\n" +
-                    "PREFIX prov: <http://this.doc/2010/tracked-data-provenance/ns#>\n" +
-                    " SELECT DISTINCT * WHERE\n" +
-                    " {\n" +
-                    "     ?subj rdf:type prov:AggregatePkgInfo.\n" +
-                    "     ?subj ?pred ?obj." +
-                    " }";
-
-            String lubm5 = "PREFIX rdf: <http://www.w3.org/1999/02/22-rdf-syntax-ns#>\n" +
-                    " PREFIX ub: <urn:test:onto:univ#>\n" +
-                    "PREFIX rdfs: <http://www.w3.org/2000/01/rdf-schema#>\n" +
-                    " SELECT * WHERE\n" +
-                    " {\n" +
-                    "      { ?pred rdfs:subPropertyOf ub:memberOf . ?thing ?pred <http://www.Department0.University0.edu> }\n" +
-                    "       UNION " +
-                    "      { ?thing ub:memberOf <http://www.Department0.University0.edu> }\n" +
-//                    "      { ?type rdfs:subClassOf ub:Person . ?thing rdf:type ?type }\n" +
-//                    "       UNION " +
-//                    "      { ?thing rdf:type ub:Person }\n" +
-                    " }";
-
-            String lubm5_clean = "PREFIX rdf: <http://www.w3.org/1999/02/22-rdf-syntax-ns#>\n" +
-                    " PREFIX ub: <urn:test:onto:univ#>\n" +
-                    "PREFIX rdfs: <http://www.w3.org/2000/01/rdf-schema#>\n" +
-                    " SELECT * WHERE\n" +
-                    " {\n" +
-                    "      ?thing ub:memberOf <http://www.Department0.University0.edu>.\n" +
-                    "      ?thing rdf:type ub:Person. \n" +
-                    " }";
-
-            String lubm5_clean2 = "PREFIX rdf: <http://www.w3.org/1999/02/22-rdf-syntax-ns#>\n" +
-                    " PREFIX ub: <urn:test:onto:univ#>\n" +
-                    "PREFIX rdfs: <http://www.w3.org/2000/01/rdf-schema#>\n" +
-                    " SELECT * WHERE\n" +
-                    " {\n" +
-                    "      ?pred rdfs:subPropertyOf ub:memberOf . ?thing ?pred <http://www.Department0.University0.edu> . ?type rdfs:subClassOf ub:Person . ?thing rdf:type ?type \n" +
-                    " }";
-
-            String lubm3 = "PREFIX rdf: <http://www.w3.org/1999/02/22-rdf-syntax-ns#>\n" +
-                    "                 PREFIX ub: <urn:test:onto:univ#>\n" +
-                    "                 SELECT * WHERE\n" +
-                    "                 {\n" +
-                    "                      ?x ub:publicationAuthor <http://www.Department0.University0.edu/AssistantProfessor3> .\n" +
-                    "                      ?x rdf:type ub:Publication.\n" +
-                    "                }";
-
-            String lubm2 = "PREFIX rdf: <http://www.w3.org/1999/02/22-rdf-syntax-ns#>\n" +
-                    "                 PREFIX ub: <urn:test:onto:univ#>\n" +
-                    "                 SELECT * WHERE\n" +
-                    "                 {\n" +
-                    "                       ?y rdf:type ub:University .\n" +
-                    "                       ?z ub:subOrganizationOf ?y .\n" +
-                    "                       ?z rdf:type ub:Department .\n" +
-                    "                       ?x ub:memberOf ?z .\n" +
-                    "                       ?x ub:undergraduateDegreeFrom ?y .\n" +
-                    "                       ?x rdf:type ub:GraduateStudent .\n" +
-                    "                }";
-
-            String lubm2_a = "PREFIX rdf: <http://www.w3.org/1999/02/22-rdf-syntax-ns#>\n" +
-                    "                 PREFIX ub: <urn:test:onto:univ#>\n" +
-                    "                 SELECT * WHERE\n" +
-                    "                 {\n" +
-//                    "                       ?y rdf:type ub:University .\n" +
-                    "                       ?z ub:subOrganizationOf <http://www.University700.edu> .\n" +
-                    "                       ?z rdf:type ub:Department .\n" +
-                    "                       ?x ub:memberOf ?z .\n" +
-                    "                       ?x ub:undergraduateDegreeFrom <http://www.University700.edu> .\n" +
-                    "                       ?x rdf:type ub:GraduateStudent .\n" +
-                    "                }";
-
-            String hasAlum = "PREFIX rdf: <http://www.w3.org/1999/02/22-rdf-syntax-ns#>\n" +
-                    "PREFIX rdfs: <http://www.w3.org/2000/01/rdf-schema#>\n" +
-                    "PREFIX owl: <http://www.w3.org/2002/07/owl#>\n" +
-                    " PREFIX ub: <urn:test:onto:univ#>\n" +
-                    "SELECT * WHERE {\n" +
-                    "        {<http://www.University1.edu> ub:hasAlumnus ?alum } \n" +
-                    "        UNION \n" +
-                    "        {ub:hasAlumnus owl:inverseOf ?invProp . \n" +
-                    "      { ?pred rdfs:subPropertyOf ?invProp . ?alum ?pred <http://www.University1.edu> }\n" +
-                    "         UNION \n" +
-                    "      { ?alum ?invProp <http://www.University0.edu> }}\n" +
-                    "      { ?type rdfs:subClassOf ub:Person . ?alum rdf:type ?type }\n" +
-                    "       UNION       { ?alum rdf:type ub:Person }\n" +
-                    "}";
-
-            String hasAlum_clean = "PREFIX rdf: <http://www.w3.org/1999/02/22-rdf-syntax-ns#>\n" +
-                    "PREFIX rdfs: <http://www.w3.org/2000/01/rdf-schema#>\n" +
-                    "PREFIX owl: <http://www.w3.org/2002/07/owl#>\n" +
-                    " PREFIX ub: <urn:test:onto:univ#>\n" +
-                    "SELECT * WHERE {\n" +
-                    "        <http://www.University1.edu> ub:hasAlumnus ?alum .\n" +
-                    "}";
-
-            String lubm7 = "PREFIX rdf: <http://www.w3.org/1999/02/22-rdf-syntax-ns#>\n" +
-                    "PREFIX rdfs: <http://www.w3.org/2000/01/rdf-schema#>\n" +
-                    " PREFIX ub: <urn:test:onto:univ#>\n" +
-                    " SELECT DISTINCT ?course ?student WHERE\n" +
-                    " {\n" +
-                    " <http://www.Department0.University0.edu/AssociateProfessor0> ub:teacherOf ?course .\n" +
-                    " ?student ub:takesCourse ?course .\n" +
-                    "      { ?type rdfs:subClassOf ub:Student . ?student rdf:type ?type }\n" +
-                    "       UNION       { ?student rdf:type ub:Student }\n" +
-                    "      { ?type rdfs:subClassOf ub:Course . ?course rdf:type ?type }\n" +
-                    "       UNION       { ?course rdf:type ub:Course }\n" +
-                    " }";
-
-            String lubm7_clean = "PREFIX rdf: <http://www.w3.org/1999/02/22-rdf-syntax-ns#>\n" +
-                    "PREFIX rdfs: <http://www.w3.org/2000/01/rdf-schema#>\n" +
-                    " PREFIX ub: <urn:test:onto:univ#>\n" +
-                    " SELECT * WHERE\n" +
-                    " {\n" +
-                    " <http://www.Department0.University0.edu/AssociateProfessor0> ub:teacherOf ?course .\n" +
-                    " ?student ub:takesCourse ?course .\n" +
-                    " ?student rdf:type ub:Student .\n" +
-                    " ?course rdf:type ub:Course .\n" +
-                    " }";
-
-            String lubm8_clean = "PREFIX rdf: <http://www.w3.org/1999/02/22-rdf-syntax-ns#>\n" +
-                    "PREFIX rdfs: <http://www.w3.org/2000/01/rdf-schema#>\n" +
-                    " PREFIX ub: <urn:test:onto:univ#>\n" +
-                    " SELECT * WHERE\n" +
-                    " {\n" +
-                    "      ?suborg ub:subOrganizationOf <http://www.University0.edu> .\n" +
-                    "      ?mem ub:memberOf ?suborg .\n" +
-                    "      ?suborg rdf:type ub:Department .\n" +
-                    "        ?mem ub:emailAddress ?email. \n" +
-                    " }";
-
-            String lubm9 = "PREFIX rdf: <http://www.w3.org/1999/02/22-rdf-syntax-ns#>\n" +
-                    "PREFIX rdfs: <http://www.w3.org/2000/01/rdf-schema#>\n" +
-                    " PREFIX ub: <urn:test:onto:univ#>\n" +
-                    " SELECT * WHERE\n" +
-                    " {\n" +
-                    "       ?teach ub:teacherOf ?course .\n" +
-                    "       ?teach ub:advisor ?student .\n" +
-                    "        ?student ub:takesCourse ?course .\n" +
-                    " }";
-
-            String kolm = "PREFIX rdf: <http://www.w3.org/1999/02/22-rdf-syntax-ns#>\n" +
-                    "PREFIX mm: <http://mvm.com/owl/2010/10/mm.owl#>\n" +
-                    "PREFIX kolm: <http://mvm.com/lrn/2010/11/kolm#>\n" +
-                    "SELECT DISTINCT ?srv ?ncd WHERE {\n" +
-                    "     ?pt kolm:serverRef <http://mvm.com/owl/2010/10/mm.owl#urn:mm:mvm:root/cimv2:PG_OperatingSystem:CIM_ComputerSystem:cirrus05.bullpen.net:Red_Hat_Enterprise_Linux_Server> .\n" +
-                    "     ?cluster kolm:relatesTo ?pt .\n" +
-                    "     ?cluster kolm:relatesTo ?pt2 .\n" +
-                    "     ?pt2 kolm:serverRef ?srv .\n" +
-                    "       ?pt2  kolm:ncd ?ncd .\n" +
-                    "       ?cluster kolm:timestamp ?ts .\n" +
-                    " } \n" +
-                    " ORDER BY ?ncd";
-
-            String kolm_tst = "PREFIX rdf: <http://www.w3.org/1999/02/22-rdf-syntax-ns#>\n" +
-                    "PREFIX mm: <http://mvm.com/owl/2010/10/mm.owl#>\n" +
-                    "PREFIX kolm: <http://mvm.com/lrn/2010/11/kolm#>\n" +
-                    "SELECT * WHERE {\n" +
-                    "     ?s <http://mvm.com/lrn/2010/11/kolm#relatesTo> <http://mvm.com/lrn/2010/11/kolm#fef60314-78f1-4918-ad03-e1aff835e858>\n" +
-                    " } ";
-
-            String lubm9_tst = "PREFIX rdf: <http://www.w3.org/1999/02/22-rdf-syntax-ns#>\n" +
-                    "PREFIX rdfs: <http://www.w3.org/2000/01/rdf-schema#>\n" +
-                    " PREFIX ub: <urn:test:onto:univ#>\n" +
-                    " SELECT * WHERE\n" +
-                    " {\n" +
-                    "       ?teach ub:teacherOf ?course .\n" +
-//                    "       ?teach ub:teacherOf <http://www.Department0.University0.edu/Course3> .\n" +
-                    "       ?student ub:advisor ?teach .\n" +
-                    "        ?student ub:takesCourse ?course .\n" +
-                    " }";
-
-            String bsbm1 = "PREFIX bsbm-inst: <http://www4.wiwiss.fu-berlin.de/bizer/bsbm/v01/instances/>\n" +
-                    "PREFIX bsbm: <http://www4.wiwiss.fu-berlin.de/bizer/bsbm/v01/vocabulary/>\n" +
-                    "PREFIX rdfs: <http://www.w3.org/2000/01/rdf-schema#>\n" +
-                    "PREFIX rdf: <http://www.w3.org/1999/02/22-rdf-syntax-ns#>\n" +
-                    "PREFIX xsd: <http://www.w3.org/2001/XMLSchema#>\n" +
-                    "\n" +
-                    "SELECT *\n" +
-                    "WHERE {\n" +
-                    "    ?product a <http://www4.wiwiss.fu-berlin.de/bizer/bsbm/v01/instances/ProductType1315> .\n" +
-                    "    ?product bsbm:productFeature <http://www4.wiwiss.fu-berlin.de/bizer/bsbm/v01/instances/ProductFeature750> .\n" +
-                    "    ?product bsbm:productPropertyNumeric1 ?value1 .\n" +
-                    "    ?product rdfs:label ?label .\n" +
-                    "        FILTER (?value1 > 933) .\n" +
-                    "        }" +
-                    "LIMIT 10" +
-                    "";
-
-            String hbs = "PREFIX hb: <http://here/2010/tracked-data-provenance/heartbeat/ns#>\n" +
-                    "\n" +
-                    "select * where {\n" +
-                    " hb:f452f776-4994-43fc-ada0-4b60cc979dac ?p ?o .\n" +
-                    "}";
-
-            String hbmodel = "PREFIX rdf: <http://www.w3.org/1999/02/22-rdf-syntax-ns#>\n" +
-                    "PREFIX mod: <http://mvm.com/rdf/2011/02/model#>\n" +
-                    "PREFIX xsd: <http://www.w3.org/2001/XMLSchema#>\n" +
-                    "SELECT ?s (SUM(?ts) AS ?sum) WHERE\n" +
-                    "{\n" +
-                    "    ?s mod:key \"messageCount/urn:system:LTS-01/A/A_Sports1/ProvenanceAPI/2/12/\";\n" +
-                    "        mod:model ?model;\n" +
-                    "        mod:timestamp ?ts.\n" +
-                    "}\n" +
-                    "GROUP BY ?s\n" +
-                    "ORDER BY DESC(?ts)\n";
-
-            String hbagg = "PREFIX rdf: <http://www.w3.org/1999/02/22-rdf-syntax-ns#>\n" +
-                    " PREFIX hb: <http://here/2010/tracked-data-provenance/heartbeat/ns#>\n" +
-                    " PREFIX xsd: <http://www.w3.org/2001/XMLSchema#>\n" +
-                    " SELECT * WHERE\n" +
-                    " {\n" +
-                    "     ?systemName hb:reportedBy ?reportedBy;\n" +
-                    "                         hb:systemType ?systemType;\n" +
-                    "                         hb:heartbeat ?hbuuid.\n" +
-                    "     ?hbuuid hb:timestamp ?timestamp;\n" +
-                    "                         hb:messageCount ?messageCount.\n" +
-                    " }";
-
-            String runningProc = "PREFIX rdf: <http://www.w3.org/1999/02/22-rdf-syntax-ns#>\n" +
-                    " PREFIX mm: <http://mvm.com/owl/2010/10/mm.owl#>\n" +
-                    " PREFIX xsd: <http://www.w3.org/2001/XMLSchema#>\n" +
-                    " SELECT DISTINCT * WHERE\n" +
-                    " {\n" +
-                    "     <http://mvm.com/owl/2010/10/mm.owl#urn:mm:mvm:root/cimv2:PG_OperatingSystem:CIM_ComputerSystem:cirrus05.bullpen.net:Red_Hat_Enterprise_Linux_Server> mm:hasRunningProcess ?obj .\n" +
-                    "     ?obj mm:parameters ?params .\n" +
-                    "     ?obj mm:name ?name .\n" +
-                    " }";
-
-            String single = "PREFIX rdf: <http://www.w3.org/1999/02/22-rdf-syntax-ns#>\n" +
-                    " PREFIX rdfs: <http://www.w3.org/2000/01/rdf-schema#>\n" +
-                    " PREFIX ub: <urn:test:onto:univ#>\n" +
-                    " SELECT * WHERE\n" +
-                    " {\n" +
-                    "      <http://www.Department11.University370.edu/AssociateProfessor9> ?p ?o.\n" +
-                    " }";
-
-            String modelInnerSelect = "PREFIX nh: <http://mvm.com/rdf/2011/02/model#>\n" +
-                    "\n" +
-                    "SELECT * WHERE {\n" +
-                    "\n" +
-                    "{\n" +
-                    "SELECT ?key ?modelType (MAX(?timestamp) as ?ts) WHERE {\n" +
-                    "     ?modelUuid nh:key ?key;\n" +
-                    "     FILTER regex(?key, \"messageCount(.*)/2/13\").\n" +
-                    "     ?modelUuid nh:modelType ?modelType;\n" +
-                    "                        nh:timestamp ?timestamp\n" +
-                    "}\n" +
-                    "GROUP BY ?key ?modelType\n" +
-                    "}\n" +
-                    "\n" +
-                    "?muuid nh:key ?key;\n" +
-                    "           nh:timestamp ?ts;\n" +
-                    "           nh:modelType ?modelType;\n" +
-                    "           nh:model ?model.\n" +
-                    "\n" +
-                    "}";
-
-            String modelInnerSelect2 = "PREFIX nh: <http://mvm.com/rdf/2011/02/model#>\n" +
-                    "\n" +
-                    "SELECT ?key ?modelType (MAX(?timestamp) as ?ts) WHERE {\n" +
-                    "     ?modelUuid nh:key ?key;\n" +
-                    "     FILTER regex(?key, \"messageCount(.*)/2/13\").\n" +
-                    "     ?modelUuid nh:modelType ?modelType;\n" +
-                    "                        nh:timestamp ?timestamp\n" +
-                    "}\n" +
-                    "GROUP BY ?key ?modelType\n" +
-                    "";
-
-            String jim_query = "PREFIX tdp: <http://here/2010/tracked-data-provenance/ns#>\n" +
-                    "\n" +
-                    "select * where {\n" +
-                    " ?eventUUID ?eventType <urn:tdo:88d4f31f-99dc-4527-8cfb-2c93583ad498> .\n" +
-//                    " ?eventUUID tdp:performedBy ?systemName .\n" +
-                    " ?eventUUID tdp:performedAt ?timeStamp .\n" +
-                    "}\n" +
-                    "ORDERBY ?timeStamp";
-
-            String theWoman = "PREFIX rdf: <http://www.w3.org/1999/02/22-rdf-syntax-ns#>\n" +
-                    "PREFIX rdfs: <http://www.w3.org/2000/01/rdf-schema#>\n" +
-                    " PREFIX ub: <urn:test:onto:univ#>\n" +
-                    " SELECT * WHERE\n" +
-                    " {\n" +
-                    " ub:TheWoman ?p ?o.\n" +
-                    " }";
-
-            String hbtimestamp = "PREFIX rdf: <http://www.w3.org/1999/02/22-rdf-syntax-ns#>\n" +
-                    " PREFIX hb: <http://here/2010/tracked-data-provenance/heartbeat/ns#>\n" +
-                    " PREFIX xsd: <http://www.w3.org/2001/XMLSchema#>\n" +
-                    " SELECT * WHERE\n" +
-                    " {\n" +
-                    "     ?hbuuid hb:timestamp ?timestamp.\n" +
-                    " }";
-
-            String tde = "PREFIX tdp: <http://here/2010/tracked-data-provenance/ns#>\n" +
-                    "PREFIX rdf: <http://www.w3.org/1999/02/22-rdf-syntax-ns#>\n" +
-                    "PREFIX mvm: <urn:mvm.mmrts.rdfcloudstore/06/2011#>\n" +
-                    "SELECT * WHERE\n" +
-                    "{\n" +
-                    "\t?id tdp:performedAt ?ts.\n" +
-                    "\tFILTER(mvm:timeRange(?ts, tdp:performedAt, 'mvm.mmrts.api.date.DateTimeTtlValueConverter', 7200000, '1303911164088')).\n" +
-                    "\t?id tdp:performedBy ?system;\n" +
-                    "\t    rdf:type ?eventType.\n" +
-                    "}\n";
-
-            String tde_spec = "PREFIX tdp: <http://here/2010/tracked-data-provenance/ns#>\n" +
-                    "PREFIX rdf: <http://www.w3.org/1999/02/22-rdf-syntax-ns#>\n" +
-                    "SELECT * WHERE\n" +
-                    "{\n" +
-                    "\t<urn:tde:0b19e224-7524-4ba0-880b-16d9ec735303> ?p ?o.\n" +
-                    "}";
-
-            String cimLatestMeasure_timeindex = "PREFIX rdf: <http://www.w3.org/1999/02/22-rdf-syntax-ns#>\n" +
-                    "PREFIX mm: <http://mvm.com/owl/2010/10/mm.owl#>\n" +
-                    "PREFIX mvm: <urn:mvm.mmrts.rdfcloudstore/06/2011#>\n" +
-                    "SELECT ?proc ?val ?time WHERE {\n" +
-                    "     ?proc mm:loadPercentage ?val .\n" +
-                    "     ?subj rdf:subject ?proc .\n" +
-                    "     ?subj rdf:object ?val2 .\n" +
-                    "     ?subj  rdf:type rdf:Statement ;\n" +
-                    "     \t    mm:reportedAt ?time .\n" +
-                    "\tFILTER (mvm:timeRange(?time, mm:reportedAt, 'mvm.mmrts.api.date.DateTimeTtlValueConverter', '86400000', '1295725236000')). " +
-                    "}\n" +
-//                    "ORDER BY DESC(?time)\n" +
-//                    "LIMIT 25" +
-                    "";
-
-            String hbs_ti = "PREFIX hb: <http://here/2010/tracked-data-provenance/heartbeat/ns#>\n" +
-                    "\n" +
-                    "select * where {\n" +
-                    " ?s ?p hb:f452f776-4994-43fc-ada0-4b60cc979dac .\n" +
-                    "}";
-
-            String hbtimestamp_ti = "PREFIX hb: <http://here/2010/tracked-data-provenance/heartbeat/ns#>\n" +
-                    "PREFIX xsd: <http://www.w3.org/2001/XMLSchema#>\n" +
-                    "PREFIX mvm: <urn:mvm.mmrts.rdfcloudstore/06/2011#>\n" +
-                    "SELECT * WHERE\n" +
-                    "{\n" +
-                    "\t?id hb:timestamp ?timestamp.\n" +
-                    "\tFILTER(mvm:timeRange(?timestamp, hb:timestamp, 'mvm.mmrts.api.date.TimestampTtlStrValueConverter', '72000000')).\n" +
-                    "\t?id hb:systemName ?system;\n" +
-                    "\t     hb:count ?count;\n" +
-                    "}";
-
-            String tst_qu = "select * where {\n" +
-                    "\n" +
-                    " ?s <http://www.w3.org/1999/02/22-rdf-syntax-ns#type> <urn:test2:lubm#GraduateStudent>.\n" +
-                    " ?s <urn:test2:lubm#advisor> <http://www.Department4.University5155.edu/AssociateProfessor3>.\n" +
-//                    " ?s ?p ?o.\n" +
-                    "\n" +
-                    "}";
-
-            String tst_qu2 = "select * where {\n" +
-                    "\n" +
-                    "?s <urn:test2:lubm#takesCourse> <http://www.Department6.University4518.edu/GraduateCourse3>.\n" +
-                    "?s <urn:test2:lubm#takesCourse> <http://www.Department6.University4518.edu/GraduateCourse28>.\n" +
-                    "\n" +
-                    "}";
-
-            String timeQueryMike = "PREFIX tdp: <http://here/2010/tracked-data-provenance/ns#>\n" +
-                    "PREFIX rdf: <http://www.w3.org/1999/02/22-rdf-syntax-ns#>\n" +
-                    "PREFIX mvm: <urn:mvm.mmrts.rdfcloudstore/06/2011#>\n" +
-                    "SELECT * WHERE\n" +
-                    "{\n" +
-                    "?id tdp:reportedAt ?timestamp. \n" +
-                    "FILTER(mvm:timeRange(?timestamp, tdp:reportedAt, 'mvm.mmrts.api.date.DateTimeTtlValueConverter', '14400000')).\n" +
-                    "?id tdp:performedBy ?system;\n" +
-                    "    rdf:type ?eventType.\n" +
-                    "} ";
-
-            String eventQ = "PREFIX tdp: <http://here/2010/tracked-data-provenance/ns#>\n" +
-                    "select * where {\n" +
-                    "?eventUUID tdp:performedBy ?systemName .\n" +
-                    "} LIMIT 100";
-
-            String allFullProf0 = "select * where {\n" +
-                    "<http://www.Department0.University0.edu/FullProfessor0> ?p ?o.\n" +
-                    "\n}";
-
-            // Provenance Queries
-            String prov_eventInfo = "select * where {\n" +
-                    "<urn:tdo:7202e20d-d66c-469c-8a10-62e36e21d820> ?p ?o.\n" +
-                    "}";
-
-            String prov_objectInfo = "PREFIX ns:<http://here/2010/tracked-data-provenance/ns#>\n" +
-                    "select * where {\n" +
-                    "{" +
-                    "   ?s ns:createdItem <urn:tdc:01a49336-d0e0-3cd6-9ddb-0e6b14369876>.\n" +
-                    "   ?s ns:performedBy ?pb.\n" +
-                    "   ?s ns:performedAt ?pa.\n" +
-                    "}\n" +
-                    "UNION {" +
-                    "   ?s ns:clickedItem <urn:tdc:01a49336-d0e0-3cd6-9ddb-0e6b14369876>.\n" +
-                    "   ?s ns:performedBy ?pb.\n" +
-                    "   ?s ns:performedAt ?pa.\n" +
-                    "}\n" +
-                    "UNION {" +
-                    "   ?s ns:deletedItem <urn:tdc:01a49336-d0e0-3cd6-9ddb-0e6b14369876>.\n" +
-                    "   ?s ns:performedBy ?pb.\n" +
-                    "   ?s ns:performedAt ?pa.\n" +
-                    "}\n" +
-                    "UNION {" +
-                    "   ?s ns:droppedItem <urn:tdc:01a49336-d0e0-3cd6-9ddb-0e6b14369876>.\n" +
-                    "   ?s ns:performedBy ?pb.\n" +
-                    "   ?s ns:performedAt ?pa.\n" +
-                    "}\n" +
-                    "UNION {" +
-                    "   ?s ns:receivedItem <urn:tdc:01a49336-d0e0-3cd6-9ddb-0e6b14369876>.\n" +
-                    "   ?s ns:performedBy ?pb.\n" +
-                    "   ?s ns:performedAt ?pa.\n" +
-                    "}\n" +
-                    "UNION {" +
-                    "   ?s ns:storedItem <urn:tdc:01a49336-d0e0-3cd6-9ddb-0e6b14369876>.\n" +
-                    "   ?s ns:performedBy ?pb.\n" +
-                    "   ?s ns:performedAt ?pa.\n" +
-                    "}\n" +
-                    "UNION {" +
-                    "   ?s ns:sentItem <urn:tdc:01a49336-d0e0-3cd6-9ddb-0e6b14369876>.\n" +
-                    "   ?s ns:performedBy ?pb.\n" +
-                    "   ?s ns:performedAt ?pa.\n" +
-                    "}\n" +
-                    "}\n";
-
-            String prov_createdItems = "PREFIX ns: <http://here/2010/tracked-data-provenance/ns#>\n" +
-                    "\n" +
-                    "select * where {\n" +
-                    "\n" +
-                    "  ?s ns:createdItem ?i\n" +
-                    " \n" +
-                    "} limit 10";
-            ////////////////////
-
-            String rangeQuery = "PREFIX tdp: <http://here/2010/tracked-data-provenance/ns#>\n" +
-                    "PREFIX rdf: <http://www.w3.org/1999/02/22-rdf-syntax-ns#>\n" +
-                    "PREFIX mvmpart: <urn:mvm.mmrts.partition.rdf/08/2011#>\n" +
-                    "SELECT * WHERE\n" +
-                    "{\n" +
-                    "?id tdp:reportedAt ?timestamp.\n" +
-                    "FILTER(mvmpart:timeRange(?id, tdp:reportedAt, 1314849589999 , 1314849599999 , 'XMLDATETIME')).\n" +
-                    "?id tdp:performedBy ?system.\n" +
-                    "?id rdf:type ?type.\n" +
-                    "}";
-
-            String issueMMRTS127 = "select * where \n" +
-                    "{ \n" +
-                    "<urn:system:null> ?p ?o. \n" +
-                    "?o ?p2 ?o2. \n" +
-                    "} ";
-
-            String uuid1 = "select * where { " +
-                    "?u <http://here/2010/tracked-data-provenance/ns#performedAt> ?pa; " +
-                    "   <http://here/2010/tracked-data-provenance/ns#reportedAt> ?ra; " +
-                    "   <http://www.w3.org/1999/02/22-rdf-syntax-ns#type> ?t. " +
-                    "}";
-
-            String uuidAuth1 = "select * where { " +
-                    "<http://here/2010/tracked-data-provenance/ns#uuidAuth1> ?p ?o. " +
-                    "}";
-
-            String selectAll = "PREFIX rdf: <http://www.w3.org/1999/02/22-rdf-syntax-ns#>\n" +
-                    " PREFIX ub: <urn:lubm:test#>\n" +
-                    " SELECT * WHERE\n" +
-                    " {\n" +
-                    "     ?x ub:takesCourse ?c .\n" +
-                    "     ?x rdf:type ?gs .\n" +
-                    "     ?x ub:name 'UndergraduateStudent139'.\n" +
-                    " }";
-
-            String query = uuidAuth1;
-
-            Calendar start_cal = Calendar.getInstance();
-            start_cal.set(Calendar.MONTH, 7);
-            start_cal.set(Calendar.DAY_OF_MONTH, 13);
-
-            Calendar end_cal = Calendar.getInstance();
-            end_cal.set(Calendar.MONTH, 7);
-            end_cal.set(Calendar.DAY_OF_MONTH, 14);
-
-            System.out.println(query);
-            long start = System.currentTimeMillis();
-            TupleQuery tupleQuery = conn.prepareTupleQuery(
-                    QueryLanguage.SPARQL, query);
-            ValueFactory vf = ValueFactoryImpl.getInstance();
-//            tupleQuery.setBinding(NUMTHREADS_PROP, vf.createLiteral(10));
-//            tupleQuery.setBinding(AUTHORIZATION_PROP, vf.createLiteral("C"));
-//            tupleQuery.setBinding(START_BINDING, vf.createLiteral(start_cal.getTimeInMillis()));
-//            tupleQuery.setBinding(END_BINDING, vf.createLiteral(end_cal.getTimeInMillis()));
-//            tupleQuery.setBinding(START_BINDING, vf.createLiteral(1313779878435l));
-//            tupleQuery.setBinding(END_BINDING, vf.createLiteral(1313779978435l));
-
-//            TupleQueryResultHandler writer = new SPARQLResultsXMLWriter(new NullOutputStream());
-            TupleQueryResultHandler writer = new SPARQLResultsXMLWriter(System.out);
-//            tupleQuery.evaluate(writer);
-            tupleQuery.evaluate(new TupleQueryResultHandler() {
-
-                int count = 0;
-
-                @Override
-                public void startQueryResult(List<String> strings) throws TupleQueryResultHandlerException {
-                }
-
-                @Override
-                public void endQueryResult() throws TupleQueryResultHandlerException {
-                    System.out.println(count);
-                }
-
-                @Override
-                public void handleSolution(BindingSet bindingSet) throws TupleQueryResultHandlerException {
-                    count++;
-                    System.out.println(bindingSet);
-                }
-            });
-            System.out.println("Total query time: "
-                    + (System.currentTimeMillis() - start));
-
-            conn.close();
-//			os.close();
-
-            // for (String string : urls) {
-            // ByteStreams
-            // }
-            myRepository.shutDown();
-        } catch (Exception e) {
-            e.printStackTrace();
-        }
-
-    }
-
-    public static void main(String[] args) {
-        try {
-            new QueryPartitionData().run();
-        } catch (Exception e) {
-            e.printStackTrace();
-        }
-        System.exit(0);
-    }
-}
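
The scratch classes being removed here all exercise the store through the same OpenRDF/Sesame pattern: prepare a SPARQL TupleQuery on a SailRepositoryConnection, evaluate it, and walk the resulting binding sets. A minimal, self-contained sketch of that pattern against the stock Sesame 2.x API, with a MemoryStore standing in for the partition Sail used above:

    import org.openrdf.query.BindingSet;
    import org.openrdf.query.QueryLanguage;
    import org.openrdf.query.TupleQuery;
    import org.openrdf.query.TupleQueryResult;
    import org.openrdf.repository.sail.SailRepository;
    import org.openrdf.repository.sail.SailRepositoryConnection;
    import org.openrdf.sail.memory.MemoryStore;

    public class QuerySketch {
        public static void main(String[] args) throws Exception {
            // MemoryStore is a stand-in; the deleted classes wire in the partition Sail.
            SailRepository repo = new SailRepository(new MemoryStore());
            repo.initialize();
            SailRepositoryConnection conn = repo.getConnection();
            try {
                TupleQuery query = conn.prepareTupleQuery(
                        QueryLanguage.SPARQL, "SELECT * WHERE { ?s ?p ?o } LIMIT 10");
                TupleQueryResult result = query.evaluate();
                int count = 0;
                while (result.hasNext()) {
                    BindingSet bs = result.next();
                    System.out.println(bs);
                    count++;
                }
                result.close();
                System.out.println("solutions: " + count);
            } finally {
                conn.close();
                repo.shutDown();
            }
        }
    }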

http://git-wip-us.apache.org/repos/asf/incubator-rya/blob/80faf06d/partition/partition.rdf/src/test/java/mvm/mmrts/rdf/partition/TstBed.java
----------------------------------------------------------------------
diff --git a/partition/partition.rdf/src/test/java/mvm/mmrts/rdf/partition/TstBed.java b/partition/partition.rdf/src/test/java/mvm/mmrts/rdf/partition/TstBed.java
deleted file mode 100644
index 084cd2e..0000000
--- a/partition/partition.rdf/src/test/java/mvm/mmrts/rdf/partition/TstBed.java
+++ /dev/null
@@ -1,90 +0,0 @@
-package mvm.mmrts.rdf.partition;
-
-import cloudbase.core.client.BatchScanner;
-import cloudbase.core.client.Connector;
-import cloudbase.core.client.ZooKeeperInstance;
-import cloudbase.core.data.Key;
-import cloudbase.core.data.Range;
-import cloudbase.core.data.Value;
-import com.google.common.io.ByteStreams;
-import mvm.mmrts.rdf.partition.utils.RdfIO;
-import ss.cloudbase.core.iterators.CellLevelRecordIterator;
-import ss.cloudbase.core.iterators.SortedRangeIterator;
-import ss.cloudbase.core.iterators.filter.CBConverter;
-
-import java.util.Collections;
-import java.util.Iterator;
-import java.util.Map;
-
-import static mvm.mmrts.rdf.partition.PartitionConstants.*;
-
-/**
- * Class TstBed
- * Date: Aug 2, 2011
- * Time: 9:22:11 AM
- */
-public class TstBed {
-    public static void main(String[] args) {
-        try {
-
-            String predicate = "http://here/2010/tracked-data-provenance/ns#createdItem";
-
-            Connector connector = new ZooKeeperInstance("stratus", "stratus13:2181").getConnector("root", "password");
-            BatchScanner bs = connector.createBatchScanner("partitionRdf", ALL_AUTHORIZATIONS, 3);
-
-            bs.setScanIterators(21, CellLevelRecordIterator.class.getName(), "ci");
-
-            bs.setScanIterators(20, SortedRangeIterator.class.getName(), "ri");
-            bs.setScanIteratorOption("ri", SortedRangeIterator.OPTION_DOC_COLF, DOC.toString());
-            bs.setScanIteratorOption("ri", SortedRangeIterator.OPTION_COLF, INDEX.toString());
-            bs.setScanIteratorOption("ri", SortedRangeIterator.OPTION_START_INCLUSIVE, "" + true);
-            bs.setScanIteratorOption("ri", SortedRangeIterator.OPTION_END_INCLUSIVE, "" + true);
-            bs.setScanIteratorOption("ri", SortedRangeIterator.OPTION_MULTI_DOC, "" + true);
-
-            bs.setScanIteratorOption("ri", SortedRangeIterator.OPTION_LOWER_BOUND,
-                    "\07http://here/2010/tracked-data-provenance/ns#reportedAt\u0001\u000B2011-08-26T18:01:51.000Z"
-            );
-            bs.setScanIteratorOption("ri", SortedRangeIterator.OPTION_UPPER_BOUND,
-                    "\07http://here/2010/tracked-data-provenance/ns#reportedAt\u0001\u000B2011-08-26T18:01:51.400Z"
-            );
-
-            Range range = new Range(
-                    new Key("2011-08\0"),
-                    new Key("2011-08\uFFFD")
-            );
-
-//            scanner.setRange(range);
-            bs.setRanges(Collections.singleton(range));
-//            bs.fetchColumnFamily(INDEX);
-//            bs.setColumnFamilyRegex(INDEX.toString());
-//            bs.setColumnQualifierRegex(URI_MARKER_STR + predicate + INDEX_DELIM_STR + "(.*)");
-
-            int count = 0;
-            Iterator<Map.Entry<Key, Value>> iter = bs.iterator();
-            CBConverter converter = new CBConverter();
-            while (iter.hasNext()) {
-                count++;
-//                iter.next();
-                Map.Entry<Key, Value> entry = iter.next();
-                Value value = entry.getValue();
-//                System.out.println(entry.getKey().getColumnQualifier() + "----" + value);
-                org.openrdf.model.Value subj = RdfIO.readValue(ByteStreams.newDataInput(entry.getKey().getColumnQualifier().getBytes()), VALUE_FACTORY, FAMILY_DELIM);
-                Map<String, String> map = converter.toMap(entry.getKey(), value);
-                for (Map.Entry<String, String> e : map.entrySet()) {
-                    String predObj = e.getKey();
-                    String[] split = predObj.split("\0");
-                    byte[] look = split[0].getBytes();
-                    System.out.println(subj
-                            + " : " + VALUE_FACTORY.createURI(split[0]) + " : " +
-                            RdfIO.readValue(ByteStreams.newDataInput(split[1].getBytes()), VALUE_FACTORY, FAMILY_DELIM));
-                }
-            }
-            System.out.println(count);
-
-            bs.close();
-
-        } catch (Exception e) {
-            e.printStackTrace();
-        }
-    }
-}

http://git-wip-us.apache.org/repos/asf/incubator-rya/blob/80faf06d/partition/partition.rdf/src/test/java/mvm/mmrts/rdf/partition/TstBedGMDen.java
----------------------------------------------------------------------
diff --git a/partition/partition.rdf/src/test/java/mvm/mmrts/rdf/partition/TstBedGMDen.java b/partition/partition.rdf/src/test/java/mvm/mmrts/rdf/partition/TstBedGMDen.java
deleted file mode 100644
index 44309e2..0000000
--- a/partition/partition.rdf/src/test/java/mvm/mmrts/rdf/partition/TstBedGMDen.java
+++ /dev/null
@@ -1,94 +0,0 @@
-package mvm.mmrts.rdf.partition;
-
-import cloudbase.core.client.BatchScanner;
-import cloudbase.core.client.Connector;
-import cloudbase.core.client.ZooKeeperInstance;
-import cloudbase.core.data.Key;
-import cloudbase.core.data.Range;
-import cloudbase.core.data.Value;
-import com.google.common.io.ByteStreams;
-import mvm.mmrts.rdf.partition.utils.RdfIO;
-import org.apache.hadoop.io.Text;
-import org.openrdf.query.algebra.Var;
-import ss.cloudbase.core.iterators.CellLevelRecordIterator;
-import ss.cloudbase.core.iterators.GMDenIntersectingIterator;
-import ss.cloudbase.core.iterators.SortedRangeIterator;
-import ss.cloudbase.core.iterators.filter.CBConverter;
-
-import java.util.Collections;
-import java.util.Iterator;
-import java.util.Map;
-
-import static mvm.mmrts.rdf.partition.PartitionConstants.*;
-
-/**
- * Class TstBed
- * Date: Aug 2, 2011
- * Time: 9:22:11 AM
- */
-public class TstBedGMDen {
-    public static void main(String[] args) {
-        try {
-
-            Connector connector = new ZooKeeperInstance("stratus", "stratus13:2181").getConnector("root", "password");
-            BatchScanner bs = connector.createBatchScanner("rdfPartition", ALL_AUTHORIZATIONS, 3);
-
-            String[] predicates = {"urn:lubm:test#takesCourse",
-                                   "urn:lubm:test#name",
-                                   "urn:lubm:test#specific",
-                                   "http://www.w3.org/1999/02/22-rdf-syntax-ns#type"};
-
-            Text[] queries = new Text[predicates.length];
-            for (int i = 0; i < predicates.length; i++) {
-                String predicate = predicates[i];
-                queries[i] = new Text(GMDenIntersectingIterator.getRangeTerm(INDEX.toString(),
-                        URI_MARKER_STR + predicate + INDEX_DELIM_STR + "\0"
-                        , true,
-                        URI_MARKER_STR + predicate + INDEX_DELIM_STR + "\uFFFD",
-                        true
-                ));
-                System.out.println(queries[i]);
-            }
-
-            bs.setScanIterators(21, CellLevelRecordIterator.class.getName(), "ci");
-
-            bs.setScanIterators(20, GMDenIntersectingIterator.class.getName(), "ii");
-            bs.setScanIteratorOption("ii", GMDenIntersectingIterator.docFamilyOptionName, DOC.toString());
-            bs.setScanIteratorOption("ii", GMDenIntersectingIterator.indexFamilyOptionName, INDEX.toString());
-            bs.setScanIteratorOption("ii", GMDenIntersectingIterator.columnFamiliesOptionName, GMDenIntersectingIterator.encodeColumns(queries));
-            bs.setScanIteratorOption("ii", GMDenIntersectingIterator.OPTION_MULTI_DOC, "" + true);
-
-            Range range = new Range(
-                    new Key("2011-11\0"),
-                    new Key("2011-11\uFFFD")
-            );
-
-            bs.setRanges(Collections.singleton(range));
-
-            int count = 0;
-            Iterator<Map.Entry<Key, Value>> iter = bs.iterator();
-            CBConverter converter = new CBConverter();
-            while (iter.hasNext()) {
-                count++;
-                Map.Entry<Key, Value> entry = iter.next();
-                Value value = entry.getValue();
-                org.openrdf.model.Value subj = RdfIO.readValue(ByteStreams.newDataInput(entry.getKey().getColumnQualifier().getBytes()), VALUE_FACTORY, FAMILY_DELIM);
-                Map<String, String> map = converter.toMap(entry.getKey(), value);
-                for (Map.Entry<String, String> e : map.entrySet()) {
-                    String predObj = e.getKey();
-                    String[] split = predObj.split("\0");
-                    byte[] look = split[0].getBytes();
-                    System.out.println(subj
-                            + " : " + VALUE_FACTORY.createURI(split[0]) + " : " +
-                            RdfIO.readValue(ByteStreams.newDataInput(split[1].getBytes()), VALUE_FACTORY, FAMILY_DELIM));
-                }
-            }
-            System.out.println(count);
-
-            bs.close();
-
-        } catch (Exception e) {
-            e.printStackTrace();
-        }
-    }
-}

http://git-wip-us.apache.org/repos/asf/incubator-rya/blob/80faf06d/partition/partition.rdf/src/test/java/mvm/mmrts/rdf/partition/TstDocumentReader.java
----------------------------------------------------------------------
diff --git a/partition/partition.rdf/src/test/java/mvm/mmrts/rdf/partition/TstDocumentReader.java b/partition/partition.rdf/src/test/java/mvm/mmrts/rdf/partition/TstDocumentReader.java
deleted file mode 100644
index 7723431..0000000
--- a/partition/partition.rdf/src/test/java/mvm/mmrts/rdf/partition/TstDocumentReader.java
+++ /dev/null
@@ -1,217 +0,0 @@
-//package mvm.mmrts.rdf.partition;
-//
-//import cloudbase.core.CBConstants;
-//import cloudbase.core.client.Connector;
-//import cloudbase.core.client.ZooKeeperInstance;
-//import cloudbase.core.client.mock.MockInstance;
-//import cloudbase.core.data.Key;
-//import cloudbase.core.data.Range;
-//import cloudbase.core.data.Value;
-//import cloudbase.core.util.TextUtil;
-//import mvm.mmrts.cloudbase.utils.client.DocumentBatchScanner;
-//import mvm.mmrts.cloudbase.utils.client.DocumentConnectorImpl;
-//import mvm.mmrts.rdf.partition.shard.DateHashModShardValueGenerator;
-//import org.apache.hadoop.io.Text;
-//import org.apache.log4j.Level;
-//import org.apache.log4j.Logger;
-//import org.openrdf.model.ValueFactory;
-//import org.openrdf.model.impl.StatementImpl;
-//import org.openrdf.model.impl.ValueFactoryImpl;
-//import org.openrdf.repository.RepositoryException;
-//import org.openrdf.repository.sail.SailRepository;
-//import org.openrdf.repository.sail.SailRepositoryConnection;
-//import ss.cloudbase.core.iterators.SortedRangeIterator;
-//
-//import javax.xml.datatype.DatatypeConfigurationException;
-//import javax.xml.datatype.DatatypeFactory;
-//import java.util.Collections;
-//import java.util.Iterator;
-//import java.util.List;
-//import java.util.Map;
-//
-//import static mvm.mmrts.rdf.partition.PartitionConstants.*;
-//
-///**
-// * Class TstDocumentReader
-// * Date: Sep 8, 2011
-// * Time: 9:11:27 AM
-// */
-//public class TstDocumentReader {
-//    public static final String NAMESPACE = "http://here/2010/tracked-data-provenance/ns#";//44 len
-//    public static final String RDF_NS = "http://www.w3.org/1999/02/22-rdf-syntax-ns#";
-//    public static final String HBNAMESPACE = "http://here/2010/tracked-data-provenance/heartbeat/ns#";
-//    public static final String HB_TIMESTAMP = HBNAMESPACE + "timestamp";
-//
-//    private static SailRepository repository;
-//    private static SailRepositoryConnection connection;
-//
-//    private static ValueFactory vf = ValueFactoryImpl.getInstance();
-//
-//    private static final String TABLE = "partitionRdf";
-//    private static final long START = 1309532965000l;
-//    private static final long END = 1310566686000l;
-//    private static String objectUuid = "objectuuid1";
-//
-//    public static void main(String[] args) {
-//        try {
-////            Logger.getRootLogger().setLevel(Level.TRACE);
-//            DocumentConnectorImpl connector = new DocumentConnectorImpl(new ZooKeeperInstance("stratus", "stratus13:2181"), "root", "password".getBytes());
-////            DocumentConnectorImpl connector = new DocumentConnectorImpl(new MockInstance(), "", "".getBytes());
-//
-////            PartitionSail sail = new PartitionSail(connector, TABLE, new DateHashModShardValueGenerator() {
-////                @Override
-////                public String generateShardValue(Object obj) {
-////                    return this.generateShardValue(START + 1000, obj);
-////                }
-////            });
-////
-////            repository = new SailRepository(sail);
-////            repository.initialize();
-////            connection = repository.getConnection();
-////
-////            loadData();
-//
-//            DocumentBatchScanner bs = connector.createDocumentBatchScanner(TABLE, CBConstants.NO_AUTHS, 2);
-//
-//            bs.setScanIterators(20, SortedRangeIterator.class.getName(), "ri");
-//            bs.setScanIteratorOption("ri", SortedRangeIterator.OPTION_DOC_COLF, DOC.toString());
-//            bs.setScanIteratorOption("ri", SortedRangeIterator.OPTION_COLF, INDEX.toString());
-//            bs.setScanIteratorOption("ri", SortedRangeIterator.OPTION_START_INCLUSIVE, "" + true);
-//            bs.setScanIteratorOption("ri", SortedRangeIterator.OPTION_END_INCLUSIVE, "" + true);
-//            bs.setScanIteratorOption("ri", SortedRangeIterator.OPTION_MULTI_DOC, "" + true);
-//
-//            bs.setScanIteratorOption("ri", SortedRangeIterator.OPTION_LOWER_BOUND,
-//                    "\07http://here/2010/tracked-data-provenance/ns#reportedAt\u0001\u000B2011-08-26T18:01:51Z"
-//            );
-//            bs.setScanIteratorOption("ri", SortedRangeIterator.OPTION_UPPER_BOUND,
-//                    "\07http://here/2010/tracked-data-provenance/ns#reportedAt\u0001\u000B2011-08-26T18:02:00Z"
-//            );
-//
-//            Range range = new Range(
-//                    new Key("2011-08\0"),
-//                    new Key("2011-08\uFFFD")
-//            );
-//
-//            bs.setRanges(Collections.singleton(range));
-//
-//            int count = 0;
-//            int innerCount = 0;
-//            Iterator<List<? extends Map.Entry<Key, Value>>> iter = bs.documentIterator();
-//            while (iter.hasNext()) {
-//                count++;
-//                List<? extends Map.Entry<Key, Value>> entries = iter.next();
-//                for (Map.Entry<Key, Value> entry : entries) {
-//                    System.out.print(entry.getKey().getColumnQualifier());
-//                    System.out.println(" ");
-//                    innerCount++;
-//                }
-//                System.out.println();
-//            }
-//            System.out.println(count);
-//            System.out.println(innerCount);
-//
-//            bs.close();
-////            connection.close();
-////            repository.shutDown();
-//
-//        } catch (Exception e) {
-//            e.printStackTrace();
-//        }
-//    }
-//
-//    private static void loadData() throws RepositoryException, DatatypeConfigurationException {
-//        connection.add(new StatementImpl(vf.createURI(NAMESPACE, objectUuid), vf.createURI(NAMESPACE, "name"), vf.createLiteral("objUuid")));
-//        //created
-//        String uuid = "uuid1";
-//        connection.add(new StatementImpl(vf.createURI(NAMESPACE, uuid), vf.createURI(RDF_NS, "type"), vf.createURI(NAMESPACE, "Created")));
-//        connection.add(new StatementImpl(vf.createURI(NAMESPACE, uuid), vf.createURI(NAMESPACE, "createdItem"), vf.createURI(NAMESPACE, objectUuid)));
-//        connection.add(new StatementImpl(vf.createURI(NAMESPACE, uuid), vf.createURI(NAMESPACE, "performedBy"), vf.createURI("urn:system:A")));
-//        connection.add(new StatementImpl(vf.createURI(NAMESPACE, uuid), vf.createURI(NAMESPACE, "stringLit"), vf.createLiteral("stringLit1")));
-//        connection.add(new StatementImpl(vf.createURI(NAMESPACE, uuid), vf.createURI(NAMESPACE, "stringLit"), vf.createLiteral("stringLit2")));
-//        connection.add(new StatementImpl(vf.createURI(NAMESPACE, uuid), vf.createURI(NAMESPACE, "stringLit"), vf.createLiteral("stringLit3")));
-//        connection.add(new StatementImpl(vf.createURI(NAMESPACE, uuid), vf.createURI(NAMESPACE, "stringLit"), vf.createLiteral("stringLit4")));
-//        connection.add(new StatementImpl(vf.createURI(NAMESPACE, uuid), vf.createURI(NAMESPACE, "strLit1"), vf.createLiteral("strLit1")));
-//        connection.add(new StatementImpl(vf.createURI(NAMESPACE, uuid), vf.createURI(NAMESPACE, "strLit1"), vf.createLiteral("strLit2")));
-//        connection.add(new StatementImpl(vf.createURI(NAMESPACE, uuid), vf.createURI(NAMESPACE, "strLit1"), vf.createLiteral("strLit3")));
-//        connection.add(new StatementImpl(vf.createURI(NAMESPACE, uuid), vf.createURI(NAMESPACE, "performedAt"), vf.createLiteral(DatatypeFactory.newInstance().newXMLGregorianCalendar(2011, 7, 12, 6, 0, 0, 0, 0))));
-//        connection.add(new StatementImpl(vf.createURI(NAMESPACE, uuid), vf.createURI(NAMESPACE, "reportedAt"), vf.createLiteral(DatatypeFactory.newInstance().newXMLGregorianCalendar(2011, 7, 12, 6, 1, 0, 0, 0))));
-//        //clicked
-//        uuid = "uuid2";
-//        connection.add(new StatementImpl(vf.createURI(NAMESPACE, uuid), vf.createURI(RDF_NS, "type"), vf.createURI(NAMESPACE, "Clicked")));
-//        connection.add(new StatementImpl(vf.createURI(NAMESPACE, uuid), vf.createURI(NAMESPACE, "clickedItem"), vf.createURI(NAMESPACE, objectUuid)));
-//        connection.add(new StatementImpl(vf.createURI(NAMESPACE, uuid), vf.createURI(NAMESPACE, "performedBy"), vf.createURI("urn:system:B")));
-//        connection.add(new StatementImpl(vf.createURI(NAMESPACE, uuid), vf.createURI(NAMESPACE, "performedAt"), vf.createLiteral(DatatypeFactory.newInstance().newXMLGregorianCalendar(2011, 7, 12, 6, 2, 0, 0, 0))));
-//        connection.add(new StatementImpl(vf.createURI(NAMESPACE, uuid), vf.createURI(NAMESPACE, "reportedAt"), vf.createLiteral(DatatypeFactory.newInstance().newXMLGregorianCalendar(2011, 7, 12, 6, 3, 0, 0, 0))));
-//        //deleted
-//        uuid = "uuid3";
-//        connection.add(new StatementImpl(vf.createURI(NAMESPACE, uuid), vf.createURI(RDF_NS, "type"), vf.createURI(NAMESPACE, "Deleted")));
-//        connection.add(new StatementImpl(vf.createURI(NAMESPACE, uuid), vf.createURI(NAMESPACE, "deletedItem"), vf.createURI(NAMESPACE, objectUuid)));
-//        connection.add(new StatementImpl(vf.createURI(NAMESPACE, uuid), vf.createURI(NAMESPACE, "performedBy"), vf.createURI("urn:system:C")));
-//        connection.add(new StatementImpl(vf.createURI(NAMESPACE, uuid), vf.createURI(NAMESPACE, "performedAt"), vf.createLiteral(DatatypeFactory.newInstance().newXMLGregorianCalendar(2011, 7, 12, 6, 4, 0, 0, 0))));
-//        connection.add(new StatementImpl(vf.createURI(NAMESPACE, uuid), vf.createURI(NAMESPACE, "reportedAt"), vf.createLiteral(DatatypeFactory.newInstance().newXMLGregorianCalendar(2011, 7, 12, 6, 5, 0, 0, 0))));
-//        //dropped
-//        uuid = "uuid4";
-//        connection.add(new StatementImpl(vf.createURI(NAMESPACE, uuid), vf.createURI(RDF_NS, "type"), vf.createURI(NAMESPACE, "Dropped")));
-//        connection.add(new StatementImpl(vf.createURI(NAMESPACE, uuid), vf.createURI(NAMESPACE, "droppedItem"), vf.createURI(NAMESPACE, objectUuid)));
-//        connection.add(new StatementImpl(vf.createURI(NAMESPACE, uuid), vf.createURI(NAMESPACE, "performedBy"), vf.createURI("urn:system:D")));
-//        connection.add(new StatementImpl(vf.createURI(NAMESPACE, uuid), vf.createURI(NAMESPACE, "performedAt"), vf.createLiteral(DatatypeFactory.newInstance().newXMLGregorianCalendar(2011, 7, 12, 6, 6, 0, 0, 0))));
-//        connection.add(new StatementImpl(vf.createURI(NAMESPACE, uuid), vf.createURI(NAMESPACE, "reportedAt"), vf.createLiteral(DatatypeFactory.newInstance().newXMLGregorianCalendar(2011, 7, 12, 6, 7, 0, 0, 0))));
-//        //received
-//        uuid = "uuid5";
-//        connection.add(new StatementImpl(vf.createURI(NAMESPACE, uuid), vf.createURI(RDF_NS, "type"), vf.createURI(NAMESPACE, "Received")));
-//        connection.add(new StatementImpl(vf.createURI(NAMESPACE, uuid), vf.createURI(NAMESPACE, "receivedItem"), vf.createURI(NAMESPACE, objectUuid)));
-//        connection.add(new StatementImpl(vf.createURI(NAMESPACE, uuid), vf.createURI(NAMESPACE, "performedBy"), vf.createURI("urn:system:E")));
-//        connection.add(new StatementImpl(vf.createURI(NAMESPACE, uuid), vf.createURI(NAMESPACE, "performedAt"), vf.createLiteral(DatatypeFactory.newInstance().newXMLGregorianCalendar(2011, 7, 12, 6, 8, 0, 0, 0))));
-//        connection.add(new StatementImpl(vf.createURI(NAMESPACE, uuid), vf.createURI(NAMESPACE, "reportedAt"), vf.createLiteral(DatatypeFactory.newInstance().newXMLGregorianCalendar(2011, 7, 12, 6, 9, 0, 0, 0))));
-//        //sent
-//        uuid = "uuid6";
-//        connection.add(new StatementImpl(vf.createURI(NAMESPACE, uuid), vf.createURI(RDF_NS, "type"), vf.createURI(NAMESPACE, "Sent")));
-//        connection.add(new StatementImpl(vf.createURI(NAMESPACE, uuid), vf.createURI(NAMESPACE, "sentItem"), vf.createURI(NAMESPACE, objectUuid)));
-//        connection.add(new StatementImpl(vf.createURI(NAMESPACE, uuid), vf.createURI(NAMESPACE, "performedBy"), vf.createURI("urn:system:F")));
-//        connection.add(new StatementImpl(vf.createURI(NAMESPACE, uuid), vf.createURI(NAMESPACE, "performedAt"), vf.createLiteral(DatatypeFactory.newInstance().newXMLGregorianCalendar(2011, 7, 12, 6, 10, 0, 0, 0))));
-//        connection.add(new StatementImpl(vf.createURI(NAMESPACE, uuid), vf.createURI(NAMESPACE, "reportedAt"), vf.createLiteral(DatatypeFactory.newInstance().newXMLGregorianCalendar(2011, 7, 12, 6, 11, 0, 0, 0))));
-//        //stored
-//        uuid = "uuid7";
-//        connection.add(new StatementImpl(vf.createURI(NAMESPACE, uuid), vf.createURI(RDF_NS, "type"), vf.createURI(NAMESPACE, "Stored")));
-//        connection.add(new StatementImpl(vf.createURI(NAMESPACE, uuid), vf.createURI(NAMESPACE, "storedItem"), vf.createURI(NAMESPACE, objectUuid)));
-//        connection.add(new StatementImpl(vf.createURI(NAMESPACE, uuid), vf.createURI(NAMESPACE, "performedBy"), vf.createURI("urn:system:G")));
-//        connection.add(new StatementImpl(vf.createURI(NAMESPACE, uuid), vf.createURI(NAMESPACE, "performedAt"), vf.createLiteral(DatatypeFactory.newInstance().newXMLGregorianCalendar(2011, 7, 12, 6, 12, 0, 0, 0))));
-//        connection.add(new StatementImpl(vf.createURI(NAMESPACE, uuid), vf.createURI(NAMESPACE, "reportedAt"), vf.createLiteral(DatatypeFactory.newInstance().newXMLGregorianCalendar(2011, 7, 12, 6, 13, 0, 0, 0))));
-//
-//        //heartbeats
-//        String hbuuid = "hbuuid1";
-//        connection.add(new StatementImpl(vf.createURI(HBNAMESPACE, hbuuid), vf.createURI(RDF_NS, "type"), vf.createURI(HBNAMESPACE, "HeartbeatMeasurement")));
-//        connection.add(new StatementImpl(vf.createURI(HBNAMESPACE, hbuuid), vf.createURI(HB_TIMESTAMP), vf.createLiteral((START + 1) + "")));
-//        connection.add(new StatementImpl(vf.createURI(HBNAMESPACE, hbuuid), vf.createURI(HBNAMESPACE, "count"), vf.createLiteral(1 + "")));
-//        connection.add(new StatementImpl(vf.createURI(HBNAMESPACE, hbuuid), vf.createURI(HBNAMESPACE, "systemName"), vf.createURI("urn:system:A")));
-//        connection.add(new StatementImpl(vf.createURI("urn:system:A"), vf.createURI(HBNAMESPACE, "heartbeat"), vf.createURI(HBNAMESPACE, hbuuid)));
-//
-//        hbuuid = "hbuuid2";
-//        connection.add(new StatementImpl(vf.createURI(HBNAMESPACE, hbuuid), vf.createURI(RDF_NS, "type"), vf.createURI(HBNAMESPACE, "HeartbeatMeasurement")));
-//        connection.add(new StatementImpl(vf.createURI(HBNAMESPACE, hbuuid), vf.createURI(HB_TIMESTAMP), vf.createLiteral((START + 2) + "")));
-//        connection.add(new StatementImpl(vf.createURI(HBNAMESPACE, hbuuid), vf.createURI(HBNAMESPACE, "count"), vf.createLiteral(2 + "")));
-//        connection.add(new StatementImpl(vf.createURI(HBNAMESPACE, hbuuid), vf.createURI(HBNAMESPACE, "systemName"), vf.createURI("urn:system:B")));
-//        connection.add(new StatementImpl(vf.createURI("urn:system:B"), vf.createURI(HBNAMESPACE, "heartbeat"), vf.createURI(HBNAMESPACE, hbuuid)));
-//
-//        hbuuid = "hbuuid3";
-//        connection.add(new StatementImpl(vf.createURI(HBNAMESPACE, hbuuid), vf.createURI(RDF_NS, "type"), vf.createURI(HBNAMESPACE, "HeartbeatMeasurement")));
-//        connection.add(new StatementImpl(vf.createURI(HBNAMESPACE, hbuuid), vf.createURI(HB_TIMESTAMP), vf.createLiteral((START + 3) + "")));
-//        connection.add(new StatementImpl(vf.createURI(HBNAMESPACE, hbuuid), vf.createURI(HBNAMESPACE, "count"), vf.createLiteral(3 + "")));
-//        connection.add(new StatementImpl(vf.createURI(HBNAMESPACE, hbuuid), vf.createURI(HBNAMESPACE, "systemName"), vf.createURI("urn:system:C")));
-//        connection.add(new StatementImpl(vf.createURI("urn:system:C"), vf.createURI(HBNAMESPACE, "heartbeat"), vf.createURI(HBNAMESPACE, hbuuid)));
-//
-//        connection.add(new StatementImpl(vf.createURI("urn:subj1"), vf.createURI("urn:pred"), vf.createLiteral("obj1")));
-//        connection.add(new StatementImpl(vf.createURI("urn:subj1"), vf.createURI("urn:pred"), vf.createLiteral("obj2")));
-//        connection.add(new StatementImpl(vf.createURI("urn:subj1"), vf.createURI("urn:pred"), vf.createLiteral("obj3")));
-//        connection.add(new StatementImpl(vf.createURI("urn:subj1"), vf.createURI("urn:pred"), vf.createLiteral("obj4")));
-//        connection.add(new StatementImpl(vf.createURI("urn:subj2"), vf.createURI("urn:pred"), vf.createLiteral("obj1")));
-//        connection.add(new StatementImpl(vf.createURI("urn:subj2"), vf.createURI("urn:pred"), vf.createLiteral("obj2")));
-//        connection.add(new StatementImpl(vf.createURI("urn:subj2"), vf.createURI("urn:pred"), vf.createLiteral("obj3")));
-//        connection.add(new StatementImpl(vf.createURI("urn:subj2"), vf.createURI("urn:pred"), vf.createLiteral("obj4")));
-//        connection.add(new StatementImpl(vf.createURI("urn:subj3"), vf.createURI("urn:pred"), vf.createLiteral("obj1")));
-//        connection.add(new StatementImpl(vf.createURI("urn:subj3"), vf.createURI("urn:pred"), vf.createLiteral("obj4")));
-//        connection.commit();
-//    }
-//
-//
-//}

http://git-wip-us.apache.org/repos/asf/incubator-rya/blob/80faf06d/partition/partition.rdf/src/test/java/mvm/mmrts/rdf/partition/TstScanner.java
----------------------------------------------------------------------
diff --git a/partition/partition.rdf/src/test/java/mvm/mmrts/rdf/partition/TstScanner.java b/partition/partition.rdf/src/test/java/mvm/mmrts/rdf/partition/TstScanner.java
deleted file mode 100644
index 6650cf1..0000000
--- a/partition/partition.rdf/src/test/java/mvm/mmrts/rdf/partition/TstScanner.java
+++ /dev/null
@@ -1,59 +0,0 @@
-package mvm.mmrts.rdf.partition;
-
-import cloudbase.core.client.BatchScanner;
-import cloudbase.core.client.Connector;
-import cloudbase.core.client.ZooKeeperInstance;
-import cloudbase.core.data.Key;
-import cloudbase.core.data.Range;
-import cloudbase.core.data.Value;
-import org.apache.hadoop.io.Text;
-import ss.cloudbase.core.iterators.SortedRangeIterator;
-
-import java.util.Collections;
-import java.util.Iterator;
-import java.util.Map;
-
-import static mvm.mmrts.rdf.partition.PartitionConstants.*;
-
-/**
- * Class TstBed
- * Date: Aug 2, 2011
- * Time: 9:22:11 AM
- */
-public class TstScanner {
-    public static void main(String[] args) {
-        try {
-            Connector connector = new ZooKeeperInstance("stratus", "stratus13:2181").getConnector("root", "password");
-            BatchScanner bs = connector.createBatchScanner("partitionRdf", ALL_AUTHORIZATIONS, 3);
-
-            Text shard = new Text("2011-08-40");
-            String uri = "urn:tde:c5e2f4d8-a5a6-48d8-ba55-1acea969c38d";
-            bs.setRanges(Collections.singleton(
-                    new Range(
-                            new Key(
-                                    shard, DOC,
-                                    new Text(URI_MARKER_STR + uri + FAMILY_DELIM_STR + "\0")
-                            ),
-                            new Key(
-                                    shard, DOC,
-                                    new Text(URI_MARKER_STR + uri + FAMILY_DELIM_STR + "\uFFFD")
-                            )
-                    )
-            ));
-
-            int count = 0;
-            Iterator<Map.Entry<Key, Value>> iter = bs.iterator();
-            while (iter.hasNext()) {
-                count++;
-//                iter.next();
-                System.out.println(iter.next().getKey().getColumnQualifier());
-            }
-            System.out.println(count);
-
-            bs.close();
-
-        } catch (Exception e) {
-            e.printStackTrace();
-        }
-    }
-}

http://git-wip-us.apache.org/repos/asf/incubator-rya/blob/80faf06d/partition/partition.rdf/src/test/java/mvm/mmrts/rdf/partition/shard/DateHashModShardValueGeneratorTest.java
----------------------------------------------------------------------
diff --git a/partition/partition.rdf/src/test/java/mvm/mmrts/rdf/partition/shard/DateHashModShardValueGeneratorTest.java b/partition/partition.rdf/src/test/java/mvm/mmrts/rdf/partition/shard/DateHashModShardValueGeneratorTest.java
deleted file mode 100644
index fe27ec7..0000000
--- a/partition/partition.rdf/src/test/java/mvm/mmrts/rdf/partition/shard/DateHashModShardValueGeneratorTest.java
+++ /dev/null
@@ -1,30 +0,0 @@
-package mvm.mmrts.rdf.partition.shard;
-
-import junit.framework.TestCase;
-
-import java.text.SimpleDateFormat;
-import java.util.Calendar;
-import java.util.Date;
-
-/**
- * Class DateHashModShardValueGeneratorTest
- * Date: Jul 6, 2011
- * Time: 6:35:32 PM
- */
-public class DateHashModShardValueGeneratorTest extends TestCase {
-    SimpleDateFormat dateFormat = new SimpleDateFormat("yyyyMMdd");
-    Calendar cal = Calendar.getInstance();
-
-    public void testGenerateShardValue() throws Exception {
-
-        DateHashModShardValueGenerator gen = new DateHashModShardValueGenerator();
-        gen.setBaseMod(100);
-        assertEquals(gen.generateShardValue("subject"), dateFormat.format(cal.getTime()) + "_68");
-    }
-
-    public void testGenerateShardValueNullObject() throws Exception {
-        DateHashModShardValueGenerator gen = new DateHashModShardValueGenerator();
-        gen.setBaseMod(100);
-        assertEquals(gen.generateShardValue(null), dateFormat.format(cal.getTime()));
-    }
-}
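
The expected values in this removed test pin down the sharding scheme: the shard id is the current date in yyyyMMdd form plus an underscore and the subject's hash modulo the base mod, with a null subject collapsing to the bare date. A sketch of that computation, assuming Java's String.hashCode() with the sign stripped (the generator class itself is not shown in this hunk):

    import java.text.SimpleDateFormat;
    import java.util.Date;

    public class ShardSketch {
        // Date-bucketed shard id with a hash-mod suffix; baseMod bounds the
        // number of buckets per day (100 in the test above).
        static String shardFor(Object subject, int baseMod) {
            String day = new SimpleDateFormat("yyyyMMdd").format(new Date());
            if (subject == null) {
                return day;
            }
            return day + "_" + (Math.abs(subject.hashCode()) % baseMod);
        }

        public static void main(String[] args) {
            System.out.println(shardFor("subject", 100)); // prints <today>_68
        }
    }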

http://git-wip-us.apache.org/repos/asf/incubator-rya/blob/80faf06d/partition/partition.rdf/src/test/java/mvm/mmrts/rdf/partition/utils/RdfIOTest.java
----------------------------------------------------------------------
diff --git a/partition/partition.rdf/src/test/java/mvm/mmrts/rdf/partition/utils/RdfIOTest.java b/partition/partition.rdf/src/test/java/mvm/mmrts/rdf/partition/utils/RdfIOTest.java
deleted file mode 100644
index d251485..0000000
--- a/partition/partition.rdf/src/test/java/mvm/mmrts/rdf/partition/utils/RdfIOTest.java
+++ /dev/null
@@ -1,82 +0,0 @@
-package mvm.mmrts.rdf.partition.utils;
-
-import com.google.common.io.ByteStreams;
-import com.google.common.primitives.Bytes;
-import junit.framework.TestCase;
-import org.openrdf.model.Statement;
-import org.openrdf.model.ValueFactory;
-import org.openrdf.model.impl.StatementImpl;
-import org.openrdf.model.impl.ValueFactoryImpl;
-
-import static mvm.mmrts.rdf.partition.utils.RdfIO.*;
-
-/**
- * Class RdfIOTest
- * Date: Jul 6, 2011
- * Time: 12:59:25 PM
- */
-public class RdfIOTest extends TestCase {
-
-    ValueFactory vf = ValueFactoryImpl.getInstance();
-
-    public void testWriteStatementEvent() throws Exception {
-        StatementImpl stmt = new StatementImpl(vf.createURI("urn:namespace#subj"), vf.createURI("urn:namespace#pred"), vf.createLiteral("object"));
-        byte[] bytes = writeStatement(stmt, true);
-    }
-
-    public void testWriteStatementIndex() throws Exception {
-        StatementImpl stmt = new StatementImpl(vf.createURI("urn:namespace#subj"), vf.createURI("urn:namespace#pred"), vf.createLiteral("object"));
-        byte[] bytes = writeStatement(stmt, false);
-    }
-
-    public void testExtraInfoInStmtBytes() throws Exception {
-        StatementImpl stmt = new StatementImpl(vf.createURI("urn:namespace#subj"), vf.createURI("urn:namespace#pred"), vf.createLiteral("object"));
-        byte[] bytes = writeStatement(stmt, true);
-        bytes = Bytes.concat(bytes, "extrainformation".getBytes());
-        Statement readStmt = readStatement(ByteStreams.newDataInput(bytes), ValueFactoryImpl.getInstance());
-        System.out.println(readStmt);
-    }
-
-    public void testReadStatement() throws Exception {
-        StatementImpl stmt = new StatementImpl(vf.createURI("urn:namespace#subj"), vf.createURI("urn:namespace#pred"), vf.createLiteral("object"));
-        byte[] bytes = writeStatement(stmt, true);
-
-        Statement readStmt = readStatement(ByteStreams.newDataInput(bytes), vf);
-        assertEquals(readStmt, stmt);
-
-        //testing blank node
-        stmt = new StatementImpl(vf.createBNode("a12345"), vf.createURI("urn:namespace#pred"), vf.createLiteral("object"));
-        bytes = writeStatement(stmt, true);
-
-        readStmt = readStatement(ByteStreams.newDataInput(bytes), vf);
-        assertEquals(readStmt, stmt);
-
-        //testing boolean literal datatype
-        stmt = new StatementImpl(vf.createURI("urn:namespace#subj"), vf.createURI("urn:namespace#pred"), vf.createLiteral(true));
-        bytes = writeStatement(stmt, true);
-
-        readStmt = readStatement(ByteStreams.newDataInput(bytes), vf);
-        assertEquals(readStmt, stmt);
-        
-        //testing boolean literal datatype
-        stmt = new StatementImpl(vf.createURI("urn:namespace#subj"), vf.createURI("urn:namespace#pred"), vf.createLiteral("label", "language"));
-        bytes = writeStatement(stmt, true);
-
-        readStmt = readStatement(ByteStreams.newDataInput(bytes), vf);
-        assertEquals(readStmt, stmt);
-    }
-
-    public void testReadIndexStatement() throws Exception {
-        StatementImpl stmt = new StatementImpl(vf.createURI("urn:namespace#subj"), vf.createURI("urn:namespace#pred"), vf.createLiteral("object"));
-        byte[] bytes = writeStatement(stmt, false);
-
-        Statement readStmt = readStatement(ByteStreams.newDataInput(bytes), vf, false);
-        assertEquals(readStmt, stmt);
-
-        bytes = writeStatement(stmt, true);
-
-        readStmt = readStatement(ByteStreams.newDataInput(bytes), vf, true);
-        assertEquals(readStmt, stmt);
-    }
-
-}


[55/56] [abbrv] incubator-rya git commit: RYA-17, RYA-19, RYA-20 issue with mongo deletes, typo in constructor, redundant indices

Posted by mi...@apache.org.
RYA-17, RYA-19, RYA-20 issue with mongo deletes, typo in constructor, redundant indices


Project: http://git-wip-us.apache.org/repos/asf/incubator-rya/repo
Commit: http://git-wip-us.apache.org/repos/asf/incubator-rya/commit/1007611e
Tree: http://git-wip-us.apache.org/repos/asf/incubator-rya/tree/1007611e
Diff: http://git-wip-us.apache.org/repos/asf/incubator-rya/diff/1007611e

Branch: refs/heads/master
Commit: 1007611ee5955ff1371f50dbe9c646f6452f290e
Parents: 1eae901
Author: pujav65 <pu...@gmail.com>
Authored: Wed Dec 16 23:23:25 2015 -0500
Committer: Aaron Mihalik <mi...@alum.mit.edu>
Committed: Tue Dec 22 11:48:32 2015 -0500

----------------------------------------------------------------------
 dao/mongodb.rya/pom.xml                         |   5 +
 .../java/mvm/rya/mongodb/MongoDBRyaDAO.java     |  44 ++++++-
 .../dao/SimpleMongoDBStorageStrategy.java       |   9 +-
 .../java/mvm/rya/mongodb/MongoDBRyaDAOTest.java | 121 +++++++++++++++++++
 4 files changed, 167 insertions(+), 12 deletions(-)
----------------------------------------------------------------------


http://git-wip-us.apache.org/repos/asf/incubator-rya/blob/1007611e/dao/mongodb.rya/pom.xml
----------------------------------------------------------------------
diff --git a/dao/mongodb.rya/pom.xml b/dao/mongodb.rya/pom.xml
index 0d87fa5..b7d0c0e 100644
--- a/dao/mongodb.rya/pom.xml
+++ b/dao/mongodb.rya/pom.xml
@@ -43,6 +43,11 @@ under the License.
             <groupId>de.flapdoodle.embed</groupId>
             <artifactId>de.flapdoodle.embed.mongo</artifactId>
         </dependency>
+        <dependency>
+            <groupId>junit</groupId>
+            <artifactId>junit</artifactId>
+            <scope>test</scope>
+        </dependency>
     </dependencies>
 
 </project>

http://git-wip-us.apache.org/repos/asf/incubator-rya/blob/1007611e/dao/mongodb.rya/src/main/java/mvm/rya/mongodb/MongoDBRyaDAO.java
----------------------------------------------------------------------
diff --git a/dao/mongodb.rya/src/main/java/mvm/rya/mongodb/MongoDBRyaDAO.java b/dao/mongodb.rya/src/main/java/mvm/rya/mongodb/MongoDBRyaDAO.java
index 1f341dc..b9124e3 100644
--- a/dao/mongodb.rya/src/main/java/mvm/rya/mongodb/MongoDBRyaDAO.java
+++ b/dao/mongodb.rya/src/main/java/mvm/rya/mongodb/MongoDBRyaDAO.java
@@ -69,18 +69,39 @@ public class MongoDBRyaDAO implements RyaDAO<MongoDBRdfConfiguration>{
 	
 	public MongoDBRyaDAO(MongoDBRdfConfiguration conf) throws RyaDAOException{
 		this.conf = conf;
+		initConnection();
+		init();
+	}
+
+	
+	public MongoDBRyaDAO(MongoDBRdfConfiguration conf, MongoClient mongoClient) throws RyaDAOException{
+		this.conf = conf;
+		this.mongoClient = mongoClient;
 		init();
 	}
 
 	public void setConf(MongoDBRdfConfiguration conf) {
 		this.conf = conf;
 	}
+	
+	public void setMongoClient(MongoClient mongoClient) {
+		this.mongoClient = mongoClient;
+	}
+
+	public void setDB(DB db) {
+		this.db = db;
+	}
+
+	
+	public void setDBCollection(DBCollection coll) {
+		this.coll = coll;
+	}
 
     public MongoDBRdfConfiguration getConf() {
         return conf;
     }
 
-    public void init() throws RyaDAOException {
+    public void initConnection() throws RyaDAOException {
         try {
             boolean useMongoTest = conf.getUseTestMongo();
             if (useMongoTest) {
@@ -94,13 +115,26 @@ public class MongoDBRyaDAO implements RyaDAO<MongoDBRdfConfiguration>{
                 if (conf.get(MongoDBRdfConfiguration.MONGO_USER) != null) {
                     MongoCredential cred = MongoCredential.createCredential(
                             conf.get(MongoDBRdfConfiguration.MONGO_USER),
-                            conf.get(MongoDBRdfConfiguration.MONGO_USER_PASSWORD),
-                            conf.get(MongoDBRdfConfiguration.MONGO_DB_NAME).toCharArray());
+                            conf.get(MongoDBRdfConfiguration.MONGO_DB_NAME),
+                            conf.get(MongoDBRdfConfiguration.MONGO_USER_PASSWORD).toCharArray());
                     mongoClient = new MongoClient(server, Arrays.asList(cred));
                 } else {
                     mongoClient = new MongoClient(server);
                 }
             }
+        } catch (UnknownHostException e) {
+            // TODO Auto-generated catch block
+            e.printStackTrace();
+        } catch (IOException e) {
+            // TODO Auto-generated catch block
+            e.printStackTrace();
+        }
+
+    }
+
+    
+    public void init() throws RyaDAOException {
+        try {
             secondaryIndexers = conf.getAdditionalIndexers();
             for(RyaSecondaryIndexer index: secondaryIndexers) {
                 index.setConf(conf);
@@ -179,7 +213,7 @@ public class MongoDBRyaDAO implements RyaDAO<MongoDBRdfConfiguration>{
 
 	public void delete(RyaStatement statement, MongoDBRdfConfiguration conf)
 			throws RyaDAOException {
-		DBObject obj = storageStrategy.serialize(statement);
+		DBObject obj = storageStrategy.getQuery(statement);
 		coll.remove(obj);
 	}
 
@@ -192,7 +226,7 @@ public class MongoDBRyaDAO implements RyaDAO<MongoDBRdfConfiguration>{
 			MongoDBRdfConfiguration conf) throws RyaDAOException {
 		while (statements.hasNext()){
 			RyaStatement ryaStatement = statements.next();
-			coll.remove(storageStrategy.serialize(ryaStatement));
+			coll.remove(storageStrategy.getQuery(ryaStatement));
 		}
 		
 	}
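
The "typo in constructor" named in the commit message is the argument order above: MongoCredential.createCredential takes (userName, database, password) with the password as a char array, so the old code handed the password string in as the authentication database and the database name, converted to a char array, in as the password. A corrected usage sketch (host, user, and database values are placeholders):

    import java.util.Arrays;
    import com.mongodb.MongoClient;
    import com.mongodb.MongoCredential;
    import com.mongodb.ServerAddress;

    public class CredentialSketch {
        public static void main(String[] args) throws Exception {
            // Argument order: user name, authentication database, password.
            MongoCredential cred = MongoCredential.createCredential(
                    "ryaUser", "rya", "ryaPassword".toCharArray());
            MongoClient client = new MongoClient(
                    new ServerAddress("localhost", 27017), Arrays.asList(cred));
            System.out.println(client.getAddress());
            client.close();
        }
    }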

http://git-wip-us.apache.org/repos/asf/incubator-rya/blob/1007611e/dao/mongodb.rya/src/main/java/mvm/rya/mongodb/dao/SimpleMongoDBStorageStrategy.java
----------------------------------------------------------------------
diff --git a/dao/mongodb.rya/src/main/java/mvm/rya/mongodb/dao/SimpleMongoDBStorageStrategy.java b/dao/mongodb.rya/src/main/java/mvm/rya/mongodb/dao/SimpleMongoDBStorageStrategy.java
index 24d16c1..3ecc0dc 100644
--- a/dao/mongodb.rya/src/main/java/mvm/rya/mongodb/dao/SimpleMongoDBStorageStrategy.java
+++ b/dao/mongodb.rya/src/main/java/mvm/rya/mongodb/dao/SimpleMongoDBStorageStrategy.java
@@ -52,17 +52,12 @@ public class SimpleMongoDBStorageStrategy implements MongoDBStorageStrategy {
 	
 	@Override
 	public void createIndices(DBCollection coll){
-		coll.createIndex("subject");
-		coll.createIndex("predicate");
 		BasicDBObject doc = new BasicDBObject();
 	    doc.put(SUBJECT, 1);
 	    doc.put(PREDICATE, 1);
 		coll.createIndex(doc);
-		doc = new BasicDBObject(OBJECT, 1);
-		doc.put(OBJECT_TYPE, 1);
-		doc.put(PREDICATE, 1);
-		coll.createIndex(doc);
-		doc = new BasicDBObject(OBJECT, 1);
+		doc = new BasicDBObject(PREDICATE, 1);
+		doc.put(OBJECT, 1);
 		doc.put(OBJECT_TYPE, 1);
 		coll.createIndex(doc);
 		doc = new BasicDBObject(OBJECT, 1);
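
The index clean-up above leans on MongoDB's compound-index prefix rule: an index on (subject, predicate) already answers subject-only queries, the reordered (predicate, object, objectType) index answers predicate-only queries, and the old (object, objectType, predicate) index overlapped the object-led index that survives below the hunk, so the dropped indices were pure write overhead. A sketch of the resulting index set against the same legacy DBCollection API (the literal field names are assumptions; the code above uses constants):

    import com.mongodb.BasicDBObject;
    import com.mongodb.DBCollection;

    public class IndexSketch {
        static void createIndices(DBCollection coll) {
            // (subject, predicate): also covers subject-only lookups via the prefix rule.
            coll.createIndex(new BasicDBObject("subject", 1).append("predicate", 1));
            // (predicate, object, objectType): also covers predicate-only lookups.
            coll.createIndex(new BasicDBObject("predicate", 1)
                    .append("object", 1).append("objectType", 1));
            // (object, objectType): object lookups that do not name a predicate.
            coll.createIndex(new BasicDBObject("object", 1).append("objectType", 1));
        }
    }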

http://git-wip-us.apache.org/repos/asf/incubator-rya/blob/1007611e/dao/mongodb.rya/src/test/java/mvm/rya/mongodb/MongoDBRyaDAOTest.java
----------------------------------------------------------------------
diff --git a/dao/mongodb.rya/src/test/java/mvm/rya/mongodb/MongoDBRyaDAOTest.java b/dao/mongodb.rya/src/test/java/mvm/rya/mongodb/MongoDBRyaDAOTest.java
new file mode 100644
index 0000000..3d900b0
--- /dev/null
+++ b/dao/mongodb.rya/src/test/java/mvm/rya/mongodb/MongoDBRyaDAOTest.java
@@ -0,0 +1,121 @@
+package mvm.rya.mongodb;
+
+import static org.junit.Assert.assertEquals;
+
+import java.io.IOException;
+
+import mvm.rya.api.RdfCloudTripleStoreConfiguration;
+import mvm.rya.api.domain.RyaStatement;
+import mvm.rya.api.domain.RyaStatement.RyaStatementBuilder;
+import mvm.rya.api.domain.RyaURI;
+import mvm.rya.api.persist.RyaDAOException;
+
+import org.apache.hadoop.conf.Configuration;
+import org.junit.Before;
+import org.junit.Test;
+
+import com.mongodb.DB;
+import com.mongodb.DBCollection;
+import com.mongodb.MongoClient;
+import com.mongodb.MongoException;
+
+import de.flapdoodle.embed.mongo.distribution.Version;
+import de.flapdoodle.embed.mongo.tests.MongodForTestsFactory;
+
+public class MongoDBRyaDAOTest {
+	
+	private MongodForTestsFactory testsFactory;
+	private MongoDBRyaDAO dao;
+	private MongoDBRdfConfiguration configuration;
+	private MongoClient mongoClient;
+	
+	@Before
+	public void setUp() throws IOException, RyaDAOException{
+		testsFactory = MongodForTestsFactory.with(Version.Main.PRODUCTION);
+	       Configuration conf = new Configuration();
+	        conf.set(MongoDBRdfConfiguration.USE_TEST_MONGO, "true");
+	        conf.set(MongoDBRdfConfiguration.MONGO_DB_NAME, "test");
+	        conf.set(MongoDBRdfConfiguration.MONGO_COLLECTION_PREFIX, "rya_");
+	        conf.set(RdfCloudTripleStoreConfiguration.CONF_TBL_PREFIX, "rya_");
+	        configuration = new MongoDBRdfConfiguration(conf);
+			mongoClient = testsFactory.newMongo();
+            int port = mongoClient.getServerAddressList().get(0).getPort();
+            configuration.set(MongoDBRdfConfiguration.MONGO_INSTANCE_PORT, Integer.toString(port));
+			dao = new MongoDBRyaDAO(configuration, mongoClient);
+		
+	}
+
+	@Test
+	public void testDeleteWildcard() throws RyaDAOException {
+		RyaStatementBuilder builder = new RyaStatementBuilder();
+		builder.setPredicate(new RyaURI("http://temp.com"));
+		dao.delete(builder.build(), configuration);
+	}
+	
+	
+	@Test
+	public void testAdd() throws RyaDAOException, MongoException, IOException {
+		RyaStatementBuilder builder = new RyaStatementBuilder();
+		builder.setPredicate(new RyaURI("http://temp.com"));
+		builder.setSubject(new RyaURI("http://subject.com"));
+		builder.setObject(new RyaURI("http://object.com"));
+		
+		DB db = mongoClient.getDB(configuration.get(MongoDBRdfConfiguration.MONGO_DB_NAME));
+        DBCollection coll = db.getCollection(configuration.getTriplesCollectionName());
+          
+		dao.add(builder.build());
+
+        assertEquals(coll.count(),1);
+		
+	}
+	
+	@Test
+	public void testDelete() throws RyaDAOException, MongoException, IOException {
+		RyaStatementBuilder builder = new RyaStatementBuilder();
+		builder.setPredicate(new RyaURI("http://temp.com"));
+		builder.setSubject(new RyaURI("http://subject.com"));
+		builder.setObject(new RyaURI("http://object.com"));
+		RyaStatement statement = builder.build();
+		
+		DB db = mongoClient.getDB(configuration.get(MongoDBRdfConfiguration.MONGO_DB_NAME));
+        DBCollection coll = db.getCollection(configuration.getTriplesCollectionName());
+          
+		dao.add(statement);
+
+        assertEquals(coll.count(),1);
+		
+        dao.delete(statement, configuration);
+        
+        assertEquals(coll.count(),0);
+
+	}
+
+	@Test
+	public void testDeleteWildcardSubjectWithContext() throws RyaDAOException, MongoException, IOException {
+		RyaStatementBuilder builder = new RyaStatementBuilder();
+		builder.setPredicate(new RyaURI("http://temp.com"));
+		builder.setSubject(new RyaURI("http://subject.com"));
+		builder.setObject(new RyaURI("http://object.com"));
+		builder.setContext(new RyaURI("http://context.com"));
+		RyaStatement statement = builder.build();
+		
+		DB db = mongoClient.getDB(configuration.get(MongoDBRdfConfiguration.MONGO_DB_NAME));
+        DBCollection coll = db.getCollection(configuration.getTriplesCollectionName());
+          
+		dao.add(statement);
+
+        assertEquals(coll.count(),1);
+        
+		RyaStatementBuilder builder2 = new RyaStatementBuilder();
+		builder2.setPredicate(new RyaURI("http://temp.com"));
+		builder2.setObject(new RyaURI("http://object.com"));
+		builder2.setContext(new RyaURI("http://context3.com"));
+		RyaStatement query = builder2.build();
+		
+        dao.delete(query, configuration);
+        
+        assertEquals(coll.count(),1);
+
+	}
+
+}
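
These new tests pin down why the delete fix swaps serialize for getQuery in MongoDBRyaDAO.delete: serialize produces the full stored document, which only matches when every field is populated, while getQuery evidently constrains only the fields the caller set, so a statement carrying just a predicate behaves as a wildcard delete, and a mismatched context (testDeleteWildcardSubjectWithContext) matches nothing. getQuery itself is not part of this diff, so the following is only an assumed sketch of its shape, with field and accessor names as placeholders:

    import com.mongodb.BasicDBObject;
    import com.mongodb.DBObject;
    import mvm.rya.api.domain.RyaStatement;

    public class QueryShapeSketch {
        // Assumed shape: only populated statement fields constrain the match,
        // leaving unset fields as wildcards.
        public DBObject getQuery(RyaStatement stmt) {
            BasicDBObject query = new BasicDBObject();
            if (stmt.getSubject() != null) {
                query.append("subject", stmt.getSubject().getData());
            }
            if (stmt.getPredicate() != null) {
                query.append("predicate", stmt.getPredicate().getData());
            }
            if (stmt.getObject() != null) {
                query.append("object", stmt.getObject().getData());
            }
            if (stmt.getContext() != null) {
                query.append("context", stmt.getContext().getData());
            }
            return query;
        }
    }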


[14/56] [abbrv] incubator-rya git commit: RYA-7 POM and License Clean-up for Apache Move

Posted by mi...@apache.org.
http://git-wip-us.apache.org/repos/asf/incubator-rya/blob/80faf06d/sail/rya.sail.impl/src/main/java/mvm/rya/rdftriplestore/evaluation/ExternalMultipleBindingSetsIterator.java
----------------------------------------------------------------------
diff --git a/sail/rya.sail.impl/src/main/java/mvm/rya/rdftriplestore/evaluation/ExternalMultipleBindingSetsIterator.java b/sail/rya.sail.impl/src/main/java/mvm/rya/rdftriplestore/evaluation/ExternalMultipleBindingSetsIterator.java
deleted file mode 100644
index ce9adaa..0000000
--- a/sail/rya.sail.impl/src/main/java/mvm/rya/rdftriplestore/evaluation/ExternalMultipleBindingSetsIterator.java
+++ /dev/null
@@ -1,108 +0,0 @@
-package mvm.rya.rdftriplestore.evaluation;
-
-/*
- * #%L
- * mvm.rya.rya.sail.impl
- * %%
- * Copyright (C) 2014 Rya
- * %%
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * You may obtain a copy of the License at
- * 
- *      http://www.apache.org/licenses/LICENSE-2.0
- * 
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- * #L%
- */
-
-import info.aduna.iteration.CloseableIteration;
-import info.aduna.iteration.LookAheadIteration;
-
-import java.util.ArrayList;
-import java.util.Collection;
-
-import org.openrdf.query.BindingSet;
-import org.openrdf.query.QueryEvaluationException;
-import org.openrdf.query.algebra.StatementPattern;
-import org.openrdf.query.algebra.TupleExpr;
-
-/**
- */
-public class ExternalMultipleBindingSetsIterator extends LookAheadIteration<BindingSet, QueryEvaluationException> {
-
-    private final ParallelEvaluationStrategyImpl strategy;
-    private final CloseableIteration leftIter;
-    private ExternalBatchingIterator stmtPtrn;
-    private CloseableIteration<BindingSet, QueryEvaluationException> iter;
-    //TODO: configurable
-    private int batchSize = 1000;
-
-    public ExternalMultipleBindingSetsIterator(ParallelEvaluationStrategyImpl strategy, TupleExpr leftArg, ExternalBatchingIterator stmtPattern, BindingSet bindings)
-            throws QueryEvaluationException {
-        this.strategy = strategy;
-        leftIter = strategy.evaluate(leftArg, bindings);
-        this.stmtPtrn = stmtPattern;
-        initIter();
-    }
-
-    public ExternalMultipleBindingSetsIterator(ParallelEvaluationStrategyImpl strategy, CloseableIteration leftIter, ExternalBatchingIterator stmtPattern, BindingSet bindings)
-            throws QueryEvaluationException {
-        this.strategy = strategy;
-        this.leftIter = leftIter;
-        this.stmtPtrn = stmtPattern;
-        initIter();
-    }
-
-    protected void initIter() throws QueryEvaluationException {
-        try {
-            Collection<BindingSet> sets = new ArrayList<BindingSet>();
-            int i = 0;
-            while (leftIter.hasNext()) {
-                //default to 1K for the batch size
-                if (i >= batchSize) {
-                    break;
-                }
-                sets.add((BindingSet) leftIter.next());
-                i++;
-            }
-            if (iter != null) iter.close();
-            iter = stmtPtrn.evaluate(sets);
-        } catch (Exception e) {
-            throw new QueryEvaluationException(e);
-        }
-    }
-
-    protected BindingSet getNextElement()
-            throws QueryEvaluationException {
-        try {
-            while (true) {
-                if (iter.hasNext()) {
-                    return iter.next();
-                }
-
-                if (leftIter.hasNext()) {
-                    initIter();
-                } else
-                    return null;
-            }
-        } catch (Exception e) {
-            throw new QueryEvaluationException(e);
-        }
-    }
-
-    protected void handleClose()
-            throws QueryEvaluationException {
-        try {
-            super.handleClose();
-            leftIter.close();
-            iter.close();
-        } catch (Exception e) {
-            throw new QueryEvaluationException(e);
-        }
-    }
-}
\ No newline at end of file

http://git-wip-us.apache.org/repos/asf/incubator-rya/blob/80faf06d/sail/rya.sail.impl/src/main/java/mvm/rya/rdftriplestore/evaluation/FilterRangeVisitor.java
----------------------------------------------------------------------
diff --git a/sail/rya.sail.impl/src/main/java/mvm/rya/rdftriplestore/evaluation/FilterRangeVisitor.java b/sail/rya.sail.impl/src/main/java/mvm/rya/rdftriplestore/evaluation/FilterRangeVisitor.java
deleted file mode 100644
index f2ac8c6..0000000
--- a/sail/rya.sail.impl/src/main/java/mvm/rya/rdftriplestore/evaluation/FilterRangeVisitor.java
+++ /dev/null
@@ -1,96 +0,0 @@
-package mvm.rya.rdftriplestore.evaluation;
-
-/*
- * #%L
- * mvm.rya.rya.sail.impl
- * %%
- * Copyright (C) 2014 Rya
- * %%
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * You may obtain a copy of the License at
- * 
- *      http://www.apache.org/licenses/LICENSE-2.0
- * 
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- * #L%
- */
-
-import mvm.rya.api.RdfCloudTripleStoreConfiguration;
-import mvm.rya.api.domain.RangeURI;
-import mvm.rya.api.domain.RangeValue;
-import org.openrdf.model.Value;
-import org.openrdf.model.impl.BooleanLiteralImpl;
-import org.openrdf.query.QueryEvaluationException;
-import org.openrdf.query.algebra.*;
-import org.openrdf.query.algebra.helpers.QueryModelVisitorBase;
-
-import java.util.HashMap;
-import java.util.List;
-import java.util.Map;
-
-import static mvm.rya.api.RdfCloudTripleStoreConstants.RANGE;
-
-/**
- * Class FilterTimeIndexVisitor
- * Date: Apr 11, 2011
- * Time: 10:16:15 PM
- */
-public class FilterRangeVisitor extends QueryModelVisitorBase {
-
-    private RdfCloudTripleStoreConfiguration conf;
-    private Map<Var, RangeValue> rangeValues = new HashMap<Var, RangeValue>();
-
-    public FilterRangeVisitor(RdfCloudTripleStoreConfiguration conf) {
-        this.conf = conf;
-    }
-
-    @Override
-    public void meet(Filter node) throws Exception {
-        super.meet(node);
-
-        ValueExpr arg = node.getCondition();
-        if (arg instanceof FunctionCall) {
-            FunctionCall fc = (FunctionCall) arg;
-            if (RANGE.stringValue().equals(fc.getURI())) {
-                //range(?var, start, end)
-                List<ValueExpr> valueExprs = fc.getArgs();
-                if (valueExprs.size() != 3) {
-                    throw new QueryEvaluationException("mvm:range must have 3 parameters: variable, start, end");
-                }
-                Var var = (Var) valueExprs.get(0);
-                ValueConstant startVc = (ValueConstant) valueExprs.get(1);
-                ValueConstant endVc = (ValueConstant) valueExprs.get(2);
-                Value start = startVc.getValue();
-                Value end = endVc.getValue();
-                rangeValues.put(var, new RangeValue(start, end));
-                node.setCondition(new ValueConstant(BooleanLiteralImpl.TRUE));
-            }
-        }
-    }
-
-    @Override
-    public void meet(StatementPattern node) throws Exception {
-        super.meet(node);
-
-        Var subjectVar = node.getSubjectVar();
-        RangeValue subjRange = rangeValues.get(subjectVar);
-        Var predVar = node.getPredicateVar();
-        RangeValue predRange = rangeValues.get(predVar);
-        Var objVar = node.getObjectVar();
-        RangeValue objRange = rangeValues.get(objVar);
-        if(subjRange != null) {
-            subjectVar.setValue(new RangeURI(subjRange));//Assumes no blank nodes can be ranges
-        }
-        if(predRange != null) {
-            predVar.setValue(new RangeURI(predRange));
-        }
-        if(objRange != null) {
-            objVar.setValue(objRange);
-        }
-    }
-}

http://git-wip-us.apache.org/repos/asf/incubator-rya/blob/80faf06d/sail/rya.sail.impl/src/main/java/mvm/rya/rdftriplestore/evaluation/MultipleBindingSetsIterator.java
----------------------------------------------------------------------
diff --git a/sail/rya.sail.impl/src/main/java/mvm/rya/rdftriplestore/evaluation/MultipleBindingSetsIterator.java b/sail/rya.sail.impl/src/main/java/mvm/rya/rdftriplestore/evaluation/MultipleBindingSetsIterator.java
deleted file mode 100644
index d1c5641..0000000
--- a/sail/rya.sail.impl/src/main/java/mvm/rya/rdftriplestore/evaluation/MultipleBindingSetsIterator.java
+++ /dev/null
@@ -1,107 +0,0 @@
-package mvm.rya.rdftriplestore.evaluation;
-
-/*
- * #%L
- * mvm.rya.rya.sail.impl
- * %%
- * Copyright (C) 2014 Rya
- * %%
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * You may obtain a copy of the License at
- * 
- *      http://www.apache.org/licenses/LICENSE-2.0
- * 
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- * #L%
- */
-
-import info.aduna.iteration.CloseableIteration;
-import info.aduna.iteration.LookAheadIteration;
-import org.openrdf.query.BindingSet;
-import org.openrdf.query.QueryEvaluationException;
-import org.openrdf.query.algebra.StatementPattern;
-import org.openrdf.query.algebra.TupleExpr;
-
-import java.util.ArrayList;
-import java.util.Collection;
-
-/**
- */
-public class MultipleBindingSetsIterator extends LookAheadIteration<BindingSet, QueryEvaluationException> {
-
-    private final ParallelEvaluationStrategyImpl strategy;
-    private final CloseableIteration leftIter;
-    private StatementPattern stmtPtrn;
-    private CloseableIteration<BindingSet, QueryEvaluationException> iter;
-    //TODO: configurable
-    private int batchSize = 1000;
-
-    public MultipleBindingSetsIterator(ParallelEvaluationStrategyImpl strategy, TupleExpr leftArg, StatementPattern stmtPattern, BindingSet bindings)
-            throws QueryEvaluationException {
-        this.strategy = strategy;
-        leftIter = strategy.evaluate(leftArg, bindings);
-        this.stmtPtrn = stmtPattern;
-        initIter();
-    }
-
-    public MultipleBindingSetsIterator(ParallelEvaluationStrategyImpl strategy, CloseableIteration leftIter, StatementPattern stmtPattern, BindingSet bindings)
-            throws QueryEvaluationException {
-        this.strategy = strategy;
-        this.leftIter = leftIter;
-        this.stmtPtrn = stmtPattern;
-        initIter();
-    }
-
-    protected void initIter() throws QueryEvaluationException {
-        try {
-            Collection<BindingSet> sets = new ArrayList<BindingSet>();
-            int i = 0;
-            while (leftIter.hasNext()) {
-                //default to 1K for the batch size
-                if (i >= batchSize) {
-                    break;
-                }
-                sets.add((BindingSet) leftIter.next());
-                i++;
-            }
-            if (iter != null) iter.close();
-            iter = strategy.evaluate(stmtPtrn, sets);
-        } catch (Exception e) {
-            throw new QueryEvaluationException(e);
-        }
-    }
-
-    protected BindingSet getNextElement()
-            throws QueryEvaluationException {
-        try {
-            while (true) {
-                if (iter.hasNext()) {
-                    return iter.next();
-                }
-
-                if (leftIter.hasNext()) {
-                    initIter();
-                } else
-                    return null;
-            }
-        } catch (Exception e) {
-            throw new QueryEvaluationException(e);
-        }
-    }
-
-    protected void handleClose()
-            throws QueryEvaluationException {
-        try {
-            super.handleClose();
-            leftIter.close();
-            iter.close();
-        } catch (Exception e) {
-            throw new QueryEvaluationException(e);
-        }
-    }
-}
\ No newline at end of file

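The two deleted iterators above (ExternalMultipleBindingSetsIterator and MultipleBindingSetsIterator) implement the same batched pipeline join: initIter() drains up to batchSize (default 1000) bindings from the left-hand iteration and hands the whole batch to the right-hand side in a single evaluate call, refilling whenever the current batch is exhausted. A minimal, self-contained sketch of that driving loop follows; BatchEvaluator and all names here are illustrative stand-ins for the StatementPattern/ExternalBatchingIterator evaluation, not Rya API.

    import java.util.ArrayList;
    import java.util.Iterator;
    import java.util.List;

    public class BatchedJoinSketch {

        // Illustrative stand-in for evaluating a statement pattern
        // against a whole batch of binding sets at once.
        interface BatchEvaluator<B> {
            Iterator<B> evaluate(List<B> batch);
        }

        // Drain up to batchSize left results, evaluate the right side once
        // per batch, and stream the joined results out; repeat until the
        // left side is exhausted. This mirrors initIter()/getNextElement().
        static <B> void drive(Iterator<B> left, BatchEvaluator<B> right, int batchSize) {
            while (left.hasNext()) {
                List<B> batch = new ArrayList<>(batchSize);
                while (left.hasNext() && batch.size() < batchSize) {
                    batch.add(left.next());
                }
                Iterator<B> joined = right.evaluate(batch);
                while (joined.hasNext()) {
                    System.out.println(joined.next());
                }
            }
        }
    }
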
http://git-wip-us.apache.org/repos/asf/incubator-rya/blob/80faf06d/sail/rya.sail.impl/src/main/java/mvm/rya/rdftriplestore/evaluation/ParallelEvaluationStrategyImpl.java
----------------------------------------------------------------------
diff --git a/sail/rya.sail.impl/src/main/java/mvm/rya/rdftriplestore/evaluation/ParallelEvaluationStrategyImpl.java b/sail/rya.sail.impl/src/main/java/mvm/rya/rdftriplestore/evaluation/ParallelEvaluationStrategyImpl.java
deleted file mode 100644
index 667b712..0000000
--- a/sail/rya.sail.impl/src/main/java/mvm/rya/rdftriplestore/evaluation/ParallelEvaluationStrategyImpl.java
+++ /dev/null
@@ -1,280 +0,0 @@
-package mvm.rya.rdftriplestore.evaluation;
-
-/*
- * #%L
- * mvm.rya.rya.sail.impl
- * %%
- * Copyright (C) 2014 Rya
- * %%
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * You may obtain a copy of the License at
- * 
- *      http://www.apache.org/licenses/LICENSE-2.0
- * 
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- * #L%
- */
-
-import info.aduna.iteration.CloseableIteration;
-import info.aduna.iteration.ConvertingIteration;
-import info.aduna.iteration.EmptyIteration;
-import info.aduna.iteration.Iteration;
-import info.aduna.iteration.IteratorIteration;
-import info.aduna.iteration.LimitIteration;
-import info.aduna.iteration.OffsetIteration;
-
-import java.util.ArrayList;
-import java.util.Collection;
-import java.util.Collections;
-import java.util.List;
-import java.util.Map;
-import java.util.Set;
-import java.util.concurrent.ExecutorService;
-import java.util.concurrent.Executors;
-
-import mvm.rya.api.RdfCloudTripleStoreConfiguration;
-import mvm.rya.api.RdfCloudTripleStoreUtils;
-import mvm.rya.api.utils.NullableStatementImpl;
-import mvm.rya.rdftriplestore.RdfCloudTripleStoreConnection;
-import mvm.rya.rdftriplestore.inference.InferenceEngine;
-import mvm.rya.rdftriplestore.inference.InferenceEngineException;
-import mvm.rya.rdftriplestore.utils.FixedStatementPattern;
-import mvm.rya.rdftriplestore.utils.TransitivePropertySP;
-
-import org.apache.log4j.Logger;
-import org.openrdf.model.Resource;
-import org.openrdf.model.Statement;
-import org.openrdf.model.URI;
-import org.openrdf.model.Value;
-import org.openrdf.query.BindingSet;
-import org.openrdf.query.Dataset;
-import org.openrdf.query.QueryEvaluationException;
-import org.openrdf.query.algebra.Filter;
-import org.openrdf.query.algebra.Join;
-import org.openrdf.query.algebra.QueryRoot;
-import org.openrdf.query.algebra.Slice;
-import org.openrdf.query.algebra.StatementPattern;
-import org.openrdf.query.algebra.TupleExpr;
-import org.openrdf.query.algebra.ValueExpr;
-import org.openrdf.query.algebra.Var;
-import org.openrdf.query.algebra.evaluation.QueryBindingSet;
-import org.openrdf.query.algebra.evaluation.ValueExprEvaluationException;
-import org.openrdf.query.algebra.evaluation.impl.EvaluationStrategyImpl;
-import org.openrdf.query.algebra.evaluation.iterator.FilterIterator;
-import org.openrdf.query.algebra.evaluation.iterator.JoinIterator;
-import org.openrdf.query.algebra.evaluation.util.QueryEvaluationUtil;
-
-import com.google.common.collect.Lists;
-
-/**
- */
-public class ParallelEvaluationStrategyImpl extends EvaluationStrategyImpl {
-    private static Logger logger = Logger.getLogger(ParallelEvaluationStrategyImpl.class);
-    
-    private int numOfThreads = 10;
-    private boolean performant = true;
-    private boolean displayQueryPlan = false;
-    private ExecutorService executorService;
-    private InferenceEngine inferenceEngine;
-
-    public ParallelEvaluationStrategyImpl(RdfCloudTripleStoreConnection.StoreTripleSource tripleSource, InferenceEngine inferenceEngine,
-                                          Dataset dataset, RdfCloudTripleStoreConfiguration conf) {
-        super(tripleSource, dataset);
-        Integer nthreads = conf.getNumThreads();
-        this.numOfThreads = (nthreads != null) ? nthreads : this.numOfThreads;
-        Boolean val = conf.isPerformant();
-        this.performant = (val != null) ? val : this.performant;
-        val = conf.isDisplayQueryPlan();
-        this.displayQueryPlan = (val != null) ? val : this.displayQueryPlan;
-        this.executorService = Executors.newFixedThreadPool(this.numOfThreads);
-        this.inferenceEngine = inferenceEngine;
-    }
-
-    @Override
-    public CloseableIteration<BindingSet, QueryEvaluationException> evaluate(Join join, BindingSet bindings) throws QueryEvaluationException {
-        if (performant) {
-            TupleExpr buffer = join.getLeftArg();
-            if (join.getRightArg() instanceof StatementPattern) {
-                TupleExpr stmtPat = join.getRightArg();
-//                if(buffer instanceof StatementPattern && !(stmtPat instanceof StatementPattern)){
-//                    buffer = stmtPat;
-//                    stmtPat = join.getLeftArg();
-//                }
-
-                return new MultipleBindingSetsIterator(this, buffer, (StatementPattern) stmtPat, bindings);
-            } else if (join.getRightArg() instanceof ExternalBatchingIterator) {
-                    TupleExpr stmtPat = join.getRightArg();
-
-                    return new ExternalMultipleBindingSetsIterator(this, buffer, (ExternalBatchingIterator) stmtPat, bindings);
-            } else if (join.getRightArg() instanceof Filter) {
-                //add performance for the filter too
-                Filter filter = (Filter) join.getRightArg();
-                TupleExpr filterChild = filter.getArg();
-                if (filterChild instanceof StatementPattern) {
-                    return new FilterIterator(filter, new MultipleBindingSetsIterator(this, buffer, (StatementPattern) filterChild, bindings), this);
-                } else if (filterChild instanceof Join) {
-                    Join filterChildJoin = (Join) filterChild;
-                    TupleExpr fcj_left = filterChildJoin.getLeftArg();
-                    TupleExpr fcj_right = filterChildJoin.getRightArg();
-                    //TODO: Should be a better way, maybe reorder the filter?
-                    //very particular case filter(join(stmtPat, stmtPat))
-                    if (fcj_left instanceof StatementPattern && fcj_right instanceof StatementPattern) {
-                        return new FilterIterator(filter, new MultipleBindingSetsIterator(this, new Join(buffer, fcj_left), (StatementPattern) fcj_right, bindings), this);
-                    }
-                }
-                //TODO: add a configuration flag for ParallelJoinIterator
-                return new JoinIterator(this, join, bindings);
-            } else {
-                //TODO: add a configuration flag for ParallelJoinIterator
-                return new JoinIterator(this, join, bindings);
-            }
-        } else {
-            return super.evaluate(join, bindings);
-        }
-    }
-
-    @Override
-    public CloseableIteration<BindingSet, QueryEvaluationException> evaluate(StatementPattern sp, BindingSet bindings) throws QueryEvaluationException {
-        //TODO: Wonder if creating a Collection here hurts performance
-        Set<BindingSet> bs = Collections.singleton(bindings);
-        return this.evaluate(sp, bs);
-    }
-
-    public CloseableIteration<BindingSet, QueryEvaluationException> evaluate(final StatementPattern sp, Collection<BindingSet> bindings)
-            throws QueryEvaluationException {
-
-        final Var subjVar = sp.getSubjectVar();
-        final Var predVar = sp.getPredicateVar();
-        final Var objVar = sp.getObjectVar();
-        final Var cntxtVar = sp.getContextVar();
-
-        List<Map.Entry<Statement, BindingSet>> stmts = new ArrayList<Map.Entry<Statement, BindingSet>>();
-
-        Iteration<? extends Map.Entry<Statement, BindingSet>, QueryEvaluationException> iter;
-        if (sp instanceof FixedStatementPattern) {
-            Collection<Map.Entry<Statement, BindingSet>> coll = Lists.newArrayList();
-            for (BindingSet binding : bindings) {
-                Value subjValue = getVarValue(subjVar, binding);
-                Value predValue = getVarValue(predVar, binding);
-                Value objValue = getVarValue(objVar, binding);
-                Resource contxtValue = (Resource) getVarValue(cntxtVar, binding);
-                for (Statement st : ((FixedStatementPattern) sp).statements) {
-                    if (!((subjValue != null && !subjValue.equals(st.getSubject())) ||
-                            (predValue != null && !predValue.equals(st.getPredicate())) ||
-                            (objValue != null && !objValue.equals(st.getObject())))) {
-                        coll.add(new RdfCloudTripleStoreUtils.CustomEntry<Statement, BindingSet>(st, binding));
-                    }
-                }
-            }
-            iter = new IteratorIteration(coll.iterator());
-        } else if (sp instanceof TransitivePropertySP &&
-                ((subjVar != null && subjVar.getValue() != null) ||
-                        (objVar != null && objVar.getValue() != null)) &&
-                sp.getPredicateVar() != null) {
-            //if this is a transitive prop ref, we need to make sure that either the subj or obj is not null
-            //TODO: Cannot handle a open ended transitive property where subj and obj are null
-            //TODO: Should one day handle filling in the subj or obj with bindings and working this
-            //TODO: a lot of assumptions, and might be a large set returned causing an OME
-            Set<Statement> sts = null;
-            try {
-                sts = inferenceEngine.findTransitiveProperty((Resource) getVarValue(subjVar),
-                        (URI) getVarValue(predVar), getVarValue(objVar), (Resource) getVarValue(cntxtVar));
-            } catch (InferenceEngineException e) {
-                throw new QueryEvaluationException(e);
-            }
-            Collection<Map.Entry<Statement, BindingSet>> coll = new ArrayList();
-            for (BindingSet binding : bindings) {
-                for (Statement st : sts) {
-                    coll.add(new RdfCloudTripleStoreUtils.CustomEntry<Statement, BindingSet>(st, binding));
-                }
-            }
-            iter = new IteratorIteration(coll.iterator());
-        } else {
-            for (BindingSet binding : bindings) {
-                Value subjValue = getVarValue(subjVar, binding);
-                Value predValue = getVarValue(predVar, binding);
-                Value objValue = getVarValue(objVar, binding);
-                Resource contxtValue = (Resource) getVarValue(cntxtVar, binding);
-                if ((subjValue != null && !(subjValue instanceof Resource)) ||
-                        (predValue != null && !(predValue instanceof URI))) {
-                    continue;
-                }
-                stmts.add(new RdfCloudTripleStoreUtils.CustomEntry<Statement, BindingSet>(
-                        new NullableStatementImpl((Resource) subjValue, (URI) predValue, objValue, contxtValue), binding));
-            }
-            if (stmts.size() == 0) {
-                return new EmptyIteration();
-            }
-
-            iter = ((RdfCloudTripleStoreConnection.StoreTripleSource) tripleSource).getStatements(stmts);
-        }
-        return new ConvertingIteration<Map.Entry<Statement, BindingSet>, BindingSet, QueryEvaluationException>(iter) {
-
-            @Override
-            protected BindingSet convert(Map.Entry<Statement, BindingSet> stbs) throws QueryEvaluationException {
-                Statement st = stbs.getKey();
-                BindingSet bs = stbs.getValue();
-                QueryBindingSet result = new QueryBindingSet(bs);
-                if (subjVar != null && !result.hasBinding(subjVar.getName())) {
-                    result.addBinding(subjVar.getName(), st.getSubject());
-                }
-                if (predVar != null && !result.hasBinding(predVar.getName())) {
-                    result.addBinding(predVar.getName(), st.getPredicate());
-                }
-                if (objVar != null && !result.hasBinding(objVar.getName())) {
-                    result.addBinding(objVar.getName(), st.getObject());
-                }
-                if (cntxtVar != null && !result.hasBinding(cntxtVar.getName()) && st.getContext() != null) {
-                    result.addBinding(cntxtVar.getName(), st.getContext());
-                }
-                return result;
-            }
-        };
-    }
-
-    @Override
-    public CloseableIteration<BindingSet, QueryEvaluationException> evaluate(TupleExpr expr, BindingSet bindings) throws QueryEvaluationException {
-        if (expr instanceof QueryRoot) {
-            if (displayQueryPlan) {
-//                System.out.println("Tables: ");
-//                System.out.println("--SPO: \t" + RdfCloudTripleStoreConstants.TBL_SPO);
-//                System.out.println("--PO: \t" + RdfCloudTripleStoreConstants.TBL_PO);
-//                System.out.println("--OSP: \t" + RdfCloudTripleStoreConstants.TBL_OSP);
-                logger.info("=================== Rya Query ===================");
-                for (String str : expr.toString().split("\\r?\\n")) {
-                    logger.info(str);
-                }
-                logger.info("================= End Rya Query =================");
-            }
-        }
-        return super.evaluate(expr, bindings);
-    }
-
-    public CloseableIteration evaluate(Slice slice, BindingSet bindings)
-            throws QueryEvaluationException {
-        CloseableIteration result = evaluate(slice.getArg(), bindings);
-        if (slice.hasOffset()) {
-            result = new OffsetIteration(result, slice.getOffset());
-        }
-        if (slice.hasLimit()) {
-            result = new LimitIteration(result, slice.getLimit());
-        }
-        return result;
-    }
-
-    protected Value getVarValue(Var var) {
-        if (var == null)
-            return null;
-        else
-            return var.getValue();
-    }
-
-    public void shutdown() {
-        executorService.shutdownNow();
-    }
-}

http://git-wip-us.apache.org/repos/asf/incubator-rya/blob/80faf06d/sail/rya.sail.impl/src/main/java/mvm/rya/rdftriplestore/evaluation/ParallelJoinIterator.java
----------------------------------------------------------------------
diff --git a/sail/rya.sail.impl/src/main/java/mvm/rya/rdftriplestore/evaluation/ParallelJoinIterator.java b/sail/rya.sail.impl/src/main/java/mvm/rya/rdftriplestore/evaluation/ParallelJoinIterator.java
deleted file mode 100644
index 0b74c3b..0000000
--- a/sail/rya.sail.impl/src/main/java/mvm/rya/rdftriplestore/evaluation/ParallelJoinIterator.java
+++ /dev/null
@@ -1,138 +0,0 @@
-package mvm.rya.rdftriplestore.evaluation;
-
-/*
- * #%L
- * mvm.rya.rya.sail.impl
- * %%
- * Copyright (C) 2014 Rya
- * %%
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * You may obtain a copy of the License at
- * 
- *      http://www.apache.org/licenses/LICENSE-2.0
- * 
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- * #L%
- */
-
-import info.aduna.iteration.CloseableIteration;
-import info.aduna.iteration.LookAheadIteration;
-
-import java.util.NoSuchElementException;
-import java.util.Queue;
-import java.util.concurrent.ExecutorService;
-import java.util.concurrent.LinkedBlockingQueue;
-
-import org.openrdf.query.BindingSet;
-import org.openrdf.query.QueryEvaluationException;
-import org.openrdf.query.algebra.Join;
-import org.openrdf.query.algebra.TupleExpr;
-import org.openrdf.query.algebra.evaluation.EvaluationStrategy;
-import org.openrdf.query.impl.EmptyBindingSet;
-
-/**
- */
-public class ParallelJoinIterator extends LookAheadIteration<BindingSet, QueryEvaluationException> {
-
-    public static final EmptyBindingSet EMPTY_BINDING_SET = new EmptyBindingSet();
-
-    private final EvaluationStrategy strategy;
-    private final Join join;
-    private final CloseableIteration<BindingSet, QueryEvaluationException> leftIter;
-
-    private ExecutorService executorService;
-    private Queue<ParallelIteratorWork> workQueue = new LinkedBlockingQueue<ParallelIteratorWork>();
-    private ParallelIteratorWork currentWork;
-    private int batch;
-
-    public ParallelJoinIterator(EvaluationStrategy strategy, Join join, BindingSet bindings, ExecutorService executorService, int batch)
-            throws QueryEvaluationException {
-        this.strategy = strategy;
-        this.join = join;
-        leftIter = strategy.evaluate(join.getLeftArg(), bindings);
-
-        this.executorService = executorService;
-        this.batch = batch;
-    }
-
-
-    @Override
-    protected BindingSet getNextElement() throws QueryEvaluationException {
-
-        try {
-            while (leftIter.hasNext() || !workQueue.isEmpty() || currentWork != null) {
-                if (!workQueue.isEmpty() && currentWork == null) {
-                    currentWork = workQueue.poll();
-                }
-
-                if (currentWork != null) {
-                    BindingSet bindingSet = currentWork.queue.poll();
-                    if (EMPTY_BINDING_SET.equals(bindingSet)) {
-                        currentWork = null;
-                        continue;
-                    } else if (bindingSet == null) {
-                        continue;
-                    }
-                    return bindingSet;
-                }
-
-                try {
-                    for (int i = 0; i < batch; i++) {
-                        if (leftIter.hasNext()) {
-                            ParallelIteratorWork work = new ParallelIteratorWork((BindingSet) leftIter.next(), join.getRightArg());
-                            workQueue.add(work);
-                            executorService.execute(work);
-                        } else
-                            break;
-                    }
-                } catch (NoSuchElementException ignore) {
-                }
-            }
-        } catch (Exception e) {
-            throw new QueryEvaluationException(e);
-        }
-        return null;
-    }
-
-    @Override
-    protected void handleClose() throws QueryEvaluationException {
-        try {
-            super.handleClose();
-            leftIter.close();
-//           rightIter.close();
-        } catch (Exception e) {
-            throw new QueryEvaluationException(e);
-        }
-    }
-
-    private class ParallelIteratorWork implements Runnable {
-
-        private BindingSet leftBindingSet;
-        private TupleExpr rightTupleExpr;
-        public LinkedBlockingQueue<BindingSet> queue = new LinkedBlockingQueue<BindingSet>();
-
-        private ParallelIteratorWork(BindingSet leftBindingSet, TupleExpr rightTupleExpr) {
-            this.leftBindingSet = leftBindingSet;
-            this.rightTupleExpr = rightTupleExpr;
-        }
-
-        @Override
-        public void run() {
-            try {
-                CloseableIteration<BindingSet, QueryEvaluationException> iter = strategy.evaluate(rightTupleExpr, leftBindingSet);
-                while (iter.hasNext()) {
-                    queue.add(iter.next());
-                }
-                queue.add(EMPTY_BINDING_SET);
-                iter.close();
-            } catch (QueryEvaluationException e) {
-                throw new RuntimeException(e);
-            }
-        }
-    }
-}

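ParallelJoinIterator above coordinates its workers with a sentinel ("poison pill"): each ParallelIteratorWork streams results into its own queue and appends EMPTY_BINDING_SET so the consumer can tell a finished stream from one that merely has no result ready yet. A minimal sketch of that protocol, with illustrative names in place of the binding-set types:

    import java.util.concurrent.ExecutorService;
    import java.util.concurrent.Executors;
    import java.util.concurrent.LinkedBlockingQueue;

    public class SentinelQueueSketch {
        // stands in for EMPTY_BINDING_SET in the iterator above
        private static final String END = "<end>";

        public static void main(String[] args) throws InterruptedException {
            LinkedBlockingQueue<String> queue = new LinkedBlockingQueue<>();
            ExecutorService pool = Executors.newFixedThreadPool(1);
            pool.execute(() -> {
                queue.add("result-1");
                queue.add("result-2");
                queue.add(END); // end-of-stream sentinel
            });
            // consume until the sentinel arrives
            for (String s = queue.take(); !END.equals(s); s = queue.take()) {
                System.out.println(s);
            }
            pool.shutdown();
        }
    }
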
http://git-wip-us.apache.org/repos/asf/incubator-rya/blob/80faf06d/sail/rya.sail.impl/src/main/java/mvm/rya/rdftriplestore/evaluation/PushJoinDownVisitor.java
----------------------------------------------------------------------
diff --git a/sail/rya.sail.impl/src/main/java/mvm/rya/rdftriplestore/evaluation/PushJoinDownVisitor.java b/sail/rya.sail.impl/src/main/java/mvm/rya/rdftriplestore/evaluation/PushJoinDownVisitor.java
deleted file mode 100644
index 163f167..0000000
--- a/sail/rya.sail.impl/src/main/java/mvm/rya/rdftriplestore/evaluation/PushJoinDownVisitor.java
+++ /dev/null
@@ -1,56 +0,0 @@
-package mvm.rya.rdftriplestore.evaluation;
-
-/*
- * #%L
- * mvm.rya.rya.sail.impl
- * %%
- * Copyright (C) 2014 Rya
- * %%
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * You may obtain a copy of the License at
- * 
- *      http://www.apache.org/licenses/LICENSE-2.0
- * 
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- * #L%
- */
-
-import org.openrdf.query.algebra.Join;
-import org.openrdf.query.algebra.TupleExpr;
-import org.openrdf.query.algebra.helpers.QueryModelVisitorBase;
-
-/**
- * Class ReorderJoinVisitor
- * Date: Apr 11, 2011
- * Time: 10:16:15 PM
- */
-public class PushJoinDownVisitor extends QueryModelVisitorBase {
-    @Override
-    public void meet(Join node) throws Exception {
-        super.meet(node);
-
-        TupleExpr leftArg = node.getLeftArg();
-        TupleExpr rightArg = node.getRightArg();
-
-        /**
-         * if join(join(1, 2), join(3,4))
-         * should be:
-         * join(join(join(1,2), 3), 4)
-         */
-        if (leftArg instanceof Join && rightArg instanceof Join) {
-            Join leftJoin = (Join) leftArg;
-            Join rightJoin = (Join) rightArg;
-            TupleExpr right_LeftArg = rightJoin.getLeftArg();
-            TupleExpr right_rightArg = rightJoin.getRightArg();
-            Join inner = new Join(leftJoin, right_LeftArg);
-            Join outer = new Join(inner, right_rightArg);
-            node.replaceWith(outer);
-        }
-
-    }
-}

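The rewrite in PushJoinDownVisitor turns a bushy join of joins into a left-deep chain, per the comment in meet(Join). A small illustrative sketch of the shape of that transformation, using strings in place of TupleExprs:

    public class LeftDeepSketch {
        static String join(String l, String r) { return "join(" + l + ", " + r + ")"; }

        public static void main(String[] args) {
            String bushy = join(join("1", "2"), join("3", "4"));
            // rewrite: keep the left join, pull the right join's children up
            String leftDeep = join(join(join("1", "2"), "3"), "4");
            System.out.println(bushy + "  ->  " + leftDeep);
        }
    }
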
http://git-wip-us.apache.org/repos/asf/incubator-rya/blob/80faf06d/sail/rya.sail.impl/src/main/java/mvm/rya/rdftriplestore/evaluation/QueryJoinOptimizer.java
----------------------------------------------------------------------
diff --git a/sail/rya.sail.impl/src/main/java/mvm/rya/rdftriplestore/evaluation/QueryJoinOptimizer.java b/sail/rya.sail.impl/src/main/java/mvm/rya/rdftriplestore/evaluation/QueryJoinOptimizer.java
deleted file mode 100644
index 69eb4e1..0000000
--- a/sail/rya.sail.impl/src/main/java/mvm/rya/rdftriplestore/evaluation/QueryJoinOptimizer.java
+++ /dev/null
@@ -1,283 +0,0 @@
-package mvm.rya.rdftriplestore.evaluation;
-
-/*
- * #%L
- * mvm.rya.rya.sail.impl
- * %%
- * Copyright (C) 2014 Rya
- * %%
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * You may obtain a copy of the License at
- * 
- *      http://www.apache.org/licenses/LICENSE-2.0
- * 
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- * #L%
- */
-
-import org.openrdf.query.BindingSet;
-import org.openrdf.query.Dataset;
-import org.openrdf.query.algebra.*;
-import org.openrdf.query.algebra.evaluation.QueryOptimizer;
-import org.openrdf.query.algebra.evaluation.impl.EvaluationStatistics;
-import org.openrdf.query.algebra.helpers.QueryModelVisitorBase;
-import org.openrdf.query.algebra.helpers.StatementPatternCollector;
-
-import java.util.*;
-
-/**
- * A query optimizer that re-orders nested Joins.
- *
- * @author Arjohn Kampman
- * @author James Leigh
- */
-public class QueryJoinOptimizer implements QueryOptimizer {
-
-    protected final EvaluationStatistics statistics;
-
-    public QueryJoinOptimizer() {
-        this(new EvaluationStatistics());
-    }
-
-    public QueryJoinOptimizer(EvaluationStatistics statistics) {
-        this.statistics = statistics;
-    }
-
-    /**
-     * Applies generally applicable optimizations: path expressions are sorted
-     * from more to less specific.
-     *
-     * @param tupleExpr
-     */
-    public void optimize(TupleExpr tupleExpr, Dataset dataset, BindingSet bindings) {
-        tupleExpr.visit(new JoinVisitor());
-    }
-
-    protected class JoinVisitor extends QueryModelVisitorBase<RuntimeException> {
-
-        Set<String> boundVars = new HashSet<String>();
-
-        @Override
-        public void meet(LeftJoin leftJoin) {
-            leftJoin.getLeftArg().visit(this);
-
-            Set<String> origBoundVars = boundVars;
-            try {
-                boundVars = new HashSet<String>(boundVars);
-                boundVars.addAll(leftJoin.getLeftArg().getBindingNames());
-
-                leftJoin.getRightArg().visit(this);
-            } finally {
-                boundVars = origBoundVars;
-            }
-        }
-
-        @Override
-        public void meet(Join node) {
-            Set<String> origBoundVars = boundVars;
-            try {
-                boundVars = new HashSet<String>(boundVars);
-
-                // Recursively get the join arguments
-                List<TupleExpr> joinArgs = getJoinArgs(node, new ArrayList<TupleExpr>());
-
-                // Build maps of cardinalities and vars per tuple expression
-                Map<TupleExpr, Double> cardinalityMap = new HashMap<TupleExpr, Double>();
-//                Map<TupleExpr, List<Var>> varsMap = new HashMap<TupleExpr, List<Var>>();
-//                Map<Var, Double> varCardinalityMap = new HashMap<Var, Double>();
-
-                for (TupleExpr tupleExpr : joinArgs) {
-                    double cardinality = statistics.getCardinality(tupleExpr);
-//                    List<Var> statementPatternVars = getStatementPatternVars(tupleExpr);
-
-                    cardinalityMap.put(tupleExpr, cardinality);
-//                    varsMap.put(tupleExpr, statementPatternVars);
-                }
-
-                // Build map of var frequences
-//                Map<Var, Integer> varFreqMap = new HashMap<Var, Integer>();
-//                for (List<Var> varList : varsMap.values()) {
-//                    getVarFreqMap(varList, varFreqMap);
-//                }
-
-                // Reorder the (recursive) join arguments to a more optimal sequence
-                List<TupleExpr> orderedJoinArgs = new ArrayList<TupleExpr>(joinArgs.size());
-                while (!joinArgs.isEmpty()) {
-                    TupleExpr tupleExpr = selectNextTupleExpr(joinArgs, cardinalityMap
-                    );
-                    if (tupleExpr == null) {
-                        break;
-                    }
-
-                    joinArgs.remove(tupleExpr);
-                    orderedJoinArgs.add(tupleExpr);
-
-                    // Recursively optimize join arguments
-                    tupleExpr.visit(this);
-
-                    boundVars.addAll(tupleExpr.getBindingNames());
-                }
-
-                // Build new join hierarchy
-                // Note: generated hierarchy is right-recursive to help the
-                // IterativeEvaluationOptimizer to factor out the left-most join
-                // argument
-                int i = 0;
-                TupleExpr replacement = orderedJoinArgs.get(i);
-                for (i++; i < orderedJoinArgs.size(); i++) {
-                    replacement = new Join(replacement, orderedJoinArgs.get(i));
-                }
-
-                // Replace old join hierarchy
-                node.replaceWith(replacement);
-            } finally {
-                boundVars = origBoundVars;
-            }
-        }
-
-        protected <L extends List<TupleExpr>> L getJoinArgs(TupleExpr tupleExpr, L joinArgs) {
-            if (tupleExpr instanceof Join) {
-                Join join = (Join) tupleExpr;
-                getJoinArgs(join.getLeftArg(), joinArgs);
-                getJoinArgs(join.getRightArg(), joinArgs);
-            } else {
-                joinArgs.add(tupleExpr);
-            }
-
-            return joinArgs;
-        }
-
-        protected List<Var> getStatementPatternVars(TupleExpr tupleExpr) {
-            List<StatementPattern> stPatterns = StatementPatternCollector.process(tupleExpr);
-            List<Var> varList = new ArrayList<Var>(stPatterns.size() * 4);
-            for (StatementPattern sp : stPatterns) {
-                sp.getVars(varList);
-            }
-            return varList;
-        }
-
-        protected <M extends Map<Var, Integer>> M getVarFreqMap(List<Var> varList, M varFreqMap) {
-            for (Var var : varList) {
-                Integer freq = varFreqMap.get(var);
-                freq = (freq == null) ? 1 : freq + 1;
-                varFreqMap.put(var, freq);
-            }
-            return varFreqMap;
-        }
-
-        /**
-         * Selects from a list of tuple expressions the next tuple expression that
-         * should be evaluated. This method selects the tuple expression with
-         * highest number of bound variables, preferring variables that have been
-         * bound in other tuple expressions over variables with a fixed value.
-         */
-        protected TupleExpr selectNextTupleExpr(List<TupleExpr> expressions,
-                                                Map<TupleExpr, Double> cardinalityMap
-//                                                ,Map<TupleExpr, List<Var>> varsMap,
-//                                                Map<Var, Integer> varFreqMap, Set<String> boundVars
-        ) {
-            double lowestCardinality = Double.MAX_VALUE;
-            TupleExpr result = expressions.get(0);
-
-            for (TupleExpr tupleExpr : expressions) {
-                // Calculate a score for this tuple expression
-//                double cardinality = getTupleExprCardinality(tupleExpr, cardinalityMap, varsMap, varFreqMap, boundVars);
-                double cardinality = cardinalityMap.get(tupleExpr);
-//                List<Var> vars = varsMap.get(tupleExpr);
-//                List<Var> distinctUnboundVars = getUnboundVars(vars);
-//                if (distinctUnboundVars.size() >= 2) {
-//                    cardinality *= (distinctUnboundVars.size() + 1);
-//                }
-
-                if (cardinality < lowestCardinality) {
-                    // More specific path expression found
-                    lowestCardinality = cardinality;
-                    result = tupleExpr;
-                }
-            }
-
-            return result;
-        }
-
-        protected double getTupleExprCardinality(TupleExpr tupleExpr, Map<TupleExpr, Double> cardinalityMap,
-                                                 Map<TupleExpr, List<Var>> varsMap, Map<Var, Integer> varFreqMap, Set<String> boundVars) {
-            double cardinality = cardinalityMap.get(tupleExpr);
-
-            List<Var> vars = varsMap.get(tupleExpr);
-
-            // Compensate for variables that are bound earlier in the evaluation
-            List<Var> unboundVars = getUnboundVars(vars);
-            List<Var> constantVars = getConstantVars(vars);
-            int nonConstantVarCount = vars.size() - constantVars.size();
-            if (nonConstantVarCount > 0) {
-                double exp = (double) unboundVars.size() / nonConstantVarCount;
-                cardinality = Math.pow(cardinality, exp);
-            }
-
-            if (unboundVars.isEmpty()) {
-                // Prefer patterns with more bound vars
-                if (nonConstantVarCount > 0) {
-                    cardinality /= nonConstantVarCount;
-                }
-            } else {
-                // Prefer patterns that bind variables from other tuple expressions
-                int foreignVarFreq = getForeignVarFreq(unboundVars, varFreqMap);
-                if (foreignVarFreq > 0) {
-                    cardinality /= foreignVarFreq;
-                }
-            }
-
-            // Prefer patterns that bind more variables
-            List<Var> distinctUnboundVars = getUnboundVars(new
-                    HashSet<Var>(vars));
-            if (distinctUnboundVars.size() >= 2) {
-                cardinality /= distinctUnboundVars.size();
-            }
-
-            return cardinality;
-        }
-
-        protected List<Var> getConstantVars(Iterable<Var> vars) {
-            List<Var> constantVars = new ArrayList<Var>();
-
-            for (Var var : vars) {
-                if (var.hasValue()) {
-                    constantVars.add(var);
-                }
-            }
-
-            return constantVars;
-        }
-
-        protected List<Var> getUnboundVars(Iterable<Var> vars) {
-            List<Var> unboundVars = new ArrayList<Var>();
-
-            for (Var var : vars) {
-                if (!var.hasValue() && !this.boundVars.contains(var.getName())) {
-                    unboundVars.add(var);
-                }
-            }
-
-            return unboundVars;
-        }
-
-        protected int getForeignVarFreq(List<Var> ownUnboundVars, Map<Var, Integer> varFreqMap) {
-            int result = 0;
-
-            Map<Var, Integer> ownFreqMap = getVarFreqMap(ownUnboundVars, new HashMap<Var, Integer>());
-
-            for (Map.Entry<Var, Integer> entry : ownFreqMap.entrySet()) {
-                Var var = entry.getKey();
-                int ownFreq = entry.getValue();
-                result += varFreqMap.get(var) - ownFreq;
-            }
-
-            return result;
-        }
-    }
-}
\ No newline at end of file

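QueryJoinOptimizer's JoinVisitor reorders join arguments greedily: at each step selectNextTupleExpr picks the remaining argument with the lowest estimated cardinality, and the ordered list is then folded back into a join chain. A minimal sketch of that selection loop, where strings and made-up cardinalities stand in for TupleExprs and EvaluationStatistics:

    import java.util.ArrayList;
    import java.util.HashMap;
    import java.util.List;
    import java.util.Map;

    public class GreedyOrderSketch {
        public static void main(String[] args) {
            // hypothetical patterns and cardinality estimates
            Map<String, Double> cardinality = new HashMap<>();
            cardinality.put("?s :p1 ?o", 5000.0);
            cardinality.put("?s :p2 \"x\"", 40.0);
            cardinality.put("?o :p3 ?y", 900.0);

            List<String> remaining = new ArrayList<>(cardinality.keySet());
            List<String> ordered = new ArrayList<>();
            while (!remaining.isEmpty()) {
                // pick the lowest-cardinality argument, as in selectNextTupleExpr
                String best = remaining.get(0);
                for (String arg : remaining) {
                    if (cardinality.get(arg) < cardinality.get(best)) {
                        best = arg;
                    }
                }
                remaining.remove(best);
                ordered.add(best);
            }
            System.out.println(ordered); // most selective pattern first
        }
    }
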
http://git-wip-us.apache.org/repos/asf/incubator-rya/blob/80faf06d/sail/rya.sail.impl/src/main/java/mvm/rya/rdftriplestore/evaluation/QueryJoinSelectOptimizer.java
----------------------------------------------------------------------
diff --git a/sail/rya.sail.impl/src/main/java/mvm/rya/rdftriplestore/evaluation/QueryJoinSelectOptimizer.java b/sail/rya.sail.impl/src/main/java/mvm/rya/rdftriplestore/evaluation/QueryJoinSelectOptimizer.java
deleted file mode 100644
index 0c7ba2c..0000000
--- a/sail/rya.sail.impl/src/main/java/mvm/rya/rdftriplestore/evaluation/QueryJoinSelectOptimizer.java
+++ /dev/null
@@ -1,259 +0,0 @@
-package mvm.rya.rdftriplestore.evaluation;
-
-/*
- * #%L
- * mvm.rya.rya.sail.impl
- * %%
- * Copyright (C) 2014 - 2015 Rya
- * %%
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * You may obtain a copy of the License at
- * 
- *      http://www.apache.org/licenses/LICENSE-2.0
- * 
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- * #L%
- */
-
-import java.util.ArrayList;
-import java.util.HashMap;
-import java.util.List;
-import java.util.Map;
-
-import mvm.rya.api.RdfCloudTripleStoreConfiguration;
-import mvm.rya.api.persist.joinselect.SelectivityEvalDAO;
-import mvm.rya.rdftriplestore.inference.DoNotExpandSP;
-import mvm.rya.rdftriplestore.utils.FixedStatementPattern;
-
-import org.openrdf.query.BindingSet;
-import org.openrdf.query.Dataset;
-import org.openrdf.query.algebra.Join;
-import org.openrdf.query.algebra.TupleExpr;
-import org.openrdf.query.algebra.evaluation.QueryOptimizer;
-import org.openrdf.query.algebra.evaluation.impl.EvaluationStatistics;
-import org.openrdf.query.algebra.helpers.QueryModelVisitorBase;
-
-public class QueryJoinSelectOptimizer implements QueryOptimizer {
-
-  private final EvaluationStatistics statistics;
-  private final SelectivityEvalDAO eval;
-  private final RdfCloudTripleStoreConfiguration config;
-
-  public QueryJoinSelectOptimizer(EvaluationStatistics statistics, SelectivityEvalDAO eval) {
-    System.out.println("Entering join optimizer!");
-    this.statistics = statistics;
-    this.eval = eval;
-    this.config = eval.getConf();
-  }
-
-  /**
-   * Applies generally applicable optimizations: path expressions are sorted from more to less specific.
-   *
-   * @param tupleExpr
-   */
-  public void optimize(TupleExpr tupleExpr, Dataset dataset, BindingSet bindings) {
-    tupleExpr.visit(new JoinVisitor());
-  }
-
-  protected class JoinVisitor extends QueryModelVisitorBase<RuntimeException> {
-
-    @Override
-    public void meet(Join node) {
-
-      try {
-        if (node.getLeftArg() instanceof FixedStatementPattern && node.getRightArg() instanceof DoNotExpandSP) {
-          return;
-        }
-
-        TupleExpr partialQuery = null;
-        List<TupleExpr> joinArgs = getJoinArgs(node, new ArrayList<TupleExpr>());
-        Map<TupleExpr,Double> cardinalityMap = new HashMap<TupleExpr,Double>();
-
-        for (TupleExpr tupleExpr : joinArgs) {
-          double cardinality = statistics.getCardinality(tupleExpr);
-          cardinalityMap.put(tupleExpr, cardinality);
-
-        }
-
-        while (!joinArgs.isEmpty()) {
-          TePairCost tpc = getBestTupleJoin(partialQuery, joinArgs);
-          List<TupleExpr> tePair = tpc.getTePair();
-          if (partialQuery == null) {
-            if (tePair.size() != 2) {
-              throw new IllegalStateException();
-            }
-            if (!(tePair.get(0) instanceof Join)) {
-              tePair.get(0).visit(this);
-            }
-            if (!(tePair.get(1) instanceof Join)) {
-              tePair.get(1).visit(this);
-            }
-            if (tePair.get(1) instanceof Join) {
-              partialQuery = new Join(tePair.get(0), ((Join) tePair.get(1)).getLeftArg());
-              partialQuery = new Join(partialQuery, ((Join) tePair.get(1)).getRightArg());
-              joinArgs.remove(tePair.get(0));
-              joinArgs.remove(tePair.get(1));
-            } else {
-              partialQuery = new Join(tePair.get(0), tePair.get(1));
-              joinArgs.remove(tePair.get(0));
-              joinArgs.remove(tePair.get(1));
-            }
-          } else {
-            if (tePair.size() != 1) {
-              throw new IllegalStateException();
-            }
-            if (!(tePair.get(0) instanceof Join)) {
-              tePair.get(0).visit(this);
-            }
-
-            if (tePair.get(0) instanceof Join) {
-              partialQuery = new Join(partialQuery, ((Join) tePair.get(0)).getLeftArg());
-              partialQuery = new Join(partialQuery, ((Join) tePair.get(0)).getRightArg());
-              joinArgs.remove(tePair.get(0));
-
-            } else {
-              partialQuery = new Join(partialQuery, tePair.get(0));
-              joinArgs.remove(tePair.get(0));
-            }
-          }
-
-        }
-
-        // Replace old join hierarchy
-        node.replaceWith(partialQuery);
-
-      } catch (Exception e) {
-        e.printStackTrace();
-      }
-    }
-
-    protected <L extends List<TupleExpr>> L getJoinArgs(TupleExpr tupleExpr, L joinArgs) {
-      if (tupleExpr instanceof Join) {
-        if (!(((Join) tupleExpr).getLeftArg() instanceof FixedStatementPattern) && !(((Join) tupleExpr).getRightArg() instanceof DoNotExpandSP)) {
-          Join join = (Join) tupleExpr;
-          getJoinArgs(join.getLeftArg(), joinArgs);
-          getJoinArgs(join.getRightArg(), joinArgs);
-        } else {
-          joinArgs.add(tupleExpr);
-        }
-      } else {
-        joinArgs.add(tupleExpr);
-      }
-
-      return joinArgs;
-    }
-
-    public TePairCost getBestTupleJoin(TupleExpr partialQuery, List<TupleExpr> teList) throws Exception {
-
-      double tempCost = 0;
-      double bestCost = Double.MAX_VALUE;
-      List<TupleExpr> bestJoinNodes = new ArrayList<TupleExpr>();
-
-      if (partialQuery == null) {
-
-        double jSelect = 0;
-        double card1 = 0;
-        double card2 = 0;
-        TupleExpr teMin1 = null;
-        TupleExpr teMin2 = null;
-        double bestCard1 = 0;
-        double bestCard2 = 0;
-
-        for (int i = 0; i < teList.size(); i++) {
-          for (int j = i + 1; j < teList.size(); j++) {
-            jSelect = eval.getJoinSelect(config, teList.get(i), teList.get(j));
-            card1 = statistics.getCardinality(teList.get(i));
-            card2 = statistics.getCardinality(teList.get(j));
-            tempCost = card1 + card2 + card1 * card2 * jSelect;
-//             System.out.println("Optimizer: TempCost is " + tempCost + " cards are " + card1 + ", " + card2 + ", selectivity is "
-//             + jSelect + ", and nodes are "
-//             + teList.get(i) + " and " + teList.get(j));
-
-            // TODO this generates a nullpointer exception if tempCost = Double.Max
-            if (bestCost > tempCost) {
-
-              teMin1 = teList.get(i);
-              teMin2 = teList.get(j);
-              bestCard1 = card1;
-              bestCard2 = card2;
-              bestCost = tempCost;
-
-              if (bestCost == 0) {
-                bestJoinNodes.add(teMin1);
-                bestJoinNodes.add(teMin2);
-                return new TePairCost(0.0, bestJoinNodes);
-              }
-            }
-          }
-        }
-
-        if (bestCard1 < bestCard2) {
-
-          bestJoinNodes.add(teMin1);
-          bestJoinNodes.add(teMin2);
-
-        } else {
-          bestJoinNodes.add(teMin2);
-          bestJoinNodes.add(teMin1);
-        }
-        //System.out.println("Optimizer: Card1 is " + card1 + ", card2 is " + card2 + ", selectivity is " + jSelect + ", and best cost is" + bestCost);
-        return new TePairCost(bestCost, bestJoinNodes);
-
-      } else {
-        double card1 = statistics.getCardinality(partialQuery);
-        TupleExpr bestTe = null;
-        double card2 = 0;
-        double select = 0;
-
-        for (TupleExpr te : teList) {
-          select = eval.getJoinSelect(config, partialQuery, te);
-          card2 = statistics.getCardinality(te);
-          tempCost = card1 + card2 + card1 * card2 * select;
-//          System.out.println("Optimizer: TempCost is " + tempCost + " cards are " + card1 + ", " + card2 + ", selectivity is "
-//                  + select + ", and nodes are "
-//                  + partialQuery + " and " + te);
-
-
-          if (bestCost > tempCost) {
-            bestTe = te;
-            bestCost = tempCost;
-          }
-
-        }
-        List<TupleExpr> teList2 = new ArrayList<TupleExpr>();
-        teList2.add(bestTe);
-        //System.out.println("Optimizer: Card1 is " + card1 + ", card2 is " + card2 + ", selectivity is " + select + ", and best cost is" + bestCost);
-        return new TePairCost(bestCost, teList2);
-      }
-
-    }
-
-    // **************************************************************************************
-    public class TePairCost {
-
-      private double cost;
-      private List<TupleExpr> tePair;
-
-      public TePairCost(double cost, List<TupleExpr> tePair) {
-        this.cost = cost;
-        this.tePair = tePair;
-
-      }
-
-      public double getCost() {
-        return cost;
-      }
-
-      public List<TupleExpr> getTePair() {
-        return tePair;
-      }
-
-    }
-
-  }
-}

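getBestTupleJoin above scores a candidate pair with cost = card1 + card2 + card1 * card2 * joinSelectivity, i.e. the cost of scanning both inputs plus the estimated size of their join. A worked example with made-up numbers:

    public class JoinCostSketch {
        public static void main(String[] args) {
            // illustrative estimates, not taken from a real store
            double card1 = 100, card2 = 1000, jSelect = 0.01;
            double cost = card1 + card2 + card1 * card2 * jSelect;
            System.out.println(cost); // 100 + 1000 + 1000 = 2100.0
        }
    }
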
http://git-wip-us.apache.org/repos/asf/incubator-rya/blob/80faf06d/sail/rya.sail.impl/src/main/java/mvm/rya/rdftriplestore/evaluation/RdfCloudTripleStoreEvaluationStatistics.java
----------------------------------------------------------------------
diff --git a/sail/rya.sail.impl/src/main/java/mvm/rya/rdftriplestore/evaluation/RdfCloudTripleStoreEvaluationStatistics.java b/sail/rya.sail.impl/src/main/java/mvm/rya/rdftriplestore/evaluation/RdfCloudTripleStoreEvaluationStatistics.java
deleted file mode 100644
index 4ae8fcb..0000000
--- a/sail/rya.sail.impl/src/main/java/mvm/rya/rdftriplestore/evaluation/RdfCloudTripleStoreEvaluationStatistics.java
+++ /dev/null
@@ -1,280 +0,0 @@
-package mvm.rya.rdftriplestore.evaluation;
-
-/*
- * #%L
- * mvm.rya.rya.sail.impl
- * %%
- * Copyright (C) 2014 Rya
- * %%
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * You may obtain a copy of the License at
- * 
- *      http://www.apache.org/licenses/LICENSE-2.0
- * 
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- * #L%
- */
-
-import static com.google.common.base.Preconditions.checkNotNull;
-//import static RdfCloudTripleStoreUtils.getTtlValueConverter;
-
-
-
-import java.util.ArrayList;
-import java.util.Collection;
-import java.util.HashMap;
-import java.util.List;
-import java.util.Map;
-
-import mvm.rya.api.RdfCloudTripleStoreConfiguration;
-import mvm.rya.api.persist.RdfEvalStatsDAO;
-import mvm.rya.api.persist.RdfEvalStatsDAO.CARDINALITY_OF;
-import mvm.rya.rdftriplestore.inference.DoNotExpandSP;
-import mvm.rya.rdftriplestore.utils.FixedStatementPattern;
-
-import org.openrdf.model.Resource;
-import org.openrdf.model.Statement;
-import org.openrdf.model.URI;
-import org.openrdf.model.Value;
-import org.openrdf.model.vocabulary.RDF;
-import org.openrdf.query.algebra.BinaryTupleOperator;
-import org.openrdf.query.algebra.Filter;
-import org.openrdf.query.algebra.Join;
-import org.openrdf.query.algebra.Projection;
-import org.openrdf.query.algebra.Slice;
-import org.openrdf.query.algebra.StatementPattern;
-import org.openrdf.query.algebra.TupleExpr;
-import org.openrdf.query.algebra.UnaryTupleOperator;
-import org.openrdf.query.algebra.Var;
-import org.openrdf.query.algebra.evaluation.impl.EvaluationStatistics;
-
-/**
- * Class RdfCloudTripleStoreEvaluationStatistics
- * Date: Apr 12, 2011
- * Time: 1:31:05 PM
- */
-public class RdfCloudTripleStoreEvaluationStatistics extends EvaluationStatistics {
-
-    private RdfCloudTripleStoreConfiguration conf;
-    private RdfEvalStatsDAO rdfEvalStatsDAO;
-    protected boolean pushEmptyRdfTypeDown = true;
-    protected boolean useCompositeCardinalities = true;
-
-    public RdfCloudTripleStoreEvaluationStatistics(RdfCloudTripleStoreConfiguration conf, RdfEvalStatsDAO rdfEvalStatsDAO) {
-        checkNotNull(conf);
-        checkNotNull(rdfEvalStatsDAO);
-        try {
-            this.conf = conf;
-            this.rdfEvalStatsDAO = rdfEvalStatsDAO;
-            pushEmptyRdfTypeDown = conf.isStatsPushEmptyRdftypeDown();
-            useCompositeCardinalities = conf.isUseCompositeCardinality();
-        } catch (Exception e) {
-            throw new RuntimeException(e);
-        }
-    }
-
-    @Override
-    public double getCardinality(TupleExpr expr) {
-        if (expr instanceof Filter) {
-            Filter f = (Filter) expr;
-            // filters must make sets smaller
-            return super.getCardinality(f.getArg()) / 10;
-        }
-        return super.getCardinality(expr);
-    }
-
-    @Override
-    protected CardinalityCalculator createCardinalityCalculator() {
-        return new RdfCloudTripleStoreCardinalityCalculator(this);
-    }
-
-    public RdfEvalStatsDAO getRdfEvalStatsDAO() {
-        return rdfEvalStatsDAO;
-    }
-
-    public void setRdfEvalStatsDAO(RdfEvalStatsDAO rdfEvalStatsDAO) {
-        this.rdfEvalStatsDAO = rdfEvalStatsDAO;
-    }
-
-    public class RdfCloudTripleStoreCardinalityCalculator extends CardinalityCalculator {
-        private RdfCloudTripleStoreEvaluationStatistics statistics;
-        protected Map<Var, Collection<Statement>> fspMap;
-
-        public RdfCloudTripleStoreCardinalityCalculator(RdfCloudTripleStoreEvaluationStatistics statistics) {
-            this.statistics = statistics;
-        }
-        
- 
-        @Override
-        protected double getCardinality(StatementPattern sp) {
-            Var subjectVar = sp.getSubjectVar();
-            Resource subj = (Resource) getConstantValue(subjectVar);
-            Var predicateVar = sp.getPredicateVar();
-            URI pred = (URI) getConstantValue(predicateVar);
-            Var objectVar = sp.getObjectVar();
-            Value obj = getConstantValue(objectVar);
-            Resource context = (Resource) getConstantValue(sp.getContextVar());
-
-            // push rdf:type patterns down by giving them max cardinality, as long as the object/subject aren't specified
-                if (pred != null) {
-                    if (statistics.pushEmptyRdfTypeDown && RDF.TYPE.equals(pred) && subj == null && obj == null) {
-                        return Double.MAX_VALUE;
-                    }
-                }
-
-            // FixedStatementPattern indicates that this is when backward chaining reasoning is being used
-            if (sp instanceof FixedStatementPattern) {
-                //no query here
-                FixedStatementPattern fsp = (FixedStatementPattern) sp;
-                //TODO: assume that only the subject is open ended here
-                Var fspSubjectVar = fsp.getSubjectVar();
-                if (fspSubjectVar != null && fspSubjectVar.getValue() == null) {
-                    if (fspMap == null) {
-                        fspMap = new HashMap<Var, Collection<Statement>>();
-                    }
-                    fspMap.put(fspSubjectVar, fsp.statements);
-                }
-                return fsp.statements.size();
-            }
-
-            /**
-             * Use the output of the FixedStatementPattern to determine more information
-             */
-            if (fspMap != null && sp instanceof DoNotExpandSP) {
-                //TODO: Might be a better way than 3 map pulls
-                RdfEvalStatsDAO.CARDINALITY_OF cardinality_of = null;
-                Collection<Statement> statements = null;
-                // TODO unsure of how to incorporate additional cardinalities here
-                if (objectVar != null && objectVar.getValue() == null) {
-                    statements = fspMap.get(objectVar);
-                    cardinality_of = RdfEvalStatsDAO.CARDINALITY_OF.OBJECT;
-                }
-                if (statements == null && predicateVar != null && predicateVar.getValue() == null) {
-                    statements = fspMap.get(predicateVar);
-                    cardinality_of = RdfEvalStatsDAO.CARDINALITY_OF.PREDICATE;
-                }
-                if (statements == null && subjectVar != null && subjectVar.getValue() == null) {
-                    statements = fspMap.get(subjectVar);
-                    cardinality_of = RdfEvalStatsDAO.CARDINALITY_OF.SUBJECT;
-                }
-                if (statements != null) {
-                    double fspCard = 0;
-                    for (Statement statement : statements) {
-                    	List<Value> values = new ArrayList<Value>();
-                    	values.add(statement.getSubject());
-                    	fspCard  += rdfEvalStatsDAO.getCardinality(conf, cardinality_of, values, context);
-                    }
-                    return fspCard;
-                }
-            }
-
-            /**
-             * We put full triple scans before rdf:type because more often than not
-             * the triple scan is being joined with something else that is better than
-             * asking the full rdf:type of everything.
-             */
-            double cardinality = Double.MAX_VALUE - 1;
-            try {
-                if (subj != null) {
-                	List<Value> values = new ArrayList<Value>();
-                	CARDINALITY_OF card = RdfEvalStatsDAO.CARDINALITY_OF.SUBJECT;
-            		values.add(subj);
-            		if (useCompositeCardinalities){
-                   	    if (pred != null){
-                    		values.add(pred);
-                    		card = RdfEvalStatsDAO.CARDINALITY_OF.SUBJECTPREDICATE;
-                    	}
-                   	    else if (obj != null){
-                    		values.add(obj);
-                    		card = RdfEvalStatsDAO.CARDINALITY_OF.SUBJECTOBJECT;
-                   	    }
-            		}
-                	double evalCard = rdfEvalStatsDAO.getCardinality(conf, card, values, context);
-                	// the cardinality will be -1 if there was no value found (if the index does not exist)
-                    if (evalCard >= 0) {
-                        cardinality = Math.min(cardinality, evalCard);
-                    } else {
-                        cardinality = 1;
-                    }
-                }
-                else if (pred != null) {
-                	List<Value> values = new ArrayList<Value>();
-                	CARDINALITY_OF card = RdfEvalStatsDAO.CARDINALITY_OF.PREDICATE;
-            		values.add(pred);
-            		if (useCompositeCardinalities){
-                   	    if (obj != null){
-                    		values.add(obj);
-                    		card = RdfEvalStatsDAO.CARDINALITY_OF.PREDICATEOBJECT;
-                   	    }
-            		}
-                	double evalCard = rdfEvalStatsDAO.getCardinality(conf, card, values, context);
-                    if (evalCard >= 0) {
-                        cardinality = Math.min(cardinality, evalCard);
-                    } else {
-                        cardinality = 1;
-                    }
-                }
-                else if (obj != null) {
-                	List<Value> values = new ArrayList<Value>();
-            		values.add(obj);
-                    double evalCard = rdfEvalStatsDAO.getCardinality(conf, RdfEvalStatsDAO.CARDINALITY_OF.OBJECT, values, context);
-                    if (evalCard >= 0) {
-                        cardinality = Math.min(cardinality, evalCard);
-                    } else {
-                        cardinality = 1;
-                    }
-                }
-            } catch (Exception e) {
-                throw new RuntimeException(e);
-            }
-
-            return cardinality;
-        }
-
-        @Override
-        protected void meetUnaryTupleOperator(UnaryTupleOperator node) {
-            if (node instanceof Projection) {
-                cardinality += -1.0;
-            }
-            super.meetUnaryTupleOperator(node);
-        }
-
-        @Override
-        protected void meetBinaryTupleOperator(BinaryTupleOperator node) {
-            node.getLeftArg().visit(this);
-            double leftArgCost = cardinality;
-            node.getRightArg().visit(this);
-            cardinality += leftArgCost;
-        }
-        
-        // TODO Is this sufficient to support the limit of a Slice node?
-        @Override
-        public void meet(Slice node) {
-            cardinality = node.getLimit();
-        }
-        
-
-        @Override
-        public void meet(Join node) {
-            node.getLeftArg().visit(this);
-            double leftArgCost = cardinality;
-            node.getRightArg().visit(this);
-            if (leftArgCost > cardinality) {
-                cardinality = leftArgCost;    //TODO: Is this ok?
-            }
-        }
-
-        protected Value getConstantValue(Var var) {
-            if (var != null)
-                return var.getValue();
-            else
-                return null;
-        }
-    }
-
-}
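
When useCompositeCardinalities is enabled, the deleted calculator asks the
stats DAO for the most specific cardinality available: subject+predicate or
subject+object before subject alone, and predicate+object before predicate
alone. A condensed sketch of just that selection logic, reusing the
CARDINALITY_OF constants from the code above (the helper class itself is
hypothetical):

    import mvm.rya.api.persist.RdfEvalStatsDAO.CARDINALITY_OF;

    import org.openrdf.model.Value;

    final class IndexChoiceSketch {
        // Mirrors the key-selection order of the deleted getCardinality(StatementPattern)
        static CARDINALITY_OF pick(Value subj, Value pred, Value obj) {
            if (subj != null) {
                if (pred != null) { return CARDINALITY_OF.SUBJECTPREDICATE; }
                if (obj != null)  { return CARDINALITY_OF.SUBJECTOBJECT; }
                return CARDINALITY_OF.SUBJECT;
            }
            if (pred != null) {
                if (obj != null)  { return CARDINALITY_OF.PREDICATEOBJECT; }
                return CARDINALITY_OF.PREDICATE;
            }
            return CARDINALITY_OF.OBJECT;
        }
    }

A lookup that returns -1 (no index value found) is treated as cardinality 1
above, so a missing index never dominates the plan.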

http://git-wip-us.apache.org/repos/asf/incubator-rya/blob/80faf06d/sail/rya.sail.impl/src/main/java/mvm/rya/rdftriplestore/evaluation/RdfCloudTripleStoreSelectivityEvaluationStatistics.java
----------------------------------------------------------------------
diff --git a/sail/rya.sail.impl/src/main/java/mvm/rya/rdftriplestore/evaluation/RdfCloudTripleStoreSelectivityEvaluationStatistics.java b/sail/rya.sail.impl/src/main/java/mvm/rya/rdftriplestore/evaluation/RdfCloudTripleStoreSelectivityEvaluationStatistics.java
deleted file mode 100644
index b9c6669..0000000
--- a/sail/rya.sail.impl/src/main/java/mvm/rya/rdftriplestore/evaluation/RdfCloudTripleStoreSelectivityEvaluationStatistics.java
+++ /dev/null
@@ -1,127 +0,0 @@
-package mvm.rya.rdftriplestore.evaluation;
-
-/*
- * #%L
- * mvm.rya.rya.sail.impl
- * %%
- * Copyright (C) 2014 - 2015 Rya
- * %%
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * You may obtain a copy of the License at
- * 
- *      http://www.apache.org/licenses/LICENSE-2.0
- * 
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- * #L%
- */
-
-import static com.google.common.base.Preconditions.checkNotNull;
-import mvm.rya.api.RdfCloudTripleStoreConfiguration;
-import mvm.rya.api.persist.RdfEvalStatsDAO;
-import mvm.rya.api.persist.joinselect.SelectivityEvalDAO;
-import mvm.rya.rdftriplestore.inference.DoNotExpandSP;
-import mvm.rya.rdftriplestore.utils.FixedStatementPattern;
-
-import org.openrdf.query.algebra.Join;
-import org.openrdf.query.algebra.StatementPattern;
-
-public class RdfCloudTripleStoreSelectivityEvaluationStatistics extends RdfCloudTripleStoreEvaluationStatistics {
-
-  // allows access to join selectivity and extending RdfCloudTripleStoreEvaluationStatistics allows for use of prospector
-  private SelectivityEvalDAO selectEvalStatsDAO; // TODO redundancy here as RdfCloudTripleStoreEvalStats object contains
-                                                 // RdfEvalStatsDAO object
-
-  protected double filterCard;
-  RdfCloudTripleStoreConfiguration config; // TODO redundancy here as RdfCloudTripleStoreEvalStats object contains conf as well
-
-  public RdfCloudTripleStoreSelectivityEvaluationStatistics(RdfCloudTripleStoreConfiguration conf,
-      RdfEvalStatsDAO<RdfCloudTripleStoreConfiguration> prospector, SelectivityEvalDAO selectEvalStatsDAO) {
-
-    super(conf, prospector);
-    checkNotNull(selectEvalStatsDAO);
-
-    try {
-      this.selectEvalStatsDAO = selectEvalStatsDAO;
-      this.config = conf; // TODO fix this!
-    } catch (Exception e) {
-      throw new RuntimeException(e);
-    }
-  }
-
-  @Override
-  protected CardinalityCalculator createCardinalityCalculator() {
-    try {
-      return new SelectivityCardinalityCalculator(this);
-    } catch (Exception e) {
-      System.out.println(e);
-      throw new RuntimeException(e);
-    }
-  }
-
-  public class SelectivityCardinalityCalculator extends RdfCloudTripleStoreCardinalityCalculator {
-
-    public SelectivityCardinalityCalculator(RdfCloudTripleStoreSelectivityEvaluationStatistics statistics) {
-      super(statistics);
-    }
-
-    @Override
-    public void meet(Join node) {
-      node.getLeftArg().visit(this);
-      double leftArgCost = cardinality;
-      // System.out.println("Left cardinality is " + cardinality);
-      node.getRightArg().visit(this);
-
-      if (node.getLeftArg() instanceof FixedStatementPattern && node.getRightArg() instanceof DoNotExpandSP) {
-        return;
-      }
-
-      try {
-        double selectivity = selectEvalStatsDAO.getJoinSelect(config, node.getLeftArg(), node.getRightArg());
-//        System.out.println("CardCalc: left cost of " + node.getLeftArg() + " is " + leftArgCost + " right cost of "
-//        + node.getRightArg() + " is " + cardinality);
-//         System.out.println("Right cardinality is " + cardinality);
-        cardinality += leftArgCost + leftArgCost * cardinality * selectivity;
-//        System.out.println("CardCalc: Cardinality is " + cardinality);
-//        System.out.println("CardCalc: Selectivity is " + selectivity);
-        // System.out.println("Join cardinality is " + cardinality);
-
-      } catch (Exception e) {
-        e.printStackTrace();
-      }
-
-    }
-
-    @Override
-    public double getCardinality(StatementPattern node) {
-      cardinality = super.getCardinality(node);
-
-      // If the statement pattern contains all variables, or is an empty
-      // rdf:type pattern, assign a cardinality equal to the table size
-      if (cardinality == Double.MAX_VALUE || cardinality == Double.MAX_VALUE - 1) {
-        try {
-          cardinality = selectEvalStatsDAO.getTableSize(config);
-        } catch (Exception e) {
-          e.printStackTrace();
-        }
-      }
-
-      return cardinality;
-    }
-
-  }
-
-}
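
For concreteness, the meet(Join) update above amounts to
cardinality = rightCard + leftCost + leftCost * rightCard * selectivity.
With leftCost = 1,000, rightCard = 500 and a join selectivity of 0.001, that
is 500 + 1,000 + 1,000 * 500 * 0.001 = 2,000: a selective join costs little
more than reading both inputs once.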

http://git-wip-us.apache.org/repos/asf/incubator-rya/blob/80faf06d/sail/rya.sail.impl/src/main/java/mvm/rya/rdftriplestore/evaluation/ReorderJoinVisitor.java
----------------------------------------------------------------------
diff --git a/sail/rya.sail.impl/src/main/java/mvm/rya/rdftriplestore/evaluation/ReorderJoinVisitor.java b/sail/rya.sail.impl/src/main/java/mvm/rya/rdftriplestore/evaluation/ReorderJoinVisitor.java
deleted file mode 100644
index 916aff0..0000000
--- a/sail/rya.sail.impl/src/main/java/mvm/rya/rdftriplestore/evaluation/ReorderJoinVisitor.java
+++ /dev/null
@@ -1,69 +0,0 @@
-package mvm.rya.rdftriplestore.evaluation;
-
-/*
- * #%L
- * mvm.rya.rya.sail.impl
- * %%
- * Copyright (C) 2014 Rya
- * %%
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * You may obtain a copy of the License at
- * 
- *      http://www.apache.org/licenses/LICENSE-2.0
- * 
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- * #L%
- */
-
-import org.openrdf.query.algebra.Join;
-import org.openrdf.query.algebra.StatementPattern;
-import org.openrdf.query.algebra.TupleExpr;
-import org.openrdf.query.algebra.helpers.QueryModelVisitorBase;
-
-/**
- * Class ReorderJoinVisitor
- * Date: Apr 11, 2011
- * Time: 10:16:15 PM
- */
-public class ReorderJoinVisitor extends QueryModelVisitorBase {
-    @Override
-    public void meet(Join node) throws Exception {
-        super.meet(node);
-        
-        TupleExpr leftArg = node.getLeftArg();
-        TupleExpr rightArg = node.getRightArg();
-
-        /**
-         * if join(stmtPattern1, join(stmtPattern2, anything)
-         * Should be
-         * join(join(stmtPattern1, stmtPattern2), anything)
-         */
-        if (leftArg instanceof StatementPattern && rightArg instanceof Join) {
-            Join rightJoin = (Join) rightArg;
-            //find the stmtPattern in the right side
-            TupleExpr right_LeftArg = rightJoin.getLeftArg();
-            TupleExpr right_rightArg = rightJoin.getRightArg();
-            if (right_LeftArg instanceof StatementPattern || right_rightArg instanceof StatementPattern) {
-                StatementPattern stmtPattern = null;
-                TupleExpr anything = null;
-                if (right_LeftArg instanceof StatementPattern) {
-                    stmtPattern = (StatementPattern) right_LeftArg;
-                    anything = right_rightArg;
-                } else {
-                    stmtPattern = (StatementPattern) right_rightArg;
-                    anything = right_LeftArg;
-                }
-
-                Join inner = new Join(leftArg, stmtPattern);
-                Join outer = new Join(inner, anything);
-                node.replaceWith(outer);
-            }
-        }
-
-    }
-}
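
The rewrite above is easiest to see on a three-pattern query. A hedged usage
sketch (the QueryRoot wrapper is only there so the top-level Join has a
parent to be replaced under, and the call must sit in a method that declares
throws Exception, since meet(Join) does):

    StatementPattern sp1 = new StatementPattern(new Var("s"), new Var("p1"), new Var("o1"));
    StatementPattern sp2 = new StatementPattern(new Var("s"), new Var("p2"), new Var("o2"));
    StatementPattern sp3 = new StatementPattern(new Var("o2"), new Var("p3"), new Var("o3"));

    // before: Join(sp1, Join(sp2, sp3))
    QueryRoot root = new QueryRoot(new Join(sp1, new Join(sp2, sp3)));
    root.visit(new ReorderJoinVisitor());
    // after:  Join(Join(sp1, sp2), sp3) - the two statement patterns are
    // grouped first, and the rest of the query is joined on top of them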

http://git-wip-us.apache.org/repos/asf/incubator-rya/blob/80faf06d/sail/rya.sail.impl/src/main/java/mvm/rya/rdftriplestore/evaluation/SeparateFilterJoinsVisitor.java
----------------------------------------------------------------------
diff --git a/sail/rya.sail.impl/src/main/java/mvm/rya/rdftriplestore/evaluation/SeparateFilterJoinsVisitor.java b/sail/rya.sail.impl/src/main/java/mvm/rya/rdftriplestore/evaluation/SeparateFilterJoinsVisitor.java
deleted file mode 100644
index e1757e4..0000000
--- a/sail/rya.sail.impl/src/main/java/mvm/rya/rdftriplestore/evaluation/SeparateFilterJoinsVisitor.java
+++ /dev/null
@@ -1,54 +0,0 @@
-package mvm.rya.rdftriplestore.evaluation;
-
-/*
- * #%L
- * mvm.rya.rya.sail.impl
- * %%
- * Copyright (C) 2014 Rya
- * %%
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * You may obtain a copy of the License at
- * 
- *      http://www.apache.org/licenses/LICENSE-2.0
- * 
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- * #L%
- */
-
-import org.openrdf.query.algebra.*;
-import org.openrdf.query.algebra.helpers.QueryModelVisitorBase;
-
-/**
- * TODO: This might be a very bad thing. It may force all ANDs and not allow ORs; it depends on how the bindings are handled.
- * Class SeparateFilterJoinsVisitor
- * Date: Apr 11, 2011
- * Time: 10:16:15 PM
- */
-public class SeparateFilterJoinsVisitor extends QueryModelVisitorBase {
-    @Override
-    public void meet(Filter node) throws Exception {
-        super.meet(node);
-
-        ValueExpr condition = node.getCondition();
-        TupleExpr arg = node.getArg();
-        if (!(arg instanceof Join)) {
-            return;
-        }
-
-        Join join = (Join) arg;
-        TupleExpr leftArg = join.getLeftArg();
-        TupleExpr rightArg = join.getRightArg();
-
-        if (leftArg instanceof StatementPattern && rightArg instanceof StatementPattern) {
-            Filter left = new Filter(leftArg, condition);
-            Filter right = new Filter(rightArg, condition);
-            node.replaceWith(new Join(left, right));
-        }
-
-    }
-}
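
Concretely, the rewrite above turns Filter(cond, Join(sp1, sp2)) into
Join(Filter(sp1, cond), Filter(sp2, cond)), evaluating the same condition
once per side (note the Filter constructor takes the argument first, then
the condition). As the TODO warns, pushing one condition into both arguments
is only safe when it can be evaluated against each side's bindings; a
disjunction mixing variables from the two sides would change results.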

http://git-wip-us.apache.org/repos/asf/incubator-rya/blob/80faf06d/sail/rya.sail.impl/src/main/java/mvm/rya/rdftriplestore/inference/AbstractInferVisitor.java
----------------------------------------------------------------------
diff --git a/sail/rya.sail.impl/src/main/java/mvm/rya/rdftriplestore/inference/AbstractInferVisitor.java b/sail/rya.sail.impl/src/main/java/mvm/rya/rdftriplestore/inference/AbstractInferVisitor.java
deleted file mode 100644
index a7badec..0000000
--- a/sail/rya.sail.impl/src/main/java/mvm/rya/rdftriplestore/inference/AbstractInferVisitor.java
+++ /dev/null
@@ -1,107 +0,0 @@
-package mvm.rya.rdftriplestore.inference;
-
-/*
- * #%L
- * mvm.rya.rya.sail.impl
- * %%
- * Copyright (C) 2014 Rya
- * %%
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * You may obtain a copy of the License at
- * 
- *      http://www.apache.org/licenses/LICENSE-2.0
- * 
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- * #L%
- */
-
-import mvm.rya.api.RdfCloudTripleStoreConfiguration;
-import mvm.rya.rdftriplestore.utils.FixedStatementPattern;
-import mvm.rya.rdftriplestore.utils.TransitivePropertySP;
-import org.openrdf.query.algebra.Join;
-import org.openrdf.query.algebra.StatementPattern;
-import org.openrdf.query.algebra.Union;
-import org.openrdf.query.algebra.Var;
-import org.openrdf.query.algebra.helpers.QueryModelVisitorBase;
-
-import static com.google.common.base.Preconditions.checkNotNull;
-
-/**
- * Class AbstractInferVisitor
- * Date: Mar 14, 2012
- * Time: 5:33:01 PM
- */
-public class AbstractInferVisitor extends QueryModelVisitorBase {
-
-    static Var EXPANDED = new Var("infer-expanded");
-
-    boolean include = true;
-
-    RdfCloudTripleStoreConfiguration conf;
-    InferenceEngine inferenceEngine;
-
-    public AbstractInferVisitor(RdfCloudTripleStoreConfiguration conf, InferenceEngine inferenceEngine) {
-        checkNotNull(conf, "Configuration cannot be null");
-        checkNotNull(inferenceEngine, "Inference Engine cannot be null");
-        this.conf = conf;
-        this.inferenceEngine = inferenceEngine;
-    }
-
-    @Override
-    public void meet(StatementPattern sp) throws Exception {
-        if (!include) {
-            return;
-        }
-        if (sp instanceof FixedStatementPattern || sp instanceof TransitivePropertySP || sp instanceof DoNotExpandSP) {
-            return;   //already inferred somewhere else
-        }
-        final Var predVar = sp.getPredicateVar();
-        //we do not let timeRange preds be inferred; inferring them is not good
-        if (predVar == null || predVar.getValue() == null
-//                || RdfCloudTripleStoreUtils.getTtlValueConverter(conf, (URI) predVar.getValue()) != null
-                ) {
-            return;
-        }
-        meetSP(sp);
-    }
-
-    protected void meetSP(StatementPattern sp) throws Exception {
-
-    }
-
-    @Override
-    public void meet(Union node) throws Exception {
-//        if (!(node instanceof InferUnion))
-        super.meet(node);
-    }
-
-    @Override
-    public void meet(Join node) throws Exception {
-        if (!(node instanceof InferJoin)) {
-            super.meet(node);
-        }
-    }
-
-    public RdfCloudTripleStoreConfiguration getConf() {
-        return conf;
-    }
-
-    public void setConf(RdfCloudTripleStoreConfiguration conf) {
-        this.conf = conf;
-    }
-
-    public InferenceEngine getInferenceEngine() {
-        return inferenceEngine;
-    }
-
-    public void setInferenceEngine(InferenceEngine inferenceEngine) {
-        this.inferenceEngine = inferenceEngine;
-    }
-}

http://git-wip-us.apache.org/repos/asf/incubator-rya/blob/80faf06d/sail/rya.sail.impl/src/main/java/mvm/rya/rdftriplestore/inference/DoNotExpandSP.java
----------------------------------------------------------------------
diff --git a/sail/rya.sail.impl/src/main/java/mvm/rya/rdftriplestore/inference/DoNotExpandSP.java b/sail/rya.sail.impl/src/main/java/mvm/rya/rdftriplestore/inference/DoNotExpandSP.java
deleted file mode 100644
index 457087f..0000000
--- a/sail/rya.sail.impl/src/main/java/mvm/rya/rdftriplestore/inference/DoNotExpandSP.java
+++ /dev/null
@@ -1,50 +0,0 @@
-package mvm.rya.rdftriplestore.inference;
-
-/*
- * #%L
- * mvm.rya.rya.sail.impl
- * %%
- * Copyright (C) 2014 Rya
- * %%
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * You may obtain a copy of the License at
- * 
- *      http://www.apache.org/licenses/LICENSE-2.0
- * 
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- * #L%
- */
-
-import org.openrdf.query.algebra.StatementPattern;
-import org.openrdf.query.algebra.Var;
-
-/**
- * Class DoNotExpandSP
- * Date: Mar 15, 2012
- * Time: 9:39:45 AM
- */
-public class DoNotExpandSP extends StatementPattern{
-    public DoNotExpandSP() {
-    }
-
-    public DoNotExpandSP(Var subject, Var predicate, Var object) {
-        super(subject, predicate, object);
-    }
-
-    public DoNotExpandSP(Scope scope, Var subject, Var predicate, Var object) {
-        super(scope, subject, predicate, object);
-    }
-
-    public DoNotExpandSP(Var subject, Var predicate, Var object, Var context) {
-        super(subject, predicate, object, context);
-    }
-
-    public DoNotExpandSP(Scope scope, Var subjVar, Var predVar, Var objVar, Var conVar) {
-        super(scope, subjVar, predVar, objVar, conVar);
-    }
-}

http://git-wip-us.apache.org/repos/asf/incubator-rya/blob/80faf06d/sail/rya.sail.impl/src/main/java/mvm/rya/rdftriplestore/inference/InferConstants.java
----------------------------------------------------------------------
diff --git a/sail/rya.sail.impl/src/main/java/mvm/rya/rdftriplestore/inference/InferConstants.java b/sail/rya.sail.impl/src/main/java/mvm/rya/rdftriplestore/inference/InferConstants.java
deleted file mode 100644
index 4d2a753..0000000
--- a/sail/rya.sail.impl/src/main/java/mvm/rya/rdftriplestore/inference/InferConstants.java
+++ /dev/null
@@ -1,33 +0,0 @@
-package mvm.rya.rdftriplestore.inference;
-
-/*
- * #%L
- * mvm.rya.rya.sail.impl
- * %%
- * Copyright (C) 2014 Rya
- * %%
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * You may obtain a copy of the License at
- * 
- *      http://www.apache.org/licenses/LICENSE-2.0
- * 
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- * #L%
- */
-
-/**
- * Interface InferConstants
- * Date: Apr 16, 2011
- * Time: 7:30:47 AM
- */
-public interface InferConstants {
-
-    public static final String INFERRED = "inferred";
-    public static final String TRUE = "true";
-    public static final String FALSE = "false";
-}

http://git-wip-us.apache.org/repos/asf/incubator-rya/blob/80faf06d/sail/rya.sail.impl/src/main/java/mvm/rya/rdftriplestore/inference/InferJoin.java
----------------------------------------------------------------------
diff --git a/sail/rya.sail.impl/src/main/java/mvm/rya/rdftriplestore/inference/InferJoin.java b/sail/rya.sail.impl/src/main/java/mvm/rya/rdftriplestore/inference/InferJoin.java
deleted file mode 100644
index 6d7be93..0000000
--- a/sail/rya.sail.impl/src/main/java/mvm/rya/rdftriplestore/inference/InferJoin.java
+++ /dev/null
@@ -1,49 +0,0 @@
-package mvm.rya.rdftriplestore.inference;
-
-/*
- * #%L
- * mvm.rya.rya.sail.impl
- * %%
- * Copyright (C) 2014 Rya
- * %%
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * You may obtain a copy of the License at
- * 
- *      http://www.apache.org/licenses/LICENSE-2.0
- * 
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- * #L%
- */
-
-import org.openrdf.query.algebra.Join;
-import org.openrdf.query.algebra.TupleExpr;
-
-import java.util.HashMap;
-import java.util.Map;
-
-/**
- * Class InferJoin
- * Date: Apr 16, 2011
- * Time: 7:29:40 AM
- */
-public class InferJoin extends Join {
-
-    private Map<String, String> properties = new HashMap<String, String>();
-
-    public InferJoin() {
-    }
-
-    public InferJoin(TupleExpr leftArg, TupleExpr rightArg) {
-        super(leftArg, rightArg);
-    }
-
-    public Map<String, String> getProperties() {
-        return properties;
-    }
-
-}
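
Taken together, the classes above form the expansion idiom the concrete
inference visitors build on: precomputed alternatives go into a
FixedStatementPattern, the original pattern is re-issued as a DoNotExpandSP
so it is skipped on later passes, and an InferJoin ties the two together and
is flagged with InferConstants.INFERRED. A hypothetical sketch of a visitor
using that idiom (it assumes FixedStatementPattern offers the same
(subject, predicate, object) constructor as StatementPattern; it is not one
of the deleted visitors):

    import java.util.UUID;

    import mvm.rya.api.RdfCloudTripleStoreConfiguration;
    import mvm.rya.rdftriplestore.utils.FixedStatementPattern;

    import org.openrdf.query.algebra.StatementPattern;
    import org.openrdf.query.algebra.Var;

    public class SubPropertySketchVisitor extends AbstractInferVisitor {

        public SubPropertySketchVisitor(RdfCloudTripleStoreConfiguration conf,
                                        InferenceEngine engine) {
            super(conf, engine);
        }

        @Override
        protected void meetSP(StatementPattern sp) throws Exception {
            // fresh variable that ranges over the alternative predicates
            Var expanded = new Var("pred-" + UUID.randomUUID());

            // alternatives supplied by backward chaining; a real visitor
            // would bind the middle Var to the rdfs:subPropertyOf URI
            FixedStatementPattern alternatives = new FixedStatementPattern(
                    expanded, new Var("subPropertyOf"), sp.getPredicateVar());

            // the original pattern, marked so it is not expanded again
            DoNotExpandSP original = new DoNotExpandSP(
                    sp.getSubjectVar(), expanded, sp.getObjectVar());

            InferJoin join = new InferJoin(alternatives, original);
            join.getProperties().put(InferConstants.INFERRED, InferConstants.TRUE);
            sp.replaceWith(join);
        }
    }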



[47/56] [abbrv] incubator-rya git commit: RYA-7 POM and License Clean-up for Apache Move

Posted by mi...@apache.org.
http://git-wip-us.apache.org/repos/asf/incubator-rya/blob/80faf06d/common/rya.api/src/main/java/mvm/rya/api/resolver/impl/BooleanRyaTypeResolver.java
----------------------------------------------------------------------
diff --git a/common/rya.api/src/main/java/mvm/rya/api/resolver/impl/BooleanRyaTypeResolver.java b/common/rya.api/src/main/java/mvm/rya/api/resolver/impl/BooleanRyaTypeResolver.java
index 30c43d9..f5de2de 100644
--- a/common/rya.api/src/main/java/mvm/rya/api/resolver/impl/BooleanRyaTypeResolver.java
+++ b/common/rya.api/src/main/java/mvm/rya/api/resolver/impl/BooleanRyaTypeResolver.java
@@ -1,25 +1,26 @@
 package mvm.rya.api.resolver.impl;
 
 /*
- * #%L
- * mvm.rya.rya.api
- * %%
- * Copyright (C) 2014 Rya
- * %%
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * You may obtain a copy of the License at
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
  * 
- *      http://www.apache.org/licenses/LICENSE-2.0
+ *   http://www.apache.org/licenses/LICENSE-2.0
  * 
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- * #L%
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied.  See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
  */
 
+
+
 import mvm.rya.api.resolver.RyaTypeResolverException;
 import org.calrissian.mango.types.LexiTypeEncoders;
 import org.calrissian.mango.types.TypeEncoder;
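
This resolver, like the others below, leans on mango's LexiTypeEncoders,
whose point is that the serialized form of a value sorts the same way the
values themselves do, which is what makes range scans over the serialized
index possible. A toy illustration of that property in plain Java (this is
not the mango API):

    import java.util.Arrays;

    public class LexiOrderSketch {
        // Zero-padding makes String ordering agree with numeric ordering,
        // the property the Lexi* encoders provide per datatype.
        static String encode(long v) {
            return String.format("%019d", v);   // non-negative values only, for brevity
        }

        public static void main(String[] args) {
            String[] encoded = { encode(300), encode(25), encode(7) };
            Arrays.sort(encoded);               // lexicographic sort
            System.out.println(Arrays.toString(encoded));
            // prints the encodings of 7, 25, 300 - numeric order preserved
        }
    }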

http://git-wip-us.apache.org/repos/asf/incubator-rya/blob/80faf06d/common/rya.api/src/main/java/mvm/rya/api/resolver/impl/ByteRyaTypeResolver.java
----------------------------------------------------------------------
diff --git a/common/rya.api/src/main/java/mvm/rya/api/resolver/impl/ByteRyaTypeResolver.java b/common/rya.api/src/main/java/mvm/rya/api/resolver/impl/ByteRyaTypeResolver.java
index e530ed4..4b80679 100644
--- a/common/rya.api/src/main/java/mvm/rya/api/resolver/impl/ByteRyaTypeResolver.java
+++ b/common/rya.api/src/main/java/mvm/rya/api/resolver/impl/ByteRyaTypeResolver.java
@@ -1,25 +1,26 @@
 package mvm.rya.api.resolver.impl;
 
 /*
- * #%L
- * mvm.rya.rya.api
- * %%
- * Copyright (C) 2014 Rya
- * %%
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * You may obtain a copy of the License at
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
  * 
- *      http://www.apache.org/licenses/LICENSE-2.0
+ *   http://www.apache.org/licenses/LICENSE-2.0
  * 
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- * #L%
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied.  See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
  */
 
+
+
 import mvm.rya.api.resolver.RyaTypeResolverException;
 import org.calrissian.mango.types.LexiTypeEncoders;
 import org.calrissian.mango.types.TypeEncoder;

http://git-wip-us.apache.org/repos/asf/incubator-rya/blob/80faf06d/common/rya.api/src/main/java/mvm/rya/api/resolver/impl/CustomDatatypeResolver.java
----------------------------------------------------------------------
diff --git a/common/rya.api/src/main/java/mvm/rya/api/resolver/impl/CustomDatatypeResolver.java b/common/rya.api/src/main/java/mvm/rya/api/resolver/impl/CustomDatatypeResolver.java
index 6393cbb..ae93e0f 100644
--- a/common/rya.api/src/main/java/mvm/rya/api/resolver/impl/CustomDatatypeResolver.java
+++ b/common/rya.api/src/main/java/mvm/rya/api/resolver/impl/CustomDatatypeResolver.java
@@ -1,25 +1,26 @@
 package mvm.rya.api.resolver.impl;
 
 /*
- * #%L
- * mvm.rya.rya.api
- * %%
- * Copyright (C) 2014 Rya
- * %%
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * You may obtain a copy of the License at
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
  * 
- *      http://www.apache.org/licenses/LICENSE-2.0
+ *   http://www.apache.org/licenses/LICENSE-2.0
  * 
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- * #L%
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied.  See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
  */
 
+
+
 import com.google.common.primitives.Bytes;
 import mvm.rya.api.domain.RyaType;
 import mvm.rya.api.resolver.RyaTypeResolverException;

http://git-wip-us.apache.org/repos/asf/incubator-rya/blob/80faf06d/common/rya.api/src/main/java/mvm/rya/api/resolver/impl/DateTimeRyaTypeResolver.java
----------------------------------------------------------------------
diff --git a/common/rya.api/src/main/java/mvm/rya/api/resolver/impl/DateTimeRyaTypeResolver.java b/common/rya.api/src/main/java/mvm/rya/api/resolver/impl/DateTimeRyaTypeResolver.java
index a93652c..cb3e7cf 100644
--- a/common/rya.api/src/main/java/mvm/rya/api/resolver/impl/DateTimeRyaTypeResolver.java
+++ b/common/rya.api/src/main/java/mvm/rya/api/resolver/impl/DateTimeRyaTypeResolver.java
@@ -1,25 +1,26 @@
 package mvm.rya.api.resolver.impl;
 
 /*
- * #%L
- * mvm.rya.rya.api
- * %%
- * Copyright (C) 2014 Rya
- * %%
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * You may obtain a copy of the License at
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
  * 
- *      http://www.apache.org/licenses/LICENSE-2.0
+ *   http://www.apache.org/licenses/LICENSE-2.0
  * 
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- * #L%
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied.  See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
  */
 
+
+
 import mvm.rya.api.resolver.RyaTypeResolverException;
 import org.calrissian.mango.types.LexiTypeEncoders;
 import org.calrissian.mango.types.TypeEncoder;

http://git-wip-us.apache.org/repos/asf/incubator-rya/blob/80faf06d/common/rya.api/src/main/java/mvm/rya/api/resolver/impl/DoubleRyaTypeResolver.java
----------------------------------------------------------------------
diff --git a/common/rya.api/src/main/java/mvm/rya/api/resolver/impl/DoubleRyaTypeResolver.java b/common/rya.api/src/main/java/mvm/rya/api/resolver/impl/DoubleRyaTypeResolver.java
index ba85a0b..88daa0f 100644
--- a/common/rya.api/src/main/java/mvm/rya/api/resolver/impl/DoubleRyaTypeResolver.java
+++ b/common/rya.api/src/main/java/mvm/rya/api/resolver/impl/DoubleRyaTypeResolver.java
@@ -1,25 +1,26 @@
 package mvm.rya.api.resolver.impl;
 
 /*
- * #%L
- * mvm.rya.rya.api
- * %%
- * Copyright (C) 2014 Rya
- * %%
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * You may obtain a copy of the License at
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
  * 
- *      http://www.apache.org/licenses/LICENSE-2.0
+ *   http://www.apache.org/licenses/LICENSE-2.0
  * 
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- * #L%
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied.  See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
  */
 
+
+
 import mvm.rya.api.resolver.RyaTypeResolverException;
 import org.calrissian.mango.types.LexiTypeEncoders;
 import org.calrissian.mango.types.TypeEncoder;

http://git-wip-us.apache.org/repos/asf/incubator-rya/blob/80faf06d/common/rya.api/src/main/java/mvm/rya/api/resolver/impl/FloatRyaTypeResolver.java
----------------------------------------------------------------------
diff --git a/common/rya.api/src/main/java/mvm/rya/api/resolver/impl/FloatRyaTypeResolver.java b/common/rya.api/src/main/java/mvm/rya/api/resolver/impl/FloatRyaTypeResolver.java
index 46c3ef7..2969a4b 100644
--- a/common/rya.api/src/main/java/mvm/rya/api/resolver/impl/FloatRyaTypeResolver.java
+++ b/common/rya.api/src/main/java/mvm/rya/api/resolver/impl/FloatRyaTypeResolver.java
@@ -1,25 +1,26 @@
 package mvm.rya.api.resolver.impl;
 
 /*
- * #%L
- * mvm.rya.rya.api
- * %%
- * Copyright (C) 2014 Rya
- * %%
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * You may obtain a copy of the License at
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
  * 
- *      http://www.apache.org/licenses/LICENSE-2.0
+ *   http://www.apache.org/licenses/LICENSE-2.0
  * 
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- * #L%
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied.  See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
  */
 
+
+
 import mvm.rya.api.resolver.RyaTypeResolverException;
 import org.calrissian.mango.types.LexiTypeEncoders;
 import org.calrissian.mango.types.TypeEncoder;

http://git-wip-us.apache.org/repos/asf/incubator-rya/blob/80faf06d/common/rya.api/src/main/java/mvm/rya/api/resolver/impl/IntegerRyaTypeResolver.java
----------------------------------------------------------------------
diff --git a/common/rya.api/src/main/java/mvm/rya/api/resolver/impl/IntegerRyaTypeResolver.java b/common/rya.api/src/main/java/mvm/rya/api/resolver/impl/IntegerRyaTypeResolver.java
index 24f0028..2f6c727 100644
--- a/common/rya.api/src/main/java/mvm/rya/api/resolver/impl/IntegerRyaTypeResolver.java
+++ b/common/rya.api/src/main/java/mvm/rya/api/resolver/impl/IntegerRyaTypeResolver.java
@@ -1,25 +1,26 @@
 package mvm.rya.api.resolver.impl;
 
 /*
- * #%L
- * mvm.rya.rya.api
- * %%
- * Copyright (C) 2014 Rya
- * %%
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * You may obtain a copy of the License at
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
  * 
- *      http://www.apache.org/licenses/LICENSE-2.0
+ *   http://www.apache.org/licenses/LICENSE-2.0
  * 
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- * #L%
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied.  See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
  */
 
+
+
 import mvm.rya.api.resolver.RyaTypeResolverException;
 import org.calrissian.mango.types.LexiTypeEncoders;
 import org.calrissian.mango.types.TypeEncoder;

http://git-wip-us.apache.org/repos/asf/incubator-rya/blob/80faf06d/common/rya.api/src/main/java/mvm/rya/api/resolver/impl/LongRyaTypeResolver.java
----------------------------------------------------------------------
diff --git a/common/rya.api/src/main/java/mvm/rya/api/resolver/impl/LongRyaTypeResolver.java b/common/rya.api/src/main/java/mvm/rya/api/resolver/impl/LongRyaTypeResolver.java
index cf017fa..e073495 100644
--- a/common/rya.api/src/main/java/mvm/rya/api/resolver/impl/LongRyaTypeResolver.java
+++ b/common/rya.api/src/main/java/mvm/rya/api/resolver/impl/LongRyaTypeResolver.java
@@ -1,25 +1,26 @@
 package mvm.rya.api.resolver.impl;
 
 /*
- * #%L
- * mvm.rya.rya.api
- * %%
- * Copyright (C) 2014 Rya
- * %%
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * You may obtain a copy of the License at
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
  * 
- *      http://www.apache.org/licenses/LICENSE-2.0
+ *   http://www.apache.org/licenses/LICENSE-2.0
  * 
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- * #L%
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied.  See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
  */
 
+
+
 import mvm.rya.api.resolver.RyaTypeResolverException;
 import org.calrissian.mango.types.LexiTypeEncoders;
 import org.calrissian.mango.types.TypeEncoder;

http://git-wip-us.apache.org/repos/asf/incubator-rya/blob/80faf06d/common/rya.api/src/main/java/mvm/rya/api/resolver/impl/RyaTypeResolverImpl.java
----------------------------------------------------------------------
diff --git a/common/rya.api/src/main/java/mvm/rya/api/resolver/impl/RyaTypeResolverImpl.java b/common/rya.api/src/main/java/mvm/rya/api/resolver/impl/RyaTypeResolverImpl.java
index 22743f6..3f4d6b8 100644
--- a/common/rya.api/src/main/java/mvm/rya/api/resolver/impl/RyaTypeResolverImpl.java
+++ b/common/rya.api/src/main/java/mvm/rya/api/resolver/impl/RyaTypeResolverImpl.java
@@ -1,25 +1,26 @@
 package mvm.rya.api.resolver.impl;
 
 /*
- * #%L
- * mvm.rya.rya.api
- * %%
- * Copyright (C) 2014 Rya
- * %%
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * You may obtain a copy of the License at
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
  * 
- *      http://www.apache.org/licenses/LICENSE-2.0
+ *   http://www.apache.org/licenses/LICENSE-2.0
  * 
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- * #L%
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied.  See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
  */
 
+
+
 import com.google.common.primitives.Bytes;
 import mvm.rya.api.domain.RyaRange;
 import mvm.rya.api.domain.RyaType;

http://git-wip-us.apache.org/repos/asf/incubator-rya/blob/80faf06d/common/rya.api/src/main/java/mvm/rya/api/resolver/impl/RyaURIResolver.java
----------------------------------------------------------------------
diff --git a/common/rya.api/src/main/java/mvm/rya/api/resolver/impl/RyaURIResolver.java b/common/rya.api/src/main/java/mvm/rya/api/resolver/impl/RyaURIResolver.java
index daa3721..8f8bf00 100644
--- a/common/rya.api/src/main/java/mvm/rya/api/resolver/impl/RyaURIResolver.java
+++ b/common/rya.api/src/main/java/mvm/rya/api/resolver/impl/RyaURIResolver.java
@@ -1,25 +1,26 @@
 package mvm.rya.api.resolver.impl;
 
 /*
- * #%L
- * mvm.rya.rya.api
- * %%
- * Copyright (C) 2014 Rya
- * %%
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * You may obtain a copy of the License at
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
  * 
- *      http://www.apache.org/licenses/LICENSE-2.0
+ *   http://www.apache.org/licenses/LICENSE-2.0
  * 
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- * #L%
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied.  See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
  */
 
+
+
 import mvm.rya.api.domain.RyaType;
 import mvm.rya.api.domain.RyaURI;
 import org.openrdf.model.vocabulary.XMLSchema;

http://git-wip-us.apache.org/repos/asf/incubator-rya/blob/80faf06d/common/rya.api/src/main/java/mvm/rya/api/resolver/impl/ServiceBackedRyaTypeResolverMappings.java
----------------------------------------------------------------------
diff --git a/common/rya.api/src/main/java/mvm/rya/api/resolver/impl/ServiceBackedRyaTypeResolverMappings.java b/common/rya.api/src/main/java/mvm/rya/api/resolver/impl/ServiceBackedRyaTypeResolverMappings.java
index bc886b4..ce3f05b 100644
--- a/common/rya.api/src/main/java/mvm/rya/api/resolver/impl/ServiceBackedRyaTypeResolverMappings.java
+++ b/common/rya.api/src/main/java/mvm/rya/api/resolver/impl/ServiceBackedRyaTypeResolverMappings.java
@@ -1,25 +1,26 @@
 package mvm.rya.api.resolver.impl;
 
 /*
- * #%L
- * mvm.rya.rya.api
- * %%
- * Copyright (C) 2014 Rya
- * %%
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * You may obtain a copy of the License at
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
  * 
- *      http://www.apache.org/licenses/LICENSE-2.0
+ *   http://www.apache.org/licenses/LICENSE-2.0
  * 
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- * #L%
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied.  See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
  */
 
+
+
 import mvm.rya.api.resolver.RyaTypeResolver;
 import mvm.rya.api.resolver.RyaTypeResolverMapping;
 

http://git-wip-us.apache.org/repos/asf/incubator-rya/blob/80faf06d/common/rya.api/src/main/java/mvm/rya/api/resolver/impl/ShortRyaTypeResolver.java
----------------------------------------------------------------------
diff --git a/common/rya.api/src/main/java/mvm/rya/api/resolver/impl/ShortRyaTypeResolver.java b/common/rya.api/src/main/java/mvm/rya/api/resolver/impl/ShortRyaTypeResolver.java
index ff859c9..dba9773 100644
--- a/common/rya.api/src/main/java/mvm/rya/api/resolver/impl/ShortRyaTypeResolver.java
+++ b/common/rya.api/src/main/java/mvm/rya/api/resolver/impl/ShortRyaTypeResolver.java
@@ -1,25 +1,26 @@
 package mvm.rya.api.resolver.impl;
 
 /*
- * #%L
- * mvm.rya.rya.api
- * %%
- * Copyright (C) 2014 Rya
- * %%
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * You may obtain a copy of the License at
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
  * 
- *      http://www.apache.org/licenses/LICENSE-2.0
+ *   http://www.apache.org/licenses/LICENSE-2.0
  * 
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- * #L%
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied.  See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
  */
 
+
+
 import mvm.rya.api.resolver.RyaTypeResolverException;
 import org.calrissian.mango.types.LexiTypeEncoders;
 import org.calrissian.mango.types.TypeEncoder;

http://git-wip-us.apache.org/repos/asf/incubator-rya/blob/80faf06d/common/rya.api/src/main/java/mvm/rya/api/resolver/triple/TripleRow.java
----------------------------------------------------------------------
diff --git a/common/rya.api/src/main/java/mvm/rya/api/resolver/triple/TripleRow.java b/common/rya.api/src/main/java/mvm/rya/api/resolver/triple/TripleRow.java
index ee731c4..f825e86 100644
--- a/common/rya.api/src/main/java/mvm/rya/api/resolver/triple/TripleRow.java
+++ b/common/rya.api/src/main/java/mvm/rya/api/resolver/triple/TripleRow.java
@@ -1,25 +1,26 @@
 package mvm.rya.api.resolver.triple;
 
 /*
- * #%L
- * mvm.rya.rya.api
- * %%
- * Copyright (C) 2014 Rya
- * %%
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * You may obtain a copy of the License at
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
  * 
- *      http://www.apache.org/licenses/LICENSE-2.0
+ *   http://www.apache.org/licenses/LICENSE-2.0
  * 
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- * #L%
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied.  See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
  */
 
+
+
 import java.util.Arrays;
 
 /**

http://git-wip-us.apache.org/repos/asf/incubator-rya/blob/80faf06d/common/rya.api/src/main/java/mvm/rya/api/resolver/triple/TripleRowRegex.java
----------------------------------------------------------------------
diff --git a/common/rya.api/src/main/java/mvm/rya/api/resolver/triple/TripleRowRegex.java b/common/rya.api/src/main/java/mvm/rya/api/resolver/triple/TripleRowRegex.java
index 353191d..36d23df 100644
--- a/common/rya.api/src/main/java/mvm/rya/api/resolver/triple/TripleRowRegex.java
+++ b/common/rya.api/src/main/java/mvm/rya/api/resolver/triple/TripleRowRegex.java
@@ -1,25 +1,26 @@
 package mvm.rya.api.resolver.triple;
 
 /*
- * #%L
- * mvm.rya.rya.api
- * %%
- * Copyright (C) 2014 Rya
- * %%
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * You may obtain a copy of the License at
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
  * 
- *      http://www.apache.org/licenses/LICENSE-2.0
+ *   http://www.apache.org/licenses/LICENSE-2.0
  * 
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- * #L%
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied.  See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
  */
 
+
+
 import java.util.Arrays;
 
 /**

http://git-wip-us.apache.org/repos/asf/incubator-rya/blob/80faf06d/common/rya.api/src/main/java/mvm/rya/api/resolver/triple/TripleRowResolver.java
----------------------------------------------------------------------
diff --git a/common/rya.api/src/main/java/mvm/rya/api/resolver/triple/TripleRowResolver.java b/common/rya.api/src/main/java/mvm/rya/api/resolver/triple/TripleRowResolver.java
index 53250af..2ccc986 100644
--- a/common/rya.api/src/main/java/mvm/rya/api/resolver/triple/TripleRowResolver.java
+++ b/common/rya.api/src/main/java/mvm/rya/api/resolver/triple/TripleRowResolver.java
@@ -1,25 +1,26 @@
 package mvm.rya.api.resolver.triple;
 
 /*
- * #%L
- * mvm.rya.rya.api
- * %%
- * Copyright (C) 2014 Rya
- * %%
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * You may obtain a copy of the License at
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
  * 
- *      http://www.apache.org/licenses/LICENSE-2.0
+ *   http://www.apache.org/licenses/LICENSE-2.0
  * 
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- * #L%
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied.  See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
  */
 
+
+
 import mvm.rya.api.RdfCloudTripleStoreConstants;
 import mvm.rya.api.domain.RyaStatement;
 import mvm.rya.api.domain.RyaType;

http://git-wip-us.apache.org/repos/asf/incubator-rya/blob/80faf06d/common/rya.api/src/main/java/mvm/rya/api/resolver/triple/TripleRowResolverException.java
----------------------------------------------------------------------
diff --git a/common/rya.api/src/main/java/mvm/rya/api/resolver/triple/TripleRowResolverException.java b/common/rya.api/src/main/java/mvm/rya/api/resolver/triple/TripleRowResolverException.java
index 7c286d2..d1824b1 100644
--- a/common/rya.api/src/main/java/mvm/rya/api/resolver/triple/TripleRowResolverException.java
+++ b/common/rya.api/src/main/java/mvm/rya/api/resolver/triple/TripleRowResolverException.java
@@ -1,25 +1,26 @@
 package mvm.rya.api.resolver.triple;
 
 /*
- * #%L
- * mvm.rya.rya.api
- * %%
- * Copyright (C) 2014 Rya
- * %%
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * You may obtain a copy of the License at
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
  * 
- *      http://www.apache.org/licenses/LICENSE-2.0
+ *   http://www.apache.org/licenses/LICENSE-2.0
  * 
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- * #L%
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied.  See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
  */
 
+
+
 /**
  * Date: 7/17/12
  * Time: 7:35 AM

http://git-wip-us.apache.org/repos/asf/incubator-rya/blob/80faf06d/common/rya.api/src/main/java/mvm/rya/api/resolver/triple/impl/WholeRowHashedTripleResolver.java
----------------------------------------------------------------------
diff --git a/common/rya.api/src/main/java/mvm/rya/api/resolver/triple/impl/WholeRowHashedTripleResolver.java b/common/rya.api/src/main/java/mvm/rya/api/resolver/triple/impl/WholeRowHashedTripleResolver.java
index 61e1330..1fd3f1b 100644
--- a/common/rya.api/src/main/java/mvm/rya/api/resolver/triple/impl/WholeRowHashedTripleResolver.java
+++ b/common/rya.api/src/main/java/mvm/rya/api/resolver/triple/impl/WholeRowHashedTripleResolver.java
@@ -1,25 +1,26 @@
 package mvm.rya.api.resolver.triple.impl;
 
 /*
- * #%L
- * mvm.rya.rya.api
- * %%
- * Copyright (C) 2014 Rya
- * %%
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * You may obtain a copy of the License at
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
  * 
- *      http://www.apache.org/licenses/LICENSE-2.0
+ *   http://www.apache.org/licenses/LICENSE-2.0
  * 
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- * #L%
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied.  See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
  */
 
+
+
 import com.google.common.primitives.Bytes;
 
 import mvm.rya.api.domain.RyaStatement;

http://git-wip-us.apache.org/repos/asf/incubator-rya/blob/80faf06d/common/rya.api/src/main/java/mvm/rya/api/resolver/triple/impl/WholeRowTripleResolver.java
----------------------------------------------------------------------
diff --git a/common/rya.api/src/main/java/mvm/rya/api/resolver/triple/impl/WholeRowTripleResolver.java b/common/rya.api/src/main/java/mvm/rya/api/resolver/triple/impl/WholeRowTripleResolver.java
index c637fce..dc0695b 100644
--- a/common/rya.api/src/main/java/mvm/rya/api/resolver/triple/impl/WholeRowTripleResolver.java
+++ b/common/rya.api/src/main/java/mvm/rya/api/resolver/triple/impl/WholeRowTripleResolver.java
@@ -1,25 +1,26 @@
 package mvm.rya.api.resolver.triple.impl;
 
 /*
- * #%L
- * mvm.rya.rya.api
- * %%
- * Copyright (C) 2014 Rya
- * %%
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * You may obtain a copy of the License at
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
  * 
- *      http://www.apache.org/licenses/LICENSE-2.0
+ *   http://www.apache.org/licenses/LICENSE-2.0
  * 
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- * #L%
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied.  See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
  */
 
+
+
 import com.google.common.primitives.Bytes;
 import mvm.rya.api.domain.RyaStatement;
 import mvm.rya.api.domain.RyaType;

http://git-wip-us.apache.org/repos/asf/incubator-rya/blob/80faf06d/common/rya.api/src/main/java/mvm/rya/api/security/SecurityProvider.java
----------------------------------------------------------------------
diff --git a/common/rya.api/src/main/java/mvm/rya/api/security/SecurityProvider.java b/common/rya.api/src/main/java/mvm/rya/api/security/SecurityProvider.java
index 314ed62..61b14d9 100644
--- a/common/rya.api/src/main/java/mvm/rya/api/security/SecurityProvider.java
+++ b/common/rya.api/src/main/java/mvm/rya/api/security/SecurityProvider.java
@@ -1,5 +1,25 @@
 package mvm.rya.api.security;
 
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ * 
+ *   http://www.apache.org/licenses/LICENSE-2.0
+ * 
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied.  See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+
+
 import javax.servlet.http.HttpServletRequest;
 
 public interface SecurityProvider {

http://git-wip-us.apache.org/repos/asf/incubator-rya/blob/80faf06d/common/rya.api/src/main/java/mvm/rya/api/utils/CloseableIterableIteration.java
----------------------------------------------------------------------
diff --git a/common/rya.api/src/main/java/mvm/rya/api/utils/CloseableIterableIteration.java b/common/rya.api/src/main/java/mvm/rya/api/utils/CloseableIterableIteration.java
index f947c94..f3e5479 100644
--- a/common/rya.api/src/main/java/mvm/rya/api/utils/CloseableIterableIteration.java
+++ b/common/rya.api/src/main/java/mvm/rya/api/utils/CloseableIterableIteration.java
@@ -1,25 +1,26 @@
 package mvm.rya.api.utils;
 
 /*
- * #%L
- * mvm.rya.rya.api
- * %%
- * Copyright (C) 2014 Rya
- * %%
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * You may obtain a copy of the License at
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
  * 
- *      http://www.apache.org/licenses/LICENSE-2.0
+ *   http://www.apache.org/licenses/LICENSE-2.0
  * 
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- * #L%
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied.  See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
  */
 
+
+
 import info.aduna.iteration.CloseableIteration;
 
 import java.io.IOException;

http://git-wip-us.apache.org/repos/asf/incubator-rya/blob/80faf06d/common/rya.api/src/main/java/mvm/rya/api/utils/EnumerationWrapper.java
----------------------------------------------------------------------
diff --git a/common/rya.api/src/main/java/mvm/rya/api/utils/EnumerationWrapper.java b/common/rya.api/src/main/java/mvm/rya/api/utils/EnumerationWrapper.java
index 9baa619..b098e52 100644
--- a/common/rya.api/src/main/java/mvm/rya/api/utils/EnumerationWrapper.java
+++ b/common/rya.api/src/main/java/mvm/rya/api/utils/EnumerationWrapper.java
@@ -1,25 +1,26 @@
 package mvm.rya.api.utils;
 
 /*
- * #%L
- * mvm.rya.rya.api
- * %%
- * Copyright (C) 2014 Rya
- * %%
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * You may obtain a copy of the License at
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
  * 
- *      http://www.apache.org/licenses/LICENSE-2.0
+ *   http://www.apache.org/licenses/LICENSE-2.0
  * 
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- * #L%
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied.  See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
  */
 
+
+
 import info.aduna.iteration.CloseableIteration;
 
 import java.util.Enumeration;

http://git-wip-us.apache.org/repos/asf/incubator-rya/blob/80faf06d/common/rya.api/src/main/java/mvm/rya/api/utils/IteratorWrapper.java
----------------------------------------------------------------------
diff --git a/common/rya.api/src/main/java/mvm/rya/api/utils/IteratorWrapper.java b/common/rya.api/src/main/java/mvm/rya/api/utils/IteratorWrapper.java
index 8860d99..86748f9 100644
--- a/common/rya.api/src/main/java/mvm/rya/api/utils/IteratorWrapper.java
+++ b/common/rya.api/src/main/java/mvm/rya/api/utils/IteratorWrapper.java
@@ -1,25 +1,26 @@
 package mvm.rya.api.utils;
 
 /*
- * #%L
- * mvm.rya.rya.api
- * %%
- * Copyright (C) 2014 Rya
- * %%
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * You may obtain a copy of the License at
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
  * 
- *      http://www.apache.org/licenses/LICENSE-2.0
+ *   http://www.apache.org/licenses/LICENSE-2.0
  * 
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- * #L%
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied.  See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
  */
 
+
+
 import info.aduna.iteration.CloseableIteration;
 
 import java.util.Iterator;

http://git-wip-us.apache.org/repos/asf/incubator-rya/blob/80faf06d/common/rya.api/src/main/java/mvm/rya/api/utils/NullableStatementImpl.java
----------------------------------------------------------------------
diff --git a/common/rya.api/src/main/java/mvm/rya/api/utils/NullableStatementImpl.java b/common/rya.api/src/main/java/mvm/rya/api/utils/NullableStatementImpl.java
index c5f7622..dfa17e8 100644
--- a/common/rya.api/src/main/java/mvm/rya/api/utils/NullableStatementImpl.java
+++ b/common/rya.api/src/main/java/mvm/rya/api/utils/NullableStatementImpl.java
@@ -1,25 +1,26 @@
 package mvm.rya.api.utils;
 
 /*
- * #%L
- * mvm.rya.rya.api
- * %%
- * Copyright (C) 2014 Rya
- * %%
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * You may obtain a copy of the License at
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
  * 
- *      http://www.apache.org/licenses/LICENSE-2.0
+ *   http://www.apache.org/licenses/LICENSE-2.0
  * 
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- * #L%
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied.  See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
  */
 
+
+
 import org.openrdf.model.Resource;
 import org.openrdf.model.Statement;
 import org.openrdf.model.URI;

http://git-wip-us.apache.org/repos/asf/incubator-rya/blob/80faf06d/common/rya.api/src/main/java/mvm/rya/api/utils/PeekingCloseableIteration.java
----------------------------------------------------------------------
diff --git a/common/rya.api/src/main/java/mvm/rya/api/utils/PeekingCloseableIteration.java b/common/rya.api/src/main/java/mvm/rya/api/utils/PeekingCloseableIteration.java
index 70b89d0..297c950 100644
--- a/common/rya.api/src/main/java/mvm/rya/api/utils/PeekingCloseableIteration.java
+++ b/common/rya.api/src/main/java/mvm/rya/api/utils/PeekingCloseableIteration.java
@@ -1,25 +1,26 @@
 package mvm.rya.api.utils;
 
 /*
- * #%L
- * mvm.rya.rya.api
- * %%
- * Copyright (C) 2014 Rya
- * %%
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * You may obtain a copy of the License at
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
  * 
- *      http://www.apache.org/licenses/LICENSE-2.0
+ *   http://www.apache.org/licenses/LICENSE-2.0
  * 
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- * #L%
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied.  See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
  */
 
+
+
 import com.google.common.base.Preconditions;
 import info.aduna.iteration.CloseableIteration;
 

http://git-wip-us.apache.org/repos/asf/incubator-rya/blob/80faf06d/common/rya.api/src/main/java/mvm/rya/api/utils/RyaStatementAddBindingSetFunction.java
----------------------------------------------------------------------
diff --git a/common/rya.api/src/main/java/mvm/rya/api/utils/RyaStatementAddBindingSetFunction.java b/common/rya.api/src/main/java/mvm/rya/api/utils/RyaStatementAddBindingSetFunction.java
index 0bb9420..0fc2a7f 100644
--- a/common/rya.api/src/main/java/mvm/rya/api/utils/RyaStatementAddBindingSetFunction.java
+++ b/common/rya.api/src/main/java/mvm/rya/api/utils/RyaStatementAddBindingSetFunction.java
@@ -1,25 +1,26 @@
 package mvm.rya.api.utils;
 
 /*
- * #%L
- * mvm.rya.rya.api
- * %%
- * Copyright (C) 2014 Rya
- * %%
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * You may obtain a copy of the License at
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
  * 
- *      http://www.apache.org/licenses/LICENSE-2.0
+ *   http://www.apache.org/licenses/LICENSE-2.0
  * 
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- * #L%
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied.  See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
  */
 
+
+
 import com.google.common.base.Function;
 import mvm.rya.api.RdfCloudTripleStoreUtils;
 import mvm.rya.api.domain.RyaStatement;

http://git-wip-us.apache.org/repos/asf/incubator-rya/blob/80faf06d/common/rya.api/src/main/java/mvm/rya/api/utils/RyaStatementRemoveBindingSetCloseableIteration.java
----------------------------------------------------------------------
diff --git a/common/rya.api/src/main/java/mvm/rya/api/utils/RyaStatementRemoveBindingSetCloseableIteration.java b/common/rya.api/src/main/java/mvm/rya/api/utils/RyaStatementRemoveBindingSetCloseableIteration.java
index 0bd3ca1..b39fafe 100644
--- a/common/rya.api/src/main/java/mvm/rya/api/utils/RyaStatementRemoveBindingSetCloseableIteration.java
+++ b/common/rya.api/src/main/java/mvm/rya/api/utils/RyaStatementRemoveBindingSetCloseableIteration.java
@@ -1,25 +1,26 @@
 package mvm.rya.api.utils;
 
 /*
- * #%L
- * mvm.rya.rya.api
- * %%
- * Copyright (C) 2014 Rya
- * %%
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * You may obtain a copy of the License at
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
  * 
- *      http://www.apache.org/licenses/LICENSE-2.0
+ *   http://www.apache.org/licenses/LICENSE-2.0
  * 
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- * #L%
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied.  See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
  */
 
+
+
 import info.aduna.iteration.CloseableIteration;
 import mvm.rya.api.domain.RyaStatement;
 import mvm.rya.api.persist.RyaDAOException;

http://git-wip-us.apache.org/repos/asf/incubator-rya/blob/80faf06d/common/rya.api/src/test/java/mvm/rya/api/domain/RyaURIPrefixTest.java
----------------------------------------------------------------------
diff --git a/common/rya.api/src/test/java/mvm/rya/api/domain/RyaURIPrefixTest.java b/common/rya.api/src/test/java/mvm/rya/api/domain/RyaURIPrefixTest.java
index 66d6134..3966679 100644
--- a/common/rya.api/src/test/java/mvm/rya/api/domain/RyaURIPrefixTest.java
+++ b/common/rya.api/src/test/java/mvm/rya/api/domain/RyaURIPrefixTest.java
@@ -1,25 +1,26 @@
 package mvm.rya.api.domain;
 
 /*
- * #%L
- * mvm.rya.rya.api
- * %%
- * Copyright (C) 2014 Rya
- * %%
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * You may obtain a copy of the License at
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
  * 
- *      http://www.apache.org/licenses/LICENSE-2.0
+ *   http://www.apache.org/licenses/LICENSE-2.0
  * 
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- * #L%
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied.  See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
  */
 
+
+
 import junit.framework.TestCase;
 
 /**

http://git-wip-us.apache.org/repos/asf/incubator-rya/blob/80faf06d/common/rya.api/src/test/java/mvm/rya/api/persist/query/RyaQueryTest.java
----------------------------------------------------------------------
diff --git a/common/rya.api/src/test/java/mvm/rya/api/persist/query/RyaQueryTest.java b/common/rya.api/src/test/java/mvm/rya/api/persist/query/RyaQueryTest.java
index 6eae3d0..40a9c68 100644
--- a/common/rya.api/src/test/java/mvm/rya/api/persist/query/RyaQueryTest.java
+++ b/common/rya.api/src/test/java/mvm/rya/api/persist/query/RyaQueryTest.java
@@ -1,25 +1,26 @@
 package mvm.rya.api.persist.query;
 
 /*
- * #%L
- * mvm.rya.rya.api
- * %%
- * Copyright (C) 2014 Rya
- * %%
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * You may obtain a copy of the License at
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
  * 
- *      http://www.apache.org/licenses/LICENSE-2.0
+ *   http://www.apache.org/licenses/LICENSE-2.0
  * 
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- * #L%
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied.  See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
  */
 
+
+
 import mvm.rya.api.domain.RyaStatement;
 import mvm.rya.api.domain.RyaURI;
 import org.junit.Test;

http://git-wip-us.apache.org/repos/asf/incubator-rya/blob/80faf06d/common/rya.api/src/test/java/mvm/rya/api/query/strategy/AbstractTriplePatternStrategyTest.java
----------------------------------------------------------------------
diff --git a/common/rya.api/src/test/java/mvm/rya/api/query/strategy/AbstractTriplePatternStrategyTest.java b/common/rya.api/src/test/java/mvm/rya/api/query/strategy/AbstractTriplePatternStrategyTest.java
index be234a3..919e4cc 100644
--- a/common/rya.api/src/test/java/mvm/rya/api/query/strategy/AbstractTriplePatternStrategyTest.java
+++ b/common/rya.api/src/test/java/mvm/rya/api/query/strategy/AbstractTriplePatternStrategyTest.java
@@ -1,25 +1,26 @@
 package mvm.rya.api.query.strategy;
 
 /*
- * #%L
- * mvm.rya.rya.api
- * %%
- * Copyright (C) 2014 Rya
- * %%
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * You may obtain a copy of the License at
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
  * 
- *      http://www.apache.org/licenses/LICENSE-2.0
+ *   http://www.apache.org/licenses/LICENSE-2.0
  * 
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- * #L%
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied.  See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
  */
 
+
+
 import static mvm.rya.api.RdfCloudTripleStoreConstants.TABLE_LAYOUT.OSP;
 import static mvm.rya.api.RdfCloudTripleStoreConstants.TABLE_LAYOUT.PO;
 import static mvm.rya.api.RdfCloudTripleStoreConstants.TABLE_LAYOUT.SPO;

http://git-wip-us.apache.org/repos/asf/incubator-rya/blob/80faf06d/common/rya.api/src/test/java/mvm/rya/api/query/strategy/wholerow/HashedPoWholeRowTriplePatternStrategyTest.java
----------------------------------------------------------------------
diff --git a/common/rya.api/src/test/java/mvm/rya/api/query/strategy/wholerow/HashedPoWholeRowTriplePatternStrategyTest.java b/common/rya.api/src/test/java/mvm/rya/api/query/strategy/wholerow/HashedPoWholeRowTriplePatternStrategyTest.java
index 0ecec9b..81e9468 100644
--- a/common/rya.api/src/test/java/mvm/rya/api/query/strategy/wholerow/HashedPoWholeRowTriplePatternStrategyTest.java
+++ b/common/rya.api/src/test/java/mvm/rya/api/query/strategy/wholerow/HashedPoWholeRowTriplePatternStrategyTest.java
@@ -1,25 +1,26 @@
 package mvm.rya.api.query.strategy.wholerow;
 
 /*
- * #%L
- * mvm.rya.rya.api
- * %%
- * Copyright (C) 2014 Rya
- * %%
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * You may obtain a copy of the License at
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
  * 
- *      http://www.apache.org/licenses/LICENSE-2.0
+ *   http://www.apache.org/licenses/LICENSE-2.0
  * 
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- * #L%
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied.  See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
  */
 
+
+
 import junit.framework.TestCase;
 import mvm.rya.api.RdfCloudTripleStoreConstants;
 import mvm.rya.api.domain.*;

http://git-wip-us.apache.org/repos/asf/incubator-rya/blob/80faf06d/common/rya.api/src/test/java/mvm/rya/api/query/strategy/wholerow/HashedSpoWholeRowTriplePatternStrategyTest.java
----------------------------------------------------------------------
diff --git a/common/rya.api/src/test/java/mvm/rya/api/query/strategy/wholerow/HashedSpoWholeRowTriplePatternStrategyTest.java b/common/rya.api/src/test/java/mvm/rya/api/query/strategy/wholerow/HashedSpoWholeRowTriplePatternStrategyTest.java
index f726333..f751866 100644
--- a/common/rya.api/src/test/java/mvm/rya/api/query/strategy/wholerow/HashedSpoWholeRowTriplePatternStrategyTest.java
+++ b/common/rya.api/src/test/java/mvm/rya/api/query/strategy/wholerow/HashedSpoWholeRowTriplePatternStrategyTest.java
@@ -1,24 +1,25 @@
 package mvm.rya.api.query.strategy.wholerow;
 
 /*
- * #%L
- * mvm.rya.rya.api
- * %%
- * Copyright (C) 2014 Rya
- * %%
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * You may obtain a copy of the License at
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
  * 
- *      http://www.apache.org/licenses/LICENSE-2.0
+ *   http://www.apache.org/licenses/LICENSE-2.0
  * 
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- * #L%
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied.  See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
  */
+
+
 //
 import java.util.Map;
 

http://git-wip-us.apache.org/repos/asf/incubator-rya/blob/80faf06d/common/rya.api/src/test/java/mvm/rya/api/query/strategy/wholerow/MockRdfCloudConfiguration.java
----------------------------------------------------------------------
diff --git a/common/rya.api/src/test/java/mvm/rya/api/query/strategy/wholerow/MockRdfCloudConfiguration.java b/common/rya.api/src/test/java/mvm/rya/api/query/strategy/wholerow/MockRdfCloudConfiguration.java
index 5b91c88..ddb7fa8 100644
--- a/common/rya.api/src/test/java/mvm/rya/api/query/strategy/wholerow/MockRdfCloudConfiguration.java
+++ b/common/rya.api/src/test/java/mvm/rya/api/query/strategy/wholerow/MockRdfCloudConfiguration.java
@@ -1,5 +1,25 @@
 package mvm.rya.api.query.strategy.wholerow;
 
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ * 
+ *   http://www.apache.org/licenses/LICENSE-2.0
+ * 
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied.  See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+
+
 import mvm.rya.api.RdfCloudTripleStoreConfiguration;
 
 public class MockRdfCloudConfiguration extends RdfCloudTripleStoreConfiguration {

http://git-wip-us.apache.org/repos/asf/incubator-rya/blob/80faf06d/common/rya.api/src/test/java/mvm/rya/api/query/strategy/wholerow/OspWholeRowTriplePatternStrategyTest.java
----------------------------------------------------------------------
diff --git a/common/rya.api/src/test/java/mvm/rya/api/query/strategy/wholerow/OspWholeRowTriplePatternStrategyTest.java b/common/rya.api/src/test/java/mvm/rya/api/query/strategy/wholerow/OspWholeRowTriplePatternStrategyTest.java
index abc23e9..57b27be 100644
--- a/common/rya.api/src/test/java/mvm/rya/api/query/strategy/wholerow/OspWholeRowTriplePatternStrategyTest.java
+++ b/common/rya.api/src/test/java/mvm/rya/api/query/strategy/wholerow/OspWholeRowTriplePatternStrategyTest.java
@@ -1,24 +1,24 @@
-//package mvm.rya.api.query.strategy.wholerow;
-
 /*
- * #%L
- * mvm.rya.rya.api
- * %%
- * Copyright (C) 2014 Rya
- * %%
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * You may obtain a copy of the License at
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
  * 
- *      http://www.apache.org/licenses/LICENSE-2.0
+ *   http://www.apache.org/licenses/LICENSE-2.0
  * 
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- * #L%
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied.  See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
  */
+
+//package mvm.rya.api.query.strategy.wholerow;
+
 //
 //import junit.framework.TestCase;
 //import mvm.rya.api.RdfCloudTripleStoreConstants;

http://git-wip-us.apache.org/repos/asf/incubator-rya/blob/80faf06d/common/rya.api/src/test/java/mvm/rya/api/query/strategy/wholerow/PoWholeRowTriplePatternStrategyTest.java
----------------------------------------------------------------------
diff --git a/common/rya.api/src/test/java/mvm/rya/api/query/strategy/wholerow/PoWholeRowTriplePatternStrategyTest.java b/common/rya.api/src/test/java/mvm/rya/api/query/strategy/wholerow/PoWholeRowTriplePatternStrategyTest.java
index 50546e4..1079bf8 100644
--- a/common/rya.api/src/test/java/mvm/rya/api/query/strategy/wholerow/PoWholeRowTriplePatternStrategyTest.java
+++ b/common/rya.api/src/test/java/mvm/rya/api/query/strategy/wholerow/PoWholeRowTriplePatternStrategyTest.java
@@ -1,24 +1,24 @@
-//package mvm.rya.api.query.strategy.wholerow;
-
 /*
- * #%L
- * mvm.rya.rya.api
- * %%
- * Copyright (C) 2014 Rya
- * %%
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * You may obtain a copy of the License at
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
  * 
- *      http://www.apache.org/licenses/LICENSE-2.0
+ *   http://www.apache.org/licenses/LICENSE-2.0
  * 
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- * #L%
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied.  See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
  */
+
+//package mvm.rya.api.query.strategy.wholerow;
+
 //
 //import junit.framework.TestCase;
 //import mvm.rya.api.RdfCloudTripleStoreConstants;

http://git-wip-us.apache.org/repos/asf/incubator-rya/blob/80faf06d/common/rya.api/src/test/java/mvm/rya/api/query/strategy/wholerow/SpoWholeRowTriplePatternStrategyTest.java
----------------------------------------------------------------------
diff --git a/common/rya.api/src/test/java/mvm/rya/api/query/strategy/wholerow/SpoWholeRowTriplePatternStrategyTest.java b/common/rya.api/src/test/java/mvm/rya/api/query/strategy/wholerow/SpoWholeRowTriplePatternStrategyTest.java
index 280b532..019a3aa 100644
--- a/common/rya.api/src/test/java/mvm/rya/api/query/strategy/wholerow/SpoWholeRowTriplePatternStrategyTest.java
+++ b/common/rya.api/src/test/java/mvm/rya/api/query/strategy/wholerow/SpoWholeRowTriplePatternStrategyTest.java
@@ -1,24 +1,24 @@
-//package mvm.rya.api.query.strategy.wholerow;
-
 /*
- * #%L
- * mvm.rya.rya.api
- * %%
- * Copyright (C) 2014 Rya
- * %%
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * You may obtain a copy of the License at
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
  * 
- *      http://www.apache.org/licenses/LICENSE-2.0
+ *   http://www.apache.org/licenses/LICENSE-2.0
  * 
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- * #L%
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied.  See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
  */
+
+//package mvm.rya.api.query.strategy.wholerow;
+
 //
 //import junit.framework.TestCase;
 //import mvm.rya.api.RdfCloudTripleStoreConstants;

http://git-wip-us.apache.org/repos/asf/incubator-rya/blob/80faf06d/common/rya.api/src/test/java/mvm/rya/api/resolver/RyaContextTest.java
----------------------------------------------------------------------
diff --git a/common/rya.api/src/test/java/mvm/rya/api/resolver/RyaContextTest.java b/common/rya.api/src/test/java/mvm/rya/api/resolver/RyaContextTest.java
index 1d92499..4363fd3 100644
--- a/common/rya.api/src/test/java/mvm/rya/api/resolver/RyaContextTest.java
+++ b/common/rya.api/src/test/java/mvm/rya/api/resolver/RyaContextTest.java
@@ -1,25 +1,26 @@
 package mvm.rya.api.resolver;
 
 /*
- * #%L
- * mvm.rya.rya.api
- * %%
- * Copyright (C) 2014 Rya
- * %%
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * You may obtain a copy of the License at
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
  * 
- *      http://www.apache.org/licenses/LICENSE-2.0
+ *   http://www.apache.org/licenses/LICENSE-2.0
  * 
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- * #L%
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied.  See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
  */
 
+
+
 import java.util.Map;
 
 import junit.framework.TestCase;

http://git-wip-us.apache.org/repos/asf/incubator-rya/blob/80faf06d/common/rya.api/src/test/java/mvm/rya/api/resolver/impl/CustomDatatypeResolverTest.java
----------------------------------------------------------------------
diff --git a/common/rya.api/src/test/java/mvm/rya/api/resolver/impl/CustomDatatypeResolverTest.java b/common/rya.api/src/test/java/mvm/rya/api/resolver/impl/CustomDatatypeResolverTest.java
index 814eb58..0126414 100644
--- a/common/rya.api/src/test/java/mvm/rya/api/resolver/impl/CustomDatatypeResolverTest.java
+++ b/common/rya.api/src/test/java/mvm/rya/api/resolver/impl/CustomDatatypeResolverTest.java
@@ -1,25 +1,26 @@
 package mvm.rya.api.resolver.impl;
 
 /*
- * #%L
- * mvm.rya.rya.api
- * %%
- * Copyright (C) 2014 Rya
- * %%
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * You may obtain a copy of the License at
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
  * 
- *      http://www.apache.org/licenses/LICENSE-2.0
+ *   http://www.apache.org/licenses/LICENSE-2.0
  * 
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- * #L%
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied.  See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
  */
 
+
+
 import junit.framework.TestCase;
 import mvm.rya.api.domain.RyaType;
 import org.openrdf.model.impl.URIImpl;

http://git-wip-us.apache.org/repos/asf/incubator-rya/blob/80faf06d/common/rya.api/src/test/java/mvm/rya/api/resolver/impl/DateTimeRyaTypeResolverTest.java
----------------------------------------------------------------------
diff --git a/common/rya.api/src/test/java/mvm/rya/api/resolver/impl/DateTimeRyaTypeResolverTest.java b/common/rya.api/src/test/java/mvm/rya/api/resolver/impl/DateTimeRyaTypeResolverTest.java
index bce0a46..7dfa8ea 100644
--- a/common/rya.api/src/test/java/mvm/rya/api/resolver/impl/DateTimeRyaTypeResolverTest.java
+++ b/common/rya.api/src/test/java/mvm/rya/api/resolver/impl/DateTimeRyaTypeResolverTest.java
@@ -1,25 +1,26 @@
 package mvm.rya.api.resolver.impl;
 
 /*
- * #%L
- * mvm.rya.rya.api
- * %%
- * Copyright (C) 2014 Rya
- * %%
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * You may obtain a copy of the License at
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
  * 
- *      http://www.apache.org/licenses/LICENSE-2.0
+ *   http://www.apache.org/licenses/LICENSE-2.0
  * 
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- * #L%
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied.  See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
  */
 
+
+
 import static org.junit.Assert.*;
 
 import java.util.Date;

http://git-wip-us.apache.org/repos/asf/incubator-rya/blob/80faf06d/common/rya.api/src/test/java/mvm/rya/api/resolver/impl/DoubleRyaTypeResolverTest.java
----------------------------------------------------------------------
diff --git a/common/rya.api/src/test/java/mvm/rya/api/resolver/impl/DoubleRyaTypeResolverTest.java b/common/rya.api/src/test/java/mvm/rya/api/resolver/impl/DoubleRyaTypeResolverTest.java
index 9c53664..2c5f43d 100644
--- a/common/rya.api/src/test/java/mvm/rya/api/resolver/impl/DoubleRyaTypeResolverTest.java
+++ b/common/rya.api/src/test/java/mvm/rya/api/resolver/impl/DoubleRyaTypeResolverTest.java
@@ -1,25 +1,26 @@
 package mvm.rya.api.resolver.impl;
 
 /*
- * #%L
- * mvm.rya.rya.api
- * %%
- * Copyright (C) 2014 Rya
- * %%
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * You may obtain a copy of the License at
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
  * 
- *      http://www.apache.org/licenses/LICENSE-2.0
+ *   http://www.apache.org/licenses/LICENSE-2.0
  * 
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- * #L%
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied.  See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
  */
 
+
+
 import junit.framework.TestCase;
 import mvm.rya.api.domain.RyaType;
 import org.openrdf.model.vocabulary.XMLSchema;

http://git-wip-us.apache.org/repos/asf/incubator-rya/blob/80faf06d/common/rya.api/src/test/java/mvm/rya/api/resolver/impl/IntegerRyaTypeResolverTest.java
----------------------------------------------------------------------
diff --git a/common/rya.api/src/test/java/mvm/rya/api/resolver/impl/IntegerRyaTypeResolverTest.java b/common/rya.api/src/test/java/mvm/rya/api/resolver/impl/IntegerRyaTypeResolverTest.java
index c8a739a..60cded0 100644
--- a/common/rya.api/src/test/java/mvm/rya/api/resolver/impl/IntegerRyaTypeResolverTest.java
+++ b/common/rya.api/src/test/java/mvm/rya/api/resolver/impl/IntegerRyaTypeResolverTest.java
@@ -1,25 +1,26 @@
 package mvm.rya.api.resolver.impl;
 
 /*
- * #%L
- * mvm.rya.rya.api
- * %%
- * Copyright (C) 2014 Rya
- * %%
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * You may obtain a copy of the License at
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
  * 
- *      http://www.apache.org/licenses/LICENSE-2.0
+ *   http://www.apache.org/licenses/LICENSE-2.0
  * 
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- * #L%
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied.  See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
  */
 
+
+
 import junit.framework.TestCase;
 import mvm.rya.api.domain.RyaType;
 import org.openrdf.model.vocabulary.XMLSchema;



[38/56] [abbrv] incubator-rya git commit: RYA-7 POM and License Clean-up for Apache Move

Posted by mi...@apache.org.
http://git-wip-us.apache.org/repos/asf/incubator-rya/blob/80faf06d/extras/indexing/src/main/java/mvm/rya/indexing/accumulo/freetext/query/ParseException.java
----------------------------------------------------------------------
diff --git a/extras/indexing/src/main/java/mvm/rya/indexing/accumulo/freetext/query/ParseException.java b/extras/indexing/src/main/java/mvm/rya/indexing/accumulo/freetext/query/ParseException.java
index 7adf9ba..9200770 100644
--- a/extras/indexing/src/main/java/mvm/rya/indexing/accumulo/freetext/query/ParseException.java
+++ b/extras/indexing/src/main/java/mvm/rya/indexing/accumulo/freetext/query/ParseException.java
@@ -3,25 +3,906 @@
 package mvm.rya.indexing.accumulo.freetext.query;
 
 /*
- * #%L
- * mvm.rya.indexing.accumulo
- * %%
- * Copyright (C) 2014 Rya
- * %%
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * You may obtain a copy of the License at
- * 
- *      http://www.apache.org/licenses/LICENSE-2.0
- * 
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- * #L%
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ * 
+ *   http://www.apache.org/licenses/LICENSE-2.0
+ * 
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied.  See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
  */
 
 /**
  * This exception is thrown when parse errors are encountered.
  * You can explicitly create objects of this exception type by

http://git-wip-us.apache.org/repos/asf/incubator-rya/blob/80faf06d/extras/indexing/src/main/java/mvm/rya/indexing/accumulo/freetext/query/QueryParser.java
----------------------------------------------------------------------
diff --git a/extras/indexing/src/main/java/mvm/rya/indexing/accumulo/freetext/query/QueryParser.java b/extras/indexing/src/main/java/mvm/rya/indexing/accumulo/freetext/query/QueryParser.java
index 8848174..65e7125 100644
--- a/extras/indexing/src/main/java/mvm/rya/indexing/accumulo/freetext/query/QueryParser.java
+++ b/extras/indexing/src/main/java/mvm/rya/indexing/accumulo/freetext/query/QueryParser.java
@@ -2,25 +2,906 @@
 package mvm.rya.indexing.accumulo.freetext.query;
 
 /*
- * #%L
- * mvm.rya.indexing.accumulo
- * %%
- * Copyright (C) 2014 Rya
- * %%
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * You may obtain a copy of the License at
- * 
- *      http://www.apache.org/licenses/LICENSE-2.0
- * 
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- * #L%
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ * 
+ *   http://www.apache.org/licenses/LICENSE-2.0
+ * 
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied.  See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+
+
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ * 
+ *   http://www.apache.org/licenses/LICENSE-2.0
+ * 
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied.  See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+
+
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ * 
+ *   http://www.apache.org/licenses/LICENSE-2.0
+ * 
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied.  See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+
+
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ * 
+ *   http://www.apache.org/licenses/LICENSE-2.0
+ * 
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied.  See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+
+
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ * 
+ *   http://www.apache.org/licenses/LICENSE-2.0
+ * 
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied.  See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+
+
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ * 
+ *   http://www.apache.org/licenses/LICENSE-2.0
+ * 
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied.  See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+
+
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ * 
+ *   http://www.apache.org/licenses/LICENSE-2.0
+ * 
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied.  See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+
+
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ * 
+ *   http://www.apache.org/licenses/LICENSE-2.0
+ * 
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied.  See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
  */
 
+
 import java.io.StringReader;
 
 public class QueryParser/*@bgen(jjtree)*/implements QueryParserTreeConstants, QueryParserConstants {/*@bgen(jjtree)*/

http://git-wip-us.apache.org/repos/asf/incubator-rya/blob/80faf06d/extras/indexing/src/main/java/mvm/rya/indexing/accumulo/freetext/query/QueryParser.jjt
----------------------------------------------------------------------
diff --git a/extras/indexing/src/main/java/mvm/rya/indexing/accumulo/freetext/query/QueryParser.jjt b/extras/indexing/src/main/java/mvm/rya/indexing/accumulo/freetext/query/QueryParser.jjt
index 51a1fd3..a215b68 100644
--- a/extras/indexing/src/main/java/mvm/rya/indexing/accumulo/freetext/query/QueryParser.jjt
+++ b/extras/indexing/src/main/java/mvm/rya/indexing/accumulo/freetext/query/QueryParser.jjt
@@ -6,7 +6,26 @@
  * ASTTerm slightly modified versions of the auto-generated files.
  * 
  * I highly recommend the "JavaCC Eclipse Plug-in".
- */options{
+ */
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ * 
+ *   http://www.apache.org/licenses/LICENSE-2.0
+ * 
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied.  See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+options{
   MULTI = true;  JDK_VERSION = "1.5";  IGNORE_CASE = true;  NODE_DEFAULT_VOID=true;
   static = false;
 //  DEBUG_PARSER = true;
@@ -68,4 +87,4 @@ void AndExpression() #Expression(>1):{ jjtThis.setType(ASTExpression.AND); }
     }
   )
   
-}
\ No newline at end of file
+}


[50/56] [abbrv] incubator-rya git commit: RYA-13 Add delete support to secondary indices

Posted by mi...@apache.org.
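
Delete support means each secondary indexer can remove a previously indexed statement, after which queries against that index no longer return it. The sketch below traces the lifecycle that the new testDeleteSearch in the diff exercises; it is illustrative rather than code from the commit. It reuses only calls visible in the hunks (setConf, storeStatement, flush, deleteStatement), while the import path for GeoConstants and the contents of the Configuration are assumptions made for illustration.

import static mvm.rya.api.resolver.RdfToRyaConversions.convertStatement;

import org.apache.hadoop.conf.Configuration;
import org.openrdf.model.Statement;
import org.openrdf.model.ValueFactory;
import org.openrdf.model.impl.StatementImpl;
import org.openrdf.model.impl.ValueFactoryImpl;

import mvm.rya.indexing.accumulo.geo.GeoConstants; // assumed location; the hunks below use it unqualified
import mvm.rya.indexing.accumulo.geo.GeoMesaGeoIndexer;

public class DeleteSupportSketch {
    public static void main(String[] args) throws Exception {
        Configuration conf = new Configuration(); // assumed to carry the Accumulo/GeoMesa settings the tests set up
        try (GeoMesaGeoIndexer indexer = new GeoMesaGeoIndexer()) {
            indexer.setConf(conf);

            ValueFactory vf = new ValueFactoryImpl();
            Statement stmt = new StatementImpl(
                    vf.createURI("foo:subj"),
                    GeoConstants.GEO_AS_WKT,
                    vf.createLiteral("Point(-77.03524 38.889468)", GeoConstants.XMLSCHEMA_OGC_WKT));

            indexer.storeStatement(convertStatement(stmt)); // write the geo index entry
            indexer.flush();

            indexer.deleteStatement(convertStatement(stmt)); // new in RYA-13: remove the entry
            // queryWithin(...) over any region should now return no results for this point
        }
    }
}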
http://git-wip-us.apache.org/repos/asf/incubator-rya/blob/e5e227c1/extras/indexing/src/test/java/mvm/rya/indexing/accumulo/geo/GeoIndexerTest.java
----------------------------------------------------------------------
diff --git a/extras/indexing/src/test/java/mvm/rya/indexing/accumulo/geo/GeoIndexerTest.java b/extras/indexing/src/test/java/mvm/rya/indexing/accumulo/geo/GeoIndexerTest.java
index 8ca96bc..c204f3c 100644
--- a/extras/indexing/src/test/java/mvm/rya/indexing/accumulo/geo/GeoIndexerTest.java
+++ b/extras/indexing/src/test/java/mvm/rya/indexing/accumulo/geo/GeoIndexerTest.java
@@ -22,16 +22,12 @@ package mvm.rya.indexing.accumulo.geo;
 
 
 import static mvm.rya.api.resolver.RdfToRyaConversions.convertStatement;
-import info.aduna.iteration.CloseableIteration;
 
 import java.util.Collections;
 import java.util.HashSet;
 import java.util.Set;
 import java.util.UUID;
 
-import mvm.rya.indexing.StatementContraints;
-import mvm.rya.indexing.accumulo.ConfigUtils;
-
 import org.apache.accumulo.core.client.admin.TableOperations;
 import org.apache.hadoop.conf.Configuration;
 import org.junit.Assert;
@@ -55,6 +51,10 @@ import com.vividsolutions.jts.geom.Polygon;
 import com.vividsolutions.jts.geom.PrecisionModel;
 import com.vividsolutions.jts.geom.impl.PackedCoordinateSequence;
 
+import info.aduna.iteration.CloseableIteration;
+import mvm.rya.indexing.StatementContraints;
+import mvm.rya.indexing.accumulo.ConfigUtils;
+
 public class GeoIndexerTest {
 
     private static final StatementContraints EMPTY_CONSTRAINTS = new StatementContraints();
@@ -89,42 +89,41 @@ public class GeoIndexerTest {
     @Test
     public void testRestrictPredicatesSearch() throws Exception {
         conf.setStrings(ConfigUtils.GEO_PREDICATES_LIST, "pred:1,pred:2");
-        GeoMesaGeoIndexer f = new GeoMesaGeoIndexer();
-        f.setConf(conf);
-
-        ValueFactory vf = new ValueFactoryImpl();
+        try (GeoMesaGeoIndexer f = new GeoMesaGeoIndexer()) {
+            f.setConf(conf);
 
-        Point point = gf.createPoint(new Coordinate(10, 10));
-        Value pointValue = vf.createLiteral("Point(10 10)", GeoConstants.XMLSCHEMA_OGC_WKT);
-        URI invalidPredicate = GeoConstants.GEO_AS_WKT;
+            ValueFactory vf = new ValueFactoryImpl();
 
-        // These should not be stored because they are not in the predicate list
-        f.storeStatement(convertStatement(new StatementImpl(vf.createURI("foo:subj1"), invalidPredicate, pointValue)));
-        f.storeStatement(convertStatement(new StatementImpl(vf.createURI("foo:subj2"), invalidPredicate, pointValue)));
+            Point point = gf.createPoint(new Coordinate(10, 10));
+            Value pointValue = vf.createLiteral("Point(10 10)", GeoConstants.XMLSCHEMA_OGC_WKT);
+            URI invalidPredicate = GeoConstants.GEO_AS_WKT;
 
-        URI pred1 = vf.createURI("pred:1");
-        URI pred2 = vf.createURI("pred:2");
+            // These should not be stored because they are not in the predicate list
+            f.storeStatement(convertStatement(new StatementImpl(vf.createURI("foo:subj1"), invalidPredicate, pointValue)));
+            f.storeStatement(convertStatement(new StatementImpl(vf.createURI("foo:subj2"), invalidPredicate, pointValue)));
 
-        // These should be stored because they are in the predicate list
-        Statement s3 = new StatementImpl(vf.createURI("foo:subj3"), pred1, pointValue);
-        Statement s4 = new StatementImpl(vf.createURI("foo:subj4"), pred2, pointValue);
-        f.storeStatement(convertStatement(s3));
-        f.storeStatement(convertStatement(s4));
+            URI pred1 = vf.createURI("pred:1");
+            URI pred2 = vf.createURI("pred:2");
 
-        // This should not be stored because the object is not valid wkt
-        f.storeStatement(convertStatement(new StatementImpl(vf.createURI("foo:subj5"), pred1, vf.createLiteral("soint(10 10)"))));
+            // These should be stored because they are in the predicate list
+            Statement s3 = new StatementImpl(vf.createURI("foo:subj3"), pred1, pointValue);
+            Statement s4 = new StatementImpl(vf.createURI("foo:subj4"), pred2, pointValue);
+            f.storeStatement(convertStatement(s3));
+            f.storeStatement(convertStatement(s4));
 
-        // This should not be stored because the object is not a literal
-        f.storeStatement(convertStatement(new StatementImpl(vf.createURI("foo:subj6"), pred1, vf.createURI("p:Point(10 10)"))));
+            // This should not be stored because the object is not valid wkt
+            f.storeStatement(convertStatement(new StatementImpl(vf.createURI("foo:subj5"), pred1, vf.createLiteral("soint(10 10)"))));
 
-        f.flush();
+            // This should not be stored because the object is not a literal
+            f.storeStatement(convertStatement(new StatementImpl(vf.createURI("foo:subj6"), pred1, vf.createURI("p:Point(10 10)"))));
 
-        Set<Statement> actual = getSet(f.queryEquals(point, EMPTY_CONSTRAINTS));
-        Assert.assertEquals(2, actual.size());
-        Assert.assertTrue(actual.contains(s3));
-        Assert.assertTrue(actual.contains(s4));
+            f.flush();
 
-        f.close();
+            Set<Statement> actual = getSet(f.queryEquals(point, EMPTY_CONSTRAINTS));
+            Assert.assertEquals(2, actual.size());
+            Assert.assertTrue(actual.contains(s3));
+            Assert.assertTrue(actual.contains(s4));
+        }
     }
 
     private static <X> Set<X> getSet(CloseableIteration<X, ?> iter) throws Exception {
@@ -137,234 +136,264 @@ public class GeoIndexerTest {
 
     @Test
     public void testPrimeMeridianSearch() throws Exception {
-        
-        GeoMesaGeoIndexer f = new GeoMesaGeoIndexer();
-        f.setConf(conf);
-
-        ValueFactory vf = new ValueFactoryImpl();
-        Resource subject = vf.createURI("foo:subj");
-        URI predicate = GeoConstants.GEO_AS_WKT;
-        Value object = vf.createLiteral("Point(0 0)", GeoConstants.XMLSCHEMA_OGC_WKT);
-        Resource context = vf.createURI("foo:context");
-
-        Statement statement = new ContextStatementImpl(subject, predicate, object, context);
-        f.storeStatement(convertStatement(statement));
-        f.flush();
-
-        double[] ONE = { 1, 1, -1, 1, -1, -1, 1, -1, 1, 1 };
-        double[] TWO = { 2, 2, -2, 2, -2, -2, 2, -2, 2, 2 };
-        double[] THREE = { 3, 3, -3, 3, -3, -3, 3, -3, 3, 3 };
-
-        LinearRing r1 = gf.createLinearRing(new PackedCoordinateSequence.Double(ONE, 2));
-        LinearRing r2 = gf.createLinearRing(new PackedCoordinateSequence.Double(TWO, 2));
-        LinearRing r3 = gf.createLinearRing(new PackedCoordinateSequence.Double(THREE, 2));
-
-        Polygon p1 = gf.createPolygon(r1, new LinearRing[] {});
-        Polygon p2 = gf.createPolygon(r2, new LinearRing[] {});
-        Polygon p3 = gf.createPolygon(r3, new LinearRing[] {});
-
-        Assert.assertEquals(Sets.newHashSet(statement), getSet(f.queryWithin(p1, EMPTY_CONSTRAINTS)));
-        Assert.assertEquals(Sets.newHashSet(statement), getSet(f.queryWithin(p2, EMPTY_CONSTRAINTS)));
-        Assert.assertEquals(Sets.newHashSet(statement), getSet(f.queryWithin(p3, EMPTY_CONSTRAINTS)));
-
-        // Test a ring with a hole in it
-        Polygon p3m2 = gf.createPolygon(r3, new LinearRing[] { r2 });
-        Assert.assertEquals(Sets.newHashSet(), getSet(f.queryWithin(p3m2, EMPTY_CONSTRAINTS)));
-
-        // test a ring outside the point
-        double[] OUT = { 3, 3, 1, 3, 1, 1, 3, 1, 3, 3 };
-        LinearRing rOut = gf.createLinearRing(new PackedCoordinateSequence.Double(OUT, 2));
-        Polygon pOut = gf.createPolygon(rOut, new LinearRing[] {});
-        Assert.assertEquals(Sets.newHashSet(), getSet(f.queryWithin(pOut, EMPTY_CONSTRAINTS)));
-
-        f.close();
+        try (GeoMesaGeoIndexer f = new GeoMesaGeoIndexer()) {
+            f.setConf(conf);
+
+            ValueFactory vf = new ValueFactoryImpl();
+            Resource subject = vf.createURI("foo:subj");
+            URI predicate = GeoConstants.GEO_AS_WKT;
+            Value object = vf.createLiteral("Point(0 0)", GeoConstants.XMLSCHEMA_OGC_WKT);
+            Resource context = vf.createURI("foo:context");
+
+            Statement statement = new ContextStatementImpl(subject, predicate, object, context);
+            f.storeStatement(convertStatement(statement));
+            f.flush();
+
+            double[] ONE = { 1, 1, -1, 1, -1, -1, 1, -1, 1, 1 };
+            double[] TWO = { 2, 2, -2, 2, -2, -2, 2, -2, 2, 2 };
+            double[] THREE = { 3, 3, -3, 3, -3, -3, 3, -3, 3, 3 };
+
+            LinearRing r1 = gf.createLinearRing(new PackedCoordinateSequence.Double(ONE, 2));
+            LinearRing r2 = gf.createLinearRing(new PackedCoordinateSequence.Double(TWO, 2));
+            LinearRing r3 = gf.createLinearRing(new PackedCoordinateSequence.Double(THREE, 2));
+
+            Polygon p1 = gf.createPolygon(r1, new LinearRing[] {});
+            Polygon p2 = gf.createPolygon(r2, new LinearRing[] {});
+            Polygon p3 = gf.createPolygon(r3, new LinearRing[] {});
+
+            Assert.assertEquals(Sets.newHashSet(statement), getSet(f.queryWithin(p1, EMPTY_CONSTRAINTS)));
+            Assert.assertEquals(Sets.newHashSet(statement), getSet(f.queryWithin(p2, EMPTY_CONSTRAINTS)));
+            Assert.assertEquals(Sets.newHashSet(statement), getSet(f.queryWithin(p3, EMPTY_CONSTRAINTS)));
+
+            // Test a ring with a hole in it
+            Polygon p3m2 = gf.createPolygon(r3, new LinearRing[] { r2 });
+            Assert.assertEquals(Sets.newHashSet(), getSet(f.queryWithin(p3m2, EMPTY_CONSTRAINTS)));
+
+            // test a ring outside the point
+            double[] OUT = { 3, 3, 1, 3, 1, 1, 3, 1, 3, 3 };
+            LinearRing rOut = gf.createLinearRing(new PackedCoordinateSequence.Double(OUT, 2));
+            Polygon pOut = gf.createPolygon(rOut, new LinearRing[] {});
+            Assert.assertEquals(Sets.newHashSet(), getSet(f.queryWithin(pOut, EMPTY_CONSTRAINTS)));
+        }
     }
 
     @Test
     public void testDcSearch() throws Exception {
         // test a ring around dc
-        GeoMesaGeoIndexer f = new GeoMesaGeoIndexer();
-        f.setConf(conf);
-
-        ValueFactory vf = new ValueFactoryImpl();
-        Resource subject = vf.createURI("foo:subj");
-        URI predicate = GeoConstants.GEO_AS_WKT;
-        Value object = vf.createLiteral("Point(-77.03524 38.889468)", GeoConstants.XMLSCHEMA_OGC_WKT);
-        Resource context = vf.createURI("foo:context");
-
-        Statement statement = new ContextStatementImpl(subject, predicate, object, context);
-        f.storeStatement(convertStatement(statement));
-        f.flush();
-
-        double[] IN = { -78, 39, -77, 39, -77, 38, -78, 38, -78, 39 };
-        LinearRing r1 = gf.createLinearRing(new PackedCoordinateSequence.Double(IN, 2));
-        Polygon p1 = gf.createPolygon(r1, new LinearRing[] {});
-        Assert.assertEquals(Sets.newHashSet(statement), getSet(f.queryWithin(p1, EMPTY_CONSTRAINTS)));
-
-        // test a ring outside the point
-        double[] OUT = { -77, 39, -76, 39, -76, 38, -77, 38, -77, 39 };
-        LinearRing rOut = gf.createLinearRing(new PackedCoordinateSequence.Double(OUT, 2));
-        Polygon pOut = gf.createPolygon(rOut, new LinearRing[] {});
-        Assert.assertEquals(Sets.newHashSet(), getSet(f.queryWithin(pOut, EMPTY_CONSTRAINTS)));
-
-        f.close();
+        try (GeoMesaGeoIndexer f = new GeoMesaGeoIndexer()) {
+            f.setConf(conf);
+
+            ValueFactory vf = new ValueFactoryImpl();
+            Resource subject = vf.createURI("foo:subj");
+            URI predicate = GeoConstants.GEO_AS_WKT;
+            Value object = vf.createLiteral("Point(-77.03524 38.889468)", GeoConstants.XMLSCHEMA_OGC_WKT);
+            Resource context = vf.createURI("foo:context");
+
+            Statement statement = new ContextStatementImpl(subject, predicate, object, context);
+            f.storeStatement(convertStatement(statement));
+            f.flush();
+
+            double[] IN = { -78, 39, -77, 39, -77, 38, -78, 38, -78, 39 };
+            LinearRing r1 = gf.createLinearRing(new PackedCoordinateSequence.Double(IN, 2));
+            Polygon p1 = gf.createPolygon(r1, new LinearRing[] {});
+            Assert.assertEquals(Sets.newHashSet(statement), getSet(f.queryWithin(p1, EMPTY_CONSTRAINTS)));
+
+            // test a ring outside the point
+            double[] OUT = { -77, 39, -76, 39, -76, 38, -77, 38, -77, 39 };
+            LinearRing rOut = gf.createLinearRing(new PackedCoordinateSequence.Double(OUT, 2));
+            Polygon pOut = gf.createPolygon(rOut, new LinearRing[] {});
+            Assert.assertEquals(Sets.newHashSet(), getSet(f.queryWithin(pOut, EMPTY_CONSTRAINTS)));
+        }
     }
 
     @Test
-    public void testDcSearchWithContext() throws Exception {
+    public void testDeleteSearch() throws Exception {
         // test a ring around dc
-        GeoMesaGeoIndexer f = new GeoMesaGeoIndexer();
-        f.setConf(conf);
+        try (GeoMesaGeoIndexer f = new GeoMesaGeoIndexer()) {
+            f.setConf(conf);
+
+            ValueFactory vf = new ValueFactoryImpl();
+            Resource subject = vf.createURI("foo:subj");
+            URI predicate = GeoConstants.GEO_AS_WKT;
+            Value object = vf.createLiteral("Point(-77.03524 38.889468)", GeoConstants.XMLSCHEMA_OGC_WKT);
+            Resource context = vf.createURI("foo:context");
+
+            Statement statement = new ContextStatementImpl(subject, predicate, object, context);
+            f.storeStatement(convertStatement(statement));
+            f.flush();
+
+            f.deleteStatement(convertStatement(statement));
+
+            // test a ring that the point would be inside of if not deleted
+            double[] in = { -78, 39, -77, 39, -77, 38, -78, 38, -78, 39 };
+            LinearRing r1 = gf.createLinearRing(new PackedCoordinateSequence.Double(in, 2));
+            Polygon p1 = gf.createPolygon(r1, new LinearRing[] {});
+            Assert.assertEquals(Sets.newHashSet(), getSet(f.queryWithin(p1, EMPTY_CONSTRAINTS)));
+
+            // test a ring that the point would be outside of if not deleted
+            double[] out = { -77, 39, -76, 39, -76, 38, -77, 38, -77, 39 };
+            LinearRing rOut = gf.createLinearRing(new PackedCoordinateSequence.Double(out, 2));
+            Polygon pOut = gf.createPolygon(rOut, new LinearRing[] {});
+            Assert.assertEquals(Sets.newHashSet(), getSet(f.queryWithin(pOut, EMPTY_CONSTRAINTS)));
+
+            // test a ring for the whole world and make sure the point is gone
+            double[] world = { -180, 90, 180, 90, 180, -90, -180, -90, -180, 90 };
+            LinearRing rWorld = gf.createLinearRing(new PackedCoordinateSequence.Double(world, 2));
+            Polygon pWorld = gf.createPolygon(rWorld, new LinearRing[] {});
+            Assert.assertEquals(Sets.newHashSet(), getSet(f.queryWithin(pWorld, EMPTY_CONSTRAINTS)));
+        }
+    }
 
-        ValueFactory vf = new ValueFactoryImpl();
-        Resource subject = vf.createURI("foo:subj");
-        URI predicate = GeoConstants.GEO_AS_WKT;
-        Value object = vf.createLiteral("Point(-77.03524 38.889468)", GeoConstants.XMLSCHEMA_OGC_WKT);
-        Resource context = vf.createURI("foo:context");
+    @Test
+    public void testDcSearchWithContext() throws Exception {
+        // test a ring around dc
+        try (GeoMesaGeoIndexer f = new GeoMesaGeoIndexer()) {
+            f.setConf(conf);
 
-        Statement statement = new ContextStatementImpl(subject, predicate, object, context);
-        f.storeStatement(convertStatement(statement));
-        f.flush();
+            ValueFactory vf = new ValueFactoryImpl();
+            Resource subject = vf.createURI("foo:subj");
+            URI predicate = GeoConstants.GEO_AS_WKT;
+            Value object = vf.createLiteral("Point(-77.03524 38.889468)", GeoConstants.XMLSCHEMA_OGC_WKT);
+            Resource context = vf.createURI("foo:context");
 
-        double[] IN = { -78, 39, -77, 39, -77, 38, -78, 38, -78, 39 };
-        LinearRing r1 = gf.createLinearRing(new PackedCoordinateSequence.Double(IN, 2));
-        Polygon p1 = gf.createPolygon(r1, new LinearRing[] {});
+            Statement statement = new ContextStatementImpl(subject, predicate, object, context);
+            f.storeStatement(convertStatement(statement));
+            f.flush();
 
-        // query with correct context
-        Assert.assertEquals(Sets.newHashSet(statement), getSet(f.queryWithin(p1, new StatementContraints().setContext(context))));
+            double[] IN = { -78, 39, -77, 39, -77, 38, -78, 38, -78, 39 };
+            LinearRing r1 = gf.createLinearRing(new PackedCoordinateSequence.Double(IN, 2));
+            Polygon p1 = gf.createPolygon(r1, new LinearRing[] {});
 
-        // query with wrong context
-        Assert.assertEquals(Sets.newHashSet(),
-                getSet(f.queryWithin(p1, new StatementContraints().setContext(vf.createURI("foo:context2")))));
+            // query with correct context
+            Assert.assertEquals(Sets.newHashSet(statement), getSet(f.queryWithin(p1, new StatementContraints().setContext(context))));
 
-        f.close();
+            // query with wrong context
+            Assert.assertEquals(Sets.newHashSet(),
+                    getSet(f.queryWithin(p1, new StatementContraints().setContext(vf.createURI("foo:context2")))));
+        }
     }
 
     @Test
     public void testDcSearchWithSubject() throws Exception {
         // test a ring around dc
-        GeoMesaGeoIndexer f = new GeoMesaGeoIndexer();
-        f.setConf(conf);
-        
-        ValueFactory vf = new ValueFactoryImpl();
-        Resource subject = vf.createURI("foo:subj");
-        URI predicate = GeoConstants.GEO_AS_WKT;
-        Value object = vf.createLiteral("Point(-77.03524 38.889468)", GeoConstants.XMLSCHEMA_OGC_WKT);
-        Resource context = vf.createURI("foo:context");
-
-        Statement statement = new ContextStatementImpl(subject, predicate, object, context);
-        f.storeStatement(convertStatement(statement));
-        f.flush();
-
-        double[] IN = { -78, 39, -77, 39, -77, 38, -78, 38, -78, 39 };
-        LinearRing r1 = gf.createLinearRing(new PackedCoordinateSequence.Double(IN, 2));
-        Polygon p1 = gf.createPolygon(r1, new LinearRing[] {});
-
-        // query with correct subject
-        Assert.assertEquals(Sets.newHashSet(statement), getSet(f.queryWithin(p1, new StatementContraints().setSubject(subject))));
-
-        // query with wrong subject
-        Assert.assertEquals(Sets.newHashSet(), getSet(f.queryWithin(p1, new StatementContraints().setSubject(vf.createURI("foo:subj2")))));
-
-        f.close();
+        try (GeoMesaGeoIndexer f = new GeoMesaGeoIndexer()) {
+            f.setConf(conf);
+
+            ValueFactory vf = new ValueFactoryImpl();
+            Resource subject = vf.createURI("foo:subj");
+            URI predicate = GeoConstants.GEO_AS_WKT;
+            Value object = vf.createLiteral("Point(-77.03524 38.889468)", GeoConstants.XMLSCHEMA_OGC_WKT);
+            Resource context = vf.createURI("foo:context");
+
+            Statement statement = new ContextStatementImpl(subject, predicate, object, context);
+            f.storeStatement(convertStatement(statement));
+            f.flush();
+
+            double[] IN = { -78, 39, -77, 39, -77, 38, -78, 38, -78, 39 };
+            LinearRing r1 = gf.createLinearRing(new PackedCoordinateSequence.Double(IN, 2));
+            Polygon p1 = gf.createPolygon(r1, new LinearRing[] {});
+
+            // query with correct subject
+            Assert.assertEquals(Sets.newHashSet(statement), getSet(f.queryWithin(p1, new StatementContraints().setSubject(subject))));
+
+            // query with wrong subject
+            Assert.assertEquals(Sets.newHashSet(), getSet(f.queryWithin(p1, new StatementContraints().setSubject(vf.createURI("foo:subj2")))));
+        }
     }
 
     @Test
     public void testDcSearchWithSubjectAndContext() throws Exception {
         // test a ring around dc
-        GeoMesaGeoIndexer f = new GeoMesaGeoIndexer();
-        f.setConf(conf);
+        try (GeoMesaGeoIndexer f = new GeoMesaGeoIndexer()) {
+            f.setConf(conf);
 
-        ValueFactory vf = new ValueFactoryImpl();
-        Resource subject = vf.createURI("foo:subj");
-        URI predicate = GeoConstants.GEO_AS_WKT;
-        Value object = vf.createLiteral("Point(-77.03524 38.889468)", GeoConstants.XMLSCHEMA_OGC_WKT);
-        Resource context = vf.createURI("foo:context");
+            ValueFactory vf = new ValueFactoryImpl();
+            Resource subject = vf.createURI("foo:subj");
+            URI predicate = GeoConstants.GEO_AS_WKT;
+            Value object = vf.createLiteral("Point(-77.03524 38.889468)", GeoConstants.XMLSCHEMA_OGC_WKT);
+            Resource context = vf.createURI("foo:context");
 
-        Statement statement = new ContextStatementImpl(subject, predicate, object, context);
-        f.storeStatement(convertStatement(statement));
-        f.flush();
+            Statement statement = new ContextStatementImpl(subject, predicate, object, context);
+            f.storeStatement(convertStatement(statement));
+            f.flush();
 
-        double[] IN = { -78, 39, -77, 39, -77, 38, -78, 38, -78, 39 };
-        LinearRing r1 = gf.createLinearRing(new PackedCoordinateSequence.Double(IN, 2));
-        Polygon p1 = gf.createPolygon(r1, new LinearRing[] {});
+            double[] IN = { -78, 39, -77, 39, -77, 38, -78, 38, -78, 39 };
+            LinearRing r1 = gf.createLinearRing(new PackedCoordinateSequence.Double(IN, 2));
+            Polygon p1 = gf.createPolygon(r1, new LinearRing[] {});
 
-        // query with correct context subject
-        Assert.assertEquals(Sets.newHashSet(statement),
-                getSet(f.queryWithin(p1, new StatementContraints().setContext(context).setSubject(subject))));
+            // query with correct context subject
+            Assert.assertEquals(Sets.newHashSet(statement),
+                    getSet(f.queryWithin(p1, new StatementContraints().setContext(context).setSubject(subject))));
 
-        // query with wrong context
-        Assert.assertEquals(Sets.newHashSet(),
-                getSet(f.queryWithin(p1, new StatementContraints().setContext(vf.createURI("foo:context2")))));
+            // query with wrong context
+            Assert.assertEquals(Sets.newHashSet(),
+                    getSet(f.queryWithin(p1, new StatementContraints().setContext(vf.createURI("foo:context2")))));
 
-        // query with wrong subject
-        Assert.assertEquals(Sets.newHashSet(), getSet(f.queryWithin(p1, new StatementContraints().setSubject(vf.createURI("foo:subj2")))));
-
-        f.close();
+            // query with wrong subject
+            Assert.assertEquals(Sets.newHashSet(), getSet(f.queryWithin(p1, new StatementContraints().setSubject(vf.createURI("foo:subj2")))));
+        }
     }
 
     @Test
     public void testDcSearchWithPredicate() throws Exception {
         // test a ring around dc
-        GeoMesaGeoIndexer f = new GeoMesaGeoIndexer();
-        f.setConf(conf);
-
-        ValueFactory vf = new ValueFactoryImpl();
-        Resource subject = vf.createURI("foo:subj");
-        URI predicate = GeoConstants.GEO_AS_WKT;
-        Value object = vf.createLiteral("Point(-77.03524 38.889468)", GeoConstants.XMLSCHEMA_OGC_WKT);
-        Resource context = vf.createURI("foo:context");
-
-        Statement statement = new ContextStatementImpl(subject, predicate, object, context);
-        f.storeStatement(convertStatement(statement));
-        f.flush();
-
-        double[] IN = { -78, 39, -77, 39, -77, 38, -78, 38, -78, 39 };
-        LinearRing r1 = gf.createLinearRing(new PackedCoordinateSequence.Double(IN, 2));
-        Polygon p1 = gf.createPolygon(r1, new LinearRing[] {});
-
-        // query with correct Predicate
-        Assert.assertEquals(Sets.newHashSet(statement),
-                getSet(f.queryWithin(p1, new StatementContraints().setPredicates(Collections.singleton(predicate)))));
-
-        // query with wrong predicate
-        Assert.assertEquals(Sets.newHashSet(),
-                getSet(f.queryWithin(p1, new StatementContraints().setPredicates(Collections.singleton(vf.createURI("other:pred"))))));
-
-        f.close();
+        try (GeoMesaGeoIndexer f = new GeoMesaGeoIndexer()) {
+            f.setConf(conf);
+
+            ValueFactory vf = new ValueFactoryImpl();
+            Resource subject = vf.createURI("foo:subj");
+            URI predicate = GeoConstants.GEO_AS_WKT;
+            Value object = vf.createLiteral("Point(-77.03524 38.889468)", GeoConstants.XMLSCHEMA_OGC_WKT);
+            Resource context = vf.createURI("foo:context");
+
+            Statement statement = new ContextStatementImpl(subject, predicate, object, context);
+            f.storeStatement(convertStatement(statement));
+            f.flush();
+
+            double[] IN = { -78, 39, -77, 39, -77, 38, -78, 38, -78, 39 };
+            LinearRing r1 = gf.createLinearRing(new PackedCoordinateSequence.Double(IN, 2));
+            Polygon p1 = gf.createPolygon(r1, new LinearRing[] {});
+
+            // query with correct Predicate
+            Assert.assertEquals(Sets.newHashSet(statement),
+                    getSet(f.queryWithin(p1, new StatementContraints().setPredicates(Collections.singleton(predicate)))));
+
+            // query with wrong predicate
+            Assert.assertEquals(Sets.newHashSet(),
+                    getSet(f.queryWithin(p1, new StatementContraints().setPredicates(Collections.singleton(vf.createURI("other:pred"))))));
+        }
     }
 
     // @Test
     public void testAntiMeridianSearch() throws Exception {
         // verify that a search works if the bounding box crosses the anti-meridian
-        GeoMesaGeoIndexer f = new GeoMesaGeoIndexer();
-        f.setConf(conf);
+        try (GeoMesaGeoIndexer f = new GeoMesaGeoIndexer()) {
+            f.setConf(conf);
 
-        ValueFactory vf = new ValueFactoryImpl();
-        Resource context = vf.createURI("foo:context");
+            ValueFactory vf = new ValueFactoryImpl();
+            Resource context = vf.createURI("foo:context");
 
-        Resource subjectEast = vf.createURI("foo:subj:east");
-        URI predicateEast = GeoConstants.GEO_AS_WKT;
-        Value objectEast = vf.createLiteral("Point(179 0)", GeoConstants.XMLSCHEMA_OGC_WKT);
-        Statement statementEast = new ContextStatementImpl(subjectEast, predicateEast, objectEast, context);
-        f.storeStatement(convertStatement(statementEast));
+            Resource subjectEast = vf.createURI("foo:subj:east");
+            URI predicateEast = GeoConstants.GEO_AS_WKT;
+            Value objectEast = vf.createLiteral("Point(179 0)", GeoConstants.XMLSCHEMA_OGC_WKT);
+            Statement statementEast = new ContextStatementImpl(subjectEast, predicateEast, objectEast, context);
+            f.storeStatement(convertStatement(statementEast));
 
-        Resource subjectWest = vf.createURI("foo:subj:west");
-        URI predicateWest = GeoConstants.GEO_AS_WKT;
-        Value objectWest = vf.createLiteral("Point(-179 0)", GeoConstants.XMLSCHEMA_OGC_WKT);
-        Statement statementWest = new ContextStatementImpl(subjectWest, predicateWest, objectWest, context);
-        f.storeStatement(convertStatement(statementWest));
+            Resource subjectWest = vf.createURI("foo:subj:west");
+            URI predicateWest = GeoConstants.GEO_AS_WKT;
+            Value objectWest = vf.createLiteral("Point(-179 0)", GeoConstants.XMLSCHEMA_OGC_WKT);
+            Statement statementWest = new ContextStatementImpl(subjectWest, predicateWest, objectWest, context);
+            f.storeStatement(convertStatement(statementWest));
 
-        f.flush();
+            f.flush();
 
-        double[] ONE = { 178.1, 1, -178, 1, -178, -1, 178.1, -1, 178.1, 1 };
+            double[] ONE = { 178.1, 1, -178, 1, -178, -1, 178.1, -1, 178.1, 1 };
 
-        LinearRing r1 = gf.createLinearRing(new PackedCoordinateSequence.Double(ONE, 2));
+            LinearRing r1 = gf.createLinearRing(new PackedCoordinateSequence.Double(ONE, 2));
 
-        Polygon p1 = gf.createPolygon(r1, new LinearRing[] {});
+            Polygon p1 = gf.createPolygon(r1, new LinearRing[] {});
 
-        Assert.assertEquals(Sets.newHashSet(statementEast, statementWest), getSet(f.queryWithin(p1, EMPTY_CONSTRAINTS)));
-
-        f.close();
+            Assert.assertEquals(Sets.newHashSet(statementEast, statementWest), getSet(f.queryWithin(p1, EMPTY_CONSTRAINTS)));
+        }
     }
 }
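The hunks above convert the GeoMesaGeoIndexer usage to try-with-resources, so the indexer is closed even when a query or assertion throws, replacing the explicit f.close() calls. A minimal sketch of the pattern, with HypotheticalIndexer standing in for the real indexer (the name is illustrative, not part of Rya):

    // HypotheticalIndexer is illustrative only; any AutoCloseable works here.
    class HypotheticalIndexer implements AutoCloseable {
        void store(String statement) { /* write to the index */ }
        @Override
        public void close() { /* flush buffers, release connections */ }
    }

    class TryWithResourcesDemo {
        void run() {
            // close() is invoked automatically, even if store(...) throws.
            try (HypotheticalIndexer indexer = new HypotheticalIndexer()) {
                indexer.store("subject predicate object");
            }
        }
    }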

http://git-wip-us.apache.org/repos/asf/incubator-rya/blob/e5e227c1/extras/indexing/src/test/java/mvm/rya/indexing/accumulo/temporal/AccumuloTemporalIndexerTest.java
----------------------------------------------------------------------
diff --git a/extras/indexing/src/test/java/mvm/rya/indexing/accumulo/temporal/AccumuloTemporalIndexerTest.java b/extras/indexing/src/test/java/mvm/rya/indexing/accumulo/temporal/AccumuloTemporalIndexerTest.java
index 60d237d..1c6628f 100644
--- a/extras/indexing/src/test/java/mvm/rya/indexing/accumulo/temporal/AccumuloTemporalIndexerTest.java
+++ b/extras/indexing/src/test/java/mvm/rya/indexing/accumulo/temporal/AccumuloTemporalIndexerTest.java
@@ -1,6 +1,3 @@
-/**
- * 
- */
 package mvm.rya.indexing.accumulo.temporal;
 
 /*
@@ -26,7 +23,6 @@ package mvm.rya.indexing.accumulo.temporal;
 import static mvm.rya.api.resolver.RdfToRyaConversions.convertStatement;
 import static org.junit.Assert.assertNotNull;
 import static org.junit.Assert.assertTrue;
-import info.aduna.iteration.CloseableIteration;
 
 import java.io.IOException;
 import java.io.PrintStream;
@@ -42,15 +38,6 @@ import java.util.Map.Entry;
 import java.util.Set;
 import java.util.concurrent.atomic.AtomicLong;
 
-import junit.framework.Assert;
-import mvm.rya.api.domain.RyaStatement;
-import mvm.rya.indexing.StatementContraints;
-import mvm.rya.indexing.TemporalInstant;
-import mvm.rya.indexing.TemporalInterval;
-import mvm.rya.indexing.accumulo.ConfigUtils;
-import mvm.rya.indexing.accumulo.StatementSerializer;
-
-import org.apache.accumulo.core.Constants;
 import org.apache.accumulo.core.client.AccumuloException;
 import org.apache.accumulo.core.client.AccumuloSecurityException;
 import org.apache.accumulo.core.client.Scanner;
@@ -59,6 +46,7 @@ import org.apache.accumulo.core.client.TableNotFoundException;
 import org.apache.accumulo.core.client.admin.TableOperations;
 import org.apache.accumulo.core.data.Key;
 import org.apache.accumulo.core.data.Value;
+import org.apache.accumulo.core.security.Authorizations;
 import org.apache.commons.codec.binary.StringUtils;
 import org.apache.commons.io.output.NullOutputStream;
 import org.apache.hadoop.conf.Configuration;
@@ -78,25 +66,34 @@ import org.openrdf.query.QueryEvaluationException;
 
 import com.beust.jcommander.internal.Lists;
 
+import info.aduna.iteration.CloseableIteration;
+import junit.framework.Assert;
+import mvm.rya.api.domain.RyaStatement;
+import mvm.rya.indexing.StatementContraints;
+import mvm.rya.indexing.TemporalInstant;
+import mvm.rya.indexing.TemporalInterval;
+import mvm.rya.indexing.accumulo.ConfigUtils;
+import mvm.rya.indexing.accumulo.StatementSerializer;
+
 /**
 * JUnit tests for TemporalIndexer and its implementation AccumuloTemporalIndexer
- * 
+ *
  * If you enjoy this test, please read RyaTemporalIndexerTest and YagoKBTest, which contain
 * many example SPARQL queries and updates, and attempt to test independently of Accumulo:
- * 
+ *
  *     extras/indexingSail/src/test/java/mvm/rya/indexing/accumulo/RyaTemporalIndexerTest.java
  *     {@link mvm.rya.indexing.accumulo.RyaTemporalIndexerTest}
 *     {@link mvm.rya.indexing.accumulo.YagoKBTest}
- *     
+ *
 * Remember, this class is instantiated fresh for each @Test method,
 * so fields are reset unless they are static.
- * 
+ *
  * These are covered:
- *   Instance {before, equals, after} given Instance 
+ *   Instance {before, equals, after} given Instance
  *   Instance {before, after, inside} given Interval
  *   Instance {hasBeginning, hasEnd} given Interval
  * And a few more.
- * 
+ *
  */
 public final class AccumuloTemporalIndexerTest {
     // Configuration properties, this is reset per test in setup.
@@ -268,7 +265,7 @@ public final class AccumuloTemporalIndexerTest {
 
     /**
      * Test method for {@link AccumuloTemporalIndexer#TemporalIndexerImpl(org.apache.hadoop.conf.Configuration)} .
-     * 
+     *
      * @throws TableExistsException
      * @throws TableNotFoundException
      * @throws AccumuloSecurityException
@@ -283,7 +280,7 @@ public final class AccumuloTemporalIndexerTest {
 
     /**
     * Test method for {@link AccumuloTemporalIndexer#storeStatement(mvm.rya.api.domain.RyaStatement)}
-     * 
+     *
      * @throws NoSuchAlgorithmException
      */
     @Test
@@ -339,7 +336,41 @@ public final class AccumuloTemporalIndexerTest {
         Assert.assertEquals("Number of rows stored.", rowsStoredExpected*4, rowsStoredActual); // 4 index entries per statement
 
     }
-    
+
+    @Test
+    public void testDelete() throws IOException, AccumuloException, AccumuloSecurityException, TableNotFoundException, TableExistsException, NoSuchAlgorithmException {
+        // count rows expected to store:
+        int rowsStoredExpected = 0;
+
+        ValueFactory vf = new ValueFactoryImpl();
+
+        URI pred1_atTime = vf.createURI(URI_PROPERTY_AT_TIME);
+        URI pred2_circa = vf.createURI(URI_PROPERTY_CIRCA);
+
+        final String testDate2014InBRST = "2014-12-31T23:59:59-02:00";
+        final String testDate2016InET = "2016-12-31T20:59:59-05:00";
+
+        // These should be stored because they are in the predicate list.
+        // BUT they will get converted to the same exact datetime in UTC.
+        Statement s1 = new StatementImpl(vf.createURI("foo:subj3"), pred1_atTime, vf.createLiteral(testDate2014InBRST));
+        Statement s2 = new StatementImpl(vf.createURI("foo:subj4"), pred2_circa, vf.createLiteral(testDate2016InET));
+        tIndexer.storeStatement(convertStatement(s1));
+        rowsStoredExpected++;
+        tIndexer.storeStatement(convertStatement(s2));
+        rowsStoredExpected++;
+
+        tIndexer.flush();
+
+        int rowsStoredActual = printTables("junit testing: Temporal entities stored in testDelete before delete", System.out, null);
+        Assert.assertEquals("Number of rows stored.", rowsStoredExpected*4, rowsStoredActual); // 4 index entries per statement
+
+        tIndexer.deleteStatement(convertStatement(s1));
+        tIndexer.deleteStatement(convertStatement(s2));
+
+        int afterDeleteRowsStoredActual = printTables("junit testing: Temporal entities stored in testDelete after delete", System.out, null);
+        Assert.assertEquals("Number of rows stored after delete.", 0, afterDeleteRowsStoredActual);
+    }
+
     @Test
     public void testStoreStatementWithInterestingLiterals() throws Exception {
         ValueFactory vf = new ValueFactoryImpl();
@@ -356,36 +387,35 @@ public final class AccumuloTemporalIndexerTest {
     }
 
     /**
-	     * Test method for {@link AccumuloTemporalIndexer#storeStatement(convertStatement(org.openrdf.model.Statement)}
-	     * 
-	     * @throws NoSuchAlgorithmException
-	     */
-	    @Test
-	    public void testStoreStatementBadInterval() throws IOException, AccumuloException, AccumuloSecurityException, TableNotFoundException, TableExistsException, NoSuchAlgorithmException {
-	        // count rows expected to store:
-	        int rowsStoredExpected = 0;
-	
-	        ValueFactory vf = new ValueFactoryImpl();
-	        URI pred1_atTime = vf.createURI(URI_PROPERTY_AT_TIME);
-	
-	        // Test: Should not store an improper date interval, and log a warning (log warning not tested).
-	        final String invalidDateIntervalString="[bad,interval]";
-			// Silently logs a warning for bad dates.
-            tIndexer.storeStatement(convertStatement(new StatementImpl(vf.createURI("foo:subj1"), pred1_atTime, vf.createLiteral(invalidDateIntervalString))));
-
-	        final String validDateIntervalString="[2016-12-31T20:59:59-05:00,2016-12-31T21:00:00-05:00]";
-            tIndexer.storeStatement(convertStatement(new StatementImpl(vf.createURI("foo:subj2"), pred1_atTime, vf.createLiteral(validDateIntervalString))));
-            rowsStoredExpected++;
-            
-	        tIndexer.flush();
-	
-	        int rowsStoredActual = printTables("junit testing: Temporal intervals stored in testStoreStatement", null, null);
-	        Assert.assertEquals("Only good intervals should be stored.", rowsStoredExpected*2, rowsStoredActual); // 2 index entries per interval statement
-	    }
-
-	@Test
-    public void testStoreStatementsSameTime() throws IOException, NoSuchAlgorithmException, AccumuloException, AccumuloSecurityException, TableNotFoundException, TableExistsException
-    {
+     * Test method for {@link AccumuloTemporalIndexer#storeStatement(mvm.rya.api.domain.RyaStatement)}
+     *
+     * @throws NoSuchAlgorithmException
+     */
+    @Test
+    public void testStoreStatementBadInterval() throws IOException, AccumuloException, AccumuloSecurityException, TableNotFoundException, TableExistsException, NoSuchAlgorithmException {
+        // count rows expected to store:
+        int rowsStoredExpected = 0;
+
+        ValueFactory vf = new ValueFactoryImpl();
+        URI pred1_atTime = vf.createURI(URI_PROPERTY_AT_TIME);
+
+        // Test: Should not store an improper date interval, and log a warning (log warning not tested).
+        final String invalidDateIntervalString="[bad,interval]";
+        // Silently logs a warning for bad dates.
+        tIndexer.storeStatement(convertStatement(new StatementImpl(vf.createURI("foo:subj1"), pred1_atTime, vf.createLiteral(invalidDateIntervalString))));
+
+        final String validDateIntervalString="[2016-12-31T20:59:59-05:00,2016-12-31T21:00:00-05:00]";
+        tIndexer.storeStatement(convertStatement(new StatementImpl(vf.createURI("foo:subj2"), pred1_atTime, vf.createLiteral(validDateIntervalString))));
+        rowsStoredExpected++;
+
+        tIndexer.flush();
+
+        int rowsStoredActual = printTables("junit testing: Temporal intervals stored in testStoreStatement", null, null);
+        Assert.assertEquals("Only good intervals should be stored.", rowsStoredExpected*2, rowsStoredActual); // 2 index entries per interval statement
+    }
+
+    @Test
+    public void testStoreStatementsSameTime() throws IOException, NoSuchAlgorithmException, AccumuloException, AccumuloSecurityException, TableNotFoundException, TableExistsException {
         ValueFactory vf = new ValueFactoryImpl();
         URI pred1_atTime = vf.createURI(URI_PROPERTY_AT_TIME);
         URI pred2_circa = vf.createURI(URI_PROPERTY_CIRCA);
@@ -416,7 +446,7 @@ public final class AccumuloTemporalIndexerTest {
 
     /**
      * Test method for {@link AccumuloTemporalIndexer#storeStatements(java.util.Collection)} .
-     * 
+     *
      * @throws TableExistsException
      * @throws TableNotFoundException
      * @throws AccumuloSecurityException
@@ -482,9 +512,9 @@ public final class AccumuloTemporalIndexerTest {
     /**
      * Test instant equal to a given instant.
      * From the series: instant {equal, before, after} instant
-     * @throws AccumuloSecurityException 
-     * @throws AccumuloException 
-     * @throws TableNotFoundException 
+     * @throws AccumuloSecurityException
+     * @throws AccumuloException
+     * @throws TableNotFoundException
      */
     @Test
     public void testQueryInstantEqualsInstant() throws IOException, QueryEvaluationException, TableNotFoundException, AccumuloException, AccumuloSecurityException {
@@ -525,9 +555,9 @@ public final class AccumuloTemporalIndexerTest {
     /**
      * Test instant after a given instant.
      * From the series: instant {equal, before, after} instant
-     * @throws AccumuloSecurityException 
-     * @throws AccumuloException 
-     * @throws TableNotFoundException 
+     * @throws AccumuloSecurityException
+     * @throws AccumuloException
+     * @throws TableNotFoundException
      */
     @Test
     public void testQueryInstantAfterInstant() throws IOException, QueryEvaluationException, TableNotFoundException, AccumuloException, AccumuloSecurityException {
@@ -579,7 +609,7 @@ public final class AccumuloTemporalIndexerTest {
         }
         tIndexer.flush();
         CloseableIteration<Statement, QueryEvaluationException> iter;
-        
+
         iter = tIndexer.queryInstantBeforeInstant(seriesTs[searchForSeconds], EMPTY_CONSTRAINTS);
         int count = 0;
         while (iter.hasNext()) {
@@ -763,9 +793,9 @@ public final class AccumuloTemporalIndexerTest {
      * Test method for
      * {@link mvm.rya.indexing.accumulo.temporal.AccumuloTemporalIndexer#queryIntervalEquals(TemporalInterval, StatementContraints)}
      * .
-     * @throws IOException 
-     * @throws QueryEvaluationException 
-     * 
+     * @throws IOException
+     * @throws QueryEvaluationException
+     *
      */
     @Test
     public void testQueryIntervalEquals() throws IOException, QueryEvaluationException {
@@ -789,9 +819,9 @@ public final class AccumuloTemporalIndexerTest {
     /**
      * Test interval before a given interval, for method:
      * {@link AccumuloTemporalIndexer#queryIntervalBefore(TemporalInterval, StatementContraints)}.
-     * 
-     * @throws IOException 
-     * @throws QueryEvaluationException 
+     *
+     * @throws IOException
+     * @throws QueryEvaluationException
      */
     @Test
     public void testQueryIntervalBefore() throws IOException, QueryEvaluationException {
@@ -804,7 +834,7 @@ public final class AccumuloTemporalIndexerTest {
         // instants should be ignored.
         tIndexer.storeStatement(convertStatement(spo_B30_E32));
         tIndexer.storeStatement(convertStatement(seriesSpo[1])); // instance at 1 seconds
-        tIndexer.storeStatement(convertStatement(seriesSpo[2])); 
+        tIndexer.storeStatement(convertStatement(seriesSpo[2]));
         tIndexer.storeStatement(convertStatement(seriesSpo[31]));
         tIndexer.flush();
 
@@ -819,9 +849,9 @@ public final class AccumuloTemporalIndexerTest {
     /**
      * interval is after the given interval.  Find interval beginnings after the endings of the given interval.
     * {@link AccumuloTemporalIndexer#queryIntervalAfter(TemporalInterval, StatementContraints)}.
-     * 
-     * @throws IOException 
-     * @throws QueryEvaluationException 
+     *
+     * @throws IOException
+     * @throws QueryEvaluationException
      */
     @Test
     public void testQueryIntervalAfter() throws IOException, QueryEvaluationException {
@@ -837,7 +867,7 @@ public final class AccumuloTemporalIndexerTest {
         // instants should be ignored.
         tIndexer.storeStatement(convertStatement(spo_B02));
         tIndexer.storeStatement(convertStatement(seriesSpo[1])); // instance at 1 seconds
-        tIndexer.storeStatement(convertStatement(seriesSpo[2])); 
+        tIndexer.storeStatement(convertStatement(seriesSpo[2]));
         tIndexer.storeStatement(convertStatement(seriesSpo[31]));
         tIndexer.flush();
 
@@ -874,7 +904,7 @@ public final class AccumuloTemporalIndexerTest {
         URI pred3_CIRCA_ = vf.createURI(URI_PROPERTY_CIRCA);  // this one to ignore.
         URI pred2_eventTime = vf.createURI(URI_PROPERTY_EVENT_TIME);
         URI pred1_atTime = vf.createURI(URI_PROPERTY_AT_TIME);
-        
+
         // add the predicate = EventTime ; Store in an array for verification.
         Statement[] SeriesTs_EventTime = new Statement[expectedResultCount+1];
         for (int s = 0; s <= searchForSeconds + expectedResultCount; s++) { // <== logic here
@@ -892,7 +922,7 @@ public final class AccumuloTemporalIndexerTest {
         CloseableIteration<Statement, QueryEvaluationException> iter;
         StatementContraints constraints = new StatementContraints();
         constraints.setPredicates(new HashSet<URI>(Arrays.asList( pred2_eventTime,  pred1_atTime )));
-         
+
         iter = tIndexer.queryInstantAfterInstant(seriesTs[searchForSeconds], constraints); // EMPTY_CONSTRAINTS);//
         int count_AtTime = 0;
         int count_EventTime = 0;
@@ -910,17 +940,17 @@ public final class AccumuloTemporalIndexerTest {
             } else {
                 assertTrue("This predicate should not be returned: "+s, false);
             }
-                
+
         }
-        
+
         Assert.assertEquals("Should find count of atTime    rows.", expectedResultCount, count_AtTime);
         Assert.assertEquals("Should find count of eventTime rows.", expectedResultCount, count_EventTime);
     }
 
-    
+
     /**
      * Test method for {@link AccumuloTemporalIndexer#getIndexablePredicates()} .
-     * 
+     *
      * @throws TableExistsException
      * @throws TableNotFoundException
      * @throws AccumuloSecurityException
@@ -936,7 +966,7 @@ public final class AccumuloTemporalIndexerTest {
     /**
      * Count all the entries in the temporal index table, return the count.
      * Uses printTables for reliability.
-     * 
+     *
      */
     public int countAllRowsInTable() throws AccumuloException, AccumuloSecurityException, TableNotFoundException, NoSuchAlgorithmException {
         return printTables("Counting rows.", null, null);
@@ -944,7 +974,7 @@ public final class AccumuloTemporalIndexerTest {
 
     /**
      * Print and gather statistics on the entire index table.
-     * 
+     *
      * @param description
      *            Printed to the console to find the test case.
      * @param out
@@ -967,19 +997,18 @@ public final class AccumuloTemporalIndexerTest {
         out.println("Reading : " + this.uniquePerTestTemporalIndexTableName);
         out.format(FORMAT, "--Row--", "--ColumnFamily--", "--ColumnQualifier--", "--Value--");
 
-        Scanner s = ConfigUtils.getConnector(conf).createScanner(this.uniquePerTestTemporalIndexTableName, Constants.NO_AUTHS);
+        Scanner s = ConfigUtils.getConnector(conf).createScanner(this.uniquePerTestTemporalIndexTableName, Authorizations.EMPTY);
         for (Entry<Key, org.apache.accumulo.core.data.Value> entry : s) {
             rowsPrinted++;
             Key k = entry.getKey();
-            out.format(FORMAT, toHumanString(k.getRow()), 
-            		toHumanString(k.getColumnFamily()), 
-            		toHumanString(k.getColumnQualifier()), 
-            		toHumanString(entry.getValue()));
+            out.format(FORMAT, toHumanString(k.getRow()),
+                    toHumanString(k.getColumnFamily()),
+                    toHumanString(k.getColumnQualifier()),
+                    toHumanString(entry.getValue()));
             keyHasher = hasher(keyHasher, (StringUtils.getBytesUtf8(entry.getKey().toStringNoTime())));
             valueHasher = hasher(valueHasher, (entry.getValue().get()));
         }
         out.println();
-        // }
 
         if (statistics != null) {
             statistics.put(STAT_COUNT, (long) rowsPrinted);
@@ -994,7 +1023,7 @@ public final class AccumuloTemporalIndexerTest {
     /**
      * Order independent hashcode.
      * Read more: http://stackoverflow.com/questions/18021643/hashing-a-set-of-integers-in-an-order-independent-way
-     * 
+     *
      * @param hashcode
      * @param list
      * @return
@@ -1013,28 +1042,28 @@ public final class AccumuloTemporalIndexerTest {
      * @param value
      * @return Human readable representation.
      */
-	static String toHumanString(Value value) {
-		return toHumanString(value==null?null:value.get());
-	}
-	static String toHumanString(Text text) {
-		return toHumanString(text==null?null:text.copyBytes());
-	}
-	static String toHumanString(byte[] bytes) {
-		if (bytes==null) 
-			return "{null}";
-		StringBuilder sb = new StringBuilder();
-		for (byte b : bytes) {
-			if ((b > 0x7e) || (b < 32)) {
-				sb.append("{");
-				sb.append(Integer.toHexString( b & 0xff )); // Lop off the sign extended ones.
-				sb.append("}");
-			} else if (b == '{'||b == '}') { // Escape the literal braces.
-				sb.append("{");
-				sb.append((char)b);
-				sb.append("}");
-			} else
-				sb.append((char)b);
-		}
-		return sb.toString();
-	}
+    static String toHumanString(Value value) {
+        return toHumanString(value==null?null:value.get());
+    }
+    static String toHumanString(Text text) {
+        return toHumanString(text==null?null:text.copyBytes());
+    }
+    static String toHumanString(byte[] bytes) {
+        if (bytes==null)
+            return "{null}";
+        StringBuilder sb = new StringBuilder();
+        for (byte b : bytes) {
+            if ((b > 0x7e) || (b < 32)) {
+                sb.append("{");
+                sb.append(Integer.toHexString( b & 0xff )); // Lop off the sign extended ones.
+                sb.append("}");
+            } else if (b == '{'||b == '}') { // Escape the literal braces.
+                sb.append("{");
+                sb.append((char)b);
+                sb.append("}");
+            } else
+                sb.append((char)b);
+        }
+        return sb.toString();
+    }
 }
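The printTables helper above accumulates an order-independent hash over all keys and values (see the "Order independent hashcode" javadoc and the linked Stack Overflow discussion). The hasher implementation itself sits outside these hunks, so the sketch below is only one plausible realization, assuming the commutative-combination technique the javadoc describes:

    import java.util.Arrays;

    final class OrderIndependentHash {
        // Combining per-element hashes with addition (a commutative op)
        // makes the aggregate independent of iteration order.
        static int hasher(int hashcode, byte[] element) {
            return hashcode + Arrays.hashCode(element);
        }

        public static void main(String[] args) {
            int h1 = hasher(hasher(0, "a".getBytes()), "b".getBytes());
            int h2 = hasher(hasher(0, "b".getBytes()), "a".getBytes());
            System.out.println(h1 == h2); // true: same elements, either order
        }
    }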

http://git-wip-us.apache.org/repos/asf/incubator-rya/blob/e5e227c1/extras/indexingExample/src/main/java/EntityDirectExample.java
----------------------------------------------------------------------
diff --git a/extras/indexingExample/src/main/java/EntityDirectExample.java b/extras/indexingExample/src/main/java/EntityDirectExample.java
index ae83520..04b4f74 100644
--- a/extras/indexingExample/src/main/java/EntityDirectExample.java
+++ b/extras/indexingExample/src/main/java/EntityDirectExample.java
@@ -21,11 +21,6 @@
 
 import java.util.List;
 
-import mvm.rya.accumulo.AccumuloRdfConfiguration;
-import mvm.rya.api.RdfCloudTripleStoreConfiguration;
-import mvm.rya.indexing.RyaSailFactory;
-import mvm.rya.indexing.accumulo.ConfigUtils;
-
 import org.apache.accumulo.core.client.AccumuloException;
 import org.apache.accumulo.core.client.AccumuloSecurityException;
 import org.apache.accumulo.core.client.TableNotFoundException;
@@ -47,6 +42,11 @@ import org.openrdf.repository.sail.SailRepository;
 import org.openrdf.repository.sail.SailRepositoryConnection;
 import org.openrdf.sail.Sail;
 
+import mvm.rya.accumulo.AccumuloRdfConfiguration;
+import mvm.rya.api.RdfCloudTripleStoreConfiguration;
+import mvm.rya.indexing.RyaSailFactory;
+import mvm.rya.indexing.accumulo.ConfigUtils;
+
 public class EntityDirectExample {
     private static final Logger log = Logger.getLogger(EntityDirectExample.class);
 
@@ -59,18 +59,18 @@ public class EntityDirectExample {
     private static final String INSTANCE = "instance";
     private static final String RYA_TABLE_PREFIX = "x_test_triplestore_";
     private static final String AUTHS = "U";
-    
+
     public static void main(String[] args) throws Exception {
         Configuration conf = getConf();
         conf.setBoolean(ConfigUtils.DISPLAY_QUERY_PLAN, PRINT_QUERIES);
-        
+
         log.info("Creating the tables as root.");
         SailRepository repository = null;
         SailRepositoryConnection conn = null;
-      
+
         try {
             log.info("Connecting to Indexing Sail Repository.");
-            
+
             Sail extSail = RyaSailFactory.getInstance(conf);
             repository = new SailRepository(extSail);
             repository.initialize();
@@ -80,7 +80,7 @@ public class EntityDirectExample {
             testAddAndDelete(conn);
             log.info("Running SAIL/SPARQL Example: Add and Temporal Search");
             testAddAndTemporalSearchWithPCJ(conn);
-            
+
         } finally {
             log.info("Shutting down");
             closeQuietly(conn);
@@ -108,10 +108,10 @@ public class EntityDirectExample {
         }
     }
 
-    
 
 
-   
+
+
     public static void testAddAndDelete(SailRepositoryConnection conn) throws MalformedQueryException,
             RepositoryException, UpdateExecutionException, QueryEvaluationException, TupleQueryResultHandlerException,
             AccumuloException, AccumuloSecurityException, TableNotFoundException {
@@ -127,7 +127,7 @@ public class EntityDirectExample {
 
         Update update = conn.prepareUpdate(QueryLanguage.SPARQL, query);
         update.execute();
-        
+
         query = "select ?x {GRAPH <http://updated/test> {?x <http://acme.com/actions/likes> \"A new book\" . "//
                 + " ?x <http://acme.com/actions/likes> \"Avocados\" }}";
         CountingResultHandler resultHandler = new CountingResultHandler();
@@ -138,29 +138,28 @@ public class EntityDirectExample {
         Validate.isTrue(resultHandler.getCount() == 1);
         resultHandler.resetCount();
 
-        //TODO delete currently not implemented in AccumuloRyaDAO for 
-//        // Delete Data
-//        query = "DELETE DATA\n" //
-//                + "{ GRAPH <http://updated/test> {\n"
-//                + "  <http://acme.com/people/Mike> <http://acme.com/actions/likes> \"A new book\" ;\n"
-//                + "   <http://acme.com/actions/likes> \"Avocados\" .\n" + "}}";
-//
-//        update = conn.prepareUpdate(QueryLanguage.SPARQL, query);
-//        update.execute();
-//
-//        query = "select ?x {GRAPH <http://updated/test> {?x <http://acme.com/actions/likes> \"A new book\" . "//
-//                + " ?x <http://acme.com/actions/likes> \"Avocados\" }}";
-//        tupleQuery = conn.prepareTupleQuery(QueryLanguage.SPARQL, query);
-//        tupleQuery.evaluate(resultHandler);
-//        log.info("Result count : " + resultHandler.getCount());
-//
-//        Validate.isTrue(resultHandler.getCount() == 0);
+        // Delete Data
+        query = "DELETE DATA\n" //
+                + "{ GRAPH <http://updated/test> {\n"
+                + "  <http://acme.com/people/Mike> <http://acme.com/actions/likes> \"A new book\" ;\n"
+                + "   <http://acme.com/actions/likes> \"Avocados\" .\n" + "}}";
+
+        update = conn.prepareUpdate(QueryLanguage.SPARQL, query);
+        update.execute();
+
+        query = "select ?x {GRAPH <http://updated/test> {?x <http://acme.com/actions/likes> \"A new book\" . "//
+                + " ?x <http://acme.com/actions/likes> \"Avocados\" }}";
+        tupleQuery = conn.prepareTupleQuery(QueryLanguage.SPARQL, query);
+        tupleQuery.evaluate(resultHandler);
+        log.info("Result count : " + resultHandler.getCount());
+
+        Validate.isTrue(resultHandler.getCount() == 0);
     }
-    
-    
 
-    
-    
+
+
+
+
     private static void testAddAndTemporalSearchWithPCJ(SailRepositoryConnection conn) throws Exception {
 
         // create some resources and literals to make statements out of
@@ -178,7 +177,7 @@ public class EntityDirectExample {
 
         Update update = conn.prepareUpdate(QueryLanguage.SPARQL, sparqlInsert);
         update.execute();
-        
+
         String queryString = "PREFIX pref: <http://www.model/pref#> \n" //
                 + "SELECT ?x ?z \n" //
                 + "WHERE { \n"
@@ -187,8 +186,8 @@ public class EntityDirectExample {
                 + "  ?x pref:hasProperty2 'property2' . \n"//
                 + "  ?x pref:hasProperty3 'property3' . \n"//
                 + "}";//
-       
-        
+
+
 
         TupleQuery tupleQuery = conn.prepareTupleQuery(QueryLanguage.SPARQL, queryString);
         CountingResultHandler tupleHandler = new CountingResultHandler();
@@ -196,7 +195,7 @@ public class EntityDirectExample {
         log.info("Result count : " + tupleHandler.getCount());
         Validate.isTrue(tupleHandler.getCount() == 1);
         Validate.isTrue(tupleHandler.getBsSize() == 2);
-        
+
         queryString = "PREFIX pref: <http://www.model/pref#> \n" //
                 + "SELECT ?x ?w ?z \n" //
                 + "WHERE { \n"
@@ -204,29 +203,29 @@ public class EntityDirectExample {
                 + "  ?x pref:hasProperty4 'property4' . \n"//
                 + "  ?x pref:hasProperty5 ?w . \n"//
                 + "}";//
-       
-        
+
+
         tupleQuery = conn.prepareTupleQuery(QueryLanguage.SPARQL, queryString);
         tupleHandler = new CountingResultHandler();
         tupleQuery.evaluate(tupleHandler);
         log.info("Result count : " + tupleHandler.getCount());
         Validate.isTrue(tupleHandler.getCount() == 1);
         Validate.isTrue(tupleHandler.getBsSize() == 3);
-        
-        
-        queryString = "PREFIX pref: <http://www.model/pref#> " 
-                + "SELECT ?v ?w ?x ?y ?z " 
-                + "WHERE { " 
-                + "  ?w a ?z  . " 
-                + "  ?w pref:hasProperty1 ?v . " 
-                + "  ?w pref:hasProperty2 'property2' . " 
-                + "  ?w pref:hasProperty3 'property3' . " 
+
+
+        queryString = "PREFIX pref: <http://www.model/pref#> "
+                + "SELECT ?v ?w ?x ?y ?z "
+                + "WHERE { "
+                + "  ?w a ?z  . "
+                + "  ?w pref:hasProperty1 ?v . "
+                + "  ?w pref:hasProperty2 'property2' . "
+                + "  ?w pref:hasProperty3 'property3' . "
                 + "  ?x a ?z  . "
-                + "  ?x pref:hasProperty4 'property4' . " 
-                + "  ?x pref:hasProperty5 ?y . " 
+                + "  ?x pref:hasProperty4 'property4' . "
+                + "  ?x pref:hasProperty5 ?y . "
                 + "}";
-       
-        
+
+
 
         tupleQuery = conn.prepareTupleQuery(QueryLanguage.SPARQL, queryString);
         tupleHandler = new CountingResultHandler();
@@ -234,10 +233,10 @@ public class EntityDirectExample {
         log.info("Result count : " + tupleHandler.getCount());
         Validate.isTrue(tupleHandler.getCount() == 1);
         Validate.isTrue(tupleHandler.getBsSize() == 5);
-        
+
     }
-    
-    
+
+
     private static Configuration getConf() {
 
         AccumuloRdfConfiguration conf = new AccumuloRdfConfiguration();
@@ -254,7 +253,7 @@ public class EntityDirectExample {
 
         return conf;
     }
-    
+
 
     private static class CountingResultHandler implements TupleQueryResultHandler {
         private int count = 0;
@@ -264,11 +263,11 @@ public class EntityDirectExample {
         public int getCount() {
             return count;
         }
-        
+
         public int getBsSize() {
             return bindingSize;
         }
-        
+
         public void resetBsSize() {
             bindingSize = 0;
             bsSizeSet = false;
@@ -298,14 +297,10 @@ public class EntityDirectExample {
 
         @Override
         public void handleBoolean(boolean arg0) throws QueryResultHandlerException {
-          // TODO Auto-generated method stub
-          
         }
 
         @Override
         public void handleLinks(List<String> arg0) throws QueryResultHandlerException {
-          // TODO Auto-generated method stub
-          
         }
     }
 }
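The change above drops the commented-out block (delete was previously unimplemented in AccumuloRyaDAO) and runs a live SPARQL DELETE DATA update. A condensed sketch of that flow, using the same Sesame calls that appear in the diff; the connection is assumed to come from RyaSailFactory.getInstance(conf) as in the surrounding example:

    import org.openrdf.query.QueryLanguage;
    import org.openrdf.query.Update;
    import org.openrdf.repository.sail.SailRepositoryConnection;

    final class DeleteDataSketch {
        // conn: an open connection, e.g. repository.getConnection() on a
        // SailRepository built from RyaSailFactory.getInstance(conf).
        static void deleteLikes(SailRepositoryConnection conn) throws Exception {
            String sparqlDelete = "DELETE DATA { GRAPH <http://updated/test> {"
                    + " <http://acme.com/people/Mike>"
                    + " <http://acme.com/actions/likes> \"A new book\" . }}";
            Update update = conn.prepareUpdate(QueryLanguage.SPARQL, sparqlDelete);
            update.execute(); // triple is removed from the core and index tables
        }
    }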

http://git-wip-us.apache.org/repos/asf/incubator-rya/blob/e5e227c1/extras/indexingExample/src/main/java/RyaDirectExample.java
----------------------------------------------------------------------
diff --git a/extras/indexingExample/src/main/java/RyaDirectExample.java b/extras/indexingExample/src/main/java/RyaDirectExample.java
index b3e8dae..0d2df3f 100644
--- a/extras/indexingExample/src/main/java/RyaDirectExample.java
+++ b/extras/indexingExample/src/main/java/RyaDirectExample.java
@@ -20,13 +20,6 @@
 
 import java.util.List;
 
-import mvm.rya.accumulo.AccumuloRdfConfiguration;
-import mvm.rya.api.RdfCloudTripleStoreConfiguration;
-import mvm.rya.indexing.RyaSailFactory;
-import mvm.rya.indexing.accumulo.ConfigUtils;
-import mvm.rya.indexing.accumulo.geo.GeoConstants;
-import mvm.rya.indexing.external.tupleSet.AccumuloIndexSet;
-
 import org.apache.accumulo.core.client.AccumuloException;
 import org.apache.accumulo.core.client.AccumuloSecurityException;
 import org.apache.accumulo.core.client.Connector;
@@ -60,6 +53,13 @@ import org.openrdf.repository.sail.SailRepositoryConnection;
 import org.openrdf.sail.Sail;
 import org.openrdf.sail.SailException;
 
+import mvm.rya.accumulo.AccumuloRdfConfiguration;
+import mvm.rya.api.RdfCloudTripleStoreConfiguration;
+import mvm.rya.indexing.RyaSailFactory;
+import mvm.rya.indexing.accumulo.ConfigUtils;
+import mvm.rya.indexing.accumulo.geo.GeoConstants;
+import mvm.rya.indexing.external.tupleSet.AccumuloIndexSet;
+
 public class RyaDirectExample {
     private static final Logger log = Logger.getLogger(RyaDirectExample.class);
 
@@ -72,27 +72,27 @@ public class RyaDirectExample {
     private static final String INSTANCE = "instance";
     private static final String RYA_TABLE_PREFIX = "x_test_triplestore_";
     private static final String AUTHS = "";
-    
-    
-    
+
+
+
     public static void main(String[] args) throws Exception {
         Configuration conf = getConf();
         conf.setBoolean(ConfigUtils.DISPLAY_QUERY_PLAN, PRINT_QUERIES);
-        
+
         log.info("Creating the tables as root.");
 //        createTables(addRootConf(conf), conf);
 
         SailRepository repository = null;
         SailRepositoryConnection conn = null;
-      
+
         try {
             log.info("Connecting to Indexing Sail Repository.");
-            
+
             Sail extSail = RyaSailFactory.getInstance(conf);
             repository = new SailRepository(extSail);
             repository.initialize();
             conn = repository.getConnection();
-            
+
             createPCJ(conn);
 
             long start = System.currentTimeMillis();
@@ -110,6 +110,12 @@ public class RyaDirectExample {
             testTemporalFreeGeoSearch(conn);
             log.info("Running SPARQL Example: Geo, Freetext, and PCJ Search");
             testGeoFreetextWithPCJSearch(conn);
+            log.info("Running SPARQL Example: Delete Temporal Data");
+            testDeleteTemporalData(conn);
+            log.info("Running SPARQL Example: Delete Free Text Data");
+            testDeleteFreeTextData(conn);
+            log.info("Running SPARQL Example: Delete Geo Data");
+            testDeleteGeoData(conn);
 
             log.info("TIME: " + (System.currentTimeMillis() - start) / 1000.);
         } finally {
@@ -201,10 +207,10 @@ public class RyaDirectExample {
 
         Validate.isTrue(resultHandler.getCount() == 0);
     }
-    
-    
+
+
     private static void testPCJSearch(SailRepositoryConnection conn) throws Exception {
-        
+
         String queryString;
         TupleQuery tupleQuery;
         CountingResultHandler tupleHandler;
@@ -222,7 +228,7 @@ public class RyaDirectExample {
         tupleQuery.evaluate(tupleHandler);
         log.info("Result count : " + tupleHandler.getCount());
         Validate.isTrue(tupleHandler.getCount() == 1);
-           
+
      // ///////////// search for bob
         queryString = "PREFIX fts: <http://rdf.useekm.com/fts#>  "//
                 + "SELECT ?e ?c ?l ?o " //
@@ -236,13 +242,13 @@ public class RyaDirectExample {
         tupleHandler = new CountingResultHandler();
         tupleQuery.evaluate(tupleHandler);
         log.info("Result count : " + tupleHandler.getCount());
-        Validate.isTrue(tupleHandler.getCount() == 2);    
-        
+        Validate.isTrue(tupleHandler.getCount() == 2);
+
     }
-    
 
-    
-    
+
+
+
     private static void testAddAndTemporalSearchWithPCJ(SailRepositoryConnection conn) throws Exception {
 
         // create some resources and literals to make statements out of
@@ -254,15 +260,16 @@ public class RyaDirectExample {
                 + "     time:inXSDDateTime '2001-01-01T04:01:02.000-05:00'^^<http://www.w3.org/2001/XMLSchema#dateTime> ;\n" //   2 seconds
                 + "     time:inXSDDateTime \"2001-01-01T01:01:03-08:00\" ;\n" //   3 seconds
                 + "     time:inXSDDateTime '2001-01-01T01:01:04-08:00' ;\n" //   4 seconds
-                + "     time:inXSDDateTime '2001-01-01T09:01:05Z' ;\n"   
-                + "     time:inXSDDateTime '2006-01-01' ;\n" 
-                + "     time:inXSDDateTime '2007-01-01' ;\n" 
+                + "     time:inXSDDateTime '2001-01-01T09:01:05Z' ;\n"
+                + "     time:inXSDDateTime '2006-01-01' ;\n"
+                + "     time:inXSDDateTime '2007-01-01' ;\n"
                 + "     time:inXSDDateTime '2008-01-01' ; .\n"
                 + "}";
 
         Update update = conn.prepareUpdate(QueryLanguage.SPARQL, sparqlInsert);
         update.execute();
 
+
         // Find all stored dates.
         String queryString = "PREFIX time: <http://www.w3.org/2006/time#> \n"//
                 + "PREFIX tempo: <tag:rya-rdf.org,2015:temporal#> \n"//
@@ -271,15 +278,15 @@ public class RyaDirectExample {
                 + "  ?event time:inXSDDateTime ?time . \n"//
                 + "  FILTER(tempo:after(?time, '2001-01-01T01:01:03-08:00') ) \n"// after 3 seconds
                 + "}";//
-       
-        
+
+
 
         TupleQuery tupleQuery = conn.prepareTupleQuery(QueryLanguage.SPARQL, queryString);
         CountingResultHandler tupleHandler = new CountingResultHandler();
         tupleQuery.evaluate(tupleHandler);
         log.info("Result count : " + tupleHandler.getCount());
         Validate.isTrue(tupleHandler.getCount() == 5);
-        
+
         // Find all stored dates.
         queryString = "PREFIX time: <http://www.w3.org/2006/time#> \n"//
                 + "PREFIX tempo: <tag:rya-rdf.org,2015:temporal#> \n"//
@@ -320,10 +327,6 @@ public class RyaDirectExample {
     }
 
 
-
-
-
-
     private static void testAddAndFreeTextSearchWithPCJ(SailRepositoryConnection conn) throws Exception {
         // add data to the repository using the SailRepository add methods
         ValueFactory f = conn.getValueFactory();
@@ -338,7 +341,7 @@ public class RyaDirectExample {
         uuid = "urn:people:bobss";
         conn.add(f.createURI(uuid), RDF.TYPE, person);
         conn.add(f.createURI(uuid), RDFS.LABEL, f.createLiteral("Bob Snob Hose", "en"));
-        
+
         String queryString;
         TupleQuery tupleQuery;
         CountingResultHandler tupleHandler;
@@ -355,7 +358,7 @@ public class RyaDirectExample {
         tupleQuery.evaluate(tupleHandler);
         log.info("Result count : " + tupleHandler.getCount());
         Validate.isTrue(tupleHandler.getCount() == 1);
-        
+
 
         // ///////////// search for alice and bob
         queryString = "PREFIX fts: <http://rdf.useekm.com/fts#>  "//
@@ -370,7 +373,7 @@ public class RyaDirectExample {
         tupleQuery.evaluate(tupleHandler);
         log.info("Result count : " + tupleHandler.getCount());
         Validate.isTrue(tupleHandler.getCount() == 2);
-        
+
      // ///////////// search for alice and bob
         queryString = "PREFIX fts: <http://rdf.useekm.com/fts#>  "//
                 + "SELECT ?person ?match " //
@@ -385,8 +388,8 @@ public class RyaDirectExample {
         tupleQuery.evaluate(tupleHandler);
         log.info("Result count : " + tupleHandler.getCount());
         Validate.isTrue(tupleHandler.getCount() == 1);
-        
-        
+
+
         // ///////////// search for bob
         queryString = "PREFIX fts: <http://rdf.useekm.com/fts#>  "//
                 + "SELECT ?person ?match ?e ?c ?l ?o " //
@@ -421,11 +424,11 @@ public class RyaDirectExample {
 
         Update u = conn.prepareUpdate(QueryLanguage.SPARQL, update);
         u.execute();
-        
+
         String queryString;
         TupleQuery tupleQuery;
         CountingResultHandler tupleHandler;
-        
+
         // point outside search ring
         queryString = "PREFIX geo: <http://www.opengis.net/ont/geosparql#>  "//
                 + "PREFIX geof: <http://www.opengis.net/def/function/geosparql/>  "//
@@ -442,7 +445,8 @@ public class RyaDirectExample {
         tupleQuery.evaluate(tupleHandler);
         log.info("Result count : " + tupleHandler.getCount());
         Validate.isTrue(tupleHandler.getCount() == 0);
-        
+
+
         // point inside search ring
         queryString = "PREFIX geo: <http://www.opengis.net/ont/geosparql#>  "//
                 + "PREFIX geof: <http://www.opengis.net/def/function/geosparql/>  "//
@@ -457,14 +461,14 @@ public class RyaDirectExample {
                 + "  ?point geo:asWKT ?wkt . "//
                 + "  FILTER(geof:sfWithin(?wkt, \"POLYGON((-78 39, -77 39, -77 38, -78 38, -78 39))\"^^geo:wktLiteral)) " //
                 + "}";//
-         
+
         tupleQuery = conn.prepareTupleQuery(QueryLanguage.SPARQL, queryString);
         tupleHandler = new CountingResultHandler();
         tupleQuery.evaluate(tupleHandler);
         log.info("Result count : " + tupleHandler.getCount());
         Validate.isTrue(tupleHandler.getCount() == 1);
-        
-             
+
+
         // point inside search ring with Pre-Computed Join
         queryString = "PREFIX geo: <http://www.opengis.net/ont/geosparql#>  "//
                 + "PREFIX geof: <http://www.opengis.net/def/function/geosparql/>  "//
@@ -479,7 +483,7 @@ public class RyaDirectExample {
                 + "  ?point geo:asWKT ?wkt . "//
                 + "  FILTER(geof:sfWithin(?wkt, \"POLYGON((-78 39, -77 39, -77 38, -78 38, -78 39))\"^^geo:wktLiteral)) " //
                 + "}";//
-         
+
         tupleQuery = conn.prepareTupleQuery(QueryLanguage.SPARQL, queryString);
         tupleHandler = new CountingResultHandler();
         tupleQuery.evaluate(tupleHandler);
@@ -505,7 +509,7 @@ public class RyaDirectExample {
         tupleQuery.evaluate(tupleHandler);
         log.info("Result count : " + tupleHandler.getCount());
         Validate.isTrue(tupleHandler.getCount() == 0);
-        
+
         // point inside search ring with different Pre-Computed Join
         queryString = "PREFIX geo: <http://www.opengis.net/ont/geosparql#>  "//
                 + "PREFIX geof: <http://www.opengis.net/def/function/geosparql/>  "//
@@ -526,12 +530,12 @@ public class RyaDirectExample {
         log.info("Result count : " + tupleHandler.getCount());
         Validate.isTrue(tupleHandler.getCount() == 1);
     }
-    
-    
-    private static void testTemporalFreeGeoSearch(SailRepositoryConnection conn) throws MalformedQueryException, 
+
+
+    private static void testTemporalFreeGeoSearch(SailRepositoryConnection conn) throws MalformedQueryException,
     RepositoryException, UpdateExecutionException, TupleQueryResultHandlerException, QueryEvaluationException {
-        
-        
+
+
         String queryString;
         TupleQuery tupleQuery;
         CountingResultHandler tupleHandler;
@@ -556,21 +560,21 @@ public class RyaDirectExample {
                 + "  ?person <http://www.w3.org/2000/01/rdf-schema#label> ?match . "//
                 + "  FILTER(fts:text(?match, \"pal*\")) " //
                 + "}";//
-        
-        
-        
+
+
+
         tupleQuery = conn.prepareTupleQuery(QueryLanguage.SPARQL, queryString);
 
         tupleHandler = new CountingResultHandler();
         tupleQuery.evaluate(tupleHandler);
         log.info("Result count : " + tupleHandler.getCount());
-        Validate.isTrue(tupleHandler.getCount() == 5); 
-        
+        Validate.isTrue(tupleHandler.getCount() == 5);
+
     }
-    
-    
-    
-    private static void testGeoFreetextWithPCJSearch(SailRepositoryConnection conn) throws MalformedQueryException, 
+
+
+
+    private static void testGeoFreetextWithPCJSearch(SailRepositoryConnection conn) throws MalformedQueryException,
     RepositoryException, TupleQueryResultHandlerException, QueryEvaluationException {
      // ring outside point
         String queryString = "PREFIX geo: <http://www.opengis.net/ont/geosparql#>  "//
@@ -596,12 +600,122 @@ public class RyaDirectExample {
         log.info("Result count : " + tupleHandler.getCount());
         Validate.isTrue(tupleHandler.getCount() == 1);
     }
-    
-    
-    
-    private static void createPCJ(SailRepositoryConnection conn) 
+
+
+    private static void testDeleteTemporalData(SailRepositoryConnection conn) throws Exception {
+        // Delete all stored dates
+        String sparqlDelete = "PREFIX time: <http://www.w3.org/2006/time#>\n"
+                + "PREFIX tempo: <tag:rya-rdf.org,2015:temporal#> \n"//
+                + "DELETE {\n" //
+                + "  ?event time:inXSDDateTime ?time . \n"
+                + "}\n"
+                + "WHERE { \n"
+                + "  ?event time:inXSDDateTime ?time . \n"//
+                + "}";//
+
+        Update deleteUpdate = conn.prepareUpdate(QueryLanguage.SPARQL, sparqlDelete);
+        deleteUpdate.execute();
+
+
+        // Find all stored dates.
+        String queryString = "PREFIX time: <http://www.w3.org/2006/time#> \n"//
+                + "PREFIX tempo: <tag:rya-rdf.org,2015:temporal#> \n"//
+                + "SELECT ?event ?time \n" //
+                + "WHERE { \n"
+                + "  ?event time:inXSDDateTime ?time . \n"//
+                + "  FILTER(tempo:after(?time, '2001-01-01T01:01:03-08:00') ) \n"// after 3 seconds
+                + "}";//
+
+
+        CountingResultHandler tupleHandler = new CountingResultHandler();
+        TupleQuery tupleQuery = conn.prepareTupleQuery(QueryLanguage.SPARQL, queryString);
+        tupleQuery.evaluate(tupleHandler);
+        log.info("Result count : " + tupleHandler.getCount());
+        Validate.isTrue(tupleHandler.getCount() == 0);
+    }
+
+
+    private static void testDeleteFreeTextData(SailRepositoryConnection conn) throws Exception {
+        // Delete data from the repository using the SailRepository remove methods
+        ValueFactory f = conn.getValueFactory();
+        URI person = f.createURI("http://example.org/ontology/Person");
+
+        String uuid;
+
+        uuid = "urn:people:alice";
+        conn.remove(f.createURI(uuid), RDF.TYPE, person);
+        conn.remove(f.createURI(uuid), RDFS.LABEL, f.createLiteral("Alice Palace Hose", f.createURI("xsd:string")));
+
+        uuid = "urn:people:bobss";
+        conn.remove(f.createURI(uuid), RDF.TYPE, person);
+        conn.remove(f.createURI(uuid), RDFS.LABEL, f.createLiteral("Bob Snob Hose", "en"));
+
+        conn.remove(person, RDFS.LABEL, f.createLiteral("label", "en"));
+
+        String queryString;
+        TupleQuery tupleQuery;
+        CountingResultHandler tupleHandler;
+
+        // Find all
+        queryString = "PREFIX fts: <http://rdf.useekm.com/fts#>  "//
+                + "SELECT ?person ?match " //
+                + "{" //
+                + "  ?person <http://www.w3.org/2000/01/rdf-schema#label> ?match . "//
+                + "  ?person a <http://example.org/ontology/Person> . "//
+                + "}";//
+        tupleQuery = conn.prepareTupleQuery(QueryLanguage.SPARQL, queryString);
+        tupleHandler = new CountingResultHandler();
+        tupleQuery.evaluate(tupleHandler);
+        log.info("Result count : " + tupleHandler.getCount());
+        Validate.isTrue(tupleHandler.getCount() == 0);
+    }
+
+
+    private static void testDeleteGeoData(SailRepositoryConnection conn) throws Exception {
+        // Delete all stored points
+        String sparqlDelete = "PREFIX geo: <http://www.opengis.net/ont/geosparql#>  "//
+                + "PREFIX geof: <http://www.opengis.net/def/function/geosparql/>  "//
+                + "DELETE {\n" //
+                + "  ?feature a geo:Feature . "//
+                + "  ?feature geo:hasGeometry ?point . "//
+                + "  ?point a geo:Point . "//
+                + "  ?point geo:asWKT ?wkt . "//
+                + "}\n"
+                + "WHERE { \n"
+                + "  ?feature a geo:Feature . "//
+                + "  ?feature geo:hasGeometry ?point . "//
+                + "  ?point a geo:Point . "//
+                + "  ?point geo:asWKT ?wkt . "//
+                + "}";//
+
+        Update deleteUpdate = conn.prepareUpdate(QueryLanguage.SPARQL, sparqlDelete);
+        deleteUpdate.execute();
+
+        String queryString;
+        TupleQuery tupleQuery;
+        CountingResultHandler tupleHandler;
+
+        // Find all stored points
+        queryString = "PREFIX geo: <http://www.opengis.net/ont/geosparql#>  "//
+                + "PREFIX geof: <http://www.opengis.net/def/function/geosparql/>  "//
+                + "SELECT ?feature ?point ?wkt " //
+                + "{" //
+                + "  ?feature a geo:Feature . "//
+                + "  ?feature geo:hasGeometry ?point . "//
+                + "  ?point a geo:Point . "//
+                + "  ?point geo:asWKT ?wkt . "//
+                + "}";//
+        tupleQuery = conn.prepareTupleQuery(QueryLanguage.SPARQL, queryString);
+        tupleHandler = new CountingResultHandler();
+        tupleQuery.evaluate(tupleHandler);
+        log.info("Result count : " + tupleHandler.getCount());
+        Validate.isTrue(tupleHandler.getCount() == 0);
+    }
+
+
+    private static void createPCJ(SailRepositoryConnection conn)
             throws RepositoryException, AccumuloException, AccumuloSecurityException, TableExistsException {
-        
+
         String queryString1 = ""//
                 + "SELECT ?e ?c ?l ?o " //
                 + "{" //
@@ -609,7 +723,7 @@ public class RyaDirectExample {
                 + "  ?e <http://www.w3.org/2000/01/rdf-schema#label> ?l . "//
                 + "  ?e <uri:talksTo> ?o . "//
                 + "}";//
-        
+
         String queryString2 = ""//
                 + "SELECT ?e ?c ?l ?o " //
                 + "{" //
@@ -617,8 +731,8 @@ public class RyaDirectExample {
                 + "  ?e <http://www.w3.org/2000/01/rdf-schema#label> ?l . "//
                 + "  ?e <uri:talksTo> ?o . "//
                 + "}";//
-        
-        
+
+
         URI obj,subclass,talksTo;
         URI person = new URIImpl("urn:people:alice");
         URI feature = new URIImpl("urn:feature");
@@ -632,33 +746,24 @@ public class RyaDirectExample {
         conn.add(sub, RDF.TYPE, subclass);
         conn.add(sub, RDFS.LABEL, new LiteralImpl("label"));
         conn.add(sub, talksTo, obj);
-       
-        AccumuloIndexSet ais1 = null; 
-        AccumuloIndexSet ais2 = null; 
+
+        AccumuloIndexSet ais1 = null;
+        AccumuloIndexSet ais2 = null;
         String tablename1 = RYA_TABLE_PREFIX + "INDEX_1";
         String tablename2 = RYA_TABLE_PREFIX + "INDEX_2";
 
         Connector accCon = new MockInstance(INSTANCE).getConnector("root", new PasswordToken("".getBytes()));
         accCon.tableOperations().create(tablename1);
         accCon.tableOperations().create(tablename2);
-        
+
         try {
             ais1 = new AccumuloIndexSet(queryString1, conn, accCon, tablename1);
             ais2 = new AccumuloIndexSet(queryString2, conn, accCon, tablename2);
-        } catch (MalformedQueryException e) {
-            e.printStackTrace();
-        } catch (SailException e) {
-            e.printStackTrace();
-        } catch (QueryEvaluationException e) {
-            e.printStackTrace();
-        } catch (MutationsRejectedException e) {
-            e.printStackTrace();
-        } catch (TableNotFoundException e) {
-            e.printStackTrace();
+        } catch (MalformedQueryException | SailException | QueryEvaluationException | MutationsRejectedException | TableNotFoundException e) {
+            log.error("Error creating Accumulo Index", e);
         }
-        
     }
-    
+
 
     private static class CountingResultHandler implements TupleQueryResultHandler {
         private int count = 0;
@@ -687,14 +792,10 @@ public class RyaDirectExample {
 
         @Override
         public void handleBoolean(boolean arg0) throws QueryResultHandlerException {
-          // TODO Auto-generated method stub
-          
         }
 
         @Override
         public void handleLinks(List<String> arg0) throws QueryResultHandlerException {
-          // TODO Auto-generated method stub
-          
         }
     }
 }
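createPCJ above materializes two SPARQL queries into Accumulo tables as pre-computed joins (PCJs): constructing an AccumuloIndexSet evaluates the query once and writes its bindings into the named table, so later queries containing that pattern can be answered from precomputed rows. A trimmed sketch using the same constructor signature as the diff; the query string and table name here are illustrative:

    import org.apache.accumulo.core.client.Connector;
    import org.apache.accumulo.core.client.mock.MockInstance;
    import org.apache.accumulo.core.client.security.tokens.PasswordToken;
    import org.openrdf.repository.sail.SailRepositoryConnection;

    import mvm.rya.indexing.external.tupleSet.AccumuloIndexSet;

    final class PcjSketch {
        static void materialize(SailRepositoryConnection conn) throws Exception {
            String tablename = "x_test_triplestore_INDEX_3"; // hypothetical table
            Connector accCon = new MockInstance("instance")
                    .getConnector("root", new PasswordToken("".getBytes()));
            accCon.tableOperations().create(tablename);
            String pcjQuery = "SELECT ?e ?l "
                    + "{ ?e <http://www.w3.org/2000/01/rdf-schema#label> ?l . }";
            // Evaluates pcjQuery against conn and persists the results.
            AccumuloIndexSet pcj = new AccumuloIndexSet(pcjQuery, conn, accCon, tablename);
        }
    }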


http://git-wip-us.apache.org/repos/asf/incubator-rya/blob/80faf06d/sail/src/test/java/mvm/rya/rdftriplestore/evaluation/RdfCloudTripleStoreSelectivityEvaluationStatisticsTest.java
----------------------------------------------------------------------
diff --git a/sail/src/test/java/mvm/rya/rdftriplestore/evaluation/RdfCloudTripleStoreSelectivityEvaluationStatisticsTest.java b/sail/src/test/java/mvm/rya/rdftriplestore/evaluation/RdfCloudTripleStoreSelectivityEvaluationStatisticsTest.java
new file mode 100644
index 0000000..c5f56cf
--- /dev/null
+++ b/sail/src/test/java/mvm/rya/rdftriplestore/evaluation/RdfCloudTripleStoreSelectivityEvaluationStatisticsTest.java
@@ -0,0 +1,304 @@
+package mvm.rya.rdftriplestore.evaluation;
+
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ * 
+ *   http://www.apache.org/licenses/LICENSE-2.0
+ * 
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied.  See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+
+
+
+import java.util.ArrayList;
+import java.util.Arrays;
+import java.util.List;
+import java.util.Map;
+import java.util.concurrent.TimeUnit;
+
+import mvm.rya.accumulo.AccumuloRdfConfiguration;
+import mvm.rya.api.RdfCloudTripleStoreConfiguration;
+import mvm.rya.api.layout.TablePrefixLayoutStrategy;
+import mvm.rya.api.persist.RdfEvalStatsDAO;
+import mvm.rya.joinselect.AccumuloSelectivityEvalDAO;
+import mvm.rya.prospector.service.ProspectorServiceEvalStatsDAO;
+
+import org.apache.accumulo.core.client.AccumuloException;
+import org.apache.accumulo.core.client.AccumuloSecurityException;
+import org.apache.accumulo.core.client.BatchWriter;
+import org.apache.accumulo.core.client.BatchWriterConfig;
+import org.apache.accumulo.core.client.Connector;
+import org.apache.accumulo.core.client.Instance;
+import org.apache.accumulo.core.client.Scanner;
+import org.apache.accumulo.core.client.TableExistsException;
+import org.apache.accumulo.core.client.TableNotFoundException;
+import org.apache.accumulo.core.client.mock.MockInstance;
+import org.apache.accumulo.core.client.security.tokens.PasswordToken;
+import org.apache.accumulo.core.data.Key;
+import org.apache.accumulo.core.data.Mutation;
+import org.apache.accumulo.core.data.Range;
+import org.apache.accumulo.core.data.Value;
+import org.apache.accumulo.core.security.Authorizations;
+import org.apache.hadoop.io.Text;
+import org.junit.Assert;
+import org.junit.Before;
+import org.junit.Test;
+import org.openrdf.query.MalformedQueryException;
+import org.openrdf.query.algebra.TupleExpr;
+import org.openrdf.query.parser.ParsedQuery;
+import org.openrdf.query.parser.sparql.SPARQLParser;
+
+public class RdfCloudTripleStoreSelectivityEvaluationStatisticsTest {
+
+    // TODO fix table names!!!
+
+    private static final String DELIM = "\u0000";
+    private final byte[] EMPTY_BYTE = new byte[0];
+    private final Value EMPTY_VAL = new Value(EMPTY_BYTE);
+
+    private String q1 = ""//
+            + "SELECT ?h  " //
+            + "{" //
+            + "  ?h <http://www.w3.org/2000/01/rdf-schema#label> <uri:dog> ."//
+            + "  ?h <uri:barksAt> <uri:cat> ."//
+            + "  ?h <uri:peesOn> <uri:hydrant> . "//
+            + "}";//
+
+    private Connector conn;
+    AccumuloRdfConfiguration arc;
+    BatchWriterConfig config;
+    Instance mock;
+
+    @Before
+    public void init() throws AccumuloException, AccumuloSecurityException, TableNotFoundException, TableExistsException {
+
+        mock = new MockInstance("accumulo");
+        PasswordToken pToken = new PasswordToken("pass".getBytes());
+        conn = mock.getConnector("user", pToken);
+
+        config = new BatchWriterConfig();
+        config.setMaxMemory(1000);
+        config.setMaxLatency(1000, TimeUnit.SECONDS);
+        config.setMaxWriteThreads(10);
+
+        if (conn.tableOperations().exists("rya_prospects")) {
+            conn.tableOperations().delete("rya_prospects");
+        }
+        if (conn.tableOperations().exists("rya_selectivity")) {
+            conn.tableOperations().delete("rya_selectivity");
+        }
+
+        arc = new AccumuloRdfConfiguration();
+        arc.setTableLayoutStrategy(new TablePrefixLayoutStrategy());
+        arc.setMaxRangesForScanner(300);
+
+    }
+
+    @Test
+    public void testOptimizeQ1() throws Exception {
+
+        RdfEvalStatsDAO<RdfCloudTripleStoreConfiguration> res = new ProspectorServiceEvalStatsDAO(conn, arc);
+        AccumuloSelectivityEvalDAO accc = new AccumuloSelectivityEvalDAO();
+        accc.setConf(arc);
+        accc.setRdfEvalDAO(res);
+        accc.setConnector(conn);
+        accc.init();
+
+        BatchWriter bw1 = conn.createBatchWriter("rya_prospects", config);
+        BatchWriter bw2 = conn.createBatchWriter("rya_selectivity", config);
+
+        String s1 = "predicateobject" + DELIM + "http://www.w3.org/2000/01/rdf-schema#label" + DELIM + "uri:dog";
+        String s2 = "predicateobject" + DELIM + "uri:barksAt" + DELIM + "uri:cat";
+        String s3 = "predicateobject" + DELIM + "uri:peesOn" + DELIM + "uri:hydrant";
+        List<Mutation> mList = new ArrayList<Mutation>();
+        List<Mutation> mList2 = new ArrayList<Mutation>();
+        List<String> sList = Arrays.asList("subjectobject", "subjectpredicate", "subjectsubject", "predicateobject", "predicatepredicate",
+                "predicatesubject");
+        Mutation m1, m2, m3, m4;
+
+        m1 = new Mutation(s1 + DELIM + "1");
+        m1.put(new Text("count"), new Text(""), new Value("1".getBytes()));
+        m2 = new Mutation(s2 + DELIM + "2");
+        m2.put(new Text("count"), new Text(""), new Value("2".getBytes()));
+        m3 = new Mutation(s3 + DELIM + "3");
+        m3.put(new Text("count"), new Text(""), new Value("3".getBytes()));
+        mList.add(m1);
+        mList.add(m2);
+        mList.add(m3);
+
+        bw1.addMutations(mList);
+        bw1.close();
+
+//        Scanner scan = conn.createScanner("rya_prospects", new Authorizations());
+//        scan.setRange(new Range());
+
+//        for (Map.Entry<Key, Value> entry : scan) {
+//            System.out.println("Key row string is " + entry.getKey().getRow().toString());
+//            System.out.println("Key is " + entry.getKey());
+//            System.out.println("Value is " + (new String(entry.getValue().get())));
+//        }
+
+        m1 = new Mutation(s1);
+        m2 = new Mutation(s2);
+        m3 = new Mutation(s3);
+        m4 = new Mutation(new Text("subjectpredicateobject" + DELIM + "FullTableCardinality"));
+        m4.put(new Text("FullTableCardinality"), new Text("100"), EMPTY_VAL);
+        int i = 2;
+        int j = 3;
+        int k = 4;
+        Long count1;
+        Long count2;
+        Long count3;
+
+        for (String s : sList) {
+            count1 = (long) i;
+            count2 = (long) j;
+            count3 = (long) k;
+            m1.put(new Text(s), new Text(count1.toString()), EMPTY_VAL);
+            m2.put(new Text(s), new Text(count2.toString()), EMPTY_VAL);
+            m3.put(new Text(s), new Text(count3.toString()), EMPTY_VAL);
+            i = 2 * i;
+            j = 2 * j;
+            k = 2 * k;
+        }
+        mList2.add(m1);
+        mList2.add(m2);
+        mList2.add(m3);
+        mList2.add(m4);
+        bw2.addMutations(mList2);
+        bw2.close();
+
+//        scan = conn.createScanner("rya_selectivity", new Authorizations());
+//        scan.setRange(new Range());
+
+//        for (Map.Entry<Key, Value> entry : scan) {
+//            System.out.println("Key row string is " + entry.getKey().getRow().toString());
+//            System.out.println("Key is " + entry.getKey());
+//            System.out.println("Value is " + (new String(entry.getKey().getColumnQualifier().toString())));
+//
+//        }
+
+        TupleExpr te = getTupleExpr(q1);
+        System.out.println(te);
+
+        RdfCloudTripleStoreSelectivityEvaluationStatistics ars = new RdfCloudTripleStoreSelectivityEvaluationStatistics(arc, res, accc);
+        double card = ars.getCardinality(te);
+
+        Assert.assertEquals(6.3136, card, .0001);
+
+    }
+
+    @Test
+    public void testOptimizeQ1ZeroCard() throws Exception {
+
+        RdfEvalStatsDAO<RdfCloudTripleStoreConfiguration> res = new ProspectorServiceEvalStatsDAO(conn, arc);
+        AccumuloSelectivityEvalDAO accc = new AccumuloSelectivityEvalDAO();
+        accc.setConf(arc);
+        accc.setConnector(conn);
+        accc.setRdfEvalDAO(res);
+        accc.init();
+
+        BatchWriter bw1 = conn.createBatchWriter("rya_prospects", config);
+        BatchWriter bw2 = conn.createBatchWriter("rya_selectivity", config);
+
+        String s1 = "predicateobject" + DELIM + "http://www.w3.org/2000/01/rdf-schema#label" + DELIM + "uri:dog";
+        String s2 = "predicateobject" + DELIM + "uri:barksAt" + DELIM + "uri:cat";
+        String s3 = "predicateobject" + DELIM + "uri:peesOn" + DELIM + "uri:hydrant";
+        List<Mutation> mList = new ArrayList<Mutation>();
+        List<Mutation> mList2 = new ArrayList<Mutation>();
+        List<String> sList = Arrays.asList("subjectobject", "subjectpredicate", "subjectsubject", "predicateobject", "predicatepredicate",
+                "predicatesubject");
+        Mutation m1, m2, m3, m4;
+
+        m1 = new Mutation(s1 + DELIM + "1");
+        m1.put(new Text("count"), new Text(""), new Value("1".getBytes()));
+        m2 = new Mutation(s2 + DELIM + "2");
+        m2.put(new Text("count"), new Text(""), new Value("2".getBytes()));
+        // m3 = new Mutation(s3 + DELIM + "3");
+        // m3.put(new Text("count"), new Text(""), new Value("3".getBytes()));
+        mList.add(m1);
+        mList.add(m2);
+        // mList.add(m3);
+
+        bw1.addMutations(mList);
+        bw1.close();
+
+//        Scanner scan = conn.createScanner("rya_prospects", new Authorizations());
+//        scan.setRange(new Range());
+
+//        for (Map.Entry<Key, Value> entry : scan) {
+//            System.out.println("Key row string is " + entry.getKey().getRow().toString());
+//            System.out.println("Key is " + entry.getKey());
+//            System.out.println("Value is " + (new String(entry.getValue().get())));
+//        }
+
+        m1 = new Mutation(s1);
+        m2 = new Mutation(s2);
+        m3 = new Mutation(s3);
+        m4 = new Mutation(new Text("subjectpredicateobject" + DELIM + "FullTableCardinality"));
+        m4.put(new Text("FullTableCardinality"), new Text("100"), EMPTY_VAL);
+        int i = 2;
+        int j = 3;
+        int k = 4;
+        Long count1;
+        Long count2;
+        Long count3;
+
+        for (String s : sList) {
+            count1 = (long) i;
+            count2 = (long) j;
+            count3 = (long) k;
+            m1.put(new Text(s), new Text(count1.toString()), EMPTY_VAL);
+            m2.put(new Text(s), new Text(count2.toString()), EMPTY_VAL);
+            m3.put(new Text(s), new Text(count3.toString()), EMPTY_VAL);
+            i = 2 * i;
+            j = 2 * j;
+            k = 2 * k;
+        }
+        mList2.add(m1);
+        mList2.add(m2);
+        mList2.add(m3);
+        mList2.add(m4);
+        bw2.addMutations(mList2);
+        bw2.close();
+
+//        scan = conn.createScanner("rya_selectivity", new Authorizations());
+//        scan.setRange(new Range());
+
+//        for (Map.Entry<Key, Value> entry : scan) {
+//            System.out.println("Key row string is " + entry.getKey().getRow().toString());
+//            System.out.println("Key is " + entry.getKey());
+//            System.out.println("Value is " + (new String(entry.getKey().getColumnQualifier().toString())));
+//
+//        }
+
+        TupleExpr te = getTupleExpr(q1);
+        System.out.println(te);
+
+        RdfCloudTripleStoreSelectivityEvaluationStatistics ars = new RdfCloudTripleStoreSelectivityEvaluationStatistics(arc, res, accc);
+        double card = ars.getCardinality(te);
+
+        Assert.assertEquals(4.04, card, .0001);
+
+    }
+
+    private TupleExpr getTupleExpr(String query) throws MalformedQueryException {
+
+        SPARQLParser sp = new SPARQLParser();
+        ParsedQuery pq = sp.parseQuery(query, null);
+
+        return pq.getTupleExpr();
+    }
+
+}
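
For reference, a minimal sketch (not part of this commit) of reading back the per-join cardinalities the fixture writes to rya_selectivity; the table name and column layout are taken from the test setup above, and it mirrors the commented-out scanner blocks left in the test:

    // Assumes the same mock Connector 'conn' as in the test.
    Scanner scan = conn.createScanner("rya_selectivity", new Authorizations());
    scan.setRange(new Range());
    for (Map.Entry<Key, Value> entry : scan) {
        // row = join key, column family = join type, column qualifier = count
        System.out.println(entry.getKey().getRow() + " "
                + entry.getKey().getColumnFamily() + " -> "
                + entry.getKey().getColumnQualifier());
    }

This can be handy when the cardinality assertions (6.3136 and 4.04 above) fail and you need to see what the DAO actually read.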

http://git-wip-us.apache.org/repos/asf/incubator-rya/blob/80faf06d/sail/src/test/java/mvm/rya/triplestore/inference/SameAsTest.java
----------------------------------------------------------------------
diff --git a/sail/src/test/java/mvm/rya/triplestore/inference/SameAsTest.java b/sail/src/test/java/mvm/rya/triplestore/inference/SameAsTest.java
new file mode 100644
index 0000000..d214123
--- /dev/null
+++ b/sail/src/test/java/mvm/rya/triplestore/inference/SameAsTest.java
@@ -0,0 +1,115 @@
+package mvm.rya.triplestore.inference;
+
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ * 
+ *   http://www.apache.org/licenses/LICENSE-2.0
+ * 
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied.  See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+
+
+
+import info.aduna.iteration.Iterations;
+import junit.framework.TestCase;
+import mvm.rya.accumulo.AccumuloRdfConfiguration;
+import mvm.rya.accumulo.AccumuloRyaDAO;
+import mvm.rya.api.RdfCloudTripleStoreConstants;
+import mvm.rya.api.resolver.RdfToRyaConversions;
+import mvm.rya.rdftriplestore.RdfCloudTripleStore;
+import mvm.rya.rdftriplestore.inference.InferenceEngine;
+
+import org.apache.accumulo.core.Constants;
+import org.apache.accumulo.core.client.Connector;
+import org.apache.accumulo.core.client.admin.SecurityOperations;
+import org.apache.accumulo.core.client.mock.MockInstance;
+import org.apache.accumulo.core.security.Authorizations;
+import org.apache.accumulo.core.security.TablePermission;
+import org.junit.Test;
+import org.openrdf.model.Resource;
+import org.openrdf.model.Statement;
+import org.openrdf.model.URI;
+import org.openrdf.model.ValueFactory;
+import org.openrdf.model.impl.StatementImpl;
+import org.openrdf.model.impl.ValueFactoryImpl;
+
+public class SameAsTest extends TestCase {
+    private String user = "user";
+    private String pwd = "pwd";
+    private String instance = "myinstance";
+    private String tablePrefix = "t_";
+    private Authorizations auths = Constants.NO_AUTHS;
+    private Connector connector;
+    private AccumuloRyaDAO ryaDAO;
+    private ValueFactory vf = new ValueFactoryImpl();
+    private String namespace = "urn:test#";
+    private AccumuloRdfConfiguration conf;
+
+    @Override
+    public void setUp() throws Exception {
+        super.setUp();
+        connector = new MockInstance(instance).getConnector(user, pwd.getBytes());
+        connector.tableOperations().create(tablePrefix + RdfCloudTripleStoreConstants.TBL_SPO_SUFFIX);
+        connector.tableOperations().create(tablePrefix + RdfCloudTripleStoreConstants.TBL_PO_SUFFIX);
+        connector.tableOperations().create(tablePrefix + RdfCloudTripleStoreConstants.TBL_OSP_SUFFIX);
+        connector.tableOperations().create(tablePrefix + RdfCloudTripleStoreConstants.TBL_NS_SUFFIX);
+        SecurityOperations secOps = connector.securityOperations();
+        secOps.createUser(user, pwd.getBytes(), auths);
+        secOps.grantTablePermission(user, tablePrefix + RdfCloudTripleStoreConstants.TBL_SPO_SUFFIX, TablePermission.READ);
+        secOps.grantTablePermission(user, tablePrefix + RdfCloudTripleStoreConstants.TBL_PO_SUFFIX, TablePermission.READ);
+        secOps.grantTablePermission(user, tablePrefix + RdfCloudTripleStoreConstants.TBL_OSP_SUFFIX, TablePermission.READ);
+        secOps.grantTablePermission(user, tablePrefix + RdfCloudTripleStoreConstants.TBL_NS_SUFFIX, TablePermission.READ);
+
+        conf = new AccumuloRdfConfiguration();
+        ryaDAO = new AccumuloRyaDAO();
+        ryaDAO.setConnector(connector);
+        conf.setTablePrefix(tablePrefix);
+        ryaDAO.setConf(conf);
+        ryaDAO.init();
+    }
+
+    @Override
+    public void tearDown() throws Exception {
+        super.tearDown();
+        connector.tableOperations().delete(tablePrefix + RdfCloudTripleStoreConstants.TBL_SPO_SUFFIX);
+        connector.tableOperations().delete(tablePrefix + RdfCloudTripleStoreConstants.TBL_PO_SUFFIX);
+        connector.tableOperations().delete(tablePrefix + RdfCloudTripleStoreConstants.TBL_OSP_SUFFIX);
+        connector.tableOperations().delete(tablePrefix + RdfCloudTripleStoreConstants.TBL_NS_SUFFIX);
+    }
+
+    @Test
+    // This isn't a good test: it's simply a cut-and-paste of a test that was failing in a different package, in the SameAsVisitor.
+    public void testGraphConfiguration() throws Exception {
+        URI a = vf.createURI(namespace, "a");
+        Statement statement = new StatementImpl(a, vf.createURI(namespace, "p"), vf.createLiteral("l"));
+        Statement statement2 = new StatementImpl(a, vf.createURI(namespace, "p2"), vf.createLiteral("l"));
+        ryaDAO.add(RdfToRyaConversions.convertStatement(statement));
+        ryaDAO.add(RdfToRyaConversions.convertStatement(statement2));
+        ryaDAO.add(RdfToRyaConversions.convertStatement(new StatementImpl(vf.createURI(namespace, "b"), vf.createURI(namespace, "p"), vf.createLiteral("l"))));
+        ryaDAO.add(RdfToRyaConversions.convertStatement(new StatementImpl(vf.createURI(namespace, "c"), vf.createURI(namespace, "n"), vf.createLiteral("l"))));
+
+        // build a connection
+        RdfCloudTripleStore store = new RdfCloudTripleStore();
+        store.setConf(conf);
+        store.setRyaDAO(ryaDAO);
+
+        InferenceEngine inferenceEngine = new InferenceEngine();
+        inferenceEngine.setRyaDAO(ryaDAO);
+        store.setInferenceEngine(inferenceEngine);
+        
+        store.initialize();
+
+        System.out.println(Iterations.asList(store.getConnection().getStatements(a, vf.createURI(namespace, "p"), vf.createLiteral("l"), false, new Resource[0])).size());
+    }
+}
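
For reference, a minimal sketch (not part of this commit) of issuing a SPARQL query against the same store through the OpenRDF repository API; the query and wiring here are assumptions for illustration:

    // 'store' as built in the test above.
    SailRepository repo = new SailRepository(store);
    repo.initialize();
    RepositoryConnection rc = repo.getConnection();
    try {
        TupleQueryResult result = rc.prepareTupleQuery(QueryLanguage.SPARQL,
                "SELECT ?s WHERE { ?s <urn:test#p> \"l\" }").evaluate();
        while (result.hasNext()) {
            System.out.println(result.next().getValue("s"));
        }
        result.close();
    } finally {
        rc.close();
    }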

http://git-wip-us.apache.org/repos/asf/incubator-rya/blob/80faf06d/sail/src/test/resources/cdrdf.xml
----------------------------------------------------------------------
diff --git a/sail/src/test/resources/cdrdf.xml b/sail/src/test/resources/cdrdf.xml
new file mode 100644
index 0000000..cd02ed2
--- /dev/null
+++ b/sail/src/test/resources/cdrdf.xml
@@ -0,0 +1,41 @@
+<?xml version="1.0"?>
+
+<!--
+Licensed to the Apache Software Foundation (ASF) under one
+or more contributor license agreements.  See the NOTICE file
+distributed with this work for additional information
+regarding copyright ownership.  The ASF licenses this file
+to you under the Apache License, Version 2.0 (the
+"License"); you may not use this file except in compliance
+with the License.  You may obtain a copy of the License at
+
+  http://www.apache.org/licenses/LICENSE-2.0
+
+Unless required by applicable law or agreed to in writing,
+software distributed under the License is distributed on an
+"AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+KIND, either express or implied.  See the License for the
+specific language governing permissions and limitations
+under the License.
+-->
+
+
+<rdf:RDF xmlns:rdf="http://www.w3.org/1999/02/22-rdf-syntax-ns#"
+	xmlns:cd="http://www.recshop.fake/cd#">
+
+	<rdf:Description rdf:about="http://www.recshop.fake/cd/Empire_Burlesque">
+		<cd:artist>Bob Dylan</cd:artist>
+		<cd:country>USA</cd:country>
+		<cd:company>Columbia</cd:company>
+		<cd:price>10.90</cd:price>
+		<cd:year>1985</cd:year>
+	</rdf:Description>
+
+	<rdf:Description rdf:about="http://www.recshop.fake/cd/Hide_your_fingers">
+		<cd:artist>Bonnie Tyler</cd:artist>
+		<cd:country>UK</cd:country>
+		<cd:company>CBS Records</cd:company>
+		<cd:price>9.90</cd:price>
+		<cd:year>1993</cd:year>
+	</rdf:Description>
+</rdf:RDF>
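
For reference, a minimal sketch (not part of this commit) of loading this resource into an open repository connection; the base URI is an assumption:

    // 'rc' is an org.openrdf.repository.RepositoryConnection
    InputStream in = getClass().getResourceAsStream("/cdrdf.xml");
    rc.add(in, "http://www.recshop.fake/cd#", RDFFormat.RDFXML);
    rc.commit();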

http://git-wip-us.apache.org/repos/asf/incubator-rya/blob/80faf06d/sail/src/test/resources/namedgraphs.trig
----------------------------------------------------------------------
diff --git a/sail/src/test/resources/namedgraphs.trig b/sail/src/test/resources/namedgraphs.trig
new file mode 100644
index 0000000..748d276
--- /dev/null
+++ b/sail/src/test/resources/namedgraphs.trig
@@ -0,0 +1,37 @@
+@prefix rdf: <http://www.w3.org/1999/02/22-rdf-syntax-ns#> .
+@prefix xsd: <http://www.w3.org/2001/XMLSchema#> .
+@prefix swp: <http://www.w3.org/2004/03/trix/swp-1/> .
+@prefix dc: <http://purl.org/dc/elements/1.1/> .
+@prefix ex: <http://www.example.org/vocabulary#> .
+@prefix : <http://www.example.org/exampleDocument#> .
+:G1 { :Monica ex:name "Monica Murphy" .
+      :Monica ex:homepage <http://www.monicamurphy.org> .
+      :Monica ex:email <ma...@monicamurphy.org> .
+      :Monica ex:one <ma...@monicamurphy.org> .
+      :Monica ex:two <ma...@monicamurphy.org> .
+      :Monica ex:three <ma...@monicamurphy.org> .
+      :Monica ex:four <ma...@monicamurphy.org> .
+      :Monica ex:five <ma...@monicamurphy.org> .
+      :Monica ex:six <ma...@monicamurphy.org> .
+      :Monica ex:seven <ma...@monicamurphy.org> .
+      :Monica ex:eight <ma...@monicamurphy.org> .
+      :Monica ex:nine <ma...@monicamurphy.org> .
+      :Monica ex:ten <ma...@monicamurphy.org> .
+      :Monica ex:hasSkill ex:Management }
+
+:G2 { :Monica rdf:type ex:Person .
+      :Monica ex:hasSkill ex:Programming }
+
+:G4 { :Phobe ex:name "Phobe Buffet" }
+
+:G3 { :G1 swp:assertedBy _:w1 .
+      _:w1 swp:authority :Chris .
+      _:w1 dc:date "2003-10-02"^^xsd:date .
+      :G2 swp:quotedBy _:w2 .
+      :G4 swp:assertedBy _:w2 .
+      _:w2 dc:date "2003-09-03"^^xsd:date .
+      _:w2 swp:authority :Tom .
+      :Chris rdf:type ex:Person .
+      :Chris ex:email <ma...@bizer.de>.
+      :Tom rdf:type ex:Person .
+      :Tom ex:email <ma...@bizer.de>}
\ No newline at end of file
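
The named graphs above can be targeted directly with a GRAPH clause; a sketch of such a query, written as a Java string in the style of the tests (the query itself is an assumption for illustration):

    String gq = ""
            + "SELECT ?name "
            + "WHERE { "
            + "  GRAPH <http://www.example.org/exampleDocument#G1> { "
            + "    ?who <http://www.example.org/vocabulary#name> ?name . "
            + "  } "
            + "}";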

http://git-wip-us.apache.org/repos/asf/incubator-rya/blob/80faf06d/sail/src/test/resources/ntriples.nt
----------------------------------------------------------------------
diff --git a/sail/src/test/resources/ntriples.nt b/sail/src/test/resources/ntriples.nt
new file mode 100644
index 0000000..edf1190
--- /dev/null
+++ b/sail/src/test/resources/ntriples.nt
@@ -0,0 +1 @@
+<urn:lubm:rdfts#GraduateStudent> <http://www.w3.org/2000/01/rdf-schema#subPropertyOf> <urn:lubm:rdfts#Student> .
\ No newline at end of file

http://git-wip-us.apache.org/repos/asf/incubator-rya/blob/80faf06d/sail/src/test/resources/reification.xml
----------------------------------------------------------------------
diff --git a/sail/src/test/resources/reification.xml b/sail/src/test/resources/reification.xml
new file mode 100644
index 0000000..5ab7722
--- /dev/null
+++ b/sail/src/test/resources/reification.xml
@@ -0,0 +1,36 @@
+<?xml version="1.0"?>
+
+<!--
+Licensed to the Apache Software Foundation (ASF) under one
+or more contributor license agreements.  See the NOTICE file
+distributed with this work for additional information
+regarding copyright ownership.  The ASF licenses this file
+to you under the Apache License, Version 2.0 (the
+"License"); you may not use this file except in compliance
+with the License.  You may obtain a copy of the License at
+
+  http://www.apache.org/licenses/LICENSE-2.0
+
+Unless required by applicable law or agreed to in writing,
+software distributed under the License is distributed on an
+"AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+KIND, either express or implied.  See the License for the
+specific language governing permissions and limitations
+under the License.
+-->
+
+
+<rdf:RDF xmlns:rdf="http://www.w3.org/1999/02/22-rdf-syntax-ns#"
+         xmlns:cd="http://www.recshop.fake/cd#"
+        xmlns:mm="http://mvm.com/owl/2010/10/mm.owl#">
+
+    <rdf:Description rdf:nodeID="A4">
+        <rdf:subject
+                rdf:resource="http://mvm.com/owl/2010/10/mm.owl#urn:mm:mvm:root/cimv2:Linux_Processor:0:CIM_ComputerSystem:nimbus02.bullpen.net"/>
+        <rdf:predicate rdf:resource="http://mvm.com/owl/2010/10/mm.owl#loadPercentage"/>
+        <rdf:object rdf:datatype="http://www.w3.org/2001/XMLSchema#int">1</rdf:object>
+        <rdf:type rdf:resource="http://www.w3.org/1999/02/22-rdf-syntax-ns#Statement"/>
+        <mm:reportedAt rdf:datatype="http://www.w3.org/2001/XMLSchema#dateTime">2011-01-07T21:29:45.545Z</mm:reportedAt>
+    </rdf:Description>
+
+</rdf:RDF>
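
The file above stores a measurement as an RDF reification quad (subject, predicate, object, plus a timestamp on the statement node); a sketch of a query that reassembles it, with the query an assumption for illustration:

    String rq = ""
            + "PREFIX rdf: <http://www.w3.org/1999/02/22-rdf-syntax-ns#> "
            + "PREFIX mm: <http://mvm.com/owl/2010/10/mm.owl#> "
            + "SELECT ?host ?load ?time "
            + "WHERE { "
            + "  ?stmt rdf:type rdf:Statement . "
            + "  ?stmt rdf:subject ?host . "
            + "  ?stmt rdf:predicate mm:loadPercentage . "
            + "  ?stmt rdf:object ?load . "
            + "  ?stmt mm:reportedAt ?time . "
            + "}";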

http://git-wip-us.apache.org/repos/asf/incubator-rya/blob/80faf06d/sail/src/test/resources/univ-bench.owl
----------------------------------------------------------------------
diff --git a/sail/src/test/resources/univ-bench.owl b/sail/src/test/resources/univ-bench.owl
new file mode 100644
index 0000000..691a330
--- /dev/null
+++ b/sail/src/test/resources/univ-bench.owl
@@ -0,0 +1,466 @@
+<?xml version="1.0" encoding="UTF-8" ?>
+<rdf:RDF
+  xmlns="urn:lubm:rdfts#"
+  xml:base="urn:lubm:rdfts#"
+  xmlns:rdf="http://www.w3.org/1999/02/22-rdf-syntax-ns#"
+  xmlns:rdfs="http://www.w3.org/2000/01/rdf-schema#"
+  xmlns:owl="http://www.w3.org/2002/07/owl#"
+>
+
+<owl:Class rdf:ID="AdministrativeStaff">
+  <rdfs:label>administrative staff worker</rdfs:label>
+  <rdfs:subPropertyOf rdf:resource="#Employee" />
+</owl:Class>
+
+<owl:Class rdf:ID="Article">
+  <rdfs:label>article</rdfs:label>
+  <rdfs:subPropertyOf rdf:resource="#Publication" />
+</owl:Class>
+
+<owl:Class rdf:ID="AssistantProfessor">
+  <rdfs:label>assistant professor</rdfs:label>
+  <rdfs:subPropertyOf rdf:resource="#Professor" />
+</owl:Class>
+
+<owl:Class rdf:ID="AssociateProfessor">
+  <rdfs:label>associate professor</rdfs:label>
+  <rdfs:subPropertyOf rdf:resource="#Professor" />
+</owl:Class>
+
+<owl:Class rdf:ID="Book">
+  <rdfs:label>book</rdfs:label>
+  <rdfs:subPropertyOf rdf:resource="#Publication" />
+</owl:Class>
+
+<owl:Class rdf:ID="Chair">
+  <rdfs:label>chair</rdfs:label>
+  <owl:intersectionOf rdf:parseType="Collection">
+  <owl:Class rdf:about="#Person" />
+  <owl:Restriction>
+  <owl:onProperty rdf:resource="#headOf" />
+  <owl:someValuesFrom>
+  <owl:Class rdf:about="#Department" />
+  </owl:someValuesFrom>
+  </owl:Restriction>
+  </owl:intersectionOf>
+  <rdfs:subPropertyOf rdf:resource="#Professor" />
+</owl:Class>
+
+<owl:Class rdf:ID="ClericalStaff">
+  <rdfs:label>clerical staff worker</rdfs:label>
+  <rdfs:subPropertyOf rdf:resource="#AdministrativeStaff" />
+</owl:Class>
+
+<owl:Class rdf:ID="College">
+  <rdfs:label>school</rdfs:label>
+  <rdfs:subPropertyOf rdf:resource="#Organization" />
+</owl:Class>
+
+<owl:Class rdf:ID="ConferencePaper">
+  <rdfs:label>conference paper</rdfs:label>
+  <rdfs:subPropertyOf rdf:resource="#Article" />
+</owl:Class>
+
+<owl:Class rdf:ID="Course">
+  <rdfs:label>teaching course</rdfs:label>
+  <rdfs:subPropertyOf rdf:resource="#Work" />
+</owl:Class>
+
+<owl:Class rdf:ID="Dean">
+  <rdfs:label>dean</rdfs:label>
+  <owl:intersectionOf rdf:parseType="Collection">
+  <owl:Class rdf:about="#Person" />
+  <owl:Restriction>
+  <owl:onProperty rdf:resource="#headOf" />
+  <owl:someValuesFrom>
+  <owl:Class rdf:about="#College" />
+  </owl:someValuesFrom>
+  </owl:Restriction>
+  </owl:intersectionOf>
+  <rdfs:subPropertyOf rdf:resource="#Professor" />
+</owl:Class>
+
+<owl:Class rdf:ID="Department">
+  <rdfs:label>university department</rdfs:label>
+  <rdfs:subPropertyOf rdf:resource="#Organization" />
+</owl:Class>
+
+<owl:Class rdf:ID="Director">
+  <rdfs:label>director</rdfs:label>
+  <owl:intersectionOf rdf:parseType="Collection">
+  <owl:Class rdf:about="#Person" />
+  <owl:Restriction>
+  <owl:onProperty rdf:resource="#headOf" />
+  <owl:someValuesFrom>
+  <owl:Class rdf:about="#Program" />
+  </owl:someValuesFrom>
+  </owl:Restriction>
+  </owl:intersectionOf>
+</owl:Class>
+
+<owl:Class rdf:ID="Employee">
+  <rdfs:label>Employee</rdfs:label>
+  <owl:intersectionOf rdf:parseType="Collection">
+  <owl:Class rdf:about="#Person" />
+  <owl:Restriction>
+  <owl:onProperty rdf:resource="#worksFor" />
+  <owl:someValuesFrom>
+  <owl:Class rdf:about="#Organization" />
+  </owl:someValuesFrom>
+  </owl:Restriction>
+  </owl:intersectionOf>
+</owl:Class>
+
+<owl:Class rdf:ID="Faculty">
+  <rdfs:label>faculty member</rdfs:label>
+  <rdfs:subPropertyOf rdf:resource="#Employee" />
+</owl:Class>
+
+<owl:Class rdf:ID="FullProfessor">
+  <rdfs:label>full professor</rdfs:label>
+  <rdfs:subPropertyOf rdf:resource="#Professor" />
+</owl:Class>
+
+<owl:Class rdf:ID="GraduateCourse">
+  <rdfs:label>Graduate Level Courses</rdfs:label>
+  <rdfs:subPropertyOf rdf:resource="#Course" />
+</owl:Class>
+
+<owl:Class rdf:ID="GraduateStudent">
+  <rdfs:label>graduate student</rdfs:label>
+  <rdfs:subPropertyOf rdf:resource="#Person" />
+  <rdfs:subPropertyOf>
+  <owl:Restriction>
+  <owl:onProperty rdf:resource="#takesCourse" />
+  <owl:someValuesFrom>
+  <owl:Class rdf:about="#GraduateCourse" />
+  </owl:someValuesFrom>
+  </owl:Restriction>
+  </rdfs:subPropertyOf>
+</owl:Class>
+
+<owl:Class rdf:ID="Institute">
+  <rdfs:label>institute</rdfs:label>
+  <rdfs:subPropertyOf rdf:resource="#Organization" />
+</owl:Class>
+
+<owl:Class rdf:ID="JournalArticle">
+  <rdfs:label>journal article</rdfs:label>
+  <rdfs:subPropertyOf rdf:resource="#Article" />
+</owl:Class>
+
+<owl:Class rdf:ID="Lecturer">
+  <rdfs:label>lecturer</rdfs:label>
+  <rdfs:subPropertyOf rdf:resource="#Faculty" />
+</owl:Class>
+
+<owl:Class rdf:ID="Manual">
+  <rdfs:label>manual</rdfs:label>
+  <rdfs:subPropertyOf rdf:resource="#Publication" />
+</owl:Class>
+
+<owl:Class rdf:ID="Organization">
+  <rdfs:label>organization</rdfs:label>
+</owl:Class>
+
+<owl:Class rdf:ID="Person">
+  <rdfs:label>person</rdfs:label>
+</owl:Class>
+
+<owl:Class rdf:ID="PostDoc">
+  <rdfs:label>post doctorate</rdfs:label>
+  <rdfs:subPropertyOf rdf:resource="#Faculty" />
+</owl:Class>
+
+<owl:Class rdf:ID="Professor">
+  <rdfs:label>professor</rdfs:label>
+  <rdfs:subPropertyOf rdf:resource="#Faculty" />
+</owl:Class>
+
+<owl:Class rdf:ID="Program">
+  <rdfs:label>program</rdfs:label>
+  <rdfs:subPropertyOf rdf:resource="#Organization" />
+</owl:Class>
+
+<owl:Class rdf:ID="Publication">
+  <rdfs:label>publication</rdfs:label>
+</owl:Class>
+
+<owl:Class rdf:ID="Research">
+  <rdfs:label>research work</rdfs:label>
+  <rdfs:subPropertyOf rdf:resource="#Work" />
+</owl:Class>
+
+<owl:Class rdf:ID="ResearchAssistant">
+  <rdfs:label>university research assistant</rdfs:label>
+  <rdfs:subPropertyOf rdf:resource="#Student" />
+  <rdfs:subPropertyOf>
+  <owl:Restriction>
+  <owl:onProperty rdf:resource="#worksFor" />
+  <owl:someValuesFrom>
+  <owl:Class rdf:about="#ResearchGroup" />
+  </owl:someValuesFrom>
+  </owl:Restriction>
+  </rdfs:subPropertyOf>
+</owl:Class>
+
+<owl:Class rdf:ID="ResearchGroup">
+  <rdfs:label>research group</rdfs:label>
+  <rdfs:subPropertyOf rdf:resource="#Organization" />
+</owl:Class>
+
+<owl:Class rdf:ID="Schedule">
+  <rdfs:label>schedule</rdfs:label>
+</owl:Class>
+
+<owl:Class rdf:ID="Software">
+  <rdfs:label>software program</rdfs:label>
+  <rdfs:subPropertyOf rdf:resource="#Publication" />
+</owl:Class>
+
+<owl:Class rdf:ID="Specification">
+  <rdfs:label>published specification</rdfs:label>
+  <rdfs:subPropertyOf rdf:resource="#Publication" />
+</owl:Class>
+
+<owl:Class rdf:ID="Student">
+  <rdfs:label>student</rdfs:label>
+  <owl:intersectionOf rdf:parseType="Collection">
+  <owl:Class rdf:about="#Person" />
+  <owl:Restriction>
+  <owl:onProperty rdf:resource="#takesCourse" />
+  <owl:someValuesFrom>
+  <owl:Class rdf:about="#Course" />
+  </owl:someValuesFrom>
+  </owl:Restriction>
+  </owl:intersectionOf>
+</owl:Class>
+
+<owl:Class rdf:ID="SystemsStaff">
+  <rdfs:label>systems staff worker</rdfs:label>
+  <rdfs:subPropertyOf rdf:resource="#AdministrativeStaff" />
+</owl:Class>
+
+<owl:Class rdf:ID="TeachingAssistant">
+  <rdfs:label>university teaching assistant</rdfs:label>
+  <owl:intersectionOf rdf:parseType="Collection">
+  <owl:Class rdf:about="#Person" />
+  <owl:Restriction>
+  <owl:onProperty rdf:resource="#teachingAssistantOf" />
+  <owl:someValuesFrom>
+  <owl:Class rdf:about="#Course" />
+  </owl:someValuesFrom>
+  </owl:Restriction>
+  </owl:intersectionOf>
+</owl:Class>
+
+<owl:Class rdf:ID="TechnicalReport">
+  <rdfs:label>technical report</rdfs:label>
+  <rdfs:subPropertyOf rdf:resource="#Article" />
+</owl:Class>
+
+<owl:Class rdf:ID="UndergraduateStudent">
+  <rdfs:label>undergraduate student</rdfs:label>
+  <rdfs:subPropertyOf rdf:resource="#Student" />
+</owl:Class>
+
+<owl:Class rdf:ID="University">
+  <rdfs:label>university</rdfs:label>
+  <rdfs:subPropertyOf rdf:resource="#Organization" />
+</owl:Class>
+
+<owl:Class rdf:ID="UnofficialPublication">
+  <rdfs:label>unofficial publication</rdfs:label>
+  <rdfs:subPropertyOf rdf:resource="#Publication" />
+</owl:Class>
+
+<owl:Class rdf:ID="VisitingProfessor">
+  <rdfs:label>visiting professor</rdfs:label>
+  <rdfs:subPropertyOf rdf:resource="#Professor" />
+</owl:Class>
+
+<owl:Class rdf:ID="Work">
+  <rdfs:label>Work</rdfs:label>
+</owl:Class>
+
+<owl:ObjectProperty rdf:ID="advisor">
+  <rdfs:label>is being advised by</rdfs:label>
+  <rdfs:domain rdf:resource="#Person" />
+  <rdfs:range rdf:resource="#Professor" />
+</owl:ObjectProperty>
+
+<owl:ObjectProperty rdf:ID="affiliatedOrganizationOf">
+  <rdfs:label>is affiliated with</rdfs:label>
+  <rdfs:domain rdf:resource="#Organization" />
+  <rdfs:range rdf:resource="#Organization" />
+</owl:ObjectProperty>
+
+<owl:ObjectProperty rdf:ID="affiliateOf">
+  <rdfs:label>is affiliated with</rdfs:label>
+  <rdfs:domain rdf:resource="#Organization" />
+  <rdfs:range rdf:resource="#Person" />
+</owl:ObjectProperty>
+
+<owl:DatatypeProperty rdf:ID="age">
+  <rdfs:label>is age</rdfs:label>
+  <rdfs:domain rdf:resource="#Person" />
+</owl:DatatypeProperty>
+
+<owl:ObjectProperty rdf:ID="degreeFrom">
+  <rdfs:label>has a degree from</rdfs:label>
+  <rdfs:domain rdf:resource="#Person" />
+  <rdfs:range rdf:resource="#University" />
+  <owl:inverseOf rdf:resource="#hasAlumnus"/>
+</owl:ObjectProperty>
+
+<owl:ObjectProperty rdf:ID="doctoralDegreeFrom">
+  <rdfs:label>has a doctoral degree from</rdfs:label>
+  <rdfs:domain rdf:resource="#Person" />
+  <rdfs:range rdf:resource="#University" />
+  <rdfs:subPropertyOf rdf:resource="#degreeFrom" />
+</owl:ObjectProperty>
+
+<owl:DatatypeProperty rdf:ID="emailAddress">
+  <rdfs:label>can be reached at</rdfs:label>
+  <rdfs:domain rdf:resource="#Person" />
+</owl:DatatypeProperty>
+
+<owl:ObjectProperty rdf:ID="hasAlumnus">
+  <rdfs:label>has as an alumnus</rdfs:label>
+  <rdfs:domain rdf:resource="#University" />
+  <rdfs:range rdf:resource="#Person" />
+  <owl:inverseOf rdf:resource="#degreeFrom"/>
+</owl:ObjectProperty>
+
+<owl:ObjectProperty rdf:ID="headOf">
+  <rdfs:label>is the head of</rdfs:label>
+  <rdfs:subPropertyOf rdf:resource="#worksFor"/>
+</owl:ObjectProperty>
+
+<owl:ObjectProperty rdf:ID="listedCourse">
+  <rdfs:label>lists as a course</rdfs:label>
+  <rdfs:domain rdf:resource="#Schedule" />
+  <rdfs:range rdf:resource="#Course" />
+</owl:ObjectProperty>
+
+<owl:ObjectProperty rdf:ID="mastersDegreeFrom">
+  <rdfs:label>has a masters degree from</rdfs:label>
+  <rdfs:domain rdf:resource="#Person" />
+  <rdfs:range rdf:resource="#University" />
+  <rdfs:subPropertyOf rdf:resource="#degreeFrom"/>
+</owl:ObjectProperty>
+
+<owl:ObjectProperty rdf:ID="member">
+  <rdfs:label>has as a member</rdfs:label>
+  <rdfs:domain rdf:resource="#Organization" />
+  <rdfs:range rdf:resource="#Person" />
+</owl:ObjectProperty>
+
+<owl:ObjectProperty rdf:ID="memberOf">
+<rdfs:label>member of</rdfs:label>
+<owl:inverseOf rdf:resource="#member" />
+</owl:ObjectProperty>
+
+<owl:DatatypeProperty rdf:ID="name">
+<rdfs:label>name</rdfs:label>
+</owl:DatatypeProperty>
+
+<owl:DatatypeProperty rdf:ID="officeNumber">
+  <rdfs:label>office room No.</rdfs:label>
+</owl:DatatypeProperty>
+
+<owl:ObjectProperty rdf:ID="orgPublication">
+  <rdfs:label>publishes</rdfs:label>
+  <rdfs:domain rdf:resource="#Organization" />
+  <rdfs:range rdf:resource="#Publication" />
+</owl:ObjectProperty>
+
+<owl:ObjectProperty rdf:ID="publicationAuthor">
+  <rdfs:label>was written by</rdfs:label>
+  <rdfs:domain rdf:resource="#Publication" />
+  <rdfs:range rdf:resource="#Person" />
+</owl:ObjectProperty>
+
+<owl:ObjectProperty rdf:ID="publicationDate">
+  <rdfs:label>was written on</rdfs:label>
+  <rdfs:domain rdf:resource="#Publication" />
+</owl:ObjectProperty>
+
+<owl:ObjectProperty rdf:ID="publicationResearch">
+  <rdfs:label>is about</rdfs:label>
+  <rdfs:domain rdf:resource="#Publication" />
+  <rdfs:range rdf:resource="#Research" />
+</owl:ObjectProperty>
+
+<owl:DatatypeProperty rdf:ID="researchInterest">
+  <rdfs:label>is researching</rdfs:label>
+</owl:DatatypeProperty>
+
+<owl:ObjectProperty rdf:ID="researchProject">
+  <rdfs:label>has as a research project</rdfs:label>
+  <rdfs:domain rdf:resource="#ResearchGroup" />
+  <rdfs:range rdf:resource="#Research" />
+</owl:ObjectProperty>
+
+<owl:ObjectProperty rdf:ID="softwareDocumentation">
+  <rdfs:label>is documented in</rdfs:label>
+  <rdfs:domain rdf:resource="#Software" />
+  <rdfs:range rdf:resource="#Publication" />
+</owl:ObjectProperty>
+
+<owl:ObjectProperty rdf:ID="softwareVersion">
+  <rdfs:label>is version</rdfs:label>
+  <rdfs:domain rdf:resource="#Software" />
+</owl:ObjectProperty>
+
+<owl:TransitiveProperty rdf:ID="subOrganizationOf">
+  <rdfs:label>is part of</rdfs:label>
+  <rdfs:domain rdf:resource="#Organization" />
+  <rdfs:range rdf:resource="#Organization" />
+</owl:TransitiveProperty>
+
+<owl:ObjectProperty rdf:ID="takesCourse">
+  <rdfs:label>is taking</rdfs:label>
+</owl:ObjectProperty>
+
+<owl:ObjectProperty rdf:ID="teacherOf">
+  <rdfs:label>teaches</rdfs:label>
+  <rdfs:domain rdf:resource="#Faculty" />
+  <rdfs:range rdf:resource="#Course" />
+</owl:ObjectProperty>
+
+<owl:ObjectProperty rdf:ID="teachingAssistantOf">
+  <rdfs:label>is a teaching assistant for</rdfs:label>
+  <rdfs:domain rdf:resource="#TeachingAssistant" />
+  <rdfs:range rdf:resource="#Course" />
+</owl:ObjectProperty>
+
+<owl:DatatypeProperty rdf:ID="telephone">
+  <rdfs:label>telephone number</rdfs:label>
+  <rdfs:domain rdf:resource="#Person" />
+</owl:DatatypeProperty>
+
+<owl:ObjectProperty rdf:ID="tenured">
+  <rdfs:label>is tenured:</rdfs:label>
+  <rdfs:domain rdf:resource="#Professor" />
+</owl:ObjectProperty>
+
+<owl:DatatypeProperty rdf:ID="title">
+  <rdfs:label>title</rdfs:label>
+  <rdfs:domain rdf:resource="#Person" />
+</owl:DatatypeProperty>
+
+<owl:ObjectProperty rdf:ID="undergraduateDegreeFrom">
+  <rdfs:label>has an undergraduate degree from</rdfs:label>
+  <rdfs:domain rdf:resource="#Person" />
+  <rdfs:range rdf:resource="#University" />
+  <rdfs:subPropertyOf rdf:resource="#degreeFrom"/>
+</owl:ObjectProperty>
+
+<owl:ObjectProperty rdf:ID="worksFor">
+  <rdfs:label>Works For</rdfs:label>
+  <rdfs:subPropertyOf rdf:resource="#memberOf" />
+</owl:ObjectProperty>
+
+</rdf:RDF>
+
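
For reference, a minimal sketch (not part of this commit) of loading the LUBM-style ontology above and listing its class labels; 'rc' is assumed to be an open RepositoryConnection as in the earlier sketches:

    rc.add(getClass().getResourceAsStream("/univ-bench.owl"),
            "urn:lubm:rdfts#", RDFFormat.RDFXML);

    String lq = ""
            + "PREFIX rdfs: <http://www.w3.org/2000/01/rdf-schema#> "
            + "PREFIX owl: <http://www.w3.org/2002/07/owl#> "
            + "SELECT ?c ?label "
            + "WHERE { ?c a owl:Class ; rdfs:label ?label }";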

http://git-wip-us.apache.org/repos/asf/incubator-rya/blob/80faf06d/utils/cloudbase.utils/pom.xml
----------------------------------------------------------------------
diff --git a/utils/cloudbase.utils/pom.xml b/utils/cloudbase.utils/pom.xml
deleted file mode 100644
index 371d71d..0000000
--- a/utils/cloudbase.utils/pom.xml
+++ /dev/null
@@ -1,67 +0,0 @@
-<?xml version="1.0" encoding="UTF-8"?>
-<project xmlns="http://maven.apache.org/POM/4.0.0" xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance" xsi:schemaLocation="http://maven.apache.org/POM/4.0.0 http://maven.apache.org/xsd/maven-4.0.0.xsd">
-    <modelVersion>4.0.0</modelVersion>
-    <parent>
-        <groupId>mvm.rya</groupId>
-		<artifactId>rya.utils</artifactId>
-		<version>3.2.10-SNAPSHOT</version>
-    </parent>
-    <artifactId>cloudbase.utils</artifactId>
-    <name>${project.groupId}.${project.artifactId}</name>
-    <dependencies>
-        <dependency>
-            <groupId>cloudbase</groupId>
-            <artifactId>cloudbase-core</artifactId>
-        </dependency>
-        <dependency>
-            <groupId>org.apache.hadoop</groupId>
-            <artifactId>hadoop-common</artifactId>
-        </dependency>
-        <dependency>
-            <groupId>com.google.guava</groupId>
-            <artifactId>guava</artifactId>
-        </dependency>
-        <dependency>
-            <groupId>org.slf4j</groupId>
-            <artifactId>slf4j-log4j12</artifactId>
-        </dependency>
-        
-		<!-- Test-scoped dependencies -->
-		<dependency>
-            <groupId>cloudbase</groupId>
-            <artifactId>cloudbase-start</artifactId>
-            <version>1.3.4</version>
-			<scope>test</scope>
-        </dependency>
-        <dependency>
-            <groupId>org.apache.thrift</groupId>
-            <artifactId>thrift</artifactId>
-            <version>0.3</version>
-			<scope>test</scope>
-        </dependency>
-        <dependency>
-            <groupId>org.apache.commons</groupId>
-            <artifactId>commons-jci-core</artifactId>
-            <version>1.0</version>
-			<scope>test</scope>
-        </dependency>
-        <dependency>
-            <groupId>org.apache.commons</groupId>
-            <artifactId>commons-jci-fam</artifactId>
-            <version>1.0</version>
-			<scope>test</scope>
-        </dependency>
-        <dependency>
-            <groupId>commons-lang</groupId>
-            <artifactId>commons-lang</artifactId>
-			<scope>test</scope>
-		</dependency>
-		<dependency>
-			<groupId>commons-collections</groupId>
-			<artifactId>commons-collections</artifactId>
-			<version>3.2</version>
-			<scope>test</scope>
-		</dependency>
-    </dependencies>
-
-</project>

http://git-wip-us.apache.org/repos/asf/incubator-rya/blob/80faf06d/utils/cloudbase.utils/src/main/java/mvm/rya/cloudbase/utils/bulk/KeyRangePartitioner.java
----------------------------------------------------------------------
diff --git a/utils/cloudbase.utils/src/main/java/mvm/rya/cloudbase/utils/bulk/KeyRangePartitioner.java b/utils/cloudbase.utils/src/main/java/mvm/rya/cloudbase/utils/bulk/KeyRangePartitioner.java
deleted file mode 100644
index dcf54b3..0000000
--- a/utils/cloudbase.utils/src/main/java/mvm/rya/cloudbase/utils/bulk/KeyRangePartitioner.java
+++ /dev/null
@@ -1,35 +0,0 @@
-package mvm.rya.cloudbase.utils.bulk;
-
-import cloudbase.core.client.mapreduce.lib.partition.RangePartitioner;
-import cloudbase.core.data.Key;
-import cloudbase.core.data.Value;
-import org.apache.hadoop.conf.Configurable;
-import org.apache.hadoop.conf.Configuration;
-import org.apache.hadoop.mapreduce.Partitioner;
-
-/**
- * Class KeyRangePartitioner
- * Date: Sep 13, 2011
- * Time: 2:45:56 PM
- */
-public class KeyRangePartitioner extends Partitioner<Key, Value> implements Configurable {
-
-    private RangePartitioner rangePartitioner = new RangePartitioner();
-    private Configuration conf;
-
-    public Configuration getConf() {
-        return conf;
-    }
-
-    public void setConf(Configuration conf) {
-        this.conf = conf;
-        rangePartitioner.setConf(conf);
-    }
-
-    @Override
-    public int getPartition(Key key, Value value, int numReducers) {
-        return rangePartitioner.getPartition(key.getRow(), value, numReducers);
-    }
-
-    
-}
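
The deleted class simply delegated row-based partitioning to Cloudbase's RangePartitioner. A minimal sketch of the equivalent delegation against the Accumulo API (an assumption: Accumulo's bundled partitioners are the intended replacement; Accumulo also ships its own KeyRangePartitioner):

    import org.apache.accumulo.core.client.mapreduce.lib.partition.RangePartitioner;
    import org.apache.accumulo.core.data.Key;
    import org.apache.accumulo.core.data.Value;
    import org.apache.hadoop.conf.Configurable;
    import org.apache.hadoop.conf.Configuration;
    import org.apache.hadoop.mapreduce.Partitioner;

    public class KeyRangePartitioner extends Partitioner<Key, Value> implements Configurable {

        private final RangePartitioner delegate = new RangePartitioner();
        private Configuration conf;

        public Configuration getConf() {
            return conf;
        }

        public void setConf(Configuration conf) {
            this.conf = conf;
            delegate.setConf(conf);
        }

        @Override
        public int getPartition(Key key, Value value, int numReducers) {
            // Partition on the row portion of the key, exactly as the
            // removed Cloudbase version did.
            return delegate.getPartition(key.getRow(), value, numReducers);
        }
    }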

http://git-wip-us.apache.org/repos/asf/incubator-rya/blob/80faf06d/utils/cloudbase.utils/src/main/java/mvm/rya/cloudbase/utils/filters/TimeRangeFilter.java
----------------------------------------------------------------------
diff --git a/utils/cloudbase.utils/src/main/java/mvm/rya/cloudbase/utils/filters/TimeRangeFilter.java b/utils/cloudbase.utils/src/main/java/mvm/rya/cloudbase/utils/filters/TimeRangeFilter.java
deleted file mode 100644
index 965aa7f..0000000
--- a/utils/cloudbase.utils/src/main/java/mvm/rya/cloudbase/utils/filters/TimeRangeFilter.java
+++ /dev/null
@@ -1,64 +0,0 @@
-package mvm.rya.cloudbase.utils.filters;
-
-import cloudbase.core.data.Key;
-import cloudbase.core.data.Value;
-import cloudbase.core.iterators.OptionDescriber;
-import cloudbase.core.iterators.filter.Filter;
-
-import java.util.Map;
-import java.util.TreeMap;
-
-/**
- * Set the startTime and timeRange. The filter will only accept keyValues that
- * are within the range [startTime - timeRange, startTime].
- *
- * @deprecated Use the LimitingAgeOffFilter
- */
-public class TimeRangeFilter implements Filter, OptionDescriber {
-    private long timeRange;
-    private long startTime;
-    public static final String TIME_RANGE_PROP = "timeRange";
-    public static final String START_TIME_PROP = "startTime";
-
-    @Override
-    public boolean accept(Key k, Value v) {
-        long diff = startTime - k.getTimestamp();
-        return !(diff > timeRange || diff < 0);
-    }
-
-    @Override
-    public void init(Map<String, String> options) {
-        if (options == null) {
-            throw new IllegalArgumentException("options must be set for TimeRangeFilter");
-        }
-
-        timeRange = -1;
-        String timeRange_s = options.get(TIME_RANGE_PROP);
-        if (timeRange_s == null)
-            throw new IllegalArgumentException("timeRange must be set for TimeRangeFilter");
-
-        timeRange = Long.parseLong(timeRange_s);
-
-        String time = options.get(START_TIME_PROP);
-        if (time != null)
-            startTime = Long.parseLong(time);
-        else
-            startTime = System.currentTimeMillis();
-    }
-
-    @Override
-    public IteratorOptions describeOptions() {
-        Map<String, String> options = new TreeMap<String, String>();
-        options.put(TIME_RANGE_PROP, "time range from the startTime (milliseconds)");
-        options.put(START_TIME_PROP, "if set, use the given value as the absolute time in milliseconds as the start time in the time range.");
-        return new OptionDescriber.IteratorOptions("timeRangeFilter", "TimeRangeFilter removes entries with timestamps outside of the given time range: " +
-                "[startTime - timeRange, startTime]",
-                options, null);
-    }
-
-    @Override
-    public boolean validateOptions(Map<String, String> options) {
-        Long.parseLong(options.get(TIME_RANGE_PROP));
-        return true;
-    }
-}
\ No newline at end of file
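
The deprecation note above points at an age-off style filter. A sketch of the closest Accumulo equivalent, configured per-scan; an assumption for illustration, and the semantics are close but not identical, since AgeOffFilter does not reject timestamps newer than the reference point the way the diff < 0 check here did:

    import org.apache.accumulo.core.client.IteratorSetting;
    import org.apache.accumulo.core.iterators.user.AgeOffFilter;

    // 'scanner' and 'startTime' are assumed from the surrounding code.
    IteratorSetting is = new IteratorSetting(25, "ageoff", AgeOffFilter.class);
    AgeOffFilter.setTTL(is, 3600L * 1000L);        // keep entries from the last hour
    AgeOffFilter.setCurrentTime(is, startTime);    // optional fixed reference point
    scanner.addScanIterator(is);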

http://git-wip-us.apache.org/repos/asf/incubator-rya/blob/80faf06d/utils/cloudbase.utils/src/main/java/mvm/rya/cloudbase/utils/input/CloudbaseBatchScannerInputFormat.java
----------------------------------------------------------------------
diff --git a/utils/cloudbase.utils/src/main/java/mvm/rya/cloudbase/utils/input/CloudbaseBatchScannerInputFormat.java b/utils/cloudbase.utils/src/main/java/mvm/rya/cloudbase/utils/input/CloudbaseBatchScannerInputFormat.java
deleted file mode 100644
index b7a1c84..0000000
--- a/utils/cloudbase.utils/src/main/java/mvm/rya/cloudbase/utils/input/CloudbaseBatchScannerInputFormat.java
+++ /dev/null
@@ -1,872 +0,0 @@
-package mvm.rya.cloudbase.utils.input;
-
-import cloudbase.core.CBConstants;
-import cloudbase.core.client.*;
-import cloudbase.core.client.impl.Tables;
-import cloudbase.core.client.impl.TabletLocator;
-import cloudbase.core.data.*;
-import cloudbase.core.security.Authorizations;
-import cloudbase.core.security.TablePermission;
-import cloudbase.core.security.thrift.AuthInfo;
-import cloudbase.core.util.ArgumentChecker;
-import cloudbase.core.util.Pair;
-import cloudbase.core.util.TextUtil;
-import cloudbase.core.util.UtilWaitThread;
-import cloudbase.core.util.format.DefaultFormatter;
-import org.apache.commons.codec.binary.Base64;
-import org.apache.hadoop.conf.Configuration;
-import org.apache.hadoop.io.Text;
-import org.apache.hadoop.io.Writable;
-import org.apache.hadoop.mapreduce.*;
-import org.apache.log4j.Level;
-import org.apache.log4j.Logger;
-
-import java.io.*;
-import java.lang.reflect.InvocationTargetException;
-import java.math.BigInteger;
-import java.net.InetAddress;
-import java.net.URLDecoder;
-import java.net.URLEncoder;
-import java.util.*;
-import java.util.Map.Entry;
-
-/**
- * This class allows MapReduce jobs to use Cloudbase as the source of data. This
- * input format provides keys and values of type Key and Value to the Map() and
- * Reduce() functions.
- *
- * The user must specify the following via static methods:
- *
- * <ul>
- * <li>CloudbaseInputFormat.setInputTableInfo(job, username, password, table,
- * auths)
- * <li>CloudbaseInputFormat.setZooKeeperInstance(job, instanceName, hosts)
- * </ul>
- *
- * Other static methods are optional
- */
-public class CloudbaseBatchScannerInputFormat extends InputFormat<Key, Value> {
-	private static final Logger log = Logger.getLogger(CloudbaseBatchScannerInputFormat.class);
-
-	private static final String PREFIX = CloudbaseBatchScannerInputFormat.class.getSimpleName();
-	private static final String INPUT_INFO_HAS_BEEN_SET = PREFIX + ".configured";
-	private static final String INSTANCE_HAS_BEEN_SET = PREFIX + ".instanceConfigured";
-	private static final String USERNAME = PREFIX + ".username";
-	private static final String PASSWORD = PREFIX + ".password";
-	private static final String TABLE_NAME = PREFIX + ".tablename";
-	private static final String AUTHORIZATIONS = PREFIX + ".authorizations";
-
-	private static final String INSTANCE_NAME = PREFIX + ".instanceName";
-	private static final String ZOOKEEPERS = PREFIX + ".zooKeepers";
-	private static final String MOCK = ".useMockInstance";
-
-	private static final String RANGES = PREFIX + ".ranges";
-	private static final String AUTO_ADJUST_RANGES = PREFIX + ".ranges.autoAdjust";
-
-	private static final String ROW_REGEX = PREFIX + ".regex.row";
-	private static final String COLUMN_FAMILY_REGEX = PREFIX + ".regex.cf";
-	private static final String COLUMN_QUALIFIER_REGEX = PREFIX + ".regex.cq";
-	private static final String VALUE_REGEX = PREFIX + ".regex.value";
-
-	private static final String COLUMNS = PREFIX + ".columns";
-	private static final String LOGLEVEL = PREFIX + ".loglevel";
-
-	private static final String ISOLATED = PREFIX + ".isolated";
-
-	//Used to specify the maximum # of versions of a Cloudbase cell value to return
-	private static final String MAX_VERSIONS = PREFIX + ".maxVersions";
-
-	//Used for specifying the iterators to be applied
-	private static final String ITERATORS = PREFIX + ".iterators";
-	private static final String ITERATORS_OPTIONS = PREFIX + ".iterators.options";
-	private static final String ITERATORS_DELIM = ",";
-    private BatchScanner bScanner;
-
-    /**
-	 * Enable or disable use of the {@link cloudbase.core.client.IsolatedScanner}.  By default it is not enabled.
-	 *
-	 * @param job
-	 * @param enable
-	 */
-	public static void setIsolated(JobContext job, boolean enable){
-		Configuration conf = job.getConfiguration();
-		conf.setBoolean(ISOLATED, enable);
-	}
-
-	public static void setInputInfo(JobContext job, String user, byte[] passwd, String table, Authorizations auths) {
-		Configuration conf = job.getConfiguration();
-		if (conf.getBoolean(INPUT_INFO_HAS_BEEN_SET, false))
-			throw new IllegalStateException("Input info can only be set once per job");
-		conf.setBoolean(INPUT_INFO_HAS_BEEN_SET, true);
-
-		ArgumentChecker.notNull(user, passwd, table);
-		conf.set(USERNAME, user);
-		conf.set(PASSWORD, new String(Base64.encodeBase64(passwd)));
-		conf.set(TABLE_NAME, table);
-		if (auths != null && !auths.isEmpty())
-			conf.set(AUTHORIZATIONS, auths.serialize());
-	}
-
-	public static void setZooKeeperInstance(JobContext job, String instanceName, String zooKeepers) {
-		Configuration conf = job.getConfiguration();
-		if (conf.getBoolean(INSTANCE_HAS_BEEN_SET, false))
-			throw new IllegalStateException("Instance info can only be set once per job");
-		conf.setBoolean(INSTANCE_HAS_BEEN_SET, true);
-
-		ArgumentChecker.notNull(instanceName, zooKeepers);
-		conf.set(INSTANCE_NAME, instanceName);
-		conf.set(ZOOKEEPERS, zooKeepers);
-	}
-
-	public static void setMockInstance(JobContext job, String instanceName) {
-	    Configuration conf = job.getConfiguration();
-	    conf.setBoolean(INSTANCE_HAS_BEEN_SET, true);
-	    conf.setBoolean(MOCK, true);
-		conf.set(INSTANCE_NAME, instanceName);
-	}
-
-	public static void setRanges(JobContext job, Collection<Range> ranges) {
-		ArgumentChecker.notNull(ranges);
-		ArrayList<String> rangeStrings = new ArrayList<String>(ranges.size());
-		try {
-		    for (Range r : ranges) {
-		        ByteArrayOutputStream baos = new ByteArrayOutputStream();
-		        r.write(new DataOutputStream(baos));
-		        rangeStrings.add(new String(Base64.encodeBase64(baos.toByteArray())));
-		    }
-		} catch (IOException ex) {
-		    throw new IllegalArgumentException("Unable to encode ranges to Base64", ex);
-		}
-		job.getConfiguration().setStrings(RANGES, rangeStrings.toArray(new String[0]));
-	}
-
-	public static void disableAutoAdjustRanges(JobContext job) {
-		job.getConfiguration().setBoolean(AUTO_ADJUST_RANGES, false);
-	}
-
-	public static enum RegexType {
-		ROW, COLUMN_FAMILY, COLUMN_QUALIFIER, VALUE
-	}
-
-	public static void setRegex(JobContext job, RegexType type, String regex) {
-		ArgumentChecker.notNull(type, regex);
-		String key = null;
-		switch (type) {
-		case ROW:
-			key = ROW_REGEX;
-			break;
-		case COLUMN_FAMILY:
-			key = COLUMN_FAMILY_REGEX;
-			break;
-		case COLUMN_QUALIFIER:
-			key = COLUMN_QUALIFIER_REGEX;
-			break;
-		case VALUE:
-			key = VALUE_REGEX;
-			break;
-		default:
-			throw new NoSuchElementException();
-		}
-		try {
-			job.getConfiguration().set(key, URLEncoder.encode(regex, "UTF-8"));
-		} catch (UnsupportedEncodingException e) {
-			log.error("Failed to encode regular expression", e);
-			throw new RuntimeException(e);
-		}
-	}
-
-
-
-
-	/**
-	 * Sets the max # of values that may be returned for an individual Cloudbase cell. By default, this is applied before all other
-	 * Cloudbase iterators (highest priority) used in the scan by the record reader.  To adjust the priority, use
-	 * setIterator() and setIteratorOption() with the VersioningIterator type explicitly.
-	 *
-	 * @param job the job
-	 * @param maxVersions the max versions
-	 * @throws java.io.IOException
-	 */
-	public static void setMaxVersions(JobContext job, int maxVersions) throws IOException{
-		if (maxVersions < 1) throw new IOException("Invalid maxVersions: " + maxVersions + ".  Must be >= 1");
-		job.getConfiguration().setInt(MAX_VERSIONS, maxVersions);
-	}
-
-	/**
-	 *
-	 * @param columnFamilyColumnQualifierPairs
-	 *            A pair of {@link org.apache.hadoop.io.Text} objects corresponding to column family
-	 *            and column qualifier. If the column qualifier is null, the
-	 *            entire column family is selected. An empty set is the default
-	 *            and is equivalent to scanning all the columns.
-	 */
-	public static void fetchColumns(JobContext job, Collection<Pair<Text, Text>> columnFamilyColumnQualifierPairs) {
-		ArgumentChecker.notNull(columnFamilyColumnQualifierPairs);
-		ArrayList<String> columnStrings = new ArrayList<String>(columnFamilyColumnQualifierPairs.size());
-		for (Pair<Text, Text> column : columnFamilyColumnQualifierPairs) {
-			if(column.getFirst() == null)
-				throw new IllegalArgumentException("Column family can not be null");
-
-			String col = new String(Base64.encodeBase64(TextUtil.getBytes(column.getFirst())));
-			if (column.getSecond() != null)
-				col += ":" + new String(Base64.encodeBase64(TextUtil.getBytes(column.getSecond())));
-			columnStrings.add(col);
-		}
-		job.getConfiguration().setStrings(COLUMNS, columnStrings.toArray(new String[0]));
-	}
-//
-//	public static void setLogLevel(JobContext job, Level level) {
-//		ArgumentChecker.notNull(level);
-//		log.setLevel(level);
-//		job.getConfiguration().setInt(LOGLEVEL, level.toInt());
-//	}
-
-
-	/**
-	 * Specify a Cloudbase iterator type to manage the behavior of the underlying table scan this InputFormat's Record Reader will conduct, w/ priority dictating the order
-	 * in which specified iterators are applied. Repeat calls to specify multiple iterators are allowed.
-	 *
-	 * @param job the job
-	 * @param priority the priority
-	 * @param iteratorClass the iterator class
-	 * @param iteratorName the iterator name
-	 */
-	public static void setIterator(JobContext job, int priority, String iteratorClass, String iteratorName){
-		//First check to see if anything has been set already
-		String iterators = job.getConfiguration().get(ITERATORS);
-
-		//No iterators specified yet, create a new string
-		if (iterators == null || iterators.isEmpty()) {
-			iterators = new CBIterator(priority, iteratorClass, iteratorName).toString();
-		}
-		else {
-			//append the next iterator & reset
-			iterators = iterators.concat(ITERATORS_DELIM + new CBIterator(priority, iteratorClass, iteratorName).toString());
-		}
-		//Store the iterators w/ the job
-		job.getConfiguration().set(ITERATORS, iterators);
-	}
-
-
-	/**
-	 * Specify an option for a named Cloudbase iterator, further specifying that iterator's
-	 * behavior.
-	 *
-	 * @param job the job
-	 * @param iteratorName the iterator name.  Should correspond to an iterator set w/ a prior setIterator call.
-	 * @param key the key
-	 * @param value the value
-	 */
-	public static void setIteratorOption(JobContext job, String iteratorName, String key, String value){
-	    if (value == null) return;
-
-		String iteratorOptions = job.getConfiguration().get(ITERATORS_OPTIONS);
-
-		//No options specified yet, create a new string
-		if (iteratorOptions == null || iteratorOptions.isEmpty()){
-			iteratorOptions = new CBIteratorOption(iteratorName, key, value).toString();
-		}
-		else {
-			//append the next option & reset
-			iteratorOptions = iteratorOptions.concat(ITERATORS_DELIM + new CBIteratorOption(iteratorName, key, value));
-		}
-
-		//Store the options w/ the job
-		job.getConfiguration().set(ITERATORS_OPTIONS, iteratorOptions);
-	}
-
-	protected static boolean isIsolated(JobContext job){
-		return job.getConfiguration().getBoolean(ISOLATED, false);
-	}
-
-	protected static String getUsername(JobContext job) {
-		return job.getConfiguration().get(USERNAME);
-	}
-
-
-	/**
-	 * WARNING: The password is stored in the Configuration and shared with all
-	 * MapReduce tasks; It is BASE64 encoded to provide a charset safe
-	 * conversion to a string, and is not intended to be secure.
-	 */
-	protected static byte[] getPassword(JobContext job) {
-		return Base64.decodeBase64(job.getConfiguration().get(PASSWORD, "").getBytes());
-	}
-
-	protected static String getTablename(JobContext job) {
-		return job.getConfiguration().get(TABLE_NAME);
-	}
-
-	protected static Authorizations getAuthorizations(JobContext job) {
-		String authString = job.getConfiguration().get(AUTHORIZATIONS);
-		return authString == null ? CBConstants.NO_AUTHS : new Authorizations(authString.split(","));
-	}
-
-	protected static Instance getInstance(JobContext job) {
-		Configuration conf = job.getConfiguration();
-//		if (conf.getBoolean(MOCK, false))
-//		    return new MockInstance(conf.get(INSTANCE_NAME));
-		return new ZooKeeperInstance(conf.get(INSTANCE_NAME), conf.get(ZOOKEEPERS));
-	}
-
-	protected static TabletLocator getTabletLocator(JobContext job) throws TableNotFoundException {
-//		if (job.getConfiguration().getBoolean(MOCK, false))
-//			return new MockTabletLocator();
-		Instance instance = getInstance(job);
-		String username = getUsername(job);
-		byte[] password = getPassword(job);
-		String tableName = getTablename(job);
-		return TabletLocator.getInstance(instance, new AuthInfo(username, password, instance.getInstanceID()), new Text(Tables.getTableId(instance, tableName)));
-	}
-
-	protected static List<Range> getRanges(JobContext job) throws IOException {
-		ArrayList<Range> ranges = new ArrayList<Range>();
-		for (String rangeString : job.getConfiguration().getStringCollection(RANGES)) {
-			ByteArrayInputStream bais = new ByteArrayInputStream(Base64.decodeBase64(rangeString.getBytes()));
-			Range range = new Range();
-			range.readFields(new DataInputStream(bais));
-			ranges.add(range);
-		}
-		return ranges;
-	}
-
-	protected static String getRegex(JobContext job, RegexType type) {
-		String key = null;
-		switch (type) {
-		case ROW:
-			key = ROW_REGEX;
-			break;
-		case COLUMN_FAMILY:
-			key = COLUMN_FAMILY_REGEX;
-			break;
-		case COLUMN_QUALIFIER:
-			key = COLUMN_QUALIFIER_REGEX;
-			break;
-		case VALUE:
-			key = VALUE_REGEX;
-			break;
-		default:
-			throw new NoSuchElementException();
-		}
-		try {
-			String s = job.getConfiguration().get(key);
-			if(s == null)
-				return null;
-			return URLDecoder.decode(s,"UTF-8");
-		} catch (UnsupportedEncodingException e) {
-			log.error("Failed to decode regular expression", e);
-			throw new RuntimeException(e);
-		}
-	}
-
-	protected static Set<Pair<Text, Text>> getFetchedColumns(JobContext job) {
-		Set<Pair<Text, Text>> columns = new HashSet<Pair<Text, Text>>();
-		for (String col : job.getConfiguration().getStringCollection(COLUMNS)) {
-			int idx = col.indexOf(":");
-			Text cf = new Text(idx < 0 ? Base64.decodeBase64(col.getBytes()) : Base64.decodeBase64(col.substring(0, idx).getBytes()));
-			Text cq = idx < 0 ? null : new Text(Base64.decodeBase64(col.substring(idx + 1).getBytes()));
-			columns.add(new Pair<Text, Text>(cf, cq));
-		}
-		return columns;
-	}
-
-	protected static boolean getAutoAdjustRanges(JobContext job) {
-		return job.getConfiguration().getBoolean(AUTO_ADJUST_RANGES, true);
-	}
-
-	protected static Level getLogLevel(JobContext job) {
-		return Level.toLevel(job.getConfiguration().getInt(LOGLEVEL, Level.INFO.toInt()));
-	}
-
-	// InputFormat doesn't have the equivalent of OutputFormat's
-	// checkOutputSpecs(JobContext job)
-	protected static void validateOptions(JobContext job) throws IOException {
-		Configuration conf = job.getConfiguration();
-		if (!conf.getBoolean(INPUT_INFO_HAS_BEEN_SET, false))
-			throw new IOException("Input info has not been set.");
-		if (!conf.getBoolean(INSTANCE_HAS_BEEN_SET, false))
-			throw new IOException("Instance info has not been set.");
-		// validate that we can connect as configured
-		try {
-			Connector c = getInstance(job).getConnector(getUsername(job), getPassword(job));
-			if (!c.securityOperations().authenticateUser(getUsername(job), getPassword(job)))
-				throw new IOException("Unable to authenticate user");
-			if (!c.securityOperations().hasTablePermission(getUsername(job), getTablename(job), TablePermission.READ))
-				throw new IOException("Unable to access table");
-		} catch (CBException e) {
-			throw new IOException(e);
-		} catch (CBSecurityException e) {
-			throw new IOException(e);
-		}
-	}
-
-	//Get the maxVersions the VersionsIterator should be configured with.  Return -1 if none.
-	protected static int getMaxVersions(JobContext job) {
-		return job.getConfiguration().getInt(MAX_VERSIONS, -1);
-	}
-
-
-	//Return a list of the iterator settings (for iterators to apply to a scanner)
-	protected static List<CBIterator> getIterators(JobContext job){
-
-		String iterators = job.getConfiguration().get(ITERATORS);
-
-		//If no iterators are present, return an empty list
-		if (iterators == null || iterators.isEmpty()) return new ArrayList<CBIterator>();
-
-		//Compose the set of iterators encoded in the job configuration
-		StringTokenizer tokens = new StringTokenizer(job.getConfiguration().get(ITERATORS),ITERATORS_DELIM);
-		List<CBIterator> list = new ArrayList<CBIterator>();
-		while(tokens.hasMoreTokens()){
-			String itstring = tokens.nextToken();
-			list.add(new CBIterator(itstring));
-		}
-		return list;
-	}
-
-
-	//Return a list of the iterator options specified
-	protected static List<CBIteratorOption> getIteratorOptions(JobContext job){
-		String iteratorOptions = job.getConfiguration().get(ITERATORS_OPTIONS);
-
-		//If no options are present, return an empty list
-		if (iteratorOptions == null || iteratorOptions.isEmpty()) return new ArrayList<CBIteratorOption>();
-
-		//Compose the set of options encoded in the job configuration
-		StringTokenizer tokens = new StringTokenizer(job.getConfiguration().get(ITERATORS_OPTIONS), ITERATORS_DELIM);
-		List<CBIteratorOption> list = new ArrayList<CBIteratorOption>();
-		while (tokens.hasMoreTokens()){
-			String optionString = tokens.nextToken();
-			list.add(new CBIteratorOption(optionString));
-		}
-		return list;
-	}
-
-
-
-
-	@Override
-	public RecordReader<Key, Value> createRecordReader(InputSplit inSplit, TaskAttemptContext attempt) throws IOException, InterruptedException {
-//		log.setLevel(getLogLevel(attempt));
-		return new RecordReader<Key, Value>() {
-			private int recordsRead;
-			private Iterator<Entry<Key, Value>> scannerIterator;
-			private boolean scannerRegexEnabled = false;
-			private RangeInputSplit split;
-
-			private void checkAndEnableRegex(String regex, BatchScanner scanner, String CBIMethodName) throws IllegalArgumentException, SecurityException, IllegalAccessException, InvocationTargetException, NoSuchMethodException, IOException {
-				if (regex != null) {
-					if (scannerRegexEnabled == false) {
-						scanner.setupRegex(PREFIX + ".regex.iterator", 50);
-						scannerRegexEnabled = true;
-					}
-					scanner.getClass().getMethod(CBIMethodName, String.class).invoke(scanner, regex);
-					log.info("Setting " + CBIMethodName + " to " + regex);
-				}
-			}
-
-			private boolean setupRegex(TaskAttemptContext attempt, BatchScanner scanner) throws CBException {
-				try {
-					checkAndEnableRegex(getRegex(attempt, RegexType.ROW), scanner, "setRowRegex");
-					checkAndEnableRegex(getRegex(attempt, RegexType.COLUMN_FAMILY), scanner, "setColumnFamilyRegex");
-					checkAndEnableRegex(getRegex(attempt, RegexType.COLUMN_QUALIFIER), scanner, "setColumnQualifierRegex");
-					checkAndEnableRegex(getRegex(attempt, RegexType.VALUE), scanner, "setValueRegex");
-					return true;
-				} catch (Exception e) {
-					throw new CBException("Can't set up regex for scanner");
-				}
-			}
-
-			//Apply the configured iterators from the job to the scanner
-			private void setupIterators(TaskAttemptContext attempt, BatchScanner scanner) throws CBException {
-				List<CBIterator> iterators = getIterators(attempt);
-				List<CBIteratorOption> options = getIteratorOptions(attempt);
-
-				//Loop through the iterators & options, wiring them up to the scanner.
-				try {
-					for(CBIterator iterator: iterators){
-						scanner.setScanIterators(iterator.getPriority(), iterator.getIteratorClass(), iterator.getIteratorName());
-					}
-					for (CBIteratorOption option: options){
-						scanner.setScanIteratorOption(option.getIteratorName(), option.getKey(), option.getValue());
-					}
-				}
-				catch (Exception e) {
-					throw new CBException(e);
-				}
-			}
-
-			//Apply the VersioningIterator at priority 0 based on the job config
-			private void setupMaxVersions(TaskAttemptContext attempt, BatchScanner scanner) throws CBException {
-				int maxVersions = getMaxVersions(attempt);
-				//Check to make sure its a legit value
-				if (maxVersions >= 1) {
-					try {
-						scanner.setScanIterators(0, cloudbase.core.iterators.VersioningIterator.class.getName(), "vers");
-					}
-					catch (Exception e){
-						throw new CBException(e);
-					}
-					scanner.setScanIteratorOption("vers", "maxVersions", Integer.toString(maxVersions));
-				}
-			}
-
-			public void initialize(InputSplit inSplit, TaskAttemptContext attempt) throws IOException {
-                split = (RangeInputSplit) inSplit;
-				log.debug("Initializing input split: " + split.range);
-				Instance instance = getInstance(attempt);
-				String user = getUsername(attempt);
-				byte[] password = getPassword(attempt);
-				Authorizations authorizations = getAuthorizations(attempt);
-
-				try {
-					log.debug("Creating connector with user: " + user);
-					Connector conn = instance.getConnector(user, password);
-					log.debug("Creating scanner for table: " + getTablename(attempt));
-					log.debug("Authorizations are: " + authorizations);
-					bScanner = conn.createBatchScanner(getTablename(attempt), authorizations, 10);
-//					if(isIsolated(attempt)){
-//						log.info("Creating isolated scanner");
-//						bScanner = new IsolatedScanner(bScanner);
-//					}
-					setupMaxVersions(attempt, bScanner);
-					setupRegex(attempt, bScanner);
-					setupIterators(attempt, bScanner);
-				} catch (Exception e) {
-					throw new IOException(e);
-				}
-
-				// setup a scanner within the bounds of this split
-				for (Pair<Text, Text> c : getFetchedColumns(attempt)) {
-					if (c.getSecond() != null)
-						bScanner.fetchColumn(c.getFirst(), c.getSecond());
-					else
-						bScanner.fetchColumnFamily(c.getFirst());
-				}
-
-				bScanner.setRanges(Collections.singleton(split.range));
-
-				recordsRead = 0;
-
-				// do this last after setting all scanner options
-				scannerIterator = bScanner.iterator();
-			}
-
-			public void close() {
-                bScanner.close();
-			}
-
-			public float getProgress() throws IOException {
-				if(recordsRead > 0 && currentKey == null)
-					return 1.0f;
-				return split.getProgress(currentKey);
-			}
-
-			private Key currentKey = null;
-			private Value currentValue = null;
-
-			@Override
-			public Key getCurrentKey() throws IOException, InterruptedException {
-				return currentKey;
-			}
-
-			@Override
-			public Value getCurrentValue() throws IOException, InterruptedException {
-				return currentValue;
-			}
-
-			@Override
-			public boolean nextKeyValue() throws IOException, InterruptedException {
-				if (scannerIterator.hasNext()) {
-					++recordsRead;
-					Entry<Key, Value> entry = scannerIterator.next();
-					currentKey = entry.getKey();
-					currentValue = entry.getValue();
-					if (log.isTraceEnabled())
-						log.trace("Processing key/value pair: " + DefaultFormatter.formatEntry(entry, true));
-					return true;
-				}
-				return false;
-			}
-		};
-	}
-
-	/**
-	 * read the metadata table to get tablets of interest these each become a
-	 * split
-	 */
-	public List<InputSplit> getSplits(JobContext job) throws IOException {
-//		log.setLevel(getLogLevel(job));
-		validateOptions(job);
-
-		String tableName = getTablename(job);
-		boolean autoAdjust = getAutoAdjustRanges(job);
-		List<Range> ranges = autoAdjust ? Range.mergeOverlapping(getRanges(job)) : getRanges(job);
-
-		if (ranges.isEmpty()) {
-			ranges = new ArrayList<Range>(1);
-			ranges.add(new Range());
-		}
-
-		// get the metadata information for these ranges
-		Map<String, Map<KeyExtent, List<Range>>> binnedRanges = new HashMap<String, Map<KeyExtent, List<Range>>>();
-		TabletLocator tl;
-		try {
-			tl = getTabletLocator(job);
-			while (!tl.binRanges(ranges, binnedRanges).isEmpty()) {
-				log.warn("Unable to locate bins for specified ranges. Retrying.");
-				UtilWaitThread.sleep(100 + (int) (Math.random() * 100)); // sleep randomly between 100 and 200 ms
-			}
-		} catch (Exception e) {
-			throw new IOException(e);
-		}
-
-		ArrayList<InputSplit> splits = new ArrayList<InputSplit>(ranges.size());
-		HashMap<Range, ArrayList<String>> splitsToAdd = null;
-
-		if (!autoAdjust)
-			splitsToAdd = new HashMap<Range, ArrayList<String>>();
-
-		HashMap<String,String> hostNameCache = new HashMap<String,String>();
-
-		for (Entry<String, Map<KeyExtent, List<Range>>> tserverBin : binnedRanges.entrySet()) {
-			String ip = tserverBin.getKey().split(":", 2)[0];
-			String location = hostNameCache.get(ip);
-			if (location == null) {
-				InetAddress inetAddress = InetAddress.getByName(ip);
-				location = inetAddress.getHostName();
-				hostNameCache.put(ip, location);
-			}
-
-			for (Entry<KeyExtent, List<Range>> extentRanges : tserverBin.getValue().entrySet()) {
-				Range ke = extentRanges.getKey().toDataRange();
-				for (Range r : extentRanges.getValue()) {
-					if (autoAdjust) {
-						// divide ranges into smaller ranges, based on the
-						// tablets
-						splits.add(new RangeInputSplit(tableName, ke.clip(r), new String[] { location }));
-					} else {
-						// don't divide ranges
-						ArrayList<String> locations = splitsToAdd.get(r);
-						if (locations == null)
-							locations = new ArrayList<String>(1);
-						locations.add(location);
-						splitsToAdd.put(r, locations);
-					}
-				}
-			}
-		}
-
-		if (!autoAdjust)
-			for (Entry<Range, ArrayList<String>> entry : splitsToAdd.entrySet())
-				splits.add(new RangeInputSplit(tableName, entry.getKey(), entry.getValue().toArray(new String[0])));
-		return splits;
-	}
-
-
-
-	/**
-	 * The Class RangeInputSplit.  Encapsulates a Cloudbase range for use in MapReduce jobs.
-	 */
-	public static class RangeInputSplit extends InputSplit implements Writable {
-		private Range range;
-		private String[] locations;
-
-		public RangeInputSplit() {
-			range = new Range();
-			locations = new String[0];
-		}
-
-		private static byte[] extractBytes(ByteSequence seq, int numBytes)
-		{
-			byte [] bytes = new byte[numBytes+1];
-			bytes[0] = 0;
-			for(int i = 0; i < numBytes; i++)
-			{
-				if(i >= seq.length())
-					bytes[i+1] = 0;
-				else
-					bytes[i+1] = seq.byteAt(i);
-			}
-			return bytes;
-		}
-
-		public static float getProgress(ByteSequence start, ByteSequence end, ByteSequence position)
-		{
-			int maxDepth = Math.min(Math.max(end.length(),start.length()),position.length());
-			BigInteger startBI = new BigInteger(extractBytes(start,maxDepth));
-			BigInteger endBI = new BigInteger(extractBytes(end,maxDepth));
-			BigInteger positionBI = new BigInteger(extractBytes(position,maxDepth));
-			return (float)(positionBI.subtract(startBI).doubleValue() / endBI.subtract(startBI).doubleValue());
-		}
-
-		public float getProgress(Key currentKey) {
-			if(currentKey == null)
-				return 0f;
-			if(range.getStartKey() != null && range.getEndKey() != null)
-			{
-				if(range.getStartKey().compareTo(range.getEndKey(), PartialKey.ROW)!= 0)
-				{
-					// just look at the row progress
-					return getProgress(range.getStartKey().getRowData(),range.getEndKey().getRowData(),currentKey.getRowData());
-				}
-				else if(range.getStartKey().compareTo(range.getEndKey(), PartialKey.ROW_COLFAM)!= 0)
-				{
-					// just look at the column family progress
-					return getProgress(range.getStartKey().getColumnFamilyData(),range.getEndKey().getColumnFamilyData(),currentKey.getColumnFamilyData());
-				}
-				else if(range.getStartKey().compareTo(range.getEndKey(), PartialKey.ROW_COLFAM_COLQUAL)!= 0)
-				{
-					// just look at the column qualifier progress
-					return getProgress(range.getStartKey().getColumnQualifierData(),range.getEndKey().getColumnQualifierData(),currentKey.getColumnQualifierData());
-				}
-			}
-			// if we can't figure it out, then claim no progress
-			return 0f;
-		}
-
-		RangeInputSplit(String table, Range range, String[] locations) {
-			this.range = range;
-			this.locations = locations;
-		}
-
-	    /**
-	     * @deprecated Since 1.3; Don't use this method to compute any reasonable distance metric.
-	     */
-		@Deprecated
-		public long getLength() throws IOException {
-			Text startRow = range.isInfiniteStartKey() ? new Text(new byte[] { Byte.MIN_VALUE }) : range.getStartKey().getRow();
-			Text stopRow = range.isInfiniteStopKey() ? new Text(new byte[] { Byte.MAX_VALUE }) : range.getEndKey().getRow();
-			int maxCommon = Math.min(7, Math.min(startRow.getLength(), stopRow.getLength()));
-			long diff = 0;
-
-			byte[] start = startRow.getBytes();
-			byte[] stop = stopRow.getBytes();
-			for (int i = 0; i < maxCommon; ++i) {
-				diff |= 0xff & (start[i] ^ stop[i]);
-				diff <<= Byte.SIZE;
-			}
-
-			if (startRow.getLength() != stopRow.getLength())
-				diff |= 0xff;
-
-			return diff + 1;
-		}
-
-		public String[] getLocations() throws IOException {
-			return locations;
-		}
-
-		public void readFields(DataInput in) throws IOException {
-			range.readFields(in);
-			int numLocs = in.readInt();
-			locations = new String[numLocs];
-			for (int i = 0; i < numLocs; ++i)
-				locations[i] = in.readUTF();
-		}
-
-		public void write(DataOutput out) throws IOException {
-			range.write(out);
-			out.writeInt(locations.length);
-			for (int i = 0; i < locations.length; ++i)
-				out.writeUTF(locations[i]);
-		}
-	}
-
-	/**
-	 * The Class CBIterator.  Encapsulates specifics for a Cloudbase iterator's name, class & priority.
-	 */
-	static class CBIterator{
-
-		private static final String FIELD_SEP = ":";
-
-		private int priority;
-		private String iteratorClass;
-		private String iteratorName;
-
-
-		public CBIterator (int priority, String iteratorClass, String iteratorName){
-			this.priority = priority;
-			this.iteratorClass = iteratorClass;
-			this.iteratorName = iteratorName;
-		}
-
-		//Parses out a setting given an string supplied from an earlier toString() call
-		public CBIterator (String iteratorSetting){
-			//Parse the string to expand the iterator
-			StringTokenizer tokenizer = new StringTokenizer(iteratorSetting, FIELD_SEP);
-			priority = Integer.parseInt(tokenizer.nextToken());
-			iteratorClass = tokenizer.nextToken();
-			iteratorName = tokenizer.nextToken();
-		}
-
-		public int getPriority() {
-			return priority;
-		}
-
-		public String getIteratorClass() {
-			return iteratorClass;
-		}
-
-		public String getIteratorName() {
-			return iteratorName;
-		}
-
-		@Override
-		public String toString(){
-			return priority + FIELD_SEP + iteratorClass + FIELD_SEP + iteratorName;
-		}
-
-	}
-
-	/**
-	 * The Class CBIteratorOption. Encapsulates specifics for a Cloudbase iterator's optional configuration
-	 * details - associated via the iteratorName.
-	 */
-	static class CBIteratorOption {
-		private static final String FIELD_SEP = ":";
-
-		private String iteratorName;
-		private String key;
-		private String value;
-
-		public CBIteratorOption(String iteratorName, String key, String value){
-			this.iteratorName = iteratorName;
-			this.key = key;
-			this.value = value;
-		}
-
-		//Parses out an option given a string supplied from an earlier toString() call
-		public CBIteratorOption(String iteratorOption){
-			StringTokenizer tokenizer = new StringTokenizer(iteratorOption, FIELD_SEP);
-			this.iteratorName = tokenizer.nextToken();
-			this.key = tokenizer.nextToken();
-			this.value = tokenizer.nextToken();
-		}
-
-		public String getIteratorName() {
-			return iteratorName;
-		}
-
-		public String getKey() {
-			return key;
-		}
-
-		public String getValue() {
-			return value;
-		}
-
-		@Override
-		public String toString() {
-			return iteratorName + FIELD_SEP + key + FIELD_SEP + value;
-		}
-
-	}
-
-}

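A note on the progress arithmetic in the deleted RangeInputSplit above: it
left-pads the start, end, and current byte sequences to a common depth,
interprets each as an unsigned BigInteger, and reports the position's
fractional distance between start and end. Below is a minimal, self-contained
sketch of that idea (illustrative only -- plain byte arrays stand in for
Cloudbase's ByteSequence, and the class name is invented):

    import java.math.BigInteger;

    // Sketch of RangeInputSplit-style progress estimation (not from the
    // commit). A leading zero byte keeps the BigInteger values non-negative,
    // so the comparison behaves as an unsigned one.
    public class ProgressSketch {

        private static byte[] extractBytes(byte[] seq, int numBytes) {
            byte[] bytes = new byte[numBytes + 1];
            bytes[0] = 0; // force a non-negative BigInteger
            for (int i = 0; i < numBytes; i++) {
                bytes[i + 1] = (i < seq.length) ? seq[i] : 0;
            }
            return bytes;
        }

        public static float getProgress(byte[] start, byte[] end, byte[] position) {
            int maxDepth = Math.min(Math.max(end.length, start.length), position.length);
            BigInteger startBI = new BigInteger(extractBytes(start, maxDepth));
            BigInteger endBI = new BigInteger(extractBytes(end, maxDepth));
            BigInteger positionBI = new BigInteger(extractBytes(position, maxDepth));
            return (float) (positionBI.subtract(startBI).doubleValue()
                    / endBI.subtract(startBI).doubleValue());
        }

        public static void main(String[] args) {
            // "c" sits halfway between "a" and "e", so this prints 0.5
            System.out.println(getProgress("a".getBytes(), "e".getBytes(), "c".getBytes()));
        }
    }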
http://git-wip-us.apache.org/repos/asf/incubator-rya/blob/80faf06d/utils/cloudbase.utils/src/main/java/mvm/rya/cloudbase/utils/scanner/BatchScannerList.java
----------------------------------------------------------------------
diff --git a/utils/cloudbase.utils/src/main/java/mvm/rya/cloudbase/utils/scanner/BatchScannerList.java b/utils/cloudbase.utils/src/main/java/mvm/rya/cloudbase/utils/scanner/BatchScannerList.java
deleted file mode 100644
index 76b9e22..0000000
--- a/utils/cloudbase.utils/src/main/java/mvm/rya/cloudbase/utils/scanner/BatchScannerList.java
+++ /dev/null
@@ -1,108 +0,0 @@
-package mvm.rya.cloudbase.utils.scanner;
-
-import cloudbase.core.client.*;
-import cloudbase.core.data.Key;
-import cloudbase.core.data.Range;
-import cloudbase.core.data.Value;
-import cloudbase.core.util.ArgumentChecker;
-import com.google.common.collect.Iterators;
-import org.apache.hadoop.io.Text;
-
-import java.io.IOException;
-import java.util.*;
-
-/**
- * A {@link BatchScanner} composite: configuration calls fan out to every
- * underlying scanner, and iteration concatenates the per-scanner results.
- */
-public class BatchScannerList implements BatchScanner{
-    private List<BatchScanner> scanners = new ArrayList<BatchScanner>();
-
-    public BatchScannerList(List<BatchScanner> scanners) {
-        this.scanners = scanners;
-    }
-
-    //setRanges
-    public void setRanges(Collection<Range> ranges) {
-        ArgumentChecker.notNull(ranges);
-        for(BatchScanner scanner : scanners) {
-            scanner.setRanges(ranges);
-        }
-    }
-
-    public Iterator<Map.Entry<Key, Value>> iterator() {
-        List<Iterator<Map.Entry<Key,Value>>> iterators = new ArrayList<Iterator<Map.Entry<Key, Value>>>();
-        for(BatchScanner scanner: scanners) {
-            iterators.add(scanner.iterator());
-        }
-        return Iterators.concat(iterators.toArray(new Iterator[]{}));
-    }
-
-    public void close() {
-        for(BatchScanner scanner: scanners) {
-            scanner.close();
-        }
-    }
-
-    public void setScanIterators(int i, String s, String s1) throws IOException {
-        for(BatchScanner scanner: scanners) {
-            scanner.setScanIterators(i, s, s1);
-        }
-    }
-
-    public void setScanIteratorOption(String s, String s1, String s2) {
-        for(BatchScanner scanner: scanners) {
-            scanner.setScanIteratorOption(s, s1, s2);
-        }
-    }
-
-    @Override
-    public void setupRegex(String s, int i) throws IOException {
-        // no-op: regex iterators are not configured for the scanner list
-    }
-
-    @Override
-    public void setRowRegex(String s) {
-        // no-op: row regex filtering is not supported by this composite
-    }
-
-    @Override
-    public void setColumnFamilyRegex(String s) {
-        // no-op: column family regex filtering is not supported by this composite
-    }
-
-    @Override
-    public void setColumnQualifierRegex(String s) {
-        // no-op: column qualifier regex filtering is not supported by this composite
-    }
-
-    @Override
-    public void setValueRegex(String s) {
-        // no-op: value regex filtering is not supported by this composite
-    }
-
-    public void fetchColumnFamily(Text cf) {
-        for(BatchScanner scanner: scanners) {
-            scanner.fetchColumnFamily(cf);
-        }
-    }
-
-    public void fetchColumn(Text cf, Text cq) {
-        for(BatchScanner scanner: scanners) {
-            scanner.fetchColumn(cf, cq);
-        }
-    }
-
-    @Override
-    public void clearColumns() {
-        // no-op: clearing columns is not forwarded to the delegate scanners
-    }
-
-    @Override
-    public void clearScanIterators() {
-        // no-op: clearing scan iterators is not forwarded to the delegate scanners
-    }
-
-}

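The deleted BatchScannerList is a plain composite: every configuration call
fans out to each underlying scanner, and iteration is the concatenation of
the per-scanner iterators (via Guava's Iterators.concat). A toy,
dependency-free sketch of the same pattern (illustrative only; the interface
and names are invented, not Cloudbase API):

    import java.util.Collections;
    import java.util.Iterator;
    import java.util.List;
    import java.util.NoSuchElementException;

    interface ToyScanner {
        void setRange(String range);      // stands in for setRanges(...)
        Iterator<String> iterator();
    }

    class ToyScannerList implements ToyScanner {
        private final List<ToyScanner> scanners;

        ToyScannerList(List<ToyScanner> scanners) { this.scanners = scanners; }

        @Override
        public void setRange(String range) {
            for (ToyScanner s : scanners) { // fan out, as BatchScannerList does
                s.setRange(range);
            }
        }

        @Override
        public Iterator<String> iterator() {
            // Lazily walk each delegate's iterator in turn, like Iterators.concat.
            final Iterator<ToyScanner> outer = scanners.iterator();
            return new Iterator<String>() {
                private Iterator<String> current = Collections.emptyIterator();

                @Override
                public boolean hasNext() {
                    while (!current.hasNext() && outer.hasNext()) {
                        current = outer.next().iterator();
                    }
                    return current.hasNext();
                }

                @Override
                public String next() {
                    if (!hasNext()) throw new NoSuchElementException();
                    return current.next();
                }
            };
        }
    }
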

[40/56] [abbrv] incubator-rya git commit: RYA-7 POM and License Clean-up for Apache Move

Posted by mi...@apache.org.
http://git-wip-us.apache.org/repos/asf/incubator-rya/blob/80faf06d/extras/indexing/src/main/java/mvm/rya/indexing/accumulo/freetext/AccumuloFreeTextIndexer.java
----------------------------------------------------------------------
diff --git a/extras/indexing/src/main/java/mvm/rya/indexing/accumulo/freetext/AccumuloFreeTextIndexer.java b/extras/indexing/src/main/java/mvm/rya/indexing/accumulo/freetext/AccumuloFreeTextIndexer.java
index 807a387..f529569 100644
--- a/extras/indexing/src/main/java/mvm/rya/indexing/accumulo/freetext/AccumuloFreeTextIndexer.java
+++ b/extras/indexing/src/main/java/mvm/rya/indexing/accumulo/freetext/AccumuloFreeTextIndexer.java
@@ -1,32 +1,32 @@
 package mvm.rya.indexing.accumulo.freetext;
 
 /*
- * #%L
- * mvm.rya.indexing.accumulo
- * %%
- * Copyright (C) 2014 Rya
- * %%
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * You may obtain a copy of the License at
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
  * 
- *      http://www.apache.org/licenses/LICENSE-2.0
+ *   http://www.apache.org/licenses/LICENSE-2.0
  * 
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- * #L%
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied.  See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
  */
 
+
+
 import static mvm.rya.indexing.accumulo.freetext.query.ASTNodeUtils.getNodeIterator;
 import info.aduna.iteration.CloseableIteration;
 
 import java.io.IOException;
 import java.nio.charset.CharacterCodingException;
 import java.util.ArrayList;
-import java.util.Collection;
 import java.util.HashSet;
 import java.util.Iterator;
 import java.util.List;
@@ -35,12 +35,8 @@ import java.util.Set;
 import java.util.SortedSet;
 import java.util.TreeSet;
 
-import mvm.rya.accumulo.AccumuloRdfConfiguration;
 import mvm.rya.accumulo.experimental.AbstractAccumuloIndexer;
-import mvm.rya.accumulo.experimental.AccumuloIndexer;
-import mvm.rya.api.RdfCloudTripleStoreConfiguration;
 import mvm.rya.api.domain.RyaStatement;
-import mvm.rya.api.domain.RyaURI;
 import mvm.rya.api.resolver.RyaToRdfConversions;
 import mvm.rya.indexing.FreeTextIndexer;
 import mvm.rya.indexing.StatementContraints;

http://git-wip-us.apache.org/repos/asf/incubator-rya/blob/80faf06d/extras/indexing/src/main/java/mvm/rya/indexing/accumulo/freetext/ColumnPrefixes.java
----------------------------------------------------------------------
diff --git a/extras/indexing/src/main/java/mvm/rya/indexing/accumulo/freetext/ColumnPrefixes.java b/extras/indexing/src/main/java/mvm/rya/indexing/accumulo/freetext/ColumnPrefixes.java
index 49798c4..31666c9 100644
--- a/extras/indexing/src/main/java/mvm/rya/indexing/accumulo/freetext/ColumnPrefixes.java
+++ b/extras/indexing/src/main/java/mvm/rya/indexing/accumulo/freetext/ColumnPrefixes.java
@@ -1,25 +1,26 @@
 package mvm.rya.indexing.accumulo.freetext;
 
 /*
- * #%L
- * mvm.rya.indexing.accumulo
- * %%
- * Copyright (C) 2014 Rya
- * %%
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * You may obtain a copy of the License at
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
  * 
- *      http://www.apache.org/licenses/LICENSE-2.0
+ *   http://www.apache.org/licenses/LICENSE-2.0
  * 
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- * #L%
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied.  See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
  */
 
+
+
 import java.nio.ByteBuffer;
 import java.nio.charset.CharacterCodingException;
 

http://git-wip-us.apache.org/repos/asf/incubator-rya/blob/80faf06d/extras/indexing/src/main/java/mvm/rya/indexing/accumulo/freetext/FreeTextTupleSet.java
----------------------------------------------------------------------
diff --git a/extras/indexing/src/main/java/mvm/rya/indexing/accumulo/freetext/FreeTextTupleSet.java b/extras/indexing/src/main/java/mvm/rya/indexing/accumulo/freetext/FreeTextTupleSet.java
index ed55f62..471870b 100644
--- a/extras/indexing/src/main/java/mvm/rya/indexing/accumulo/freetext/FreeTextTupleSet.java
+++ b/extras/indexing/src/main/java/mvm/rya/indexing/accumulo/freetext/FreeTextTupleSet.java
@@ -1,5 +1,25 @@
 package mvm.rya.indexing.accumulo.freetext;
 
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ * 
+ *   http://www.apache.org/licenses/LICENSE-2.0
+ * 
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied.  See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+
+
 import info.aduna.iteration.CloseableIteration;
 
 import java.io.IOException;
@@ -10,7 +30,6 @@ import mvm.rya.indexing.IndexingExpr;
 import mvm.rya.indexing.IteratorFactory;
 import mvm.rya.indexing.SearchFunction;
 import mvm.rya.indexing.StatementContraints;
-import mvm.rya.indexing.accumulo.geo.GeoTupleSet;
 import mvm.rya.indexing.external.tupleSet.ExternalTupleSet;
 
 import org.apache.hadoop.conf.Configuration;

http://git-wip-us.apache.org/repos/asf/incubator-rya/blob/80faf06d/extras/indexing/src/main/java/mvm/rya/indexing/accumulo/freetext/LuceneTokenizer.java
----------------------------------------------------------------------
diff --git a/extras/indexing/src/main/java/mvm/rya/indexing/accumulo/freetext/LuceneTokenizer.java b/extras/indexing/src/main/java/mvm/rya/indexing/accumulo/freetext/LuceneTokenizer.java
index 20e9fd0..abda04a 100644
--- a/extras/indexing/src/main/java/mvm/rya/indexing/accumulo/freetext/LuceneTokenizer.java
+++ b/extras/indexing/src/main/java/mvm/rya/indexing/accumulo/freetext/LuceneTokenizer.java
@@ -1,25 +1,26 @@
 package mvm.rya.indexing.accumulo.freetext;
 
 /*
- * #%L
- * mvm.rya.indexing.accumulo
- * %%
- * Copyright (C) 2014 Rya
- * %%
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * You may obtain a copy of the License at
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
  * 
- *      http://www.apache.org/licenses/LICENSE-2.0
+ *   http://www.apache.org/licenses/LICENSE-2.0
  * 
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- * #L%
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied.  See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
  */
 
+
+
 import java.io.IOException;
 import java.io.StringReader;
 import java.util.SortedSet;

http://git-wip-us.apache.org/repos/asf/incubator-rya/blob/80faf06d/extras/indexing/src/main/java/mvm/rya/indexing/accumulo/freetext/SimpleTokenizer.java
----------------------------------------------------------------------
diff --git a/extras/indexing/src/main/java/mvm/rya/indexing/accumulo/freetext/SimpleTokenizer.java b/extras/indexing/src/main/java/mvm/rya/indexing/accumulo/freetext/SimpleTokenizer.java
index 4c189b9..e98e676 100644
--- a/extras/indexing/src/main/java/mvm/rya/indexing/accumulo/freetext/SimpleTokenizer.java
+++ b/extras/indexing/src/main/java/mvm/rya/indexing/accumulo/freetext/SimpleTokenizer.java
@@ -1,25 +1,26 @@
 package mvm.rya.indexing.accumulo.freetext;
 
 /*
- * #%L
- * mvm.rya.indexing.accumulo
- * %%
- * Copyright (C) 2014 Rya
- * %%
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * You may obtain a copy of the License at
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
  * 
- *      http://www.apache.org/licenses/LICENSE-2.0
+ *   http://www.apache.org/licenses/LICENSE-2.0
  * 
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- * #L%
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied.  See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
  */
 
+
+
 import java.util.SortedSet;
 import java.util.TreeSet;
 

http://git-wip-us.apache.org/repos/asf/incubator-rya/blob/80faf06d/extras/indexing/src/main/java/mvm/rya/indexing/accumulo/freetext/Tokenizer.java
----------------------------------------------------------------------
diff --git a/extras/indexing/src/main/java/mvm/rya/indexing/accumulo/freetext/Tokenizer.java b/extras/indexing/src/main/java/mvm/rya/indexing/accumulo/freetext/Tokenizer.java
index 23f4ae3..24b40cd 100644
--- a/extras/indexing/src/main/java/mvm/rya/indexing/accumulo/freetext/Tokenizer.java
+++ b/extras/indexing/src/main/java/mvm/rya/indexing/accumulo/freetext/Tokenizer.java
@@ -1,25 +1,26 @@
 package mvm.rya.indexing.accumulo.freetext;
 
 /*
- * #%L
- * mvm.rya.indexing.accumulo
- * %%
- * Copyright (C) 2014 Rya
- * %%
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * You may obtain a copy of the License at
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
  * 
- *      http://www.apache.org/licenses/LICENSE-2.0
+ *   http://www.apache.org/licenses/LICENSE-2.0
  * 
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- * #L%
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied.  See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
  */
 
+
+
 import java.util.SortedSet;
 
 /**
@@ -27,4 +28,4 @@ import java.util.SortedSet;
  */
 public interface Tokenizer {
 	public SortedSet<String> tokenize(String string);
-}
\ No newline at end of file
+}

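For reference, the Tokenizer contract above only requires returning the
distinct tokens of the input in sorted order. A minimal implementation in the
spirit of the project's SimpleTokenizer (an illustrative sketch, not the
actual class):

    import java.util.SortedSet;
    import java.util.TreeSet;

    // Lower-case the input, split on anything that is not a letter or a
    // digit, and collect the distinct tokens in sorted order.
    public class BasicTokenizer {
        public SortedSet<String> tokenize(String string) {
            SortedSet<String> tokens = new TreeSet<String>();
            for (String token : string.toLowerCase().split("[^a-z0-9]+")) {
                if (!token.isEmpty()) {
                    tokens.add(token);
                }
            }
            return tokens;
        }
    }

    // e.g. new BasicTokenizer().tokenize("The quick, quick fox")
    // yields [fox, quick, the]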
http://git-wip-us.apache.org/repos/asf/incubator-rya/blob/80faf06d/extras/indexing/src/main/java/mvm/rya/indexing/accumulo/freetext/iterators/AndingIterator.java
----------------------------------------------------------------------
diff --git a/extras/indexing/src/main/java/mvm/rya/indexing/accumulo/freetext/iterators/AndingIterator.java b/extras/indexing/src/main/java/mvm/rya/indexing/accumulo/freetext/iterators/AndingIterator.java
index 3bf4086..355fe14 100644
--- a/extras/indexing/src/main/java/mvm/rya/indexing/accumulo/freetext/iterators/AndingIterator.java
+++ b/extras/indexing/src/main/java/mvm/rya/indexing/accumulo/freetext/iterators/AndingIterator.java
@@ -1,25 +1,26 @@
 package mvm.rya.indexing.accumulo.freetext.iterators;
 
 /*
- * #%L
- * mvm.rya.indexing.accumulo
- * %%
- * Copyright (C) 2014 Rya
- * %%
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * You may obtain a copy of the License at
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
  * 
- *      http://www.apache.org/licenses/LICENSE-2.0
+ *   http://www.apache.org/licenses/LICENSE-2.0
  * 
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- * #L%
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied.  See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
  */
 
+
+
 import java.io.IOException;
 import java.util.Collection;
 import java.util.Collections;

http://git-wip-us.apache.org/repos/asf/incubator-rya/blob/80faf06d/extras/indexing/src/main/java/mvm/rya/indexing/accumulo/freetext/iterators/BooleanTreeIterator.java
----------------------------------------------------------------------
diff --git a/extras/indexing/src/main/java/mvm/rya/indexing/accumulo/freetext/iterators/BooleanTreeIterator.java b/extras/indexing/src/main/java/mvm/rya/indexing/accumulo/freetext/iterators/BooleanTreeIterator.java
index 7f73b13..a69b78a 100644
--- a/extras/indexing/src/main/java/mvm/rya/indexing/accumulo/freetext/iterators/BooleanTreeIterator.java
+++ b/extras/indexing/src/main/java/mvm/rya/indexing/accumulo/freetext/iterators/BooleanTreeIterator.java
@@ -1,25 +1,26 @@
 package mvm.rya.indexing.accumulo.freetext.iterators;
 
 /*
- * #%L
- * mvm.rya.indexing.accumulo
- * %%
- * Copyright (C) 2014 Rya
- * %%
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * You may obtain a copy of the License at
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
  * 
- *      http://www.apache.org/licenses/LICENSE-2.0
+ *   http://www.apache.org/licenses/LICENSE-2.0
  * 
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- * #L%
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied.  See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
  */
 
+
+
 import static mvm.rya.indexing.accumulo.freetext.query.ASTNodeUtils.allChildrenAreNot;
 import static mvm.rya.indexing.accumulo.freetext.query.ASTNodeUtils.findFirstNonNotChild;
 import static mvm.rya.indexing.accumulo.freetext.query.ASTNodeUtils.getNodeIterator;

http://git-wip-us.apache.org/repos/asf/incubator-rya/blob/80faf06d/extras/indexing/src/main/java/mvm/rya/indexing/accumulo/freetext/query/ASTExpression.java
----------------------------------------------------------------------
diff --git a/extras/indexing/src/main/java/mvm/rya/indexing/accumulo/freetext/query/ASTExpression.java b/extras/indexing/src/main/java/mvm/rya/indexing/accumulo/freetext/query/ASTExpression.java
index d87f57d..95783e5 100644
--- a/extras/indexing/src/main/java/mvm/rya/indexing/accumulo/freetext/query/ASTExpression.java
+++ b/extras/indexing/src/main/java/mvm/rya/indexing/accumulo/freetext/query/ASTExpression.java
@@ -1,25 +1,26 @@
 package mvm.rya.indexing.accumulo.freetext.query;
 
 /*
- * #%L
- * mvm.rya.indexing.accumulo
- * %%
- * Copyright (C) 2014 Rya
- * %%
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * You may obtain a copy of the License at
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
  * 
- *      http://www.apache.org/licenses/LICENSE-2.0
+ *   http://www.apache.org/licenses/LICENSE-2.0
  * 
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- * #L%
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied.  See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
  */
 
+
+
 /**
  * This is a slightly modified version of the ASTExpression file created by JavaCC. This version adds more state to the standard ASTExpression
  * file, including a "type" and "notFlag".

http://git-wip-us.apache.org/repos/asf/incubator-rya/blob/80faf06d/extras/indexing/src/main/java/mvm/rya/indexing/accumulo/freetext/query/ASTNodeUtils.java
----------------------------------------------------------------------
diff --git a/extras/indexing/src/main/java/mvm/rya/indexing/accumulo/freetext/query/ASTNodeUtils.java b/extras/indexing/src/main/java/mvm/rya/indexing/accumulo/freetext/query/ASTNodeUtils.java
index d328812..27edaac 100644
--- a/extras/indexing/src/main/java/mvm/rya/indexing/accumulo/freetext/query/ASTNodeUtils.java
+++ b/extras/indexing/src/main/java/mvm/rya/indexing/accumulo/freetext/query/ASTNodeUtils.java
@@ -1,25 +1,26 @@
 package mvm.rya.indexing.accumulo.freetext.query;
 
 /*
- * #%L
- * mvm.rya.indexing.accumulo
- * %%
- * Copyright (C) 2014 Rya
- * %%
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * You may obtain a copy of the License at
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
  * 
- *      http://www.apache.org/licenses/LICENSE-2.0
+ *   http://www.apache.org/licenses/LICENSE-2.0
  * 
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- * #L%
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied.  See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
  */
 
+
+
 import java.util.ArrayList;
 import java.util.Iterator;
 import java.util.List;

http://git-wip-us.apache.org/repos/asf/incubator-rya/blob/80faf06d/extras/indexing/src/main/java/mvm/rya/indexing/accumulo/freetext/query/ASTSimpleNode.java
----------------------------------------------------------------------
diff --git a/extras/indexing/src/main/java/mvm/rya/indexing/accumulo/freetext/query/ASTSimpleNode.java b/extras/indexing/src/main/java/mvm/rya/indexing/accumulo/freetext/query/ASTSimpleNode.java
index 18e856c..71ff16a 100644
--- a/extras/indexing/src/main/java/mvm/rya/indexing/accumulo/freetext/query/ASTSimpleNode.java
+++ b/extras/indexing/src/main/java/mvm/rya/indexing/accumulo/freetext/query/ASTSimpleNode.java
@@ -3,25 +3,906 @@
 package mvm.rya.indexing.accumulo.freetext.query;
 
 /*
- * #%L
- * mvm.rya.indexing.accumulo
- * %%
- * Copyright (C) 2014 Rya
- * %%
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * You may obtain a copy of the License at
- * 
- *      http://www.apache.org/licenses/LICENSE-2.0
- * 
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- * #L%
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ * 
+ *   http://www.apache.org/licenses/LICENSE-2.0
+ * 
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied.  See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+
+
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ * 
+ *   http://www.apache.org/licenses/LICENSE-2.0
+ * 
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied.  See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+
+
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ * 
+ *   http://www.apache.org/licenses/LICENSE-2.0
+ * 
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied.  See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+
+
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ * 
+ *   http://www.apache.org/licenses/LICENSE-2.0
+ * 
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied.  See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+
+
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ * 
+ *   http://www.apache.org/licenses/LICENSE-2.0
+ * 
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied.  See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+
+
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ * 
+ *   http://www.apache.org/licenses/LICENSE-2.0
+ * 
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied.  See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+
+
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ * 
+ *   http://www.apache.org/licenses/LICENSE-2.0
+ * 
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied.  See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+
+
 public
 class ASTSimpleNode extends SimpleNode {
   public ASTSimpleNode(int id) {

http://git-wip-us.apache.org/repos/asf/incubator-rya/blob/80faf06d/extras/indexing/src/main/java/mvm/rya/indexing/accumulo/freetext/query/ASTTerm.java
----------------------------------------------------------------------
diff --git a/extras/indexing/src/main/java/mvm/rya/indexing/accumulo/freetext/query/ASTTerm.java b/extras/indexing/src/main/java/mvm/rya/indexing/accumulo/freetext/query/ASTTerm.java
index 950c873..6232096 100644
--- a/extras/indexing/src/main/java/mvm/rya/indexing/accumulo/freetext/query/ASTTerm.java
+++ b/extras/indexing/src/main/java/mvm/rya/indexing/accumulo/freetext/query/ASTTerm.java
@@ -1,25 +1,26 @@
 package mvm.rya.indexing.accumulo.freetext.query;
 
 /*
- * #%L
- * mvm.rya.indexing.accumulo
- * %%
- * Copyright (C) 2014 Rya
- * %%
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * You may obtain a copy of the License at
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
  * 
- *      http://www.apache.org/licenses/LICENSE-2.0
+ *   http://www.apache.org/licenses/LICENSE-2.0
  * 
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- * #L%
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied.  See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
  */
 
+
+
 /**
  * This is a slightly modified version of the ASTTerm file created by JavaCC. This version adds more state to the standard ASTTerm file
  * including a "term", "type", and "notFlag".


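To make the ASTTerm change above concrete, here is a minimal, hypothetical
sketch of the extra state the modified class carries. Field and accessor names
are illustrative only; the real file is JavaCC-generated and then hand-edited,
and SimpleNode is the generated base class.

    // Hypothetical sketch of the state described in the Javadoc above;
    // not the actual generated source.
    public class ASTTerm extends SimpleNode {
        private String term;      // the literal search term
        private String type;      // term type recognized by the parser
        private boolean notFlag;  // true when the term is negated

        public ASTTerm(int id) { super(id); }

        public String getTerm() { return term; }
        public void setTerm(String term) { this.term = term; }
        public String getType() { return type; }
        public void setType(String type) { this.type = type; }
        public boolean isNotFlag() { return notFlag; }
        public void setNotFlag(boolean notFlag) { this.notFlag = notFlag; }
    }
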
[45/56] [abbrv] incubator-rya git commit: RYA-7 POM and License Clean-up for Apache Move

Posted by mi...@apache.org.
http://git-wip-us.apache.org/repos/asf/incubator-rya/blob/80faf06d/dao/accumulo.rya/src/main/java/mvm/rya/accumulo/query/RangeBindingSetEntries.java
----------------------------------------------------------------------
diff --git a/dao/accumulo.rya/src/main/java/mvm/rya/accumulo/query/RangeBindingSetEntries.java b/dao/accumulo.rya/src/main/java/mvm/rya/accumulo/query/RangeBindingSetEntries.java
index 894ed84..c59cb87 100644
--- a/dao/accumulo.rya/src/main/java/mvm/rya/accumulo/query/RangeBindingSetEntries.java
+++ b/dao/accumulo.rya/src/main/java/mvm/rya/accumulo/query/RangeBindingSetEntries.java
@@ -1,25 +1,26 @@
 package mvm.rya.accumulo.query;
 
 /*
- * #%L
- * mvm.rya.accumulo.rya
- * %%
- * Copyright (C) 2014 Rya
- * %%
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * You may obtain a copy of the License at
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
  * 
- *      http://www.apache.org/licenses/LICENSE-2.0
+ *   http://www.apache.org/licenses/LICENSE-2.0
  * 
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- * #L%
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied.  See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
  */
 
+
+
 import org.apache.accumulo.core.data.Key;
 import org.apache.accumulo.core.data.Range;
 import org.openrdf.query.BindingSet;

http://git-wip-us.apache.org/repos/asf/incubator-rya/blob/80faf06d/dao/accumulo.rya/src/main/java/mvm/rya/accumulo/query/RyaStatementBindingSetKeyValueIterator.java
----------------------------------------------------------------------
diff --git a/dao/accumulo.rya/src/main/java/mvm/rya/accumulo/query/RyaStatementBindingSetKeyValueIterator.java b/dao/accumulo.rya/src/main/java/mvm/rya/accumulo/query/RyaStatementBindingSetKeyValueIterator.java
index 8d90b0c..b4333bd 100644
--- a/dao/accumulo.rya/src/main/java/mvm/rya/accumulo/query/RyaStatementBindingSetKeyValueIterator.java
+++ b/dao/accumulo.rya/src/main/java/mvm/rya/accumulo/query/RyaStatementBindingSetKeyValueIterator.java
@@ -1,25 +1,26 @@
 package mvm.rya.accumulo.query;
 
 /*
- * #%L
- * mvm.rya.accumulo.rya
- * %%
- * Copyright (C) 2014 Rya
- * %%
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * You may obtain a copy of the License at
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
  * 
- *      http://www.apache.org/licenses/LICENSE-2.0
+ *   http://www.apache.org/licenses/LICENSE-2.0
  * 
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- * #L%
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied.  See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
  */
 
+
+
 import info.aduna.iteration.CloseableIteration;
 
 import java.util.Collection;

http://git-wip-us.apache.org/repos/asf/incubator-rya/blob/80faf06d/dao/accumulo.rya/src/main/java/mvm/rya/accumulo/query/RyaStatementKeyValueIterator.java
----------------------------------------------------------------------
diff --git a/dao/accumulo.rya/src/main/java/mvm/rya/accumulo/query/RyaStatementKeyValueIterator.java b/dao/accumulo.rya/src/main/java/mvm/rya/accumulo/query/RyaStatementKeyValueIterator.java
index a4d0a40..f4c3081 100644
--- a/dao/accumulo.rya/src/main/java/mvm/rya/accumulo/query/RyaStatementKeyValueIterator.java
+++ b/dao/accumulo.rya/src/main/java/mvm/rya/accumulo/query/RyaStatementKeyValueIterator.java
@@ -1,25 +1,26 @@
 package mvm.rya.accumulo.query;
 
 /*
- * #%L
- * mvm.rya.accumulo.rya
- * %%
- * Copyright (C) 2014 Rya
- * %%
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * You may obtain a copy of the License at
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
  * 
- *      http://www.apache.org/licenses/LICENSE-2.0
+ *   http://www.apache.org/licenses/LICENSE-2.0
  * 
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- * #L%
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied.  See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
  */
 
+
+
 import info.aduna.iteration.CloseableIteration;
 
 import java.util.Iterator;

http://git-wip-us.apache.org/repos/asf/incubator-rya/blob/80faf06d/dao/accumulo.rya/src/main/java/mvm/rya/accumulo/query/ScannerBaseCloseableIterable.java
----------------------------------------------------------------------
diff --git a/dao/accumulo.rya/src/main/java/mvm/rya/accumulo/query/ScannerBaseCloseableIterable.java b/dao/accumulo.rya/src/main/java/mvm/rya/accumulo/query/ScannerBaseCloseableIterable.java
index 95d76b9..d2dcef9 100644
--- a/dao/accumulo.rya/src/main/java/mvm/rya/accumulo/query/ScannerBaseCloseableIterable.java
+++ b/dao/accumulo.rya/src/main/java/mvm/rya/accumulo/query/ScannerBaseCloseableIterable.java
@@ -1,25 +1,26 @@
 package mvm.rya.accumulo.query;
 
 /*
- * #%L
- * mvm.rya.accumulo.rya
- * %%
- * Copyright (C) 2014 Rya
- * %%
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * You may obtain a copy of the License at
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
  * 
- *      http://www.apache.org/licenses/LICENSE-2.0
+ *   http://www.apache.org/licenses/LICENSE-2.0
  * 
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- * #L%
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied.  See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
  */
 
+
+
 import com.google.common.base.Preconditions;
 import org.apache.accumulo.core.client.ScannerBase;
 import org.apache.accumulo.core.data.Key;

http://git-wip-us.apache.org/repos/asf/incubator-rya/blob/80faf06d/dao/accumulo.rya/src/main/java/mvm/rya/accumulo/utils/TimeRangeFilter.java
----------------------------------------------------------------------
diff --git a/dao/accumulo.rya/src/main/java/mvm/rya/accumulo/utils/TimeRangeFilter.java b/dao/accumulo.rya/src/main/java/mvm/rya/accumulo/utils/TimeRangeFilter.java
index a2381b2..97d2f54 100644
--- a/dao/accumulo.rya/src/main/java/mvm/rya/accumulo/utils/TimeRangeFilter.java
+++ b/dao/accumulo.rya/src/main/java/mvm/rya/accumulo/utils/TimeRangeFilter.java
@@ -1,25 +1,26 @@
 package mvm.rya.accumulo.utils;
 
 /*
- * #%L
- * mvm.rya.accumulo.rya
- * %%
- * Copyright (C) 2014 Rya
- * %%
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * You may obtain a copy of the License at
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
  * 
- *      http://www.apache.org/licenses/LICENSE-2.0
+ *   http://www.apache.org/licenses/LICENSE-2.0
  * 
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- * #L%
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied.  See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
  */
 
+
+
 import org.apache.accumulo.core.data.Key;
 import org.apache.accumulo.core.data.Value;
 import org.apache.accumulo.core.iterators.Filter;
@@ -83,4 +84,4 @@ public class TimeRangeFilter extends Filter {
         Long.parseLong(options.get(TIME_RANGE_PROP));
         return true;
     }
-}
\ No newline at end of file
+}
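
For context, the hunk above validates a single long-valued option on the
filter. A minimal usage sketch follows, assuming the standard Accumulo
iterator API; the literal option key "timeRange", the method name, and the
priority are assumptions for illustration, and the real key is the value of
TimeRangeFilter.TIME_RANGE_PROP parsed in validateOptions above.

    import org.apache.accumulo.core.client.IteratorSetting;
    import org.apache.accumulo.core.client.Scanner;

    // Hypothetical sketch: attach the filter to a scan via an IteratorSetting.
    void addTimeRangeFilter(Scanner scanner) {
        IteratorSetting setting =
                new IteratorSetting(30, "timeRangeFilter", TimeRangeFilter.class);
        setting.addOption("timeRange", Long.toString(86400000L)); // range width in ms
        scanner.addScanIterator(setting);
    }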

http://git-wip-us.apache.org/repos/asf/incubator-rya/blob/80faf06d/dao/accumulo.rya/src/test/java/mvm/rya/accumulo/AccumuloRdfConfigurationTest.java
----------------------------------------------------------------------
diff --git a/dao/accumulo.rya/src/test/java/mvm/rya/accumulo/AccumuloRdfConfigurationTest.java b/dao/accumulo.rya/src/test/java/mvm/rya/accumulo/AccumuloRdfConfigurationTest.java
index d1ddbaa..b7c9079 100644
--- a/dao/accumulo.rya/src/test/java/mvm/rya/accumulo/AccumuloRdfConfigurationTest.java
+++ b/dao/accumulo.rya/src/test/java/mvm/rya/accumulo/AccumuloRdfConfigurationTest.java
@@ -1,25 +1,26 @@
 package mvm.rya.accumulo;
 
 /*
- * #%L
- * mvm.rya.accumulo.rya
- * %%
- * Copyright (C) 2014 Rya
- * %%
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * You may obtain a copy of the License at
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
  * 
- *      http://www.apache.org/licenses/LICENSE-2.0
+ *   http://www.apache.org/licenses/LICENSE-2.0
  * 
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- * #L%
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied.  See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
  */
 
+
+
 import org.apache.accumulo.core.security.Authorizations;
 import org.junit.Test;
 import org.slf4j.Logger;

http://git-wip-us.apache.org/repos/asf/incubator-rya/blob/80faf06d/dao/accumulo.rya/src/test/java/mvm/rya/accumulo/AccumuloRyaDAOTest.java
----------------------------------------------------------------------
diff --git a/dao/accumulo.rya/src/test/java/mvm/rya/accumulo/AccumuloRyaDAOTest.java b/dao/accumulo.rya/src/test/java/mvm/rya/accumulo/AccumuloRyaDAOTest.java
index 9295dd9..ab4528b 100644
--- a/dao/accumulo.rya/src/test/java/mvm/rya/accumulo/AccumuloRyaDAOTest.java
+++ b/dao/accumulo.rya/src/test/java/mvm/rya/accumulo/AccumuloRyaDAOTest.java
@@ -1,25 +1,26 @@
 package mvm.rya.accumulo;
 
 /*
- * #%L
- * mvm.rya.accumulo.rya
- * %%
- * Copyright (C) 2014 Rya
- * %%
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * You may obtain a copy of the License at
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
  * 
- *      http://www.apache.org/licenses/LICENSE-2.0
+ *   http://www.apache.org/licenses/LICENSE-2.0
  * 
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- * #L%
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied.  See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
  */
 
+
+
 import info.aduna.iteration.CloseableIteration;
 import mvm.rya.accumulo.query.AccumuloRyaQueryEngine;
 import mvm.rya.api.RdfCloudTripleStoreUtils;

http://git-wip-us.apache.org/repos/asf/incubator-rya/blob/80faf06d/dao/accumulo.rya/src/test/java/mvm/rya/accumulo/DefineTripleQueryRangeFactoryTest.java
----------------------------------------------------------------------
diff --git a/dao/accumulo.rya/src/test/java/mvm/rya/accumulo/DefineTripleQueryRangeFactoryTest.java b/dao/accumulo.rya/src/test/java/mvm/rya/accumulo/DefineTripleQueryRangeFactoryTest.java
index 168e85c..7c3331d 100644
--- a/dao/accumulo.rya/src/test/java/mvm/rya/accumulo/DefineTripleQueryRangeFactoryTest.java
+++ b/dao/accumulo.rya/src/test/java/mvm/rya/accumulo/DefineTripleQueryRangeFactoryTest.java
@@ -1,24 +1,24 @@
-//package mvm.rya.accumulo;
-
 /*
- * #%L
- * mvm.rya.accumulo.rya
- * %%
- * Copyright (C) 2014 Rya
- * %%
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * You may obtain a copy of the License at
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
  * 
- *      http://www.apache.org/licenses/LICENSE-2.0
+ *   http://www.apache.org/licenses/LICENSE-2.0
  * 
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- * #L%
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied.  See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
  */
+
+//package mvm.rya.accumulo;
+
 //
 //import junit.framework.TestCase;
 //import mvm.rya.accumulo.AccumuloRdfConfiguration;

http://git-wip-us.apache.org/repos/asf/incubator-rya/blob/80faf06d/dao/accumulo.rya/src/test/java/mvm/rya/accumulo/mr/eval/AccumuloRdfCountToolTest.java
----------------------------------------------------------------------
diff --git a/dao/accumulo.rya/src/test/java/mvm/rya/accumulo/mr/eval/AccumuloRdfCountToolTest.java b/dao/accumulo.rya/src/test/java/mvm/rya/accumulo/mr/eval/AccumuloRdfCountToolTest.java
index 07e7287..bda73e2 100644
--- a/dao/accumulo.rya/src/test/java/mvm/rya/accumulo/mr/eval/AccumuloRdfCountToolTest.java
+++ b/dao/accumulo.rya/src/test/java/mvm/rya/accumulo/mr/eval/AccumuloRdfCountToolTest.java
@@ -1,25 +1,26 @@
 package mvm.rya.accumulo.mr.eval;
 
 /*
- * #%L
- * mvm.rya.accumulo.rya
- * %%
- * Copyright (C) 2014 Rya
- * %%
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * You may obtain a copy of the License at
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
  * 
- *      http://www.apache.org/licenses/LICENSE-2.0
+ *   http://www.apache.org/licenses/LICENSE-2.0
  * 
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- * #L%
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied.  See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
  */
 
+
+
 import mvm.rya.accumulo.AccumuloRdfConfiguration;
 import mvm.rya.accumulo.AccumuloRyaDAO;
 import mvm.rya.api.RdfCloudTripleStoreConstants;

http://git-wip-us.apache.org/repos/asf/incubator-rya/blob/80faf06d/dao/accumulo.rya/src/test/java/mvm/rya/accumulo/mr/fileinput/RdfFileInputToolTest.java
----------------------------------------------------------------------
diff --git a/dao/accumulo.rya/src/test/java/mvm/rya/accumulo/mr/fileinput/RdfFileInputToolTest.java b/dao/accumulo.rya/src/test/java/mvm/rya/accumulo/mr/fileinput/RdfFileInputToolTest.java
index 0d24b62..02b8357 100644
--- a/dao/accumulo.rya/src/test/java/mvm/rya/accumulo/mr/fileinput/RdfFileInputToolTest.java
+++ b/dao/accumulo.rya/src/test/java/mvm/rya/accumulo/mr/fileinput/RdfFileInputToolTest.java
@@ -1,25 +1,26 @@
 package mvm.rya.accumulo.mr.fileinput;
 
 /*
- * #%L
- * mvm.rya.accumulo.rya
- * %%
- * Copyright (C) 2014 Rya
- * %%
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * You may obtain a copy of the License at
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
  * 
- *      http://www.apache.org/licenses/LICENSE-2.0
+ *   http://www.apache.org/licenses/LICENSE-2.0
  * 
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- * #L%
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied.  See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
  */
 
+
+
 import java.util.Iterator;
 import java.util.Map;
 

http://git-wip-us.apache.org/repos/asf/incubator-rya/blob/80faf06d/dao/accumulo.rya/src/test/java/mvm/rya/accumulo/mr/upgrade/Upgrade322ToolTest.java
----------------------------------------------------------------------
diff --git a/dao/accumulo.rya/src/test/java/mvm/rya/accumulo/mr/upgrade/Upgrade322ToolTest.java b/dao/accumulo.rya/src/test/java/mvm/rya/accumulo/mr/upgrade/Upgrade322ToolTest.java
index 2a09669..5ac2d74 100644
--- a/dao/accumulo.rya/src/test/java/mvm/rya/accumulo/mr/upgrade/Upgrade322ToolTest.java
+++ b/dao/accumulo.rya/src/test/java/mvm/rya/accumulo/mr/upgrade/Upgrade322ToolTest.java
@@ -1,25 +1,26 @@
 package mvm.rya.accumulo.mr.upgrade;
 
 /*
- * #%L
- * mvm.rya.accumulo.rya
- * %%
- * Copyright (C) 2014 Rya
- * %%
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * You may obtain a copy of the License at
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
  * 
- *      http://www.apache.org/licenses/LICENSE-2.0
+ *   http://www.apache.org/licenses/LICENSE-2.0
  * 
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- * #L%
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied.  See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
  */
 
+
+
 import junit.framework.TestCase;
 import mvm.rya.accumulo.AccumuloRdfConfiguration;
 import mvm.rya.accumulo.AccumuloRyaDAO;

http://git-wip-us.apache.org/repos/asf/incubator-rya/blob/80faf06d/dao/accumulo.rya/src/test/java/mvm/rya/accumulo/mr/upgrade/UpgradeObjectSerializationTest.java
----------------------------------------------------------------------
diff --git a/dao/accumulo.rya/src/test/java/mvm/rya/accumulo/mr/upgrade/UpgradeObjectSerializationTest.java b/dao/accumulo.rya/src/test/java/mvm/rya/accumulo/mr/upgrade/UpgradeObjectSerializationTest.java
index 027bd7e..b138292 100644
--- a/dao/accumulo.rya/src/test/java/mvm/rya/accumulo/mr/upgrade/UpgradeObjectSerializationTest.java
+++ b/dao/accumulo.rya/src/test/java/mvm/rya/accumulo/mr/upgrade/UpgradeObjectSerializationTest.java
@@ -1,25 +1,26 @@
 package mvm.rya.accumulo.mr.upgrade;
 
 /*
- * #%L
- * mvm.rya.accumulo.rya
- * %%
- * Copyright (C) 2014 Rya
- * %%
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * You may obtain a copy of the License at
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
  * 
- *      http://www.apache.org/licenses/LICENSE-2.0
+ *   http://www.apache.org/licenses/LICENSE-2.0
  * 
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- * #L%
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied.  See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
  */
 
+
+
 import mvm.rya.api.resolver.impl.*;
 import org.junit.Test;
 
@@ -115,4 +116,4 @@ public class UpgradeObjectSerializationTest {
 
         assertEquals("c024000000000000", upgrade);
     }
-}
\ No newline at end of file
+}

http://git-wip-us.apache.org/repos/asf/incubator-rya/blob/80faf06d/dao/cloudbase.rya/pom.xml
----------------------------------------------------------------------
diff --git a/dao/cloudbase.rya/pom.xml b/dao/cloudbase.rya/pom.xml
deleted file mode 100644
index e08e111..0000000
--- a/dao/cloudbase.rya/pom.xml
+++ /dev/null
@@ -1,103 +0,0 @@
-<?xml version="1.0" encoding="UTF-8"?>
-<project xmlns="http://maven.apache.org/POM/4.0.0" xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance"
-    xsi:schemaLocation="http://maven.apache.org/POM/4.0.0 http://maven.apache.org/xsd/maven-4.0.0.xsd">
-    <parent>
-        <groupId>mvm.rya</groupId>
-        <artifactId>rya.dao</artifactId>
-        <version>3.2.10-SNAPSHOT</version>
-    </parent>
-    <modelVersion>4.0.0</modelVersion>
-
-    <artifactId>cloudbase.rya</artifactId>
-    <name>${project.groupId}.${project.artifactId}</name>
-    <dependencies>
-        <dependency>
-            <groupId>mvm.rya</groupId>
-            <artifactId>rya.api</artifactId>
-        </dependency>
-        <dependency>
-            <groupId>mvm.rya</groupId>
-            <artifactId>cloudbase.utils</artifactId>
-        </dependency>
-        <dependency>
-            <groupId>mvm.rya</groupId>
-            <artifactId>cloudbase.iterators</artifactId>
-        </dependency>
-
-        <dependency>
-            <groupId>mvm.rya</groupId>
-            <artifactId>rya.indexing</artifactId>
-            <version>${project.version}</version>
-        </dependency>
-
-        <!-- Cloudbase deps -->
-        <dependency>
-            <groupId>cloudbase</groupId>
-            <artifactId>cloudbase-core</artifactId>
-        </dependency>
-        <dependency>
-            <groupId>org.apache.zookeeper</groupId>
-            <artifactId>zookeeper</artifactId>
-            <exclusions>
-                <!-- the log4j that comes with zookeeper 3.3.5 has some bad dependencies -->
-                <exclusion>
-                    <groupId>javax.jms</groupId>
-                    <artifactId>jms</artifactId>
-                </exclusion>
-                <exclusion>
-                    <groupId>com.sun.jdmk</groupId>
-                    <artifactId>jmxtools</artifactId>
-                </exclusion>
-                <exclusion>
-                    <groupId>com.sun.jmx</groupId>
-                    <artifactId>jmxri</artifactId>
-                </exclusion>
-            </exclusions>
-        </dependency>
-
-        <dependency>
-            <groupId>com.texeltek</groupId>
-            <artifactId>accumulo-cloudbase-shim</artifactId>
-            <optional>true</optional>
-        </dependency>
-        <dependency>
-            <groupId>mvm.rya</groupId>
-            <artifactId>cloudbase.iterators</artifactId>
-            <optional>true</optional>
-        </dependency>
-
-
-        <!-- Sesame runtime -->
-        <dependency>
-            <groupId>org.openrdf.sesame</groupId>
-            <artifactId>sesame-rio-ntriples</artifactId>
-            <version>${openrdf.sesame.version}</version>
-        </dependency>
-
-    </dependencies>
-
-    <profiles>
-        <profile>
-            <id>mr</id>
-            <build>
-                <plugins>
-                    <plugin>
-                        <groupId>org.apache.maven.plugins</groupId>
-                        <artifactId>maven-shade-plugin</artifactId>
-                        <executions>
-                            <execution>
-                                <configuration>
-                                    <transformers>
-                                        <transformer implementation="org.apache.maven.plugins.shade.resource.ServicesResourceTransformer" />
-                                    </transformers>
-                                </configuration>
-                            </execution>
-                        </executions>
-
-                    </plugin>
-                </plugins>
-            </build>
-        </profile>
-    </profiles>
-
-</project>

http://git-wip-us.apache.org/repos/asf/incubator-rya/blob/80faf06d/dao/cloudbase.rya/src/main/java/mvm/rya/cloudbase/BatchScannerIterator.java
----------------------------------------------------------------------
diff --git a/dao/cloudbase.rya/src/main/java/mvm/rya/cloudbase/BatchScannerIterator.java b/dao/cloudbase.rya/src/main/java/mvm/rya/cloudbase/BatchScannerIterator.java
deleted file mode 100644
index 7980d85..0000000
--- a/dao/cloudbase.rya/src/main/java/mvm/rya/cloudbase/BatchScannerIterator.java
+++ /dev/null
@@ -1,59 +0,0 @@
-package mvm.rya.cloudbase;
-
-import cloudbase.core.data.Key;
-import cloudbase.core.data.Value;
-
-import java.util.Iterator;
-import java.util.Map.Entry;
-import java.util.NoSuchElementException;
-
-/**
- * The intention of this iterator is to wrap the iterator that is returned by a
- * BatchScan in cloudbase, serving as a workaround for
- * ACCUMULO-226 (https://issues.apache.org/jira/browse/ACCUMULO-226). The bug
- * is that subsequent calls to hasNext() on batch scan results can return true
- * even after false has already been returned.
- * <p/>
- * A patch has been submitted and accepted in Accumulo, but this wrapper can be used
- * for previous versions of Cloudbase/Accumulo that do not yet have the patch.
- */
-public class BatchScannerIterator implements Iterator<Entry<Key, Value>> {
-
-    private Iterator<Entry<Key, Value>> cloudbaseScanner = null;
-
-    private Entry<Key, Value> nextKeyValue = null;
-
-    public BatchScannerIterator(Iterator<Entry<Key, Value>> cloudbaseScanner) {
-        this.cloudbaseScanner = cloudbaseScanner;
-    }
-
-    public boolean hasNext() {
-        if (nextKeyValue == null) {
-            if (cloudbaseScanner.hasNext()) {
-                nextKeyValue = cloudbaseScanner.next();
-            }
-        }
-        return !isTerminatingKeyValue(nextKeyValue);
-    }
-
-    private boolean isTerminatingKeyValue(Entry<Key, Value> nextEntry) {
-        if (nextEntry == null) {
-            return true;
-        }
-        return !(nextEntry.getKey() != null && nextEntry.getValue() != null); //Condition taken from cloudbase's TabletServerBatchReaderIterator
-    }
-
-    public Entry<Key, Value> next() {
-        if (hasNext()) {
-            Entry<Key, Value> entry = nextKeyValue;
-            nextKeyValue = null;
-            return entry;
-        } else {
-            throw new NoSuchElementException();
-        }
-    }
-
-    public void remove() {
-        cloudbaseScanner.remove();
-    }
-}

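Note on the deletion above: the wrapper's value was its idempotent hasNext(), since
ACCUMULO-226 is about hasNext() flipping back to true after it has returned false.
A minimal generic sketch of the same look-ahead pattern, using only java.util types;
treating a null element as end-of-stream is an assumption of this sketch, standing in
for the null key/value check in the deleted class:

    import java.util.Iterator;
    import java.util.NoSuchElementException;

    /** Sketch: once hasNext() returns false, it stays false. */
    public class LookAheadIterator<T> implements Iterator<T> {
        private final Iterator<T> delegate;
        private T buffered;                        // one element of look-ahead

        public LookAheadIterator(Iterator<T> delegate) {
            this.delegate = delegate;
        }

        public boolean hasNext() {
            if (buffered == null && delegate.hasNext()) {
                buffered = delegate.next();        // buffer exactly one element
            }
            return buffered != null;               // null buffer means exhausted
        }

        public T next() {
            if (!hasNext()) {
                throw new NoSuchElementException();
            }
            T out = buffered;
            buffered = null;
            return out;
        }

        public void remove() {
            delegate.remove();                     // delegates, as the deleted class did
        }
    }
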
http://git-wip-us.apache.org/repos/asf/incubator-rya/blob/80faf06d/dao/cloudbase.rya/src/main/java/mvm/rya/cloudbase/CloudbaseNamespaceTableIterator.java
----------------------------------------------------------------------
diff --git a/dao/cloudbase.rya/src/main/java/mvm/rya/cloudbase/CloudbaseNamespaceTableIterator.java b/dao/cloudbase.rya/src/main/java/mvm/rya/cloudbase/CloudbaseNamespaceTableIterator.java
deleted file mode 100644
index b20d79c..0000000
--- a/dao/cloudbase.rya/src/main/java/mvm/rya/cloudbase/CloudbaseNamespaceTableIterator.java
+++ /dev/null
@@ -1,78 +0,0 @@
-package mvm.rya.cloudbase;
-
-import cloudbase.core.data.Key;
-import cloudbase.core.data.Value;
-import com.google.common.base.Preconditions;
-import info.aduna.iteration.CloseableIteration;
-import mvm.rya.api.persist.RdfDAOException;
-import org.openrdf.model.Namespace;
-import org.openrdf.model.impl.NamespaceImpl;
-
-import java.io.IOError;
-import java.util.Iterator;
-import java.util.Map.Entry;
-
-public class CloudbaseNamespaceTableIterator<T extends Namespace> implements
-        CloseableIteration<Namespace, RdfDAOException> {
-
-    private boolean open = false;
-    private Iterator<Entry<Key, Value>> result;
-
-    public CloudbaseNamespaceTableIterator(Iterator<Entry<Key, Value>> result) throws RdfDAOException {
-        Preconditions.checkNotNull(result);
-        open = true;
-        this.result = result;
-    }
-
-    @Override
-    public void close() throws RdfDAOException {
-        try {
-            verifyIsOpen();
-            open = false;
-        } catch (IOError e) {
-            throw new RdfDAOException(e);
-        }
-    }
-
-    public void verifyIsOpen() throws RdfDAOException {
-        if (!open) {
-            throw new RdfDAOException("Iterator not open");
-        }
-    }
-
-    @Override
-    public boolean hasNext() throws RdfDAOException {
-        verifyIsOpen();
-        return result != null && result.hasNext();
-    }
-
-    @Override
-    public Namespace next() throws RdfDAOException {
-        if (hasNext()) {
-            return getNamespace(result);
-        }
-        return null;
-    }
-
-    public static Namespace getNamespace(Iterator<Entry<Key, Value>> rowResults) {
-        for (; rowResults.hasNext(); ) {
-            Entry<Key, Value> next = rowResults.next();
-            Key key = next.getKey();
-            Value val = next.getValue();
-            String cf = key.getColumnFamily().toString();
-            String cq = key.getColumnQualifier().toString();
-            return new NamespaceImpl(key.getRow().toString(), new String(
-                    val.get()));
-        }
-        return null;
-    }
-
-    @Override
-    public void remove() throws RdfDAOException {
-        next();
-    }
-
-    public boolean isOpen() {
-        return open;
-    }
-}

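The deleted namespace iterator adapts a scanner's entry iterator to Sesame's
CloseableIteration contract (row = prefix, cell value = namespace URI). A usage
sketch against the API above; "scanner" is a placeholder for a cloudbase Scanner
over the namespace table:

    CloseableIteration<Namespace, RdfDAOException> iter =
            new CloudbaseNamespaceTableIterator(scanner.iterator());
    try {
        while (iter.hasNext()) {
            Namespace ns = iter.next();   // NamespaceImpl(prefix, uri) built from the row
        }
    } finally {
        iter.close();                     // after close(), hasNext() throws RdfDAOException
    }
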
http://git-wip-us.apache.org/repos/asf/incubator-rya/blob/80faf06d/dao/cloudbase.rya/src/main/java/mvm/rya/cloudbase/CloudbaseRdfConfiguration.java
----------------------------------------------------------------------
diff --git a/dao/cloudbase.rya/src/main/java/mvm/rya/cloudbase/CloudbaseRdfConfiguration.java b/dao/cloudbase.rya/src/main/java/mvm/rya/cloudbase/CloudbaseRdfConfiguration.java
deleted file mode 100644
index e25c910..0000000
--- a/dao/cloudbase.rya/src/main/java/mvm/rya/cloudbase/CloudbaseRdfConfiguration.java
+++ /dev/null
@@ -1,44 +0,0 @@
-package mvm.rya.cloudbase;
-
-import cloudbase.core.security.Authorizations;
-import mvm.rya.api.RdfCloudTripleStoreConfiguration;
-import org.apache.hadoop.conf.Configuration;
-
-/**
- * Created by IntelliJ IDEA.
- * Date: 4/25/12
- * Time: 3:24 PM
- * To change this template use File | Settings | File Templates.
- */
-public class CloudbaseRdfConfiguration extends RdfCloudTripleStoreConfiguration {
-
-    public static final String MAXRANGES_SCANNER = "cb.query.maxranges";
-
-    public CloudbaseRdfConfiguration() {
-        super();
-    }
-
-    public CloudbaseRdfConfiguration(Configuration other) {
-        super(other);
-    }
-
-    @Override
-    public CloudbaseRdfConfiguration clone() {
-        return new CloudbaseRdfConfiguration(this);
-    }
-
-    public Authorizations getAuthorizations() {
-        String[] auths = getAuths();
-        if (auths == null || auths.length == 0)
-            return CloudbaseRdfConstants.ALL_AUTHORIZATIONS;
-        return new Authorizations(auths);
-    }
-
-    public void setMaxRangesForScanner(Integer max) {
-        setInt(MAXRANGES_SCANNER, max);
-    }
-
-    public Integer getMaxRangesForScanner() {
-        return getInt(MAXRANGES_SCANNER, 2);
-    }
-}

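CloudbaseRdfConfiguration is a thin typed view over Hadoop's Configuration. A
hedged usage sketch; setAuths(...) is assumed to be inherited from
RdfCloudTripleStoreConfiguration (only getAuths() is visible above), and the
values are illustrative:

    CloudbaseRdfConfiguration conf = new CloudbaseRdfConfiguration();
    conf.setAuths("U");                              // assumed inherited setter
    conf.setMaxRangesForScanner(4);                  // stored under "cb.query.maxranges"
    Authorizations auths = conf.getAuthorizations(); // empty auths -> ALL_AUTHORIZATIONS
    int maxRanges = conf.getMaxRangesForScanner();   // defaults to 2 when unset
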
http://git-wip-us.apache.org/repos/asf/incubator-rya/blob/80faf06d/dao/cloudbase.rya/src/main/java/mvm/rya/cloudbase/CloudbaseRdfConstants.java
----------------------------------------------------------------------
diff --git a/dao/cloudbase.rya/src/main/java/mvm/rya/cloudbase/CloudbaseRdfConstants.java b/dao/cloudbase.rya/src/main/java/mvm/rya/cloudbase/CloudbaseRdfConstants.java
deleted file mode 100644
index 690a050..0000000
--- a/dao/cloudbase.rya/src/main/java/mvm/rya/cloudbase/CloudbaseRdfConstants.java
+++ /dev/null
@@ -1,20 +0,0 @@
-package mvm.rya.cloudbase;
-
-import cloudbase.core.CBConstants;
-import cloudbase.core.data.Value;
-import cloudbase.core.security.Authorizations;
-import cloudbase.core.security.ColumnVisibility;
-
-/**
- * Interface CloudbaseRdfConstants
- * Date: Mar 1, 2012
- * Time: 7:24:52 PM
- */
-public interface CloudbaseRdfConstants {
-    public static final Authorizations ALL_AUTHORIZATIONS = CBConstants.NO_AUTHS;
-
-    public static final Value EMPTY_VALUE = new Value(new byte[0]);
-
-    public static final ColumnVisibility EMPTY_CV = new ColumnVisibility(new byte[0]);
-
-}

http://git-wip-us.apache.org/repos/asf/incubator-rya/blob/80faf06d/dao/cloudbase.rya/src/main/java/mvm/rya/cloudbase/CloudbaseRdfEvalStatsDAO.java
----------------------------------------------------------------------
diff --git a/dao/cloudbase.rya/src/main/java/mvm/rya/cloudbase/CloudbaseRdfEvalStatsDAO.java b/dao/cloudbase.rya/src/main/java/mvm/rya/cloudbase/CloudbaseRdfEvalStatsDAO.java
deleted file mode 100644
index 075d1fe..0000000
--- a/dao/cloudbase.rya/src/main/java/mvm/rya/cloudbase/CloudbaseRdfEvalStatsDAO.java
+++ /dev/null
@@ -1,138 +0,0 @@
-package mvm.rya.cloudbase;
-
-import cloudbase.core.client.Connector;
-import cloudbase.core.client.Scanner;
-import cloudbase.core.client.admin.TableOperations;
-import cloudbase.core.data.Key;
-import cloudbase.core.data.Range;
-import cloudbase.core.security.Authorizations;
-import mvm.rya.api.RdfCloudTripleStoreConfiguration;
-import mvm.rya.api.RdfCloudTripleStoreStatement;
-import mvm.rya.api.layout.TableLayoutStrategy;
-import mvm.rya.api.persist.RdfDAOException;
-import mvm.rya.api.persist.RdfEvalStatsDAO;
-import org.apache.hadoop.conf.Configuration;
-import org.apache.hadoop.io.Text;
-import org.openrdf.model.Resource;
-import org.openrdf.model.Value;
-
-import java.util.ArrayList;
-import java.util.Collection;
-import java.util.Iterator;
-import java.util.Map;
-
-import static com.google.common.base.Preconditions.checkNotNull;
-import static mvm.rya.api.RdfCloudTripleStoreConstants.*;
-
-/**
- * Class CloudbaseRdfEvalStatsDAO
- * Date: Feb 28, 2012
- * Time: 5:03:16 PM
- */
-public class CloudbaseRdfEvalStatsDAO implements RdfEvalStatsDAO<CloudbaseRdfConfiguration> {
-
-    private boolean initialized = false;
-    private CloudbaseRdfConfiguration conf = new CloudbaseRdfConfiguration();
-
-    private Collection<RdfCloudTripleStoreStatement> statements = new ArrayList<RdfCloudTripleStoreStatement>();
-    private Connector connector;
-
-    //    private String evalTable = TBL_EVAL;
-    private TableLayoutStrategy tableLayoutStrategy;
-
-    @Override
-    public void init() throws RdfDAOException {
-        try {
-            if (isInitialized()) {
-                throw new IllegalStateException("Already initialized");
-            }
-            checkNotNull(connector);
-            tableLayoutStrategy = conf.getTableLayoutStrategy();
-//            evalTable = conf.get(RdfCloudTripleStoreConfiguration.CONF_TBL_EVAL, evalTable);
-//            conf.set(RdfCloudTripleStoreConfiguration.CONF_TBL_EVAL, evalTable);
-
-            TableOperations tos = connector.tableOperations();
-            CloudbaseRdfUtils.createTableIfNotExist(tos, tableLayoutStrategy.getEval());
-//            boolean tableExists = tos.exists(evalTable);
-//            if (!tableExists)
-//                tos.create(evalTable);
-            initialized = true;
-        } catch (Exception e) {
-            throw new RdfDAOException(e);
-        }
-    }
-
-    @Override
-    public double getCardinality(CloudbaseRdfConfiguration conf, CARDINALITY_OF card, Value val) throws RdfDAOException {
-        return this.getCardinality(conf, card, val, null);
-    }
-
-    @Override
-    public double getCardinality(CloudbaseRdfConfiguration conf, CARDINALITY_OF card, Value val, Resource context) throws RdfDAOException {
-        try {
-            Authorizations authorizations = conf.getAuthorizations();
-            Scanner scanner = connector.createScanner(tableLayoutStrategy.getEval(), authorizations);
-            Text cfTxt = null;
-            if (CARDINALITY_OF.SUBJECT.equals(card)) {
-                cfTxt = SUBJECT_CF_TXT;
-            } else if (CARDINALITY_OF.PREDICATE.equals(card)) {
-                cfTxt = PRED_CF_TXT;
-            } else if (CARDINALITY_OF.OBJECT.equals(card)) {
-//                cfTxt = OBJ_CF_TXT;     //TODO: How do we do object cardinality
-                return Double.MAX_VALUE;
-            } else throw new IllegalArgumentException("Not right Cardinality[" + card + "]");
-            Text cq = EMPTY_TEXT;
-            if (context != null) {
-                cq = new Text(context.stringValue().getBytes());
-            }
-            scanner.fetchColumn(cfTxt, cq);
-            scanner.setRange(new Range(new Text(val.stringValue().getBytes())));
-            Iterator<Map.Entry<Key, cloudbase.core.data.Value>> iter = scanner.iterator();
-            if (iter.hasNext()) {
-                return Double.parseDouble(new String(iter.next().getValue().get()));
-            }
-        } catch (Exception e) {
-            throw new RdfDAOException(e);
-        }
-
-        //default
-        return -1;
-    }
-
-    @Override
-    public void destroy() throws RdfDAOException {
-        if (!isInitialized()) {
-            throw new IllegalStateException("Not initialized");
-        }
-        initialized = false;
-    }
-
-    @Override
-    public boolean isInitialized() throws RdfDAOException {
-        return initialized;
-    }
-
-    public Connector getConnector() {
-        return connector;
-    }
-
-    public void setConnector(Connector connector) {
-        this.connector = connector;
-    }
-
-//    public String getEvalTable() {
-//        return evalTable;
-//    }
-//
-//    public void setEvalTable(String evalTable) {
-//        this.evalTable = evalTable;
-//    }
-
-    public CloudbaseRdfConfiguration getConf() {
-        return conf;
-    }
-
-    public void setConf(CloudbaseRdfConfiguration conf) {
-        this.conf = conf;
-    }
-}

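The eval-stats DAO answers cardinality lookups by scanning the eval table with the
value's string form as the row and the SUBJECT/PRED column family; object
cardinality is deliberately pessimistic (Double.MAX_VALUE) and a miss returns -1.
A usage sketch; "connector" and the URI are placeholders, and CARDINALITY_OF is the
enum referenced by the class above:

    CloudbaseRdfEvalStatsDAO stats = new CloudbaseRdfEvalStatsDAO();
    stats.setConnector(connector);   // existing cloudbase Connector
    stats.setConf(conf);             // CloudbaseRdfConfiguration with auths and table layout
    stats.init();                    // creates the eval table if missing

    org.openrdf.model.Value pred = new org.openrdf.model.impl.URIImpl("urn:example:p");
    double card = stats.getCardinality(conf, RdfEvalStatsDAO.CARDINALITY_OF.PREDICATE, pred);
    if (card < 0) {
        // no statistic recorded for this value; callers fall back to a default estimate
    }
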
http://git-wip-us.apache.org/repos/asf/incubator-rya/blob/80faf06d/dao/cloudbase.rya/src/main/java/mvm/rya/cloudbase/CloudbaseRdfUtils.java
----------------------------------------------------------------------
diff --git a/dao/cloudbase.rya/src/main/java/mvm/rya/cloudbase/CloudbaseRdfUtils.java b/dao/cloudbase.rya/src/main/java/mvm/rya/cloudbase/CloudbaseRdfUtils.java
deleted file mode 100644
index 9114ae8..0000000
--- a/dao/cloudbase.rya/src/main/java/mvm/rya/cloudbase/CloudbaseRdfUtils.java
+++ /dev/null
@@ -1,50 +0,0 @@
-package mvm.rya.cloudbase;
-
-import cloudbase.core.client.CBException;
-import cloudbase.core.client.CBSecurityException;
-import cloudbase.core.client.TableExistsException;
-import cloudbase.core.client.admin.TableOperations;
-import cloudbase.core.data.Key;
-import cloudbase.core.data.Value;
-import mvm.rya.api.resolver.triple.TripleRow;
-import org.apache.commons.logging.Log;
-import org.apache.commons.logging.LogFactory;
-
-import static mvm.rya.api.RdfCloudTripleStoreConstants.EMPTY_BYTES;
-
-/**
- * Class CloudbaseRdfUtils
- * Date: Mar 1, 2012
- * Time: 7:15:54 PM
- */
-public class CloudbaseRdfUtils {
-    private static final Log logger = LogFactory.getLog(CloudbaseRyaDAO.class);
-
-    public static void createTableIfNotExist(TableOperations tableOperations, String tableName) throws TableExistsException, CBSecurityException, CBException {
-        boolean tableExists = tableOperations.exists(tableName);
-        if (!tableExists) {
-            logger.info("Creating cloudbase table: " + tableName);
-            tableOperations.create(tableName);
-        }
-    }
-
-    public static Key from(TripleRow tripleRow) {
-        return new Key(defaultTo(tripleRow.getRow(), EMPTY_BYTES),
-                defaultTo(tripleRow.getColumnFamily(), EMPTY_BYTES),
-                defaultTo(tripleRow.getColumnQualifier(), EMPTY_BYTES),
-                defaultTo(tripleRow.getColumnVisibility(), EMPTY_BYTES),
-                defaultTo(tripleRow.getTimestamp(), Long.MAX_VALUE));
-    }
-
-    public static Value extractValue(TripleRow tripleRow) {
-        return new Value(defaultTo(tripleRow.getValue(), EMPTY_BYTES));
-    }
-
-    private static byte[] defaultTo(byte[] bytes, byte[] def) {
-        return bytes != null ? bytes : def;
-    }
-
-    private static Long defaultTo(Long l, Long def) {
-        return l != null ? l : def;
-    }
-}

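The two conversion helpers default any null TripleRow field to EMPTY_BYTES (and a
null timestamp to Long.MAX_VALUE), so a partially populated row still yields a
valid Key. Usage sketch; the table name is illustrative:

    TableOperations ops = connector.tableOperations();
    CloudbaseRdfUtils.createTableIfNotExist(ops, "rya_spo"); // no-op when the table exists
    Key key = CloudbaseRdfUtils.from(tripleRow);             // null parts become EMPTY_BYTES
    Value value = CloudbaseRdfUtils.extractValue(tripleRow); // null payload -> empty Value
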
http://git-wip-us.apache.org/repos/asf/incubator-rya/blob/80faf06d/dao/cloudbase.rya/src/main/java/mvm/rya/cloudbase/CloudbaseRyaDAO.java
----------------------------------------------------------------------
diff --git a/dao/cloudbase.rya/src/main/java/mvm/rya/cloudbase/CloudbaseRyaDAO.java b/dao/cloudbase.rya/src/main/java/mvm/rya/cloudbase/CloudbaseRyaDAO.java
deleted file mode 100644
index a3045e6..0000000
--- a/dao/cloudbase.rya/src/main/java/mvm/rya/cloudbase/CloudbaseRyaDAO.java
+++ /dev/null
@@ -1,428 +0,0 @@
-package mvm.rya.cloudbase;
-
-import cloudbase.core.client.*;
-import cloudbase.core.client.Scanner;
-import cloudbase.core.client.admin.TableOperations;
-import cloudbase.core.client.impl.TabletServerBatchDeleter;
-import cloudbase.core.conf.Property;
-import cloudbase.core.data.Key;
-import cloudbase.core.data.Mutation;
-import cloudbase.core.data.Range;
-import cloudbase.core.security.Authorizations;
-import cloudbase.core.security.ColumnVisibility;
-import com.google.common.collect.Iterators;
-import info.aduna.iteration.CloseableIteration;
-import mvm.rya.api.RdfCloudTripleStoreConfiguration;
-import mvm.rya.api.domain.RyaStatement;
-import mvm.rya.api.layout.TableLayoutStrategy;
-import mvm.rya.api.persist.RyaDAO;
-import mvm.rya.api.persist.RyaDAOException;
-import mvm.rya.api.persist.RyaNamespaceManager;
-import mvm.rya.api.resolver.RyaContext;
-import mvm.rya.api.resolver.triple.TripleRow;
-import mvm.rya.api.resolver.triple.TripleRowResolverException;
-import mvm.rya.cloudbase.query.CloudbaseRyaQueryEngine;
-import org.apache.commons.logging.Log;
-import org.apache.commons.logging.LogFactory;
-import org.apache.hadoop.io.Text;
-import org.openrdf.model.Namespace;
-
-import java.text.SimpleDateFormat;
-import java.util.*;
-
-import static com.google.common.base.Preconditions.checkNotNull;
-import static mvm.rya.api.RdfCloudTripleStoreConstants.*;
-import static mvm.rya.cloudbase.CloudbaseRdfConstants.ALL_AUTHORIZATIONS;
-import static mvm.rya.cloudbase.CloudbaseRdfConstants.EMPTY_CV;
-
-/**
- * Class CloudbaseRyaDAO
- * Date: Feb 29, 2012
- * Time: 12:37:22 PM
- */
-public class CloudbaseRyaDAO implements RyaDAO<CloudbaseRdfConfiguration>, RyaNamespaceManager<CloudbaseRdfConfiguration> {
-    private static final Log logger = LogFactory.getLog(CloudbaseRyaDAO.class);
-
-    private boolean initialized = false;
-    private Connector connector;
-
-    private BatchWriter bw_spo;
-    private BatchWriter bw_po;
-    private BatchWriter bw_osp;
-    private BatchWriter bw_ns;
-
-    private CloudbaseRdfConfiguration conf = new CloudbaseRdfConfiguration();
-    private ColumnVisibility cv = EMPTY_CV;
-    private RyaTableMutationsFactory ryaTableMutationsFactory = new RyaTableMutationsFactory();
-    private TableLayoutStrategy tableLayoutStrategy;
-    private CloudbaseRyaQueryEngine queryEngine;
-    private RyaContext ryaContext = RyaContext.getInstance();
-
-    @Override
-    public boolean isInitialized() throws RyaDAOException {
-        return initialized;
-    }
-
-    @Override
-    public void init() throws RyaDAOException {
-        if (initialized)
-            return;
-        try {
-            checkNotNull(conf);
-            checkNotNull(connector);
-
-            tableLayoutStrategy = conf.getTableLayoutStrategy();
-            String cv_s = conf.getCv();
-            if (cv_s != null) {
-                cv = new ColumnVisibility(cv_s);
-            }
-
-            TableOperations tableOperations = connector.tableOperations();
-            CloudbaseRdfUtils.createTableIfNotExist(tableOperations, tableLayoutStrategy.getSpo());
-            CloudbaseRdfUtils.createTableIfNotExist(tableOperations, tableLayoutStrategy.getPo());
-            CloudbaseRdfUtils.createTableIfNotExist(tableOperations, tableLayoutStrategy.getOsp());
-            CloudbaseRdfUtils.createTableIfNotExist(tableOperations, tableLayoutStrategy.getNs());
-
-            //get the batch writers for tables
-            bw_spo = connector.createBatchWriter(tableLayoutStrategy.getSpo(), MAX_MEMORY, MAX_TIME,
-                    NUM_THREADS);
-            bw_po = connector.createBatchWriter(tableLayoutStrategy.getPo(), MAX_MEMORY, MAX_TIME,
-                    NUM_THREADS);
-            bw_osp = connector.createBatchWriter(tableLayoutStrategy.getOsp(), MAX_MEMORY, MAX_TIME,
-                    NUM_THREADS);
-
-            bw_ns = connector.createBatchWriter(tableLayoutStrategy.getNs(), MAX_MEMORY,
-                    MAX_TIME, 1);
-
-            queryEngine = new CloudbaseRyaQueryEngine(connector, getConf());
-
-            checkVersion();
-
-            initialized = true;
-        } catch (Exception e) {
-            throw new RyaDAOException(e);
-        }
-    }
-
-    public String getVersion() throws RyaDAOException {
-        String version = null;
-        CloseableIteration<RyaStatement, RyaDAOException> versIter = queryEngine.query(new RyaStatement(RTS_SUBJECT_RYA, RTS_VERSION_PREDICATE_RYA, null), conf);
-        if (versIter.hasNext()) {
-            version = versIter.next().getObject().getData();
-        }
-        versIter.close();
-
-        return version;
-    }
-
-    @Override
-    public void add(RyaStatement statement) throws RyaDAOException {
-        commit(Iterators.singletonIterator(statement));
-    }
-
-    @Override
-    public void add(Iterator<RyaStatement> iter) throws RyaDAOException {
-        commit(iter);
-    }
-
-    @Override
-    public void delete(RyaStatement stmt, CloudbaseRdfConfiguration aconf) throws RyaDAOException {
-        this.delete(Iterators.singletonIterator(stmt), aconf);
-    }
-
-    @Override
-    public void delete(Iterator<RyaStatement> statements, CloudbaseRdfConfiguration conf) throws RyaDAOException {
-        try {
-            while (statements.hasNext()) {
-                RyaStatement stmt = statements.next();
-                //query first
-                CloseableIteration<RyaStatement, RyaDAOException> query = this.queryEngine.query(stmt, conf);
-                while (query.hasNext()) {
-                    deleteSingleRyaStatement(query.next());
-                }
-            }
-            bw_spo.flush();
-            bw_po.flush();
-            bw_osp.flush();
-        } catch (Exception e) {
-            throw new RyaDAOException(e);
-        }
-    }
-
-    protected void deleteSingleRyaStatement(RyaStatement stmt) throws TripleRowResolverException, MutationsRejectedException {
-        Map<TABLE_LAYOUT, TripleRow> map = ryaContext.serializeTriple(stmt);
-        bw_spo.addMutation(deleteMutation(map.get(TABLE_LAYOUT.SPO)));
-        bw_po.addMutation(deleteMutation(map.get(TABLE_LAYOUT.PO)));
-        bw_osp.addMutation(deleteMutation(map.get(TABLE_LAYOUT.OSP)));
-    }
-
-    protected Mutation deleteMutation(TripleRow tripleRow) {
-        Mutation m = new Mutation(new Text(tripleRow.getRow()));
-
-        byte[] columnFamily = tripleRow.getColumnFamily();
-        Text cfText = columnFamily == null ? EMPTY_TEXT : new Text(columnFamily);
-
-        byte[] columnQualifier = tripleRow.getColumnQualifier();
-        Text cqText = columnQualifier == null ? EMPTY_TEXT : new Text(columnQualifier);
-
-        m.putDelete(cfText, cqText, new ColumnVisibility(tripleRow.getColumnVisibility()), tripleRow.getTimestamp());
-        return m;
-    }
-
-    protected void commit(Iterator<RyaStatement> commitStatements) throws RyaDAOException {
-        try {
-            //TODO: Should have a lock here in case we are adding and committing at the same time
-            while (commitStatements.hasNext()) {
-
-                Map<TABLE_LAYOUT, Collection<Mutation>> mutationMap = ryaTableMutationsFactory.serialize(commitStatements.next());
-                Collection<Mutation> spo = mutationMap.get(TABLE_LAYOUT.SPO);
-                Collection<Mutation> po = mutationMap.get(TABLE_LAYOUT.PO);
-                Collection<Mutation> osp = mutationMap.get(TABLE_LAYOUT.OSP);
-                bw_spo.addMutations(spo);
-                bw_po.addMutations(po);
-                bw_osp.addMutations(osp);
-            }
-
-            bw_spo.flush();
-            bw_po.flush();
-            bw_osp.flush();
-        } catch (Exception e) {
-            throw new RyaDAOException(e);
-        }
-    }
-
-    @Override
-    public void destroy() throws RyaDAOException {
-        if (!initialized) {
-            return;
-        }
-        //TODO: write lock
-        try {
-            initialized = false;
-            bw_osp.flush();
-            bw_spo.flush();
-            bw_po.flush();
-            bw_ns.flush();
-
-            bw_osp.close();
-            bw_spo.close();
-            bw_po.close();
-            bw_ns.close();
-        } catch (Exception e) {
-            throw new RyaDAOException(e);
-        }
-    }
-
-    @Override
-    public void addNamespace(String pfx, String namespace) throws RyaDAOException {
-        try {
-            Mutation m = new Mutation(new Text(pfx));
-            m.put(INFO_NAMESPACE_TXT, EMPTY_TEXT, new cloudbase.core.data.Value(
-                    namespace.getBytes()));
-            bw_ns.addMutation(m);
-            bw_ns.flush();
-        } catch (Exception e) {
-            throw new RyaDAOException(e);
-        }
-    }
-
-    @Override
-    public String getNamespace(String pfx) throws RyaDAOException {
-        try {
-            Scanner scanner = connector.createScanner(tableLayoutStrategy.getNs(),
-                    ALL_AUTHORIZATIONS);
-            scanner.fetchColumn(INFO_NAMESPACE_TXT, EMPTY_TEXT);
-            scanner.setRange(new Range(new Text(pfx)));
-            Iterator<Map.Entry<Key, cloudbase.core.data.Value>> iterator = scanner
-                    .iterator();
-
-            if (iterator.hasNext()) {
-                return new String(iterator.next().getValue().get());
-            }
-        } catch (Exception e) {
-            throw new RyaDAOException(e);
-        }
-        return null;
-    }
-
-    @Override
-    public void removeNamespace(String pfx) throws RyaDAOException {
-        try {
-            Mutation del = new Mutation(new Text(pfx));
-            del.putDelete(INFO_NAMESPACE_TXT, EMPTY_TEXT);
-            bw_ns.addMutation(del);
-            bw_ns.flush();
-        } catch (Exception e) {
-            throw new RyaDAOException(e);
-        }
-    }
-
-    @Override
-    public CloseableIteration<Namespace, RyaDAOException> iterateNamespace() throws RyaDAOException {
-        try {
-            Scanner scanner = connector.createScanner(tableLayoutStrategy.getNs(),
-                    ALL_AUTHORIZATIONS);
-            scanner.fetchColumnFamily(INFO_NAMESPACE_TXT);
-            Iterator<Map.Entry<Key, cloudbase.core.data.Value>> result = scanner.iterator();
-            return new CloudbaseNamespaceTableIterator(result);
-        } catch (Exception e) {
-            throw new RyaDAOException(e);
-        }
-    }
-
-    @Override
-    public RyaNamespaceManager<CloudbaseRdfConfiguration> getNamespaceManager() {
-        return this;
-    }
-
-    @Override
-    public void purge(RdfCloudTripleStoreConfiguration configuration) {
-        for (String tableName : getTables()) {
-            try {
-                purge(tableName, configuration.getAuths());
-                compact(tableName);
-            } catch (TableNotFoundException e) {
-                logger.error(e.getMessage());
-            } catch (MutationsRejectedException e) {
-                logger.error(e.getMessage());
-            }
-        }
-        try {
-            if (isInitialized()) {
-                checkVersion();
-            }
-        } catch (RyaDAOException e) {
-            logger.error("checkVersion() failed?", e);
-        }
-    }
-
-    @Override
-    public void dropAndDestroy() throws RyaDAOException {
-        for (String tableName : getTables()) {
-            try {
-                drop(tableName);
-            } catch (CBSecurityException e) {
-                logger.error(e.getMessage());
-                throw new RyaDAOException(e);
-            } catch (CBException e) {
-                logger.error(e.getMessage());
-                throw new RyaDAOException(e);
-            } catch (TableNotFoundException e) {
-                logger.warn(e.getMessage());
-            }
-        }
-        destroy();
-    }
-
-    public Connector getConnector() {
-        return connector;
-    }
-
-    public void setConnector(Connector connector) {
-        this.connector = connector;
-    }
-
-    public CloudbaseRdfConfiguration getConf() {
-        return conf;
-    }
-
-    public void setConf(CloudbaseRdfConfiguration conf) {
-        this.conf = conf;
-    }
-
-    public RyaTableMutationsFactory getRyaTableMutationsFactory() {
-        return ryaTableMutationsFactory;
-    }
-
-    public void setRyaTableMutationsFactory(RyaTableMutationsFactory ryaTableMutationsFactory) {
-        this.ryaTableMutationsFactory = ryaTableMutationsFactory;
-    }
-
-    public CloudbaseRyaQueryEngine getQueryEngine() {
-        return queryEngine;
-    }
-
-    public void setQueryEngine(CloudbaseRyaQueryEngine queryEngine) {
-        this.queryEngine = queryEngine;
-    }
-
-    protected String[] getTables() {
-        return new String[] {
-                tableLayoutStrategy.getSpo()
-                , tableLayoutStrategy.getPo()
-                , tableLayoutStrategy.getOsp()
-                , tableLayoutStrategy.getNs()
-                , tableLayoutStrategy.getEval()
-        };
-    }
-
-    private void purge(String tableName, String[] auths) throws TableNotFoundException, MutationsRejectedException {
-        if (tableExists(tableName)) {
-            logger.info("Purging cloudbase table: " + tableName);
-            BatchDeleter batchDeleter = createBatchDeleter(tableName, new Authorizations(auths));
-            try {
-                batchDeleter.setRanges(Collections.singleton(new Range()));
-                batchDeleter.delete();
-            } finally {
-                ((TabletServerBatchDeleter)batchDeleter).close();
-            }
-        }
-    }
-
-    private void compact(String tableName) {
-        Date now = new Date(System.currentTimeMillis());
-        SimpleDateFormat dateParser = new SimpleDateFormat("yyyyMMddHHmmssz", Locale.getDefault());
-        String nowStr = dateParser.format(now);
-        try {
-            for (Map.Entry<String, String> prop : connector.tableOperations().getProperties(tableName)) {
-                if (prop.getKey().equals(Property.TABLE_MAJC_COMPACTALL_AT.getKey())) {
-                    if (dateParser.parse(prop.getValue()).after(now)) {
-                        return;
-                    } else {
-                        break;
-                    }
-                }
-            }
-
-            connector.tableOperations().flush(tableName);
-            logger.info("Requesting major compaction for table " + tableName);
-            connector.tableOperations().setProperty(tableName, Property.TABLE_MAJC_COMPACTALL_AT.getKey(), nowStr);
-        } catch (Exception e) {
-            logger.error(e.getMessage());
-        }
-    }
-
-    private Authorizations getAuthorizations(String auth) {
-        if (auth == null) {
-            return new Authorizations();
-        } else {
-            String[] auths = auth.split(",");
-            return new Authorizations(auths);
-        }
-    }
-
-    private boolean tableExists(String tableName) {
-        return getConnector().tableOperations().exists(tableName);
-    }
-
-    private BatchDeleter createBatchDeleter(String tableName, Authorizations authorizations) throws TableNotFoundException {
-        return connector.createBatchDeleter(tableName, authorizations, NUM_THREADS, MAX_MEMORY, MAX_TIME, NUM_THREADS);
-    }
-
-    private void checkVersion() throws RyaDAOException {
-        String version = getVersion();
-        if (version == null) {
-            this.add(getVersionRyaStatement());
-        }
-        //TODO: Do a version check here
-    }
-
-    protected RyaStatement getVersionRyaStatement() {
-        return new RyaStatement(RTS_SUBJECT_RYA, RTS_VERSION_PREDICATE_RYA, VERSION_RYA);
-    }
-
-    private void drop(String tableName) throws CBSecurityException, CBException, TableNotFoundException {
-        logger.info("Dropping cloudbase table: " + tableName);
-        connector.tableOperations().delete(tableName);
-    }
-}

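The DAO lifecycle above is: wire a Connector and configuration, init() (creates the
spo/po/osp/ns tables, opens batch writers, stamps a version statement), mutate, then
destroy() (flush and close). An end-to-end sketch using only methods shown above;
connector construction and the statement values are illustrative:

    CloudbaseRyaDAO dao = new CloudbaseRyaDAO();
    dao.setConnector(connector);                  // existing cloudbase Connector
    dao.setConf(new CloudbaseRdfConfiguration());
    dao.init();                                   // tables + writers + version check

    RyaStatement stmt = new RyaStatement(
            new RyaURI("urn:s"), new RyaURI("urn:p"), new RyaType("o"));
    dao.add(stmt);                                // fans out to SPO, PO and OSP
    dao.delete(stmt, dao.getConf());              // queries for matches, then deletes them
    dao.destroy();                                // flushes and closes all batch writers
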
http://git-wip-us.apache.org/repos/asf/incubator-rya/blob/80faf06d/dao/cloudbase.rya/src/main/java/mvm/rya/cloudbase/RyaTableKeyValues.java
----------------------------------------------------------------------
diff --git a/dao/cloudbase.rya/src/main/java/mvm/rya/cloudbase/RyaTableKeyValues.java b/dao/cloudbase.rya/src/main/java/mvm/rya/cloudbase/RyaTableKeyValues.java
deleted file mode 100644
index 8869759..0000000
--- a/dao/cloudbase.rya/src/main/java/mvm/rya/cloudbase/RyaTableKeyValues.java
+++ /dev/null
@@ -1,93 +0,0 @@
-package mvm.rya.cloudbase;
-
-import cloudbase.core.data.Key;
-import cloudbase.core.data.Value;
-import cloudbase.core.security.ColumnVisibility;
-import mvm.rya.api.RdfCloudTripleStoreConstants;
-import mvm.rya.api.domain.RyaStatement;
-import mvm.rya.api.domain.RyaType;
-import mvm.rya.api.domain.RyaURI;
-import mvm.rya.api.resolver.RyaContext;
-import mvm.rya.api.resolver.triple.TripleRow;
-import mvm.rya.api.resolver.triple.TripleRowResolverException;
-import org.apache.hadoop.io.Text;
-
-import java.io.IOException;
-import java.util.ArrayList;
-import java.util.Collection;
-import java.util.Map;
-
-import static java.util.AbstractMap.SimpleEntry;
-import static mvm.rya.cloudbase.CloudbaseRdfConstants.EMPTY_CV;
-import static mvm.rya.cloudbase.CloudbaseRdfConstants.EMPTY_VALUE;
-
-public class RyaTableKeyValues {
-    public static final ColumnVisibility EMPTY_CV = new ColumnVisibility();
-    public static final Text EMPTY_CV_TEXT = new Text(EMPTY_CV.getExpression());
-
-    RyaContext instance = RyaContext.getInstance();
-
-    private RyaStatement stmt;
-    private Collection<Map.Entry<Key, Value>> spo = new ArrayList<Map.Entry<Key, Value>>();
-    private Collection<Map.Entry<Key, Value>> po = new ArrayList<Map.Entry<Key, Value>>();
-    private Collection<Map.Entry<Key, Value>> osp = new ArrayList<Map.Entry<Key, Value>>();
-
-    public RyaTableKeyValues(RyaStatement stmt) {
-        this.stmt = stmt;
-    }
-
-    public Collection<Map.Entry<Key, Value>> getSpo() {
-        return spo;
-    }
-
-    public Collection<Map.Entry<Key, Value>> getPo() {
-        return po;
-    }
-
-    public Collection<Map.Entry<Key, Value>> getOsp() {
-        return osp;
-    }
-
-    public RyaTableKeyValues invoke() throws IOException {
-        /**
-         * TODO: If there are contexts, do we still replicate the information into the default graph as well
-         * as the named graphs?
-         */try {
-            Map<RdfCloudTripleStoreConstants.TABLE_LAYOUT, mvm.rya.api.resolver.triple.TripleRow> rowMap = instance.serializeTriple(stmt);
-            TripleRow tripleRow = rowMap.get(RdfCloudTripleStoreConstants.TABLE_LAYOUT.SPO);
-            byte[] columnVisibility = tripleRow.getColumnVisibility();
-            Text cv = columnVisibility == null ? EMPTY_CV_TEXT : new Text(columnVisibility);
-            Long timestamp = tripleRow.getTimestamp();
-            timestamp = timestamp == null ? 0l : timestamp;
-            byte[] value = tripleRow.getValue();
-            Value v = value == null ? EMPTY_VALUE : new Value(value);
-            spo.add(new SimpleEntry(new Key(new Text(tripleRow.getRow()),
-                    new Text(tripleRow.getColumnFamily()),
-                    new Text(tripleRow.getColumnQualifier()),
-                    cv, timestamp), v));
-            tripleRow = rowMap.get(RdfCloudTripleStoreConstants.TABLE_LAYOUT.PO);
-            po.add(new SimpleEntry(new Key(new Text(tripleRow.getRow()),
-                    new Text(tripleRow.getColumnFamily()),
-                    new Text(tripleRow.getColumnQualifier()),
-                    cv, timestamp), v));
-            tripleRow = rowMap.get(RdfCloudTripleStoreConstants.TABLE_LAYOUT.OSP);
-            osp.add(new SimpleEntry(new Key(new Text(tripleRow.getRow()),
-                    new Text(tripleRow.getColumnFamily()),
-                    new Text(tripleRow.getColumnQualifier()),
-                    cv, timestamp), v));
-        } catch (TripleRowResolverException e) {
-            throw new IOException(e);
-        }
-        return this;
-    }
-
-    @Override
-    public String toString() {
-        return "RyaTableKeyValues{" +
-                "statement=" + stmt +
-                ", spo=" + spo +
-                ", po=" + po +
-                ", o=" + osp +
-                '}';
-    }
-}
\ No newline at end of file

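RyaTableKeyValues is the Key/Value-side twin of the mutations factory below:
invoke() serializes one statement and exposes the resulting entries per index
layout. Sketch:

    RyaTableKeyValues kvs = new RyaTableKeyValues(stmt).invoke(); // serialize once
    for (Map.Entry<Key, Value> e : kvs.getSpo()) {
        // e.getKey() carries row/cf/cq/visibility/timestamp for the SPO layout
    }
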
http://git-wip-us.apache.org/repos/asf/incubator-rya/blob/80faf06d/dao/cloudbase.rya/src/main/java/mvm/rya/cloudbase/RyaTableMutationsFactory.java
----------------------------------------------------------------------
diff --git a/dao/cloudbase.rya/src/main/java/mvm/rya/cloudbase/RyaTableMutationsFactory.java b/dao/cloudbase.rya/src/main/java/mvm/rya/cloudbase/RyaTableMutationsFactory.java
deleted file mode 100644
index ab9b37d..0000000
--- a/dao/cloudbase.rya/src/main/java/mvm/rya/cloudbase/RyaTableMutationsFactory.java
+++ /dev/null
@@ -1,81 +0,0 @@
-package mvm.rya.cloudbase;
-
-import cloudbase.core.data.Mutation;
-import cloudbase.core.data.Value;
-import cloudbase.core.security.ColumnVisibility;
-import mvm.rya.api.RdfCloudTripleStoreConstants;
-import mvm.rya.api.domain.RyaStatement;
-import mvm.rya.api.domain.RyaType;
-import mvm.rya.api.domain.RyaURI;
-import mvm.rya.api.resolver.RyaContext;
-import mvm.rya.api.resolver.triple.TripleRow;
-import mvm.rya.api.resolver.triple.TripleRowResolverException;
-import org.apache.hadoop.io.Text;
-
-import java.io.IOException;
-import java.util.ArrayList;
-import java.util.Collection;
-import java.util.HashMap;
-import java.util.Map;
-
-import static mvm.rya.api.RdfCloudTripleStoreConstants.EMPTY_TEXT;
-import static mvm.rya.api.RdfCloudTripleStoreConstants.TABLE_LAYOUT;
-import static mvm.rya.cloudbase.CloudbaseRdfConstants.EMPTY_CV;
-import static mvm.rya.cloudbase.CloudbaseRdfConstants.EMPTY_VALUE;
-
-public class RyaTableMutationsFactory {
-
-    RyaContext ryaContext = RyaContext.getInstance();
-
-    public RyaTableMutationsFactory() {
-    }
-
-    //TODO: Does this still need to be collections
-    public Map<RdfCloudTripleStoreConstants.TABLE_LAYOUT, Collection<Mutation>> serialize(
-            RyaStatement stmt) throws IOException {
-
-        Collection<Mutation> spo_muts = new ArrayList<Mutation>();
-        Collection<Mutation> po_muts = new ArrayList<Mutation>();
-        Collection<Mutation> osp_muts = new ArrayList<Mutation>();
-        /**
-         * TODO: If there are contexts, do we still replicate the information into the default graph as well
-         * as the named graphs?
-         */
-        try {
-            Map<TABLE_LAYOUT, TripleRow> rowMap = ryaContext.serializeTriple(stmt);
-            TripleRow tripleRow = rowMap.get(TABLE_LAYOUT.SPO);
-            spo_muts.add(createMutation(tripleRow));
-            tripleRow = rowMap.get(TABLE_LAYOUT.PO);
-            po_muts.add(createMutation(tripleRow));
-            tripleRow = rowMap.get(TABLE_LAYOUT.OSP);
-            osp_muts.add(createMutation(tripleRow));
-        } catch (TripleRowResolverException fe) {
-            throw new IOException(fe);
-        }
-
-        Map<RdfCloudTripleStoreConstants.TABLE_LAYOUT, Collection<Mutation>> mutations =
-                new HashMap<RdfCloudTripleStoreConstants.TABLE_LAYOUT, Collection<Mutation>>();
-        mutations.put(RdfCloudTripleStoreConstants.TABLE_LAYOUT.SPO, spo_muts);
-        mutations.put(RdfCloudTripleStoreConstants.TABLE_LAYOUT.PO, po_muts);
-        mutations.put(RdfCloudTripleStoreConstants.TABLE_LAYOUT.OSP, osp_muts);
-
-        return mutations;
-    }
-
-    protected Mutation createMutation(TripleRow tripleRow) {
-        Mutation mutation = new Mutation(new Text(tripleRow.getRow()));
-        byte[] columnVisibility = tripleRow.getColumnVisibility();
-        ColumnVisibility cv = columnVisibility == null ? EMPTY_CV : new ColumnVisibility(columnVisibility);
-        Long timestamp = tripleRow.getTimestamp();
-        timestamp = timestamp == null ? 0l : timestamp;
-        byte[] value = tripleRow.getValue();
-        Value v = value == null ? EMPTY_VALUE : new Value(value);
-        byte[] columnQualifier = tripleRow.getColumnQualifier();
-        Text cqText = columnQualifier == null ? EMPTY_TEXT : new Text(columnQualifier);
-        byte[] columnFamily = tripleRow.getColumnFamily();
-        Text cfText = columnFamily == null ? EMPTY_TEXT : new Text(columnFamily);
-
-        mutation.put(cfText,cqText, cv, timestamp, v);
-        return mutation;
-    }
-}
\ No newline at end of file

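The factory turns one RyaStatement into one Mutation per index layout so a writer
can fan it out to the three tables, exactly as CloudbaseRyaDAO.commit() above does.
Sketch; the three batch writers are placeholders:

    Map<TABLE_LAYOUT, Collection<Mutation>> muts =
            new RyaTableMutationsFactory().serialize(stmt);
    bw_spo.addMutations(muts.get(TABLE_LAYOUT.SPO));
    bw_po.addMutations(muts.get(TABLE_LAYOUT.PO));
    bw_osp.addMutations(muts.get(TABLE_LAYOUT.OSP));
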
http://git-wip-us.apache.org/repos/asf/incubator-rya/blob/80faf06d/dao/cloudbase.rya/src/main/java/mvm/rya/cloudbase/mr/eval/CloudbaseRdfCountTool.java
----------------------------------------------------------------------
diff --git a/dao/cloudbase.rya/src/main/java/mvm/rya/cloudbase/mr/eval/CloudbaseRdfCountTool.java b/dao/cloudbase.rya/src/main/java/mvm/rya/cloudbase/mr/eval/CloudbaseRdfCountTool.java
deleted file mode 100644
index 5c6e8cf..0000000
--- a/dao/cloudbase.rya/src/main/java/mvm/rya/cloudbase/mr/eval/CloudbaseRdfCountTool.java
+++ /dev/null
@@ -1,350 +0,0 @@
-package mvm.rya.cloudbase.mr.eval;
-
-import cloudbase.core.CBConstants;
-import cloudbase.core.client.mapreduce.CloudbaseInputFormat;
-import cloudbase.core.client.mapreduce.CloudbaseOutputFormat;
-import cloudbase.core.data.Key;
-import cloudbase.core.data.Mutation;
-import cloudbase.core.data.Range;
-import cloudbase.core.data.Value;
-import cloudbase.core.iterators.FilteringIterator;
-import cloudbase.core.iterators.filter.AgeOffFilter;
-import cloudbase.core.security.Authorizations;
-import cloudbase.core.security.ColumnVisibility;
-import cloudbase.core.util.Pair;
-import com.google.common.collect.Lists;
-import com.google.common.io.ByteArrayDataInput;
-import com.google.common.io.ByteArrayDataOutput;
-import com.google.common.io.ByteStreams;
-import mvm.rya.api.RdfCloudTripleStoreConstants;
-import mvm.rya.api.RdfCloudTripleStoreUtils;
-import mvm.rya.api.domain.RyaStatement;
-import mvm.rya.api.domain.RyaURI;
-import mvm.rya.api.resolver.RyaContext;
-import mvm.rya.api.resolver.triple.TripleRow;
-import mvm.rya.api.resolver.triple.TripleRowResolverException;
-import mvm.rya.cloudbase.CloudbaseRdfConstants;
-import mvm.rya.cloudbase.mr.utils.MRUtils;
-import org.apache.hadoop.conf.Configuration;
-import org.apache.hadoop.io.LongWritable;
-import org.apache.hadoop.io.Text;
-import org.apache.hadoop.mapreduce.Job;
-import org.apache.hadoop.mapreduce.Mapper;
-import org.apache.hadoop.mapreduce.Reducer;
-import org.apache.hadoop.util.Tool;
-import org.apache.hadoop.util.ToolRunner;
-import org.openrdf.model.ValueFactory;
-import org.openrdf.model.impl.ValueFactoryImpl;
-
-import java.io.IOException;
-import java.util.ArrayList;
-import java.util.Collection;
-import java.util.Date;
-
-/**
- * Count subject, predicate, object. Save in table
- * Class RdfCloudTripleStoreCountTool
- * Date: Apr 12, 2011
- * Time: 10:39:40 AM
- */
-public class CloudbaseRdfCountTool implements Tool {
-
-    public static final String TTL_PROP = "mvm.rya.cloudbase.sail.mr.eval.ttl";
-
-    private Configuration conf;
-
-    public static void main(String[] args) {
-        try {
-
-            ToolRunner.run(new Configuration(), new CloudbaseRdfCountTool(), args);
-        } catch (Exception e) {
-            e.printStackTrace();
-        }
-    }
-
-    /**
-     * cloudbase props
-     */
-    private RdfCloudTripleStoreConstants.TABLE_LAYOUT rdfTableLayout = RdfCloudTripleStoreConstants.TABLE_LAYOUT.OSP;
-    private String userName = "root";
-    private String pwd = "password";
-    private String instance = "stratus";
-    private String zk = "10.40.190.113:2181";
-    private Authorizations authorizations = CBConstants.NO_AUTHS;
-    private String ttl = null;
-
-    @Override
-    public int run(String[] strings) throws Exception {
-        conf.set(MRUtils.JOB_NAME_PROP, "Gather Evaluation Statistics");
-
-        //conf
-        zk = conf.get(MRUtils.CB_ZK_PROP, zk);
-        ttl = conf.get(MRUtils.CB_TTL_PROP, ttl);
-        instance = conf.get(MRUtils.CB_INSTANCE_PROP, instance);
-        userName = conf.get(MRUtils.CB_USERNAME_PROP, userName);
-        pwd = conf.get(MRUtils.CB_PWD_PROP, pwd);
-        boolean mock = conf.getBoolean(MRUtils.CB_MOCK_PROP, false);
-        String tablePrefix = conf.get(MRUtils.TABLE_PREFIX_PROPERTY, null);
-        if (tablePrefix != null)
-            RdfCloudTripleStoreConstants.prefixTables(tablePrefix);
-        rdfTableLayout = RdfCloudTripleStoreConstants.TABLE_LAYOUT.valueOf(
-                conf.get(MRUtils.TABLE_LAYOUT_PROP, RdfCloudTripleStoreConstants.TABLE_LAYOUT.OSP.toString()));
-
-        String auth = conf.get(MRUtils.CB_AUTH_PROP);
-        if (auth != null)
-            authorizations = new Authorizations(auth.split(","));
-
-        conf.setBoolean("mapred.map.tasks.speculative.execution", false);
-        conf.setBoolean("mapred.reduce.tasks.speculative.execution", false);
-        conf.set("io.sort.mb", "256");
-        Job job = new Job(conf);
-        job.setJarByClass(CloudbaseRdfCountTool.class);
-
-        //set ttl
-        ttl = conf.get(TTL_PROP);
-
-        // set up cloudbase input
-        job.setInputFormatClass(CloudbaseInputFormat.class);
-        CloudbaseInputFormat.setInputInfo(job, userName, pwd.getBytes(),
-                RdfCloudTripleStoreUtils.layoutPrefixToTable(rdfTableLayout, tablePrefix), authorizations);
-        CloudbaseInputFormat.setZooKeeperInstance(job, instance, zk);
-        Collection<Pair<Text, Text>> columns = new ArrayList<Pair<Text, Text>>();
-        //TODO: What about named graphs/contexts here?
-//        final Pair pair = new Pair(RdfCloudTripleStoreConstants.INFO_TXT, RdfCloudTripleStoreConstants.INFO_TXT);
-//        columns.add(pair);
-//        CloudbaseInputFormat.fetchColumns(job, columns);
-        if (ttl != null) {
-            CloudbaseInputFormat.setIterator(job, 1, FilteringIterator.class.getName(), "filteringIterator");
-            CloudbaseInputFormat.setIteratorOption(job, "filteringIterator", "0", AgeOffFilter.class.getName());
-            CloudbaseInputFormat.setIteratorOption(job, "filteringIterator", "0.ttl", ttl);
-        }
-
-        CloudbaseInputFormat.setRanges(job, Lists.newArrayList(new Range(new Text(new byte[]{}), new Text(new byte[]{Byte.MAX_VALUE}))));
-
-        // set input output of the particular job
-        job.setMapOutputKeyClass(Text.class);
-        job.setMapOutputValueClass(LongWritable.class);
-        job.setOutputKeyClass(Text.class);
-        job.setOutputValueClass(Mutation.class);
-
-        // set mapper and reducer classes
-        job.setMapperClass(CountPiecesMapper.class);
-        job.setCombinerClass(CountPiecesCombiner.class);
-        job.setReducerClass(CountPiecesReducer.class);
-
-        CloudbaseOutputFormat.setOutputInfo(job, userName, pwd.getBytes(), true, tablePrefix + RdfCloudTripleStoreConstants.TBL_EVAL_SUFFIX);
-        CloudbaseOutputFormat.setZooKeeperInstance(job, instance, zk);
-        job.setOutputFormatClass(CloudbaseOutputFormat.class);
-
-        // Submit the job
-        Date startTime = new Date();
-        System.out.println("Job started: " + startTime);
-        int exitCode = job.waitForCompletion(true) ? 0 : 1;
-
-        if (exitCode == 0) {
-            Date end_time = new Date();
-            System.out.println("Job ended: " + end_time);
-            System.out.println("The job took "
-                    + (end_time.getTime() - startTime.getTime()) / 1000
-                    + " seconds.");
-            return 0;
-        } else {
-            System.out.println("Job Failed!!!");
-        }
-
-        return -1;
-    }
-
-    @Override
-    public void setConf(Configuration configuration) {
-        this.conf = configuration;
-    }
-
-    @Override
-    public Configuration getConf() {
-        return conf;
-    }
-
-    public String getInstance() {
-        return instance;
-    }
-
-    public void setInstance(String instance) {
-        this.instance = instance;
-    }
-
-    public String getPwd() {
-        return pwd;
-    }
-
-    public void setPwd(String pwd) {
-        this.pwd = pwd;
-    }
-
-    public String getZk() {
-        return zk;
-    }
-
-    public void setZk(String zk) {
-        this.zk = zk;
-    }
-
-    public String getTtl() {
-        return ttl;
-    }
-
-    public void setTtl(String ttl) {
-        this.ttl = ttl;
-    }
-
-    public String getUserName() {
-        return userName;
-    }
-
-    public void setUserName(String userName) {
-        this.userName = userName;
-    }
-
-    public static class CountPiecesMapper extends Mapper<Key, Value, Text, LongWritable> {
-
-        public static final byte[] EMPTY_BYTES = new byte[0];
-        private RdfCloudTripleStoreConstants.TABLE_LAYOUT tableLayout = RdfCloudTripleStoreConstants.TABLE_LAYOUT.OSP;
-
-        ValueFactoryImpl vf = new ValueFactoryImpl();
-
-        private Text keyOut = new Text();
-        private LongWritable valOut = new LongWritable(1);
-        private RyaContext ryaContext = RyaContext.getInstance();
-
-        @Override
-        protected void setup(Context context) throws IOException, InterruptedException {
-            super.setup(context);
-            Configuration conf = context.getConfiguration();
-            tableLayout = RdfCloudTripleStoreConstants.TABLE_LAYOUT.valueOf(
-                    conf.get(MRUtils.TABLE_LAYOUT_PROP, RdfCloudTripleStoreConstants.TABLE_LAYOUT.OSP.toString()));
-        }
-
-        @Override
-        protected void map(Key key, Value value, Context context) throws IOException, InterruptedException {
-            try {
-                RyaStatement statement = ryaContext.deserializeTriple(tableLayout, new TripleRow(key.getRow().getBytes(), key.getColumnFamily().getBytes(), key.getColumnQualifier().getBytes()));
-                //count each piece subject, pred, object
-
-                String subj = statement.getSubject().getData();
-                String pred = statement.getPredicate().getData();
-//                byte[] objBytes = tripleFormat.getValueFormat().serialize(statement.getObject());
-                RyaURI scontext = statement.getContext();
-                boolean includesContext = scontext != null;
-                String scontext_str = (includesContext) ? scontext.getData() : null;
-
-                ByteArrayDataOutput output = ByteStreams.newDataOutput();
-                output.writeUTF(subj);
-                output.writeUTF(RdfCloudTripleStoreConstants.SUBJECT_CF);
-                output.writeBoolean(includesContext);
-                if (includesContext)
-                    output.writeUTF(scontext_str);
-                keyOut.set(output.toByteArray());
-                context.write(keyOut, valOut);
-
-                output = ByteStreams.newDataOutput();
-                output.writeUTF(pred);
-                output.writeUTF(RdfCloudTripleStoreConstants.PRED_CF);
-                output.writeBoolean(includesContext);
-                if (includesContext)
-                    output.writeUTF(scontext_str);
-                keyOut.set(output.toByteArray());
-                context.write(keyOut, valOut);
-
-
-                //TODO: Obj in eval stats table?
-//                output = ByteStreams.newDataOutput();
-//                output.write(objBytes);
-//                output.writeByte(RdfCloudTripleStoreConstants.DELIM_BYTE);
-//                output.writeUTF(RdfCloudTripleStoreConstants.OBJ_CF);
-//                output.writeBoolean(includesContext);
-//                if (includesContext)
-//                    output.write(scontext_bytes);
-//                keyOut.set(output.toByteArray());
-//                context.write(keyOut, valOut);
-            } catch (TripleRowResolverException e) {
-                throw new IOException(e);
-            }
-        }
-    }
-
-    public static class CountPiecesCombiner extends Reducer<Text, LongWritable, Text, LongWritable> {
-
-        private LongWritable valOut = new LongWritable();
-
-        // TODO: can still add up to be larger I guess
-        // any count lower than this does not need to be saved
-        public static final int TOO_LOW = 2;
-
-        @Override
-        protected void reduce(Text key, Iterable<LongWritable> values, Context context) throws IOException, InterruptedException {
-            long count = 0;
-            for (LongWritable lw : values) {
-                count += lw.get();
-            }
-
-            if (count <= TOO_LOW)
-                return;
-
-            valOut.set(count);
-            context.write(key, valOut);
-        }
-
-    }
-
-    public static class CountPiecesReducer extends Reducer<Text, LongWritable, Text, Mutation> {
-
-        Text row = new Text();
-        Text cat_txt = new Text();
-        Value v_out = new Value();
-        ValueFactory vf = new ValueFactoryImpl();
-
-        // any count lower than this does not need to be saved
-        public static final int TOO_LOW = 10;
-        private String tablePrefix;
-        protected Text table;
-        private ColumnVisibility cv = CloudbaseRdfConstants.EMPTY_CV;
-
-        @Override
-        protected void setup(Context context) throws IOException, InterruptedException {
-            super.setup(context);
-            tablePrefix = context.getConfiguration().get(MRUtils.TABLE_PREFIX_PROPERTY, RdfCloudTripleStoreConstants.TBL_PRFX_DEF);
-            table = new Text(tablePrefix + RdfCloudTripleStoreConstants.TBL_EVAL_SUFFIX);
-            final String cv_s = context.getConfiguration().get(MRUtils.CB_CV_PROP);
-            if (cv_s != null)
-                cv = new ColumnVisibility(cv_s);
-        }
-
-        @Override
-        protected void reduce(Text key, Iterable<LongWritable> values, Context context) throws IOException, InterruptedException {
-            long count = 0;
-            for (LongWritable lw : values) {
-                count += lw.get();
-            }
-
-            if (count <= TOO_LOW)
-                return;
-
-            ByteArrayDataInput badi = ByteStreams.newDataInput(key.getBytes());
-            String v = badi.readUTF();
-            cat_txt.set(badi.readUTF());
-
-            Text columnQualifier = RdfCloudTripleStoreConstants.EMPTY_TEXT;
-            boolean includesContext = badi.readBoolean();
-            if (includesContext) {
-                columnQualifier = new Text(badi.readUTF());
-            }
-
-            row.set(v);
-            Mutation m = new Mutation(row);
-            v_out.set((count + "").getBytes());
-            m.put(cat_txt, columnQualifier, cv, v_out);
-            context.write(table, m);
-        }
-
-    }
-}
\ No newline at end of file


[36/56] [abbrv] incubator-rya git commit: RYA-7 POM and License Clean-up for Apache Move

Posted by mi...@apache.org.
http://git-wip-us.apache.org/repos/asf/incubator-rya/blob/80faf06d/extras/indexing/src/main/java/mvm/rya/indexing/accumulo/freetext/query/QueryParserTreeConstants.java
----------------------------------------------------------------------
diff --git a/extras/indexing/src/main/java/mvm/rya/indexing/accumulo/freetext/query/QueryParserTreeConstants.java b/extras/indexing/src/main/java/mvm/rya/indexing/accumulo/freetext/query/QueryParserTreeConstants.java
index ca16e86..fcf0dc7 100644
--- a/extras/indexing/src/main/java/mvm/rya/indexing/accumulo/freetext/query/QueryParserTreeConstants.java
+++ b/extras/indexing/src/main/java/mvm/rya/indexing/accumulo/freetext/query/QueryParserTreeConstants.java
@@ -2,25 +2,24 @@
 package mvm.rya.indexing.accumulo.freetext.query;
 
 /*
- * #%L
- * mvm.rya.indexing.accumulo
- * %%
- * Copyright (C) 2014 Rya
- * %%
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * You may obtain a copy of the License at
- * 
- *      http://www.apache.org/licenses/LICENSE-2.0
- * 
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- * #L%
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ * 
+ *   http://www.apache.org/licenses/LICENSE-2.0
+ * 
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied.  See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
  */
 
 public interface QueryParserTreeConstants
 {
   public int JJTSIMPLENODE = 0;

http://git-wip-us.apache.org/repos/asf/incubator-rya/blob/80faf06d/extras/indexing/src/main/java/mvm/rya/indexing/accumulo/freetext/query/SimpleCharStream.java
----------------------------------------------------------------------
diff --git a/extras/indexing/src/main/java/mvm/rya/indexing/accumulo/freetext/query/SimpleCharStream.java b/extras/indexing/src/main/java/mvm/rya/indexing/accumulo/freetext/query/SimpleCharStream.java
index bb7d581..3f49d1f 100644
--- a/extras/indexing/src/main/java/mvm/rya/indexing/accumulo/freetext/query/SimpleCharStream.java
+++ b/extras/indexing/src/main/java/mvm/rya/indexing/accumulo/freetext/query/SimpleCharStream.java
@@ -3,25 +3,24 @@
 package mvm.rya.indexing.accumulo.freetext.query;
 
 /*
- * #%L
- * mvm.rya.indexing.accumulo
- * %%
- * Copyright (C) 2014 Rya
- * %%
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * You may obtain a copy of the License at
- * 
- *      http://www.apache.org/licenses/LICENSE-2.0
- * 
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- * #L%
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ * 
+ *   http://www.apache.org/licenses/LICENSE-2.0
+ * 
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied.  See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+
+
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ * 
+ *   http://www.apache.org/licenses/LICENSE-2.0
+ * 
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied.  See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+
+
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ * 
+ *   http://www.apache.org/licenses/LICENSE-2.0
+ * 
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied.  See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+
+
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ * 
+ *   http://www.apache.org/licenses/LICENSE-2.0
+ * 
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied.  See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+
+
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ * 
+ *   http://www.apache.org/licenses/LICENSE-2.0
+ * 
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied.  See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+
+
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ * 
+ *   http://www.apache.org/licenses/LICENSE-2.0
+ * 
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied.  See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+
+
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ * 
+ *   http://www.apache.org/licenses/LICENSE-2.0
+ * 
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied.  See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+
+
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ * 
+ *   http://www.apache.org/licenses/LICENSE-2.0
+ * 
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied.  See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+
+
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ * 
+ *   http://www.apache.org/licenses/LICENSE-2.0
+ * 
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied.  See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+
+
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ * 
+ *   http://www.apache.org/licenses/LICENSE-2.0
+ * 
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied.  See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+
+
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ * 
+ *   http://www.apache.org/licenses/LICENSE-2.0
+ * 
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied.  See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+
+
+
 /**
  * An implementation of interface CharStream, where the stream is assumed to
  * contain only ASCII characters (without unicode processing).


[23/56] [abbrv] incubator-rya git commit: RYA-7 POM and License Clean-up for Apache Move

Posted by mi...@apache.org.
http://git-wip-us.apache.org/repos/asf/incubator-rya/blob/80faf06d/partition/mr.partition.rdf/src/main/java/mvm/mmrts/rdf/partition/mr/fileinput/bulk/BulkNtripsInputTool.java
----------------------------------------------------------------------
diff --git a/partition/mr.partition.rdf/src/main/java/mvm/mmrts/rdf/partition/mr/fileinput/bulk/BulkNtripsInputTool.java b/partition/mr.partition.rdf/src/main/java/mvm/mmrts/rdf/partition/mr/fileinput/bulk/BulkNtripsInputTool.java
deleted file mode 100644
index fea882d..0000000
--- a/partition/mr.partition.rdf/src/main/java/mvm/mmrts/rdf/partition/mr/fileinput/bulk/BulkNtripsInputTool.java
+++ /dev/null
@@ -1,326 +0,0 @@
-package mvm.mmrts.rdf.partition.mr.fileinput.bulk;
-
-import cloudbase.core.client.Connector;
-import cloudbase.core.client.Instance;
-import cloudbase.core.client.ZooKeeperInstance;
-import cloudbase.core.client.mapreduce.CloudbaseFileOutputFormat;
-import cloudbase.core.client.mapreduce.lib.partition.RangePartitioner;
-import cloudbase.core.data.Key;
-import cloudbase.core.data.Value;
-import cloudbase.core.util.TextUtil;
-import com.google.common.base.Preconditions;
-import mvm.rya.cloudbase.utils.bulk.KeyRangePartitioner;
-import mvm.mmrts.rdf.partition.shard.DateHashModShardValueGenerator;
-import org.apache.commons.codec.binary.Base64;
-import org.apache.hadoop.conf.Configuration;
-import org.apache.hadoop.conf.Configured;
-import org.apache.hadoop.fs.FileSystem;
-import org.apache.hadoop.fs.Path;
-import org.apache.hadoop.io.DataInputBuffer;
-import org.apache.hadoop.io.LongWritable;
-import org.apache.hadoop.io.RawComparator;
-import org.apache.hadoop.io.Text;
-import org.apache.hadoop.mapreduce.Job;
-import org.apache.hadoop.mapreduce.Mapper;
-import org.apache.hadoop.mapreduce.Reducer;
-import org.apache.hadoop.mapreduce.lib.input.TextInputFormat;
-import org.apache.hadoop.util.Tool;
-import org.apache.hadoop.util.ToolRunner;
-import org.openrdf.model.Statement;
-import org.openrdf.rio.RDFHandler;
-import org.openrdf.rio.RDFHandlerException;
-import org.openrdf.rio.RDFParser;
-import org.openrdf.rio.ntriples.NTriplesParserFactory;
-
-import java.io.BufferedOutputStream;
-import java.io.IOException;
-import java.io.PrintStream;
-import java.io.StringReader;
-import java.util.Collection;
-
-import static mvm.mmrts.rdf.partition.PartitionConstants.*;
-import static mvm.mmrts.rdf.partition.utils.RdfIO.writeStatement;
-import static mvm.mmrts.rdf.partition.utils.RdfIO.writeValue;
-
-/**
- * Take large ntrips files and use MapReduce and Cloudbase
- * Bulk ingest techniques to load into the table in our partition format.
- * <p/>
- * Input: NTrips file
- * Map:
- * - key : shard row - Text
- * - value : stmt in doc triple format - Text
- * Partitioner: RangePartitioner
- * Reduce:
- * - key : all the entries for each triple - Cloudbase Key
- * Class BulkNtripsInputTool
- * Date: Sep 13, 2011
- * Time: 10:00:17 AM
- */
-public class BulkNtripsInputTool extends Configured implements Tool {
-
-    private static DateHashModShardValueGenerator generator = new DateHashModShardValueGenerator();
-    public static final String BASE_MOD = "baseMod";
-
-    @Override
-    public int run(String[] args) throws Exception {
-        Preconditions.checkArgument(args.length >= 7, "Usage: hadoop jar jarfile BulkNtripsInputTool <cb instance>" +
-                " <zookeepers> <username> <password> <output table> <hdfs ntrips dir> <work dir> (<shard size>)");
-
-        Configuration conf = getConf();
-        PrintStream out = null;
-        try {
-            Job job = new Job(conf, "Bulk Ingest NTrips to Partition RDF");
-            job.setJarByClass(this.getClass());
-
-            //setting long job
-            job.getConfiguration().setBoolean("mapred.map.tasks.speculative.execution", false);
-            job.getConfiguration().setBoolean("mapred.reduce.tasks.speculative.execution", false);
-            job.getConfiguration().set("io.sort.mb", "256");
-
-            job.setInputFormatClass(TextInputFormat.class);
-
-            job.setMapperClass(ParseNtripsMapper.class);
-            job.setMapOutputKeyClass(Key.class);
-            job.setMapOutputValueClass(Value.class);
-
-            job.setCombinerClass(OutStmtMutationsReducer.class);
-            job.setReducerClass(OutStmtMutationsReducer.class);
-            job.setOutputFormatClass(CloudbaseFileOutputFormat.class);
-            CloudbaseFileOutputFormat.setZooKeeperInstance(job, args[0], args[1]);
-
-            Instance instance = new ZooKeeperInstance(args[0], args[1]);
-            String user = args[2];
-            byte[] pass = args[3].getBytes();
-            String tableName = args[4];
-            String inputDir = args[5];
-            String workDir = args[6];
-            if(args.length > 7) {
-                int baseMod = Integer.parseInt(args[7]);
-                generator.setBaseMod(baseMod);
-                job.getConfiguration().setInt(BASE_MOD, baseMod);
-            }
-
-            Connector connector = instance.getConnector(user, pass);
-
-            TextInputFormat.setInputPaths(job, new Path(inputDir));
-
-            FileSystem fs = FileSystem.get(conf);
-            Path workPath = new Path(workDir);
-            if (fs.exists(workPath))
-                fs.delete(workPath, true);
-
-            CloudbaseFileOutputFormat.setOutputPath(job, new Path(workDir + "/files"));
-
-            out = new PrintStream(new BufferedOutputStream(fs.create(new Path(workDir + "/splits.txt"))));
-
-            Collection<Text> splits = connector.tableOperations().getSplits(tableName, Integer.MAX_VALUE);
-            for (Text split : splits)
-                out.println(new String(Base64.encodeBase64(TextUtil.getBytes(split))));
-
-            job.setNumReduceTasks(splits.size() + 1);
-            out.close();
-
-            job.setPartitionerClass(KeyRangePartitioner.class);
-            RangePartitioner.setSplitFile(job, workDir + "/splits.txt");
-
-            job.waitForCompletion(true);
-
-            connector.tableOperations().importDirectory(
-                    tableName,
-                    workDir + "/files",
-                    workDir + "/failures",
-                    20,
-                    4,
-                    false);
-
-        } catch (Exception e) {
-            throw new RuntimeException(e);
-        } finally {
-            if (out != null)
-                out.close();
-        }
-
-        return 0;
-    }
-
-    public static void main(String[] args) {
-        try {
-            ToolRunner.run(new Configuration(), new BulkNtripsInputTool(), args);
-        } catch (Exception e) {
-            e.printStackTrace();
-        }
-    }
-
-    /**
-     * input: ntrips format triple
-     * <p/>
-     * output: key: shard row from generator
-     * value: stmt in serialized format (document format)
-     */
-    public static class ParseNtripsMapper extends Mapper<LongWritable, Text, Key, Value> {
-        private static final NTriplesParserFactory N_TRIPLES_PARSER_FACTORY = new NTriplesParserFactory();
-
-        private Text outputKey = new Text();
-        private Text outputValue = new Text();
-        private RDFParser parser;
-        private static byte[] EMPTY_BYTE_ARRAY = new byte[0];
-
-        @Override
-        protected void setup(final Context context) throws IOException, InterruptedException {
-            super.setup(context);
-            Configuration conf = context.getConfiguration();
-            generator.setBaseMod(conf.getInt(BASE_MOD, generator.getBaseMod()));
-            parser = N_TRIPLES_PARSER_FACTORY.getParser();
-            parser.setRDFHandler(new RDFHandler() {
-
-                @Override
-                public void startRDF() throws RDFHandlerException {
-
-                }
-
-                @Override
-                public void endRDF() throws RDFHandlerException {
-
-                }
-
-                @Override
-                public void handleNamespace(String s, String s1) throws RDFHandlerException {
-
-                }
-
-                @Override
-                public void handleStatement(Statement statement) throws RDFHandlerException {
-                    try {
-//                        byte[] doc_serialized = writeStatement(statement, true);
-                        Text shard = new Text(generator.generateShardValue(statement.getSubject()));
-
-                        context.write(new Key(shard, DOC, new Text(writeStatement(statement, true))), EMPTY_VALUE);
-                        context.write(new Key(shard, INDEX, new Text(writeStatement(statement, false))), EMPTY_VALUE);
-                        //TODO: Wish we didn't have to do this constantly, probably better to just aggregate all subjects and do it once
-                        context.write(new Key(new Text(writeValue(statement.getSubject())), shard, EMPTY_TXT), EMPTY_VALUE);
-
-//                        outputKey.set(key);
-//                        outputValue.set(doc_serialized);
-//                        context.write(outputKey, outputValue);
-//                        outputKey.set(writeValue(statement.getSubject()));
-//                        outputValue.set(EMPTY_BYTE_ARRAY);
-//                        context.write(outputKey, outputValue);
-                    } catch (Exception e) {
-                        throw new RDFHandlerException(e);
-                    }
-                }
-
-                @Override
-                public void handleComment(String s) throws RDFHandlerException {
-
-                }
-            });
-        }
-
-        @Override
-        public void map(LongWritable key, Text value, Context output)
-                throws IOException, InterruptedException {
-            try {
-                parser.parse(new StringReader(value.toString()), "");
-            } catch (Exception e) {
-                throw new IOException("Exception occurred parsing ntrips triple[" + value + "]");
-            }
-        }
-    }
-
-    public static class OutStmtMutationsReducer extends Reducer<Key, Value, Key, Value> {
-
-        public void reduce(Key key, Iterable<Value> values, Context output)
-                throws IOException, InterruptedException {
-            output.write(key, EMPTY_VALUE);
-//            System.out.println(key);
-//            for (Value value : values) {
-//                System.out.println(value);
-            /**
-             * Each of these is a triple.
-             * 1. format back to statement
-             * 2. Output the doc,index key,value pairs for each triple
-             */
-//                Statement stmt = readStatement(ByteStreams.newDataInput(value.getBytes()), VALUE_FACTORY);
-//                output.write(new Key(shardKey, DOC, new Text(writeStatement(stmt, true))), EMPTY_VALUE);
-//                output.write(new Key(shardKey, INDEX, new Text(writeStatement(stmt, false))), EMPTY_VALUE);
-//                //TODO: Wish we didn't have to do this constantly, probably better to just aggregate all subjects and do it once
-//                output.write(new Key(new Text(writeValue(stmt.getSubject())), shardKey, EMPTY_TXT), EMPTY_VALUE);
-//            }
-        }
-    }
-
-    public static class EmbedKeyGroupingComparator implements RawComparator<Text> {
-
-        public EmbedKeyGroupingComparator() {
-
-        }
-
-        @Override
-        public int compare(byte[] arg0, int arg1, int arg2, byte[] arg3, int arg4,
-                           int arg5) {
-            DataInputBuffer n = new DataInputBuffer();
-
-            Text temp1 = new Text();
-            Text temp2 = new Text();
-
-            try {
-                n.reset(arg0, arg1, arg2);
-                temp1.readFields(n);
-                n.reset(arg3, arg4, arg5);
-                temp2.readFields(n);
-            } catch (IOException e) {
-                // TODO Auto-generated catch block
-                //e.printStackTrace();
-                throw new RuntimeException(e);
-            }
-
-            return compare(temp1, temp2);
-        }
-
-        @Override
-        public int compare(Text a1, Text a2) {
-            return EmbedKeyRangePartitioner.retrieveEmbedKey(a1).compareTo(EmbedKeyRangePartitioner.retrieveEmbedKey(a2));
-        }
-
-    }
-
-    /**
-     * Really it does a normal Text compare
-     */
-    public static class EmbedKeySortComparator implements RawComparator<Text> {
-
-        public EmbedKeySortComparator() {
-
-        }
-
-        @Override
-        public int compare(byte[] arg0, int arg1, int arg2, byte[] arg3, int arg4,
-                           int arg5) {
-            DataInputBuffer n = new DataInputBuffer();
-
-            Text temp1 = new Text();
-            Text temp2 = new Text();
-
-            try {
-                n.reset(arg0, arg1, arg2);
-                temp1.readFields(n);
-                n.reset(arg3, arg4, arg5);
-                temp2.readFields(n);
-            } catch (IOException e) {
-                // TODO Auto-generated catch block
-                //e.printStackTrace();
-                throw new RuntimeException(e);
-            }
-
-            return compare(temp1, temp2);
-        }
-
-        @Override
-        public int compare(Text a1, Text a2) {
-            return a1.compareTo(a2);
-        }
-
-    }
-}
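
For readers following this removal, here is a minimal standalone sketch of the
parse step the deleted ParseNtripsMapper performed, using the same Sesame RIO
classes the file imported. The class name NtripsParseSketch and the sample
triple are illustrative only; the real mapper emitted Cloudbase Key/Value
pairs sharded on the subject rather than printing.

    import java.io.StringReader;
    import java.util.ArrayList;
    import java.util.List;

    import org.openrdf.model.Statement;
    import org.openrdf.rio.RDFParser;
    import org.openrdf.rio.helpers.StatementCollector;
    import org.openrdf.rio.ntriples.NTriplesParserFactory;

    public class NtripsParseSketch {
        public static void main(String[] args) throws Exception {
            // One N-Triples line, as each map() call in the deleted tool saw it.
            String line = "<http://example.org/s> <http://example.org/p> \"o\" .";

            RDFParser parser = new NTriplesParserFactory().getParser();
            List<Statement> statements = new ArrayList<Statement>();
            parser.setRDFHandler(new StatementCollector(statements));
            parser.parse(new StringReader(line), ""); // base URI is unused for N-Triples

            for (Statement stmt : statements) {
                // The real mapper derived a shard row from the subject at this point.
                System.out.println(stmt.getSubject() + " " + stmt.getPredicate());
            }
        }
    }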

http://git-wip-us.apache.org/repos/asf/incubator-rya/blob/80faf06d/partition/mr.partition.rdf/src/main/java/mvm/mmrts/rdf/partition/mr/fileinput/bulk/EmbedKeyRangePartitioner.java
----------------------------------------------------------------------
diff --git a/partition/mr.partition.rdf/src/main/java/mvm/mmrts/rdf/partition/mr/fileinput/bulk/EmbedKeyRangePartitioner.java b/partition/mr.partition.rdf/src/main/java/mvm/mmrts/rdf/partition/mr/fileinput/bulk/EmbedKeyRangePartitioner.java
deleted file mode 100644
index f72c382..0000000
--- a/partition/mr.partition.rdf/src/main/java/mvm/mmrts/rdf/partition/mr/fileinput/bulk/EmbedKeyRangePartitioner.java
+++ /dev/null
@@ -1,28 +0,0 @@
-package mvm.mmrts.rdf.partition.mr.fileinput.bulk;
-
-import cloudbase.core.client.mapreduce.lib.partition.RangePartitioner;
-import mvm.mmrts.rdf.partition.PartitionConstants;
-import org.apache.hadoop.io.Text;
-import org.apache.hadoop.io.Writable;
-
-/**
- * Class EmbedKeyRangePartitioner
- * Date: Sep 13, 2011
- * Time: 1:49:35 PM
- */
-public class EmbedKeyRangePartitioner extends RangePartitioner {
-    @Override
-    public int getPartition(Text key, Writable value, int numPartitions) {
-        Text embedKey = retrieveEmbedKey(key);
-        return super.getPartition(embedKey, value, numPartitions);
-    }
-
-    public static Text retrieveEmbedKey(Text key) {
-        int split = key.find(PartitionConstants.INDEX_DELIM_STR);
-        if (split < 0)
-            return key;
-        Text newText = new Text();
-        newText.append(key.getBytes(), 0, split);
-        return newText;
-    }
-}
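
The embed-key trick above is small enough to exercise in isolation. Below is a
sketch mirroring the deleted retrieveEmbedKey logic; the delimiter value is an
assumption, since the value of PartitionConstants.INDEX_DELIM_STR is not shown
in this diff:

    import org.apache.hadoop.io.Text;

    public class EmbedKeySketch {
        // Placeholder value; the real one comes from PartitionConstants.INDEX_DELIM_STR.
        private static final String INDEX_DELIM_STR = "\u0000";

        // Same behavior as the deleted method: keep only the bytes before the
        // delimiter, so all index entries for one subject land in one partition.
        static Text retrieveEmbedKey(Text key) {
            int split = key.find(INDEX_DELIM_STR);
            if (split < 0)
                return key;
            Text newText = new Text();
            newText.append(key.getBytes(), 0, split);
            return newText;
        }

        public static void main(String[] args) {
            Text key = new Text("subject" + INDEX_DELIM_STR + "indexPart");
            System.out.println(retrieveEmbedKey(key)); // prints "subject"
        }
    }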

http://git-wip-us.apache.org/repos/asf/incubator-rya/blob/80faf06d/partition/mr.partition.rdf/src/main/java/mvm/mmrts/rdf/partition/mr/iterators/SortedEncodedRangeIterator.java
----------------------------------------------------------------------
diff --git a/partition/mr.partition.rdf/src/main/java/mvm/mmrts/rdf/partition/mr/iterators/SortedEncodedRangeIterator.java b/partition/mr.partition.rdf/src/main/java/mvm/mmrts/rdf/partition/mr/iterators/SortedEncodedRangeIterator.java
deleted file mode 100644
index a83d594..0000000
--- a/partition/mr.partition.rdf/src/main/java/mvm/mmrts/rdf/partition/mr/iterators/SortedEncodedRangeIterator.java
+++ /dev/null
@@ -1,45 +0,0 @@
-package mvm.mmrts.rdf.partition.mr.iterators;
-
-import cloudbase.core.data.Key;
-import cloudbase.core.data.Value;
-import cloudbase.core.iterators.IteratorEnvironment;
-import cloudbase.core.iterators.SortedKeyValueIterator;
-import cloudbase.core.util.TextUtil;
-import org.apache.commons.codec.binary.Base64;
-import org.apache.hadoop.io.Text;
-import ss.cloudbase.core.iterators.SortedRangeIterator;
-
-import java.io.IOException;
-import java.util.Map;
-
-/**
- * Class SortedEncodedRangeIterator
- * Date: Sep 8, 2011
- * Time: 6:01:28 PM
- */
-public class SortedEncodedRangeIterator extends SortedRangeIterator {
-
-    @Override
-    public void init(SortedKeyValueIterator<Key, Value> source, Map<String, String> options, IteratorEnvironment env) throws IOException {
-        super.init(source, options, env);
-        if (options.containsKey(OPTION_LOWER_BOUND)) {
-            lower = new Text(decode(options.get(OPTION_LOWER_BOUND)));
-        } else {
-            lower = new Text("\u0000");
-        }
-
-        if (options.containsKey(OPTION_UPPER_BOUND)) {
-            upper = new Text(decode(options.get(OPTION_UPPER_BOUND)));
-        } else {
-            upper = new Text("\u0000");
-        }
-    }
-
-    public static String encode(String txt) {
-        return new String(Base64.encodeBase64(txt.getBytes()));
-    }
-
-    public static String decode(String txt) {
-        return new String(Base64.decodeBase64(txt.getBytes()));
-    }
-}
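
The bound handling above is the only behavioral change from SortedRangeIterator:
bounds arrive Base64-encoded, presumably so that delimiter or non-printable
bytes survive the string-only iterator option map (that motivation is inferred
from the code, not stated in it). A round-trip sketch using the same
commons-codec calls:

    import org.apache.commons.codec.binary.Base64;

    public class RangeBoundEncodingSketch {

        static String encode(String txt) {
            return new String(Base64.encodeBase64(txt.getBytes()));
        }

        static String decode(String txt) {
            return new String(Base64.decodeBase64(txt.getBytes()));
        }

        public static void main(String[] args) {
            String lower = "shard_2011-09-08\u0000doc";
            String encoded = encode(lower);
            // The encoded form is safe to pass as a plain iterator option value.
            System.out.println(encoded + " -> " + decode(encoded));
        }
    }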

http://git-wip-us.apache.org/repos/asf/incubator-rya/blob/80faf06d/partition/mr.partition.rdf/src/main/java/mvm/mmrts/rdf/partition/mr/transform/AggregateTriplesBySubjectCombiner.java
----------------------------------------------------------------------
diff --git a/partition/mr.partition.rdf/src/main/java/mvm/mmrts/rdf/partition/mr/transform/AggregateTriplesBySubjectCombiner.java b/partition/mr.partition.rdf/src/main/java/mvm/mmrts/rdf/partition/mr/transform/AggregateTriplesBySubjectCombiner.java
deleted file mode 100644
index e360ca7..0000000
--- a/partition/mr.partition.rdf/src/main/java/mvm/mmrts/rdf/partition/mr/transform/AggregateTriplesBySubjectCombiner.java
+++ /dev/null
@@ -1,31 +0,0 @@
-package mvm.mmrts.rdf.partition.mr.transform;
-
-import org.apache.hadoop.io.*;
-import org.apache.hadoop.mapreduce.Reducer;
-
-import java.io.IOException;
-import java.util.Map;
-
-/**
- * Since each subject is located at most on one tablet, we should be able to assume that
- * no reducer is needed.  The Combine phase should aggregate properly.
- * <p/>
- * Class AggregateTriplesBySubjectReducer
- * Date: Sep 1, 2011
- * Time: 5:39:24 PM
- */
-public class AggregateTriplesBySubjectCombiner extends Reducer<Text, MapWritable, Text, MapWritable> {
-//    private LongWritable lwout = new LongWritable();
-    private MapWritable mwout = new MapWritable();
-
-    @Override
-    protected void reduce(Text key, Iterable<MapWritable> values, Context context) throws IOException, InterruptedException {
-        for (MapWritable value : values) {
-            for (Map.Entry<Writable, Writable> entry : value.entrySet()) {
-                mwout.put(WritableUtils.clone(entry.getKey(), context.getConfiguration()),
-                        WritableUtils.clone(entry.getValue(), context.getConfiguration()));
-            }
-        }
-        context.write(key, mwout);
-    }
-}
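
Note that the deleted combiner reuses a single mwout field across reduce()
calls without clearing it, so entries from one subject group could leak into
the next group's output. A self-contained sketch of the same merge pattern
that builds a fresh map per call; the names are illustrative:

    import java.util.Arrays;
    import java.util.Map;

    import org.apache.hadoop.conf.Configuration;
    import org.apache.hadoop.io.MapWritable;
    import org.apache.hadoop.io.Text;
    import org.apache.hadoop.io.Writable;
    import org.apache.hadoop.io.WritableUtils;

    public class MapWritableMergeSketch {

        // Fold many MapWritables into one, as the combiner's inner loop does.
        static MapWritable merge(Iterable<MapWritable> values, Configuration conf) {
            MapWritable out = new MapWritable();
            for (MapWritable value : values) {
                for (Map.Entry<Writable, Writable> entry : value.entrySet()) {
                    // Clone: Hadoop reuses Writable instances between iterations.
                    out.put(WritableUtils.clone(entry.getKey(), conf),
                            WritableUtils.clone(entry.getValue(), conf));
                }
            }
            return out;
        }

        public static void main(String[] args) {
            Configuration conf = new Configuration();
            MapWritable a = new MapWritable();
            a.put(new Text("name"), new Text("alice"));
            MapWritable b = new MapWritable();
            b.put(new Text("age"), new Text("30"));
            System.out.println(merge(Arrays.asList(a, b), conf).keySet());
        }
    }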

http://git-wip-us.apache.org/repos/asf/incubator-rya/blob/80faf06d/partition/mr.partition.rdf/src/main/java/mvm/mmrts/rdf/partition/mr/transform/AggregateTriplesBySubjectReducer.java
----------------------------------------------------------------------
diff --git a/partition/mr.partition.rdf/src/main/java/mvm/mmrts/rdf/partition/mr/transform/AggregateTriplesBySubjectReducer.java b/partition/mr.partition.rdf/src/main/java/mvm/mmrts/rdf/partition/mr/transform/AggregateTriplesBySubjectReducer.java
deleted file mode 100644
index 2ea5fa8..0000000
--- a/partition/mr.partition.rdf/src/main/java/mvm/mmrts/rdf/partition/mr/transform/AggregateTriplesBySubjectReducer.java
+++ /dev/null
@@ -1,37 +0,0 @@
-package mvm.mmrts.rdf.partition.mr.transform;
-
-import org.apache.hadoop.conf.Configuration;
-import org.apache.hadoop.io.*;
-import org.apache.hadoop.mapreduce.Reducer;
-
-import java.io.IOException;
-import java.util.Arrays;
-import java.util.HashMap;
-import java.util.Map;
-
-import static mvm.mmrts.rdf.partition.mr.transform.SparqlCloudbaseIFTransformerConstants.SELECT_FILTER;
-
-/**
- * Since each subject is located at most on one tablet, we should be able to assume that
- * no reducer is needed.  The Combine phase should aggregate properly.
- * <p/>
- * Class AggregateTriplesBySubjectReducer
- * Date: Sep 1, 2011
- * Time: 5:39:24 PM
- */
-public class AggregateTriplesBySubjectReducer extends Reducer<Text, MapWritable, LongWritable, MapWritable> {
-    private LongWritable lwout = new LongWritable();
-    private MapWritable mwout = new MapWritable();
-
-    @Override
-    protected void reduce(Text key, Iterable<MapWritable> values, Context context) throws IOException, InterruptedException {
-        for (MapWritable value : values) {
-            for (Map.Entry<Writable, Writable> entry : value.entrySet()) {
-                mwout.put(WritableUtils.clone(entry.getKey(), context.getConfiguration()),
-                        WritableUtils.clone(entry.getValue(), context.getConfiguration()));
-            }
-        }
-        lwout.set(key.hashCode());
-        context.write(lwout, mwout);
-    }
-}

http://git-wip-us.apache.org/repos/asf/incubator-rya/blob/80faf06d/partition/mr.partition.rdf/src/main/java/mvm/mmrts/rdf/partition/mr/transform/KeyValueToMapWrMapper.java
----------------------------------------------------------------------
diff --git a/partition/mr.partition.rdf/src/main/java/mvm/mmrts/rdf/partition/mr/transform/KeyValueToMapWrMapper.java b/partition/mr.partition.rdf/src/main/java/mvm/mmrts/rdf/partition/mr/transform/KeyValueToMapWrMapper.java
deleted file mode 100644
index 0630501..0000000
--- a/partition/mr.partition.rdf/src/main/java/mvm/mmrts/rdf/partition/mr/transform/KeyValueToMapWrMapper.java
+++ /dev/null
@@ -1,78 +0,0 @@
-package mvm.mmrts.rdf.partition.mr.transform;
-
-import cloudbase.core.data.Key;
-import cloudbase.core.data.Value;
-import com.google.common.collect.Maps;
-import com.google.common.collect.Sets;
-import com.google.common.io.ByteStreams;
-import mvm.mmrts.rdf.partition.PartitionConstants;
-import mvm.mmrts.rdf.partition.utils.RdfIO;
-import org.apache.hadoop.conf.Configuration;
-import org.apache.hadoop.io.MapWritable;
-import org.apache.hadoop.io.Text;
-import org.apache.hadoop.mapreduce.Mapper;
-import org.openrdf.model.Statement;
-
-import java.io.IOException;
-import java.util.*;
-
-import static mvm.mmrts.rdf.partition.mr.transform.SparqlCloudbaseIFTransformerConstants.*;
-
-/**
- * Will take a triple and output: <subject, predObj map>
- * <p/>
- * Class KeyValueToMapWrMapper
- * Date: Sep 1, 2011
- * Time: 4:56:42 PM
- */
-public class KeyValueToMapWrMapper extends Mapper<Key, Value, Text, MapWritable> {
-
-//    private List<String> predicateFilter = new ArrayList<String>();
-
-    private Text subjNameTxt;
-    private Text keyout = new Text();
-    private Text predout = new Text();
-    private Text objout = new Text();
-
-    private Map<String, String> predValueName = new HashMap();
-
-    @Override
-    protected void setup(Context context) throws IOException, InterruptedException {
-        super.setup(context);
-        //find the values to filter on
-        Configuration conf = context.getConfiguration();
-        String[] filter = conf.getStrings(SELECT_FILTER);
-        if (filter != null) {
-            for (String predValue : filter) {
-                String predName = conf.get(predValue);
-                if (predName != null)
-                    predValueName.put(predValue, predName);
-            }
-        }
-
-        String subjName = conf.get(SUBJECT_NAME);
-        if (subjName != null) {
-            //not sure it will ever be null
-            subjNameTxt = new Text(subjName);
-        }
-    }
-
-    @Override
-    protected void map(Key key, Value value, Context context) throws IOException, InterruptedException {
-        Statement stmt = RdfIO.readStatement(ByteStreams.newDataInput(key.getColumnQualifier().getBytes()), PartitionConstants.VALUE_FACTORY);
-        String predName = predValueName.get(stmt.getPredicate().stringValue());
-        if (predName == null)
-            return;
-
-        keyout.set(stmt.getSubject().stringValue());
-        predout.set(predName);
-        objout.set(stmt.getObject().stringValue());
-        MapWritable mw = new MapWritable();
-        mw.put(predout, objout);
-        if (subjNameTxt != null) {
-            mw.put(subjNameTxt, keyout);
-        }
-        context.write(keyout, mw);
-    }
-
-}
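
The setup() above drives projection entirely from the job configuration. A
sketch of the same lookup pattern follows; the key name "select.filter" is a
stand-in, since the real constant lives in the deleted
SparqlCloudbaseIFTransformerConstants and its value is not shown here:

    import java.util.HashMap;
    import java.util.Map;

    import org.apache.hadoop.conf.Configuration;

    public class PredicateFilterSketch {
        // Hypothetical key; the deleted code used SparqlCloudbaseIFTransformerConstants.SELECT_FILTER.
        static final String SELECT_FILTER = "select.filter";

        public static void main(String[] args) {
            Configuration conf = new Configuration();
            // Each filtered predicate URI maps to the variable name it binds.
            conf.setStrings(SELECT_FILTER, "http://example.org/name");
            conf.set("http://example.org/name", "name");

            Map<String, String> predValueName = new HashMap<String, String>();
            String[] filter = conf.getStrings(SELECT_FILTER);
            if (filter != null) {
                for (String predValue : filter) {
                    String predName = conf.get(predValue);
                    if (predName != null)
                        predValueName.put(predValue, predName);
                }
            }
            System.out.println(predValueName); // {http://example.org/name=name}
        }
    }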

http://git-wip-us.apache.org/repos/asf/incubator-rya/blob/80faf06d/partition/mr.partition.rdf/src/main/java/mvm/mmrts/rdf/partition/mr/transform/SparqlCloudbaseIFJob.java
----------------------------------------------------------------------
diff --git a/partition/mr.partition.rdf/src/main/java/mvm/mmrts/rdf/partition/mr/transform/SparqlCloudbaseIFJob.java b/partition/mr.partition.rdf/src/main/java/mvm/mmrts/rdf/partition/mr/transform/SparqlCloudbaseIFJob.java
deleted file mode 100644
index 56014f9..0000000
--- a/partition/mr.partition.rdf/src/main/java/mvm/mmrts/rdf/partition/mr/transform/SparqlCloudbaseIFJob.java
+++ /dev/null
@@ -1,118 +0,0 @@
-package mvm.mmrts.rdf.partition.mr.transform;
-
-import cloudbase.core.util.ArgumentChecker;
-import mvm.mmrts.rdf.partition.query.evaluation.FilterTimeIndexVisitor;
-import mvm.mmrts.rdf.partition.query.evaluation.SubjectGroupingOptimizer;
-import mvm.mmrts.rdf.partition.query.operators.ShardSubjectLookup;
-import org.apache.hadoop.conf.Configuration;
-import org.apache.hadoop.fs.FileSystem;
-import org.apache.hadoop.fs.Path;
-import org.apache.hadoop.mapreduce.Job;
-import org.apache.hadoop.mapreduce.lib.output.SequenceFileOutputFormat;
-import org.openrdf.query.QueryEvaluationException;
-import org.openrdf.query.algebra.QueryModelNode;
-import org.openrdf.query.algebra.TupleExpr;
-import org.openrdf.query.algebra.helpers.QueryModelVisitorBase;
-import org.openrdf.query.parser.QueryParser;
-import org.openrdf.query.parser.sparql.SPARQLParserFactory;
-
-/**
- * Class SparqlCloudbaseIFJob
- * Date: Sep 1, 2011
- * Time: 6:04:35 PM
- */
-public class SparqlCloudbaseIFJob {
-
-    private String[] queries;
-    private String table;
-
-    //Cloudbase properties
-    private String userName;
-    private String pwd;
-    private String instance;
-    private String zk;
-    //
-
-    private Class classOriginal; //Calling class for this job.
-    private String outputPath;
-
-    public SparqlCloudbaseIFJob(String table, String userName, String pwd, String instance, String zk,
-                                String outputPath, Class classOriginal, String... queries) {
-        ArgumentChecker.notNull(queries);
-        this.queries = queries;
-        this.table = table;
-        this.userName = userName;
-        this.pwd = pwd;
-        this.instance = instance;
-        this.zk = zk;
-        this.outputPath = outputPath;
-        this.classOriginal = classOriginal;
-    }
-
-    public String[] run() throws Exception {
-        int count = 0;
-        outputPath = outputPath + "/results/";
-        String[] resultsOut = new String[queries.length];
-
-        for (String query : queries) {
-            QueryParser parser = (new SPARQLParserFactory()).getParser();
-            TupleExpr expr = parser.parseQuery(query, "http://www.w3.org/1999/02/22-rdf-syntax-ns#").getTupleExpr();
-
-            final Configuration queryConf = new Configuration();
-            expr.visit(new FilterTimeIndexVisitor(queryConf));
-
-            (new SubjectGroupingOptimizer(queryConf)).optimize(expr, null, null);
-
-            //make sure of only one shardlookup
-            expr.visit(new QueryModelVisitorBase<RuntimeException>() {
-                int count = 0;
-
-                @Override
-                public void meetOther(QueryModelNode node) throws RuntimeException {
-                    super.meetOther(node);
-                    count++;
-                    if (count > 1)
-                        throw new IllegalArgumentException("Query can only have one subject-star lookup");
-                }
-            });
-
-            final Job job = new Job(queryConf);
-            job.setJarByClass(classOriginal);
-            job.setJobName("SparqlCloudbaseIFTransformer. Query: " + ((query.length() > 32) ? (query.substring(0, 32)) : (query)));
-
-            expr.visit(new QueryModelVisitorBase<RuntimeException>() {
-                @Override
-                public void meetOther(QueryModelNode node) throws RuntimeException {
-                    super.meetOther(node);
-
-                    //set up CloudbaseBatchScannerInputFormat here
-                    if (node instanceof ShardSubjectLookup) {
-                        System.out.println("Lookup: " + node);
-                        try {
-                            new SparqlCloudbaseIFTransformer((ShardSubjectLookup) node, queryConf, job, table,
-                                    userName, pwd, instance, zk);
-                        } catch (QueryEvaluationException e) {
-                            e.printStackTrace();
-                        }
-                    }
-                }
-            });
-
-
-            String resultOutPath = outputPath + "/result-" + count;
-            resultsOut[count] = resultOutPath;
-            Path outputDir = new Path(resultOutPath);
-            FileSystem dfs = FileSystem.get(outputDir.toUri(), queryConf);
-            if (dfs.exists(outputDir))
-                dfs.delete(outputDir, true);
-            job.setOutputFormatClass(SequenceFileOutputFormat.class);
-            SequenceFileOutputFormat.setOutputPath(job, outputDir);
-
-
-            // Submit the job
-            job.waitForCompletion(true);
-            count++;
-        }
-        return resultsOut;
-    }
-}
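
The job above parses each SPARQL query into a Sesame algebra tree and walks it
with visitors. A minimal sketch of that parse-and-visit step, using the same
parser factory and visitor base class; the query string is illustrative:

    import org.openrdf.query.algebra.StatementPattern;
    import org.openrdf.query.algebra.TupleExpr;
    import org.openrdf.query.algebra.helpers.QueryModelVisitorBase;
    import org.openrdf.query.parser.QueryParser;
    import org.openrdf.query.parser.sparql.SPARQLParserFactory;

    public class SparqlVisitSketch {
        public static void main(String[] args) throws Exception {
            String query = "SELECT ?s WHERE { ?s <http://example.org/p> ?o }";
            QueryParser parser = new SPARQLParserFactory().getParser();
            TupleExpr expr = parser.parseQuery(query, "http://example.org/").getTupleExpr();

            // The deleted job visited the tree to find its ShardSubjectLookup
            // nodes; here we just report each statement pattern.
            expr.visit(new QueryModelVisitorBase<RuntimeException>() {
                @Override
                public void meet(StatementPattern node) {
                    System.out.println("pattern on subject var: " + node.getSubjectVar().getName());
                }
            });
        }
    }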

http://git-wip-us.apache.org/repos/asf/incubator-rya/blob/80faf06d/partition/mr.partition.rdf/src/main/java/mvm/mmrts/rdf/partition/mr/transform/SparqlCloudbaseIFTransformer.java
----------------------------------------------------------------------
diff --git a/partition/mr.partition.rdf/src/main/java/mvm/mmrts/rdf/partition/mr/transform/SparqlCloudbaseIFTransformer.java b/partition/mr.partition.rdf/src/main/java/mvm/mmrts/rdf/partition/mr/transform/SparqlCloudbaseIFTransformer.java
deleted file mode 100644
index 38c9ea5..0000000
--- a/partition/mr.partition.rdf/src/main/java/mvm/mmrts/rdf/partition/mr/transform/SparqlCloudbaseIFTransformer.java
+++ /dev/null
@@ -1,331 +0,0 @@
-package mvm.mmrts.rdf.partition.mr.transform;
-
-import cloudbase.core.CBConstants;
-import cloudbase.core.client.TableNotFoundException;
-import cloudbase.core.data.Key;
-import cloudbase.core.data.Range;
-import com.google.common.io.ByteArrayDataOutput;
-import com.google.common.io.ByteStreams;
-import mvm.rya.cloudbase.utils.input.CloudbaseBatchScannerInputFormat;
-import mvm.mmrts.rdf.partition.mr.iterators.SortedEncodedRangeIterator;
-import mvm.mmrts.rdf.partition.query.operators.ShardSubjectLookup;
-import mvm.mmrts.rdf.partition.shard.DateHashModShardValueGenerator;
-import org.apache.hadoop.conf.Configuration;
-import org.apache.hadoop.io.LongWritable;
-import org.apache.hadoop.io.MapWritable;
-import org.apache.hadoop.io.Text;
-import org.apache.hadoop.mapreduce.Job;
-import org.openrdf.query.QueryEvaluationException;
-import org.openrdf.query.algebra.Var;
-import ss.cloudbase.core.iterators.GMDenIntersectingIterator;
-import ss.cloudbase.core.iterators.SortedRangeIterator;
-
-import java.io.IOException;
-import java.util.ArrayList;
-import java.util.Collections;
-import java.util.List;
-import java.util.Map;
-
-import static mvm.mmrts.rdf.partition.PartitionConstants.*;
-import static mvm.mmrts.rdf.partition.utils.RdfIO.writeValue;
-
-import static mvm.mmrts.rdf.partition.mr.transform.SparqlCloudbaseIFTransformerConstants.*;
-
-/**
- * Class SparqlCloudbaseIFTransformer
- * Date: Sep 1, 2011
- * Time: 11:28:48 AM
- */
-public class SparqlCloudbaseIFTransformer {
-
-    protected Job job;
-
-    protected String userName;
-    protected String pwd;
-    protected String instance;
-    protected String zk;
-
-    protected ShardSubjectLookup lookup;
-//    protected Configuration configuration;
-    protected String table;
-
-    protected DateHashModShardValueGenerator generator;
-
-    public SparqlCloudbaseIFTransformer(ShardSubjectLookup lookup, Configuration configuration, Job job, String table,
-                                        String userName, String pwd, String instance, String zk) throws QueryEvaluationException {
-        this(lookup, configuration, job, table, userName, pwd, instance, zk, new DateHashModShardValueGenerator());
-    }
-
-    public SparqlCloudbaseIFTransformer(ShardSubjectLookup lookup, Configuration configuration, Job job, String table,
-                                        String userName, String pwd, String instance, String zk, DateHashModShardValueGenerator generator) throws QueryEvaluationException {
-        this.lookup = lookup;
-//        this.configuration = configuration;
-        this.table = table;
-        this.job = job;
-        this.userName = userName;
-        this.pwd = pwd;
-        this.instance = instance;
-        this.zk = zk;
-        this.generator = generator;
-
-        this.initialize();
-    }
-
-
-    public void initialize() throws QueryEvaluationException {
-        try {
-            /**
-             * Here we will set up the BatchScanner based on the lookup
-             */
-            Var subject = lookup.getSubject();
-            List<Map.Entry<Var, Var>> where = retrieveWhereClause();
-            List<Map.Entry<Var, Var>> select = retrieveSelectClause();
-
-            //global start-end time
-            long start = job.getConfiguration().getLong(START_BINDING, 0);
-            long end = job.getConfiguration().getLong(END_BINDING, System.currentTimeMillis());
-
-            int whereSize = where.size() + ((!isTimeRange(lookup, job.getConfiguration())) ? 0 : 1);
-
-            if (subject.hasValue()
-                    && where.size() == 0  /* Not using whereSize, because we can set up the TimeRange in the scanner */
-                    && select.size() == 0) {
-                /**
-                 * Case 1: Subject is set, but predicate, object are not.
-                 * Return all for the subject
-                 */
-//                this.scanner = scannerForSubject((URI) subject.getValue());
-//                if (this.scanner == null) {
-//                    this.iter = new EmptyIteration();
-//                    return;
-//                }
-//                Map.Entry<Var, Var> predObj = lookup.getPredicateObjectPairs().get(0);
-//                this.iter = new SelectAllIterator(this.bindings, this.scanner.iterator(), predObj.getKey(), predObj.getValue());
-                throw new UnsupportedOperationException("Query Case not supported");
-            } else if (subject.hasValue()
-                    && where.size() == 0 /* Not using whereSize, because we can set up the TimeRange in the scanner */) {
-                /**
-                 * Case 2: Subject is set, and a few predicates are set, but no objects
-                 * Return all, and filter which predicates you are interested in
-                 */
-//                this.scanner = scannerForSubject((URI) subject.getValue());
-//                if (this.scanner == null) {
-//                    this.iter = new EmptyIteration();
-//                    return;
-//                }
-//                this.iter = new FilterIterator(this.bindings, this.scanner.iterator(), subject, select);
-                throw new UnsupportedOperationException("Query Case not supported");
-            } else if (subject.hasValue()
-                    && where.size() >= 1 /* Not using whereSize, because we can set up the TimeRange in the scanner */) {
-                /**
-                 * Case 2a: Subject is set, and a few predicates are set, and one object
-                 * TODO: For now we will ignore the predicate-object filter because we do not know how to query for this
-                 */
-//                this.scanner = scannerForSubject((URI) subject.getValue());
-//                if (this.scanner == null) {
-//                    this.iter = new EmptyIteration();
-//                    return;
-//                }
-//                this.iter = new FilterIterator(this.bindings, this.scanner.iterator(), subject, select);
-                throw new UnsupportedOperationException("Query Case not supported");
-            } else if (!subject.hasValue() && whereSize > 1) {
-                /**
-                 * Case 3: Subject is not set, more than one where clause
-                 */
-                scannerForPredicateObject(lookup, start, end, where);
-                setSelectFilter(subject, select);
-            } else if (!subject.hasValue() && whereSize == 1) {
-                /**
-                 * Case 4: No subject, only one where clause
-                 */
-                Map.Entry<Var, Var> predObj = null;
-                if (where.size() == 1) {
-                    predObj = where.get(0);
-                }
-                scannerForPredicateObject(lookup, start, end, predObj);
-                setSelectFilter(subject, select);
-            } else if (!subject.hasValue() && whereSize == 0 && select.size() > 1) {
-                /**
-                 * Case 5: No subject, no where, more than one select
-                 */
-//                this.scanner = scannerForPredicates(start, end, select);
-//                if (this.scanner == null) {
-//                    this.iter = new EmptyIteration();
-//                    return;
-//                }
-//                this.iter = new FilterIterator(this.bindings, this.scanner.iterator(), subject, select);
-                throw new UnsupportedOperationException("Query Case not supported");
-            } else if (!subject.hasValue() && whereSize == 0 && select.size() == 1) {
-                /**
-                 * Case 6: No subject, no where, exactly one select
-                 */
-//                cloudbase.core.client.Scanner sc = scannerForPredicate(start, end, (URI) select.get(0).getKey().getValue());
-//                if (sc == null) {
-//                    this.iter = new EmptyIteration();
-//                    return;
-//                }
-//                this.iter = new FilterIterator(this.bindings, sc.iterator(), subject, select);
-                throw new UnsupportedOperationException("Query Case not supported");
-            } else {
-                throw new QueryEvaluationException("Case not supported as of yet");
-            }
-
-        } catch (Exception e) {
-            throw new QueryEvaluationException(e);
-        }
-    }
-
-    protected void setSelectFilter(Var subj, List<Map.Entry<Var, Var>> select) {
-        List<String> selectStrs = new ArrayList<String>();
-        for (Map.Entry<Var, Var> entry : select) {
-            Var key = entry.getKey();
-            Var obj = entry.getValue();
-            if (key.hasValue()) {
-                String pred_s = key.getValue().stringValue();
-                selectStrs.add(pred_s);
-                job.getConfiguration().set(pred_s, obj.getName());
-            }
-        }
-        job.getConfiguration().setStrings(SELECT_FILTER, selectStrs.toArray(new String[selectStrs.size()]));
-        job.getConfiguration().set(SUBJECT_NAME, subj.getName());
-    }
-
-    protected List<Map.Entry<Var, Var>> retrieveWhereClause() {
-        List<Map.Entry<Var, Var>> where = new ArrayList<Map.Entry<Var, Var>>();
-        for (Map.Entry<Var, Var> entry : lookup.getPredicateObjectPairs()) {
-            Var pred = entry.getKey();
-            Var object = entry.getValue();
-            if (pred.hasValue() && object.hasValue()) {
-                where.add(entry); //TODO: maybe we should clone this?
-            }
-        }
-        return where;
-    }
-
-    protected List<Map.Entry<Var, Var>> retrieveSelectClause() {
-        List<Map.Entry<Var, Var>> select = new ArrayList<Map.Entry<Var, Var>>();
-        for (Map.Entry<Var, Var> entry : lookup.getPredicateObjectPairs()) {
-            Var pred = entry.getKey();
-            Var object = entry.getValue();
-            if (pred.hasValue() && !object.hasValue()) {
-                select.add(entry); //TODO: maybe we should clone this?
-            }
-        }
-        return select;
-    }
-
-    protected void scannerForPredicateObject(ShardSubjectLookup lookup, Long start, Long end, List<Map.Entry<Var, Var>> predObjs) throws IOException, TableNotFoundException {
-        start = validateFillStartTime(start, lookup);
-        end = validateFillEndTime(end, lookup);
-
-        int extra = 0;
-
-        if (isTimeRange(lookup, job.getConfiguration())) {
-            extra += 1;
-        }
-
-        Text[] queries = new Text[predObjs.size() + extra];
-        for (int i = 0; i < predObjs.size(); i++) {
-            Map.Entry<Var, Var> predObj = predObjs.get(i);
-            ByteArrayDataOutput output = ByteStreams.newDataOutput();
-            writeValue(output, predObj.getKey().getValue());
-            output.write(INDEX_DELIM);
-            writeValue(output, predObj.getValue().getValue());
-            queries[i] = new Text(output.toByteArray());
-        }
-
-        if (isTimeRange(lookup, job.getConfiguration())) {
-            queries[queries.length - 1] = new Text(
-                    GMDenIntersectingIterator.getRangeTerm(INDEX.toString(),
-                            getStartTimeRange(lookup, job.getConfiguration())
-                            , true,
-                            getEndTimeRange(lookup, job.getConfiguration()),
-                            true
-                    )
-            );
-        }
-
-        createBatchScannerInputFormat();
-        CloudbaseBatchScannerInputFormat.setIterator(job, 20, GMDenIntersectingIterator.class.getName(), "ii");
-        CloudbaseBatchScannerInputFormat.setIteratorOption(job, "ii", GMDenIntersectingIterator.docFamilyOptionName, DOC.toString());
-        CloudbaseBatchScannerInputFormat.setIteratorOption(job, "ii", GMDenIntersectingIterator.indexFamilyOptionName, INDEX.toString());
-        CloudbaseBatchScannerInputFormat.setIteratorOption(job, "ii", GMDenIntersectingIterator.columnFamiliesOptionName, GMDenIntersectingIterator.encodeColumns(queries));
-        CloudbaseBatchScannerInputFormat.setIteratorOption(job, "ii", GMDenIntersectingIterator.OPTION_MULTI_DOC, "" + true);
-
-        Range range = new Range(
-                new Key(new Text(generator.generateShardValue(start, null) + "\0")),
-                new Key(new Text(generator.generateShardValue(end, null) + "\uFFFD"))
-        );
-        CloudbaseBatchScannerInputFormat.setRanges(job, Collections.singleton(
-                range
-        ));
-    }
-
-    protected void scannerForPredicateObject(ShardSubjectLookup lookup, Long start, Long end, Map.Entry<Var, Var> predObj) throws IOException, TableNotFoundException {
-        start = validateFillStartTime(start, lookup);
-        end = validateFillEndTime(end, lookup);
-
-        /**
-         * Need to use GMDen because SortedRange can't serialize non-XML characters in a range
-         * @see https://issues.apache.org/jira/browse/MAPREDUCE-109
-         */
-        createBatchScannerInputFormat();
-        CloudbaseBatchScannerInputFormat.setIterator(job, 20, SortedEncodedRangeIterator.class.getName(), "ri");
-        CloudbaseBatchScannerInputFormat.setIteratorOption(job, "ri", SortedRangeIterator.OPTION_DOC_COLF, DOC.toString());
-        CloudbaseBatchScannerInputFormat.setIteratorOption(job, "ri", SortedRangeIterator.OPTION_COLF, INDEX.toString());
-        CloudbaseBatchScannerInputFormat.setIteratorOption(job, "ri", SortedRangeIterator.OPTION_START_INCLUSIVE, "" + true);
-        CloudbaseBatchScannerInputFormat.setIteratorOption(job, "ri", SortedRangeIterator.OPTION_END_INCLUSIVE, "" + true);
-        CloudbaseBatchScannerInputFormat.setIteratorOption(job, "ri", SortedRangeIterator.OPTION_MULTI_DOC, "" + true);
-
-        String lower, upper;
-        if (isTimeRange(lookup, job.getConfiguration())) {
-            lower = getStartTimeRange(lookup, job.getConfiguration());
-            upper = getEndTimeRange(lookup, job.getConfiguration());
-        } else {
-
-            ByteArrayDataOutput output = ByteStreams.newDataOutput();
-            writeValue(output, predObj.getKey().getValue());
-            output.write(INDEX_DELIM);
-            writeValue(output, predObj.getValue().getValue());
-
-            lower = new String(output.toByteArray());
-            upper = lower + "\01";
-        }
-        CloudbaseBatchScannerInputFormat.setIteratorOption(job, "ri", SortedRangeIterator.OPTION_LOWER_BOUND, SortedEncodedRangeIterator.encode(lower));
-        CloudbaseBatchScannerInputFormat.setIteratorOption(job, "ri", SortedRangeIterator.OPTION_UPPER_BOUND, SortedEncodedRangeIterator.encode(upper));
-
-        //TODO: Do we add a time predicate to this?
-//        bs.setScanIterators(19, FilteringIterator.class.getName(), "filteringIterator");
-//        bs.setScanIteratorOption("filteringIterator", "0", TimeRangeFilter.class.getName());
-//        bs.setScanIteratorOption("filteringIterator", "0." + TimeRangeFilter.TIME_RANGE_PROP, (end - start) + "");
-//        bs.setScanIteratorOption("filteringIterator", "0." + TimeRangeFilter.START_TIME_PROP, end + "");
-
-        Range range = new Range(
-                new Key(new Text(generator.generateShardValue(start, null) + "\0")),
-                new Key(new Text(generator.generateShardValue(end, null) + "\uFFFD"))
-        );
-        CloudbaseBatchScannerInputFormat.setRanges(job, Collections.singleton(
-                range
-        ));
-
-    }
-
-    protected void createBatchScannerInputFormat() {
-        job.setInputFormatClass(CloudbaseBatchScannerInputFormat.class);
-        CloudbaseBatchScannerInputFormat.setInputInfo(job, userName, pwd.getBytes(), table, CBConstants.NO_AUTHS); //may need to change these auths sometime soon
-        CloudbaseBatchScannerInputFormat.setZooKeeperInstance(job, instance, zk);
-        job.setMapperClass(KeyValueToMapWrMapper.class);
-        job.setCombinerClass(AggregateTriplesBySubjectCombiner.class);
-        job.setReducerClass(AggregateTriplesBySubjectReducer.class);
-
-        job.setMapOutputKeyClass(Text.class);
-        job.setMapOutputValueClass(MapWritable.class);
-        job.setOutputKeyClass(LongWritable.class);
-        job.setOutputValueClass(MapWritable.class);
-
-        job.getConfiguration().set("io.sort.mb", "256");
-        job.getConfiguration().setBoolean("mapred.map.tasks.speculative.execution", false);
-        job.getConfiguration().setBoolean("mapred.reduce.tasks.speculative.execution", false);
-    }
-
-}
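
Both scanner setups in this transformer bound the scan with a Range from generateShardValue(start) + "\0" to generateShardValue(end) + "\uFFFD". A sketch of why those two suffixes work follows, assuming a date-prefixed shard string; the real DateHashModShardValueGenerator format is not shown in this diff, so the "yyyyMMdd" layout here is a guess:

    import java.text.SimpleDateFormat;
    import java.util.Date;

    public class ShardRangeSketch {
        // Assumed shard layout; the actual generator may append a hash suffix.
        private static final SimpleDateFormat DAY = new SimpleDateFormat("yyyyMMdd");

        static String shardFor(long timestampMs) {
            return DAY.format(new Date(timestampMs));
        }

        public static void main(String[] args) {
            long start = 0L;                       // default when START_BINDING is unset
            long end = System.currentTimeMillis(); // default when END_BINDING is unset
            // "\0" sorts before every suffix of the start shard and "\uFFFD"
            // sorts after every suffix of the end shard, so the range covers
            // all partition rows between the two prefixes, inclusive.
            String lower = shardFor(start) + "\0";
            String upper = shardFor(end) + "\uFFFD";
            System.out.println("[" + lower + ", " + upper + "]");
        }
    }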

http://git-wip-us.apache.org/repos/asf/incubator-rya/blob/80faf06d/partition/mr.partition.rdf/src/main/java/mvm/mmrts/rdf/partition/mr/transform/SparqlCloudbaseIFTransformerConstants.java
----------------------------------------------------------------------
diff --git a/partition/mr.partition.rdf/src/main/java/mvm/mmrts/rdf/partition/mr/transform/SparqlCloudbaseIFTransformerConstants.java b/partition/mr.partition.rdf/src/main/java/mvm/mmrts/rdf/partition/mr/transform/SparqlCloudbaseIFTransformerConstants.java
deleted file mode 100644
index 84f83c0..0000000
--- a/partition/mr.partition.rdf/src/main/java/mvm/mmrts/rdf/partition/mr/transform/SparqlCloudbaseIFTransformerConstants.java
+++ /dev/null
@@ -1,12 +0,0 @@
-package mvm.mmrts.rdf.partition.mr.transform;
-
-/**
- * Class SparqlCloudbaseIFTransformerConstants
- * Date: Sep 1, 2011
- * Time: 5:01:10 PM
- */
-public class SparqlCloudbaseIFTransformerConstants {
-    public static final String PREFIX = "mvm.mmrts.rdf.partition.mr.transform.";
-    public static final String SELECT_FILTER = PREFIX + "select";
-    public static final String SUBJECT_NAME = PREFIX + "subject";
-}

http://git-wip-us.apache.org/repos/asf/incubator-rya/blob/80faf06d/partition/mr.partition.rdf/src/test/java/mvm/mmrts/rdf/partition/mr/compat/ChangeShardDateFormatToolTest.java
----------------------------------------------------------------------
diff --git a/partition/mr.partition.rdf/src/test/java/mvm/mmrts/rdf/partition/mr/compat/ChangeShardDateFormatToolTest.java b/partition/mr.partition.rdf/src/test/java/mvm/mmrts/rdf/partition/mr/compat/ChangeShardDateFormatToolTest.java
deleted file mode 100644
index effb9ff..0000000
--- a/partition/mr.partition.rdf/src/test/java/mvm/mmrts/rdf/partition/mr/compat/ChangeShardDateFormatToolTest.java
+++ /dev/null
@@ -1,33 +0,0 @@
-package mvm.mmrts.rdf.partition.mr.compat;
-
-import junit.framework.TestCase;
-
-/**
- * Class ChangeShardDateFormatToolTest
- * Date: Dec 9, 2011
- * Time: 10:39:31 AM
- */
-public class ChangeShardDateFormatToolTest extends TestCase {
-
-    public void testShardDelim() throws Exception {
-        String dateDelim = "-";
-        String shard = "2011-11-01";
-        int shardIndex = shard.lastIndexOf(dateDelim);
-        if (shardIndex == -1)
-            fail();
-        String date = shard.substring(0, shardIndex);
-        shard = shard.substring(shardIndex + 1, shard.length());
-        assertEquals("2011-11", date);
-        assertEquals("01", shard);
-
-        dateDelim = "_";
-        shard = "20111101_33";
-        shardIndex = shard.lastIndexOf(dateDelim);
-        if (shardIndex == -1)
-            fail();
-        date = shard.substring(0, shardIndex);
-        shard = shard.substring(shardIndex + 1, shard.length());
-        assertEquals("20111101", date);
-        assertEquals("33", shard);
-    }
-}
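
The two assertions above pin down the parsing rule the compat tool relied on: split on the last occurrence of the delimiter, date on the left, shard number on the right. Extracted as a helper, with splitShard being an illustrative name rather than a method from the deleted tool:

    // Helper matching the behavior the test asserts: everything before the
    // last delimiter is the date, everything after it is the shard id.
    public static String[] splitShard(String shard, String dateDelim) {
        int idx = shard.lastIndexOf(dateDelim);
        if (idx == -1) {
            throw new IllegalArgumentException("No delimiter in shard: " + shard);
        }
        return new String[] { shard.substring(0, idx), shard.substring(idx + 1) };
    }

    // splitShard("2011-11-01", "-")  -> { "2011-11", "01" }
    // splitShard("20111101_33", "_") -> { "20111101", "33" }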

http://git-wip-us.apache.org/repos/asf/incubator-rya/blob/80faf06d/partition/mr.partition.rdf/src/test/java/mvm/mmrts/rdf/partition/mr/fileinput/RdfFileInputToolTest.java
----------------------------------------------------------------------
diff --git a/partition/mr.partition.rdf/src/test/java/mvm/mmrts/rdf/partition/mr/fileinput/RdfFileInputToolTest.java b/partition/mr.partition.rdf/src/test/java/mvm/mmrts/rdf/partition/mr/fileinput/RdfFileInputToolTest.java
deleted file mode 100644
index c279348..0000000
--- a/partition/mr.partition.rdf/src/test/java/mvm/mmrts/rdf/partition/mr/fileinput/RdfFileInputToolTest.java
+++ /dev/null
@@ -1,80 +0,0 @@
-package mvm.mmrts.rdf.partition.mr.fileinput;
-
-import cloudbase.core.client.Connector;
-import cloudbase.core.client.ZooKeeperInstance;
-import cloudbase.core.data.ColumnUpdate;
-import cloudbase.core.data.Mutation;
-import junit.framework.TestCase;
-import mvm.mmrts.rdf.partition.utils.RdfIO;
-import org.apache.hadoop.conf.Configuration;
-import org.apache.hadoop.io.BytesWritable;
-import org.apache.hadoop.io.LongWritable;
-import org.apache.hadoop.io.Text;
-import org.apache.hadoop.mapreduce.Mapper;
-import org.apache.hadoop.mapreduce.Reducer;
-import org.apache.hadoop.mrunit.mapreduce.MapReduceDriver;
-import org.apache.hadoop.mrunit.types.Pair;
-import org.apache.zookeeper.ZooKeeper;
-import org.openrdf.model.ValueFactory;
-import org.openrdf.model.impl.StatementImpl;
-import org.openrdf.model.impl.ValueFactoryImpl;
-
-import java.util.Collection;
-import java.util.List;
-
-/**
- * Class RdfFileInputToolTest
- * Date: Aug 8, 2011
- * Time: 3:22:25 PM
- */
-public class RdfFileInputToolTest extends TestCase {
-
-    ValueFactory vf = ValueFactoryImpl.getInstance();
-
-    /**
-     * MRUnit for latest mapreduce (0.21 api)
-     * <p/>
-     * 1. Test to see if the bytes overwrite will affect
-     */
-
-    private Mapper<LongWritable, BytesWritable, Text, BytesWritable> mapper = new RdfFileInputToCloudbaseTool.OutSubjStmtMapper();
-    private Reducer<Text, BytesWritable, Text, Mutation> reducer = new RdfFileInputToCloudbaseTool.StatementToMutationReducer();
-    private MapReduceDriver<LongWritable, BytesWritable, Text, BytesWritable, Text, Mutation> driver;
-
-    @Override
-    protected void setUp() throws Exception {
-        super.setUp();
-        driver = new MapReduceDriver(mapper, reducer);
-        Configuration conf = new Configuration();
-        conf.set(RdfFileInputToCloudbaseTool.CB_TABLE_PROP, "table");
-        driver.setConfiguration(conf);
-    }
-
-    public void testNormalRun() throws Exception {
-        StatementImpl stmt1 = new StatementImpl(vf.createURI("urn:namespace#subject"), vf.createURI("urn:namespace#pred"), vf.createLiteral("object"));
-        StatementImpl stmt2 = new StatementImpl(vf.createURI("urn:namespace#subject"), vf.createURI("urn:namespace#pred"), vf.createLiteral("obje"));
-        StatementImpl stmt3 = new StatementImpl(vf.createURI("urn:namespace#subj2"), vf.createURI("urn:namespace#pred"), vf.createLiteral("ob"));
-        List<Pair<Text, Mutation>> pairs = driver.
-                withInput(new LongWritable(1), new BytesWritable(RdfIO.writeStatement(stmt1, true))).
-                withInput(new LongWritable(1), new BytesWritable(RdfIO.writeStatement(stmt2, true))).
-                withInput(new LongWritable(1), new BytesWritable(RdfIO.writeStatement(stmt3, true))).
-                run();
-
-        assertEquals(4, pairs.size());
-
-        ColumnUpdate update = pairs.get(0).getSecond().getUpdates().get(0);
-        assertEquals("event", new String(update.getColumnFamily()));
-        assertEquals("\07urn:namespace#subj2\0\07urn:namespace#pred\0\u0009ob", new String(update.getColumnQualifier()));
-    }
-
-    public static void main(String[] args) {
-        try {
-            Connector connector = new ZooKeeperInstance("stratus", "stratus13:2181").getConnector("root", "password".getBytes());
-            Collection<Text> splits = connector.tableOperations().getSplits("partitionRdf", Integer.MAX_VALUE);
-            System.out.println(splits.size());
-            System.out.println(splits);
-        } catch (Exception e) {
-            e.printStackTrace();
-        }
-    }
-}

http://git-wip-us.apache.org/repos/asf/incubator-rya/blob/80faf06d/partition/mr.partition.rdf/src/test/java/mvm/mmrts/rdf/partition/mr/fileinput/bulk/EmbedKeyRangePartitionerTest.java
----------------------------------------------------------------------
diff --git a/partition/mr.partition.rdf/src/test/java/mvm/mmrts/rdf/partition/mr/fileinput/bulk/EmbedKeyRangePartitionerTest.java b/partition/mr.partition.rdf/src/test/java/mvm/mmrts/rdf/partition/mr/fileinput/bulk/EmbedKeyRangePartitionerTest.java
deleted file mode 100644
index bd63f6f..0000000
--- a/partition/mr.partition.rdf/src/test/java/mvm/mmrts/rdf/partition/mr/fileinput/bulk/EmbedKeyRangePartitionerTest.java
+++ /dev/null
@@ -1,20 +0,0 @@
-package mvm.mmrts.rdf.partition.mr.fileinput.bulk;
-
-import junit.framework.TestCase;
-import org.apache.hadoop.io.Text;
-
-/**
- * Class EmbedKeyRangePartitionerTest
- * Date: Sep 13, 2011
- * Time: 1:58:28 PM
- */
-public class EmbedKeyRangePartitionerTest extends TestCase {
-
-    public void testRetrieveEmbedKey() throws Exception {
-        assertEquals(new Text("hello"), EmbedKeyRangePartitioner.retrieveEmbedKey(new Text("hello\1there")));
-        assertEquals(new Text("h"), EmbedKeyRangePartitioner.retrieveEmbedKey(new Text("h\1there")));
-        assertEquals(new Text(""), EmbedKeyRangePartitioner.retrieveEmbedKey(new Text("\1there")));
-        assertEquals(new Text("hello there"), EmbedKeyRangePartitioner.retrieveEmbedKey(new Text("hello there")));
-    }
-
-}
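
The four expectations above fully determine retrieveEmbedKey's contract: the bytes before the first \1, or the whole key when no delimiter is present. A reconstruction consistent with those cases follows; the deleted implementation itself is not in this hunk, so treat this as the implied contract rather than the original source:

    import java.util.Arrays;
    import org.apache.hadoop.io.Text;

    public static Text retrieveEmbedKey(Text key) {
        byte[] bytes = key.getBytes(); // backing array, valid up to getLength()
        int len = key.getLength();
        for (int i = 0; i < len; i++) {
            if (bytes[i] == 1) { // the '\1' delimiter
                return new Text(Arrays.copyOfRange(bytes, 0, i));
            }
        }
        return key; // no delimiter: the whole key is the embedded key
    }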

http://git-wip-us.apache.org/repos/asf/incubator-rya/blob/80faf06d/partition/partition.rdf/pom.xml
----------------------------------------------------------------------
diff --git a/partition/partition.rdf/pom.xml b/partition/partition.rdf/pom.xml
deleted file mode 100644
index 2701d64..0000000
--- a/partition/partition.rdf/pom.xml
+++ /dev/null
@@ -1,281 +0,0 @@
-<?xml version="1.0" encoding="UTF-8"?>
-<project xmlns="http://maven.apache.org/POM/4.0.0"
-         xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance"
-         xsi:schemaLocation="http://maven.apache.org/POM/4.0.0 http://maven.apache.org/xsd/maven-4.0.0.xsd">
-    <parent>
-        <groupId>mvm.rya</groupId>
-        <artifactId>parent</artifactId>
-        <version>3.0.0.alpha1</version>
-    </parent>
-    <modelVersion>4.0.0</modelVersion>
-
-    <groupId>mvm.mmrts.rdf</groupId>
-    <artifactId>partition.rdf</artifactId>
-    <version>1.0.0-SNAPSHOT</version>
-    <name>${project.groupId}.${project.artifactId}</name>
-
-    <dependencies>
-        <dependency>
-            <groupId>org.openrdf.sesame</groupId>
-            <artifactId>sesame-runtime</artifactId>
-        </dependency>
-        <dependency>
-            <groupId>org.openrdf.sesame</groupId>
-            <artifactId>sesame-queryresultio-sparqlxml</artifactId>
-        </dependency>
-        <dependency>
-            <groupId>org.openrdf.sesame</groupId>
-            <artifactId>sesame-rio-rdfxml</artifactId>
-        </dependency>
-        <dependency>
-            <groupId>com.google.guava</groupId>
-            <artifactId>guava</artifactId>
-        </dependency>
-        <dependency>
-            <groupId>sitestore.common</groupId>
-            <artifactId>common-query</artifactId>
-        </dependency>
-        <dependency>
-            <groupId>mvm.rya</groupId>
-            <artifactId>cloudbase.utils</artifactId>
-        </dependency>
-
-        <!-- Cloudbase deps -->
-        <dependency>
-            <groupId>cloudbase</groupId>
-            <artifactId>cloudbase-core</artifactId>
-        </dependency>
-        <dependency>
-            <groupId>org.apache.hadoop</groupId>
-            <artifactId>zookeeper</artifactId>
-        </dependency>
-
-        <!-- Test -->
-        <dependency>
-            <groupId>org.mockito</groupId>
-            <artifactId>mockito-all</artifactId>
-            <scope>test</scope>
-        </dependency>
-        <dependency>
-            <groupId>junit</groupId>
-            <artifactId>junit</artifactId>
-            <scope>test</scope>
-        </dependency>
-
-        <!-- Deps that are transitive but listed anyway
-
-        <dependency>
-            <groupId>org.openrdf.sesame</groupId>
-            <artifactId>sesame-model</artifactId>
-        </dependency>
-        <dependency>
-            <groupId>org.openrdf.sesame</groupId>
-            <artifactId>sesame-query</artifactId>
-        </dependency>
-        <dependency>
-            <groupId>org.openrdf.sesame</groupId>
-            <artifactId>sesame-queryalgebra-model</artifactId>
-        </dependency>
-        <dependency>
-            <groupId>org.openrdf.sesame</groupId>
-            <artifactId>sesame-queryparser-api</artifactId>
-        </dependency>
-        <dependency>
-            <groupId>org.openrdf.sesame</groupId>
-            <artifactId>sesame-queryparser-serql</artifactId>
-        </dependency>
-        <dependency>
-            <groupId>org.openrdf.sesame</groupId>
-            <artifactId>sesame-queryparser-sparql</artifactId>
-        </dependency>
-        <dependency>
-            <groupId>org.openrdf.sesame</groupId>
-            <artifactId>sesame-queryparser-serql</artifactId>
-        </dependency>
-        <dependency>
-            <groupId>org.openrdf.sesame</groupId>
-            <artifactId>sesame-queryresultio-api</artifactId>
-        </dependency>
-        <dependency>
-            <groupId>org.openrdf.sesame</groupId>
-            <artifactId>sesame-queryresultio-binary</artifactId>
-        </dependency>
-        <dependency>
-            <groupId>org.openrdf.sesame</groupId>
-            <artifactId>sesame-queryresultio-sparqljson</artifactId>
-        </dependency>
-        <dependency>
-            <groupId>org.openrdf.sesame</groupId>
-            <artifactId>sesame-queryresultio-text</artifactId>
-        </dependency>
-        <dependency>
-            <groupId>org.openrdf.sesame</groupId>
-            <artifactId>sesame-repository-api</artifactId>
-        </dependency>
-        <dependency>
-            <groupId>org.openrdf.sesame</groupId>
-            <artifactId>sesame-repository-manager</artifactId>
-        </dependency>
-        <dependency>
-            <groupId>org.openrdf.sesame</groupId>
-            <artifactId>sesame-repository-event</artifactId>
-        </dependency>
-        <dependency>
-            <groupId>org.openrdf.sesame</groupId>
-            <artifactId>sesame-repository-sail</artifactId>
-        </dependency>
-        <dependency>
-            <groupId>org.openrdf.sesame</groupId>
-            <artifactId>sesame-sail-memory</artifactId>
-        </dependency>
-        <dependency>
-            <groupId>org.openrdf.sesame</groupId>
-            <artifactId>sesame-sail-inferencer</artifactId>
-        </dependency>
-        <dependency>
-            <groupId>org.openrdf.sesame</groupId>
-            <artifactId>sesame-queryalgebra-evaluation</artifactId>
-        </dependency>
-        <dependency>
-            <groupId>org.openrdf.sesame</groupId>
-            <artifactId>sesame-repository-http</artifactId>
-        </dependency>
-        <dependency>
-            <groupId>org.openrdf.sesame</groupId>
-            <artifactId>sesame-http-client</artifactId>
-        </dependency>
-        <dependency>
-            <groupId>org.openrdf.sesame</groupId>
-            <artifactId>sesame-repository-contextaware</artifactId>
-        </dependency>
-        <dependency>
-            <groupId>org.openrdf.sesame</groupId>
-            <artifactId>sesame-repository-dataset</artifactId>
-        </dependency>
-        <dependency>
-            <groupId>org.openrdf.sesame</groupId>
-            <artifactId>sesame-http-protocol</artifactId>
-        </dependency>
-        <dependency>
-            <groupId>org.openrdf.sesame</groupId>
-            <artifactId>sesame-rio-ntriples</artifactId>
-        </dependency>
-        <dependency>
-            <groupId>org.openrdf.sesame</groupId>
-            <artifactId>sesame-rio-api</artifactId>
-        </dependency>
-        <dependency>
-            <groupId>org.openrdf.sesame</groupId>
-            <artifactId>sesame-rio-n3</artifactId>
-        </dependency>
-        <dependency>
-            <groupId>org.openrdf.sesame</groupId>
-            <artifactId>sesame-rio-trix</artifactId>
-        </dependency>
-        <dependency>
-            <groupId>org.openrdf.sesame</groupId>
-            <artifactId>sesame-rio-turtle</artifactId>
-        </dependency>
-        <dependency>
-            <groupId>org.openrdf.sesame</groupId>
-            <artifactId>sesame-rio-trig</artifactId>
-        </dependency>
-        <dependency>
-            <groupId>org.openrdf.sesame</groupId>
-            <artifactId>sesame-sail-api</artifactId>
-        </dependency>
-        <dependency>
-            <groupId>org.openrdf.sesame</groupId>
-            <artifactId>sesame-sail-nativerdf</artifactId>
-        </dependency>
-        <dependency>
-            <groupId>org.openrdf.sesame</groupId>
-            <artifactId>sesame-sail-rdbms</artifactId>
-        </dependency>
-        <dependency>
-            <groupId>info.aduna.commons</groupId>
-            <artifactId>aduna-commons-collections</artifactId>
-        </dependency>
-        <dependency>
-            <groupId>info.aduna.commons</groupId>
-            <artifactId>aduna-commons-iteration</artifactId>
-        </dependency>
-        <dependency>
-            <groupId>info.aduna.commons</groupId>
-            <artifactId>aduna-commons-io</artifactId>
-        </dependency>
-        <dependency>
-            <groupId>info.aduna.commons</groupId>
-            <artifactId>aduna-commons-lang</artifactId>
-        </dependency>
-        <dependency>
-            <groupId>info.aduna.commons</groupId>
-            <artifactId>aduna-commons-i18n</artifactId>
-        </dependency>
-        <dependency>
-            <groupId>info.aduna.commons</groupId>
-            <artifactId>aduna-commons-concurrent</artifactId>
-        </dependency>
-        <dependency>
-            <groupId>info.aduna.commons</groupId>
-            <artifactId>aduna-commons-xml</artifactId>
-        </dependency>
-        <dependency>
-            <groupId>info.aduna.commons</groupId>
-            <artifactId>aduna-commons-text</artifactId>
-        </dependency>
-        <dependency>
-            <groupId>info.aduna.commons</groupId>
-            <artifactId>aduna-commons-net</artifactId>
-        </dependency>
-        <dependency>
-            <groupId>commons-dbcp</groupId>
-            <artifactId>commons-dbcp</artifactId>
-        </dependency>
-        <dependency>
-            <groupId>commons-pool</groupId>
-            <artifactId>commons-pool</artifactId>
-        </dependency>
-        <dependency>
-            <groupId>org.slf4j</groupId>
-            <artifactId>slf4j-api</artifactId>
-        </dependency>  -->
-
-    </dependencies>
-    <repositories>
-        <repository>
-            <releases>
-                <enabled>true</enabled>
-            </releases>
-            <snapshots>
-                <enabled>false</enabled>
-            </snapshots>
-            <id>aduna-opensource.releases</id>
-            <name>Aduna Open Source - Maven releases</name>
-            <url>http://repo.aduna-software.org/maven2/releases</url>
-        </repository>
-    </repositories>
-    <build>
-        <plugins>
-            <plugin>
-                <groupId>org.apache.maven.plugins</groupId>
-                <artifactId>maven-compiler-plugin</artifactId>
-                <configuration>
-                    <source>1.6</source>
-                    <target>1.6</target>
-                </configuration>
-            </plugin>
-            <plugin>
-                <groupId>org.apache.maven.plugins</groupId>
-                <artifactId>maven-surefire-plugin</artifactId>
-                <configuration>
-                    <excludes>
-                        <exclude>**/*IntegrationTest.java</exclude>
-                    </excludes>
-                </configuration>
-            </plugin>
-        </plugins>
-    </build>
-
-</project>

http://git-wip-us.apache.org/repos/asf/incubator-rya/blob/80faf06d/partition/partition.rdf/src/main/java/mvm/mmrts/rdf/partition/InvalidValueTypeMarkerRuntimeException.java
----------------------------------------------------------------------
diff --git a/partition/partition.rdf/src/main/java/mvm/mmrts/rdf/partition/InvalidValueTypeMarkerRuntimeException.java b/partition/partition.rdf/src/main/java/mvm/mmrts/rdf/partition/InvalidValueTypeMarkerRuntimeException.java
deleted file mode 100644
index 0c723a1..0000000
--- a/partition/partition.rdf/src/main/java/mvm/mmrts/rdf/partition/InvalidValueTypeMarkerRuntimeException.java
+++ /dev/null
@@ -1,34 +0,0 @@
-package mvm.mmrts.rdf.partition;
-
-/**
- * Class InvalidValueTypeMarkerRuntimeException
- * Date: Jan 7, 2011
- * Time: 12:58:27 PM
- */
-public class InvalidValueTypeMarkerRuntimeException extends RuntimeException {
-    private int valueTypeMarker = -1;
-
-    public InvalidValueTypeMarkerRuntimeException(int valueTypeMarker) {
-        super();
-        this.valueTypeMarker = valueTypeMarker;
-    }
-
-    public InvalidValueTypeMarkerRuntimeException(int valueTypeMarker, String s) {
-        super(s);
-        this.valueTypeMarker = valueTypeMarker;
-    }
-
-    public InvalidValueTypeMarkerRuntimeException(int valueTypeMarker, String s, Throwable throwable) {
-        super(s, throwable);
-        this.valueTypeMarker = valueTypeMarker;
-    }
-
-    public InvalidValueTypeMarkerRuntimeException(int valueTypeMarker, Throwable throwable) {
-        super(throwable);
-        this.valueTypeMarker = valueTypeMarker;
-    }
-
-    public int getValueTypeMarker() {
-        return valueTypeMarker;
-    }
-}

http://git-wip-us.apache.org/repos/asf/incubator-rya/blob/80faf06d/partition/partition.rdf/src/main/java/mvm/mmrts/rdf/partition/PartitionConnection.java
----------------------------------------------------------------------
diff --git a/partition/partition.rdf/src/main/java/mvm/mmrts/rdf/partition/PartitionConnection.java b/partition/partition.rdf/src/main/java/mvm/mmrts/rdf/partition/PartitionConnection.java
deleted file mode 100644
index 83e0675..0000000
--- a/partition/partition.rdf/src/main/java/mvm/mmrts/rdf/partition/PartitionConnection.java
+++ /dev/null
@@ -1,306 +0,0 @@
-package mvm.mmrts.rdf.partition;
-
-import cloudbase.core.client.BatchWriter;
-import cloudbase.core.client.Connector;
-import cloudbase.core.client.Scanner;
-import cloudbase.core.client.admin.TableOperations;
-import cloudbase.core.data.Key;
-import cloudbase.core.data.Mutation;
-import cloudbase.core.data.Range;
-import cloudbase.core.security.ColumnVisibility;
-import com.google.common.collect.HashMultimap;
-import com.google.common.collect.Multimap;
-import info.aduna.iteration.CloseableIteration;
-import mvm.mmrts.rdf.partition.converter.ContextColVisConverter;
-import mvm.mmrts.rdf.partition.iterators.NamespaceIterator;
-import mvm.mmrts.rdf.partition.query.evaluation.FilterTimeIndexVisitor;
-import mvm.mmrts.rdf.partition.query.evaluation.PartitionEvaluationStrategy;
-import mvm.mmrts.rdf.partition.query.evaluation.SubjectGroupingOptimizer;
-import mvm.mmrts.rdf.partition.shard.ShardValueGenerator;
-import mvm.mmrts.rdf.partition.utils.ContextsStatementImpl;
-import org.apache.hadoop.conf.Configuration;
-import org.apache.hadoop.io.Text;
-import org.openrdf.model.*;
-import org.openrdf.query.Binding;
-import org.openrdf.query.BindingSet;
-import org.openrdf.query.Dataset;
-import org.openrdf.query.QueryEvaluationException;
-import org.openrdf.query.algebra.QueryRoot;
-import org.openrdf.query.algebra.TupleExpr;
-import org.openrdf.query.impl.EmptyBindingSet;
-import org.openrdf.sail.SailException;
-import org.openrdf.sail.helpers.SailConnectionBase;
-
-import java.util.Collection;
-import java.util.Iterator;
-import java.util.Map;
-
-import static mvm.mmrts.rdf.partition.PartitionConstants.*;
-import static mvm.mmrts.rdf.partition.utils.RdfIO.writeStatement;
-import static mvm.mmrts.rdf.partition.utils.RdfIO.writeValue;
-
-/**
- * Class PartitionConnection
- * Date: Jul 6, 2011
- * Time: 4:40:49 PM
- * <p/>
- * Ingest:
- * Triple ->
- * - <subject> <shard>:
- * - <shard> event:<subject>\0<predicate>\0<object>
- * - <shard> index:<predicate>\1<object>\0
- * <p/>
- * Namespace ->
- * - <prefix> ns:<namespace>
- */
-public class PartitionConnection extends SailConnectionBase {
-
-    private PartitionSail sail;
-    private BatchWriter writer;
-    private BatchWriter shardTableWriter;   //MMRTS-148
-    
-    private Multimap<Resource, ContextsStatementImpl> statements = HashMultimap.create(10000, 10);
-
-
-    public PartitionConnection(PartitionSail sailBase) throws SailException {
-        super(sailBase);
-        this.sail = sailBase;
-        this.initialize();
-    }
-
-    protected void initialize() throws SailException {
-        try {
-            Connector connector = sail.connector;
-            String table = sail.table;
-            String shardTable = sail.shardTable;
-
-            //create these tables if they do not exist
-            TableOperations tableOperations = connector.tableOperations();
-            boolean tableExists = tableOperations.exists(table);
-            if (!tableExists)
-                tableOperations.create(table);
-
-            tableExists = tableOperations.exists(shardTable);
-            if(!tableExists)
-                tableOperations.create(shardTable);
-
-            writer = connector.createBatchWriter(table, 1000000l, 60000l, 10);
-            shardTableWriter = connector.createBatchWriter(shardTable, 1000000l, 60000l, 10);
-        } catch (Exception e) {
-            throw new SailException(e);
-        }
-    }
-
-    @Override
-    protected void closeInternal() throws SailException {
-        try {
-            writer.close();
-            shardTableWriter.close();
-        } catch (Exception e) {
-            throw new SailException(e);
-        }
-    }
-
-    @Override
-    protected CloseableIteration<? extends BindingSet, QueryEvaluationException> evaluateInternal(TupleExpr tupleExpr, Dataset dataset, BindingSet bindingSet, boolean b) throws SailException {
-//        throw new UnsupportedOperationException("Query not supported");
-
-        if (!(tupleExpr instanceof QueryRoot))
-            tupleExpr = new QueryRoot(tupleExpr);
-
-        try {
-            Configuration queryConf = populateConf(bindingSet);
-            //timeRange filter check
-            tupleExpr.visit(new FilterTimeIndexVisitor(queryConf));
-
-            (new SubjectGroupingOptimizer(queryConf)).optimize(tupleExpr, dataset, bindingSet);
-            PartitionTripleSource source = new PartitionTripleSource(this.sail, queryConf);
-
-            PartitionEvaluationStrategy strategy = new PartitionEvaluationStrategy(
-                    source, dataset);
-
-            return strategy.evaluate(tupleExpr, EmptyBindingSet.getInstance());
-        } catch (Exception e) {
-            throw new SailException(e);
-        }
-
-    }
-
-    protected Configuration populateConf(BindingSet bs) {
-        Configuration conf = new Configuration(this.sail.conf);
-
-        for (String bname : bs.getBindingNames()) {
-            conf.set(bname, bs.getValue(bname).stringValue());
-        }
-        Binding start = bs.getBinding(START_BINDING);
-        if (start != null)
-            conf.setLong(START_BINDING, Long.parseLong(start.getValue().stringValue()));
-
-        Binding end = bs.getBinding(END_BINDING);
-        if (end != null)
-            conf.setLong(END_BINDING, Long.parseLong(end.getValue().stringValue()));
-
-        Binding timePredicate = bs.getBinding(TIME_PREDICATE);
-        if (timePredicate != null)
-            conf.set(TIME_PREDICATE, timePredicate.getValue().stringValue());
-
-        Binding timeType = bs.getBinding(TIME_TYPE_PROP);
-        if (timeType != null)
-            conf.set(TIME_TYPE_PROP, timeType.getValue().stringValue());
-        else if (timePredicate != null)
-            conf.set(TIME_TYPE_PROP, TimeType.XMLDATETIME.name()); //default to xml datetime
-
-        return conf;
-    }
-
-    @Override
-    protected CloseableIteration<? extends Resource, SailException> getContextIDsInternal() throws SailException {
-        throw new UnsupportedOperationException("Contexts not supported");
-    }
-
-    @Override
-    protected CloseableIteration<? extends Statement, SailException> getStatementsInternal(Resource resource, URI uri, Value value, boolean b, Resource... resources) throws SailException {
-        throw new UnsupportedOperationException("Query not supported");
-    }
-
-    @Override
-    protected long sizeInternal(Resource... resources) throws SailException {
-        throw new UnsupportedOperationException("Size operation not supported");
-    }
-
-    @Override
-    protected void startTransactionInternal() throws SailException {
-        // no transaction support as of yet
-    }
-
-    @Override
-    protected void commitInternal() throws SailException {
-        try {
-            ShardValueGenerator gen = sail.generator;
-            ContextColVisConverter contextColVisConverter = sail.contextColVisConverter;
-            Map<Resource, Collection<ContextsStatementImpl>> map = statements.asMap();
-            for (Map.Entry<Resource, Collection<ContextsStatementImpl>> entry : map.entrySet()) {
-                Resource subject = entry.getKey();
-                byte[] subj_bytes = writeValue(subject);
-                String shard = gen.generateShardValue(subject);
-                Text shard_txt = new Text(shard);
-                Collection<ContextsStatementImpl> stmts = entry.getValue();
-
-                /**
-                 * Triple ->
-                 * - <subject> <shard>:
-                 * - <shard> event:<subject>\0<predicate>\0<object>
-                 * - <shard> index:<predicate>\1<object>\0
-                 */
-                Mutation m_subj = new Mutation(shard_txt);
-                for (ContextsStatementImpl stmt : stmts) {
-                    Resource[] contexts = stmt.getContexts();
-                    ColumnVisibility vis = null;
-                    if (contexts != null && contexts.length > 0 && contextColVisConverter != null) {
-                        vis = contextColVisConverter.convertContexts(contexts);
-                    }
-
-                    if (vis != null) {
-                        m_subj.put(DOC, new Text(writeStatement(stmt, true)), vis, EMPTY_VALUE);
-                        m_subj.put(INDEX, new Text(writeStatement(stmt, false)), vis, EMPTY_VALUE);
-                    } else {
-                        m_subj.put(DOC, new Text(writeStatement(stmt, true)), EMPTY_VALUE);
-                        m_subj.put(INDEX, new Text(writeStatement(stmt, false)), EMPTY_VALUE);
-                    }
-                }
-
-                /**
-                 * TODO: Is this right?
-                 * If the subject does not have any authorizations specified, then anyone can access it.
-                 * But the true authorization check will happen at the predicate/object level, which means that
-                 * the set returned will only be what the person is authorized to see.  The shard lookup table has to
-                 * have the lowest-level authorization of all the predicate/object authorizations; otherwise,
-                 * a user may not be able to see the correct document.   
-                 */
-                Mutation m_shard = new Mutation(new Text(subj_bytes));
-                m_shard.put(shard_txt, EMPTY_TXT, EMPTY_VALUE);
-                shardTableWriter.addMutation(m_shard);
-
-                writer.addMutation(m_subj);
-            }
-
-            writer.flush();
-            shardTableWriter.flush();
-            statements.clear();
-        } catch (Exception e) {
-            throw new SailException(e);
-        }
-    }
-
-    @Override
-    protected void rollbackInternal() throws SailException {
-        statements.clear();
-    }
-
-    @Override
-    protected void addStatementInternal(Resource subject, URI predicate, Value object, Resource... contexts) throws SailException {
-        statements.put(subject, new ContextsStatementImpl(subject, predicate, object, contexts));
-    }
-
-    @Override
-    protected void removeStatementsInternal(Resource resource, URI uri, Value value, Resource... contexts) throws SailException {
-        throw new UnsupportedOperationException("Remove not supported as of yet");
-    }
-
-    @Override
-    protected void clearInternal(Resource... resources) throws SailException {
-        throw new UnsupportedOperationException("Clear with context not supported as of yet");
-    }
-
-    @Override
-    protected CloseableIteration<? extends Namespace, SailException> getNamespacesInternal() throws SailException {
-        return new NamespaceIterator(sail.connector, sail.table);
-    }
-
-    @Override
-    protected String getNamespaceInternal(String prefix) throws SailException {
-        try {
-            Scanner scanner = sail.connector.createScanner(sail.table, ALL_AUTHORIZATIONS);
-            scanner.setRange(new Range(new Text(prefix)));
-            scanner.fetchColumnFamily(NAMESPACE);
-            Iterator<Map.Entry<Key, cloudbase.core.data.Value>> iter = scanner.iterator();
-            if (iter != null && iter.hasNext())
-                return iter.next().getKey().getColumnQualifier().toString();
-        } catch (Exception e) {
-            throw new SailException(e);
-        }
-        return null;
-    }
-
-    @Override
-    protected void setNamespaceInternal(String prefix, String namespace) throws SailException {
-        /**
-         * Namespace ->
-         * - <prefix> <namespace>:
-         */
-
-        try {
-            Mutation m = new Mutation(new Text(prefix));
-            m.put(NAMESPACE, new Text(namespace), EMPTY_VALUE);
-            writer.addMutation(m);
-        } catch (Exception e) {
-            throw new SailException(e);
-        }
-    }
-
-    @Override
-    protected void removeNamespaceInternal(String s) throws SailException {
-        throw new UnsupportedOperationException("Namespace remove not supported");
-    }
-
-    @Override
-    protected void clearNamespacesInternal() throws SailException {
-        throw new UnsupportedOperationException("Namespace Clear not supported");
-    }
-
-}
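
The row layout in the class comment is easiest to see with one concrete statement. For subject urn:ns#s, predicate urn:ns#p, plain literal "o", and an assumed shard id of 20110801_4, commitInternal writes rows along these lines (markers per PartitionConstants: \07 URI, \u0009 plain literal, \0 family delimiter, \1 index delimiter; the event qualifier matches the assertion in RdfFileInputToolTest, while the exact index bytes beyond <predicate>\1<object>\0 come from writeStatement and are not shown in this hunk):

    shard lookup table:  row \07urn:ns#s   cf 20110801_4  cq (empty)
    partition table:     row 20110801_4    cf event       cq \07urn:ns#s\0\07urn:ns#p\0\u0009o
    partition table:     row 20110801_4    cf index       cq \07urn:ns#p\1\u0009o\0...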

http://git-wip-us.apache.org/repos/asf/incubator-rya/blob/80faf06d/partition/partition.rdf/src/main/java/mvm/mmrts/rdf/partition/PartitionConstants.java
----------------------------------------------------------------------
diff --git a/partition/partition.rdf/src/main/java/mvm/mmrts/rdf/partition/PartitionConstants.java b/partition/partition.rdf/src/main/java/mvm/mmrts/rdf/partition/PartitionConstants.java
deleted file mode 100644
index cb69596..0000000
--- a/partition/partition.rdf/src/main/java/mvm/mmrts/rdf/partition/PartitionConstants.java
+++ /dev/null
@@ -1,141 +0,0 @@
-package mvm.mmrts.rdf.partition;
-
-import cloudbase.core.CBConstants;
-import cloudbase.core.data.Value;
-import cloudbase.core.security.Authorizations;
-import mvm.mmrts.rdf.partition.query.operators.ShardSubjectLookup;
-import org.apache.hadoop.conf.Configuration;
-import org.apache.hadoop.io.Text;
-import org.openrdf.model.Literal;
-import org.openrdf.model.URI;
-import org.openrdf.model.ValueFactory;
-import org.openrdf.model.impl.ValueFactoryImpl;
-
-import java.text.SimpleDateFormat;
-import java.util.Date;
-
-/**
- * Class PartitionConstants
- * Date: Jul 6, 2011
- * Time: 12:22:55 PM
- */
-public class PartitionConstants {
-
-    public static final String PARTITION_NS = "urn:mvm.mmrts.partition.rdf/08/2011#";
-    public static ValueFactory VALUE_FACTORY = ValueFactoryImpl.getInstance();
-    public static URI TIMERANGE = VALUE_FACTORY.createURI(PARTITION_NS, "timeRange");
-    public static URI SHARDRANGE = VALUE_FACTORY.createURI(PARTITION_NS, "shardRange"); //shardRange(subject, start, stop) in ms
-    public static Literal EMPTY_LITERAL = VALUE_FACTORY.createLiteral(0);
-
-    public static final byte FAMILY_DELIM = 0;
-    public static final String FAMILY_DELIM_STR = "\0";
-    public static final byte INDEX_DELIM = 1;
-    public static final String INDEX_DELIM_STR = "\1";
-
-    /* RECORD TYPES */
-//    public static final int NAMESPACE_MARKER = 2;
-//
-//    public static final int EXPL_TRIPLE_MARKER = 3;
-//
-//    public static final int EXPL_QUAD_MARKER = 4;
-//
-//    public static final int INF_TRIPLE_MARKER = 5;
-//
-//    public static final int INF_QUAD_MARKER = 6;
-
-    public static final int URI_MARKER = 7;
-
-    public static final String URI_MARKER_STR = "\07";
-
-    public static final int BNODE_MARKER = 8;
-
-    public static final int PLAIN_LITERAL_MARKER = 9;
-
-    public static final String PLAIN_LITERAL_MARKER_STR = "\u0009";
-
-    public static final int LANG_LITERAL_MARKER = 10;
-
-    public static final int DATATYPE_LITERAL_MARKER = 11;
-
-    public static final String DATATYPE_LITERAL_MARKER_STR = "\u000B";
-
-    public static final int EOF_MARKER = 127;
-
-    //	public static final Authorizations ALL_AUTHORIZATIONS = new Authorizations(
-    //	"_");
-    public static final Authorizations ALL_AUTHORIZATIONS = CBConstants.NO_AUTHS;
-
-    public static final Value EMPTY_VALUE = new Value(new byte[0]);
-    public static final Text EMPTY_TXT = new Text("");
-
-    /* Column Families and Qualifiers */
-    public static final Text INDEX = new Text("index");
-    public static final Text DOC = new Text("event");
-    public static final Text NAMESPACE = new Text("ns");
-
-    /* Time constants */
-    public static final String START_BINDING = "binding.start";
-    public static final String END_BINDING = "binding.end";
-    public static final String TIME_PREDICATE = "binding.timePredicate";
-    public static final String SHARDRANGE_BINDING = "binding.shardRange";
-    public static final String SHARDRANGE_START = "binding.shardRange.start";
-    public static final String SHARDRANGE_END = "binding.shardRange.end";
-    public static final String TIME_TYPE_PROP = "binding.timeProp";
-    public static final String AUTHORIZATION_PROP = "binding.authorization";
-    public static final String NUMTHREADS_PROP = "binding.numthreads";
-    public static final String ALLSHARDS_PROP = "binding.allshards";
-
-    public static final String VALUE_DELIMITER = "\03";
-
-    public static final SimpleDateFormat XMLDATE = new SimpleDateFormat("yyyy-MM-dd'T'HH:mm:ss.SSS'Z'");
-
-    public enum TimeType {
-        TIMESTAMP, XMLDATETIME
-    }
-
-    public static boolean isTimeRange(ShardSubjectLookup lookup, Configuration configuration) {
-        return (configuration.get(TIME_PREDICATE) != null) || (lookup.getTimePredicate() != null);
-    }
-
-    public static Long validateFillStartTime(Long start, ShardSubjectLookup lookup) {
-        if (lookup.getShardStartTimeRange() != null)
-            return Long.parseLong(lookup.getShardStartTimeRange());
-        return (start == null) ? 0L : start;
-    }
-
-    public static Long validateFillEndTime(Long end, ShardSubjectLookup lookup) {
-        if (lookup.getShardEndTimeRange() != null)
-            return Long.parseLong(lookup.getShardEndTimeRange());
-        return (end == null) ? System.currentTimeMillis() : end;
-    }
-
-    public static String getStartTimeRange(ShardSubjectLookup lookup, Configuration configuration) {
-        String tp = configProperty(configuration, TIME_PREDICATE, lookup.getTimePredicate());
-        String st = configProperty(configuration, START_BINDING, lookup.getStartTimeRange());
-        TimeType tt = lookup.getTimeType();
-        if (tt == null)
-            tt = TimeType.valueOf(configuration.get(TIME_TYPE_PROP));
-        return URI_MARKER_STR + tp + INDEX_DELIM_STR + convertTime(Long.parseLong(st), tt);
-    }
-
-    public static String getEndTimeRange(ShardSubjectLookup lookup, Configuration configuration) {
-        String tp = configProperty(configuration, TIME_PREDICATE, lookup.getTimePredicate());
-        String et = configProperty(configuration, END_BINDING, lookup.getEndTimeRange());
-        TimeType tt = lookup.getTimeType();
-        if (tt == null)
-            tt = TimeType.valueOf(configuration.get(TIME_TYPE_PROP));
-        return URI_MARKER_STR + tp + INDEX_DELIM_STR + convertTime(Long.parseLong(et), tt);
-    }
-
-    public static String convertTime(Long timestamp, TimeType timeType) {
-        return (TimeType.XMLDATETIME.equals(timeType))
-                ? (DATATYPE_LITERAL_MARKER_STR + XMLDATE.format(new Date(timestamp)))
-                : PLAIN_LITERAL_MARKER_STR + timestamp;
-    }
-
-    public static String configProperty(Configuration configuration, String property, String checkValue) {
-        if (checkValue == null)
-            return configuration.get(property);
-        return checkValue;
-    }
-}
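
getStartTimeRange and getEndTimeRange compose the constants above into the inclusive range terms handed to the intersecting iterator. A short usage sketch, calling the (deleted) class directly with an arbitrary timestamp and predicate:

    long ts = 1314892128000L; // arbitrary example instant
    String encoded = PartitionConstants.convertTime(ts, PartitionConstants.TimeType.XMLDATETIME);
    // encoded = "\u000B" + XMLDATE.format(...), i.e. datatype-literal marker + formatted date

    String term = PartitionConstants.URI_MARKER_STR
            + "urn:ns#timestamp"                  // the configured time predicate
            + PartitionConstants.INDEX_DELIM_STR
            + encoded;
    // term sorts alongside index entries of the form \07<predicate>\1<value>,
    // so [startTerm, endTerm] brackets exactly the index rows in the window.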


[51/56] [abbrv] incubator-rya git commit: RYA-13 Add delete support to secondary indices

Posted by mi...@apache.org.
RYA-13 Add delete support to secondary indices


Project: http://git-wip-us.apache.org/repos/asf/incubator-rya/repo
Commit: http://git-wip-us.apache.org/repos/asf/incubator-rya/commit/e5e227c1
Tree: http://git-wip-us.apache.org/repos/asf/incubator-rya/tree/e5e227c1
Diff: http://git-wip-us.apache.org/repos/asf/incubator-rya/diff/e5e227c1

Branch: refs/heads/master
Commit: e5e227c159fdcdb3ccc05af0049b35f78aa4831e
Parents: 80faf06
Author: ejwhite922 <er...@sparta.com>
Authored: Fri Dec 4 16:35:23 2015 -0500
Committer: ejwhite922 <er...@sparta.com>
Committed: Fri Dec 4 16:35:23 2015 -0500

----------------------------------------------------------------------
 .../java/mvm/rya/accumulo/AccumuloRyaDAO.java   |  81 ++--
 .../accumulo/entity/EntityCentricIndex.java     |  64 ++-
 .../freetext/AccumuloFreeTextIndexer.java       | 238 +++++++---
 .../accumulo/geo/GeoMesaGeoIndexer.java         |  90 ++--
 .../temporal/AccumuloTemporalIndexer.java       | 216 ++++++---
 .../freetext/AccumuloFreeTextIndexerTest.java   | 239 ++++++----
 .../indexing/accumulo/geo/GeoIndexerTest.java   | 451 ++++++++++---------
 .../temporal/AccumuloTemporalIndexerTest.java   | 249 +++++-----
 .../src/main/java/EntityDirectExample.java      | 121 +++--
 .../src/main/java/RyaDirectExample.java         | 277 ++++++++----
 10 files changed, 1212 insertions(+), 814 deletions(-)
----------------------------------------------------------------------


http://git-wip-us.apache.org/repos/asf/incubator-rya/blob/e5e227c1/dao/accumulo.rya/src/main/java/mvm/rya/accumulo/AccumuloRyaDAO.java
----------------------------------------------------------------------
diff --git a/dao/accumulo.rya/src/main/java/mvm/rya/accumulo/AccumuloRyaDAO.java b/dao/accumulo.rya/src/main/java/mvm/rya/accumulo/AccumuloRyaDAO.java
index 764ca80..84fae68 100644
--- a/dao/accumulo.rya/src/main/java/mvm/rya/accumulo/AccumuloRyaDAO.java
+++ b/dao/accumulo.rya/src/main/java/mvm/rya/accumulo/AccumuloRyaDAO.java
@@ -31,7 +31,6 @@ import static mvm.rya.api.RdfCloudTripleStoreConstants.NUM_THREADS;
 import static mvm.rya.api.RdfCloudTripleStoreConstants.RTS_SUBJECT_RYA;
 import static mvm.rya.api.RdfCloudTripleStoreConstants.RTS_VERSION_PREDICATE_RYA;
 import static mvm.rya.api.RdfCloudTripleStoreConstants.VERSION_RYA;
-import info.aduna.iteration.CloseableIteration;
 
 import java.util.Collection;
 import java.util.Collections;
@@ -40,21 +39,6 @@ import java.util.List;
 import java.util.Map;
 import java.util.concurrent.TimeUnit;
 
-import mvm.rya.accumulo.experimental.AbstractAccumuloIndexer;
-import mvm.rya.accumulo.experimental.AccumuloIndexer;
-import mvm.rya.accumulo.query.AccumuloRyaQueryEngine;
-import mvm.rya.api.RdfCloudTripleStoreConfiguration;
-import mvm.rya.api.RdfCloudTripleStoreConstants.TABLE_LAYOUT;
-import mvm.rya.api.domain.RyaStatement;
-import mvm.rya.api.domain.RyaURI;
-import mvm.rya.api.layout.TableLayoutStrategy;
-import mvm.rya.api.persist.RyaDAO;
-import mvm.rya.api.persist.RyaDAOException;
-import mvm.rya.api.persist.RyaNamespaceManager;
-import mvm.rya.api.resolver.RyaTripleContext;
-import mvm.rya.api.resolver.triple.TripleRow;
-import mvm.rya.api.resolver.triple.TripleRowResolverException;
-
 import org.apache.accumulo.core.client.AccumuloException;
 import org.apache.accumulo.core.client.AccumuloSecurityException;
 import org.apache.accumulo.core.client.BatchDeleter;
@@ -80,6 +64,21 @@ import org.openrdf.model.Namespace;
 import com.google.common.collect.Iterators;
 import com.google.common.collect.Lists;
 
+import info.aduna.iteration.CloseableIteration;
+import mvm.rya.accumulo.experimental.AccumuloIndexer;
+import mvm.rya.accumulo.query.AccumuloRyaQueryEngine;
+import mvm.rya.api.RdfCloudTripleStoreConfiguration;
+import mvm.rya.api.RdfCloudTripleStoreConstants.TABLE_LAYOUT;
+import mvm.rya.api.domain.RyaStatement;
+import mvm.rya.api.domain.RyaURI;
+import mvm.rya.api.layout.TableLayoutStrategy;
+import mvm.rya.api.persist.RyaDAO;
+import mvm.rya.api.persist.RyaDAOException;
+import mvm.rya.api.persist.RyaNamespaceManager;
+import mvm.rya.api.resolver.RyaTripleContext;
+import mvm.rya.api.resolver.triple.TripleRow;
+import mvm.rya.api.resolver.triple.TripleRowResolverException;
+
 /**
  * Class AccumuloRyaDAO
  * Date: Feb 29, 2012
@@ -102,7 +101,7 @@ public class AccumuloRyaDAO implements RyaDAO<AccumuloRdfConfiguration>, RyaName
     private BatchWriter bw_ns;
 
     private List<AccumuloIndexer> secondaryIndexers;
-    
+
     private AccumuloRdfConfiguration conf = new AccumuloRdfConfiguration();
     private RyaTableMutationsFactory ryaTableMutationsFactory;
     private TableLayoutStrategy tableLayoutStrategy;
@@ -132,15 +131,15 @@ public class AccumuloRyaDAO implements RyaDAO<AccumuloRdfConfiguration>, RyaName
             tableLayoutStrategy = conf.getTableLayoutStrategy();
             ryaContext = RyaTripleContext.getInstance(conf);
             ryaTableMutationsFactory = new RyaTableMutationsFactory(ryaContext);
-            
+
             secondaryIndexers = conf.getAdditionalIndexers();
-            
+
             TableOperations tableOperations = connector.tableOperations();
             AccumuloRdfUtils.createTableIfNotExist(tableOperations, tableLayoutStrategy.getSpo());
             AccumuloRdfUtils.createTableIfNotExist(tableOperations, tableLayoutStrategy.getPo());
             AccumuloRdfUtils.createTableIfNotExist(tableOperations, tableLayoutStrategy.getOsp());
             AccumuloRdfUtils.createTableIfNotExist(tableOperations, tableLayoutStrategy.getNs());
-            
+
             for (AccumuloIndexer index : secondaryIndexers) {
                 index.setConf(conf);
             }
@@ -154,7 +153,7 @@ public class AccumuloRyaDAO implements RyaDAO<AccumuloRdfConfiguration>, RyaName
 
             bw_ns = connector.createBatchWriter(tableLayoutStrategy.getNs(), MAX_MEMORY,
                     MAX_TIME, 1);
-            
+
             for (AccumuloIndexer index : secondaryIndexers) {
                 index.setMultiTableBatchWriter(mt_bw);
             }
@@ -169,7 +168,8 @@ public class AccumuloRyaDAO implements RyaDAO<AccumuloRdfConfiguration>, RyaName
         }
     }
 
-    public String getVersion() throws RyaDAOException {
+    @Override
+	public String getVersion() throws RyaDAOException {
         String version = null;
         CloseableIteration<RyaStatement, RyaDAOException> versIter = queryEngine.query(new RyaStatement(RTS_SUBJECT_RYA, RTS_VERSION_PREDICATE_RYA, null), conf);
         if (versIter.hasNext()) {
@@ -206,6 +206,10 @@ public class AccumuloRyaDAO implements RyaDAO<AccumuloRdfConfiguration>, RyaName
                 while (query.hasNext()) {
                     deleteSingleRyaStatement(query.next());
                 }
+
+                for (AccumuloIndexer index : secondaryIndexers) {
+                    index.deleteStatement(stmt);
+                }
             }
             mt_bw.flush();
             //TODO currently all indexers do not support delete
@@ -213,7 +217,7 @@ public class AccumuloRyaDAO implements RyaDAO<AccumuloRdfConfiguration>, RyaName
             throw new RyaDAOException(e);
         }
     }
-    
+
     @Override
     public void dropGraph(AccumuloRdfConfiguration conf, RyaURI... graphs) throws RyaDAOException {
         BatchDeleter bd_spo = null;
@@ -234,16 +238,16 @@ public class AccumuloRyaDAO implements RyaDAO<AccumuloRdfConfiguration>, RyaName
                 bd_po.fetchColumnFamily(new Text(graph.getData()));
                 bd_osp.fetchColumnFamily(new Text(graph.getData()));
             }
-            
+
             bd_spo.delete();
             bd_po.delete();
             bd_osp.delete();
-            
+
             //TODO indexers do not support delete-UnsupportedOperation Exception will be thrown
 //            for (AccumuloIndex index : secondaryIndexers) {
 //                index.dropGraph(graphs);
 //            }
-            
+
         } catch (Exception e) {
             throw new RyaDAOException(e);
         } finally {
@@ -251,7 +255,7 @@ public class AccumuloRyaDAO implements RyaDAO<AccumuloRdfConfiguration>, RyaName
             if (bd_po != null) bd_po.close();
             if (bd_osp != null) bd_osp.close();
         }
-        
+
     }
 
     protected void deleteSingleRyaStatement(RyaStatement stmt) throws TripleRowResolverException, MutationsRejectedException {
@@ -281,7 +285,7 @@ public class AccumuloRyaDAO implements RyaDAO<AccumuloRdfConfiguration>, RyaName
             //TODO: Should have a lock here in case we are adding and committing at the same time
             while (commitStatements.hasNext()) {
                 RyaStatement stmt = commitStatements.next();
-                
+
                 Map<TABLE_LAYOUT, Collection<Mutation>> mutationMap = ryaTableMutationsFactory.serialize(stmt);
                 Collection<Mutation> spo = mutationMap.get(TABLE_LAYOUT.SPO);
                 Collection<Mutation> po = mutationMap.get(TABLE_LAYOUT.PO);
@@ -289,7 +293,7 @@ public class AccumuloRyaDAO implements RyaDAO<AccumuloRdfConfiguration>, RyaName
                 bw_spo.addMutations(spo);
                 bw_po.addMutations(po);
                 bw_osp.addMutations(osp);
-                
+
                 for (AccumuloIndexer index : secondaryIndexers) {
                     index.storeStatement(stmt);
                 }
@@ -433,11 +437,13 @@ public class AccumuloRyaDAO implements RyaDAO<AccumuloRdfConfiguration>, RyaName
     	return mt_bw;
     }
 
-    public AccumuloRdfConfiguration getConf() {
+    @Override
+	public AccumuloRdfConfiguration getConf() {
         return conf;
     }
 
-    public void setConf(AccumuloRdfConfiguration conf) {
+    @Override
+	public void setConf(AccumuloRdfConfiguration conf) {
         this.conf = conf;
     }
 
@@ -449,7 +455,8 @@ public class AccumuloRyaDAO implements RyaDAO<AccumuloRdfConfiguration>, RyaName
         this.ryaTableMutationsFactory = ryaTableMutationsFactory;
     }
 
-    public AccumuloRyaQueryEngine getQueryEngine() {
+    @Override
+	public AccumuloRyaQueryEngine getQueryEngine() {
         return queryEngine;
     }
 
@@ -460,13 +467,13 @@ public class AccumuloRyaDAO implements RyaDAO<AccumuloRdfConfiguration>, RyaName
     protected String[] getTables() {
         // core tables
         List<String> tableNames = Lists.newArrayList(
-                tableLayoutStrategy.getSpo(), 
-                tableLayoutStrategy.getPo(), 
-                tableLayoutStrategy.getOsp(), 
+                tableLayoutStrategy.getSpo(),
+                tableLayoutStrategy.getPo(),
+                tableLayoutStrategy.getOsp(),
                 tableLayoutStrategy.getNs(),
                 tableLayoutStrategy.getEval());
-        
-        // Additional Tables        
+
+        // Additional Tables
         for (AccumuloIndexer index : secondaryIndexers) {
             tableNames.add(index.getTableName());
         }
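
The delete path above fans each removed statement out to every registered
secondary index: after deleteSingleRyaStatement() removes a statement from the
core SPO/PO/OSP tables, index.deleteStatement(stmt) keeps each index in sync
before a single flush. A minimal sketch of that pattern, where Statement,
PrimaryStore, and Indexer are hypothetical stand-ins for RyaStatement, the
DAO's batch writers, and AccumuloIndexer:

    import java.io.IOException;
    import java.util.Iterator;
    import java.util.List;

    interface Statement {}

    // Stand-in for the DAO's core-table batch writers (assumption).
    interface PrimaryStore {
        void delete(Statement stmt) throws IOException;
        void flush() throws IOException;
    }

    // Stand-in for AccumuloIndexer's delete hook (assumption).
    interface Indexer {
        void deleteStatement(Statement stmt) throws IOException;
    }

    class DeleteFanOut {
        private final PrimaryStore primary;
        private final List<Indexer> secondaryIndexers;

        DeleteFanOut(PrimaryStore primary, List<Indexer> secondaryIndexers) {
            this.primary = primary;
            this.secondaryIndexers = secondaryIndexers;
        }

        // Remove each statement from the core tables, then notify every
        // secondary index; flush once at the end, as the DAO does.
        void delete(Iterator<Statement> statements) throws IOException {
            while (statements.hasNext()) {
                Statement stmt = statements.next();
                primary.delete(stmt);
                for (Indexer index : secondaryIndexers) {
                    index.deleteStatement(stmt);
                }
            }
            primary.flush();
        }
    }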

http://git-wip-us.apache.org/repos/asf/incubator-rya/blob/e5e227c1/extras/indexing/src/main/java/mvm/rya/indexing/accumulo/entity/EntityCentricIndex.java
----------------------------------------------------------------------
diff --git a/extras/indexing/src/main/java/mvm/rya/indexing/accumulo/entity/EntityCentricIndex.java b/extras/indexing/src/main/java/mvm/rya/indexing/accumulo/entity/EntityCentricIndex.java
index b8b3f65..1e2b18a 100644
--- a/extras/indexing/src/main/java/mvm/rya/indexing/accumulo/entity/EntityCentricIndex.java
+++ b/extras/indexing/src/main/java/mvm/rya/indexing/accumulo/entity/EntityCentricIndex.java
@@ -30,19 +30,6 @@ import java.io.IOException;
 import java.util.Collection;
 import java.util.List;
 
-import mvm.rya.accumulo.AccumuloRdfConfiguration;
-import mvm.rya.accumulo.experimental.AbstractAccumuloIndexer;
-import mvm.rya.accumulo.experimental.AccumuloIndexer;
-import mvm.rya.api.domain.RyaStatement;
-import mvm.rya.api.domain.RyaType;
-import mvm.rya.api.domain.RyaURI;
-import mvm.rya.api.resolver.RdfToRyaConversions;
-import mvm.rya.api.resolver.RyaContext;
-import mvm.rya.api.resolver.RyaTypeResolverException;
-import mvm.rya.api.resolver.triple.TripleRow;
-import mvm.rya.indexing.accumulo.ConfigUtils;
-import mvm.rya.indexing.accumulo.freetext.AccumuloFreeTextIndexer;
-
 import org.apache.accumulo.core.client.AccumuloException;
 import org.apache.accumulo.core.client.AccumuloSecurityException;
 import org.apache.accumulo.core.client.BatchWriter;
@@ -56,23 +43,21 @@ import org.apache.accumulo.core.security.ColumnVisibility;
 import org.apache.hadoop.conf.Configuration;
 import org.apache.hadoop.io.Text;
 import org.apache.log4j.Logger;
-import org.openrdf.model.Statement;
-import org.openrdf.query.algebra.evaluation.QueryOptimizer;
-import org.openrdf.query.algebra.evaluation.impl.BindingAssigner;
-import org.openrdf.query.algebra.evaluation.impl.CompareOptimizer;
-import org.openrdf.query.algebra.evaluation.impl.ConjunctiveConstraintSplitter;
-import org.openrdf.query.algebra.evaluation.impl.ConstantOptimizer;
-import org.openrdf.query.algebra.evaluation.impl.DisjunctiveConstraintOptimizer;
-import org.openrdf.query.algebra.evaluation.impl.FilterOptimizer;
-import org.openrdf.query.algebra.evaluation.impl.IterativeEvaluationOptimizer;
-import org.openrdf.query.algebra.evaluation.impl.OrderLimitOptimizer;
-import org.openrdf.query.algebra.evaluation.impl.QueryModelNormalizer;
-import org.openrdf.query.algebra.evaluation.impl.SameTermFilterOptimizer;
 
 import com.google.common.base.Preconditions;
 import com.google.common.collect.Lists;
 import com.google.common.primitives.Bytes;
 
+import mvm.rya.accumulo.AccumuloRdfConfiguration;
+import mvm.rya.accumulo.experimental.AbstractAccumuloIndexer;
+import mvm.rya.api.domain.RyaStatement;
+import mvm.rya.api.domain.RyaType;
+import mvm.rya.api.domain.RyaURI;
+import mvm.rya.api.resolver.RyaContext;
+import mvm.rya.api.resolver.RyaTypeResolverException;
+import mvm.rya.api.resolver.triple.TripleRow;
+import mvm.rya.indexing.accumulo.ConfigUtils;
+
 public class EntityCentricIndex extends AbstractAccumuloIndexer {
 
     private static final Logger logger = Logger.getLogger(EntityCentricIndex.class);
@@ -81,23 +66,23 @@ public class EntityCentricIndex extends AbstractAccumuloIndexer {
     private AccumuloRdfConfiguration conf;
     private BatchWriter writer;
     private boolean isInit = false;
-    
+
     public static final String CONF_TABLE_SUFFIX = "ac.indexer.eci.tablename";
 
-    
+
     private void init() throws AccumuloException, AccumuloSecurityException, TableNotFoundException, IOException,
             TableExistsException {
         ConfigUtils.createTableIfNotExists(conf, ConfigUtils.getEntityTableName(conf));
     }
-    
-    
-    @Override 
+
+
+    @Override
     public Configuration getConf() {
         return this.conf;
     }
-    
+
   //initialization occurs in setConf because index is created using reflection
-    @Override 
+    @Override
     public void setConf(Configuration conf) {
         if (conf instanceof AccumuloRdfConfiguration) {
             this.conf = (AccumuloRdfConfiguration) conf;
@@ -126,7 +111,7 @@ public class EntityCentricIndex extends AbstractAccumuloIndexer {
             }
         }
     }
-    
+
 
     @Override
     public String getTableName() {
@@ -147,7 +132,8 @@ public class EntityCentricIndex extends AbstractAccumuloIndexer {
 
     }
 
-   
+
+    @Override
     public void storeStatement(RyaStatement stmt) throws IOException {
         Preconditions.checkNotNull(writer, "BatchWriter not Set");
         try {
@@ -161,7 +147,8 @@ public class EntityCentricIndex extends AbstractAccumuloIndexer {
         }
     }
 
-    
+
+    @Override
     public void deleteStatement(RyaStatement stmt) throws IOException {
         Preconditions.checkNotNull(writer, "BatchWriter not Set");
         try {
@@ -185,10 +172,13 @@ public class EntityCentricIndex extends AbstractAccumuloIndexer {
         byte[] columnQualifier = tripleRow.getColumnQualifier();
         Text cqText = columnQualifier == null ? EMPTY_TEXT : new Text(columnQualifier);
 
-        m.putDelete(cfText, cqText, new ColumnVisibility(tripleRow.getColumnVisibility()), tripleRow.getTimestamp());
+        byte[] columnVisibility = tripleRow.getColumnVisibility();
+        ColumnVisibility cv = columnVisibility == null ? EMPTY_CV : new ColumnVisibility(columnVisibility);
+
+        m.putDelete(cfText, cqText, cv, tripleRow.getTimestamp());
         return m;
     }
-    
+
     public static Collection<Mutation> createMutations(RyaStatement stmt) throws RyaTypeResolverException{
         Collection<Mutation> m = Lists.newArrayList();
         for (TripleRow tr : serializeStatement(stmt)){
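
The deleteMutation() change above guards against triples whose column
visibility is null before constructing the Accumulo delete. A self-contained
sketch of the same null-safe construction against the Accumulo Mutation API
(the byte arrays are assumed to come from a serialized TripleRow, and the
EMPTY_* constants mirror the ones used in the diff):

    import org.apache.accumulo.core.data.Mutation;
    import org.apache.accumulo.core.security.ColumnVisibility;
    import org.apache.hadoop.io.Text;

    public class NullSafeDelete {
        private static final Text EMPTY_TEXT = new Text(new byte[0]);
        private static final ColumnVisibility EMPTY_CV = new ColumnVisibility(new byte[0]);

        // Builds a delete mutation that tolerates a null family, qualifier,
        // or visibility, falling back to empty values as the index does.
        public static Mutation deleteMutation(byte[] row, byte[] cf, byte[] cq,
                byte[] cv, long timestamp) {
            Mutation m = new Mutation(new Text(row));
            Text cfText = cf == null ? EMPTY_TEXT : new Text(cf);
            Text cqText = cq == null ? EMPTY_TEXT : new Text(cq);
            ColumnVisibility vis = cv == null ? EMPTY_CV : new ColumnVisibility(cv);
            m.putDelete(cfText, cqText, vis, timestamp);
            return m;
        }
    }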

http://git-wip-us.apache.org/repos/asf/incubator-rya/blob/e5e227c1/extras/indexing/src/main/java/mvm/rya/indexing/accumulo/freetext/AccumuloFreeTextIndexer.java
----------------------------------------------------------------------
diff --git a/extras/indexing/src/main/java/mvm/rya/indexing/accumulo/freetext/AccumuloFreeTextIndexer.java b/extras/indexing/src/main/java/mvm/rya/indexing/accumulo/freetext/AccumuloFreeTextIndexer.java
index f529569..fdefbea 100644
--- a/extras/indexing/src/main/java/mvm/rya/indexing/accumulo/freetext/AccumuloFreeTextIndexer.java
+++ b/extras/indexing/src/main/java/mvm/rya/indexing/accumulo/freetext/AccumuloFreeTextIndexer.java
@@ -22,7 +22,6 @@ package mvm.rya.indexing.accumulo.freetext;
 
 
 import static mvm.rya.indexing.accumulo.freetext.query.ASTNodeUtils.getNodeIterator;
-import info.aduna.iteration.CloseableIteration;
 
 import java.io.IOException;
 import java.nio.charset.CharacterCodingException;
@@ -35,25 +34,6 @@ import java.util.Set;
 import java.util.SortedSet;
 import java.util.TreeSet;
 
-import mvm.rya.accumulo.experimental.AbstractAccumuloIndexer;
-import mvm.rya.api.domain.RyaStatement;
-import mvm.rya.api.resolver.RyaToRdfConversions;
-import mvm.rya.indexing.FreeTextIndexer;
-import mvm.rya.indexing.StatementContraints;
-import mvm.rya.indexing.accumulo.ConfigUtils;
-import mvm.rya.indexing.accumulo.Md5Hash;
-import mvm.rya.indexing.accumulo.StatementSerializer;
-import mvm.rya.indexing.accumulo.freetext.iterators.BooleanTreeIterator;
-import mvm.rya.indexing.accumulo.freetext.query.ASTExpression;
-import mvm.rya.indexing.accumulo.freetext.query.ASTNodeUtils;
-import mvm.rya.indexing.accumulo.freetext.query.ASTSimpleNode;
-import mvm.rya.indexing.accumulo.freetext.query.ASTTerm;
-import mvm.rya.indexing.accumulo.freetext.query.ParseException;
-import mvm.rya.indexing.accumulo.freetext.query.QueryParser;
-import mvm.rya.indexing.accumulo.freetext.query.QueryParserTreeConstants;
-import mvm.rya.indexing.accumulo.freetext.query.SimpleNode;
-import mvm.rya.indexing.accumulo.freetext.query.TokenMgrError;
-
 import org.apache.accumulo.core.client.AccumuloException;
 import org.apache.accumulo.core.client.AccumuloSecurityException;
 import org.apache.accumulo.core.client.BatchWriter;
@@ -82,6 +62,26 @@ import org.openrdf.query.QueryEvaluationException;
 
 import com.google.common.base.Charsets;
 
+import info.aduna.iteration.CloseableIteration;
+import mvm.rya.accumulo.experimental.AbstractAccumuloIndexer;
+import mvm.rya.api.domain.RyaStatement;
+import mvm.rya.api.resolver.RyaToRdfConversions;
+import mvm.rya.indexing.FreeTextIndexer;
+import mvm.rya.indexing.StatementContraints;
+import mvm.rya.indexing.accumulo.ConfigUtils;
+import mvm.rya.indexing.accumulo.Md5Hash;
+import mvm.rya.indexing.accumulo.StatementSerializer;
+import mvm.rya.indexing.accumulo.freetext.iterators.BooleanTreeIterator;
+import mvm.rya.indexing.accumulo.freetext.query.ASTExpression;
+import mvm.rya.indexing.accumulo.freetext.query.ASTNodeUtils;
+import mvm.rya.indexing.accumulo.freetext.query.ASTSimpleNode;
+import mvm.rya.indexing.accumulo.freetext.query.ASTTerm;
+import mvm.rya.indexing.accumulo.freetext.query.ParseException;
+import mvm.rya.indexing.accumulo.freetext.query.QueryParser;
+import mvm.rya.indexing.accumulo.freetext.query.QueryParserTreeConstants;
+import mvm.rya.indexing.accumulo.freetext.query.SimpleNode;
+import mvm.rya.indexing.accumulo.freetext.query.TokenMgrError;
+
 /**
  * The {@link AccumuloFreeTextIndexer} stores and queries "free text" data from statements into tables in Accumulo. Specifically, this class
  * stores data into two different Accumulo Tables. This is the <b>document table</b> (default name: triplestore_text) and the <b>terms
@@ -92,27 +92,27 @@ import com.google.common.base.Charsets;
  * <p>
  * For each document, the document table will store the following information:
  * <P>
- * 
+ *
  * <pre>
- * Row (partition) | Column Family  | Column Qualifier | Value 
+ * Row (partition) | Column Family  | Column Qualifier | Value
  * ================+================+==================+==========
- * shardID         | d\x00          | documentHash     | Document 
- * shardID         | s\x00Subject   | documentHash     | (empty) 
- * shardID         | p\x00Predicate | documentHash     | (empty) 
- * shardID         | o\x00Object    | documentHash     | (empty) 
- * shardID         | c\x00Context   | documentHash     | (empty) 
+ * shardID         | d\x00          | documentHash     | Document
+ * shardID         | s\x00Subject   | documentHash     | (empty)
+ * shardID         | p\x00Predicate | documentHash     | (empty)
+ * shardID         | o\x00Object    | documentHash     | (empty)
+ * shardID         | c\x00Context   | documentHash     | (empty)
  * shardID         | t\x00token     | documentHash     | (empty)
  * </pre>
  * <p>
  * Note: documentHash is a sha256 Hash of the Document's Content
  * <p>
- * The terms table is used for expanding wildcard search terms. For each token in the document table, the table sill store the following
+ * The terms table is used for expanding wildcard search terms. For each token in the document table, the table will store the following
  * information:
- * 
+ *
  * <pre>
- * Row (partition)   | CF/CQ/Value 
+ * Row (partition)   | CF/CQ/Value
  * ==================+=============
- * l\x00token        | (empty) 
+ * l\x00token        | (empty)
  * r\x00Reversetoken | (empty)
  * </pre>
  * <p>
@@ -121,7 +121,7 @@ import com.google.common.base.Charsets;
  * into car, bar, and far.
  * <p>
  * Example: Given these three statements as inputs:
- * 
+ *
  * <pre>
  *     <uri:paul> rdfs:label "paul smith"@en <uri:graph1>
  *     <uri:steve> rdfs:label "steven anthony miller"@en <uri:graph1>
@@ -131,9 +131,9 @@ import com.google.common.base.Charsets;
  * Here's what the tables would look like: (Note: the hashes aren't real, the rows are not sorted, and the partition ids will vary.)
  * <p>
  * Triplestore_text
- * 
+ *
  * <pre>
- * Row (partition) | Column Family                   | Column Qualifier | Value 
+ * Row (partition) | Column Family                   | Column Qualifier | Value
  * ================+=================================+==================+==========
  * 000000          | d\x00                           | 08b3d233a        | uri:graph1\x00uri:paul\x00rdfs:label\x00"paul smith"@en
  * 000000          | s\x00uri:paul                   | 08b3d233a        | (empty)
@@ -142,7 +142,7 @@ import com.google.common.base.Charsets;
  * 000000          | c\x00uri:graph1                 | 08b3d233a        | (empty)
  * 000000          | t\x00paul                       | 08b3d233a        | (empty)
  * 000000          | t\x00smith                      | 08b3d233a        | (empty)
- * 
+ *
  * 000000          | d\x00                           | 3a575534b        | uri:graph1\x00uri:steve\x00rdfs:label\x00"steven anthony miller"@en
  * 000000          | s\x00uri:steve                  | 3a575534b        | (empty)
  * 000000          | p\x00rdfs:label                 | 3a575534b        | (empty)
@@ -151,7 +151,7 @@ import com.google.common.base.Charsets;
  * 000000          | t\x00steven                     | 3a575534b        | (empty)
  * 000000          | t\x00anthony                    | 3a575534b        | (empty)
  * 000000          | t\x00miller                     | 3a575534b        | (empty)
- * 
+ *
  * 000001          | d\x00                           | 7bf670d06        | uri:graph1\x00uri:steve\x00rdfs:label\x00"steve miller"@en
  * 000001          | s\x00uri:steve                  | 7bf670d06        | (empty)
  * 000001          | p\x00rdfs:label                 | 7bf670d06        | (empty)
@@ -163,9 +163,9 @@ import com.google.common.base.Charsets;
  * <p>
  * triplestore_terms
  * <p>
- * 
+ *
  * <pre>
- * Row (partition)   | CF/CQ/Value 
+ * Row (partition)   | CF/CQ/Value
  * ==================+=============
  * l\x00paul         | (empty)
  * l\x00smith        | (empty)
@@ -179,12 +179,14 @@ import com.google.common.base.Charsets;
  * r\x00ynohtna      | (empty)
  * r\x00rellim       | (empty)
  * r\x00evets        | (empty)
- * 
+ *
  * <pre>
  */
 public class AccumuloFreeTextIndexer extends AbstractAccumuloIndexer implements FreeTextIndexer  {
     private static final Logger logger = Logger.getLogger(AccumuloFreeTextIndexer.class);
 
+    private static final boolean IS_TERM_TABLE_TOKEN_DELETION_ENABLED = true;
+
     private static final byte[] EMPTY_BYTES = new byte[] {};
     private static final Text EMPTY_TEXT = new Text(EMPTY_BYTES);
     private static final Value EMPTY_VALUE = new Value(EMPTY_BYTES);
@@ -202,10 +204,10 @@ public class AccumuloFreeTextIndexer extends AbstractAccumuloIndexer implements
     private Set<URI> validPredicates;
 
     private Configuration conf;
-    
+
     private boolean isInit = false;
 
-    
+
     private void init() throws AccumuloException, AccumuloSecurityException, TableNotFoundException,
             TableExistsException {
         String doctable = ConfigUtils.getFreeTextDocTablename(conf);
@@ -262,8 +264,8 @@ public class AccumuloFreeTextIndexer extends AbstractAccumuloIndexer implements
 
         queryTermLimit = ConfigUtils.getFreeTextTermLimit(conf);
     }
-    
-    
+
+
   //initialization occurs in setConf because index is created using reflection
     @Override
     public void setConf(Configuration conf) {
@@ -272,27 +274,18 @@ public class AccumuloFreeTextIndexer extends AbstractAccumuloIndexer implements
             try {
                 init();
                 isInit = true;
-            } catch (AccumuloException e) {
-                logger.warn("Unable to initialize index.  Throwing Runtime Exception. ", e);
-                throw new RuntimeException(e);
-            } catch (AccumuloSecurityException e) {
-                logger.warn("Unable to initialize index.  Throwing Runtime Exception. ", e);
-                throw new RuntimeException(e);
-            } catch (TableNotFoundException e) {
-                logger.warn("Unable to initialize index.  Throwing Runtime Exception. ", e);
-                throw new RuntimeException(e);
-            } catch (TableExistsException e) {
+            } catch (AccumuloException | AccumuloSecurityException | TableNotFoundException | TableExistsException e) {
                 logger.warn("Unable to initialize index.  Throwing Runtime Exception. ", e);
                 throw new RuntimeException(e);
             }
         }
     }
-    
+
     @Override
     public Configuration getConf() {
         return this.conf;
     }
-    
+
 
     private void storeStatement(Statement statement) throws IOException {
         // if the predicate list is empty, accept all predicates.
@@ -363,6 +356,12 @@ public class AccumuloFreeTextIndexer extends AbstractAccumuloIndexer implements
         return m;
     }
 
+    private static Mutation createEmptyPutDeleteMutation(Text row) {
+        Mutation m = new Mutation(row);
+        m.putDelete(EMPTY_TEXT, EMPTY_TEXT);
+        return m;
+    }
+
     private static Text genPartition(int partition, int numParitions) {
         int length = Integer.toString(numParitions).length();
         return new Text(String.format("%0" + length + "d", Math.abs(partition % numParitions)));
@@ -471,13 +470,7 @@ public class AccumuloFreeTextIndexer extends AbstractAccumuloIndexer implements
     private Scanner getScanner(String tablename) throws IOException {
         try {
             return ConfigUtils.createScanner(tablename, conf);
-        } catch (AccumuloException e) {
-            logger.error("Error connecting to " + tablename);
-            throw new IOException(e);
-        } catch (AccumuloSecurityException e) {
-            logger.error("Error connecting to " + tablename);
-            throw new IOException(e);
-        } catch (TableNotFoundException e) {
+        } catch (AccumuloException | AccumuloSecurityException | TableNotFoundException e) {
             logger.error("Error connecting to " + tablename);
             throw new IOException(e);
         }
@@ -574,7 +567,9 @@ public class AccumuloFreeTextIndexer extends AbstractAccumuloIndexer implements
 
             @Override
             public void close() throws QueryEvaluationException {
-                s.close();
+                if (s != null) {
+                    s.close();
+                }
             }
         };
     }
@@ -582,7 +577,7 @@ public class AccumuloFreeTextIndexer extends AbstractAccumuloIndexer implements
     /**
      * Simple adapter that parses the query using {@link QueryParser}. Note: any checked exceptions thrown by {@link QueryParser} are
      * re-thrown as {@link IOException}s.
-     * 
+     *
      * @param query
      * @return
      * @throws IOException
@@ -600,12 +595,121 @@ public class AccumuloFreeTextIndexer extends AbstractAccumuloIndexer implements
         }
         return root;
     }
-    
-   
+
+
     @Override
     public String getTableName() {
        return ConfigUtils.getFreeTextDocTablename(conf);
     }
 
-    
+    private void deleteStatement(Statement statement) throws IOException {
+        // if the predicate list is empty, accept all predicates.
+        // Otherwise, make sure the predicate is on the "valid" list
+        boolean isValidPredicate = validPredicates.isEmpty() || validPredicates.contains(statement.getPredicate());
+
+        if (isValidPredicate && (statement.getObject() instanceof Literal)) {
+
+            // Get the tokens
+            String text = statement.getObject().stringValue().toLowerCase();
+            SortedSet<String> tokens = tokenizer.tokenize(text);
+
+            if (!tokens.isEmpty()) {
+                // Get Document Data
+                String docContent = StatementSerializer.writeStatement(statement);
+
+                String docId = Md5Hash.md5Base64(docContent);
+
+                // Setup partition
+                Text partition = genPartition(docContent.hashCode(), docTableNumPartitions);
+
+                Mutation docTableMut = new Mutation(partition);
+                List<Mutation> termTableMutations = new ArrayList<Mutation>();
+
+                Text docIdText = new Text(docId);
+
+                // Delete the Document Data
+                docTableMut.putDelete(ColumnPrefixes.DOCS_CF_PREFIX, docIdText);
+
+                // Delete the statement parts in index
+                docTableMut.putDelete(ColumnPrefixes.getSubjColFam(statement), docIdText);
+                docTableMut.putDelete(ColumnPrefixes.getPredColFam(statement), docIdText);
+                docTableMut.putDelete(ColumnPrefixes.getObjColFam(statement), docIdText);
+                docTableMut.putDelete(ColumnPrefixes.getContextColFam(statement), docIdText);
+
+
+                // Delete the statement terms in index
+                for (String token : tokens) {
+                    if (IS_TERM_TABLE_TOKEN_DELETION_ENABLED) {
+                        int rowId = Integer.parseInt(partition.toString());
+                        boolean doesTermExistInOtherDocs = doesTermExistInOtherDocs(token, rowId, docIdText);
+                        // Only delete the term from the term table if it doesn't appear in other docs
+                        if (!doesTermExistInOtherDocs) {
+                            // Delete the term in the term table
+                            termTableMutations.add(createEmptyPutDeleteMutation(ColumnPrefixes.getTermListColFam(token)));
+                            termTableMutations.add(createEmptyPutDeleteMutation(ColumnPrefixes.getRevTermListColFam(token)));
+                        }
+                    }
+
+                    // Un-tie the token to the document
+                    docTableMut.putDelete(ColumnPrefixes.getTermColFam(token), docIdText);
+                }
+
+                // write the mutations
+                try {
+                    docTableBw.addMutation(docTableMut);
+                    termTableBw.addMutations(termTableMutations);
+                } catch (MutationsRejectedException e) {
+                    logger.error("error adding mutation", e);
+                    throw new IOException(e);
+                }
+
+            }
+        }
+    }
+
+    @Override
+    public void deleteStatement(RyaStatement statement) throws IOException {
+        deleteStatement(RyaToRdfConversions.convertStatement(statement));
+    }
+
+    /**
+     * Checks to see if the provided term appears in other documents.
+     * @param term the term to search for.
+     * @param currentDocId the current document ID that the search term exists in.
+     * @return {@code true} if the term was found in other documents. {@code false} otherwise.
+     */
+    private boolean doesTermExistInOtherDocs(String term, int currentDocId, Text docIdText) {
+        try {
+            String freeTextDocTableName = ConfigUtils.getFreeTextDocTablename(conf);
+            Scanner scanner = getScanner(freeTextDocTableName);
+
+            String t = StringUtils.removeEnd(term, "*").toLowerCase();
+            Text queryTerm = ColumnPrefixes.getTermColFam(t);
+
+            // perform query and read results
+            scanner.fetchColumnFamily(queryTerm);
+
+            for (Entry<Key, Value> entry : scanner) {
+                Key key = entry.getKey();
+                Text row = key.getRow();
+                int rowId = Integer.parseInt(row.toString());
+                // We only want to check documents other than the one we're deleting
+                if (rowId != currentDocId) {
+                    Text columnFamily = key.getColumnFamily();
+                    String columnFamilyValue = columnFamily.toString();
+                    // Check that the value has the term prefix
+                    if (columnFamilyValue.startsWith(ColumnPrefixes.TERM_CF_PREFIX.toString())) {
+                        Text text = ColumnPrefixes.removePrefix(columnFamily);
+                        String value = text.toString();
+                        if (value.equals(term)) {
+                            return true;
+                        }
+                    }
+                }
+            }
+        } catch (IOException e) {
+            logger.error("Error searching for the existance of the term in other documents", e);
+        }
+        return false;
+    }
 }
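
The free-text delete above removes a token's rows from the terms table only
when no other document still contains that token (the doesTermExistInOtherDocs()
check), so wildcard expansion keeps working for the documents that remain. A
condensed sketch of that decision, using a hypothetical in-memory
token-to-documents map in place of the scanner-based lookup:

    import java.util.ArrayList;
    import java.util.Collections;
    import java.util.List;
    import java.util.Map;
    import java.util.Set;
    import java.util.SortedSet;

    class TermTableCleanup {
        // Maps each token to the ids of the documents containing it
        // (assumption; the real index answers this with a table scan).
        private final Map<String, Set<String>> tokenToDocIds;

        TermTableCleanup(Map<String, Set<String>> tokenToDocIds) {
            this.tokenToDocIds = tokenToDocIds;
        }

        // Returns the tokens whose term-table rows can be dropped outright:
        // those referenced by no document other than the one being deleted.
        List<String> tokensSafeToDelete(String deletedDocId, SortedSet<String> tokens) {
            List<String> safe = new ArrayList<String>();
            for (String token : tokens) {
                Set<String> holders = tokenToDocIds.containsKey(token)
                        ? tokenToDocIds.get(token)
                        : Collections.<String>emptySet();
                boolean existsElsewhere = false;
                for (String docId : holders) {
                    if (!docId.equals(deletedDocId)) {
                        existsElsewhere = true;
                        break;
                    }
                }
                if (!existsElsewhere) {
                    safe.add(token);
                }
            }
            return safe;
        }
    }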

http://git-wip-us.apache.org/repos/asf/incubator-rya/blob/e5e227c1/extras/indexing/src/main/java/mvm/rya/indexing/accumulo/geo/GeoMesaGeoIndexer.java
----------------------------------------------------------------------
diff --git a/extras/indexing/src/main/java/mvm/rya/indexing/accumulo/geo/GeoMesaGeoIndexer.java b/extras/indexing/src/main/java/mvm/rya/indexing/accumulo/geo/GeoMesaGeoIndexer.java
index 37acf89..c8b5b4a 100644
--- a/extras/indexing/src/main/java/mvm/rya/indexing/accumulo/geo/GeoMesaGeoIndexer.java
+++ b/extras/indexing/src/main/java/mvm/rya/indexing/accumulo/geo/GeoMesaGeoIndexer.java
@@ -1,4 +1,4 @@
-package mvm.rya.indexing.accumulo.geo;
+package mvm.rya.indexing.accumulo.geo;
 
 /*
  * Licensed to the Apache Software Foundation (ASF) under one
@@ -21,8 +21,6 @@ package mvm.rya.indexing.accumulo.geo;
 
 
 
-import info.aduna.iteration.CloseableIteration;
-
 import java.io.IOException;
 import java.io.Serializable;
 import java.util.ArrayList;
@@ -30,37 +28,23 @@ import java.util.Arrays;
 import java.util.Collection;
 import java.util.Collections;
 import java.util.HashMap;
+import java.util.HashSet;
 import java.util.List;
 import java.util.Map;
 import java.util.Set;
 
-import mvm.rya.accumulo.AccumuloRdfConfiguration;
-import mvm.rya.accumulo.experimental.AbstractAccumuloIndexer;
-import mvm.rya.accumulo.experimental.AccumuloIndexer;
-import mvm.rya.api.RdfCloudTripleStoreConfiguration;
-import mvm.rya.api.domain.RyaStatement;
-import mvm.rya.api.domain.RyaURI;
-import mvm.rya.api.resolver.RyaToRdfConversions;
-import mvm.rya.indexing.GeoIndexer;
-import mvm.rya.indexing.StatementContraints;
-import mvm.rya.indexing.accumulo.ConfigUtils;
-import mvm.rya.indexing.accumulo.Md5Hash;
-import mvm.rya.indexing.accumulo.StatementSerializer;
-
-import org.apache.accumulo.core.client.AccumuloException;
-import org.apache.accumulo.core.client.AccumuloSecurityException;
 import org.apache.accumulo.core.client.Instance;
-import org.apache.accumulo.core.client.MultiTableBatchWriter;
-import org.apache.accumulo.core.client.TableNotFoundException;
 import org.apache.accumulo.core.client.mock.MockInstance;
 import org.apache.commons.lang.StringUtils;
 import org.apache.hadoop.conf.Configuration;
 import org.apache.log4j.Logger;
 import org.geotools.data.DataStore;
 import org.geotools.data.DataStoreFinder;
+import org.geotools.data.DataUtilities;
 import org.geotools.data.FeatureSource;
 import org.geotools.data.FeatureStore;
 import org.geotools.data.Query;
+import org.geotools.factory.CommonFactoryFinder;
 import org.geotools.factory.Hints;
 import org.geotools.feature.DefaultFeatureCollection;
 import org.geotools.feature.FeatureIterator;
@@ -68,21 +52,33 @@ import org.geotools.feature.SchemaException;
 import org.geotools.feature.simple.SimpleFeatureBuilder;
 import org.geotools.filter.text.cql2.CQLException;
 import org.geotools.filter.text.ecql.ECQL;
+import org.locationtech.geomesa.accumulo.data.AccumuloDataStore;
 import org.locationtech.geomesa.accumulo.index.Constants;
 import org.locationtech.geomesa.utils.geotools.SimpleFeatureTypes;
 import org.opengis.feature.simple.SimpleFeature;
 import org.opengis.feature.simple.SimpleFeatureType;
 import org.opengis.filter.Filter;
+import org.opengis.filter.FilterFactory;
+import org.opengis.filter.identity.Identifier;
 import org.openrdf.model.Literal;
 import org.openrdf.model.Statement;
 import org.openrdf.model.URI;
 import org.openrdf.query.QueryEvaluationException;
 
-import com.google.common.base.Preconditions;
 import com.vividsolutions.jts.geom.Geometry;
 import com.vividsolutions.jts.io.ParseException;
 import com.vividsolutions.jts.io.WKTReader;
 
+import info.aduna.iteration.CloseableIteration;
+import mvm.rya.accumulo.experimental.AbstractAccumuloIndexer;
+import mvm.rya.api.domain.RyaStatement;
+import mvm.rya.api.resolver.RyaToRdfConversions;
+import mvm.rya.indexing.GeoIndexer;
+import mvm.rya.indexing.StatementContraints;
+import mvm.rya.indexing.accumulo.ConfigUtils;
+import mvm.rya.indexing.accumulo.Md5Hash;
+import mvm.rya.indexing.accumulo.StatementSerializer;
+
 /**
  * A {@link GeoIndexer} wrapper around a GeoMesa {@link AccumuloDataStore}. This class configures and connects to the Datastore, creates the
  * RDF Feature Type, and interacts with the Datastore.
@@ -129,7 +125,7 @@ public class GeoMesaGeoIndexer extends AbstractAccumuloIndexer implements GeoInd
     private static final Logger logger = Logger.getLogger(GeoMesaGeoIndexer.class);
 
     private static final String FEATURE_NAME = "RDF";
-  
+
     private static final String SUBJECT_ATTRIBUTE = "S";
     private static final String PREDICATE_ATTRIBUTE = "P";
     private static final String OBJECT_ATTRIBUTE = "O";
@@ -141,7 +137,7 @@ public class GeoMesaGeoIndexer extends AbstractAccumuloIndexer implements GeoInd
     private FeatureSource<SimpleFeatureType, SimpleFeature> featureSource;
     private SimpleFeatureType featureType;
     private boolean isInit = false;
-   
+
     //initialization occurs in setConf because index is created using reflection
     @Override
     public void setConf(Configuration conf) {
@@ -156,18 +152,18 @@ public class GeoMesaGeoIndexer extends AbstractAccumuloIndexer implements GeoInd
             }
         }
     }
-    
+
     @Override
     public Configuration getConf() {
         return this.conf;
     }
-    
+
 
     private void init() throws IOException {
         validPredicates = ConfigUtils.getGeoPredicates(conf);
 
         DataStore dataStore = createDataStore(conf);
-        
+
         try {
             featureType = getStatementFeatureType(dataStore);
         } catch (IOException e) {
@@ -235,7 +231,7 @@ public class GeoMesaGeoIndexer extends AbstractAccumuloIndexer implements GeoInd
         // create a feature collection
         DefaultFeatureCollection featureCollection = new DefaultFeatureCollection();
 
-        
+
         for (RyaStatement ryaStatement : ryaStatements) {
 
             Statement statement = RyaToRdfConversions.convertStatement(ryaStatement);
@@ -264,7 +260,7 @@ public class GeoMesaGeoIndexer extends AbstractAccumuloIndexer implements GeoInd
     public void storeStatement(RyaStatement statement) throws IOException {
         storeStatements(Collections.singleton(statement));
     }
-    
+
     private static SimpleFeature createFeature(SimpleFeatureType featureType, Statement statement) throws ParseException {
         String subject = StatementSerializer.writeSubject(statement);
         String predicate = StatementSerializer.writePredicate(statement);
@@ -358,7 +354,7 @@ public class GeoMesaGeoIndexer extends AbstractAccumuloIndexer implements GeoInd
 
             @Override
             public Statement next() throws QueryEvaluationException {
-                SimpleFeature feature = (SimpleFeature) getIterator().next();
+                SimpleFeature feature = getIterator().next();
                 String subjectString = feature.getAttribute(SUBJECT_ATTRIBUTE).toString();
                 String predicateString = feature.getAttribute(PREDICATE_ATTRIBUTE).toString();
                 String objectString = feature.getAttribute(OBJECT_ATTRIBUTE).toString();
@@ -440,8 +436,42 @@ public class GeoMesaGeoIndexer extends AbstractAccumuloIndexer implements GeoInd
        return ConfigUtils.getGeoTablename(conf);
     }
 
+    private void deleteStatements(Collection<RyaStatement> ryaStatements) throws IOException {
+        // create a feature collection
+        DefaultFeatureCollection featureCollection = new DefaultFeatureCollection();
+
+        for (RyaStatement ryaStatement : ryaStatements) {
+            Statement statement = RyaToRdfConversions.convertStatement(ryaStatement);
+            // if the predicate list is empty, accept all predicates.
+            // Otherwise, make sure the predicate is on the "valid" list
+            boolean isValidPredicate = validPredicates.isEmpty() || validPredicates.contains(statement.getPredicate());
+
+            if (isValidPredicate && (statement.getObject() instanceof Literal)) {
+                try {
+                    SimpleFeature feature = createFeature(featureType, statement);
+                    featureCollection.add(feature);
+                } catch (ParseException e) {
+                    logger.warn("Error getting geo from statement: " + statement.toString(), e);
+                }
+            }
+        }
 
+        // remove this feature collection from the store
+        if (!featureCollection.isEmpty()) {
+            Set<Identifier> featureIds = new HashSet<Identifier>();
+            FilterFactory filterFactory = CommonFactoryFinder.getFilterFactory(null);
+            Set<String> stringIds = DataUtilities.fidSet(featureCollection);
+            for (String id : stringIds) {
+                featureIds.add(filterFactory.featureId(id));
+            }
+            Filter filter = filterFactory.id(featureIds);
+            featureStore.removeFeatures(filter);
+        }
+    }
 
-  
 
+    @Override
+    public void deleteStatement(RyaStatement statement) throws IOException {
+        deleteStatements(Collections.singleton(statement));
+    }
 }
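
The geo delete above removes features in bulk by collecting their feature ids
and issuing a single Id filter against the GeoTools FeatureStore. A minimal
sketch of that pattern, reusing the same GeoTools calls as deleteStatements()
(the store and feature collection are assumed to come from an existing
DataStore, as in the indexer):

    import java.io.IOException;
    import java.util.HashSet;
    import java.util.Set;

    import org.geotools.data.DataUtilities;
    import org.geotools.data.FeatureStore;
    import org.geotools.factory.CommonFactoryFinder;
    import org.geotools.feature.DefaultFeatureCollection;
    import org.opengis.feature.simple.SimpleFeature;
    import org.opengis.feature.simple.SimpleFeatureType;
    import org.opengis.filter.Filter;
    import org.opengis.filter.FilterFactory;
    import org.opengis.filter.identity.Identifier;

    public class GeoFeatureRemoval {
        // Deletes every feature in the collection from the store in one
        // call, mirroring the Id-filter approach used by deleteStatements.
        public static void removeAll(FeatureStore<SimpleFeatureType, SimpleFeature> store,
                DefaultFeatureCollection features) throws IOException {
            if (features.isEmpty()) {
                return; // nothing to remove
            }
            FilterFactory ff = CommonFactoryFinder.getFilterFactory(null);
            Set<Identifier> ids = new HashSet<Identifier>();
            for (String fid : DataUtilities.fidSet(features)) {
                ids.add(ff.featureId(fid));
            }
            Filter byId = ff.id(ids);
            store.removeFeatures(byId); // single round trip for the batch
        }
    }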

http://git-wip-us.apache.org/repos/asf/incubator-rya/blob/e5e227c1/extras/indexing/src/main/java/mvm/rya/indexing/accumulo/temporal/AccumuloTemporalIndexer.java
----------------------------------------------------------------------
diff --git a/extras/indexing/src/main/java/mvm/rya/indexing/accumulo/temporal/AccumuloTemporalIndexer.java b/extras/indexing/src/main/java/mvm/rya/indexing/accumulo/temporal/AccumuloTemporalIndexer.java
index e2f98b3..095f18f 100644
--- a/extras/indexing/src/main/java/mvm/rya/indexing/accumulo/temporal/AccumuloTemporalIndexer.java
+++ b/extras/indexing/src/main/java/mvm/rya/indexing/accumulo/temporal/AccumuloTemporalIndexer.java
@@ -20,8 +20,6 @@ package mvm.rya.indexing.accumulo.temporal;
  */
 
 
-import info.aduna.iteration.CloseableIteration;
-
 import java.io.IOException;
 import java.nio.charset.CharacterCodingException;
 import java.util.Collection;
@@ -36,21 +34,6 @@ import java.util.regex.Pattern;
 
 import javax.xml.datatype.XMLGregorianCalendar;
 
-import mvm.rya.accumulo.AccumuloRdfConfiguration;
-import mvm.rya.accumulo.experimental.AbstractAccumuloIndexer;
-import mvm.rya.accumulo.experimental.AccumuloIndexer;
-import mvm.rya.api.RdfCloudTripleStoreConfiguration;
-import mvm.rya.api.domain.RyaStatement;
-import mvm.rya.api.domain.RyaURI;
-import mvm.rya.api.resolver.RyaToRdfConversions;
-import mvm.rya.indexing.KeyParts;
-import mvm.rya.indexing.StatementContraints;
-import mvm.rya.indexing.TemporalIndexer;
-import mvm.rya.indexing.TemporalInstant;
-import mvm.rya.indexing.TemporalInterval;
-import mvm.rya.indexing.accumulo.ConfigUtils;
-import mvm.rya.indexing.accumulo.StatementSerializer;
-
 import org.apache.accumulo.core.client.AccumuloException;
 import org.apache.accumulo.core.client.AccumuloSecurityException;
 import org.apache.accumulo.core.client.BatchScanner;
@@ -77,13 +60,24 @@ import org.openrdf.model.URI;
 import org.openrdf.query.QueryEvaluationException;
 
 import cern.colt.Arrays;
+import info.aduna.iteration.CloseableIteration;
+import mvm.rya.accumulo.experimental.AbstractAccumuloIndexer;
+import mvm.rya.api.domain.RyaStatement;
+import mvm.rya.api.resolver.RyaToRdfConversions;
+import mvm.rya.indexing.KeyParts;
+import mvm.rya.indexing.StatementContraints;
+import mvm.rya.indexing.TemporalIndexer;
+import mvm.rya.indexing.TemporalInstant;
+import mvm.rya.indexing.TemporalInterval;
+import mvm.rya.indexing.accumulo.ConfigUtils;
+import mvm.rya.indexing.accumulo.StatementSerializer;
 
 public class AccumuloTemporalIndexer extends AbstractAccumuloIndexer implements TemporalIndexer {
 
 	private static final Logger logger = Logger.getLogger(AccumuloTemporalIndexer.class);
 
     private static final String CF_INTERVAL = "interval";
- 
+
 
 
     // Delimiter used in the interval stored in the triple's object literal.
@@ -99,11 +93,11 @@ public class AccumuloTemporalIndexer extends AbstractAccumuloIndexer implements
 
     private Set<URI> validPredicates;
     private String temporalIndexTableName;
-    
+
     private boolean isInit = false;
 
-    
-    
+
+
     private void init() throws AccumuloException, AccumuloSecurityException, TableNotFoundException,
             TableExistsException {
         temporalIndexTableName = ConfigUtils.getTemporalTableName(conf);
@@ -116,7 +110,7 @@ public class AccumuloTemporalIndexer extends AbstractAccumuloIndexer implements
 
         validPredicates = ConfigUtils.getTemporalPredicates(conf);
     }
-    
+
     //initialization occurs in setConf because index is created using reflection
     @Override
     public void setConf(Configuration conf) {
@@ -140,13 +134,13 @@ public class AccumuloTemporalIndexer extends AbstractAccumuloIndexer implements
             }
         }
     }
-    
+
     @Override
     public Configuration getConf() {
         return this.conf;
     }
-    
-    
+
+
     /**
      * Store a statement in the index if it meets the criterion: Object should be
      * a literal and one of the validPredicates from the configuration.
@@ -180,18 +174,18 @@ public class AccumuloTemporalIndexer extends AbstractAccumuloIndexer implements
             throw new IOException("While adding interval/instant for statement =" + statement, e);
         }
     }
-    
-    
+
+
     @Override
     public void storeStatement(RyaStatement statement) throws IllegalArgumentException, IOException {
         storeStatement(RyaToRdfConversions.convertStatement(statement));
     }
-    
-    
+
+
 
     /**
      * parse the literal dates from the object of a statement.
-     * 
+     *
      * @param statement
      * @param outputDateTimes
      */
@@ -209,7 +203,7 @@ public class AccumuloTemporalIndexer extends AbstractAccumuloIndexer implements
 	        	outputDateTimes[1] = new DateTime(matcher.group(2));
 	        	return;
     		} catch (java.lang.IllegalArgumentException e) {
-                logThis = e.getMessage() + " " + logThis;	
+                logThis = e.getMessage() + " " + logThis;
                 outputDateTimes[0]=null;
                 outputDateTimes[1]=null;
     		}
@@ -221,7 +215,7 @@ public class AccumuloTemporalIndexer extends AbstractAccumuloIndexer implements
 			outputDateTimes[1] = null;
 			return;
 		} catch (java.lang.IllegalArgumentException e) {
-            logThis = e.getMessage();			
+            logThis = e.getMessage();
 		}
 		// Try again using Joda Time DateTime.parse()
 		try {
@@ -230,13 +224,58 @@ public class AccumuloTemporalIndexer extends AbstractAccumuloIndexer implements
 			//System.out.println(">>>>>>>Joda parsed: "+literalValue.stringValue());
 			return;
 		} catch (java.lang.IllegalArgumentException e) {
-            logThis = e.getMessage() + " " + logThis;			
+            logThis = e.getMessage() + " " + logThis;
 		}
-        logger.warn("TemporalIndexer is unable to parse the date/time from statement="  + statement.toString() + " " +logThis);			
+        logger.warn("TemporalIndexer is unable to parse the date/time from statement="  + statement.toString() + " " +logThis);
 		return;
     }
 
     /**
+     * Remove an interval index
+     * TODO: integrate into KeyParts (or eliminate)
+     * @param writer
+     * @param interval
+     * @param statement
+     * @throws MutationsRejectedException
+     */
+    public void removeInterval(BatchWriter writer, TemporalInterval interval, Statement statement) throws MutationsRejectedException {
+        Text cf = new Text(StatementSerializer.writeContext(statement));
+        Text cqBegin = new Text(KeyParts.CQ_BEGIN);
+        Text cqEnd = new Text(KeyParts.CQ_END);
+
+        // Start Begin index
+        Text keyText = new Text(interval.getAsKeyBeginning());
+        KeyParts.appendUniqueness(statement, keyText);
+        Mutation m = new Mutation(keyText);
+        m.putDelete(cf, cqBegin);
+        writer.addMutation(m);
+
+        // now the end index:
+        keyText = new Text(interval.getAsKeyEnd());
+        KeyParts.appendUniqueness(statement, keyText);
+        m = new Mutation(keyText);
+        m.putDelete(cf, cqEnd);
+        writer.addMutation(m);
+    }
+
+    /**
+     * Remove an instant index
+     *
+     * @param writer
+     * @param instant
+     * @param statement
+     * @throws MutationsRejectedException
+     */
+    public void removeInstant(BatchWriter writer, TemporalInstant instant, Statement statement) throws MutationsRejectedException {
+        KeyParts keyParts = new KeyParts(statement, instant);
+        for (KeyParts  k: keyParts) {
+            Mutation m = new Mutation(k.getStoreKey());
+            m.putDelete(k.cf, k.cq);
+            writer.addMutation(m);
+        }
+    }
+
+    /**
      * Index a new interval
      * TODO: integrate into KeyParts (or eliminate)
      * @param writer
@@ -250,9 +289,9 @@ public class AccumuloTemporalIndexer extends AbstractAccumuloIndexer implements
         Text cf = new Text(StatementSerializer.writeContext(statement));
         Text cqBegin = new Text(KeyParts.CQ_BEGIN);
         Text cqEnd = new Text(KeyParts.CQ_END);
-        
+
         // Start Begin index
-        Text keyText =new Text(interval.getAsKeyBeginning());
+        Text keyText = new Text(interval.getAsKeyBeginning());
         KeyParts.appendUniqueness(statement, keyText);
         Mutation m = new Mutation(keyText);
         m.put(cf, cqBegin, statementValue);
@@ -270,29 +309,29 @@ public class AccumuloTemporalIndexer extends AbstractAccumuloIndexer implements
 
 
     /**
-     * Index a new interval
-     * Make indexes that handle this expression:  
-     *     hash( s? p? ) ?o 
+     * Index a new instant
+     * Make indexes that handle this expression:
+     *     hash( s? p? ) ?o
      *         == o union hash(s)o union hash(p)o  union hash(sp)o
-     * 
+     *
      * @param writer
      * @param cv
      * @param instant
      * @throws MutationsRejectedException
      */
     public void addInstant(BatchWriter writer, TemporalInstant instant, Statement statement) throws MutationsRejectedException {
-    	KeyParts keyParts = new KeyParts(statement, instant);
-    	for (KeyParts k: keyParts) { 
-			Mutation m = new Mutation(k.getStoreKey());
-			m.put(k.cf, k.cq,k.getValue());
-			writer.addMutation(m);
-    	}
+        KeyParts keyParts = new KeyParts(statement, instant);
+        for (KeyParts k : keyParts) {
+            Mutation m = new Mutation(k.getStoreKey());
+            m.put(k.cf, k.cq,k.getValue());
+            writer.addMutation(m);
+        }
     }
 
 
     /**
      * creates a scanner and handles all the throwables and nulls.
-     * 
+     *
      * @param scanner
      * @return
      * @throws IOException
@@ -364,10 +403,10 @@ public class AccumuloTemporalIndexer extends AbstractAccumuloIndexer implements
 			@Override
 			public Range getRange(KeyParts keyParts) {
 		    	Text start= null;
-				if (keyParts.constraintPrefix != null )  // Yes, has constraints 
+				if (keyParts.constraintPrefix != null )  // Yes, has constraints
 					start = keyParts.constraintPrefix;   // <-- start specific logic
 				else
-					start = new Text(KeyParts.HASH_PREFIX_FOLLOWING);  
+					start = new Text(KeyParts.HASH_PREFIX_FOLLOWING);
 				Text endAt = keyParts.getQueryKey();			       // <-- end specific logic
 				//System.out.println("Scanning queryInstantBeforeInstant: from:" + KeyParts.toHumanString(start) + " up to:" + KeyParts.toHumanString(endAt));
 				return new Range(start, true, endAt, false);
@@ -376,7 +415,7 @@ public class AccumuloTemporalIndexer extends AbstractAccumuloIndexer implements
 		ScannerBase scanner = query.doQuery(queryInstant, constraints);
 		return getContextIteratorWrapper(scanner, constraints.getContext());
     }
-    
+
     /**
      * get statements where the date object is after the given queryInstant.
      */
@@ -464,7 +503,7 @@ public class AccumuloTemporalIndexer extends AbstractAccumuloIndexer implements
 
     /**
      * Get intervals stored in the repository matching the given interval.
-     * Indexing Intervals  will probably change or be removed.  
+     * Indexing Intervals  will probably change or be removed.
      * Currently predicate and subject constraints are filtered on the client.
      */
     @Override
@@ -492,7 +531,7 @@ public class AccumuloTemporalIndexer extends AbstractAccumuloIndexer implements
 	/**
 	 * find intervals stored in the repository before the given Interval. Find interval endings that are
 	 * before the given beginning.
-     * Indexing Intervals  will probably change or be removed.  
+     * Indexing Intervals  will probably change or be removed.
      * Currently predicate and subject constraints are filtered on the client.
 	 */
 	@Override
@@ -515,20 +554,20 @@ public class AccumuloTemporalIndexer extends AbstractAccumuloIndexer implements
 	/**
 	 * Interval after given interval.  Find intervals that begin after the endings of the given interval.
 	 * Use the special following prefix mechanism to avoid matching the beginning date.
-     * Indexing Intervals  will probably change or be removed.  
+     * Indexing Intervals  will probably change or be removed.
      * Currently predicate and subject and context constraints are filtered on the client.
 	 */
 	@Override
 	public CloseableIteration<Statement, QueryEvaluationException> queryIntervalAfter(
 	        TemporalInterval queryInterval, StatementContraints constraints)
 	        throws QueryEvaluationException {
-	
+
 	    Scanner scanner = getScanner();
 	    if (scanner != null) {
 	        // get rows where the start date is greater than the queryInterval.getEnd()
 	        Range range = new Range(new Key(Range.followingPrefix(new Text(queryInterval.getHasEnd().getAsKeyBytes()))), false, null, true);
 	        scanner.setRange(range);
-	        
+
 	        if (constraints.hasContext())
 	        	scanner.fetchColumn(new Text(constraints.getContext().toString()), new Text(KeyParts.CQ_BEGIN));
 	        else
@@ -540,14 +579,14 @@ public class AccumuloTemporalIndexer extends AbstractAccumuloIndexer implements
 	// --
 	// -- END of Query functions.  Next up, general stuff used by the queries above.
 	// --
-	
+
 	/**
 	 * Allows passing range specific logic into doQuery.
 	 * Each query function implements an anonymous instance of this and calls its doQuery().
 	 */
 	abstract class Query {
 		abstract protected Range getRange(KeyParts keyParts);
-	
+
 		public ScannerBase doQuery(TemporalInstant queryInstant, StatementContraints constraints) throws QueryEvaluationException {
 			// key is constraintPrefix + time, or just time.
 			// Any constraints are handled here; if the constraints are empty, the
@@ -558,7 +597,7 @@ public class AccumuloTemporalIndexer extends AbstractAccumuloIndexer implements
 				scanner = getBatchScanner();
 			else
 				scanner = getScanner();
-	
+
 			Collection<Range> ranges = new HashSet<Range>();
 			KeyParts lastKeyParts = null;
 			Range range = null;
@@ -579,7 +618,7 @@ public class AccumuloTemporalIndexer extends AbstractAccumuloIndexer implements
 
 	/**
      * An iteration wrapper for a loaded scanner that is returned for each query above.
-     * 
+     *
      * @param scanner
      *            the results to iterate, then close.
      * @return an anonymous object that will iterate the resulting statements from a given scanner.
@@ -623,14 +662,14 @@ public class AccumuloTemporalIndexer extends AbstractAccumuloIndexer implements
         };
     }
 
-    
+
     /**
      * An iteration wrapper for a loaded scanner that is returned for partially supported interval queries above.
-     * 
+     *
      * @param scanner  the results to iterate, then close.
      * @param constraints  limit statements returned by next() to those matching the constraints.
      * @return an anonymous object that will iterate the resulting statements from a given scanner.
-     * @throws QueryEvaluationException 
+     * @throws QueryEvaluationException
      */
 	private static CloseableIteration<Statement, QueryEvaluationException> getConstrainedIteratorWrapper(final Scanner scanner, final StatementContraints constraints) {
 		if (!constraints.hasContext() && !constraints.hasSubject() && !constraints.hasPredicates())
@@ -645,11 +684,11 @@ public class AccumuloTemporalIndexer extends AbstractAccumuloIndexer implements
     /**
      * An iteration wrapper for a loaded scanner that is returned for queries above.
      * Currently, this temporal index supports contexts only on the client, using this filter.
-     * 
+     *
      * @param scanner  the results to iterate, then close.
      * @param constraints  limit statements returned by next() to those matching the constraints.
      * @return an anonymous object that will iterate the resulting statements from a given scanner.
-     * @throws QueryEvaluationException 
+     * @throws QueryEvaluationException
      */
 	private static CloseableIteration<Statement, QueryEvaluationException> getContextIteratorWrapper(final ScannerBase scanner, final Resource context) {
 		if (context==null)
@@ -671,7 +710,7 @@ public class AccumuloTemporalIndexer extends AbstractAccumuloIndexer implements
         	private boolean isInitialized = false;
         	final private Iterator<Entry<Key, Value>> i;
         	final private ScannerBase scanner;
-        	
+
         	ConstrainedIteratorWrapper(ScannerBase scanner) {
         		this.scanner = scanner;
         		i=scanner.iterator();
@@ -698,7 +737,7 @@ public class AccumuloTemporalIndexer extends AbstractAccumuloIndexer implements
             }
 
 			/**
-			 * Gets the next statement meeting constraints and stores in nextStatement.  
+			 * Gets the next statement meeting constraints and stores in nextStatement.
 			 * Sets null when all done, or on exception.
 			 * @throws QueryEvaluationException
 			 */
@@ -727,7 +766,7 @@ public class AccumuloTemporalIndexer extends AbstractAccumuloIndexer implements
             	}
 			}
 			public abstract boolean allowedBy(Statement s);
-			
+
             @Override
             public void remove() {
                 throw new UnsupportedOperationException("Remove not implemented");
@@ -751,15 +790,15 @@ public class AccumuloTemporalIndexer extends AbstractAccumuloIndexer implements
         	{System.out.println("Constrain subject: "+constraints.getSubject()+" != " + statement.getSubject()); return false;}
     		//return false;
 
-    	if (! allowedByContext(statement, constraints.getContext())) 
+    	if (! allowedByContext(statement, constraints.getContext()))
 		    return false;
     	    //{System.out.println("Constrain context: "+constraints.getContext()+" != " + statement.getContext()); return false;}
-	
+
     	if (constraints.hasPredicates() && ! constraints.getPredicates().contains(statement.getPredicate()))
     		return false;
     	    //{System.out.println("Constrain predicate: "+constraints.getPredicates()+" != " + statement.getPredicate()); return false;}
-    	
-    	System.out.println("allow statement: "+ statement.toString()); 
+
+    	System.out.println("allow statement: "+ statement.toString());
 		return true;
 	}
 
@@ -812,13 +851,42 @@ public class AccumuloTemporalIndexer extends AbstractAccumuloIndexer implements
             throw new IOException(msg, e);
         }
     }
-    
-    
+
+
 
     @Override
     public String getTableName() {
        return ConfigUtils.getTemporalTableName(conf);
     }
 
-    
+    private void deleteStatement(Statement statement) throws IOException, IllegalArgumentException {
+        // if the predicate list is empty, accept all predicates.
+        // Otherwise, make sure the predicate is on the "valid" list
+        boolean isValidPredicate = validPredicates.isEmpty() || validPredicates.contains(statement.getPredicate());
+        if (!isValidPredicate || !(statement.getObject() instanceof Literal))
+            return;
+        DateTime[] indexDateTimes = new DateTime[2]; // 0 begin, 1 end of interval
+        extractDateTime(statement, indexDateTimes);
+        if (indexDateTimes[0] == null) {
+            return;
+        }
+
+        // Remove this as an instant, or interval.
+        try {
+            if (indexDateTimes[1] != null) {
+                TemporalInterval interval = new TemporalInterval(new TemporalInstantRfc3339(indexDateTimes[0]), new TemporalInstantRfc3339(indexDateTimes[1]));
+                removeInterval(temporalIndexBatchWriter, interval, statement);
+            } else {
+                TemporalInstant instant = new TemporalInstantRfc3339(indexDateTimes[0]);
+                removeInstant(temporalIndexBatchWriter, instant, statement);
+            }
+        } catch (MutationsRejectedException e) {
+            throw new IOException("While removing interval/instant for statement = " + statement, e);
+        }
+    }
+
+    @Override
+    public void deleteStatement(RyaStatement statement) throws IllegalArgumentException, IOException {
+        deleteStatement(RyaToRdfConversions.convertStatement(statement));
+    }
 }

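The hunk above adds a delete path to the temporal indexer that mirrors the store path: it extracts the begin/end DateTimes from the statement and removes the corresponding instant or interval keys. As a rough usage sketch, assuming the same setConf/storeStatement/flush lifecycle the free-text indexer tests below exercise (the indexer's package, the URIs, and the timestamp are illustrative assumptions, not code from this commit):

import org.apache.hadoop.conf.Configuration;
import org.openrdf.model.vocabulary.XMLSchema;

import mvm.rya.api.domain.RyaStatement;
import mvm.rya.api.domain.RyaType;
import mvm.rya.api.domain.RyaURI;
// package assumed from the module layout, not shown in this hunk:
import mvm.rya.indexing.accumulo.temporal.AccumuloTemporalIndexer;

public class TemporalDeleteSketch {
    public static void run(Configuration conf) throws Exception {
        // conf is assumed to be a Rya/Accumulo configuration prepared elsewhere
        // (instance, user, table prefix).
        AccumuloTemporalIndexer indexer = new AccumuloTemporalIndexer();
        indexer.setConf(conf);

        RyaStatement stmt = new RyaStatement(
                new RyaURI("foo:event1"),
                new RyaURI("Property:atTime"), // must be on the temporal predicate list, or the list left empty
                new RyaType(XMLSchema.DATETIME, "2015-01-01T01:01:01Z"));

        indexer.storeStatement(stmt);  // writes the instant (or interval) index keys
        indexer.flush();
        indexer.deleteStatement(stmt); // new in this commit: removes those same keys
    }
}
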
http://git-wip-us.apache.org/repos/asf/incubator-rya/blob/e5e227c1/extras/indexing/src/test/java/mvm/rya/indexing/accumulo/freetext/AccumuloFreeTextIndexerTest.java
----------------------------------------------------------------------
diff --git a/extras/indexing/src/test/java/mvm/rya/indexing/accumulo/freetext/AccumuloFreeTextIndexerTest.java b/extras/indexing/src/test/java/mvm/rya/indexing/accumulo/freetext/AccumuloFreeTextIndexerTest.java
index a0a3a03..c6bd9c2 100644
--- a/extras/indexing/src/test/java/mvm/rya/indexing/accumulo/freetext/AccumuloFreeTextIndexerTest.java
+++ b/extras/indexing/src/test/java/mvm/rya/indexing/accumulo/freetext/AccumuloFreeTextIndexerTest.java
@@ -1,5 +1,30 @@
 package mvm.rya.indexing.accumulo.freetext;
 
+import java.util.HashSet;
+import java.util.Map.Entry;
+import java.util.Set;
+
+import org.apache.accumulo.core.client.AccumuloException;
+import org.apache.accumulo.core.client.AccumuloSecurityException;
+import org.apache.accumulo.core.client.Scanner;
+import org.apache.accumulo.core.client.TableExistsException;
+import org.apache.accumulo.core.client.TableNotFoundException;
+import org.apache.accumulo.core.client.admin.TableOperations;
+import org.apache.accumulo.core.data.Key;
+import org.apache.accumulo.core.security.Authorizations;
+import org.apache.hadoop.conf.Configuration;
+import org.junit.Before;
+import org.junit.Test;
+import org.openrdf.model.Statement;
+import org.openrdf.model.URI;
+import org.openrdf.model.Value;
+import org.openrdf.model.ValueFactory;
+import org.openrdf.model.impl.URIImpl;
+import org.openrdf.model.impl.ValueFactoryImpl;
+import org.openrdf.model.vocabulary.RDFS;
+
+import com.google.common.collect.Sets;
+
 /*
  * Licensed to the Apache Software Foundation (ASF) under one
  * or more contributor license agreements.  See the NOTICE file
@@ -22,11 +47,6 @@ package mvm.rya.indexing.accumulo.freetext;
 
 
 import info.aduna.iteration.CloseableIteration;
-
-import java.util.HashSet;
-import java.util.Map.Entry;
-import java.util.Set;
-
 import junit.framework.Assert;
 import mvm.rya.api.domain.RyaStatement;
 import mvm.rya.api.domain.RyaType;
@@ -36,27 +56,6 @@ import mvm.rya.api.resolver.RyaToRdfConversions;
 import mvm.rya.indexing.StatementContraints;
 import mvm.rya.indexing.accumulo.ConfigUtils;
 
-import org.apache.accumulo.core.Constants;
-import org.apache.accumulo.core.client.AccumuloException;
-import org.apache.accumulo.core.client.AccumuloSecurityException;
-import org.apache.accumulo.core.client.Scanner;
-import org.apache.accumulo.core.client.TableExistsException;
-import org.apache.accumulo.core.client.TableNotFoundException;
-import org.apache.accumulo.core.client.admin.TableOperations;
-import org.apache.accumulo.core.data.Key;
-import org.apache.hadoop.conf.Configuration;
-import org.junit.Before;
-import org.junit.Test;
-import org.openrdf.model.Statement;
-import org.openrdf.model.URI;
-import org.openrdf.model.Value;
-import org.openrdf.model.ValueFactory;
-import org.openrdf.model.impl.URIImpl;
-import org.openrdf.model.impl.ValueFactoryImpl;
-import org.openrdf.model.vocabulary.RDFS;
-
-import com.google.common.collect.Sets;
-
 public class AccumuloFreeTextIndexerTest {
     private static final StatementContraints EMPTY_CONSTRAINTS = new StatementContraints();
 
@@ -90,107 +89,153 @@ public class AccumuloFreeTextIndexerTest {
 
     @Test
     public void testSearch() throws Exception {
-        
-        AccumuloFreeTextIndexer f = new AccumuloFreeTextIndexer();
-        f.setConf(conf);
+        try (AccumuloFreeTextIndexer f = new AccumuloFreeTextIndexer()) {
+            f.setConf(conf);
+
+            ValueFactory vf = new ValueFactoryImpl();
+
+            URI subject = new URIImpl("foo:subj");
+            URI predicate = RDFS.LABEL;
+            Value object = vf.createLiteral("this is a new hat");
+
+            URI context = new URIImpl("foo:context");
 
-        ValueFactory vf = new ValueFactoryImpl();
+            Statement statement = vf.createStatement(subject, predicate, object, context);
+            f.storeStatement(RdfToRyaConversions.convertStatement(statement));
+            f.flush();
 
-        URI subject = new URIImpl("foo:subj");
-        URI predicate = RDFS.LABEL;
-        Value object = vf.createLiteral("this is a new hat");
+            printTables(conf);
 
-        URI context = new URIImpl("foo:context");
+            Assert.assertEquals(Sets.newHashSet(), getSet(f.queryText("asdf", EMPTY_CONSTRAINTS)));
 
-        Statement statement = vf.createStatement(subject, predicate, object, context);
-        f.storeStatement(RdfToRyaConversions.convertStatement(statement));
-        f.flush();
+            Assert.assertEquals(Sets.newHashSet(), getSet(f.queryText("this & !is", EMPTY_CONSTRAINTS)));
 
-        printTables(conf);
+            Assert.assertEquals(Sets.newHashSet(statement), getSet(f.queryText("this", EMPTY_CONSTRAINTS)));
+            Assert.assertEquals(Sets.newHashSet(statement), getSet(f.queryText("is", EMPTY_CONSTRAINTS)));
+            Assert.assertEquals(Sets.newHashSet(statement), getSet(f.queryText("a", EMPTY_CONSTRAINTS)));
+            Assert.assertEquals(Sets.newHashSet(statement), getSet(f.queryText("new", EMPTY_CONSTRAINTS)));
+            Assert.assertEquals(Sets.newHashSet(statement), getSet(f.queryText("hat", EMPTY_CONSTRAINTS)));
 
-        Assert.assertEquals(Sets.newHashSet(), getSet(f.queryText("asdf", EMPTY_CONSTRAINTS)));
+            Assert.assertEquals(Sets.newHashSet(statement), getSet(f.queryText("ha*", EMPTY_CONSTRAINTS)));
+            Assert.assertEquals(Sets.newHashSet(statement), getSet(f.queryText("*at", EMPTY_CONSTRAINTS)));
 
-        Assert.assertEquals(Sets.newHashSet(), getSet(f.queryText("this & !is", EMPTY_CONSTRAINTS)));
+            Assert.assertEquals(Sets.newHashSet(statement), getSet(f.queryText("hat & new", EMPTY_CONSTRAINTS)));
 
-        Assert.assertEquals(Sets.newHashSet(statement), getSet(f.queryText("this", EMPTY_CONSTRAINTS)));
-        Assert.assertEquals(Sets.newHashSet(statement), getSet(f.queryText("is", EMPTY_CONSTRAINTS)));
-        Assert.assertEquals(Sets.newHashSet(statement), getSet(f.queryText("a", EMPTY_CONSTRAINTS)));
-        Assert.assertEquals(Sets.newHashSet(statement), getSet(f.queryText("new", EMPTY_CONSTRAINTS)));
-        Assert.assertEquals(Sets.newHashSet(statement), getSet(f.queryText("hat", EMPTY_CONSTRAINTS)));
+            Assert.assertEquals(Sets.newHashSet(statement), getSet(f.queryText("this & hat & new", EMPTY_CONSTRAINTS)));
 
-        Assert.assertEquals(Sets.newHashSet(statement), getSet(f.queryText("ha*", EMPTY_CONSTRAINTS)));
-        Assert.assertEquals(Sets.newHashSet(statement), getSet(f.queryText("*at", EMPTY_CONSTRAINTS)));
+            Assert.assertEquals(Sets.newHashSet(), getSet(f.queryText("bat", EMPTY_CONSTRAINTS)));
+            Assert.assertEquals(Sets.newHashSet(), getSet(f.queryText("this & bat", EMPTY_CONSTRAINTS)));
+        }
+    }
+
+    @Test
+    public void testDelete() throws Exception {
+        try (AccumuloFreeTextIndexer f = new AccumuloFreeTextIndexer()) {
+            f.setConf(conf);
+
+            ValueFactory vf = new ValueFactoryImpl();
+
+            URI subject1 = new URIImpl("foo:subj");
+            URI predicate1 = RDFS.LABEL;
+            Value object1 = vf.createLiteral("this is a new hat");
+
+            URI context1 = new URIImpl("foo:context");
+
+            Statement statement1 = vf.createStatement(subject1, predicate1, object1, context1);
+            f.storeStatement(RdfToRyaConversions.convertStatement(statement1));
+
+            URI subject2 = new URIImpl("foo:subject");
+            URI predicate2 = RDFS.LABEL;
+            Value object2 = vf.createLiteral("Do you like my new hat?");
 
-        Assert.assertEquals(Sets.newHashSet(statement), getSet(f.queryText("hat & new", EMPTY_CONSTRAINTS)));
+            URI context2 = new URIImpl("foo:context");
 
-        Assert.assertEquals(Sets.newHashSet(statement), getSet(f.queryText("this & hat & new", EMPTY_CONSTRAINTS)));
+            Statement statement2 = vf.createStatement(subject2, predicate2, object2, context2);
+            f.storeStatement(RdfToRyaConversions.convertStatement(statement2));
 
-        Assert.assertEquals(Sets.newHashSet(), getSet(f.queryText("bat", EMPTY_CONSTRAINTS)));
-        Assert.assertEquals(Sets.newHashSet(), getSet(f.queryText("this & bat", EMPTY_CONSTRAINTS)));
+            f.flush();
 
-        f.close();
+
+            System.out.println("testDelete: BEFORE DELETE");
+            printTables(conf);
+
+            f.deleteStatement(RdfToRyaConversions.convertStatement(statement1));
+            System.out.println("testDelete: AFTER FIRST DELETION");
+            printTables(conf);
+            Assert.assertEquals(Sets.newHashSet(), getSet(f.queryText("this is a new hat", EMPTY_CONSTRAINTS)));
+            Assert.assertEquals(Sets.newHashSet(statement2), getSet(f.queryText("Do you like my new hat?", EMPTY_CONSTRAINTS)));
+
+            // Check that "new" didn't get deleted from the term table after "this is a new hat"
+            // was deleted since "new" is still in "Do you like my new hat?"
+            Assert.assertEquals(Sets.newHashSet(statement2), getSet(f.queryText("new", EMPTY_CONSTRAINTS)));
+
+            f.deleteStatement(RdfToRyaConversions.convertStatement(statement2));
+            System.out.println("testDelete: AFTER LAST DELETION");
+            printTables(conf);
+
+            System.out.println("testDelete: DONE");
+            Assert.assertEquals(Sets.newHashSet(), getSet(f.queryText("this is a new hat", EMPTY_CONSTRAINTS)));
+            Assert.assertEquals(Sets.newHashSet(), getSet(f.queryText("Do you like my new hat?", EMPTY_CONSTRAINTS)));
+        }
     }
 
     @Test
     public void testRestrictPredicatesSearch() throws Exception {
         conf.setStrings(ConfigUtils.FREETEXT_PREDICATES_LIST, "pred:1,pred:2");
-        
-        AccumuloFreeTextIndexer f = new AccumuloFreeTextIndexer();
-        f.setConf(conf);
 
-        // These should not be stored because they are not in the predicate list
-        f.storeStatement(new RyaStatement(new RyaURI("foo:subj1"), new RyaURI(RDFS.LABEL.toString()), new RyaType("invalid")));
-        f.storeStatement(new RyaStatement(new RyaURI("foo:subj2"), new RyaURI(RDFS.COMMENT.toString()), new RyaType("invalid")));
+        try (AccumuloFreeTextIndexer f = new AccumuloFreeTextIndexer()) {
+            f.setConf(conf);
 
-        RyaURI pred1 = new RyaURI("pred:1");
-        RyaURI pred2 = new RyaURI("pred:2");
+            // These should not be stored because they are not in the predicate list
+            f.storeStatement(new RyaStatement(new RyaURI("foo:subj1"), new RyaURI(RDFS.LABEL.toString()), new RyaType("invalid")));
+            f.storeStatement(new RyaStatement(new RyaURI("foo:subj2"), new RyaURI(RDFS.COMMENT.toString()), new RyaType("invalid")));
 
-        // These should be stored because they are in the predicate list
-        RyaStatement s3 = new RyaStatement(new RyaURI("foo:subj3"), pred1, new RyaType("valid"));
-        RyaStatement s4 = new RyaStatement(new RyaURI("foo:subj4"), pred2, new RyaType("valid"));
-        f.storeStatement(s3);
-        f.storeStatement(s4);
+            RyaURI pred1 = new RyaURI("pred:1");
+            RyaURI pred2 = new RyaURI("pred:2");
 
-        // This should not be stored because the object is not a literal
-        f.storeStatement(new RyaStatement(new RyaURI("foo:subj5"), pred1, new RyaURI("in:valid")));
+            // These should be stored because they are in the predicate list
+            RyaStatement s3 = new RyaStatement(new RyaURI("foo:subj3"), pred1, new RyaType("valid"));
+            RyaStatement s4 = new RyaStatement(new RyaURI("foo:subj4"), pred2, new RyaType("valid"));
+            f.storeStatement(s3);
+            f.storeStatement(s4);
 
-        f.flush();
+            // This should not be stored because the object is not a literal
+            f.storeStatement(new RyaStatement(new RyaURI("foo:subj5"), pred1, new RyaURI("in:valid")));
 
-        printTables(conf);
+            f.flush();
 
-        Assert.assertEquals(Sets.newHashSet(), getSet(f.queryText("invalid", EMPTY_CONSTRAINTS)));
-        Assert.assertEquals(Sets.newHashSet(), getSet(f.queryText("in:valid", EMPTY_CONSTRAINTS)));
+            printTables(conf);
 
-        Set<Statement> actual = getSet(f.queryText("valid", EMPTY_CONSTRAINTS));
-        Assert.assertEquals(2, actual.size());
-        Assert.assertTrue(actual.contains(RyaToRdfConversions.convertStatement(s3)));
-        Assert.assertTrue(actual.contains(RyaToRdfConversions.convertStatement(s4)));
+            Assert.assertEquals(Sets.newHashSet(), getSet(f.queryText("invalid", EMPTY_CONSTRAINTS)));
+            Assert.assertEquals(Sets.newHashSet(), getSet(f.queryText("in:valid", EMPTY_CONSTRAINTS)));
 
-        f.close();
+            Set<Statement> actual = getSet(f.queryText("valid", EMPTY_CONSTRAINTS));
+            Assert.assertEquals(2, actual.size());
+            Assert.assertTrue(actual.contains(RyaToRdfConversions.convertStatement(s3)));
+            Assert.assertTrue(actual.contains(RyaToRdfConversions.convertStatement(s4)));
+        }
     }
 
     @Test
     public void testContextSearch() throws Exception {
-        
-        AccumuloFreeTextIndexer f = new AccumuloFreeTextIndexer();
-        f.setConf(conf);
-
-        ValueFactory vf = new ValueFactoryImpl();
-        URI subject = new URIImpl("foo:subj");
-        URI predicate = new URIImpl(RDFS.COMMENT.toString());
-        Value object = vf.createLiteral("this is a new hat");
-        URI context = new URIImpl("foo:context");
-
-        Statement statement = vf.createStatement(subject, predicate, object, context);
-        f.storeStatement(RdfToRyaConversions.convertStatement(statement));
-        f.flush();
-
-        Assert.assertEquals(Sets.newHashSet(statement), getSet(f.queryText("hat", EMPTY_CONSTRAINTS)));
-        Assert.assertEquals(Sets.newHashSet(statement), getSet(f.queryText("hat", new StatementContraints().setContext(context))));
-        Assert.assertEquals(Sets.newHashSet(),
-                getSet(f.queryText("hat", new StatementContraints().setContext(vf.createURI("foo:context2")))));
-
-        f.close();
+        try (AccumuloFreeTextIndexer f = new AccumuloFreeTextIndexer()) {
+            f.setConf(conf);
+
+            ValueFactory vf = new ValueFactoryImpl();
+            URI subject = new URIImpl("foo:subj");
+            URI predicate = new URIImpl(RDFS.COMMENT.toString());
+            Value object = vf.createLiteral("this is a new hat");
+            URI context = new URIImpl("foo:context");
+
+            Statement statement = vf.createStatement(subject, predicate, object, context);
+            f.storeStatement(RdfToRyaConversions.convertStatement(statement));
+            f.flush();
+
+            Assert.assertEquals(Sets.newHashSet(statement), getSet(f.queryText("hat", EMPTY_CONSTRAINTS)));
+            Assert.assertEquals(Sets.newHashSet(statement), getSet(f.queryText("hat", new StatementContraints().setContext(context))));
+            Assert.assertEquals(Sets.newHashSet(),
+                    getSet(f.queryText("hat", new StatementContraints().setContext(vf.createURI("foo:context2")))));
+        }
     }
 
     public static void printTables(Configuration conf) throws AccumuloException, AccumuloSecurityException, TableNotFoundException {
@@ -201,7 +246,7 @@ public class AccumuloFreeTextIndexerTest {
         for (String table : tops.list()) {
             System.out.println("Reading : " + table);
             System.out.format(FORMAT, "--Row--", "--ColumnFamily--", "--ColumnQualifier--", "--Value--");
-            Scanner s = ConfigUtils.getConnector(conf).createScanner(table, Constants.NO_AUTHS);
+            Scanner s = ConfigUtils.getConnector(conf).createScanner(table, Authorizations.EMPTY);
             for (Entry<Key, org.apache.accumulo.core.data.Value> entry : s) {
                 Key k = entry.getKey();
                 System.out.format(FORMAT, k.getRow(), k.getColumnFamily(), k.getColumnQualifier(), entry.getValue());


[31/56] [abbrv] incubator-rya git commit: RYA-7 POM and License Clean-up for Apache Move

Posted by mi...@apache.org.
http://git-wip-us.apache.org/repos/asf/incubator-rya/blob/80faf06d/extras/indexingSailExample/src/main/java/RyaDirectExample.java
----------------------------------------------------------------------
diff --git a/extras/indexingSailExample/src/main/java/RyaDirectExample.java b/extras/indexingSailExample/src/main/java/RyaDirectExample.java
deleted file mode 100644
index 947164c..0000000
--- a/extras/indexingSailExample/src/main/java/RyaDirectExample.java
+++ /dev/null
@@ -1,681 +0,0 @@
-
-import java.util.List;
-
-import mvm.rya.accumulo.AccumuloRdfConfiguration;
-import mvm.rya.api.RdfCloudTripleStoreConfiguration;
-import mvm.rya.indexing.RyaSailFactory;
-import mvm.rya.indexing.accumulo.ConfigUtils;
-import mvm.rya.indexing.accumulo.geo.GeoConstants;
-import mvm.rya.indexing.external.tupleSet.AccumuloIndexSet;
-
-import org.apache.accumulo.core.client.AccumuloException;
-import org.apache.accumulo.core.client.AccumuloSecurityException;
-import org.apache.accumulo.core.client.Connector;
-import org.apache.accumulo.core.client.MutationsRejectedException;
-import org.apache.accumulo.core.client.TableExistsException;
-import org.apache.accumulo.core.client.TableNotFoundException;
-import org.apache.accumulo.core.client.mock.MockInstance;
-import org.apache.accumulo.core.client.security.tokens.PasswordToken;
-import org.apache.commons.lang.Validate;
-import org.apache.hadoop.conf.Configuration;
-import org.apache.log4j.Logger;
-import org.openrdf.model.URI;
-import org.openrdf.model.ValueFactory;
-import org.openrdf.model.impl.LiteralImpl;
-import org.openrdf.model.impl.URIImpl;
-import org.openrdf.model.vocabulary.RDF;
-import org.openrdf.model.vocabulary.RDFS;
-import org.openrdf.query.BindingSet;
-import org.openrdf.query.MalformedQueryException;
-import org.openrdf.query.QueryEvaluationException;
-import org.openrdf.query.QueryLanguage;
-import org.openrdf.query.QueryResultHandlerException;
-import org.openrdf.query.TupleQuery;
-import org.openrdf.query.TupleQueryResultHandler;
-import org.openrdf.query.TupleQueryResultHandlerException;
-import org.openrdf.query.Update;
-import org.openrdf.query.UpdateExecutionException;
-import org.openrdf.repository.RepositoryException;
-import org.openrdf.repository.sail.SailRepository;
-import org.openrdf.repository.sail.SailRepositoryConnection;
-import org.openrdf.sail.Sail;
-import org.openrdf.sail.SailException;
-
-public class RyaDirectExample {
-    private static final Logger log = Logger.getLogger(RyaDirectExample.class);
-
-    //
-    // Connection configuration parameters
-    //
-
-    private static final boolean USE_MOCK_INSTANCE = true;
-    private static final boolean PRINT_QUERIES = true;
-    private static final String INSTANCE = "instance";
-    private static final String RYA_TABLE_PREFIX = "x_test_triplestore_";
-    private static final String AUTHS = "";
-    
-    
-    
-    public static void main(String[] args) throws Exception {
-        Configuration conf = getConf();
-        conf.setBoolean(ConfigUtils.DISPLAY_QUERY_PLAN, PRINT_QUERIES);
-        
-        log.info("Creating the tables as root.");
-//        createTables(addRootConf(conf), conf);
-
-        SailRepository repository = null;
-        SailRepositoryConnection conn = null;
-      
-        try {
-            log.info("Connecting to Indexing Sail Repository.");
-            
-            Sail extSail = RyaSailFactory.getInstance(conf);
-            repository = new SailRepository(extSail);
-            repository.initialize();
-            conn = repository.getConnection();
-            
-            createPCJ(conn);
-
-            long start = System.currentTimeMillis();
-            log.info("Running SPARQL Example: Add and Delete");
-            testAddAndDelete(conn);
-            log.info("Running SAIL/SPARQL Example: PCJ Search");
-            testPCJSearch(conn);
-            log.info("Running SAIL/SPARQL Example: Add and Temporal Search");
-            testAddAndTemporalSearchWithPCJ(conn);
-            log.info("Running SAIL/SPARQL Example: Add and Free Text Search with PCJ");
-            testAddAndFreeTextSearchWithPCJ(conn);
-            log.info("Running SPARQL Example: Add Point and Geo Search with PCJ");
-            testAddPointAndWithinSearchWithPCJ(conn);
-            log.info("Running SPARQL Example: Temporal, Freetext, and Geo Search");
-            testTemporalFreeGeoSearch(conn);
-            log.info("Running SPARQL Example: Geo, Freetext, and PCJ Search");
-            testGeoFreetextWithPCJSearch(conn);
-
-            log.info("TIME: " + (System.currentTimeMillis() - start) / 1000.);
-        } finally {
-            log.info("Shutting down");
-            closeQuietly(conn);
-            closeQuietly(repository);
-        }
-    }
-
-    private static void closeQuietly(SailRepository repository) {
-        if (repository != null) {
-            try {
-                repository.shutDown();
-            } catch (RepositoryException e) {
-                // quietly absorb this exception
-            }
-        }
-    }
-
-    private static void closeQuietly(SailRepositoryConnection conn) {
-        if (conn != null) {
-            try {
-                conn.close();
-            } catch (RepositoryException e) {
-                // quietly absorb this exception
-            }
-        }
-    }
-
-    private static Configuration getConf() {
-
-        AccumuloRdfConfiguration conf = new AccumuloRdfConfiguration();
-
-        conf.setBoolean(ConfigUtils.USE_MOCK_INSTANCE, USE_MOCK_INSTANCE);
-        conf.set(ConfigUtils.USE_PCJ, "true");
-        conf.set(ConfigUtils.USE_GEO, "true");
-        conf.set(ConfigUtils.USE_FREETEXT, "true");
-        conf.set(ConfigUtils.USE_TEMPORAL, "true");
-        conf.set(RdfCloudTripleStoreConfiguration.CONF_TBL_PREFIX, RYA_TABLE_PREFIX);
-        conf.set(ConfigUtils.CLOUDBASE_USER, "root");
-        conf.set(ConfigUtils.CLOUDBASE_PASSWORD, "");
-        conf.set(ConfigUtils.CLOUDBASE_INSTANCE, INSTANCE);
-        conf.setInt(ConfigUtils.NUM_PARTITIONS, 3);
-        conf.set(ConfigUtils.CLOUDBASE_AUTHS, AUTHS);
-
-        // only geo index statements with geo:asWKT predicates
-        conf.set(ConfigUtils.GEO_PREDICATES_LIST, GeoConstants.GEO_AS_WKT.stringValue());
-        return conf;
-    }
-
-    public static void testAddAndDelete(SailRepositoryConnection conn) throws MalformedQueryException,
-            RepositoryException, UpdateExecutionException, QueryEvaluationException, TupleQueryResultHandlerException,
-            AccumuloException, AccumuloSecurityException, TableNotFoundException {
-
-        // Add data
-        String query = "INSERT DATA\n"//
-                + "{ GRAPH <http://updated/test> {\n"//
-                + "  <http://acme.com/people/Mike> " //
-                + "       <http://acme.com/actions/likes> \"A new book\" ;\n"//
-                + "       <http://acme.com/actions/likes> \"Avocados\" .\n" + "} }";
-
-        log.info("Performing Query");
-
-        Update update = conn.prepareUpdate(QueryLanguage.SPARQL, query);
-        update.execute();
-
-        query = "select ?p ?o { GRAPH <http://updated/test> {<http://acme.com/people/Mike> ?p ?o . }}";
-        CountingResultHandler resultHandler = new CountingResultHandler();
-        TupleQuery tupleQuery = conn.prepareTupleQuery(QueryLanguage.SPARQL, query);
-        tupleQuery.evaluate(resultHandler);
-        log.info("Result count : " + resultHandler.getCount());
-
-        Validate.isTrue(resultHandler.getCount() == 2);
-        resultHandler.resetCount();
-
-        // Delete Data
-        query = "DELETE DATA\n" //
-                + "{ GRAPH <http://updated/test> {\n"
-                + "  <http://acme.com/people/Mike> <http://acme.com/actions/likes> \"A new book\" ;\n"
-                + "   <http://acme.com/actions/likes> \"Avocados\" .\n" + "}}";
-
-        update = conn.prepareUpdate(QueryLanguage.SPARQL, query);
-        update.execute();
-
-        query = "select ?p ?o { GRAPH <http://updated/test> {<http://acme.com/people/Mike> ?p ?o . }}";
-        tupleQuery = conn.prepareTupleQuery(QueryLanguage.SPARQL, query);
-        tupleQuery.evaluate(resultHandler);
-        log.info("Result count : " + resultHandler.getCount());
-
-        Validate.isTrue(resultHandler.getCount() == 0);
-    }
-    
-    
-    private static void testPCJSearch(SailRepositoryConnection conn) throws Exception {
-        
-        String queryString;
-        TupleQuery tupleQuery;
-        CountingResultHandler tupleHandler;
-
-     // ///////////// search for bob
-        queryString = "SELECT ?e ?c ?l ?o " //
-                + "{" //
-                + "  ?e a ?c . "//
-                + "  ?e <http://www.w3.org/2000/01/rdf-schema#label> ?l . "//
-                + "  ?e <uri:talksTo> ?o . "//
-                + "}";//
-
-        tupleQuery = conn.prepareTupleQuery(QueryLanguage.SPARQL, queryString);
-        tupleHandler = new CountingResultHandler();
-        tupleQuery.evaluate(tupleHandler);
-        log.info("Result count : " + tupleHandler.getCount());
-        Validate.isTrue(tupleHandler.getCount() == 1);
-           
-     // ///////////// search for bob
-        queryString = "PREFIX fts: <http://rdf.useekm.com/fts#>  "//
-                + "SELECT ?e ?c ?l ?o " //
-                + "{" //
-                + "  ?c a ?e . "//
-                + "  ?e <http://www.w3.org/2000/01/rdf-schema#label> ?l . "//
-                + "  ?e <uri:talksTo> ?o . "//
-                + "}";//
-
-        tupleQuery = conn.prepareTupleQuery(QueryLanguage.SPARQL, queryString);
-        tupleHandler = new CountingResultHandler();
-        tupleQuery.evaluate(tupleHandler);
-        log.info("Result count : " + tupleHandler.getCount());
-        Validate.isTrue(tupleHandler.getCount() == 2);    
-        
-    }
-    
-
-    
-    
-    private static void testAddAndTemporalSearchWithPCJ(SailRepositoryConnection conn) throws Exception {
-
-        // create some resources and literals to make statements out of
-
-        String sparqlInsert = "PREFIX time: <http://www.w3.org/2006/time#>\n"
-                + "INSERT DATA {\n" //
-                + "_:eventz       a       time:Instant ;\n"
-                + "     time:inXSDDateTime '2001-01-01T01:01:01-08:00' ;\n" //  one second
-                + "     time:inXSDDateTime '2001-01-01T04:01:02.000-05:00'^^<http://www.w3.org/2001/XMLSchema#dateTime> ;\n" //   2 seconds
-                + "     time:inXSDDateTime \"2001-01-01T01:01:03-08:00\" ;\n" //   3 seconds
-                + "     time:inXSDDateTime '2001-01-01T01:01:04-08:00' ;\n" //   4 seconds
-                + "     time:inXSDDateTime '2001-01-01T09:01:05Z' ;\n"   
-                + "     time:inXSDDateTime '2006-01-01' ;\n" 
-                + "     time:inXSDDateTime '2007-01-01' ;\n" 
-                + "     time:inXSDDateTime '2008-01-01' ; .\n"
-                + "}";
-
-        Update update = conn.prepareUpdate(QueryLanguage.SPARQL, sparqlInsert);
-        update.execute();
-
-        // Find all stored dates.
-        String queryString = "PREFIX time: <http://www.w3.org/2006/time#> \n"//
-                + "PREFIX tempo: <tag:rya-rdf.org,2015:temporal#> \n"//
-                + "SELECT ?event ?time \n" //
-                + "WHERE { \n"
-                + "  ?event time:inXSDDateTime ?time . \n"//
-                + "  FILTER(tempo:after(?time, '2001-01-01T01:01:03-08:00') ) \n"// after 3 seconds
-                + "}";//
-       
-        
-
-        TupleQuery tupleQuery = conn.prepareTupleQuery(QueryLanguage.SPARQL, queryString);
-        CountingResultHandler tupleHandler = new CountingResultHandler();
-        tupleQuery.evaluate(tupleHandler);
-        log.info("Result count : " + tupleHandler.getCount());
-        Validate.isTrue(tupleHandler.getCount() == 5);
-        
-        // Find all stored dates.
-        queryString = "PREFIX time: <http://www.w3.org/2006/time#> \n"//
-                + "PREFIX tempo: <tag:rya-rdf.org,2015:temporal#> \n"//
-                + "SELECT ?event ?time \n" //
-                + "WHERE { \n"
-                + "  ?event time:inXSDDateTime ?time . \n"//
-                + "  ?event a  time:Instant . \n"//
-                + "  FILTER(tempo:after(?time, '2001-01-01T01:01:03-08:00') ) \n"// after 3 seconds
-                + "}";//
-
-
-
-        tupleQuery = conn.prepareTupleQuery(QueryLanguage.SPARQL, queryString);
-        tupleHandler = new CountingResultHandler();
-        tupleQuery.evaluate(tupleHandler);
-        log.info("Result count : " + tupleHandler.getCount());
-        Validate.isTrue(tupleHandler.getCount() == 5);
-
-
-        // Find all stored dates.
-        queryString = "PREFIX time: <http://www.w3.org/2006/time#> \n"//
-                + "PREFIX tempo: <tag:rya-rdf.org,2015:temporal#> \n"//
-                + "SELECT ?event ?time ?e ?c ?l ?o \n" //
-                + "WHERE { \n"
-                + "  ?e a ?c . \n"//
-                + "  ?e <http://www.w3.org/2000/01/rdf-schema#label> ?l . \n"//
-                + "  ?e <uri:talksTo> ?o . \n"//
-                + "  ?event a  time:Instant . \n"//
-                + "  ?event time:inXSDDateTime ?time . \n"//
-                + "  FILTER(tempo:after(?time, '2001-01-01T01:01:03-08:00') ) \n"// after 3 seconds
-                + "}";//
-
-        tupleQuery = conn.prepareTupleQuery(QueryLanguage.SPARQL, queryString);
-        tupleHandler = new CountingResultHandler();
-        tupleQuery.evaluate(tupleHandler);
-        log.info("Result count : " + tupleHandler.getCount());
-        Validate.isTrue(tupleHandler.getCount() == 5);
-    }
-
-
-
-
-
-
-    private static void testAddAndFreeTextSearchWithPCJ(SailRepositoryConnection conn) throws Exception {
-        // add data to the repository using the SailRepository add methods
-        ValueFactory f = conn.getValueFactory();
-        URI person = f.createURI("http://example.org/ontology/Person");
-
-        String uuid;
-
-        uuid = "urn:people:alice";
-        conn.add(f.createURI(uuid), RDF.TYPE, person);
-        conn.add(f.createURI(uuid), RDFS.LABEL, f.createLiteral("Alice Palace Hose", f.createURI("xsd:string")));
-
-        uuid = "urn:people:bobss";
-        conn.add(f.createURI(uuid), RDF.TYPE, person);
-        conn.add(f.createURI(uuid), RDFS.LABEL, f.createLiteral("Bob Snob Hose", "en"));
-        
-        String queryString;
-        TupleQuery tupleQuery;
-        CountingResultHandler tupleHandler;
-
-        // ///////////// search for alice
-        queryString = "PREFIX fts: <http://rdf.useekm.com/fts#>  "//
-                + "SELECT ?person ?match ?e ?c ?l ?o " //
-                + "{" //
-                + "  ?person <http://www.w3.org/2000/01/rdf-schema#label> ?match . "//
-                + "  FILTER(fts:text(?match, \"pal*\")) " //
-                + "}";//
-        tupleQuery = conn.prepareTupleQuery(QueryLanguage.SPARQL, queryString);
-        tupleHandler = new CountingResultHandler();
-        tupleQuery.evaluate(tupleHandler);
-        log.info("Result count : " + tupleHandler.getCount());
-        Validate.isTrue(tupleHandler.getCount() == 1);
-        
-
-        // ///////////// search for alice and bob
-        queryString = "PREFIX fts: <http://rdf.useekm.com/fts#>  "//
-                + "SELECT ?person ?match " //
-                + "{" //
-                + "  ?person <http://www.w3.org/2000/01/rdf-schema#label> ?match . "//
-                  + "  ?person a <http://example.org/ontology/Person> . "//
-                + "  FILTER(fts:text(?match, \"(alice | bob) *SE\")) " //
-                + "}";//
-        tupleQuery = conn.prepareTupleQuery(QueryLanguage.SPARQL, queryString);
-        tupleHandler = new CountingResultHandler();
-        tupleQuery.evaluate(tupleHandler);
-        log.info("Result count : " + tupleHandler.getCount());
-        Validate.isTrue(tupleHandler.getCount() == 2);
-        
-     // ///////////// search for alice and bob
-        queryString = "PREFIX fts: <http://rdf.useekm.com/fts#>  "//
-                + "SELECT ?person ?match " //
-                + "{" //
-                + "  ?person a <http://example.org/ontology/Person> . "//
-                + "  ?person <http://www.w3.org/2000/01/rdf-schema#label> ?match . "//
-                + "  FILTER(fts:text(?match, \"(alice | bob) *SE\")) " //
-                + "  FILTER(fts:text(?match, \"pal*\")) " //
-                + "}";//
-        tupleQuery = conn.prepareTupleQuery(QueryLanguage.SPARQL, queryString);
-        tupleHandler = new CountingResultHandler();
-        tupleQuery.evaluate(tupleHandler);
-        log.info("Result count : " + tupleHandler.getCount());
-        Validate.isTrue(tupleHandler.getCount() == 1);
-        
-        
-        // ///////////// search for bob
-        queryString = "PREFIX fts: <http://rdf.useekm.com/fts#>  "//
-                + "SELECT ?person ?match ?e ?c ?l ?o " //
-                + "{" //
-                + "  ?e a ?c . "//
-                + "  ?e <http://www.w3.org/2000/01/rdf-schema#label> ?l . "//
-                + "  ?e <uri:talksTo> ?o . "//
-                + "  ?person a <http://example.org/ontology/Person> . "//
-                + "  ?person <http://www.w3.org/2000/01/rdf-schema#label> ?match . "//
-                + "  FILTER(fts:text(?match, \"!alice & hose\")) " //
-                + "}";//
-
-        tupleQuery = conn.prepareTupleQuery(QueryLanguage.SPARQL, queryString);
-        tupleHandler = new CountingResultHandler();
-        tupleQuery.evaluate(tupleHandler);
-        log.info("Result count : " + tupleHandler.getCount());
-        Validate.isTrue(tupleHandler.getCount() == 1);
-    }
-
-
-
-    private static void testAddPointAndWithinSearchWithPCJ(SailRepositoryConnection conn) throws Exception {
-
-        String update = "PREFIX geo: <http://www.opengis.net/ont/geosparql#>  "//
-                + "INSERT DATA { " //
-                + "  <urn:feature> a geo:Feature ; " //
-                + "    geo:hasGeometry [ " //
-                + "      a geo:Point ; " //
-                + "      geo:asWKT \"Point(-77.03524 38.889468)\"^^geo:wktLiteral "//
-                + "    ] . " //
-                + "}";
-
-        Update u = conn.prepareUpdate(QueryLanguage.SPARQL, update);
-        u.execute();
-        
-        String queryString;
-        TupleQuery tupleQuery;
-        CountingResultHandler tupleHandler;
-        
-        // point outside search ring
-        queryString = "PREFIX geo: <http://www.opengis.net/ont/geosparql#>  "//
-                + "PREFIX geof: <http://www.opengis.net/def/function/geosparql/>  "//
-                + "SELECT ?feature ?point ?wkt " //
-                + "{" //
-                + "  ?feature a geo:Feature . "//
-                + "  ?feature geo:hasGeometry ?point . "//
-                + "  ?point a geo:Point . "//
-                + "  ?point geo:asWKT ?wkt . "//
-                + "  FILTER(geof:sfWithin(?wkt, \"POLYGON((-77 39, -76 39, -76 38, -77 38, -77 39))\"^^geo:wktLiteral)) " //
-                + "}";//
-        tupleQuery = conn.prepareTupleQuery(QueryLanguage.SPARQL, queryString);
-        tupleHandler = new CountingResultHandler();
-        tupleQuery.evaluate(tupleHandler);
-        log.info("Result count : " + tupleHandler.getCount());
-        Validate.isTrue(tupleHandler.getCount() == 0);
-        
-        // point inside search ring
-        queryString = "PREFIX geo: <http://www.opengis.net/ont/geosparql#>  "//
-                + "PREFIX geof: <http://www.opengis.net/def/function/geosparql/>  "//
-                + "SELECT ?feature ?point ?wkt ?e ?l ?o" //
-                + "{" //
-                + "  ?feature a ?e . "//
-                + "  ?e <http://www.w3.org/2000/01/rdf-schema#label> ?l . "//
-                + "  ?e <uri:talksTo> ?o . "//
-                + "  ?feature a geo:Feature . "//
-                + "  ?feature geo:hasGeometry ?point . "//
-                + "  ?point a geo:Point . "//
-                + "  ?point geo:asWKT ?wkt . "//
-                + "  FILTER(geof:sfWithin(?wkt, \"POLYGON((-78 39, -77 39, -77 38, -78 38, -78 39))\"^^geo:wktLiteral)) " //
-                + "}";//
-         
-        tupleQuery = conn.prepareTupleQuery(QueryLanguage.SPARQL, queryString);
-        tupleHandler = new CountingResultHandler();
-        tupleQuery.evaluate(tupleHandler);
-        log.info("Result count : " + tupleHandler.getCount());
-        Validate.isTrue(tupleHandler.getCount() == 1);
-        
-             
-        // point inside search ring with Pre-Computed Join
-        queryString = "PREFIX geo: <http://www.opengis.net/ont/geosparql#>  "//
-                + "PREFIX geof: <http://www.opengis.net/def/function/geosparql/>  "//
-                + "SELECT ?feature ?point ?wkt ?e ?l ?o" //
-                + "{" //
-                + "  ?feature a ?e . "//
-                + "  ?e <http://www.w3.org/2000/01/rdf-schema#label> ?l . "//
-                + "  ?e <uri:talksTo> ?o . "//
-                + "  ?feature a geo:Feature . "//
-                + "  ?feature geo:hasGeometry ?point . "//
-                + "  ?point a geo:Point . "//
-                + "  ?point geo:asWKT ?wkt . "//
-                + "  FILTER(geof:sfWithin(?wkt, \"POLYGON((-78 39, -77 39, -77 38, -78 38, -78 39))\"^^geo:wktLiteral)) " //
-                + "}";//
-         
-        tupleQuery = conn.prepareTupleQuery(QueryLanguage.SPARQL, queryString);
-        tupleHandler = new CountingResultHandler();
-        tupleQuery.evaluate(tupleHandler);
-        log.info("Result count : " + tupleHandler.getCount());
-        Validate.isTrue(tupleHandler.getCount() >= 1); // may see points from during previous runs
-
-        // point outside search ring with PCJ
-        queryString = "PREFIX geo: <http://www.opengis.net/ont/geosparql#>  "//
-                + "PREFIX geof: <http://www.opengis.net/def/function/geosparql/>  "//
-                + "SELECT ?feature ?point ?wkt ?e ?l ?o " //
-                + "{" //
-                + "  ?feature a ?e . "//
-                + "  ?e <http://www.w3.org/2000/01/rdf-schema#label> ?l . "//
-                + "  ?e <uri:talksTo> ?o . "//
-                + "  ?feature a geo:Feature . "//
-                + "  ?feature geo:hasGeometry ?point . "//
-                + "  ?point a geo:Point . "//
-                + "  ?point geo:asWKT ?wkt . "//
-                + "  FILTER(geof:sfWithin(?wkt, \"POLYGON((-77 39, -76 39, -76 38, -77 38, -77 39))\"^^geo:wktLiteral)) " //
-                + "}";//
-        tupleQuery = conn.prepareTupleQuery(QueryLanguage.SPARQL, queryString);
-        tupleHandler = new CountingResultHandler();
-        tupleQuery.evaluate(tupleHandler);
-        log.info("Result count : " + tupleHandler.getCount());
-        Validate.isTrue(tupleHandler.getCount() == 0);
-        
-        // point inside search ring with different Pre-Computed Join
-        queryString = "PREFIX geo: <http://www.opengis.net/ont/geosparql#>  "//
-                + "PREFIX geof: <http://www.opengis.net/def/function/geosparql/>  "//
-                + "SELECT ?feature ?point ?wkt ?e ?c ?l ?o " //
-                + "{" //
-                + "  ?e a ?c . "//
-                + "  ?e <http://www.w3.org/2000/01/rdf-schema#label> ?l . "//
-                + "  ?e <uri:talksTo> ?o . "//
-                + "  ?feature a geo:Feature . "//
-                + "  ?feature geo:hasGeometry ?point . "//
-                + "  ?point a geo:Point . "//
-                + "  ?point geo:asWKT ?wkt . "//
-                + "  FILTER(geof:sfWithin(?wkt, \"POLYGON((-78 39, -77 39, -77 38, -78 38, -78 39))\"^^geo:wktLiteral)) " //
-                + "}";//
-        tupleQuery = conn.prepareTupleQuery(QueryLanguage.SPARQL, queryString);
-        tupleHandler = new CountingResultHandler();
-        tupleQuery.evaluate(tupleHandler);
-        log.info("Result count : " + tupleHandler.getCount());
-        Validate.isTrue(tupleHandler.getCount() == 1);
-    }
-    
-    
-    private static void testTemporalFreeGeoSearch(SailRepositoryConnection conn) throws MalformedQueryException, 
-    RepositoryException, UpdateExecutionException, TupleQueryResultHandlerException, QueryEvaluationException {
-        
-        
-        String queryString;
-        TupleQuery tupleQuery;
-        CountingResultHandler tupleHandler;
-
-        // ring containing point
-        queryString = "PREFIX geo: <http://www.opengis.net/ont/geosparql#>  "//
-                + "PREFIX geof: <http://www.opengis.net/def/function/geosparql/>  "//
-                + "PREFIX time: <http://www.w3.org/2006/time#> "//
-                + "PREFIX tempo: <tag:rya-rdf.org,2015:temporal#> "//
-                + "PREFIX fts: <http://rdf.useekm.com/fts#>  "//
-                + "SELECT ?feature ?point ?wkt ?event ?time ?person ?match" //
-                + "{" //
-                + "  ?event a  time:Instant . \n"//
-                + "  ?event time:inXSDDateTime ?time . \n"//
-                + "  FILTER(tempo:after(?time, '2001-01-01T01:01:03-08:00') ) \n"// after 3 seconds
-                + "  ?feature a geo:Feature . "//
-                + "  ?feature geo:hasGeometry ?point . "//
-                + "  ?point a geo:Point . "//
-                + "  ?point geo:asWKT ?wkt . "//
-                + "  FILTER(geof:sfWithin(?wkt, \"POLYGON((-78 39, -77 39, -77 38, -78 38, -78 39))\"^^geo:wktLiteral)). " //
-                + "  ?person a <http://example.org/ontology/Person> . "//
-                + "  ?person <http://www.w3.org/2000/01/rdf-schema#label> ?match . "//
-                + "  FILTER(fts:text(?match, \"pal*\")) " //
-                + "}";//
-        
-        
-        
-        tupleQuery = conn.prepareTupleQuery(QueryLanguage.SPARQL, queryString);
-
-        tupleHandler = new CountingResultHandler();
-        tupleQuery.evaluate(tupleHandler);
-        log.info("Result count : " + tupleHandler.getCount());
-        Validate.isTrue(tupleHandler.getCount() == 5); 
-        
-    }
-    
-    
-    
-    private static void testGeoFreetextWithPCJSearch(SailRepositoryConnection conn) throws MalformedQueryException, 
-    RepositoryException, TupleQueryResultHandlerException, QueryEvaluationException {
-     // ring outside point
-        String queryString = "PREFIX geo: <http://www.opengis.net/ont/geosparql#>  "//
-                + "PREFIX fts: <http://rdf.useekm.com/fts#>  "//
-                + "PREFIX geof: <http://www.opengis.net/def/function/geosparql/>  "//
-                + "SELECT ?feature ?point ?wkt ?e ?c ?l ?o ?person ?match " //
-                + "{" //
-                + "  ?person a <http://example.org/ontology/Person> . "//
-                + "  ?person <http://www.w3.org/2000/01/rdf-schema#label> ?match . "//
-                + "  FILTER(fts:text(?match, \"!alice & hose\")) " //
-                + "  ?e a ?c . "//
-                + "  ?e <http://www.w3.org/2000/01/rdf-schema#label> ?l . "//
-                + "  ?e <uri:talksTo> ?o . "//
-                + "  ?feature a geo:Feature . "//
-                + "  ?feature geo:hasGeometry ?point . "//
-                + "  ?point a geo:Point . "//
-                + "  ?point geo:asWKT ?wkt . "//
-                + "  FILTER(geof:sfWithin(?wkt, \"POLYGON((-78 39, -77 39, -77 38, -78 38, -78 39))\"^^geo:wktLiteral)) " //
-                + "}";//
-        TupleQuery tupleQuery = conn.prepareTupleQuery(QueryLanguage.SPARQL, queryString);
-        CountingResultHandler tupleHandler = new CountingResultHandler();
-        tupleQuery.evaluate(tupleHandler);
-        log.info("Result count : " + tupleHandler.getCount());
-        Validate.isTrue(tupleHandler.getCount() == 1);
-    }
-    
-    
-    
-    private static void createPCJ(SailRepositoryConnection conn) 
-            throws RepositoryException, AccumuloException, AccumuloSecurityException, TableExistsException {
-        
-        String queryString1 = ""//
-                + "SELECT ?e ?c ?l ?o " //
-                + "{" //
-                + "  ?c a ?e . "//
-                + "  ?e <http://www.w3.org/2000/01/rdf-schema#label> ?l . "//
-                + "  ?e <uri:talksTo> ?o . "//
-                + "}";//
-        
-        String queryString2 = ""//
-                + "SELECT ?e ?c ?l ?o " //
-                + "{" //
-                + "  ?e a ?c . "//
-                + "  ?e <http://www.w3.org/2000/01/rdf-schema#label> ?l . "//
-                + "  ?e <uri:talksTo> ?o . "//
-                + "}";//
-        
-        
-        URI obj,subclass,talksTo;
-        URI person = new URIImpl("urn:people:alice");
-        URI feature = new URIImpl("urn:feature");
-        URI sub = new URIImpl("uri:entity");
-        subclass = new URIImpl("uri:class");
-        obj = new URIImpl("uri:obj");
-        talksTo = new URIImpl("uri:talksTo");
-
-        conn.add(person, RDF.TYPE, sub);
-        conn.add(feature, RDF.TYPE, sub);
-        conn.add(sub, RDF.TYPE, subclass);
-        conn.add(sub, RDFS.LABEL, new LiteralImpl("label"));
-        conn.add(sub, talksTo, obj);
-       
-        AccumuloIndexSet ais1 = null; 
-        AccumuloIndexSet ais2 = null; 
-        String tablename1 = RYA_TABLE_PREFIX + "INDEX_1";
-        String tablename2 = RYA_TABLE_PREFIX + "INDEX_2";
-
-        Connector accCon = new MockInstance(INSTANCE).getConnector("root", new PasswordToken("".getBytes()));
-        accCon.tableOperations().create(tablename1);
-        accCon.tableOperations().create(tablename2);
-        
-        try {
-            ais1 = new AccumuloIndexSet(queryString1, conn, accCon, tablename1);
-            ais2 = new AccumuloIndexSet(queryString2, conn, accCon, tablename2);
-        } catch (MalformedQueryException e) {
-            e.printStackTrace();
-        } catch (SailException e) {
-            e.printStackTrace();
-        } catch (QueryEvaluationException e) {
-            e.printStackTrace();
-        } catch (MutationsRejectedException e) {
-            e.printStackTrace();
-        } catch (TableNotFoundException e) {
-            e.printStackTrace();
-        }
-        
-    }
-    
-
-    private static class CountingResultHandler implements TupleQueryResultHandler {
-        private int count = 0;
-
-        public int getCount() {
-            return count;
-        }
-
-        public void resetCount() {
-            this.count = 0;
-        }
-
-        @Override
-        public void startQueryResult(List<String> arg0) throws TupleQueryResultHandlerException {
-        }
-
-        @Override
-        public void handleSolution(BindingSet arg0) throws TupleQueryResultHandlerException {
-            count++;
-            System.out.println(arg0);
-        }
-
-        @Override
-        public void endQueryResult() throws TupleQueryResultHandlerException {
-        }
-
-        @Override
-        public void handleBoolean(boolean arg0) throws QueryResultHandlerException {
-          // TODO Auto-generated method stub
-          
-        }
-
-        @Override
-        public void handleLinks(List<String> arg0) throws QueryResultHandlerException {
-          // TODO Auto-generated method stub
-          
-        }
-    }
-}

http://git-wip-us.apache.org/repos/asf/incubator-rya/blob/80faf06d/extras/indexingSailExample/src/main/scripts/RunRyaDirectExample.bat
----------------------------------------------------------------------
diff --git a/extras/indexingSailExample/src/main/scripts/RunRyaDirectExample.bat b/extras/indexingSailExample/src/main/scripts/RunRyaDirectExample.bat
deleted file mode 100644
index 3a75f71..0000000
--- a/extras/indexingSailExample/src/main/scripts/RunRyaDirectExample.bat
+++ /dev/null
@@ -1,25 +0,0 @@
-@echo off
-SET CP=
-
-REM Check to see if javac is on the path
-where /Q javac
-IF %ERRORLEVEL% NEQ 0 goto :NO_JAVAC
-
-
-for /f %%f in ('DIR /b .\lib\*.jar') do call :append .\lib\%%f
-
-javac -cp "%CP%" RyaDirectExample.java
-java -cp "%CP%" RyaDirectExample
-
-goto :end
-
-:append
-@echo off
-SET CP=%CP%%1;
-goto :end
-
-:NO_JAVAC
-echo ERROR: Could not find javac
-goto :end
-
-:end
\ No newline at end of file

http://git-wip-us.apache.org/repos/asf/incubator-rya/blob/80faf06d/extras/pom.xml
----------------------------------------------------------------------
diff --git a/extras/pom.xml b/extras/pom.xml
index b864a76..f3a88d3 100644
--- a/extras/pom.xml
+++ b/extras/pom.xml
@@ -1,27 +1,43 @@
 <?xml version="1.0" encoding="utf-8"?>
-<project xmlns="http://maven.apache.org/POM/4.0.0" xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance" xsi:schemaLocation="http://maven.apache.org/POM/4.0.0 http://maven.apache.org/xsd/maven-4.0.0.xsd">
 
+<!--
+Licensed to the Apache Software Foundation (ASF) under one
+or more contributor license agreements.  See the NOTICE file
+distributed with this work for additional information
+regarding copyright ownership.  The ASF licenses this file
+to you under the Apache License, Version 2.0 (the
+"License"); you may not use this file except in compliance
+with the License.  You may obtain a copy of the License at
+
+  http://www.apache.org/licenses/LICENSE-2.0
+
+Unless required by applicable law or agreed to in writing,
+software distributed under the License is distributed on an
+"AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+KIND, either express or implied.  See the License for the
+specific language governing permissions and limitations
+under the License.
+-->
+
+<project xmlns="http://maven.apache.org/POM/4.0.0" xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance" xsi:schemaLocation="http://maven.apache.org/POM/4.0.0 http://maven.apache.org/xsd/maven-4.0.0.xsd">
     <modelVersion>4.0.0</modelVersion>
     <parent>
-        <groupId>mvm.rya</groupId>
-        <artifactId>parent</artifactId>
+        <groupId>org.apache.rya</groupId>
+        <artifactId>rya-project</artifactId>
         <version>3.2.10-SNAPSHOT</version>
     </parent>
+
     <artifactId>rya.extras</artifactId>
+    <name>Apache Rya Extra Projects</name>
+
     <packaging>pom</packaging>
-    <name>${project.groupId}.${project.artifactId}</name>
+
     <modules>
         <module>rya.prospector</module>
         <module>rya.manual</module>
         <module>tinkerpop.rya</module>
+        <module>rya.console</module>
+        <module>indexing</module>
+        <module>indexingExample</module>
     </modules>
-    <profiles>
-        <profile>
-            <id>indexing</id>
-            <modules>
-                <module>indexing</module>
-                <module>indexingSailExample</module>
-            </modules>
-        </profile>
-    </profiles>
 </project>

http://git-wip-us.apache.org/repos/asf/incubator-rya/blob/80faf06d/extras/rya.console/pom.xml
----------------------------------------------------------------------
diff --git a/extras/rya.console/pom.xml b/extras/rya.console/pom.xml
index adb4997..1bbb5a0 100644
--- a/extras/rya.console/pom.xml
+++ b/extras/rya.console/pom.xml
@@ -1,57 +1,65 @@
+<?xml version='1.0'?>
+<!--
+Licensed to the Apache Software Foundation (ASF) under one
+or more contributor license agreements.  See the NOTICE file
+distributed with this work for additional information
+regarding copyright ownership.  The ASF licenses this file
+to you under the Apache License, Version 2.0 (the
+"License"); you may not use this file except in compliance
+with the License.  You may obtain a copy of the License at
+
+  http://www.apache.org/licenses/LICENSE-2.0
+
+Unless required by applicable law or agreed to in writing,
+software distributed under the License is distributed on an
+"AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+KIND, either express or implied.  See the License for the
+specific language governing permissions and limitations
+under the License.
+-->
+
 <project xmlns="http://maven.apache.org/POM/4.0.0" xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance"
          xsi:schemaLocation="http://maven.apache.org/POM/4.0.0 http://maven.apache.org/xsd/maven-4.0.0.xsd">
     <modelVersion>4.0.0</modelVersion>
-
     <parent>
-        <groupId>mvm.rya</groupId>
+        <groupId>org.apache.rya</groupId>
         <artifactId>rya.extras</artifactId>
-        <version>3.2.5-SNAPSHOT</version>
+        <version>3.2.10-SNAPSHOT</version>
     </parent>
-    <artifactId>rya.console</artifactId>
-    <packaging>jar</packaging>
 
-    <url>http://maven.apache.org</url>
+    <artifactId>rya.console</artifactId>
+    <name>Apache Rya Console</name>
 
     <properties>
-        <spring.shell.version>1.0.0.M1</spring.shell.version>
         <jar.mainclass>org.springframework.shell.Bootstrap</jar.mainclass>
     </properties>
 
     <dependencies>
         <dependency>
-            <groupId>mvm.rya</groupId>
+            <groupId>org.apache.rya</groupId>
             <artifactId>rya.api</artifactId>
         </dependency>
-	   <dependency>
-            <groupId>mvm.rya</groupId>
+        <dependency>
+            <groupId>org.apache.rya</groupId>
             <artifactId>accumulo.rya</artifactId>
-	        <exclusions>
+            <exclusions>
                 <exclusion>
                     <groupId>jline</groupId>
                     <artifactId>jline</artifactId>
                 </exclusion>
             </exclusions>
         </dependency>
+
         <dependency>
             <groupId>org.springframework.shell</groupId>
             <artifactId>spring-shell</artifactId>
-            <version>${spring.shell.version}</version>
         </dependency>
-
     </dependencies>
 
     <build>
         <plugins>
             <plugin>
                 <groupId>org.apache.maven.plugins</groupId>
-                <artifactId>maven-compiler-plugin</artifactId>
-                <configuration>
-                    <source>1.5</source>
-                    <target>1.5</target>
-                </configuration>
-            </plugin>
-            <plugin>
-                <groupId>org.apache.maven.plugins</groupId>
                 <artifactId>maven-dependency-plugin</artifactId>
                 <executions>
                     <execution>
@@ -76,7 +84,7 @@
                     <archive>
                         <manifest>
                             <addClasspath>true</addClasspath>
-                            <!--<useUniqueVersions>false</useUniqueVersions>-->
+                            <!--<useUniqueVersions>false</useUniqueVersions> -->
                             <classpathPrefix>lib/</classpathPrefix>
                             <mainClass>${jar.mainclass}</mainClass>
                         </manifest>
@@ -89,58 +97,4 @@
         </plugins>
 
     </build>
-
-    <profiles>
-        <profile>
-            <id>accumulo</id>
-            <activation>
-                <activeByDefault>true</activeByDefault>
-            </activation>
-            <dependencies>
-                <dependency>
-                    <groupId>org.apache.accumulo</groupId>
-                    <artifactId>accumulo-core</artifactId>
-                </dependency>
-                <dependency>
-                    <groupId>mvm.rya</groupId>
-                    <artifactId>accumulo.iterators</artifactId>
-                </dependency>
-            </dependencies>
-        </profile>
-        <profile>
-            <id>cloudbase</id>
-            <activation>
-                <activeByDefault>false</activeByDefault>
-            </activation>
-            <dependencies>
-                <dependency>
-                    <groupId>com.texeltek</groupId>
-                    <artifactId>accumulo-cloudbase-shim</artifactId>
-                </dependency>
-                <dependency>
-                    <groupId>mvm.rya</groupId>
-                    <artifactId>cloudbase.iterators</artifactId>
-                </dependency>
-            </dependencies>
-        </profile>
-		
-    </profiles>
-
-    <repositories>
-        <!-- jline 1.0.S2-B is here http://shrub.appspot.com/spring-roo-repository.springsource.org/release/net/sourceforge/jline/jline/1.0.S2-B/ -->
-        <repository>
-            <id>spring-roo-repository</id>
-            <name>Spring Roo Maven Repository</name>
-            <url>http://spring-roo-repository.springsource.org/release</url>
-        </repository>
-
-        <repository>
-            <id>spring-maven-snapshot</id>
-            <snapshots>
-                <enabled>true</enabled>
-            </snapshots>
-            <name>Springframework Maven SNAPSHOT Repository</name>
-            <url>http://repo.springsource.org/libs-snapshot</url>
-        </repository>
-    </repositories>
 </project>

http://git-wip-us.apache.org/repos/asf/incubator-rya/blob/80faf06d/extras/rya.console/src/main/java/mvm/rya/console/RyaBannerProvider.java
----------------------------------------------------------------------
diff --git a/extras/rya.console/src/main/java/mvm/rya/console/RyaBannerProvider.java b/extras/rya.console/src/main/java/mvm/rya/console/RyaBannerProvider.java
index 23c028c..2d0fac8 100644
--- a/extras/rya.console/src/main/java/mvm/rya/console/RyaBannerProvider.java
+++ b/extras/rya.console/src/main/java/mvm/rya/console/RyaBannerProvider.java
@@ -1,26 +1,31 @@
+package mvm.rya.console;
+
 /*
- * Copyright 2011-2012 the original author or authors.
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
  * 
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * You may obtain a copy of the License at
+ *   http://www.apache.org/licenses/LICENSE-2.0
  * 
- *      http://www.apache.org/licenses/LICENSE-2.0
- * 
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied.  See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
  */
-package mvm.rya.console;
+
 
 import org.springframework.core.Ordered;
 import org.springframework.core.annotation.Order;
-import org.springframework.shell.support.util.StringUtils;
 import org.springframework.shell.core.CommandMarker;
 import org.springframework.shell.core.annotation.CliCommand;
 import org.springframework.shell.plugin.support.DefaultBannerProvider;
+import org.springframework.shell.support.util.OsUtils;
 import org.springframework.stereotype.Component;
 
 /**
@@ -32,6 +37,7 @@ public class RyaBannerProvider extends DefaultBannerProvider
         implements CommandMarker {
 
     @CliCommand(value = {"version"}, help = "Displays current CLI version")
+    @Override
     public String getBanner() {
         StringBuffer buf = new StringBuffer();
         buf.append("" +
@@ -40,22 +46,24 @@ public class RyaBannerProvider extends DefaultBannerProvider
                 "__  /_/ /_  / / /  __ `/    _  /    _  __ \\_  __ \\_  ___/  __ \\_  /_  _ \\\n" +
                 "_  _, _/_  /_/ // /_/ /     / /___  / /_/ /  / / /(__  )/ /_/ /  / /  __/\n" +
                 "/_/ |_| _\\__, / \\__,_/      \\____/  \\____//_/ /_//____/ \\____//_/  \\___/ \n" +
-                "        /____/ " + StringUtils.LINE_SEPARATOR);
+                "        /____/ " + OsUtils.LINE_SEPARATOR);
         buf.append("Version:" + this.getVersion());
         return buf.toString();
 
     }
 
+    @Override
     public String getVersion() {
         return "3.0.0";
     }
 
+    @Override
     public String getWelcomeMessage() {
         return "Welcome to the Rya Console";
     }
 
     @Override
-    public String name() {
+    public String getProviderName() {
         return "rya";
     }
-}
\ No newline at end of file
+}

http://git-wip-us.apache.org/repos/asf/incubator-rya/blob/80faf06d/extras/rya.console/src/main/java/mvm/rya/console/RyaConsoleCommands.java
----------------------------------------------------------------------
diff --git a/extras/rya.console/src/main/java/mvm/rya/console/RyaConsoleCommands.java b/extras/rya.console/src/main/java/mvm/rya/console/RyaConsoleCommands.java
index 2882b21..3f63b20 100644
--- a/extras/rya.console/src/main/java/mvm/rya/console/RyaConsoleCommands.java
+++ b/extras/rya.console/src/main/java/mvm/rya/console/RyaConsoleCommands.java
@@ -1,5 +1,24 @@
 package mvm.rya.console;
 
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ * 
+ *   http://www.apache.org/licenses/LICENSE-2.0
+ * 
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied.  See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+
 
 import info.aduna.iteration.CloseableIteration;
 
@@ -208,4 +227,4 @@ public class RyaConsoleCommands implements CommandMarker {
     public void setRyaDAO(RyaDAO ryaDAO) {
         this.ryaDAO = ryaDAO;
     }
-}
\ No newline at end of file
+}

http://git-wip-us.apache.org/repos/asf/incubator-rya/blob/80faf06d/extras/rya.console/src/main/java/mvm/rya/console/RyaHistoryFileNameProvider.java
----------------------------------------------------------------------
diff --git a/extras/rya.console/src/main/java/mvm/rya/console/RyaHistoryFileNameProvider.java b/extras/rya.console/src/main/java/mvm/rya/console/RyaHistoryFileNameProvider.java
index e09bda6..97182aa 100644
--- a/extras/rya.console/src/main/java/mvm/rya/console/RyaHistoryFileNameProvider.java
+++ b/extras/rya.console/src/main/java/mvm/rya/console/RyaHistoryFileNameProvider.java
@@ -1,20 +1,24 @@
+package mvm.rya.console;
+
 /*
- * Copyright 2011-2012 the original author or authors.
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
  * 
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * You may obtain a copy of the License at
+ *   http://www.apache.org/licenses/LICENSE-2.0
  * 
- *      http://www.apache.org/licenses/LICENSE-2.0
- * 
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied.  See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
  */
 
-package mvm.rya.console;
 
 import org.springframework.core.Ordered;
 import org.springframework.core.annotation.Order;
@@ -30,12 +34,13 @@ import org.springframework.stereotype.Component;
 @Order(Ordered.HIGHEST_PRECEDENCE)
 public class RyaHistoryFileNameProvider extends DefaultHistoryFileNameProvider{
 
+    @Override
 	public String getHistoryFileName() {
 		return "ryaconsole.log";
 	}
 
 	@Override
-	public String name() {
+	public String getProviderName() {
 		return "Rya Console History Log";
 	}
 	

http://git-wip-us.apache.org/repos/asf/incubator-rya/blob/80faf06d/extras/rya.console/src/main/java/mvm/rya/console/RyaPromptProvider.java
----------------------------------------------------------------------
diff --git a/extras/rya.console/src/main/java/mvm/rya/console/RyaPromptProvider.java b/extras/rya.console/src/main/java/mvm/rya/console/RyaPromptProvider.java
index 90c9199..b199819 100644
--- a/extras/rya.console/src/main/java/mvm/rya/console/RyaPromptProvider.java
+++ b/extras/rya.console/src/main/java/mvm/rya/console/RyaPromptProvider.java
@@ -1,19 +1,24 @@
+package mvm.rya.console;
+
 /*
- * Copyright 2011-2012 the original author or authors.
- * 
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * You may obtain a copy of the License at
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
  * 
- *      http://www.apache.org/licenses/LICENSE-2.0
+ *   http://www.apache.org/licenses/LICENSE-2.0
  * 
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied.  See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
  */
-package mvm.rya.console;
+
 
 import org.springframework.core.Ordered;
 import org.springframework.core.annotation.Order;
@@ -35,7 +40,7 @@ public class RyaPromptProvider extends DefaultPromptProvider {
 
 	
 	@Override
-	public String name() {
+	public String getProviderName() {
 		return "Rya Console Prompt";
 	}
 

http://git-wip-us.apache.org/repos/asf/incubator-rya/blob/80faf06d/extras/rya.console/src/main/resources/META-INF/spring/spring-shell-plugin.xml
----------------------------------------------------------------------
diff --git a/extras/rya.console/src/main/resources/META-INF/spring/spring-shell-plugin.xml b/extras/rya.console/src/main/resources/META-INF/spring/spring-shell-plugin.xml
index d21543d..e593a48 100644
--- a/extras/rya.console/src/main/resources/META-INF/spring/spring-shell-plugin.xml
+++ b/extras/rya.console/src/main/resources/META-INF/spring/spring-shell-plugin.xml
@@ -1,4 +1,24 @@
 <?xml version="1.0" encoding="UTF-8"?>
+
+<!--
+Licensed to the Apache Software Foundation (ASF) under one
+or more contributor license agreements.  See the NOTICE file
+distributed with this work for additional information
+regarding copyright ownership.  The ASF licenses this file
+to you under the Apache License, Version 2.0 (the
+"License"); you may not use this file except in compliance
+with the License.  You may obtain a copy of the License at
+
+  http://www.apache.org/licenses/LICENSE-2.0
+
+Unless required by applicable law or agreed to in writing,
+software distributed under the License is distributed on an
+"AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+KIND, either express or implied.  See the License for the
+specific language governing permissions and limitations
+under the License.
+-->
+
 <beans xmlns="http://www.springframework.org/schema/beans"
 	xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance"
 	xmlns:context="http://www.springframework.org/schema/context"
@@ -7,4 +27,4 @@
 
 	<context:component-scan base-package="mvm.rya.console" />
 
-</beans>
\ No newline at end of file
+</beans>

http://git-wip-us.apache.org/repos/asf/incubator-rya/blob/80faf06d/extras/rya.geo/pom.xml
----------------------------------------------------------------------
diff --git a/extras/rya.geo/pom.xml b/extras/rya.geo/pom.xml
deleted file mode 100644
index c468158..0000000
--- a/extras/rya.geo/pom.xml
+++ /dev/null
@@ -1,25 +0,0 @@
-<project xmlns="http://maven.apache.org/POM/4.0.0" xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance"
-	xsi:schemaLocation="http://maven.apache.org/POM/4.0.0 http://maven.apache.org/xsd/maven-4.0.0.xsd">
-	<modelVersion>4.0.0</modelVersion>
-
-	<parent>
-		<groupId>mvm.rya</groupId>
-		<artifactId>rya.extras</artifactId>
-		<version>3.2.10-SNAPSHOT</version>
-	</parent>
-
-	<artifactId>rya.geo</artifactId>
-	
-	<dependencies>
-		<dependency>
-			<groupId>mvm.rya</groupId>
-			<artifactId>rya.api</artifactId>
-		</dependency>
-		<dependency>
-			<groupId>org.openrdf.sesame</groupId>
-			<artifactId>sesame-queryalgebra-evaluation</artifactId>
-			<version>${openrdf.sesame.version}</version>
-		</dependency>
-	</dependencies>
-	
-</project>
\ No newline at end of file

http://git-wip-us.apache.org/repos/asf/incubator-rya/blob/80faf06d/extras/rya.geo/src/main/java/mvm/rya/geo/GeoDistance.java
----------------------------------------------------------------------
diff --git a/extras/rya.geo/src/main/java/mvm/rya/geo/GeoDistance.java b/extras/rya.geo/src/main/java/mvm/rya/geo/GeoDistance.java
deleted file mode 100644
index 277d7f6..0000000
--- a/extras/rya.geo/src/main/java/mvm/rya/geo/GeoDistance.java
+++ /dev/null
@@ -1,34 +0,0 @@
-package mvm.rya.geo;
-
-/**
- * Distance functions for geographic points
- */
-public class GeoDistance
-{
-	private static final double EARTH_RADIUS_KM = 6366.0;
-	private static final double DEG2RAD = Math.PI / 180;
-
-	/**
-	 * Calculates distance between two geographic points in km
-	 * 
-	 * @param lat1
-	 * @param lon1
-	 * @param lat2
-	 * @param lon2
-	 * @return distance in kilometers
-	 */
-	public static double calculate(double lat1, double lon1, double lat2, double lon2)
-	{
-		double a1 = lat1 * DEG2RAD;
-		double a2 = lon1 * DEG2RAD;
-		double b1 = lat2 * DEG2RAD;
-		double b2 = lon2 * DEG2RAD;
-
-		double t1 = Math.cos(a1) * Math.cos(a2) * Math.cos(b1) * Math.cos(b2);
-		double t2 = Math.cos(a1) * Math.sin(a2) * Math.cos(b1) * Math.sin(b2);
-		double t3 = Math.sin(a1) * Math.sin(b1);
-		double tt = Math.acos(t1 + t2 + t3);
-
-		return EARTH_RADIUS_KM * tt;
-	}
-}

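The GeoDistance class removed above implements the spherical law of cosines: t1 + t2 collapses to cos(lat1)*cos(lat2)*cos(lon1 - lon2), so the whole expression is R*acos(sin(lat1)*sin(lat2) + cos(lat1)*cos(lat2)*cos(lon1 - lon2)). A quick standalone sanity check of that form (coordinates and class name are illustrative, not from this commit):

    // Standalone check of the spherical-law-of-cosines form used by the
    // removed GeoDistance class (EARTH_RADIUS_KM = 6366.0 as in that file).
    public class GeoDistanceCheck {
        public static void main(String[] args) {
            double lat1 = 38.9,  lon1 = -77.0;  // near Washington, DC (illustrative)
            double lat2 = 33.75, lon2 = -84.38; // near Atlanta (illustrative)
            double d = 6366.0 * Math.acos(
                    Math.sin(Math.toRadians(lat1)) * Math.sin(Math.toRadians(lat2))
                  + Math.cos(Math.toRadians(lat1)) * Math.cos(Math.toRadians(lat2))
                  * Math.cos(Math.toRadians(lon1 - lon2)));
            System.out.println(d + " km"); // roughly 870 km (~540 miles)
        }
    }
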
http://git-wip-us.apache.org/repos/asf/incubator-rya/blob/80faf06d/extras/rya.geo/src/main/java/mvm/rya/geo/GeoRyaTypeResolver.java
----------------------------------------------------------------------
diff --git a/extras/rya.geo/src/main/java/mvm/rya/geo/GeoRyaTypeResolver.java b/extras/rya.geo/src/main/java/mvm/rya/geo/GeoRyaTypeResolver.java
deleted file mode 100644
index 1d31c0b..0000000
--- a/extras/rya.geo/src/main/java/mvm/rya/geo/GeoRyaTypeResolver.java
+++ /dev/null
@@ -1,16 +0,0 @@
-package mvm.rya.geo;
-
-import mvm.rya.api.resolver.impl.RyaTypeResolverImpl;
-
-/**
- * Type resolver for rya geo location type
- */
-public class GeoRyaTypeResolver extends RyaTypeResolverImpl
-{
-	public static final int GEO_LITERAL_MARKER = 11;
-
-	public GeoRyaTypeResolver()
-	{
-		super((byte) GEO_LITERAL_MARKER, RyaGeoSchema.GEOPOINT);
-	}
-}

http://git-wip-us.apache.org/repos/asf/incubator-rya/blob/80faf06d/extras/rya.geo/src/main/java/mvm/rya/geo/RyaGeoSchema.java
----------------------------------------------------------------------
diff --git a/extras/rya.geo/src/main/java/mvm/rya/geo/RyaGeoSchema.java b/extras/rya.geo/src/main/java/mvm/rya/geo/RyaGeoSchema.java
deleted file mode 100644
index 06befc2..0000000
--- a/extras/rya.geo/src/main/java/mvm/rya/geo/RyaGeoSchema.java
+++ /dev/null
@@ -1,16 +0,0 @@
-package mvm.rya.geo;
-
-import org.openrdf.model.URI;
-import org.openrdf.model.ValueFactory;
-import org.openrdf.model.impl.ValueFactoryImpl;
-
-/**
- * Rya GEO RDF Constants
- */
-public class RyaGeoSchema
-{
-	private static final ValueFactory VF = ValueFactoryImpl.getInstance();
-
-	public static final URI NAMESPACE = VF.createURI("urn:mvm.rya/geo#");
-	public static final URI GEOPOINT = VF.createURI(NAMESPACE.toString(), "geopoint");
-}

http://git-wip-us.apache.org/repos/asf/incubator-rya/blob/80faf06d/extras/rya.geo/src/main/java/mvm/rya/geo/Verify.java
----------------------------------------------------------------------
diff --git a/extras/rya.geo/src/main/java/mvm/rya/geo/Verify.java b/extras/rya.geo/src/main/java/mvm/rya/geo/Verify.java
deleted file mode 100644
index 752793e..0000000
--- a/extras/rya.geo/src/main/java/mvm/rya/geo/Verify.java
+++ /dev/null
@@ -1,68 +0,0 @@
-package mvm.rya.geo;
-
-import org.openrdf.model.Literal;
-import org.openrdf.model.URI;
-import org.openrdf.model.Value;
-import org.openrdf.query.algebra.evaluation.ValueExprEvaluationException;
-
-/**
- * Utility for verifying function arguments
- */
-public class Verify
-{
-	private final Value[] args;
-
-	/**
-	 * Entry point for creating a Verify
-	 * 
-	 * @param args
-	 * @return verify
-	 */
-	public static Verify that(Value... args)
-	{
-		return new Verify(args);
-	}
-
-	private Verify(Value... args)
-	{
-		this.args = args;
-	}
-
-	/**
-	 * verifies the number of arguments
-	 * 
-	 * @param numArgs
-	 * @throws ValueExprEvaluationException
-	 */
-	public void hasLength(int numArgs) throws ValueExprEvaluationException
-	{
-		if (args.length != numArgs)
-		{
-			throw new ValueExprEvaluationException("expected " + numArgs + " but received " + args.length);
-		}
-	}
-
-	/**
-	 * verifies the arguments are of the specified type
-	 * 
-	 * @param type
-	 * @throws ValueExprEvaluationException
-	 */
-	public void isLiteralOfType(URI type) throws ValueExprEvaluationException
-	{
-		for (Value arg : args)
-		{
-			if (!(arg instanceof Literal))
-			{
-				throw new ValueExprEvaluationException(arg + " is not a literal");
-			}
-
-			Literal l = (Literal) arg;
-
-			if (!type.equals(l.getDatatype()))
-			{
-				throw new ValueExprEvaluationException("expected type " + type + " but received " + l.getDatatype());
-			}
-		}
-	}
-}

http://git-wip-us.apache.org/repos/asf/incubator-rya/blob/80faf06d/extras/rya.geo/src/main/java/mvm/rya/geo/WithinRange.java
----------------------------------------------------------------------
diff --git a/extras/rya.geo/src/main/java/mvm/rya/geo/WithinRange.java b/extras/rya.geo/src/main/java/mvm/rya/geo/WithinRange.java
deleted file mode 100644
index 55bce4a..0000000
--- a/extras/rya.geo/src/main/java/mvm/rya/geo/WithinRange.java
+++ /dev/null
@@ -1,69 +0,0 @@
-package mvm.rya.geo;
-
-import java.util.Arrays;
-
-import org.openrdf.model.Value;
-import org.openrdf.model.ValueFactory;
-import org.openrdf.query.algebra.evaluation.ValueExprEvaluationException;
-import org.openrdf.query.algebra.evaluation.function.Function;
-
-/**
- * Custom function for checking whether a lat/lon is within a certain range of another
- * 
- * Example SPARQL Usage:
- * 
- * <pre>
- * # Give me all cities that are within 50 km of lat/lon 20.00,-30.00
- * 
- * PREFIX geo: <urn:mvm.rya/geo#>
- * SELECT ?city
- * WHERE 
- * {
- *   ?city geo:locatedAt ?latLon .
- *   FILTER( geo:withinRange(?latLon, "20.00,-30.00"^^geo:geopoint, 50) )
- * }
- * </pre>
- */
-public class WithinRange implements Function
-{
-	private static final String FUN_NAME = "withinRange";
-
-	@Override
-	public Value evaluate(ValueFactory vf, Value... args) throws ValueExprEvaluationException
-	{
-		System.out.println("running with args: " + Arrays.toString(args));
-
-		Verify.that(args).hasLength(3);
-		Verify.that(args[0], args[1]).isLiteralOfType(RyaGeoSchema.GEOPOINT);
-
-		GeoPoint testPt = new GeoPoint(args[0]);
-		GeoPoint targetPt = new GeoPoint(args[1]);
-		double radius = Double.parseDouble(args[2].stringValue());
-
-		double dist = GeoDistance.calculate(testPt.lat, testPt.lon, targetPt.lat, targetPt.lon);
-
-		System.out.println("distance from (" + testPt.lat + "," + testPt.lon + ") to (" + targetPt.lat + "," + targetPt.lon
-				+ ") is " + dist);
-
-		return vf.createLiteral(dist <= radius);
-	}
-
-	@Override
-	public String getURI()
-	{
-		return RyaGeoSchema.NAMESPACE.toString() + FUN_NAME;
-	}
-
-	private class GeoPoint
-	{
-		public double lat;
-		public double lon;
-
-		public GeoPoint(Value val)
-		{
-			String[] tokens = val.stringValue().split(",");
-			lat = Double.parseDouble(tokens[0]);
-			lon = Double.parseDouble(tokens[1]);
-		}
-	}
-}

http://git-wip-us.apache.org/repos/asf/incubator-rya/blob/80faf06d/extras/rya.geo/src/main/resources/META-INF/services/mvm.rya.api.resolver.RyaTypeResolver
----------------------------------------------------------------------
diff --git a/extras/rya.geo/src/main/resources/META-INF/services/mvm.rya.api.resolver.RyaTypeResolver b/extras/rya.geo/src/main/resources/META-INF/services/mvm.rya.api.resolver.RyaTypeResolver
deleted file mode 100644
index 028b525..0000000
--- a/extras/rya.geo/src/main/resources/META-INF/services/mvm.rya.api.resolver.RyaTypeResolver
+++ /dev/null
@@ -1 +0,0 @@
-mvm.rya.geo.GeoRyaTypeResolver
\ No newline at end of file

http://git-wip-us.apache.org/repos/asf/incubator-rya/blob/80faf06d/extras/rya.geo/src/main/resources/META-INF/services/org.openrdf.query.algebra.evaluation.function.Function
----------------------------------------------------------------------
diff --git a/extras/rya.geo/src/main/resources/META-INF/services/org.openrdf.query.algebra.evaluation.function.Function b/extras/rya.geo/src/main/resources/META-INF/services/org.openrdf.query.algebra.evaluation.function.Function
deleted file mode 100644
index 5f853fe..0000000
--- a/extras/rya.geo/src/main/resources/META-INF/services/org.openrdf.query.algebra.evaluation.function.Function
+++ /dev/null
@@ -1 +0,0 @@
-mvm.rya.geo.WithinRange
\ No newline at end of file

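The two META-INF/services files deleted above are standard java.util.ServiceLoader registrations: OpenRDF discovers custom SPARQL Function implementations (and Rya discovers RyaTypeResolver implementations) by scanning these entries on the classpath. Roughly, the discovery amounts to the following sketch (class name FunctionDiscovery is illustrative, not from this commit):

    // Illustrative sketch of the ServiceLoader discovery that the deleted
    // META-INF/services entries fed into; not code from this commit.
    import java.util.ServiceLoader;
    import org.openrdf.query.algebra.evaluation.function.Function;

    public class FunctionDiscovery {
        public static void main(String[] args) {
            for (Function f : ServiceLoader.load(Function.class)) {
                System.out.println("found SPARQL function: " + f.getURI());
            }
        }
    }
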
http://git-wip-us.apache.org/repos/asf/incubator-rya/blob/80faf06d/extras/rya.geo/src/test/java/mvm/rya/geo/GeoRyaTypeResolverTest.java
----------------------------------------------------------------------
diff --git a/extras/rya.geo/src/test/java/mvm/rya/geo/GeoRyaTypeResolverTest.java b/extras/rya.geo/src/test/java/mvm/rya/geo/GeoRyaTypeResolverTest.java
deleted file mode 100644
index c3284c7..0000000
--- a/extras/rya.geo/src/test/java/mvm/rya/geo/GeoRyaTypeResolverTest.java
+++ /dev/null
@@ -1,25 +0,0 @@
-package mvm.rya.geo;
-
-import mvm.rya.api.domain.RyaType;
-
-import org.junit.Assert;
-import org.junit.Test;
-
-public class GeoRyaTypeResolverTest
-{
-	private final GeoRyaTypeResolver resolver = new GeoRyaTypeResolver();
-
-	@Test
-	public void testSerialization_andBack() throws Exception
-	{
-		String latLon = "20.00,30.00";
-		RyaType orig = new RyaType(RyaGeoSchema.GEOPOINT, latLon);
-
-		byte[] bytes = resolver.serialize(orig);
-		RyaType copy = resolver.deserialize(bytes);
-
-		Assert.assertEquals(latLon, copy.getData());
-		Assert.assertEquals(orig, copy);
-		Assert.assertEquals(RyaGeoSchema.GEOPOINT, copy.getDataType());
-	}
-}

http://git-wip-us.apache.org/repos/asf/incubator-rya/blob/80faf06d/extras/rya.geo/src/test/java/mvm/rya/geo/WithinRangeTest.java
----------------------------------------------------------------------
diff --git a/extras/rya.geo/src/test/java/mvm/rya/geo/WithinRangeTest.java b/extras/rya.geo/src/test/java/mvm/rya/geo/WithinRangeTest.java
deleted file mode 100644
index 2e1c2a7..0000000
--- a/extras/rya.geo/src/test/java/mvm/rya/geo/WithinRangeTest.java
+++ /dev/null
@@ -1,43 +0,0 @@
-package mvm.rya.geo;
-
-import org.junit.Assert;
-import org.junit.Test;
-import org.openrdf.model.Value;
-import org.openrdf.model.ValueFactory;
-import org.openrdf.model.impl.ValueFactoryImpl;
-import org.openrdf.query.algebra.evaluation.ValueExprEvaluationException;
-
-public class WithinRangeTest
-{
-	private static final double MI2KM = 1.60934;
-
-	private static final ValueFactory VF = ValueFactoryImpl.getInstance();
-	private static final Value TRUE = VF.createLiteral(true);
-	private static final Value FALSE = VF.createLiteral(false);
-
-	// Distance between Washington, DC and Atlanta is roughly 600 miles
-	private static final Value WASHINGTON_DC = VF.createLiteral("40.15999984741211,-80.25", RyaGeoSchema.GEOPOINT);
-	private static final Value ATLANTA = VF.createLiteral("33.75,-84.383", RyaGeoSchema.GEOPOINT);
-
-	private WithinRange fun = new WithinRange();
-
-	@Test
-	public void testWithinRange() throws ValueExprEvaluationException
-	{
-		double miles = 900;
-		Value distance = VF.createLiteral(miles * MI2KM);
-
-		Assert.assertEquals(TRUE, fun.evaluate(VF, ATLANTA, WASHINGTON_DC, distance));
-		Assert.assertEquals(TRUE, fun.evaluate(VF, WASHINGTON_DC, WASHINGTON_DC, distance));
-	}
-
-	@Test
-	public void testWithinRange_notWithinRange() throws ValueExprEvaluationException
-	{
-		double miles = 200;
-		Value distance = VF.createLiteral(miles * MI2KM);
-
-		Assert.assertEquals(FALSE, fun.evaluate(VF, ATLANTA, WASHINGTON_DC, distance));
-		Assert.assertEquals(TRUE, fun.evaluate(VF, WASHINGTON_DC, WASHINGTON_DC, distance));
-	}
-}

http://git-wip-us.apache.org/repos/asf/incubator-rya/blob/80faf06d/extras/rya.manual/pom.xml
----------------------------------------------------------------------
diff --git a/extras/rya.manual/pom.xml b/extras/rya.manual/pom.xml
index f79359a..75c106a 100644
--- a/extras/rya.manual/pom.xml
+++ b/extras/rya.manual/pom.xml
@@ -1,17 +1,34 @@
 <?xml version="1.0" encoding="UTF-8"?>
-<project xmlns="http://maven.apache.org/POM/4.0.0" xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance" xsi:schemaLocation="http://maven.apache.org/POM/4.0.0 http://maven.apache.org/maven-v4_0_0.xsd">
 
-    <modelVersion>4.0.0</modelVersion>
+<!--
+Licensed to the Apache Software Foundation (ASF) under one
+or more contributor license agreements.  See the NOTICE file
+distributed with this work for additional information
+regarding copyright ownership.  The ASF licenses this file
+to you under the Apache License, Version 2.0 (the
+"License"); you may not use this file except in compliance
+with the License.  You may obtain a copy of the License at
+
+  http://www.apache.org/licenses/LICENSE-2.0
 
+Unless required by applicable law or agreed to in writing,
+software distributed under the License is distributed on an
+"AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+KIND, either express or implied.  See the License for the
+specific language governing permissions and limitations
+under the License.
+-->
+
+<project xmlns="http://maven.apache.org/POM/4.0.0" xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance" xsi:schemaLocation="http://maven.apache.org/POM/4.0.0 http://maven.apache.org/maven-v4_0_0.xsd">
+    <modelVersion>4.0.0</modelVersion>
     <parent>
-        <groupId>mvm.rya</groupId>
+        <groupId>org.apache.rya</groupId>
         <artifactId>rya.extras</artifactId>
         <version>3.2.10-SNAPSHOT</version>
     </parent>
 
     <artifactId>rya.manual</artifactId>
-    <name>RYA Manual</name>
-    <packaging>jar</packaging>
+    <name>Apache Rya Manual</name>
 
     <build>
         <plugins>
@@ -28,7 +45,7 @@
                 <configuration>
                     <inputEncoding>UTF-8</inputEncoding>
                     <outputEncoding>UTF-8</outputEncoding>
-                </configuration> 
+                </configuration>
             </plugin>
         </plugins>
     </build>

http://git-wip-us.apache.org/repos/asf/incubator-rya/blob/80faf06d/extras/rya.manual/src/site/markdown/_index.md
----------------------------------------------------------------------
diff --git a/extras/rya.manual/src/site/markdown/_index.md b/extras/rya.manual/src/site/markdown/_index.md
index 184b94f..bf030a3 100644
--- a/extras/rya.manual/src/site/markdown/_index.md
+++ b/extras/rya.manual/src/site/markdown/_index.md
@@ -1,4 +1,25 @@
 
+<!--
+
+[comment]: # Licensed to the Apache Software Foundation (ASF) under one
+[comment]: # or more contributor license agreements.  See the NOTICE file
+[comment]: # distributed with this work for additional information
+[comment]: # regarding copyright ownership.  The ASF licenses this file
+[comment]: # to you under the Apache License, Version 2.0 (the
+[comment]: # "License"); you may not use this file except in compliance
+[comment]: # with the License.  You may obtain a copy of the License at
+[comment]: # 
+[comment]: #   http://www.apache.org/licenses/LICENSE-2.0
+[comment]: # 
+[comment]: # Unless required by applicable law or agreed to in writing,
+[comment]: # software distributed under the License is distributed on an
+[comment]: # "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+[comment]: # KIND, either express or implied.  See the License for the
+[comment]: # specific language governing permissions and limitations
+[comment]: # under the License.
+
+-->
+
 # Rya
 - [Overview](overview.md)
 - [Quick Start](quickstart.md)

http://git-wip-us.apache.org/repos/asf/incubator-rya/blob/80faf06d/extras/rya.manual/src/site/markdown/alx.md
----------------------------------------------------------------------
diff --git a/extras/rya.manual/src/site/markdown/alx.md b/extras/rya.manual/src/site/markdown/alx.md
index 78a4c8e..2d0eae7 100644
--- a/extras/rya.manual/src/site/markdown/alx.md
+++ b/extras/rya.manual/src/site/markdown/alx.md
@@ -1,3 +1,24 @@
+
+<!--
+
+[comment]: # Licensed to the Apache Software Foundation (ASF) under one
+[comment]: # or more contributor license agreements.  See the NOTICE file
+[comment]: # distributed with this work for additional information
+[comment]: # regarding copyright ownership.  The ASF licenses this file
+[comment]: # to you under the Apache License, Version 2.0 (the
+[comment]: # "License"); you may not use this file except in compliance
+[comment]: # with the License.  You may obtain a copy of the License at
+[comment]: # 
+[comment]: #   http://www.apache.org/licenses/LICENSE-2.0
+[comment]: # 
+[comment]: # Unless required by applicable law or agreed to in writing,
+[comment]: # software distributed under the License is distributed on an
+[comment]: # "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+[comment]: # KIND, either express or implied.  See the License for the
+[comment]: # specific language governing permissions and limitations
+[comment]: # under the License.
+
+-->
 # Alx Rya Integration
 
 Alx is a modular framework for developing applications. Rya has mechanisms to integrate directly into Alx to provide other modules access to queries.

http://git-wip-us.apache.org/repos/asf/incubator-rya/blob/80faf06d/extras/rya.manual/src/site/markdown/build-source.md
----------------------------------------------------------------------
diff --git a/extras/rya.manual/src/site/markdown/build-source.md b/extras/rya.manual/src/site/markdown/build-source.md
index e811622..07f0cb5 100644
--- a/extras/rya.manual/src/site/markdown/build-source.md
+++ b/extras/rya.manual/src/site/markdown/build-source.md
@@ -1,3 +1,24 @@
+
+<!--
+
+[comment]: # Licensed to the Apache Software Foundation (ASF) under one
+[comment]: # or more contributor license agreements.  See the NOTICE file
+[comment]: # distributed with this work for additional information
+[comment]: # regarding copyright ownership.  The ASF licenses this file
+[comment]: # to you under the Apache License, Version 2.0 (the
+[comment]: # "License"); you may not use this file except in compliance
+[comment]: # with the License.  You may obtain a copy of the License at
+[comment]: # 
+[comment]: #   http://www.apache.org/licenses/LICENSE-2.0
+[comment]: # 
+[comment]: # Unless required by applicable law or agreed to in writing,
+[comment]: # software distributed under the License is distributed on an
+[comment]: # "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+[comment]: # KIND, either express or implied.  See the License for the
+[comment]: # specific language governing permissions and limitations
+[comment]: # under the License.
+
+-->
 # Building from Source
 
 ## Prerequisites

http://git-wip-us.apache.org/repos/asf/incubator-rya/blob/80faf06d/extras/rya.manual/src/site/markdown/eval.md
----------------------------------------------------------------------
diff --git a/extras/rya.manual/src/site/markdown/eval.md b/extras/rya.manual/src/site/markdown/eval.md
index 8a40389..fc4095b 100644
--- a/extras/rya.manual/src/site/markdown/eval.md
+++ b/extras/rya.manual/src/site/markdown/eval.md
@@ -1,3 +1,24 @@
+
+<!--
+
+[comment]: # Licensed to the Apache Software Foundation (ASF) under one
+[comment]: # or more contributor license agreements.  See the NOTICE file
+[comment]: # distributed with this work for additional information
+[comment]: # regarding copyright ownership.  The ASF licenses this file
+[comment]: # to you under the Apache License, Version 2.0 (the
+[comment]: # "License"); you may not use this file except in compliance
+[comment]: # with the License.  You may obtain a copy of the License at
+[comment]: # 
+[comment]: #   http://www.apache.org/licenses/LICENSE-2.0
+[comment]: # 
+[comment]: # Unless required by applicable law or agreed to in writing,
+[comment]: # software distributed under the License is distributed on an
+[comment]: # "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+[comment]: # KIND, either express or implied.  See the License for the
+[comment]: # specific language governing permissions and limitations
+[comment]: # under the License.
+
+-->
 # Prospects Table
 
 The Prospects Table provides statistics on the number of subject/predicate/object data found in the triple store. It is currently a

http://git-wip-us.apache.org/repos/asf/incubator-rya/blob/80faf06d/extras/rya.manual/src/site/markdown/index.md
----------------------------------------------------------------------
diff --git a/extras/rya.manual/src/site/markdown/index.md b/extras/rya.manual/src/site/markdown/index.md
index aa49e3b..0748284 100644
--- a/extras/rya.manual/src/site/markdown/index.md
+++ b/extras/rya.manual/src/site/markdown/index.md
@@ -1,3 +1,24 @@
+
+<!--
+
+[comment]: # Licensed to the Apache Software Foundation (ASF) under one
+[comment]: # or more contributor license agreements.  See the NOTICE file
+[comment]: # distributed with this work for additional information
+[comment]: # regarding copyright ownership.  The ASF licenses this file
+[comment]: # to you under the Apache License, Version 2.0 (the
+[comment]: # "License"); you may not use this file except in compliance
+[comment]: # with the License.  You may obtain a copy of the License at
+[comment]: # 
+[comment]: #   http://www.apache.org/licenses/LICENSE-2.0
+[comment]: # 
+[comment]: # Unless required by applicable law or agreed to in writing,
+[comment]: # software distributed under the License is distributed on an
+[comment]: # "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+[comment]: # KIND, either express or implied.  See the License for the
+[comment]: # specific language governing permissions and limitations
+[comment]: # under the License.
+
+-->
 # Rya
 
 This project contains documentation about Rya, a scalable RDF triple store on top of Accumulo.

http://git-wip-us.apache.org/repos/asf/incubator-rya/blob/80faf06d/extras/rya.manual/src/site/markdown/infer.md
----------------------------------------------------------------------
diff --git a/extras/rya.manual/src/site/markdown/infer.md b/extras/rya.manual/src/site/markdown/infer.md
index ee769c5..35b6f14 100644
--- a/extras/rya.manual/src/site/markdown/infer.md
+++ b/extras/rya.manual/src/site/markdown/infer.md
@@ -1,3 +1,24 @@
+
+<!--
+
+[comment]: # Licensed to the Apache Software Foundation (ASF) under one
+[comment]: # or more contributor license agreements.  See the NOTICE file
+[comment]: # distributed with this work for additional information
+[comment]: # regarding copyright ownership.  The ASF licenses this file
+[comment]: # to you under the Apache License, Version 2.0 (the
+[comment]: # "License"); you may not use this file except in compliance
+[comment]: # with the License.  You may obtain a copy of the License at
+[comment]: # 
+[comment]: #   http://www.apache.org/licenses/LICENSE-2.0
+[comment]: # 
+[comment]: # Unless required by applicable law or agreed to in writing,
+[comment]: # software distributed under the License is distributed on an
+[comment]: # "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+[comment]: # KIND, either express or implied.  See the License for the
+[comment]: # specific language governing permissions and limitations
+[comment]: # under the License.
+
+-->
 # Inferencing
 
 The current inferencing set supported includes:

http://git-wip-us.apache.org/repos/asf/incubator-rya/blob/80faf06d/extras/rya.manual/src/site/markdown/loadPrecomputedJoin.md
----------------------------------------------------------------------
diff --git a/extras/rya.manual/src/site/markdown/loadPrecomputedJoin.md b/extras/rya.manual/src/site/markdown/loadPrecomputedJoin.md
index 472b409..220cf03 100644
--- a/extras/rya.manual/src/site/markdown/loadPrecomputedJoin.md
+++ b/extras/rya.manual/src/site/markdown/loadPrecomputedJoin.md
@@ -1,3 +1,24 @@
+
+<!--
+
+[comment]: # Licensed to the Apache Software Foundation (ASF) under one
+[comment]: # or more contributor license agreements.  See the NOTICE file
+[comment]: # distributed with this work for additional information
+[comment]: # regarding copyright ownership.  The ASF licenses this file
+[comment]: # to you under the Apache License, Version 2.0 (the
+[comment]: # "License"); you may not use this file except in compliance
+[comment]: # with the License.  You may obtain a copy of the License at
+[comment]: # 
+[comment]: #   http://www.apache.org/licenses/LICENSE-2.0
+[comment]: # 
+[comment]: # Unless required by applicable law or agreed to in writing,
+[comment]: # software distributed under the License is distributed on an
+[comment]: # "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+[comment]: # KIND, either express or implied.  See the License for the
+[comment]: # specific language governing permissions and limitations
+[comment]: # under the License.
+
+-->
 # Load Pre-computed Join
 
 A tool has been created to load a pre-computed join.  This tool will generate an index to support a pre-computed join on a user provided SPARQL query, and then register that query within Rya.

http://git-wip-us.apache.org/repos/asf/incubator-rya/blob/80faf06d/extras/rya.manual/src/site/markdown/loaddata.md
----------------------------------------------------------------------
diff --git a/extras/rya.manual/src/site/markdown/loaddata.md b/extras/rya.manual/src/site/markdown/loaddata.md
index 3a66d6a..2c6bc00 100644
--- a/extras/rya.manual/src/site/markdown/loaddata.md
+++ b/extras/rya.manual/src/site/markdown/loaddata.md
@@ -1,3 +1,24 @@
+
+<!--
+
+[comment]: # Licensed to the Apache Software Foundation (ASF) under one
+[comment]: # or more contributor license agreements.  See the NOTICE file
+[comment]: # distributed with this work for additional information
+[comment]: # regarding copyright ownership.  The ASF licenses this file
+[comment]: # to you under the Apache License, Version 2.0 (the
+[comment]: # "License"); you may not use this file except in compliance
+[comment]: # with the License.  You may obtain a copy of the License at
+[comment]: # 
+[comment]: #   http://www.apache.org/licenses/LICENSE-2.0
+[comment]: # 
+[comment]: # Unless required by applicable law or agreed to in writing,
+[comment]: # software distributed under the License is distributed on an
+[comment]: # "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+[comment]: # KIND, either express or implied.  See the License for the
+[comment]: # specific language governing permissions and limitations
+[comment]: # under the License.
+
+-->
 # Load Data
 
 There are a few mechanisms to load data

http://git-wip-us.apache.org/repos/asf/incubator-rya/blob/80faf06d/extras/rya.manual/src/site/markdown/overview.md
----------------------------------------------------------------------
diff --git a/extras/rya.manual/src/site/markdown/overview.md b/extras/rya.manual/src/site/markdown/overview.md
index 546530f..068bd57 100644
--- a/extras/rya.manual/src/site/markdown/overview.md
+++ b/extras/rya.manual/src/site/markdown/overview.md
@@ -1,3 +1,24 @@
+
+<!--
+
+[comment]: # Licensed to the Apache Software Foundation (ASF) under one
+[comment]: # or more contributor license agreements.  See the NOTICE file
+[comment]: # distributed with this work for additional information
+[comment]: # regarding copyright ownership.  The ASF licenses this file
+[comment]: # to you under the Apache License, Version 2.0 (the
+[comment]: # "License"); you may not use this file except in compliance
+[comment]: # with the License.  You may obtain a copy of the License at
+[comment]: # 
+[comment]: #   http://www.apache.org/licenses/LICENSE-2.0
+[comment]: # 
+[comment]: # Unless required by applicable law or agreed to in writing,
+[comment]: # software distributed under the License is distributed on an
+[comment]: # "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+[comment]: # KIND, either express or implied.  See the License for the
+[comment]: # specific language governing permissions and limitations
+[comment]: # under the License.
+
+-->
 # Overview
 
 RYA is a scalable RDF Store that is built on top of a Columnar Index Store (such as Accumulo). It is implemented as an extension to OpenRdf to provide easy query mechanisms (SPARQL, SERQL, etc) and Rdf data storage (RDF/XML, NTriples, etc).

http://git-wip-us.apache.org/repos/asf/incubator-rya/blob/80faf06d/extras/rya.manual/src/site/markdown/querydata.md
----------------------------------------------------------------------
diff --git a/extras/rya.manual/src/site/markdown/querydata.md b/extras/rya.manual/src/site/markdown/querydata.md
index 70f3045..a7e2a6d 100644
--- a/extras/rya.manual/src/site/markdown/querydata.md
+++ b/extras/rya.manual/src/site/markdown/querydata.md
@@ -1,3 +1,24 @@
+
+<!--
+
+[comment]: # Licensed to the Apache Software Foundation (ASF) under one
+[comment]: # or more contributor license agreements.  See the NOTICE file
+[comment]: # distributed with this work for additional information
+[comment]: # regarding copyright ownership.  The ASF licenses this file
+[comment]: # to you under the Apache License, Version 2.0 (the
+[comment]: # "License"); you may not use this file except in compliance
+[comment]: # with the License.  You may obtain a copy of the License at
+[comment]: # 
+[comment]: #   http://www.apache.org/licenses/LICENSE-2.0
+[comment]: # 
+[comment]: # Unless required by applicable law or agreed to in writing,
+[comment]: # software distributed under the License is distributed on an
+[comment]: # "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+[comment]: # KIND, either express or implied.  See the License for the
+[comment]: # specific language governing permissions and limitations
+[comment]: # under the License.
+
+-->
 # Query Data
 
 There are a few mechanisms to query data

http://git-wip-us.apache.org/repos/asf/incubator-rya/blob/80faf06d/extras/rya.manual/src/site/markdown/quickstart.md
----------------------------------------------------------------------
diff --git a/extras/rya.manual/src/site/markdown/quickstart.md b/extras/rya.manual/src/site/markdown/quickstart.md
index 52bc111..4f0aa05 100644
--- a/extras/rya.manual/src/site/markdown/quickstart.md
+++ b/extras/rya.manual/src/site/markdown/quickstart.md
@@ -1,3 +1,24 @@
+
+<!--
+
+[comment]: # Licensed to the Apache Software Foundation (ASF) under one
+[comment]: # or more contributor license agreements.  See the NOTICE file
+[comment]: # distributed with this work for additional information
+[comment]: # regarding copyright ownership.  The ASF licenses this file
+[comment]: # to you under the Apache License, Version 2.0 (the
+[comment]: # "License"); you may not use this file except in compliance
+[comment]: # with the License.  You may obtain a copy of the License at
+[comment]: # 
+[comment]: #   http://www.apache.org/licenses/LICENSE-2.0
+[comment]: # 
+[comment]: # Unless required by applicable law or agreed to in writing,
+[comment]: # software distributed under the License is distributed on an
+[comment]: # "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+[comment]: # KIND, either express or implied.  See the License for the
+[comment]: # specific language governing permissions and limitations
+[comment]: # under the License.
+
+-->
 # Quick Start
 
 This tutorial will outline the steps needed to get started quickly with the Rya store using the web-based endpoint.

http://git-wip-us.apache.org/repos/asf/incubator-rya/blob/80faf06d/extras/rya.manual/src/site/markdown/sm-addauth.md
----------------------------------------------------------------------
diff --git a/extras/rya.manual/src/site/markdown/sm-addauth.md b/extras/rya.manual/src/site/markdown/sm-addauth.md
index aadef07..2f32422 100644
--- a/extras/rya.manual/src/site/markdown/sm-addauth.md
+++ b/extras/rya.manual/src/site/markdown/sm-addauth.md
@@ -1,3 +1,24 @@
+
+<!--
+
+[comment]: # Licensed to the Apache Software Foundation (ASF) under one
+[comment]: # or more contributor license agreements.  See the NOTICE file
+[comment]: # distributed with this work for additional information
+[comment]: # regarding copyright ownership.  The ASF licenses this file
+[comment]: # to you under the Apache License, Version 2.0 (the
+[comment]: # "License"); you may not use this file except in compliance
+[comment]: # with the License.  You may obtain a copy of the License at
+[comment]: # 
+[comment]: #   http://www.apache.org/licenses/LICENSE-2.0
+[comment]: # 
+[comment]: # Unless required by applicable law or agreed to in writing,
+[comment]: # software distributed under the License is distributed on an
+[comment]: # "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+[comment]: # KIND, either express or implied.  See the License for the
+[comment]: # specific language governing permissions and limitations
+[comment]: # under the License.
+
+-->
 # Add Authentication
 
 This tutorial will give a few examples on how to load and query data with authentication.

http://git-wip-us.apache.org/repos/asf/incubator-rya/blob/80faf06d/extras/rya.manual/src/site/markdown/sm-firststeps.md
----------------------------------------------------------------------
diff --git a/extras/rya.manual/src/site/markdown/sm-firststeps.md b/extras/rya.manual/src/site/markdown/sm-firststeps.md
index c08c035..34f995b 100644
--- a/extras/rya.manual/src/site/markdown/sm-firststeps.md
+++ b/extras/rya.manual/src/site/markdown/sm-firststeps.md
@@ -1,3 +1,24 @@
+
+<!--
+
+[comment]: # Licensed to the Apache Software Foundation (ASF) under one
+[comment]: # or more contributor license agreements.  See the NOTICE file
+[comment]: # distributed with this work for additional information
+[comment]: # regarding copyright ownership.  The ASF licenses this file
+[comment]: # to you under the Apache License, Version 2.0 (the
+[comment]: # "License"); you may not use this file except in compliance
+[comment]: # with the License.  You may obtain a copy of the License at
+[comment]: # 
+[comment]: #   http://www.apache.org/licenses/LICENSE-2.0
+[comment]: # 
+[comment]: # Unless required by applicable law or agreed to in writing,
+[comment]: # software distributed under the License is distributed on an
+[comment]: # "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+[comment]: # KIND, either express or implied.  See the License for the
+[comment]: # specific language governing permissions and limitations
+[comment]: # under the License.
+
+-->
 # Typical First Steps
 
 In this tutorial, I will give you a quick overview of some of the first steps I perform to get data loaded and ready for query.



http://git-wip-us.apache.org/repos/asf/incubator-rya/blob/80faf06d/common/rya.api/src/main/java/mvm/rya/api/domain/RyaTypePrefix.java
----------------------------------------------------------------------
diff --git a/common/rya.api/src/main/java/mvm/rya/api/domain/RyaTypePrefix.java b/common/rya.api/src/main/java/mvm/rya/api/domain/RyaTypePrefix.java
index 2db642f..d394417 100644
--- a/common/rya.api/src/main/java/mvm/rya/api/domain/RyaTypePrefix.java
+++ b/common/rya.api/src/main/java/mvm/rya/api/domain/RyaTypePrefix.java
@@ -1,25 +1,26 @@
 package mvm.rya.api.domain;
 
 /*
- * #%L
- * mvm.rya.rya.api
- * %%
- * Copyright (C) 2014 Rya
- * %%
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * You may obtain a copy of the License at
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
  * 
- *      http://www.apache.org/licenses/LICENSE-2.0
+ *   http://www.apache.org/licenses/LICENSE-2.0
  * 
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- * #L%
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied.  See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
  */
 
+
+
 import org.openrdf.model.URI;
 
 import static mvm.rya.api.RdfCloudTripleStoreConstants.DELIM;

http://git-wip-us.apache.org/repos/asf/incubator-rya/blob/80faf06d/common/rya.api/src/main/java/mvm/rya/api/domain/RyaTypeRange.java
----------------------------------------------------------------------
diff --git a/common/rya.api/src/main/java/mvm/rya/api/domain/RyaTypeRange.java b/common/rya.api/src/main/java/mvm/rya/api/domain/RyaTypeRange.java
index 51b5a6f..a744399 100644
--- a/common/rya.api/src/main/java/mvm/rya/api/domain/RyaTypeRange.java
+++ b/common/rya.api/src/main/java/mvm/rya/api/domain/RyaTypeRange.java
@@ -1,25 +1,26 @@
 package mvm.rya.api.domain;
 
 /*
- * #%L
- * mvm.rya.rya.api
- * %%
- * Copyright (C) 2014 Rya
- * %%
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * You may obtain a copy of the License at
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
  * 
- *      http://www.apache.org/licenses/LICENSE-2.0
+ *   http://www.apache.org/licenses/LICENSE-2.0
  * 
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- * #L%
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied.  See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
  */
 
+
+
 import org.openrdf.model.URI;
 
 /**

http://git-wip-us.apache.org/repos/asf/incubator-rya/blob/80faf06d/common/rya.api/src/main/java/mvm/rya/api/domain/RyaURI.java
----------------------------------------------------------------------
diff --git a/common/rya.api/src/main/java/mvm/rya/api/domain/RyaURI.java b/common/rya.api/src/main/java/mvm/rya/api/domain/RyaURI.java
index a8e5391..aa174c5 100644
--- a/common/rya.api/src/main/java/mvm/rya/api/domain/RyaURI.java
+++ b/common/rya.api/src/main/java/mvm/rya/api/domain/RyaURI.java
@@ -1,25 +1,26 @@
 package mvm.rya.api.domain;
 
 /*
- * #%L
- * mvm.rya.rya.api
- * %%
- * Copyright (C) 2014 Rya
- * %%
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * You may obtain a copy of the License at
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
  * 
- *      http://www.apache.org/licenses/LICENSE-2.0
+ *   http://www.apache.org/licenses/LICENSE-2.0
  * 
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- * #L%
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied.  See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
  */
 
+
+
 import org.openrdf.model.URI;
 import org.openrdf.model.util.URIUtil;
 import org.openrdf.model.vocabulary.XMLSchema;
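
These domain classes are the building blocks of every Rya triple: RyaURI for resources, RyaType for typed literals, and RyaStatement to tie them together. A short sketch of constructing them (the constructor shapes below are read off this module and should be treated as assumptions):

    import mvm.rya.api.domain.RyaStatement;
    import mvm.rya.api.domain.RyaType;
    import mvm.rya.api.domain.RyaURI;
    import org.openrdf.model.vocabulary.XMLSchema;

    public class DomainSketch {
        public static void main(String[] args) {
            RyaURI subject = new RyaURI("http://example.org/alice");
            RyaURI predicate = new RyaURI("http://example.org/age");
            RyaType object = new RyaType(XMLSchema.INT, "30"); // datatype + lexical form

            RyaStatement stmt = new RyaStatement(subject, predicate, object);
            System.out.println(stmt);
        }
    }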

http://git-wip-us.apache.org/repos/asf/incubator-rya/blob/80faf06d/common/rya.api/src/main/java/mvm/rya/api/domain/RyaURIPrefix.java
----------------------------------------------------------------------
diff --git a/common/rya.api/src/main/java/mvm/rya/api/domain/RyaURIPrefix.java b/common/rya.api/src/main/java/mvm/rya/api/domain/RyaURIPrefix.java
index d6cff02..f808607 100644
--- a/common/rya.api/src/main/java/mvm/rya/api/domain/RyaURIPrefix.java
+++ b/common/rya.api/src/main/java/mvm/rya/api/domain/RyaURIPrefix.java
@@ -1,25 +1,26 @@
 package mvm.rya.api.domain;
 
 /*
- * #%L
- * mvm.rya.rya.api
- * %%
- * Copyright (C) 2014 Rya
- * %%
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * You may obtain a copy of the License at
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
  * 
- *      http://www.apache.org/licenses/LICENSE-2.0
+ *   http://www.apache.org/licenses/LICENSE-2.0
  * 
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- * #L%
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied.  See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
  */
 
+
+
 import mvm.rya.api.RdfCloudTripleStoreConstants;
 
 /**

http://git-wip-us.apache.org/repos/asf/incubator-rya/blob/80faf06d/common/rya.api/src/main/java/mvm/rya/api/domain/RyaURIRange.java
----------------------------------------------------------------------
diff --git a/common/rya.api/src/main/java/mvm/rya/api/domain/RyaURIRange.java b/common/rya.api/src/main/java/mvm/rya/api/domain/RyaURIRange.java
index 3a00528..2c2b836 100644
--- a/common/rya.api/src/main/java/mvm/rya/api/domain/RyaURIRange.java
+++ b/common/rya.api/src/main/java/mvm/rya/api/domain/RyaURIRange.java
@@ -1,25 +1,26 @@
 package mvm.rya.api.domain;
 
 /*
- * #%L
- * mvm.rya.rya.api
- * %%
- * Copyright (C) 2014 Rya
- * %%
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * You may obtain a copy of the License at
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
  * 
- *      http://www.apache.org/licenses/LICENSE-2.0
+ *   http://www.apache.org/licenses/LICENSE-2.0
  * 
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- * #L%
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied.  See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
  */
 
+
+
 /**
  * Date: 7/17/12
  * Time: 9:59 AM

http://git-wip-us.apache.org/repos/asf/incubator-rya/blob/80faf06d/common/rya.api/src/main/java/mvm/rya/api/domain/utils/RyaStatementWritable.java
----------------------------------------------------------------------
diff --git a/common/rya.api/src/main/java/mvm/rya/api/domain/utils/RyaStatementWritable.java b/common/rya.api/src/main/java/mvm/rya/api/domain/utils/RyaStatementWritable.java
index 50ab395..13d82da 100644
--- a/common/rya.api/src/main/java/mvm/rya/api/domain/utils/RyaStatementWritable.java
+++ b/common/rya.api/src/main/java/mvm/rya/api/domain/utils/RyaStatementWritable.java
@@ -1,25 +1,26 @@
 package mvm.rya.api.domain.utils;
 
 /*
- * #%L
- * mvm.rya.rya.api
- * %%
- * Copyright (C) 2014 Rya
- * %%
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * You may obtain a copy of the License at
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
  * 
- *      http://www.apache.org/licenses/LICENSE-2.0
+ *   http://www.apache.org/licenses/LICENSE-2.0
  * 
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- * #L%
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied.  See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
  */
 
+
+
 import java.io.DataInput;
 import java.io.DataOutput;
 import java.io.IOException;
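
RyaStatementWritable wraps a RyaStatement in Hadoop's Writable contract so statements can be shuffled through MapReduce; the DataInput/DataOutput imports above are the two halves of that contract. A hedged round-trip sketch (the no-arg constructor and setter/getter names are assumptions based on the class's shape):

    import java.io.ByteArrayInputStream;
    import java.io.ByteArrayOutputStream;
    import java.io.DataInputStream;
    import java.io.DataOutputStream;

    import mvm.rya.api.domain.RyaStatement;
    import mvm.rya.api.domain.RyaURI;
    import mvm.rya.api.domain.utils.RyaStatementWritable;

    public class WritableSketch {
        public static void main(String[] args) throws Exception {
            RyaStatementWritable out = new RyaStatementWritable();
            out.setRyaStatement(new RyaStatement(
                    new RyaURI("urn:s"), new RyaURI("urn:p"), new RyaURI("urn:o")));

            // Serialize exactly as the MapReduce framework would.
            ByteArrayOutputStream bytes = new ByteArrayOutputStream();
            out.write(new DataOutputStream(bytes));

            // Deserialize into a fresh instance on the "other side".
            RyaStatementWritable in = new RyaStatementWritable();
            in.readFields(new DataInputStream(new ByteArrayInputStream(bytes.toByteArray())));
            System.out.println(in.getRyaStatement());
        }
    }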

http://git-wip-us.apache.org/repos/asf/incubator-rya/blob/80faf06d/common/rya.api/src/main/java/mvm/rya/api/layout/TableLayoutStrategy.java
----------------------------------------------------------------------
diff --git a/common/rya.api/src/main/java/mvm/rya/api/layout/TableLayoutStrategy.java b/common/rya.api/src/main/java/mvm/rya/api/layout/TableLayoutStrategy.java
index 5e5007d..61732d3 100644
--- a/common/rya.api/src/main/java/mvm/rya/api/layout/TableLayoutStrategy.java
+++ b/common/rya.api/src/main/java/mvm/rya/api/layout/TableLayoutStrategy.java
@@ -1,25 +1,26 @@
 package mvm.rya.api.layout;
 
 /*
- * #%L
- * mvm.rya.rya.api
- * %%
- * Copyright (C) 2014 Rya
- * %%
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * You may obtain a copy of the License at
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
  * 
- *      http://www.apache.org/licenses/LICENSE-2.0
+ *   http://www.apache.org/licenses/LICENSE-2.0
  * 
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- * #L%
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied.  See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
  */
 
+
+
 /**
  * Created by IntelliJ IDEA.
  * Date: 4/25/12

http://git-wip-us.apache.org/repos/asf/incubator-rya/blob/80faf06d/common/rya.api/src/main/java/mvm/rya/api/layout/TablePrefixLayoutStrategy.java
----------------------------------------------------------------------
diff --git a/common/rya.api/src/main/java/mvm/rya/api/layout/TablePrefixLayoutStrategy.java b/common/rya.api/src/main/java/mvm/rya/api/layout/TablePrefixLayoutStrategy.java
index 63e6875..0e995ab 100644
--- a/common/rya.api/src/main/java/mvm/rya/api/layout/TablePrefixLayoutStrategy.java
+++ b/common/rya.api/src/main/java/mvm/rya/api/layout/TablePrefixLayoutStrategy.java
@@ -1,25 +1,26 @@
 package mvm.rya.api.layout;
 
 /*
- * #%L
- * mvm.rya.rya.api
- * %%
- * Copyright (C) 2014 Rya
- * %%
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * You may obtain a copy of the License at
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
  * 
- *      http://www.apache.org/licenses/LICENSE-2.0
+ *   http://www.apache.org/licenses/LICENSE-2.0
  * 
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- * #L%
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied.  See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
  */
 
+
+
 import mvm.rya.api.RdfCloudTripleStoreConstants;
 
 /**

http://git-wip-us.apache.org/repos/asf/incubator-rya/blob/80faf06d/common/rya.api/src/main/java/mvm/rya/api/persist/RdfDAOException.java
----------------------------------------------------------------------
diff --git a/common/rya.api/src/main/java/mvm/rya/api/persist/RdfDAOException.java b/common/rya.api/src/main/java/mvm/rya/api/persist/RdfDAOException.java
index c3fe211..54444d4 100644
--- a/common/rya.api/src/main/java/mvm/rya/api/persist/RdfDAOException.java
+++ b/common/rya.api/src/main/java/mvm/rya/api/persist/RdfDAOException.java
@@ -1,25 +1,26 @@
 package mvm.rya.api.persist;
 
 /*
- * #%L
- * mvm.rya.rya.api
- * %%
- * Copyright (C) 2014 Rya
- * %%
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * You may obtain a copy of the License at
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
  * 
- *      http://www.apache.org/licenses/LICENSE-2.0
+ *   http://www.apache.org/licenses/LICENSE-2.0
  * 
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- * #L%
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied.  See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
  */
 
+
+
 /**
  * Class RdfDAOException
  * Date: Feb 28, 2012

http://git-wip-us.apache.org/repos/asf/incubator-rya/blob/80faf06d/common/rya.api/src/main/java/mvm/rya/api/persist/RdfEvalStatsDAO.java
----------------------------------------------------------------------
diff --git a/common/rya.api/src/main/java/mvm/rya/api/persist/RdfEvalStatsDAO.java b/common/rya.api/src/main/java/mvm/rya/api/persist/RdfEvalStatsDAO.java
index 83bd12e..020464b 100644
--- a/common/rya.api/src/main/java/mvm/rya/api/persist/RdfEvalStatsDAO.java
+++ b/common/rya.api/src/main/java/mvm/rya/api/persist/RdfEvalStatsDAO.java
@@ -1,25 +1,26 @@
 package mvm.rya.api.persist;
 
 /*
- * #%L
- * mvm.rya.rya.api
- * %%
- * Copyright (C) 2014 Rya
- * %%
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * You may obtain a copy of the License at
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
  * 
- *      http://www.apache.org/licenses/LICENSE-2.0
+ *   http://www.apache.org/licenses/LICENSE-2.0
  * 
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- * #L%
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied.  See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
  */
 
+
+
 import java.util.List;
 
 import mvm.rya.api.RdfCloudTripleStoreConfiguration;

http://git-wip-us.apache.org/repos/asf/incubator-rya/blob/80faf06d/common/rya.api/src/main/java/mvm/rya/api/persist/RyaConfigured.java
----------------------------------------------------------------------
diff --git a/common/rya.api/src/main/java/mvm/rya/api/persist/RyaConfigured.java b/common/rya.api/src/main/java/mvm/rya/api/persist/RyaConfigured.java
index e9c7314..00c246e 100644
--- a/common/rya.api/src/main/java/mvm/rya/api/persist/RyaConfigured.java
+++ b/common/rya.api/src/main/java/mvm/rya/api/persist/RyaConfigured.java
@@ -1,25 +1,26 @@
 package mvm.rya.api.persist;
 
 /*
- * #%L
- * mvm.rya.rya.api
- * %%
- * Copyright (C) 2014 Rya
- * %%
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * You may obtain a copy of the License at
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
  * 
- *      http://www.apache.org/licenses/LICENSE-2.0
+ *   http://www.apache.org/licenses/LICENSE-2.0
  * 
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- * #L%
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied.  See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
  */
 
+
+
 import mvm.rya.api.RdfCloudTripleStoreConfiguration;
 
 /**

http://git-wip-us.apache.org/repos/asf/incubator-rya/blob/80faf06d/common/rya.api/src/main/java/mvm/rya/api/persist/RyaDAO.java
----------------------------------------------------------------------
diff --git a/common/rya.api/src/main/java/mvm/rya/api/persist/RyaDAO.java b/common/rya.api/src/main/java/mvm/rya/api/persist/RyaDAO.java
index 055b188..e326f7d 100644
--- a/common/rya.api/src/main/java/mvm/rya/api/persist/RyaDAO.java
+++ b/common/rya.api/src/main/java/mvm/rya/api/persist/RyaDAO.java
@@ -1,25 +1,26 @@
 package mvm.rya.api.persist;
 
 /*
- * #%L
- * mvm.rya.rya.api
- * %%
- * Copyright (C) 2014 Rya
- * %%
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * You may obtain a copy of the License at
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
  * 
- *      http://www.apache.org/licenses/LICENSE-2.0
+ *   http://www.apache.org/licenses/LICENSE-2.0
  * 
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- * #L%
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied.  See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
  */
 
+
+
 import java.util.Iterator;
 
 import mvm.rya.api.RdfCloudTripleStoreConfiguration;
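
RyaDAO is the persistence seam: an implementation owns the store connection, exposes add/delete for statements, and hands out a query engine. A hedged sketch of driving the Accumulo implementation, following the project's examples of the period (class and method names are assumptions here; connector setup is elided):

    import org.apache.accumulo.core.client.Connector;

    import mvm.rya.accumulo.AccumuloRdfConfiguration;
    import mvm.rya.accumulo.AccumuloRyaDAO;
    import mvm.rya.api.domain.RyaStatement;
    import mvm.rya.api.domain.RyaURI;

    public class DaoSketch {
        public static void persistOne(Connector connector) throws Exception {
            AccumuloRyaDAO dao = new AccumuloRyaDAO();
            dao.setConnector(connector);
            AccumuloRdfConfiguration conf = new AccumuloRdfConfiguration();
            conf.setTablePrefix("rya_");
            dao.setConf(conf);
            dao.init();   // lifecycle: init before use, destroy when done
            try {
                dao.add(new RyaStatement(
                        new RyaURI("urn:s"), new RyaURI("urn:p"), new RyaURI("urn:o")));
            } finally {
                dao.destroy();
            }
        }
    }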

http://git-wip-us.apache.org/repos/asf/incubator-rya/blob/80faf06d/common/rya.api/src/main/java/mvm/rya/api/persist/RyaDAOException.java
----------------------------------------------------------------------
diff --git a/common/rya.api/src/main/java/mvm/rya/api/persist/RyaDAOException.java b/common/rya.api/src/main/java/mvm/rya/api/persist/RyaDAOException.java
index 977a625..2322119 100644
--- a/common/rya.api/src/main/java/mvm/rya/api/persist/RyaDAOException.java
+++ b/common/rya.api/src/main/java/mvm/rya/api/persist/RyaDAOException.java
@@ -1,25 +1,26 @@
 package mvm.rya.api.persist;
 
 /*
- * #%L
- * mvm.rya.rya.api
- * %%
- * Copyright (C) 2014 Rya
- * %%
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * You may obtain a copy of the License at
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
  * 
- *      http://www.apache.org/licenses/LICENSE-2.0
+ *   http://www.apache.org/licenses/LICENSE-2.0
  * 
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- * #L%
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied.  See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
  */
 
+
+
 /**
  * Date: 7/17/12
  * Time: 8:20 AM

http://git-wip-us.apache.org/repos/asf/incubator-rya/blob/80faf06d/common/rya.api/src/main/java/mvm/rya/api/persist/RyaNamespaceManager.java
----------------------------------------------------------------------
diff --git a/common/rya.api/src/main/java/mvm/rya/api/persist/RyaNamespaceManager.java b/common/rya.api/src/main/java/mvm/rya/api/persist/RyaNamespaceManager.java
index 284449d..77cd4bd 100644
--- a/common/rya.api/src/main/java/mvm/rya/api/persist/RyaNamespaceManager.java
+++ b/common/rya.api/src/main/java/mvm/rya/api/persist/RyaNamespaceManager.java
@@ -1,25 +1,26 @@
 package mvm.rya.api.persist;
 
 /*
- * #%L
- * mvm.rya.rya.api
- * %%
- * Copyright (C) 2014 Rya
- * %%
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * You may obtain a copy of the License at
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
  * 
- *      http://www.apache.org/licenses/LICENSE-2.0
+ *   http://www.apache.org/licenses/LICENSE-2.0
  * 
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- * #L%
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied.  See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
  */
 
+
+
 import info.aduna.iteration.CloseableIteration;
 import mvm.rya.api.RdfCloudTripleStoreConfiguration;
 import org.openrdf.model.Namespace;

http://git-wip-us.apache.org/repos/asf/incubator-rya/blob/80faf06d/common/rya.api/src/main/java/mvm/rya/api/persist/index/RyaSecondaryIndexer.java
----------------------------------------------------------------------
diff --git a/common/rya.api/src/main/java/mvm/rya/api/persist/index/RyaSecondaryIndexer.java b/common/rya.api/src/main/java/mvm/rya/api/persist/index/RyaSecondaryIndexer.java
index d30790e..8c827c1 100644
--- a/common/rya.api/src/main/java/mvm/rya/api/persist/index/RyaSecondaryIndexer.java
+++ b/common/rya.api/src/main/java/mvm/rya/api/persist/index/RyaSecondaryIndexer.java
@@ -1,5 +1,25 @@
 package mvm.rya.api.persist.index;
 
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ * 
+ *   http://www.apache.org/licenses/LICENSE-2.0
+ * 
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied.  See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+
+
 import java.io.Closeable;
 import java.io.Flushable;
 import java.io.IOException;

http://git-wip-us.apache.org/repos/asf/incubator-rya/blob/80faf06d/common/rya.api/src/main/java/mvm/rya/api/persist/joinselect/SelectivityEvalDAO.java
----------------------------------------------------------------------
diff --git a/common/rya.api/src/main/java/mvm/rya/api/persist/joinselect/SelectivityEvalDAO.java b/common/rya.api/src/main/java/mvm/rya/api/persist/joinselect/SelectivityEvalDAO.java
index cc4a119..28f797b 100644
--- a/common/rya.api/src/main/java/mvm/rya/api/persist/joinselect/SelectivityEvalDAO.java
+++ b/common/rya.api/src/main/java/mvm/rya/api/persist/joinselect/SelectivityEvalDAO.java
@@ -1,25 +1,26 @@
 package mvm.rya.api.persist.joinselect;
 
 /*
- * #%L
- * mvm.rya.rya.api
- * %%
- * Copyright (C) 2014 - 2015 Rya
- * %%
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * You may obtain a copy of the License at
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
  * 
- *      http://www.apache.org/licenses/LICENSE-2.0
+ *   http://www.apache.org/licenses/LICENSE-2.0
  * 
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- * #L%
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied.  See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
  */
 
+
+
 import mvm.rya.api.RdfCloudTripleStoreConfiguration;
 import mvm.rya.api.persist.RdfEvalStatsDAO;
 import org.openrdf.query.algebra.StatementPattern;
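
RdfEvalStatsDAO and SelectivityEvalDAO supply cardinality and selectivity estimates that the query planner uses to order statement patterns, so the most selective pattern is evaluated first. A self-contained sketch of that ordering idea (the technique only, not these interfaces' actual signatures):

    import java.util.Comparator;
    import java.util.List;
    import java.util.function.ToDoubleFunction;

    public class SelectivityOrderSketch {
        /** Sorts patterns so the lowest estimated cardinality is joined first. */
        public static <P> void orderBySelectivity(List<P> patterns,
                                                  ToDoubleFunction<P> estimatedCardinality) {
            patterns.sort(Comparator.comparingDouble(estimatedCardinality));
        }
    }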

http://git-wip-us.apache.org/repos/asf/incubator-rya/blob/80faf06d/common/rya.api/src/main/java/mvm/rya/api/persist/query/BatchRyaQuery.java
----------------------------------------------------------------------
diff --git a/common/rya.api/src/main/java/mvm/rya/api/persist/query/BatchRyaQuery.java b/common/rya.api/src/main/java/mvm/rya/api/persist/query/BatchRyaQuery.java
index 4581504..113ce51 100644
--- a/common/rya.api/src/main/java/mvm/rya/api/persist/query/BatchRyaQuery.java
+++ b/common/rya.api/src/main/java/mvm/rya/api/persist/query/BatchRyaQuery.java
@@ -1,25 +1,26 @@
 package mvm.rya.api.persist.query;
 
 /*
- * #%L
- * mvm.rya.rya.api
- * %%
- * Copyright (C) 2014 Rya
- * %%
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * You may obtain a copy of the License at
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
  * 
- *      http://www.apache.org/licenses/LICENSE-2.0
+ *   http://www.apache.org/licenses/LICENSE-2.0
  * 
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- * #L%
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied.  See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
  */
 
+
+
 import com.google.common.base.Preconditions;
 import com.google.common.collect.Iterables;
 import mvm.rya.api.RdfCloudTripleStoreConfiguration;

http://git-wip-us.apache.org/repos/asf/incubator-rya/blob/80faf06d/common/rya.api/src/main/java/mvm/rya/api/persist/query/RyaQuery.java
----------------------------------------------------------------------
diff --git a/common/rya.api/src/main/java/mvm/rya/api/persist/query/RyaQuery.java b/common/rya.api/src/main/java/mvm/rya/api/persist/query/RyaQuery.java
index 392422c..5235989 100644
--- a/common/rya.api/src/main/java/mvm/rya/api/persist/query/RyaQuery.java
+++ b/common/rya.api/src/main/java/mvm/rya/api/persist/query/RyaQuery.java
@@ -1,25 +1,26 @@
 package mvm.rya.api.persist.query;
 
 /*
- * #%L
- * mvm.rya.rya.api
- * %%
- * Copyright (C) 2014 Rya
- * %%
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * You may obtain a copy of the License at
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
  * 
- *      http://www.apache.org/licenses/LICENSE-2.0
+ *   http://www.apache.org/licenses/LICENSE-2.0
  * 
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- * #L%
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied.  See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
  */
 
+
+
 import com.google.common.base.Preconditions;
 import mvm.rya.api.domain.RyaStatement;
 

http://git-wip-us.apache.org/repos/asf/incubator-rya/blob/80faf06d/common/rya.api/src/main/java/mvm/rya/api/persist/query/RyaQueryEngine.java
----------------------------------------------------------------------
diff --git a/common/rya.api/src/main/java/mvm/rya/api/persist/query/RyaQueryEngine.java b/common/rya.api/src/main/java/mvm/rya/api/persist/query/RyaQueryEngine.java
index fb90909..7454eea 100644
--- a/common/rya.api/src/main/java/mvm/rya/api/persist/query/RyaQueryEngine.java
+++ b/common/rya.api/src/main/java/mvm/rya/api/persist/query/RyaQueryEngine.java
@@ -1,25 +1,26 @@
 package mvm.rya.api.persist.query;
 
 /*
- * #%L
- * mvm.rya.rya.api
- * %%
- * Copyright (C) 2014 Rya
- * %%
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * You may obtain a copy of the License at
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
  * 
- *      http://www.apache.org/licenses/LICENSE-2.0
+ *   http://www.apache.org/licenses/LICENSE-2.0
  * 
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- * #L%
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied.  See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
  */
 
+
+
 import info.aduna.iteration.CloseableIteration;
 
 import java.util.Collection;
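
RyaQuery pairs one statement pattern with the execution knobs inherited from RyaQueryOptions (auths, TTL, batch sizes), and RyaQueryEngine executes it against the store. A hedged sketch, assuming the builder and query-method shapes in this module (treat both as assumptions):

    import mvm.rya.api.domain.RyaStatement;
    import mvm.rya.api.domain.RyaURI;
    import mvm.rya.api.persist.query.RyaQuery;
    import mvm.rya.api.persist.query.RyaQueryEngine;

    public class QuerySketch {
        public static void findBySubject(RyaQueryEngine<?> engine, RyaURI subject)
                throws Exception {
            // Unbound positions stay null; the engine picks a suitable index.
            RyaQuery query = RyaQuery.builder(new RyaStatement(subject, null, null))
                    .setAuths(new String[]{"U"})
                    .build();
            for (RyaStatement stmt : engine.query(query)) {
                System.out.println(stmt);
            }
        }
    }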

http://git-wip-us.apache.org/repos/asf/incubator-rya/blob/80faf06d/common/rya.api/src/main/java/mvm/rya/api/persist/query/RyaQueryOptions.java
----------------------------------------------------------------------
diff --git a/common/rya.api/src/main/java/mvm/rya/api/persist/query/RyaQueryOptions.java b/common/rya.api/src/main/java/mvm/rya/api/persist/query/RyaQueryOptions.java
index 5ec9732..c77796e 100644
--- a/common/rya.api/src/main/java/mvm/rya/api/persist/query/RyaQueryOptions.java
+++ b/common/rya.api/src/main/java/mvm/rya/api/persist/query/RyaQueryOptions.java
@@ -1,25 +1,26 @@
 package mvm.rya.api.persist.query;
 
 /*
- * #%L
- * mvm.rya.rya.api
- * %%
- * Copyright (C) 2014 Rya
- * %%
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * You may obtain a copy of the License at
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
  * 
- *      http://www.apache.org/licenses/LICENSE-2.0
+ *   http://www.apache.org/licenses/LICENSE-2.0
  * 
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- * #L%
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied.  See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
  */
 
+
+
 import mvm.rya.api.RdfCloudTripleStoreConfiguration;
 import org.slf4j.Logger;
 import org.slf4j.LoggerFactory;

http://git-wip-us.apache.org/repos/asf/incubator-rya/blob/80faf06d/common/rya.api/src/main/java/mvm/rya/api/persist/query/join/HashJoin.java
----------------------------------------------------------------------
diff --git a/common/rya.api/src/main/java/mvm/rya/api/persist/query/join/HashJoin.java b/common/rya.api/src/main/java/mvm/rya/api/persist/query/join/HashJoin.java
index 49c1a15..286ea7a 100644
--- a/common/rya.api/src/main/java/mvm/rya/api/persist/query/join/HashJoin.java
+++ b/common/rya.api/src/main/java/mvm/rya/api/persist/query/join/HashJoin.java
@@ -1,25 +1,26 @@
 package mvm.rya.api.persist.query.join;
 
 /*
- * #%L
- * mvm.rya.rya.api
- * %%
- * Copyright (C) 2014 Rya
- * %%
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * You may obtain a copy of the License at
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
  * 
- *      http://www.apache.org/licenses/LICENSE-2.0
+ *   http://www.apache.org/licenses/LICENSE-2.0
  * 
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- * #L%
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied.  See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
  */
 
+
+
 import info.aduna.iteration.CloseableIteration;
 import mvm.rya.api.RdfCloudTripleStoreConfiguration;
 import mvm.rya.api.RdfCloudTripleStoreUtils;

http://git-wip-us.apache.org/repos/asf/incubator-rya/blob/80faf06d/common/rya.api/src/main/java/mvm/rya/api/persist/query/join/IterativeJoin.java
----------------------------------------------------------------------
diff --git a/common/rya.api/src/main/java/mvm/rya/api/persist/query/join/IterativeJoin.java b/common/rya.api/src/main/java/mvm/rya/api/persist/query/join/IterativeJoin.java
index 8b1624b..3cb48a5 100644
--- a/common/rya.api/src/main/java/mvm/rya/api/persist/query/join/IterativeJoin.java
+++ b/common/rya.api/src/main/java/mvm/rya/api/persist/query/join/IterativeJoin.java
@@ -1,25 +1,26 @@
 package mvm.rya.api.persist.query.join;
 
 /*
- * #%L
- * mvm.rya.rya.api
- * %%
- * Copyright (C) 2014 Rya
- * %%
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * You may obtain a copy of the License at
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
  * 
- *      http://www.apache.org/licenses/LICENSE-2.0
+ *   http://www.apache.org/licenses/LICENSE-2.0
  * 
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- * #L%
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied.  See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
  */
 
+
+
 import com.google.common.base.Preconditions;
 import info.aduna.iteration.CloseableIteration;
 import info.aduna.iteration.ConvertingIteration;

http://git-wip-us.apache.org/repos/asf/incubator-rya/blob/80faf06d/common/rya.api/src/main/java/mvm/rya/api/persist/query/join/Join.java
----------------------------------------------------------------------
diff --git a/common/rya.api/src/main/java/mvm/rya/api/persist/query/join/Join.java b/common/rya.api/src/main/java/mvm/rya/api/persist/query/join/Join.java
index cbd6d45..775af53 100644
--- a/common/rya.api/src/main/java/mvm/rya/api/persist/query/join/Join.java
+++ b/common/rya.api/src/main/java/mvm/rya/api/persist/query/join/Join.java
@@ -1,25 +1,26 @@
 package mvm.rya.api.persist.query.join;
 
 /*
- * #%L
- * mvm.rya.rya.api
- * %%
- * Copyright (C) 2014 Rya
- * %%
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * You may obtain a copy of the License at
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
  * 
- *      http://www.apache.org/licenses/LICENSE-2.0
+ *   http://www.apache.org/licenses/LICENSE-2.0
  * 
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- * #L%
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied.  See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
  */
 
+
+
 import info.aduna.iteration.CloseableIteration;
 import mvm.rya.api.RdfCloudTripleStoreConfiguration;
 import mvm.rya.api.domain.RyaStatement;

http://git-wip-us.apache.org/repos/asf/incubator-rya/blob/80faf06d/common/rya.api/src/main/java/mvm/rya/api/persist/query/join/MergeJoin.java
----------------------------------------------------------------------
diff --git a/common/rya.api/src/main/java/mvm/rya/api/persist/query/join/MergeJoin.java b/common/rya.api/src/main/java/mvm/rya/api/persist/query/join/MergeJoin.java
index 42d8c9d..1dfcbf1 100644
--- a/common/rya.api/src/main/java/mvm/rya/api/persist/query/join/MergeJoin.java
+++ b/common/rya.api/src/main/java/mvm/rya/api/persist/query/join/MergeJoin.java
@@ -1,25 +1,26 @@
 package mvm.rya.api.persist.query.join;
 
 /*
- * #%L
- * mvm.rya.rya.api
- * %%
- * Copyright (C) 2014 Rya
- * %%
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * You may obtain a copy of the License at
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
  * 
- *      http://www.apache.org/licenses/LICENSE-2.0
+ *   http://www.apache.org/licenses/LICENSE-2.0
  * 
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- * #L%
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied.  See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
  */
 
+
+
 import com.google.common.base.Preconditions;
 import info.aduna.iteration.CloseableIteration;
 import info.aduna.iteration.EmptyIteration;
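
HashJoin, IterativeJoin, and MergeJoin are alternative implementations of the Join contract over statement iterations. Conceptually, a hash join materializes the keys of one side and streams the other side past it. A self-contained sketch of that idea (deliberately generic; not the Rya classes' actual signatures):

    import java.util.ArrayList;
    import java.util.HashSet;
    import java.util.Iterator;
    import java.util.List;
    import java.util.Set;
    import java.util.function.Function;

    public class HashJoinSketch {
        /** Returns the elements of 'probe' whose join key also occurs in 'build'. */
        public static <T, K> List<T> hashJoin(Iterator<T> build, Iterator<T> probe,
                                              Function<T, K> key) {
            Set<K> keys = new HashSet<>();
            while (build.hasNext()) {
                keys.add(key.apply(build.next()));         // build phase: hash one side
            }
            List<T> joined = new ArrayList<>();
            while (probe.hasNext()) {
                T candidate = probe.next();
                if (keys.contains(key.apply(candidate))) { // probe phase: stream the other
                    joined.add(candidate);
                }
            }
            return joined;
        }
    }

A merge join avoids the hash table by walking two sorted iterations in lockstep, which is why it can stream results without buffering either side.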

http://git-wip-us.apache.org/repos/asf/incubator-rya/blob/80faf06d/common/rya.api/src/main/java/mvm/rya/api/persist/utils/RyaDAOHelper.java
----------------------------------------------------------------------
diff --git a/common/rya.api/src/main/java/mvm/rya/api/persist/utils/RyaDAOHelper.java b/common/rya.api/src/main/java/mvm/rya/api/persist/utils/RyaDAOHelper.java
index 7f8abf0..81f42b4 100644
--- a/common/rya.api/src/main/java/mvm/rya/api/persist/utils/RyaDAOHelper.java
+++ b/common/rya.api/src/main/java/mvm/rya/api/persist/utils/RyaDAOHelper.java
@@ -1,25 +1,26 @@
 package mvm.rya.api.persist.utils;
 
 /*
- * #%L
- * mvm.rya.rya.api
- * %%
- * Copyright (C) 2014 Rya
- * %%
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * You may obtain a copy of the License at
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
  * 
- *      http://www.apache.org/licenses/LICENSE-2.0
+ *   http://www.apache.org/licenses/LICENSE-2.0
  * 
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- * #L%
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied.  See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
  */
 
+
+
 import info.aduna.iteration.CloseableIteration;
 import mvm.rya.api.RdfCloudTripleStoreConfiguration;
 import mvm.rya.api.RdfCloudTripleStoreUtils;

http://git-wip-us.apache.org/repos/asf/incubator-rya/blob/80faf06d/common/rya.api/src/main/java/mvm/rya/api/query/strategy/AbstractTriplePatternStrategy.java
----------------------------------------------------------------------
diff --git a/common/rya.api/src/main/java/mvm/rya/api/query/strategy/AbstractTriplePatternStrategy.java b/common/rya.api/src/main/java/mvm/rya/api/query/strategy/AbstractTriplePatternStrategy.java
index 0005480..5171feb 100644
--- a/common/rya.api/src/main/java/mvm/rya/api/query/strategy/AbstractTriplePatternStrategy.java
+++ b/common/rya.api/src/main/java/mvm/rya/api/query/strategy/AbstractTriplePatternStrategy.java
@@ -1,25 +1,26 @@
 package mvm.rya.api.query.strategy;
 
 /*
- * #%L
- * mvm.rya.rya.api
- * %%
- * Copyright (C) 2014 Rya
- * %%
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * You may obtain a copy of the License at
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
  * 
- *      http://www.apache.org/licenses/LICENSE-2.0
+ *   http://www.apache.org/licenses/LICENSE-2.0
  * 
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- * #L%
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied.  See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
  */
 
+
+
 import com.google.common.base.Preconditions;
 import mvm.rya.api.RdfCloudTripleStoreConstants;
 import mvm.rya.api.resolver.RyaContext;

http://git-wip-us.apache.org/repos/asf/incubator-rya/blob/80faf06d/common/rya.api/src/main/java/mvm/rya/api/query/strategy/ByteRange.java
----------------------------------------------------------------------
diff --git a/common/rya.api/src/main/java/mvm/rya/api/query/strategy/ByteRange.java b/common/rya.api/src/main/java/mvm/rya/api/query/strategy/ByteRange.java
index 1088ec7..6ebc722 100644
--- a/common/rya.api/src/main/java/mvm/rya/api/query/strategy/ByteRange.java
+++ b/common/rya.api/src/main/java/mvm/rya/api/query/strategy/ByteRange.java
@@ -1,25 +1,26 @@
 package mvm.rya.api.query.strategy;
 
 /*
- * #%L
- * mvm.rya.rya.api
- * %%
- * Copyright (C) 2014 Rya
- * %%
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * You may obtain a copy of the License at
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
  * 
- *      http://www.apache.org/licenses/LICENSE-2.0
+ *   http://www.apache.org/licenses/LICENSE-2.0
  * 
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- * #L%
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied.  See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
  */
 
+
+
 /**
  * Date: 1/10/13
  * Time: 12:47 PM

http://git-wip-us.apache.org/repos/asf/incubator-rya/blob/80faf06d/common/rya.api/src/main/java/mvm/rya/api/query/strategy/TriplePatternStrategy.java
----------------------------------------------------------------------
diff --git a/common/rya.api/src/main/java/mvm/rya/api/query/strategy/TriplePatternStrategy.java b/common/rya.api/src/main/java/mvm/rya/api/query/strategy/TriplePatternStrategy.java
index 2ea3d97..7b7eb39 100644
--- a/common/rya.api/src/main/java/mvm/rya/api/query/strategy/TriplePatternStrategy.java
+++ b/common/rya.api/src/main/java/mvm/rya/api/query/strategy/TriplePatternStrategy.java
@@ -1,25 +1,26 @@
 package mvm.rya.api.query.strategy;
 
 /*
- * #%L
- * mvm.rya.rya.api
- * %%
- * Copyright (C) 2014 Rya
- * %%
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * You may obtain a copy of the License at
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
  * 
- *      http://www.apache.org/licenses/LICENSE-2.0
+ *   http://www.apache.org/licenses/LICENSE-2.0
  * 
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- * #L%
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied.  See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
  */
 
+
+
 import mvm.rya.api.RdfCloudTripleStoreConfiguration;
 import mvm.rya.api.domain.RyaType;
 import mvm.rya.api.domain.RyaURI;

http://git-wip-us.apache.org/repos/asf/incubator-rya/blob/80faf06d/common/rya.api/src/main/java/mvm/rya/api/query/strategy/wholerow/HashedPoWholeRowTriplePatternStrategy.java
----------------------------------------------------------------------
diff --git a/common/rya.api/src/main/java/mvm/rya/api/query/strategy/wholerow/HashedPoWholeRowTriplePatternStrategy.java b/common/rya.api/src/main/java/mvm/rya/api/query/strategy/wholerow/HashedPoWholeRowTriplePatternStrategy.java
index 655d7da..04d81ce 100644
--- a/common/rya.api/src/main/java/mvm/rya/api/query/strategy/wholerow/HashedPoWholeRowTriplePatternStrategy.java
+++ b/common/rya.api/src/main/java/mvm/rya/api/query/strategy/wholerow/HashedPoWholeRowTriplePatternStrategy.java
@@ -1,25 +1,26 @@
 package mvm.rya.api.query.strategy.wholerow;
 
 /*
- * #%L
- * mvm.rya.rya.api
- * %%
- * Copyright (C) 2014 Rya
- * %%
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * You may obtain a copy of the License at
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
  * 
- *      http://www.apache.org/licenses/LICENSE-2.0
+ *   http://www.apache.org/licenses/LICENSE-2.0
  * 
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- * #L%
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied.  See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
  */
 
+
+
 import static mvm.rya.api.RdfCloudTripleStoreConstants.DELIM_BYTES;
 import static mvm.rya.api.RdfCloudTripleStoreConstants.LAST_BYTES;
 import static mvm.rya.api.RdfCloudTripleStoreConstants.TYPE_DELIM_BYTES;

http://git-wip-us.apache.org/repos/asf/incubator-rya/blob/80faf06d/common/rya.api/src/main/java/mvm/rya/api/query/strategy/wholerow/HashedSpoWholeRowTriplePatternStrategy.java
----------------------------------------------------------------------
diff --git a/common/rya.api/src/main/java/mvm/rya/api/query/strategy/wholerow/HashedSpoWholeRowTriplePatternStrategy.java b/common/rya.api/src/main/java/mvm/rya/api/query/strategy/wholerow/HashedSpoWholeRowTriplePatternStrategy.java
index aa1df79..b7204a9 100644
--- a/common/rya.api/src/main/java/mvm/rya/api/query/strategy/wholerow/HashedSpoWholeRowTriplePatternStrategy.java
+++ b/common/rya.api/src/main/java/mvm/rya/api/query/strategy/wholerow/HashedSpoWholeRowTriplePatternStrategy.java
@@ -1,25 +1,26 @@
 package mvm.rya.api.query.strategy.wholerow;
 
 /*
- * #%L
- * mvm.rya.rya.api
- * %%
- * Copyright (C) 2014 Rya
- * %%
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * You may obtain a copy of the License at
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
  * 
- *      http://www.apache.org/licenses/LICENSE-2.0
+ *   http://www.apache.org/licenses/LICENSE-2.0
  * 
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- * #L%
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied.  See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
  */
 
+
+
 import com.google.common.primitives.Bytes;
 
 import mvm.rya.api.RdfCloudTripleStoreConfiguration;

http://git-wip-us.apache.org/repos/asf/incubator-rya/blob/80faf06d/common/rya.api/src/main/java/mvm/rya/api/query/strategy/wholerow/OspWholeRowTriplePatternStrategy.java
----------------------------------------------------------------------
diff --git a/common/rya.api/src/main/java/mvm/rya/api/query/strategy/wholerow/OspWholeRowTriplePatternStrategy.java b/common/rya.api/src/main/java/mvm/rya/api/query/strategy/wholerow/OspWholeRowTriplePatternStrategy.java
index 5f58898..24f5852 100644
--- a/common/rya.api/src/main/java/mvm/rya/api/query/strategy/wholerow/OspWholeRowTriplePatternStrategy.java
+++ b/common/rya.api/src/main/java/mvm/rya/api/query/strategy/wholerow/OspWholeRowTriplePatternStrategy.java
@@ -1,25 +1,26 @@
 package mvm.rya.api.query.strategy.wholerow;
 
 /*
- * #%L
- * mvm.rya.rya.api
- * %%
- * Copyright (C) 2014 Rya
- * %%
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * You may obtain a copy of the License at
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
  * 
- *      http://www.apache.org/licenses/LICENSE-2.0
+ *   http://www.apache.org/licenses/LICENSE-2.0
  * 
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- * #L%
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied.  See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
  */
 
+
+
 import static mvm.rya.api.RdfCloudTripleStoreConstants.DELIM_BYTES;
 import static mvm.rya.api.RdfCloudTripleStoreConstants.LAST_BYTES;
 

http://git-wip-us.apache.org/repos/asf/incubator-rya/blob/80faf06d/common/rya.api/src/main/java/mvm/rya/api/query/strategy/wholerow/PoWholeRowTriplePatternStrategy.java
----------------------------------------------------------------------
diff --git a/common/rya.api/src/main/java/mvm/rya/api/query/strategy/wholerow/PoWholeRowTriplePatternStrategy.java b/common/rya.api/src/main/java/mvm/rya/api/query/strategy/wholerow/PoWholeRowTriplePatternStrategy.java
index 087c996..3f050e0 100644
--- a/common/rya.api/src/main/java/mvm/rya/api/query/strategy/wholerow/PoWholeRowTriplePatternStrategy.java
+++ b/common/rya.api/src/main/java/mvm/rya/api/query/strategy/wholerow/PoWholeRowTriplePatternStrategy.java
@@ -1,25 +1,26 @@
 package mvm.rya.api.query.strategy.wholerow;
 
 /*
- * #%L
- * mvm.rya.rya.api
- * %%
- * Copyright (C) 2014 Rya
- * %%
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * You may obtain a copy of the License at
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
  * 
- *      http://www.apache.org/licenses/LICENSE-2.0
+ *   http://www.apache.org/licenses/LICENSE-2.0
  * 
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- * #L%
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied.  See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
  */
 
+
+
 import com.google.common.primitives.Bytes;
 import mvm.rya.api.RdfCloudTripleStoreConfiguration;
 import mvm.rya.api.RdfCloudTripleStoreConstants;

http://git-wip-us.apache.org/repos/asf/incubator-rya/blob/80faf06d/common/rya.api/src/main/java/mvm/rya/api/query/strategy/wholerow/SpoWholeRowTriplePatternStrategy.java
----------------------------------------------------------------------
diff --git a/common/rya.api/src/main/java/mvm/rya/api/query/strategy/wholerow/SpoWholeRowTriplePatternStrategy.java b/common/rya.api/src/main/java/mvm/rya/api/query/strategy/wholerow/SpoWholeRowTriplePatternStrategy.java
index 6f41d5e..2b91a4b 100644
--- a/common/rya.api/src/main/java/mvm/rya/api/query/strategy/wholerow/SpoWholeRowTriplePatternStrategy.java
+++ b/common/rya.api/src/main/java/mvm/rya/api/query/strategy/wholerow/SpoWholeRowTriplePatternStrategy.java
@@ -1,25 +1,26 @@
 package mvm.rya.api.query.strategy.wholerow;
 
 /*
- * #%L
- * mvm.rya.rya.api
- * %%
- * Copyright (C) 2014 Rya
- * %%
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * You may obtain a copy of the License at
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
  * 
- *      http://www.apache.org/licenses/LICENSE-2.0
+ *   http://www.apache.org/licenses/LICENSE-2.0
  * 
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- * #L%
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied.  See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
  */
 
+
+
 import com.google.common.primitives.Bytes;
 import mvm.rya.api.RdfCloudTripleStoreConfiguration;
 import mvm.rya.api.RdfCloudTripleStoreUtils;

http://git-wip-us.apache.org/repos/asf/incubator-rya/blob/80faf06d/common/rya.api/src/main/java/mvm/rya/api/resolver/CustomRyaTypeResolverMapping.java
----------------------------------------------------------------------
diff --git a/common/rya.api/src/main/java/mvm/rya/api/resolver/CustomRyaTypeResolverMapping.java b/common/rya.api/src/main/java/mvm/rya/api/resolver/CustomRyaTypeResolverMapping.java
index 80d7cba..3a6d125 100644
--- a/common/rya.api/src/main/java/mvm/rya/api/resolver/CustomRyaTypeResolverMapping.java
+++ b/common/rya.api/src/main/java/mvm/rya/api/resolver/CustomRyaTypeResolverMapping.java
@@ -1,25 +1,26 @@
 package mvm.rya.api.resolver;
 
 /*
- * #%L
- * mvm.rya.rya.api
- * %%
- * Copyright (C) 2014 Rya
- * %%
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * You may obtain a copy of the License at
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
  * 
- *      http://www.apache.org/licenses/LICENSE-2.0
+ *   http://www.apache.org/licenses/LICENSE-2.0
  * 
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- * #L%
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied.  See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
  */
 
+
+
 import org.openrdf.model.URI;
 
 /**

http://git-wip-us.apache.org/repos/asf/incubator-rya/blob/80faf06d/common/rya.api/src/main/java/mvm/rya/api/resolver/RdfToRyaConversions.java
----------------------------------------------------------------------
diff --git a/common/rya.api/src/main/java/mvm/rya/api/resolver/RdfToRyaConversions.java b/common/rya.api/src/main/java/mvm/rya/api/resolver/RdfToRyaConversions.java
index bd5a236..485bf0d 100644
--- a/common/rya.api/src/main/java/mvm/rya/api/resolver/RdfToRyaConversions.java
+++ b/common/rya.api/src/main/java/mvm/rya/api/resolver/RdfToRyaConversions.java
@@ -1,25 +1,26 @@
 package mvm.rya.api.resolver;
 
 /*
- * #%L
- * mvm.rya.rya.api
- * %%
- * Copyright (C) 2014 Rya
- * %%
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * You may obtain a copy of the License at
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
  * 
- *      http://www.apache.org/licenses/LICENSE-2.0
+ *   http://www.apache.org/licenses/LICENSE-2.0
  * 
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- * #L%
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied.  See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
  */
 
+
+
 import mvm.rya.api.domain.*;
 import org.openrdf.model.*;
 

http://git-wip-us.apache.org/repos/asf/incubator-rya/blob/80faf06d/common/rya.api/src/main/java/mvm/rya/api/resolver/RyaContext.java
----------------------------------------------------------------------
diff --git a/common/rya.api/src/main/java/mvm/rya/api/resolver/RyaContext.java b/common/rya.api/src/main/java/mvm/rya/api/resolver/RyaContext.java
index 7398114..2b97e1c 100644
--- a/common/rya.api/src/main/java/mvm/rya/api/resolver/RyaContext.java
+++ b/common/rya.api/src/main/java/mvm/rya/api/resolver/RyaContext.java
@@ -1,25 +1,26 @@
 package mvm.rya.api.resolver;
 
 /*
- * #%L
- * mvm.rya.rya.api
- * %%
- * Copyright (C) 2014 Rya
- * %%
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * You may obtain a copy of the License at
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
  * 
- *      http://www.apache.org/licenses/LICENSE-2.0
+ *   http://www.apache.org/licenses/LICENSE-2.0
  * 
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- * #L%
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied.  See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
  */
 
+
+
 import java.util.HashMap;
 import java.util.List;
 import java.util.Map;

http://git-wip-us.apache.org/repos/asf/incubator-rya/blob/80faf06d/common/rya.api/src/main/java/mvm/rya/api/resolver/RyaToRdfConversions.java
----------------------------------------------------------------------
diff --git a/common/rya.api/src/main/java/mvm/rya/api/resolver/RyaToRdfConversions.java b/common/rya.api/src/main/java/mvm/rya/api/resolver/RyaToRdfConversions.java
index b746aa5..a30d250 100644
--- a/common/rya.api/src/main/java/mvm/rya/api/resolver/RyaToRdfConversions.java
+++ b/common/rya.api/src/main/java/mvm/rya/api/resolver/RyaToRdfConversions.java
@@ -1,25 +1,26 @@
 package mvm.rya.api.resolver;
 
 /*
- * #%L
- * mvm.rya.rya.api
- * %%
- * Copyright (C) 2014 Rya
- * %%
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * You may obtain a copy of the License at
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
  * 
- *      http://www.apache.org/licenses/LICENSE-2.0
+ *   http://www.apache.org/licenses/LICENSE-2.0
  * 
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- * #L%
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied.  See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
  */
 
+
+
 import mvm.rya.api.domain.RyaStatement;
 import mvm.rya.api.domain.RyaType;
 import mvm.rya.api.domain.RyaURI;

http://git-wip-us.apache.org/repos/asf/incubator-rya/blob/80faf06d/common/rya.api/src/main/java/mvm/rya/api/resolver/RyaTripleContext.java
----------------------------------------------------------------------
diff --git a/common/rya.api/src/main/java/mvm/rya/api/resolver/RyaTripleContext.java b/common/rya.api/src/main/java/mvm/rya/api/resolver/RyaTripleContext.java
index 64360f3..b3c244e 100644
--- a/common/rya.api/src/main/java/mvm/rya/api/resolver/RyaTripleContext.java
+++ b/common/rya.api/src/main/java/mvm/rya/api/resolver/RyaTripleContext.java
@@ -1,25 +1,26 @@
 package mvm.rya.api.resolver;
 
 /*
- * #%L
- * mvm.rya.rya.api
- * %%
- * Copyright (C) 2014 Rya
- * %%
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * You may obtain a copy of the License at
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
  * 
- *      http://www.apache.org/licenses/LICENSE-2.0
+ *   http://www.apache.org/licenses/LICENSE-2.0
  * 
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- * #L%
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied.  See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
  */
 
+
+
 import java.util.ArrayList;
 import java.util.List;
 import java.util.Map;

http://git-wip-us.apache.org/repos/asf/incubator-rya/blob/80faf06d/common/rya.api/src/main/java/mvm/rya/api/resolver/RyaTypeResolver.java
----------------------------------------------------------------------
diff --git a/common/rya.api/src/main/java/mvm/rya/api/resolver/RyaTypeResolver.java b/common/rya.api/src/main/java/mvm/rya/api/resolver/RyaTypeResolver.java
index 53fbce8..5b1cd20 100644
--- a/common/rya.api/src/main/java/mvm/rya/api/resolver/RyaTypeResolver.java
+++ b/common/rya.api/src/main/java/mvm/rya/api/resolver/RyaTypeResolver.java
@@ -1,25 +1,26 @@
 package mvm.rya.api.resolver;
 
 /*
- * #%L
- * mvm.rya.rya.api
- * %%
- * Copyright (C) 2014 Rya
- * %%
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * You may obtain a copy of the License at
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
  * 
- *      http://www.apache.org/licenses/LICENSE-2.0
+ *   http://www.apache.org/licenses/LICENSE-2.0
  * 
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- * #L%
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied.  See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
  */
 
+
+
 import mvm.rya.api.domain.RyaRange;
 import mvm.rya.api.domain.RyaType;
 import mvm.rya.api.domain.RyaTypeRange;

http://git-wip-us.apache.org/repos/asf/incubator-rya/blob/80faf06d/common/rya.api/src/main/java/mvm/rya/api/resolver/RyaTypeResolverException.java
----------------------------------------------------------------------
diff --git a/common/rya.api/src/main/java/mvm/rya/api/resolver/RyaTypeResolverException.java b/common/rya.api/src/main/java/mvm/rya/api/resolver/RyaTypeResolverException.java
index 0bc7147..45f874c 100644
--- a/common/rya.api/src/main/java/mvm/rya/api/resolver/RyaTypeResolverException.java
+++ b/common/rya.api/src/main/java/mvm/rya/api/resolver/RyaTypeResolverException.java
@@ -1,25 +1,26 @@
 package mvm.rya.api.resolver;
 
 /*
- * #%L
- * mvm.rya.rya.api
- * %%
- * Copyright (C) 2014 Rya
- * %%
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * You may obtain a copy of the License at
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
  * 
- *      http://www.apache.org/licenses/LICENSE-2.0
+ *   http://www.apache.org/licenses/LICENSE-2.0
  * 
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- * #L%
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied.  See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
  */
 
+
+
 /**
  * Date: 7/16/12
  * Time: 12:09 PM

http://git-wip-us.apache.org/repos/asf/incubator-rya/blob/80faf06d/common/rya.api/src/main/java/mvm/rya/api/resolver/RyaTypeResolverMapping.java
----------------------------------------------------------------------
diff --git a/common/rya.api/src/main/java/mvm/rya/api/resolver/RyaTypeResolverMapping.java b/common/rya.api/src/main/java/mvm/rya/api/resolver/RyaTypeResolverMapping.java
index 1cf7ea2..0c7a30a 100644
--- a/common/rya.api/src/main/java/mvm/rya/api/resolver/RyaTypeResolverMapping.java
+++ b/common/rya.api/src/main/java/mvm/rya/api/resolver/RyaTypeResolverMapping.java
@@ -1,25 +1,26 @@
 package mvm.rya.api.resolver;
 
 /*
- * #%L
- * mvm.rya.rya.api
- * %%
- * Copyright (C) 2014 Rya
- * %%
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * You may obtain a copy of the License at
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
  * 
- *      http://www.apache.org/licenses/LICENSE-2.0
+ *   http://www.apache.org/licenses/LICENSE-2.0
  * 
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- * #L%
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied.  See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
  */
 
+
+
 import org.openrdf.model.URI;
 
 /**

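The hunks above all make the same mechanical change: the old "#%L"/"%%" maven-license-plugin block is replaced with the standard ASF source header in each file. In practice the Apache RAT plugin is the usual way to audit this during the build; for a quick standalone check, a one-off scan that flags .java files still missing the ASF header could look like the sketch below (a hypothetical helper, not part of this commit; the class name and the 30-line search window are illustrative):

    import java.io.IOException;
    import java.nio.file.Files;
    import java.nio.file.Path;
    import java.nio.file.Paths;
    import java.util.stream.Stream;

    public class LicenseHeaderCheck {
        // First line of the standard ASF header that every migrated file should carry.
        private static final String ASF_MARKER =
                "Licensed to the Apache Software Foundation (ASF) under one";

        public static void main(String[] args) throws IOException {
            Path root = Paths.get(args.length > 0 ? args[0] : ".");
            try (Stream<Path> files = Files.walk(root)) {
                files.filter(p -> p.toString().endsWith(".java"))
                     .filter(LicenseHeaderCheck::missingHeader)
                     .forEach(p -> System.out.println("Missing ASF header: " + p));
            }
        }

        private static boolean missingHeader(Path file) {
            try (Stream<String> lines = Files.lines(file)) {
                // The header sits at the top of the file, so 30 lines is plenty.
                return lines.limit(30).noneMatch(l -> l.contains(ASF_MARKER));
            } catch (IOException e) {
                return true; // unreadable files get flagged for manual review
            }
        }
    }
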


[32/56] [abbrv] incubator-rya git commit: RYA-7 POM and License Clean-up for Apache Move

Posted by mi...@apache.org.
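
This part adds RyaDirectExample, a roughly 700-line driver that builds the indexing SAIL against a mock Accumulo instance and exercises temporal, free-text, geo, and precomputed-join (PCJ) queries end to end. Stripped of the individual tests, the connect/query/shutdown pattern it follows is sketched below (a hypothetical quick-start: the class name, table prefix, and catch-all SELECT are illustrative, and depending on which indexers are enabled, the additional ConfigUtils flags from getConf() in the diff may also be required):

    import mvm.rya.accumulo.AccumuloRdfConfiguration;
    import mvm.rya.api.RdfCloudTripleStoreConfiguration;
    import mvm.rya.indexing.RyaSailFactory;
    import mvm.rya.indexing.accumulo.ConfigUtils;

    import org.openrdf.query.QueryLanguage;
    import org.openrdf.query.TupleQuery;
    import org.openrdf.query.TupleQueryResult;
    import org.openrdf.repository.sail.SailRepository;
    import org.openrdf.repository.sail.SailRepositoryConnection;
    import org.openrdf.sail.Sail;

    public class RyaQuickStart {
        public static void main(String[] args) throws Exception {
            // Mock-instance configuration, mirroring getConf() in the full example.
            AccumuloRdfConfiguration conf = new AccumuloRdfConfiguration();
            conf.setBoolean(ConfigUtils.USE_MOCK_INSTANCE, true);
            conf.set(RdfCloudTripleStoreConfiguration.CONF_TBL_PREFIX, "quickstart_");
            conf.set(ConfigUtils.CLOUDBASE_USER, "root");
            conf.set(ConfigUtils.CLOUDBASE_PASSWORD, "");
            conf.set(ConfigUtils.CLOUDBASE_INSTANCE, "instance");
            conf.set(ConfigUtils.CLOUDBASE_AUTHS, "");

            Sail sail = RyaSailFactory.getInstance(conf);
            SailRepository repository = new SailRepository(sail);
            repository.initialize();
            SailRepositoryConnection conn = repository.getConnection();
            try {
                TupleQuery query = conn.prepareTupleQuery(QueryLanguage.SPARQL,
                        "SELECT ?s ?p ?o WHERE { ?s ?p ?o }");
                TupleQueryResult result = query.evaluate();
                while (result.hasNext()) {
                    System.out.println(result.next());
                }
                result.close();
            } finally {
                conn.close();
                repository.shutDown();
            }
        }
    }
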
http://git-wip-us.apache.org/repos/asf/incubator-rya/blob/80faf06d/extras/indexingExample/src/main/java/RyaDirectExample.java
----------------------------------------------------------------------
diff --git a/extras/indexingExample/src/main/java/RyaDirectExample.java b/extras/indexingExample/src/main/java/RyaDirectExample.java
new file mode 100644
index 0000000..b3e8dae
--- /dev/null
+++ b/extras/indexingExample/src/main/java/RyaDirectExample.java
@@ -0,0 +1,700 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ * 
+ *   http://www.apache.org/licenses/LICENSE-2.0
+ * 
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied.  See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+
+
+import java.util.List;
+
+import mvm.rya.accumulo.AccumuloRdfConfiguration;
+import mvm.rya.api.RdfCloudTripleStoreConfiguration;
+import mvm.rya.indexing.RyaSailFactory;
+import mvm.rya.indexing.accumulo.ConfigUtils;
+import mvm.rya.indexing.accumulo.geo.GeoConstants;
+import mvm.rya.indexing.external.tupleSet.AccumuloIndexSet;
+
+import org.apache.accumulo.core.client.AccumuloException;
+import org.apache.accumulo.core.client.AccumuloSecurityException;
+import org.apache.accumulo.core.client.Connector;
+import org.apache.accumulo.core.client.MutationsRejectedException;
+import org.apache.accumulo.core.client.TableExistsException;
+import org.apache.accumulo.core.client.TableNotFoundException;
+import org.apache.accumulo.core.client.mock.MockInstance;
+import org.apache.accumulo.core.client.security.tokens.PasswordToken;
+import org.apache.commons.lang.Validate;
+import org.apache.hadoop.conf.Configuration;
+import org.apache.log4j.Logger;
+import org.openrdf.model.URI;
+import org.openrdf.model.ValueFactory;
+import org.openrdf.model.impl.LiteralImpl;
+import org.openrdf.model.impl.URIImpl;
+import org.openrdf.model.vocabulary.RDF;
+import org.openrdf.model.vocabulary.RDFS;
+import org.openrdf.query.BindingSet;
+import org.openrdf.query.MalformedQueryException;
+import org.openrdf.query.QueryEvaluationException;
+import org.openrdf.query.QueryLanguage;
+import org.openrdf.query.QueryResultHandlerException;
+import org.openrdf.query.TupleQuery;
+import org.openrdf.query.TupleQueryResultHandler;
+import org.openrdf.query.TupleQueryResultHandlerException;
+import org.openrdf.query.Update;
+import org.openrdf.query.UpdateExecutionException;
+import org.openrdf.repository.RepositoryException;
+import org.openrdf.repository.sail.SailRepository;
+import org.openrdf.repository.sail.SailRepositoryConnection;
+import org.openrdf.sail.Sail;
+import org.openrdf.sail.SailException;
+
+public class RyaDirectExample {
+    private static final Logger log = Logger.getLogger(RyaDirectExample.class);
+
+    //
+    // Connection configuration parameters
+    //
+
+    private static final boolean USE_MOCK_INSTANCE = true;
+    private static final boolean PRINT_QUERIES = true;
+    private static final String INSTANCE = "instance";
+    private static final String RYA_TABLE_PREFIX = "x_test_triplestore_";
+    private static final String AUTHS = "";
+    
+    
+    
+    public static void main(String[] args) throws Exception {
+        Configuration conf = getConf();
+        conf.setBoolean(ConfigUtils.DISPLAY_QUERY_PLAN, PRINT_QUERIES);
+        
+        log.info("Creating the tables as root.");
+//        createTables(addRootConf(conf), conf);
+
+        SailRepository repository = null;
+        SailRepositoryConnection conn = null;
+      
+        try {
+            log.info("Connecting to Indexing Sail Repository.");
+            
+            Sail extSail = RyaSailFactory.getInstance(conf);
+            repository = new SailRepository(extSail);
+            repository.initialize();
+            conn = repository.getConnection();
+            
+            createPCJ(conn);
+
+            long start = System.currentTimeMillis();
+            log.info("Running SPARQL Example: Add and Delete");
+            testAddAndDelete(conn);
+            log.info("Running SAIL/SPARQL Example: PCJ Search");
+            testPCJSearch(conn);
+            log.info("Running SAIL/SPARQL Example: Add and Temporal Search");
+            testAddAndTemporalSearchWithPCJ(conn);
+            log.info("Running SAIL/SPARQL Example: Add and Free Text Search with PCJ");
+            testAddAndFreeTextSearchWithPCJ(conn);
+            log.info("Running SPARQL Example: Add Point and Geo Search with PCJ");
+            testAddPointAndWithinSearchWithPCJ(conn);
+            log.info("Running SPARQL Example: Temporal, Freetext, and Geo Search");
+            testTemporalFreeGeoSearch(conn);
+            log.info("Running SPARQL Example: Geo, Freetext, and PCJ Search");
+            testGeoFreetextWithPCJSearch(conn);
+
+            log.info("TIME: " + (System.currentTimeMillis() - start) / 1000.);
+        } finally {
+            log.info("Shutting down");
+            closeQuietly(conn);
+            closeQuietly(repository);
+        }
+    }
+
+    private static void closeQuietly(SailRepository repository) {
+        if (repository != null) {
+            try {
+                repository.shutDown();
+            } catch (RepositoryException e) {
+                // quietly absorb this exception
+            }
+        }
+    }
+
+    private static void closeQuietly(SailRepositoryConnection conn) {
+        if (conn != null) {
+            try {
+                conn.close();
+            } catch (RepositoryException e) {
+                // quietly absorb this exception
+            }
+        }
+    }
+
+    private static Configuration getConf() {
+
+        AccumuloRdfConfiguration conf = new AccumuloRdfConfiguration();
+
+        conf.setBoolean(ConfigUtils.USE_MOCK_INSTANCE, USE_MOCK_INSTANCE);
+        conf.set(ConfigUtils.USE_PCJ, "true");
+        conf.set(ConfigUtils.USE_GEO, "true");
+        conf.set(ConfigUtils.USE_FREETEXT, "true");
+        conf.set(ConfigUtils.USE_TEMPORAL, "true");
+        conf.set(RdfCloudTripleStoreConfiguration.CONF_TBL_PREFIX, RYA_TABLE_PREFIX);
+        conf.set(ConfigUtils.CLOUDBASE_USER, "root");
+        conf.set(ConfigUtils.CLOUDBASE_PASSWORD, "");
+        conf.set(ConfigUtils.CLOUDBASE_INSTANCE, INSTANCE);
+        conf.setInt(ConfigUtils.NUM_PARTITIONS, 3);
+        conf.set(ConfigUtils.CLOUDBASE_AUTHS, AUTHS);
+
+        // only geo index statements with geo:asWKT predicates
+        conf.set(ConfigUtils.GEO_PREDICATES_LIST, GeoConstants.GEO_AS_WKT.stringValue());
+        return conf;
+    }
+
+    public static void testAddAndDelete(SailRepositoryConnection conn) throws MalformedQueryException,
+            RepositoryException, UpdateExecutionException, QueryEvaluationException, TupleQueryResultHandlerException,
+            AccumuloException, AccumuloSecurityException, TableNotFoundException {
+
+        // Add data
+        String query = "INSERT DATA\n"//
+                + "{ GRAPH <http://updated/test> {\n"//
+                + "  <http://acme.com/people/Mike> " //
+                + "       <http://acme.com/actions/likes> \"A new book\" ;\n"//
+                + "       <http://acme.com/actions/likes> \"Avocados\" .\n" + "} }";
+
+        log.info("Performing Query");
+
+        Update update = conn.prepareUpdate(QueryLanguage.SPARQL, query);
+        update.execute();
+
+        query = "select ?p ?o { GRAPH <http://updated/test> {<http://acme.com/people/Mike> ?p ?o . }}";
+        CountingResultHandler resultHandler = new CountingResultHandler();
+        TupleQuery tupleQuery = conn.prepareTupleQuery(QueryLanguage.SPARQL, query);
+        tupleQuery.evaluate(resultHandler);
+        log.info("Result count : " + resultHandler.getCount());
+
+        Validate.isTrue(resultHandler.getCount() == 2);
+        resultHandler.resetCount();
+
+        // Delete Data
+        query = "DELETE DATA\n" //
+                + "{ GRAPH <http://updated/test> {\n"
+                + "  <http://acme.com/people/Mike> <http://acme.com/actions/likes> \"A new book\" ;\n"
+                + "   <http://acme.com/actions/likes> \"Avocados\" .\n" + "}}";
+
+        update = conn.prepareUpdate(QueryLanguage.SPARQL, query);
+        update.execute();
+
+        query = "select ?p ?o { GRAPH <http://updated/test> {<http://acme.com/people/Mike> ?p ?o . }}";
+        tupleQuery = conn.prepareTupleQuery(QueryLanguage.SPARQL, query);
+        tupleQuery.evaluate(resultHandler);
+        log.info("Result count : " + resultHandler.getCount());
+
+        Validate.isTrue(resultHandler.getCount() == 0);
+    }
+    
+    
+    private static void testPCJSearch(SailRepositoryConnection conn) throws Exception {
+        
+        String queryString;
+        TupleQuery tupleQuery;
+        CountingResultHandler tupleHandler;
+
+        // ///////////// query using the (?e a ?c) pattern (one result expected)
+        queryString = "SELECT ?e ?c ?l ?o " //
+                + "{" //
+                + "  ?e a ?c . "//
+                + "  ?e <http://www.w3.org/2000/01/rdf-schema#label> ?l . "//
+                + "  ?e <uri:talksTo> ?o . "//
+                + "}";//
+
+        tupleQuery = conn.prepareTupleQuery(QueryLanguage.SPARQL, queryString);
+        tupleHandler = new CountingResultHandler();
+        tupleQuery.evaluate(tupleHandler);
+        log.info("Result count : " + tupleHandler.getCount());
+        Validate.isTrue(tupleHandler.getCount() == 1);
+           
+        // ///////////// query using the (?c a ?e) pattern (two results expected)
+        queryString = "PREFIX fts: <http://rdf.useekm.com/fts#>  "//
+                + "SELECT ?e ?c ?l ?o " //
+                + "{" //
+                + "  ?c a ?e . "//
+                + "  ?e <http://www.w3.org/2000/01/rdf-schema#label> ?l . "//
+                + "  ?e <uri:talksTo> ?o . "//
+                + "}";//
+
+        tupleQuery = conn.prepareTupleQuery(QueryLanguage.SPARQL, queryString);
+        tupleHandler = new CountingResultHandler();
+        tupleQuery.evaluate(tupleHandler);
+        log.info("Result count : " + tupleHandler.getCount());
+        Validate.isTrue(tupleHandler.getCount() == 2);    
+        
+    }
+    
+
+    
+    
+    private static void testAddAndTemporalSearchWithPCJ(SailRepositoryConnection conn) throws Exception {
+
+        // create some resources and literals to make statements out of
+
+        String sparqlInsert = "PREFIX time: <http://www.w3.org/2006/time#>\n"
+                + "INSERT DATA {\n" //
+                + "_:eventz       a       time:Instant ;\n"
+                + "     time:inXSDDateTime '2001-01-01T01:01:01-08:00' ;\n" //  one second
+                + "     time:inXSDDateTime '2001-01-01T04:01:02.000-05:00'^^<http://www.w3.org/2001/XMLSchema#dateTime> ;\n" //   2 seconds
+                + "     time:inXSDDateTime \"2001-01-01T01:01:03-08:00\" ;\n" //   3 seconds
+                + "     time:inXSDDateTime '2001-01-01T01:01:04-08:00' ;\n" //   4 seconds
+                + "     time:inXSDDateTime '2001-01-01T09:01:05Z' ;\n"   
+                + "     time:inXSDDateTime '2006-01-01' ;\n" 
+                + "     time:inXSDDateTime '2007-01-01' ;\n" 
+                + "     time:inXSDDateTime '2008-01-01' ; .\n"
+                + "}";
+
+        Update update = conn.prepareUpdate(QueryLanguage.SPARQL, sparqlInsert);
+        update.execute();
+
+        // Find all stored dates after the third instant.
+        String queryString = "PREFIX time: <http://www.w3.org/2006/time#> \n"//
+                + "PREFIX tempo: <tag:rya-rdf.org,2015:temporal#> \n"//
+                + "SELECT ?event ?time \n" //
+                + "WHERE { \n"
+                + "  ?event time:inXSDDateTime ?time . \n"//
+                + "  FILTER(tempo:after(?time, '2001-01-01T01:01:03-08:00') ) \n"// after 3 seconds
+                + "}";//
+       
+        
+
+        TupleQuery tupleQuery = conn.prepareTupleQuery(QueryLanguage.SPARQL, queryString);
+        CountingResultHandler tupleHandler = new CountingResultHandler();
+        tupleQuery.evaluate(tupleHandler);
+        log.info("Result count : " + tupleHandler.getCount());
+        Validate.isTrue(tupleHandler.getCount() == 5);
+        
+        // Same dates, restricted to events typed time:Instant.
+        queryString = "PREFIX time: <http://www.w3.org/2006/time#> \n"//
+                + "PREFIX tempo: <tag:rya-rdf.org,2015:temporal#> \n"//
+                + "SELECT ?event ?time \n" //
+                + "WHERE { \n"
+                + "  ?event time:inXSDDateTime ?time . \n"//
+                + "  ?event a  time:Instant . \n"//
+                + "  FILTER(tempo:after(?time, '2001-01-01T01:01:03-08:00') ) \n"// after 3 seconds
+                + "}";//
+
+
+
+        tupleQuery = conn.prepareTupleQuery(QueryLanguage.SPARQL, queryString);
+        tupleHandler = new CountingResultHandler();
+        tupleQuery.evaluate(tupleHandler);
+        log.info("Result count : " + tupleHandler.getCount());
+        Validate.isTrue(tupleHandler.getCount() == 5);
+
+
+        // Same dates, joined with the label/talksTo pattern used by the PCJ.
+        queryString = "PREFIX time: <http://www.w3.org/2006/time#> \n"//
+                + "PREFIX tempo: <tag:rya-rdf.org,2015:temporal#> \n"//
+                + "SELECT ?event ?time ?e ?c ?l ?o \n" //
+                + "WHERE { \n"
+                + "  ?e a ?c . \n"//
+                + "  ?e <http://www.w3.org/2000/01/rdf-schema#label> ?l . \n"//
+                + "  ?e <uri:talksTo> ?o . \n"//
+                + "  ?event a  time:Instant . \n"//
+                + "  ?event time:inXSDDateTime ?time . \n"//
+                + "  FILTER(tempo:after(?time, '2001-01-01T01:01:03-08:00') ) \n"// after 3 seconds
+                + "}";//
+
+        tupleQuery = conn.prepareTupleQuery(QueryLanguage.SPARQL, queryString);
+        tupleHandler = new CountingResultHandler();
+        tupleQuery.evaluate(tupleHandler);
+        log.info("Result count : " + tupleHandler.getCount());
+        Validate.isTrue(tupleHandler.getCount() == 5);
+    }
+
+
+
+
+
+
+    private static void testAddAndFreeTextSearchWithPCJ(SailRepositoryConnection conn) throws Exception {
+        // add data to the repository using the SailRepository add methods
+        ValueFactory f = conn.getValueFactory();
+        URI person = f.createURI("http://example.org/ontology/Person");
+
+        String uuid;
+
+        uuid = "urn:people:alice";
+        conn.add(f.createURI(uuid), RDF.TYPE, person);
+        conn.add(f.createURI(uuid), RDFS.LABEL, f.createLiteral("Alice Palace Hose", f.createURI("xsd:string")));
+
+        uuid = "urn:people:bobss";
+        conn.add(f.createURI(uuid), RDF.TYPE, person);
+        conn.add(f.createURI(uuid), RDFS.LABEL, f.createLiteral("Bob Snob Hose", "en"));
+        
+        String queryString;
+        TupleQuery tupleQuery;
+        CountingResultHandler tupleHandler;
+
+        // ///////////// search for alice
+        queryString = "PREFIX fts: <http://rdf.useekm.com/fts#>  "//
+                + "SELECT ?person ?match ?e ?c ?l ?o " //
+                + "{" //
+                + "  ?person <http://www.w3.org/2000/01/rdf-schema#label> ?match . "//
+                + "  FILTER(fts:text(?match, \"pal*\")) " //
+                + "}";//
+        tupleQuery = conn.prepareTupleQuery(QueryLanguage.SPARQL, queryString);
+        tupleHandler = new CountingResultHandler();
+        tupleQuery.evaluate(tupleHandler);
+        log.info("Result count : " + tupleHandler.getCount());
+        Validate.isTrue(tupleHandler.getCount() == 1);
+        
+
+        // ///////////// search for alice and bob
+        queryString = "PREFIX fts: <http://rdf.useekm.com/fts#>  "//
+                + "SELECT ?person ?match " //
+                + "{" //
+                + "  ?person <http://www.w3.org/2000/01/rdf-schema#label> ?match . "//
+                  + "  ?person a <http://example.org/ontology/Person> . "//
+                + "  FILTER(fts:text(?match, \"(alice | bob) *SE\")) " //
+                + "}";//
+        tupleQuery = conn.prepareTupleQuery(QueryLanguage.SPARQL, queryString);
+        tupleHandler = new CountingResultHandler();
+        tupleQuery.evaluate(tupleHandler);
+        log.info("Result count : " + tupleHandler.getCount());
+        Validate.isTrue(tupleHandler.getCount() == 2);
+        
+        // ///////////// search for alice and bob, narrowed by the pal* filter (only alice matches)
+        queryString = "PREFIX fts: <http://rdf.useekm.com/fts#>  "//
+                + "SELECT ?person ?match " //
+                + "{" //
+                + "  ?person a <http://example.org/ontology/Person> . "//
+                + "  ?person <http://www.w3.org/2000/01/rdf-schema#label> ?match . "//
+                + "  FILTER(fts:text(?match, \"(alice | bob) *SE\")) " //
+                + "  FILTER(fts:text(?match, \"pal*\")) " //
+                + "}";//
+        tupleQuery = conn.prepareTupleQuery(QueryLanguage.SPARQL, queryString);
+        tupleHandler = new CountingResultHandler();
+        tupleQuery.evaluate(tupleHandler);
+        log.info("Result count : " + tupleHandler.getCount());
+        Validate.isTrue(tupleHandler.getCount() == 1);
+        
+        
+        // ///////////// search for bob
+        queryString = "PREFIX fts: <http://rdf.useekm.com/fts#>  "//
+                + "SELECT ?person ?match ?e ?c ?l ?o " //
+                + "{" //
+                + "  ?e a ?c . "//
+                + "  ?e <http://www.w3.org/2000/01/rdf-schema#label> ?l . "//
+                + "  ?e <uri:talksTo> ?o . "//
+                + "  ?person a <http://example.org/ontology/Person> . "//
+                + "  ?person <http://www.w3.org/2000/01/rdf-schema#label> ?match . "//
+                + "  FILTER(fts:text(?match, \"!alice & hose\")) " //
+                + "}";//
+
+        tupleQuery = conn.prepareTupleQuery(QueryLanguage.SPARQL, queryString);
+        tupleHandler = new CountingResultHandler();
+        tupleQuery.evaluate(tupleHandler);
+        log.info("Result count : " + tupleHandler.getCount());
+        Validate.isTrue(tupleHandler.getCount() == 1);
+    }
+
+
+
+    private static void testAddPointAndWithinSearchWithPCJ(SailRepositoryConnection conn) throws Exception {
+
+        String update = "PREFIX geo: <http://www.opengis.net/ont/geosparql#>  "//
+                + "INSERT DATA { " //
+                + "  <urn:feature> a geo:Feature ; " //
+                + "    geo:hasGeometry [ " //
+                + "      a geo:Point ; " //
+                + "      geo:asWKT \"Point(-77.03524 38.889468)\"^^geo:wktLiteral "//
+                + "    ] . " //
+                + "}";
+
+        Update u = conn.prepareUpdate(QueryLanguage.SPARQL, update);
+        u.execute();
+        
+        String queryString;
+        TupleQuery tupleQuery;
+        CountingResultHandler tupleHandler;
+        
+        // point outside search ring
+        queryString = "PREFIX geo: <http://www.opengis.net/ont/geosparql#>  "//
+                + "PREFIX geof: <http://www.opengis.net/def/function/geosparql/>  "//
+                + "SELECT ?feature ?point ?wkt " //
+                + "{" //
+                + "  ?feature a geo:Feature . "//
+                + "  ?feature geo:hasGeometry ?point . "//
+                + "  ?point a geo:Point . "//
+                + "  ?point geo:asWKT ?wkt . "//
+                + "  FILTER(geof:sfWithin(?wkt, \"POLYGON((-77 39, -76 39, -76 38, -77 38, -77 39))\"^^geo:wktLiteral)) " //
+                + "}";//
+        tupleQuery = conn.prepareTupleQuery(QueryLanguage.SPARQL, queryString);
+        tupleHandler = new CountingResultHandler();
+        tupleQuery.evaluate(tupleHandler);
+        log.info("Result count : " + tupleHandler.getCount());
+        Validate.isTrue(tupleHandler.getCount() == 0);
+        
+        // point inside search ring
+        queryString = "PREFIX geo: <http://www.opengis.net/ont/geosparql#>  "//
+                + "PREFIX geof: <http://www.opengis.net/def/function/geosparql/>  "//
+                + "SELECT ?feature ?point ?wkt ?e ?l ?o" //
+                + "{" //
+                + "  ?feature a ?e . "//
+                + "  ?e <http://www.w3.org/2000/01/rdf-schema#label> ?l . "//
+                + "  ?e <uri:talksTo> ?o . "//
+                + "  ?feature a geo:Feature . "//
+                + "  ?feature geo:hasGeometry ?point . "//
+                + "  ?point a geo:Point . "//
+                + "  ?point geo:asWKT ?wkt . "//
+                + "  FILTER(geof:sfWithin(?wkt, \"POLYGON((-78 39, -77 39, -77 38, -78 38, -78 39))\"^^geo:wktLiteral)) " //
+                + "}";//
+         
+        tupleQuery = conn.prepareTupleQuery(QueryLanguage.SPARQL, queryString);
+        tupleHandler = new CountingResultHandler();
+        tupleQuery.evaluate(tupleHandler);
+        log.info("Result count : " + tupleHandler.getCount());
+        Validate.isTrue(tupleHandler.getCount() == 1);
+        
+             
+        // point inside search ring with Pre-Computed Join
+        queryString = "PREFIX geo: <http://www.opengis.net/ont/geosparql#>  "//
+                + "PREFIX geof: <http://www.opengis.net/def/function/geosparql/>  "//
+                + "SELECT ?feature ?point ?wkt ?e ?l ?o" //
+                + "{" //
+                + "  ?feature a ?e . "//
+                + "  ?e <http://www.w3.org/2000/01/rdf-schema#label> ?l . "//
+                + "  ?e <uri:talksTo> ?o . "//
+                + "  ?feature a geo:Feature . "//
+                + "  ?feature geo:hasGeometry ?point . "//
+                + "  ?point a geo:Point . "//
+                + "  ?point geo:asWKT ?wkt . "//
+                + "  FILTER(geof:sfWithin(?wkt, \"POLYGON((-78 39, -77 39, -77 38, -78 38, -78 39))\"^^geo:wktLiteral)) " //
+                + "}";//
+         
+        tupleQuery = conn.prepareTupleQuery(QueryLanguage.SPARQL, queryString);
+        tupleHandler = new CountingResultHandler();
+        tupleQuery.evaluate(tupleHandler);
+        log.info("Result count : " + tupleHandler.getCount());
+        Validate.isTrue(tupleHandler.getCount() >= 1); // may also match points left over from previous runs
+
+        // point outside search ring with PCJ
+        queryString = "PREFIX geo: <http://www.opengis.net/ont/geosparql#>  "//
+                + "PREFIX geof: <http://www.opengis.net/def/function/geosparql/>  "//
+                + "SELECT ?feature ?point ?wkt ?e ?l ?o " //
+                + "{" //
+                + "  ?feature a ?e . "//
+                + "  ?e <http://www.w3.org/2000/01/rdf-schema#label> ?l . "//
+                + "  ?e <uri:talksTo> ?o . "//
+                + "  ?feature a geo:Feature . "//
+                + "  ?feature geo:hasGeometry ?point . "//
+                + "  ?point a geo:Point . "//
+                + "  ?point geo:asWKT ?wkt . "//
+                + "  FILTER(geof:sfWithin(?wkt, \"POLYGON((-77 39, -76 39, -76 38, -77 38, -77 39))\"^^geo:wktLiteral)) " //
+                + "}";//
+        tupleQuery = conn.prepareTupleQuery(QueryLanguage.SPARQL, queryString);
+        tupleHandler = new CountingResultHandler();
+        tupleQuery.evaluate(tupleHandler);
+        log.info("Result count : " + tupleHandler.getCount());
+        Validate.isTrue(tupleHandler.getCount() == 0);
+        
+        // point inside search ring with different Pre-Computed Join
+        queryString = "PREFIX geo: <http://www.opengis.net/ont/geosparql#>  "//
+                + "PREFIX geof: <http://www.opengis.net/def/function/geosparql/>  "//
+                + "SELECT ?feature ?point ?wkt ?e ?c ?l ?o " //
+                + "{" //
+                + "  ?e a ?c . "//
+                + "  ?e <http://www.w3.org/2000/01/rdf-schema#label> ?l . "//
+                + "  ?e <uri:talksTo> ?o . "//
+                + "  ?feature a geo:Feature . "//
+                + "  ?feature geo:hasGeometry ?point . "//
+                + "  ?point a geo:Point . "//
+                + "  ?point geo:asWKT ?wkt . "//
+                + "  FILTER(geof:sfWithin(?wkt, \"POLYGON((-78 39, -77 39, -77 38, -78 38, -78 39))\"^^geo:wktLiteral)) " //
+                + "}";//
+        tupleQuery = conn.prepareTupleQuery(QueryLanguage.SPARQL, queryString);
+        tupleHandler = new CountingResultHandler();
+        tupleQuery.evaluate(tupleHandler);
+        log.info("Result count : " + tupleHandler.getCount());
+        Validate.isTrue(tupleHandler.getCount() == 1);
+    }
+    
+    
+    private static void testTemporalFreeGeoSearch(SailRepositoryConnection conn) throws MalformedQueryException, 
+    RepositoryException, UpdateExecutionException, TupleQueryResultHandlerException, QueryEvaluationException {
+        
+        
+        String queryString;
+        TupleQuery tupleQuery;
+        CountingResultHandler tupleHandler;
+
+        // ring containing point
+        queryString = "PREFIX geo: <http://www.opengis.net/ont/geosparql#>  "//
+                + "PREFIX geof: <http://www.opengis.net/def/function/geosparql/>  "//
+                + "PREFIX time: <http://www.w3.org/2006/time#> "//
+                + "PREFIX tempo: <tag:rya-rdf.org,2015:temporal#> "//
+                + "PREFIX fts: <http://rdf.useekm.com/fts#>  "//
+                + "SELECT ?feature ?point ?wkt ?event ?time ?person ?match" //
+                + "{" //
+                + "  ?event a  time:Instant . \n"//
+                + "  ?event time:inXSDDateTime ?time . \n"//
+                + "  FILTER(tempo:after(?time, '2001-01-01T01:01:03-08:00') ) \n"// after 3 seconds
+                + "  ?feature a geo:Feature . "//
+                + "  ?feature geo:hasGeometry ?point . "//
+                + "  ?point a geo:Point . "//
+                + "  ?point geo:asWKT ?wkt . "//
+                + "  FILTER(geof:sfWithin(?wkt, \"POLYGON((-78 39, -77 39, -77 38, -78 38, -78 39))\"^^geo:wktLiteral)). " //
+                + "  ?person a <http://example.org/ontology/Person> . "//
+                + "  ?person <http://www.w3.org/2000/01/rdf-schema#label> ?match . "//
+                + "  FILTER(fts:text(?match, \"pal*\")) " //
+                + "}";//
+        
+        
+        
+        tupleQuery = conn.prepareTupleQuery(QueryLanguage.SPARQL, queryString);
+
+        tupleHandler = new CountingResultHandler();
+        tupleQuery.evaluate(tupleHandler);
+        log.info("Result count : " + tupleHandler.getCount());
+        Validate.isTrue(tupleHandler.getCount() == 5); 
+        
+    }
+    
+    
+    
+    private static void testGeoFreetextWithPCJSearch(SailRepositoryConnection conn) throws MalformedQueryException,
+            RepositoryException, TupleQueryResultHandlerException, QueryEvaluationException {
+        // ring containing point
+        String queryString = "PREFIX geo: <http://www.opengis.net/ont/geosparql#>  "//
+                + "PREFIX fts: <http://rdf.useekm.com/fts#>  "//
+                + "PREFIX geof: <http://www.opengis.net/def/function/geosparql/>  "//
+                + "SELECT ?feature ?point ?wkt ?e ?c ?l ?o ?person ?match " //
+                + "{" //
+                + "  ?person a <http://example.org/ontology/Person> . "//
+                + "  ?person <http://www.w3.org/2000/01/rdf-schema#label> ?match . "//
+                + "  FILTER(fts:text(?match, \"!alice & hose\")) " //
+                + "  ?e a ?c . "//
+                + "  ?e <http://www.w3.org/2000/01/rdf-schema#label> ?l . "//
+                + "  ?e <uri:talksTo> ?o . "//
+                + "  ?feature a geo:Feature . "//
+                + "  ?feature geo:hasGeometry ?point . "//
+                + "  ?point a geo:Point . "//
+                + "  ?point geo:asWKT ?wkt . "//
+                + "  FILTER(geof:sfWithin(?wkt, \"POLYGON((-78 39, -77 39, -77 38, -78 38, -78 39))\"^^geo:wktLiteral)) " //
+                + "}";//
+        TupleQuery tupleQuery = conn.prepareTupleQuery(QueryLanguage.SPARQL, queryString);
+        CountingResultHandler tupleHandler = new CountingResultHandler();
+        tupleQuery.evaluate(tupleHandler);
+        log.info("Result count : " + tupleHandler.getCount());
+        Validate.isTrue(tupleHandler.getCount() == 1);
+    }
+    
+    
+    
+    private static void createPCJ(SailRepositoryConnection conn) 
+            throws RepositoryException, AccumuloException, AccumuloSecurityException, TableExistsException {
+        
+        String queryString1 = ""//
+                + "SELECT ?e ?c ?l ?o " //
+                + "{" //
+                + "  ?c a ?e . "//
+                + "  ?e <http://www.w3.org/2000/01/rdf-schema#label> ?l . "//
+                + "  ?e <uri:talksTo> ?o . "//
+                + "}";//
+        
+        String queryString2 = ""//
+                + "SELECT ?e ?c ?l ?o " //
+                + "{" //
+                + "  ?e a ?c . "//
+                + "  ?e <http://www.w3.org/2000/01/rdf-schema#label> ?l . "//
+                + "  ?e <uri:talksTo> ?o . "//
+                + "}";//
+        
+        
+        URI obj,subclass,talksTo;
+        URI person = new URIImpl("urn:people:alice");
+        URI feature = new URIImpl("urn:feature");
+        URI sub = new URIImpl("uri:entity");
+        subclass = new URIImpl("uri:class");
+        obj = new URIImpl("uri:obj");
+        talksTo = new URIImpl("uri:talksTo");
+
+        conn.add(person, RDF.TYPE, sub);
+        conn.add(feature, RDF.TYPE, sub);
+        conn.add(sub, RDF.TYPE, subclass);
+        conn.add(sub, RDFS.LABEL, new LiteralImpl("label"));
+        conn.add(sub, talksTo, obj);
+       
+        AccumuloIndexSet ais1 = null; 
+        AccumuloIndexSet ais2 = null; 
+        String tablename1 = RYA_TABLE_PREFIX + "INDEX_1";
+        String tablename2 = RYA_TABLE_PREFIX + "INDEX_2";
+
+        Connector accCon = new MockInstance(INSTANCE).getConnector("root", new PasswordToken("".getBytes()));
+        accCon.tableOperations().create(tablename1);
+        accCon.tableOperations().create(tablename2);
+        
+        try {
+            ais1 = new AccumuloIndexSet(queryString1, conn, accCon, tablename1);
+            ais2 = new AccumuloIndexSet(queryString2, conn, accCon, tablename2);
+        } catch (MalformedQueryException e) {
+            e.printStackTrace();
+        } catch (SailException e) {
+            e.printStackTrace();
+        } catch (QueryEvaluationException e) {
+            e.printStackTrace();
+        } catch (MutationsRejectedException e) {
+            e.printStackTrace();
+        } catch (TableNotFoundException e) {
+            e.printStackTrace();
+        }
+        
+    }
+    
+
+    private static class CountingResultHandler implements TupleQueryResultHandler {
+        private int count = 0;
+
+        public int getCount() {
+            return count;
+        }
+
+        public void resetCount() {
+            this.count = 0;
+        }
+
+        @Override
+        public void startQueryResult(List<String> arg0) throws TupleQueryResultHandlerException {
+        }
+
+        @Override
+        public void handleSolution(BindingSet arg0) throws TupleQueryResultHandlerException {
+            count++;
+            System.out.println(arg0);
+        }
+
+        @Override
+        public void endQueryResult() throws TupleQueryResultHandlerException {
+        }
+
+        @Override
+        public void handleBoolean(boolean arg0) throws QueryResultHandlerException {
+          // TODO Auto-generated method stub
+          
+        }
+
+        @Override
+        public void handleLinks(List<String> arg0) throws QueryResultHandlerException {
+          // TODO Auto-generated method stub
+          
+        }
+    }
+}
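
The createPCJ(...) method above is the core of the pre-computed join (PCJ) examples: a SPARQL join is evaluated once and materialized into its own Accumulo table, which Rya's query planner can then substitute for matching sub-queries. Stripped of the try/catch scaffolding, the pattern reduces to the sketch below (all names are taken from the code above; it assumes the same mock instance and constants):

    // Materialize queryString1 into a dedicated Accumulo table.
    Connector accCon = new MockInstance(INSTANCE).getConnector("root", new PasswordToken("".getBytes()));
    String pcjTable = RYA_TABLE_PREFIX + "INDEX_1";
    accCon.tableOperations().create(pcjTable);
    // AccumuloIndexSet evaluates the query through the Sail connection and
    // writes the results into pcjTable.
    AccumuloIndexSet pcj = new AccumuloIndexSet(queryString1, conn, accCon, pcjTable);
    // Queries whose join shape matches queryString1 (such as the "point inside
    // search ring with PCJ" query above) can now be answered from pcjTable
    // instead of re-evaluating the three-pattern join.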

http://git-wip-us.apache.org/repos/asf/incubator-rya/blob/80faf06d/extras/indexingExample/src/main/scripts/RunRyaDirectExample.bat
----------------------------------------------------------------------
diff --git a/extras/indexingExample/src/main/scripts/RunRyaDirectExample.bat b/extras/indexingExample/src/main/scripts/RunRyaDirectExample.bat
new file mode 100644
index 0000000..a89e3d1
--- /dev/null
+++ b/extras/indexingExample/src/main/scripts/RunRyaDirectExample.bat
@@ -0,0 +1,41 @@
+@echo off
+rem Licensed to the Apache Software Foundation (ASF) under one
+rem or more contributor license agreements.  See the NOTICE file
+rem distributed with this work for additional information
+rem regarding copyright ownership.  The ASF licenses this file
+rem to you under the Apache License, Version 2.0 (the
+rem "License"); you may not use this file except in compliance
+rem with the License.  You may obtain a copy of the License at
+rem 
+rem   http://www.apache.org/licenses/LICENSE-2.0
+rem 
+rem Unless required by applicable law or agreed to in writing,
+rem software distributed under the License is distributed on an
+rem "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+rem KIND, either express or implied.  See the License for the
+rem specific language governing permissions and limitations
+rem under the License.
+SET CP=
+
+REM Check to see if javac is on the path
+where /Q javac
+IF %ERRORLEVEL% NEQ 0 goto :NO_JAVAC
+
+
+for /f %%f in ('DIR /b .\lib\*.jar') do call :append .\lib\%%f
+
+javac -cp "%CP%" RyaDirectExample.java
+java -cp "%CP%" RyaDirectExample
+
+goto :end
+
+:append
+@echo off
+SET CP=%CP%%1;
+goto :end
+
+:NO_JAVAC
+echo ERROR: Could not find javac
+goto :end
+
+:end

http://git-wip-us.apache.org/repos/asf/incubator-rya/blob/80faf06d/extras/indexingSailExample/pom.xml
----------------------------------------------------------------------
diff --git a/extras/indexingSailExample/pom.xml b/extras/indexingSailExample/pom.xml
deleted file mode 100644
index d126457..0000000
--- a/extras/indexingSailExample/pom.xml
+++ /dev/null
@@ -1,80 +0,0 @@
-<project xmlns="http://maven.apache.org/POM/4.0.0" xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance" xsi:schemaLocation="http://maven.apache.org/POM/4.0.0 http://maven.apache.org/xsd/maven-4.0.0.xsd">
-    <parent>
-        <groupId>mvm.rya</groupId>
-        <artifactId>rya.extras</artifactId>
-        <version>3.2.10-SNAPSHOT</version>
-    </parent>
-
-    <modelVersion>4.0.0</modelVersion>
-    <name>${project.groupId}.${project.artifactId}</name>
-    <artifactId>rya.indexingSail.example</artifactId>
-
-    <dependencies>
-	    <dependency>
-            <groupId>mvm.rya</groupId>
-            <artifactId>rya.prospector</artifactId>
-        </dependency>
-	    
-        
-       <dependency>
-            <groupId>mvm.rya</groupId>
-            <artifactId>mongodb.rya</artifactId>
-            <version>${project.version}</version>
-        </dependency>
-	 	<dependency>
-            <groupId>mvm.rya</groupId>
-            <artifactId>rya.indexing</artifactId>
-            <version>${project.version}</version>
-        </dependency>
-        <dependency>
-            <groupId>mvm.rya</groupId>
-            <artifactId>rya.indexing</artifactId>
-            <classifier>accumulo-server</classifier>
-            <version>${project.version}</version>
-        </dependency>
-
-        <dependency>
-            <groupId>mvm.rya</groupId>
-            <artifactId>rya.indexing</artifactId>
-            <classifier>map-reduce</classifier>
-            <version>${project.version}</version>
-        </dependency>
-
-        <dependency>
-            <groupId>org.apache.accumulo</groupId>
-            <artifactId>accumulo-core</artifactId>
-        </dependency>
-        <dependency>
-            <groupId>org.apache.thrift</groupId>
-            <artifactId>libthrift</artifactId>
-        </dependency>
-
-        <dependency>
-          <groupId>org.locationtech.geomesa</groupId>
-          <artifactId>geomesa-distributed-runtime</artifactId>
-          <version>${geomesa.version}</version>
-        </dependency>
-    </dependencies>
-
-    <build>
-        <plugins>
-            <plugin>
-                <artifactId>maven-assembly-plugin</artifactId>
-                <version>2.4</version>
-                <configuration>
-                    <descriptors>
-                        <descriptor>src/main/assembly/assembly.xml</descriptor>
-                    </descriptors>
-                </configuration>
-                <executions>
-                    <execution>
-                        <phase>package</phase>
-                        <goals>
-                            <goal>single</goal>
-                        </goals>
-                    </execution>
-                </executions>
-            </plugin>
-        </plugins>
-    </build>
-</project>
\ No newline at end of file

http://git-wip-us.apache.org/repos/asf/incubator-rya/blob/80faf06d/extras/indexingSailExample/src/main/assembly/assembly.xml
----------------------------------------------------------------------
diff --git a/extras/indexingSailExample/src/main/assembly/assembly.xml b/extras/indexingSailExample/src/main/assembly/assembly.xml
deleted file mode 100644
index 047ea5f..0000000
--- a/extras/indexingSailExample/src/main/assembly/assembly.xml
+++ /dev/null
@@ -1,50 +0,0 @@
-<assembly xmlns="http://maven.apache.org/plugins/maven-assembly-plugin/assembly/1.1.0" xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance"
-    xsi:schemaLocation="http://maven.apache.org/plugins/maven-assembly-plugin/assembly/1.1.0 http://maven.apache.org/xsd/assembly-1.1.0.xsd">
-
-    <id>distribution</id>
-    <formats>
-        <format>zip</format>
-    </formats>
-    <includeBaseDirectory>false</includeBaseDirectory>
-
-    <dependencySets>
-        <dependencySet>
-            <outputDirectory>accumulo/lib/ext</outputDirectory>
-            <includes>
-                <include>mvm.rya:rya.indexing:*:accumulo-server</include>
-                <include>org.locationtech.geomesa:geomesa-distributed-runtime:*</include>
-            </includes>
-        </dependencySet>
-        <dependencySet>
-            <outputDirectory>map-reduce</outputDirectory>
-            <includes>
-                <include>mvm.rya:rya.indexing:*:map-reduce</include>
-            </includes>
-        </dependencySet>
-        <dependencySet>
-            <outputDirectory>dist/lib</outputDirectory>
-            <includes>
-                <include>*</include>
-            </includes>
-            <excludes>
-                <!-- Do not include the example jar. Example batch script builds the example -->
-                <exclude>mvm.rya:rya.indexingSail.example</exclude>
-
-                <!-- Do not include the MR or Accumulo Server builds -->
-                <exclude>mvm.rya:rya.indexing:*:accumulo-server</exclude>
-                <exclude>mvm.rya:rya.indexing:*:map-reduce</exclude>
-            </excludes>
-            <scope>test</scope>
-        </dependencySet>
-    </dependencySets>
-    <files>
-        <file>
-            <source>src/main/scripts/RunRyaDirectExample.bat</source>
-            <outputDirectory>dist</outputDirectory>
-        </file>
-        <file>
-            <source>src/main/java/RyaDirectExample.java</source>
-            <outputDirectory>dist</outputDirectory>
-        </file>
-    </files>
-</assembly>

http://git-wip-us.apache.org/repos/asf/incubator-rya/blob/80faf06d/extras/indexingSailExample/src/main/java/EntityDirectExample.java
----------------------------------------------------------------------
diff --git a/extras/indexingSailExample/src/main/java/EntityDirectExample.java b/extras/indexingSailExample/src/main/java/EntityDirectExample.java
deleted file mode 100644
index 408c754..0000000
--- a/extras/indexingSailExample/src/main/java/EntityDirectExample.java
+++ /dev/null
@@ -1,292 +0,0 @@
-
-
-import java.util.List;
-
-import mvm.rya.accumulo.AccumuloRdfConfiguration;
-import mvm.rya.api.RdfCloudTripleStoreConfiguration;
-import mvm.rya.indexing.RyaSailFactory;
-import mvm.rya.indexing.accumulo.ConfigUtils;
-
-import org.apache.accumulo.core.client.AccumuloException;
-import org.apache.accumulo.core.client.AccumuloSecurityException;
-import org.apache.accumulo.core.client.TableNotFoundException;
-import org.apache.commons.lang.Validate;
-import org.apache.hadoop.conf.Configuration;
-import org.apache.log4j.Logger;
-import org.openrdf.query.BindingSet;
-import org.openrdf.query.MalformedQueryException;
-import org.openrdf.query.QueryEvaluationException;
-import org.openrdf.query.QueryLanguage;
-import org.openrdf.query.QueryResultHandlerException;
-import org.openrdf.query.TupleQuery;
-import org.openrdf.query.TupleQueryResultHandler;
-import org.openrdf.query.TupleQueryResultHandlerException;
-import org.openrdf.query.Update;
-import org.openrdf.query.UpdateExecutionException;
-import org.openrdf.repository.RepositoryException;
-import org.openrdf.repository.sail.SailRepository;
-import org.openrdf.repository.sail.SailRepositoryConnection;
-import org.openrdf.sail.Sail;
-
-public class EntityDirectExample {
-    private static final Logger log = Logger.getLogger(EntityDirectExample.class);
-
-    //
-    // Connection configuration parameters
-    //
-
-    private static final boolean USE_MOCK_INSTANCE = true;
-    private static final boolean PRINT_QUERIES = true;
-    private static final String INSTANCE = "instance";
-    private static final String RYA_TABLE_PREFIX = "x_test_triplestore_";
-    private static final String AUTHS = "U";
-    
-    public static void main(String[] args) throws Exception {
-        Configuration conf = getConf();
-        conf.setBoolean(ConfigUtils.DISPLAY_QUERY_PLAN, PRINT_QUERIES);
-        
-        log.info("Creating the tables as root.");
-        SailRepository repository = null;
-        SailRepositoryConnection conn = null;
-      
-        try {
-            log.info("Connecting to Indexing Sail Repository.");
-            
-            Sail extSail = RyaSailFactory.getInstance(conf);
-            repository = new SailRepository(extSail);
-            repository.initialize();
-            conn = repository.getConnection();
-
-            log.info("Running SPARQL Example: Add and Delete");
-            testAddAndDelete(conn);
-            log.info("Running SAIL/SPARQL Example: Add and Temporal Search");
-            testAddAndTemporalSearchWithPCJ(conn);
-            
-        } finally {
-            log.info("Shutting down");
-            closeQuietly(conn);
-            closeQuietly(repository);
-        }
-    }
-
-    private static void closeQuietly(SailRepository repository) {
-        if (repository != null) {
-            try {
-                repository.shutDown();
-            } catch (RepositoryException e) {
-                // quietly absorb this exception
-            }
-        }
-    }
-
-    private static void closeQuietly(SailRepositoryConnection conn) {
-        if (conn != null) {
-            try {
-                conn.close();
-            } catch (RepositoryException e) {
-                // quietly absorb this exception
-            }
-        }
-    }
-
-    
-
-
-   
-    public static void testAddAndDelete(SailRepositoryConnection conn) throws MalformedQueryException,
-            RepositoryException, UpdateExecutionException, QueryEvaluationException, TupleQueryResultHandlerException,
-            AccumuloException, AccumuloSecurityException, TableNotFoundException {
-
-        // Add data
-        String query = "INSERT DATA\n"//
-                + "{ GRAPH <http://updated/test> {\n"//
-                + "  <http://acme.com/people/Mike> " //
-                + "       <http://acme.com/actions/likes> \"A new book\" ;\n"//
-                + "       <http://acme.com/actions/likes> \"Avocados\" .\n" + "} }";
-
-        log.info("Performing Query");
-
-        Update update = conn.prepareUpdate(QueryLanguage.SPARQL, query);
-        update.execute();
-        
-        query = "select ?x {GRAPH <http://updated/test> {?x <http://acme.com/actions/likes> \"A new book\" . "//
-                + " ?x <http://acme.com/actions/likes> \"Avocados\" }}";
-        CountingResultHandler resultHandler = new CountingResultHandler();
-        TupleQuery tupleQuery = conn.prepareTupleQuery(QueryLanguage.SPARQL, query);
-        tupleQuery.evaluate(resultHandler);
-        log.info("Result count : " + resultHandler.getCount());
-
-        Validate.isTrue(resultHandler.getCount() == 1);
-        resultHandler.resetCount();
-
-        //TODO delete currently not implemented in AccumuloRyaDAO for 
-//        // Delete Data
-//        query = "DELETE DATA\n" //
-//                + "{ GRAPH <http://updated/test> {\n"
-//                + "  <http://acme.com/people/Mike> <http://acme.com/actions/likes> \"A new book\" ;\n"
-//                + "   <http://acme.com/actions/likes> \"Avocados\" .\n" + "}}";
-//
-//        update = conn.prepareUpdate(QueryLanguage.SPARQL, query);
-//        update.execute();
-//
-//        query = "select ?x {GRAPH <http://updated/test> {?x <http://acme.com/actions/likes> \"A new book\" . "//
-//                + " ?x <http://acme.com/actions/likes> \"Avocados\" }}";
-//        tupleQuery = conn.prepareTupleQuery(QueryLanguage.SPARQL, query);
-//        tupleQuery.evaluate(resultHandler);
-//        log.info("Result count : " + resultHandler.getCount());
-//
-//        Validate.isTrue(resultHandler.getCount() == 0);
-    }
-    
-    
-
-    
-    
-    private static void testAddAndTemporalSearchWithPCJ(SailRepositoryConnection conn) throws Exception {
-
-        // create some resources and literals to make statements out of
-
-        String sparqlInsert = "PREFIX pref: <http://www.model/pref#> \n"
-                + "INSERT DATA {\n" //
-                + "<urn:Bob>       a       pref:Person ;\n" //
-                + "     pref:hasProperty1 'property1' ;\n" //  one second
-                + "     pref:hasProperty2 'property2' ;\n" //   2 seconds
-                + "     pref:hasProperty3 'property3' .\n" //   3 seconds
-                + "<urn:Fred>      a       pref:Person ; \n" //
-                + "     pref:hasProperty4 'property4' ; \n" //
-                + "     pref:hasProperty5 'property5' ; \n" //
-                + "}";
-
-        Update update = conn.prepareUpdate(QueryLanguage.SPARQL, sparqlInsert);
-        update.execute();
-        
-        String queryString = "PREFIX pref: <http://www.model/pref#> \n" //
-                + "SELECT ?x ?z \n" //
-                + "WHERE { \n"
-                + "  ?x a ?z. \n"
-                + "  ?x pref:hasProperty1 'property1' . \n"//
-                + "  ?x pref:hasProperty2 'property2' . \n"//
-                + "  ?x pref:hasProperty3 'property3' . \n"//
-                + "}";//
-       
-        
-
-        TupleQuery tupleQuery = conn.prepareTupleQuery(QueryLanguage.SPARQL, queryString);
-        CountingResultHandler tupleHandler = new CountingResultHandler();
-        tupleQuery.evaluate(tupleHandler);
-        log.info("Result count : " + tupleHandler.getCount());
-        Validate.isTrue(tupleHandler.getCount() == 1);
-        Validate.isTrue(tupleHandler.getBsSize() == 2);
-        
-        queryString = "PREFIX pref: <http://www.model/pref#> \n" //
-                + "SELECT ?x ?w ?z \n" //
-                + "WHERE { \n"
-                + "  ?x a ?z. \n"
-                + "  ?x pref:hasProperty4 'property4' . \n"//
-                + "  ?x pref:hasProperty5 ?w . \n"//
-                + "}";//
-       
-        
-        tupleQuery = conn.prepareTupleQuery(QueryLanguage.SPARQL, queryString);
-        tupleHandler = new CountingResultHandler();
-        tupleQuery.evaluate(tupleHandler);
-        log.info("Result count : " + tupleHandler.getCount());
-        Validate.isTrue(tupleHandler.getCount() == 1);
-        Validate.isTrue(tupleHandler.getBsSize() == 3);
-        
-        
-        queryString = "PREFIX pref: <http://www.model/pref#> " 
-                + "SELECT ?v ?w ?x ?y ?z " 
-                + "WHERE { " 
-                + "  ?w a ?z  . " 
-                + "  ?w pref:hasProperty1 ?v . " 
-                + "  ?w pref:hasProperty2 'property2' . " 
-                + "  ?w pref:hasProperty3 'property3' . " 
-                + "  ?x a ?z  . "
-                + "  ?x pref:hasProperty4 'property4' . " 
-                + "  ?x pref:hasProperty5 ?y . " 
-                + "}";
-       
-        
-
-        tupleQuery = conn.prepareTupleQuery(QueryLanguage.SPARQL, queryString);
-        tupleHandler = new CountingResultHandler();
-        tupleQuery.evaluate(tupleHandler);
-        log.info("Result count : " + tupleHandler.getCount());
-        Validate.isTrue(tupleHandler.getCount() == 1);
-        Validate.isTrue(tupleHandler.getBsSize() == 5);
-        
-    }
-    
-    
-    private static Configuration getConf() {
-
-        AccumuloRdfConfiguration conf = new AccumuloRdfConfiguration();
-
-        conf.setBoolean(ConfigUtils.USE_MOCK_INSTANCE, USE_MOCK_INSTANCE);
-        conf.set(ConfigUtils.USE_ENTITY, "true");
-        conf.set(RdfCloudTripleStoreConfiguration.CONF_TBL_PREFIX, RYA_TABLE_PREFIX);
-        conf.set(ConfigUtils.ENTITY_TABLENAME, RYA_TABLE_PREFIX + "entity");
-        conf.set(ConfigUtils.CLOUDBASE_USER, "root");
-        conf.set(ConfigUtils.CLOUDBASE_PASSWORD, "");
-        conf.set(ConfigUtils.CLOUDBASE_INSTANCE, INSTANCE);
-        conf.setInt(ConfigUtils.NUM_PARTITIONS, 3);
-        conf.set(ConfigUtils.CLOUDBASE_AUTHS, AUTHS);
-
-        return conf;
-    }
-    
-
-    private static class CountingResultHandler implements TupleQueryResultHandler {
-        private int count = 0;
-        private int bindingSize = 0;
-        private boolean bsSizeSet = false;
-
-        public int getCount() {
-            return count;
-        }
-        
-        public int getBsSize() {
-            return bindingSize;
-        }
-        
-        public void resetBsSize() {
-            bindingSize = 0;
-            bsSizeSet = false;
-        }
-
-        public void resetCount() {
-            this.count = 0;
-        }
-
-        @Override
-        public void startQueryResult(List<String> arg0) throws TupleQueryResultHandlerException {
-        }
-
-        @Override
-        public void handleSolution(BindingSet arg0) throws TupleQueryResultHandlerException {
-            count++;
-            if(!bsSizeSet) {
-                bindingSize = arg0.size();
-                bsSizeSet = true;
-            }
-            System.out.println(arg0);
-        }
-
-        @Override
-        public void endQueryResult() throws TupleQueryResultHandlerException {
-        }
-
-        @Override
-        public void handleBoolean(boolean arg0) throws QueryResultHandlerException {
-          // TODO Auto-generated method stub
-          
-        }
-
-        @Override
-        public void handleLinks(List<String> arg0) throws QueryResultHandlerException {
-          // TODO Auto-generated method stub
-          
-        }
-    }
-}
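
This commit drops the old extras/indexingSailExample module; the direct examples appear to continue under extras/indexingExample, where the same commit adds RunRyaDirectExample.bat. For anyone who depended on the deleted EntityDirectExample, its working core is just a handful of configuration keys. A minimal sketch, condensed from the getConf() above with the deleted file's constants inlined:

    // Mock instance, root user with empty password, entity index enabled.
    AccumuloRdfConfiguration conf = new AccumuloRdfConfiguration();
    conf.setBoolean(ConfigUtils.USE_MOCK_INSTANCE, true);
    conf.set(ConfigUtils.USE_ENTITY, "true");
    conf.set(RdfCloudTripleStoreConfiguration.CONF_TBL_PREFIX, "x_test_triplestore_");
    conf.set(ConfigUtils.ENTITY_TABLENAME, "x_test_triplestore_entity");
    conf.set(ConfigUtils.CLOUDBASE_USER, "root");
    conf.set(ConfigUtils.CLOUDBASE_PASSWORD, "");
    conf.set(ConfigUtils.CLOUDBASE_INSTANCE, "instance");
    conf.set(ConfigUtils.CLOUDBASE_AUTHS, "U");
    Sail sail = RyaSailFactory.getInstance(conf);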

http://git-wip-us.apache.org/repos/asf/incubator-rya/blob/80faf06d/extras/indexingSailExample/src/main/java/MongoRyaDirectExample.java
----------------------------------------------------------------------
diff --git a/extras/indexingSailExample/src/main/java/MongoRyaDirectExample.java b/extras/indexingSailExample/src/main/java/MongoRyaDirectExample.java
deleted file mode 100644
index 3f02fb2..0000000
--- a/extras/indexingSailExample/src/main/java/MongoRyaDirectExample.java
+++ /dev/null
@@ -1,288 +0,0 @@
-import java.util.List;
-
-import mvm.rya.api.RdfCloudTripleStoreConfiguration;
-import mvm.rya.indexing.RyaSailFactory;
-import mvm.rya.indexing.accumulo.ConfigUtils;
-import mvm.rya.mongodb.MongoDBRdfConfiguration;
-
-import org.apache.commons.lang.Validate;
-import org.apache.hadoop.conf.Configuration;
-import org.apache.log4j.Logger;
-import org.openrdf.model.Namespace;
-import org.openrdf.query.BindingSet;
-import org.openrdf.query.MalformedQueryException;
-import org.openrdf.query.QueryEvaluationException;
-import org.openrdf.query.QueryLanguage;
-import org.openrdf.query.QueryResultHandlerException;
-import org.openrdf.query.TupleQuery;
-import org.openrdf.query.TupleQueryResultHandler;
-import org.openrdf.query.TupleQueryResultHandlerException;
-import org.openrdf.query.Update;
-import org.openrdf.query.UpdateExecutionException;
-import org.openrdf.repository.RepositoryException;
-import org.openrdf.repository.RepositoryResult;
-import org.openrdf.repository.sail.SailRepository;
-import org.openrdf.repository.sail.SailRepositoryConnection;
-import org.openrdf.sail.Sail;
-
-public class MongoRyaDirectExample {
-    private static final Logger log = Logger.getLogger(MongoRyaDirectExample.class);
-
-    //
-    // Connection configuration parameters
-    //
-
-    private static final boolean PRINT_QUERIES = true;
-    private static final String MONGO_DB = "rya";
-    private static final String MONGO_COLL_PREFIX = "rya_";
-
-    public static void main(String[] args) throws Exception {
-        Configuration conf = getConf();
-        conf.setBoolean(ConfigUtils.DISPLAY_QUERY_PLAN, PRINT_QUERIES);
-  
-        SailRepository repository = null;
-        SailRepositoryConnection conn = null;
-        try {
-            log.info("Connecting to Indexing Sail Repository.");
-            Sail sail = RyaSailFactory.getInstance(conf);
-            repository = new SailRepository(sail);
-            repository.initialize();
-            conn = repository.getConnection();
-
-            long start = System.currentTimeMillis();
-            log.info("Running SPARQL Example: Add and Delete");
-            testAddAndDelete(conn);
-            testAddAndDeleteNoContext(conn);
-            testAddNamespaces(conn);
-            testAddPointAndWithinSearch(conn);
-
-            log.info("TIME: " + (System.currentTimeMillis() - start) / 1000.);
-        } finally {
-            log.info("Shutting down");
-            closeQuietly(conn);
-            closeQuietly(repository);
-        }
-    }
-
-    private static void testAddPointAndWithinSearch(SailRepositoryConnection conn) throws Exception {
-
-        String update = "PREFIX geo: <http://www.opengis.net/ont/geosparql#>  "//
-                + "INSERT DATA { " //
-                + "  <urn:feature> a geo:Feature ; " //
-                + "    geo:hasGeometry [ " //
-                + "      a geo:Point ; " //
-                + "      geo:asWKT \"Point(-77.03524 38.889468)\"^^geo:wktLiteral "//
-                + "    ] . " //
-                + "}";
-
-        Update u = conn.prepareUpdate(QueryLanguage.SPARQL, update);
-        u.execute();
-
-        String queryString;
-        TupleQuery tupleQuery;
-        CountingResultHandler tupleHandler;
-
-        // ring containing point
-        queryString = "PREFIX geo: <http://www.opengis.net/ont/geosparql#>  "//
-                + "PREFIX geof: <http://www.opengis.net/def/function/geosparql/>  "//
-                + "SELECT ?feature ?point ?wkt " //
-                + "{" //
-                + "  ?feature a geo:Feature . "//
-                + "  ?feature geo:hasGeometry ?point . "//
-                + "  ?point a geo:Point . "//
-                + "  ?point geo:asWKT ?wkt . "//
-                + "  FILTER(geof:sfWithin(?wkt, \"POLYGON((-78 39, -77 39, -77 38, -78 38, -78 39))\"^^geo:wktLiteral)) " //
-                + "}";//
-        tupleQuery = conn.prepareTupleQuery(QueryLanguage.SPARQL, queryString);
-
-        tupleHandler = new CountingResultHandler();
-        tupleQuery.evaluate(tupleHandler);
-        log.info("Result count : " + tupleHandler.getCount());
-        Validate.isTrue(tupleHandler.getCount() >= 1); // may see points from previous runs
-
-        // ring outside point
-        queryString = "PREFIX geo: <http://www.opengis.net/ont/geosparql#>  "//
-                + "PREFIX geof: <http://www.opengis.net/def/function/geosparql/>  "//
-                + "SELECT ?feature ?point ?wkt " //
-                + "{" //
-                + "  ?feature a geo:Feature . "//
-                + "  ?feature geo:hasGeometry ?point . "//
-                + "  ?point a geo:Point . "//
-                + "  ?point geo:asWKT ?wkt . "//
-                + "  FILTER(geof:sfWithin(?wkt, \"POLYGON((-77 39, -76 39, -76 38, -77 38, -77 39))\"^^geo:wktLiteral)) " //
-                + "}";//
-        tupleQuery = conn.prepareTupleQuery(QueryLanguage.SPARQL, queryString);
-
-        tupleHandler = new CountingResultHandler();
-        tupleQuery.evaluate(tupleHandler);
-        log.info("Result count : " + tupleHandler.getCount());
-        Validate.isTrue(tupleHandler.getCount() == 0);
-    }
-
-    private static void closeQuietly(SailRepository repository) {
-        if (repository != null) {
-            try {
-                repository.shutDown();
-            } catch (RepositoryException e) {
-                // quietly absorb this exception
-            }
-        }
-    }
-
-    private static void closeQuietly(SailRepositoryConnection conn) {
-        if (conn != null) {
-            try {
-                conn.close();
-            } catch (RepositoryException e) {
-                // quietly absorb this exception
-            }
-        }
-    }
-
-    private static Configuration getConf() {
-
-        Configuration conf = new Configuration();
-        conf.set(ConfigUtils.USE_MONGO, "true");
-        conf.set(MongoDBRdfConfiguration.USE_TEST_MONGO, "true");
-        conf.set(MongoDBRdfConfiguration.MONGO_DB_NAME, MONGO_DB);
-        conf.set(MongoDBRdfConfiguration.MONGO_COLLECTION_PREFIX, MONGO_COLL_PREFIX);
-        conf.set(ConfigUtils.GEO_PREDICATES_LIST, "http://www.opengis.net/ont/geosparql#asWKT");
-        conf.set(ConfigUtils.USE_GEO, "true");
-        conf.set(RdfCloudTripleStoreConfiguration.CONF_TBL_PREFIX, MONGO_COLL_PREFIX);
-        
-        return conf;
-    }
-
-
-
-    public static void testAddAndDelete(SailRepositoryConnection conn) throws MalformedQueryException, RepositoryException,
-            UpdateExecutionException, QueryEvaluationException, TupleQueryResultHandlerException {
-
-        // Add data
-        String query = "INSERT DATA\n"//
-                + "{ GRAPH <http://updated/test> {\n"//
-                + "  <http://acme.com/people/Mike> " //
-                + "       <http://acme.com/actions/likes> \"A new book\" ;\n"//
-                + "       <http://acme.com/actions/likes> \"Avocados\" .\n" + "} }";
-
-        log.info("Performing Query");
-
-        Update update = conn.prepareUpdate(QueryLanguage.SPARQL, query);
-        update.execute();
-
-         query = "select ?p ?o { GRAPH <http://updated/test> {<http://acme.com/people/Mike> ?p ?o . }}";
-         CountingResultHandler resultHandler = new CountingResultHandler();
-         TupleQuery tupleQuery = conn.prepareTupleQuery(QueryLanguage.SPARQL, query);
-         tupleQuery.evaluate(resultHandler);
-         log.info("Result count : " + resultHandler.getCount());
-        
-         Validate.isTrue(resultHandler.getCount() == 2);
-        
-         resultHandler.resetCount();
-        
-         // Delete Data
-         query = "DELETE DATA\n" //
-         + "{ GRAPH <http://updated/test> {\n"
-         + "  <http://acme.com/people/Mike> <http://acme.com/actions/likes> \"A new book\" ;\n"
-         + "   <http://acme.com/actions/likes> \"Avocados\" .\n" + "}}";
-        
-         update = conn.prepareUpdate(QueryLanguage.SPARQL, query);
-         update.execute();
-        
-         query = "select ?p ?o { GRAPH <http://updated/test> {<http://acme.com/people/Mike> ?p ?o . }}";
-         tupleQuery = conn.prepareTupleQuery(QueryLanguage.SPARQL, query);
-         tupleQuery.evaluate(resultHandler);
-         log.info("Result count : " + resultHandler.getCount());
-        
-         Validate.isTrue(resultHandler.getCount() == 0);
-    }
-
-    public static void testAddNamespaces(SailRepositoryConnection conn) throws MalformedQueryException, RepositoryException,
-    UpdateExecutionException, QueryEvaluationException, TupleQueryResultHandlerException {
-
-    	conn.setNamespace("rya", "http://rya.com");
-    	RepositoryResult<Namespace> results = conn.getNamespaces();
-    	for (Namespace space : results.asList()){
-    		System.out.println(space.getName() + ", " + space.getPrefix());
-    	}
-      }
-
-    public static void testAddAndDeleteNoContext(SailRepositoryConnection conn) throws MalformedQueryException, RepositoryException,
-    UpdateExecutionException, QueryEvaluationException, TupleQueryResultHandlerException {
-
-    	// Add data
-    	String query = "INSERT DATA\n"//
-    			+ "{ \n"//
-    			+ "  <http://acme.com/people/Mike> " //
-    			+ "       <http://acme.com/actions/likes> \"A new book\" ;\n"//
-    			+ "       <http://acme.com/actions/likes> \"Avocados\" .\n" + " }";
-
-    	log.info("Performing Query");
-
-    	Update update = conn.prepareUpdate(QueryLanguage.SPARQL, query);
-    	update.execute();
-
-    	query = "select ?p ?o {<http://acme.com/people/Mike> ?p ?o . }";
-    	CountingResultHandler resultHandler = new CountingResultHandler();
-    	TupleQuery tupleQuery = conn.prepareTupleQuery(QueryLanguage.SPARQL, query);
-    	tupleQuery.evaluate(resultHandler);
-    	log.info("Result count : " + resultHandler.getCount());
-
-    	Validate.isTrue(resultHandler.getCount() == 2);
-
-    	resultHandler.resetCount();
-
-    	// Delete Data
-    	query = "DELETE DATA\n" //
-    			+ "{ \n"
-    			+ "  <http://acme.com/people/Mike> <http://acme.com/actions/likes> \"A new book\" ;\n"
-    			+ "   <http://acme.com/actions/likes> \"Avocados\" .\n" + "}";
-
-    	update = conn.prepareUpdate(QueryLanguage.SPARQL, query);
-    	update.execute();
-
-    	query = "select ?p ?o { {<http://acme.com/people/Mike> ?p ?o . }}";
-    	tupleQuery = conn.prepareTupleQuery(QueryLanguage.SPARQL, query);
-    	tupleQuery.evaluate(resultHandler);
-    	log.info("Result count : " + resultHandler.getCount());
-
-    	Validate.isTrue(resultHandler.getCount() == 0);
-    }
-
-    private static class CountingResultHandler implements TupleQueryResultHandler {
-        private int count = 0;
-
-        public int getCount() {
-            return count;
-        }
-
-        public void resetCount() {
-            this.count = 0;
-        }
-
-        @Override
-        public void startQueryResult(List<String> arg0) throws TupleQueryResultHandlerException {
-        }
-
-        @Override
-        public void handleSolution(BindingSet arg0) throws TupleQueryResultHandlerException {
-            count++;
-        }
-
-        @Override
-        public void endQueryResult() throws TupleQueryResultHandlerException {
-        }
-
-        @Override
-        public void handleBoolean(boolean arg0) throws QueryResultHandlerException {
-          // TODO Auto-generated method stub
-          
-        }
-
-        @Override
-        public void handleLinks(List<String> arg0) throws QueryResultHandlerException {
-          // TODO Auto-generated method stub
-          
-        }
-    }
-}
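
The geo assertions in both the Accumulo examples above and this deleted Mongo example follow from plain point-in-polygon geometry: Point(-77.03524 38.889468) lies inside POLYGON((-78 39, -77 39, -77 38, -78 38, -78 39)), whose longitudes span -78 to -77, and outside POLYGON((-77 39, -76 39, -76 38, -77 38, -77 39)), whose longitudes span -77 to -76. A standalone sanity check with JTS reproduces the expected counts; this assumes JTS is on the classpath (it normally arrives transitively with the GeoMesa dependency used by these modules):

    import com.vividsolutions.jts.geom.Geometry;
    import com.vividsolutions.jts.io.WKTReader;

    public class RingCheck {
        public static void main(String[] args) throws Exception {
            WKTReader wkt = new WKTReader();
            Geometry point = wkt.read("POINT(-77.03524 38.889468)");
            Geometry containingRing = wkt.read("POLYGON((-78 39, -77 39, -77 38, -78 38, -78 39))");
            Geometry disjointRing = wkt.read("POLYGON((-77 39, -76 39, -76 38, -77 38, -77 39))");
            System.out.println(containingRing.contains(point)); // true  -> the ">= 1" and "== 1" assertions
            System.out.println(disjointRing.contains(point));   // false -> the "== 0" assertions
        }
    }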


http://git-wip-us.apache.org/repos/asf/incubator-rya/blob/80faf06d/pig/cloudbase.pig/src/main/java/mvm/rya/cloudbase/pig/dep/CloudbaseStorage.java
----------------------------------------------------------------------
diff --git a/pig/cloudbase.pig/src/main/java/mvm/rya/cloudbase/pig/dep/CloudbaseStorage.java b/pig/cloudbase.pig/src/main/java/mvm/rya/cloudbase/pig/dep/CloudbaseStorage.java
deleted file mode 100644
index bcdd8cc..0000000
--- a/pig/cloudbase.pig/src/main/java/mvm/rya/cloudbase/pig/dep/CloudbaseStorage.java
+++ /dev/null
@@ -1,299 +0,0 @@
-//package mvm.rya.cloudbase.pig.dep;
-//
-//import cloudbase.core.CBConstants;
-//import cloudbase.core.client.mapreduce.CloudbaseInputFormat;
-//import cloudbase.core.data.*;
-//import cloudbase.core.security.Authorizations;
-//import org.apache.commons.codec.binary.Base64;
-//import org.apache.hadoop.conf.Configuration;
-//import org.apache.hadoop.fs.Path;
-//import org.apache.hadoop.io.Text;
-//import org.apache.hadoop.io.Writable;
-//import org.apache.hadoop.io.WritableComparable;
-//import org.apache.hadoop.mapreduce.*;
-//import org.apache.pig.LoadFunc;
-//import org.apache.pig.OrderedLoadFunc;
-//import org.apache.pig.backend.hadoop.executionengine.mapReduceLayer.PigSplit;
-//import org.apache.pig.data.DataByteArray;
-//import org.apache.pig.data.Tuple;
-//import org.apache.pig.data.TupleFactory;
-//
-//import java.io.*;
-//import java.math.BigInteger;
-//import java.util.*;
-//
-///**
-// */
-//@Deprecated
-//public class CloudbaseStorage extends LoadFunc
-//        implements OrderedLoadFunc
-//{
-//
-//    protected String user;
-//    protected String password;
-//    protected Authorizations auths;
-//    protected String zk;
-//    protected String instanceName;
-//    protected String startRow;
-//    protected String endRow;
-//    protected Collection<Range> ranges;
-//    protected RecordReader reader;
-//
-//    public CloudbaseStorage(String startRow, String endRow, String instanceName, String zk, String user, String password) {
-//        auths = CBConstants.NO_AUTHS;
-//        this.startRow = startRow;
-//        this.endRow = endRow;
-//        this.instanceName = instanceName;
-//        this.zk = zk;
-//        this.user = user;
-//        this.password = password;
-//    }
-//
-//    protected void addRange(Range range) {
-//        if(ranges == null) {
-//            ranges = new ArrayList<Range>();
-//        }
-//        ranges.add(range);
-//    }
-//
-//    @Override
-//    public void setLocation(String tableName, Job job) throws IOException {
-//        try {
-//            Configuration conf = job.getConfiguration();
-//            //TODO: ?
-//            conf.setBoolean("mapred.reduce.tasks.speculative.execution", false);
-//            conf.setBoolean("mapred.map.tasks.speculative.execution", false);
-//            conf.set("io.sort.mb", "256");
-//            conf.setBoolean("mapred.compress.map.output", true);
-//            conf.set("mapred.map.output.compression.codec", "org.apache.hadoop.io.compress.GzipCodec");
-//
-//            if (!conf.getBoolean(INSTANCE_HAS_BEEN_SET, false))
-//                CloudbaseInputFormat.setZooKeeperInstance(job, instanceName, zk);
-//            if (!conf.getBoolean(INPUT_INFO_HAS_BEEN_SET, false))
-//                CloudbaseInputFormat.setInputInfo(job, user, password.getBytes(), tableName, auths);
-//            System.out.println(tableName);
-//            conf.set(TABLE_NAME, tableName);
-//            if (ranges == null) {
-//                addRange(new Range(new Text(startRow), new Text(endRow)));
-//            }
-////            List<Range> ranges = getRanges(job);
-////            ranges.add(range);
-////            System.out.println(ranges);
-////            CloudbaseInputFormat.setRanges(job, ranges);
-//            CloudbaseInputFormat.setRanges(job, ranges);
-////        CloudbaseInputFormat.fetchColumns(job, Collections.singleton(new Pair<Text, Text>()));
-//        } catch (IllegalStateException e) {
-//            throw new IOException(e);
-//        }
-//    }
-//
-//    private static final String PREFIX = CloudbaseInputFormat.class.getSimpleName();
-//    private static final String RANGES = PREFIX + ".ranges";
-//    private static final String INSTANCE_HAS_BEEN_SET = PREFIX + ".instanceConfigured";
-//    private static final String INPUT_INFO_HAS_BEEN_SET = PREFIX + ".configured";
-//    private static final String TABLE_NAME = PREFIX + ".tablename";
-//
-//    protected static List<Range> getRanges(JobContext job) throws IOException {
-//        ArrayList<Range> ranges = new ArrayList<Range>();
-//        for (String rangeString : job.getConfiguration().getStringCollection(RANGES)) {
-//            ByteArrayInputStream bais = new ByteArrayInputStream(Base64.decodeBase64(rangeString.getBytes()));
-//            Range range = new Range();
-//            range.readFields(new DataInputStream(bais));
-//            ranges.add(range);
-//        }
-//        return ranges;
-//    }
-//
-//    @Override
-//    public String relativeToAbsolutePath(String location, Path curDir) throws IOException {
-//        return location;
-//    }
-//
-//    @Override
-//    public InputFormat getInputFormat() throws IOException {
-//
-////        CloudbaseInputFormat format = new CloudbaseInputFormat() {
-////            @Override
-////            public List<InputSplit> getSplits(JobContext job) throws IOException {
-////                try {
-////                    List<InputSplit> splits = super.getSplits(job);
-////                    List<InputSplit> outsplits = new ArrayList<InputSplit>();
-////                    for (InputSplit inputSplit : splits) {
-////                        RangeInputSplit ris = (RangeInputSplit) inputSplit;
-////                        ByteArrayOutputStream bais = new ByteArrayOutputStream();
-////                        DataOutputStream out = new DataOutputStream(bais);
-////                        ris.write(out);
-////                        out.close();
-////                        MyRangeInputSplit rangeInputSplit = new MyRangeInputSplit();
-////                        DataInputStream in = new DataInputStream(new ByteArrayInputStream(bais.toByteArray()));
-////                        rangeInputSplit.readFields(in);
-////                        in.close();
-////                        String[] locations = inputSplit.getLocations();
-////                        String[] newlocs = new String[locations.length];
-////                        int i = 0;
-////                        for (String loc : locations) {
-////                            java.net.InetAddress inetAdd = java.net.InetAddress.getByName(loc);
-////                            newlocs[i] = inetAdd.getHostName();
-////                            i++;
-////                        }
-////                        rangeInputSplit.locations = newlocs;
-////                        outsplits.add(rangeInputSplit);
-////                    }
-////                    return outsplits;
-////                } catch (Exception e) {
-////                    throw new IOException(e);
-////                }
-////            }
-////        };
-////        return format;
-//
-//        return new CloudbaseInputFormat();
-//
-//    }
-//
-//    @Override
-//    public void prepareToRead(RecordReader recordReader, PigSplit pigSplit) throws IOException {
-//        this.reader = recordReader;
-//    }
-//
-//    @Override
-//    public Tuple getNext() throws IOException {
-//        try {
-//            if (reader.nextKeyValue()) {
-//                Key key = (Key) reader.getCurrentKey();
-//                Value value = (Value) reader.getCurrentValue();
-//
-//                Text row = key.getRow();
-//                Text cf = key.getColumnFamily();
-//                Text cq = key.getColumnQualifier();
-//                byte[] val_bytes = value.get();
-//                Tuple tuple = TupleFactory.getInstance().newTuple(4);
-//                tuple.set(0, row);
-//                tuple.set(1, cf);
-//                tuple.set(2, cq);
-//                tuple.set(3, new DataByteArray(val_bytes));
-//                return tuple;
-//            }
-//        } catch (Exception e) {
-//            throw new IOException(e);
-//        }
-//        return null;
-//    }
-//
-//    @Override
-//    public WritableComparable<?> getSplitComparable(InputSplit inputSplit) throws IOException {
-//        //cannot get access to the range directly
-//        CloudbaseInputFormat.RangeInputSplit rangeInputSplit = (CloudbaseInputFormat.RangeInputSplit) inputSplit;
-//        ByteArrayOutputStream baos = new ByteArrayOutputStream();
-//        DataOutputStream out = new DataOutputStream(baos);
-//        rangeInputSplit.write(out);
-//        out.close();
-//        DataInputStream stream = new DataInputStream(new ByteArrayInputStream(baos.toByteArray()));
-//        Range range = new Range();
-//        range.readFields(stream);
-//        stream.close();
-//        return range;
-//    }
-//
-//    public static class MyRangeInputSplit extends CloudbaseInputFormat.RangeInputSplit
-//            implements Writable {
-//
-//        private static byte[] extractBytes(ByteSequence seq, int numBytes) {
-//            byte bytes[] = new byte[numBytes + 1];
-//            bytes[0] = 0;
-//            for (int i = 0; i < numBytes; i++)
-//                if (i >= seq.length())
-//                    bytes[i + 1] = 0;
-//                else
-//                    bytes[i + 1] = seq.byteAt(i);
-//
-//            return bytes;
-//        }
-//
-//        public static float getProgress(ByteSequence start, ByteSequence end, ByteSequence position) {
-//            int maxDepth = Math.min(Math.max(end.length(), start.length()), position.length());
-//            BigInteger startBI = new BigInteger(extractBytes(start, maxDepth));
-//            BigInteger endBI = new BigInteger(extractBytes(end, maxDepth));
-//            BigInteger positionBI = new BigInteger(extractBytes(position, maxDepth));
-//            return (float) (positionBI.subtract(startBI).doubleValue() / endBI.subtract(startBI).doubleValue());
-//        }
-//
-//        public float getProgress(Key currentKey) {
-//            if (currentKey == null)
-//                return 0.0F;
-//            if (range.getStartKey() != null && range.getEndKey() != null) {
-//                if (range.getStartKey().compareTo(range.getEndKey(), PartialKey.ROW) != 0)
-//                    return getProgress(range.getStartKey().getRowData(), range.getEndKey().getRowData(), currentKey.getRowData());
-//                if (range.getStartKey().compareTo(range.getEndKey(), PartialKey.ROW_COLFAM) != 0)
-//                    return getProgress(range.getStartKey().getColumnFamilyData(), range.getEndKey().getColumnFamilyData(), currentKey.getColumnFamilyData());
-//                if (range.getStartKey().compareTo(range.getEndKey(), PartialKey.ROW_COLFAM_COLQUAL) != 0)
-//                    return getProgress(range.getStartKey().getColumnQualifierData(), range.getEndKey().getColumnQualifierData(), currentKey.getColumnQualifierData());
-//            }
-//            return 0.0F;
-//        }
-//
-//        /**
-//         * @deprecated Method getLength is deprecated
-//         */
-//
-//        public long getLength()
-//                throws IOException {
-//            Text startRow = range.isInfiniteStartKey() ? new Text(new byte[]{
-//                    -128
-//            }) : range.getStartKey().getRow();
-//            Text stopRow = range.isInfiniteStopKey() ? new Text(new byte[]{
-//                    127
-//            }) : range.getEndKey().getRow();
-//            int maxCommon = Math.min(7, Math.min(startRow.getLength(), stopRow.getLength()));
-//            long diff = 0L;
-//            byte start[] = startRow.getBytes();
-//            byte stop[] = stopRow.getBytes();
-//            for (int i = 0; i < maxCommon; i++) {
-//                diff |= 255 & (start[i] ^ stop[i]);
-//                diff <<= 8;
-//            }
-//
-//            if (startRow.getLength() != stopRow.getLength())
-//                diff |= 255L;
-//            return diff + 1L;
-//        }
-//
-//        public String[] getLocations()
-//                throws IOException {
-//            return locations;
-//        }
-//
-//        public void readFields(DataInput in)
-//                throws IOException {
-//            range.readFields(in);
-//            int numLocs = in.readInt();
-//            locations = new String[numLocs];
-//            for (int i = 0; i < numLocs; i++)
-//                locations[i] = in.readUTF();
-//
-//        }
-//
-//        public void write(DataOutput out)
-//                throws IOException {
-//            range.write(out);
-//            out.writeInt(locations.length);
-//            for (int i = 0; i < locations.length; i++)
-//                out.writeUTF(locations[i]);
-//
-//        }
-//
-//        public Range range;
-//        public String locations[];
-//
-//
-//        public MyRangeInputSplit() {
-//            range = new Range();
-//            locations = new String[0];
-//        }
-//
-//        MyRangeInputSplit(String table, Range range, String locations[]) {
-//            this.range = range;
-//            this.locations = locations;
-//        }
-//    }
-//}

http://git-wip-us.apache.org/repos/asf/incubator-rya/blob/80faf06d/pig/cloudbase.pig/src/main/java/mvm/rya/cloudbase/pig/dep/StatementPatternStorage.java
----------------------------------------------------------------------
diff --git a/pig/cloudbase.pig/src/main/java/mvm/rya/cloudbase/pig/dep/StatementPatternStorage.java b/pig/cloudbase.pig/src/main/java/mvm/rya/cloudbase/pig/dep/StatementPatternStorage.java
deleted file mode 100644
index 6cc40bd..0000000
--- a/pig/cloudbase.pig/src/main/java/mvm/rya/cloudbase/pig/dep/StatementPatternStorage.java
+++ /dev/null
@@ -1,178 +0,0 @@
-//package mvm.rya.cloudbase.pig.dep;
-//
-//import cloudbase.core.client.ZooKeeperInstance;
-//import cloudbase.core.data.Key;
-//import cloudbase.core.data.Range;
-//import com.google.common.io.ByteArrayDataInput;
-//import com.google.common.io.ByteStreams;
-//import mvm.mmrts.api.RdfCloudTripleStoreConstants;
-//import mvm.mmrts.api.RdfCloudTripleStoreUtils;
-//import mvm.rya.cloudbase.CloudbaseRdfDAO;
-//import mvm.rya.cloudbase.query.DefineTripleQueryRangeFactory;
-//import mvm.mmrts.rdftriplestore.inference.InferenceEngine;
-//import org.apache.hadoop.conf.Configuration;
-//import org.apache.hadoop.mapreduce.Job;
-//import org.apache.pig.data.Tuple;
-//import org.apache.pig.data.TupleFactory;
-//import org.openrdf.model.Resource;
-//import org.openrdf.model.Statement;
-//import org.openrdf.model.URI;
-//import org.openrdf.model.Value;
-//import org.openrdf.model.vocabulary.RDF;
-//import org.openrdf.query.MalformedQueryException;
-//import org.openrdf.query.algebra.StatementPattern;
-//import org.openrdf.query.algebra.Var;
-//import org.openrdf.query.algebra.helpers.QueryModelVisitorBase;
-//import org.openrdf.query.parser.ParsedQuery;
-//import org.openrdf.query.parser.QueryParser;
-//import org.openrdf.query.parser.sparql.SPARQLParser;
-//
-//import java.io.IOException;
-//import java.util.Collection;
-//import java.util.Map;
-//import java.util.Set;
-//
-//import static mvm.mmrts.api.RdfCloudTripleStoreConstants.*;
-//
-///**
-// */
-//@Deprecated
-//public class StatementPatternStorage extends CloudbaseStorage {
-//    protected RdfCloudTripleStoreConstants.TABLE_LAYOUT layout;
-//    protected String subject;
-//    protected String predicate;
-//    protected String object;
-//    private Value object_value;
-//    private Value predicate_value;
-//    private Value subject_value;
-//
-//    DefineTripleQueryRangeFactory queryRangeFactory = new DefineTripleQueryRangeFactory();
-//
-//
-//    public StatementPatternStorage(String subject, String predicate, String object, String instanceName, String zk, String user, String password) {
-//        super(null, null, instanceName, zk, user, password);
-//        this.subject = (subject != null && subject.length() > 0) ? subject : "?s";
-//        this.predicate = (predicate != null && predicate.length() > 0) ? predicate : "?p";
-//        this.object = (object != null && object.length() > 0) ? object : "?o";
-//    }
-//
-//    private Value getValue(Var subjectVar) {
-//        return subjectVar.hasValue() ? subjectVar.getValue() : null;
-//    }
-//
-//    @Override
-//    public void setLocation(String tablePrefix, Job job) throws IOException {
-//        addStatementPatternRange(subject, predicate, object);
-//        addInferredRanges(tablePrefix, job);
-////            range = entry.getValue();
-////            layout = entry.getKey();
-//        if (layout == null)
-//            throw new IllegalArgumentException("Range and/or layout is null. Check the query");
-//        String tableName = RdfCloudTripleStoreUtils.layoutPrefixToTable(layout, tablePrefix);
-//        super.setLocation(tableName, job);
-//    }
-//
-//    protected void addInferredRanges(String tablePrefix, Job job) throws IOException {
-//        //inference engine
-//        CloudbaseRdfDAO rdfDAO = new CloudbaseRdfDAO();
-//        rdfDAO.setConf(job.getConfiguration());
-//        rdfDAO.setSpoTable(tablePrefix + TBL_SPO_SUFFIX);
-//        rdfDAO.setPoTable(tablePrefix + TBL_PO_SUFFIX);
-//        rdfDAO.setOspTable(tablePrefix + TBL_OSP_SUFFIX);
-//        rdfDAO.setNamespaceTable(tablePrefix + TBL_NS_SUFFIX);
-//        try {
-//            rdfDAO.setConnector(new ZooKeeperInstance(instanceName, zk).getConnector(user, password.getBytes()));
-//        } catch (Exception e) {
-//            throw new IOException(e);
-//        }
-//        rdfDAO.init();
-//        InferenceEngine inferenceEngine = new InferenceEngine();
-//        inferenceEngine.setConf(job.getConfiguration());
-//        inferenceEngine.setRyaDAO(rdfDAO);
-//        inferenceEngine.init();
-//        //is it subclassof or subpropertyof
-//        if(RDF.TYPE.equals(predicate_value)) {
-//            //try subclassof
-//            Collection<URI> parents = inferenceEngine.findParents(inferenceEngine.getSubClassOfGraph(), (URI) object_value);
-//            if (parents != null && parents.size() > 0) {
-//                //subclassof relationships found
-//                //don't add self, that will happen anyway later
-//                //add all relationships
-//                for(URI parent : parents) {
-//                    Map.Entry<RdfCloudTripleStoreConstants.TABLE_LAYOUT, Range> temp =
-//                            queryRangeFactory.defineRange(subject_value, predicate_value, parent, new Configuration());
-//                    Range range = temp.getValue();
-//                    System.out.println(range);
-//                    addRange(range);
-//                }
-//            }
-//        } else if(predicate_value != null) {
-//            //subpropertyof check
-//            Set<URI> parents = inferenceEngine.findParents(inferenceEngine.getSubPropertyOfGraph(), (URI) predicate_value);
-//            for(URI parent : parents) {
-//                Map.Entry<RdfCloudTripleStoreConstants.TABLE_LAYOUT, Range> temp =
-//                        queryRangeFactory.defineRange(subject_value, parent, object_value, new Configuration());
-//                Range range = temp.getValue();
-//                System.out.println(range);
-//                addRange(range);
-//            }
-//        }
-//        inferenceEngine.destroy();
-//        rdfDAO.destroy();
-//    }
-//
-//    protected void addStatementPatternRange(String subj, String pred, String obj) throws IOException {
-//        String sparql = "select * where {\n" +
-//                subj + " " + pred + " " + obj + ".\n" +
-//                "}";
-//        System.out.println(sparql);
-//        QueryParser parser = new SPARQLParser();
-//        ParsedQuery parsedQuery = null;
-//        try {
-//            parsedQuery = parser.parseQuery(sparql, null);
-//        } catch (MalformedQueryException e) {
-//            throw new IOException(e);
-//        }
-//        parsedQuery.getTupleExpr().visitChildren(new QueryModelVisitorBase<IOException>() {
-//            @Override
-//            public void meet(StatementPattern node) throws IOException {
-//                Var subjectVar = node.getSubjectVar();
-//                Var predicateVar = node.getPredicateVar();
-//                Var objectVar = node.getObjectVar();
-//                subject_value = getValue(subjectVar);
-//                predicate_value = getValue(predicateVar);
-//                object_value = getValue(objectVar);
-//                System.out.println(subject_value + " " + predicate_value + " " + object_value);
-//                Map.Entry<RdfCloudTripleStoreConstants.TABLE_LAYOUT, Range> temp =
-//                        queryRangeFactory.defineRange((Resource) subject_value, (URI) predicate_value, object_value, new Configuration());
-//                layout = temp.getKey();
-//                Range range = temp.getValue();
-//                addRange(range);
-//                System.out.println(range);
-//            }
-//        });
-//    }
-//
-//    @Override
-//    public Tuple getNext() throws IOException {
-//        try {
-//            if (reader.nextKeyValue()) {
-//                Key key = (Key) reader.getCurrentKey();
-//                cloudbase.core.data.Value value = (cloudbase.core.data.Value) reader.getCurrentValue();
-//                ByteArrayDataInput input = ByteStreams.newDataInput(key.getRow().getBytes());
-//                Statement statement = RdfCloudTripleStoreUtils.translateStatementFromRow(input,
-//                        key.getColumnFamily(), layout, RdfCloudTripleStoreConstants.VALUE_FACTORY);
-//
-//                Tuple tuple = TupleFactory.getInstance().newTuple(4);
-//                tuple.set(0, statement.getSubject().stringValue());
-//                tuple.set(1, statement.getPredicate().stringValue());
-//                tuple.set(2, statement.getObject().stringValue());
-//                tuple.set(3, (statement.getContext() != null) ? (statement.getContext().stringValue()) : (null));
-//                return tuple;
-//            }
-//        } catch (Exception e) {
-//            throw new IOException(e);
-//        }
-//        return null;
-//    }
-//}
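
For context, the commented-out addInferredRanges above widened one statement pattern into a scan range per inferred parent: subClassOf parents when the predicate is rdf:type, subPropertyOf parents for any other bound predicate. A minimal sketch of that expansion, reusing the InferenceEngine and DefineTripleQueryRangeFactory calls exactly as they appear in the dead code (variable names and the surrounding setup are illustrative):

    // "?s rdf:type :Student" must also scan the range of every superclass of
    // :Student; the pattern's own range is added separately, so self is skipped.
    if (RDF.TYPE.equals(predicateValue)) {
        Collection<URI> parents = inferenceEngine.findParents(
                inferenceEngine.getSubClassOfGraph(), (URI) objectValue);
        for (URI parent : parents) {
            addRange(queryRangeFactory
                    .defineRange(subjectValue, predicateValue, parent, conf).getValue());
        }
    } else if (predicateValue != null) {
        // Same widening for subPropertyOf: scan each parent property's range.
        for (URI parent : inferenceEngine.findParents(
                inferenceEngine.getSubPropertyOfGraph(), (URI) predicateValue)) {
            addRange(queryRangeFactory
                    .defineRange(subjectValue, parent, objectValue, conf).getValue());
        }
    }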

http://git-wip-us.apache.org/repos/asf/incubator-rya/blob/80faf06d/pig/cloudbase.pig/src/main/java/mvm/rya/cloudbase/pig/optimizer/SimilarVarJoinOptimizer.java
----------------------------------------------------------------------
diff --git a/pig/cloudbase.pig/src/main/java/mvm/rya/cloudbase/pig/optimizer/SimilarVarJoinOptimizer.java b/pig/cloudbase.pig/src/main/java/mvm/rya/cloudbase/pig/optimizer/SimilarVarJoinOptimizer.java
deleted file mode 100644
index 3e14fdc..0000000
--- a/pig/cloudbase.pig/src/main/java/mvm/rya/cloudbase/pig/optimizer/SimilarVarJoinOptimizer.java
+++ /dev/null
@@ -1,189 +0,0 @@
-package mvm.rya.cloudbase.pig.optimizer;
-
-import org.openrdf.query.BindingSet;
-import org.openrdf.query.Dataset;
-import org.openrdf.query.algebra.*;
-import org.openrdf.query.algebra.evaluation.QueryOptimizer;
-import org.openrdf.query.algebra.evaluation.impl.EvaluationStatistics;
-import org.openrdf.query.algebra.helpers.QueryModelVisitorBase;
-import org.openrdf.query.algebra.helpers.StatementPatternCollector;
-
-import java.util.*;
-
-/**
- * A query optimizer that re-orders nested Joins according to cardinality, preferring joins that have similar variables.
- *
- */
-public class SimilarVarJoinOptimizer implements QueryOptimizer {
-
-    protected final EvaluationStatistics statistics;
-
-    public SimilarVarJoinOptimizer() {
-        this(new EvaluationStatistics());
-    }
-
-    public SimilarVarJoinOptimizer(EvaluationStatistics statistics) {
-        this.statistics = statistics;
-    }
-
-    /**
-     * Applies generally applicable optimizations: path expressions are sorted
-     * from more to less specific.
-     *
-     * @param tupleExpr
-     */
-    public void optimize(TupleExpr tupleExpr, Dataset dataset, BindingSet bindings) {
-        tupleExpr.visit(new JoinVisitor());
-    }
-
-    protected class JoinVisitor extends QueryModelVisitorBase<RuntimeException> {
-
-        Set<String> boundVars = new HashSet<String>();
-
-        @Override
-        public void meet(LeftJoin leftJoin) {
-            leftJoin.getLeftArg().visit(this);
-
-            Set<String> origBoundVars = boundVars;
-            try {
-                boundVars = new HashSet<String>(boundVars);
-                boundVars.addAll(leftJoin.getLeftArg().getBindingNames());
-
-                leftJoin.getRightArg().visit(this);
-            } finally {
-                boundVars = origBoundVars;
-            }
-        }
-
-        @Override
-        public void meet(Join node) {
-            Set<String> origBoundVars = boundVars;
-            try {
-                boundVars = new HashSet<String>(boundVars);
-
-                // Recursively get the join arguments
-                List<TupleExpr> joinArgs = getJoinArgs(node, new ArrayList<TupleExpr>());
-
-                // Build maps of cardinalities and vars per tuple expression
-                Map<TupleExpr, Double> cardinalityMap = new HashMap<TupleExpr, Double>();
-
-                for (TupleExpr tupleExpr : joinArgs) {
-                    double cardinality = statistics.getCardinality(tupleExpr);
-                    cardinalityMap.put(tupleExpr, cardinality);
-                }
-
-                // Reorder the (recursive) join arguments to a more optimal sequence
-                List<TupleExpr> orderedJoinArgs = new ArrayList<TupleExpr>(joinArgs.size());
-                TupleExpr last = null;
-                while (!joinArgs.isEmpty()) {
-                    TupleExpr tupleExpr = selectNextTupleExpr(joinArgs, cardinalityMap, last);
-                    if (tupleExpr == null) {
-                        break;
-                    }
-
-                    joinArgs.remove(tupleExpr);
-                    orderedJoinArgs.add(tupleExpr);
-                    last = tupleExpr;
-
-                    // Recursively optimize join arguments
-                    tupleExpr.visit(this);
-
-                    boundVars.addAll(tupleExpr.getBindingNames());
-                }
-
-                // Build new join hierarchy
-                // Note: generated hierarchy is right-recursive to help the
-                // IterativeEvaluationOptimizer to factor out the left-most join
-                // argument
-                int i = 0;
-                TupleExpr replacement = orderedJoinArgs.get(i);
-                for (i++; i < orderedJoinArgs.size(); i++) {
-                    replacement = new Join(replacement, orderedJoinArgs.get(i));
-                }
-
-                // Replace old join hierarchy
-                node.replaceWith(replacement);
-            } finally {
-                boundVars = origBoundVars;
-            }
-        }
-
-        protected <L extends List<TupleExpr>> L getJoinArgs(TupleExpr tupleExpr, L joinArgs) {
-            if (tupleExpr instanceof Join) {
-                Join join = (Join) tupleExpr;
-                getJoinArgs(join.getLeftArg(), joinArgs);
-                getJoinArgs(join.getRightArg(), joinArgs);
-            } else {
-                joinArgs.add(tupleExpr);
-            }
-
-            return joinArgs;
-        }
-
-        protected List<Var> getStatementPatternVars(TupleExpr tupleExpr) {
-            if(tupleExpr == null)
-                return null;
-            List<StatementPattern> stPatterns = StatementPatternCollector.process(tupleExpr);
-            List<Var> varList = new ArrayList<Var>(stPatterns.size() * 4);
-            for (StatementPattern sp : stPatterns) {
-                sp.getVars(varList);
-            }
-            return varList;
-        }
-
-        protected <M extends Map<Var, Integer>> M getVarFreqMap(List<Var> varList, M varFreqMap) {
-            for (Var var : varList) {
-                Integer freq = varFreqMap.get(var);
-                freq = (freq == null) ? 1 : freq + 1;
-                varFreqMap.put(var, freq);
-            }
-            return varFreqMap;
-        }
-
-        /**
-         * Selects from a list of tuple expressions the next tuple expression that
-         * should be evaluated. This method selects the tuple expression with
-         * highest number of bound variables, preferring variables that have been
-         * bound in other tuple expressions over variables with a fixed value.
-         */
-        protected TupleExpr selectNextTupleExpr(List<TupleExpr> expressions,
-                                                Map<TupleExpr, Double> cardinalityMap,
-                                                TupleExpr last) {
-            double lowestCardinality = Double.MAX_VALUE;
-            TupleExpr result = expressions.get(0);
-            expressions = getExprsWithSameVars(expressions, last);
-
-            for (TupleExpr tupleExpr : expressions) {
-                // Calculate a score for this tuple expression
-                double cardinality = cardinalityMap.get(tupleExpr);
-
-                if (cardinality < lowestCardinality) {
-                    // More specific path expression found
-                    lowestCardinality = cardinality;
-                    result = tupleExpr;
-                }
-            }
-
-            return result;
-        }
-
-        protected List<TupleExpr> getExprsWithSameVars(List<TupleExpr> expressions, TupleExpr last) {
-            if(last == null)
-                return expressions;
-            List<TupleExpr> retExprs = new ArrayList<TupleExpr>();
-            for(TupleExpr tupleExpr : expressions) {
-                List<Var> statementPatternVars = getStatementPatternVars(tupleExpr);
-                List<Var> lastVars = getStatementPatternVars(last);
-                statementPatternVars.retainAll(lastVars);
-                if(statementPatternVars.size() > 0) {
-                    retExprs.add(tupleExpr);
-                }
-            }
-            if(retExprs.size() == 0) {
-                return expressions;
-            }
-            return retExprs;
-        }
-
-    }
-}
\ No newline at end of file
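
The optimizer above is applied like Sesame's stock join optimizers: parse the SPARQL, wrap the tuple expression in a QueryRoot, and let the visitor rewrite the join tree in place. A minimal usage sketch, mirroring the testCross case later in this commit (the no-arg constructor falls back to uniform EvaluationStatistics cardinalities):

    QueryParser parser = new SPARQLParser();
    ParsedQuery parsed = parser.parseQuery(sparql, null);
    TupleExpr tupleExpr = new QueryRoot(parsed.getTupleExpr());

    // With uniform cardinalities the ordering is driven purely by shared
    // variables; pass a stats-backed EvaluationStatistics to the constructor
    // to also weigh estimated result sizes.
    new SimilarVarJoinOptimizer().optimize(tupleExpr, null, null);

Preferring a next join argument that shares a variable with the previous one (getExprsWithSameVars) is what keeps the reordered plan from degenerating into cross products between unrelated patterns.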

http://git-wip-us.apache.org/repos/asf/incubator-rya/blob/80faf06d/pig/cloudbase.pig/src/main/java/org/apache/hadoop/mapred/PreferLocalMapTaskSelector.java
----------------------------------------------------------------------
diff --git a/pig/cloudbase.pig/src/main/java/org/apache/hadoop/mapred/PreferLocalMapTaskSelector.java b/pig/cloudbase.pig/src/main/java/org/apache/hadoop/mapred/PreferLocalMapTaskSelector.java
deleted file mode 100644
index 9966704..0000000
--- a/pig/cloudbase.pig/src/main/java/org/apache/hadoop/mapred/PreferLocalMapTaskSelector.java
+++ /dev/null
@@ -1,39 +0,0 @@
-//package org.apache.hadoop.mapred;
-//
-//import org.apache.hadoop.net.Node;
-//
-//import java.io.IOException;
-//import java.util.Arrays;
-//
-///**
-// */
-//public class PreferLocalMapTaskSelector extends DefaultTaskSelector {
-//
-//    @Override
-//    public Task obtainNewMapTask(TaskTrackerStatus taskTracker, JobInProgress job) throws IOException {
-//        return this.obtainNewLocalMapTask(taskTracker, job);
-//    }
-//
-//    public Task obtainNewLocalMapTask(TaskTrackerStatus taskTracker, JobInProgress job)
-//            throws IOException {
-//        ClusterStatus clusterStatus = taskTrackerManager.getClusterStatus();
-//        int numTaskTrackers = clusterStatus.getTaskTrackers();
-//        System.out.println(taskTracker.getHost());
-//        for (TaskInProgress tip : job.maps) {
-//            String[] splitLocations = tip.getSplitLocations();
-//            System.out.println(Arrays.toString(splitLocations));
-//            for (String loc : splitLocations) {
-//                Node node = job.jobtracker.getNode(loc);
-//                System.out.println(node);
-//                if(!taskTracker.getHost().equals(loc)) {
-//                    return null;
-//                }
-//            }
-//        }
-//
-//        Node node = job.jobtracker.getNode(taskTracker.getHost());
-//        System.out.println(node);
-//        Task task = job.obtainNewLocalMapTask(taskTracker, numTaskTrackers, taskTrackerManager.getNumberOfUniqueHosts());
-//        return task;
-//    }
-//}

http://git-wip-us.apache.org/repos/asf/incubator-rya/blob/80faf06d/pig/cloudbase.pig/src/test/java/mvm/rya/cloudbase/pig/CloudbaseInputFormatMain.java
----------------------------------------------------------------------
diff --git a/pig/cloudbase.pig/src/test/java/mvm/rya/cloudbase/pig/CloudbaseInputFormatMain.java b/pig/cloudbase.pig/src/test/java/mvm/rya/cloudbase/pig/CloudbaseInputFormatMain.java
deleted file mode 100644
index a2a3e45..0000000
--- a/pig/cloudbase.pig/src/test/java/mvm/rya/cloudbase/pig/CloudbaseInputFormatMain.java
+++ /dev/null
@@ -1,50 +0,0 @@
-//package mvm.mmrts.cloudbase.pig;
-//
-//import cloudbase.core.CBConstants;
-//import cloudbase.core.client.mapreduce.CloudbaseInputFormat;
-//import cloudbase.core.data.Range;
-//import mvm.mmrts.api.RdfCloudTripleStoreConstants;
-//import mvm.rya.cloudbase.query.DefineTripleQueryRangeFactory;
-//import org.apache.hadoop.conf.Configuration;
-//import org.apache.hadoop.mapreduce.InputSplit;
-//import org.apache.hadoop.mapreduce.JobContext;
-//import org.openrdf.model.ValueFactory;
-//import org.openrdf.model.impl.ValueFactoryImpl;
-//
-//import java.util.Collections;
-//import java.util.List;
-//import java.util.Map;
-//
-///**
-// * Created by IntelliJ IDEA.
-// * User: RoshanP
-// * Date: 4/5/12
-// * Time: 4:52 PM
-// * To change this template use File | Settings | File Templates.
-// */
-//public class CloudbaseInputFormatMain {
-//    public static void main(String[] args) {
-//        try {
-//            ValueFactory vf = new ValueFactoryImpl();
-//            CloudbaseInputFormat format = new CloudbaseInputFormat();
-//            Configuration configuration = new Configuration();
-//            JobContext context = new JobContext(configuration, null);
-//            CloudbaseInputFormat.setZooKeeperInstance(context, "stratus", "stratus13:2181");
-//            CloudbaseInputFormat.setInputInfo(context, "root", "password".getBytes(), "l_po", CBConstants.NO_AUTHS);
-//            DefineTripleQueryRangeFactory queryRangeFactory = new DefineTripleQueryRangeFactory();
-//            Map.Entry<RdfCloudTripleStoreConstants.TABLE_LAYOUT,Range> entry =
-//                    queryRangeFactory.defineRange(null, vf.createURI("urn:lubm:rdfts#takesCourse"), null, context.getConfiguration());
-//            CloudbaseInputFormat.setRanges(context, Collections.singleton(entry.getValue()));
-//            List<InputSplit> splits = format.getSplits(context);
-//            for (InputSplit inputSplit : splits) {
-//                String[] locations = inputSplit.getLocations();
-//                for (String loc : locations) {
-//                    java.net.InetAddress inetAdd = java.net.InetAddress.getByName(loc);
-//                    System.out.println("Hostname is: " + inetAdd.getHostName());
-//                }
-//            }
-//        } catch (Exception e) {
-//            e.printStackTrace();
-//        }
-//    }
-//}

http://git-wip-us.apache.org/repos/asf/incubator-rya/blob/80faf06d/pig/cloudbase.pig/src/test/java/mvm/rya/cloudbase/pig/CloudbaseStorageTest.java
----------------------------------------------------------------------
diff --git a/pig/cloudbase.pig/src/test/java/mvm/rya/cloudbase/pig/CloudbaseStorageTest.java b/pig/cloudbase.pig/src/test/java/mvm/rya/cloudbase/pig/CloudbaseStorageTest.java
deleted file mode 100644
index 5cb0940..0000000
--- a/pig/cloudbase.pig/src/test/java/mvm/rya/cloudbase/pig/CloudbaseStorageTest.java
+++ /dev/null
@@ -1,250 +0,0 @@
-package mvm.rya.cloudbase.pig;
-
-import cloudbase.core.CBConstants;
-import cloudbase.core.client.BatchWriter;
-import cloudbase.core.client.Connector;
-import cloudbase.core.client.admin.SecurityOperations;
-import cloudbase.core.client.mock.MockInstance;
-import cloudbase.core.data.Mutation;
-import cloudbase.core.data.Value;
-import cloudbase.core.security.Authorizations;
-import cloudbase.core.security.ColumnVisibility;
-import cloudbase.core.security.TablePermission;
-import junit.framework.TestCase;
-import mvm.rya.cloudbase.pig.CloudbaseStorage;
-import org.apache.hadoop.conf.Configuration;
-import org.apache.hadoop.mapreduce.*;
-import org.apache.pig.data.Tuple;
-
-import java.io.IOException;
-import java.util.ArrayList;
-import java.util.List;
-
-/**
- * Created by IntelliJ IDEA.
- * Date: 4/20/12
- * Time: 10:17 AM
- * To change this template use File | Settings | File Templates.
- */
-public class CloudbaseStorageTest extends TestCase {
-
-    private String user = "user";
-    private String pwd = "pwd";
-    private String instance = "myinstance";
-    private String table = "testTable";
-    private Authorizations auths = CBConstants.NO_AUTHS;
-    private Connector connector;
-
-    @Override
-    public void setUp() throws Exception {
-        super.setUp();
-        connector = new MockInstance(instance).getConnector(user, pwd.getBytes());
-        connector.tableOperations().create(table);
-        SecurityOperations secOps = connector.securityOperations();
-        secOps.createUser(user, pwd.getBytes(), auths);
-        secOps.grantTablePermission(user, table, TablePermission.READ);
-        secOps.grantTablePermission(user, table, TablePermission.WRITE);
-    }
-
-    public void testSimpleOutput() throws Exception {
-        BatchWriter batchWriter = connector.createBatchWriter(table, 10l, 10l, 2);
-        Mutation row = new Mutation("row");
-        row.put("cf", "cq", new Value(new byte[0]));
-        batchWriter.addMutation(row);
-        batchWriter.flush();
-        batchWriter.close();
-
-        String location = "cloudbase://" + table + "?instance=" + instance + "&user=" + user + "&password=" + pwd + "&range=a|z&mock=true";
-        CloudbaseStorage storage = createCloudbaseStorage(location);
-        int count = 0;
-        while (true) {
-            Tuple next = storage.getNext();
-            if (next == null)
-                break;
-            assertEquals(6, next.size());
-            count++;
-        }
-        assertEquals(1, count);
-    }
-
-    public void testRange() throws Exception {
-        BatchWriter batchWriter = connector.createBatchWriter(table, 10l, 10l, 2);
-        Mutation row = new Mutation("a");
-        row.put("cf", "cq", new Value(new byte[0]));
-        batchWriter.addMutation(row);
-        row = new Mutation("b");
-        row.put("cf", "cq", new Value(new byte[0]));
-        batchWriter.addMutation(row);
-        row = new Mutation("d");
-        row.put("cf", "cq", new Value(new byte[0]));
-        batchWriter.addMutation(row);
-        batchWriter.flush();
-        batchWriter.close();
-
-        String location = "cloudbase://" + table + "?instance=" + instance + "&user=" + user + "&password=" + pwd + "&range=a|c&mock=true";
-        CloudbaseStorage storage = createCloudbaseStorage(location);
-        int count = 0;
-        while (true) {
-            Tuple next = storage.getNext();
-            if (next == null)
-                break;
-            assertEquals(6, next.size());
-            count++;
-        }
-        assertEquals(2, count);
-    }
-
-    public void testMultipleRanges() throws Exception {
-        BatchWriter batchWriter = connector.createBatchWriter(table, 10l, 10l, 2);
-        Mutation row = new Mutation("a");
-        row.put("cf", "cq", new Value(new byte[0]));
-        batchWriter.addMutation(row);
-        row = new Mutation("b");
-        row.put("cf", "cq", new Value(new byte[0]));
-        batchWriter.addMutation(row);
-        row = new Mutation("d");
-        row.put("cf", "cq", new Value(new byte[0]));
-        batchWriter.addMutation(row);
-        batchWriter.flush();
-        batchWriter.close();
-
-        String location = "cloudbase://" + table + "?instance=" + instance + "&user=" + user + "&password=" + pwd + "&range=a|c&range=d|e&mock=true";
-        List<CloudbaseStorage> storages = createCloudbaseStorages(location);
-        assertEquals(2, storages.size());
-        CloudbaseStorage storage = storages.get(0);
-        int count = 0;
-        while (true) {
-            Tuple next = storage.getNext();
-            if (next == null)
-                break;
-            assertEquals(6, next.size());
-            count++;
-        }
-        assertEquals(2, count);
-        storage = storages.get(1);
-        count = 0;
-        while (true) {
-            Tuple next = storage.getNext();
-            if (next == null)
-                break;
-            assertEquals(6, next.size());
-            count++;
-        }
-        assertEquals(1, count);
-    }
-
-    public void testColumns() throws Exception {
-        BatchWriter batchWriter = connector.createBatchWriter(table, 10l, 10l, 2);
-        Mutation row = new Mutation("a");
-        row.put("cf1", "cq", new Value(new byte[0]));
-        row.put("cf2", "cq", new Value(new byte[0]));
-        row.put("cf3", "cq1", new Value(new byte[0]));
-        row.put("cf3", "cq2", new Value(new byte[0]));
-        batchWriter.addMutation(row);
-        batchWriter.flush();
-        batchWriter.close();
-
-        String location = "cloudbase://" + table + "?instance=" + instance + "&user=" + user + "&password=" + pwd + "&range=a|c&columns=cf1,cf3|cq1&mock=true";
-        CloudbaseStorage storage = createCloudbaseStorage(location);
-        int count = 0;
-        while (true) {
-            Tuple next = storage.getNext();
-            if (next == null)
-                break;
-            assertEquals(6, next.size());
-            count++;
-        }
-        assertEquals(2, count);
-    }
-
-    public void testWholeRowRange() throws Exception {
-        BatchWriter batchWriter = connector.createBatchWriter(table, 10l, 10l, 2);
-        Mutation row = new Mutation("a");
-        row.put("cf1", "cq", new Value(new byte[0]));
-        row.put("cf2", "cq", new Value(new byte[0]));
-        row.put("cf3", "cq1", new Value(new byte[0]));
-        row.put("cf3", "cq2", new Value(new byte[0]));
-        batchWriter.addMutation(row);
-        batchWriter.flush();
-        batchWriter.close();
-
-        String location = "cloudbase://" + table + "?instance=" + instance + "&user=" + user + "&password=" + pwd + "&range=a&mock=true";
-        CloudbaseStorage storage = createCloudbaseStorage(location);
-        int count = 0;
-        while (true) {
-            Tuple next = storage.getNext();
-            if (next == null)
-                break;
-            assertEquals(6, next.size());
-            count++;
-        }
-        assertEquals(4, count);
-    }
-
-    public void testAuths() throws Exception {
-        BatchWriter batchWriter = connector.createBatchWriter(table, 10l, 10l, 2);
-        Mutation row = new Mutation("a");
-        row.put("cf1", "cq1", new ColumnVisibility("A"), new Value(new byte[0]));
-        row.put("cf2", "cq2", new Value(new byte[0]));
-        batchWriter.addMutation(row);
-        batchWriter.flush();
-        batchWriter.close();
-
-        String location = "cloudbase://" + table + "?instance=" + instance + "&user=" + user + "&password=" + pwd + "&range=a|c&mock=true";
-        CloudbaseStorage storage = createCloudbaseStorage(location);
-        int count = 0;
-        while (true) {
-            Tuple next = storage.getNext();
-            if (next == null)
-                break;
-            assertEquals(6, next.size());
-            count++;
-        }
-        assertEquals(1, count);
-
-        location = "cloudbase://" + table + "?instance=" + instance + "&user=" + user + "&password=" + pwd + "&range=a|c&auths=A&mock=true";
-        storage = createCloudbaseStorage(location);
-        count = 0;
-        while (true) {
-            Tuple next = storage.getNext();
-            if (next == null)
-                break;
-            assertEquals(6, next.size());
-            count++;
-        }
-        assertEquals(2, count);
-    }
-
-    protected CloudbaseStorage createCloudbaseStorage(String location) throws IOException, InterruptedException {
-        List<CloudbaseStorage> cloudbaseStorages = createCloudbaseStorages(location);
-        if (cloudbaseStorages.size() > 0) {
-            return cloudbaseStorages.get(0);
-        }
-        return null;
-    }
-
-    protected List<CloudbaseStorage> createCloudbaseStorages(String location) throws IOException, InterruptedException {
-        List<CloudbaseStorage> cloudbaseStorages = new ArrayList<CloudbaseStorage>();
-        CloudbaseStorage storage = new CloudbaseStorage();
-        InputFormat inputFormat = storage.getInputFormat();
-        Job job = new Job(new Configuration());
-        storage.setLocation(location, job);
-        List<InputSplit> splits = inputFormat.getSplits(job);
-        assertNotNull(splits);
-
-        for (InputSplit inputSplit : splits) {
-            storage = new CloudbaseStorage();
-            job = new Job(new Configuration());
-            storage.setLocation(location, job);
-            TaskAttemptContext taskAttemptContext = new TaskAttemptContext(job.getConfiguration(),
-                    new TaskAttemptID("jtid", 0, false, 0, 0));
-            RecordReader recordReader = inputFormat.createRecordReader(inputSplit,
-                    taskAttemptContext);
-            recordReader.initialize(inputSplit, taskAttemptContext);
-
-            storage.prepareToRead(recordReader, null);
-            cloudbaseStorages.add(storage);
-        }
-        return cloudbaseStorages;
-    }
-}
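
All of the tests above configure CloudbaseStorage through a single location URI; its query string is the whole configuration surface. One representative location combining the options exercised above (the values are the test fixtures, not required names):

    // cloudbase://<table> ....... table to scan
    // instance/user/password .... connection settings (mock=true -> MockInstance)
    // range=start|end ........... one scan range, repeatable (the tests see one
    //                             input split per range); a bare range=a reads
    //                             the whole row "a"
    // columns=cf[|cq],... ....... restrict to column families or single columns
    // auths=A,... ............... scan authorizations
    String location = "cloudbase://testTable?instance=myinstance&user=user"
            + "&password=pwd&range=a|c&range=d|e&columns=cf1,cf3|cq1&auths=A&mock=true";
    storage.setLocation(location, job);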

http://git-wip-us.apache.org/repos/asf/incubator-rya/blob/80faf06d/pig/cloudbase.pig/src/test/java/mvm/rya/cloudbase/pig/SparqlQueryPigEngineTest.java
----------------------------------------------------------------------
diff --git a/pig/cloudbase.pig/src/test/java/mvm/rya/cloudbase/pig/SparqlQueryPigEngineTest.java b/pig/cloudbase.pig/src/test/java/mvm/rya/cloudbase/pig/SparqlQueryPigEngineTest.java
deleted file mode 100644
index f3cdd5f..0000000
--- a/pig/cloudbase.pig/src/test/java/mvm/rya/cloudbase/pig/SparqlQueryPigEngineTest.java
+++ /dev/null
@@ -1,55 +0,0 @@
-package mvm.rya.cloudbase.pig;
-
-import junit.framework.TestCase;
-import mvm.rya.cloudbase.pig.SparqlQueryPigEngine;
-import mvm.rya.cloudbase.pig.SparqlToPigTransformVisitor;
-import org.apache.pig.ExecType;
-
-/**
- * Created by IntelliJ IDEA.
- * Date: 4/23/12
- * Time: 10:14 AM
- * To change this template use File | Settings | File Templates.
- */
-public class SparqlQueryPigEngineTest extends TestCase {
-
-    private SparqlQueryPigEngine engine;
-
-    @Override
-    public void setUp() throws Exception {
-        super.setUp();
-        SparqlToPigTransformVisitor visitor = new SparqlToPigTransformVisitor();
-        visitor.setTablePrefix("l_");
-        visitor.setInstance("stratus");
-        visitor.setZk("stratus13:2181");
-        visitor.setUser("root");
-        visitor.setPassword("password");
-
-        engine = new SparqlQueryPigEngine();
-        engine.setSparqlToPigTransformVisitor(visitor);
-        engine.setExecType(ExecType.LOCAL);
-        engine.setInference(false);
-        engine.setStats(false);
-        engine.init();
-    }
-
-    @Override
-    public void tearDown() throws Exception {
-        super.tearDown();
-        engine.destroy();
-    }
-
-    public void testStatementPattern() throws Exception {
-        String query = "PREFIX rdf: <http://www.w3.org/1999/02/22-rdf-syntax-ns#>\n" +
-                " PREFIX ub: <urn:lubm:rdfts#>\n" +
-                "PREFIX rdfs: <http://www.w3.org/2000/01/rdf-schema#>\n" +
-                " SELECT * WHERE\n" +
-                " {\n" +
-                "\t<http://www.Department0.University0.edu> ?p ?o\n" +
-                " }\n" +
-                "";
-
-//        engine.runQuery(query, "/temp/testSP");
-        assertNotNull(engine.generatePigScript(query));
-    }
-}
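
Condensing the setUp above: the engine's lifecycle is visitor configuration, init(), then generatePigScript() for inspection or runQuery() for execution, and finally destroy(). A sketch of that flow, where query holds the SPARQL string and the connection values are the test's placeholders:

    SparqlToPigTransformVisitor visitor = new SparqlToPigTransformVisitor();
    visitor.setTablePrefix("l_");
    visitor.setInstance("stratus");
    visitor.setZk("stratus13:2181");
    visitor.setUser("root");
    visitor.setPassword("password");

    SparqlQueryPigEngine engine = new SparqlQueryPigEngine();
    engine.setSparqlToPigTransformVisitor(visitor);
    engine.setExecType(ExecType.LOCAL); // in-process Pig, as in the test
    engine.setInference(false);         // skip inference expansion
    engine.setStats(false);             // skip cardinality statistics
    engine.init();
    try {
        String script = engine.generatePigScript(query);
        // engine.runQuery(query, "/temp/testSP");  // alternative: execute it
    } finally {
        engine.destroy();
    }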

http://git-wip-us.apache.org/repos/asf/incubator-rya/blob/80faf06d/pig/cloudbase.pig/src/test/java/mvm/rya/cloudbase/pig/SparqlToPigTransformVisitorTest.java
----------------------------------------------------------------------
diff --git a/pig/cloudbase.pig/src/test/java/mvm/rya/cloudbase/pig/SparqlToPigTransformVisitorTest.java b/pig/cloudbase.pig/src/test/java/mvm/rya/cloudbase/pig/SparqlToPigTransformVisitorTest.java
deleted file mode 100644
index fc5abb8..0000000
--- a/pig/cloudbase.pig/src/test/java/mvm/rya/cloudbase/pig/SparqlToPigTransformVisitorTest.java
+++ /dev/null
@@ -1,367 +0,0 @@
-package mvm.rya.cloudbase.pig;
-
-import junit.framework.TestCase;
-import mvm.rya.cloudbase.pig.optimizer.SimilarVarJoinOptimizer;
-import mvm.rya.cloudbase.pig.SparqlToPigTransformVisitor;
-import org.openrdf.query.algebra.QueryRoot;
-import org.openrdf.query.algebra.TupleExpr;
-import org.openrdf.query.parser.ParsedQuery;
-import org.openrdf.query.parser.QueryParser;
-import org.openrdf.query.parser.sparql.SPARQLParser;
-
-/**
- * Created by IntelliJ IDEA.
- * Date: 4/12/12
- * Time: 10:18 AM
- * To change this template use File | Settings | File Templates.
- */
-public class SparqlToPigTransformVisitorTest extends TestCase {
-
-    public void testStatementPattern() throws Exception {
-        String query = "PREFIX rdf: <http://www.w3.org/1999/02/22-rdf-syntax-ns#>\n" +
-                " PREFIX ub: <urn:lubm:rdfts#>\n" +
-                "PREFIX rdfs: <http://www.w3.org/2000/01/rdf-schema#>\n" +
-                " SELECT * WHERE\n" +
-                " {\n" +
-                "\t?x rdf:type ub:UndergraduateStudent\n" +
-                " }\n" +
-                "";
-        QueryParser parser = new SPARQLParser();
-        ParsedQuery parsedQuery = parser.parseQuery(query, null);
-
-//        System.out.println(parsedQuery);
-
-        SparqlToPigTransformVisitor visitor = new SparqlToPigTransformVisitor();
-        visitor.setTablePrefix("l_");
-        visitor.setInstance("stratus");
-        visitor.setZk("stratus13:2181");
-        visitor.setUser("root");
-        visitor.setPassword("password");
-        visitor.meet(new QueryRoot(parsedQuery.getTupleExpr()));
-        System.out.println(visitor.getPigScript());
-    }
-
-    public void testStatementPatternContext() throws Exception {
-        String query = "PREFIX rdf: <http://www.w3.org/1999/02/22-rdf-syntax-ns#>\n" +
-                " PREFIX ub: <urn:lubm:rdfts#>\n" +
-                "PREFIX rdfs: <http://www.w3.org/2000/01/rdf-schema#>\n" +
-                " SELECT * WHERE\n" +
-                " {\n" +
-                " GRAPH ub:g1 {\n" +
-                "\t?x rdf:type ub:UndergraduateStudent\n" +
-                " }\n" +
-                " }\n" +
-                "";
-        QueryParser parser = new SPARQLParser();
-        ParsedQuery parsedQuery = parser.parseQuery(query, null);
-
-//        System.out.println(parsedQuery);
-
-        SparqlToPigTransformVisitor visitor = new SparqlToPigTransformVisitor();
-        visitor.setTablePrefix("l_");
-        visitor.setInstance("stratus");
-        visitor.setZk("stratus13:2181");
-        visitor.setUser("root");
-        visitor.setPassword("password");
-        visitor.meet(new QueryRoot(parsedQuery.getTupleExpr()));
-//        System.out.println(visitor.getPigScript());
-    }
-
-    public void testStatementPatternContextVar() throws Exception {
-        String query = "PREFIX rdf: <http://www.w3.org/1999/02/22-rdf-syntax-ns#>\n" +
-                " PREFIX ub: <urn:lubm:rdfts#>\n" +
-                "PREFIX rdfs: <http://www.w3.org/2000/01/rdf-schema#>\n" +
-                " SELECT * WHERE\n" +
-                " {\n" +
-                " GRAPH ?g {\n" +
-                "\t?x rdf:type ub:UndergraduateStudent\n" +
-                " }\n" +
-                " ?x ub:pred ?g." +
-                " }\n" +
-                "";
-        QueryParser parser = new SPARQLParser();
-        ParsedQuery parsedQuery = parser.parseQuery(query, null);
-
-//        System.out.println(parsedQuery);
-
-        SparqlToPigTransformVisitor visitor = new SparqlToPigTransformVisitor();
-        visitor.setTablePrefix("l_");
-        visitor.setInstance("stratus");
-        visitor.setZk("stratus13:2181");
-        visitor.setUser("root");
-        visitor.setPassword("password");
-        visitor.meet(new QueryRoot(parsedQuery.getTupleExpr()));
-//        System.out.println(visitor.getPigScript());
-    }
-
-    public void testJoin() throws Exception {
-        String query = "select * where {\n" +
-                "?subj <urn:lubm:rdfts#name> 'Department0'.\n" +
-                "?subj <urn:lubm:rdfts#subOrganizationOf> <http://www.University0.edu>.\n" +
-                "}";
-//        System.out.println(query);
-        QueryParser parser = new SPARQLParser();
-        ParsedQuery parsedQuery = parser.parseQuery(query, null);
-
-//        System.out.println(parsedQuery);
-
-        SparqlToPigTransformVisitor visitor = new SparqlToPigTransformVisitor();
-        visitor.setTablePrefix("l_");
-        visitor.setInstance("stratus");
-        visitor.setZk("stratus13:2181");
-        visitor.setUser("root");
-        visitor.setPassword("password");
-        visitor.meet(new QueryRoot(parsedQuery.getTupleExpr()));
-//        System.out.println(visitor.getPigScript());
-    }
-
-    public void testMutliReturnJoin() throws Exception {
-        String query = "select * where {\n" +
-                "?subj <urn:lubm:rdfts#name> 'Department0'.\n" +
-                "?subj <urn:lubm:rdfts#subOrganizationOf> ?suborg.\n" +
-                "}";
-        QueryParser parser = new SPARQLParser();
-        ParsedQuery parsedQuery = parser.parseQuery(query, null);
-
-//        System.out.println(parsedQuery);
-
-        SparqlToPigTransformVisitor visitor = new SparqlToPigTransformVisitor();
-        visitor.setTablePrefix("l_");
-        visitor.setInstance("stratus");
-        visitor.setZk("stratus13:2181");
-        visitor.setUser("root");
-        visitor.setPassword("password");
-        visitor.meet(new QueryRoot(parsedQuery.getTupleExpr()));
-//        System.out.println(visitor.getPigScript());
-    }
-
-    public void testMutlipleJoins() throws Exception {
-        String query = "select * where {\n" +
-                "?subj <urn:lubm:rdfts#name> 'Department0'.\n" +
-                "?subj <urn:lubm:rdfts#subOrganizationOf> <http://www.University0.edu>.\n" +
-                "?subj <http://www.w3.org/1999/02/22-rdf-syntax-ns#type> <urn:lubm:rdfts#Department>.\n" +
-                "}";
-//        System.out.println(query);
-        QueryParser parser = new SPARQLParser();
-        ParsedQuery parsedQuery = parser.parseQuery(query, null);
-
-//        System.out.println(parsedQuery);
-
-        SparqlToPigTransformVisitor visitor = new SparqlToPigTransformVisitor();
-        visitor.setTablePrefix("l_");
-        visitor.setInstance("stratus");
-        visitor.setZk("stratus13:2181");
-        visitor.setUser("root");
-        visitor.setPassword("password");
-        visitor.meet(new QueryRoot(parsedQuery.getTupleExpr()));
-//        System.out.println(visitor.getPigScript());
-    }
-
-    public void testCross() throws Exception {
-        String query = "select * where {\n" +
-                "?subj0 <urn:lubm:rdfts#name> 'Department0'.\n" +
-                "?subj1 <urn:lubm:rdfts#name> 'Department1'.\n" +
-                "?subj0 <urn:lubm:rdfts#subOrganizationOf> <http://www.University0.edu>.\n" +
-                "?subj1 <urn:lubm:rdfts#subOrganizationOf> <http://www.University0.edu>.\n" +
-                "}";
-//        System.out.println(query);
-        QueryParser parser = new SPARQLParser();
-        ParsedQuery parsedQuery = parser.parseQuery(query, null);
-        QueryRoot tupleExpr = new QueryRoot(parsedQuery.getTupleExpr());
-
-        SimilarVarJoinOptimizer similarVarJoinOptimizer = new SimilarVarJoinOptimizer();
-        similarVarJoinOptimizer.optimize(tupleExpr, null, null);
-
-//        System.out.println(tupleExpr);
-
-        SparqlToPigTransformVisitor visitor = new SparqlToPigTransformVisitor();
-        visitor.setTablePrefix("l_");
-        visitor.setInstance("stratus");
-        visitor.setZk("stratus13:2181");
-        visitor.setUser("root");
-        visitor.setPassword("password");
-        visitor.meet(tupleExpr);
-//        System.out.println(visitor.getPigScript());
-    }
-
-    public void testLimit() throws Exception {
-        String query = "select * where {\n" +
-                "?subj <urn:lubm:rdfts#name> 'Department0'.\n" +
-                "?subj <urn:lubm:rdfts#subOrganizationOf> ?suborg.\n" +
-                "} limit 100";
-//        System.out.println(query);
-        QueryParser parser = new SPARQLParser();
-        ParsedQuery parsedQuery = parser.parseQuery(query, null);
-
-//        System.out.println(parsedQuery);
-
-        SparqlToPigTransformVisitor visitor = new SparqlToPigTransformVisitor();
-        visitor.setTablePrefix("l_");
-        visitor.setInstance("stratus");
-        visitor.setZk("stratus13:2181");
-        visitor.setUser("root");
-        visitor.setPassword("password");
-        visitor.meet(new QueryRoot(parsedQuery.getTupleExpr()));
-//        System.out.println(visitor.getPigScript());
-    }
-
-    public void testHardQuery() throws Exception {
-//        String query = "PREFIX rdf: <http://www.w3.org/1999/02/22-rdf-syntax-ns#>\n" +
-//                " PREFIX ub: <urn:lubm:rdfts#>\n" +
-//                " SELECT * WHERE\n" +
-//                " {\n" +
-//                "        ?y rdf:type ub:University .\n" +
-//                "        ?z ub:subOrganizationOf ?y .\n" +
-//                "        ?z rdf:type ub:Department .\n" +
-//                "        ?x ub:memberOf ?z .\n" +
-//                "        ?x ub:undergraduateDegreeFrom ?y .\n" +
-//                "       ?x rdf:type ub:GraduateStudent .\n" +
-//                " }\n" +
-//                "limit 100";
-        String query = "PREFIX rdf: <http://www.w3.org/1999/02/22-rdf-syntax-ns#>\n" +
-                " PREFIX ub: <urn:lubm:rdfts#>\n" +
-                " SELECT * WHERE\n" +
-                " {\n" +
-                "\t?x ub:advisor ?y.\n" +
-                "\t?y ub:teacherOf ?z.\n" +
-                "\t?x ub:takesCourse ?z.\n" +
-                "\t?x rdf:type ub:Student.\n" +
-                "\t?y rdf:type ub:Faculty.\n" +
-                "\t?z rdf:type ub:Course.\n" +
-                " }\n" +
-                "limit 100";
-//        System.out.println(query);
-        QueryParser parser = new SPARQLParser();
-        ParsedQuery parsedQuery = parser.parseQuery(query, null);
-
-        TupleExpr tupleExpr = parsedQuery.getTupleExpr();
-
-//        CloudbaseRdfEvalStatsDAO rdfEvalStatsDAO = new CloudbaseRdfEvalStatsDAO();
-//        rdfEvalStatsDAO.setConnector(new ZooKeeperInstance("stratus", "stratus13:2181").getConnector("root", "password".getBytes()));
-//        rdfEvalStatsDAO.setEvalTable("l_eval");
-//        RdfCloudTripleStoreEvaluationStatistics stats = new RdfCloudTripleStoreEvaluationStatistics(new Configuration(), rdfEvalStatsDAO);
-//        (new SimilarVarJoinOptimizer(stats)).optimize(tupleExpr, null, null);
-
-//        System.out.println(tupleExpr);
-
-        SparqlToPigTransformVisitor visitor = new SparqlToPigTransformVisitor();
-        visitor.setTablePrefix("l_");
-        visitor.setInstance("stratus");
-        visitor.setZk("stratus13:2181");
-        visitor.setUser("root");
-        visitor.setPassword("password");
-        visitor.meet(new QueryRoot(tupleExpr));
-//        System.out.println(visitor.getPigScript());
-    }
-
-    public void testFixedStatementPatternInferenceQuery() throws Exception {
-        String query = "PREFIX rdf: <http://www.w3.org/1999/02/22-rdf-syntax-ns#>\n" +
-                " PREFIX rdfs: <http://www.w3.org/2000/01/rdf-schema#>\n" +
-                " PREFIX ub: <urn:lubm:rdfts#>\n" +
-                " SELECT * WHERE\n" +
-                " {\n" +
-                "      ?y ub:memberOf <http://www.Department3.University10.edu>.\n" +
-                "      {?y rdf:type ub:Professor.}\n" +
-                "       UNION \n" +
-                "      {?y rdf:type ub:GraduateStudent.}\n" +
-                " }";
-//        System.out.println(query);
-        QueryParser parser = new SPARQLParser();
-        ParsedQuery parsedQuery = parser.parseQuery(query, null);
-
-        TupleExpr tupleExpr = parsedQuery.getTupleExpr();
-
-//        Configuration conf = new Configuration();
-//        Connector connector = new ZooKeeperInstance("stratus", "stratus13:2181").getConnector("root", "password".getBytes());
-//
-//        InferenceEngine inferenceEngine = new InferenceEngine();
-//        CloudbaseRdfDAO rdfDAO = new CloudbaseRdfDAO();
-//        rdfDAO.setConf(conf);
-//        rdfDAO.setConnector(connector);
-//        rdfDAO.setNamespaceTable("l_ns");
-//        rdfDAO.setSpoTable("l_spo");
-//        rdfDAO.setPoTable("l_po");
-//        rdfDAO.setOspTable("l_osp");
-//        rdfDAO.init();
-//
-//        inferenceEngine.setRyaDAO(rdfDAO);
-//        inferenceEngine.setConf(conf);
-//        inferenceEngine.init();
-//
-//        tupleExpr.visit(new TransitivePropertyVisitor(conf, inferenceEngine));
-//        tupleExpr.visit(new SymmetricPropertyVisitor(conf, inferenceEngine));
-//        tupleExpr.visit(new InverseOfVisitor(conf, inferenceEngine));
-//        tupleExpr.visit(new SubPropertyOfVisitor(conf, inferenceEngine));
-//        tupleExpr.visit(new SubClassOfVisitor(conf, inferenceEngine));
-//
-//        CloudbaseRdfEvalStatsDAO rdfEvalStatsDAO = new CloudbaseRdfEvalStatsDAO();
-//        rdfEvalStatsDAO.setConnector(connector);
-//        rdfEvalStatsDAO.setEvalTable("l_eval");
-//        RdfCloudTripleStoreEvaluationStatistics stats = new RdfCloudTripleStoreEvaluationStatistics(conf, rdfEvalStatsDAO);
-//        (new QueryJoinOptimizer(stats)).optimize(tupleExpr, null, null);
-
-//        System.out.println(tupleExpr);
-
-        SparqlToPigTransformVisitor visitor = new SparqlToPigTransformVisitor();
-        visitor.setTablePrefix("l_");
-        visitor.setInstance("stratus");
-        visitor.setZk("stratus13:2181");
-        visitor.setUser("root");
-        visitor.setPassword("password");
-        visitor.meet(new QueryRoot(tupleExpr));
-//        System.out.println(visitor.getPigScript());
-    }
-
-//    public void testInverseOf() throws Exception {
-//        String query = "PREFIX rdf: <http://www.w3.org/1999/02/22-rdf-syntax-ns#>\n" +
-//                " PREFIX ub: <urn:lubm:rdfts#>\n" +
-//                " SELECT * WHERE\n" +
-//                " {\n" +
-//                "     ?x rdf:type ub:Person .\n" +
-//                "     <http://www.University0.edu> ub:hasAlumnus ?x .\n" +
-//                " } ";
-//        System.out.println(query);
-//        QueryParser parser = new SPARQLParser();
-//        ParsedQuery parsedQuery = parser.parseQuery(query, null);
-//        TupleExpr tupleExpr = parsedQuery.getTupleExpr();
-//
-//        Configuration conf = new Configuration();
-//        Connector connector = new ZooKeeperInstance("stratus", "stratus13:2181").getConnector("root", "password".getBytes());
-//
-//        InferenceEngine inferenceEngine = new InferenceEngine();
-//        CloudbaseRdfDAO rdfDAO = new CloudbaseRdfDAO();
-//        rdfDAO.setConf(conf);
-//        rdfDAO.setConnector(connector);
-//        rdfDAO.setNamespaceTable("l_ns");
-//        rdfDAO.setSpoTable("l_spo");
-//        rdfDAO.setPoTable("l_po");
-//        rdfDAO.setOspTable("l_osp");
-//        rdfDAO.init();
-//
-//        inferenceEngine.setRyaDAO(rdfDAO);
-//        inferenceEngine.setConf(conf);
-//        inferenceEngine.init();
-//
-//        tupleExpr.visit(new TransitivePropertyVisitor(conf, inferenceEngine));
-//        tupleExpr.visit(new SymmetricPropertyVisitor(conf, inferenceEngine));
-//        tupleExpr.visit(new InverseOfVisitor(conf, inferenceEngine));
-//
-//        CloudbaseRdfEvalStatsDAO rdfEvalStatsDAO = new CloudbaseRdfEvalStatsDAO();
-//        rdfEvalStatsDAO.setConnector(connector);
-//        rdfEvalStatsDAO.setEvalTable("l_eval");
-//        RdfCloudTripleStoreEvaluationStatistics stats = new RdfCloudTripleStoreEvaluationStatistics(conf, rdfEvalStatsDAO);
-//        (new QueryJoinOptimizer(stats)).optimize(tupleExpr, null, null);
-//
-//
-//        System.out.println(tupleExpr);
-//
-//        SparqlToPigTransformVisitor visitor = new SparqlToPigTransformVisitor();
-//        visitor.setTablePrefix("l_");
-//        visitor.setInstance("stratus");
-//        visitor.setZk("stratus13:2181");
-//        visitor.setUser("root");
-//        visitor.setPassword("password");
-//        visitor.meet(new QueryRoot(parsedQuery.getTupleExpr()));
-//        System.out.println(visitor.getPigScript());
-//    }
-}

http://git-wip-us.apache.org/repos/asf/incubator-rya/blob/80faf06d/pig/cloudbase.pig/src/test/java/mvm/rya/cloudbase/pig/StatementPatternStorageTest.java
----------------------------------------------------------------------
diff --git a/pig/cloudbase.pig/src/test/java/mvm/rya/cloudbase/pig/StatementPatternStorageTest.java b/pig/cloudbase.pig/src/test/java/mvm/rya/cloudbase/pig/StatementPatternStorageTest.java
deleted file mode 100644
index de88138..0000000
--- a/pig/cloudbase.pig/src/test/java/mvm/rya/cloudbase/pig/StatementPatternStorageTest.java
+++ /dev/null
@@ -1,148 +0,0 @@
-package mvm.rya.cloudbase.pig;
-
-import cloudbase.core.CBConstants;
-import cloudbase.core.client.Connector;
-import cloudbase.core.client.admin.SecurityOperations;
-import cloudbase.core.client.mock.MockInstance;
-import cloudbase.core.security.Authorizations;
-import cloudbase.core.security.TablePermission;
-import junit.framework.TestCase;
-import mvm.rya.api.RdfCloudTripleStoreConstants;
-import mvm.rya.api.resolver.RdfToRyaConversions;
-import mvm.rya.cloudbase.CloudbaseRdfConfiguration;
-import mvm.rya.cloudbase.CloudbaseRyaDAO;
-import org.apache.hadoop.conf.Configuration;
-import org.apache.hadoop.mapreduce.*;
-import org.apache.pig.data.Tuple;
-import org.openrdf.model.ValueFactory;
-import org.openrdf.model.impl.ContextStatementImpl;
-import org.openrdf.model.impl.StatementImpl;
-import org.openrdf.model.impl.ValueFactoryImpl;
-
-import java.io.IOException;
-import java.util.ArrayList;
-import java.util.List;
-
-/**
- * Created by IntelliJ IDEA.
- * Date: 4/20/12
- * Time: 5:14 PM
- * To change this template use File | Settings | File Templates.
- */
-public class StatementPatternStorageTest extends TestCase {
-
-    private String user = "user";
-    private String pwd = "pwd";
-    private String instance = "myinstance";
-    private String tablePrefix = "t_";
-    private Authorizations auths = CBConstants.NO_AUTHS;
-    private Connector connector;
-    private CloudbaseRyaDAO ryaDAO;
-    private ValueFactory vf = new ValueFactoryImpl();
-    private String namespace = "urn:test#";
-    private CloudbaseRdfConfiguration conf;
-
-    @Override
-    public void setUp() throws Exception {
-        super.setUp();
-        connector = new MockInstance(instance).getConnector(user, pwd.getBytes());
-        connector.tableOperations().create(tablePrefix + RdfCloudTripleStoreConstants.TBL_SPO_SUFFIX);
-        connector.tableOperations().create(tablePrefix + RdfCloudTripleStoreConstants.TBL_PO_SUFFIX);
-        connector.tableOperations().create(tablePrefix + RdfCloudTripleStoreConstants.TBL_OSP_SUFFIX);
-        connector.tableOperations().create(tablePrefix + RdfCloudTripleStoreConstants.TBL_NS_SUFFIX);
-        SecurityOperations secOps = connector.securityOperations();
-        secOps.createUser(user, pwd.getBytes(), auths);
-        secOps.grantTablePermission(user, tablePrefix + RdfCloudTripleStoreConstants.TBL_SPO_SUFFIX, TablePermission.READ);
-        secOps.grantTablePermission(user, tablePrefix + RdfCloudTripleStoreConstants.TBL_PO_SUFFIX, TablePermission.READ);
-        secOps.grantTablePermission(user, tablePrefix + RdfCloudTripleStoreConstants.TBL_OSP_SUFFIX, TablePermission.READ);
-        secOps.grantTablePermission(user, tablePrefix + RdfCloudTripleStoreConstants.TBL_NS_SUFFIX, TablePermission.READ);
-
-        conf = new CloudbaseRdfConfiguration();
-        ryaDAO = new CloudbaseRyaDAO();
-        ryaDAO.setConnector(connector);
-        conf.setTablePrefix(tablePrefix);
-        ryaDAO.setConf(conf);
-        ryaDAO.init();
-    }
-
-    public void testSimplePredicateRange() throws Exception {
-        ryaDAO.add(RdfToRyaConversions.convertStatement(new StatementImpl(vf.createURI(namespace, "a"), vf.createURI(namespace, "p"), vf.createLiteral("l"))));
-        ryaDAO.add(RdfToRyaConversions.convertStatement(new StatementImpl(vf.createURI(namespace, "b"), vf.createURI(namespace, "p"), vf.createLiteral("l"))));
-        ryaDAO.add(RdfToRyaConversions.convertStatement(new StatementImpl(vf.createURI(namespace, "c"), vf.createURI(namespace, "n"), vf.createLiteral("l"))));
-        
-
-        int count = 0;
-        List<StatementPatternStorage> storages = createStorages("cloudbase://" + tablePrefix + "?instance=" + instance + "&user=" + user + "&password=" + pwd + "&predicate=<" + namespace + "p>&mock=true");
-        for (StatementPatternStorage storage : storages) {
-            while (true) {
-                Tuple next = storage.getNext();
-                if (next == null) {
-                    break;
-                }
-                count++;
-            }
-        }
-        assertEquals(2, count);
-        ryaDAO.destroy();
-    }
-
-    public void testContext() throws Exception {
-        ryaDAO.add(RdfToRyaConversions.convertStatement(new StatementImpl(vf.createURI(namespace, "a"), vf.createURI(namespace, "p"), vf.createLiteral("l1"))));
-        ryaDAO.add(RdfToRyaConversions.convertStatement(new ContextStatementImpl(vf.createURI(namespace, "a"), vf.createURI(namespace, "p"), vf.createLiteral("l2"), vf.createURI(namespace, "g1"))));
-        
-
-        int count = 0;
-        List<StatementPatternStorage> storages = createStorages("cloudbase://" + tablePrefix + "?instance=" + instance + "&user=" + user + "&password=" + pwd + "&predicate=<" + namespace + "p>&mock=true");
-        for (StatementPatternStorage storage : storages) {
-            while (true) {
-                Tuple next = storage.getNext();
-                if (next == null) {
-                    break;
-                }
-                count++;
-            }
-        }
-        assertEquals(2, count);
-
-        count = 0;
-        storages = createStorages("cloudbase://" + tablePrefix + "?instance=" + instance + "&user=" + user + "&password=" + pwd + "&predicate=<" + namespace + "p>&context=<"+namespace+"g1>&mock=true");
-        for (StatementPatternStorage storage : storages) {
-            while (true) {
-                Tuple next = storage.getNext();
-                if (next == null) {
-                    break;
-                }
-                count++;
-            }
-        }
-        assertEquals(1, count);
-
-        ryaDAO.destroy();
-    }
-
-    protected List<StatementPatternStorage> createStorages(String location) throws IOException, InterruptedException {
-        List<StatementPatternStorage> storages = new ArrayList<StatementPatternStorage>();
-        StatementPatternStorage storage = new StatementPatternStorage();
-        InputFormat inputFormat = storage.getInputFormat();
-        Job job = new Job(new Configuration());
-        storage.setLocation(location, job);
-        List<InputSplit> splits = inputFormat.getSplits(job);
-        assertNotNull(splits);
-
-        for (InputSplit inputSplit : splits) {
-            storage = new StatementPatternStorage();
-            job = new Job(new Configuration());
-            storage.setLocation(location, job);
-            TaskAttemptContext taskAttemptContext = new TaskAttemptContext(job.getConfiguration(),
-                    new TaskAttemptID("jtid", 0, false, 0, 0));
-            RecordReader recordReader = inputFormat.createRecordReader(inputSplit,
-                    taskAttemptContext);
-            recordReader.initialize(inputSplit, taskAttemptContext);
-
-            storage.prepareToRead(recordReader, null);
-            storages.add(storage);
-        }
-        return storages;
-    }
-
-}
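
Like CloudbaseStorage, StatementPatternStorage is driven entirely by its location URI, but its options name a triple pattern rather than raw ranges. The two shapes exercised above, side by side (URIs are the test's own fixtures):

    // All (?s, urn:test#p, ?o) triples from the Rya tables prefixed "t_":
    String byPredicate = "cloudbase://t_?instance=myinstance&user=user"
            + "&password=pwd&predicate=<urn:test#p>&mock=true";

    // The same pattern restricted to the named graph urn:test#g1:
    String byContext = "cloudbase://t_?instance=myinstance&user=user"
            + "&password=pwd&predicate=<urn:test#p>&context=<urn:test#g1>&mock=true";

A pattern position that is not supplied defaults to a variable, as in the ?s/?p/?o fallbacks of the StatementPatternStorage constructor shown earlier in this diff.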

http://git-wip-us.apache.org/repos/asf/incubator-rya/blob/80faf06d/pig/pom.xml
----------------------------------------------------------------------
diff --git a/pig/pom.xml b/pig/pom.xml
index e766717..2df2d1c 100644
--- a/pig/pom.xml
+++ b/pig/pom.xml
@@ -1,15 +1,37 @@
 <?xml version="1.0" encoding="utf-8"?>
-<project xmlns="http://maven.apache.org/POM/4.0.0" xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance" xsi:schemaLocation="http://maven.apache.org/POM/4.0.0 http://maven.apache.org/xsd/maven-4.0.0.xsd">
 
+<!--
+Licensed to the Apache Software Foundation (ASF) under one
+or more contributor license agreements.  See the NOTICE file
+distributed with this work for additional information
+regarding copyright ownership.  The ASF licenses this file
+to you under the Apache License, Version 2.0 (the
+"License"); you may not use this file except in compliance
+with the License.  You may obtain a copy of the License at
+
+  http://www.apache.org/licenses/LICENSE-2.0
+
+Unless required by applicable law or agreed to in writing,
+software distributed under the License is distributed on an
+"AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+KIND, either express or implied.  See the License for the
+specific language governing permissions and limitations
+under the License.
+-->
+
+<project xmlns="http://maven.apache.org/POM/4.0.0" xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance" xsi:schemaLocation="http://maven.apache.org/POM/4.0.0 http://maven.apache.org/xsd/maven-4.0.0.xsd">
     <modelVersion>4.0.0</modelVersion>
     <parent>
-        <groupId>mvm.rya</groupId>
-        <artifactId>parent</artifactId>
+        <groupId>org.apache.rya</groupId>
+        <artifactId>rya-project</artifactId>
         <version>3.2.10-SNAPSHOT</version>
     </parent>
+
     <artifactId>rya.pig</artifactId>
+    <name>Apache Rya Pig Projects</name>
+
     <packaging>pom</packaging>
-    <name>${project.groupId}.${project.artifactId}</name>
+
     <modules>
         <module>accumulo.pig</module>
     </modules>


http://git-wip-us.apache.org/repos/asf/incubator-rya/blob/80faf06d/partition/common-query/src/main/java/ss/cloudbase/core/iterators/conversion/Operation.java
----------------------------------------------------------------------
diff --git a/partition/common-query/src/main/java/ss/cloudbase/core/iterators/conversion/Operation.java b/partition/common-query/src/main/java/ss/cloudbase/core/iterators/conversion/Operation.java
deleted file mode 100644
index c93c085..0000000
--- a/partition/common-query/src/main/java/ss/cloudbase/core/iterators/conversion/Operation.java
+++ /dev/null
@@ -1,109 +0,0 @@
-package ss.cloudbase.core.iterators.conversion;
-
-public class Operation {
-	protected static final char[] ops = new char[] {'+', '-', '*', '/', '%', '^'};
-	protected String field;
-	protected char op;
-	protected double operand;
-	
-	public Operation(String config) {
-		if (config.startsWith("conversion.")) {
-			config = config.substring("conversion.".length());
-		}
-		
-		String[] parts = config.split("\\s");
-		if (parts.length == 3) {
-			field = parts[0];
-			op = parts[1].charAt(0);
-			if (!checkOp(op)) {
-				throw new IllegalArgumentException("Operator '" + op + "' is not among the supported operators: " + getOps());
-			}
-			try {
-				operand = Double.parseDouble(parts[2]);
-			} catch (NumberFormatException e) {
-				throw new IllegalArgumentException("Operand '" + parts[2] + "' could not be parsed as a number.");
-			}
-		} else {
-			throw new IllegalArgumentException("'" + config + "' was not in the format 'field op value'");
-		}
-	}
-	
-	public String getField() {
-		return field;
-	}
-	
-	public char getOp() {
-		return op;
-	}
-	
-	public double getOperand() {
-		return operand;
-	}
-	
-	public String execute(String value) {
-		if (value == null) {
-			return value;
-		}
-		
-		double v = Double.NaN;
-		
-		try {
-			v = Double.parseDouble(value);
-		} catch (NumberFormatException e) {
-			// we'll attempt to convert hex strings
-			try {
-				v = Integer.parseInt(value, 16);
-			} catch (NumberFormatException e1) {
-				return value;
-			}
-		} 
-		
-		switch (op) {
-		case '+': 
-			v += operand;
-			break;
-		case '-':
-			v -= operand;
-			break;
-		case '*':
-			v *= operand;
-			break;
-		case '/':
-			v /= operand;
-			break;
-		case '%':
-			v %= operand;
-			break;
-		case '^':
-			v = Math.pow(v, operand);
-			break;
-		}	
-	
-		return "" + v;
-	}
-	
-	protected String getOps() {
-		StringBuilder sb = new StringBuilder();
-		boolean first = true;
-		
-		for (char c: ops) {
-			if (first) {
-				sb.append(c);
-				first = false;
-			} else {
-				sb.append(',');
-				sb.append(c);
-			}
-		}
-		return sb.toString();
-	}
-	
-	protected boolean checkOp(char op) {
-		for (char c: ops) {
-			if (op == c) {
-				return true;
-			}
-		}
-		return false;
-	}
-}
\ No newline at end of file

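For reference, the Operation class removed above parsed a one-line config of the form "field op value" (with an optional "conversion." prefix) and applied that arithmetic to a field's value, passing unparseable values through untouched. A minimal usage sketch, assuming the class as it appeared above; the "speed" field and the values are illustrative:

    import ss.cloudbase.core.iterators.conversion.Operation;

    public class OperationExample {
        public static void main(String[] args) {
            // The "conversion." prefix is stripped; the remainder must be "field op value".
            Operation op = new Operation("conversion.speed + 10");
            System.out.println(op.getField());     // speed
            System.out.println(op.execute("5"));   // 15.0
            System.out.println(op.execute("n/a")); // n/a (neither decimal nor hex, passes through)
        }
    }
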
http://git-wip-us.apache.org/repos/asf/incubator-rya/blob/80faf06d/partition/common-query/src/main/java/ss/cloudbase/core/iterators/filter/CBConverter.java
----------------------------------------------------------------------
diff --git a/partition/common-query/src/main/java/ss/cloudbase/core/iterators/filter/CBConverter.java b/partition/common-query/src/main/java/ss/cloudbase/core/iterators/filter/CBConverter.java
deleted file mode 100644
index 7d6bedd..0000000
--- a/partition/common-query/src/main/java/ss/cloudbase/core/iterators/filter/CBConverter.java
+++ /dev/null
@@ -1,117 +0,0 @@
-/*
- * To change this template, choose Tools | Templates
- * and open the template in the editor.
- */
-package ss.cloudbase.core.iterators.filter;
-
-import java.util.HashMap;
-import java.util.Map;
-import java.util.Map.Entry;
-
-import org.apache.log4j.Logger;
-
-import cloudbase.core.data.Key;
-import cloudbase.core.data.Value;
-
-/**
- * 
- * @author rashah
- */
-public class CBConverter {
-
-	/** The string that separates the key/value pairs in the row value. */
-	public static final String OPTION_PAIR_DELIMITER = "pairDelimiter";
-	/**
-	 * The string that separates the key and the value(s) within a pair in the
-	 * row value.
-	 */
-	public static final String OPTION_VALUE_DELIMITER = "valueDelimiter";
-	/**
-	 * Contains the pair delimiter provided through the
-	 * <code>OPTION_PAIR_DELIMITER</code> option.
-	 */
-	protected String pairDelimiter = "\u0000";
-	/**
-	 * Contains the value delimiter provided through the
-	 * <code>OPTION_VALUE_DELIMITER</code> option.
-	 */
-	protected String valueDelimiter = "\uFFFD";
-	private static Logger LOG = Logger.getLogger(CBConverter.class);
-
-	public CBConverter() {
-	}
-
-	public Map<String, String> toMap(Key CBKey, Value CBValue) {
-		LOG.trace("Convert");
-
-		Map<String, String> return_value = new HashMap<String, String>();
-
-		String value = CBValue.toString();
-
-		// Determine the start/end of the value.
-		int valueStartIndex = 0;
-		int valueEndIndex = value.length();
-
-		int vLen = valueDelimiter.length();
-		int fLen = pairDelimiter.length();
-		LOG.debug(vLen + ", " + fLen + ", CBValue = " + CBValue.toString());
-		// Parse each of the values from the row value.
-		while (valueStartIndex < valueEndIndex) {
-			int vIndex = value.indexOf(valueDelimiter, valueStartIndex);
-
-			// If an "equals" sign was found, parse the key and value.
-			if (vIndex != -1) {
-				String key = value.substring(valueStartIndex, vIndex).trim();
-				int v = value.indexOf(valueDelimiter, vIndex + vLen);
-				if (v == -1) {
-					v = valueEndIndex;
-				}
-				int f = value.indexOf(pairDelimiter, vIndex + vLen);
-				if (f == -1) {
-					f = valueEndIndex;
-				}
-
-				int fIndex = Math.min(f, v);
-				String val = value.substring(vIndex + 1, fIndex).trim();
-				valueStartIndex = f;
-				valueStartIndex += fLen;
-				return_value.put(key, val);
-				LOG.debug("Key {" + key + "} Value {" + val + "}");
-			}
-		}
-
-		return return_value;
-	}
-	
-	public Value toValue(Map<String, String> record) {
-		StringBuilder sb = new StringBuilder();
-		boolean first = true;
-		
-		for (Entry<String, String> e: record.entrySet()) {
-			if (first) {
-				first = false;
-			} else {
-				sb.append(pairDelimiter);
-			}
-			sb.append(e.getKey());
-			sb.append(valueDelimiter);
-			sb.append(e.getValue());
-		}
-		
-		return new Value(sb.toString().getBytes());
-	}
-
-	public void init(Map<String, String> options) {
-		LOG.trace("Init");
-
-		pairDelimiter = options.get(OPTION_PAIR_DELIMITER);
-		if (pairDelimiter == null || pairDelimiter.length() == 0) {
-			pairDelimiter = "\u0000";
-		}
-
-		valueDelimiter = options.get(OPTION_VALUE_DELIMITER);
-		if (valueDelimiter == null || valueDelimiter.length() == 0) {
-			valueDelimiter = "\uFFFD";
-		}
-	}
-}

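CBConverter above round-trips a record between a Map and a single delimited Value. A small sketch with printable delimiters in place of the \u0000/\uFFFD defaults; it assumes the old Cloudbase client classes this code compiled against, and note that toMap() never reads the Key:

    import java.util.HashMap;
    import java.util.Map;

    import org.apache.hadoop.io.Text;

    import cloudbase.core.data.Key;
    import cloudbase.core.data.Value;
    import ss.cloudbase.core.iterators.filter.CBConverter;

    public class CBConverterExample {
        public static void main(String[] args) {
            CBConverter converter = new CBConverter();

            // Override the unprintable defaults for readability.
            Map<String, String> options = new HashMap<String, String>();
            options.put(CBConverter.OPTION_PAIR_DELIMITER, "|");
            options.put(CBConverter.OPTION_VALUE_DELIMITER, ":");
            converter.init(options);

            Map<String, String> record = new HashMap<String, String>();
            record.put("city", "new york");
            record.put("state", "ny");

            Value encoded = converter.toValue(record); // e.g. city:new york|state:ny
            Map<String, String> decoded = converter.toMap(new Key(new Text("r1")), encoded);
            System.out.println(decoded);               // {city=new york, state=ny} (order unspecified)
        }
    }
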
http://git-wip-us.apache.org/repos/asf/incubator-rya/blob/80faf06d/partition/common-query/src/main/java/ss/cloudbase/core/iterators/filter/general/GVDateFilter.java
----------------------------------------------------------------------
diff --git a/partition/common-query/src/main/java/ss/cloudbase/core/iterators/filter/general/GVDateFilter.java b/partition/common-query/src/main/java/ss/cloudbase/core/iterators/filter/general/GVDateFilter.java
deleted file mode 100644
index 9063f12..0000000
--- a/partition/common-query/src/main/java/ss/cloudbase/core/iterators/filter/general/GVDateFilter.java
+++ /dev/null
@@ -1,169 +0,0 @@
-/*
- * To change this template, choose Tools | Templates
- * and open the template in the editor.
- */
-package ss.cloudbase.core.iterators.filter.general;
-
-import ss.cloudbase.core.iterators.filter.CBConverter;
-
-import java.util.Map;
-
-import org.apache.log4j.Logger;
-
-import cloudbase.core.data.Key;
-import cloudbase.core.data.Value;
-import cloudbase.core.iterators.filter.Filter;
-import java.sql.Timestamp;
-import java.text.SimpleDateFormat;
-import java.util.Calendar;
-import java.util.Date;
-
-/**
- * This filter will take an incoming timestamp and match it against a date
- * range contained within the cloudbase record
- *
- * @author Raju Shah
- */
-public class GVDateFilter implements Filter
-{
-
-  private static final Logger LOG = Logger.getLogger(GVDateFilter.class);
-  /** The option that holds the query timestamp to compare records against. */
-  public static final String OPTIONInTimestamp = "InDate";
-  protected String TimeStamp_S = "2011-03-03 20:44:28.633";
-  protected Timestamp TimeStamp_T = Timestamp.valueOf(TimeStamp_S);
-  public static final String OPTIONGVTimeStartField = "date-start";
-  protected String DateStartField = "date-start";
-  public static final String OPTIONGVTimeEndField = "date-end";
-  protected String DateEndField = "date-end";
-  public static final String OPTIONRBActive = "RBCurrentlyActive";
-  protected String RBActive = "version";
-  CBConverter cbconvertor = new CBConverter();
-
-  public long GetUSecFromString(String Time_S)
-  {
-    long return_value = 0;
-    Date d = null;
-    Calendar c = Calendar.getInstance();
-    SimpleDateFormat df_long = new SimpleDateFormat("yyyy-MM-dd'T'HH:mm:ss.SSS'Z'");
-    SimpleDateFormat df_med = new SimpleDateFormat("yyyy-MM-dd");
-
-    try
-    {
-      d = df_long.parse(Time_S);
-    }
-    catch (Exception e)
-    {
-      try
-      {
-        d = df_med.parse(Time_S);
-      }
-      catch (Exception e1)
-      {
-        System.out.println("Don't like it [" + Time_S + "]");
-        return return_value;
-      }
-    }
-    c.setTime(d);
-    return_value = c.getTimeInMillis();
-
-    return return_value;
-  }
-
-  /**
-   * Whether or not to accept this key/value entry. A map of row keys and values is parsed and then sent off to the process function to be evaluated.
-   * @param CBKey The cloudbase entry key
-   * @param CBValue The cloudbase entry value
-   * @return True if the entry should be included in the results, false otherwise
-   */
-  @Override
-  public boolean accept(Key CBKey, Value CBValue)
-  {
-    LOG.trace("accept");
-
-    boolean return_value = false;
-
-    Map<String, String> CBRecord = cbconvertor.toMap(CBKey, CBValue);
-
-    // Get the Date Strings
-    String sStart = (String) CBRecord.get(DateStartField);
-    Timestamp tStart = new Timestamp(0);
-    String sEnd = (String) CBRecord.get(DateEndField);
-    Timestamp tEnd = new Timestamp(0);
-
-    //Get Active Strings
-    String rbActive = (String) CBRecord.get(RBActive);
-
-    //LOGIC
-    //1) If The signal is NOT ACTIVE (I.E. the active flag is specified and off) PUNT
-    if ( ((rbActive != null) && rbActive.equals("0")) )
-    {
-      return return_value;
-    }
-    //1) Remaining signals are either specified ACTIVE or NOT INDICATED
-
-
-    //LOGIC
-    //2) Next check if both start and end are specified, then it must be in between
-    if ((sStart != null) && (sEnd != null))
-    {
-      tStart.setTime(GetUSecFromString(sStart));
-      tEnd.setTime(GetUSecFromString(sEnd));
-      if (tStart.before(TimeStamp_T) && TimeStamp_T.before(tEnd))
-      {
-        return_value = true;
-      }
-      return return_value;
-    }
-
-
-    //LOGIC
-    //3) If the start date is specified then just check against start date
-    if (sStart != null)
-    {
-      tStart.setTime(GetUSecFromString(sStart));
-      if (tStart.before(TimeStamp_T))
-      {
-        return_value = true;
-      }
-      return return_value;
-    }
-
-    //LOGIC
-    //4) Return false for all others - Start Date must be present
-
-
-    return return_value;
-  }
-
-  @Override
-  public void init(Map<String, String> options)
-  {
-    LOG.trace("init");
-    cbconvertor.init(options);
-
-    DateStartField = options.get(OPTIONGVTimeStartField);
-    if (DateStartField == null || DateStartField.length() == 0)
-    {
-      DateStartField = "date-start";
-    }
-
-
-    DateEndField = options.get(OPTIONGVTimeEndField);
-    if (DateEndField == null || DateEndField.length() == 0)
-    {
-      DateEndField = "date-end";
-    }
-
-
-    TimeStamp_S = options.get(OPTIONInTimestamp);
-    if (TimeStamp_S == null || TimeStamp_S.length() == 0)
-    {
-      TimeStamp_S = "2011-03-03T20:44:28.633Z";
-    }
-    TimeStamp_T.setTime(GetUSecFromString(TimeStamp_S));
-
-
-    LOG.debug("Creating Time Filter, does  " + TimeStamp_S + " = " + TimeStamp_T.toString());
-  }
-}

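The accept() logic above boils down to: reject records explicitly flagged inactive; if both date-start and date-end are present, require start < query-time < end; if only date-start is present, require start < query-time; reject everything else. A configuration sketch, with illustrative option values:

    import java.util.HashMap;
    import java.util.Map;

    import ss.cloudbase.core.iterators.filter.general.GVDateFilter;

    public class GVDateFilterSetup {
        public static void main(String[] args) {
            Map<String, String> options = new HashMap<String, String>();
            options.put(GVDateFilter.OPTIONInTimestamp, "2011-03-03T20:44:28.633Z");
            options.put(GVDateFilter.OPTIONGVTimeStartField, "date-start");
            options.put(GVDateFilter.OPTIONGVTimeEndField, "date-end");

            GVDateFilter filter = new GVDateFilter();
            filter.init(options);
            // filter.accept(key, value) now returns true only for records whose
            // date-start precedes the query time and, when a date-end exists,
            // whose date-end follows it.
        }
    }
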
http://git-wip-us.apache.org/repos/asf/incubator-rya/blob/80faf06d/partition/common-query/src/main/java/ss/cloudbase/core/iterators/filter/general/GVFrequencyFilter.java
----------------------------------------------------------------------
diff --git a/partition/common-query/src/main/java/ss/cloudbase/core/iterators/filter/general/GVFrequencyFilter.java b/partition/common-query/src/main/java/ss/cloudbase/core/iterators/filter/general/GVFrequencyFilter.java
deleted file mode 100644
index f4c2edc..0000000
--- a/partition/common-query/src/main/java/ss/cloudbase/core/iterators/filter/general/GVFrequencyFilter.java
+++ /dev/null
@@ -1,92 +0,0 @@
-/*
- * To change this template, choose Tools | Templates
- * and open the template in the editor.
- */
-package ss.cloudbase.core.iterators.filter.general;
-
-import ss.cloudbase.core.iterators.filter.CBConverter;
-
-import java.util.Map;
-
-import org.apache.log4j.Logger;
-
-import cloudbase.core.data.Key;
-import cloudbase.core.data.Value;
-import cloudbase.core.iterators.filter.Filter;
-
-/**
- * This filter will take an incoming frequency and match that to a range
- * contained within the cloudbase record
- *
- * @author Raju Shah
- */
-public class GVFrequencyFilter implements Filter
-{
-
-  private static final Logger LOG = Logger.getLogger(GVFrequencyFilter.class);
-  /** The option that holds the query frequency to compare records against. */
-  public static final String OPTIONFrequency = "frequency";
-  protected String Frequency_S = "0.0";
-  protected Double Frequency_D = Double.parseDouble(Frequency_S);
-  // Initially the values in Global Vision are just Center Freq and BW
-  // On the second revision we may change that to the actual ranges so 
-  // the numerical computations below can be optimized out.  Then we can just use the normal OGC filters
-  //public static final String OPTIONGVFrequencyStart = "Frequency_Start";
-  //public static final String OPTIONGVFrequencyEnd   = "Frequency_End";
-  public static final String OPTIONGVCenterFrequency = "frequency";
-  public static final String OPTIONGVBandwidth = "bandwidth";
-  CBConverter cbconvertor = new CBConverter();
-
-  /**
-   * Whether or not to accept this key/value entry. A map of row keys and values is parsed and then sent off to the process function to be evaluated.
-   * @param CBKey The cloudbase entry key
-   * @param CBValue The cloudbase entry value
-   * @return True if the entry should be included in the results, false otherwise
-   */
-  @Override
-  public boolean accept(Key CBKey, Value CBValue)
-  {
-    LOG.trace("Accept");
-
-    boolean return_value = false;
-    Map<String, String> CBRecord = cbconvertor.toMap(CBKey, CBValue);
-
-    try
-    {
-      String s1 = (String) CBRecord.get(OPTIONGVCenterFrequency);
-      String s2 = (String) CBRecord.get(OPTIONGVBandwidth);
-
-      Double d1 = Double.parseDouble(s1);
-      Double d2 = Double.parseDouble(s2);
-
-      if (((d1 - (0.5 * d2)) <= Frequency_D) && (Frequency_D <= (d1 + (0.5 * d2))))
-      {
-        return_value = true;
-      }
-
-    }
-    catch (Exception e)
-    {
-      return_value = false;
-    }
-
-    return return_value;
-  }
-
-  @Override
-  public void init(Map<String, String> options)
-  {
-    LOG.trace("Init");
-
-    cbconvertor.init(options);
-
-    Frequency_S = options.get(OPTIONFrequency);
-    if (Frequency_S == null || Frequency_S.length() == 0)
-    {
-      Frequency_S = "0.0";
-    }
-
-
-    Frequency_D = Double.parseDouble(Frequency_S);
-  }
-}

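The containment test above is simply |f - center| <= bandwidth / 2, with both fields read out of the record; for example, a record with frequency 100.0e6 and bandwidth 10.0e6 accepts any query frequency in [95.0e6, 105.0e6]. The same test in isolation:

    public class FrequencyTest {
        public static void main(String[] args) {
            double center = 100.0e6, bandwidth = 10.0e6, query = 97.5e6;
            boolean accepted = (center - 0.5 * bandwidth) <= query
                            && query <= (center + 0.5 * bandwidth);
            System.out.println(accepted); // true
        }
    }
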
http://git-wip-us.apache.org/repos/asf/incubator-rya/blob/80faf06d/partition/common-query/src/main/java/ss/cloudbase/core/iterators/filter/jts/JTSFilter.java
----------------------------------------------------------------------
diff --git a/partition/common-query/src/main/java/ss/cloudbase/core/iterators/filter/jts/JTSFilter.java b/partition/common-query/src/main/java/ss/cloudbase/core/iterators/filter/jts/JTSFilter.java
deleted file mode 100644
index 625b48a..0000000
--- a/partition/common-query/src/main/java/ss/cloudbase/core/iterators/filter/jts/JTSFilter.java
+++ /dev/null
@@ -1,191 +0,0 @@
-/*
- * This is a filter for some basic Geo Functionality for data stored in a WKT format
- */
-package ss.cloudbase.core.iterators.filter.jts;
-
-import ss.cloudbase.core.iterators.filter.CBConverter;
-
-import java.util.Map;
-
-import org.apache.log4j.Logger;
-
-import cloudbase.core.data.Key;
-import cloudbase.core.data.Value;
-import cloudbase.core.iterators.filter.Filter;
-
-import com.vividsolutions.jts.io.WKTReader;
-import com.vividsolutions.jts.io.ParseException;
-import com.vividsolutions.jts.geom.Geometry;
-import com.vividsolutions.jts.geom.Point;
-import com.vividsolutions.jts.geom.Coordinate;
-import com.vividsolutions.jts.geom.CoordinateSequence;
-import com.vividsolutions.jts.geom.GeometryFactory;
-import com.vividsolutions.jts.geom.impl.CoordinateArraySequenceFactory;
-
-/**
- * @author Raju Shah
- */
-public class JTSFilter implements Filter
-{
-
-  private static final Logger logger = Logger.getLogger(JTSFilter.class);
-  /** The string that indicates the key name in the row value. */
-  public static final String OPTIONGeometryKeyName = "GeometryKeyName";
-  protected String GeometryKeyName = "geometry-contour";
-  /** The string that is the centerpoint - Latitude. */
-  public static final String OPTIONCenterPointLat = "latitude";
-  protected String CenterPointLat = "0.0";
-  /** The string that is the centerpoint - Longitude. */
-  public static final String OPTIONCenterPointLon = "longitude";
-  protected String CenterPointLon = "0.0";
-  public static final String OPTIONBeamIDName = "BeamID";
-  protected String BeamIDKeyName = "beam-globalviewid";
-  /** The compare type for the geometric point **/
-  protected Point p = null;
-  CBConverter cbconvertor = new CBConverter();
-
-  /**
-   * Whether or not to accept this key/value entry. A map of row keys and values is parsed and then sent off to the process function to be evaluated.
-   * @param CBKey The cloudbase entry key
-   * @param CBValue The cloudbase entry value
-   * @return True if the entry should be included in the results, false otherwise
-   */
-  @Override
-  public boolean accept(Key CBKey, Value CBValue)
-  {
-    boolean return_value = false;
-    Map<String, String> CBRecord = cbconvertor.toMap(CBKey, CBValue);
-
-    String s = (String) CBRecord.get(GeometryKeyName);
-
-    // I expect the field to exist
-    if ((s == null) || (s.length() < 1))
-    {
-      return return_value;
-    }
-
-    // If the object contains the word POLYGON or MULTIPOLYGON then it should be good
-    if (s.contains("POLYGON"))
-    {
-      //convert that string into a geometry
-      WKTReader reader = new WKTReader();
-      try
-      {
-        Geometry WKTgeometry = reader.read(s);
-
-        //See if the two geometries overlap
-        return_value = p.coveredBy(WKTgeometry);
-      }
-      catch (Exception e)
-      {
-          try
-          {
-            String beamid = (String) CBRecord.get(BeamIDKeyName);
-            logger.debug("Bad Beam ID ["+beamid + "]");
-            //See if the two geometries overlap
-          }
-          catch (Exception ex)
-          {
-          }
-
-        //logger.error(e, e);
-        return return_value;
-      }
-    }
-    else
-    {
-      String start_s = "SDO_ORDINATE_ARRAY(";
-      int start_index = s.indexOf(start_s);
-      if (start_index != -1)
-      {
-        start_index += start_s.length();
-
-        int end_index = s.indexOf(")", start_index);
-
-        if (end_index == -1)
-        {
-          return false;
-        }
-        s = s.substring(start_index, end_index);
-        //System.out.println("{" + s + "}");
-
-        //remove every other ,
-        // want to search for -70.838, 39.967, and replace with -70.838 39.967,
-        start_index = 1;
-        end_index = s.length();
-        while ((start_index < (end_index - 1)) && (start_index > 0))
-        {
-          start_index = s.indexOf(",", start_index);
-          char[] temp = s.toCharArray();
-          temp[start_index] = ' ';
-          s = new String(temp);
-          //skip the next one
-          start_index = s.indexOf(",", start_index) + 1;
-        }
-        //System.out.println("<" + s + ">");
-
-        //convert that string into a geometry
-        WKTReader reader = new WKTReader();
-        try
-        {
-          Geometry WKTgeometry = reader.read("POLYGON((" + s + "))");
-
-          //See if the two geometries overlap
-          return_value = p.coveredBy(WKTgeometry);
-        }
-        catch (Exception e)
-        {
-          //logger.error(e, e);
-          return return_value;
-        }
-      }
-    }
-    return return_value;
-  }
-
-  @Override
-  public void init(Map<String, String> options)
-  {
-    cbconvertor.init(options);
-
-    GeometryKeyName = options.get(OPTIONGeometryKeyName);
-    if (GeometryKeyName == null || GeometryKeyName.length() == 0)
-    {
-      GeometryKeyName = "geometry-contour";
-    }
-
-
-    CenterPointLat = options.get(OPTIONCenterPointLat);
-    if (CenterPointLat == null || CenterPointLat.length() == 0)
-    {
-      CenterPointLat = "0.0";
-    }
-
-
-    CenterPointLon = options.get(OPTIONCenterPointLon);
-    if (CenterPointLon == null || CenterPointLon.length() == 0)
-    {
-      CenterPointLon = "0.0";
-    }
-
-    BeamIDKeyName = options.get(OPTIONBeamIDName);
-    if (BeamIDKeyName == null || BeamIDKeyName.length() == 0)
-    {
-      BeamIDKeyName = "beam-globalviewid";
-    }
-
-    Double CenterPointLatD = Double.parseDouble(CenterPointLat);
-    Double CenterPointLonD = Double.parseDouble(CenterPointLon);
-
-    Coordinate[] coordinates =
-    {
-      new Coordinate(CenterPointLonD, CenterPointLatD)
-    };
-
-    CoordinateSequence cs = CoordinateArraySequenceFactory.instance().create(coordinates);
-    GeometryFactory gf = new GeometryFactory();
-
-    p = new Point(cs, gf);
-  }
-}

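The heart of the filter above is a JTS point-in-polygon test against a WKT contour; the SDO_ORDINATE_ARRAY branch merely rewrites Oracle-style coordinate lists into WKT first. A standalone sketch with an illustrative contour:

    import com.vividsolutions.jts.geom.Coordinate;
    import com.vividsolutions.jts.geom.Geometry;
    import com.vividsolutions.jts.geom.GeometryFactory;
    import com.vividsolutions.jts.geom.Point;
    import com.vividsolutions.jts.io.WKTReader;

    public class CoveredByExample {
        public static void main(String[] args) throws Exception {
            GeometryFactory gf = new GeometryFactory();
            Point p = gf.createPoint(new Coordinate(-77.0, 38.9)); // lon, lat
            Geometry contour = new WKTReader(gf)
                    .read("POLYGON ((-78 38, -76 38, -76 40, -78 40, -78 38))");
            System.out.println(p.coveredBy(contour)); // true
        }
    }
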
http://git-wip-us.apache.org/repos/asf/incubator-rya/blob/80faf06d/partition/common-query/src/main/java/ss/cloudbase/core/iterators/filter/ogc/OGCFilter.java
----------------------------------------------------------------------
diff --git a/partition/common-query/src/main/java/ss/cloudbase/core/iterators/filter/ogc/OGCFilter.java b/partition/common-query/src/main/java/ss/cloudbase/core/iterators/filter/ogc/OGCFilter.java
deleted file mode 100644
index 7bf622d..0000000
--- a/partition/common-query/src/main/java/ss/cloudbase/core/iterators/filter/ogc/OGCFilter.java
+++ /dev/null
@@ -1,241 +0,0 @@
-package ss.cloudbase.core.iterators.filter.ogc;
-
-import java.io.IOException;
-import java.io.StringReader;
-import java.util.HashMap;
-import java.util.Map;
-
-import javax.xml.parsers.DocumentBuilder;
-import javax.xml.parsers.DocumentBuilderFactory;
-import javax.xml.parsers.ParserConfigurationException;
-
-import org.apache.log4j.Logger;
-import org.w3c.dom.Document;
-import org.w3c.dom.Node;
-import org.xml.sax.InputSource;
-import org.xml.sax.SAXException;
-
-import ss.cloudbase.core.iterators.filter.ogc.operation.IOperation;
-
-import cloudbase.core.data.Key;
-import cloudbase.core.data.Value;
-import cloudbase.core.iterators.filter.Filter;
-import cloudbase.start.classloader.CloudbaseClassLoader;
-
-/**
- * The OGCFilter class provides a basic implementation of the 
- * <a href="http://www.opengeospatial.org/standards/filter">OGC Filter Encoding specification</a>. 
- * This allows for arbitrary queries to be passed via XML and executed in a distributed fashion across tablet servers. The following
- * code sets up a basic FilteringIterator that uses this filter (note that this jar must be present in each
- * of the tablet servers' classpaths to work):
- * 
- * <code>
- * <pre>
- * cloudbase.core.client.Scanner reader;
- * // set up the reader ...
- *  
- * // we're going to parse a row formatted like key:value|key:value and return all of the rows
- * // where the key "city" starts with "new"
- * String filter = "&lt;PropertyIsLike&gt;&lt;PropertyName&gt;city&lt;/PropertyName&gt;&lt;Literal&gt;new*&lt;/Literal&gt;&lt;/PropertyIsLike&gt;";
- * 
- * reader.setScanIterators(50, FilteringIterator.class.getName(), "myIterator");
- * reader.setScanIteratorOption("myIterator", "0", OGCFilter.class.getName());
- * reader.setScanIteratorOption("myIterator", "0." + OGCFilter.OPTION_PAIR_DELIMITER, "\\|");
- * reader.setScanIteratorOption("myIterator", "0." + OGCFilter.OPTION_VALUE_DELIMITER, ":");
- * reader.setScanIteratorOption("myIterator", "0." + OGCFilter.OPTION_FILTER, filter);
- * </pre>
- * </code>
- *  
- * @author William Wall
- */
-public class OGCFilter implements Filter {
-	private static final Logger logger = Logger.getLogger(OGCFilter.class);
-	
-	/** The string that separates the key/value pairs in the row value. */
-	public static final String OPTION_PAIR_DELIMITER = "pairDelimiter";
-	
-	/** The string that separates the key and the value(s) within a pair in the row value. */
-	public static final String OPTION_VALUE_DELIMITER = "valueDelimiter";
-	
-	/** The OGC Filter Encoding XML as a string */
-	public static final String OPTION_FILTER = "filter";
-	
-	/** Specifies the column name for the column family. If this option is not included, the column family is not included in the row. */
-	public static final String OPTION_COLF_NAME = "colfName";
-	
-	/** Specifies the column name for the column qualifier. If this option is not included, the column qualifier is not included in the row. */
-	public static final String OPTION_COLQ_NAME = "colqName";
-	
-	/** Specifies the compare type for the filter. Defaults to "auto". **/
-	public static final String OPTION_COMPARE_TYPE = "compareType";
-	
-	public static final String TYPE_NUMERIC = "numeric";
-	
-	public static final String TYPE_STRING = "string";
-	
-	public static final String TYPE_AUTO = "auto";
-	
-	/** Contains the pair delimiter provided through the <code>OPTION_PAIR_DELIMITER</code> option. */
-	protected String pairDelimiter;
-	
-	/** Contains the value delimiter provided through the <code>OPTION_VALUE_DELIMITER</code> option. */
-	protected String valueDelimiter;
-	
-	/** Contains the column family column name provided through the <code>OPTION_COLF_NAME</code> option. */
-	protected String colfName;
-	
-	/** Contains the column qualifier column name provided through the <code>OPTION_COLQ_NAME</code> option. */
-	protected String colqName;
-	
-	/** The root operation of the query tree **/
-	protected IOperation root;
-	
-	/** The compare type for the query tree **/
-	protected String compareType;
-
-	/**
-	 * Whether or not to accept this key/value entry. A map of row keys and values is parsed and then sent off to the process function to be evaluated.
-	 * @param key The cloudbase entry key
-	 * @param value The cloudbase entry value
-	 * @return True if the entry should be included in the results, false otherwise
-	 */
-	@Override
-	public boolean accept(Key key, Value value) {
-		if (root != null) {
-			Map<String, String> row = getRow(key, value);
-			return root.execute(row);
-		}
-		return false;
-	}
-	
-	public boolean accept(Map<String, String> record) {
-		if (root != null) {
-			return root.execute(record);
-		}
-		return false;
-	}
-	
-	/**
-	 * Parses the cloudbase value into a map of key/value pairs. If the <code>OPTION_COLF_NAME</code> 
-	 * or <code>OPTION_COLQ_NAME</code> options were used, then they will also be added to the row map. 
-	 * By default, pairs are delimited by the null character ("\u0000") and values by the Unicode
-	 * replacement character ("\uFFFD"). See the <code>OPTION_PAIR_DELIMITER</code> and <code>OPTION_VALUE_DELIMITER</code>
-	 * options to change these values.
-	 * 
-	 * @param cbKey The cloudbase entry key
-	 * @param cbValue The cloudbase entry value
-	 * @return A map that represents this row
-	 */
-	protected Map<String, String> getRow(Key cbKey, Value cbValue) {
-		//TODO: This should really be replaced by CBValueFormatter.parse(value.toString()), but I'm hesitant to require
-		// more jars (common-data and google-collections) to be in the cloudbase/lib directory. Also, what do we do with
-		// a field with multiple values? Should we just assume that if any value in that field matches then the row
-		// matches? Or should they all have to match? 
-		
-		String value = cbValue.toString();
-		Map<String, String> row = new HashMap<String, String>();
-		
-		if (colfName != null) {
-			row.put(colfName, cbKey.getColumnFamily().toString());
-		}
-		if (colqName != null) {
-			row.put(colqName, cbKey.getColumnQualifier().toString());
-		}
-		
-		// Determine the start/end of the value.
-		int valueStartIndex = 0;
-		int valueEndIndex = value.length();
-		
-		int vLen = valueDelimiter.length();
-		int fLen = pairDelimiter.length();
-
-		// Parse each of the values from the row value.
-		while (valueStartIndex < valueEndIndex) {
-			int vIndex = value.indexOf(valueDelimiter, valueStartIndex);
-	
-			// If an "equals" sign was found, parse the key and value.
-			if (vIndex != -1) {
-				String key = value.substring(valueStartIndex, vIndex).trim();
-				int v = value.indexOf(valueDelimiter, vIndex + vLen);
-				if (v == -1) {
-					v = valueEndIndex;
-				}
-				int f = value.indexOf(pairDelimiter, vIndex + vLen);
-				if (f == -1) {
-					f = valueEndIndex;
-				}
-				
-				int fIndex = Math.min(f,v);
-				String val = value.substring(vIndex + 1, fIndex).trim();
-				valueStartIndex = f;
-				valueStartIndex += fLen;
-				row.put(key, val);
-			}
-		}
-		
-		return row;
-	}
-
-	@Override
-	public void init(Map<String, String> options) {
-		pairDelimiter = options.get(OPTION_PAIR_DELIMITER);
-		if (pairDelimiter == null || pairDelimiter.length() == 0) {
-			pairDelimiter = "\u0000";
-		}
-		
-		valueDelimiter = options.get(OPTION_VALUE_DELIMITER);
-		if (valueDelimiter == null || valueDelimiter.length() == 0) {
-			valueDelimiter = "\uFFFD";
-		}
-		
-		compareType = options.get(OPTION_COMPARE_TYPE);
-		if (compareType == null || compareType.length() == 0) {
-			compareType = TYPE_AUTO;
-		}
-		
-		colfName = options.get(OPTION_COLF_NAME);
-		colqName = options.get(OPTION_COLQ_NAME);
-		
-		try {
-			DocumentBuilder builder = DocumentBuilderFactory.newInstance().newDocumentBuilder();
-			InputSource is = new InputSource();
-			is.setCharacterStream(new StringReader(options.get(OPTION_FILTER)));
-			Document doc = builder.parse(is);
-			Node filter = doc.getDocumentElement();
-			if (filter.getNodeName().equalsIgnoreCase("filter")) {
-				filter = filter.getFirstChild();
-			}
-			root = createOperationTree(filter);
-		} catch (IOException e) {
-			logger.error(e,e);
-		} catch (SAXException e) {
-			logger.error(e,e);
-		} catch (ParserConfigurationException e) {
-			logger.error(e,e);
-		}
-	}
-	
-	/**
-	 * Creates the operation tree from the filter XML
-	 * @param node The filter XML node to parse
-	 * @return The root IOperation
-	 */
-	protected IOperation createOperationTree(Node node) {
-		try {
-			// instantiate the operation and initialize it
-			Class<? extends IOperation> clazz = CloudbaseClassLoader.loadClass(IOperation.class.getPackage().getName() + "." + node.getNodeName(), IOperation.class);
-			IOperation op = clazz.newInstance();
-			op.init(node, compareType);
-			return op;
-		} catch (ClassNotFoundException e) {
-			logger.warn("Operation not supported: " + node.getNodeName());
-		} catch (InstantiationException e) {
-			logger.error(e,e);
-		} catch (IllegalAccessException e) {
-			logger.error(e,e);
-		}
-		return null;
-	}
-}

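Besides the scanner-side setup shown in the class javadoc, the filter can be exercised directly through the accept(Map) overload. A sketch; note that init() resolves operation classes via CloudbaseClassLoader, so running this outside a tablet server assumes that loader can see the operation package:

    import java.util.HashMap;
    import java.util.Map;

    import ss.cloudbase.core.iterators.filter.ogc.OGCFilter;

    public class OGCFilterExample {
        public static void main(String[] args) {
            String xml = "<PropertyIsLike>"
                       + "<PropertyName>city</PropertyName>"
                       + "<Literal>new*</Literal>"
                       + "</PropertyIsLike>";

            Map<String, String> options = new HashMap<String, String>();
            options.put(OGCFilter.OPTION_FILTER, xml);

            OGCFilter filter = new OGCFilter();
            filter.init(options);

            Map<String, String> row = new HashMap<String, String>();
            row.put("city", "new york");
            System.out.println(filter.accept(row)); // true
        }
    }
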
http://git-wip-us.apache.org/repos/asf/incubator-rya/blob/80faf06d/partition/common-query/src/main/java/ss/cloudbase/core/iterators/filter/ogc/operation/AbstractComparisonOp.java
----------------------------------------------------------------------
diff --git a/partition/common-query/src/main/java/ss/cloudbase/core/iterators/filter/ogc/operation/AbstractComparisonOp.java b/partition/common-query/src/main/java/ss/cloudbase/core/iterators/filter/ogc/operation/AbstractComparisonOp.java
deleted file mode 100644
index 46b1cf9..0000000
--- a/partition/common-query/src/main/java/ss/cloudbase/core/iterators/filter/ogc/operation/AbstractComparisonOp.java
+++ /dev/null
@@ -1,80 +0,0 @@
-package ss.cloudbase.core.iterators.filter.ogc.operation;
-
-import java.util.List;
-import java.util.Map;
-
-import org.w3c.dom.Node;
-import org.w3c.dom.NodeList;
-
-import ss.cloudbase.core.iterators.filter.ogc.OGCFilter;
-
-
-
-/**
- * This class provides a simple init method for setting up most of the variables
- * needed to do a comparison operation between two values.
- * 
- * @author William Wall
- */
-public abstract class AbstractComparisonOp {
-	protected String name, literal, value;
-	protected boolean isNumeric = false;
-	protected double literalNum, valueNum;
-	protected String compareType;
-	
-	public void init(Node node, String compareType) {
-		this.compareType = compareType;
-		Node child;
-		NodeList children = node.getChildNodes();
-		for (int i = 0; i < children.getLength(); i++) {
-			child = children.item(i);
-			if (child.getNodeName().equalsIgnoreCase("PropertyName")) {
-				name = child.getTextContent();
-			} else {
-				literal = child.getTextContent();
-			}
-		}
-		
-		if (compareType.equalsIgnoreCase(OGCFilter.TYPE_NUMERIC) || compareType.equalsIgnoreCase(OGCFilter.TYPE_AUTO)) {
-			literalNum = parseNumeric(literal);
-			isNumeric = !Double.isNaN(literalNum);
-		}
-	}
-	
-	public List<IOperation> getChildren() {
-		return null;
-	}
-	
-	protected boolean checkRowNumeric(String s) {
-		if (isNumeric) {
-			valueNum = parseNumeric(s);
-			return !Double.isNaN(valueNum);
-		}
-		return false;
-	}
-	
-	public String getValue(Map<String, String> row) {
-		String value = row.get(name);
-		
-		// nulls will be lexicographically equal to ""
-		if (value == null) {
-			value = "";
-		}
-		return value;
-	}
-	
-	public static double parseNumeric(String s) {
-		// see if the string can be parsed as a double or an integer
-		double val = Double.NaN;
-		try {
-			val = Double.parseDouble(s);
-		} catch (Exception e) {
-			try {
-				val = new Double(Integer.parseInt(s));
-			} catch (Exception e2) {
-				
-			}
-		}
-		return val;
-	}
-}

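In the "auto" compare type the literal decides the comparison style: if it parses as a number, row values are compared numerically; otherwise both sides fall back to String.compareTo. parseNumeric() signals failure with NaN, as a quick sketch shows:

    import ss.cloudbase.core.iterators.filter.ogc.operation.AbstractComparisonOp;

    public class ParseNumericExample {
        public static void main(String[] args) {
            System.out.println(AbstractComparisonOp.parseNumeric("200"));  // 200.0 -> numeric compare
            System.out.println(AbstractComparisonOp.parseNumeric("tall")); // NaN   -> string compare
        }
    }
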
http://git-wip-us.apache.org/repos/asf/incubator-rya/blob/80faf06d/partition/common-query/src/main/java/ss/cloudbase/core/iterators/filter/ogc/operation/AbstractLogicalOp.java
----------------------------------------------------------------------
diff --git a/partition/common-query/src/main/java/ss/cloudbase/core/iterators/filter/ogc/operation/AbstractLogicalOp.java b/partition/common-query/src/main/java/ss/cloudbase/core/iterators/filter/ogc/operation/AbstractLogicalOp.java
deleted file mode 100644
index 0400f61..0000000
--- a/partition/common-query/src/main/java/ss/cloudbase/core/iterators/filter/ogc/operation/AbstractLogicalOp.java
+++ /dev/null
@@ -1,40 +0,0 @@
-package ss.cloudbase.core.iterators.filter.ogc.operation;
-
-import java.util.ArrayList;
-import java.util.List;
-
-import org.apache.log4j.Logger;
-import org.w3c.dom.Node;
-import org.w3c.dom.NodeList;
-
-import cloudbase.start.classloader.CloudbaseClassLoader;
-
-public class AbstractLogicalOp {
-	private static final Logger logger = Logger.getLogger(AbstractLogicalOp.class);
-	
-	List<IOperation> children = new ArrayList<IOperation>();
-	
-	public void init(Node node, String compareType) {
-		Node child;
-		NodeList children = node.getChildNodes();
-		for (int i = 0; i < children.getLength(); i++) {
-			child = children.item(i); 
-			try {
-				Class<? extends IOperation> clazz = CloudbaseClassLoader.loadClass(IOperation.class.getPackage().getName() + "." + child.getNodeName(), IOperation.class);
-				IOperation op = clazz.newInstance();
-				op.init(child, compareType);
-				this.children.add(op);
-			} catch (ClassNotFoundException e) {
-				logger.warn("Operation not supported: " + node.getNodeName());
-			} catch (InstantiationException e) {
-				logger.error(e,e);
-			} catch (IllegalAccessException e) {
-				logger.error(e,e);
-			}
-		}
-	}
-	
-	public List<IOperation> getChildren() {
-		return children;
-	}
-}

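Operations are resolved reflectively: the XML element name is appended to the operation package, so an <And> element loads the And class shown next. A sketch of the same lookup using the standard class loader in place of CloudbaseClassLoader, which is only available inside a tablet server:

    import ss.cloudbase.core.iterators.filter.ogc.operation.IOperation;

    public class OperationLookup {
        public static void main(String[] args) throws Exception {
            String nodeName = "And"; // e.g. from node.getNodeName()
            String className = IOperation.class.getPackage().getName() + "." + nodeName;
            IOperation op = (IOperation) Class.forName(className).newInstance();
            System.out.println(op.getClass().getSimpleName()); // And
        }
    }
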
http://git-wip-us.apache.org/repos/asf/incubator-rya/blob/80faf06d/partition/common-query/src/main/java/ss/cloudbase/core/iterators/filter/ogc/operation/And.java
----------------------------------------------------------------------
diff --git a/partition/common-query/src/main/java/ss/cloudbase/core/iterators/filter/ogc/operation/And.java b/partition/common-query/src/main/java/ss/cloudbase/core/iterators/filter/ogc/operation/And.java
deleted file mode 100644
index b192b19..0000000
--- a/partition/common-query/src/main/java/ss/cloudbase/core/iterators/filter/ogc/operation/And.java
+++ /dev/null
@@ -1,29 +0,0 @@
-package ss.cloudbase.core.iterators.filter.ogc.operation;
-
-import java.util.Map;
-
-/**
- * Executes a logical AND on all the child operations.
- * 
- * <code>
- * <pre>
- * &lt;And&gt;
- * 	&lt;PropertyIsEqualTo&gt;...
- * 	&lt;PropertyIsLessThan&gt;...
- * &lt;/And&gt;
- * </pre>
- * </code>
- * 
- * @author William Wall
- */
-public class And extends AbstractLogicalOp implements IOperation {
-	@Override
-	public boolean execute(Map<String, String> row) {
-		boolean result = true;
-		for (int i = 0; i < children.size(); i++) {
-			result = children.get(i).execute(row);
-			if (!result) break;
-		}
-		return result;
-	}
-}

http://git-wip-us.apache.org/repos/asf/incubator-rya/blob/80faf06d/partition/common-query/src/main/java/ss/cloudbase/core/iterators/filter/ogc/operation/BBOX.java
----------------------------------------------------------------------
diff --git a/partition/common-query/src/main/java/ss/cloudbase/core/iterators/filter/ogc/operation/BBOX.java b/partition/common-query/src/main/java/ss/cloudbase/core/iterators/filter/ogc/operation/BBOX.java
deleted file mode 100644
index 8596338..0000000
--- a/partition/common-query/src/main/java/ss/cloudbase/core/iterators/filter/ogc/operation/BBOX.java
+++ /dev/null
@@ -1,125 +0,0 @@
-package ss.cloudbase.core.iterators.filter.ogc.operation;
-
-import java.awt.Rectangle;
-import java.awt.Shape;
-import java.awt.geom.Point2D;
-import java.util.List;
-import java.util.Map;
-
-import org.apache.log4j.Logger;
-import org.w3c.dom.Node;
-import org.w3c.dom.NodeList;
-
-
-/**
- * Tests a row to see if it falls within the given shape. The shape should be
- * defined in degrees. There is no need to send a property name since all the
- * rows contain either lonself/latself or lon/lat fields.
- * 
- * Example:
- * <pre>
- * 	&lt;BBOX&gt;
- * 		&lt;gml:Envelope&gt;
- * 			&lt;!-- coordinates are in lon/lat order --&gt;
- * 			&lt;gml:LowerCorner&gt;13.09 31.5899&lt;/gml:LowerCorner&gt;
- * 			&lt;gml:UpperCorner&gt;35.725 42.8153&lt;/gml:UpperCorner&gt;
- * 		&lt;/gml:Envelope&gt;
- * 	&lt;/BBOX&gt;
- * </pre>
- * 
- * @author William Wall
- */
-public class BBOX implements IOperation {
-	private static final Logger logger = Logger.getLogger(BBOX.class);
-	
-	Shape shape;
-	
-	// longitude column names in order of priority
-	protected static final String[] LON_NAMES = {
-		"lonself",
-		"lon",
-		"long",
-		"longitude"
-	};
-	
-	// latitude column names in order of priority
-	protected static final String[] LAT_NAMES = {
-		"latself",
-		"lat",
-		"latitude"
-	};
-	
-	@Override
-	public boolean execute(Map<String, String> row) {
-		Point2D p = BBOX.getPoint(row);
-		
-		if (p != null && shape != null) {
-			if (shape.contains(p)) {
-				return true;
-			} else {
-				// attempt to normalize the point into the shape in the event that the shape
-				// bounds are outside of -180 to 180
-				Rectangle bounds = shape.getBounds();
-				while (p.getX() < bounds.getMinX()) {
-					p.setLocation(p.getX() + 360, p.getY());
-				}
-				while (p.getX() > bounds.getMaxX()) {
-					p.setLocation(p.getX() - 360, p.getY());
-				}
-				
-				return shape.contains(p);
-			}
-		}
-		
-		return false;
-	}
-
-	@Override
-	public List<IOperation> getChildren() {
-		return null;
-	}
-
-	@Override
-	public void init(Node node, String compareType) {
-		NodeList children = node.getChildNodes();
-		for (int i = 0; i < children.getLength(); i++) {
-			shape = ShapeFactory.getShape(children.item(i));
-			if (shape != null) {
-				break;
-			}
-		}
-		
-	}
-	
-	/**
-	 * Gets a point that represents the location of this row. See 
-	 * @param row
-	 * @return The point object as (x - Longitude, y - Latitude)
-	 */
-	protected static Point2D getPoint(Map<String, String> row) {
-		Point2D.Double p = new Point2D.Double();
-		p.x = getDegree(row, LON_NAMES);
-		p.y = getDegree(row, LAT_NAMES);
-		return p;
-	}
-	
-	protected static double getDegree(Map<String, String> row, String[] cols) {
-		double num = Double.NaN;
-		String value;
-		for (int i = 0; i < cols.length; i++) {
-			if (row.containsKey(cols[i])) {
-				value = row.get(cols[i]);
-				if (value != null && !value.equals("-")) {
-					try {
-						num = Double.parseDouble(value);
-						break;
-					} catch (NumberFormatException e) {
-						logger.warn("Could not parse degree value from " + cols[i] + " = " + value);
-					}
-				}
-			}
-		}
-		
-		return num;
-	}
-}

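The while loops above exist so that an envelope whose bounds run past -180/180 still matches points reported in the -180..180 range: the point's longitude is shifted by whole revolutions until it falls inside the box. A worked example with illustrative coordinates:

    public class LonNormalization {
        public static void main(String[] args) {
            double minX = 170.0, maxX = 190.0; // envelope spanning the antimeridian
            double lon = -175.0;
            while (lon < minX) lon += 360;     // -175 -> 185
            while (lon > maxX) lon -= 360;
            System.out.println(lon >= minX && lon <= maxX); // true
        }
    }
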
http://git-wip-us.apache.org/repos/asf/incubator-rya/blob/80faf06d/partition/common-query/src/main/java/ss/cloudbase/core/iterators/filter/ogc/operation/IOperation.java
----------------------------------------------------------------------
diff --git a/partition/common-query/src/main/java/ss/cloudbase/core/iterators/filter/ogc/operation/IOperation.java b/partition/common-query/src/main/java/ss/cloudbase/core/iterators/filter/ogc/operation/IOperation.java
deleted file mode 100644
index 6ad8ebe..0000000
--- a/partition/common-query/src/main/java/ss/cloudbase/core/iterators/filter/ogc/operation/IOperation.java
+++ /dev/null
@@ -1,30 +0,0 @@
-package ss.cloudbase.core.iterators.filter.ogc.operation;
-
-import java.util.List;
-import java.util.Map;
-
-import org.w3c.dom.Node;
-
-public interface IOperation {
-	
-	/**
-	 * Sets up the operation from the filter XML node
-	 * @param node The node
-	 * @param compareType The compare type. Defaults to "auto", but you can force it to be "numeric" or "string".
-	 */
-	public void init(Node node, String compareType);
-	
-	/**
-	 * Executes the operation indicated by the given node in the query
-	 * tree.
-	 * @param row The key/value pairs for the current row
-	 * @return The boolean evaluation of the operation
-	 */
-	public boolean execute(Map<String, String> row);
-	
-	/**
-	 * Returns the node's children. This is only applicable to logical
-	 * operations (AND, OR, NOT).
-	 */
-	public List<IOperation> getChildren();
-}

http://git-wip-us.apache.org/repos/asf/incubator-rya/blob/80faf06d/partition/common-query/src/main/java/ss/cloudbase/core/iterators/filter/ogc/operation/Not.java
----------------------------------------------------------------------
diff --git a/partition/common-query/src/main/java/ss/cloudbase/core/iterators/filter/ogc/operation/Not.java b/partition/common-query/src/main/java/ss/cloudbase/core/iterators/filter/ogc/operation/Not.java
deleted file mode 100644
index 74826a8..0000000
--- a/partition/common-query/src/main/java/ss/cloudbase/core/iterators/filter/ogc/operation/Not.java
+++ /dev/null
@@ -1,35 +0,0 @@
-package ss.cloudbase.core.iterators.filter.ogc.operation;
-
-import java.util.Map;
-
-/**
- * Executes a logical NOT on the child operations. If there is a single child, then 
- * the operation is NOT. If more than one child exists, this operation defaults to
- * NOR behavior. For NAND behavior, make a single AND child of NOT.
- * 
- * <code>
- * <pre>
- * &lt;Not&gt;
- * 	&lt;PropertyIsEqualTo&gt;...
- * &lt;/Not&gt;
- * </pre>
- * </code>
- * 
- * @author William Wall
- *
- */
-public class Not extends AbstractLogicalOp implements IOperation {
-
-	@Override
-	public boolean execute(Map<String, String> row) {
-		// For typical NOT behavior, a NOT group should have one child. If it has more than one child, it behaves
-		// like NOR. NAND/NOR behavior can be implemented by giving the Not group a child group of AND/OR.
-		boolean result = true;
-		for (int i = 0; i < children.size(); i++) {
-			result = !children.get(i).execute(row);
-			// in the case that there are multiple children, treat them as NOR
-			if (!result) break;
-		}
-		return result;
-	}
-}

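As the comment above notes, NAND is expressed by nesting a single And under Not; the property names and literals below are illustrative:

    <Not>
      <And>
        <PropertyIsEqualTo>
          <PropertyName>status</PropertyName>
          <Literal>active</Literal>
        </PropertyIsEqualTo>
        <PropertyIsLessThan>
          <PropertyName>height</PropertyName>
          <Literal>180</Literal>
        </PropertyIsLessThan>
      </And>
    </Not>
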
http://git-wip-us.apache.org/repos/asf/incubator-rya/blob/80faf06d/partition/common-query/src/main/java/ss/cloudbase/core/iterators/filter/ogc/operation/Or.java
----------------------------------------------------------------------
diff --git a/partition/common-query/src/main/java/ss/cloudbase/core/iterators/filter/ogc/operation/Or.java b/partition/common-query/src/main/java/ss/cloudbase/core/iterators/filter/ogc/operation/Or.java
deleted file mode 100644
index 0a3dd6e..0000000
--- a/partition/common-query/src/main/java/ss/cloudbase/core/iterators/filter/ogc/operation/Or.java
+++ /dev/null
@@ -1,29 +0,0 @@
-package ss.cloudbase.core.iterators.filter.ogc.operation;
-
-import java.util.Map;
-
-/**
- * Executes a logical OR on the child operations.
- * 
- * <code>
- * <pre>
- * &lt;Or&gt;
- * 	&lt;PropertyIsEqualTo&gt;...
- * 	&lt;PropertyIsLessThan&gt;...
- * &lt;/Or&gt;
- * </pre>
- * </code>
- * 
- * @author William Wall
- */
-public class Or extends AbstractLogicalOp implements IOperation {
-	@Override
-	public boolean execute(Map<String, String> row) {
-		boolean result = false;
-		for (int i = 0; i < children.size(); i++) {
-			result = children.get(i).execute(row);
-			if (result) break;
-		}
-		return result;
-	}
-}

http://git-wip-us.apache.org/repos/asf/incubator-rya/blob/80faf06d/partition/common-query/src/main/java/ss/cloudbase/core/iterators/filter/ogc/operation/PropertyIsBetween.java
----------------------------------------------------------------------
diff --git a/partition/common-query/src/main/java/ss/cloudbase/core/iterators/filter/ogc/operation/PropertyIsBetween.java b/partition/common-query/src/main/java/ss/cloudbase/core/iterators/filter/ogc/operation/PropertyIsBetween.java
deleted file mode 100644
index 2c9d86c..0000000
--- a/partition/common-query/src/main/java/ss/cloudbase/core/iterators/filter/ogc/operation/PropertyIsBetween.java
+++ /dev/null
@@ -1,76 +0,0 @@
-package ss.cloudbase.core.iterators.filter.ogc.operation;
-
-import java.util.List;
-import java.util.Map;
-
-import org.w3c.dom.Node;
-import org.w3c.dom.NodeList;
-
-import ss.cloudbase.core.iterators.filter.ogc.OGCFilter;
-
-
-/**
- * An operation that determines if the row's value is between the given
- * boundary values.
- * 
- * Example:
- * <pre>
- * 	&lt;PropertyIsBetween&gt;
- * 		&lt;PropertyName&gt;height&lt;/PropertyName&gt;
- * 		&lt;LowerBoundary&gt;&lt;Literal&gt;180&lt;/Literal&gt;&lt;/LowerBoundary&gt;
- * 		&lt;UpperBoundary&gt;&lt;Literal&gt;200&lt;/Literal&gt;&lt;/UpperBoundary&gt;
- * 	&lt;/PropertyIsBetween&gt;
- * </pre>
- * 
- * @author William Wall
- */
-public class PropertyIsBetween implements IOperation {
-	String name;
-	String lower, upper, value;
-	double lowerNum, upperNum, valueNum;
-	boolean isNumeric = false;
-	
-	@Override
-	public boolean execute(Map<String, String> row) {
-		if (isNumeric) {
-			valueNum = AbstractComparisonOp.parseNumeric(row.get(name));
-			if (!Double.isNaN(valueNum)) {
-				return lowerNum <= valueNum && valueNum <= upperNum;
-			}
-		}
-		
-		value = row.get(name);
-		if (value == null) {
-			value = "";
-		}
-		
-		return value.compareTo(lower) > -1 && value.compareTo(upper) < 1;
-	}
-
-	@Override
-	public List<IOperation> getChildren() {
-		return null;
-	}
-
-	@Override
-	public void init(Node node, String compareType) {
-		Node child;
-		NodeList children = node.getChildNodes();
-		for (int i = 0; i < children.getLength(); i++) {
-			child = children.item(i);
-			if (child.getNodeName().equalsIgnoreCase("PropertyName")) {
-				name = child.getTextContent();
-			} else if (child.getNodeName().equalsIgnoreCase("LowerBoundary")) {
-				lower = child.getTextContent();
-			} else if (child.getNodeName().equalsIgnoreCase("UpperBoundary")) {
-				upper = child.getTextContent();
-			}
-		}
-		
-		if (compareType.equalsIgnoreCase(OGCFilter.TYPE_NUMERIC) || compareType.equalsIgnoreCase(OGCFilter.TYPE_AUTO)) {
-			upperNum = AbstractComparisonOp.parseNumeric(upper);
-			lowerNum = AbstractComparisonOp.parseNumeric(lower);
-			isNumeric = !Double.isNaN(upperNum) && !Double.isNaN(lowerNum);
-		}
-	}
-}

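The numeric path above matters because lexicographic and numeric order disagree for digit strings of different lengths:

    public class OrderExample {
        public static void main(String[] args) {
            System.out.println("99".compareTo("180") > 0); // true: '9' > '1' lexicographically
            System.out.println(99 < 180);                  // true numerically
        }
    }
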
http://git-wip-us.apache.org/repos/asf/incubator-rya/blob/80faf06d/partition/common-query/src/main/java/ss/cloudbase/core/iterators/filter/ogc/operation/PropertyIsEqualTo.java
----------------------------------------------------------------------
diff --git a/partition/common-query/src/main/java/ss/cloudbase/core/iterators/filter/ogc/operation/PropertyIsEqualTo.java b/partition/common-query/src/main/java/ss/cloudbase/core/iterators/filter/ogc/operation/PropertyIsEqualTo.java
deleted file mode 100644
index 93fa3f5..0000000
--- a/partition/common-query/src/main/java/ss/cloudbase/core/iterators/filter/ogc/operation/PropertyIsEqualTo.java
+++ /dev/null
@@ -1,30 +0,0 @@
-package ss.cloudbase.core.iterators.filter.ogc.operation;
-
-import java.util.Map;
-
-/**
- * An operation to see whether the values are equal or not.
- * 
- * Example:
- * <pre>
- * 	&lt;PropertyIsEqualTo&gt;
- * 		&lt;PropertyName&gt;user&lt;/PropertyName&gt;
- * 		&lt;Literal&gt;CmdrTaco&lt;/Literal&gt;
- *  &lt;/PropertyIsEqualTo&gt;
- * </pre>
- * 
- * @author William Wall
- */
-public class PropertyIsEqualTo extends AbstractComparisonOp implements IOperation {
-
-	@Override
-	public boolean execute(Map<String, String> row) {
-		value = getValue(row);
-		
-		if (checkRowNumeric(value)) {
-			return valueNum == literalNum;
-		}
-		
-		return value.equals(literal);
-	}
-}

http://git-wip-us.apache.org/repos/asf/incubator-rya/blob/80faf06d/partition/common-query/src/main/java/ss/cloudbase/core/iterators/filter/ogc/operation/PropertyIsGreaterThan.java
----------------------------------------------------------------------
diff --git a/partition/common-query/src/main/java/ss/cloudbase/core/iterators/filter/ogc/operation/PropertyIsGreaterThan.java b/partition/common-query/src/main/java/ss/cloudbase/core/iterators/filter/ogc/operation/PropertyIsGreaterThan.java
deleted file mode 100644
index 8eb9ec0..0000000
--- a/partition/common-query/src/main/java/ss/cloudbase/core/iterators/filter/ogc/operation/PropertyIsGreaterThan.java
+++ /dev/null
@@ -1,29 +0,0 @@
-package ss.cloudbase.core.iterators.filter.ogc.operation;
-
-import java.util.Map;
-
-/**
- * An operation to see if the row value is greater than the given value.
- * 
- * Example:
- * <pre>
- * 	&lt;PropertyIsGreaterThan&gt;
- * 		&lt;PropertyName&gt;height&lt;/PropertyName&gt;
- * 		&lt;Literal&gt;200&lt;/Literal&gt;
- *	&lt;/PropertyIsGreaterThan&gt;
- * </pre>
- * @author William Wall
- */
-public class PropertyIsGreaterThan extends AbstractComparisonOp implements IOperation {
-
-	@Override
-	public boolean execute(Map<String, String> row) {
-		value = getValue(row);
-		
-		if (checkRowNumeric(value)) {
-			return valueNum > literalNum;
-		}
-		
-		return value.compareTo(literal) > 0;
-	}
-}

http://git-wip-us.apache.org/repos/asf/incubator-rya/blob/80faf06d/partition/common-query/src/main/java/ss/cloudbase/core/iterators/filter/ogc/operation/PropertyIsGreaterThanOrEqualTo.java
----------------------------------------------------------------------
diff --git a/partition/common-query/src/main/java/ss/cloudbase/core/iterators/filter/ogc/operation/PropertyIsGreaterThanOrEqualTo.java b/partition/common-query/src/main/java/ss/cloudbase/core/iterators/filter/ogc/operation/PropertyIsGreaterThanOrEqualTo.java
deleted file mode 100644
index 17fb1dc..0000000
--- a/partition/common-query/src/main/java/ss/cloudbase/core/iterators/filter/ogc/operation/PropertyIsGreaterThanOrEqualTo.java
+++ /dev/null
@@ -1,29 +0,0 @@
-package ss.cloudbase.core.iterators.filter.ogc.operation;
-
-import java.util.Map;
-
-/**
- * An operation to see if the row value is greater than or equal to the given value.
- * 
- * Example:
- * 	<pre>
- * 	&lt;PropertyIsGreaterThanOrEqualTo&gt;
- * 		&lt;PropertyName&gt;height&lt;/PropertyName&gt;
- * 		&lt;Literal&gt;100&lt;/Literal&gt;
- * 	&lt;/PropertyIsGreaterThanOrEqualTo&gt;
- * 	</pre>
- * @author William Wall
- */
-public class PropertyIsGreaterThanOrEqualTo extends AbstractComparisonOp implements IOperation {
-
-	@Override
-	public boolean execute(Map<String, String> row) {
-		value = getValue(row);
-		
-		if (checkRowNumeric(value)) {
-			return valueNum >= literalNum;
-		}
-		
-		return value.compareTo(literal) > -1;
-	}
-}

http://git-wip-us.apache.org/repos/asf/incubator-rya/blob/80faf06d/partition/common-query/src/main/java/ss/cloudbase/core/iterators/filter/ogc/operation/PropertyIsLessThan.java
----------------------------------------------------------------------
diff --git a/partition/common-query/src/main/java/ss/cloudbase/core/iterators/filter/ogc/operation/PropertyIsLessThan.java b/partition/common-query/src/main/java/ss/cloudbase/core/iterators/filter/ogc/operation/PropertyIsLessThan.java
deleted file mode 100644
index f094c99..0000000
--- a/partition/common-query/src/main/java/ss/cloudbase/core/iterators/filter/ogc/operation/PropertyIsLessThan.java
+++ /dev/null
@@ -1,31 +0,0 @@
-package ss.cloudbase.core.iterators.filter.ogc.operation;
-
-import java.util.Map;
-
-/**
- * An operation to see if the row value is less than the given value.
- * 
- * Example:
- * <pre>
- * 	&lt;PropertyIsLessThan&gt;
- * 		&lt;PropertyName&gt;height&lt;/PropertyName&gt;
- * 		&lt;Literal&gt;180&lt;/Literal&gt;
- *	&lt;/PropertyIsLessThan&gt;
- * </pre>
- * 
- * @author William Wall
- */
-public class PropertyIsLessThan extends AbstractComparisonOp implements IOperation {
-
-	@Override
-	public boolean execute(Map<String, String> row) {
-		value = getValue(row);
-		
-		if (checkRowNumeric(value)) {
-			return valueNum < literalNum;
-		}
-		
-		return value.compareTo(literal) < 0;
-	}
-	
-}

http://git-wip-us.apache.org/repos/asf/incubator-rya/blob/80faf06d/partition/common-query/src/main/java/ss/cloudbase/core/iterators/filter/ogc/operation/PropertyIsLessThanOrEqualTo.java
----------------------------------------------------------------------
diff --git a/partition/common-query/src/main/java/ss/cloudbase/core/iterators/filter/ogc/operation/PropertyIsLessThanOrEqualTo.java b/partition/common-query/src/main/java/ss/cloudbase/core/iterators/filter/ogc/operation/PropertyIsLessThanOrEqualTo.java
deleted file mode 100644
index 9b01aae..0000000
--- a/partition/common-query/src/main/java/ss/cloudbase/core/iterators/filter/ogc/operation/PropertyIsLessThanOrEqualTo.java
+++ /dev/null
@@ -1,29 +0,0 @@
-package ss.cloudbase.core.iterators.filter.ogc.operation;
-
-import java.util.Map;
-
-/**
- * An operation to see if the row value is less than or equal to the given value.
- * 
- * <pre>
- * 	&lt;PropertyIsLessThanOrEqualTo&gt;
- * 		&lt;PropertyName&gt;height&lt;/PropertyName&gt;
- * 		&lt;Literal&gt;100&lt;/Literal&gt;
- * 	&lt;/PropertyIsLessThanOrEqualTo&gt;
- * </pre>
- * 
- * @author William Wall
- */
-public class PropertyIsLessThanOrEqualTo extends AbstractComparisonOp implements IOperation {
-
-	@Override
-	public boolean execute(Map<String, String> row) {
-		value = getValue(row);
-		
-		if (checkRowNumeric(value)) {
-			return valueNum <= literalNum;
-		}
-		
-		return value.compareTo(literal) < 1;
-	}
-}

http://git-wip-us.apache.org/repos/asf/incubator-rya/blob/80faf06d/partition/common-query/src/main/java/ss/cloudbase/core/iterators/filter/ogc/operation/PropertyIsLike.java
----------------------------------------------------------------------
diff --git a/partition/common-query/src/main/java/ss/cloudbase/core/iterators/filter/ogc/operation/PropertyIsLike.java b/partition/common-query/src/main/java/ss/cloudbase/core/iterators/filter/ogc/operation/PropertyIsLike.java
deleted file mode 100644
index ad38951..0000000
--- a/partition/common-query/src/main/java/ss/cloudbase/core/iterators/filter/ogc/operation/PropertyIsLike.java
+++ /dev/null
@@ -1,144 +0,0 @@
-package ss.cloudbase.core.iterators.filter.ogc.operation;
-
-import java.util.List;
-import java.util.Map;
-
-import org.w3c.dom.NamedNodeMap;
-import org.w3c.dom.Node;
-import org.w3c.dom.NodeList;
-
-/**
- * An operation that determines if the row value is like the given value. This
- * operation supports wildcards (*).
- * 
- * Example:
- * 
- * <pre>
- * 	&lt;PropertyIsLike&gt;
- * 		&lt;PropertyName&gt;city&lt;/PropertyName&gt;
- * 		&lt;Literal&gt;new*&lt;/Literal&gt;
- * 	&lt;/PropertyIsLike&gt;
- * </pre>
- * 
- * @author William Wall
- * 
- */
-public class PropertyIsLike implements IOperation {
-	String pattern;
-	String name;
-
-	@Override
-	public boolean execute(Map<String, String> row) {
-		String value = row.get(name);
-		if (value == null) {
-			value = "";
-		}
-
-		return value.matches(pattern);
-	}
-
-	@Override
-	public List<IOperation> getChildren() {
-		return null;
-	}
-
-	@Override
-	public void init(Node node, String compareType) {
-		Node child;
-		NodeList children = node.getChildNodes();
-		for (int i = 0; i < children.getLength(); i++) {
-			child = children.item(i);
-			if (child.getNodeName().equalsIgnoreCase("PropertyName")) {
-				name = child.getTextContent();
-			} else {
-				pattern = child.getTextContent();
-			}
-		}
-
-		pattern = convertToRegex(node, pattern);
-	}
-
-	/**
-	 * Converts the pattern's wildcard, single-character and escape characters to their
-	 * regular expression equivalents. Everything else in the pattern is treated
-	 * as a regex literal.
-	 * 
-	 * @param node The PropertyIsLike node
-	 * @param likePattern The initial like pattern 
-	 * 
-	 * @return the equivalent regular expression string.
-	 */
-	public String convertToRegex(Node node, String likePattern) {
-		// Convert the pattern to a regular expression.
-		StringBuilder regex = new StringBuilder();
-		
-		NamedNodeMap attr = node.getAttributes();
-		
-		String wildCard = "*";
-		if (attr.getNamedItem("wildCard") != null) {
-			wildCard = attr.getNamedItem("wildCard").toString();
-		}
-		
-		String escapeChar = "\\";
-		if (attr.getNamedItem("escapeChar") != null) {
-			 escapeChar = attr.getNamedItem("escapeChar").toString();
-		}
-		
-		String singleChar = ".";
-		if (attr.getNamedItem("singleChar") != null) {
-			singleChar = attr.getNamedItem("singleChar").toString();
-		}
-		
-		int escapeCharIndex = likePattern.indexOf(escapeChar);
-
-		// These are required in WFS but we'll handle null values here.
-		int wildCardIndex = wildCard == null ? -1 : likePattern.indexOf(wildCard);
-		int singleCharIndex = singleChar == null ? -1 : likePattern.indexOf(singleChar);
-		for (int index = 0; index < likePattern.length(); index++) {
-			char ch = likePattern.charAt(index);
-			if (index == escapeCharIndex) {
-				escapeCharIndex = likePattern.indexOf(escapeChar, escapeCharIndex + escapeChar.length());
-
-				// If there are consecutive escape characters, skip to the
-				// next one to save it in the regex.
-				if (index + 1 == escapeCharIndex) {
-					escapeCharIndex = likePattern.indexOf(escapeChar, escapeCharIndex + escapeChar.length());
-				} else if (index + 1 == wildCardIndex) {
-					wildCardIndex = likePattern.indexOf(wildCard, wildCardIndex + wildCard.length());
-				} else if (index + 1 == singleCharIndex) {
-					singleCharIndex = likePattern.indexOf(singleChar, singleCharIndex + singleChar.length());
-				} else {
-					// This is an undefined condition, just skip the escape
-					// character.
-				}
-			}
-
-			// Insert the regular expression equivalent of a wild card.
-			else if (index == wildCardIndex) {
-				regex.append(".*");
-				index += wildCard.length() - 1;
-				wildCardIndex = likePattern.indexOf(wildCard, wildCardIndex + wildCard.length());
-			}
-
-			// Insert the regular expression equivalent of the single char.
-			else if (index == singleCharIndex) {
-				regex.append(".");
-				index += singleChar.length() - 1;
-				singleCharIndex = likePattern.indexOf(singleChar, singleCharIndex + singleChar.length());
-			}
-
-			// Handle certain characters in a special manner.
-			else if (('[' == ch) || (']' == ch) || ('\\' == ch) || ('^' == ch)) {
-				regex.append('\\').append(ch);
-			}
-
-			// Force everything else to be literals.
-			else {
-				regex.append('[').append(ch).append(']');
-			}
-		}
-		
-		// add case insensitive flag and start match at beginning of the string
-		return "(?i)^" + regex.toString();
-	}
-}

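For reference, the conversion above wraps every literal character in its own bracket class and anchors a case-insensitive match at the start of the value, so with the default wildCard of "*" the pattern new* becomes (?i)^[n][e][w].* . A small, hypothetical check of that behaviour with java.util.regex:

    public class LikePatternCheck {
        public static void main(String[] args) {
            // convertToRegex(node, "new*") with the default wildCard yields:
            String regex = "(?i)^[n][e][w].*";
            System.out.println("New York".matches(regex)); // true: case-insensitive prefix
            System.out.println("Renew".matches(regex));    // false: anchored at the start
        }
    }
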
http://git-wip-us.apache.org/repos/asf/incubator-rya/blob/80faf06d/partition/common-query/src/main/java/ss/cloudbase/core/iterators/filter/ogc/operation/PropertyIsNotEqualTo.java
----------------------------------------------------------------------
diff --git a/partition/common-query/src/main/java/ss/cloudbase/core/iterators/filter/ogc/operation/PropertyIsNotEqualTo.java b/partition/common-query/src/main/java/ss/cloudbase/core/iterators/filter/ogc/operation/PropertyIsNotEqualTo.java
deleted file mode 100644
index b62e6c6..0000000
--- a/partition/common-query/src/main/java/ss/cloudbase/core/iterators/filter/ogc/operation/PropertyIsNotEqualTo.java
+++ /dev/null
@@ -1,30 +0,0 @@
-package ss.cloudbase.core.iterators.filter.ogc.operation;
-
-import java.util.Map;
-
-/**
- * An operation that determines if the row value is not equal to the given value.
- * 
- * Example:
- * <pre>
- * 	&lt;PropertyIsNotEqualTo&gt;
- * 		&lt;PropertyName&gt;weather&lt;/PropertyName&gt;
- * 		&lt;Literal&gt;rainy&lt;/Literal&gt;
- * 	&lt;/PropertyIsNotEqualTo&gt;
- * </pre>
- * 
- * @author William Wall
- *
- */
-public class PropertyIsNotEqualTo extends AbstractComparisonOp implements IOperation {
-	@Override
-	public boolean execute(Map<String, String> row) {
-		value = getValue(row);
-		
-		if (checkRowNumeric(value)) {
-			return valueNum != literalNum;
-		}
-		
-		return !value.equals(literal);
-	}
-}

http://git-wip-us.apache.org/repos/asf/incubator-rya/blob/80faf06d/partition/common-query/src/main/java/ss/cloudbase/core/iterators/filter/ogc/operation/PropertyIsNull.java
----------------------------------------------------------------------
diff --git a/partition/common-query/src/main/java/ss/cloudbase/core/iterators/filter/ogc/operation/PropertyIsNull.java b/partition/common-query/src/main/java/ss/cloudbase/core/iterators/filter/ogc/operation/PropertyIsNull.java
deleted file mode 100644
index d5d67c2..0000000
--- a/partition/common-query/src/main/java/ss/cloudbase/core/iterators/filter/ogc/operation/PropertyIsNull.java
+++ /dev/null
@@ -1,38 +0,0 @@
-package ss.cloudbase.core.iterators.filter.ogc.operation;
-
-import java.util.List;
-import java.util.Map;
-
-import org.w3c.dom.Node;
-
-/**
- * An operation to determine if the row value is null. Nulls and empty strings will both match.
- *
- * Example:
- * <pre>
- * 	&lt;PropertyIsNull&gt;
- * 		&lt;PropertyName&gt;socialSkills&lt;/PropertyName&gt;
- * 	&lt;/PropertyIsNull&gt;
- * </pre>
- * 
- * @author William Wall
- */
-public class PropertyIsNull implements IOperation {
-	String name;
-	
-	@Override
-	public boolean execute(Map<String, String> row) {
-		String value = row.get(name);
-		return value == null || value.length() == 0;
-	}
-
-	@Override
-	public List<IOperation> getChildren() {
-		return null;
-	}
-
-	@Override
-	public void init(Node node, String compareType) {
-		name = node.getTextContent();
-	}
-}

http://git-wip-us.apache.org/repos/asf/incubator-rya/blob/80faf06d/partition/common-query/src/main/java/ss/cloudbase/core/iterators/filter/ogc/operation/ShapeFactory.java
----------------------------------------------------------------------
diff --git a/partition/common-query/src/main/java/ss/cloudbase/core/iterators/filter/ogc/operation/ShapeFactory.java b/partition/common-query/src/main/java/ss/cloudbase/core/iterators/filter/ogc/operation/ShapeFactory.java
deleted file mode 100644
index 10fea78..0000000
--- a/partition/common-query/src/main/java/ss/cloudbase/core/iterators/filter/ogc/operation/ShapeFactory.java
+++ /dev/null
@@ -1,133 +0,0 @@
-package ss.cloudbase.core.iterators.filter.ogc.operation;
-
-import java.awt.Rectangle;
-import java.awt.Shape;
-import java.awt.geom.Ellipse2D;
-import java.awt.geom.Path2D;
-import java.awt.geom.Point2D;
-
-import org.apache.log4j.Logger;
-import org.w3c.dom.Node;
-import org.w3c.dom.NodeList;
-
-import ss.cloudbase.core.iterators.filter.ogc.util.GeoUtil;
-
-
-public class ShapeFactory {
-	private static final Logger logger = Logger.getLogger(ShapeFactory.class);
-	
-	public static Shape getShape(Node node) {
-		if (node.getNodeName().equalsIgnoreCase("gml:Envelope")) {
-			return parseEnvelope(node);
-		} else if (node.getNodeName().equalsIgnoreCase("gml:Polygon")) {
-			return parsePolygon(node);
-		} else if (node.getNodeName().equalsIgnoreCase("gml:CircleByCenterPoint")) {
-			return parseCircle(node);
-		}
-		
-		logger.warn("No parser implemented for: " + node.getLocalName());
-		return null;
-	}
-	
-	protected static Shape parseEnvelope(Node node) {
-		Rectangle rect = null;
-		
-		Node child;
-		NodeList children = node.getChildNodes();
-		for (int i = 0; i < children.getLength(); i++) {
-			child = children.item(i);
-			String[] parts = child.getTextContent().split("\\s");
-			
-			if (parts.length == 2) {
-				double lon = Double.parseDouble(parts[0]);
-				double lat = Double.parseDouble(parts[1]);
-				
-				if (rect == null) {
-					rect = new Rectangle();
-					rect.setFrame(lon, lat, 0, 0);
-				} else {
-					rect.add(lon, lat);
-				}
-			}
-		}
-
-		// If the rectangle width is greater than 180 degrees, the user most likely
-		// meant to use the inverse BBOX (where the east value is less than the west).
-		// This is for clients that wrap coordinates rather than use absolute coordinates.
-		if (rect.getWidth() > 180) {
-			rect.setFrame(rect.getMaxX(), rect.getMaxY(), 360 - rect.getWidth(), rect.getHeight());
-		}
-		
-		return rect;
-	}
-	
-	protected static Shape parsePolygon(Node node) {
-		Path2D poly = null;
-		
-		String text = node.getTextContent();
-		String[] list = text.split("\\s");
-		
-		for (int i = 1; i < list.length; i += 2) {
-			double lon = Double.parseDouble(list[i-1]);
-			double lat = Double.parseDouble(list[i]);
-			if (poly == null) {
-				poly = new Path2D.Double();
-				poly.moveTo(lon, lat);
-			} else {
-				poly.lineTo(lon, lat);
-			}
-		}
-		
-		return poly;
-	}
-	
-	protected static Shape parseCircle(Node node) {
-		Ellipse2D circle = null;
-		
-		double radius = Double.NaN, lon = Double.NaN, lat = Double.NaN;
-		String units = null;
-		
-		Node child;
-		NodeList children = node.getChildNodes();
-		try {
-			for (int i = 0; i < children.getLength(); i++) {
-				child = children.item(i);
-				if (child.getNodeName().equalsIgnoreCase("gml:radius")) {
-					radius = Double.parseDouble(child.getTextContent());
-					units = child.getAttributes().getNamedItem("uom").getTextContent();
-				} else {
-					String[] list = child.getTextContent().split("\\s");
-					lon = Double.parseDouble(list[0]);
-					lat = Double.parseDouble(list[1]);
-				}
-			}
-			
-			radius = convertToKM(radius, units);
-			Point2D center = new Point2D.Double(lon, lat);
-			Point2D end = GeoUtil.calculateEndLocation(center, radius, lat > 0 ? 180: 0);
-			
-			radius = Math.abs(end.getY() - lat);
-			circle = new Ellipse2D.Double();
-			circle.setFrameFromCenter(center, new Point2D.Double(center.getX() + radius, center.getY() + radius));
-		} catch (NumberFormatException e) {
-			
-		} catch (ArrayIndexOutOfBoundsException e) {
-			
-		}
-		
-		return circle;
-	}
-	
-	private static double convertToKM(double radius, String units) {
-		if (units.equalsIgnoreCase("km")) {
-			return radius;
-		} else if (units.equalsIgnoreCase("m")) {
-			return radius / 1000;
-		} else if (units.equalsIgnoreCase("mi")) {
-			return 0.621371192 * radius;
-		} else if (units.equalsIgnoreCase("ft")) {
-			return radius / 3280.8399;
-		}
-		return radius;
-	}
-}

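One subtlety in parseEnvelope above: an envelope wider than 180 degrees is reinterpreted as the inverse box that crosses the antimeridian. A hedged sketch of just that longitude wrap, using Rectangle2D.Double so fractional degrees survive (java.awt.Rectangle, used above, stores integer coordinates) and keeping the latitude frame fixed:

    import java.awt.geom.Rectangle2D;

    public class InverseBoxSketch {
        public static void main(String[] args) {
            Rectangle2D.Double rect = new Rectangle2D.Double();
            rect.setFrame(-170, -10, 0, 0); // first corner
            rect.add(170, 10);              // second corner -> width 340, the "wrong way" around
            if (rect.getWidth() > 180) {
                // Reinterpret as the (360 - width) box starting at the eastern edge.
                rect.setRect(rect.getMaxX(), rect.getY(), 360 - rect.getWidth(), rect.getHeight());
            }
            System.out.println(rect); // x=170.0, w=20.0: the box crossing the antimeridian
        }
    }
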
http://git-wip-us.apache.org/repos/asf/incubator-rya/blob/80faf06d/partition/common-query/src/main/java/ss/cloudbase/core/iterators/filter/ogc/util/GeoUtil.java
----------------------------------------------------------------------
diff --git a/partition/common-query/src/main/java/ss/cloudbase/core/iterators/filter/ogc/util/GeoUtil.java b/partition/common-query/src/main/java/ss/cloudbase/core/iterators/filter/ogc/util/GeoUtil.java
deleted file mode 100644
index 95259fa..0000000
--- a/partition/common-query/src/main/java/ss/cloudbase/core/iterators/filter/ogc/util/GeoUtil.java
+++ /dev/null
@@ -1,32 +0,0 @@
-package ss.cloudbase.core.iterators.filter.ogc.util;
-
-import java.awt.geom.Point2D;
-
-public class GeoUtil {
-	/**
-	 * Calculates an ending location from a point, distance, and bearing
-	 * @param point The start point
-	 * @param distance The distance from the start point in kilometers
-	 * @param bearing The bearing (in degrees) where north is 0
-	 * @return The resulting point
-	 */
-	public static Point2D calculateEndLocation(Point2D point, double distance, double bearing) {
-		double r = 6371; // earth's mean radius in km
-
-		double lon1 = Math.toRadians(point.getX());	
-		double lat1 = Math.toRadians(point.getY());
-		bearing = Math.toRadians(bearing);
-	
-		double lat2 = Math.asin( Math.sin(lat1) * Math.cos(distance/r) + Math.cos(lat1) * Math.sin(distance/r) * Math.cos(bearing) );
-		double lon2 = lon1 + Math.atan2(Math.sin(bearing) * Math.sin(distance/r) * Math.cos(lat1), Math.cos(distance/r) - Math.sin(lat1) * Math.sin(lat2));
-		
-		lon2 = (lon2+Math.PI)%(2*Math.PI) - Math.PI;  // normalise to -180...+180
-	
-		if (Double.isNaN(lat2) || Double.isNaN(lon2)) return null;
-	
-		lon2 = Math.toDegrees(lon2);
-		lat2 = Math.toDegrees(lat2);
-		
-		return new Point2D.Double(lon2, lat2);
-	}
-}

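As a quick sanity check on the destination-point formula above: with the 6371 km mean radius, one degree of latitude spans 6371 * pi / 180, roughly 111.19 km, so travelling that distance due north (bearing 0) from (0, 0) should land very close to longitude 0, latitude 1. A hedged usage sketch against the class as it stood:

    import java.awt.geom.Point2D;
    import ss.cloudbase.core.iterators.filter.ogc.util.GeoUtil;

    public class GeoUtilCheck {
        public static void main(String[] args) {
            double oneDegreeKm = 6371 * Math.PI / 180; // ~111.19 km per degree of latitude
            Point2D end = GeoUtil.calculateEndLocation(new Point2D.Double(0, 0), oneDegreeKm, 0);
            System.out.printf("%.4f, %.4f%n", end.getX(), end.getY()); // ~0.0000, 1.0000
        }
    }
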
http://git-wip-us.apache.org/repos/asf/incubator-rya/blob/80faf06d/partition/common-query/src/test/java/GVDateFilterTest.java
----------------------------------------------------------------------
diff --git a/partition/common-query/src/test/java/GVDateFilterTest.java b/partition/common-query/src/test/java/GVDateFilterTest.java
deleted file mode 100644
index 8ea5578..0000000
--- a/partition/common-query/src/test/java/GVDateFilterTest.java
+++ /dev/null
@@ -1,156 +0,0 @@
-/*
- * To change this template, choose Tools | Templates
- * and open the template in the editor.
- */
-
-import static org.junit.Assert.assertTrue;
-
-import java.io.IOException;
-import java.util.Map.Entry;
-
-import org.apache.hadoop.io.Text;
-import org.junit.Test;
-
-import ss.cloudbase.core.iterators.GMDenIntersectingIterator;
-import ss.cloudbase.core.iterators.filter.general.GVDateFilter;
-
-import cloudbase.core.client.Connector;
-import cloudbase.core.client.Scanner;
-import cloudbase.core.client.TableNotFoundException;
-import cloudbase.core.data.Key;
-import cloudbase.core.data.Range;
-import cloudbase.core.data.Value;
-import cloudbase.core.iterators.FilteringIterator;
-import cloudbase.core.security.Authorizations;
-
-/**
- *
- * @author rashah
- */
-public class GVDateFilterTest
-{
-
-  private Connector cellLevelConn;
-  private Connector serializedConn;
-  private static final String TABLE = "partition";
-  private static final Authorizations AUTHS = new Authorizations("ALPHA,BETA,GAMMA".split(","));
-
-
-
-  protected Connector getSerializedConnector()
-  {
-    if (serializedConn == null)
-    {
-      serializedConn = SampleGVData.initConnector();
-      SampleGVData.writeDenSerialized(serializedConn, SampleGVData.sampleData());
-    }
-    return serializedConn;
-  }
-
-
-
-  protected Scanner getSerializedScanner()
-  {
-    Connector c = getSerializedConnector();
-    try
-    {
-      return c.createScanner(TABLE, AUTHS);
-    }
-    catch (TableNotFoundException e)
-    {
-      return null;
-    }
-  }
-
-  protected Scanner setUpGVDFFilter(Scanner s, String timesta)
-  {
-    try
-    {
-  
-      s.setScanIterators(50, FilteringIterator.class.getName(), "gvdf");
-      s.setScanIteratorOption("gvdf", "0", GVDateFilter.class.getName());
-      s.setScanIteratorOption("gvdf", "0." + GVDateFilter.OPTIONInTimestamp, timesta);
-
-    }
-    catch (IOException e)
-    {
-      // TODO Auto-generated catch block
-      e.printStackTrace();
-    }
-    return s;
-  }
-
-  protected String checkSerialized(Scanner s)
-  {
-    StringBuilder sb = new StringBuilder();
-    boolean first = true;
-    for (Entry<Key, Value> e : s)
-    {
-
-      if (!first)
-      {
-        sb.append(",");
-      }
-      else
-      {
-        first = false;
-      }
-
-      String colq = e.getKey().getColumnQualifier().toString();
-
-      sb.append(colq);
-    }
-    return sb.toString();
-  }
-
-
-  @Test
-  public void testNoResults()
-  {
-
-    Scanner s = setUpGVDFFilter(getSerializedScanner(), "2008-03-03T20:44:28.633Z");
-    s.setRange(new Range());
-
-    assertTrue(checkSerialized(s).equals(""));
-  }
-
-
-  @Test
-  public void testOneResult()
-  {
-
-    Scanner s = setUpGVDFFilter(getSerializedScanner(), "2011-03-03T20:44:28.633Z");
-    s.setRange(new Range());
-
-    System.out.println(checkSerialized(s));
-
-    assertTrue(checkSerialized(s).equals("03"));
-  }
-
-  @Test
-  public void testTwoResults()
-  {
-
-    Scanner s = setUpGVDFFilter(getSerializedScanner(), "2009-03-03T20:44:28.633Z");
-    s.setRange(new Range());
-
-    assertTrue(checkSerialized(s).equals("04,01"));
-  }
-
-    @Test
-  public void testThreeResults()
-  {
-
-    Scanner s = setUpGVDFFilter(getSerializedScanner(), "2010-03-01T20:44:28.633Z");
-    s.setRange(new Range());
-
-    assertTrue(checkSerialized(s).equals("04,01,03"));
-  }
-
-  @Test
-  public void testDummyTest()
-  {
-    assertTrue(true);
-  }
-
-}

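The wiring in setUpGVDFFilter follows the FilteringIterator option convention used throughout these tests: the iterator is registered once at priority 50, option key "0" names the filter class for slot 0, and "0.<key>" passes <key> through to that filter instance. Condensed into one hedged helper, using the same classes as the test above:

    import java.io.IOException;
    import cloudbase.core.client.Scanner;
    import cloudbase.core.iterators.FilteringIterator;
    import ss.cloudbase.core.iterators.filter.general.GVDateFilter;

    final class DateFilterWiring {
        // Slot "0" -> filter class; "0.<key>" -> option for that slot.
        static Scanner withDateFilter(Scanner s, String timestamp) throws IOException {
            s.setScanIterators(50, FilteringIterator.class.getName(), "gvdf");
            s.setScanIteratorOption("gvdf", "0", GVDateFilter.class.getName());
            s.setScanIteratorOption("gvdf", "0." + GVDateFilter.OPTIONInTimestamp, timestamp);
            return s;
        }
    }
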
http://git-wip-us.apache.org/repos/asf/incubator-rya/blob/80faf06d/partition/common-query/src/test/java/GVFrequencyFilterTest.java
----------------------------------------------------------------------
diff --git a/partition/common-query/src/test/java/GVFrequencyFilterTest.java b/partition/common-query/src/test/java/GVFrequencyFilterTest.java
deleted file mode 100644
index 25c602a..0000000
--- a/partition/common-query/src/test/java/GVFrequencyFilterTest.java
+++ /dev/null
@@ -1,144 +0,0 @@
-/*
- * To change this template, choose Tools | Templates
- * and open the template in the editor.
- */
-
-import static org.junit.Assert.assertTrue;
-
-import java.io.IOException;
-import java.util.Map.Entry;
-
-import org.apache.hadoop.io.Text;
-import org.junit.Test;
-
-import ss.cloudbase.core.iterators.GMDenIntersectingIterator;
-import ss.cloudbase.core.iterators.filter.general.GVFrequencyFilter;
-
-import cloudbase.core.client.Connector;
-import cloudbase.core.client.Scanner;
-import cloudbase.core.client.TableNotFoundException;
-import cloudbase.core.data.Key;
-import cloudbase.core.data.Range;
-import cloudbase.core.data.Value;
-import cloudbase.core.iterators.FilteringIterator;
-import cloudbase.core.security.Authorizations;
-
-/**
- *
- * @author rashah
- */
-public class GVFrequencyFilterTest
-{
-
-  private Connector cellLevelConn;
-  private Connector serializedConn;
-  private static final String TABLE = "partition";
-  private static final Authorizations AUTHS = new Authorizations("ALPHA,BETA,GAMMA".split(","));
-
-
-
-  protected Connector getSerializedConnector()
-  {
-    if (serializedConn == null)
-    {
-      serializedConn = SampleGVData.initConnector();
-      SampleGVData.writeDenSerialized(serializedConn, SampleGVData.sampleData());
-    }
-    return serializedConn;
-  }
-
-
-
-  protected Scanner getSerializedScanner()
-  {
-    Connector c = getSerializedConnector();
-    try
-    {
-      return c.createScanner(TABLE, AUTHS);
-    }
-    catch (TableNotFoundException e)
-    {
-      return null;
-    }
-  }
-
-  protected Scanner setUpGVDFFilter(Scanner s, String Frequency)
-  {
-    try
-    {
-      s.clearScanIterators();
-  
-      s.setScanIterators(50, FilteringIterator.class.getName(), "gvff");
-      s.setScanIteratorOption("gvff", "0", GVFrequencyFilter.class.getName());
-      s.setScanIteratorOption("gvff", "0." + GVFrequencyFilter.OPTIONFrequency, Frequency);
-
-    }
-    catch (IOException e)
-    {
-      // TODO Auto-generated catch block
-      e.printStackTrace();
-    }
-    return s;
-  }
-
-  protected String checkSerialized(Scanner s)
-  {
-    StringBuilder sb = new StringBuilder();
-    boolean first = true;
-    for (Entry<Key, Value> e : s)
-    {
-
-      if (!first)
-      {
-        sb.append(",");
-      }
-      else
-      {
-        first = false;
-      }
-
-      String colq = e.getKey().getColumnQualifier().toString();
-
-      //System.out.println(e.getKey()+"\t"+e.getValue());
-
-      sb.append(colq);
-    }
-    return sb.toString();
-  }
-
-  @Test
-  public void testNoMatch()
-  {
-
-    Scanner s = setUpGVDFFilter(getSerializedScanner(), "2000000000");
-    s.setRange(new Range());
-
-    assertTrue(checkSerialized(s).isEmpty());
-  }
-
-  @Test
-  public void testSingleMatch()
-  {
-    Scanner s = setUpGVDFFilter(getSerializedScanner(), "1500000000");
-    s.setRange(new Range());
-
-    assertTrue(checkSerialized(s).equals("01"));
-  }
-
-
-  @Test
-  public void testDoubleMatch()
-  {
-    Scanner s = setUpGVDFFilter(getSerializedScanner(), "1200000000");
-    s.setRange(new Range());
-
-    assertTrue(checkSerialized(s).equals("01,03"));
-  }
-
-  @Test
-  public void testDummyTest()
-  {
-    assertTrue(true);
-  }
-
-}


[42/56] [abbrv] incubator-rya git commit: RYA-7 POM and License Clean-up for Apache Move

Posted by mi...@apache.org.
http://git-wip-us.apache.org/repos/asf/incubator-rya/blob/80faf06d/dao/mongodb.rya/src/main/java/mvm/rya/mongodb/iter/NonCloseableRyaStatementCursorIterator.java
----------------------------------------------------------------------
diff --git a/dao/mongodb.rya/src/main/java/mvm/rya/mongodb/iter/NonCloseableRyaStatementCursorIterator.java b/dao/mongodb.rya/src/main/java/mvm/rya/mongodb/iter/NonCloseableRyaStatementCursorIterator.java
index 48f0931..ba37ca1 100644
--- a/dao/mongodb.rya/src/main/java/mvm/rya/mongodb/iter/NonCloseableRyaStatementCursorIterator.java
+++ b/dao/mongodb.rya/src/main/java/mvm/rya/mongodb/iter/NonCloseableRyaStatementCursorIterator.java
@@ -1,5 +1,25 @@
 package mvm.rya.mongodb.iter;
 
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ * 
+ *   http://www.apache.org/licenses/LICENSE-2.0
+ * 
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied.  See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+
+
 import java.util.Iterator;
 
 import mvm.rya.api.domain.RyaStatement;

http://git-wip-us.apache.org/repos/asf/incubator-rya/blob/80faf06d/dao/mongodb.rya/src/main/java/mvm/rya/mongodb/iter/RyaStatementBindingSetCursorIterator.java
----------------------------------------------------------------------
diff --git a/dao/mongodb.rya/src/main/java/mvm/rya/mongodb/iter/RyaStatementBindingSetCursorIterator.java b/dao/mongodb.rya/src/main/java/mvm/rya/mongodb/iter/RyaStatementBindingSetCursorIterator.java
index b699d96..ce21ff7 100644
--- a/dao/mongodb.rya/src/main/java/mvm/rya/mongodb/iter/RyaStatementBindingSetCursorIterator.java
+++ b/dao/mongodb.rya/src/main/java/mvm/rya/mongodb/iter/RyaStatementBindingSetCursorIterator.java
@@ -1,5 +1,25 @@
 package mvm.rya.mongodb.iter;
 
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ * 
+ *   http://www.apache.org/licenses/LICENSE-2.0
+ * 
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied.  See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+
+
 import info.aduna.iteration.CloseableIteration;
 
 import java.util.Iterator;

http://git-wip-us.apache.org/repos/asf/incubator-rya/blob/80faf06d/dao/mongodb.rya/src/main/java/mvm/rya/mongodb/iter/RyaStatementCursorIterable.java
----------------------------------------------------------------------
diff --git a/dao/mongodb.rya/src/main/java/mvm/rya/mongodb/iter/RyaStatementCursorIterable.java b/dao/mongodb.rya/src/main/java/mvm/rya/mongodb/iter/RyaStatementCursorIterable.java
index d69ab65..83bd2d4 100644
--- a/dao/mongodb.rya/src/main/java/mvm/rya/mongodb/iter/RyaStatementCursorIterable.java
+++ b/dao/mongodb.rya/src/main/java/mvm/rya/mongodb/iter/RyaStatementCursorIterable.java
@@ -1,5 +1,25 @@
 package mvm.rya.mongodb.iter;
 
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ * 
+ *   http://www.apache.org/licenses/LICENSE-2.0
+ * 
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied.  See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+
+
 import info.aduna.iteration.CloseableIteration;
 
 import java.io.IOException;

http://git-wip-us.apache.org/repos/asf/incubator-rya/blob/80faf06d/dao/mongodb.rya/src/main/java/mvm/rya/mongodb/iter/RyaStatementCursorIterator.java
----------------------------------------------------------------------
diff --git a/dao/mongodb.rya/src/main/java/mvm/rya/mongodb/iter/RyaStatementCursorIterator.java b/dao/mongodb.rya/src/main/java/mvm/rya/mongodb/iter/RyaStatementCursorIterator.java
index 8b2ae3b..8df2c60 100644
--- a/dao/mongodb.rya/src/main/java/mvm/rya/mongodb/iter/RyaStatementCursorIterator.java
+++ b/dao/mongodb.rya/src/main/java/mvm/rya/mongodb/iter/RyaStatementCursorIterator.java
@@ -1,5 +1,25 @@
 package mvm.rya.mongodb.iter;
 
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ * 
+ *   http://www.apache.org/licenses/LICENSE-2.0
+ * 
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied.  See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+
+
 import info.aduna.iteration.CloseableIteration;
 
 import java.util.Iterator;

http://git-wip-us.apache.org/repos/asf/incubator-rya/blob/80faf06d/dao/pom.xml
----------------------------------------------------------------------
diff --git a/dao/pom.xml b/dao/pom.xml
index 315d406..604b30c 100644
--- a/dao/pom.xml
+++ b/dao/pom.xml
@@ -1,17 +1,39 @@
 <?xml version="1.0" encoding="utf-8"?>
-<project xmlns="http://maven.apache.org/POM/4.0.0" xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance" xsi:schemaLocation="http://maven.apache.org/POM/4.0.0 http://maven.apache.org/xsd/maven-4.0.0.xsd">
 
+<!--
+Licensed to the Apache Software Foundation (ASF) under one
+or more contributor license agreements.  See the NOTICE file
+distributed with this work for additional information
+regarding copyright ownership.  The ASF licenses this file
+to you under the Apache License, Version 2.0 (the
+"License"); you may not use this file except in compliance
+with the License.  You may obtain a copy of the License at
+
+  http://www.apache.org/licenses/LICENSE-2.0
+
+Unless required by applicable law or agreed to in writing,
+software distributed under the License is distributed on an
+"AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+KIND, either express or implied.  See the License for the
+specific language governing permissions and limitations
+under the License.
+-->
+
+<project xmlns="http://maven.apache.org/POM/4.0.0" xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance" xsi:schemaLocation="http://maven.apache.org/POM/4.0.0 http://maven.apache.org/xsd/maven-4.0.0.xsd">
     <modelVersion>4.0.0</modelVersion>
     <parent>
-        <groupId>mvm.rya</groupId>
-        <artifactId>parent</artifactId>
+        <groupId>org.apache.rya</groupId>
+        <artifactId>rya-project</artifactId>
         <version>3.2.10-SNAPSHOT</version>
     </parent>
+
     <artifactId>rya.dao</artifactId>
+    <name>Apache Rya DAO Projects</name>
+
     <packaging>pom</packaging>
-    <name>${project.groupId}.${project.artifactId}</name>
+
     <modules>
         <module>accumulo.rya</module>
-		<module>mongodb.rya</module>
+        <module>mongodb.rya</module>
     </modules>
 </project>

http://git-wip-us.apache.org/repos/asf/incubator-rya/blob/80faf06d/extras/cloudbase.rya.giraph/pom.xml
----------------------------------------------------------------------
diff --git a/extras/cloudbase.rya.giraph/pom.xml b/extras/cloudbase.rya.giraph/pom.xml
deleted file mode 100644
index 2552197..0000000
--- a/extras/cloudbase.rya.giraph/pom.xml
+++ /dev/null
@@ -1,69 +0,0 @@
-<?xml version="1.0" encoding="UTF-8"?>
-<project xmlns="http://maven.apache.org/POM/4.0.0"
-         xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance"
-         xsi:schemaLocation="http://maven.apache.org/POM/4.0.0 http://maven.apache.org/xsd/maven-4.0.0.xsd">
-    <parent>
-        <groupId>mvm.rya</groupId>
-        <artifactId>rya.extras</artifactId>
-        <version>3.2.10-SNAPSHOT</version>
-    </parent>
-    <modelVersion>4.0.0</modelVersion>
-
-    <artifactId>cloudbase.rya.giraph</artifactId>
-
-    <dependencies>
-        <dependency>
-            <groupId>org.apache.giraph</groupId>
-            <artifactId>giraph</artifactId>
-            <version>0.2-SNAPSHOT</version>
-        </dependency>
-        <dependency>
-            <groupId>org.apache.hadoop</groupId>
-            <artifactId>hadoop-common</artifactId>
-        </dependency>
-        <dependency>
-            <groupId>cloudbase</groupId>
-            <artifactId>cloudbase-core</artifactId>
-        </dependency>
-        <dependency>
-            <groupId>mvm.rya</groupId>
-            <artifactId>cloudbase.rya</artifactId>
-        </dependency>
-    </dependencies>
-    <profiles>
-        <profile>
-            <id>mr</id>
-            <build>
-                <plugins>
-                    <plugin>
-                        <!-- NOTE: We don't need a groupId specification because the group is
-                  org.apache.maven.plugins ...which is assumed by default. -->
-                        <artifactId>maven-assembly-plugin</artifactId>
-                        <dependencies>
-                            <dependency>
-                                <groupId>mvm.cloud</groupId>
-                                <artifactId>hadoop-job-assembly</artifactId>
-                                <version>1.0.0-SNAPSHOT</version>
-                            </dependency>
-                        </dependencies>
-                        <executions>
-                            <execution>
-                                <id>make-assembly</id>
-                                <phase>package</phase>
-                                <goals>
-                                    <goal>single</goal>
-                                </goals>
-                                <configuration>
-                                    <attach>false</attach>
-                                    <descriptors>
-                                        <descriptor>assemblies/job.xml</descriptor>
-                                    </descriptors>
-                                </configuration>
-                            </execution>
-                        </executions>
-                    </plugin>
-                </plugins>
-            </build>
-        </profile>
-    </profiles>
-</project>
\ No newline at end of file

http://git-wip-us.apache.org/repos/asf/incubator-rya/blob/80faf06d/extras/cloudbase.rya.giraph/src/main/java/mvm/rya/cloudbase/giraph/format/CloudbaseRyaVertexInputFormat.java
----------------------------------------------------------------------
diff --git a/extras/cloudbase.rya.giraph/src/main/java/mvm/rya/cloudbase/giraph/format/CloudbaseRyaVertexInputFormat.java b/extras/cloudbase.rya.giraph/src/main/java/mvm/rya/cloudbase/giraph/format/CloudbaseRyaVertexInputFormat.java
deleted file mode 100644
index 490b64d..0000000
--- a/extras/cloudbase.rya.giraph/src/main/java/mvm/rya/cloudbase/giraph/format/CloudbaseRyaVertexInputFormat.java
+++ /dev/null
@@ -1,88 +0,0 @@
-package mvm.rya.cloudbase.giraph.format;
-
-import cloudbase.core.data.Key;
-import cloudbase.core.data.Value;
-import com.google.common.collect.Maps;
-import mvm.rya.api.RdfCloudTripleStoreConstants;
-import mvm.rya.api.domain.RyaStatement;
-import mvm.rya.api.resolver.RyaContext;
-import mvm.rya.api.resolver.triple.TripleRow;
-import org.apache.giraph.graph.BspUtils;
-import org.apache.giraph.graph.Vertex;
-import org.apache.giraph.graph.VertexReader;
-import org.apache.hadoop.conf.Configuration;
-import org.apache.hadoop.io.Text;
-import org.apache.hadoop.mapreduce.InputSplit;
-import org.apache.hadoop.mapreduce.RecordReader;
-import org.apache.hadoop.mapreduce.TaskAttemptContext;
-
-import java.io.IOException;
-import java.util.Map;
-
-/**
- * Date: 7/27/12
- * Time: 1:39 PM
- */
-public class CloudbaseRyaVertexInputFormat
-        extends CloudbaseVertexInputFormat<Text, Text, Text, Text> {
-
-    private Configuration conf;
-
-    public VertexReader<Text, Text, Text, Text>
-    createVertexReader(InputSplit split, TaskAttemptContext context)
-            throws IOException {
-        try {
-
-            return new CloudbaseEdgeVertexReader(
-                    cloudbaseInputFormat.createRecordReader(split, context)) {
-            };
-        } catch (InterruptedException e) {
-            throw new IOException(e);
-        }
-
-    }
-
-    /*
-       Reader takes Key/Value pairs from the underlying input format.
-    */
-    public static class CloudbaseEdgeVertexReader
-            extends CloudbaseVertexReader<Text, Text, Text, Text> {
-
-        private RyaContext ryaContext = RyaContext.getInstance();
-
-        public CloudbaseEdgeVertexReader(RecordReader<Key, Value> recordReader) {
-            super(recordReader);
-        }
-
-
-        public boolean nextVertex() throws IOException, InterruptedException {
-            return getRecordReader().nextKeyValue();
-        }
-
-        /*
-       Each Key/Value contains the information needed to construct the vertices.
-         */
-        public Vertex<Text, Text, Text, Text> getCurrentVertex()
-                throws IOException, InterruptedException {
-            try {
-                Key key = getRecordReader().getCurrentKey();
-                Value value = getRecordReader().getCurrentValue();
-                RyaStatement ryaStatement = ryaContext.deserializeTriple(RdfCloudTripleStoreConstants.TABLE_LAYOUT.SPO,
-                        new TripleRow(key.getRow().getBytes(), key.getColumnFamily().getBytes(),
-                                key.getColumnQualifier().getBytes()));//TODO: assume spo for now
-                Vertex<Text, Text, Text, Text> vertex =
-                        BspUtils.<Text, Text, Text, Text>createVertex(
-                                getContext().getConfiguration());
-                Text vertexId = new Text(ryaStatement.getSubject().getData()); //TODO: set Text?
-                Map<Text, Text> edges = Maps.newHashMap();
-                Text edgeId = new Text(ryaStatement.getPredicate().getData());
-                edges.put(edgeId, new Text(ryaStatement.getObject().getData()));
-                vertex.initialize(vertexId, new Text(), edges, null);
-
-                return vertex;
-            } catch (Exception e) {
-                throw new IOException(e);
-            }
-        }
-    }
-}

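The reader above flattens each SPO entry into a one-edge vertex: the subject becomes the vertex id, the predicate the edge id, and the object the edge value. Illustratively, for a hypothetical triple and assuming the same Guava/Hadoop imports as the reader:

    // For the triple <urn:alice> <urn:knows> <urn:bob>, getCurrentVertex() builds:
    Text vertexId = new Text("urn:alice");                 // subject -> vertex id
    Map<Text, Text> edges = Maps.newHashMap();
    edges.put(new Text("urn:knows"), new Text("urn:bob")); // predicate -> object
    // vertex.initialize(vertexId, new Text(), edges, null); // empty vertex value
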
http://git-wip-us.apache.org/repos/asf/incubator-rya/blob/80faf06d/extras/cloudbase.rya.giraph/src/main/java/mvm/rya/cloudbase/giraph/format/CloudbaseRyaVertexOutputFormat.java
----------------------------------------------------------------------
diff --git a/extras/cloudbase.rya.giraph/src/main/java/mvm/rya/cloudbase/giraph/format/CloudbaseRyaVertexOutputFormat.java b/extras/cloudbase.rya.giraph/src/main/java/mvm/rya/cloudbase/giraph/format/CloudbaseRyaVertexOutputFormat.java
deleted file mode 100644
index acdbe51..0000000
--- a/extras/cloudbase.rya.giraph/src/main/java/mvm/rya/cloudbase/giraph/format/CloudbaseRyaVertexOutputFormat.java
+++ /dev/null
@@ -1,94 +0,0 @@
-/*
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements.  See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership.  The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License.  You may obtain a copy of the License at
- *
- *     http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-package mvm.rya.cloudbase.giraph.format;
-
-import cloudbase.core.data.Mutation;
-import cloudbase.core.data.Value;
-import mvm.rya.api.RdfCloudTripleStoreConstants;
-import mvm.rya.api.domain.RyaType;
-import mvm.rya.api.domain.RyaURI;
-import mvm.rya.cloudbase.RyaTableMutationsFactory;
-import org.apache.giraph.graph.Edge;
-import org.apache.giraph.graph.Vertex;
-import org.apache.giraph.graph.VertexWriter;
-import org.apache.hadoop.io.Text;
-import org.apache.hadoop.mapreduce.RecordWriter;
-import org.apache.hadoop.mapreduce.TaskAttemptContext;
-
-import java.io.IOException;
-import java.util.Collection;
-import java.util.Map;
-
-/*
- Example subclass for writing vertices back to Cloudbase.
- */
-public class CloudbaseRyaVertexOutputFormat
-        extends CloudbaseVertexOutputFormat<Text, Text, Text> {
-
-    public VertexWriter<Text, Text, Text>
-    createVertexWriter(TaskAttemptContext context)
-            throws IOException, InterruptedException {
-        RecordWriter<Text, Mutation> writer =
-                cloudbaseOutputFormat.getRecordWriter(context);
-        String tableName = context.getConfiguration().get(OUTPUT_TABLE);
-        if (tableName == null)
-            throw new IOException("Forgot to set table name " +
-                    "using CloudbaseVertexOutputFormat.OUTPUT_TABLE");
-        return new CloudbaseEdgeVertexWriter(writer, tableName);
-    }
-
-    /*
-    Wraps RecordWriter for writing Mutations back to the configured Cloudbase Table.
-     */
-    public static class CloudbaseEdgeVertexWriter
-            extends CloudbaseVertexWriter<Text, Text, Text> {
-
-        public static final RyaTableMutationsFactory RYA_TABLE_MUTATIONS_FACTORY = new RyaTableMutationsFactory();
-        private final Text CF = new Text("cf");
-        private final Text PARENT = new Text("parent");
-        private Text tableName;
-
-        public CloudbaseEdgeVertexWriter(
-                RecordWriter<Text, Mutation> writer, String tableName) {
-            super(writer);
-            this.tableName = new Text(tableName);
-        }
-
-        /*
-        Write back a mutation that adds a qualifier for 'parent' containing the vertex value
-        as the cell value. Assume the vertex ID corresponds to a key.
-        */
-        public void writeVertex(Vertex<Text, Text, Text, ?> vertex)
-                throws IOException, InterruptedException {
-            RecordWriter<Text, Mutation> writer = getRecordWriter();
-            Text subj = vertex.getId();
-            Iterable<Edge<Text, Text>> edges = vertex.getEdges();
-            for (Edge<Text, Text> edge : edges) {
-                Text pred = edge.getTargetVertexId();
-                Text obj = edge.getValue();
-                Map<RdfCloudTripleStoreConstants.TABLE_LAYOUT, Collection<Mutation>> serialize =
-                        RYA_TABLE_MUTATIONS_FACTORY.serialize(new RyaURI(subj.toString()),
-                                new RyaURI(pred.toString()), new RyaType(obj.toString()), null);
-                Collection<Mutation> mutations = serialize.get(RdfCloudTripleStoreConstants.TABLE_LAYOUT.SPO);
-                for (Mutation mut : mutations) {
-                    writer.write(tableName, mut); //TODO: Assuming SPO
-                }
-            }
-        }
-    }
-}

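Going the other way, writeVertex above re-serializes each (subject, predicate, object) edge into table mutations through RyaTableMutationsFactory, and only the SPO layout is written back. The per-edge round trip, condensed with a hypothetical triple and the same imports and fields as the writer above:

    // One edge of the vertex becomes one or more SPO mutations:
    Map<RdfCloudTripleStoreConstants.TABLE_LAYOUT, Collection<Mutation>> serialized =
            RYA_TABLE_MUTATIONS_FACTORY.serialize(
                    new RyaURI("urn:alice"), new RyaURI("urn:knows"), new RyaType("urn:bob"), null);
    for (Mutation mut : serialized.get(RdfCloudTripleStoreConstants.TABLE_LAYOUT.SPO)) {
        writer.write(tableName, mut); // only the SPO layout is persisted here
    }
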
http://git-wip-us.apache.org/repos/asf/incubator-rya/blob/80faf06d/extras/cloudbase.rya.giraph/src/main/java/mvm/rya/cloudbase/giraph/format/CloudbaseVertexInputFormat.java
----------------------------------------------------------------------
diff --git a/extras/cloudbase.rya.giraph/src/main/java/mvm/rya/cloudbase/giraph/format/CloudbaseVertexInputFormat.java b/extras/cloudbase.rya.giraph/src/main/java/mvm/rya/cloudbase/giraph/format/CloudbaseVertexInputFormat.java
deleted file mode 100644
index fcc0c5e..0000000
--- a/extras/cloudbase.rya.giraph/src/main/java/mvm/rya/cloudbase/giraph/format/CloudbaseVertexInputFormat.java
+++ /dev/null
@@ -1,179 +0,0 @@
-/*
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements.  See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership.  The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License.  You may obtain a copy of the License at
- *
- *     http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-package mvm.rya.cloudbase.giraph.format;
-
-import cloudbase.core.client.mapreduce.CloudbaseInputFormat;
-import cloudbase.core.data.Key;
-import cloudbase.core.data.Value;
-import org.apache.giraph.graph.VertexInputFormat;
-import org.apache.giraph.graph.VertexReader;
-import org.apache.hadoop.conf.Configurable;
-import org.apache.hadoop.conf.Configuration;
-import org.apache.hadoop.io.Writable;
-import org.apache.hadoop.io.WritableComparable;
-import org.apache.hadoop.mapreduce.InputSplit;
-import org.apache.hadoop.mapreduce.JobContext;
-import org.apache.hadoop.mapreduce.RecordReader;
-import org.apache.hadoop.mapreduce.TaskAttemptContext;
-
-import java.io.IOException;
-import java.util.List;
-
-/**
- * Class which wraps the CloudbaseInputFormat. It's designed
- * as an extension point to VertexInputFormat subclasses that wish
- * to read from Cloudbase tables.
- *
- * @param <I> vertex id type
- * @param <V> vertex value type
- * @param <E> edge type
- * @param <M> message type
- */
-public abstract class CloudbaseVertexInputFormat<
-        I extends WritableComparable,
-        V extends Writable,
-        E extends Writable,
-        M extends Writable>
-        extends VertexInputFormat<I, V, E, M> implements Configurable {
-    /**
-     * delegate input format for all cloudbase operations.
-     */
-    protected CloudbaseInputFormat cloudbaseInputFormat =
-            new CloudbaseInputFormat();
-
-    /**
-     * Configured and injected by the job
-     */
-    private Configuration conf;
-
-    @Override
-    public Configuration getConf() {
-        return conf;
-    }
-
-    @Override
-    public void setConf(Configuration conf) {
-        this.conf = conf;
-    }
-
-    /**
-     * Abstract class which provides a template for instantiating vertices
-     * from Cloudbase Key/Value pairs.
-     *
-     * @param <I> vertex id type
-     * @param <V> vertex value type
-     * @param <E> edge type
-     * @param <M> message type
-     */
-    public abstract static class CloudbaseVertexReader<
-            I extends WritableComparable,
-            V extends Writable, E extends Writable, M extends Writable>
-            implements VertexReader<I, V, E, M> {
-
-        /**
-         * Used by subclasses to read key/value pairs.
-         */
-        private final RecordReader<Key, Value> reader;
-        /**
-         * Context passed to initialize
-         */
-        private TaskAttemptContext context;
-
-        /**
-         * Constructor used to pass Record Reader instance
-         *
-         * @param reader Cloudbase record reader
-         */
-        public CloudbaseVertexReader(RecordReader<Key, Value> reader) {
-            this.reader = reader;
-        }
-
-        @Override
-        public void initialize(InputSplit inputSplit,
-                               TaskAttemptContext context)
-                throws IOException, InterruptedException {
-            reader.initialize(inputSplit, context);
-            this.context = context;
-        }
-
-        /**
-         * close
-         *
-         * @throws IOException
-         */
-        public void close() throws IOException {
-            reader.close();
-        }
-
-        /**
-         * getProgress
-         *
-         * @return progress
-         * @throws IOException
-         * @throws InterruptedException
-         */
-        public float getProgress() throws IOException, InterruptedException {
-            return reader.getProgress();
-        }
-
-        /**
-         * Get the result record reader
-         *
-         * @return Record reader to be used for reading.
-         */
-        protected RecordReader<Key, Value> getRecordReader() {
-            return reader;
-        }
-
-        /**
-         * getContext
-         *
-         * @return Context passed to initialize.
-         */
-        protected TaskAttemptContext getContext() {
-            return context;
-        }
-
-    }
-
-    /**
-     * getSplits
-     *
-     * @param context    Context of the job
-     * @param numWorkers Number of workers used for this job
-     * @return tablet splits
-     * @throws IOException
-     * @throws InterruptedException
-     */
-    public List<InputSplit> getSplits(
-            JobContext context, int numWorkers)
-            throws IOException, InterruptedException {
-        List<InputSplit> splits = null;
-        try {
-            splits = cloudbaseInputFormat.getSplits(context);
-        } catch (IOException e) {
-            if (e.getMessage().contains("Input info has not been set")) {
-                throw new IOException(e.getMessage() +
-                        " Make sure you initialized" +
-                        " CloudbaseInputFormat static setters " +
-                        "before passing the config to GiraphJob.");
-            }
-        }
-        return splits;
-    }
-}

http://git-wip-us.apache.org/repos/asf/incubator-rya/blob/80faf06d/extras/cloudbase.rya.giraph/src/main/java/mvm/rya/cloudbase/giraph/format/CloudbaseVertexOutputFormat.java
----------------------------------------------------------------------
diff --git a/extras/cloudbase.rya.giraph/src/main/java/mvm/rya/cloudbase/giraph/format/CloudbaseVertexOutputFormat.java b/extras/cloudbase.rya.giraph/src/main/java/mvm/rya/cloudbase/giraph/format/CloudbaseVertexOutputFormat.java
deleted file mode 100644
index f88dfe6..0000000
--- a/extras/cloudbase.rya.giraph/src/main/java/mvm/rya/cloudbase/giraph/format/CloudbaseVertexOutputFormat.java
+++ /dev/null
@@ -1,189 +0,0 @@
-/*
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements.  See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership.  The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License.  You may obtain a copy of the License at
- *
- *     http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-package mvm.rya.cloudbase.giraph.format;
-
-import cloudbase.core.client.mapreduce.CloudbaseOutputFormat;
-import cloudbase.core.data.Mutation;
-import org.apache.giraph.graph.VertexOutputFormat;
-import org.apache.giraph.graph.VertexWriter;
-import org.apache.hadoop.conf.Configurable;
-import org.apache.hadoop.conf.Configuration;
-import org.apache.hadoop.io.Text;
-import org.apache.hadoop.io.Writable;
-import org.apache.hadoop.io.WritableComparable;
-import org.apache.hadoop.mapreduce.JobContext;
-import org.apache.hadoop.mapreduce.OutputCommitter;
-import org.apache.hadoop.mapreduce.RecordWriter;
-import org.apache.hadoop.mapreduce.TaskAttemptContext;
-
-import java.io.IOException;
-/**
- *
- *  Class which wraps the CloudbaseOutputFormat. It's designed
- *  as an extension point to VertexOutputFormat subclasses that wish
- *  to write vertices back to a Cloudbase table.
- *
- *
- * @param <I> vertex id type
- * @param <V>  vertex value type
- * @param <E>  edge type
- */
-public abstract class CloudbaseVertexOutputFormat<
-        I extends WritableComparable,
-        V extends Writable,
-        E extends Writable>
-        extends VertexOutputFormat<I, V, E> implements Configurable {
-
-
-  /**
-   * Output table parameter
-   */
-  protected static final String OUTPUT_TABLE = "OUTPUT_TABLE";
-
-  /**
-   * Cloudbase delegate for table output
-   */
-  protected CloudbaseOutputFormat cloudbaseOutputFormat =
-          new CloudbaseOutputFormat();
-
-
-  /**
-   * Used by configured interface
-   */
-  private Configuration conf;
-
-  /**
-   *
-   * Main abstraction point for vertex writers to persist back
-   * to Cloudbase tables.
-   *
-   * @param <I> vertex id type
-   * @param <V> vertex value type
-   * @param <E>  edge type
-   */
-  public abstract static class CloudbaseVertexWriter<
-          I extends WritableComparable,
-          V extends Writable,
-          E extends Writable>
-          implements VertexWriter<I, V, E> {
-
-    /**
-     * task attempt context.
-     */
-    private TaskAttemptContext context;
-
-    /**
-     * Cloudbase record writer
-     */
-    private RecordWriter<Text, Mutation> recordWriter;
-
-    /**
-     * Constructor for use with subclasses
-     *
-     * @param recordWriter cloudbase record writer
-     */
-    public CloudbaseVertexWriter(RecordWriter<Text, Mutation> recordWriter) {
-      this.recordWriter = recordWriter;
-    }
-
-    /**
-     * initialize
-     *
-     * @param context Context used to write the vertices.
-     * @throws IOException
-     */
-    public void initialize(TaskAttemptContext context) throws IOException {
-      this.context = context;
-    }
-
-    /**
-     *  close
-     *
-     * @param context the context of the task
-     * @throws IOException
-     * @throws InterruptedException
-     */
-    public void close(TaskAttemptContext context)
-      throws IOException, InterruptedException {
-      recordWriter.close(context);
-    }
-
-    /**
-     * Get the table record writer;
-     *
-     * @return Record writer to be used for writing.
-     */
-    public RecordWriter<Text, Mutation> getRecordWriter() {
-      return recordWriter;
-    }
-
-    /**
-     * Get the context.
-     *
-     * @return Context passed to initialize.
-     */
-    public TaskAttemptContext getContext() {
-      return context;
-    }
-
-  }
-
-  @Override
-  public void setConf(Configuration conf) {
-    this.conf = conf;
-  }
-
-  @Override
-  public Configuration getConf() {
-    return this.conf;
-  }
-
-  /**
-   *
-   * checkOutputSpecs
-   *
-   * @param context information about the job
-   * @throws IOException
-   * @throws InterruptedException
-   */
-  public void checkOutputSpecs(JobContext context)
-    throws IOException, InterruptedException {
-    try {
-      cloudbaseOutputFormat.checkOutputSpecs(context);
-    } catch (IOException e) {
-      if (e.getMessage().contains("Output info has not been set")) {
-        throw new IOException(e.getMessage() + " Make sure you initialized" +
-                " CloudbaseOutputFormat static setters " +
-                "before passing the config to GiraphJob.");
-      }
-    }
-  }
-
-  /**
-   * getOutputCommitter
-   *
-   * @param context the task context
-   * @return OutputCommitter
-   * @throws IOException
-   * @throws InterruptedException
-   */
-  public OutputCommitter getOutputCommitter(TaskAttemptContext context)
-    throws IOException, InterruptedException {
-    return cloudbaseOutputFormat.getOutputCommitter(context);
-  }
-}

http://git-wip-us.apache.org/repos/asf/incubator-rya/blob/80faf06d/extras/cloudbase.rya.giraph/src/main/java/mvm/rya/cloudbase/giraph/format/PrintVertexOutputFormat.java
----------------------------------------------------------------------
diff --git a/extras/cloudbase.rya.giraph/src/main/java/mvm/rya/cloudbase/giraph/format/PrintVertexOutputFormat.java b/extras/cloudbase.rya.giraph/src/main/java/mvm/rya/cloudbase/giraph/format/PrintVertexOutputFormat.java
deleted file mode 100644
index e90ca66..0000000
--- a/extras/cloudbase.rya.giraph/src/main/java/mvm/rya/cloudbase/giraph/format/PrintVertexOutputFormat.java
+++ /dev/null
@@ -1,94 +0,0 @@
-package mvm.rya.cloudbase.giraph.format;
-
-import org.apache.giraph.graph.Vertex;
-import org.apache.giraph.graph.VertexOutputFormat;
-import org.apache.giraph.graph.VertexWriter;
-import org.apache.hadoop.conf.Configurable;
-import org.apache.hadoop.conf.Configuration;
-import org.apache.hadoop.io.Writable;
-import org.apache.hadoop.io.WritableComparable;
-import org.apache.hadoop.mapreduce.JobContext;
-import org.apache.hadoop.mapreduce.OutputCommitter;
-import org.apache.hadoop.mapreduce.TaskAttemptContext;
-
-import java.io.IOException;
-
-/**
- * Date: 7/27/12
- * Time: 2:58 PM
- */
-public class PrintVertexOutputFormat<
-        I extends WritableComparable,
-        V extends Writable,
-        E extends Writable>
-        extends VertexOutputFormat<I, V, E> implements Configurable {
-    @Override
-    public void setConf(Configuration entries) {
-        //To change body of implemented methods use File | Settings | File Templates.
-    }
-
-    @Override
-    public Configuration getConf() {
-        return null;  //To change body of implemented methods use File | Settings | File Templates.
-    }
-
-    @Override
-    public VertexWriter<I, V, E> createVertexWriter(TaskAttemptContext context) throws IOException, InterruptedException {
-        return new VertexWriter<I, V, E>() {
-            @Override
-            public void initialize(TaskAttemptContext context) throws IOException {
-                //To change body of implemented methods use File | Settings | File Templates.
-            }
-
-            @Override
-            public void writeVertex(Vertex<I, V, E, ?> iveVertex) throws IOException, InterruptedException {
-                System.out.println(iveVertex);
-            }
-
-            @Override
-            public void close(TaskAttemptContext context) throws IOException, InterruptedException {
-                //To change body of implemented methods use File | Settings | File Templates.
-            }
-        };
-    }
-
-    @Override
-    public void checkOutputSpecs(JobContext context) throws IOException, InterruptedException {
-        //To change body of implemented methods use File | Settings | File Templates.
-    }
-
-    @Override
-    public OutputCommitter getOutputCommitter(TaskAttemptContext context) throws IOException, InterruptedException {
-        return new OutputCommitter() {
-            @Override
-            public void setupJob(JobContext jobContext) throws IOException {
-                //To change body of implemented methods use File | Settings | File Templates.
-            }
-
-            @Override
-            public void cleanupJob(JobContext jobContext) throws IOException {
-                //To change body of implemented methods use File | Settings | File Templates.
-            }
-
-            @Override
-            public void setupTask(TaskAttemptContext taskAttemptContext) throws IOException {
-                //To change body of implemented methods use File | Settings | File Templates.
-            }
-
-            @Override
-            public boolean needsTaskCommit(TaskAttemptContext taskAttemptContext) throws IOException {
-                return false;  //To change body of implemented methods use File | Settings | File Templates.
-            }
-
-            @Override
-            public void commitTask(TaskAttemptContext taskAttemptContext) throws IOException {
-                //To change body of implemented methods use File | Settings | File Templates.
-            }
-
-            @Override
-            public void abortTask(TaskAttemptContext taskAttemptContext) throws IOException {
-                //To change body of implemented methods use File | Settings | File Templates.
-            }
-        };
-    }
-}
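
For context on what is being deleted here: this output format was only ever selected on a GiraphJob, as in the following minimal sketch, assuming the pre-1.0 Giraph API these classes compile against (the vertex and input-format classes come from TestCloudbaseVertexFormat further down; the job name is a placeholder, and the Cloudbase input configuration from that test is elided):

    import org.apache.giraph.graph.GiraphJob;
    import org.apache.hadoop.conf.Configuration;

    public class PrintVertexExample {
        public static void main(String[] args) throws Exception {
            GiraphJob job = new GiraphJob(new Configuration(), "print-vertex-example");
            // Any Vertex implementation works; EdgeNotification is the nested
            // class from TestCloudbaseVertexFormat below.
            job.setVertexClass(TestCloudbaseVertexFormat.EdgeNotification.class);
            job.setVertexInputFormatClass(CloudbaseRyaVertexInputFormat.class);
            // PrintVertexOutputFormat simply dumps each vertex to stdout.
            job.setVertexOutputFormatClass(PrintVertexOutputFormat.class);
            job.run(true);  // block until the job completes
        }
    }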

http://git-wip-us.apache.org/repos/asf/incubator-rya/blob/80faf06d/extras/cloudbase.rya.giraph/src/test/java/mvm/rya/cloudbase/giraph/format/BspCase.java
----------------------------------------------------------------------
diff --git a/extras/cloudbase.rya.giraph/src/test/java/mvm/rya/cloudbase/giraph/format/BspCase.java b/extras/cloudbase.rya.giraph/src/test/java/mvm/rya/cloudbase/giraph/format/BspCase.java
deleted file mode 100644
index fb20dd8..0000000
--- a/extras/cloudbase.rya.giraph/src/test/java/mvm/rya/cloudbase/giraph/format/BspCase.java
+++ /dev/null
@@ -1,253 +0,0 @@
-/*
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements.  See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership.  The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License.  You may obtain a copy of the License at
- *
- *     http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-package mvm.rya.cloudbase.giraph.format;
-
-import java.io.FileNotFoundException;
-import java.io.IOException;
-import java.util.List;
-
-import org.apache.hadoop.conf.Configuration;
-import org.apache.hadoop.fs.FileStatus;
-import org.apache.hadoop.fs.FileSystem;
-import org.apache.hadoop.fs.Path;
-import org.apache.hadoop.mapreduce.lib.output.FileOutputFormat;
-import org.apache.zookeeper.WatchedEvent;
-import org.apache.zookeeper.Watcher;
-
-import org.apache.giraph.examples.GeneratedVertexReader;
-import org.apache.giraph.graph.GiraphJob;
-import org.apache.giraph.zk.ZooKeeperExt;
-
-import junit.framework.TestCase;
-
-/**
- * Duplicate copy from main giraph trunk. At least until there
- * is a maven test artifact for Giraph.
- *
- * Extended TestCase for making setting up Bsp testing.
- */
-public class BspCase extends TestCase implements Watcher {
-  /** JobTracker system property */
-  private final String jobTracker =
-      System.getProperty("prop.mapred.job.tracker");
-  /** Jar location system property */
-  private final String jarLocation =
-      System.getProperty("prop.jarLocation", "");
-  /** Number of actual processes for the BSP application */
-  private int numWorkers = 1;
-  /** ZooKeeper list system property */
-  private final String zkList = System.getProperty("prop.zookeeper.list");
-
-  /**
-   * Adjust the configuration to the basic test case
-   */
-  public final void setupConfiguration(GiraphJob job) {
-    Configuration conf = job.getConfiguration();
-    conf.set("mapred.jar", getJarLocation());
-
-    // Allow this test to be run on a real Hadoop setup
-    if (getJobTracker() != null) {
-      System.out.println("setup: Sending job to job tracker " +
-          getJobTracker() + " with jar path " + getJarLocation()
-          + " for " + getName());
-      conf.set("mapred.job.tracker", getJobTracker());
-      job.setWorkerConfiguration(getNumWorkers(),
-          getNumWorkers(),
-          100.0f);
-    }
-    else {
-      System.out.println("setup: Using local job runner with " +
-          "location " + getJarLocation() + " for "
-          + getName());
-      job.setWorkerConfiguration(1, 1, 100.0f);
-      // Single node testing
-      conf.setBoolean(GiraphJob.SPLIT_MASTER_WORKER, false);
-    }
-    conf.setInt(GiraphJob.POLL_ATTEMPTS, 10);
-    conf.setInt(GiraphJob.POLL_MSECS, 3*1000);
-    conf.setInt(GiraphJob.ZOOKEEPER_SERVERLIST_POLL_MSECS, 500);
-    if (getZooKeeperList() != null) {
-      job.setZooKeeperConfiguration(getZooKeeperList());
-    }
-    // GeneratedInputSplit will generate 5 vertices
-    conf.setLong(GeneratedVertexReader.READER_VERTICES, 5);
-  }
-
-  /**
-   * Create the test case
-   *
-   * @param testName name of the test case
-   */
-  public BspCase(String testName) {
-    super(testName);
-
-  }
-
-  /**
-   * Get the number of workers used in the BSP application
-   *
-   */
-  public int getNumWorkers() {
-    return numWorkers;
-  }
-
-  /**
-   * Get the ZooKeeper list
-   */
-  public String getZooKeeperList() {
-    return zkList;
-  }
-
-  /**
-   * Get the jar location
-   *
-   * @return location of the jar file
-   */
-  String getJarLocation() {
-    return jarLocation;
-  }
-
-  /**
-   * Get the job tracker location
-   *
-   * @return job tracker location as a string
-   */
-  String getJobTracker() {
-    return jobTracker;
-  }
-
-  /**
-   * Get the single part file status and make sure there is only one part
-   *
-   * @param job Job to get the file system from
-   * @param partDirPath Directory where the single part file should exist
-   * @return Single part file status
-   * @throws java.io.IOException
-   */
-  public static FileStatus getSinglePartFileStatus(GiraphJob job,
-      Path partDirPath) throws IOException {
-    FileSystem fs = FileSystem.get(job.getConfiguration());
-    FileStatus[] statusArray = fs.listStatus(partDirPath);
-    FileStatus singlePartFileStatus = null;
-    int partFiles = 0;
-    for (FileStatus fileStatus : statusArray) {
-      if (fileStatus.getPath().getName().equals("part-m-00000")) {
-        singlePartFileStatus = fileStatus;
-      }
-      if (fileStatus.getPath().getName().startsWith("part-m-")) {
-        ++partFiles;
-      }
-    }
-    if (partFiles != 1) {
-      throw new ArithmeticException(
-          "getSinglePartFile: Part file count should be 1, but is " +
-              partFiles);
-    }
-    return singlePartFileStatus;
-  }
-
-  @Override
-  public void setUp() {
-    if (jobTracker != null) {
-      System.out.println("Setting tasks to 3 for " + getName() +
-          " since JobTracker exists...");
-      numWorkers = 3;
-    }
-    try {
-      Configuration conf = new Configuration();
-      FileSystem hdfs = FileSystem.get(conf);
-      // Since local jobs always use the same paths, remove them
-      Path oldLocalJobPaths = new Path(
-          GiraphJob.ZOOKEEPER_MANAGER_DIR_DEFAULT);
-      FileStatus[] fileStatusArr;
-      try {
-        fileStatusArr = hdfs.listStatus(oldLocalJobPaths);
-        for (FileStatus fileStatus : fileStatusArr) {
-          if (fileStatus.isDir() &&
-              fileStatus.getPath().getName().contains("job_local")) {
-            System.out.println("Cleaning up local job path " +
-                fileStatus.getPath().getName());
-            hdfs.delete(oldLocalJobPaths, true);
-          }
-        }
-      } catch (FileNotFoundException e) {
-        // ignore this FileNotFound exception and continue.
-      }
-      if (zkList == null) {
-        return;
-      }
-      ZooKeeperExt zooKeeperExt =
-          new ZooKeeperExt(zkList, 30*1000, this);
-      List<String> rootChildren = zooKeeperExt.getChildren("/", false);
-      for (String rootChild : rootChildren) {
-        if (rootChild.startsWith("_hadoopBsp")) {
-          List<String> children =
-              zooKeeperExt.getChildren("/" + rootChild, false);
-          for (String child: children) {
-            if (child.contains("job_local_")) {
-              System.out.println("Cleaning up /_hadoopBsp/" +
-                  child);
-              zooKeeperExt.deleteExt(
-                  "/_hadoopBsp/" + child, -1, true);
-            }
-          }
-        }
-      }
-      zooKeeperExt.close();
-    } catch (Exception e) {
-      throw new RuntimeException(e);
-    }
-  }
-
-  @Override
-  public void process(WatchedEvent event) {
-    // Do nothing
-  }
-
-  /**
-   * Helper method to remove an old output directory if it exists,
-   * and set the output path for any VertexOutputFormat that uses
-   * FileOutputFormat.
-   *
-   * @param job Job to set the output path for
-   * @throws java.io.IOException
-   */
-  public static void removeAndSetOutput(GiraphJob job,
-      Path outputPath) throws IOException {
-    remove(job.getConfiguration(), outputPath);
-    FileOutputFormat.setOutputPath(job.getInternalJob(), outputPath);
-  }
-
-  /**
-   * Helper method to remove a path if it exists.
-   *
-   * @param conf Configuration to load FileSystem from
-   * @param path Path to remove
-   * @throws java.io.IOException
-   */
-  public static void remove(Configuration conf, Path path)
-      throws IOException {
-    FileSystem hdfs = FileSystem.get(conf);
-    hdfs.delete(path, true);
-  }
-
-  public static String getCallingMethodName() {
-    return Thread.currentThread().getStackTrace()[2].getMethodName();
-  }
-}
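
A typical consumer of this scaffolding subclassed it and let setupConfiguration apply the prop.jarLocation, prop.mapred.job.tracker, and prop.zookeeper.list system properties; a hypothetical JUnit 3 style test in that vein (the output path is a placeholder):

    import org.apache.giraph.graph.GiraphJob;
    import org.apache.hadoop.conf.Configuration;
    import org.apache.hadoop.fs.Path;

    public class SimpleBspTest extends BspCase {
        public SimpleBspTest() {
            super("SimpleBspTest");
        }

        public void testSimpleJob() throws Exception {
            GiraphJob job = new GiraphJob(new Configuration(), getCallingMethodName());
            setupConfiguration(job);  // applies jar, job tracker, and ZooKeeper props
            // Clear any stale output and point FileOutputFormat at a scratch directory.
            removeAndSetOutput(job, new Path("/tmp/bsp-test-out"));
            assertTrue(job.run(true));
        }
    }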

http://git-wip-us.apache.org/repos/asf/incubator-rya/blob/80faf06d/extras/cloudbase.rya.giraph/src/test/java/mvm/rya/cloudbase/giraph/format/TestCloudbaseVertexFormat.java
----------------------------------------------------------------------
diff --git a/extras/cloudbase.rya.giraph/src/test/java/mvm/rya/cloudbase/giraph/format/TestCloudbaseVertexFormat.java b/extras/cloudbase.rya.giraph/src/test/java/mvm/rya/cloudbase/giraph/format/TestCloudbaseVertexFormat.java
deleted file mode 100644
index e420ff6..0000000
--- a/extras/cloudbase.rya.giraph/src/test/java/mvm/rya/cloudbase/giraph/format/TestCloudbaseVertexFormat.java
+++ /dev/null
@@ -1,211 +0,0 @@
-/*
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements.  See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership.  The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License.  You may obtain a copy of the License at
- *
- *     http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-package mvm.rya.cloudbase.giraph.format;
-
-import cloudbase.core.client.BatchWriter;
-import cloudbase.core.client.Connector;
-import cloudbase.core.client.ZooKeeperInstance;
-import cloudbase.core.client.mapreduce.CloudbaseInputFormat;
-import cloudbase.core.client.mock.MockInstance;
-import cloudbase.core.data.Range;
-import cloudbase.core.security.Authorizations;
-import junit.framework.Test;
-import junit.framework.TestSuite;
-import mvm.rya.api.domain.RyaStatement;
-import mvm.rya.api.domain.RyaURI;
-import mvm.rya.cloudbase.CloudbaseRdfConfiguration;
-import mvm.rya.cloudbase.CloudbaseRyaDAO;
-import org.apache.giraph.graph.EdgeListVertex;
-import org.apache.giraph.graph.GiraphJob;
-import org.apache.giraph.lib.TextVertexOutputFormat;
-import org.apache.hadoop.conf.Configuration;
-import org.apache.hadoop.fs.Path;
-import org.apache.hadoop.io.Text;
-import org.apache.hadoop.mapreduce.lib.output.FileOutputFormat;
-import org.apache.log4j.Logger;
-
-import java.io.File;
-import java.io.IOException;
-import java.util.Collections;
-
-/*
-    Test class for Cloudbase vertex input/output formats.
- */
-public class TestCloudbaseVertexFormat extends BspCase {
-
-    private final String TABLE_NAME = "rya_spo";
-    private final String INSTANCE_NAME = "stratus";
-    private final Text FAMILY = new Text("cf");
-    private final Text CHILDREN = new Text("children");
-    private final String USER = "root";
-    private final byte[] PASSWORD = new byte[]{};
-    private final Text OUTPUT_FIELD = new Text("parent");
-
-
-    private final Logger log = Logger.getLogger(TestCloudbaseVertexFormat.class);
-
-    /**
-     * Create the test case
-     *
-     * @param testName name of the test case
-     */
-    public TestCloudbaseVertexFormat(String testName) {
-        super(testName);
-    }
-
-    /**
-     * @return the suite of tests being tested
-     */
-    public static Test suite() {
-        return new TestSuite(TestCloudbaseVertexFormat.class);
-
-    }
-
-    /*
-    Write a simple parent-child directed graph to Cloudbase.
-    Run a job which reads the values
-    into subclasses that extend CloudbaseVertex I/O formats.
-    Check the output after the job.
-    */
-    public void testCloudbaseInputOutput() throws Exception {
-//        if (System.getProperty("prop.mapred.job.tracker") != null) {
-//            if(log.isInfoEnabled())
-//                log.info("testCloudbaseInputOutput: " +
-//                        "Ignore this test if not local mode.");
-//            return;
-//        }
-//
-//        System.setProperty("prop.jarLocation", "/temp/cloudbase.rya.giraph-3.0.0-SNAPSHOT.jar");
-        File jarTest = new File(System.getProperty("prop.jarLocation"));
-        if (!jarTest.exists()) {
-            fail("Could not find Giraph jar at " +
-                    "location specified by 'prop.jarLocation'. " +
-                    "Make sure you built the main Giraph artifact?.");
-        }
-
-        //Write out vertices and edges out to a mock instance.
-//        MockInstance mockInstance = new MockInstance(INSTANCE_NAME);
-        Connector c = new ZooKeeperInstance("stratus", "stratus13:2181").getConnector("root", "password".getBytes());
-        CloudbaseRyaDAO ryaDAO = new CloudbaseRyaDAO();
-        ryaDAO.setConnector(c);
-        CloudbaseRdfConfiguration cloudbaseRdfConfiguration = new CloudbaseRdfConfiguration();
-//        cloudbaseRdfConfiguration.setTablePrefix("test_");
-        ryaDAO.init();
-//        c.tableOperations().create(TABLE_NAME);
-//        BatchWriter bw = c.createBatchWriter(TABLE_NAME, 10000L, 1000L, 4);
-
-        ryaDAO.add(new RyaStatement(new RyaURI("urn:test#1234"),
-                new RyaURI("urn:test#pred1"),
-                new RyaURI("urn:test#obj1")));
-        ryaDAO.add(new RyaStatement(new RyaURI("urn:test#1234"),
-                new RyaURI("urn:test#pred2"),
-                new RyaURI("urn:test#obj2")));
-        ryaDAO.add(new RyaStatement(new RyaURI("urn:test#1234"),
-                new RyaURI("urn:test#pred3"),
-                new RyaURI("urn:test#obj3")));
-        ryaDAO.add(new RyaStatement(new RyaURI("urn:test#1234"),
-                new RyaURI("urn:test#pred4"),
-                new RyaURI("urn:test#obj4")));
-        ryaDAO.commit();
-
-//        Mutation m1 = new Mutation(new Text("0001"));
-//        m1.put(FAMILY, CHILDREN, new Value("0002".getBytes()));
-//        bw.addMutation(m1);
-//
-//        Mutation m2 = new Mutation(new Text("0002"));
-//        m2.put(FAMILY, CHILDREN, new Value("0003".getBytes()));
-//        bw.addMutation(m2);
-//        if(log.isInfoEnabled())
-//            log.info("Writing mutations to Cloudbase table");
-//        bw.close();
-
-        Configuration conf = new Configuration();
-//        conf.set(CloudbaseVertexOutputFormat.OUTPUT_TABLE, TABLE_NAME);
-
-        /*
-        Very important to initialize the formats before
-        sending configuration to the GiraphJob. Otherwise
-        the internally constructed Job in GiraphJob will
-        not have the proper context initialization.
-         */
-        GiraphJob job = new GiraphJob(conf, getCallingMethodName());
-        CloudbaseInputFormat.setInputInfo(job.getInternalJob(), USER, "password".getBytes(),
-                TABLE_NAME, new Authorizations());
-//        CloudbaseInputFormat.setMockInstance(job.getInternalJob(), INSTANCE_NAME);
-        CloudbaseInputFormat.setZooKeeperInstance(job.getInternalJob(), "stratus", "stratus13:2181");
-        CloudbaseInputFormat.setRanges(job.getInternalJob(), Collections.singleton(new Range()));
-
-//        CloudbaseOutputFormat.setOutputInfo(job.getInternalJob(), USER, PASSWORD, true, null);
-//        CloudbaseOutputFormat.setMockInstance(job.getInternalJob(), INSTANCE_NAME);
-
-        setupConfiguration(job);
-        job.setVertexClass(EdgeNotification.class);
-        job.setVertexInputFormatClass(CloudbaseRyaVertexInputFormat.class);
-        job.setVertexOutputFormatClass(PrintVertexOutputFormat.class);
-        FileOutputFormat.setOutputPath(job.getInternalJob(), new Path("/temp/graphout"));
-
-//        HashSet<Pair<Text, Text>> columnsToFetch = new HashSet<Pair<Text,Text>>();
-//        columnsToFetch.add(new Pair<Text, Text>(FAMILY, CHILDREN));
-//        CloudbaseInputFormat.fetchColumns(job.getInternalJob(), columnsToFetch);
-
-        if (log.isInfoEnabled())
-            log.info("Running edge notification job using Cloudbase input");
-        assertTrue(job.run(true));
-
-//        Scanner scanner = c.createScanner(TABLE_NAME, new Authorizations());
-//        scanner.setRange(new Range("0002", "0002"));
-//        scanner.fetchColumn(FAMILY, OUTPUT_FIELD);
-//        boolean foundColumn = false;
-//
-//        if(log.isInfoEnabled())
-//            log.info("Verify job output persisted correctly.");
-//        //make sure we found the qualifier.
-//        assertTrue(scanner.iterator().hasNext());
-//
-//
-//        //now we check to make sure the expected value from the job persisted correctly.
-//        for(Map.Entry<Key,Value> entry : scanner) {
-//            Text row = entry.getKey().getRow();
-//            assertEquals("0002", row.toString());
-//            Value value = entry.getValue();
-//            assertEquals("0001", ByteBufferUtil.toString(
-//                    ByteBuffer.wrap(value.get())));
-//            foundColumn = true;
-//        }
-    }
-
-    /*
-   Test compute method that sends each edge a notification of its parents.
-   The test set only has a 1-1 parent-to-child ratio for this unit test.
-    */
-    public static class EdgeNotification
-            extends EdgeListVertex<Text, Text, Text, Text> {
-        @Override
-        public void compute(Iterable<Text> messages) throws IOException {
-            System.out.println("Edges: " + messages);
-            for (Text message : messages) {
-                getValue().set(message);
-            }
-            if (getSuperstep() == 0) {
-//                sendMessageToAllEdges(getId());
-            }
-            voteToHalt();
-        }
-    }
-}

http://git-wip-us.apache.org/repos/asf/incubator-rya/blob/80faf06d/extras/generic.mr/generic.mr.accumulo/pom.xml
----------------------------------------------------------------------
diff --git a/extras/generic.mr/generic.mr.accumulo/pom.xml b/extras/generic.mr/generic.mr.accumulo/pom.xml
deleted file mode 100644
index a573c61..0000000
--- a/extras/generic.mr/generic.mr.accumulo/pom.xml
+++ /dev/null
@@ -1,58 +0,0 @@
-<?xml version="1.0" encoding="UTF-8"?>
-<project xmlns="http://maven.apache.org/POM/4.0.0" xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance" xsi:schemaLocation="http://maven.apache.org/POM/4.0.0 http://maven.apache.org/xsd/maven-4.0.0.xsd">
-    <parent>
-        <groupId>mvm.rya</groupId>
-        <artifactId>generic.mr</artifactId>
-        <version>3.2.10-SNAPSHOT</version>
-    </parent>
-    <modelVersion>4.0.0</modelVersion>
-
-    <artifactId>generic.mr.accumulo</artifactId>
-    <name>${project.groupId}.${project.artifactId}</name>
-    <dependencies>
-        <dependency>
-            <groupId>mvm.rya</groupId>
-            <artifactId>generic.mr.api</artifactId>
-            <version>${project.version}</version>
-        </dependency>
-    </dependencies>
-
-    <build>
-        <plugins>
-            <plugin>
-                <groupId>org.codehaus.gmaven</groupId>
-                <artifactId>gmaven-plugin</artifactId>
-            </plugin>
-        </plugins>
-    </build>
-
-    <profiles>
-        <profile>
-            <id>accumulo</id>
-            <activation>
-                <activeByDefault>true</activeByDefault>
-            </activation>
-            <dependencies>
-                <dependency>
-                    <groupId>org.apache.accumulo</groupId>
-                    <artifactId>accumulo-core</artifactId>
-                    <optional>true</optional>
-                </dependency>
-            </dependencies>
-        </profile>
-        <profile>
-            <id>cloudbase</id>
-            <activation>
-                <activeByDefault>false</activeByDefault>
-            </activation>
-            <dependencies>
-                <dependency>
-                    <groupId>com.texeltek</groupId>
-                    <artifactId>accumulo-cloudbase-shim</artifactId>
-                    <optional>true</optional>
-                </dependency>
-            </dependencies>
-        </profile>
-    </profiles>
-
-</project>

http://git-wip-us.apache.org/repos/asf/incubator-rya/blob/80faf06d/extras/generic.mr/generic.mr.accumulo/src/main/groovy/mvm/rya/generic/mr/accumulo/AccumuloMRInfo.groovy
----------------------------------------------------------------------
diff --git a/extras/generic.mr/generic.mr.accumulo/src/main/groovy/mvm/rya/generic/mr/accumulo/AccumuloMRInfo.groovy b/extras/generic.mr/generic.mr.accumulo/src/main/groovy/mvm/rya/generic/mr/accumulo/AccumuloMRInfo.groovy
deleted file mode 100644
index 89d4633..0000000
--- a/extras/generic.mr/generic.mr.accumulo/src/main/groovy/mvm/rya/generic/mr/accumulo/AccumuloMRInfo.groovy
+++ /dev/null
@@ -1,146 +0,0 @@
-package mvm.rya.generic.mr.accumulo
-
-import org.apache.accumulo.core.client.mapreduce.AccumuloInputFormat
-import org.apache.accumulo.core.client.mapreduce.AccumuloOutputFormat
-import org.apache.accumulo.core.data.Key
-import org.apache.accumulo.core.data.Mutation
-import org.apache.accumulo.core.data.Value
-import org.apache.accumulo.core.security.Authorizations
-import org.apache.accumulo.core.security.ColumnVisibility
-import org.apache.hadoop.conf.Configuration
-import org.apache.hadoop.io.Text
-import org.apache.hadoop.mapreduce.Job
-import mvm.rya.generic.mr.api.MRInfo
-import org.apache.accumulo.core.client.mock.MockInstance
-import org.apache.accumulo.core.client.ZooKeeperInstance
-
-/**
- * Date: 12/3/12
- * Time: 9:00 AM
- */
-class AccumuloMRInfo implements MRInfo {
-
-    def Configuration conf
-    def connector;
-
-    @Override
-    void initMRJob(Job job, String table, String outtable, String[] auths) {
-        Configuration conf = job.configuration
-        String username = conf.get(USERNAME)
-        String password = conf.get(PASSWORD)
-        String instance = conf.get(INSTANCE)
-        String zookeepers = conf.get(ZOOKEEPERS)
-        String mock = conf.get(MOCK)
-
-        //input
-        if (Boolean.parseBoolean(mock)) {
-            AccumuloInputFormat.setMockInstance(conf, instance)
-            AccumuloOutputFormat.setMockInstance(conf, instance)
-        } else if (zookeepers != null) {
-            AccumuloInputFormat.setZooKeeperInstance(conf, instance, zookeepers)
-            AccumuloOutputFormat.setZooKeeperInstance(conf, instance, zookeepers)
-        } else {
-            throw new IllegalArgumentException("Must specify either mock or zookeepers");
-        }
-
-        AccumuloInputFormat.setInputInfo(conf, username, password.getBytes(), table, new Authorizations(auths))
-        job.setInputFormatClass(AccumuloInputFormat.class);
-
-        // OUTPUT
-        job.setOutputFormatClass(AccumuloOutputFormat.class);
-        job.setOutputKeyClass(Text.class);
-        job.setOutputValueClass(Mutation.class);
-        AccumuloOutputFormat.setOutputInfo(job, username, password.getBytes(), true, outtable);
-    }
-
-    @Override
-    def key(byte[] data) {
-        Key key = new Key();
-        key.readFields(new DataInputStream(new ByteArrayInputStream(data)))
-        return key
-    }
-
-    @Override
-    def key(String row, String cf, String cq, String cv, long timestamp) {
-        return new Key(row, cf, cq, cv, timestamp)
-    }
-
-    @Override
-    def value(byte[] data) {
-        return new Value(data)
-    }
-
-    @Override
-    def columnVisibility(String cv) {
-        return new ColumnVisibility(cv)
-    }
-
-    @Override
-    def mutation(String row, String cf, String cq, String cv, long timestamp, byte[] val) {
-        Mutation mutation = new Mutation(row);
-        mutation.put(cf, cq, columnVisibility(cv), timestamp, value(val))
-        return mutation
-    }
-
-    @Override
-    def instance() {
-        assert conf != null
-
-        String instance_str = conf.get(INSTANCE)
-        String zookeepers = conf.get(ZOOKEEPERS)
-        String mock = conf.get(MOCK)
-        if (Boolean.parseBoolean(mock)) {
-            return new MockInstance(instance_str)
-        } else if (zookeepers != null) {
-            return new ZooKeeperInstance(instance_str, zookeepers)
-        } else {
-            throw new IllegalArgumentException("Must specify either mock or zookeepers");
-        }
-    }
-
-    @Override
-    def connector(def instance) {
-        if (connector != null) return connector
-
-        String username = conf.get(USERNAME)
-        String password = conf.get(PASSWORD)
-        if (instance == null)
-            instance = instance()
-        connector = instance.getConnector(username, password)
-        return connector
-    }
-
-    @Override
-    def void writeMutations(def connector, String tableName, Iterator mutations) {
-        def bw = connector.createBatchWriter(tableName, 10000l, 10000l, 4);
-        mutations.each { m ->
-            bw.addMutation(m)
-        }
-        bw.flush()
-        bw.close()
-    }
-
-    @Override
-    def scanner(def connector, String tableName, String[] auths) {
-        return connector.createScanner(tableName, new Authorizations(auths))
-    }
-
-    @Override
-    def batchScanner(def connector, String tableName, String[] auths, int numThreads) {
-        return connector.createBatchScanner(tableName, new Authorizations(auths), numThreads)
-    }
-
-    @Override
-    def range(def startKey, def endKey) {
-        assert startKey != null
-
-        if (endKey != null)
-            return new org.apache.accumulo.core.data.Range(startKey, endKey)
-        return new org.apache.accumulo.core.data.Range(startKey)
-    }
-
-    @Override
-    def authorizations(String[] auths) {
-        return new Authorizations(auths)
-    }
-}

http://git-wip-us.apache.org/repos/asf/incubator-rya/blob/80faf06d/extras/generic.mr/generic.mr.accumulo/src/main/resources/META-INF/services/mvm.rya.generic.mr.api.MRInfo
----------------------------------------------------------------------
diff --git a/extras/generic.mr/generic.mr.accumulo/src/main/resources/META-INF/services/mvm.rya.generic.mr.api.MRInfo b/extras/generic.mr/generic.mr.accumulo/src/main/resources/META-INF/services/mvm.rya.generic.mr.api.MRInfo
deleted file mode 100644
index 81d47de..0000000
--- a/extras/generic.mr/generic.mr.accumulo/src/main/resources/META-INF/services/mvm.rya.generic.mr.api.MRInfo
+++ /dev/null
@@ -1 +0,0 @@
-mvm.rya.generic.mr.accumulo.AccumuloMRInfo
\ No newline at end of file

http://git-wip-us.apache.org/repos/asf/incubator-rya/blob/80faf06d/extras/generic.mr/generic.mr.api/pom.xml
----------------------------------------------------------------------
diff --git a/extras/generic.mr/generic.mr.api/pom.xml b/extras/generic.mr/generic.mr.api/pom.xml
deleted file mode 100644
index 9bf5124..0000000
--- a/extras/generic.mr/generic.mr.api/pom.xml
+++ /dev/null
@@ -1,32 +0,0 @@
-<?xml version="1.0" encoding="UTF-8"?>
-<project xmlns="http://maven.apache.org/POM/4.0.0" xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance" xsi:schemaLocation="http://maven.apache.org/POM/4.0.0 http://maven.apache.org/xsd/maven-4.0.0.xsd">
-    <parent>
-        <groupId>mvm.rya</groupId>
-        <artifactId>generic.mr</artifactId>
-        <version>3.2.10-SNAPSHOT</version>
-    </parent>
-    <modelVersion>4.0.0</modelVersion>
-
-    <artifactId>generic.mr.api</artifactId>
-    <name>${project.groupId}.${project.artifactId}</name>
-    <dependencies>
-        <dependency>
-            <groupId>org.codehaus.groovy</groupId>
-            <artifactId>groovy-all</artifactId>
-        </dependency>
-        <dependency>
-            <groupId>org.apache.hadoop</groupId>
-            <artifactId>hadoop-common</artifactId>
-        </dependency>
-    </dependencies>
-
-    <build>
-        <plugins>
-            <plugin>
-                <groupId>org.codehaus.gmaven</groupId>
-                <artifactId>gmaven-plugin</artifactId>
-            </plugin>
-        </plugins>
-    </build>
-
-</project>

http://git-wip-us.apache.org/repos/asf/incubator-rya/blob/80faf06d/extras/generic.mr/generic.mr.api/src/main/groovy/mvm/rya/generic/mr/api/MRInfo.groovy
----------------------------------------------------------------------
diff --git a/extras/generic.mr/generic.mr.api/src/main/groovy/mvm/rya/generic/mr/api/MRInfo.groovy b/extras/generic.mr/generic.mr.api/src/main/groovy/mvm/rya/generic/mr/api/MRInfo.groovy
deleted file mode 100644
index bdcc61e..0000000
--- a/extras/generic.mr/generic.mr.api/src/main/groovy/mvm/rya/generic/mr/api/MRInfo.groovy
+++ /dev/null
@@ -1,43 +0,0 @@
-package mvm.rya.generic.mr.api
-
-import org.apache.hadoop.conf.Configurable
-import org.apache.hadoop.mapreduce.Job
-
-/**
- * Date: 12/3/12
- * Time: 8:56 AM
- */
-public interface MRInfo extends Configurable{
-
-    public static final String USERNAME = "username"
-    public static final String PASSWORD = "password"
-    public static final String INSTANCE = "instance"
-    public static final String ZOOKEEPERS = "zookeepers"
-    public static final String MOCK = "mock"
-
-    def void initMRJob(Job job, String table, String outtable, String[] auths)
-
-    def key(byte[] data);
-
-    def key(String row, String cf, String cq, String cv, long timestamp);
-
-    def value(byte[] data);
-
-    def columnVisibility(String cv);
-
-    def mutation(String row, String cf, String cq, String cv, long timestamp, byte[] val);
-
-    def instance()
-
-    def connector(def instance)
-
-    def void writeMutations(def connector, String tableName, Iterator mutations)
-
-    def scanner(def connector, String tableName, String[] auths)
-
-    def batchScanner(def connector, String tableName, String[] auths, int numThreads)
-
-    def range(def startKey, def endKey)
-
-    def authorizations(String[] auths)
-}
\ No newline at end of file

http://git-wip-us.apache.org/repos/asf/incubator-rya/blob/80faf06d/extras/generic.mr/generic.mr.api/src/main/groovy/mvm/rya/generic/mr/api/MRInfoContext.groovy
----------------------------------------------------------------------
diff --git a/extras/generic.mr/generic.mr.api/src/main/groovy/mvm/rya/generic/mr/api/MRInfoContext.groovy b/extras/generic.mr/generic.mr.api/src/main/groovy/mvm/rya/generic/mr/api/MRInfoContext.groovy
deleted file mode 100644
index a2b92ec..0000000
--- a/extras/generic.mr/generic.mr.api/src/main/groovy/mvm/rya/generic/mr/api/MRInfoContext.groovy
+++ /dev/null
@@ -1,28 +0,0 @@
-package mvm.rya.generic.mr.api
-
-import org.apache.hadoop.conf.Configuration
-
-/**
- * Date: 12/5/12
- * Time: 1:32 PM
- */
-class MRInfoContext {
-
-    private static currentMrInfo;
-
-    public static MRInfo currentMRInfo() {
-        return currentMRInfo(null);
-    }
-
-    public static MRInfo currentMRInfo(Configuration config) {
-        if (currentMrInfo == null) {
-            def iter = ServiceLoader.load(MRInfo.class, Thread.currentThread().getContextClassLoader()).iterator()
-            if (iter.hasNext()) {
-                currentMrInfo = iter.next()
-                if (config != null) currentMrInfo.setConf(config)
-            }
-        }
-        return currentMrInfo
-    }
-
-}
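
Combined with the META-INF/services registration in the accumulo module above (and the cloudbase one below), this is plain java.util.ServiceLoader discovery: whichever backend jar sat on the classpath supplied the MRInfo implementation. A hypothetical driver, with placeholder credentials and table names:

    import mvm.rya.generic.mr.api.MRInfo;
    import mvm.rya.generic.mr.api.MRInfoContext;
    import org.apache.hadoop.conf.Configuration;
    import org.apache.hadoop.mapreduce.Job;

    public class GenericMRDriver {
        public static void main(String[] args) throws Exception {
            Configuration conf = new Configuration();
            conf.set(MRInfo.USERNAME, "root");      // placeholder credentials
            conf.set(MRInfo.PASSWORD, "secret");
            conf.set(MRInfo.INSTANCE, "dev");
            conf.set(MRInfo.MOCK, "true");          // or set MRInfo.ZOOKEEPERS for a live cluster
            Job job = new Job(conf, "generic-mr-example");

            // ServiceLoader resolves AccumuloMRInfo or CloudbaseMRInfo at runtime.
            MRInfo mrInfo = MRInfoContext.currentMRInfo(conf);
            mrInfo.initMRJob(job, "rya_spo", "rya_spo_out", new String[]{});
            job.waitForCompletion(true);
        }
    }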

http://git-wip-us.apache.org/repos/asf/incubator-rya/blob/80faf06d/extras/generic.mr/generic.mr.cloudbase/pom.xml
----------------------------------------------------------------------
diff --git a/extras/generic.mr/generic.mr.cloudbase/pom.xml b/extras/generic.mr/generic.mr.cloudbase/pom.xml
deleted file mode 100644
index 8aec0fc..0000000
--- a/extras/generic.mr/generic.mr.cloudbase/pom.xml
+++ /dev/null
@@ -1,33 +0,0 @@
-<?xml version="1.0" encoding="UTF-8"?>
-<project xmlns="http://maven.apache.org/POM/4.0.0" xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance" xsi:schemaLocation="http://maven.apache.org/POM/4.0.0 http://maven.apache.org/xsd/maven-4.0.0.xsd">
-    <parent>
-        <groupId>mvm.rya</groupId>
-        <artifactId>generic.mr</artifactId>
-        <version>3.2.10-SNAPSHOT</version>
-    </parent>
-    <modelVersion>4.0.0</modelVersion>
-
-    <artifactId>generic.mr.cloudbase</artifactId>
-    <name>${project.groupId}.${project.artifactId}</name>
-    <dependencies>
-        <dependency>
-            <groupId>mvm.rya</groupId>
-            <artifactId>generic.mr.api</artifactId>
-            <version>${project.version}</version>
-        </dependency>
-        <dependency>
-            <groupId>cloudbase</groupId>
-            <artifactId>cloudbase-core</artifactId>
-        </dependency>
-    </dependencies>
-
-    <build>
-        <plugins>
-            <plugin>
-                <groupId>org.codehaus.gmaven</groupId>
-                <artifactId>gmaven-plugin</artifactId>
-            </plugin>
-        </plugins>
-    </build>
-
-</project>

http://git-wip-us.apache.org/repos/asf/incubator-rya/blob/80faf06d/extras/generic.mr/generic.mr.cloudbase/src/main/groovy/mvm/rya/generic/mr/cloudbase/CloudbaseMRInfo.groovy
----------------------------------------------------------------------
diff --git a/extras/generic.mr/generic.mr.cloudbase/src/main/groovy/mvm/rya/generic/mr/cloudbase/CloudbaseMRInfo.groovy b/extras/generic.mr/generic.mr.cloudbase/src/main/groovy/mvm/rya/generic/mr/cloudbase/CloudbaseMRInfo.groovy
deleted file mode 100644
index 7608fb7..0000000
--- a/extras/generic.mr/generic.mr.cloudbase/src/main/groovy/mvm/rya/generic/mr/cloudbase/CloudbaseMRInfo.groovy
+++ /dev/null
@@ -1,146 +0,0 @@
-package mvm.rya.generic.mr.cloudbase
-
-import cloudbase.core.client.mapreduce.CloudbaseInputFormat
-import cloudbase.core.client.mapreduce.CloudbaseOutputFormat
-import cloudbase.core.security.Authorizations
-import mvm.rya.generic.mr.api.MRInfo
-import org.apache.hadoop.conf.Configuration
-import org.apache.hadoop.io.Text
-import org.apache.hadoop.mapreduce.Job
-import cloudbase.core.data.Mutation
-import cloudbase.core.data.Key
-import cloudbase.core.data.Value
-import cloudbase.core.security.ColumnVisibility
-import cloudbase.core.client.mock.MockInstance
-import cloudbase.core.client.ZooKeeperInstance
-
-/**
- * Date: 12/3/12
- * Time: 9:00 AM
- */
-class CloudbaseMRInfo implements MRInfo {
-
-    def Configuration conf
-    def connector;
-
-    @Override
-    void initMRJob(Job job, String table, String outtable, String[] auths) {
-        Configuration conf = job.configuration
-        String username = conf.get(USERNAME)
-        String password = conf.get(PASSWORD)
-        String instance = conf.get(INSTANCE)
-        String zookeepers = conf.get(ZOOKEEPERS)
-        String mock = conf.get(MOCK)
-
-        //input
-        if (Boolean.parseBoolean(mock)) {
-            CloudbaseInputFormat.setMockInstance(job, instance)
-//            CloudbaseOutputFormat.setMockInstance(conf, instance)
-        } else if (zookeepers != null) {
-            CloudbaseInputFormat.setZooKeeperInstance(job, instance, zookeepers)
-            CloudbaseOutputFormat.setZooKeeperInstance(job, instance, zookeepers)
-        } else {
-            throw new IllegalArgumentException("Must specify either mock or zookeepers");
-        }
-
-        CloudbaseInputFormat.setInputInfo(job, username, password.getBytes(), table, new Authorizations(auths))
-        job.setInputFormatClass(CloudbaseInputFormat.class);
-
-        // OUTPUT
-        job.setOutputFormatClass(CloudbaseOutputFormat.class);
-        job.setOutputKeyClass(Text.class);
-        job.setOutputValueClass(Mutation.class);
-        CloudbaseOutputFormat.setOutputInfo(job, username, password.getBytes(), true, outtable);
-    }
-
-    @Override
-    def key(byte[] data) {
-        Key key = new Key();
-        key.readFields(new DataInputStream(new ByteArrayInputStream(data)))
-        return key
-    }
-
-    @Override
-    def key(String row, String cf, String cq, String cv, long timestamp) {
-        return new Key(row, cf, cq, cv, timestamp)
-    }
-
-    @Override
-    def value(byte[] data) {
-        return new Value(data)
-    }
-
-    @Override
-    def columnVisibility(String cv) {
-        return new ColumnVisibility(cv)
-    }
-
-    @Override
-    def mutation(String row, String cf, String cq, String cv, long timestamp, byte[] val) {
-        Mutation mutation = new Mutation(row);
-        mutation.put(cf, cq, columnVisibility(cv), timestamp, value(val))
-        return mutation
-    }
-
-    @Override
-    def instance() {
-        assert conf != null
-
-        String instance_str = conf.get(INSTANCE)
-        String zookeepers = conf.get(ZOOKEEPERS)
-        String mock = conf.get(MOCK)
-        if (Boolean.parseBoolean(mock)) {
-            return new MockInstance(instance_str)
-        } else if (zookeepers != null) {
-            return new ZooKeeperInstance(instance_str, zookeepers)
-        } else {
-            throw new IllegalArgumentException("Must specify either mock or zookeepers");
-        }
-    }
-
-    @Override
-    def connector(def instance) {
-        if (connector != null) return connector
-
-        String username = conf.get(USERNAME)
-        String password = conf.get(PASSWORD)
-        if (instance == null)
-            instance = instance()
-        connector = instance.getConnector(username, password)
-        return connector
-    }
-
-    @Override
-    def void writeMutations(def connector, String tableName, Iterator mutations) {
-        def bw = connector.createBatchWriter(tableName, 10000l, 10000l, 4);
-        mutations.each { m ->
-            bw.addMutation(m)
-        }
-        bw.flush()
-        bw.close()
-    }
-
-    @Override
-    def scanner(def connector, String tableName, String[] auths) {
-        return connector.createScanner(tableName, new Authorizations(auths))
-    }
-
-    @Override
-    def batchScanner(def connector, String tableName, String[] auths, int numThreads) {
-        return connector.createBatchScanner(tableName, new Authorizations(auths), numThreads)
-    }
-
-    @Override
-    def range(def startKey, def endKey) {
-        assert startKey != null
-
-        if (endKey != null)
-            return new cloudbase.core.data.Range(startKey, endKey)
-        return new cloudbase.core.data.Range(startKey)
-    }
-
-    @Override
-    def authorizations(String[] auths) {
-        return new Authorizations(auths)
-    }
-}

http://git-wip-us.apache.org/repos/asf/incubator-rya/blob/80faf06d/extras/generic.mr/generic.mr.cloudbase/src/main/resources/META-INF/services/mvm.rya.generic.mr.api.MRInfo
----------------------------------------------------------------------
diff --git a/extras/generic.mr/generic.mr.cloudbase/src/main/resources/META-INF/services/mvm.rya.generic.mr.api.MRInfo b/extras/generic.mr/generic.mr.cloudbase/src/main/resources/META-INF/services/mvm.rya.generic.mr.api.MRInfo
deleted file mode 100644
index 728f9dd..0000000
--- a/extras/generic.mr/generic.mr.cloudbase/src/main/resources/META-INF/services/mvm.rya.generic.mr.api.MRInfo
+++ /dev/null
@@ -1 +0,0 @@
-mvm.rya.generic.mr.cloudbase.CloudbaseMRInfo
\ No newline at end of file

http://git-wip-us.apache.org/repos/asf/incubator-rya/blob/80faf06d/extras/generic.mr/pom.xml
----------------------------------------------------------------------
diff --git a/extras/generic.mr/pom.xml b/extras/generic.mr/pom.xml
deleted file mode 100644
index 99a541e..0000000
--- a/extras/generic.mr/pom.xml
+++ /dev/null
@@ -1,27 +0,0 @@
-<?xml version="1.0" encoding="utf-8"?>
-<project xmlns="http://maven.apache.org/POM/4.0.0" xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance" xsi:schemaLocation="http://maven.apache.org/POM/4.0.0 http://maven.apache.org/xsd/maven-4.0.0.xsd">
-
-    <parent>
-        <groupId>mvm.rya</groupId>
-        <artifactId>rya.extras</artifactId>
-        <version>3.2.10-SNAPSHOT</version>
-    </parent>
-    <modelVersion>4.0.0</modelVersion>
-
-    <artifactId>generic.mr</artifactId>
-    <name>${project.groupId}.${project.artifactId}</name>
-    <packaging>pom</packaging>
-    <modules>
-        <module>generic.mr.api</module>
-        <module>generic.mr.accumulo</module>
-    </modules>
-
-    <profiles>
-        <profile>
-            <id>cloudbase</id>
-            <modules>
-                <module>generic.mr.cloudbase</module>
-            </modules>
-        </profile>
-    </profiles>
-</project>

http://git-wip-us.apache.org/repos/asf/incubator-rya/blob/80faf06d/extras/indexing/pom.xml
----------------------------------------------------------------------
diff --git a/extras/indexing/pom.xml b/extras/indexing/pom.xml
index 5e6c7b7..f484916 100644
--- a/extras/indexing/pom.xml
+++ b/extras/indexing/pom.xml
@@ -1,92 +1,85 @@
+<?xml version='1.0'?>
+
+<!--
+Licensed to the Apache Software Foundation (ASF) under one
+or more contributor license agreements.  See the NOTICE file
+distributed with this work for additional information
+regarding copyright ownership.  The ASF licenses this file
+to you under the Apache License, Version 2.0 (the
+"License"); you may not use this file except in compliance
+with the License.  You may obtain a copy of the License at
+
+  http://www.apache.org/licenses/LICENSE-2.0
+
+Unless required by applicable law or agreed to in writing,
+software distributed under the License is distributed on an
+"AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+KIND, either express or implied.  See the License for the
+specific language governing permissions and limitations
+under the License.
+-->
+
 <project xmlns="http://maven.apache.org/POM/4.0.0" xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance" xsi:schemaLocation="http://maven.apache.org/POM/4.0.0 http://maven.apache.org/xsd/maven-4.0.0.xsd">
+    <modelVersion>4.0.0</modelVersion>
     <parent>
-        <groupId>mvm.rya</groupId>
+        <groupId>org.apache.rya</groupId>
         <artifactId>rya.extras</artifactId>
         <version>3.2.10-SNAPSHOT</version>
     </parent>
 
-    <modelVersion>4.0.0</modelVersion>
-    <name>${project.groupId}.${project.artifactId}</name>
     <artifactId>rya.indexing</artifactId>
+    <name>Apache Rya Secondary Indexing</name>
 
     <dependencies>
         <dependency>
-            <groupId>mvm.rya</groupId>
-            <artifactId>rya.sail.impl</artifactId>
+            <groupId>org.apache.rya</groupId>
+            <artifactId>rya.sail</artifactId>
             <exclusions>
-              <exclusion>
-                <artifactId>hsqldb</artifactId>
-                <groupId>hsqldb</groupId>
-              </exclusion>
+                <exclusion>
+                    <artifactId>hsqldb</artifactId>
+                    <groupId>hsqldb</groupId>
+                </exclusion>
             </exclusions>
         </dependency>
+
         <dependency>
-            <groupId>junit</groupId>
-            <artifactId>junit</artifactId>
-            <scope>test</scope>
-        </dependency>
-        <dependency>
-            <groupId>mvm.rya</groupId>
+            <groupId>org.apache.rya</groupId>
             <artifactId>accumulo.rya</artifactId>
         </dependency>
         <dependency>
-            <groupId>mvm.rya</groupId>
+            <groupId>org.apache.rya</groupId>
             <artifactId>mongodb.rya</artifactId>
         </dependency>
         <dependency>
-            <groupId>org.apache.lucene</groupId>
-            <artifactId>lucene-core</artifactId>
-            <version>3.6.2</version>
+            <groupId>org.apache.rya</groupId>
+            <artifactId>rya.prospector</artifactId>
         </dependency>
+
+        <!-- Free Text Indexing -->
         <dependency>
             <groupId>org.apache.lucene</groupId>
-            <artifactId>lucene-analyzers</artifactId>
-            <version>3.6.2</version>
+            <artifactId>lucene-core</artifactId>
         </dependency>
         <dependency>
             <groupId>org.apache.lucene</groupId>
             <artifactId>lucene-analyzers</artifactId>
-            <version>3.6.2</version>
         </dependency>
 
         <dependency>
             <groupId>commons-codec</groupId>
             <artifactId>commons-codec</artifactId>
-            <version>1.4</version>
-        </dependency>
-
-        <!-- I was having issues with hadoop conf, but adding xerces and xalan fixed it -->
-
-        <dependency>
-            <groupId>xerces</groupId>
-            <artifactId>xercesImpl</artifactId>
-            <version>2.9.1</version>
-        </dependency>
-        <dependency>
-            <groupId>xalan</groupId>
-            <artifactId>xalan</artifactId>
-            <version>2.7.1</version>
         </dependency>
 
         <!-- Geo Indexing -->
         <dependency>
             <groupId>org.locationtech.geomesa</groupId>
             <artifactId>geomesa-accumulo-datastore</artifactId>
-            <version>${geomesa.version}</version>
-        </dependency>
-        <dependency>
-            <groupId>mvm.rya</groupId>
-            <artifactId>rya.prospector</artifactId>
         </dependency>
 
-      <dependency>
-        <groupId>org.apache.accumulo</groupId>
-        <artifactId>accumulo-core</artifactId>
-      </dependency>
-         <dependency>
-            <groupId>org.mongodb</groupId>
-            <artifactId>mongo-java-driver</artifactId>
-            <version>2.13.0-rc0</version>
+        <dependency>
+            <groupId>junit</groupId>
+            <artifactId>junit</artifactId>
+            <scope>test</scope>
         </dependency>
     </dependencies>
     <build>
@@ -97,36 +90,30 @@
                 <executions>
                     <execution>
                         <configuration>
-                        <shadedArtifactAttached>true</shadedArtifactAttached>
+                            <shadedArtifactAttached>true</shadedArtifactAttached>
                             <shadedClassifierName>map-reduce</shadedClassifierName>
                             <transformers>
                                 <transformer implementation="org.apache.maven.plugins.shade.resource.ServicesResourceTransformer" />
                             </transformers>
                         </configuration>
                     </execution>
-                </executions>
-            </plugin>
-            <plugin>
-                <groupId>org.apache.maven.plugins</groupId>
-                <artifactId>maven-shade-plugin</artifactId>
-                <executions>
-                     <execution>
+                    <execution>
                         <id>accumulo-server</id>
                         <phase>package</phase>
                         <goals>
                             <goal>shade</goal>
                         </goals>
                         <configuration>
-                        <shadedArtifactAttached>true</shadedArtifactAttached>
+                            <shadedArtifactAttached>true</shadedArtifactAttached>
                             <shadedClassifierName>accumulo-server</shadedClassifierName>
                             <artifactSet>
                                 <excludes>
-                                  <exclude>org.locationtech.geomesa:*</exclude>
-                                  <exclude>scala:*</exclude>
-                                  <exclude>org.apache.accumulo:*</exclude>
-                                  <exclude>org.apache.thrift:*</exclude>
-                                  <exclude>org.apache.hadoop:*</exclude>
-                                  <exclude>org.apache.zookeeper:*</exclude>
+                                    <exclude>org.locationtech.geomesa:*</exclude>
+                                    <exclude>scala:*</exclude>
+                                    <exclude>org.apache.accumulo:*</exclude>
+                                    <exclude>org.apache.thrift:*</exclude>
+                                    <exclude>org.apache.hadoop:*</exclude>
+                                    <exclude>org.apache.zookeeper:*</exclude>
                                 </excludes>
                             </artifactSet>
                             <transformers>
@@ -138,64 +125,4 @@
             </plugin>
         </plugins>
     </build>
-    <profiles>
-        <profile>
-            <id>accumulo</id>
-            <activation>
-                <activeByDefault>true</activeByDefault>
-            </activation>
-            <dependencies>
-                <dependency>
-                    <groupId>org.apache.accumulo</groupId>
-                    <artifactId>accumulo-core</artifactId>
-                    <optional>true</optional>
-                </dependency>
-                <dependency>
-                    <groupId>mvm.rya</groupId>
-                    <artifactId>accumulo.iterators</artifactId>
-                    <optional>true</optional>
-                </dependency>
-            </dependencies>
-        </profile>
-        <profile>
-            <id>cloudbase</id>
-            <activation>
-                <activeByDefault>false</activeByDefault>
-            </activation>
-            <dependencies>
-                <dependency>
-                    <groupId>com.texeltek</groupId>
-                    <artifactId>accumulo-cloudbase-shim</artifactId>
-                    <optional>true</optional>
-                </dependency>
-                <dependency>
-                    <groupId>mvm.rya</groupId>
-                    <artifactId>cloudbase.iterators</artifactId>
-                    <optional>true</optional>
-                </dependency>
-            </dependencies>
-        </profile>
-        <profile>
-            <id>mr</id>
-            <activation>
-                <activeByDefault>true</activeByDefault>
-            </activation>
-        </profile>
-    </profiles>
-
-
-    <repositories>
-        <repository>
-            <id>cloudera</id>
-            <url>https://repository.cloudera.com/artifactory/cloudera-repos/</url>
-        </repository>
-        <repository>
-            <id>public.opensahara.com</id>
-            <url>http://dev.opensahara.com/nexus/content/groups/public/</url>
-        </repository>
-        <repository>
-            <id>geotools</id>
-            <url>http://download.osgeo.org/webdav/geotools/</url>
-        </repository>
-    </repositories>
-</project>
\ No newline at end of file
+</project>

http://git-wip-us.apache.org/repos/asf/incubator-rya/blob/80faf06d/extras/indexing/src/main/java/mvm/rya/accumulo/documentIndex/DocIndexIteratorUtil.java
----------------------------------------------------------------------
diff --git a/extras/indexing/src/main/java/mvm/rya/accumulo/documentIndex/DocIndexIteratorUtil.java b/extras/indexing/src/main/java/mvm/rya/accumulo/documentIndex/DocIndexIteratorUtil.java
index b6063ca..fefd651 100644
--- a/extras/indexing/src/main/java/mvm/rya/accumulo/documentIndex/DocIndexIteratorUtil.java
+++ b/extras/indexing/src/main/java/mvm/rya/accumulo/documentIndex/DocIndexIteratorUtil.java
@@ -1,5 +1,25 @@
 package mvm.rya.accumulo.documentIndex;
 
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ * 
+ *   http://www.apache.org/licenses/LICENSE-2.0
+ * 
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied.  See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+
+
 public class DocIndexIteratorUtil {
 
     



http://git-wip-us.apache.org/repos/asf/incubator-rya/blob/80faf06d/partition/partition.rdf/src/main/java/mvm/mmrts/rdf/partition/PartitionSail.java
----------------------------------------------------------------------
diff --git a/partition/partition.rdf/src/main/java/mvm/mmrts/rdf/partition/PartitionSail.java b/partition/partition.rdf/src/main/java/mvm/mmrts/rdf/partition/PartitionSail.java
deleted file mode 100644
index 07eb411..0000000
--- a/partition/partition.rdf/src/main/java/mvm/mmrts/rdf/partition/PartitionSail.java
+++ /dev/null
@@ -1,122 +0,0 @@
-package mvm.mmrts.rdf.partition;
-
-import cloudbase.core.client.CBException;
-import cloudbase.core.client.CBSecurityException;
-import cloudbase.core.client.Connector;
-import cloudbase.core.client.ZooKeeperInstance;
-import mvm.mmrts.rdf.partition.converter.ContextColVisConverter;
-import mvm.mmrts.rdf.partition.shard.DateHashModShardValueGenerator;
-import mvm.mmrts.rdf.partition.shard.ShardValueGenerator;
-import org.apache.hadoop.conf.Configuration;
-import org.openrdf.model.ValueFactory;
-import org.openrdf.model.impl.ValueFactoryImpl;
-import org.openrdf.sail.SailConnection;
-import org.openrdf.sail.SailException;
-import org.openrdf.sail.helpers.SailBase;
-
-/**
- * Class PartitionSail
- * Date: Jul 6, 2011
- * Time: 11:40:52 AM
- */
-public class PartitionSail extends SailBase {
-
-    protected Connector connector;
-
-    protected String table;
-    //MMRTS-148
-    protected String shardTable;
-
-    protected ShardValueGenerator generator = new DateHashModShardValueGenerator();
-
-    protected Configuration conf = new Configuration();
-
-    protected ContextColVisConverter contextColVisConverter;
-
-    public PartitionSail(Connector connector, String table) {
-        this(connector, table, table, null);
-    }
-
-    public PartitionSail(Connector connector, String table, String shardTable) {
-        this(connector, table, shardTable, null);
-    }
-
-    public PartitionSail(String instance, String zk, String user, String password, String table)
-            throws CBSecurityException, CBException {
-        this(instance, zk, user, password, table, (ShardValueGenerator) null);
-    }
-
-    public PartitionSail(String instance, String zk, String user, String password, String table, ShardValueGenerator generator)
-            throws CBSecurityException, CBException {
-        this(new ZooKeeperInstance(instance, zk).getConnector(user, password.getBytes()), table, table, generator);
-    }
-
-    public PartitionSail(String instance, String zk, String user, String password, String table, String shardTable)
-            throws CBSecurityException, CBException {
-        this(instance, zk, user, password, table, shardTable, null);
-    }
-
-    public PartitionSail(String instance, String zk, String user, String password, String table, String shardTable, ShardValueGenerator generator)
-            throws CBSecurityException, CBException {
-        this(new ZooKeeperInstance(instance, zk).getConnector(user, password.getBytes()), table, shardTable, generator);
-    }
-
-    public PartitionSail(Connector connector, String table, ShardValueGenerator generator) {
-        this(connector, table, table, generator);
-    }
-
-    public PartitionSail(Connector connector, String table, String shardTable, ShardValueGenerator generator) {
-        this.connector = connector;
-        this.table = table;
-        this.shardTable = shardTable;
-        if (generator != null)
-            this.generator = generator;
-    }
-
-    @Override
-    protected void shutDownInternal() throws SailException {
-    }
-
-    @Override
-    protected SailConnection getConnectionInternal() throws SailException {
-        return new PartitionConnection(this);
-    }
-
-    @Override
-    public boolean isWritable() throws SailException {
-        return true;
-    }
-
-    @Override
-    public ValueFactory getValueFactory() {
-        return ValueFactoryImpl.getInstance();
-    }
-
-    public Configuration getConf() {
-        return conf;
-    }
-
-    public Connector getConnector() {
-        return connector;
-    }
-
-    public ShardValueGenerator getGenerator() {
-        return generator;
-    }
-
-    public String getTable() {
-        return table;
-    }
-
-    public String getShardTable() {
-        return shardTable;
-    }
-
-    public ContextColVisConverter getContextColVisConverter() {
-        return contextColVisConverter;
-    }
-
-    public void setContextColVisConverter(ContextColVisConverter contextColVisConverter) {
-        this.contextColVisConverter = contextColVisConverter;
-    }
-}

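For context, the PartitionSail removed above was the entry point for the partitioned store; a minimal usage sketch, assuming a reachable Cloudbase/ZooKeeper deployment (the instance name, ZooKeeper host, credentials and table name below are placeholders):

    import mvm.mmrts.rdf.partition.PartitionSail;
    import org.openrdf.repository.RepositoryConnection;
    import org.openrdf.repository.sail.SailRepository;

    public class PartitionSailSketch {
        public static void main(String[] args) throws Exception {
            // The Connector is built internally via ZooKeeperInstance.
            PartitionSail sail = new PartitionSail(
                    "cbInstance", "zoo1:2181", "user", "secret", "rdfPartition");
            SailRepository repo = new SailRepository(sail);
            repo.initialize();
            RepositoryConnection conn = repo.getConnection();
            try {
                // load and query RDF here
            } finally {
                conn.close();
                repo.shutDown();
            }
        }
    }
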
http://git-wip-us.apache.org/repos/asf/incubator-rya/blob/80faf06d/partition/partition.rdf/src/main/java/mvm/mmrts/rdf/partition/PartitionTripleSource.java
----------------------------------------------------------------------
diff --git a/partition/partition.rdf/src/main/java/mvm/mmrts/rdf/partition/PartitionTripleSource.java b/partition/partition.rdf/src/main/java/mvm/mmrts/rdf/partition/PartitionTripleSource.java
deleted file mode 100644
index ca7772b..0000000
--- a/partition/partition.rdf/src/main/java/mvm/mmrts/rdf/partition/PartitionTripleSource.java
+++ /dev/null
@@ -1,40 +0,0 @@
-package mvm.mmrts.rdf.partition;
-
-import info.aduna.iteration.CloseableIteration;
-import mvm.mmrts.rdf.partition.query.evaluation.ShardSubjectLookupStatementIterator;
-import mvm.mmrts.rdf.partition.query.operators.ShardSubjectLookup;
-import org.apache.hadoop.conf.Configuration;
-import org.openrdf.model.*;
-import org.openrdf.query.BindingSet;
-import org.openrdf.query.QueryEvaluationException;
-import org.openrdf.query.algebra.evaluation.TripleSource;
-
-/**
- * Class PartitionTripleSource
- * Date: Jul 18, 2011
- * Time: 10:45:06 AM
- */
-public class PartitionTripleSource implements TripleSource {
-    private PartitionSail sail;
-    private Configuration configuration;
-
-    public PartitionTripleSource(PartitionSail sail, Configuration configuration) {
-        this.sail = sail;
-        this.configuration = configuration;
-    }
-
-    @Override
-    public CloseableIteration<? extends Statement, QueryEvaluationException> getStatements(Resource resource, URI uri, Value value, Resource... resources) throws QueryEvaluationException {
-        return null;  
-    }
-
-    public CloseableIteration<BindingSet, QueryEvaluationException> getStatements(ShardSubjectLookup lookup,
-                                                                                           BindingSet bindings, Resource... contexts) throws QueryEvaluationException {
-        return new ShardSubjectLookupStatementIterator(sail, lookup, bindings, configuration);
-    }
-
-    @Override
-    public ValueFactory getValueFactory() {
-        return PartitionConstants.VALUE_FACTORY;
-    }
-}

http://git-wip-us.apache.org/repos/asf/incubator-rya/blob/80faf06d/partition/partition.rdf/src/main/java/mvm/mmrts/rdf/partition/converter/ContextColVisConverter.java
----------------------------------------------------------------------
diff --git a/partition/partition.rdf/src/main/java/mvm/mmrts/rdf/partition/converter/ContextColVisConverter.java b/partition/partition.rdf/src/main/java/mvm/mmrts/rdf/partition/converter/ContextColVisConverter.java
deleted file mode 100644
index f462e9a..0000000
--- a/partition/partition.rdf/src/main/java/mvm/mmrts/rdf/partition/converter/ContextColVisConverter.java
+++ /dev/null
@@ -1,14 +0,0 @@
-package mvm.mmrts.rdf.partition.converter;
-
-import cloudbase.core.security.ColumnVisibility;
-import org.openrdf.model.Resource;
-
-/**
- * Interface ContextColVisConverter
- * Date: Aug 5, 2011
- * Time: 7:35:40 AM
- */
-public interface ContextColVisConverter {
-
-    public ColumnVisibility convertContexts(Resource... contexts);
-}

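ContextColVisConverter was the hook for mapping statement contexts onto Cloudbase column visibilities. A sketch of one possible implementation, assuming Cloudbase's ColumnVisibility accepts a string expression as Accumulo's does; the fragment-to-expression rule here is purely illustrative:

    import cloudbase.core.security.ColumnVisibility;
    import org.openrdf.model.Resource;

    public class FragmentColVisConverter implements ContextColVisConverter {
        @Override
        public ColumnVisibility convertContexts(Resource... contexts) {
            if (contexts == null || contexts.length == 0) {
                return new ColumnVisibility(); // empty expression: unrestricted
            }
            // e.g. <http://example.org/graphs#secret> -> visibility "secret"
            String uri = contexts[0].stringValue();
            return new ColumnVisibility(uri.substring(uri.lastIndexOf('#') + 1));
        }
    }
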
http://git-wip-us.apache.org/repos/asf/incubator-rya/blob/80faf06d/partition/partition.rdf/src/main/java/mvm/mmrts/rdf/partition/iterators/NamespaceIterator.java
----------------------------------------------------------------------
diff --git a/partition/partition.rdf/src/main/java/mvm/mmrts/rdf/partition/iterators/NamespaceIterator.java b/partition/partition.rdf/src/main/java/mvm/mmrts/rdf/partition/iterators/NamespaceIterator.java
deleted file mode 100644
index fc007e9..0000000
--- a/partition/partition.rdf/src/main/java/mvm/mmrts/rdf/partition/iterators/NamespaceIterator.java
+++ /dev/null
@@ -1,93 +0,0 @@
-package mvm.mmrts.rdf.partition.iterators;
-
-import cloudbase.core.client.Connector;
-import cloudbase.core.client.Scanner;
-import cloudbase.core.client.TableNotFoundException;
-import cloudbase.core.data.Key;
-import cloudbase.core.data.Value;
-import info.aduna.iteration.CloseableIteration;
-import org.openrdf.model.Namespace;
-import org.openrdf.model.impl.NamespaceImpl;
-import org.openrdf.sail.SailException;
-
-import java.io.IOError;
-import java.util.Iterator;
-import java.util.Map.Entry;
-
-import static mvm.mmrts.rdf.partition.PartitionConstants.*;
-
-//TODO: Combine with CloudbaseStoreContextTableIterator4
-public class NamespaceIterator implements
-        CloseableIteration<Namespace, SailException> {
-
-    private boolean open = false;
-    private Iterator<Entry<Key, Value>> result;
-
-    public NamespaceIterator(Connector connector, String table) throws SailException {
-        initialize(connector, table);
-        open = true;
-    }
-
-    protected void initialize(Connector connector, String table) throws SailException {
-        try {
-            Scanner scanner = connector.createScanner(table,
-                    ALL_AUTHORIZATIONS);
-            scanner.fetchColumnFamily(NAMESPACE);
-            result = scanner.iterator();
-        } catch (TableNotFoundException e) {
-            throw new SailException("Exception occurred in Namespace Iterator",
-                    e);
-        }
-    }
-
-    @Override
-    public void close() throws SailException {
-        try {
-            verifyIsOpen();
-            open = false;
-        } catch (IOError e) {
-            throw new SailException(e);
-        }
-    }
-
-    public void verifyIsOpen() throws SailException {
-        if (!open) {
-            throw new SailException("Iterator not open");
-        }
-    }
-
-    @Override
-    public boolean hasNext() throws SailException {
-        verifyIsOpen();
-        return result != null && result.hasNext();
-    }
-
-    @Override
-    public Namespace next() throws SailException {
-        if (hasNext()) {
-            Namespace namespace = getNamespace(result);
-            return namespace;
-        }
-        return null;
-    }
-
-    public static Namespace getNamespace(Iterator<Entry<Key, Value>> rowResults) {
-        for (; rowResults.hasNext();) {
-            Entry<Key, Value> next = rowResults.next();
-            Key key = next.getKey();
-            String cq = key.getColumnQualifier().toString();
-            return new NamespaceImpl(key.getRow().toString(), cq.toString());
-        }
-
-        return null;
-    }
-
-    @Override
-    public void remove() throws SailException {
-        next();
-    }
-
-    public boolean isOpen() {
-        return open;
-    }
-}

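Iteration over stored namespaces followed the usual CloseableIteration contract, with the prefix read from the row and the name from the column qualifier. A brief sketch, assuming a Connector and table name are already in hand (SailException handling elided):

    NamespaceIterator namespaces = new NamespaceIterator(connector, "rdfPartition");
    try {
        while (namespaces.hasNext()) {
            org.openrdf.model.Namespace ns = namespaces.next();
            System.out.println(ns.getPrefix() + " -> " + ns.getName());
        }
    } finally {
        namespaces.close();
    }
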
http://git-wip-us.apache.org/repos/asf/incubator-rya/blob/80faf06d/partition/partition.rdf/src/main/java/mvm/mmrts/rdf/partition/query/evaluation/FilterTimeIndexVisitor.java
----------------------------------------------------------------------
diff --git a/partition/partition.rdf/src/main/java/mvm/mmrts/rdf/partition/query/evaluation/FilterTimeIndexVisitor.java b/partition/partition.rdf/src/main/java/mvm/mmrts/rdf/partition/query/evaluation/FilterTimeIndexVisitor.java
deleted file mode 100644
index 5964ea0..0000000
--- a/partition/partition.rdf/src/main/java/mvm/mmrts/rdf/partition/query/evaluation/FilterTimeIndexVisitor.java
+++ /dev/null
@@ -1,113 +0,0 @@
-package mvm.mmrts.rdf.partition.query.evaluation;
-
-import org.apache.hadoop.conf.Configuration;
-import org.openrdf.model.impl.BooleanLiteralImpl;
-import org.openrdf.model.impl.URIImpl;
-import org.openrdf.query.QueryEvaluationException;
-import org.openrdf.query.algebra.*;
-import org.openrdf.query.algebra.helpers.QueryModelVisitorBase;
-
-import java.util.List;
-
-import static mvm.mmrts.rdf.partition.PartitionConstants.*;
-
-/**
- * Class FilterTimeIndexVisitor
- * Date: Apr 11, 2011
- * Time: 10:16:15 PM
- */
-public class FilterTimeIndexVisitor extends QueryModelVisitorBase {
-
-    private Configuration conf;
-
-    public FilterTimeIndexVisitor(Configuration conf) {
-        this.conf = conf;
-    }
-
-    @Override
-    public void meet(Filter node) throws Exception {
-        super.meet(node);
-
-        ValueExpr arg = node.getCondition();
-        if (arg instanceof FunctionCall) {
-            FunctionCall fc = (FunctionCall) arg;
-            if (SHARDRANGE.stringValue().equals(fc.getURI())) {
-                List<ValueExpr> valueExprs = fc.getArgs();
-                if (valueExprs.size() != 3) {
-                    throw new QueryEvaluationException("mvm:shardRange must have 3 parameters: subject to run time index on, startTime(ms), endTime(ms)");
-                }
-                ValueExpr subj = valueExprs.get(0);
-                String subj_s = null;
-                if (subj instanceof Var) {
-                    subj_s = ((Var) subj).getName();
-                } else if (subj instanceof ValueConstant) {
-                    subj_s = ((ValueConstant) subj).getValue().stringValue();
-                }
-                if (subj_s == null)
-                    return; //no changes, need to figure out what shard lookup to add this time predicate to
-
-                String startTime = ((ValueConstant) valueExprs.get(1)).getValue().stringValue();
-                String endTime = ((ValueConstant) valueExprs.get(2)).getValue().stringValue();
-
-                this.conf.set(subj_s + "." + SHARDRANGE_BINDING, "true");
-                this.conf.set(subj_s + "." + SHARDRANGE_START, startTime);
-                this.conf.set(subj_s + "." + SHARDRANGE_END, endTime);
-
-                node.setCondition(new ValueConstant(BooleanLiteralImpl.TRUE));
-            }
-            if (TIMERANGE.stringValue().equals(fc.getURI())) {
-                List<ValueExpr> valueExprs = fc.getArgs();
-                if (valueExprs.size() != 4 && valueExprs.size() != 5) {
-                    throw new QueryEvaluationException("mvm:timeRange must have 4/5 parameters: subject to run time index on, time uri to index, startTime, endTime, time type(XMLDATETIME, TIMESTAMP)");
-                }
-
-                ValueExpr subj = valueExprs.get(0);
-                String subj_s = null;
-                if (subj instanceof Var) {
-                    subj_s = ((Var) subj).getName();
-                } else if (subj instanceof ValueConstant) {
-                    subj_s = ((ValueConstant) subj).getValue().stringValue();
-                }
-                if (subj_s == null)
-                    return; //no changes, need to figure out what shard lookup to add this time predicate to
-
-                ValueConstant timeUri_s = (ValueConstant) valueExprs.get(1);
-                URIImpl timeUri = new URIImpl(timeUri_s.getValue().stringValue());
-                String startTime = ((ValueConstant) valueExprs.get(2)).getValue().stringValue();
-                String endTime = ((ValueConstant) valueExprs.get(3)).getValue().stringValue();
-                TimeType timeType = TimeType.XMLDATETIME;
-                if (valueExprs.size() > 4)
-                    timeType = TimeType.valueOf(((ValueConstant) valueExprs.get(4)).getValue().stringValue());
-
-
-                this.conf.set(subj_s + "." + TIME_PREDICATE, timeUri.stringValue());
-                this.conf.set(subj_s + "." + START_BINDING, startTime);
-                this.conf.set(subj_s + "." + END_BINDING, endTime);
-                this.conf.set(subj_s + "." + TIME_TYPE_PROP, timeType.name());
-
-                //not setting global times
-                //set global start-end times
-//                String startTime_global = conf.get(START_BINDING);
-//                String endTime_global = conf.get(END_BINDING);
-//                if (startTime_global != null) {
-//                    long startTime_l = Long.parseLong(startTime);
-//                    long startTime_lg = Long.parseLong(startTime_global);
-//                    if (startTime_l < startTime_lg)
-//                        conf.set(START_BINDING, startTime);
-//                } else
-//                    conf.set(START_BINDING, startTime);
-//
-//                if (endTime_global != null) {
-//                    long endTime_l = Long.parseLong(endTime);
-//                    long endTime_lg = Long.parseLong(endTime_global);
-//                    if (endTime_l > endTime_lg)
-//                        conf.set(END_BINDING, endTime);
-//                } else
-//                    conf.set(END_BINDING, endTime);
-
-                node.setCondition(new ValueConstant(BooleanLiteralImpl.TRUE));
-            }
-        }
-    }
-
-}

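The visitor above pattern-matches two magic FILTER functions, copies their arguments into subject-scoped Configuration keys, and then replaces the condition with TRUE so it drops out of normal evaluation. The function URIs live in PartitionConstants, which is not part of this diff, so the prefix below is an assumption; a sketch of the query shape it recognized:

    // Hypothetical prefix; the real URIs come from PartitionConstants.
    String query =
        "PREFIX mvm: <urn:mvm:functions#>\n" +
        "SELECT ?s ?o WHERE {\n" +
        "  ?s <http://example.org/pred> ?o .\n" +
        "  FILTER(mvm:timeRange(?s, <http://example.org/createdTime>,\n" +
        "         '1302570000000', '1302580000000', 'TIMESTAMP'))\n" +
        "}";
    // mvm:shardRange is analogous but takes exactly 3 arguments:
    // the subject plus start and end times in milliseconds.
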
http://git-wip-us.apache.org/repos/asf/incubator-rya/blob/80faf06d/partition/partition.rdf/src/main/java/mvm/mmrts/rdf/partition/query/evaluation/PartitionEvaluationStrategy.java
----------------------------------------------------------------------
diff --git a/partition/partition.rdf/src/main/java/mvm/mmrts/rdf/partition/query/evaluation/PartitionEvaluationStrategy.java b/partition/partition.rdf/src/main/java/mvm/mmrts/rdf/partition/query/evaluation/PartitionEvaluationStrategy.java
deleted file mode 100644
index bf898ff..0000000
--- a/partition/partition.rdf/src/main/java/mvm/mmrts/rdf/partition/query/evaluation/PartitionEvaluationStrategy.java
+++ /dev/null
@@ -1,70 +0,0 @@
-package mvm.mmrts.rdf.partition.query.evaluation;
-
-import cloudbase.core.client.Connector;
-import info.aduna.iteration.CloseableIteration;
-import mvm.mmrts.rdf.partition.PartitionTripleSource;
-import mvm.mmrts.rdf.partition.query.operators.ShardSubjectLookup;
-import org.openrdf.model.Resource;
-import org.openrdf.model.Statement;
-import org.openrdf.query.BindingSet;
-import org.openrdf.query.Dataset;
-import org.openrdf.query.QueryEvaluationException;
-import org.openrdf.query.algebra.QueryRoot;
-import org.openrdf.query.algebra.StatementPattern;
-import org.openrdf.query.algebra.TupleExpr;
-import org.openrdf.query.algebra.Var;
-import org.openrdf.query.algebra.evaluation.TripleSource;
-import org.openrdf.query.algebra.evaluation.impl.EvaluationStrategyImpl;
-
-import java.util.Map;
-
-/**
- * Class PartitionEvaluationStrategy
- * Date: Jul 14, 2011
- * Time: 4:10:03 PM
- */
-public class PartitionEvaluationStrategy extends EvaluationStrategyImpl {
-
-    public PartitionEvaluationStrategy(PartitionTripleSource tripleSource, Dataset dataset) {
-        super(tripleSource, dataset);
-    }
-
-    @Override
-    public CloseableIteration<BindingSet, QueryEvaluationException> evaluate(TupleExpr expr, BindingSet bindings) throws QueryEvaluationException {
-        if (expr instanceof QueryRoot) {
-            System.out.println(expr);
-        } else if (expr instanceof ShardSubjectLookup) {
-            return this.evaluate((ShardSubjectLookup) expr, bindings);
-        }
-        return super.evaluate(expr, bindings);
-    }
-
-    public CloseableIteration<BindingSet, QueryEvaluationException> evaluate(ShardSubjectLookup lookup, BindingSet bindings) throws QueryEvaluationException {
-        if (bindings.size() > 0) {
-            Var subjVar = lookup.getSubject();
-            if(bindings.hasBinding(subjVar.getName())){
-                subjVar.setValue(bindings.getValue(subjVar.getName()));
-            }
-            //populate the lookup
-            for (Map.Entry<Var, Var> predObj : lookup.getPredicateObjectPairs()) {
-                Var predVar = predObj.getKey();
-                Var objVar = predObj.getValue();
-
-                if(bindings.hasBinding(predVar.getName())) {
-                    predVar.setValue(bindings.getValue(predVar.getName()));
-                }
-                if(bindings.hasBinding(objVar.getName())) {
-                    objVar.setValue(bindings.getValue(objVar.getName()));
-                }
-            }
-        }
-        return ((PartitionTripleSource) tripleSource).getStatements(lookup, bindings, new Resource[0]);
-    }
-
-    @Override
-    public CloseableIteration<BindingSet, QueryEvaluationException> evaluate(StatementPattern sp, BindingSet bindings) throws QueryEvaluationException {
-        ShardSubjectLookup lookup = new ShardSubjectLookup(sp.getSubjectVar());
-        lookup.addPredicateObjectPair(sp.getPredicateVar(), sp.getObjectVar());
-        return this.evaluate((ShardSubjectLookup) lookup, bindings);
-    }
-}

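The wiring for this strategy is not part of the diff (it presumably lived in PartitionConnection), so the call order below is an assumption based on the constructors shown; psail, dataset, tupleExpr and bindings are taken as already in scope:

    PartitionTripleSource source =
            new PartitionTripleSource(psail, psail.getConf());
    PartitionEvaluationStrategy strategy =
            new PartitionEvaluationStrategy(source, dataset);
    // Dispatches ShardSubjectLookup nodes to the triple source, everything
    // else to the default openrdf evaluation.
    CloseableIteration<BindingSet, QueryEvaluationException> results =
            strategy.evaluate(tupleExpr, bindings);
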
http://git-wip-us.apache.org/repos/asf/incubator-rya/blob/80faf06d/partition/partition.rdf/src/main/java/mvm/mmrts/rdf/partition/query/evaluation/ShardSubjectLookupStatementIterator.java
----------------------------------------------------------------------
diff --git a/partition/partition.rdf/src/main/java/mvm/mmrts/rdf/partition/query/evaluation/ShardSubjectLookupStatementIterator.java b/partition/partition.rdf/src/main/java/mvm/mmrts/rdf/partition/query/evaluation/ShardSubjectLookupStatementIterator.java
deleted file mode 100644
index 097c52c..0000000
--- a/partition/partition.rdf/src/main/java/mvm/mmrts/rdf/partition/query/evaluation/ShardSubjectLookupStatementIterator.java
+++ /dev/null
@@ -1,493 +0,0 @@
-package mvm.mmrts.rdf.partition.query.evaluation;
-
-import cloudbase.core.client.BatchScanner;
-import cloudbase.core.client.Connector;
-import cloudbase.core.client.TableNotFoundException;
-import cloudbase.core.data.Key;
-import cloudbase.core.data.Range;
-import cloudbase.core.data.Value;
-import cloudbase.core.security.Authorizations;
-import com.google.common.io.ByteArrayDataOutput;
-import com.google.common.io.ByteStreams;
-import info.aduna.iteration.CloseableIteration;
-import info.aduna.iteration.EmptyIteration;
-import mvm.mmrts.rdf.partition.PartitionSail;
-import mvm.mmrts.rdf.partition.query.evaluation.select.FilterIterator;
-import mvm.mmrts.rdf.partition.query.evaluation.select.SelectAllIterator;
-import mvm.mmrts.rdf.partition.query.operators.ShardSubjectLookup;
-import mvm.mmrts.rdf.partition.shard.DateHashModShardValueGenerator;
-import org.apache.hadoop.conf.Configuration;
-import org.apache.hadoop.io.Text;
-import org.openrdf.model.URI;
-import org.openrdf.query.BindingSet;
-import org.openrdf.query.QueryEvaluationException;
-import org.openrdf.query.algebra.Var;
-import ss.cloudbase.core.iterators.CellLevelRecordIterator;
-import ss.cloudbase.core.iterators.GMDenIntersectingIterator;
-import ss.cloudbase.core.iterators.SortedRangeIterator;
-import ss.cloudbase.core.iterators.filter.CBConverter;
-
-import java.io.IOException;
-import java.util.*;
-
-import static mvm.mmrts.rdf.partition.PartitionConstants.*;
-import static mvm.mmrts.rdf.partition.utils.RdfIO.writeValue;
-
-/**
- * Class ShardSubjectLookupStatementIterator
- * Date: Jul 18, 2011
- * Time: 10:53:55 AM
- */
-public class ShardSubjectLookupStatementIterator implements
-        CloseableIteration<BindingSet, QueryEvaluationException> {
-
-    private Connector connector;
-    private String table;
-    //MMRTS-148
-    private String shardTable;
-    private ShardSubjectLookup lookup;
-    private DateHashModShardValueGenerator generator;
-    private BatchScanner scanner;
-    private BindingSet bindings;
-    private CloseableIteration<BindingSet, QueryEvaluationException> iter;
-    private Configuration configuration;
-//    private TimeType timeType = TimeType.XMLDATETIME;
-    private Authorizations authorizations = ALL_AUTHORIZATIONS;
-
-    private int numThreads;
-
-    public ShardSubjectLookupStatementIterator(PartitionSail psail, ShardSubjectLookup lookup, BindingSet bindings, Configuration configuration) throws QueryEvaluationException {
-        this.connector = psail.getConnector();
-        this.lookup = lookup;
-        this.table = psail.getTable();
-        this.shardTable = psail.getShardTable();
-        this.bindings = bindings;
-        this.configuration = configuration;
-
-        //Time Type check
-//        timeType = TimeType.valueOf(this.configuration.get(TIME_TYPE_PROP, TimeType.XMLDATETIME.name()));
-
-        //authorizations
-        String auths = this.configuration.get(AUTHORIZATION_PROP);
-        if (auths != null) {
-            authorizations = new Authorizations(auths.split(","));
-        }
-
-        //TODO: for now we need this
-        this.generator = (DateHashModShardValueGenerator) psail.getGenerator();
-
-        this.numThreads = this.configuration.getInt(NUMTHREADS_PROP, generator.getBaseMod());
-
-        this.initialize();
-    }
-
-    public void initialize() throws QueryEvaluationException {
-        try {
-            /**
-             * Here we will set up the BatchScanner based on the lookup
-             */
-            Var subject = lookup.getSubject();
-            List<Map.Entry<Var, Var>> where = retrieveWhereClause();
-            List<Map.Entry<Var, Var>> select = retrieveSelectClause();
-
-            //global start-end time
-            long start = configuration.getLong(START_BINDING, 0);
-            long end = configuration.getLong(END_BINDING, System.currentTimeMillis());
-
-            int whereSize = where.size() + select.size() + ((!isTimeRange(lookup, configuration)) ? 0 : 1);
-
-            if (subject.hasValue()
-                    && where.size() == 0  /* Not using whereSize, because we can set up the TimeRange in the scanner */
-                    && select.size() == 0) {
-                /**
-                 * Case 1: Subject is set, but predicate, object are not.
-                 * Return all for the subject
-                 */
-                this.scanner = scannerForSubject(subject.getValue());
-                if (this.scanner == null) {
-                    this.iter = new EmptyIteration();
-                    return;
-                }
-                Map.Entry<Var, Var> predObj = lookup.getPredicateObjectPairs().get(0);
-                this.iter = new SelectAllIterator(this.bindings, this.scanner.iterator(), predObj.getKey(), predObj.getValue());
-            } else if (subject.hasValue()
-                    && where.size() == 0 /* Not using whereSize, because we can set up the TimeRange in the scanner */) {
-                /**
-                 * Case 2: Subject is set, and a few predicates are set, but no objects
-                 * Return all, and filter which predicates you are interested in
-                 */
-                this.scanner = scannerForSubject(subject.getValue());
-                if (this.scanner == null) {
-                    this.iter = new EmptyIteration();
-                    return;
-                }
-                this.iter = new FilterIterator(this.bindings, this.scanner.iterator(), subject, select);
-            } else if (subject.hasValue()
-                    && where.size() >= 1 /* Not using whereSize, because we can set up the TimeRange in the scanner */) {
-                /**
-                 * Case 2a: Subject is set, and a few predicates are set, and one object
-                 * TODO: For now we will ignore the predicate-object filter because we do not know how to query for this
-                 */
-                this.scanner = scannerForSubject(subject.getValue());
-                if (this.scanner == null) {
-                    this.iter = new EmptyIteration();
-                    return;
-                }
-                this.iter = new FilterIterator(this.bindings, this.scanner.iterator(), subject, select);
-            } else if (!subject.hasValue() && whereSize > 1) {
-                /**
-                 * Case 3: Subject is not set, more than one where clause
-                 */
-                this.scanner = scannerForPredicateObject(lookup, start, end, where, select);
-                if (this.scanner == null) {
-                    this.iter = new EmptyIteration();
-                    return;
-                }
-                this.iter = new FilterIterator(this.bindings, this.scanner.iterator(), subject, select);
-//                this.iter = new SubjectSelectIterator(this.bindings, this.scanner.iterator(), subject, select);
-            } else if (!subject.hasValue() && whereSize == 1 && select.size() == 0) {
-                /**
-                 * Case 4: No subject, only one where clause
-                 */
-                Map.Entry<Var, Var> predObj = null;
-                if (where.size() == 1) {
-                    predObj = where.get(0);
-                }
-                this.scanner = scannerForPredicateObject(lookup, start, end, predObj);
-                if (this.scanner == null) {
-                    this.iter = new EmptyIteration();
-                    return;
-                }
-                this.iter = new FilterIterator(this.bindings, this.scanner.iterator(), subject, select);
-//                this.iter = new SubjectSelectIterator(this.bindings, this.scanner.iterator(), subject, select);
-            } else if (!subject.hasValue() && select.size() > 1) {
-
-                /**
-                 * Case 5: No subject, no where (multiple select)
-                 */
-                this.scanner = scannerForPredicates(start, end, select);
-                if (this.scanner == null) {
-                    this.iter = new EmptyIteration();
-                    return;
-                }
-                this.iter = new FilterIterator(this.bindings, this.scanner.iterator(), subject, select);
-            } else if (!subject.hasValue() && select.size() == 1) {
-                /**
-                 * Case 5: No subject, no where (just 1 select)
-                 */
-                cloudbase.core.client.Scanner sc = scannerForPredicate(lookup, start, end, (URI) select.get(0).getKey().getValue());
-                if (sc == null) {
-                    this.iter = new EmptyIteration();
-                    return;
-                }                                             //TODO: Fix, put in concrete class
-                final Iterator<Map.Entry<Key, Value>> scIter = sc.iterator();
-                this.iter = new FilterIterator(this.bindings, scIter, subject, select);
-            } else {
-                throw new QueryEvaluationException("Case not supported as of yet");
-            }
-
-        } catch (Exception e) {
-            throw new QueryEvaluationException(e);
-        }
-    }
-
-    protected List<Map.Entry<Var, Var>> retrieveWhereClause() {
-        List<Map.Entry<Var, Var>> where = new ArrayList<Map.Entry<Var, Var>>();
-        for (Map.Entry<Var, Var> entry : lookup.getPredicateObjectPairs()) {
-            Var pred = entry.getKey();
-            Var object = entry.getValue();
-            if (pred.hasValue() && object.hasValue()) {
-                where.add(entry); //TODO: maybe we should clone this?
-            }
-        }
-        return where;
-    }
-
-    protected List<Map.Entry<Var, Var>> retrieveSelectClause() {
-        List<Map.Entry<Var, Var>> select = new ArrayList<Map.Entry<Var, Var>>();
-        for (Map.Entry<Var, Var> entry : lookup.getPredicateObjectPairs()) {
-            Var pred = entry.getKey();
-            Var object = entry.getValue();
-            if (pred.hasValue() && !object.hasValue()) {
-                select.add(entry); //TODO: maybe we should clone this?
-            }
-        }
-        return select;
-    }
-
-    @Override
-    public void close() throws QueryEvaluationException {
-        if (this.scanner != null) {
-            this.scanner.close();
-        }
-    }
-
-    @Override
-    public boolean hasNext() throws QueryEvaluationException {
-        return iter.hasNext();
-    }
-
-    @Override
-    public BindingSet next() throws QueryEvaluationException {
-        try {
-            return iter.next();
-        } catch (Exception e) {
-            throw new QueryEvaluationException(e);
-        }
-    }
-
-    @Override
-    public void remove() throws QueryEvaluationException {
-        iter.next();
-    }
-
-    /**
-     * Utility methods to set up the scanner/batch scanner
-     */
-
-    protected List<Text> shardForSubject(org.openrdf.model.Value subject) throws TableNotFoundException, IOException {
-        BatchScanner scanner = createBatchScanner(this.shardTable);
-        try {
-            scanner.setRanges(Collections.singleton(
-                    new Range(new Text(writeValue(subject)))
-            ));
-            Iterator<Map.Entry<Key, Value>> shardIter = scanner.iterator();
-            if (!shardIter.hasNext()) {
-                return null;
-            }
-
-            List<Text> shards = new ArrayList<Text>();
-            while (shardIter.hasNext()) {
-                shards.add(shardIter.next().getKey().getColumnFamily());
-            }
-            //MMRTS-147 so that we can return subjects from multiple shards
-            return shards;
-        } finally {
-            if (scanner != null)
-                scanner.close();
-        }
-    }
-
-
-    protected BatchScanner scannerForSubject(org.openrdf.model.Value subject) throws TableNotFoundException, IOException {
-        List<Text> shards = shardForSubject(subject);
-
-        if (shards == null)
-            return null;
-
-        BatchScanner scanner = createBatchScanner(this.table);
-
-//        scanner.setScanIterators(21, CellLevelRecordIterator.class.getName(), "ci");
-        Collection<Range> ranges = new ArrayList<Range>();
-        for (Text shard : shards) {
-            ranges.add(new Range(
-                    new Key(
-                            shard, DOC,
-                            new Text(URI_MARKER_STR + subject + FAMILY_DELIM_STR + "\0")
-                    ),
-                    new Key(
-                            shard, DOC,
-                            new Text(URI_MARKER_STR + subject + FAMILY_DELIM_STR + "\uFFFD")
-                    )
-            ));
-        }
-        scanner.setRanges(ranges);
-        return scanner;
-    }
-
-    protected BatchScanner scannerForPredicateObject(ShardSubjectLookup lookup, Long start, Long end, List<Map.Entry<Var, Var>> predObjs, List<Map.Entry<Var, Var>> select) throws IOException, TableNotFoundException {
-        start = validateFillStartTime(start, lookup);
-        end = validateFillEndTime(end, lookup);
-
-        int extra = 0;
-
-        if (isTimeRange(lookup, configuration)) {
-            extra += 1;
-        }
-
-        Text[] queries = new Text[predObjs.size() + select.size() + extra];
-        int qi = 0;
-        for (Map.Entry<Var, Var> predObj : predObjs) {
-            ByteArrayDataOutput output = ByteStreams.newDataOutput();
-            writeValue(output, predObj.getKey().getValue());
-            output.write(INDEX_DELIM);
-            writeValue(output, predObj.getValue().getValue());
-            queries[qi++] = new Text(output.toByteArray());
-        }
-        for (Map.Entry<Var, Var> predicate : select) {
-            queries[qi++] = new Text(GMDenIntersectingIterator.getRangeTerm(INDEX.toString(),
-                    URI_MARKER_STR + predicate.getKey().getValue() + INDEX_DELIM_STR + "\0"
-                    , true,
-                    URI_MARKER_STR + predicate.getKey().getValue() + INDEX_DELIM_STR + "\uFFFD",
-                    true
-            ));
-        }
-
-        if (isTimeRange(lookup, configuration)) {
-            queries[queries.length - 1] = new Text(
-                    GMDenIntersectingIterator.getRangeTerm(INDEX.toString(),
-                            getStartTimeRange(lookup, configuration)
-                            , true,
-                            getEndTimeRange(lookup, configuration),
-                            true
-                    )
-            );
-        }
-
-        BatchScanner bs = createBatchScanner(this.table);
-
-        bs.setScanIterators(21, CellLevelRecordIterator.class.getName(), "ci");
-        bs.setScanIteratorOption("ci", CBConverter.OPTION_VALUE_DELIMITER, VALUE_DELIMITER);
-
-        bs.setScanIterators(20, GMDenIntersectingIterator.class.getName(), "ii");
-        bs.setScanIteratorOption("ii", GMDenIntersectingIterator.docFamilyOptionName, DOC.toString());
-        bs.setScanIteratorOption("ii", GMDenIntersectingIterator.indexFamilyOptionName, INDEX.toString());
-        bs.setScanIteratorOption("ii", GMDenIntersectingIterator.columnFamiliesOptionName, GMDenIntersectingIterator.encodeColumns(queries));
-        bs.setScanIteratorOption("ii", GMDenIntersectingIterator.OPTION_MULTI_DOC, "" + true);
-
-        Range range = new Range(
-                new Key(new Text(generator.generateShardValue(start, null) + "\0")),
-                new Key(new Text(generator.generateShardValue(end, null) + "\uFFFD"))
-        );
-        bs.setRanges(Collections.singleton(
-                range
-        ));
-
-        return bs;
-    }
-
-    protected BatchScanner scannerForPredicateObject(ShardSubjectLookup lookup, Long start, Long end, Map.Entry<Var, Var> predObj) throws IOException, TableNotFoundException {
-        start = validateFillStartTime(start, lookup);
-        end = validateFillEndTime(end, lookup);
-
-        BatchScanner bs = createBatchScanner(this.table);
-
-        bs.setScanIterators(21, CellLevelRecordIterator.class.getName(), "ci");
-        bs.setScanIteratorOption("ci", CBConverter.OPTION_VALUE_DELIMITER, VALUE_DELIMITER);
-
-        bs.setScanIterators(20, SortedRangeIterator.class.getName(), "ri");
-        bs.setScanIteratorOption("ri", SortedRangeIterator.OPTION_DOC_COLF, DOC.toString());
-        bs.setScanIteratorOption("ri", SortedRangeIterator.OPTION_COLF, INDEX.toString());
-        bs.setScanIteratorOption("ri", SortedRangeIterator.OPTION_START_INCLUSIVE, "" + true);
-        bs.setScanIteratorOption("ri", SortedRangeIterator.OPTION_END_INCLUSIVE, "" + true);
-        bs.setScanIteratorOption("ri", SortedRangeIterator.OPTION_MULTI_DOC, "" + true);
-
-        if (isTimeRange(lookup, configuration)) {
-            String startRange = getStartTimeRange(lookup, configuration);
-            bs.setScanIteratorOption("ri", SortedRangeIterator.OPTION_LOWER_BOUND,
-                    startRange);
-            String endRange = getEndTimeRange(lookup, configuration);
-            bs.setScanIteratorOption("ri", SortedRangeIterator.OPTION_UPPER_BOUND,
-                    endRange);
-        } else {
-
-            ByteArrayDataOutput output = ByteStreams.newDataOutput();
-            writeValue(output, predObj.getKey().getValue());
-            output.write(INDEX_DELIM);
-            writeValue(output, predObj.getValue().getValue());
-
-            String bound = new String(output.toByteArray());
-            bs.setScanIteratorOption("ri", SortedRangeIterator.OPTION_LOWER_BOUND, bound);
-            bs.setScanIteratorOption("ri", SortedRangeIterator.OPTION_UPPER_BOUND, bound + "\00");
-        }
-
-        //TODO: Do we add a time predicate to this?
-//        bs.setScanIterators(19, FilteringIterator.class.getName(), "filteringIterator");
-//        bs.setScanIteratorOption("filteringIterator", "0", TimeRangeFilter.class.getName());
-//        bs.setScanIteratorOption("filteringIterator", "0." + TimeRangeFilter.TIME_RANGE_PROP, (end - start) + "");
-//        bs.setScanIteratorOption("filteringIterator", "0." + TimeRangeFilter.START_TIME_PROP, end + "");
-
-        Range range = new Range(
-                new Key(new Text(generator.generateShardValue(start, null) + "\0")),
-                new Key(new Text(generator.generateShardValue(end, null) + "\uFFFD"))
-        );
-        bs.setRanges(Collections.singleton(
-                range
-        ));
-
-        return bs;
-    }
-
-    protected BatchScanner scannerForPredicates(Long start, Long end, List<Map.Entry<Var, Var>> predicates) throws IOException, TableNotFoundException {
-        start = validateFillStartTime(start, lookup);
-        end = validateFillEndTime(end, lookup);
-
-        int extra = 0;
-
-        if (isTimeRange(lookup, configuration)) {
-            extra += 1;
-        }
-
-        Text[] queries = new Text[predicates.size() + extra];
-        for (int i = 0; i < predicates.size(); i++) {
-            Map.Entry<Var, Var> predicate = predicates.get(i);
-            queries[i] = new Text(GMDenIntersectingIterator.getRangeTerm(INDEX.toString(),
-                    URI_MARKER_STR + predicate.getKey().getValue() + INDEX_DELIM_STR + "\0"
-                    , true,
-                    URI_MARKER_STR + predicate.getKey().getValue() + INDEX_DELIM_STR + "\uFFFD",
-                    true
-            ));
-        }
-
-        if (isTimeRange(lookup, configuration)) {
-            queries[queries.length - 1] = new Text(
-                    GMDenIntersectingIterator.getRangeTerm(INDEX.toString(),
-                            getStartTimeRange(lookup, configuration)
-                            , true,
-                            getEndTimeRange(lookup, configuration),
-                            true
-                    )
-            );
-        }
-
-        BatchScanner bs = createBatchScanner(this.table);
-        bs.setScanIterators(21, CellLevelRecordIterator.class.getName(), "ci");
-        bs.setScanIteratorOption("ci", CBConverter.OPTION_VALUE_DELIMITER, VALUE_DELIMITER);
-
-        bs.setScanIterators(20, GMDenIntersectingIterator.class.getName(), "ii");
-        bs.setScanIteratorOption("ii", GMDenIntersectingIterator.docFamilyOptionName, DOC.toString());
-        bs.setScanIteratorOption("ii", GMDenIntersectingIterator.indexFamilyOptionName, INDEX.toString());
-        bs.setScanIteratorOption("ii", GMDenIntersectingIterator.columnFamiliesOptionName, GMDenIntersectingIterator.encodeColumns(queries));
-        bs.setScanIteratorOption("ii", GMDenIntersectingIterator.OPTION_MULTI_DOC, "" + true);
-
-        Range range = new Range(
-                new Key(new Text(generator.generateShardValue(start, null) + "\0")),
-                new Key(new Text(generator.generateShardValue(end, null) + "\uFFFD"))
-        );
-        bs.setRanges(Collections.singleton(
-                range
-        ));
-
-        return bs;
-    }
-
-    protected cloudbase.core.client.Scanner scannerForPredicate(ShardSubjectLookup lookup, Long start, Long end, URI predicate) throws IOException, TableNotFoundException {
-        start = validateFillStartTime(start, lookup);
-        end = validateFillEndTime(end, lookup);
-
-        cloudbase.core.client.Scanner sc = createScanner(this.table);
-
-        Range range = new Range(
-                new Key(new Text(generator.generateShardValue(start, null) + "\0")),
-                new Key(new Text(generator.generateShardValue(end, null) + "\uFFFD"))
-        );
-        sc.setRange(range);
-        sc.fetchColumnFamily(INDEX);
-        sc.setColumnFamilyRegex(INDEX.toString());
-        sc.setColumnQualifierRegex(URI_MARKER_STR + predicate + INDEX_DELIM_STR + "(.*)");
-
-        return sc;
-    }
-
-    protected cloudbase.core.client.Scanner createScanner(String sTable) throws TableNotFoundException {
-        return connector.createScanner(sTable, authorizations);
-    }
-
-    protected BatchScanner createBatchScanner(String sTable) throws TableNotFoundException {
-        return createBatchScanner(sTable, numThreads);
-    }
-
-    protected BatchScanner createBatchScanner(String sTable, int numThreads) throws TableNotFoundException {
-        return connector.createBatchScanner(sTable, authorizations, numThreads);
-    }
-}

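initialize() above dispatches on which of subject, where and select clauses are bound, but every scanner it builds brackets the scan with the same shard-key range. Restated in isolation (generator, start, end and bs assumed in scope, exception handling elided), the "\0" and "\uFFFD" suffixes make the bounds cover any key material that follows a shard value:

    // First and last possible keys within the shards covering [start, end].
    Range range = new Range(
            new Key(new Text(generator.generateShardValue(start, null) + "\0")),
            new Key(new Text(generator.generateShardValue(end, null) + "\uFFFD")));
    bs.setRanges(Collections.singleton(range));
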
http://git-wip-us.apache.org/repos/asf/incubator-rya/blob/80faf06d/partition/partition.rdf/src/main/java/mvm/mmrts/rdf/partition/query/evaluation/SubjectGroupingOptimizer.java
----------------------------------------------------------------------
diff --git a/partition/partition.rdf/src/main/java/mvm/mmrts/rdf/partition/query/evaluation/SubjectGroupingOptimizer.java b/partition/partition.rdf/src/main/java/mvm/mmrts/rdf/partition/query/evaluation/SubjectGroupingOptimizer.java
deleted file mode 100644
index 782cfb9..0000000
--- a/partition/partition.rdf/src/main/java/mvm/mmrts/rdf/partition/query/evaluation/SubjectGroupingOptimizer.java
+++ /dev/null
@@ -1,178 +0,0 @@
-package mvm.mmrts.rdf.partition.query.evaluation;
-
-import mvm.mmrts.rdf.partition.query.operators.ShardSubjectLookup;
-import mvm.mmrts.rdf.partition.utils.CountPredObjPairs;
-import org.apache.hadoop.conf.Configuration;
-import org.openrdf.query.BindingSet;
-import org.openrdf.query.Dataset;
-import org.openrdf.query.algebra.Join;
-import org.openrdf.query.algebra.StatementPattern;
-import org.openrdf.query.algebra.TupleExpr;
-import org.openrdf.query.algebra.Var;
-import org.openrdf.query.algebra.evaluation.QueryOptimizer;
-import org.openrdf.query.algebra.helpers.QueryModelVisitorBase;
-
-import java.util.ArrayList;
-import java.util.Collections;
-import java.util.Comparator;
-import java.util.List;
-
-import static mvm.mmrts.rdf.partition.PartitionConstants.*;
-
-/**
- * Date: Jul 14, 2011
- * Time: 4:14:16 PM
- */
-public class SubjectGroupingOptimizer implements QueryOptimizer {
-
-    private static final Comparator<Var> VAR_COMPARATOR = new VarComparator();
-    private static final Comparator<StatementPattern> SP_SUBJ_COMPARATOR = new SubjectComparator();
-    private static final Comparator<TupleExpr> STATS_SHARD_COMPARATOR = new ShardLookupComparator();
-    private static final CountPredObjPairs STATISTICS = new CountPredObjPairs();
-    private Configuration conf;
-
-    public SubjectGroupingOptimizer(Configuration conf) {
-        this.conf = conf;
-    }
-
-    @Override
-    public void optimize(TupleExpr tupleExpr, Dataset dataset, BindingSet bindingSet) {
-        tupleExpr.visit(new FlattenJoinVisitor());
-    }
-
-    protected class FlattenJoinVisitor extends QueryModelVisitorBase<RuntimeException> {
-        @Override
-        public void meet(Join node) throws RuntimeException {
-            List<StatementPattern> flatten = getJoinArgs(node, new ArrayList<StatementPattern>());
-            //order by subject
-            Collections.sort(flatten, SP_SUBJ_COMPARATOR);
-
-            List<TupleExpr> shardLookups = new ArrayList<TupleExpr>();
-            Var current = null;
-            ShardSubjectLookup shardLookupCurrent = null;
-            for (StatementPattern sp : flatten) {
-                if (!sp.getSubjectVar().hasValue() && !sp.getPredicateVar().hasValue()) {
-                    // if there is nothing set in the subject or predicate, we treat it as a single item
-                    // might be ?s ?p ?o
-                    shardLookups.add(sp);
-                } else {
-                    Var subjectVar = sp.getSubjectVar();
-                    if (VAR_COMPARATOR.compare(current, subjectVar) != 0) {
-                        current = subjectVar;
-                        shardLookupCurrent = new ShardSubjectLookup(current);
-                        populateLookup(shardLookupCurrent);
-                        shardLookups.add(shardLookupCurrent);
-                    }
-                    shardLookupCurrent.addPredicateObjectPair(sp.getPredicateVar(), sp.getObjectVar());
-                }
-            }
-
-            int i = 0;
-            Collections.sort(shardLookups, STATS_SHARD_COMPARATOR);
-            TupleExpr replacement = shardLookups.get(i);
-            for (i++; i < shardLookups.size(); i++) {
-                replacement = new Join(replacement, shardLookups.get(i));
-            }
-
-            node.replaceWith(replacement);
-        }
-
-        @Override
-        public void meet(StatementPattern node) throws RuntimeException {
-            ShardSubjectLookup lookup = new ShardSubjectLookup(node.getSubjectVar());
-            lookup.addPredicateObjectPair(node.getPredicateVar(), node.getObjectVar());
-            populateLookup(lookup);
-            node.replaceWith(lookup);
-        }
-    }
-
-    protected <L extends List<StatementPattern>> L getJoinArgs(TupleExpr tupleExpr, L joinArgs) {
-        if (tupleExpr instanceof Join) {
-            Join join = (Join) tupleExpr;
-            getJoinArgs(join.getLeftArg(), joinArgs);
-            getJoinArgs(join.getRightArg(), joinArgs);
-        } else if (tupleExpr instanceof StatementPattern) {
-            joinArgs.add((StatementPattern) tupleExpr);
-        }
-
-        return joinArgs;
-    }
-
-    protected ShardSubjectLookup populateLookup(ShardSubjectLookup lookup) {
-        String timePredicate = conf.get(lookup.getSubject().getName() + "." + TIME_PREDICATE);
-        if (timePredicate != null) {
-            lookup.setTimePredicate(timePredicate);
-            lookup.setStartTimeRange(conf.get(lookup.getSubject().getName() + "." + START_BINDING));
-            lookup.setEndTimeRange(conf.get(lookup.getSubject().getName() + "." + END_BINDING));
-            lookup.setTimeType(TimeType.valueOf(conf.get(lookup.getSubject().getName() + "." + TIME_TYPE_PROP, TimeType.XMLDATETIME.name())));
-        }
-
-        String shardRange = conf.get(lookup.getSubject().getName() + "." + SHARDRANGE_BINDING);
-        if(shardRange != null) {
-            lookup.setShardStartTimeRange(conf.get(lookup.getSubject().getName() + "." + SHARDRANGE_START));
-            lookup.setShardEndTimeRange(conf.get(lookup.getSubject().getName() + "." + SHARDRANGE_END));
-        }
-
-        return lookup;
-    }
-
-    protected static class SubjectComparator implements Comparator<StatementPattern> {
-
-        @Override
-        public int compare(StatementPattern a, StatementPattern b) {
-            if (a == b)
-                return 0;
-
-            if (a == null || b == null)
-                return 1;
-
-            if (a.getSubjectVar().equals(b.getSubjectVar())) {
-                if (a.getPredicateVar().hasValue() && b.getPredicateVar().hasValue())
-                    return 0;
-                if (a.getPredicateVar().hasValue() && !b.getPredicateVar().hasValue())
-                    return -1;
-                if (!a.getPredicateVar().hasValue() && b.getPredicateVar().hasValue())
-                    return 1;
-                return 0;
-            }
-
-            if (a.getSubjectVar().getValue() != null && b.getSubjectVar().getValue() != null &&
-                    a.getSubjectVar().getValue().equals(b.getSubjectVar().getValue()))
-                return 0;
-
-            return 1;
-        }
-    }
-
-    protected static class ShardLookupComparator implements Comparator<TupleExpr> {
-
-        @Override
-        public int compare(TupleExpr a, TupleExpr b) {
-            double a_c = STATISTICS.getCount(a);
-            double b_c = STATISTICS.getCount(b);
-            double diff = a_c - b_c;
-            return (int) (diff / Math.abs(diff));
-        }
-    }
-
-    protected static class VarComparator implements Comparator<Var> {
-
-        @Override
-        public int compare(Var a, Var b) {
-            if (a == b)
-                return 0;
-            if (a == null || b == null)
-                return 1;
-
-            if (a.equals(b))
-                return 0;
-
-            if (a.getValue() != null &&
-                    b.getValue() != null &&
-                    a.getValue().equals(b.getValue()))
-                return 0;
-
-            return 1;
-        }
-    }
-}

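The net effect of the optimizer above: statement patterns sharing a subject collapse into one ShardSubjectLookup, so a single document scan replaces a join of per-pattern scans. A hand-built equivalent for the two patterns ?s ex:name ?n and ?s ex:age ?a, using Var and URIImpl from the openrdf algebra API (predicate variable names are arbitrary):

    Var subj = new Var("s");
    ShardSubjectLookup lookup = new ShardSubjectLookup(subj);
    lookup.addPredicateObjectPair(
            new Var("p1", new URIImpl("http://example.org/name")), new Var("n"));
    lookup.addPredicateObjectPair(
            new Var("p2", new URIImpl("http://example.org/age")), new Var("a"));
    // STATS_SHARD_COMPARATOR then orders multiple lookups by estimated
    // cardinality before they are re-joined.
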
http://git-wip-us.apache.org/repos/asf/incubator-rya/blob/80faf06d/partition/partition.rdf/src/main/java/mvm/mmrts/rdf/partition/query/evaluation/select/FilterIterator.java
----------------------------------------------------------------------
diff --git a/partition/partition.rdf/src/main/java/mvm/mmrts/rdf/partition/query/evaluation/select/FilterIterator.java b/partition/partition.rdf/src/main/java/mvm/mmrts/rdf/partition/query/evaluation/select/FilterIterator.java
deleted file mode 100644
index 7da4276..0000000
--- a/partition/partition.rdf/src/main/java/mvm/mmrts/rdf/partition/query/evaluation/select/FilterIterator.java
+++ /dev/null
@@ -1,100 +0,0 @@
-package mvm.mmrts.rdf.partition.query.evaluation.select;
-
-import cloudbase.core.data.Key;
-import cloudbase.core.data.Value;
-import com.google.common.collect.Lists;
-import org.openrdf.model.Statement;
-import org.openrdf.model.URI;
-import org.openrdf.query.BindingSet;
-import org.openrdf.query.QueryEvaluationException;
-import org.openrdf.query.algebra.Var;
-import org.openrdf.query.algebra.evaluation.QueryBindingSet;
-
-import java.util.*;
-
-/**
- * TODO: This could be done as a filtering iterator in the Iterator Stack
- */
-public class FilterIterator extends SelectIterator {
-
-    private List<Map.Entry<Var, Var>> predObjs;
-    private Map<URI, Map.Entry<Var, Var>> filters = new HashMap<URI, Map.Entry<Var, Var>>();
-    private List<Statement> document;
-    private List<Map.Entry<Var, Var>> currentPredObj;
-    private Var subjVar;
-    private List<QueryBindingSet> currentResults;
-    private int currentResultsIndex = 0;
-
-    public FilterIterator(BindingSet bindings, Iterator<Map.Entry<Key, Value>> iter, Var subjVar, List<Map.Entry<Var, Var>> predObjs) throws QueryEvaluationException {
-        super(bindings, iter);
-        this.subjVar = subjVar;
-        this.predObjs = predObjs;
-        for (Map.Entry<Var, Var> predObj : this.predObjs) {
-            //find filtering predicates
-            this.filters.put((URI) predObj.getKey().getValue(), predObj);
-        }
-    }
-
-    @Override
-    public boolean hasNext() throws QueryEvaluationException {
-        if (document != null || currentResults != null)
-            return true;
-
-        return super.hasNext();
-
-//        boolean hasNext = super.hasNext();
-//        List<Map.Entry<Var, Var>> filter = null;
-//        while (hasNext) {
-//            List<Statement> stmts = nextDocument();
-//            filter = filter(stmts);
-//            if (filter != null && filter.size() > 0) {
-//                document = stmts;
-//                this.currentPredObj = filter;
-//                return true;
-//            }
-//            hasNext = super.hasNext();
-//        }
-//        return document != null;
-    }
-
-    @Override
-    public BindingSet next() throws QueryEvaluationException {
-        try {
-            if (document == null) {
-                document = nextDocument();
-            }
-            if (currentResults == null) {
-                currentResults = populateBindingSet(document, subjVar, this.predObjs);
-            }
-            BindingSet bs = currentResults.get(currentResultsIndex);
-            currentResultsIndex++;
-            if (currentResultsIndex >= currentResults.size()) {
-                currentResults = null;
-                currentResultsIndex = 0;
-                document = null;
-            }
-            return bs;
-        } catch (Exception e) {
-            throw new QueryEvaluationException(e);
-        }
-    }
-
-    /**
-     * @return true if the Statement is filtered
-     * @throws QueryEvaluationException
-     */
-    protected List<Map.Entry<Var, Var>> filter(List<Statement> document) throws QueryEvaluationException {
-        List<Map.Entry<Var, Var>> foundIn = new ArrayList();
-
-        for (Statement st : document) {
-            for (Map.Entry<Var, Var> entry : this.predObjs) {
-                if (st.getPredicate().equals(entry.getKey().getValue())) {
-                    foundIn.add(entry);
-                    break;
-                }
-            }
-        }
-        return foundIn;
-    }
-
-}

http://git-wip-us.apache.org/repos/asf/incubator-rya/blob/80faf06d/partition/partition.rdf/src/main/java/mvm/mmrts/rdf/partition/query/evaluation/select/SelectAllIterator.java
----------------------------------------------------------------------
diff --git a/partition/partition.rdf/src/main/java/mvm/mmrts/rdf/partition/query/evaluation/select/SelectAllIterator.java b/partition/partition.rdf/src/main/java/mvm/mmrts/rdf/partition/query/evaluation/select/SelectAllIterator.java
deleted file mode 100644
index ebe23dc..0000000
--- a/partition/partition.rdf/src/main/java/mvm/mmrts/rdf/partition/query/evaluation/select/SelectAllIterator.java
+++ /dev/null
@@ -1,54 +0,0 @@
-package mvm.mmrts.rdf.partition.query.evaluation.select;
-
-import cloudbase.core.data.Key;
-import cloudbase.core.data.Value;
-import com.google.common.collect.Lists;
-import org.openrdf.model.Statement;
-import org.openrdf.query.BindingSet;
-import org.openrdf.query.QueryEvaluationException;
-import org.openrdf.query.algebra.Var;
-
-import java.util.HashMap;
-import java.util.Iterator;
-import java.util.List;
-import java.util.Map;
-
-/**
- * Class SelectAllIterator
- * Date: Jul 18, 2011
- * Time: 12:01:25 PM
- */
-public class SelectAllIterator extends SelectIterator {
-
-    private List<Map.Entry<Var, Var>> predObj;
-    private List<Statement> document = null;
-    private int index = 0;
-
-    public SelectAllIterator(BindingSet bindings, Iterator<Map.Entry<Key, Value>> iter, Var predVar, Var objVar) throws QueryEvaluationException {
-        super(bindings, iter);
-        predObj = (List) Lists.newArrayList(new HashMap.SimpleEntry(predVar, objVar));
-    }
-
-    @Override
-    public boolean hasNext() throws QueryEvaluationException {
-        return super.hasNext() || document != null;
-    }
-
-    @Override
-    public BindingSet next() throws QueryEvaluationException {
-        try {
-            if (document == null && super.hasNext()) {
-                document = nextDocument();
-            }
-            Statement st = document.get(index);
-            index++;
-            if (index >= document.size()) {
-                document = null;
-            }
-            return populateBindingSet(st, predObj);
-        } catch (Exception e) {
-            throw new QueryEvaluationException(e);
-        }
-    }
-
-}

http://git-wip-us.apache.org/repos/asf/incubator-rya/blob/80faf06d/partition/partition.rdf/src/main/java/mvm/mmrts/rdf/partition/query/evaluation/select/SelectIterator.java
----------------------------------------------------------------------
diff --git a/partition/partition.rdf/src/main/java/mvm/mmrts/rdf/partition/query/evaluation/select/SelectIterator.java b/partition/partition.rdf/src/main/java/mvm/mmrts/rdf/partition/query/evaluation/select/SelectIterator.java
deleted file mode 100644
index e6efa2b..0000000
--- a/partition/partition.rdf/src/main/java/mvm/mmrts/rdf/partition/query/evaluation/select/SelectIterator.java
+++ /dev/null
@@ -1,270 +0,0 @@
-package mvm.mmrts.rdf.partition.query.evaluation.select;
-
-import cloudbase.core.data.Key;
-import cloudbase.core.data.Value;
-import com.google.common.collect.ArrayListMultimap;
-import com.google.common.collect.Iterators;
-import com.google.common.collect.Multimap;
-import com.google.common.collect.PeekingIterator;
-import com.google.common.io.ByteStreams;
-import info.aduna.iteration.CloseableIteration;
-import mvm.mmrts.rdf.partition.utils.RdfIO;
-import org.openrdf.model.Resource;
-import org.openrdf.model.Statement;
-import org.openrdf.model.URI;
-import org.openrdf.model.impl.StatementImpl;
-import org.openrdf.query.BindingSet;
-import org.openrdf.query.QueryEvaluationException;
-import org.openrdf.query.algebra.Var;
-import org.openrdf.query.algebra.evaluation.QueryBindingSet;
-import ss.cloudbase.core.iterators.filter.CBConverter;
-
-import java.util.*;
-
-import static mvm.mmrts.rdf.partition.PartitionConstants.*;
-import static mvm.mmrts.rdf.partition.utils.RdfIO.readStatement;
-
-/**
- * Class SelectAllIterator
- * Date: Jul 18, 2011
- * Time: 12:01:25 PM
- */
-public abstract class SelectIterator implements CloseableIteration<BindingSet, QueryEvaluationException> {
-
-    protected PeekingIterator<Map.Entry<Key, Value>> iter;
-    protected BindingSet bindings;
-    protected CBConverter converter = new CBConverter();
-
-    private boolean hasNext = true;
-
-    public SelectIterator(BindingSet bindings, Iterator<Map.Entry<Key, Value>> iter) {
-        this.bindings = bindings;
-        this.iter = Iterators.peekingIterator(iter);
-        converter.init(Collections.singletonMap(CBConverter.OPTION_VALUE_DELIMITER, VALUE_DELIMITER));
-    }
-
-    @Override
-    public void close() throws QueryEvaluationException {
-
-    }
-
-    @Override
-    public boolean hasNext() throws QueryEvaluationException {
-        return statefulHasNext();
-    }
-
-    protected boolean statefulHasNext() {
-        hasNext = iter.hasNext() && hasNext;
-        return hasNext;
-    }
-
-    protected List<Statement> nextDocument() throws QueryEvaluationException {
-        try {
-            Map.Entry<Key, Value> entry = iter.peek();
-            Key key = entry.getKey();
-            Value value = entry.getValue();
-
-            if (value.getSize() == 0) {
-                //not an aggregate document
-                return nextNonAggregateDocument();
-//                return Collections.singletonList(RdfIO.readStatement(ByteStreams.newDataInput(key.getColumnQualifier().getBytes()), VALUE_FACTORY, true));
-            }
-
-            List<Statement> document = new ArrayList<Statement>();
-
-            org.openrdf.model.Value subj = RdfIO.readValue(ByteStreams.newDataInput(key.getColumnQualifier().getBytes()), VALUE_FACTORY, FAMILY_DELIM);
-            Map<String, String> map = converter.toMap(entry.getKey(), value);
-            for (Map.Entry<String, String> e : map.entrySet()) {
-                String predObj = e.getKey();
-                String[] split = predObj.split(FAMILY_DELIM_STR);
-                document.add(new StatementImpl((Resource) subj, VALUE_FACTORY.createURI(split[0]), RdfIO.readValue(ByteStreams.newDataInput(split[1].getBytes()), VALUE_FACTORY, FAMILY_DELIM)));
-            }
-            iter.next();
-            return document;
-        } catch (Exception e) {
-            throw new QueryEvaluationException("Error retrieving document", e);
-        }
-    }
-
-//    protected List<Statement> nextDocument() throws QueryEvaluationException {
-//        try {
-//            List<? extends Map.Entry<Key, Value>> entryList = iter.next();
-//            List<Statement> document = new ArrayList();
-//            for (Map.Entry<Key, Value> keyValueEntry : entryList) {
-//                Statement stmt = null;
-//                Key key = keyValueEntry.getKey();
-//                if (DOC.equals(key.getColumnFamily()))
-//                    stmt = readStatement(ByteStreams.newDataInput(key.getColumnQualifier().getBytes()), VALUE_FACTORY);
-//                else
-//                    stmt = readStatement(ByteStreams.newDataInput(key.getColumnQualifier().getBytes()), VALUE_FACTORY, false);
-//                document.add(stmt);
-//            }
-//            return document;
-//        } catch (Exception e) {
-//            throw new QueryEvaluationException(e);
-//        }
-//    }
-
-//    protected List<Statement> nextDocument() throws QueryEvaluationException {
-//        return documentIter.next();
-//    }
-
-    protected List<Statement> nextNonAggregateDocument() throws QueryEvaluationException {
-        try {
-            List<Statement> document = new ArrayList<Statement>();
-            if (!statefulHasNext())
-                return document;
-            Statement stmt = peekNextStatement();
-            if (stmt == null)
-                return document;
-
-            Resource subject = stmt.getSubject();
-            Resource current = subject;
-            document.add(stmt);
-            while ((current.equals(subject) && statefulHasNext())) {
-                advance();
-                current = subject;
-                stmt = peekNextStatement();
-                if (stmt != null) {
-                    subject = stmt.getSubject();
-                    if (subject.equals(current))
-                        document.add(stmt);
-                } else
-                    subject = null;
-            }
-//            System.out.println(document);
-            return document;
-        } catch (Exception e) {
-            throw new QueryEvaluationException(e);
-        }
-    }
-
-//    protected Statement nextStatement() throws Exception {
-//        List<Map.Entry<Key, Value>> entryList = iter.next();
-//        for (Map.Entry<Key, Value> keyValueEntry : entryList) {
-//
-//        }
-//        Map.Entry<Key, Value> entry = iter.next();
-//        Key key = entry.getKey();
-//        if (DOC.equals(key.getColumnFamily()))
-//            return readStatement(ByteStreams.newDataInput(key.getColumnQualifier().getBytes()), VALUE_FACTORY);
-//        else
-//            return readStatement(ByteStreams.newDataInput(key.getColumnQualifier().getBytes()), VALUE_FACTORY, false);
-//    }
-
-    protected Statement peekNextStatement() throws Exception {
-        if (!statefulHasNext())
-            return null;
-        Map.Entry<Key, Value> entry = iter.peek();
-        Key key = entry.getKey();
-        if (DOC.equals(key.getColumnFamily()))
-            return readStatement(ByteStreams.newDataInput(key.getColumnQualifier().getBytes()), VALUE_FACTORY);
-        else
-            return readStatement(ByteStreams.newDataInput(key.getColumnQualifier().getBytes()), VALUE_FACTORY, false);
-    }
-
-    protected void advance() throws Exception {
-        iter.next();
-    }
-
-    @Override
-    public void remove() throws QueryEvaluationException {
-        iter.next();
-    }
-
-    protected BindingSet populateBindingSet(Statement st, List<Map.Entry<Var, Var>> predObjVar) {
-        QueryBindingSet result = new QueryBindingSet(bindings);
-        for (Map.Entry<Var, Var> entry : predObjVar) {
-            Var predVar = entry.getKey();
-            Var objVar = entry.getValue();
-            if (predVar != null && !result.hasBinding(predVar.getName()))
-                result.addBinding(predVar.getName(), st.getPredicate());
-            if (objVar != null && !result.hasBinding(objVar.getName()))
-                result.addBinding(objVar.getName(), st.getObject());
-        }
-        return result;
-    }
-
-    protected List<QueryBindingSet> populateBindingSet(List<Statement> document, Var subjVar, List<Map.Entry<Var, Var>> predObjVar) {
-        //convert document to a multimap
-        Multimap<URI, Statement> docMap = ArrayListMultimap.create();
-        for (Statement st : document) {
-            docMap.put(st.getPredicate(), st);
-        }
-
-        List<QueryBindingSet> results = new ArrayList<QueryBindingSet>();
-        QueryBindingSet bs0 = new QueryBindingSet(bindings);
-//        QueryBindingSet result = new QueryBindingSet(bindings);
-
-        if (document.size() > 0) {
-            Statement stmt = document.get(0);
-            if (subjVar != null && !bs0.hasBinding(subjVar.getName())) {
-                bs0.addBinding(subjVar.getName(), stmt.getSubject());
-            }
-        }
-        results.add(bs0);
-
-//        for (Statement st : document) {
-        for (Map.Entry<Var, Var> entry : predObjVar) {
-            Var predVar = entry.getKey();
-            Var objVar = entry.getValue();
-
-//                if (predVar.hasValue() && !st.getPredicate().equals(predVar.getValue()))
-//                    continue;
-            if (predVar == null || !predVar.hasValue())
-                continue;
-            Collection<Statement> predSts = docMap.get((URI) predVar.getValue());
-
-//            if (predVar != null && !result.hasBinding(predVar.getName()))
-//                result.addBinding(predVar.getName(), st.getPredicate());
-//            if (objVar != null && !result.hasBinding(objVar.getName()))
-//                result.addBinding(objVar.getName(), st.getObject());
-
-            populateBindingSets(results, predVar, objVar, predSts);
-        }
-//        }
-        return results;
-    }
-
-    private void populateBindingSets(List<QueryBindingSet> results, Var predVar, Var objVar, Collection<Statement> stmts) {
-        if (predVar == null || objVar == null || stmts == null || stmts.size() == 0)
-            return;
-
-        List<QueryBindingSet> copyOf = new ArrayList<QueryBindingSet>(results);
-
-        int i = copyOf.size();
-        int j = 0;
-        for (Iterator<Statement> iter = stmts.iterator(); iter.hasNext();) {
-            Statement st = iter.next();
-            int k = 0;
-            for (QueryBindingSet result : results) {
-                if (!result.hasBinding(predVar.getName()) || k >= i) {
-                    String name = predVar.getName();
-                    org.openrdf.model.Value val = st.getPredicate();
-                    addBinding(result, name, val);
-                }
-                if (!result.hasBinding(objVar.getName()) || k >= i)
-                    addBinding(result, objVar.getName(), st.getObject());
-                k++;
-            }
-
-            i = copyOf.size() + j * copyOf.size();
-            j++;
-
-            if (iter.hasNext()) {
-                //copy results
-                for (QueryBindingSet copy : copyOf) {
-                    results.add(new QueryBindingSet(copy));
-                }
-            }
-
-        }
-    }
-
-    private void addBinding(QueryBindingSet result, String name, org.openrdf.model.Value val) {
-        if (result.hasBinding(name))
-            result.removeBinding(name);
-        result.addBinding(name, val);
-    }
-
-}
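
The trickiest part of the file above is populateBindingSets, which grows the
running list of partial solutions so that every statement matching a predicate
yields its own copy of each partial binding set, in effect a cross product.
A minimal sketch of that idea, using plain String maps in place of Sesame's
QueryBindingSet (illustration only, not part of the commit):

import java.util.ArrayList;
import java.util.LinkedHashMap;
import java.util.List;
import java.util.Map;

public class CrossProductSketch {

    /**
     * For each existing partial solution, emit one copy per candidate value of
     * the new variable: the cross product that populateBindingSets builds with
     * its index bookkeeping and result-list copying.
     */
    static List<Map<String, String>> bindAll(List<Map<String, String>> partials,
                                             String var, List<String> candidates) {
        List<Map<String, String>> out = new ArrayList<>();
        for (Map<String, String> partial : partials) {
            for (String value : candidates) {
                Map<String, String> copy = new LinkedHashMap<>(partial);
                copy.put(var, value); // overwrite, mirroring addBinding's remove-then-add
                out.add(copy);
            }
        }
        return out;
    }

    public static void main(String[] args) {
        List<Map<String, String>> solutions = new ArrayList<>();
        solutions.add(new LinkedHashMap<>()); // start with one empty binding set
        solutions = bindAll(solutions, "name", List.of("alice", "bob"));
        solutions = bindAll(solutions, "age", List.of("30"));
        System.out.println(solutions);
        // [{name=alice, age=30}, {name=bob, age=30}]
    }
}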

http://git-wip-us.apache.org/repos/asf/incubator-rya/blob/80faf06d/partition/partition.rdf/src/main/java/mvm/mmrts/rdf/partition/query/evaluation/select/SubjectSelectIterator.java
----------------------------------------------------------------------
diff --git a/partition/partition.rdf/src/main/java/mvm/mmrts/rdf/partition/query/evaluation/select/SubjectSelectIterator.java b/partition/partition.rdf/src/main/java/mvm/mmrts/rdf/partition/query/evaluation/select/SubjectSelectIterator.java
deleted file mode 100644
index fe0fca2..0000000
--- a/partition/partition.rdf/src/main/java/mvm/mmrts/rdf/partition/query/evaluation/select/SubjectSelectIterator.java
+++ /dev/null
@@ -1,40 +0,0 @@
-//package mvm.mmrts.rdf.partition.query.evaluation.select;
-//
-//import cloudbase.core.data.Key;
-//import cloudbase.core.data.Value;
-//import org.openrdf.model.Statement;
-//import org.openrdf.query.BindingSet;
-//import org.openrdf.query.QueryEvaluationException;
-//import org.openrdf.query.algebra.Var;
-//import org.openrdf.query.algebra.evaluation.QueryBindingSet;
-//
-//import java.util.Iterator;
-//import java.util.List;
-//import java.util.Map;
-//
-///**
-// * Class SubjectSelectIterator
-// * Date: Jul 18, 2011
-// * Time: 3:38:16 PM
-// */
-//public class SubjectSelectIterator extends SelectIterator {
-//
-//    private Var subjVar;
-//    private List<Map.Entry<Var, Var>> select;
-//
-//    public SubjectSelectIterator(BindingSet bindings, Iterator<Map.Entry<Key, Value>> iter, Var subjVar, List<Map.Entry<Var, Var>> select) {
-//        super(bindings, iter);
-//        this.subjVar = subjVar;
-//        this.select = select;
-//    }
-//
-//    @Override
-//    public BindingSet next() throws QueryEvaluationException {
-//        List<Statement> document = nextDocument();
-//        if(document.size() != 6) {
-//            System.out.println("here");
-//        }
-//        return populateBindingSet(document, subjVar, this.select);
-//
-//    }
-//}

http://git-wip-us.apache.org/repos/asf/incubator-rya/blob/80faf06d/partition/partition.rdf/src/main/java/mvm/mmrts/rdf/partition/query/evaluation/select/utils/DocumentIterator.java
----------------------------------------------------------------------
diff --git a/partition/partition.rdf/src/main/java/mvm/mmrts/rdf/partition/query/evaluation/select/utils/DocumentIterator.java b/partition/partition.rdf/src/main/java/mvm/mmrts/rdf/partition/query/evaluation/select/utils/DocumentIterator.java
deleted file mode 100644
index f1e6c74..0000000
--- a/partition/partition.rdf/src/main/java/mvm/mmrts/rdf/partition/query/evaluation/select/utils/DocumentIterator.java
+++ /dev/null
@@ -1,107 +0,0 @@
-package mvm.mmrts.rdf.partition.query.evaluation.select.utils;
-
-import cloudbase.core.data.Key;
-import cloudbase.core.data.Value;
-import com.google.common.collect.ArrayListMultimap;
-import com.google.common.collect.ListMultimap;
-import com.google.common.io.ByteStreams;
-import org.openrdf.model.Resource;
-import org.openrdf.model.Statement;
-
-import java.util.*;
-
-import static mvm.mmrts.rdf.partition.PartitionConstants.DOC;
-import static mvm.mmrts.rdf.partition.PartitionConstants.VALUE_FACTORY;
-import static mvm.mmrts.rdf.partition.utils.RdfIO.readStatement;
-
-/**
- * This iterator will seek forward in the underlying BatchScanner Iterator and group
- * statements with the same subject.  This guards against the fact that the BatchScanner can return
- * statements out of order.
- * <br/>
- * TODO: Not the best solution.
- * Class DocumentIterator
- * Date: Aug 29, 2011
- * Time: 4:09:16 PM
- */
-public class DocumentIterator implements Iterator<List<Statement>> {
-
-    public static final int BATCH_SIZE = 1000;
-
-    private int batchSize = BATCH_SIZE; //will hold up to 100 subject documents
-    /**
-     * TODO: Check performance against other multi maps
-     */
-    private ListMultimap<Resource, Statement> documents = ArrayListMultimap.create();
-    //TODO: Hate having to keep track of this, expensive to constantly check the "contains"
-    /**
-     * We keep track of a queue of subjects, so that the first one in will most likely have all of its document
-     * in our batch before popping. This assumes also that the documents won't get larger than 1000 at the most.
-     */
-    private LinkedList<Resource> subjects = new LinkedList<Resource>();
-
-    private Iterator<Map.Entry<Key, Value>> iter;
-    private boolean hasNext = true;
-
-    public DocumentIterator(Iterator<Map.Entry<Key, Value>> iter) {
-        this(iter, BATCH_SIZE);
-    }
-
-    public DocumentIterator(Iterator<Map.Entry<Key, Value>> iter, int batchSize) {
-        this.iter = iter;
-        this.batchSize = batchSize;
-        fillDocumentMap();
-    }
-
-    protected void fillDocumentMap() {
-        try {
-            while ((documents.size() < batchSize) && statefulHasNext()) {
-                Statement stmt = nextStatement();
-                Resource subj = stmt.getSubject();
-                documents.put(subj, stmt);
-                if (!subjects.contains(subj))
-                    subjects.add(subj);
-            }
-        } catch (Exception e) {
-            throw new RuntimeException(e);
-        }
-    }
-
-    protected boolean statefulHasNext() {
-        hasNext = iter.hasNext() && hasNext;
-        return hasNext;
-    }
-
-    protected Statement nextStatement() throws Exception {
-        Map.Entry<Key, Value> entry = iter.next();
-        Key key = entry.getKey();
-        if (DOC.equals(key.getColumnFamily()))
-            return readStatement(ByteStreams.newDataInput(key.getColumnQualifier().getBytes()), VALUE_FACTORY);
-        else
-            return readStatement(ByteStreams.newDataInput(key.getColumnQualifier().getBytes()), VALUE_FACTORY, false);
-    }
-
-    @Override
-    public boolean hasNext() {
-        fillDocumentMap();
-        return documents.size() > 0;
-    }
-
-    @Override
-    public List<Statement> next() {
-        fillDocumentMap();
-        if (subjects.size() > 0) {
-            Resource subject = subjects.pop();
-            subjects.remove(subject);
-            List<Statement> doc = documents.removeAll(subject);
-            System.out.println(doc);
-            return doc;
-        }
-        return null;
-    }
-
-    @Override
-    public void remove() {
-        this.next();
-    }
-}
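
The class comment above describes the whole trick: buffer a batch of scanner
entries and regroup them by subject, because a BatchScanner returns rows out
of order. A minimal sketch of that regrouping, with String pairs standing in
for Key/Value entries and a LinkedHashMap playing the role of the
ListMultimap plus the subject queue (illustration only, not part of the
commit):

import java.util.ArrayList;
import java.util.LinkedHashMap;
import java.util.List;
import java.util.Map;

public class GroupBySubjectSketch {

    /**
     * Buffers a batch of (subject, statement) pairs and emits per-subject
     * documents in first-seen order, repairing the unordered scanner output.
     */
    static Map<String, List<String>> group(List<String[]> entries) {
        Map<String, List<String>> docs = new LinkedHashMap<>(); // first-seen order
        for (String[] e : entries) {
            docs.computeIfAbsent(e[0], k -> new ArrayList<>()).add(e[1]);
        }
        return docs;
    }

    public static void main(String[] args) {
        List<String[]> scan = List.of(
                new String[]{"s1", "s1 p1 o1"},
                new String[]{"s2", "s2 p1 o2"},
                new String[]{"s1", "s1 p2 o3"}); // s1 rows arrive split apart
        System.out.println(group(scan));
        // {s1=[s1 p1 o1, s1 p2 o3], s2=[s2 p1 o2]}
    }
}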

http://git-wip-us.apache.org/repos/asf/incubator-rya/blob/80faf06d/partition/partition.rdf/src/main/java/mvm/mmrts/rdf/partition/query/operators/ShardSubjectLookup.java
----------------------------------------------------------------------
diff --git a/partition/partition.rdf/src/main/java/mvm/mmrts/rdf/partition/query/operators/ShardSubjectLookup.java b/partition/partition.rdf/src/main/java/mvm/mmrts/rdf/partition/query/operators/ShardSubjectLookup.java
deleted file mode 100644
index 378606c..0000000
--- a/partition/partition.rdf/src/main/java/mvm/mmrts/rdf/partition/query/operators/ShardSubjectLookup.java
+++ /dev/null
@@ -1,167 +0,0 @@
-package mvm.mmrts.rdf.partition.query.operators;
-
-import mvm.mmrts.rdf.partition.PartitionConstants;
-import org.openrdf.query.algebra.QueryModelNodeBase;
-import org.openrdf.query.algebra.QueryModelVisitor;
-import org.openrdf.query.algebra.TupleExpr;
-import org.openrdf.query.algebra.Var;
-
-import java.util.*;
-
-/**
- * Here the subject is not null, but there will be a list of
- * predicate/object paired vars that may or may not be null
- * <p/>
- * Class ShardSubjectLookup
- * Date: Jul 14, 2011
- * Time: 3:32:33 PM
- */
-public class ShardSubjectLookup extends QueryModelNodeBase implements TupleExpr {
-
-    private Var subject;
-    private List<Map.Entry<Var, Var>> predicateObjectPairs;
-
-    private String timePredicate;
-    private String startTimeRange;
-    private String endTimeRange;
-    private String shardStartTimeRange;
-    private String shardEndTimeRange;
-    private PartitionConstants.TimeType timeType;
-
-    public ShardSubjectLookup(Var subject) {
-        this(subject, new ArrayList<Map.Entry<Var, Var>>());
-    }
-
-    public ShardSubjectLookup(Var subject, List<Map.Entry<Var, Var>> predicateObjectPairs) {
-        this.subject = subject.clone();
-        this.predicateObjectPairs = new ArrayList<Map.Entry<Var, Var>>(predicateObjectPairs);
-    }
-
-    @Override
-    public <X extends Exception> void visit(QueryModelVisitor<X> visitor) throws X {
-        visitor.meetOther(this);
-    }
-
-    @Override
-    public <X extends Exception> void visitChildren(QueryModelVisitor<X> visitor) throws X {
-        visitor.meet(subject);
-        for (Map.Entry<Var, Var> predObj : predicateObjectPairs) {
-            visitor.meet(predObj.getKey());
-            visitor.meet(predObj.getValue());
-        }
-    }
-
-    @Override
-    public Set<String> getBindingNames() {
-        return getAssuredBindingNames();
-    }
-
-    @Override
-    public Set<String> getAssuredBindingNames() {
-        Set<String> bindingNames = new HashSet<String>(8);
-
-        if (subject != null) {
-            bindingNames.add(subject.getName());
-        }
-        for (Map.Entry<Var, Var> predObj : predicateObjectPairs) {
-            bindingNames.add(predObj.getKey().getName());
-            bindingNames.add(predObj.getValue().getName());
-        }
-
-        return bindingNames;
-    }
-
-    public void addPredicateObjectPair(Var predicate, Var object) {
-        this.predicateObjectPairs.add(new HashMap.SimpleEntry<Var, Var>(predicate, object));
-    }
-
-    public Var getSubject() {
-        return subject;
-    }
-
-    public void setSubject(Var subject) {
-        this.subject = subject;
-    }
-
-    public List<Map.Entry<Var, Var>> getPredicateObjectPairs() {
-        return predicateObjectPairs;
-    }
-
-    public void setPredicateObjectPairs(List<Map.Entry<Var, Var>> predicateObjectPairs) {
-        this.predicateObjectPairs = predicateObjectPairs;
-    }
-
-    public String getEndTimeRange() {
-        return endTimeRange;
-    }
-
-    public void setEndTimeRange(String endTimeRange) {
-        this.endTimeRange = endTimeRange;
-    }
-
-    public String getStartTimeRange() {
-        return startTimeRange;
-    }
-
-    public void setStartTimeRange(String startTimeRange) {
-        this.startTimeRange = startTimeRange;
-    }
-
-    public String getTimePredicate() {
-        return timePredicate;
-    }
-
-    public void setTimePredicate(String timePredicate) {
-        this.timePredicate = timePredicate;
-    }
-
-    public PartitionConstants.TimeType getTimeType() {
-        return timeType;
-    }
-
-    public void setTimeType(PartitionConstants.TimeType timeType) {
-        this.timeType = timeType;
-    }
-
-    public String getShardStartTimeRange() {
-        return shardStartTimeRange;
-    }
-
-    public void setShardStartTimeRange(String shardStartTimeRange) {
-        this.shardStartTimeRange = shardStartTimeRange;
-    }
-
-    public String getShardEndTimeRange() {
-        return shardEndTimeRange;
-    }
-
-    public void setShardEndTimeRange(String shardEndTimeRange) {
-        this.shardEndTimeRange = shardEndTimeRange;
-    }
-
-    public ShardSubjectLookup clone() {
-        return (ShardSubjectLookup) super.clone();
-    }
-
-    @Override
-    public boolean equals(Object other) {
-        return other instanceof ShardSubjectLookup && super.equals(other);
-    }
-
-    @Override
-    public int hashCode() {
-        return super.hashCode() ^ "ShardSubjectLookup".hashCode();
-    }
-
-    @Override
-    public String toString() {
-        return "ShardSubjectLookup{" +
-                "subject=" + subject +
-                ", predicateObjectPairs=" + predicateObjectPairs +
-                ", timePredicate='" + timePredicate + '\'' +
-                ", startTimeRange='" + startTimeRange + '\'' +
-                ", endTimeRange='" + endTimeRange + '\'' +
-                ", timeType=" + timeType +
-                '}';
-    }
-}
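
For context, ShardSubjectLookup collapsed a SPARQL star pattern, one subject
with several predicate/object pairs, into a single query node that the
partition layer could answer with one shard scan. A hypothetical construction
sketch, assuming the constructors and accessors shown in the removed file (it
would only compile against the pre-removal tree):

import mvm.mmrts.rdf.partition.query.operators.ShardSubjectLookup;
import org.openrdf.model.ValueFactory;
import org.openrdf.model.impl.ValueFactoryImpl;
import org.openrdf.query.algebra.Var;

public class LookupSketch {
    public static void main(String[] args) {
        ValueFactory vf = ValueFactoryImpl.getInstance();
        // Star pattern: ?s <urn:pred:name> ?name . ?s <urn:pred:age> ?age .
        ShardSubjectLookup lookup = new ShardSubjectLookup(new Var("s"));
        lookup.addPredicateObjectPair(
                new Var("p0", vf.createURI("urn:pred:name")), new Var("name"));
        lookup.addPredicateObjectPair(
                new Var("p1", vf.createURI("urn:pred:age")), new Var("age"));
        // One lookup node now stands in for two statement patterns.
        System.out.println(lookup.getBindingNames()); // s, p0, name, p1, age (unordered)
    }
}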

http://git-wip-us.apache.org/repos/asf/incubator-rya/blob/80faf06d/partition/partition.rdf/src/main/java/mvm/mmrts/rdf/partition/shard/DateHashModShardValueGenerator.java
----------------------------------------------------------------------
diff --git a/partition/partition.rdf/src/main/java/mvm/mmrts/rdf/partition/shard/DateHashModShardValueGenerator.java b/partition/partition.rdf/src/main/java/mvm/mmrts/rdf/partition/shard/DateHashModShardValueGenerator.java
deleted file mode 100644
index 304fadf..0000000
--- a/partition/partition.rdf/src/main/java/mvm/mmrts/rdf/partition/shard/DateHashModShardValueGenerator.java
+++ /dev/null
@@ -1,52 +0,0 @@
-package mvm.mmrts.rdf.partition.shard;
-
-import java.text.SimpleDateFormat;
-import java.util.Date;
-
-/**
- * Class DateHashModShardValueGenerator
- * Date: Jul 6, 2011
- * Time: 6:29:50 PM
- */
-public class DateHashModShardValueGenerator implements ShardValueGenerator {
-
-    protected int baseMod = 50;
-
-    protected SimpleDateFormat format = new SimpleDateFormat("yyyyMMdd");
-    private static final String DATE_SHARD_DELIM = "_";
-
-    public DateHashModShardValueGenerator() {
-    }
-
-    public DateHashModShardValueGenerator(SimpleDateFormat format, int baseMod) {
-        this.baseMod = baseMod;
-        this.format = format;
-    }
-
-    @Override
-    public String generateShardValue(Object obj) {
-        return this.generateShardValue(System.currentTimeMillis(), obj);
-    }
-
-    public String generateShardValue(Long date, Object obj) {
-        if (obj == null)
-            return format.format(new Date(date));
-        return format.format(new Date(date)) + DATE_SHARD_DELIM + (Math.abs(obj.hashCode() % baseMod));
-    }
-
-    public int getBaseMod() {
-        return baseMod;
-    }
-
-    public void setBaseMod(int baseMod) {
-        this.baseMod = baseMod;
-    }
-
-    public SimpleDateFormat getFormat() {
-        return format;
-    }
-
-    public void setFormat(SimpleDateFormat format) {
-        this.format = format;
-    }
-}
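
The generator above encodes its whole policy in one expression: a shard id of
the form yyyyMMdd_N, where N is abs(hashCode % baseMod), so each day's data
lands in at most baseMod buckets (50 by default). A standalone restatement of
that formula, with a fixed timestamp so the output is reproducible
(illustration only, not part of the commit):

import java.text.SimpleDateFormat;
import java.util.Date;

public class ShardValueExample {
    public static void main(String[] args) {
        SimpleDateFormat format = new SimpleDateFormat("yyyyMMdd");
        int baseMod = 50;
        long timestamp = 1309999999000L; // arbitrary fixed instant in July 2011
        Object subject = "urn:subject:1234";
        String shard = format.format(new Date(timestamp))
                + "_" + Math.abs(subject.hashCode() % baseMod);
        // Prints something like 20110706_17: date bucket, then hash bucket 0..49.
        System.out.println(shard);
    }
}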

http://git-wip-us.apache.org/repos/asf/incubator-rya/blob/80faf06d/partition/partition.rdf/src/main/java/mvm/mmrts/rdf/partition/shard/ShardValueGenerator.java
----------------------------------------------------------------------
diff --git a/partition/partition.rdf/src/main/java/mvm/mmrts/rdf/partition/shard/ShardValueGenerator.java b/partition/partition.rdf/src/main/java/mvm/mmrts/rdf/partition/shard/ShardValueGenerator.java
deleted file mode 100644
index 2b1c296..0000000
--- a/partition/partition.rdf/src/main/java/mvm/mmrts/rdf/partition/shard/ShardValueGenerator.java
+++ /dev/null
@@ -1,12 +0,0 @@
-package mvm.mmrts.rdf.partition.shard;
-
-/**
- * Interface ShardValueGenerator
- * Date: Jul 6, 2011
- * Time: 6:29:08 PM
- */
-public interface ShardValueGenerator {
-
-    public String generateShardValue(Object obj);
-
-}

http://git-wip-us.apache.org/repos/asf/incubator-rya/blob/80faf06d/partition/partition.rdf/src/main/java/mvm/mmrts/rdf/partition/utils/ContextsStatementImpl.java
----------------------------------------------------------------------
diff --git a/partition/partition.rdf/src/main/java/mvm/mmrts/rdf/partition/utils/ContextsStatementImpl.java b/partition/partition.rdf/src/main/java/mvm/mmrts/rdf/partition/utils/ContextsStatementImpl.java
deleted file mode 100644
index 966f546..0000000
--- a/partition/partition.rdf/src/main/java/mvm/mmrts/rdf/partition/utils/ContextsStatementImpl.java
+++ /dev/null
@@ -1,30 +0,0 @@
-package mvm.mmrts.rdf.partition.utils;
-
-import org.openrdf.model.Resource;
-import org.openrdf.model.URI;
-import org.openrdf.model.Value;
-import org.openrdf.model.impl.StatementImpl;
-
-/**
- * Class ContextsStatementImpl
- * Date: Aug 5, 2011
- * Time: 7:48:56 AM
- */
-public class ContextsStatementImpl extends StatementImpl {
-    private Resource[] contexts;
-
-    public ContextsStatementImpl(Resource subject, URI predicate, Value object, Resource... contexts) {
-        super(subject, predicate, object);
-        this.contexts = contexts;
-    }
-
-    public Resource[] getContexts() {
-        return contexts;
-    }
-
-    @Override
-    public Resource getContext() {
-        //return first context in array
-        return (contexts != null && contexts.length > 0) ? contexts[0] : null;
-    }
-}

http://git-wip-us.apache.org/repos/asf/incubator-rya/blob/80faf06d/partition/partition.rdf/src/main/java/mvm/mmrts/rdf/partition/utils/CountPredObjPairs.java
----------------------------------------------------------------------
diff --git a/partition/partition.rdf/src/main/java/mvm/mmrts/rdf/partition/utils/CountPredObjPairs.java b/partition/partition.rdf/src/main/java/mvm/mmrts/rdf/partition/utils/CountPredObjPairs.java
deleted file mode 100644
index 2b83c6b..0000000
--- a/partition/partition.rdf/src/main/java/mvm/mmrts/rdf/partition/utils/CountPredObjPairs.java
+++ /dev/null
@@ -1,39 +0,0 @@
-package mvm.mmrts.rdf.partition.utils;
-
-import mvm.mmrts.rdf.partition.query.operators.ShardSubjectLookup;
-import org.openrdf.query.algebra.StatementPattern;
-import org.openrdf.query.algebra.TupleExpr;
-import org.openrdf.query.algebra.Var;
-
-import java.util.List;
-import java.util.Map;
-
-/**
- * Class CountPredObjPairs
- * Date: Apr 12, 2011
- * Time: 1:31:05 PM
- */
-public class CountPredObjPairs {
-
-    public CountPredObjPairs() {
-    }
-
-    public double getCount(TupleExpr expr) {
-        int count = 100;
-        if (expr instanceof ShardSubjectLookup) {
-            ShardSubjectLookup lookup = (ShardSubjectLookup) expr;
-            List<Map.Entry<Var, Var>> entries = lookup.getPredicateObjectPairs();
-            count -= (lookup.getSubject().hasValue()) ? 1 : 0;
-            count -= (lookup.getTimePredicate() != null) ? 1 : 0;
-            for (Map.Entry<Var, Var> entry : entries) {
-                count -= (entry.getValue().hasValue() && entry.getKey().hasValue()) ? 1 : 0;
-            }
-        } else if (expr instanceof StatementPattern) {
-            StatementPattern sp = (StatementPattern) expr;
-            count -= (sp.getSubjectVar().hasValue()) ? 1 : 0;
-            count -= (sp.getPredicateVar().hasValue() && sp.getObjectVar().hasValue()) ? 1 : 0;
-        }
-        return count;
-    }
-
-}

http://git-wip-us.apache.org/repos/asf/incubator-rya/blob/80faf06d/partition/partition.rdf/src/main/java/mvm/mmrts/rdf/partition/utils/PartitionUtils.java
----------------------------------------------------------------------
diff --git a/partition/partition.rdf/src/main/java/mvm/mmrts/rdf/partition/utils/PartitionUtils.java b/partition/partition.rdf/src/main/java/mvm/mmrts/rdf/partition/utils/PartitionUtils.java
deleted file mode 100644
index 3e3b024..0000000
--- a/partition/partition.rdf/src/main/java/mvm/mmrts/rdf/partition/utils/PartitionUtils.java
+++ /dev/null
@@ -1,9 +0,0 @@
-package mvm.mmrts.rdf.partition.utils;
-
-/**
- * Class PartitionUtils
- * Date: Jul 6, 2011
- * Time: 11:49:11 AM
- */
-public class PartitionUtils {
-}



http://git-wip-us.apache.org/repos/asf/incubator-rya/blob/80faf06d/pig/accumulo.pig/pom.xml
----------------------------------------------------------------------
diff --git a/pig/accumulo.pig/pom.xml b/pig/accumulo.pig/pom.xml
index 998d1ca..1a1e5f9 100644
--- a/pig/accumulo.pig/pom.xml
+++ b/pig/accumulo.pig/pom.xml
@@ -1,56 +1,50 @@
 <?xml version="1.0" encoding="UTF-8"?>
+
+<!--
+Licensed to the Apache Software Foundation (ASF) under one
+or more contributor license agreements.  See the NOTICE file
+distributed with this work for additional information
+regarding copyright ownership.  The ASF licenses this file
+to you under the Apache License, Version 2.0 (the
+"License"); you may not use this file except in compliance
+with the License.  You may obtain a copy of the License at
+
+  http://www.apache.org/licenses/LICENSE-2.0
+
+Unless required by applicable law or agreed to in writing,
+software distributed under the License is distributed on an
+"AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+KIND, either express or implied.  See the License for the
+specific language governing permissions and limitations
+under the License.
+-->
+
 <project xmlns="http://maven.apache.org/POM/4.0.0" xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance" xsi:schemaLocation="http://maven.apache.org/POM/4.0.0 http://maven.apache.org/xsd/maven-4.0.0.xsd">
+    <modelVersion>4.0.0</modelVersion>
     <parent>
-        <groupId>mvm.rya</groupId>
+        <groupId>org.apache.rya</groupId>
         <artifactId>rya.pig</artifactId>
         <version>3.2.10-SNAPSHOT</version>
     </parent>
-    <modelVersion>4.0.0</modelVersion>
 
     <artifactId>accumulo.pig</artifactId>
-    <name>${project.groupId}.${project.artifactId}</name>
+    <name>Apache Rya Accumulo Pig</name>
+
     <dependencies>
         <dependency>
-            <groupId>org.apache.zookeeper</groupId>
-            <artifactId>zookeeper</artifactId>
-	           <exclusions>
-                <!-- the log4j that comes with zookeeper 3.3.5 has some bad dependencies -->
-                <exclusion>
-                    <groupId>javax.jms</groupId>
-                    <artifactId>jms</artifactId>
-                </exclusion>
-                <exclusion>
-                    <groupId>com.sun.jdmk</groupId>
-                    <artifactId>jmxtools</artifactId>
-                </exclusion>
-                <exclusion>
-                    <groupId>com.sun.jmx</groupId>
-                    <artifactId>jmxri</artifactId>
-                </exclusion>
-            </exclusions>
+            <groupId>org.apache.rya</groupId>
+            <artifactId>rya.sail</artifactId>
         </dependency>
         <dependency>
-            <groupId>junit</groupId>
-            <artifactId>junit</artifactId>
-            <scope>test</scope>
-        </dependency>
-        <dependency>
-            <groupId>mvm.rya</groupId>
-            <artifactId>rya.sail.impl</artifactId>
-        </dependency>
-        <dependency>
-            <groupId>mvm.rya</groupId>
+            <groupId>org.apache.rya</groupId>
             <artifactId>accumulo.rya</artifactId>
         </dependency>
+
         <dependency>
             <groupId>org.openrdf.sesame</groupId>
             <artifactId>sesame-queryparser-sparql</artifactId>
         </dependency>
-        <dependency>
-            <groupId>org.apache.hadoop</groupId>
-            <artifactId>hadoop-common</artifactId>
-            <scope>provided</scope>
-        </dependency>
+
         <dependency>
             <groupId>org.apache.pig</groupId>
             <artifactId>pig</artifactId>
@@ -61,43 +55,30 @@
             <artifactId>antlr-runtime</artifactId>
             <scope>provided</scope>
         </dependency>
+
+        <dependency>
+            <groupId>junit</groupId>
+            <artifactId>junit</artifactId>
+            <scope>test</scope>
+        </dependency>
     </dependencies>
     <build>
         <plugins>
             <plugin>
+                <groupId>org.apache.rat</groupId>
+                <artifactId>apache-rat-plugin</artifactId>
+                <configuration>
+                    <excludes combine.children="append">
+                        <exclude>src/test/resources/ResultsFile1.txt</exclude>
+                        <exclude>src/test/resources/testQuery.txt</exclude>
+                        <exclude>src/test/resources/testQuery2.txt</exclude>
+                    </excludes>
+                </configuration>
+            </plugin>
+            <plugin>
                 <groupId>org.apache.maven.plugins</groupId>
                 <artifactId>maven-shade-plugin</artifactId>
             </plugin>
         </plugins>
     </build>
-
-    <profiles>
-        <profile>
-            <id>accumulo</id>
-            <activation>
-                <activeByDefault>true</activeByDefault>
-            </activation>
-            <dependencies>
-                <dependency>
-                    <groupId>org.apache.accumulo</groupId>
-                    <artifactId>accumulo-core</artifactId>
-                    <optional>true</optional>
-                </dependency>
-            </dependencies>
-        </profile>
-        <profile>
-            <id>cloudbase</id>
-            <activation>
-                <activeByDefault>false</activeByDefault>
-            </activation>
-            <dependencies>
-                <dependency>
-                    <groupId>com.texeltek</groupId>
-                    <artifactId>accumulo-cloudbase-shim</artifactId>
-                    <optional>true</optional>
-                </dependency>
-            </dependencies>
-        </profile>
-    </profiles>
-
 </project>

http://git-wip-us.apache.org/repos/asf/incubator-rya/blob/80faf06d/pig/accumulo.pig/src/main/java/mvm/rya/accumulo/pig/AccumuloStorage.java
----------------------------------------------------------------------
diff --git a/pig/accumulo.pig/src/main/java/mvm/rya/accumulo/pig/AccumuloStorage.java b/pig/accumulo.pig/src/main/java/mvm/rya/accumulo/pig/AccumuloStorage.java
index 6ffedfe..054146d 100644
--- a/pig/accumulo.pig/src/main/java/mvm/rya/accumulo/pig/AccumuloStorage.java
+++ b/pig/accumulo.pig/src/main/java/mvm/rya/accumulo/pig/AccumuloStorage.java
@@ -1,25 +1,26 @@
 package mvm.rya.accumulo.pig;
 
 /*
- * #%L
- * mvm.rya.accumulo.pig
- * %%
- * Copyright (C) 2014 Rya
- * %%
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * You may obtain a copy of the License at
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
  * 
- *      http://www.apache.org/licenses/LICENSE-2.0
+ *   http://www.apache.org/licenses/LICENSE-2.0
  * 
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- * #L%
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied.  See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
  */
 
+
+
 import java.io.ByteArrayInputStream;
 import java.io.ByteArrayOutputStream;
 import java.io.DataInputStream;
@@ -379,4 +380,4 @@ public class AccumuloStorage extends LoadFunc implements StoreFuncInterface, Ord
         stream.close();
         return range;
     }
-}
\ No newline at end of file
+}

http://git-wip-us.apache.org/repos/asf/incubator-rya/blob/80faf06d/pig/accumulo.pig/src/main/java/mvm/rya/accumulo/pig/IndexWritingTool.java
----------------------------------------------------------------------
diff --git a/pig/accumulo.pig/src/main/java/mvm/rya/accumulo/pig/IndexWritingTool.java b/pig/accumulo.pig/src/main/java/mvm/rya/accumulo/pig/IndexWritingTool.java
index d5c289d..392c108 100644
--- a/pig/accumulo.pig/src/main/java/mvm/rya/accumulo/pig/IndexWritingTool.java
+++ b/pig/accumulo.pig/src/main/java/mvm/rya/accumulo/pig/IndexWritingTool.java
@@ -1,25 +1,26 @@
 package mvm.rya.accumulo.pig;
 
 /*
- * #%L
- * mvm.rya.accumulo.pig
- * %%
- * Copyright (C) 2014 Rya
- * %%
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * You may obtain a copy of the License at
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
  * 
- *      http://www.apache.org/licenses/LICENSE-2.0
+ *   http://www.apache.org/licenses/LICENSE-2.0
  * 
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- * #L%
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied.  See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
  */
 
+
+
 import java.io.File;
 import java.io.IOException;
 import java.util.List;

http://git-wip-us.apache.org/repos/asf/incubator-rya/blob/80faf06d/pig/accumulo.pig/src/main/java/mvm/rya/accumulo/pig/SparqlQueryPigEngine.java
----------------------------------------------------------------------
diff --git a/pig/accumulo.pig/src/main/java/mvm/rya/accumulo/pig/SparqlQueryPigEngine.java b/pig/accumulo.pig/src/main/java/mvm/rya/accumulo/pig/SparqlQueryPigEngine.java
index 10ddf4c..ed8134d 100644
--- a/pig/accumulo.pig/src/main/java/mvm/rya/accumulo/pig/SparqlQueryPigEngine.java
+++ b/pig/accumulo.pig/src/main/java/mvm/rya/accumulo/pig/SparqlQueryPigEngine.java
@@ -1,25 +1,26 @@
 package mvm.rya.accumulo.pig;
 
 /*
- * #%L
- * mvm.rya.accumulo.pig
- * %%
- * Copyright (C) 2014 Rya
- * %%
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * You may obtain a copy of the License at
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
  * 
- *      http://www.apache.org/licenses/LICENSE-2.0
+ *   http://www.apache.org/licenses/LICENSE-2.0
  * 
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- * #L%
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied.  See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
  */
 
+
+
 import com.google.common.base.Preconditions;
 import com.google.common.io.ByteStreams;
 import mvm.rya.accumulo.AccumuloRdfConfiguration;

http://git-wip-us.apache.org/repos/asf/incubator-rya/blob/80faf06d/pig/accumulo.pig/src/main/java/mvm/rya/accumulo/pig/SparqlToPigTransformVisitor.java
----------------------------------------------------------------------
diff --git a/pig/accumulo.pig/src/main/java/mvm/rya/accumulo/pig/SparqlToPigTransformVisitor.java b/pig/accumulo.pig/src/main/java/mvm/rya/accumulo/pig/SparqlToPigTransformVisitor.java
index b2d6886..38d8adb 100644
--- a/pig/accumulo.pig/src/main/java/mvm/rya/accumulo/pig/SparqlToPigTransformVisitor.java
+++ b/pig/accumulo.pig/src/main/java/mvm/rya/accumulo/pig/SparqlToPigTransformVisitor.java
@@ -1,25 +1,26 @@
 package mvm.rya.accumulo.pig;
 
 /*
- * #%L
- * mvm.rya.accumulo.pig
- * %%
- * Copyright (C) 2014 Rya
- * %%
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * You may obtain a copy of the License at
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
  * 
- *      http://www.apache.org/licenses/LICENSE-2.0
+ *   http://www.apache.org/licenses/LICENSE-2.0
  * 
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- * #L%
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied.  See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
  */
 
+
+
 import org.openrdf.model.Literal;
 import org.openrdf.model.URI;
 import org.openrdf.model.Value;

http://git-wip-us.apache.org/repos/asf/incubator-rya/blob/80faf06d/pig/accumulo.pig/src/main/java/mvm/rya/accumulo/pig/StatementPatternStorage.java
----------------------------------------------------------------------
diff --git a/pig/accumulo.pig/src/main/java/mvm/rya/accumulo/pig/StatementPatternStorage.java b/pig/accumulo.pig/src/main/java/mvm/rya/accumulo/pig/StatementPatternStorage.java
index 86d9356..9ec9d45 100644
--- a/pig/accumulo.pig/src/main/java/mvm/rya/accumulo/pig/StatementPatternStorage.java
+++ b/pig/accumulo.pig/src/main/java/mvm/rya/accumulo/pig/StatementPatternStorage.java
@@ -1,25 +1,26 @@
 package mvm.rya.accumulo.pig;
 
 /*
- * #%L
- * mvm.rya.accumulo.pig
- * %%
- * Copyright (C) 2014 Rya
- * %%
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * You may obtain a copy of the License at
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
  * 
- *      http://www.apache.org/licenses/LICENSE-2.0
+ *   http://www.apache.org/licenses/LICENSE-2.0
  * 
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- * #L%
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied.  See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
  */
 
+
+
 import java.io.IOException;
 import java.util.Collection;
 import java.util.Map;

http://git-wip-us.apache.org/repos/asf/incubator-rya/blob/80faf06d/pig/accumulo.pig/src/main/java/mvm/rya/accumulo/pig/optimizer/SimilarVarJoinOptimizer.java
----------------------------------------------------------------------
diff --git a/pig/accumulo.pig/src/main/java/mvm/rya/accumulo/pig/optimizer/SimilarVarJoinOptimizer.java b/pig/accumulo.pig/src/main/java/mvm/rya/accumulo/pig/optimizer/SimilarVarJoinOptimizer.java
index 181f72b..4b458b6 100644
--- a/pig/accumulo.pig/src/main/java/mvm/rya/accumulo/pig/optimizer/SimilarVarJoinOptimizer.java
+++ b/pig/accumulo.pig/src/main/java/mvm/rya/accumulo/pig/optimizer/SimilarVarJoinOptimizer.java
@@ -1,25 +1,26 @@
 package mvm.rya.accumulo.pig.optimizer;
 
 /*
- * #%L
- * mvm.rya.accumulo.pig
- * %%
- * Copyright (C) 2014 Rya
- * %%
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * You may obtain a copy of the License at
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
  * 
- *      http://www.apache.org/licenses/LICENSE-2.0
+ *   http://www.apache.org/licenses/LICENSE-2.0
  * 
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- * #L%
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied.  See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
  */
 
+
+
 import org.openrdf.query.BindingSet;
 import org.openrdf.query.Dataset;
 import org.openrdf.query.algebra.*;
@@ -206,4 +207,4 @@ public class SimilarVarJoinOptimizer implements QueryOptimizer {
         }
 
     }
-}
\ No newline at end of file
+}

http://git-wip-us.apache.org/repos/asf/incubator-rya/blob/80faf06d/pig/accumulo.pig/src/test/java/mvm/rya/accumulo/pig/AccumuloStorageTest.java
----------------------------------------------------------------------
diff --git a/pig/accumulo.pig/src/test/java/mvm/rya/accumulo/pig/AccumuloStorageTest.java b/pig/accumulo.pig/src/test/java/mvm/rya/accumulo/pig/AccumuloStorageTest.java
index ca52afb..119ccb1 100644
--- a/pig/accumulo.pig/src/test/java/mvm/rya/accumulo/pig/AccumuloStorageTest.java
+++ b/pig/accumulo.pig/src/test/java/mvm/rya/accumulo/pig/AccumuloStorageTest.java
@@ -1,25 +1,26 @@
 package mvm.rya.accumulo.pig;
 
 /*
- * #%L
- * mvm.rya.accumulo.pig
- * %%
- * Copyright (C) 2014 Rya
- * %%
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * You may obtain a copy of the License at
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
  * 
- *      http://www.apache.org/licenses/LICENSE-2.0
+ *   http://www.apache.org/licenses/LICENSE-2.0
  * 
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- * #L%
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied.  See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
  */
 
+
+
 import java.io.IOException;
 import java.util.ArrayList;
 import java.util.List;

http://git-wip-us.apache.org/repos/asf/incubator-rya/blob/80faf06d/pig/accumulo.pig/src/test/java/mvm/rya/accumulo/pig/IndexWritingToolTest.java
----------------------------------------------------------------------
diff --git a/pig/accumulo.pig/src/test/java/mvm/rya/accumulo/pig/IndexWritingToolTest.java b/pig/accumulo.pig/src/test/java/mvm/rya/accumulo/pig/IndexWritingToolTest.java
index 33ed54b..02a6f84 100644
--- a/pig/accumulo.pig/src/test/java/mvm/rya/accumulo/pig/IndexWritingToolTest.java
+++ b/pig/accumulo.pig/src/test/java/mvm/rya/accumulo/pig/IndexWritingToolTest.java
@@ -1,5 +1,25 @@
 package mvm.rya.accumulo.pig;
 
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ * 
+ *   http://www.apache.org/licenses/LICENSE-2.0
+ * 
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied.  See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+
+
 import java.io.File;
 import java.io.IOException;
 import java.util.Map;

http://git-wip-us.apache.org/repos/asf/incubator-rya/blob/80faf06d/pig/accumulo.pig/src/test/java/mvm/rya/accumulo/pig/SparqlQueryPigEngineTest.java
----------------------------------------------------------------------
diff --git a/pig/accumulo.pig/src/test/java/mvm/rya/accumulo/pig/SparqlQueryPigEngineTest.java b/pig/accumulo.pig/src/test/java/mvm/rya/accumulo/pig/SparqlQueryPigEngineTest.java
index d6cde67..e4cf10e 100644
--- a/pig/accumulo.pig/src/test/java/mvm/rya/accumulo/pig/SparqlQueryPigEngineTest.java
+++ b/pig/accumulo.pig/src/test/java/mvm/rya/accumulo/pig/SparqlQueryPigEngineTest.java
@@ -1,25 +1,26 @@
 package mvm.rya.accumulo.pig;
 
 /*
- * #%L
- * mvm.rya.accumulo.pig
- * %%
- * Copyright (C) 2014 Rya
- * %%
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * You may obtain a copy of the License at
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
  * 
- *      http://www.apache.org/licenses/LICENSE-2.0
+ *   http://www.apache.org/licenses/LICENSE-2.0
  * 
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- * #L%
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied.  See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
  */
 
+
+
 import junit.framework.TestCase;
 import org.apache.pig.ExecType;
 

http://git-wip-us.apache.org/repos/asf/incubator-rya/blob/80faf06d/pig/accumulo.pig/src/test/java/mvm/rya/accumulo/pig/SparqlToPigTransformVisitorTest.java
----------------------------------------------------------------------
diff --git a/pig/accumulo.pig/src/test/java/mvm/rya/accumulo/pig/SparqlToPigTransformVisitorTest.java b/pig/accumulo.pig/src/test/java/mvm/rya/accumulo/pig/SparqlToPigTransformVisitorTest.java
index 1b0a383..b011a24 100644
--- a/pig/accumulo.pig/src/test/java/mvm/rya/accumulo/pig/SparqlToPigTransformVisitorTest.java
+++ b/pig/accumulo.pig/src/test/java/mvm/rya/accumulo/pig/SparqlToPigTransformVisitorTest.java
@@ -1,25 +1,26 @@
 package mvm.rya.accumulo.pig;
 
 /*
- * #%L
- * mvm.rya.accumulo.pig
- * %%
- * Copyright (C) 2014 Rya
- * %%
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * You may obtain a copy of the License at
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
  * 
- *      http://www.apache.org/licenses/LICENSE-2.0
+ *   http://www.apache.org/licenses/LICENSE-2.0
  * 
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- * #L%
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied.  See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
  */
 
+
+
 import junit.framework.TestCase;
 import mvm.rya.accumulo.pig.optimizer.SimilarVarJoinOptimizer;
 import org.openrdf.query.algebra.QueryRoot;

http://git-wip-us.apache.org/repos/asf/incubator-rya/blob/80faf06d/pig/accumulo.pig/src/test/java/mvm/rya/accumulo/pig/StatementPatternStorageTest.java
----------------------------------------------------------------------
diff --git a/pig/accumulo.pig/src/test/java/mvm/rya/accumulo/pig/StatementPatternStorageTest.java b/pig/accumulo.pig/src/test/java/mvm/rya/accumulo/pig/StatementPatternStorageTest.java
index ea6d438..5bc4a34 100644
--- a/pig/accumulo.pig/src/test/java/mvm/rya/accumulo/pig/StatementPatternStorageTest.java
+++ b/pig/accumulo.pig/src/test/java/mvm/rya/accumulo/pig/StatementPatternStorageTest.java
@@ -1,25 +1,26 @@
 package mvm.rya.accumulo.pig;
 
 /*
- * #%L
- * mvm.rya.accumulo.pig
- * %%
- * Copyright (C) 2014 Rya
- * %%
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * You may obtain a copy of the License at
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
  * 
- *      http://www.apache.org/licenses/LICENSE-2.0
+ *   http://www.apache.org/licenses/LICENSE-2.0
  * 
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- * #L%
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied.  See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
  */
 
+
+
 import java.io.IOException;
 import java.util.ArrayList;
 import java.util.List;

http://git-wip-us.apache.org/repos/asf/incubator-rya/blob/80faf06d/pig/cloudbase.pig/pom.xml
----------------------------------------------------------------------
diff --git a/pig/cloudbase.pig/pom.xml b/pig/cloudbase.pig/pom.xml
deleted file mode 100644
index dfd44cf..0000000
--- a/pig/cloudbase.pig/pom.xml
+++ /dev/null
@@ -1,65 +0,0 @@
-<?xml version="1.0" encoding="UTF-8"?>
-<project xmlns="http://maven.apache.org/POM/4.0.0" xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance" xsi:schemaLocation="http://maven.apache.org/POM/4.0.0 http://maven.apache.org/xsd/maven-4.0.0.xsd">
-    <parent>
-        <groupId>mvm.rya</groupId>
-        <artifactId>rya.pig</artifactId>
-        <version>3.2.10-SNAPSHOT</version>
-    </parent>
-    <modelVersion>4.0.0</modelVersion>
-
-    <artifactId>cloudbase.pig</artifactId>
-    <name>${project.groupId}.${project.artifactId}</name>
-    <dependencies>
-        <dependency>
-            <groupId>cloudbase</groupId>
-            <artifactId>cloudbase-core</artifactId>
-            <exclusions>
-                <exclusion>
-                    <groupId>org.apache.hadoop</groupId>
-                    <artifactId>hadoop-common</artifactId>
-                </exclusion>
-            </exclusions>
-        </dependency>
-        <dependency>
-            <groupId>org.apache.zookeeper</groupId>
-            <artifactId>zookeeper</artifactId>
-        </dependency>
-        <dependency>
-            <groupId>mvm.rya</groupId>
-            <artifactId>rya.sail.impl</artifactId>
-        </dependency>
-        <dependency>
-            <groupId>mvm.rya</groupId>
-            <artifactId>cloudbase.rya</artifactId>
-        </dependency>
-        <dependency>
-            <groupId>org.openrdf.sesame</groupId>
-            <artifactId>sesame-queryparser-sparql</artifactId>
-        </dependency>
-        <dependency>
-            <groupId>org.apache.hadoop</groupId>
-            <artifactId>hadoop-common</artifactId>
-            <scope>provided</scope>
-        </dependency>
-        <dependency>
-            <groupId>org.apache.pig</groupId>
-            <artifactId>pig</artifactId>
-            <scope>provided</scope>
-        </dependency>
-        <dependency>
-            <groupId>org.antlr</groupId>
-            <artifactId>antlr-runtime</artifactId>
-            <scope>provided</scope>
-        </dependency>
-    </dependencies>
-    <build>
-        <plugins>
-            <plugin>
-                <groupId>org.apache.maven.plugins</groupId>
-                <artifactId>maven-shade-plugin</artifactId>
-            </plugin>
-        </plugins>
-    </build>
-
-
-</project>
\ No newline at end of file

http://git-wip-us.apache.org/repos/asf/incubator-rya/blob/80faf06d/pig/cloudbase.pig/src/main/java/mvm/rya/cloudbase/pig/CloudbaseStorage.java
----------------------------------------------------------------------
diff --git a/pig/cloudbase.pig/src/main/java/mvm/rya/cloudbase/pig/CloudbaseStorage.java b/pig/cloudbase.pig/src/main/java/mvm/rya/cloudbase/pig/CloudbaseStorage.java
deleted file mode 100644
index bfda504..0000000
--- a/pig/cloudbase.pig/src/main/java/mvm/rya/cloudbase/pig/CloudbaseStorage.java
+++ /dev/null
@@ -1,318 +0,0 @@
-package mvm.rya.cloudbase.pig;
-
-import cloudbase.core.CBConstants;
-import cloudbase.core.client.mapreduce.CloudbaseInputFormat;
-import cloudbase.core.client.mapreduce.CloudbaseOutputFormat;
-import cloudbase.core.data.Key;
-import cloudbase.core.data.Mutation;
-import cloudbase.core.data.Range;
-import cloudbase.core.data.Value;
-import cloudbase.core.security.Authorizations;
-import cloudbase.core.security.ColumnVisibility;
-import cloudbase.core.util.Pair;
-import org.apache.commons.logging.Log;
-import org.apache.commons.logging.LogFactory;
-import org.apache.hadoop.conf.Configuration;
-import org.apache.hadoop.fs.Path;
-import org.apache.hadoop.io.Text;
-import org.apache.hadoop.mapreduce.*;
-import org.apache.pig.LoadFunc;
-import org.apache.pig.ResourceSchema;
-import org.apache.pig.StoreFuncInterface;
-import org.apache.pig.backend.executionengine.ExecException;
-import org.apache.pig.backend.hadoop.executionengine.mapReduceLayer.PigSplit;
-import org.apache.pig.data.DataByteArray;
-import org.apache.pig.data.Tuple;
-import org.apache.pig.data.TupleFactory;
-
-import java.io.IOException;
-import java.util.ArrayList;
-import java.util.Collection;
-import java.util.LinkedList;
-import java.util.List;
-
-/**
- * A LoadStoreFunc for retrieving data from and storing data to Cloudbase
- * <p/>
- * Each Key/Value pair is returned as a tuple: (key, colfam, colqual, colvis, timestamp, value). All fields except timestamp are DataByteArray; timestamp is a long.
- * <p/>
- * Tuples can be written in 2 forms:
- * (key, colfam, colqual, colvis, value)
- * OR
- * (key, colfam, colqual, value)
- */
-public class CloudbaseStorage extends LoadFunc implements StoreFuncInterface {
-    private static final Log logger = LogFactory.getLog(CloudbaseStorage.class);
-
-    protected Configuration conf;
-    protected RecordReader<Key, Value> reader;
-    protected RecordWriter<Text, Mutation> writer;
-
-    protected String inst;
-    protected String zookeepers;
-    protected String user = "";
-    protected String password = "";
-    protected String table;
-    protected Text tableName;
-    protected String auths;
-    protected Authorizations authorizations = CBConstants.NO_AUTHS;
-    protected List<Pair<Text, Text>> columnFamilyColumnQualifierPairs = new LinkedList<Pair<Text, Text>>();
-
-    protected Collection<Range> ranges = new ArrayList<Range>();
-    protected boolean mock = false;
-
-    public CloudbaseStorage() {
-    }
-
-    @Override
-    public Tuple getNext() throws IOException {
-        try {
-            // load the next pair
-            if (!reader.nextKeyValue()) {
-                logger.info("Reached end of results");
-                return null;
-            }
-
-            Key key = (Key) reader.getCurrentKey();
-            Value value = (Value) reader.getCurrentValue();
-            assert key != null && value != null;
-
-            if (logger.isTraceEnabled()) {
-                logger.trace("Found key[" + key + "] and value[" + value + "]");
-            }
-
-            // and wrap it in a tuple
-            Tuple tuple = TupleFactory.getInstance().newTuple(6);
-            tuple.set(0, new DataByteArray(key.getRow().getBytes()));
-            tuple.set(1, new DataByteArray(key.getColumnFamily().getBytes()));
-            tuple.set(2, new DataByteArray(key.getColumnQualifier().getBytes()));
-            tuple.set(3, new DataByteArray(key.getColumnVisibility().getBytes()));
-            tuple.set(4, key.getTimestamp());
-            tuple.set(5, new DataByteArray(value.get()));
-            if (logger.isTraceEnabled()) {
-                logger.trace("Output tuple[" + tuple + "]");
-            }
-            return tuple;
-        } catch (InterruptedException e) {
-            throw new IOException(e.getMessage());
-        }
-    }
-
-    @Override
-    public InputFormat getInputFormat() {
-        return new CloudbaseInputFormat();
-    }
-
-    @Override
-    public void prepareToRead(RecordReader reader, PigSplit split) {
-        this.reader = reader;
-    }
-
-    @Override
-    public void setLocation(String location, Job job) throws IOException {
-        if (logger.isDebugEnabled()) {
-            logger.debug("Set Location[" + location + "] for job[" + job.getJobName() + "]");
-        }
-        conf = job.getConfiguration();
-        setLocationFromUri(location, job);
-
-        if (!conf.getBoolean(CloudbaseInputFormat.class.getSimpleName() + ".configured", false)) {
-            CloudbaseInputFormat.setInputInfo(job, user, password.getBytes(), table, authorizations);
-            if (!mock) {
-                CloudbaseInputFormat.setZooKeeperInstance(job, inst, zookeepers);
-            } else {
-                CloudbaseInputFormat.setMockInstance(job, inst);
-            }
-        }
-        if (columnFamilyColumnQualifierPairs.size() > 0)
-            CloudbaseInputFormat.fetchColumns(job, columnFamilyColumnQualifierPairs);
-        logger.info("Set ranges[" + ranges + "] for job[" + job.getJobName() + "] on table[" + table + "] " +
-                "for columns[" + columnFamilyColumnQualifierPairs + "] with authorizations[" + authorizations + "]");
-
-        if (ranges.size() == 0) {
-            throw new IOException("Cloudbase Range must be specified");
-        }
-        CloudbaseInputFormat.setRanges(job, ranges);
-    }
-
-    protected void setLocationFromUri(String uri, Job job) throws IOException {
-        // ex: cloudbase://table1?instance=myinstance&user=root&password=secret&zookeepers=127.0.0.1:2181&auths=PRIVATE,PUBLIC&columns=col1|cq1,col2|cq2&range=a|z&range=1|9&mock=true
-        try {
-            if (!uri.startsWith("cloudbase://"))
-                throw new Exception("Bad scheme.");
-            String[] urlParts = uri.split("\\?");
-            setLocationFromUriParts(urlParts);
-
-        } catch (Exception e) {
-            throw new IOException("Expected 'cloudbase://<table>[?instance=<instanceName>&user=<user>&password=<password>&zookeepers=<zookeepers>&auths=<authorizations>&[range=startRow|endRow[...],columns=[cf1|cq1,cf2|cq2,...]],mock=true(false)]': " + e.getMessage(), e);
-        }
-    }
-
-    protected void setLocationFromUriParts(String[] urlParts) {
-        String columns = "";
-        if (urlParts.length > 1) {
-            for (String param : urlParts[1].split("&")) {
-                String[] pair = param.split("=");
-                if (pair[0].equals("instance")) {
-                    inst = pair[1];
-                } else if (pair[0].equals("user")) {
-                    user = pair[1];
-                } else if (pair[0].equals("password")) {
-                    password = pair[1];
-                } else if (pair[0].equals("zookeepers")) {
-                    zookeepers = pair[1];
-                } else if (pair[0].equals("auths")) {
-                    auths = pair[1];
-                } else if (pair[0].equals("columns")) {
-                    columns = pair[1];
-                } else if (pair[0].equals("range")) {
-                    String[] r = pair[1].split("\\|");
-                    if (r.length == 2) {
-                        addRange(new Range(r[0], r[1]));
-                    } else {
-                        addRange(new Range(r[0]));
-                    }
-                } else if (pair[0].equals("mock")) {
-                    this.mock = Boolean.parseBoolean(pair[1]);
-                }
-                addLocationFromUriPart(pair);
-            }
-        }
-        String[] parts = urlParts[0].split("/+");
-        table = parts[1];
-        tableName = new Text(table);
-
-        if (auths == null || auths.equals("")) {
-            authorizations = new Authorizations();
-        } else {
-            authorizations = new Authorizations(auths.split(","));
-        }
-
-        if (!columns.equals("")) {
-            for (String cfCq : columns.split(",")) {
-                if (cfCq.contains("|")) {
-                    String[] c = cfCq.split("\\|");
-                    String cf = c[0];
-                    String cq = c[1];
-                    addColumnPair(cf, cq);
-                } else {
-                    addColumnPair(cfCq, null);
-                }
-            }
-        }
-    }
-
-    protected void addColumnPair(String cf, String cq) {
-        columnFamilyColumnQualifierPairs.add(new Pair<Text, Text>((cf != null) ? new Text(cf) : null, (cq != null) ? new Text(cq) : null));
-    }
-
-    protected void addLocationFromUriPart(String[] pair) {
-
-    }
-
-    protected void addRange(Range range) {
-        ranges.add(range);
-    }
-
-    @Override
-    public String relativeToAbsolutePath(String location, Path curDir) throws IOException {
-        return location;
-    }
-
-    @Override
-    public void setUDFContextSignature(String signature) {
-
-    }
-
-    /* StoreFunc methods */
-    public void setStoreFuncUDFContextSignature(String signature) {
-
-    }
-
-    public String relToAbsPathForStoreLocation(String location, Path curDir) throws IOException {
-        return relativeToAbsolutePath(location, curDir);
-    }
-
-    public void setStoreLocation(String location, Job job) throws IOException {
-        conf = job.getConfiguration();
-        setLocationFromUri(location, job);
-
-        if (!conf.getBoolean(CloudbaseOutputFormat.class.getSimpleName() + ".configured", false)) {
-            CloudbaseOutputFormat.setOutputInfo(job, user, password.getBytes(), true, table);
-            CloudbaseOutputFormat.setZooKeeperInstance(job, inst, zookeepers);
-            CloudbaseOutputFormat.setMaxLatency(job, 10 * 1000);
-            CloudbaseOutputFormat.setMaxMutationBufferSize(job, 10 * 1000 * 1000);
-            CloudbaseOutputFormat.setMaxWriteThreads(job, 10);
-        }
-    }
-
-    public OutputFormat getOutputFormat() {
-        return new CloudbaseOutputFormat();
-    }
-
-    public void checkSchema(ResourceSchema schema) throws IOException {
-        // we don't care about types, they all get casted to ByteBuffers
-    }
-
-    public void prepareToWrite(RecordWriter writer) {
-        this.writer = writer;
-    }
-
-    public void putNext(Tuple t) throws ExecException, IOException {
-        Mutation mut = new Mutation(objToText(t.get(0)));
-        Text cf = objToText(t.get(1));
-        Text cq = objToText(t.get(2));
-
-        if (t.size() > 4) {
-            Text cv = objToText(t.get(3));
-            Value val = new Value(objToBytes(t.get(4)));
-            if (cv.getLength() == 0) {
-                mut.put(cf, cq, val);
-            } else {
-                mut.put(cf, cq, new ColumnVisibility(cv), val);
-            }
-        } else {
-            Value val = new Value(objToBytes(t.get(3)));
-            mut.put(cf, cq, val);
-        }
-
-        try {
-            writer.write(tableName, mut);
-        } catch (InterruptedException e) {
-            throw new IOException(e);
-        }
-    }
-
-    private static Text objToText(Object o) {
-        return new Text(objToBytes(o));
-    }
-
-    private static byte[] objToBytes(Object o) {
-        if (o instanceof String) {
-            String str = (String) o;
-            return str.getBytes();
-        } else if (o instanceof Long) {
-            Long l = (Long) o;
-            return l.toString().getBytes();
-        } else if (o instanceof Integer) {
-            Integer l = (Integer) o;
-            return l.toString().getBytes();
-        } else if (o instanceof Boolean) {
-            Boolean l = (Boolean) o;
-            return l.toString().getBytes();
-        } else if (o instanceof Float) {
-            Float l = (Float) o;
-            return l.toString().getBytes();
-        } else if (o instanceof Double) {
-            Double l = (Double) o;
-            return l.toString().getBytes();
-        }
-
-        // TODO: handle DataBag, Map<Object, Object>, and Tuple
-
-        return ((DataByteArray) o).get();
-    }
-
-    public void cleanupOnFailure(String failure, Job job) {
-    }
-}
\ No newline at end of file
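
For reference, a minimal sketch of how this loader was driven from Java (all connection values, the range, and the output path below are placeholders, not taken from the original module). It registers a Pig script whose LOAD statement uses the cloudbase:// URI form documented in setLocationFromUri() above; at least one range parameter was required, and the loaded tuples carry the six fields produced by getNext():

    import java.io.ByteArrayInputStream;

    import org.apache.pig.ExecType;
    import org.apache.pig.PigServer;

    public class CloudbaseStorageSketch {
        public static void main(String[] args) throws Exception {
            // LOCAL keeps the sketch self-contained; MAPREDUCE was the usual mode.
            PigServer pig = new PigServer(ExecType.LOCAL);
            // URI form from setLocationFromUri(); a range is mandatory or setLocation() throws.
            String script =
                    "raw = load 'cloudbase://table1?instance=myinstance&user=root"
                    + "&password=secret&zookeepers=127.0.0.1:2181&range=a|z'"
                    + " using mvm.rya.cloudbase.pig.CloudbaseStorage()"
                    + " AS (key:bytearray, cf:bytearray, cq:bytearray, cv:bytearray, ts:long, val:bytearray);\n";
            pig.registerScript(new ByteArrayInputStream(script.getBytes()));
            pig.store("raw", "/tmp/cloudbase-dump"); // writes the (key, colfam, colqual, colvis, timestamp, value) tuples
        }
    }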

http://git-wip-us.apache.org/repos/asf/incubator-rya/blob/80faf06d/pig/cloudbase.pig/src/main/java/mvm/rya/cloudbase/pig/SparqlQueryPigEngine.java
----------------------------------------------------------------------
diff --git a/pig/cloudbase.pig/src/main/java/mvm/rya/cloudbase/pig/SparqlQueryPigEngine.java b/pig/cloudbase.pig/src/main/java/mvm/rya/cloudbase/pig/SparqlQueryPigEngine.java
deleted file mode 100644
index daf6c4f..0000000
--- a/pig/cloudbase.pig/src/main/java/mvm/rya/cloudbase/pig/SparqlQueryPigEngine.java
+++ /dev/null
@@ -1,237 +0,0 @@
-package mvm.rya.cloudbase.pig;
-
-import cloudbase.core.client.Connector;
-import cloudbase.core.client.ZooKeeperInstance;
-import com.google.common.base.Preconditions;
-import com.google.common.io.ByteStreams;
-import mvm.rya.cloudbase.CloudbaseRdfConfiguration;
-import mvm.rya.cloudbase.CloudbaseRdfEvalStatsDAO;
-import mvm.rya.cloudbase.CloudbaseRyaDAO;
-import mvm.rya.cloudbase.pig.optimizer.SimilarVarJoinOptimizer;
-import mvm.rya.rdftriplestore.evaluation.QueryJoinOptimizer;
-import mvm.rya.rdftriplestore.evaluation.RdfCloudTripleStoreEvaluationStatistics;
-import mvm.rya.rdftriplestore.inference.InferenceEngine;
-import mvm.rya.rdftriplestore.inference.InverseOfVisitor;
-import mvm.rya.rdftriplestore.inference.SymmetricPropertyVisitor;
-import mvm.rya.rdftriplestore.inference.TransitivePropertyVisitor;
-import org.apache.commons.logging.Log;
-import org.apache.commons.logging.LogFactory;
-import org.apache.pig.ExecType;
-import org.apache.pig.PigServer;
-import org.openrdf.query.algebra.QueryRoot;
-import org.openrdf.query.parser.ParsedQuery;
-import org.openrdf.query.parser.QueryParser;
-import org.openrdf.query.parser.sparql.SPARQLParser;
-
-import java.io.ByteArrayInputStream;
-import java.io.FileInputStream;
-import java.io.IOException;
-
-/**
- * Transforms a SPARQL query into a Pig script and executes it on a PigServer,
- * optionally applying inference and statistics-based join optimization first.
- */
-public class SparqlQueryPigEngine {
-    private static final Log logger = LogFactory.getLog(SparqlQueryPigEngine.class);
-
-    private String hadoopDir;
-    private ExecType execType = ExecType.MAPREDUCE; //default to mapreduce
-    private boolean inference = true;
-    private boolean stats = true;
-    private SparqlToPigTransformVisitor sparqlToPigTransformVisitor;
-    private PigServer pigServer;
-    private InferenceEngine inferenceEngine = null;
-    private RdfCloudTripleStoreEvaluationStatistics rdfCloudTripleStoreEvaluationStatistics;
-    private CloudbaseRyaDAO ryaDAO;
-    private CloudbaseRdfEvalStatsDAO rdfEvalStatsDAO;
-
-    public CloudbaseRdfConfiguration getConf() {
-        return conf;
-    }
-
-    public void setConf(CloudbaseRdfConfiguration conf) {
-        this.conf = conf;
-    }
-
-    CloudbaseRdfConfiguration conf = new CloudbaseRdfConfiguration();
-
-    public void init() throws Exception {
-        Preconditions.checkNotNull(sparqlToPigTransformVisitor, "Sparql To Pig Transform Visitor must not be null");
-        logger.info("Initializing Sparql Query Pig Engine");
-        if (hadoopDir != null) {
-            //set hadoop dir property
-            System.setProperty("HADOOPDIR", hadoopDir);
-        }
-        //TODO: Maybe have validation of the HadoopDir system property
-
-        if (pigServer == null) {
-            pigServer = new PigServer(execType);
-        }
-
-        if (inference || stats) {
-            Connector connector = new ZooKeeperInstance("stratus", "stratus13:2181").getConnector("root", "password".getBytes());
-
-            String tablePrefix = sparqlToPigTransformVisitor.getTablePrefix();
-            conf.setTablePrefix(tablePrefix);
-            if (inference) {
-                logger.info("Using inference");
-                inferenceEngine = new InferenceEngine();
-                ryaDAO = new CloudbaseRyaDAO();
-                ryaDAO.setConf(conf);
-                ryaDAO.setConnector(connector);
-                ryaDAO.init();
-
-                inferenceEngine.setRyaDAO(ryaDAO);
-                inferenceEngine.setConf(conf);
-                inferenceEngine.setSchedule(false);
-                inferenceEngine.init();
-            }
-            if (stats) {
-                logger.info("Using stats");
-                rdfEvalStatsDAO = new CloudbaseRdfEvalStatsDAO();
-                rdfEvalStatsDAO.setConf(conf);
-                rdfEvalStatsDAO.setConnector(connector);
-//                rdfEvalStatsDAO.setEvalTable(tablePrefix + RdfCloudTripleStoreConstants.TBL_EVAL_SUFFIX);
-                rdfEvalStatsDAO.init();
-                rdfCloudTripleStoreEvaluationStatistics = new RdfCloudTripleStoreEvaluationStatistics(conf, rdfEvalStatsDAO);
-            }
-        }
-    }
-
-    public void destroy() throws Exception {
-        logger.info("Shutting down Sparql Query Pig Engine");
-        pigServer.shutdown();
-        if (ryaDAO != null) {
-            ryaDAO.destroy();
-        }
-        if (inferenceEngine != null) {
-            inferenceEngine.destroy();
-        }
-        if (rdfEvalStatsDAO != null) {
-            rdfEvalStatsDAO.destroy();
-        }
-    }
-
-    /**
-     * Transform a SPARQL query into a Pig script and execute it, saving the results in hdfsSaveLocation.
-     *
-     * @param sparql           to execute
-     * @param hdfsSaveLocation to save the execution
-     * @throws IOException
-     */
-    public void runQuery(String sparql, String hdfsSaveLocation) throws IOException {
-        Preconditions.checkNotNull(sparql, "Sparql query cannot be null");
-        Preconditions.checkNotNull(hdfsSaveLocation, "Hdfs save location cannot be null");
-        logger.info("Running query[" + sparql + "]\n to Location[" + hdfsSaveLocation + "]");
-        pigServer.deleteFile(hdfsSaveLocation);
-        try {
-            String pigScript = generatePigScript(sparql);
-            if (logger.isDebugEnabled()) {
-                logger.debug("Pig script [" + pigScript + "]");
-            }
-            pigServer.registerScript(new ByteArrayInputStream(pigScript.getBytes()));
-            pigServer.store("PROJ", hdfsSaveLocation); //TODO: Make this a constant
-        } catch (Exception e) {
-            throw new IOException(e);
-        }
-    }
-
-    public String generatePigScript(String sparql) throws Exception {
-        Preconditions.checkNotNull(sparql, "Sparql query cannot be null");
-        QueryParser parser = new SPARQLParser();
-        ParsedQuery parsedQuery = parser.parseQuery(sparql, null);
-        QueryRoot tupleExpr = new QueryRoot(parsedQuery.getTupleExpr());
-
-        SimilarVarJoinOptimizer similarVarJoinOptimizer = new SimilarVarJoinOptimizer();
-        similarVarJoinOptimizer.optimize(tupleExpr, null, null);
-
-        if (inference || stats) {
-            if (inference) {
-                tupleExpr.visit(new TransitivePropertyVisitor(conf, inferenceEngine));
-                tupleExpr.visit(new SymmetricPropertyVisitor(conf, inferenceEngine));
-                tupleExpr.visit(new InverseOfVisitor(conf, inferenceEngine));
-            }
-            if (stats) {
-                (new QueryJoinOptimizer(rdfCloudTripleStoreEvaluationStatistics)).optimize(tupleExpr, null, null);
-            }
-        }
-
-        sparqlToPigTransformVisitor.meet(tupleExpr);
-        return sparqlToPigTransformVisitor.getPigScript();
-    }
-
-    public static void main(String[] args) {
-        try {
-            Preconditions.checkArgument(args.length == 7, "Usage: java -cp <jar>:$PIG_LIB <class> sparqlFile hdfsSaveLocation cbinstance cbzk cbuser cbpassword rdfTablePrefix.\n " +
-                    "Sample command: java -cp cloudbase.pig-2.0.0-SNAPSHOT-shaded.jar:/usr/local/hadoop-etc/hadoop-0.20.2/hadoop-0.20.2-core.jar:/srv_old/hdfs-tmp/pig/pig-0.9.2/pig-0.9.2.jar:$HADOOP_HOME/conf mvm.rya.cloudbase.pig.SparqlQueryPigEngine tstSpqrl.query temp/engineTest stratus stratus13:2181 root password l_");
-            String sparql = new String(ByteStreams.toByteArray(new FileInputStream(args[0])));
-            String hdfsSaveLocation = args[1];
-            SparqlToPigTransformVisitor visitor = new SparqlToPigTransformVisitor();
-            visitor.setTablePrefix(args[6]);
-            visitor.setInstance(args[2]);
-            visitor.setZk(args[3]);
-            visitor.setUser(args[4]);
-            visitor.setPassword(args[5]);
-
-            SparqlQueryPigEngine engine = new SparqlQueryPigEngine();
-            engine.setSparqlToPigTransformVisitor(visitor);
-            engine.init();
-
-            engine.runQuery(sparql, hdfsSaveLocation);
-
-            engine.destroy();
-        } catch (Exception e) {
-            e.printStackTrace();
-        }
-    }
-
-    public String getHadoopDir() {
-        return hadoopDir;
-    }
-
-    public void setHadoopDir(String hadoopDir) {
-        this.hadoopDir = hadoopDir;
-    }
-
-    public PigServer getPigServer() {
-        return pigServer;
-    }
-
-    public void setPigServer(PigServer pigServer) {
-        this.pigServer = pigServer;
-    }
-
-    public ExecType getExecType() {
-        return execType;
-    }
-
-    public void setExecType(ExecType execType) {
-        this.execType = execType;
-    }
-
-    public boolean isInference() {
-        return inference;
-    }
-
-    public void setInference(boolean inference) {
-        this.inference = inference;
-    }
-
-    public boolean isStats() {
-        return stats;
-    }
-
-    public void setStats(boolean stats) {
-        this.stats = stats;
-    }
-
-    public SparqlToPigTransformVisitor getSparqlToPigTransformVisitor() {
-        return sparqlToPigTransformVisitor;
-    }
-
-    public void setSparqlToPigTransformVisitor(SparqlToPigTransformVisitor sparqlToPigTransformVisitor) {
-        this.sparqlToPigTransformVisitor = sparqlToPigTransformVisitor;
-    }
-}
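
Condensed from the main() method above, this is the lifecycle the engine expected (a hypothetical sketch; connection values are placeholders). Inference and stats are switched off here because init() otherwise opens the hard-coded ZooKeeper connection shown above:

    import org.apache.pig.ExecType;

    import mvm.rya.cloudbase.pig.SparqlQueryPigEngine;
    import mvm.rya.cloudbase.pig.SparqlToPigTransformVisitor;

    public class PigEngineSketch {
        public static void main(String[] args) throws Exception {
            SparqlToPigTransformVisitor visitor = new SparqlToPigTransformVisitor();
            visitor.setTablePrefix("l_");
            visitor.setInstance("myinstance");
            visitor.setZk("127.0.0.1:2181");
            visitor.setUser("root");
            visitor.setPassword("secret");

            SparqlQueryPigEngine engine = new SparqlQueryPigEngine();
            engine.setSparqlToPigTransformVisitor(visitor);
            engine.setExecType(ExecType.LOCAL); // default was MAPREDUCE
            engine.setInference(false);         // avoid the hard-coded connector in init()
            engine.setStats(false);
            engine.init();
            try {
                engine.runQuery("select * where { ?s ?p ?o. }", "/tmp/engine-out");
            } finally {
                engine.destroy();
            }
        }
    }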

http://git-wip-us.apache.org/repos/asf/incubator-rya/blob/80faf06d/pig/cloudbase.pig/src/main/java/mvm/rya/cloudbase/pig/SparqlToPigTransformVisitor.java
----------------------------------------------------------------------
diff --git a/pig/cloudbase.pig/src/main/java/mvm/rya/cloudbase/pig/SparqlToPigTransformVisitor.java b/pig/cloudbase.pig/src/main/java/mvm/rya/cloudbase/pig/SparqlToPigTransformVisitor.java
deleted file mode 100644
index e037597..0000000
--- a/pig/cloudbase.pig/src/main/java/mvm/rya/cloudbase/pig/SparqlToPigTransformVisitor.java
+++ /dev/null
@@ -1,323 +0,0 @@
-package mvm.rya.cloudbase.pig;
-
-import org.openrdf.model.Literal;
-import org.openrdf.model.URI;
-import org.openrdf.model.Value;
-import org.openrdf.query.algebra.*;
-import org.openrdf.query.algebra.helpers.QueryModelVisitorBase;
-
-import java.util.*;
-
-/**
- * Walks a SPARQL query algebra tree and emits the equivalent Pig Latin script:
- * statement patterns become StatementPatternStorage loads; joins, unions,
- * projections and slices become the corresponding Pig operators.
- */
-public class SparqlToPigTransformVisitor extends QueryModelVisitorBase<RuntimeException> {
-    private StringBuilder pigScriptBuilder = new StringBuilder();
-    private String tablePrefix;
-    private String instance, zk, user, password; //TODO: use a Configuration object to get these
-
-    private Map<String, String> varToSet = new HashMap<String, String>();
-    private Map<TupleExpr, List<String>> exprToNames = new HashMap<TupleExpr, List<String>>();
-    private Map<TupleExpr, String> exprToVar = new HashMap<TupleExpr, String>();
-
-    private char i = 'A'; //TODO: do better, hack
-
-    public SparqlToPigTransformVisitor() {
-        pigScriptBuilder.append("set pig.splitCombination false;\n")
-                .append("set default_parallel 13;\n") //TODO: set parallel properly
-                .append("set mapred.map.tasks.speculative.execution false;\n")
-                .append("set mapred.reduce.tasks.speculative.execution false;\n")
-                .append("set io.sort.mb 256;\n")
-                .append("set mapred.compress.map.output true;\n")
-                .append("set mapred.map.output.compression.codec org.apache.hadoop.io.compress.GzipCodec;\n")
-                .append("set io.file.buffer.size 65536;\n")
-                .append("set io.sort.factor 25;\n");
-    }
-
-    @Override
-    public void meet(StatementPattern node) throws RuntimeException {
-        super.meet(node);
-        String subjValue = getVarValue(node.getSubjectVar());
-        String predValue = getVarValue(node.getPredicateVar());
-        String objValue = getVarValue(node.getObjectVar());
-
-        String subj = i + "_s";
-        String pred = i + "_p";
-        String obj = i + "_o";
-        String var = i + "";
-        if (node.getSubjectVar().getValue() == null) {                  //TODO: look nicer
-            subj = node.getSubjectVar().getName();
-            varToSet.put(subj, var);
-
-            addToExprToNames(node, subj);
-        }
-        if (node.getPredicateVar().getValue() == null) {                  //TODO: look nicer
-            pred = node.getPredicateVar().getName();
-            varToSet.put(pred, var);
-
-            addToExprToNames(node, pred);
-        }
-        if (node.getObjectVar().getValue() == null) {                  //TODO: look nicer
-            obj = node.getObjectVar().getName();
-            varToSet.put(obj, var);
-
-            addToExprToNames(node, obj);
-        }
-        if(node.getContextVar() != null && node.getContextVar().getValue() == null) {
-            String cntxtName = node.getContextVar().getName();
-            varToSet.put(cntxtName, var);
-            
-            addToExprToNames(node, cntxtName);
-        }
-        //load 'l_' using mvm.rya.cloudbase.pig.dep.StatementPatternStorage('<http://www.Department0.University0.edu>', '', '',
-        // 'stratus', 'stratus13:2181', 'root', 'password') AS (dept:chararray, p:chararray, univ:chararray);
-//        pigScriptBuilder.append(i).append(" = load '").append(tablePrefix).append("' using mvm.rya.cloudbase.pig.dep.StatementPatternStorage('")
-//                .append(subjValue).append("','").append(predValue).append("','").append(objValue).append("','").append(instance).append("','")
-//                .append(zk).append("','").append(user).append("','").append(password).append("') AS (").append(subj).append(":chararray, ")
-//                .append(pred).append(":chararray, ").append(obj).append(":chararray);\n");
-
-        //load 'cloudbase://tablePrefix?instance=myinstance&user=root&password=secret&zookeepers=127.0.0.1:2181&auths=PRIVATE,PUBLIC&subject=a&predicate=b&object=c'
-        //using mvm.rya.cloudbase.pig.StatementPatternStorage() AS (dept:chararray, p:chararray, univ:chararray);
-        pigScriptBuilder.append(i).append(" = load 'cloudbase://").append(tablePrefix).append("?instance=").append(instance).append("&user=").append(user)
-                .append("&password=").append(password).append("&zookeepers=").append(zk);
-        if (subjValue != null && subjValue.length() > 0) {
-            pigScriptBuilder.append("&subject=").append(subjValue);
-        }
-        if (predValue != null && predValue.length() > 0) {
-            pigScriptBuilder.append("&predicate=").append(predValue);
-        }
-        if (objValue != null && objValue.length() > 0) {
-            pigScriptBuilder.append("&object=").append(objValue);
-        }
-        if(node.getContextVar() != null && node.getContextVar().getValue() != null) {
-            pigScriptBuilder.append("&context=").append(getVarValue(node.getContextVar()));
-        }
-
-        pigScriptBuilder.append("' using mvm.rya.cloudbase.pig.StatementPatternStorage() AS (").append(subj).append(":chararray, ")
-                .append(pred).append(":chararray, ").append(obj).append(":chararray");
-        if(node.getContextVar() != null) {
-            Value cntxtValue = node.getContextVar().getValue();
-            String cntxtName = null;
-            if(cntxtValue == null) {
-                //use name
-                cntxtName = node.getContextVar().getName();
-            } else {
-                cntxtName = i + "_c";
-            }
-            pigScriptBuilder.append(", ").append(cntxtName).append(":chararray");
-        }
-        pigScriptBuilder.append(");\n");
-        //TODO: add auths
-
-        exprToVar.put(node, var);
-        i++;
-    }
-
-    private void addToExprToNames(TupleExpr node, String name) {
-        List<String> names = exprToNames.get(node);
-        if (names == null) {
-            names = new ArrayList<String>();
-            exprToNames.put(node, names);
-        }
-        names.add(name);
-    }
-
-    @Override
-    public void meet(Union node) throws RuntimeException {
-        super.meet(node);
-
-        TupleExpr leftArg = node.getLeftArg();
-        TupleExpr rightArg = node.getRightArg();
-        String left_var = exprToVar.get(leftArg);
-        String right_var = exprToVar.get(rightArg);
-        //Q = UNION ONSCHEMA B, P;
-        pigScriptBuilder.append(i).append(" = UNION ONSCHEMA ").append(left_var).append(", ").append(right_var).append(";\n");
-
-        String unionVar = i + "";
-        List<String> left_names = exprToNames.get(leftArg);
-        List<String> right_names = exprToNames.get(rightArg);
-        for (String name : left_names) {
-            varToSet.put(name, unionVar);
-            addToExprToNames(node, name);
-        }
-        for (String name : right_names) {
-            varToSet.put(name, unionVar);
-            addToExprToNames(node, name);
-        }
-        exprToVar.put(node, unionVar);
-        i++;
-    }
-
-    @Override
-    public void meet(Join node) throws RuntimeException {
-        super.meet(node);
-
-        TupleExpr leftArg = node.getLeftArg();
-        TupleExpr rightArg = node.getRightArg();
-        List<String> left_names = exprToNames.get(leftArg);
-        List<String> right_names = exprToNames.get(rightArg);
-
-        Set<String> joinNames = new HashSet<String>(left_names);
-        joinNames.retainAll(right_names); //intersection, this is what I join on
-        //SEC = join FIR by (MEMB_OF::ugrad, SUBORG_J::univ), UGRADDEG by (ugrad, univ);
-        StringBuilder joinStr = new StringBuilder();
-        joinStr.append("(");
-        boolean first = true;
-        for (String name : joinNames) { //TODO: Make this a utility method
-            if (!first) {
-                joinStr.append(",");
-            }
-            first = false;
-            joinStr.append(name);
-        }
-        joinStr.append(")");
-
-        String left_var = exprToVar.get(leftArg);
-        String right_var = exprToVar.get(rightArg);
-        if (joinStr.length() <= 2) {
-            //no join params, need to cross
-            pigScriptBuilder.append(i).append(" = cross ").append(left_var).append(", ").append(right_var).append(";\n");
-        } else {
-            //join
-            pigScriptBuilder.append(i).append(" = join ").append(left_var);
-            pigScriptBuilder.append(" by ").append(joinStr);
-            pigScriptBuilder.append(", ").append(right_var);
-            pigScriptBuilder.append(" by ").append(joinStr);
-            pigScriptBuilder.append(";\n");
-
-        }
-
-        String joinVarStr = i + "";
-        i++;
-        // D = foreach C GENERATE A::subj AS subj:chararray, A::A_p AS p:chararray;
-        String forEachVarStr = i + "";
-        pigScriptBuilder.append(i).append(" = foreach ").append(joinVarStr).append(" GENERATE ");
-        Map<String, String> nameToJoinName = new HashMap<String, String>();
-        for (String name : left_names) {
-            varToSet.put(name, forEachVarStr);
-            addToExprToNames(node, name);
-            nameToJoinName.put(name, left_var + "::" + name);
-        }
-        for (String name : right_names) {
-            varToSet.put(name, forEachVarStr);
-            addToExprToNames(node, name);
-            nameToJoinName.put(name, right_var + "::" + name);
-        }
-
-        first = true;
-        for (Map.Entry entry : nameToJoinName.entrySet()) {
-            if (!first) {
-                pigScriptBuilder.append(",");
-            }
-            first = false;
-            pigScriptBuilder.append(entry.getValue()).append(" AS ").append(entry.getKey()).append(":chararray ");
-        }
-        pigScriptBuilder.append(";\n");
-
-        exprToVar.put(node, forEachVarStr);
-        i++;
-    }
-
-    @Override
-    public void meet(Projection node) throws RuntimeException {
-        super.meet(node);
-        ProjectionElemList list = node.getProjectionElemList();
-        String set = null;
-        StringBuilder projList = new StringBuilder();
-        boolean first = true;
-        //TODO: we do not support projections from multiple pig statements yet
-        for (String name : list.getTargetNames()) {
-            set = varToSet.get(name);  //TODO: overwrite
-            if (set == null) {
-                throw new IllegalArgumentException("Have not found any pig logic for name[" + name + "]");
-            }
-            if (!first) {
-                projList.append(",");
-            }
-            first = false;
-            projList.append(name);
-        }
-        if (set == null)
-            throw new IllegalArgumentException(""); //TODO: Fill this
-        //SUBORG = FOREACH SUBORG_L GENERATE dept, univ;
-        pigScriptBuilder.append("PROJ = FOREACH ").append(set).append(" GENERATE ").append(projList.toString()).append(";\n");
-    }
-
-    @Override
-    public void meet(Slice node) throws RuntimeException {
-        super.meet(node);
-        long limit = node.getLimit();
-        //PROJ = LIMIT PROJ 10;
-        pigScriptBuilder.append("PROJ = LIMIT PROJ ").append(limit).append(";\n");
-    }
-
-    public String getPassword() {
-        return password;
-    }
-
-    public void setPassword(String password) {
-        this.password = password;
-    }
-
-    public String getUser() {
-        return user;
-    }
-
-    public void setUser(String user) {
-        this.user = user;
-    }
-
-    public String getZk() {
-        return zk;
-    }
-
-    public void setZk(String zk) {
-        this.zk = zk;
-    }
-
-    public String getInstance() {
-        return instance;
-    }
-
-    public void setInstance(String instance) {
-        this.instance = instance;
-    }
-
-    public String getTablePrefix() {
-        return tablePrefix;
-    }
-
-    public void setTablePrefix(String tablePrefix) {
-        this.tablePrefix = tablePrefix;
-    }
-
-    public String getPigScript() {
-        return pigScriptBuilder.toString();
-    }
-
-    protected String getVarValue(Var var) {
-        if (var == null) {
-            return "";
-        } else {
-            Value value = var.getValue();
-            if (value == null) {
-                return "";
-            }
-            if (value instanceof URI) {
-                return "<" + value.stringValue() + ">";
-            }
-            if (value instanceof Literal) {
-                Literal lit = (Literal) value;
-                if (lit.getDatatype() == null) {
-                    //string
-                    return "\\'" + value.stringValue() + "\\'";
-                }
-            }
-            return value.stringValue();
-        }
-
-    }
-}
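
To make the transform concrete, a sketch of the round trip this visitor performed (connection values are placeholders; the expected output follows from meet(StatementPattern) and meet(Projection) above):

    import org.openrdf.query.algebra.QueryRoot;
    import org.openrdf.query.parser.ParsedQuery;
    import org.openrdf.query.parser.sparql.SPARQLParser;

    import mvm.rya.cloudbase.pig.SparqlToPigTransformVisitor;

    public class VisitorSketch {
        public static void main(String[] args) throws Exception {
            SparqlToPigTransformVisitor visitor = new SparqlToPigTransformVisitor();
            visitor.setTablePrefix("l_");
            visitor.setInstance("myinstance");
            visitor.setZk("127.0.0.1:2181");
            visitor.setUser("root");
            visitor.setPassword("secret");

            ParsedQuery parsed = new SPARQLParser().parseQuery(
                    "select ?s where { ?s <urn:example:pred> ?o. }", null);
            QueryRoot root = new QueryRoot(parsed.getTupleExpr());
            visitor.meet(root); // same entry point SparqlQueryPigEngine used

            // After the constructor's "set ..." preamble, the script is roughly:
            //   A = load 'cloudbase://l_?instance=myinstance&user=root&password=secret
            //       &zookeepers=127.0.0.1:2181&predicate=<urn:example:pred>'
            //       using mvm.rya.cloudbase.pig.StatementPatternStorage()
            //       AS (s:chararray, A_p:chararray, o:chararray);
            //   PROJ = FOREACH A GENERATE s;
            System.out.println(visitor.getPigScript());
        }
    }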

http://git-wip-us.apache.org/repos/asf/incubator-rya/blob/80faf06d/pig/cloudbase.pig/src/main/java/mvm/rya/cloudbase/pig/StatementPatternStorage.java
----------------------------------------------------------------------
diff --git a/pig/cloudbase.pig/src/main/java/mvm/rya/cloudbase/pig/StatementPatternStorage.java b/pig/cloudbase.pig/src/main/java/mvm/rya/cloudbase/pig/StatementPatternStorage.java
deleted file mode 100644
index f9acb70..0000000
--- a/pig/cloudbase.pig/src/main/java/mvm/rya/cloudbase/pig/StatementPatternStorage.java
+++ /dev/null
@@ -1,278 +0,0 @@
-package mvm.rya.cloudbase.pig;
-
-import cloudbase.core.client.ZooKeeperInstance;
-import cloudbase.core.client.mock.MockInstance;
-import cloudbase.core.data.Key;
-import cloudbase.core.data.Range;
-import com.google.common.io.ByteArrayDataInput;
-import com.google.common.io.ByteStreams;
-import mvm.rya.api.RdfCloudTripleStoreConstants;
-import mvm.rya.api.RdfCloudTripleStoreUtils;
-import mvm.rya.api.domain.RyaStatement;
-import mvm.rya.api.domain.RyaType;
-import mvm.rya.api.domain.RyaURI;
-import mvm.rya.api.persist.RyaDAOException;
-import mvm.rya.api.query.strategy.ByteRange;
-import mvm.rya.api.query.strategy.TriplePatternStrategy;
-import mvm.rya.api.resolver.RdfToRyaConversions;
-import mvm.rya.api.resolver.RyaContext;
-import mvm.rya.api.resolver.triple.TripleRow;
-import mvm.rya.cloudbase.CloudbaseRdfConfiguration;
-import mvm.rya.cloudbase.CloudbaseRyaDAO;
-import mvm.rya.rdftriplestore.inference.InferenceEngine;
-import mvm.rya.rdftriplestore.inference.InferenceEngineException;
-import org.apache.commons.logging.Log;
-import org.apache.commons.logging.LogFactory;
-import org.apache.hadoop.io.Text;
-import org.apache.hadoop.mapreduce.Job;
-import org.apache.pig.data.Tuple;
-import org.apache.pig.data.TupleFactory;
-import org.openrdf.model.Resource;
-import org.openrdf.model.URI;
-import org.openrdf.model.Value;
-import org.openrdf.model.vocabulary.RDF;
-import org.openrdf.query.MalformedQueryException;
-import org.openrdf.query.algebra.StatementPattern;
-import org.openrdf.query.algebra.Var;
-import org.openrdf.query.algebra.helpers.QueryModelVisitorBase;
-import org.openrdf.query.parser.ParsedQuery;
-import org.openrdf.query.parser.QueryParser;
-import org.openrdf.query.parser.sparql.SPARQLParser;
-
-import java.io.IOException;
-import java.util.Collection;
-import java.util.Map;
-import java.util.Set;
-
-import static mvm.rya.api.RdfCloudTripleStoreConstants.TABLE_LAYOUT;
-
-/**
- * Loads one SPARQL statement pattern as (subject, predicate, object, context)
- * tuples, turning the pattern into Cloudbase ranges and, when infer=true,
- * adding ranges for subClassOf/subPropertyOf parents from the InferenceEngine.
- */
-public class StatementPatternStorage extends CloudbaseStorage {
-    private static final Log logger = LogFactory.getLog(StatementPatternStorage.class);
-    protected TABLE_LAYOUT layout;
-    protected String subject = "?s";
-    protected String predicate = "?p";
-    protected String object = "?o";
-    protected String context;
-    private Value subject_value;
-    private Value predicate_value;
-    private Value object_value;
-
-    private RyaContext ryaContext = RyaContext.getInstance();
-
-    /**
-     * whether to turn inferencing on or off
-     */
-    private boolean infer = true;
-
-    public StatementPatternStorage() {
-
-    }
-
-    private Value getValue(Var subjectVar) {
-        return subjectVar.hasValue() ? subjectVar.getValue() : null;
-    }
-
-    @Override
-    public void setLocation(String location, Job job) throws IOException {
-        super.setLocation(location, job);
-    }
-
-    @Override
-    protected void setLocationFromUri(String uri, Job job) throws IOException {
-        super.setLocationFromUri(uri, job);
-        // ex: cloudbase://tablePrefix?instance=myinstance&user=root&password=secret&zookeepers=127.0.0.1:2181&auths=PRIVATE,PUBLIC&subject=a&predicate=b&object=c&context=c&infer=true
-        addStatementPatternRange(subject, predicate, object, context);
-        if (infer) {
-            addInferredRanges(table, job);
-        }
-
-        if (layout == null || ranges.size() == 0)
-            throw new IllegalArgumentException("Range and/or layout is null. Check the query");
-        table = RdfCloudTripleStoreUtils.layoutPrefixToTable(layout, table);
-        tableName = new Text(table);
-    }
-
-    @Override
-    protected void addLocationFromUriPart(String[] pair) {
-        if (pair[0].equals("subject")) {
-            this.subject = pair[1];
-        } else if (pair[0].equals("predicate")) {
-            this.predicate = pair[1];
-        } else if (pair[0].equals("object")) {
-            this.object = pair[1];
-        } else if (pair[0].equals("context")) {
-            this.context = pair[1];
-        } else if (pair[0].equals("infer")) {
-            this.infer = Boolean.parseBoolean(pair[1]);
-        }
-    }
-
-    protected void addStatementPatternRange(String subj, String pred, String obj, String ctxt) throws IOException {
-        logger.info("Adding statement pattern[subject:" + subj + ", predicate:" + pred + ", object:" + obj + ", context:" + ctxt + "]");
-        StringBuilder sparqlBuilder = new StringBuilder();
-        sparqlBuilder.append("select * where {\n");
-        if (ctxt != null) {
-            /**
-             * select * where {
-             GRAPH ?g {
-             <http://www.example.org/exampleDocument#Monica> ?p ?o.
-             }
-             }
-             */
-            sparqlBuilder.append("GRAPH ").append(ctxt).append(" {\n");
-        }
-        sparqlBuilder.append(subj).append(" ").append(pred).append(" ").append(obj).append(".\n");
-        if (ctxt != null) {
-            sparqlBuilder.append("}\n");
-        }
-        sparqlBuilder.append("}\n");
-        String sparql = sparqlBuilder.toString();
-
-        if (logger.isDebugEnabled()) {
-            logger.debug("Sparql statement range[" + sparql + "]");
-        }
-
-        QueryParser parser = new SPARQLParser();
-        ParsedQuery parsedQuery = null;
-        try {
-            parsedQuery = parser.parseQuery(sparql, null);
-        } catch (MalformedQueryException e) {
-            throw new IOException(e);
-        }
-        parsedQuery.getTupleExpr().visitChildren(new QueryModelVisitorBase<IOException>() {
-            @Override
-            public void meet(StatementPattern node) throws IOException {
-                Var subjectVar = node.getSubjectVar();
-                Var predicateVar = node.getPredicateVar();
-                Var objectVar = node.getObjectVar();
-                subject_value = getValue(subjectVar);
-                predicate_value = getValue(predicateVar);
-                object_value = getValue(objectVar);
-                Map.Entry<TABLE_LAYOUT, Range> temp = createRange(subject_value, predicate_value, object_value);
-//                Map.Entry<TABLE_LAYOUT, Range> temp =
-//                        queryRangeFactory.defineRange(subject_value, predicate_value, object_value, null);
-                layout = temp.getKey();
-                Range range = temp.getValue();
-                addRange(range);
-                Var contextVar = node.getContextVar();
-                if (contextVar != null && contextVar.getValue() != null) {
-                    String context_str = contextVar.getValue().stringValue();
-                    addColumnPair(context_str, "");
-                }
-            }
-        });
-    }
-
-    protected Map.Entry<TABLE_LAYOUT, Range> createRange(Value s_v, Value p_v, Value o_v) throws IOException {
-        RyaURI subject_rya = RdfToRyaConversions.convertResource((Resource) s_v);
-        RyaURI predicate_rya = RdfToRyaConversions.convertURI((URI) p_v);
-        RyaType object_rya = RdfToRyaConversions.convertValue(o_v);
-        TriplePatternStrategy strategy = ryaContext.retrieveStrategy(subject_rya, predicate_rya, object_rya, null);
-        if (strategy == null)
-            return new RdfCloudTripleStoreUtils.CustomEntry<TABLE_LAYOUT, Range>(TABLE_LAYOUT.SPO, new Range());
-        Map.Entry<TABLE_LAYOUT, ByteRange> entry = strategy.defineRange(subject_rya, predicate_rya, object_rya, null, null);
-        ByteRange byteRange = entry.getValue();
-        return new RdfCloudTripleStoreUtils.CustomEntry<mvm.rya.api.RdfCloudTripleStoreConstants.TABLE_LAYOUT, Range>(
-                entry.getKey(), new Range(new Text(byteRange.getStart()), new Text(byteRange.getEnd()))
-        );
-    }
-
-    protected void addInferredRanges(String tablePrefix, Job job) throws IOException {
-        logger.info("Adding inferences to statement pattern[subject:" + subject_value + ", predicate:" + predicate_value + ", object:" + object_value + "]");
-        //inference engine
-        CloudbaseRyaDAO ryaDAO = new CloudbaseRyaDAO();
-        CloudbaseRdfConfiguration rdfConf = new CloudbaseRdfConfiguration(job.getConfiguration());
-        rdfConf.setTablePrefix(tablePrefix);
-        ryaDAO.setConf(rdfConf);
-        InferenceEngine inferenceEngine = new InferenceEngine();
-        inferenceEngine.setConf(rdfConf);
-        inferenceEngine.setRyaDAO(ryaDAO);
-        inferenceEngine.setSchedule(false);
-        try {
-            if (!mock) {
-                ryaDAO.setConnector(new ZooKeeperInstance(inst, zookeepers).getConnector(user, password.getBytes()));
-            } else {
-                ryaDAO.setConnector(new MockInstance(inst).getConnector(user, password.getBytes()));
-            }
-
-            ryaDAO.init();
-            inferenceEngine.init();
-            //is it subclassof or subpropertyof
-            if (RDF.TYPE.equals(predicate_value)) {
-                //try subclassof
-                Collection<URI> parents = inferenceEngine.findParents(inferenceEngine.getSubClassOfGraph(), (URI) object_value);
-                if (parents != null && parents.size() > 0) {
-                    //subclassof relationships found
-                    //don't add self, that will happen anyway later
-                    //add all relationships
-                    for (URI parent : parents) {
-                        Map.Entry<TABLE_LAYOUT, Range> temp = createRange(subject_value, predicate_value, parent);
-//                                queryRangeFactory.defineRange(subject_value, predicate_value, parent, rdfConf);
-                        Range range = temp.getValue();
-                        if (logger.isDebugEnabled()) {
-                            logger.debug("Found subClassOf relationship [type:" + object_value + " is subClassOf:" + parent + "]");
-                        }
-                        addRange(range);
-                    }
-                }
-            } else if (predicate_value != null) {
-                //subpropertyof check
-                Set<URI> parents = inferenceEngine.findParents(inferenceEngine.getSubPropertyOfGraph(), (URI) predicate_value);
-                for (URI parent : parents) {
-                    Map.Entry<TABLE_LAYOUT, Range> temp = createRange(subject_value, parent, object_value);
-//                            queryRangeFactory.defineRange(subject_value, parent, object_value, rdfConf);
-                    Range range = temp.getValue();
-                    if (logger.isDebugEnabled()) {
-                        logger.debug("Found subPropertyOf relationship [type:" + predicate_value + " is subPropertyOf:" + parent + "]");
-                    }
-                    addRange(range);
-                }
-            }
-        } catch (Exception e) {
-            throw new IOException(e);
-        } finally {
-            if (inferenceEngine != null) {
-                try {
-                    inferenceEngine.destroy();
-                } catch (InferenceEngineException e) {
-                    throw new IOException(e);
-                }
-            }
-
-            if (ryaDAO != null)
-                try {
-                    ryaDAO.destroy();
-                } catch (RyaDAOException e) {
-                    throw new IOException(e);
-                }
-        }
-
-    }
-
-    @Override
-    public Tuple getNext() throws IOException {
-        try {
-            if (reader.nextKeyValue()) {
-                Key key = (Key) reader.getCurrentKey();
-                cloudbase.core.data.Value value = (cloudbase.core.data.Value) reader.getCurrentValue();
-                ByteArrayDataInput input = ByteStreams.newDataInput(key.getRow().getBytes());
-                RyaStatement ryaStatement = ryaContext.deserializeTriple(layout, new TripleRow(key.getRow().getBytes(),
-                        key.getColumnFamily().getBytes(), key.getColumnQualifier().getBytes()));
-//                        RdfCloudTripleStoreUtils.translateStatementFromRow(input,
-//                        key.getColumnFamily(), layout, RdfCloudTripleStoreConstants.VALUE_FACTORY);
-
-                Tuple tuple = TupleFactory.getInstance().newTuple(4);
-                tuple.set(0, ryaStatement.getSubject().getData());
-                tuple.set(1, ryaStatement.getPredicate().getData());
-                tuple.set(2, ryaStatement.getObject().getData());
-                tuple.set(3, (ryaStatement.getContext() != null) ? (ryaStatement.getContext().getData()) : (null));
-                return tuple;
-            }
-        } catch (Exception e) {
-            throw new IOException(e);
-        }
-        return null;
-    }
-}
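
A companion sketch for this loader (placeholder values again): a single statement pattern is addressed through the URI documented in setLocationFromUri() above, and infer=true also scans the ranges for subClassOf/subPropertyOf parents found by the InferenceEngine:

    import java.io.ByteArrayInputStream;

    import org.apache.pig.ExecType;
    import org.apache.pig.PigServer;

    public class StatementPatternSketch {
        public static void main(String[] args) throws Exception {
            PigServer pig = new PigServer(ExecType.LOCAL);
            // Fixed predicate, free subject/object; no explicit range is needed because
            // the loader derives its ranges from the statement pattern itself.
            String script =
                    "sp = load 'cloudbase://l_?instance=myinstance&user=root&password=secret"
                    + "&zookeepers=127.0.0.1:2181&predicate=<urn:example:pred>&infer=true'"
                    + " using mvm.rya.cloudbase.pig.StatementPatternStorage()"
                    + " AS (subj:chararray, pred:chararray, obj:chararray, context:chararray);\n";
            pig.registerScript(new ByteArrayInputStream(script.getBytes()));
            pig.store("sp", "/tmp/statement-pattern-dump"); // 4-field tuples from getNext() above
        }
    }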


[04/56] [abbrv] incubator-rya git commit: RYA-7 POM and License Clean-up for Apache Move

Posted by mi...@apache.org.
http://git-wip-us.apache.org/repos/asf/incubator-rya/blob/80faf06d/sail/src/test/java/mvm/rya/RdfCloudTripleStoreTest.java
----------------------------------------------------------------------
diff --git a/sail/src/test/java/mvm/rya/RdfCloudTripleStoreTest.java b/sail/src/test/java/mvm/rya/RdfCloudTripleStoreTest.java
new file mode 100644
index 0000000..eee6bce
--- /dev/null
+++ b/sail/src/test/java/mvm/rya/RdfCloudTripleStoreTest.java
@@ -0,0 +1,699 @@
+package mvm.rya;
+
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ * 
+ *   http://www.apache.org/licenses/LICENSE-2.0
+ * 
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied.  See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+
+
+
+import junit.framework.TestCase;
+import mvm.rya.accumulo.AccumuloRdfConfiguration;
+import mvm.rya.accumulo.AccumuloRyaDAO;
+import mvm.rya.api.RdfCloudTripleStoreConstants;
+import mvm.rya.rdftriplestore.RdfCloudTripleStore;
+import org.apache.accumulo.core.client.Connector;
+import org.apache.accumulo.core.client.mock.MockInstance;
+import org.openrdf.model.Namespace;
+import org.openrdf.model.ValueFactory;
+import org.openrdf.model.impl.StatementImpl;
+import org.openrdf.model.impl.ValueFactoryImpl;
+import org.openrdf.query.*;
+import org.openrdf.repository.RepositoryException;
+import org.openrdf.repository.RepositoryResult;
+import org.openrdf.repository.sail.SailRepository;
+import org.openrdf.repository.sail.SailRepositoryConnection;
+
+import javax.xml.datatype.DatatypeConfigurationException;
+import javax.xml.datatype.DatatypeFactory;
+import java.util.GregorianCalendar;
+import java.util.List;
+import java.util.Map;
+import java.util.Set;
+
+/**
+ * Class RdfCloudTripleStoreTest
+ * Date: Jul 6, 2011
+ * Time: 5:24:07 PM
+ */
+public class RdfCloudTripleStoreTest extends TestCase {
+    public static final String NAMESPACE = "http://here/2010/tracked-data-provenance/ns#";//44 len
+    public static final String RDF_NS = "http://www.w3.org/1999/02/22-rdf-syntax-ns#";
+    public static final String HBNAMESPACE = "http://here/2010/tracked-data-provenance/heartbeat/ns#";
+    public static final String HB_TIMESTAMP = HBNAMESPACE + "timestamp";
+
+    private SailRepository repository;
+    private SailRepositoryConnection connection;
+
+    ValueFactory vf = ValueFactoryImpl.getInstance();
+
+    private String objectUuid = "objectuuid1";
+    private String ancestor = "ancestor1";
+    private String descendant = "descendant1";
+    private static final long START = 1309532965000L;
+    private static final long END = 1310566686000L;
+    private Connector connector;
+
+    @Override
+    protected void setUp() throws Exception {
+        super.setUp();
+        connector = new MockInstance().getConnector("", "");
+
+        RdfCloudTripleStore sail = new RdfCloudTripleStore();
+        AccumuloRdfConfiguration conf = new AccumuloRdfConfiguration();
+        conf.setTablePrefix("lubm_");
+        sail.setConf(conf);
+        AccumuloRyaDAO crdfdao = new AccumuloRyaDAO();
+        crdfdao.setConnector(connector);
+        crdfdao.setConf(conf);
+        sail.setRyaDAO(crdfdao);
+
+        repository = new SailRepository(sail);
+        repository.initialize();
+        connection = repository.getConnection();
+
+        loadData();
+    }
+
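+    // Seeds the mock store: one provenance event of each type (Created, Clicked,
+    // Deleted, Dropped, Received, Sent, Stored), derivedFrom links, heartbeat
+    // measurements, and simple urn:subj/urn:pred statements used by the queries below.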
+    private void loadData() throws RepositoryException, DatatypeConfigurationException {
+        connection.add(new StatementImpl(vf.createURI(NAMESPACE, objectUuid), vf.createURI(NAMESPACE, "name"), vf.createLiteral("objUuid")));
+        //created
+        String uuid = "uuid1";
+        connection.add(new StatementImpl(vf.createURI(NAMESPACE, uuid), vf.createURI(RDF_NS, "type"), vf.createURI(NAMESPACE, "Created")));
+        connection.add(new StatementImpl(vf.createURI(NAMESPACE, uuid), vf.createURI(NAMESPACE, "createdItem"), vf.createURI(NAMESPACE, objectUuid)));
+        connection.add(new StatementImpl(vf.createURI(NAMESPACE, uuid), vf.createURI(NAMESPACE, "performedBy"), vf.createURI("urn:system:A")));
+        connection.add(new StatementImpl(vf.createURI(NAMESPACE, uuid), vf.createURI(NAMESPACE, "stringLit"), vf.createLiteral("stringLit1")));
+        connection.add(new StatementImpl(vf.createURI(NAMESPACE, uuid), vf.createURI(NAMESPACE, "stringLit"), vf.createLiteral("stringLit2")));
+        connection.add(new StatementImpl(vf.createURI(NAMESPACE, uuid), vf.createURI(NAMESPACE, "stringLit"), vf.createLiteral("stringLit3")));
+        connection.add(new StatementImpl(vf.createURI(NAMESPACE, uuid), vf.createURI(NAMESPACE, "stringLit"), vf.createLiteral("stringLit4")));
+        connection.add(new StatementImpl(vf.createURI(NAMESPACE, uuid), vf.createURI(NAMESPACE, "strLit1"), vf.createLiteral("strLit1")));
+        connection.add(new StatementImpl(vf.createURI(NAMESPACE, uuid), vf.createURI(NAMESPACE, "strLit1"), vf.createLiteral("strLit2")));
+        connection.add(new StatementImpl(vf.createURI(NAMESPACE, uuid), vf.createURI(NAMESPACE, "strLit1"), vf.createLiteral("strLit3")));
+        connection.add(new StatementImpl(vf.createURI(NAMESPACE, uuid), vf.createURI(NAMESPACE, "performedAt"), vf.createLiteral(DatatypeFactory.newInstance().newXMLGregorianCalendar(2011, 7, 12, 6, 0, 0, 0, 0))));
+        connection.add(new StatementImpl(vf.createURI(NAMESPACE, uuid), vf.createURI(NAMESPACE, "reportedAt"), vf.createLiteral(DatatypeFactory.newInstance().newXMLGregorianCalendar(2011, 7, 12, 6, 1, 0, 0, 0))));
+        //clicked
+        uuid = "uuid2";
+        connection.add(new StatementImpl(vf.createURI(NAMESPACE, uuid), vf.createURI(RDF_NS, "type"), vf.createURI(NAMESPACE, "Clicked")));
+        connection.add(new StatementImpl(vf.createURI(NAMESPACE, uuid), vf.createURI(NAMESPACE, "clickedItem"), vf.createURI(NAMESPACE, objectUuid)));
+        connection.add(new StatementImpl(vf.createURI(NAMESPACE, uuid), vf.createURI(NAMESPACE, "performedBy"), vf.createURI("urn:system:B")));
+        connection.add(new StatementImpl(vf.createURI(NAMESPACE, uuid), vf.createURI(NAMESPACE, "performedAt"), vf.createLiteral(DatatypeFactory.newInstance().newXMLGregorianCalendar(2011, 7, 12, 6, 2, 0, 0, 0))));
+        connection.add(new StatementImpl(vf.createURI(NAMESPACE, uuid), vf.createURI(NAMESPACE, "reportedAt"), vf.createLiteral(DatatypeFactory.newInstance().newXMLGregorianCalendar(2011, 7, 12, 6, 3, 0, 0, 0))));
+        //deleted
+        uuid = "uuid3";
+        connection.add(new StatementImpl(vf.createURI(NAMESPACE, uuid), vf.createURI(RDF_NS, "type"), vf.createURI(NAMESPACE, "Deleted")));
+        connection.add(new StatementImpl(vf.createURI(NAMESPACE, uuid), vf.createURI(NAMESPACE, "deletedItem"), vf.createURI(NAMESPACE, objectUuid)));
+        connection.add(new StatementImpl(vf.createURI(NAMESPACE, uuid), vf.createURI(NAMESPACE, "performedBy"), vf.createURI("urn:system:C")));
+        connection.add(new StatementImpl(vf.createURI(NAMESPACE, uuid), vf.createURI(NAMESPACE, "performedAt"), vf.createLiteral(DatatypeFactory.newInstance().newXMLGregorianCalendar(2011, 7, 12, 6, 4, 0, 0, 0))));
+        connection.add(new StatementImpl(vf.createURI(NAMESPACE, uuid), vf.createURI(NAMESPACE, "reportedAt"), vf.createLiteral(DatatypeFactory.newInstance().newXMLGregorianCalendar(2011, 7, 12, 6, 5, 0, 0, 0))));
+        //dropped
+        uuid = "uuid4";
+        connection.add(new StatementImpl(vf.createURI(NAMESPACE, uuid), vf.createURI(RDF_NS, "type"), vf.createURI(NAMESPACE, "Dropped")));
+        connection.add(new StatementImpl(vf.createURI(NAMESPACE, uuid), vf.createURI(NAMESPACE, "droppedItem"), vf.createURI(NAMESPACE, objectUuid)));
+        connection.add(new StatementImpl(vf.createURI(NAMESPACE, uuid), vf.createURI(NAMESPACE, "performedBy"), vf.createURI("urn:system:D")));
+        connection.add(new StatementImpl(vf.createURI(NAMESPACE, uuid), vf.createURI(NAMESPACE, "performedAt"), vf.createLiteral(DatatypeFactory.newInstance().newXMLGregorianCalendar(2011, 7, 12, 6, 6, 0, 0, 0))));
+        connection.add(new StatementImpl(vf.createURI(NAMESPACE, uuid), vf.createURI(NAMESPACE, "reportedAt"), vf.createLiteral(DatatypeFactory.newInstance().newXMLGregorianCalendar(2011, 7, 12, 6, 7, 0, 0, 0))));
+        //received
+        uuid = "uuid5";
+        connection.add(new StatementImpl(vf.createURI(NAMESPACE, uuid), vf.createURI(RDF_NS, "type"), vf.createURI(NAMESPACE, "Received")));
+        connection.add(new StatementImpl(vf.createURI(NAMESPACE, uuid), vf.createURI(NAMESPACE, "receivedItem"), vf.createURI(NAMESPACE, objectUuid)));
+        connection.add(new StatementImpl(vf.createURI(NAMESPACE, uuid), vf.createURI(NAMESPACE, "performedBy"), vf.createURI("urn:system:E")));
+        connection.add(new StatementImpl(vf.createURI(NAMESPACE, uuid), vf.createURI(NAMESPACE, "performedAt"), vf.createLiteral(DatatypeFactory.newInstance().newXMLGregorianCalendar(2011, 7, 12, 6, 8, 0, 0, 0))));
+        connection.add(new StatementImpl(vf.createURI(NAMESPACE, uuid), vf.createURI(NAMESPACE, "reportedAt"), vf.createLiteral(DatatypeFactory.newInstance().newXMLGregorianCalendar(2011, 7, 12, 6, 9, 0, 0, 0))));
+        //sent
+        uuid = "uuid6";
+        connection.add(new StatementImpl(vf.createURI(NAMESPACE, uuid), vf.createURI(RDF_NS, "type"), vf.createURI(NAMESPACE, "Sent")));
+        connection.add(new StatementImpl(vf.createURI(NAMESPACE, uuid), vf.createURI(NAMESPACE, "sentItem"), vf.createURI(NAMESPACE, objectUuid)));
+        connection.add(new StatementImpl(vf.createURI(NAMESPACE, uuid), vf.createURI(NAMESPACE, "performedBy"), vf.createURI("urn:system:F")));
+        connection.add(new StatementImpl(vf.createURI(NAMESPACE, uuid), vf.createURI(NAMESPACE, "performedAt"), vf.createLiteral(DatatypeFactory.newInstance().newXMLGregorianCalendar(2011, 7, 12, 6, 10, 0, 0, 0))));
+        connection.add(new StatementImpl(vf.createURI(NAMESPACE, uuid), vf.createURI(NAMESPACE, "reportedAt"), vf.createLiteral(DatatypeFactory.newInstance().newXMLGregorianCalendar(2011, 7, 12, 6, 11, 0, 0, 0))));
+        //stored
+        uuid = "uuid7";
+        connection.add(new StatementImpl(vf.createURI(NAMESPACE, uuid), vf.createURI(RDF_NS, "type"), vf.createURI(NAMESPACE, "Stored")));
+        connection.add(new StatementImpl(vf.createURI(NAMESPACE, uuid), vf.createURI(NAMESPACE, "storedItem"), vf.createURI(NAMESPACE, objectUuid)));
+        connection.add(new StatementImpl(vf.createURI(NAMESPACE, uuid), vf.createURI(NAMESPACE, "performedBy"), vf.createURI("urn:system:G")));
+        connection.add(new StatementImpl(vf.createURI(NAMESPACE, uuid), vf.createURI(NAMESPACE, "performedAt"), vf.createLiteral(DatatypeFactory.newInstance().newXMLGregorianCalendar(2011, 7, 12, 6, 12, 0, 0, 0))));
+        connection.add(new StatementImpl(vf.createURI(NAMESPACE, uuid), vf.createURI(NAMESPACE, "reportedAt"), vf.createLiteral(DatatypeFactory.newInstance().newXMLGregorianCalendar(2011, 7, 12, 6, 13, 0, 0, 0))));
+
+        //derivedFrom
+        connection.add(new StatementImpl(vf.createURI(NAMESPACE, descendant), vf.createURI(NAMESPACE, "derivedFrom"), vf.createURI(NAMESPACE, ancestor)));
+        connection.add(new StatementImpl(vf.createURI(NAMESPACE, descendant), vf.createURI(NAMESPACE, "name"), vf.createLiteral("descendantOne")));
+        connection.add(new StatementImpl(vf.createURI(NAMESPACE, ancestor), vf.createURI(NAMESPACE, "name"), vf.createLiteral("ancestor1")));
+
+        //heartbeats
+        String hbuuid = "hbuuid1";
+        connection.add(new StatementImpl(vf.createURI(HBNAMESPACE, hbuuid), vf.createURI(RDF_NS, "type"), vf.createURI(HBNAMESPACE, "HeartbeatMeasurement")));
+        connection.add(new StatementImpl(vf.createURI(HBNAMESPACE, hbuuid), vf.createURI(HB_TIMESTAMP), vf.createLiteral((START + 1) + "")));
+        connection.add(new StatementImpl(vf.createURI(HBNAMESPACE, hbuuid), vf.createURI(HBNAMESPACE, "count"), vf.createLiteral(1 + "")));
+        connection.add(new StatementImpl(vf.createURI(HBNAMESPACE, hbuuid), vf.createURI(HBNAMESPACE, "systemName"), vf.createURI("urn:system:A")));
+        connection.add(new StatementImpl(vf.createURI("urn:system:A"), vf.createURI(HBNAMESPACE, "heartbeat"), vf.createURI(HBNAMESPACE, hbuuid)));
+
+        hbuuid = "hbuuid2";
+        connection.add(new StatementImpl(vf.createURI(HBNAMESPACE, hbuuid), vf.createURI(RDF_NS, "type"), vf.createURI(HBNAMESPACE, "HeartbeatMeasurement")));
+        connection.add(new StatementImpl(vf.createURI(HBNAMESPACE, hbuuid), vf.createURI(HB_TIMESTAMP), vf.createLiteral((START + 2) + "")));
+        connection.add(new StatementImpl(vf.createURI(HBNAMESPACE, hbuuid), vf.createURI(HBNAMESPACE, "count"), vf.createLiteral(2 + "")));
+        connection.add(new StatementImpl(vf.createURI(HBNAMESPACE, hbuuid), vf.createURI(HBNAMESPACE, "systemName"), vf.createURI("urn:system:B")));
+        connection.add(new StatementImpl(vf.createURI("urn:system:B"), vf.createURI(HBNAMESPACE, "heartbeat"), vf.createURI(HBNAMESPACE, hbuuid)));
+
+        hbuuid = "hbuuid3";
+        connection.add(new StatementImpl(vf.createURI(HBNAMESPACE, hbuuid), vf.createURI(RDF_NS, "type"), vf.createURI(HBNAMESPACE, "HeartbeatMeasurement")));
+        connection.add(new StatementImpl(vf.createURI(HBNAMESPACE, hbuuid), vf.createURI(HB_TIMESTAMP), vf.createLiteral((START + 3) + "")));
+        connection.add(new StatementImpl(vf.createURI(HBNAMESPACE, hbuuid), vf.createURI(HBNAMESPACE, "count"), vf.createLiteral(3 + "")));
+        connection.add(new StatementImpl(vf.createURI(HBNAMESPACE, hbuuid), vf.createURI(HBNAMESPACE, "systemName"), vf.createURI("urn:system:C")));
+        connection.add(new StatementImpl(vf.createURI("urn:system:C"), vf.createURI(HBNAMESPACE, "heartbeat"), vf.createURI(HBNAMESPACE, hbuuid)));
+
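+        // Simple statements sharing objects across subjects; these feed the
+        // duplicate-handling and foreign-character queries below.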
+        connection.add(new StatementImpl(vf.createURI("urn:subj1"), vf.createURI("urn:pred"), vf.createLiteral("obj1")));
+        connection.add(new StatementImpl(vf.createURI("urn:subj1"), vf.createURI("urn:pred"), vf.createLiteral("obj2")));
+        connection.add(new StatementImpl(vf.createURI("urn:subj1"), vf.createURI("urn:pred"), vf.createLiteral("obj3")));
+        connection.add(new StatementImpl(vf.createURI("urn:subj1"), vf.createURI("urn:pred"), vf.createLiteral("obj4")));
+        connection.add(new StatementImpl(vf.createURI("urn:subj2"), vf.createURI("urn:pred"), vf.createLiteral("obj1")));
+        connection.add(new StatementImpl(vf.createURI("urn:subj2"), vf.createURI("urn:pred"), vf.createLiteral("obj2")));
+        connection.add(new StatementImpl(vf.createURI("urn:subj2"), vf.createURI("urn:pred"), vf.createLiteral("obj3")));
+        connection.add(new StatementImpl(vf.createURI("urn:subj2"), vf.createURI("urn:pred"), vf.createLiteral("obj4")));
+        connection.add(new StatementImpl(vf.createURI("urn:subj3"), vf.createURI("urn:pred"), vf.createLiteral("obj1")));
+        connection.add(new StatementImpl(vf.createURI("urn:subj3"), vf.createURI("urn:pred"), vf.createLiteral("obj4")));
+
+        //Foreign Chars
+        connection.add(new StatementImpl(vf.createURI("urn:subj1"), vf.createURI("urn:pred"), vf.createLiteral(FAN_CH_SIM))); 
+        connection.add(new StatementImpl(vf.createURI("urn:subj1"), vf.createURI("urn:pred"), vf.createLiteral(FAN_CH_TRAD))); 
+        connection.add(new StatementImpl(vf.createURI("urn:subj1"), vf.createURI("urn:pred"), vf.createLiteral(FAN_TH))); 
+        connection.add(new StatementImpl(vf.createURI("urn:subj1"), vf.createURI("urn:pred"), vf.createLiteral(FAN_RN))); 
+        connection.add(new StatementImpl(vf.createURI("urn:subj2"), vf.createURI("urn:pred"), vf.createLiteral(FAN_CH_SIM)));
+        connection.add(new StatementImpl(vf.createURI("urn:subj2"), vf.createURI("urn:pred"), vf.createLiteral(FAN_CH_TRAD)));
+        connection.add(new StatementImpl(vf.createURI("urn:subj2"), vf.createURI("urn:pred"), vf.createLiteral(FAN_TH)));
+        connection.add(new StatementImpl(vf.createURI("urn:subj2"), vf.createURI("urn:pred"), vf.createLiteral(FAN_RN)));
+        connection.add(new StatementImpl(vf.createURI("urn:subj3"), vf.createURI("urn:pred"), vf.createLiteral(FAN_CH_SIM)));
+        connection.add(new StatementImpl(vf.createURI("urn:subj3"), vf.createURI("urn:pred"), vf.createLiteral(FAN_CH_TRAD)));
+        
+        connection.commit();
+    }
+
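+    // Non-ASCII literals (Simplified/Traditional Chinese, Thai, Russian) used to
+    // verify that multi-byte characters round-trip through the indices.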
+    private static final String FAN_CH_SIM = "风扇";
+    private static final String FAN_CH_TRAD = "風扇";
+    private static final String FAN_TH = "แฟน";
+    private static final String FAN_RN = "вентилятор";
+    
+    @Override
+    protected void tearDown() throws Exception {
+        super.tearDown();
+        connection.close();
+        repository.shutDown();
+    }
+
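+    // Formats a millisecond timestamp as an xsd:dateTime literal suitable for
+    // embedding in the SPARQL range filters below.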
+    protected String getXmlDate(long ts) throws DatatypeConfigurationException {
+        GregorianCalendar gregorianCalendar = new GregorianCalendar();
+        gregorianCalendar.setTimeInMillis(ts);
+        //"2011-07-12T05:12:00.000Z"^^xsd:dateTime
+        return "\"" + vf.createLiteral(DatatypeFactory.newInstance().newXMLGregorianCalendar(gregorianCalendar)).stringValue() + "\"^^xsd:dateTime";
+    }
+
+//    public void testScanAll() throws Exception {
+//        Scanner sc = connector.createScanner("lubm_spo", Constants.NO_AUTHS);
+//        for (Map.Entry<Key, Value> aSc : sc) System.out.println(aSc.getKey().getRow());
+//    }
+
+    public void testNamespace() throws Exception {
+        String namespace = "urn:testNamespace#";
+        String prefix = "pfx";
+        connection.setNamespace(prefix, namespace);
+
+        assertEquals(namespace, connection.getNamespace(prefix));
+    }
+
+    public void testValues() throws Exception {
+        String query = "SELECT DISTINCT ?entity WHERE {"
+                + "VALUES (?entity) { (<http://test/entity>) }"
+                + "}";
+        TupleQuery tupleQuery = connection.prepareTupleQuery(QueryLanguage.SPARQL, query);
+        CountTupleHandler tupleHandler = new CountTupleHandler();
+        tupleQuery.evaluate(tupleHandler);
+        assertEquals(1, tupleHandler.getCount());
+    }
+    
+    public void testGetNamespaces() throws Exception {
+        String namespace = "urn:testNamespace#";
+        String prefix = "pfx";
+        connection.setNamespace(prefix, namespace);
+
+        namespace = "urn:testNamespace2#";
+        prefix = "pfx2";
+        connection.setNamespace(prefix, namespace);
+
+        RepositoryResult<Namespace> result = connection.getNamespaces();
+        int count = 0;
+        while (result.hasNext()) {
+            result.next();
+            count++;
+        }
+
+        assertEquals(2, count);
+    }
+
+    public void testAddCommitStatement() throws Exception {
+        StatementImpl stmt = new StatementImpl(vf.createURI("urn:namespace#subj"), vf.createURI("urn:namespace#pred"), vf.createLiteral("object"));
+        connection.add(stmt);
+        connection.commit();
+    }
+
+    public void testSelectOnlyQuery() throws Exception {
+        String query = "PREFIX ns:<" + NAMESPACE + ">\n" +
+                "select * where {\n" +
+                "ns:uuid1 ns:createdItem ?cr.\n" +
+                "ns:uuid1 ns:reportedAt ?ra.\n" +
+                "ns:uuid1 ns:performedAt ?pa.\n" +
+                "}\n";
+        TupleQuery tupleQuery = connection.prepareTupleQuery(QueryLanguage.SPARQL, query);
+//        tupleQuery.evaluate(new PrintTupleHandler());
+        CountTupleHandler tupleHandler = new CountTupleHandler();
+        tupleQuery.evaluate(tupleHandler);
+        assertEquals(1, tupleHandler.getCount());
+    }
+
+    public void testForeignSelectOnlyQuery() throws Exception {
+        String query;
+        query = "select * where { ?s <urn:pred> ?o }"; // hits po
+        TupleQuery tupleQuery = connection.prepareTupleQuery(QueryLanguage.SPARQL, query);
+        CountTupleHandler tupleHandler = new CountTupleHandler();
+        tupleQuery.evaluate(tupleHandler);
+        assertEquals(20, tupleHandler.getCount());
+
+        query = "select * where { <urn:subj1> <urn:pred> ?o }"; //hits spo
+        tupleQuery = connection.prepareTupleQuery(QueryLanguage.SPARQL, query);
+        tupleHandler = new CountTupleHandler();
+        tupleQuery.evaluate(tupleHandler);
+        assertEquals(8, tupleHandler.getCount());
+
+        query = "select * where { ?s ?p '"+FAN_CH_SIM+"' }"; //hits osp
+        tupleQuery = connection.prepareTupleQuery(QueryLanguage.SPARQL, query);
+        tupleHandler = new CountTupleHandler();
+        tupleQuery.evaluate(tupleHandler);
+        assertEquals(3, tupleHandler.getCount());
+    }
+
+    //provenance Queries//////////////////////////////////////////////////////////////////////
+
+    public void testEventInfo() throws Exception {
+        String query = "PREFIX ns:<" + NAMESPACE + ">\n" +
+                "select * where {\n" +
+                "   ns:uuid1 ?p ?o.\n" +
+                "}\n";
+
+        TupleQuery tupleQuery = connection.prepareTupleQuery(QueryLanguage.SPARQL, query);
+//                tupleQuery.evaluate(new PrintTupleHandler());
+        CountTupleHandler tupleHandler = new CountTupleHandler();
+        tupleQuery.evaluate(tupleHandler);
+        assertEquals(12, tupleHandler.getCount());
+    }
+
+    public void testAllAncestors() throws Exception {
+        String query = "PREFIX ns:<" + NAMESPACE + ">\n" +
+                "select * where {\n" +
+                "ns:" + descendant + " ns:derivedFrom ?dr.\n" +
+                "}\n";
+        TupleQuery tupleQuery = connection.prepareTupleQuery(QueryLanguage.SPARQL, query);
+        //        tupleQuery.evaluate(new PrintTupleHandler());
+        CountTupleHandler tupleHandler = new CountTupleHandler();
+        tupleQuery.evaluate(tupleHandler);
+        assertEquals(1, tupleHandler.getCount());
+    }
+
+    public void testAllDescendants() throws Exception {
+        String query = "PREFIX ns:<" + NAMESPACE + ">\n" +
+                "select * where {\n" +
+                "?ds ns:derivedFrom ns:" + ancestor + ".\n" +
+                "}\n";
+        TupleQuery tupleQuery = connection.prepareTupleQuery(QueryLanguage.SPARQL, query);
+//        tupleQuery.evaluate(new PrintTupleHandler());
+        CountTupleHandler tupleHandler = new CountTupleHandler();
+        tupleQuery.evaluate(tupleHandler);
+        assertEquals(1, tupleHandler.getCount());
+    }
+
+    public void testEventsForUri() throws Exception {
+        String query = "PREFIX rdf: <http://www.w3.org/1999/02/22-rdf-syntax-ns#>\n" +
+                "PREFIX ns:<" + NAMESPACE + ">\n" +
+                "PREFIX mvmpart: <urn:mvm.mmrts.partition.rdf/08/2011#>\n" +
+                "PREFIX mvm: <" + RdfCloudTripleStoreConstants.NAMESPACE + ">\n" +
+                "PREFIX xsd: <http://www.w3.org/2001/XMLSchema#>\n" +
+                "select * where {\n" +
+                "{" +
+                "   ?s rdf:type ns:Created.\n" +
+                "   ?s ns:createdItem ns:objectuuid1.\n" +
+                "   ?s ns:performedBy ?pb.\n" +
+                "   ?s ns:performedAt ?pa.\n" +
+                "   FILTER(mvm:range(?pa, " + getXmlDate(START) + ", " + getXmlDate(END) + "))\n" +
+                "}\n" +
+                "UNION {" +
+                "   ?s rdf:type ns:Clicked.\n" +
+                "   ?s ns:clickedItem ns:objectuuid1.\n" +
+                "   ?s ns:performedBy ?pb.\n" +
+                "   ?s ns:performedAt ?pa.\n" +
+                "   FILTER(mvm:range(?pa, " + getXmlDate(START) + ", " + getXmlDate(END) + "))\n" +
+                "}\n" +
+                "UNION {" +
+                "   ?s rdf:type ns:Deleted.\n" +
+                "   ?s ns:deletedItem ns:objectuuid1.\n" +
+                "   ?s ns:performedBy ?pb.\n" +
+                "   ?s ns:performedAt ?pa.\n" +
+                "   FILTER(mvm:range(?pa, " + getXmlDate(START) + ", " + getXmlDate(END) + "))\n" +
+                "}\n" +
+                "UNION {" +
+                "   ?s rdf:type ns:Dropped.\n" +
+                "   ?s ns:droppedItem ns:objectuuid1.\n" +
+                "   ?s ns:performedBy ?pb.\n" +
+                "   ?s ns:performedAt ?pa.\n" +
+                "   FILTER(mvm:range(?pa, " + getXmlDate(START) + ", " + getXmlDate(END) + "))\n" +
+                "}\n" +
+                "UNION {" +
+                "   ?s rdf:type ns:Received.\n" +
+                "   ?s ns:receivedItem ns:objectuuid1.\n" +
+                "   ?s ns:performedBy ?pb.\n" +
+                "   ?s ns:performedAt ?pa.\n" +
+                "   FILTER(mvm:range(?pa, " + getXmlDate(START) + ", " + getXmlDate(END) + "))\n" +
+                "}\n" +
+                "UNION {" +
+                "   ?s rdf:type ns:Stored.\n" +
+                "   ?s ns:storedItem ns:objectuuid1.\n" +
+                "   ?s ns:performedBy ?pb.\n" +
+                "   ?s ns:performedAt ?pa.\n" +
+                "   FILTER(mvm:range(?pa, " + getXmlDate(START) + ", " + getXmlDate(END) + "))\n" +
+                "}\n" +
+                "UNION {" +
+                "   ?s rdf:type ns:Sent.\n" +
+                "   ?s ns:sentItem ns:objectuuid1.\n" +
+                "   ?s ns:performedBy ?pb.\n" +
+                "   ?s ns:performedAt ?pa.\n" +
+                "   FILTER(mvm:range(?pa, " + getXmlDate(START) + ", " + getXmlDate(END) + "))\n" +
+                "}\n" +
+                "}\n";
+        TupleQuery tupleQuery = connection.prepareTupleQuery(QueryLanguage.SPARQL, query);
+//        tupleQuery.setBinding(START_BINDING, vf.createLiteral(START));
+//        tupleQuery.setBinding(END_BINDING, vf.createLiteral(END));
+//        tupleQuery.setBinding(TIME_PREDICATE, vf.createURI(NAMESPACE, "performedAt"));
+//                tupleQuery.evaluate(new PrintTupleHandler());
+        CountTupleHandler tupleHandler = new CountTupleHandler();
+        tupleQuery.evaluate(tupleHandler);
+        assertEquals(7, tupleHandler.getCount());
+    }
+
+    public void testAllEvents() throws Exception {
+        String query = "PREFIX rdf: <http://www.w3.org/1999/02/22-rdf-syntax-ns#>\n" +
+                "PREFIX ns:<" + NAMESPACE + ">\n" +
+                "PREFIX mvmpart: <urn:mvm.mmrts.partition.rdf/08/2011#>\n" +
+                "PREFIX mvm: <" + RdfCloudTripleStoreConstants.NAMESPACE + ">\n" +
+                "PREFIX xsd: <http://www.w3.org/2001/XMLSchema#>\n" +
+                "select * where {\n" +
+                "{" +
+                "   ?s rdf:type ns:Created.\n" +
+                "   ?s ns:performedBy ?pb.\n" +
+                "   ?s ns:performedAt ?pa.\n" +
+                "   FILTER(mvm:range(?pa, " + getXmlDate(START) + ", " + getXmlDate(END) + "))\n" +
+                "}\n" +
+                "UNION {" +
+                "   ?s rdf:type ns:Clicked.\n" +
+                "   ?s ns:performedBy ?pb.\n" +
+                "   ?s ns:performedAt ?pa.\n" +
+                "   FILTER(mvm:range(?pa, " + getXmlDate(START) + ", " + getXmlDate(END) + "))\n" +
+                "}\n" +
+                "UNION {" +
+                "   ?s rdf:type ns:Deleted.\n" +
+                "   ?s ns:performedBy ?pb.\n" +
+                "   ?s ns:performedAt ?pa.\n" +
+                "   FILTER(mvm:range(?pa, " + getXmlDate(START) + ", " + getXmlDate(END) + "))\n" +
+                "}\n" +
+                "UNION {" +
+                "   ?s rdf:type ns:Dropped.\n" +
+                "   ?s ns:performedBy ?pb.\n" +
+                "   ?s ns:performedAt ?pa.\n" +
+                "   FILTER(mvm:range(?pa, " + getXmlDate(START) + ", " + getXmlDate(END) + "))\n" +
+                "}\n" +
+                "UNION {" +
+                "   ?s rdf:type ns:Received.\n" +
+                "   ?s ns:performedBy ?pb.\n" +
+                "   ?s ns:performedAt ?pa.\n" +
+                "   FILTER(mvm:range(?pa, " + getXmlDate(START) + ", " + getXmlDate(END) + "))\n" +
+                "}\n" +
+                "UNION {" +
+                "   ?s rdf:type ns:Stored.\n" +
+                "   ?s ns:performedBy ?pb.\n" +
+                "   ?s ns:performedAt ?pa.\n" +
+                "   FILTER(mvm:range(?pa, " + getXmlDate(START) + ", " + getXmlDate(END) + "))\n" +
+                "}\n" +
+                "UNION {" +
+                "   ?s rdf:type ns:Sent.\n" +
+                "   ?s ns:performedBy ?pb.\n" +
+                "   ?s ns:performedAt ?pa.\n" +
+                "   FILTER(mvm:range(?pa, " + getXmlDate(START) + ", " + getXmlDate(END) + "))\n" +
+                "}\n" +
+                "}\n";
+        TupleQuery tupleQuery = connection.prepareTupleQuery(QueryLanguage.SPARQL, query);
+//        tupleQuery.setBinding(START_BINDING, vf.createLiteral(START));
+//        tupleQuery.setBinding(END_BINDING, vf.createLiteral(END));
+//        tupleQuery.setBinding(TIME_PREDICATE, vf.createURI(NAMESPACE, "performedAt"));
+//                tupleQuery.evaluate(new PrintTupleHandler());
+        CountTupleHandler tupleHandler = new CountTupleHandler();
+        tupleQuery.evaluate(tupleHandler);
+        assertEquals(7, tupleHandler.getCount());
+//        System.out.println(tupleHandler.getCount());
+    }
+
+    public void testEventsBtwnSystems() throws Exception {  //TODO: How to do XMLDateTime ranges
+        String query = "PREFIX rdf: <http://www.w3.org/1999/02/22-rdf-syntax-ns#>\n" +
+                "PREFIX ns:<" + NAMESPACE + ">\n" +
+                "PREFIX mvmpart: <urn:mvm.mmrts.partition.rdf/08/2011#>\n" +
+                "PREFIX mvm: <" + RdfCloudTripleStoreConstants.NAMESPACE + ">\n" +
+                "PREFIX xsd: <http://www.w3.org/2001/XMLSchema#>\n" +
+                "select * where {\n" +
+                "   ?sendEvent rdf:type ns:Sent;\n" +
+                "              ns:sentItem ?objUuid;\n" +
+                "              ns:performedBy <urn:system:F>;\n" +
+                "              ns:performedAt ?spa.\n" +
+                "   ?recEvent rdf:type ns:Received;\n" +
+                "              ns:receivedItem ?objUuid;\n" +
+                "              ns:performedBy <urn:system:E>;\n" +
+                "              ns:performedAt ?rpa.\n" +
+//                "   FILTER(mvm:range(?spa, \"2011-07-12T05:12:00.000Z\"^^xsd:dateTime, \"2011-07-12T07:12:00.000Z\"^^xsd:dateTime))\n" +
+                "   FILTER(mvm:range(?spa, " + getXmlDate(START) + ", " + getXmlDate(END) + "))\n" +
+                "   FILTER(mvm:range(?rpa, " + getXmlDate(START) + ", " + getXmlDate(END) + "))\n" +
+                "}\n";
+        TupleQuery tupleQuery = connection.prepareTupleQuery(QueryLanguage.SPARQL, query);
+//        tupleQuery.setBinding(START_BINDING, vf.createLiteral(START));
+//        tupleQuery.setBinding(END_BINDING, vf.createLiteral(END));
+//        tupleQuery.setBinding(TIME_PREDICATE, vf.createURI(NAMESPACE, "performedAt"));
+//        tupleQuery.evaluate(new PrintTupleHandler());
+        CountTupleHandler tupleHandler = new CountTupleHandler();
+        tupleQuery.evaluate(tupleHandler);
+        assertEquals(1, tupleHandler.getCount());
+    }
+
+    public void testHeartbeatCounts() throws Exception {
+        String query = "PREFIX rdf: <http://www.w3.org/1999/02/22-rdf-syntax-ns#>\n" +
+                "PREFIX hns:<" + HBNAMESPACE + ">\n" +
+                "PREFIX mvmpart: <urn:mvm.mmrts.partition.rdf/08/2011#>\n" +
+                "PREFIX mvm: <" + RdfCloudTripleStoreConstants.NAMESPACE + ">\n" +
+                "PREFIX xsd: <http://www.w3.org/2001/XMLSchema#>\n" +
+                "select * where {\n" +
+                "   ?hb rdf:type hns:HeartbeatMeasurement;\n" +
+                "              hns:count ?count;\n" +
+                "              hns:timestamp ?ts;\n" +
+                "              hns:systemName ?systemName.\n" +
+                "   FILTER(mvm:range(?ts, \"" + START + "\", \"" + (START + 3) + "\"))\n" +
+                "}\n";
+//        System.out.println(query);
+        TupleQuery tupleQuery = connection.prepareTupleQuery(QueryLanguage.SPARQL, query);
+//        tupleQuery.setBinding(RdfCloudTripleStoreConfiguration.CONF_QUERYPLAN_FLAG, vf.createLiteral(true));
+//        tupleQuery.evaluate(new PrintTupleHandler());
+        CountTupleHandler tupleHandler = new CountTupleHandler();
+        tupleQuery.evaluate(tupleHandler);
+        assertEquals(2, tupleHandler.getCount());
+    }
+
+    //provenance Queries//////////////////////////////////////////////////////////////////////
+
+    public void testCreatedEvents() throws Exception {
+        String query = "PREFIX ns:<" + NAMESPACE + ">\n" +
+                "select * where {\n" +
+                "   ?s ns:createdItem ns:objectuuid1.\n" +
+                "   ?s ns:reportedAt ?ra.\n" +
+                "}\n";
+        TupleQuery tupleQuery = connection.prepareTupleQuery(QueryLanguage.SPARQL, query);
+        CountTupleHandler tupleHandler = new CountTupleHandler();
+        tupleQuery.evaluate(tupleHandler);
+        assertEquals(1, tupleHandler.getCount());
+    }
+
+    public void testSelectAllAfterFilter() throws Exception {
+        String query = "PREFIX ns:<" + NAMESPACE + ">\n" +
+                "select * where {\n" +
+                "   ?s ns:createdItem ns:objectuuid1.\n" +
+                "   ?s ?p ?o.\n" +
+                "}\n";
+        TupleQuery tupleQuery = connection.prepareTupleQuery(QueryLanguage.SPARQL, query);
+        CountTupleHandler tupleHandler = new CountTupleHandler();
+        tupleQuery.evaluate(tupleHandler);
+        assertEquals(12, tupleHandler.getCount());
+    }
+
+    public void testFilterQuery() throws Exception {
+        String query = "PREFIX ns:<" + NAMESPACE + ">\n" +
+                "select * where {\n" +
+                "ns:uuid1 ns:createdItem ?cr.\n" +
+                "ns:uuid1 ns:stringLit ?sl.\n" +
+                "FILTER regex(?sl, \"stringLit1\")" +
+                "ns:uuid1 ns:reportedAt ?ra.\n" +
+                "ns:uuid1 ns:performedAt ?pa.\n" +
+                "}\n";
+        TupleQuery tupleQuery = connection.prepareTupleQuery(QueryLanguage.SPARQL, query);
+        //        tupleQuery.evaluate(new PrintTupleHandler());
+        CountTupleHandler tupleHandler = new CountTupleHandler();
+        tupleQuery.evaluate(tupleHandler);
+        assertEquals(1, tupleHandler.getCount());
+    }
+
+    public void testMultiplePredicatesMultipleBindingSets() throws Exception {
+        //MMRTS-121
+        String query = "PREFIX ns:<" + NAMESPACE + ">\n" +
+                "select * where {\n" +
+                "?id ns:createdItem ns:objectuuid1.\n" +
+                "?id ns:stringLit ?sl.\n" +
+                "?id ns:strLit1 ?s2.\n" +
+                "}\n";
+        TupleQuery tupleQuery = connection.prepareTupleQuery(QueryLanguage.SPARQL, query);
+//        tupleQuery.evaluate(new PrintTupleHandler());
+        CountTupleHandler tupleHandler = new CountTupleHandler();
+        tupleQuery.evaluate(tupleHandler);
+        assertEquals(12, tupleHandler.getCount());
+    }
+
+    public void testMultiShardLookupTimeRange() throws Exception {
+        //MMRTS-113
+        String query = "PREFIX hb: <http://here/2010/tracked-data-provenance/heartbeat/ns#>\n" +
+                "PREFIX mvmpart: <urn:mvm.mmrts.partition.rdf/08/2011#>\n" +
+                "SELECT * WHERE\n" +
+                "{\n" +
+                "?id hb:timestamp ?timestamp.\n" +
+//                "FILTER(mvmpart:timeRange(?id, hb:timestamp, " + START + " , " + (START + 2) + " , 'TIMESTAMP'))\n" +
+                "?id hb:count ?count.\n" +
+                "?system hb:heartbeat ?id.\n" +
+                "}";
+        TupleQuery tupleQuery = connection.prepareTupleQuery(QueryLanguage.SPARQL, query);
+//        tupleQuery.evaluate(new PrintTupleHandler());
+        CountTupleHandler tupleHandler = new CountTupleHandler();
+        tupleQuery.evaluate(tupleHandler);
+        assertEquals(3, tupleHandler.getCount());
+    }
+
+    public void testMultiShardLookupTimeRangeValueConst() throws Exception {
+        //MMRTS-113
+        String query = "PREFIX hb: <http://here/2010/tracked-data-provenance/heartbeat/ns#>\n" +
+                "PREFIX mvmpart: <urn:mvm.mmrts.partition.rdf/08/2011#>\n" +
+                "SELECT * WHERE\n" +
+                "{\n" +
+                "<http://here/2010/tracked-data-provenance/heartbeat/ns#hbuuid2> hb:timestamp ?timestamp.\n" +
+//                "FILTER(mvmpart:timeRange(<http://here/2010/tracked-data-provenance/heartbeat/ns#hbuuid2>, hb:timestamp, " + START + " , " + END + " , 'TIMESTAMP'))\n" +
+                "<http://here/2010/tracked-data-provenance/heartbeat/ns#hbuuid2> hb:count ?count.\n" +
+                "?system hb:heartbeat <http://here/2010/tracked-data-provenance/heartbeat/ns#hbuuid2>.\n" +
+                "}";
+        TupleQuery tupleQuery = connection.prepareTupleQuery(QueryLanguage.SPARQL, query);
+//        tupleQuery.evaluate(new PrintTupleHandler());
+        CountTupleHandler tupleHandler = new CountTupleHandler();
+        tupleQuery.evaluate(tupleHandler);
+        assertEquals(1, tupleHandler.getCount());
+    }
+
+    public void testLinkQuery() throws Exception {
+        String query = "PREFIX ns:<" + NAMESPACE + ">\n" +
+                "SELECT * WHERE {\n" +
+                "     <http://here/2010/tracked-data-provenance/ns#uuid1> ns:createdItem ?o .\n" +
+                "     ?o ns:name ?n .\n" +
+                "}";
+        TupleQuery tupleQuery = connection.prepareTupleQuery(QueryLanguage.SPARQL, query);
+//        tupleQuery.evaluate(new PrintTupleHandler());
+        CountTupleHandler tupleHandler = new CountTupleHandler();
+        tupleQuery.evaluate(tupleHandler);
+        assertEquals(1, tupleHandler.getCount());
+    }
+
+    public void testRangeOverDuplicateItems() throws Exception {
+        String query = "PREFIX ns:<" + NAMESPACE + ">\n" +
+                "SELECT * WHERE {\n" +
+                "     ?subj <urn:pred> \"obj2\" .\n" +
+                "}";
+        TupleQuery tupleQuery = connection.prepareTupleQuery(QueryLanguage.SPARQL, query);
+//        tupleQuery.evaluate(new PrintTupleHandler());
+        CountTupleHandler tupleHandler = new CountTupleHandler();
+        tupleQuery.evaluate(tupleHandler);
+        assertEquals(2, tupleHandler.getCount());
+    }
+
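+    // Prints each solution; kept for ad-hoc debugging via the commented-out
+    // evaluate(new PrintTupleHandler()) calls above.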
+    private static class PrintTupleHandler implements TupleQueryResultHandler {
+
+        @Override
+        public void startQueryResult(List<String> strings) throws TupleQueryResultHandlerException {
+        }
+
+        @Override
+        public void endQueryResult() throws TupleQueryResultHandlerException {
+
+        }
+
+        @Override
+        public void handleSolution(BindingSet bindingSet) throws TupleQueryResultHandlerException {
+            System.out.println(bindingSet);
+        }
+
+        @Override
+        public void handleBoolean(boolean paramBoolean) throws QueryResultHandlerException {
+        }
+
+        @Override
+        public void handleLinks(List<String> paramList) throws QueryResultHandlerException {
+        }
+    }
+
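+    // Counts solutions so tests can assert result-set sizes.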
+    private static class CountTupleHandler implements TupleQueryResultHandler {
+
+        int count = 0;
+
+        @Override
+        public void startQueryResult(List<String> strings) throws TupleQueryResultHandlerException {
+        }
+
+        @Override
+        public void endQueryResult() throws TupleQueryResultHandlerException {
+        }
+
+        @Override
+        public void handleSolution(BindingSet bindingSet) throws TupleQueryResultHandlerException {
+            count++;
+        }
+
+        public int getCount() {
+            return count;
+        }
+
+        @Override
+        public void handleBoolean(boolean paramBoolean) throws QueryResultHandlerException {
+        }
+
+        @Override
+        public void handleLinks(List<String> paramList) throws QueryResultHandlerException {
+        }
+    }
+
+}

http://git-wip-us.apache.org/repos/asf/incubator-rya/blob/80faf06d/sail/src/test/java/mvm/rya/RdfCloudTripleStoreUtilsTest.java
----------------------------------------------------------------------
diff --git a/sail/src/test/java/mvm/rya/RdfCloudTripleStoreUtilsTest.java b/sail/src/test/java/mvm/rya/RdfCloudTripleStoreUtilsTest.java
new file mode 100644
index 0000000..de49ef2
--- /dev/null
+++ b/sail/src/test/java/mvm/rya/RdfCloudTripleStoreUtilsTest.java
@@ -0,0 +1,86 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ * 
+ *   http://www.apache.org/licenses/LICENSE-2.0
+ * 
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied.  See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+
+//package mvm.rya;
+
+//
+//import java.util.List;
+//
+//import junit.framework.TestCase;
+//
+//import org.openrdf.model.BNode;
+//import org.openrdf.model.Resource;
+//import org.openrdf.model.URI;
+//import org.openrdf.model.Value;
+//import org.openrdf.model.impl.ValueFactoryImpl;
+//
+//import com.google.common.io.ByteStreams;
+//
+//import static mvm.rya.api.RdfCloudTripleStoreUtils.*;
+//
+//public class RdfCloudTripleStoreUtilsTest extends TestCase {
+//
+//	public void testWriteReadURI() throws Exception {
+//		final ValueFactoryImpl vf = new ValueFactoryImpl();
+//		URI uri = vf.createURI("http://www.example.org/test/rel");
+//		byte[] value = writeValue(uri);
+//
+//		Value readValue = readValue(ByteStreams
+//				.newDataInput(value), vf);
+//		assertEquals(uri, readValue);
+//	}
+//
+//	public void testWriteReadBNode() throws Exception {
+//		final ValueFactoryImpl vf = new ValueFactoryImpl();
+//		Value val = vf.createBNode("bnodeid");
+//		byte[] value = writeValue(val);
+//
+//		Value readValue = readValue(ByteStreams
+//				.newDataInput(value), vf);
+//		assertEquals(val, readValue);
+//	}
+//
+//	public void testWriteReadLiteral() throws Exception {
+//		final ValueFactoryImpl vf = new ValueFactoryImpl();
+//		Value val = vf.createLiteral("myliteral");
+//		byte[] value = writeValue(val);
+//
+//		Value readValue = readValue(ByteStreams
+//				.newDataInput(value), vf);
+//		assertEquals(val, readValue);
+//	}
+//
+//	public void testContexts() throws Exception {
+//		final ValueFactoryImpl vf = new ValueFactoryImpl();
+//		BNode cont1 = vf.createBNode("cont1");
+//		BNode cont2 = vf.createBNode("cont2");
+//		BNode cont3 = vf.createBNode("cont3");
+//
+//		byte[] cont_bytes = writeContexts(cont1, cont2,
+//				cont3);
+//		final String cont = new String(cont_bytes);
+//		System.out.println(cont);
+//
+//		List<Resource> contexts = readContexts(cont_bytes,
+//				vf);
+//		for (Resource resource : contexts) {
+//			System.out.println(resource);
+//		}
+//	}
+//}

http://git-wip-us.apache.org/repos/asf/incubator-rya/blob/80faf06d/sail/src/test/java/mvm/rya/rdftriplestore/evaluation/QueryJoinSelectOptimizerTest.java
----------------------------------------------------------------------
diff --git a/sail/src/test/java/mvm/rya/rdftriplestore/evaluation/QueryJoinSelectOptimizerTest.java b/sail/src/test/java/mvm/rya/rdftriplestore/evaluation/QueryJoinSelectOptimizerTest.java
new file mode 100644
index 0000000..c97c854
--- /dev/null
+++ b/sail/src/test/java/mvm/rya/rdftriplestore/evaluation/QueryJoinSelectOptimizerTest.java
@@ -0,0 +1,992 @@
+package mvm.rya.rdftriplestore.evaluation;
+
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ * 
+ *   http://www.apache.org/licenses/LICENSE-2.0
+ * 
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied.  See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+
+
+
+import java.util.ArrayList;
+import java.util.Arrays;
+import java.util.List;
+import java.util.Map;
+import java.util.concurrent.TimeUnit;
+
+import mvm.rya.accumulo.AccumuloRdfConfiguration;
+import mvm.rya.api.RdfCloudTripleStoreConfiguration;
+import mvm.rya.api.layout.TablePrefixLayoutStrategy;
+import mvm.rya.api.persist.RdfEvalStatsDAO;
+import mvm.rya.joinselect.AccumuloSelectivityEvalDAO;
+import mvm.rya.prospector.service.ProspectorServiceEvalStatsDAO;
+import mvm.rya.rdftriplestore.evaluation.QueryJoinSelectOptimizer;
+import mvm.rya.rdftriplestore.evaluation.RdfCloudTripleStoreSelectivityEvaluationStatistics;
+
+import org.apache.accumulo.core.client.AccumuloException;
+import org.apache.accumulo.core.client.AccumuloSecurityException;
+import org.apache.accumulo.core.client.BatchWriter;
+import org.apache.accumulo.core.client.BatchWriterConfig;
+import org.apache.accumulo.core.client.Connector;
+import org.apache.accumulo.core.client.Instance;
+import org.apache.accumulo.core.client.Scanner;
+import org.apache.accumulo.core.client.TableExistsException;
+import org.apache.accumulo.core.client.TableNotFoundException;
+import org.apache.accumulo.core.client.mock.MockInstance;
+import org.apache.accumulo.core.client.security.tokens.PasswordToken;
+import org.apache.accumulo.core.data.Key;
+import org.apache.accumulo.core.data.Mutation;
+import org.apache.accumulo.core.data.Range;
+import org.apache.accumulo.core.data.Value;
+import org.apache.accumulo.core.security.Authorizations;
+import org.apache.hadoop.io.Text;
+import org.junit.Assert;
+import org.junit.Before;
+import org.junit.Test;
+import org.openrdf.query.MalformedQueryException;
+import org.openrdf.query.algebra.TupleExpr;
+import org.openrdf.query.algebra.evaluation.impl.FilterOptimizer;
+import org.openrdf.query.parser.ParsedQuery;
+import org.openrdf.query.parser.sparql.SPARQLParser;
+
+public class QueryJoinSelectOptimizerTest {
+
+  private static final String DELIM = "\u0000";
+  private final byte[] EMPTY_BYTE = new byte[0];
+  private final Value EMPTY_VAL = new Value(EMPTY_BYTE);
+
+  private String q1 = ""//
+      + "SELECT ?h  " //
+      + "{" //
+      + "  ?h <http://www.w3.org/2000/01/rdf-schema#label> <uri:dog> ."//
+      + "  ?h <uri:barksAt> <uri:cat> ."//
+      + "  ?h <uri:peesOn> <uri:hydrant> . "//
+      + "}";//
+
+  private String Q1 = ""//
+      + "SELECT ?h  " //
+      + "{" //
+      + "  ?h <uri:peesOn> <uri:hydrant> . "//
+      + "  ?h <uri:barksAt> <uri:cat> ."//
+      + "  ?h <http://www.w3.org/2000/01/rdf-schema#label> <uri:dog> ."//
+      + "}";//
+
+  private String q2 = ""//
+      + "SELECT ?h ?l ?m" //
+      + "{" //
+      + "  ?h <http://www.w3.org/2000/01/rdf-schema#label> <uri:dog> ."//
+      + "  ?h <uri:barksAt> <uri:cat> ."//
+      + "  ?h <uri:peesOn> <uri:hydrant> . "//
+      + "  ?m <uri:eats>  <uri:chickens>. " //
+      + "  ?m <uri:scratches> <uri:ears>. " //
+      + "}";//
+
+  private String Q2 = ""//
+      + "SELECT ?h ?l ?m" //
+      + "{" //
+      + "  ?h <uri:peesOn> <uri:hydrant> . "//
+      + "  ?h <uri:barksAt> <uri:cat> ."//
+      + "  ?h <http://www.w3.org/2000/01/rdf-schema#label> <uri:dog> ."//
+      + "  ?m <uri:eats>  <uri:chickens>. " //
+      + "  ?m <uri:scratches> <uri:ears>. " //
+      + "}";//
+
+  private String q3 = ""//
+      + "SELECT ?h ?l ?m" //
+      + "{" //
+      + "  ?h <http://www.w3.org/2000/01/rdf-schema#label> <uri:dog> ."//
+      + "  ?h <uri:barksAt> <uri:cat> ."//
+      + "  ?h <uri:peesOn> <uri:hydrant> . "//
+      + "  {?m <uri:eats>  <uri:chickens>} OPTIONAL {?m <uri:scratches> <uri:ears>}. " //
+      + "  {?m <uri:eats>  <uri:kibble>. ?m <uri:watches> <uri:television>.} UNION {?m <uri:rollsIn> <uri:mud>}. " //
+      + "  ?l <uri:runsIn> <uri:field> ."//
+      + "  ?l <uri:smells> <uri:butt> ."//
+      + "  ?l <uri:eats> <uri:sticks> ."//
+      + "}";//
+
+  private String Q4 = ""//
+      + "SELECT ?h ?l ?m" //
+      + "{" //
+      + "  ?h <uri:barksAt> <uri:cat> ."//
+      + "  ?m <uri:scratches> <uri:ears>. " //
+      + "  ?m <uri:eats>  <uri:chickens>. " //
+      + "  ?h <http://www.w3.org/2000/01/rdf-schema#label> <uri:dog> ."//
+      + "  ?h <uri:peesOn> <uri:hydrant> . "//
+      + "}";//
+
+  private String q5 = ""//
+      + "SELECT ?h ?l ?m" //
+      + "{" //
+      + "  ?h <http://www.w3.org/2000/01/rdf-schema#label> <uri:dog> ."//
+      + "  ?h <uri:barksAt> <uri:cat> ."//
+      + "  ?h <uri:peesOn> <uri:hydrant> . "//
+      + "  {?m <uri:eats>  <uri:kibble>. ?m <uri:watches> <uri:television>.?m <uri:eats>  <uri:chickens>} " + "  UNION {?m <uri:rollsIn> <uri:mud>}. " //
+      + "  ?l <uri:runsIn> <uri:field> ."//
+      + "  ?l <uri:smells> <uri:butt> ."//
+      + "  ?l <uri:eats> <uri:sticks> ."//
+      + "}";//
+  
+  
+  private String q6 = ""//
+          + "SELECT ?h ?l ?m" //
+          + "{" //
+          + "  ?h <http://www.w3.org/2000/01/rdf-schema#label> <uri:dog> ."//
+          + "  ?h <uri:barksAt> <uri:cat> ."//
+          + "  ?h <uri:peesOn> <uri:hydrant> . "//
+           + "  FILTER(?l = <uri:grover>) ." //
+          + "  {?m <uri:eats>  <uri:kibble>. ?m <uri:watches> <uri:television>.?m <uri:eats>  <uri:chickens>} " + "  UNION {?m <uri:rollsIn> <uri:mud>}. " //
+          + "  ?l <uri:runsIn> <uri:field> ."//
+          + "  ?l <uri:smells> <uri:butt> ."//
+          + "  ?l <uri:eats> <uri:sticks> ."//
+          + "}";//
+
+  private Connector conn;
+  AccumuloRdfConfiguration arc;
+  BatchWriterConfig config;
+  RdfEvalStatsDAO<RdfCloudTripleStoreConfiguration> res;
+  Instance mock;
+
+  @Before
+  public void init() throws AccumuloException, AccumuloSecurityException, TableNotFoundException, TableExistsException {
+
+    mock = new MockInstance("accumulo");
+    PasswordToken pToken = new PasswordToken("pass".getBytes());
+    conn = mock.getConnector("user", pToken);
+
+    config = new BatchWriterConfig();
+    config.setMaxMemory(1000);
+    config.setMaxLatency(1000, TimeUnit.SECONDS);
+    config.setMaxWriteThreads(10);
+
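+    // rya_prospects holds raw triple-pattern counts and rya_selectivity holds
+    // join-selectivity statistics; drop any leftovers so each test starts clean.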
+    if (conn.tableOperations().exists("rya_prospects")) {
+      conn.tableOperations().delete("rya_prospects");
+    }
+    if (conn.tableOperations().exists("rya_selectivity")) {
+      conn.tableOperations().delete("rya_selectivity");
+    }
+
+    arc = new AccumuloRdfConfiguration();
+    arc.setTableLayoutStrategy(new TablePrefixLayoutStrategy());
+    arc.setMaxRangesForScanner(300);
+    res = new ProspectorServiceEvalStatsDAO(conn, arc);
+
+  }
+
+  @Test
+  public void testOptimizeQ1() throws Exception {
+
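+    // Seed counts so the peesOn/hydrant pattern is cheapest (1), barksAt/cat
+    // next (2), and label/dog most expensive (3); the optimizer should reorder
+    // q1 into the pattern order of Q1.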
+    RdfEvalStatsDAO<RdfCloudTripleStoreConfiguration> res = new ProspectorServiceEvalStatsDAO(conn, arc);
+    AccumuloSelectivityEvalDAO accc = new AccumuloSelectivityEvalDAO();
+    accc.setConf(arc);
+    accc.setConnector(conn);
+    accc.setRdfEvalDAO(res);
+    accc.init();
+
+    BatchWriter bw1 = conn.createBatchWriter("rya_prospects", config);
+    BatchWriter bw2 = conn.createBatchWriter("rya_selectivity", config);
+
+    String s1 = "predicateobject" + DELIM + "http://www.w3.org/2000/01/rdf-schema#label" + DELIM + "uri:dog";
+    String s2 = "predicateobject" + DELIM + "uri:barksAt" + DELIM + "uri:cat";
+    String s3 = "predicateobject" + DELIM + "uri:peesOn" + DELIM + "uri:hydrant";
+    List<Mutation> mList = new ArrayList<Mutation>();
+    List<Mutation> mList2 = new ArrayList<Mutation>();
+    List<String> sList = Arrays.asList("subjectobject", "subjectpredicate", "subjectsubject", "predicateobject", "predicatepredicate", "predicatesubject");
+    Mutation m1, m2, m3, m4;
+
+    m1 = new Mutation(s1 + DELIM + "3");
+    m1.put(new Text("count"), new Text(""), new Value("3".getBytes()));
+    m2 = new Mutation(s2 + DELIM + "2");
+    m2.put(new Text("count"), new Text(""), new Value("2".getBytes()));
+    m3 = new Mutation(s3 + DELIM + "1");
+    m3.put(new Text("count"), new Text(""), new Value("1".getBytes()));
+    mList.add(m1);
+    mList.add(m2);
+    mList.add(m3);
+
+    bw1.addMutations(mList);
+    bw1.close();
+
+    Scanner scan = conn.createScanner("rya_prospects", new Authorizations());
+    scan.setRange(new Range());
+
+    for (Map.Entry<Key,Value> entry : scan) {
+      System.out.println("Key row string is " + entry.getKey().getRow().toString());
+      System.out.println("Key is " + entry.getKey());
+      System.out.println("Value is " + (new String(entry.getValue().get())));
+    }
+
+    m1 = new Mutation(s1);
+    m2 = new Mutation(s2);
+    m3 = new Mutation(s3);
+    m4 = new Mutation(new Text("subjectpredicateobject" + DELIM + "FullTableCardinality"));
+    m4.put(new Text("FullTableCardinality"), new Text("100"), EMPTY_VAL);
+    int i = 2;
+    int j = 3;
+    int k = 4;
+    Long count1;
+    Long count2;
+    Long count3;
+
+    for (String s : sList) {
+      count1 = (long) i;
+      count2 = (long) j;
+      count3 = (long) k;
+      m1.put(new Text(s), new Text(count1.toString()), EMPTY_VAL);
+      m2.put(new Text(s), new Text(count2.toString()), EMPTY_VAL);
+      m3.put(new Text(s), new Text(count3.toString()), EMPTY_VAL);
+      i = 2 * i;
+      j = 2 * j;
+      k = 2 * k;
+    }
+    mList2.add(m1);
+    mList2.add(m2);
+    mList2.add(m3);
+    mList2.add(m4);
+    bw2.addMutations(mList2);
+    bw2.close();
+
+    scan = conn.createScanner("rya_selectivity", new Authorizations());
+    scan.setRange(new Range());
+
+    for (Map.Entry<Key,Value> entry : scan) {
+      System.out.println("Key row string is " + entry.getKey().getRow().toString());
+      System.out.println("Key is " + entry.getKey());
+      System.out.println("Value is " + (new String(entry.getKey().getColumnQualifier().toString())));
+
+    }
+
+    TupleExpr te = getTupleExpr(q1);
+
+    RdfCloudTripleStoreSelectivityEvaluationStatistics ars = new RdfCloudTripleStoreSelectivityEvaluationStatistics(arc, res, accc);
+    QueryJoinSelectOptimizer qjs = new QueryJoinSelectOptimizer(ars, accc);
+    System.out.println("Originial query is " + te);
+    qjs.optimize(te, null, null);
+    Assert.assertTrue(te.equals(getTupleExpr(Q1)));
+
+  }
+
+  @Test
+  public void testOptimizeQ2() throws Exception {
+
+    System.out.println("*********************QUERY2********************");
+
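+    // Same setup as Q1 plus two patterns on ?m; the optimizer is expected to
+    // rewrite q2 into the pattern order given by Q2.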
+    RdfEvalStatsDAO<RdfCloudTripleStoreConfiguration> res = new ProspectorServiceEvalStatsDAO(conn, arc);
+    AccumuloSelectivityEvalDAO accc = new AccumuloSelectivityEvalDAO();
+    accc.setConf(arc);
+    accc.setConnector(conn);
+    accc.setRdfEvalDAO(res);
+    accc.init();
+
+    BatchWriter bw1 = conn.createBatchWriter("rya_prospects", config);
+    BatchWriter bw2 = conn.createBatchWriter("rya_selectivity", config);
+
+    String s1 = "predicateobject" + DELIM + "http://www.w3.org/2000/01/rdf-schema#label" + DELIM + "uri:dog";
+    String s2 = "predicateobject" + DELIM + "uri:barksAt" + DELIM + "uri:cat";
+    String s3 = "predicateobject" + DELIM + "uri:peesOn" + DELIM + "uri:hydrant";
+    String s5 = "predicateobject" + DELIM + "uri:scratches" + DELIM + "uri:ears";
+    String s4 = "predicateobject" + DELIM + "uri:eats" + DELIM + "uri:chickens";
+    List<Mutation> mList = new ArrayList<Mutation>();
+    List<Mutation> mList2 = new ArrayList<Mutation>();
+    List<String> sList = Arrays.asList("subjectobject", "subjectpredicate", "subjectsubject", "predicateobject", "predicatepredicate", "predicatesubject");
+    Mutation m1, m2, m3, m4, m5, m6;
+
+    m1 = new Mutation(s1 + DELIM + "3");
+    m1.put(new Text("count"), new Text(""), new Value("4".getBytes()));
+    m2 = new Mutation(s2 + DELIM + "2");
+    m2.put(new Text("count"), new Text(""), new Value("3".getBytes()));
+    m3 = new Mutation(s3 + DELIM + "1");
+    m3.put(new Text("count"), new Text(""), new Value("2".getBytes()));
+    m4 = new Mutation(s4 + DELIM + "1");
+    m4.put(new Text("count"), new Text(""), new Value("3".getBytes()));
+    m5 = new Mutation(s5 + DELIM + "1");
+    m5.put(new Text("count"), new Text(""), new Value("5".getBytes()));
+    mList.add(m1);
+    mList.add(m2);
+    mList.add(m3);
+    mList.add(m4);
+    mList.add(m5);
+
+    bw1.addMutations(mList);
+    bw1.close();
+
+    Scanner scan = conn.createScanner("rya_prospects", new Authorizations());
+    scan.setRange(new Range());
+
+    for (Map.Entry<Key,Value> entry : scan) {
+      System.out.println("Key row string is " + entry.getKey().getRow().toString());
+      System.out.println("Key is " + entry.getKey());
+      System.out.println("Value is " + (new String(entry.getValue().get())));
+    }
+
+    m1 = new Mutation(s1);
+    m2 = new Mutation(s2);
+    m3 = new Mutation(s3);
+    m4 = new Mutation(s4);
+    m5 = new Mutation(s5);
+    m6 = new Mutation(new Text("subjectpredicateobject" + DELIM + "FullTableCardinality"));
+    m6.put(new Text("FullTableCardinality"), new Text("100"), EMPTY_VAL);
+    int i = 2;
+    int j = 3;
+    int k = 4;
+    Long count1;
+    Long count2;
+    Long count3;
+
+    for (String s : sList) {
+      count1 = (long) i;
+      count2 = (long) j;
+      count3 = (long) k;
+      m1.put(new Text(s), new Text(count1.toString()), EMPTY_VAL);
+      m2.put(new Text(s), new Text(count2.toString()), EMPTY_VAL);
+      m3.put(new Text(s), new Text(count1.toString()), EMPTY_VAL);
+      m4.put(new Text(s), new Text(count3.toString()), EMPTY_VAL);
+      m5.put(new Text(s), new Text(count1.toString()), EMPTY_VAL);
+
+      i = 2 * i;
+      j = 2 * j;
+      k = 2 * k;
+    }
+    mList2.add(m1);
+    mList2.add(m2);
+    mList2.add(m3);
+    mList2.add(m5);
+    mList2.add(m4);
+    mList2.add(m6);
+    bw2.addMutations(mList2);
+    bw2.close();
+
+    // scan = conn.createScanner("rya_selectivity" , new Authorizations());
+    // scan.setRange(new Range());
+    //
+    // for (Map.Entry<Key, Value> entry : scan) {
+    // System.out.println("Key row string is " + entry.getKey().getRow().toString());
+    // System.out.println("Key is " + entry.getKey());
+    // System.out.println("Value is " + (new String(entry.getKey().getColumnQualifier().toString())));
+    //
+    // }
+
+    TupleExpr te = getTupleExpr(q2);
+    System.out.println("Bindings are " + te.getBindingNames());
+    RdfCloudTripleStoreSelectivityEvaluationStatistics ars = new RdfCloudTripleStoreSelectivityEvaluationStatistics(arc, res, accc);
+    QueryJoinSelectOptimizer qjs = new QueryJoinSelectOptimizer(ars, accc);
+    System.out.println("Originial query is " + te);
+    qjs.optimize(te, null, null);
+    System.out.println("Optimized query is " + te);
+    // System.out.println("Bindings are " + te.getBindingNames());
+    Assert.assertTrue(te.equals(getTupleExpr(Q2)));
+
+  }
+
+  @Test
+  public void testOptimizeQ3() throws Exception {
+
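+    // Larger scenario: q3 adds OPTIONAL and UNION clauses plus patterns on ?l,
+    // so counts are seeded for ten predicate-object pairs.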
+    RdfEvalStatsDAO<RdfCloudTripleStoreConfiguration> res = new ProspectorServiceEvalStatsDAO(conn, arc);
+    AccumuloSelectivityEvalDAO accc = new AccumuloSelectivityEvalDAO();
+    accc.setConf(arc);
+    accc.setConnector(conn);
+    accc.setRdfEvalDAO(res);
+    accc.init();
+
+    BatchWriter bw1 = conn.createBatchWriter("rya_prospects", config);
+    BatchWriter bw2 = conn.createBatchWriter("rya_selectivity", config);
+
+    String s1 = "predicateobject" + DELIM + "http://www.w3.org/2000/01/rdf-schema#label" + DELIM + "uri:dog";
+    String s2 = "predicateobject" + DELIM + "uri:barksAt" + DELIM + "uri:cat";
+    String s3 = "predicateobject" + DELIM + "uri:peesOn" + DELIM + "uri:hydrant";
+    String s5 = "predicateobject" + DELIM + "uri:scratches" + DELIM + "uri:ears";
+    String s4 = "predicateobject" + DELIM + "uri:eats" + DELIM + "uri:chickens";
+    String s6 = "predicateobject" + DELIM + "uri:eats" + DELIM + "uri:kibble";
+    String s7 = "predicateobject" + DELIM + "uri:rollsIn" + DELIM + "uri:mud";
+    String s8 = "predicateobject" + DELIM + "uri:runsIn" + DELIM + "uri:field";
+    String s9 = "predicateobject" + DELIM + "uri:smells" + DELIM + "uri:butt";
+    String s10 = "predicateobject" + DELIM + "uri:eats" + DELIM + "uri:sticks";
+
+    List<Mutation> mList = new ArrayList<Mutation>();
+    List<Mutation> mList2 = new ArrayList<Mutation>();
+    List<String> sList = Arrays.asList("subjectobject", "subjectpredicate", "subjectsubject", "predicateobject", "predicatepredicate", "predicatesubject");
+    Mutation m1, m2, m3, m4, m5, m6, m7, m8, m9, m10, m11;
+
+    m1 = new Mutation(s1 + DELIM + "3");
+    m1.put(new Text("count"), new Text(""), new Value("5".getBytes()));
+    m2 = new Mutation(s2 + DELIM + "2");
+    m2.put(new Text("count"), new Text(""), new Value("3".getBytes()));
+    m3 = new Mutation(s3 + DELIM + "1");
+    m3.put(new Text("count"), new Text(""), new Value("2".getBytes()));
+    m4 = new Mutation(s4 + DELIM + "1");
+    m4.put(new Text("count"), new Text(""), new Value("3".getBytes()));
+    m5 = new Mutation(s5 + DELIM + "1");
+    m5.put(new Text("count"), new Text(""), new Value("5".getBytes()));
+    m6 = new Mutation(s6 + DELIM + "1");
+    m6.put(new Text("count"), new Text(""), new Value("3".getBytes()));
+    m7 = new Mutation(s7 + DELIM + "1");
+    m7.put(new Text("count"), new Text(""), new Value("2".getBytes()));
+    m8 = new Mutation(s8 + DELIM + "1");
+    m8.put(new Text("count"), new Text(""), new Value("3".getBytes()));
+    m9 = new Mutation(s9 + DELIM + "1");
+    m9.put(new Text("count"), new Text(""), new Value("1".getBytes()));
+    m10 = new Mutation(s10 + DELIM + "1");
+    m10.put(new Text("count"), new Text(""), new Value("1".getBytes()));
+
+    mList.add(m1);
+    mList.add(m2);
+    mList.add(m3);
+    mList.add(m4);
+    mList.add(m5);
+    mList.add(m6);
+    mList.add(m7);
+    mList.add(m8);
+    mList.add(m9);
+    mList.add(m10);
+
+    bw1.addMutations(mList);
+    bw1.close();
+
+    Scanner scan = conn.createScanner("rya_prospects", new Authorizations());
+    scan.setRange(new Range());
+
+    for (Map.Entry<Key,Value> entry : scan) {
+      System.out.println("Key row string is " + entry.getKey().getRow().toString());
+      System.out.println("Key is " + entry.getKey());
+      System.out.println("Value is " + (new String(entry.getValue().get())));
+    }
+
+    m1 = new Mutation(s1);
+    m2 = new Mutation(s2);
+    m3 = new Mutation(s3);
+    m4 = new Mutation(s4);
+    m5 = new Mutation(s5);
+    m6 = new Mutation(s6);
+    m7 = new Mutation(s7);
+    m8 = new Mutation(s8);
+    m9 = new Mutation(s9);
+    m10 = new Mutation(s10);
+    m11 = new Mutation(new Text("subjectpredicateobject" + DELIM + "FullTableCardinality"));
+    m11.put(new Text("FullTableCardinality"), new Text("100"), EMPTY_VAL);
+    int i = 2;
+    int j = 3;
+    int k = 4;
+    int l = 5;
+    Long count1;
+    Long count2;
+    Long count3;
+    Long count4;
+
+    for (String s : sList) {
+      count1 = (long) i;
+      count2 = (long) j;
+      count3 = (long) k;
+      count4 = (long) l;
+      m1.put(new Text(s), new Text(count4.toString()), EMPTY_VAL);
+      m2.put(new Text(s), new Text(count2.toString()), EMPTY_VAL);
+      m3.put(new Text(s), new Text(count1.toString()), EMPTY_VAL);
+      m4.put(new Text(s), new Text(count3.toString()), EMPTY_VAL);
+      m5.put(new Text(s), new Text(count1.toString()), EMPTY_VAL);
+      m6.put(new Text(s), new Text(count2.toString()), EMPTY_VAL);
+      m7.put(new Text(s), new Text(count1.toString()), EMPTY_VAL);
+      m8.put(new Text(s), new Text(count4.toString()), EMPTY_VAL);
+      m9.put(new Text(s), new Text(count3.toString()), EMPTY_VAL);
+      m10.put(new Text(s), new Text(count1.toString()), EMPTY_VAL);
+
+      i = 2 * i;
+      j = 2 * j;
+      k = 2 * k;
+      l = 2 * l;
+    }
+    mList2.add(m1);
+    mList2.add(m2);
+    mList2.add(m3);
+    mList2.add(m5);
+    mList2.add(m4);
+    mList2.add(m6);
+    mList2.add(m7);
+    mList2.add(m8);
+    mList2.add(m9);
+    mList2.add(m10);
+    mList2.add(m11);
+    bw2.addMutations(mList2);
+    bw2.close();
+
+    scan = conn.createScanner("rya_selectivity", new Authorizations());
+    scan.setRange(new Range());
+
+    for (Map.Entry<Key,Value> entry : scan) {
+      System.out.println("Key row string is " + entry.getKey().getRow().toString());
+      System.out.println("Key is " + entry.getKey());
+      System.out.println("Value is " + (new String(entry.getKey().getColumnQualifier().toString())));
+
+    }
+
+    TupleExpr te = getTupleExpr(q3);
+    RdfCloudTripleStoreSelectivityEvaluationStatistics ars = new RdfCloudTripleStoreSelectivityEvaluationStatistics(arc, res, accc);
+    QueryJoinSelectOptimizer qjs = new QueryJoinSelectOptimizer(ars, accc);
+    System.out.println("Originial query is " + te);
+    qjs.optimize(te, null, null);
+
+    System.out.print("Optimized query is " + te);
+
+  }
+
+  @Test
+  public void testOptimizeQ4() throws Exception {
+
+    RdfEvalStatsDAO<RdfCloudTripleStoreConfiguration> res = new ProspectorServiceEvalStatsDAO(conn, arc);
+    AccumuloSelectivityEvalDAO accc = new AccumuloSelectivityEvalDAO();
+    accc.setConf(arc);
+    accc.setConnector(conn);
+    accc.setRdfEvalDAO(res);
+    accc.init();
+
+    BatchWriter bw1 = conn.createBatchWriter("rya_prospects", config);
+    BatchWriter bw2 = conn.createBatchWriter("rya_selectivity", config);
+
+    String s1 = "predicateobject" + DELIM + "http://www.w3.org/2000/01/rdf-schema#label" + DELIM + "uri:dog";
+    String s2 = "predicateobject" + DELIM + "uri:barksAt" + DELIM + "uri:cat";
+    String s3 = "predicateobject" + DELIM + "uri:peesOn" + DELIM + "uri:hydrant";
+    String s5 = "predicateobject" + DELIM + "uri:scratches" + DELIM + "uri:ears";
+    String s4 = "predicateobject" + DELIM + "uri:eats" + DELIM + "uri:chickens";
+    List<Mutation> mList = new ArrayList<Mutation>();
+    List<Mutation> mList2 = new ArrayList<Mutation>();
+    List<String> sList = Arrays.asList("subjectobject", "subjectpredicate", "subjectsubject", "predicateobject", "predicatepredicate", "predicatesubject");
+    Mutation m1, m2, m3, m4, m5, m6;
+
+    m1 = new Mutation(s1 + DELIM + "3");
+    m1.put(new Text("count"), new Text(""), new Value("4".getBytes()));
+    m2 = new Mutation(s2 + DELIM + "2");
+    m2.put(new Text("count"), new Text(""), new Value("0".getBytes()));
+    m3 = new Mutation(s3 + DELIM + "1");
+    m3.put(new Text("count"), new Text(""), new Value("8".getBytes()));
+    m4 = new Mutation(s4 + DELIM + "1");
+    m4.put(new Text("count"), new Text(""), new Value("3".getBytes()));
+    m5 = new Mutation(s5 + DELIM + "1");
+    m5.put(new Text("count"), new Text(""), new Value("0".getBytes()));
+    mList.add(m1);
+    mList.add(m2);
+    mList.add(m3);
+    mList.add(m4);
+    mList.add(m5);
+
+    bw1.addMutations(mList);
+    bw1.close();
+
+    Scanner scan = conn.createScanner("rya_prospects", new Authorizations());
+    scan.setRange(new Range());
+
+    for (Map.Entry<Key,Value> entry : scan) {
+      System.out.println("Key row string is " + entry.getKey().getRow().toString());
+      System.out.println("Key is " + entry.getKey());
+      System.out.println("Value is " + (new String(entry.getValue().get())));
+    }
+
+    m1 = new Mutation(s1);
+    m2 = new Mutation(s2);
+    m3 = new Mutation(s3);
+    m4 = new Mutation(s4);
+    m5 = new Mutation(s5);
+    m6 = new Mutation(new Text("subjectpredicateobject" + DELIM + "FullTableCardinality"));
+    m6.put(new Text("FullTableCardinality"), new Text("100"), EMPTY_VAL);
+    int i = 2;
+    int j = 3;
+    int k = 4;
+    Long count1;
+    Long count2;
+    Long count3;
+
+    for (String s : sList) {
+      count1 = (long) i;
+      count2 = (long) j;
+      count3 = (long) k;
+      m1.put(new Text(s), new Text(count1.toString()), EMPTY_VAL);
+      m2.put(new Text(s), new Text(count2.toString()), EMPTY_VAL);
+      m3.put(new Text(s), new Text(count1.toString()), EMPTY_VAL);
+      m4.put(new Text(s), new Text(count3.toString()), EMPTY_VAL);
+      m5.put(new Text(s), new Text(count1.toString()), EMPTY_VAL);
+
+      i = 2 * i;
+      j = 2 * j;
+      k = 2 * k;
+    }
+    mList2.add(m1);
+    mList2.add(m2);
+    mList2.add(m3);
+    mList2.add(m5);
+    mList2.add(m4);
+    mList2.add(m6);
+    bw2.addMutations(mList2);
+    bw2.close();
+
+    scan = conn.createScanner("rya_selectivity", new Authorizations());
+    scan.setRange(new Range());
+
+    for (Map.Entry<Key,Value> entry : scan) {
+      System.out.println("Key row string is " + entry.getKey().getRow().toString());
+      System.out.println("Key is " + entry.getKey());
+      System.out.println("Value is " + (new String(entry.getKey().getColumnQualifier().toString())));
+
+    }
+
+    TupleExpr te = getTupleExpr(q2);
+    RdfCloudTripleStoreSelectivityEvaluationStatistics ars = new RdfCloudTripleStoreSelectivityEvaluationStatistics(arc, res, accc);
+    QueryJoinSelectOptimizer qjs = new QueryJoinSelectOptimizer(ars, accc);
+    System.out.println("Originial query is " + te);
+    qjs.optimize(te, null, null);
+    Assert.assertTrue(te.equals(getTupleExpr(Q4)));
+
+    System.out.print("Optimized query is " + te);
+
+  }
+
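+  // Editorial sketch (hypothetical, not in the original commit): the repeated
+  // three-line "count" Mutation setup in these tests could be factored out as
+  // below; countMutation is an invented name. Usage would look like
+  // mList.add(countMutation(s1 + DELIM + "3", "5"));
+  private static Mutation countMutation(String row, String count) {
+    // One "count" cell with an empty column qualifier, matching the pattern above.
+    Mutation m = new Mutation(row);
+    m.put(new Text("count"), new Text(""), new Value(count.getBytes()));
+    return m;
+  }
+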
+  @Test
+  public void testOptimizeQ5() throws Exception {
+
+    RdfEvalStatsDAO<RdfCloudTripleStoreConfiguration> res = new ProspectorServiceEvalStatsDAO(conn, arc);
+    AccumuloSelectivityEvalDAO accc = new AccumuloSelectivityEvalDAO();
+    accc.setConf(arc);
+    accc.setConnector(conn);
+    accc.setRdfEvalDAO(res);
+    accc.init();
+
+    BatchWriter bw1 = conn.createBatchWriter("rya_prospects", config);
+    BatchWriter bw2 = conn.createBatchWriter("rya_selectivity", config);
+
+    String s1 = "predicateobject" + DELIM + "http://www.w3.org/2000/01/rdf-schema#label" + DELIM + "uri:dog";
+    String s2 = "predicateobject" + DELIM + "uri:barksAt" + DELIM + "uri:cat";
+    String s3 = "predicateobject" + DELIM + "uri:peesOn" + DELIM + "uri:hydrant";
+    String s5 = "predicateobject" + DELIM + "uri:watches" + DELIM + "uri:television";
+    String s4 = "predicateobject" + DELIM + "uri:eats" + DELIM + "uri:chickens";
+    String s6 = "predicateobject" + DELIM + "uri:eats" + DELIM + "uri:kibble";
+    String s7 = "predicateobject" + DELIM + "uri:rollsIn" + DELIM + "uri:mud";
+    String s8 = "predicateobject" + DELIM + "uri:runsIn" + DELIM + "uri:field";
+    String s9 = "predicateobject" + DELIM + "uri:smells" + DELIM + "uri:butt";
+    String s10 = "predicateobject" + DELIM + "uri:eats" + DELIM + "uri:sticks";
+
+    List<Mutation> mList = new ArrayList<Mutation>();
+    List<Mutation> mList2 = new ArrayList<Mutation>();
+    List<String> sList = Arrays.asList("subjectobject", "subjectpredicate", "subjectsubject", "predicateobject", "predicatepredicate", "predicatesubject");
+    Mutation m1, m2, m3, m4, m5, m6, m7, m8, m9, m10, m11;
+
+    m1 = new Mutation(s1 + DELIM + "3");
+    m1.put(new Text("count"), new Text(""), new Value("5".getBytes()));
+    m2 = new Mutation(s2 + DELIM + "2");
+    m2.put(new Text("count"), new Text(""), new Value("3".getBytes()));
+    m3 = new Mutation(s3 + DELIM + "1");
+    m3.put(new Text("count"), new Text(""), new Value("2".getBytes()));
+    m4 = new Mutation(s4 + DELIM + "1");
+    m4.put(new Text("count"), new Text(""), new Value("0".getBytes()));
+    m5 = new Mutation(s5 + DELIM + "1");
+    m5.put(new Text("count"), new Text(""), new Value("1".getBytes()));
+    m6 = new Mutation(s6 + DELIM + "1");
+    m6.put(new Text("count"), new Text(""), new Value("3".getBytes()));
+    m7 = new Mutation(s7 + DELIM + "1");
+    m7.put(new Text("count"), new Text(""), new Value("2".getBytes()));
+    m8 = new Mutation(s8 + DELIM + "1");
+    m8.put(new Text("count"), new Text(""), new Value("3".getBytes()));
+    m9 = new Mutation(s9 + DELIM + "1");
+    m9.put(new Text("count"), new Text(""), new Value("1".getBytes()));
+    m10 = new Mutation(s10 + DELIM + "1");
+    m10.put(new Text("count"), new Text(""), new Value("1".getBytes()));
+
+    mList.add(m1);
+    mList.add(m2);
+    mList.add(m3);
+    mList.add(m4);
+    mList.add(m5);
+    mList.add(m6);
+    mList.add(m7);
+    mList.add(m8);
+    mList.add(m9);
+    mList.add(m10);
+
+    bw1.addMutations(mList);
+    bw1.close();
+
+    Scanner scan = conn.createScanner("rya_prospects", new Authorizations());
+    scan.setRange(new Range());
+
+    for (Map.Entry<Key,Value> entry : scan) {
+      System.out.println("Key row string is " + entry.getKey().getRow().toString());
+      System.out.println("Key is " + entry.getKey());
+      System.out.println("Value is " + (new String(entry.getValue().get())));
+    }
+
+    m1 = new Mutation(s1);
+    m2 = new Mutation(s2);
+    m3 = new Mutation(s3);
+    m4 = new Mutation(s4);
+    m5 = new Mutation(s5);
+    m6 = new Mutation(s6);
+    m7 = new Mutation(s7);
+    m8 = new Mutation(s8);
+    m9 = new Mutation(s9);
+    m10 = new Mutation(s10);
+    m11 = new Mutation(new Text("subjectpredicateobject" + DELIM + "FullTableCardinality"));
+    m11.put(new Text("FullTableCardinality"), new Text("100"), EMPTY_VAL);
+    int i = 2;
+    int j = 3;
+    int k = 4;
+    int l = 5;
+    Long count1;
+    Long count2;
+    Long count3;
+    Long count4;
+
+    for (String s : sList) {
+      count1 = (long) i;
+      count2 = (long) j;
+      count3 = (long) k;
+      count4 = (long) l;
+      m1.put(new Text(s), new Text(count4.toString()), EMPTY_VAL);
+      m2.put(new Text(s), new Text(count2.toString()), EMPTY_VAL);
+      m3.put(new Text(s), new Text(count1.toString()), EMPTY_VAL);
+      m4.put(new Text(s), new Text(count3.toString()), EMPTY_VAL);
+      m5.put(new Text(s), new Text(count1.toString()), EMPTY_VAL);
+      m6.put(new Text(s), new Text(count2.toString()), EMPTY_VAL);
+      m7.put(new Text(s), new Text(count1.toString()), EMPTY_VAL);
+      m8.put(new Text(s), new Text(count4.toString()), EMPTY_VAL);
+      m9.put(new Text(s), new Text(count3.toString()), EMPTY_VAL);
+      m10.put(new Text(s), new Text(count1.toString()), EMPTY_VAL);
+
+      i = 2 * i;
+      j = 2 * j;
+      k = 2 * k;
+      l = 2 * l;
+    }
+    mList2.add(m1);
+    mList2.add(m2);
+    mList2.add(m3);
+    mList2.add(m5);
+    mList2.add(m4);
+    mList2.add(m6);
+    mList2.add(m7);
+    mList2.add(m8);
+    mList2.add(m9);
+    mList2.add(m10);
+    mList2.add(m11);
+    bw2.addMutations(mList2);
+    bw2.close();
+
+    scan = conn.createScanner("rya_selectivity", new Authorizations());
+    scan.setRange(new Range());
+
+    for (Map.Entry<Key,Value> entry : scan) {
+      System.out.println("Key row string is " + entry.getKey().getRow().toString());
+      System.out.println("Key is " + entry.getKey());
+      System.out.println("Value is " + (new String(entry.getKey().getColumnQualifier().toString())));
+
+    }
+
+    TupleExpr te = getTupleExpr(q5);
+    System.out.println("Bindings are " + te.getBindingNames());
+    RdfCloudTripleStoreSelectivityEvaluationStatistics ars = new RdfCloudTripleStoreSelectivityEvaluationStatistics(arc, res, accc);
+    QueryJoinSelectOptimizer qjs = new QueryJoinSelectOptimizer(ars, accc);
+    System.out.println("Originial query is " + te);
+    qjs.optimize(te, null, null);
+    System.out.println("Bindings are " + te.getBindingNames());
+
+    System.out.print("Optimized query is " + te);
+
+  }
+
+  @Test
+  public void testOptimizeQ6() throws Exception {
+
+    RdfEvalStatsDAO<RdfCloudTripleStoreConfiguration> res = new ProspectorServiceEvalStatsDAO(conn, arc);
+    AccumuloSelectivityEvalDAO accc = new AccumuloSelectivityEvalDAO();
+    accc.setConf(arc);
+    accc.setConnector(conn);
+    accc.setRdfEvalDAO(res);
+    accc.init();
+
+    BatchWriter bw1 = conn.createBatchWriter("rya_prospects", config);
+    BatchWriter bw2 = conn.createBatchWriter("rya_selectivity", config);
+
+    String s1 = "predicateobject" + DELIM + "http://www.w3.org/2000/01/rdf-schema#label" + DELIM + "uri:dog";
+    String s2 = "predicateobject" + DELIM + "uri:barksAt" + DELIM + "uri:cat";
+    String s3 = "predicateobject" + DELIM + "uri:peesOn" + DELIM + "uri:hydrant";
+    String s5 = "predicateobject" + DELIM + "uri:watches" + DELIM + "uri:television";
+    String s4 = "predicateobject" + DELIM + "uri:eats" + DELIM + "uri:chickens";
+    String s6 = "predicateobject" + DELIM + "uri:eats" + DELIM + "uri:kibble";
+    String s7 = "predicateobject" + DELIM + "uri:rollsIn" + DELIM + "uri:mud";
+    String s8 = "predicateobject" + DELIM + "uri:runsIn" + DELIM + "uri:field";
+    String s9 = "predicateobject" + DELIM + "uri:smells" + DELIM + "uri:butt";
+    String s10 = "predicateobject" + DELIM + "uri:eats" + DELIM + "uri:sticks";
+
+    List<Mutation> mList = new ArrayList<Mutation>();
+    List<Mutation> mList2 = new ArrayList<Mutation>();
+    List<String> sList = Arrays.asList("subjectobject", "subjectpredicate", "subjectsubject", "predicateobject", "predicatepredicate", "predicatesubject");
+    Mutation m1, m2, m3, m4, m5, m6, m7, m8, m9, m10, m11;
+
+    m1 = new Mutation(s1 + DELIM + "3");
+    m1.put(new Text("count"), new Text(""), new Value("5".getBytes()));
+    m2 = new Mutation(s2 + DELIM + "2");
+    m2.put(new Text("count"), new Text(""), new Value("3".getBytes()));
+    m3 = new Mutation(s3 + DELIM + "1");
+    m3.put(new Text("count"), new Text(""), new Value("2".getBytes()));
+    m4 = new Mutation(s4 + DELIM + "1");
+    m4.put(new Text("count"), new Text(""), new Value("0".getBytes()));
+    m5 = new Mutation(s5 + DELIM + "1");
+    m5.put(new Text("count"), new Text(""), new Value("1".getBytes()));
+    m6 = new Mutation(s6 + DELIM + "1");
+    m6.put(new Text("count"), new Text(""), new Value("3".getBytes()));
+    m7 = new Mutation(s7 + DELIM + "1");
+    m7.put(new Text("count"), new Text(""), new Value("2".getBytes()));
+    m8 = new Mutation(s8 + DELIM + "1");
+    m8.put(new Text("count"), new Text(""), new Value("3".getBytes()));
+    m9 = new Mutation(s9 + DELIM + "1");
+    m9.put(new Text("count"), new Text(""), new Value("1".getBytes()));
+    m10 = new Mutation(s10 + DELIM + "1");
+    m10.put(new Text("count"), new Text(""), new Value("1".getBytes()));
+
+    mList.add(m1);
+    mList.add(m2);
+    mList.add(m3);
+    mList.add(m4);
+    mList.add(m5);
+    mList.add(m6);
+    mList.add(m7);
+    mList.add(m8);
+    mList.add(m9);
+    mList.add(m10);
+
+    bw1.addMutations(mList);
+    bw1.close();
+
+    Scanner scan = conn.createScanner("rya_prospects", new Authorizations());
+    scan.setRange(new Range());
+
+    for (Map.Entry<Key,Value> entry : scan) {
+      System.out.println("Key row string is " + entry.getKey().getRow().toString());
+      System.out.println("Key is " + entry.getKey());
+      System.out.println("Value is " + (new String(entry.getValue().get())));
+    }
+
+    m1 = new Mutation(s1);
+    m2 = new Mutation(s2);
+    m3 = new Mutation(s3);
+    m4 = new Mutation(s4);
+    m5 = new Mutation(s5);
+    m6 = new Mutation(s6);
+    m7 = new Mutation(s7);
+    m8 = new Mutation(s8);
+    m9 = new Mutation(s9);
+    m10 = new Mutation(s10);
+    m11 = new Mutation(new Text("subjectpredicateobject" + DELIM + "FullTableCardinality"));
+    m11.put(new Text("FullTableCardinality"), new Text("100"), EMPTY_VAL);
+    int i = 2;
+    int j = 3;
+    int k = 4;
+    int l = 5;
+    Long count1;
+    Long count2;
+    Long count3;
+    Long count4;
+
+    for (String s : sList) {
+      count1 = (long) i;
+      count2 = (long) j;
+      count3 = (long) k;
+      count4 = (long) l;
+      m1.put(new Text(s), new Text(count4.toString()), EMPTY_VAL);
+      m2.put(new Text(s), new Text(count2.toString()), EMPTY_VAL);
+      m3.put(new Text(s), new Text(count1.toString()), EMPTY_VAL);
+      m4.put(new Text(s), new Text(count3.toString()), EMPTY_VAL);
+      m5.put(new Text(s), new Text(count1.toString()), EMPTY_VAL);
+      m6.put(new Text(s), new Text(count2.toString()), EMPTY_VAL);
+      m7.put(new Text(s), new Text(count1.toString()), EMPTY_VAL);
+      m8.put(new Text(s), new Text(count4.toString()), EMPTY_VAL);
+      m9.put(new Text(s), new Text(count3.toString()), EMPTY_VAL);
+      m10.put(new Text(s), new Text(count1.toString()), EMPTY_VAL);
+
+      i = 2 * i;
+      j = 2 * j;
+      k = 2 * k;
+      l = 2 * l;
+    }
+    mList2.add(m1);
+    mList2.add(m2);
+    mList2.add(m3);
+    mList2.add(m5);
+    mList2.add(m4);
+    mList2.add(m6);
+    mList2.add(m7);
+    mList2.add(m8);
+    mList2.add(m9);
+    mList2.add(m10);
+    mList2.add(m11);
+    bw2.addMutations(mList2);
+    bw2.close();
+
+    scan = conn.createScanner("rya_selectivity", new Authorizations());
+    scan.setRange(new Range());
+
+    for (Map.Entry<Key,Value> entry : scan) {
+      System.out.println("Key row string is " + entry.getKey().getRow().toString());
+      System.out.println("Key is " + entry.getKey());
+      System.out.println("Value is " + (new String(entry.getKey().getColumnQualifier().toString())));
+
+    }
+
+    TupleExpr te = getTupleExpr(q6);
+    TupleExpr te2 = (TupleExpr) te.clone();
+    System.out.println("Bindings are " + te.getBindingNames());
+    RdfCloudTripleStoreSelectivityEvaluationStatistics ars = new RdfCloudTripleStoreSelectivityEvaluationStatistics(arc, res, accc);
+    QueryJoinSelectOptimizer qjs = new QueryJoinSelectOptimizer(ars, accc);
+    System.out.println("Originial query is " + te);
+    qjs.optimize(te, null, null);
+    
+    
+    
+    FilterOptimizer fo = new FilterOptimizer();
+    fo.optimize(te2, null, null);
+    System.out.print("filter optimized query before js opt is " + te2);
+    qjs.optimize(te2, null, null);
+
+    System.out.println("join selectivity opt query before filter opt is " + te);
+    fo.optimize(te, null, null);
+    
+    System.out.println("join selectivity opt query is " + te);
+    System.out.print("filter optimized query is " + te2);
+
+  }
+
+  private TupleExpr getTupleExpr(String query) throws MalformedQueryException {
+
+    SPARQLParser sp = new SPARQLParser();
+    ParsedQuery pq = sp.parseQuery(query, null);
+
+    return pq.getTupleExpr();
+  }
+
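+  // Editorial sketch (hypothetical, not in the original commit): the doubling
+  // loops that populate "rya_selectivity" above could be expressed with a
+  // helper like this; selectivityMutation is an invented name.
+  private static Mutation selectivityMutation(String row, List<String> joinTypes, long start) {
+    Mutation m = new Mutation(row);
+    long count = start;
+    for (String joinType : joinTypes) {
+      // One column per join type; the qualifier carries the selectivity count.
+      m.put(new Text(joinType), new Text(Long.toString(count)), EMPTY_VAL);
+      count *= 2; // mirrors the i = 2 * i doubling in the tests above
+    }
+    return m;
+  }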
+}