Posted to commits@rya.apache.org by ca...@apache.org on 2017/08/25 19:35:32 UTC

[1/5] incubator-rya git commit: RYA-350 Added EmbeddedKafkaSingleton to help eliminate flaky ITs. Closes #214.

Repository: incubator-rya
Updated Branches:
  refs/heads/master fc8d30ac6 -> 051472660


RYA-350 Added EmbeddedKafkaSingleton to help eliminate flaky ITs. Closes #214.


Project: http://git-wip-us.apache.org/repos/asf/incubator-rya/repo
Commit: http://git-wip-us.apache.org/repos/asf/incubator-rya/commit/82df3ad0
Tree: http://git-wip-us.apache.org/repos/asf/incubator-rya/tree/82df3ad0
Diff: http://git-wip-us.apache.org/repos/asf/incubator-rya/diff/82df3ad0

Branch: refs/heads/master
Commit: 82df3ad0ba502ff8fafd184d318231510698342f
Parents: fc8d30a
Author: jdasch <jd...@localhost.localdomain>
Authored: Tue Aug 22 23:08:30 2017 -0400
Committer: Caleb Meier <ca...@parsons.com>
Committed: Fri Aug 25 12:34:15 2017 -0700

----------------------------------------------------------------------
 .../rya/kafka/base/EmbeddedKafkaInstance.java   | 143 +++++++++++++++++++
 .../rya/kafka/base/EmbeddedKafkaSingleton.java  |  87 +++++++++++
 .../org/apache/rya/kafka/base/KafkaITBase.java  |  58 ++------
 .../rya/kafka/base/KafkaTestInstanceRule.java   |  98 +++++++++++++
 .../periodic.service.integration.tests/pom.xml  | 119 ++++++++-------
 .../PeriodicNotificationExporterIT.java         |  98 +++++++------
 .../src/test/resources/log4j.properties         |  37 +++++
 7 files changed, 493 insertions(+), 147 deletions(-)
----------------------------------------------------------------------


http://git-wip-us.apache.org/repos/asf/incubator-rya/blob/82df3ad0/extras/rya.pcj.fluo/pcj.fluo.test.base/src/main/java/org/apache/rya/kafka/base/EmbeddedKafkaInstance.java
----------------------------------------------------------------------
diff --git a/extras/rya.pcj.fluo/pcj.fluo.test.base/src/main/java/org/apache/rya/kafka/base/EmbeddedKafkaInstance.java b/extras/rya.pcj.fluo/pcj.fluo.test.base/src/main/java/org/apache/rya/kafka/base/EmbeddedKafkaInstance.java
new file mode 100644
index 0000000..97d8b90
--- /dev/null
+++ b/extras/rya.pcj.fluo/pcj.fluo.test.base/src/main/java/org/apache/rya/kafka/base/EmbeddedKafkaInstance.java
@@ -0,0 +1,143 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *   http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied.  See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+package org.apache.rya.kafka.base;
+
+import java.nio.file.Files;
+import java.util.Properties;
+import java.util.concurrent.atomic.AtomicInteger;
+
+import org.apache.fluo.core.util.PortUtils;
+import org.apache.kafka.clients.CommonClientConfigs;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
+
+import kafka.server.KafkaConfig;
+import kafka.server.KafkaConfig$;
+import kafka.server.KafkaServer;
+import kafka.utils.MockTime;
+import kafka.utils.TestUtils;
+import kafka.utils.Time;
+import kafka.zk.EmbeddedZookeeper;
+
+/**
+ * This class provides a {@link KafkaServer} and a dedicated
+ * {@link EmbeddedZookeeper} server for integration testing. Both servers use a
+ * random free port, so it is necessary to use the
+ * {@link #getZookeeperConnect()} and {@link #createBootstrapServerConfig()}
+ * methods to determine how to connect to them.
+ *
+ */
+public class EmbeddedKafkaInstance {
+
+    private static final Logger logger = LoggerFactory.getLogger(EmbeddedKafkaInstance.class);
+
+    private static final AtomicInteger KAFKA_TOPIC_COUNTER = new AtomicInteger(1);
+    private static final String IPv4_LOOPBACK = "127.0.0.1";
+    private static final String ZKHOST = IPv4_LOOPBACK;
+    private static final String BROKERHOST = IPv4_LOOPBACK;
+    private KafkaServer kafkaServer;
+    private EmbeddedZookeeper zkServer;
+    private String brokerPort;
+    private String zookeperConnect;
+
+    /**
+     * Starts the Embedded Kafka and Zookeeper Servers.
+     * @throws Exception - If an exception occurs during startup.
+     */
+    protected void startup() throws Exception {
+        // Setup the embedded zookeeper
+        logger.info("Starting up Embedded Zookeeper...");
+        zkServer = new EmbeddedZookeeper();
+        zookeperConnect = ZKHOST + ":" + zkServer.port();
+        logger.info("Embedded Zookeeper started at: {}", zookeperConnect);
+
+        // setup Broker
+        logger.info("Starting up Embedded Kafka...");
+        brokerPort = Integer.toString(PortUtils.getRandomFreePort());
+        final Properties brokerProps = new Properties();
+        brokerProps.setProperty(KafkaConfig$.MODULE$.BrokerIdProp(), "0");
+        brokerProps.setProperty(KafkaConfig$.MODULE$.HostNameProp(), BROKERHOST);
+        brokerProps.setProperty(KafkaConfig$.MODULE$.PortProp(), brokerPort);
+        brokerProps.setProperty(KafkaConfig$.MODULE$.ZkConnectProp(), zookeperConnect);
+        brokerProps.setProperty(KafkaConfig$.MODULE$.LogDirsProp(), Files.createTempDirectory(getClass().getSimpleName() + "-").toAbsolutePath().toString());
+        final KafkaConfig config = new KafkaConfig(brokerProps);
+        final Time mock = new MockTime();
+        kafkaServer = TestUtils.createServer(config, mock);
+        logger.info("Embedded Kafka Server started at: {}:{}", BROKERHOST, brokerPort);
+    }
+
+    /**
+     * Shuts down the Embedded Kafka and Zookeeper servers.
+     * @throws Exception
+     */
+    protected void shutdown() throws Exception {
+        try {
+            if(kafkaServer != null) {
+                kafkaServer.shutdown();
+            }
+        } finally {
+            if(zkServer != null) {
+                zkServer.shutdown();
+            }
+        }
+    }
+
+    /**
+     * @return A new Properties object containing the correct value of
+     *         {@link CommonClientConfigs#BOOTSTRAP_SERVERS_CONFIG}, for
+     *         connecting to this instance.
+     */
+    public Properties createBootstrapServerConfig() {
+        final Properties config = new Properties();
+        config.setProperty(CommonClientConfigs.BOOTSTRAP_SERVERS_CONFIG, BROKERHOST + ":" + brokerPort);
+        return config;
+    }
+
+    /**
+     *
+     * @return The host of the Kafka Broker.
+     */
+    public String getBrokerHost() {
+        return BROKERHOST;
+    }
+
+    /**
+     *
+     * @return The port of the Kafka Broker.
+     */
+    public String getBrokerPort() {
+        return brokerPort;
+    }
+
+    /**
+     *
+     * @return The Zookeeper Connect String.
+     */
+    public String getZookeeperConnect() {
+        return zookeperConnect;
+    }
+
+    /**
+     *
+     * @return A unique Kafka topic name for this instance.
+     */
+    public String getUniqueTopicName() {
+        return "topic_" + KAFKA_TOPIC_COUNTER.getAndIncrement() + "_";
+    }
+}
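
As a usage sketch (not part of this patch): once an EmbeddedKafkaInstance is running, a test can build its Kafka clients from createBootstrapServerConfig(). The fragment below is illustrative only; the 'kafka' variable is assumed to be an already-started instance, such as the one provided by the EmbeddedKafkaSingleton introduced later in this commit.

    // Illustrative fragment; assumes 'kafka' is a started EmbeddedKafkaInstance.
    // Requires org.apache.kafka.clients.producer.* and
    // org.apache.kafka.common.serialization.StringSerializer on the classpath.
    final Properties props = kafka.createBootstrapServerConfig();
    props.setProperty(ProducerConfig.KEY_SERIALIZER_CLASS_CONFIG, StringSerializer.class.getName());
    props.setProperty(ProducerConfig.VALUE_SERIALIZER_CLASS_CONFIG, StringSerializer.class.getName());

    try (KafkaProducer<String, String> producer = new KafkaProducer<>(props)) {
        // Publish to a topic name drawn from the embedded instance.
        producer.send(new ProducerRecord<>(kafka.getUniqueTopicName(), "key", "value"));
    }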

http://git-wip-us.apache.org/repos/asf/incubator-rya/blob/82df3ad0/extras/rya.pcj.fluo/pcj.fluo.test.base/src/main/java/org/apache/rya/kafka/base/EmbeddedKafkaSingleton.java
----------------------------------------------------------------------
diff --git a/extras/rya.pcj.fluo/pcj.fluo.test.base/src/main/java/org/apache/rya/kafka/base/EmbeddedKafkaSingleton.java b/extras/rya.pcj.fluo/pcj.fluo.test.base/src/main/java/org/apache/rya/kafka/base/EmbeddedKafkaSingleton.java
new file mode 100644
index 0000000..933377b
--- /dev/null
+++ b/extras/rya.pcj.fluo/pcj.fluo.test.base/src/main/java/org/apache/rya/kafka/base/EmbeddedKafkaSingleton.java
@@ -0,0 +1,87 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *   http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied.  See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+package org.apache.rya.kafka.base;
+
+import java.io.IOException;
+
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
+
+/**
+ * Provides a singleton instance of an {@link EmbeddedKafkaInstance} and
+ * includes a shutdown hook to ensure any open resources are closed on JVM exit.
+ * <p>
+ * This class is derived from MiniAccumuloSingleton.
+ */
+public class EmbeddedKafkaSingleton {
+
+    public static EmbeddedKafkaInstance getInstance() {
+        return InstanceHolder.SINGLETON.instance;
+    }
+
+    private EmbeddedKafkaSingleton() {
+        // hiding implicit default constructor
+    }
+
+    private enum InstanceHolder {
+
+        SINGLETON;
+
+        private final Logger log;
+        private final EmbeddedKafkaInstance instance;
+
+        InstanceHolder() {
+            this.log = LoggerFactory.getLogger(EmbeddedKafkaInstance.class);
+            this.instance = new EmbeddedKafkaInstance();
+            try {
+                this.instance.startup();
+
+                // JUnit does not have an overall lifecycle event for tearing down
+                // this kind of resource, but shutdown hooks work alright in practice
+                // since this should only be used during testing
+
+                // The only other alternative for lifecycle management is to use a
+                // suite lifecycle to enclose the tests that need this resource.
+                // In practice this becomes unwieldy.
+
+                Runtime.getRuntime().addShutdownHook(new Thread() {
+                    @Override
+                    public void run() {
+                        try {
+                            InstanceHolder.this.instance.shutdown();
+                        } catch (final Throwable t) {
+                            // logging frameworks will likely be shut down
+                            t.printStackTrace(System.err);
+                        }
+                    }
+                });
+
+            } catch (final InterruptedException e) {
+                Thread.currentThread().interrupt();
+                log.error("Interrupted while starting EmbeddedKafkaInstance", e);
+            } catch (final IOException e) {
+                log.error("Unexpected error while starting EmbeddedKafkaInstance", e);
+            } catch (final Throwable e) {
+                // catching throwable because failure to construct an enum
+                // instance will lead to another error being thrown downstream
+                log.error("Unexpected throwable while starting EmbeddedKafkaInstance", e);
+            }
+        }
+    }
+}
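
Usage is a single static call (a sketch, not part of this patch): every test in the JVM shares the same broker, and the shutdown hook registered above stops it when the JVM exits.

    // Illustrative fragment: all callers receive the same running instance.
    final EmbeddedKafkaInstance kafka = EmbeddedKafkaSingleton.getInstance();
    final String bootstrapServers = kafka.createBootstrapServerConfig()
            .getProperty(CommonClientConfigs.BOOTSTRAP_SERVERS_CONFIG);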

http://git-wip-us.apache.org/repos/asf/incubator-rya/blob/82df3ad0/extras/rya.pcj.fluo/pcj.fluo.test.base/src/main/java/org/apache/rya/kafka/base/KafkaITBase.java
----------------------------------------------------------------------
diff --git a/extras/rya.pcj.fluo/pcj.fluo.test.base/src/main/java/org/apache/rya/kafka/base/KafkaITBase.java b/extras/rya.pcj.fluo/pcj.fluo.test.base/src/main/java/org/apache/rya/kafka/base/KafkaITBase.java
index b9be828..da4526c 100644
--- a/extras/rya.pcj.fluo/pcj.fluo.test.base/src/main/java/org/apache/rya/kafka/base/KafkaITBase.java
+++ b/extras/rya.pcj.fluo/pcj.fluo.test.base/src/main/java/org/apache/rya/kafka/base/KafkaITBase.java
@@ -18,61 +18,21 @@
  */
 package org.apache.rya.kafka.base;
 
-import java.nio.file.Files;
 import java.util.Properties;
 
-import org.I0Itec.zkclient.ZkClient;
-import org.junit.After;
-import org.junit.Before;
-
-import kafka.server.KafkaConfig;
-import kafka.server.KafkaServer;
-import kafka.utils.MockTime;
-import kafka.utils.TestUtils;
-import kafka.utils.Time;
-import kafka.utils.ZKStringSerializer$;
-import kafka.utils.ZkUtils;
-import kafka.zk.EmbeddedZookeeper;
-
+/**
+ * A base class intended to be extended by Kafka integration tests.
+ */
 public class KafkaITBase {
 
-    private static final String ZKHOST = "127.0.0.1";
-    private static final String BROKERHOST = "127.0.0.1";
-    private static final String BROKERPORT = "9092";
-    private KafkaServer kafkaServer;
-    private EmbeddedZookeeper zkServer;
-    private ZkClient zkClient;
-    
-    @Before
-    public void setupKafka() throws Exception {
+    private static EmbeddedKafkaInstance embeddedKafka = EmbeddedKafkaSingleton.getInstance();
 
-        // Setup Kafka.
-        zkServer = new EmbeddedZookeeper();
-        final String zkConnect = ZKHOST + ":" + zkServer.port();
-        zkClient = new ZkClient(zkConnect, 30000, 30000, ZKStringSerializer$.MODULE$);
-        ZkUtils.apply(zkClient, false);
-
-        // setup Broker
-        final Properties brokerProps = new Properties();
-        brokerProps.setProperty("zookeeper.connect", zkConnect);
-        brokerProps.setProperty("broker.id", "0");
-        brokerProps.setProperty("log.dirs", Files.createTempDirectory("kafka-").toAbsolutePath().toString());
-        brokerProps.setProperty("listeners", "PLAINTEXT://" + BROKERHOST + ":" + BROKERPORT);
-        final KafkaConfig config = new KafkaConfig(brokerProps);
-        final Time mock = new MockTime();
-        kafkaServer = TestUtils.createServer(config, mock);
-    }
-    
     /**
-     * Close all the Kafka mini server and mini-zookeeper
-     *
-     * @see org.apache.rya.indexing.pcj.fluo.ITBase#shutdownMiniResources()
+     * @return A new Properties object containing the correct value for Kafka's
+     *         {@link CommonClientConfigs#BOOTSTRAP_SERVERS_CONFIG}.
      */
-    @After
-    public void teardownKafka() {
-        kafkaServer.shutdown();
-        zkClient.close();
-        zkServer.shutdown();
+    protected Properties createBootstrapServerConfig() {
+        return embeddedKafka.createBootstrapServerConfig();
     }
-    
+
 }

http://git-wip-us.apache.org/repos/asf/incubator-rya/blob/82df3ad0/extras/rya.pcj.fluo/pcj.fluo.test.base/src/main/java/org/apache/rya/kafka/base/KafkaTestInstanceRule.java
----------------------------------------------------------------------
diff --git a/extras/rya.pcj.fluo/pcj.fluo.test.base/src/main/java/org/apache/rya/kafka/base/KafkaTestInstanceRule.java b/extras/rya.pcj.fluo/pcj.fluo.test.base/src/main/java/org/apache/rya/kafka/base/KafkaTestInstanceRule.java
new file mode 100644
index 0000000..a9ee7b5
--- /dev/null
+++ b/extras/rya.pcj.fluo/pcj.fluo.test.base/src/main/java/org/apache/rya/kafka/base/KafkaTestInstanceRule.java
@@ -0,0 +1,98 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *   http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied.  See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+package org.apache.rya.kafka.base;
+
+import java.util.Properties;
+
+import org.I0Itec.zkclient.ZkClient;
+import org.junit.rules.ExternalResource;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
+
+import kafka.admin.AdminUtils;
+import kafka.admin.RackAwareMode;
+import kafka.utils.ZKStringSerializer$;
+import kafka.utils.ZkUtils;
+
+
+/**
+ * Provides a JUnit Rule for interacting with the {@link EmbeddedKafkaSingleton}.
+ *
+ */
+public class KafkaTestInstanceRule extends ExternalResource {
+    private static final Logger logger = LoggerFactory.getLogger(KafkaTestInstanceRule.class);
+    private static final EmbeddedKafkaInstance kafkaInstance = EmbeddedKafkaSingleton.getInstance();
+    private String kafkaTopicName;
+    private final boolean createTopic;
+
+    /**
+     * @param createTopic - If true, a topic shall be created for the value
+     *            provided by {@link #getKafkaTopicName()}. If false, no topics
+     *            shall be created.
+     */
+    public KafkaTestInstanceRule(final boolean createTopic) {
+        this.createTopic = createTopic;
+    }
+
+    /**
+     * @return A unique topic name for this test execution. If multiple topics are required by a test, use this value as
+     *         a prefix.
+     */
+    public String getKafkaTopicName() {
+        if (kafkaTopicName == null) {
+            throw new IllegalStateException("Cannot get Kafka Topic Name outside of a test execution.");
+        }
+        return kafkaTopicName;
+    }
+
+    @Override
+    protected void before() throws Throwable {
+        // Get the next kafka topic name.
+        kafkaTopicName = kafkaInstance.getUniqueTopicName();
+
+        if(createTopic) {
+            createTopic(kafkaTopicName);
+        }
+    }
+
+    @Override
+    protected void after() {
+        kafkaTopicName = null;
+    }
+
+    /**
+     * Utility method to provide additional unique topics if they are required.
+     * @param topicName - The Kafka topic to create.
+     */
+    public void createTopic(final String topicName) {
+        // Setup Kafka.
+        ZkUtils zkUtils = null;
+        try {
+            logger.info("Creating Kafka Topic: '{}'", topicName);
+            zkUtils = ZkUtils.apply(new ZkClient(kafkaInstance.getZookeeperConnect(), 30000, 30000, ZKStringSerializer$.MODULE$), false);
+            AdminUtils.createTopic(zkUtils, topicName, 1, 1, new Properties(), RackAwareMode.Disabled$.MODULE$);
+        }
+        finally {
+            if(zkUtils != null) {
+                zkUtils.close();
+            }
+        }
+    }
+
+}
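
A hedged sketch of how a test might combine KafkaITBase and this rule (the class and method names below are illustrative, not from this patch):

    // Illustrative test class; requires org.junit.Rule and org.junit.Test.
    public class ExampleKafkaIT extends KafkaITBase {

        // Request that a fresh topic be created before each test method.
        @Rule
        public KafkaTestInstanceRule kafkaRule = new KafkaTestInstanceRule(true);

        @Test
        public void publishesToTopic() throws Exception {
            final String topic = kafkaRule.getKafkaTopicName();
            // Build producers/consumers from createBootstrapServerConfig()
            // and exercise 'topic' here.
        }
    }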

http://git-wip-us.apache.org/repos/asf/incubator-rya/blob/82df3ad0/extras/rya.periodic.service/periodic.service.integration.tests/pom.xml
----------------------------------------------------------------------
diff --git a/extras/rya.periodic.service/periodic.service.integration.tests/pom.xml b/extras/rya.periodic.service/periodic.service.integration.tests/pom.xml
index 1b784a6..20a0647 100644
--- a/extras/rya.periodic.service/periodic.service.integration.tests/pom.xml
+++ b/extras/rya.periodic.service/periodic.service.integration.tests/pom.xml
@@ -1,62 +1,71 @@
+<?xml version="1.0" encoding="utf-8"?>
+<!--
+Licensed to the Apache Software Foundation (ASF) under one
+or more contributor license agreements.  See the NOTICE file
+distributed with this work for additional information
+regarding copyright ownership.  The ASF licenses this file
+to you under the Apache License, Version 2.0 (the
+"License"); you may not use this file except in compliance
+with the License.  You may obtain a copy of the License at
+
+  http://www.apache.org/licenses/LICENSE-2.0
+
+Unless required by applicable law or agreed to in writing,
+software distributed under the License is distributed on an
+"AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+KIND, either express or implied.  See the License for the
+specific language governing permissions and limitations
+under the License.
+-->
 <project xmlns="http://maven.apache.org/POM/4.0.0" xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance"
-	xsi:schemaLocation="http://maven.apache.org/POM/4.0.0 http://maven.apache.org/xsd/maven-4.0.0.xsd">
-	<!-- Licensed to the Apache Software Foundation (ASF) under one or more 
-		contributor license agreements. See the NOTICE file distributed with this 
-		work for additional information regarding copyright ownership. The ASF licenses 
-		this file to you under the Apache License, Version 2.0 (the "License"); you 
-		may not use this file except in compliance with the License. You may obtain 
-		a copy of the License at http://www.apache.org/licenses/LICENSE-2.0 Unless 
-		required by applicable law or agreed to in writing, software distributed 
-		under the License is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES 
-		OR CONDITIONS OF ANY KIND, either express or implied. See the License for 
-		the specific language governing permissions and limitations under the License. -->
-	<modelVersion>4.0.0</modelVersion>
+    xsi:schemaLocation="http://maven.apache.org/POM/4.0.0 http://maven.apache.org/xsd/maven-4.0.0.xsd">
+    <modelVersion>4.0.0</modelVersion>
+
+    <parent>
+        <groupId>org.apache.rya</groupId>
+        <artifactId>rya.periodic.service</artifactId>
+        <version>3.2.11-incubating-SNAPSHOT</version>
+    </parent>
 
-	<parent>
-		<groupId>org.apache.rya</groupId>
-		<artifactId>rya.periodic.service</artifactId>
-		<version>3.2.11-incubating-SNAPSHOT</version>
-	</parent>
+    <artifactId>rya.periodic.service.integration.tests</artifactId>
 
-	<artifactId>rya.periodic.service.integration.tests</artifactId>
-	
-	<name>Apache Rya Periodic Service Integration Tests</name>
+    <name>Apache Rya Periodic Service Integration Tests</name>
     <description>Integration Tests for Rya Periodic Service</description>
 
-	<dependencies>
-		<dependency>
-			<groupId>org.apache.rya</groupId>
-			<artifactId>rya.pcj.fluo.test.base</artifactId>
-			<exclusions>
-				<exclusion>
-					<artifactId>log4j-1.2-api</artifactId>
-					<groupId>org.apache.logging.log4j</groupId>
-				</exclusion>
-				<exclusion>
-					<artifactId>log4j-api</artifactId>
-					<groupId>org.apache.logging.log4j</groupId>
-				</exclusion>
-				<exclusion>
-					<artifactId>log4j-core</artifactId>
-					<groupId>org.apache.logging.log4j</groupId>
-				</exclusion>
-			</exclusions>
-		</dependency>
-		<dependency>
-			<groupId>org.apache.rya</groupId>
-			<artifactId>rya.periodic.service.notification</artifactId>
-			<version>${project.version}</version>
-			<exclusions>
-				<exclusion>
-					<artifactId>logback-classic</artifactId>
-					<groupId>ch.qos.logback</groupId>
-				</exclusion>
-				<exclusion>
-					<artifactId>logback-core</artifactId>
-					<groupId>ch.qos.logback</groupId>
-				</exclusion>
-			</exclusions>
-		</dependency>
-	</dependencies>
+    <dependencies>
+        <dependency>
+            <groupId>org.apache.rya</groupId>
+            <artifactId>rya.pcj.fluo.test.base</artifactId>
+            <exclusions>
+                <exclusion>
+                    <artifactId>log4j-1.2-api</artifactId>
+                    <groupId>org.apache.logging.log4j</groupId>
+                </exclusion>
+                <exclusion>
+                    <artifactId>log4j-api</artifactId>
+                    <groupId>org.apache.logging.log4j</groupId>
+                </exclusion>
+                <exclusion>
+                    <artifactId>log4j-core</artifactId>
+                    <groupId>org.apache.logging.log4j</groupId>
+                </exclusion>
+            </exclusions>
+        </dependency>
+        <dependency>
+            <groupId>org.apache.rya</groupId>
+            <artifactId>rya.periodic.service.notification</artifactId>
+            <version>${project.version}</version>
+            <exclusions>
+                <exclusion>
+                    <artifactId>logback-classic</artifactId>
+                    <groupId>ch.qos.logback</groupId>
+                </exclusion>
+                <exclusion>
+                    <artifactId>logback-core</artifactId>
+                    <groupId>ch.qos.logback</groupId>
+                </exclusion>
+            </exclusions>
+        </dependency>
+    </dependencies>
 
 </project>
\ No newline at end of file

http://git-wip-us.apache.org/repos/asf/incubator-rya/blob/82df3ad0/extras/rya.periodic.service/periodic.service.integration.tests/src/test/java/org/apache/rya/periodic/notification/exporter/PeriodicNotificationExporterIT.java
----------------------------------------------------------------------
diff --git a/extras/rya.periodic.service/periodic.service.integration.tests/src/test/java/org/apache/rya/periodic/notification/exporter/PeriodicNotificationExporterIT.java b/extras/rya.periodic.service/periodic.service.integration.tests/src/test/java/org/apache/rya/periodic/notification/exporter/PeriodicNotificationExporterIT.java
index c0efc4f..c5dc809 100644
--- a/extras/rya.periodic.service/periodic.service.integration.tests/src/test/java/org/apache/rya/periodic/notification/exporter/PeriodicNotificationExporterIT.java
+++ b/extras/rya.periodic.service/periodic.service.integration.tests/src/test/java/org/apache/rya/periodic/notification/exporter/PeriodicNotificationExporterIT.java
@@ -22,6 +22,7 @@ import java.util.Arrays;
 import java.util.HashSet;
 import java.util.Properties;
 import java.util.Set;
+import java.util.UUID;
 import java.util.concurrent.BlockingQueue;
 import java.util.concurrent.LinkedBlockingQueue;
 
@@ -34,8 +35,10 @@ import org.apache.kafka.common.serialization.StringDeserializer;
 import org.apache.kafka.common.serialization.StringSerializer;
 import org.apache.rya.indexing.pcj.storage.PeriodicQueryResultStorage;
 import org.apache.rya.kafka.base.KafkaITBase;
+import org.apache.rya.kafka.base.KafkaTestInstanceRule;
 import org.apache.rya.periodic.notification.serialization.BindingSetSerDe;
 import org.junit.Assert;
+import org.junit.Rule;
 import org.junit.Test;
 import org.openrdf.model.ValueFactory;
 import org.openrdf.model.impl.ValueFactoryImpl;
@@ -44,82 +47,91 @@ import org.openrdf.query.algebra.evaluation.QueryBindingSet;
 
 public class PeriodicNotificationExporterIT extends KafkaITBase {
 
+
+    @Rule
+    public KafkaTestInstanceRule kafkaTestInstanceRule = new KafkaTestInstanceRule(false);
+
+
     private static final ValueFactory vf = new ValueFactoryImpl();
-    
+
     @Test
     public void testExporter() throws InterruptedException {
-        
-        BlockingQueue<BindingSetRecord> records = new LinkedBlockingQueue<>();
-        Properties props = createKafkaConfig();
-        
-        KafkaExporterExecutor exporter = new KafkaExporterExecutor(new KafkaProducer<String, BindingSet>(props), 1, records);
+
+        final String topic1 = kafkaTestInstanceRule.getKafkaTopicName() + "1";
+        final String topic2 = kafkaTestInstanceRule.getKafkaTopicName() + "2";
+
+        kafkaTestInstanceRule.createTopic(topic1);
+        kafkaTestInstanceRule.createTopic(topic2);
+
+        final BlockingQueue<BindingSetRecord> records = new LinkedBlockingQueue<>();
+
+        final KafkaExporterExecutor exporter = new KafkaExporterExecutor(new KafkaProducer<String, BindingSet>(createKafkaProducerConfig()), 1, records);
         exporter.start();
-        
-        QueryBindingSet bs1 = new QueryBindingSet();
+        final QueryBindingSet bs1 = new QueryBindingSet();
         bs1.addBinding(PeriodicQueryResultStorage.PeriodicBinId, vf.createLiteral(1L));
         bs1.addBinding("name", vf.createURI("uri:Bob"));
-        BindingSetRecord record1 = new BindingSetRecord(bs1, "topic1");
-        
-        QueryBindingSet bs2 = new QueryBindingSet();
+        final BindingSetRecord record1 = new BindingSetRecord(bs1, topic1);
+
+        final QueryBindingSet bs2 = new QueryBindingSet();
         bs2.addBinding(PeriodicQueryResultStorage.PeriodicBinId, vf.createLiteral(2L));
         bs2.addBinding("name", vf.createURI("uri:Joe"));
-        BindingSetRecord record2 = new BindingSetRecord(bs2, "topic2");
-        
+        final BindingSetRecord record2 = new BindingSetRecord(bs2, topic2);
+
         records.add(record1);
         records.add(record2);
-        
-        Set<BindingSet> expected1 = new HashSet<>();
+
+        final Set<BindingSet> expected1 = new HashSet<>();
         expected1.add(bs1);
-        Set<BindingSet> expected2 = new HashSet<>();
+        final Set<BindingSet> expected2 = new HashSet<>();
         expected2.add(bs2);
-        
-        Set<BindingSet> actual1 = getBindingSetsFromKafka("topic1");
-        Set<BindingSet> actual2 = getBindingSetsFromKafka("topic2");
-        
+
+        final Set<BindingSet> actual1 = getBindingSetsFromKafka(topic1);
+        final Set<BindingSet> actual2 = getBindingSetsFromKafka(topic2);
+
         Assert.assertEquals(expected1, actual1);
         Assert.assertEquals(expected2, actual2);
-        
+
         exporter.stop();
-        
     }
-    
-    
-    private Properties createKafkaConfig() {
-        Properties props = new Properties();
-        props.setProperty(ConsumerConfig.BOOTSTRAP_SERVERS_CONFIG, "127.0.0.1:9092");
-        props.setProperty(ConsumerConfig.GROUP_ID_CONFIG, "group0");
-        props.setProperty(ConsumerConfig.CLIENT_ID_CONFIG, "consumer0");
-        props.put(ConsumerConfig.AUTO_OFFSET_RESET_CONFIG, "earliest");
+
+
+    private Properties createKafkaProducerConfig() {
+        final Properties props = createBootstrapServerConfig();
         props.setProperty(ProducerConfig.KEY_SERIALIZER_CLASS_CONFIG, StringSerializer.class.getName());
         props.setProperty(ProducerConfig.VALUE_SERIALIZER_CLASS_CONFIG, BindingSetSerDe.class.getName());
+        return props;
+    }
+    private Properties createKafkaConsumerConfig() {
+        final Properties props = createBootstrapServerConfig();
+        props.setProperty(ConsumerConfig.GROUP_ID_CONFIG, UUID.randomUUID().toString());
+        props.setProperty(ConsumerConfig.CLIENT_ID_CONFIG, "consumer0");
+        props.setProperty(ConsumerConfig.AUTO_OFFSET_RESET_CONFIG, "earliest");
         props.setProperty(ConsumerConfig.KEY_DESERIALIZER_CLASS_CONFIG, StringDeserializer.class.getName());
         props.setProperty(ConsumerConfig.VALUE_DESERIALIZER_CLASS_CONFIG, BindingSetSerDe.class.getName());
-
         return props;
     }
-    
-    
-    private KafkaConsumer<String, BindingSet> makeBindingSetConsumer(final String TopicName) {
+
+
+    private KafkaConsumer<String, BindingSet> makeBindingSetConsumer(final String topicName) {
         // setup consumer
-        final Properties consumerProps = createKafkaConfig();
-        final KafkaConsumer<String, BindingSet> consumer = new KafkaConsumer<>(consumerProps);
-        consumer.subscribe(Arrays.asList(TopicName));
+        final KafkaConsumer<String, BindingSet> consumer = new KafkaConsumer<>(createKafkaConsumerConfig());
+        consumer.subscribe(Arrays.asList(topicName));
         return consumer;
     }
-    
-    private Set<BindingSet> getBindingSetsFromKafka(String topic) {
+
+    private Set<BindingSet> getBindingSetsFromKafka(final String topicName) {
         KafkaConsumer<String, BindingSet> consumer = null;
 
         try {
-            consumer = makeBindingSetConsumer(topic);
-            ConsumerRecords<String, BindingSet> records = consumer.poll(5000);
+            consumer = makeBindingSetConsumer(topicName);
+            final ConsumerRecords<String, BindingSet> records = consumer.poll(20000);  // Wait up to 20 seconds for a result to be published.
 
-            Set<BindingSet> bindingSets = new HashSet<>();
+            final Set<BindingSet> bindingSets = new HashSet<>();
             records.forEach(x -> bindingSets.add(x.value()));
 
             return bindingSets;
 
-        } catch (Exception e) {
+        } catch (final Exception e) {
             throw new RuntimeException(e);
         } finally {
             if (consumer != null) {

http://git-wip-us.apache.org/repos/asf/incubator-rya/blob/82df3ad0/extras/rya.periodic.service/periodic.service.integration.tests/src/test/resources/log4j.properties
----------------------------------------------------------------------
diff --git a/extras/rya.periodic.service/periodic.service.integration.tests/src/test/resources/log4j.properties b/extras/rya.periodic.service/periodic.service.integration.tests/src/test/resources/log4j.properties
new file mode 100644
index 0000000..19cc13c
--- /dev/null
+++ b/extras/rya.periodic.service/periodic.service.integration.tests/src/test/resources/log4j.properties
@@ -0,0 +1,37 @@
+#
+# Licensed to the Apache Software Foundation (ASF) under one
+# or more contributor license agreements.  See the NOTICE file
+# distributed with this work for additional information
+# regarding copyright ownership.  The ASF licenses this file
+# to you under the Apache License, Version 2.0 (the
+# "License"); you may not use this file except in compliance
+# with the License.  You may obtain a copy of the License at
+#
+#     http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing,
+# software distributed under the License is distributed on an
+# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+# KIND, either express or implied.  See the License for the
+# specific language governing permissions and limitations
+# under the License.
+#
+
+# Valid levels:
+# TRACE, DEBUG, INFO, WARN, ERROR and FATAL
+log4j.rootLogger=INFO, CONSOLE
+
+# Set independent logging levels
+log4j.logger.org.apache.zookeeper=WARN
+log4j.logger.kafka=WARN
+log4j.logger.org.apache.kafka=WARN
+
+# LOGFILE is set to be a File appender using a PatternLayout.
+log4j.appender.CONSOLE=org.apache.log4j.ConsoleAppender
+#log4j.appender.CONSOLE.Threshold=DEBUG
+
+log4j.appender.CONSOLE.layout=org.apache.log4j.PatternLayout
+log4j.appender.CONSOLE.layout.ConversionPattern=%d [%t] %-5p %c - %m%n
+
+#log4j.appender.CONSOLE.layout=org.apache.log4j.EnhancedPatternLayout
+#log4j.appender.CONSOLE.layout.ConversionPattern=%d [%t] %-5p %c{1.} - %m%n
\ No newline at end of file


[2/5] incubator-rya git commit: RYA-246-Query-Export-Strategy. Closes #213.

Posted by ca...@apache.org.
http://git-wip-us.apache.org/repos/asf/incubator-rya/blob/05147266/extras/rya.periodic.service/periodic.service.notification/src/main/java/org/apache/rya/periodic/notification/pruner/PeriodicQueryPruner.java
----------------------------------------------------------------------
diff --git a/extras/rya.periodic.service/periodic.service.notification/src/main/java/org/apache/rya/periodic/notification/pruner/PeriodicQueryPruner.java b/extras/rya.periodic.service/periodic.service.notification/src/main/java/org/apache/rya/periodic/notification/pruner/PeriodicQueryPruner.java
index 97e3f22..516690e 100644
--- a/extras/rya.periodic.service/periodic.service.notification/src/main/java/org/apache/rya/periodic/notification/pruner/PeriodicQueryPruner.java
+++ b/extras/rya.periodic.service/periodic.service.notification/src/main/java/org/apache/rya/periodic/notification/pruner/PeriodicQueryPruner.java
@@ -26,9 +26,8 @@ import java.util.concurrent.atomic.AtomicBoolean;
 import org.apache.fluo.api.client.FluoClient;
 import org.apache.fluo.api.client.Snapshot;
 import org.apache.fluo.api.client.SnapshotBase;
-import org.apache.fluo.api.data.Bytes;
 import org.apache.log4j.Logger;
-import org.apache.rya.indexing.pcj.fluo.app.query.FluoQueryColumns;
+import org.apache.rya.indexing.pcj.fluo.app.NodeType;
 import org.apache.rya.indexing.pcj.fluo.app.util.PeriodicQueryUtil;
 import org.apache.rya.periodic.notification.api.BinPruner;
 import org.apache.rya.periodic.notification.api.NodeBin;
@@ -79,12 +78,12 @@ public class PeriodicQueryPruner implements BinPruner, Runnable {
      */
     @Override
     public void pruneBindingSetBin(NodeBin nodeBin) {
-        String id = nodeBin.getNodeId();
+        String pcjId = nodeBin.getNodeId();
         long bin = nodeBin.getBin();
         try(Snapshot sx = client.newSnapshot()) {
-            String queryId = sx.get(Bytes.of(id), FluoQueryColumns.PCJ_ID_QUERY_ID).toString();
+            String queryId = NodeType.generateNewIdForType(NodeType.QUERY, pcjId);
             Set<String> fluoIds = getNodeIdsFromResultId(sx, queryId);
-            accPruner.pruneBindingSetBin(new NodeBin(id, bin));
+            accPruner.pruneBindingSetBin(nodeBin);
             for(String fluoId: fluoIds) {
                 fluoPruner.pruneBindingSetBin(new NodeBin(fluoId, bin));
             }

http://git-wip-us.apache.org/repos/asf/incubator-rya/blob/05147266/extras/rya.periodic.service/periodic.service.notification/src/main/java/org/apache/rya/periodic/notification/recovery/PeriodicNotificationProvider.java
----------------------------------------------------------------------
diff --git a/extras/rya.periodic.service/periodic.service.notification/src/main/java/org/apache/rya/periodic/notification/recovery/PeriodicNotificationProvider.java b/extras/rya.periodic.service/periodic.service.notification/src/main/java/org/apache/rya/periodic/notification/recovery/PeriodicNotificationProvider.java
index 27e06f0..69bd39c 100644
--- a/extras/rya.periodic.service/periodic.service.notification/src/main/java/org/apache/rya/periodic/notification/recovery/PeriodicNotificationProvider.java
+++ b/extras/rya.periodic.service/periodic.service.notification/src/main/java/org/apache/rya/periodic/notification/recovery/PeriodicNotificationProvider.java
@@ -35,6 +35,7 @@ import org.apache.rya.indexing.pcj.fluo.app.NodeType;
 import org.apache.rya.indexing.pcj.fluo.app.query.FluoQueryColumns;
 import org.apache.rya.indexing.pcj.fluo.app.query.FluoQueryMetadataDAO;
 import org.apache.rya.indexing.pcj.fluo.app.query.PeriodicQueryMetadata;
+import org.apache.rya.indexing.pcj.fluo.app.util.FluoQueryUtils;
 import org.apache.rya.periodic.notification.api.NotificationCoordinatorExecutor;
 import org.apache.rya.periodic.notification.coordinator.PeriodicNotificationCoordinatorExecutor;
 import org.apache.rya.periodic.notification.notification.CommandNotification;
@@ -120,7 +121,7 @@ public class PeriodicNotificationProvider {
             id = getQueryIdFromPeriodicId(sx, sx.get(Bytes.of(nodeId), FluoQueryColumns.PERIODIC_QUERY_PARENT_NODE_ID).toString());
             break;
         case QUERY:
-            id = sx.get(Bytes.of(nodeId), FluoQueryColumns.RYA_PCJ_ID).toString();
+            id = FluoQueryUtils.convertFluoQueryIdToPcjId(nodeId);
             break;
         case AGGREGATION: 
             id = getQueryIdFromPeriodicId(sx, sx.get(Bytes.of(nodeId), FluoQueryColumns.AGGREGATION_PARENT_NODE_ID).toString());

http://git-wip-us.apache.org/repos/asf/incubator-rya/blob/05147266/extras/shell/src/main/java/org/apache/rya/shell/RyaAdminCommands.java
----------------------------------------------------------------------
diff --git a/extras/shell/src/main/java/org/apache/rya/shell/RyaAdminCommands.java b/extras/shell/src/main/java/org/apache/rya/shell/RyaAdminCommands.java
index 9239dc7..8fd95d3 100644
--- a/extras/shell/src/main/java/org/apache/rya/shell/RyaAdminCommands.java
+++ b/extras/shell/src/main/java/org/apache/rya/shell/RyaAdminCommands.java
@@ -22,8 +22,11 @@ import static java.util.Objects.requireNonNull;
 
 import java.io.IOException;
 import java.util.Collections;
+import java.util.HashSet;
 import java.util.List;
+import java.util.Set;
 
+import org.apache.rya.api.client.CreatePCJ.ExportStrategy;
 import org.apache.rya.api.client.GetInstanceDetails;
 import org.apache.rya.api.client.Install.DuplicateInstanceNameException;
 import org.apache.rya.api.client.Install.InstallConfiguration;
@@ -280,7 +283,11 @@ public class RyaAdminCommands implements CommandMarker {
     }
 
     @CliCommand(value = CREATE_PCJ_CMD, help = "Creates and starts the maintenance of a new PCJ using a Fluo application.")
-    public String createPcj() {
+    public String createPcj(
+            @CliOption(key = {"exportToRya"}, mandatory = false, help = "Indicates that results for the query should be exported to a Rya PCJ table.")
+            boolean exportToRya,
+            @CliOption(key = {"exportToKafka"}, mandatory = false, help = "Indicates that results for the query should be exported to a Kafka Topic.")
+            boolean exportToKafka) {
         // Fetch the command that is connected to the store.
         final ShellState shellState = state.getShellState();
         final RyaClient commands = shellState.getConnectedCommands().get();
@@ -290,8 +297,18 @@ public class RyaAdminCommands implements CommandMarker {
             // Prompt the user for the SPARQL.
             final Optional<String> sparql = sparqlPrompt.getSparql();
             if (sparql.isPresent()) {
+                Set<ExportStrategy> strategies = new HashSet<>();
+                if(exportToRya) {
+                    strategies.add(ExportStrategy.RYA);
+                }
+                if(exportToKafka) {
+                    strategies.add(ExportStrategy.KAFKA);
+                }
+                if(strategies.size() == 0) {
+                    return "The user must specify at least one export strategy by setting either exportToRya or exportToKafka to true."; 
+                }
                 // Execute the command.
-                final String pcjId = commands.getCreatePCJ().createPCJ(ryaInstance, sparql.get());
+                final String pcjId = commands.getCreatePCJ().createPCJ(ryaInstance, sparql.get(), strategies);
                 // Return a message that indicates the ID of the newly created ID.
                 return String.format("The PCJ has been created. Its ID is '%s'.", pcjId);
             } else {

http://git-wip-us.apache.org/repos/asf/incubator-rya/blob/05147266/extras/shell/src/test/java/org/apache/rya/shell/RyaAdminCommandsTest.java
----------------------------------------------------------------------
diff --git a/extras/shell/src/test/java/org/apache/rya/shell/RyaAdminCommandsTest.java b/extras/shell/src/test/java/org/apache/rya/shell/RyaAdminCommandsTest.java
index e3e8d98..cab34e9 100644
--- a/extras/shell/src/test/java/org/apache/rya/shell/RyaAdminCommandsTest.java
+++ b/extras/shell/src/test/java/org/apache/rya/shell/RyaAdminCommandsTest.java
@@ -28,6 +28,7 @@ import static org.mockito.Mockito.when;
 import java.io.IOException;
 import java.util.Date;
 import java.util.List;
+import java.util.Set;
 import java.util.TimeZone;
 
 import org.apache.rya.api.client.AddUser;
@@ -43,6 +44,7 @@ import org.apache.rya.api.client.RemoveUser;
 import org.apache.rya.api.client.RyaClient;
 import org.apache.rya.api.client.RyaClientException;
 import org.apache.rya.api.client.Uninstall;
+import org.apache.rya.api.client.CreatePCJ.ExportStrategy;
 import org.apache.rya.api.client.accumulo.AccumuloConnectionDetails;
 import org.apache.rya.api.instance.RyaDetails;
 import org.apache.rya.api.instance.RyaDetails.EntityCentricIndexDetails;
@@ -61,6 +63,7 @@ import org.junit.Test;
 
 import com.google.common.base.Optional;
 import com.google.common.collect.Lists;
+import com.google.common.collect.Sets;
 
 /**
  * Unit tests the methods of {@link RyaAdminCommands}.
@@ -74,7 +77,8 @@ public class RyaAdminCommandsTest {
         final String sparql = "SELECT * WHERE { ?person <http://isA> ?noun }";
         final String pcjId = "123412342";
         final CreatePCJ mockCreatePCJ = mock(CreatePCJ.class);
-        when(mockCreatePCJ.createPCJ( eq(instanceName), eq(sparql) ) ).thenReturn( pcjId );
+        final Set<ExportStrategy> strategies = Sets.newHashSet(ExportStrategy.RYA);
+        when(mockCreatePCJ.createPCJ( eq(instanceName), eq(sparql), eq(strategies) ) ).thenReturn( pcjId );
 
         final RyaClient mockCommands = mock(RyaClient.class);
         when(mockCommands.getCreatePCJ()).thenReturn( mockCreatePCJ );
@@ -88,10 +92,10 @@ public class RyaAdminCommandsTest {
 
         // Execute the command.
         final RyaAdminCommands commands = new RyaAdminCommands(state, mock(InstallPrompt.class), mockSparqlPrompt, mock(UninstallPrompt.class));
-        final String message = commands.createPcj();
+        final String message = commands.createPcj(true, false);
 
         // Verify the values that were provided to the command were passed through to CreatePCJ.
-        verify(mockCreatePCJ).createPCJ(eq(instanceName), eq(sparql));
+        verify(mockCreatePCJ).createPCJ(eq(instanceName), eq(sparql), eq(strategies));
 
         // Verify a message is returned that explains what was created.
         final String expected = "The PCJ has been created. Its ID is '123412342'.";
@@ -114,7 +118,7 @@ public class RyaAdminCommandsTest {
 
         // Execute the command.
         final RyaAdminCommands commands = new RyaAdminCommands(state, mock(InstallPrompt.class), mockSparqlPrompt, mock(UninstallPrompt.class));
-        final String message = commands.createPcj();
+        final String message = commands.createPcj(true, false);
 
         // Verify a message is returned that explains what was created.
         final String expected = "";


[3/5] incubator-rya git commit: RYA-246-Query-Export-Strategy. Closes #213.

Posted by ca...@apache.org.
http://git-wip-us.apache.org/repos/asf/incubator-rya/blob/05147266/extras/rya.pcj.fluo/pcj.fluo.app/src/main/java/org/apache/rya/indexing/pcj/fluo/app/query/FluoQuery.java
----------------------------------------------------------------------
diff --git a/extras/rya.pcj.fluo/pcj.fluo.app/src/main/java/org/apache/rya/indexing/pcj/fluo/app/query/FluoQuery.java b/extras/rya.pcj.fluo/pcj.fluo.app/src/main/java/org/apache/rya/indexing/pcj/fluo/app/query/FluoQuery.java
index 65db02c..17ab14f 100644
--- a/extras/rya.pcj.fluo/pcj.fluo.app/src/main/java/org/apache/rya/indexing/pcj/fluo/app/query/FluoQuery.java
+++ b/extras/rya.pcj.fluo/pcj.fluo.app/src/main/java/org/apache/rya/indexing/pcj/fluo/app/query/FluoQuery.java
@@ -27,7 +27,8 @@ import java.util.Map;
 import java.util.Map.Entry;
 
 import org.apache.commons.lang3.builder.EqualsBuilder;
-import org.apache.rya.indexing.pcj.fluo.app.IncrementalUpdateConstants.QueryType;
+import org.apache.rya.api.client.CreatePCJ.ExportStrategy;
+import org.apache.rya.api.client.CreatePCJ.QueryType;
 
 import com.google.common.base.Objects;
 import com.google.common.base.Optional;
@@ -92,9 +93,9 @@ public class FluoQuery {
         this.filterMetadata = requireNonNull(filterMetadata);
         this.joinMetadata = requireNonNull(joinMetadata);
         if(constructMetadata.isPresent()) {
-            this.type = QueryType.Construct;
+            this.type = QueryType.CONSTRUCT;
         } else {
-            this.type = QueryType.Projection;
+            this.type = QueryType.PROJECTION;
         }
     }
     
@@ -568,8 +569,9 @@ public class FluoQuery {
         
         /**
          * @return Creates a {@link FluoQuery} using the values that have been supplied to this builder.
+         * @throws UnsupportedQueryException 
          */
-        public FluoQuery build() {
+        public FluoQuery build() throws UnsupportedQueryException {
             checkArgument((projectionBuilders.size() > 0 || constructBuilder != null));
             
             Optional<PeriodicQueryMetadata.Builder> optionalPeriodicQueryBuilder = getPeriodicQueryBuilder();
@@ -603,12 +605,18 @@ public class FluoQuery {
                 aggregateMetadata.put(entry.getKey(), entry.getValue().build());
             }
 
+            QueryMetadata qMetadata = queryBuilder.build();
+            
             if(constructBuilder != null) {
                 if(periodicQueryMetadata != null) {
-                    throw new IllegalArgumentException("Queries containing sliding window filters and construct query patterns are not supported.");
+                    throw new UnsupportedQueryException("Queries containing sliding window filters and construct query patterns are not supported.");
                 }
-                return new FluoQuery(queryBuilder.build(), projectionMetadata.build(), Optional.of(constructBuilder.build()), Optional.fromNullable(periodicQueryMetadata), spMetadata.build(), filterMetadata.build(), joinMetadata.build(), aggregateMetadata.build());
+                return new FluoQuery(qMetadata, projectionMetadata.build(), Optional.of(constructBuilder.build()), Optional.fromNullable(periodicQueryMetadata), spMetadata.build(), filterMetadata.build(), joinMetadata.build(), aggregateMetadata.build());
             } else {
+                if(aggregationBuilders.size() > 0 && qMetadata.getQueryType() == QueryType.PROJECTION && qMetadata.getExportStrategies().contains(ExportStrategy.RYA)) {
+                    throw new UnsupportedQueryException("Exporting to Rya PCJ tables is currently not supported for queries containing aggregations.");
+                }
+                
                 return new FluoQuery(queryBuilder.build(), projectionMetadata.build(), Optional.absent(), Optional.fromNullable(periodicQueryMetadata), spMetadata.build(), filterMetadata.build(), joinMetadata.build(), aggregateMetadata.build());
             }
             

http://git-wip-us.apache.org/repos/asf/incubator-rya/blob/05147266/extras/rya.pcj.fluo/pcj.fluo.app/src/main/java/org/apache/rya/indexing/pcj/fluo/app/query/FluoQueryColumns.java
----------------------------------------------------------------------
diff --git a/extras/rya.pcj.fluo/pcj.fluo.app/src/main/java/org/apache/rya/indexing/pcj/fluo/app/query/FluoQueryColumns.java b/extras/rya.pcj.fluo/pcj.fluo.app/src/main/java/org/apache/rya/indexing/pcj/fluo/app/query/FluoQueryColumns.java
index 2eae4ff..8569a48 100644
--- a/extras/rya.pcj.fluo/pcj.fluo.app/src/main/java/org/apache/rya/indexing/pcj/fluo/app/query/FluoQueryColumns.java
+++ b/extras/rya.pcj.fluo/pcj.fluo.app/src/main/java/org/apache/rya/indexing/pcj/fluo/app/query/FluoQueryColumns.java
@@ -162,32 +162,6 @@ public class FluoQueryColumns {
      */
     public static final Column TRIPLES = new Column("triples", "SPO");
 
-    /**
-     * Stores the Rya assigned PCJ ID that the query's results reflect. This
-     * value defines where the results will be exported to.
-     * <p>
-     *   <table border="1" style="width:100%">
-     *     <tr> <th>Fluo Row</td> <th>Fluo Column</td> <th>Fluo Value</td> </tr>
-     *     <tr> <td>Query ID</td> <td>query:ryaPcjId</td> <td>Identifies which PCJ the results of this query will be exported to.</td> </tr>
-     *   </table>
-     * </p>
-     */
-    public static final Column RYA_PCJ_ID = new Column("query", "ryaPcjId");
-
-    /**
-     * Associates a PCJ ID with a Query ID. This enables a quick lookup of the Query ID from the PCJ ID and is useful of Deleting PCJs.
-     * <p>
-     *   <table border="1" style="width:100%">
-     *     <tr> <th>Fluo Row</td> <th>Fluo Column</td> <th>Fluo Value</td> </tr>
-     *     <tr> <td>PCJ ID</td> <td>ryaPcjId:queryId</td> <td>Identifies which Query ID is associated with the given PCJ ID.</td> </tr>
-     *   </table>
-     * </p>
-     */
-    public static final Column PCJ_ID_QUERY_ID = new Column("ryaPcjId", "queryId");
-
-    // Sparql to Query ID used to list all queries that are in the system.
-    public static final Column QUERY_ID = new Column("sparql", "queryId");
-
     // Query Metadata columns.
     public static final Column QUERY_NODE_ID = new Column(QUERY_METADATA_CF, "nodeId");
     public static final Column QUERY_VARIABLE_ORDER = new Column(QUERY_METADATA_CF, "variableOrder");

http://git-wip-us.apache.org/repos/asf/incubator-rya/blob/05147266/extras/rya.pcj.fluo/pcj.fluo.app/src/main/java/org/apache/rya/indexing/pcj/fluo/app/query/FluoQueryMetadataDAO.java
----------------------------------------------------------------------
diff --git a/extras/rya.pcj.fluo/pcj.fluo.app/src/main/java/org/apache/rya/indexing/pcj/fluo/app/query/FluoQueryMetadataDAO.java b/extras/rya.pcj.fluo/pcj.fluo.app/src/main/java/org/apache/rya/indexing/pcj/fluo/app/query/FluoQueryMetadataDAO.java
index 1c34836..d5d9fe7 100644
--- a/extras/rya.pcj.fluo/pcj.fluo.app/src/main/java/org/apache/rya/indexing/pcj/fluo/app/query/FluoQueryMetadataDAO.java
+++ b/extras/rya.pcj.fluo/pcj.fluo.app/src/main/java/org/apache/rya/indexing/pcj/fluo/app/query/FluoQueryMetadataDAO.java
@@ -34,11 +34,11 @@ import org.apache.fluo.api.client.SnapshotBase;
 import org.apache.fluo.api.client.TransactionBase;
 import org.apache.fluo.api.data.Bytes;
 import org.apache.fluo.api.data.Column;
+import org.apache.rya.api.client.CreatePCJ.ExportStrategy;
+import org.apache.rya.api.client.CreatePCJ.QueryType;
 import org.apache.rya.indexing.pcj.fluo.app.ConstructGraph;
 import org.apache.rya.indexing.pcj.fluo.app.ConstructGraphSerializer;
 import org.apache.rya.indexing.pcj.fluo.app.IncrementalUpdateConstants;
-import org.apache.rya.indexing.pcj.fluo.app.IncrementalUpdateConstants.ExportStrategy;
-import org.apache.rya.indexing.pcj.fluo.app.IncrementalUpdateConstants.QueryType;
 import org.apache.rya.indexing.pcj.fluo.app.NodeType;
 import org.apache.rya.indexing.pcj.fluo.app.query.AggregationMetadata.AggregationElement;
 import org.apache.rya.indexing.pcj.fluo.app.query.JoinMetadata.JoinType;
@@ -585,21 +585,13 @@ public class FluoQueryMetadataDAO {
         requireNonNull(tx);
         requireNonNull(query);
         
-        QueryMetadata queryMetadata = query.getQueryMetadata();
-        final String sparql = queryMetadata.getSparql();
-        final String queryId = queryMetadata.getNodeId();
-        final String pcjId = queryMetadata.getExportId();
-        
         // The results of the query are eventually exported to an instance
         // of Rya, so store the Rya ID for the PCJ.
-        tx.set(queryId, FluoQueryColumns.RYA_PCJ_ID, pcjId);
-        tx.set(pcjId, FluoQueryColumns.PCJ_ID_QUERY_ID, queryId);
-        tx.set(Bytes.of(sparql), FluoQueryColumns.QUERY_ID, Bytes.of(queryId));
-        write(tx, queryMetadata);
+        write(tx, query.getQueryMetadata());
 
         // Write the rest of the metadata objects.
         
-        if (query.getQueryType() == QueryType.Construct) {
+        if (query.getQueryType() == QueryType.CONSTRUCT) {
             ConstructQueryMetadata constructMetadata = query.getConstructQueryMetadata().get();
             write(tx, constructMetadata);
         }
@@ -636,8 +628,9 @@ public class FluoQueryMetadataDAO {
      * @param sx - The snapshot that will be used to read the metadata from the Fluo table. (not null)
      * @param queryId - The ID of the query whose nodes will be read. (not null)
      * @return The {@link FluoQuery} that was read from table.
+     * @throws UnsupportedQueryException 
      */
-    public FluoQuery readFluoQuery(final SnapshotBase sx, final String queryId) {
+    public FluoQuery readFluoQuery(final SnapshotBase sx, final String queryId) throws UnsupportedQueryException {
         requireNonNull(sx);
         requireNonNull(queryId);
 

http://git-wip-us.apache.org/repos/asf/incubator-rya/blob/05147266/extras/rya.pcj.fluo/pcj.fluo.app/src/main/java/org/apache/rya/indexing/pcj/fluo/app/query/QueryMetadata.java
----------------------------------------------------------------------
diff --git a/extras/rya.pcj.fluo/pcj.fluo.app/src/main/java/org/apache/rya/indexing/pcj/fluo/app/query/QueryMetadata.java b/extras/rya.pcj.fluo/pcj.fluo.app/src/main/java/org/apache/rya/indexing/pcj/fluo/app/query/QueryMetadata.java
index e46b405..40c9e03 100644
--- a/extras/rya.pcj.fluo/pcj.fluo.app/src/main/java/org/apache/rya/indexing/pcj/fluo/app/query/QueryMetadata.java
+++ b/extras/rya.pcj.fluo/pcj.fluo.app/src/main/java/org/apache/rya/indexing/pcj/fluo/app/query/QueryMetadata.java
@@ -24,12 +24,11 @@ import java.util.Optional;
 import java.util.Set;
 
 import org.apache.commons.lang3.builder.EqualsBuilder;
-import org.apache.rya.indexing.pcj.fluo.app.IncrementalUpdateConstants.ExportStrategy;
-import org.apache.rya.indexing.pcj.fluo.app.IncrementalUpdateConstants.QueryType;
+import org.apache.rya.api.client.CreatePCJ.ExportStrategy;
+import org.apache.rya.api.client.CreatePCJ.QueryType;
 import org.apache.rya.indexing.pcj.storage.accumulo.VariableOrder;
 
 import com.google.common.base.Objects;
-import com.google.common.base.Preconditions;
 
 import edu.umd.cs.findbugs.annotations.DefaultAnnotation;
 import edu.umd.cs.findbugs.annotations.NonNull;

http://git-wip-us.apache.org/repos/asf/incubator-rya/blob/05147266/extras/rya.pcj.fluo/pcj.fluo.app/src/main/java/org/apache/rya/indexing/pcj/fluo/app/query/SparqlFluoQueryBuilder.java
----------------------------------------------------------------------
diff --git a/extras/rya.pcj.fluo/pcj.fluo.app/src/main/java/org/apache/rya/indexing/pcj/fluo/app/query/SparqlFluoQueryBuilder.java b/extras/rya.pcj.fluo/pcj.fluo.app/src/main/java/org/apache/rya/indexing/pcj/fluo/app/query/SparqlFluoQueryBuilder.java
index 7bf6f45..7b21575 100644
--- a/extras/rya.pcj.fluo/pcj.fluo.app/src/main/java/org/apache/rya/indexing/pcj/fluo/app/query/SparqlFluoQueryBuilder.java
+++ b/extras/rya.pcj.fluo/pcj.fluo.app/src/main/java/org/apache/rya/indexing/pcj/fluo/app/query/SparqlFluoQueryBuilder.java
@@ -40,12 +40,12 @@ import java.util.Set;
 import java.util.UUID;
 import java.util.concurrent.atomic.AtomicReference;
 
+import org.apache.rya.api.client.CreatePCJ.ExportStrategy;
+import org.apache.rya.api.client.CreatePCJ.QueryType;
 import org.apache.rya.indexing.pcj.fluo.app.ConstructGraph;
 import org.apache.rya.indexing.pcj.fluo.app.ConstructProjection;
 import org.apache.rya.indexing.pcj.fluo.app.FluoStringConverter;
 import org.apache.rya.indexing.pcj.fluo.app.IncrementalUpdateConstants;
-import org.apache.rya.indexing.pcj.fluo.app.IncrementalUpdateConstants.ExportStrategy;
-import org.apache.rya.indexing.pcj.fluo.app.IncrementalUpdateConstants.QueryType;
 import org.apache.rya.indexing.pcj.fluo.app.NodeType;
 import org.apache.rya.indexing.pcj.fluo.app.query.AggregationMetadata.AggregationElement;
 import org.apache.rya.indexing.pcj.fluo.app.query.AggregationMetadata.AggregationType;
@@ -106,7 +106,7 @@ public class SparqlFluoQueryBuilder {
   
     //Default behavior is to export to Kafka - subject to change when user can 
     //specify their own export strategy
-    private Set<ExportStrategy> exportStrategies = new HashSet<>(Arrays.asList(ExportStrategy.Kafka));
+    private Set<ExportStrategy> exportStrategies = new HashSet<>(Arrays.asList(ExportStrategy.KAFKA));
     
     public SparqlFluoQueryBuilder setSparql(String sparql) {
         this.sparql = Preconditions.checkNotNull(sparql);
@@ -145,7 +145,7 @@ public class SparqlFluoQueryBuilder {
         return this;
     }
     
-    public FluoQuery build() {
+    public FluoQuery build() throws UnsupportedQueryException {
         Preconditions.checkNotNull(sparql);
         Preconditions.checkNotNull(queryId);
         Preconditions.checkNotNull(exportStrategies);
@@ -172,10 +172,12 @@ public class SparqlFluoQueryBuilder {
         QueryMetadata.Builder queryBuilder = QueryMetadata.builder(queryId);
         //sets {@link QueryType} and VariableOrder
         setVarOrderAndQueryType(queryBuilder, te);
-        queryBuilder.setSparql(sparql);
-        queryBuilder.setChildNodeId(childNodeId);
-        queryBuilder.setExportStrategies(exportStrategies);
-        queryBuilder.setJoinBatchSize(joinBatchSize);
+        queryBuilder
+            .setSparql(sparql)
+            .setChildNodeId(childNodeId)
+            .setExportStrategies(exportStrategies)
+            .setJoinBatchSize(joinBatchSize);
+        
         fluoQueryBuilder.setQueryMetadata(queryBuilder);
         
         setChildMetadata(fluoQueryBuilder, childNodeId, queryBuilder.getVariableOrder(), queryId);
@@ -800,7 +802,7 @@ public class SparqlFluoQueryBuilder {
             }
             
             if(queryType == null) {
-                queryType = QueryType.Projection;
+                queryType = QueryType.PROJECTION;
             }
             super.meet(node);
         }
@@ -811,14 +813,14 @@ public class SparqlFluoQueryBuilder {
             }
             
             if(queryType == null) {
-                queryType = QueryType.Construct;
+                queryType = QueryType.CONSTRUCT;
             }
             super.meet(node);
         }
         
         public void meetOther(final QueryModelNode node) throws Exception {
             if (node instanceof PeriodicQueryNode) {
-                queryType = QueryType.Periodic;
+                queryType = QueryType.PERIODIC;
             } else {
                 super.meetOther(node);
             }

http://git-wip-us.apache.org/repos/asf/incubator-rya/blob/05147266/extras/rya.pcj.fluo/pcj.fluo.app/src/main/java/org/apache/rya/indexing/pcj/fluo/app/query/UnsupportedQueryException.java
----------------------------------------------------------------------
diff --git a/extras/rya.pcj.fluo/pcj.fluo.app/src/main/java/org/apache/rya/indexing/pcj/fluo/app/query/UnsupportedQueryException.java b/extras/rya.pcj.fluo/pcj.fluo.app/src/main/java/org/apache/rya/indexing/pcj/fluo/app/query/UnsupportedQueryException.java
new file mode 100644
index 0000000..155b8da
--- /dev/null
+++ b/extras/rya.pcj.fluo/pcj.fluo.app/src/main/java/org/apache/rya/indexing/pcj/fluo/app/query/UnsupportedQueryException.java
@@ -0,0 +1,41 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *   http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied.  See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+package org.apache.rya.indexing.pcj.fluo.app.query;
+
+import org.apache.rya.api.client.CreatePCJ.ExportStrategy;
+
+/**
+ * This Exception is thrown if the Rya Fluo Application does not support
+ * the given SPARQL query.  This can happen for a number of reasons; the
+ * two most common are that the query contains some combination of query nodes
+ * that the application cannot evaluate, or that the {@link ExportStrategy} of the query
+ * is incompatible with one of its query nodes.
+ *
+ */
+public class UnsupportedQueryException extends Exception {
+    private static final long serialVersionUID = 1L;
+
+    public UnsupportedQueryException(final String message) {
+        super(message);
+    }
+
+    public UnsupportedQueryException(final String message, final Throwable cause) {
+        super(message, cause);
+    }
+}
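
For anyone reviewing how the new checked exception surfaces to callers, here is a minimal sketch of building a FluoQuery and handling the rejection. The exception type and the fact that SparqlFluoQueryBuilder.build() now declares it come from the hunks in this patch; the no-arg constructor, the setter names, and the example SPARQL string are assumptions drawn from the test changes elsewhere in this commit, not part of the diff itself.

import org.apache.rya.indexing.pcj.fluo.app.NodeType;
import org.apache.rya.indexing.pcj.fluo.app.query.FluoQuery;
import org.apache.rya.indexing.pcj.fluo.app.query.SparqlFluoQueryBuilder;
import org.apache.rya.indexing.pcj.fluo.app.query.UnsupportedQueryException;

public class UnsupportedQuerySketch {
    public static void main(final String[] args) {
        // Illustrative query only; any SPARQL the Fluo app cannot evaluate would do.
        final String sparql = "SELECT ?x WHERE { ?x <urn:talksTo> <urn:Bob> . }";

        final SparqlFluoQueryBuilder builder = new SparqlFluoQueryBuilder();
        builder.setSparql(sparql);
        builder.setFluoQueryId(NodeType.generateNewFluoIdForType(NodeType.QUERY));

        try {
            // build() now declares UnsupportedQueryException instead of failing unchecked.
            final FluoQuery fluoQuery = builder.build();
            System.out.println("Query accepted: " + fluoQuery.getQueryType());
        } catch (final UnsupportedQueryException e) {
            // Raised when the query mixes nodes or export strategies the app cannot evaluate.
            System.err.println("Query rejected: " + e.getMessage());
        }
    }
}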

http://git-wip-us.apache.org/repos/asf/incubator-rya/blob/05147266/extras/rya.pcj.fluo/pcj.fluo.app/src/main/java/org/apache/rya/indexing/pcj/fluo/app/util/FluoQueryUtils.java
----------------------------------------------------------------------
diff --git a/extras/rya.pcj.fluo/pcj.fluo.app/src/main/java/org/apache/rya/indexing/pcj/fluo/app/util/FluoQueryUtils.java b/extras/rya.pcj.fluo/pcj.fluo.app/src/main/java/org/apache/rya/indexing/pcj/fluo/app/util/FluoQueryUtils.java
index ac41160..7a5b439 100644
--- a/extras/rya.pcj.fluo/pcj.fluo.app/src/main/java/org/apache/rya/indexing/pcj/fluo/app/util/FluoQueryUtils.java
+++ b/extras/rya.pcj.fluo/pcj.fluo.app/src/main/java/org/apache/rya/indexing/pcj/fluo/app/util/FluoQueryUtils.java
@@ -19,6 +19,7 @@
 package org.apache.rya.indexing.pcj.fluo.app.util;
 
 import java.util.List;
+import java.util.UUID;
 
 import org.apache.rya.indexing.pcj.fluo.app.IncrementalUpdateConstants;
 import org.apache.rya.indexing.pcj.fluo.app.query.FluoQuery;
@@ -60,6 +61,13 @@ public class FluoQueryUtils {
     }
     
     /**
+     * @return - A new pcjId, which is a UUID with all dashes removed
+     */
+    public static String createNewPcjId() {
+        return UUID.randomUUID().toString().replaceAll("-", "");
+    }
+    
+    /**
      * Uses a {@link NodeIdCollector} visitor to do a pre-order traverse of the
      * FluoQuery and gather the nodeIds of the metadata nodes.
      * @param query - FluoQuery to be traversed
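
A purely illustrative note on the id format the new helper produces; nothing here depends on a running Rya instance, and the class wrapper is only there to make the fragment compile on its own.

import org.apache.rya.indexing.pcj.fluo.app.util.FluoQueryUtils;

public class PcjIdSketch {
    public static void main(final String[] args) {
        // createNewPcjId() strips the dashes from a random UUID.
        final String pcjId = FluoQueryUtils.createNewPcjId();
        System.out.println(pcjId);               // e.g. 2f5c9a1e03d94c1f8a7d6b2e4c9f0a3b
        System.out.println(pcjId.length());      // 32 (a 36-character UUID minus its 4 dashes)
        System.out.println(pcjId.contains("-")); // false
    }
}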

http://git-wip-us.apache.org/repos/asf/incubator-rya/blob/05147266/extras/rya.pcj.fluo/pcj.fluo.app/src/test/java/org/apache/rya/indexing/pcj/fluo/app/export/rya/KafkaExportParametersTest.java
----------------------------------------------------------------------
diff --git a/extras/rya.pcj.fluo/pcj.fluo.app/src/test/java/org/apache/rya/indexing/pcj/fluo/app/export/rya/KafkaExportParametersTest.java b/extras/rya.pcj.fluo/pcj.fluo.app/src/test/java/org/apache/rya/indexing/pcj/fluo/app/export/rya/KafkaExportParametersTest.java
index b9c10d4..cd21ed6 100644
--- a/extras/rya.pcj.fluo/pcj.fluo.app/src/test/java/org/apache/rya/indexing/pcj/fluo/app/export/rya/KafkaExportParametersTest.java
+++ b/extras/rya.pcj.fluo/pcj.fluo.app/src/test/java/org/apache/rya/indexing/pcj/fluo/app/export/rya/KafkaExportParametersTest.java
@@ -27,12 +27,13 @@ import java.util.HashMap;
 import java.util.Map;
 import java.util.Properties;
 
-import org.apache.rya.indexing.pcj.fluo.app.export.kafka.KafkaExportParameters;
+import org.apache.rya.indexing.pcj.fluo.app.export.kafka.KafkaExportParameterBase;
 import org.apache.rya.indexing.pcj.fluo.app.export.kafka.KafkaBindingSetExporterFactory;
+import org.apache.rya.indexing.pcj.fluo.app.export.kafka.KafkaBindingSetExporterParameters;
 import org.junit.Test;
 
 /**
- * Tests the methods of {@link KafkaExportParameters}.
+ * Tests the methods of {@link KafkaExportParameterBase}.
  */
 public class KafkaExportParametersTest {
 
@@ -41,19 +42,19 @@ public class KafkaExportParametersTest {
         final Map<String, String> params = new HashMap<>();
 
         // Load some values into the params using the wrapper.
-        final KafkaExportParameters kafkaParams = new KafkaExportParameters(params);
-        kafkaParams.setExportToKafka(true);
+        final KafkaBindingSetExporterParameters kafkaParams = new KafkaBindingSetExporterParameters(params);
+        kafkaParams.setUseKafkaBindingSetExporter(true);
 
         // Ensure the params map has the expected values.
         final Map<String, String> expectedParams = new HashMap<>();
-        expectedParams.put(KafkaExportParameters.CONF_EXPORT_TO_KAFKA, "true");
-        assertTrue(kafkaParams.isExportToKafka());
+        expectedParams.put(KafkaBindingSetExporterParameters.CONF_USE_KAFKA_BINDING_SET_EXPORTER, "true");
+        assertTrue(kafkaParams.getUseKafkaBindingSetExporter());
         assertEquals(expectedParams, params);
 
         // now go the other way.
-        expectedParams.put(KafkaExportParameters.CONF_EXPORT_TO_KAFKA, "false");
-        kafkaParams.setExportToKafka(false);
-        assertFalse(kafkaParams.isExportToKafka());
+        expectedParams.put(KafkaBindingSetExporterParameters.CONF_USE_KAFKA_BINDING_SET_EXPORTER, "false");
+        kafkaParams.setUseKafkaBindingSetExporter(false);
+        assertFalse(kafkaParams.getUseKafkaBindingSetExporter());
         assertEquals(expectedParams, params);
     }
     @Test
@@ -68,7 +69,7 @@ public class KafkaExportParametersTest {
         // Make sure export key1 is NOT kept separate from producer config key1
         // This is a change, originally they were kept separate.
         params.put(key1, value1First);
-        final KafkaExportParameters kafkaParams = new KafkaExportParameters(params);
+        final KafkaExportParameterBase kafkaParams = new KafkaExportParameterBase(params);
         // Load some values into the properties using the wrapper.
         Properties props = new Properties();
         props.put(key1, value1Second);
@@ -87,8 +88,8 @@ public class KafkaExportParametersTest {
         final Map<String, String> params = new HashMap<>();
 
         // Ensure an unconfigured parameters map will say kafka export is disabled.
-        final KafkaExportParameters kafkaParams = new KafkaExportParameters(params);
-        assertFalse(kafkaParams.isExportToKafka());
+        final KafkaBindingSetExporterParameters kafkaParams = new KafkaBindingSetExporterParameters(params);
+        assertFalse(kafkaParams.getUseKafkaBindingSetExporter());
     }
 
     @Test

http://git-wip-us.apache.org/repos/asf/incubator-rya/blob/05147266/extras/rya.pcj.fluo/pcj.fluo.app/src/test/java/org/apache/rya/indexing/pcj/fluo/app/export/rya/RyaExportParametersTest.java
----------------------------------------------------------------------
diff --git a/extras/rya.pcj.fluo/pcj.fluo.app/src/test/java/org/apache/rya/indexing/pcj/fluo/app/export/rya/RyaExportParametersTest.java b/extras/rya.pcj.fluo/pcj.fluo.app/src/test/java/org/apache/rya/indexing/pcj/fluo/app/export/rya/RyaExportParametersTest.java
index 9ac5139..5653312 100644
--- a/extras/rya.pcj.fluo/pcj.fluo.app/src/test/java/org/apache/rya/indexing/pcj/fluo/app/export/rya/RyaExportParametersTest.java
+++ b/extras/rya.pcj.fluo/pcj.fluo.app/src/test/java/org/apache/rya/indexing/pcj/fluo/app/export/rya/RyaExportParametersTest.java
@@ -37,7 +37,7 @@ public class RyaExportParametersTest {
 
         // Load some values into the params using the wrapper.
         final RyaExportParameters ryaParams = new RyaExportParameters(params);
-        ryaParams.setExportToRya(true);
+        ryaParams.setUseRyaBindingSetExporter(true);
         ryaParams.setAccumuloInstanceName("demoAccumulo");
         ryaParams.setZookeeperServers("zoo1;zoo2");
         ryaParams.setExporterUsername("fluo");
@@ -45,7 +45,7 @@ public class RyaExportParametersTest {
 
         // Ensure the params map has the expected values.
         final Map<String, String> expectedParams = new HashMap<>();
-        expectedParams.put(RyaExportParameters.CONF_EXPORT_TO_RYA, "true");
+        expectedParams.put(RyaExportParameters.CONF_USE_RYA_BINDING_SET_EXPORTER, "true");
         expectedParams.put(RyaExportParameters.CONF_ACCUMULO_INSTANCE_NAME, "demoAccumulo");
         expectedParams.put(RyaExportParameters.CONF_ZOOKEEPER_SERVERS, "zoo1;zoo2");
         expectedParams.put(RyaExportParameters.CONF_EXPORTER_USERNAME, "fluo");
@@ -60,6 +60,6 @@ public class RyaExportParametersTest {
 
         // Ensure an unconfigured parameters map will say rya export is disabled.
         final RyaExportParameters ryaParams = new RyaExportParameters(params);
-        assertFalse(ryaParams.isExportToRya());
+        assertFalse(ryaParams.getUseRyaBindingSetExporter());
     }
 }
\ No newline at end of file

http://git-wip-us.apache.org/repos/asf/incubator-rya/blob/05147266/extras/rya.pcj.fluo/pcj.fluo.app/src/test/java/org/apache/rya/indexing/pcj/fluo/app/query/PeriodicQueryUtilTest.java
----------------------------------------------------------------------
diff --git a/extras/rya.pcj.fluo/pcj.fluo.app/src/test/java/org/apache/rya/indexing/pcj/fluo/app/query/PeriodicQueryUtilTest.java b/extras/rya.pcj.fluo/pcj.fluo.app/src/test/java/org/apache/rya/indexing/pcj/fluo/app/query/PeriodicQueryUtilTest.java
index b40ba3f..55455a7 100644
--- a/extras/rya.pcj.fluo/pcj.fluo.app/src/test/java/org/apache/rya/indexing/pcj/fluo/app/query/PeriodicQueryUtilTest.java
+++ b/extras/rya.pcj.fluo/pcj.fluo.app/src/test/java/org/apache/rya/indexing/pcj/fluo/app/query/PeriodicQueryUtilTest.java
@@ -154,7 +154,7 @@ public class PeriodicQueryUtilTest {
     }
     
     @Test
-    public void testFluoQueryVarOrders() throws MalformedQueryException {
+    public void testFluoQueryVarOrders() throws MalformedQueryException, UnsupportedQueryException {
         String query = "prefix function: <http://org.apache.rya/function#> " //n
                 + "prefix time: <http://www.w3.org/2006/time#> " //n
                 + "select (count(?obs) as ?total) where {" //n

http://git-wip-us.apache.org/repos/asf/incubator-rya/blob/05147266/extras/rya.pcj.fluo/pcj.fluo.app/src/test/java/org/apache/rya/indexing/pcj/fluo/app/query/QueryMetadataVisitorTest.java
----------------------------------------------------------------------
diff --git a/extras/rya.pcj.fluo/pcj.fluo.app/src/test/java/org/apache/rya/indexing/pcj/fluo/app/query/QueryMetadataVisitorTest.java b/extras/rya.pcj.fluo/pcj.fluo.app/src/test/java/org/apache/rya/indexing/pcj/fluo/app/query/QueryMetadataVisitorTest.java
index 5c89a75..48f2f39 100644
--- a/extras/rya.pcj.fluo/pcj.fluo.app/src/test/java/org/apache/rya/indexing/pcj/fluo/app/query/QueryMetadataVisitorTest.java
+++ b/extras/rya.pcj.fluo/pcj.fluo.app/src/test/java/org/apache/rya/indexing/pcj/fluo/app/query/QueryMetadataVisitorTest.java
@@ -29,7 +29,7 @@ import org.junit.Test;
 public class QueryMetadataVisitorTest {
 
     @Test
-    public void builderTest() {
+    public void builderTest() throws UnsupportedQueryException {
         String query = "prefix function: <http://org.apache.rya/function#> " // n
                 + "prefix time: <http://www.w3.org/2006/time#> " // n
                 + "select ?id (count(?obs) as ?total) where {" // n

http://git-wip-us.apache.org/repos/asf/incubator-rya/blob/05147266/extras/rya.pcj.fluo/pcj.fluo.client/src/main/java/org/apache/rya/indexing/pcj/fluo/client/PcjAdminClient.java
----------------------------------------------------------------------
diff --git a/extras/rya.pcj.fluo/pcj.fluo.client/src/main/java/org/apache/rya/indexing/pcj/fluo/client/PcjAdminClient.java b/extras/rya.pcj.fluo/pcj.fluo.client/src/main/java/org/apache/rya/indexing/pcj/fluo/client/PcjAdminClient.java
index 901f39d..cc74f6b 100644
--- a/extras/rya.pcj.fluo/pcj.fluo.client/src/main/java/org/apache/rya/indexing/pcj/fluo/client/PcjAdminClient.java
+++ b/extras/rya.pcj.fluo/pcj.fluo.client/src/main/java/org/apache/rya/indexing/pcj/fluo/client/PcjAdminClient.java
@@ -43,6 +43,7 @@ import org.apache.accumulo.core.client.ZooKeeperInstance;
 import org.apache.accumulo.core.client.security.tokens.PasswordToken;
 import org.apache.logging.log4j.LogManager;
 import org.apache.logging.log4j.Logger;
+import org.apache.rya.indexing.pcj.fluo.app.query.UnsupportedQueryException;
 import org.apache.rya.indexing.pcj.fluo.client.PcjAdminClientCommand.ArgumentsException;
 import org.apache.rya.indexing.pcj.fluo.client.PcjAdminClientCommand.ExecutionException;
 import org.apache.rya.indexing.pcj.fluo.client.command.CountUnprocessedStatementsCommand;
@@ -152,6 +153,9 @@ public class PcjAdminClient {
             System.err.println("Could not execute the command.");
             e.printStackTrace();
             System.exit(-1);
+        } catch (UnsupportedQueryException e) {
+            System.err.println("Could not execute the command because the query is invalid.");
+            e.printStackTrace();
         } finally {
             log.trace("Shutting down the PCJ Admin Client.");
 

http://git-wip-us.apache.org/repos/asf/incubator-rya/blob/05147266/extras/rya.pcj.fluo/pcj.fluo.client/src/main/java/org/apache/rya/indexing/pcj/fluo/client/PcjAdminClientCommand.java
----------------------------------------------------------------------
diff --git a/extras/rya.pcj.fluo/pcj.fluo.client/src/main/java/org/apache/rya/indexing/pcj/fluo/client/PcjAdminClientCommand.java b/extras/rya.pcj.fluo/pcj.fluo.client/src/main/java/org/apache/rya/indexing/pcj/fluo/client/PcjAdminClientCommand.java
index 2b3b105..a944b33 100644
--- a/extras/rya.pcj.fluo/pcj.fluo.client/src/main/java/org/apache/rya/indexing/pcj/fluo/client/PcjAdminClientCommand.java
+++ b/extras/rya.pcj.fluo/pcj.fluo.client/src/main/java/org/apache/rya/indexing/pcj/fluo/client/PcjAdminClientCommand.java
@@ -24,6 +24,7 @@ import edu.umd.cs.findbugs.annotations.NonNull;
 import org.apache.accumulo.core.client.Connector;
 
 import org.apache.fluo.api.client.FluoClient;
+import org.apache.rya.indexing.pcj.fluo.app.query.UnsupportedQueryException;
 import org.apache.rya.rdftriplestore.RyaSailRepository;
 
 /**
@@ -57,13 +58,14 @@ public interface PcjAdminClientCommand {
      * @param rya - A connection to the Rya instance used to search for historic PCJ matches. (not null)
      * @param client - A connection to the Fluo app that is updating the PCJs. (not null)
      * @param args - Command line arguments that configure how the command will execute. (not null)
+     * @throws UnsupportedQueryException 
      */
     public void execute(
             final Connector accumulo,
             final String ryaTablePrefix,
             final RyaSailRepository rya,
             final FluoClient fluo,
-            final String[] args) throws ArgumentsException, ExecutionException;
+            final String[] args) throws ArgumentsException, ExecutionException, UnsupportedQueryException;
 
     /**
      * A {@link PcjAdminClientCommand} could not be executed because of a problem with

http://git-wip-us.apache.org/repos/asf/incubator-rya/blob/05147266/extras/rya.pcj.fluo/pcj.fluo.client/src/main/java/org/apache/rya/indexing/pcj/fluo/client/command/NewQueryCommand.java
----------------------------------------------------------------------
diff --git a/extras/rya.pcj.fluo/pcj.fluo.client/src/main/java/org/apache/rya/indexing/pcj/fluo/client/command/NewQueryCommand.java b/extras/rya.pcj.fluo/pcj.fluo.client/src/main/java/org/apache/rya/indexing/pcj/fluo/client/command/NewQueryCommand.java
index 3f335f4..78515d9 100644
--- a/extras/rya.pcj.fluo/pcj.fluo.client/src/main/java/org/apache/rya/indexing/pcj/fluo/client/command/NewQueryCommand.java
+++ b/extras/rya.pcj.fluo/pcj.fluo.client/src/main/java/org/apache/rya/indexing/pcj/fluo/client/command/NewQueryCommand.java
@@ -42,6 +42,7 @@ import org.apache.rya.accumulo.AccumuloRdfConfiguration;
 import org.apache.rya.accumulo.query.AccumuloRyaQueryEngine;
 import org.apache.rya.api.persist.RyaDAOException;
 import org.apache.rya.indexing.pcj.fluo.api.CreateFluoPcj;
+import org.apache.rya.indexing.pcj.fluo.app.query.UnsupportedQueryException;
 import org.apache.rya.indexing.pcj.fluo.client.PcjAdminClientCommand;
 import org.apache.rya.indexing.pcj.fluo.client.util.ParsedQueryRequest;
 import org.apache.rya.indexing.pcj.storage.PcjException;
@@ -94,7 +95,7 @@ public class NewQueryCommand implements PcjAdminClientCommand {
     }
 
     @Override
-    public void execute(final Connector accumulo, final String ryaTablePrefix, final RyaSailRepository rya, final FluoClient fluo, final String[] args) throws ArgumentsException, ExecutionException {
+    public void execute(final Connector accumulo, final String ryaTablePrefix, final RyaSailRepository rya, final FluoClient fluo, final String[] args) throws ArgumentsException, ExecutionException, UnsupportedQueryException {
         checkNotNull(accumulo);
         checkNotNull(fluo);
         checkNotNull(args);

http://git-wip-us.apache.org/repos/asf/incubator-rya/blob/05147266/extras/rya.pcj.fluo/pcj.fluo.client/src/main/java/org/apache/rya/indexing/pcj/fluo/client/command/QueryReportCommand.java
----------------------------------------------------------------------
diff --git a/extras/rya.pcj.fluo/pcj.fluo.client/src/main/java/org/apache/rya/indexing/pcj/fluo/client/command/QueryReportCommand.java b/extras/rya.pcj.fluo/pcj.fluo.client/src/main/java/org/apache/rya/indexing/pcj/fluo/client/command/QueryReportCommand.java
index 675a844..2a7f787 100644
--- a/extras/rya.pcj.fluo/pcj.fluo.client/src/main/java/org/apache/rya/indexing/pcj/fluo/client/command/QueryReportCommand.java
+++ b/extras/rya.pcj.fluo/pcj.fluo.client/src/main/java/org/apache/rya/indexing/pcj/fluo/client/command/QueryReportCommand.java
@@ -26,6 +26,7 @@ import org.apache.logging.log4j.Logger;
 import org.apache.rya.indexing.pcj.fluo.client.PcjAdminClientCommand;
 import org.apache.rya.indexing.pcj.fluo.api.GetQueryReport;
 import org.apache.rya.indexing.pcj.fluo.api.GetQueryReport.QueryReport;
+import org.apache.rya.indexing.pcj.fluo.app.query.UnsupportedQueryException;
 import org.apache.rya.indexing.pcj.fluo.client.util.QueryReportRenderer;
 
 import com.beust.jcommander.JCommander;
@@ -69,7 +70,7 @@ public class QueryReportCommand implements PcjAdminClientCommand {
     }
 
     @Override
-    public void execute(final Connector accumulo, final String ryaTablePrefix, final RyaSailRepository rya, final FluoClient fluo, final String[] args) throws ArgumentsException, ExecutionException {
+    public void execute(final Connector accumulo, final String ryaTablePrefix, final RyaSailRepository rya, final FluoClient fluo, final String[] args) throws ArgumentsException, ExecutionException, UnsupportedQueryException {
         checkNotNull(accumulo);
         checkNotNull(ryaTablePrefix);
         checkNotNull(rya);

http://git-wip-us.apache.org/repos/asf/incubator-rya/blob/05147266/extras/rya.pcj.fluo/pcj.fluo.client/src/main/java/org/apache/rya/indexing/pcj/fluo/client/util/QueryReportRenderer.java
----------------------------------------------------------------------
diff --git a/extras/rya.pcj.fluo/pcj.fluo.client/src/main/java/org/apache/rya/indexing/pcj/fluo/client/util/QueryReportRenderer.java b/extras/rya.pcj.fluo/pcj.fluo.client/src/main/java/org/apache/rya/indexing/pcj/fluo/client/util/QueryReportRenderer.java
index f44db6c..d1b3e25 100644
--- a/extras/rya.pcj.fluo/pcj.fluo.client/src/main/java/org/apache/rya/indexing/pcj/fluo/client/util/QueryReportRenderer.java
+++ b/extras/rya.pcj.fluo/pcj.fluo.client/src/main/java/org/apache/rya/indexing/pcj/fluo/client/util/QueryReportRenderer.java
@@ -20,12 +20,9 @@ package org.apache.rya.indexing.pcj.fluo.client.util;
 
 import static com.google.common.base.Preconditions.checkNotNull;
 
-import edu.umd.cs.findbugs.annotations.DefaultAnnotation;
-import edu.umd.cs.findbugs.annotations.NonNull;
-
 import org.apache.commons.lang3.StringUtils;
+import org.apache.rya.api.client.CreatePCJ.QueryType;
 import org.apache.rya.indexing.pcj.fluo.api.GetQueryReport.QueryReport;
-import org.apache.rya.indexing.pcj.fluo.app.IncrementalUpdateConstants.QueryType;
 import org.apache.rya.indexing.pcj.fluo.app.query.ConstructQueryMetadata;
 import org.apache.rya.indexing.pcj.fluo.app.query.FilterMetadata;
 import org.apache.rya.indexing.pcj.fluo.app.query.FluoQuery;
@@ -38,6 +35,9 @@ import org.openrdf.query.parser.ParsedQuery;
 import org.openrdf.query.parser.sparql.SPARQLParser;
 import org.openrdf.queryrender.sparql.SPARQLQueryRenderer;
 
+import edu.umd.cs.findbugs.annotations.DefaultAnnotation;
+import edu.umd.cs.findbugs.annotations.NonNull;
+
 /**
  * Pretty renders a {@link QueryReport}.
  */
@@ -70,7 +70,7 @@ public class QueryReportRenderer {
         
         
         
-        if (metadata.getQueryType() == QueryType.Construct) {
+        if (metadata.getQueryType() == QueryType.CONSTRUCT) {
             builder.appendItem( new ReportItem("") );
             
             final ConstructQueryMetadata constructMetadata = metadata.getConstructQueryMetadata().get();

http://git-wip-us.apache.org/repos/asf/incubator-rya/blob/05147266/extras/rya.pcj.fluo/pcj.fluo.demo/src/main/java/org/apache/rya/indexing/pcj/fluo/demo/DemoDriver.java
----------------------------------------------------------------------
diff --git a/extras/rya.pcj.fluo/pcj.fluo.demo/src/main/java/org/apache/rya/indexing/pcj/fluo/demo/DemoDriver.java b/extras/rya.pcj.fluo/pcj.fluo.demo/src/main/java/org/apache/rya/indexing/pcj/fluo/demo/DemoDriver.java
index e8f10b8..1ae02dd 100644
--- a/extras/rya.pcj.fluo/pcj.fluo.demo/src/main/java/org/apache/rya/indexing/pcj/fluo/demo/DemoDriver.java
+++ b/extras/rya.pcj.fluo/pcj.fluo.demo/src/main/java/org/apache/rya/indexing/pcj/fluo/demo/DemoDriver.java
@@ -304,7 +304,7 @@ public class DemoDriver {
         // Provide export parameters child test classes may provide to the export observer.
         final HashMap<String, String> exportParams = new HashMap<>();
         final RyaExportParameters ryaParams = new RyaExportParameters(exportParams);
-        ryaParams.setExportToRya(true);
+        ryaParams.setUseRyaBindingSetExporter(true);
         ryaParams.setAccumuloInstanceName(accumulo.getInstanceName());
         ryaParams.setZookeeperServers(accumulo.getZooKeepers());
         ryaParams.setExporterUsername("root");

http://git-wip-us.apache.org/repos/asf/incubator-rya/blob/05147266/extras/rya.pcj.fluo/pcj.fluo.demo/src/main/java/org/apache/rya/indexing/pcj/fluo/demo/FluoAndHistoricPcjsDemo.java
----------------------------------------------------------------------
diff --git a/extras/rya.pcj.fluo/pcj.fluo.demo/src/main/java/org/apache/rya/indexing/pcj/fluo/demo/FluoAndHistoricPcjsDemo.java b/extras/rya.pcj.fluo/pcj.fluo.demo/src/main/java/org/apache/rya/indexing/pcj/fluo/demo/FluoAndHistoricPcjsDemo.java
index f25b573..4070849 100644
--- a/extras/rya.pcj.fluo/pcj.fluo.demo/src/main/java/org/apache/rya/indexing/pcj/fluo/demo/FluoAndHistoricPcjsDemo.java
+++ b/extras/rya.pcj.fluo/pcj.fluo.demo/src/main/java/org/apache/rya/indexing/pcj/fluo/demo/FluoAndHistoricPcjsDemo.java
@@ -18,7 +18,6 @@
  */
 package org.apache.rya.indexing.pcj.fluo.demo;
 
-import java.io.IOException;
 import java.util.Set;
 
 import org.apache.accumulo.core.client.Connector;
@@ -35,22 +34,20 @@ import org.apache.rya.api.persist.RyaDAOException;
 import org.apache.rya.api.resolver.RyaToRdfConversions;
 import org.apache.rya.indexing.pcj.fluo.api.CreateFluoPcj;
 import org.apache.rya.indexing.pcj.fluo.api.InsertTriples;
+import org.apache.rya.indexing.pcj.fluo.app.query.UnsupportedQueryException;
 import org.apache.rya.indexing.pcj.storage.PcjException;
 import org.apache.rya.indexing.pcj.storage.PrecomputedJoinStorage;
 import org.apache.rya.indexing.pcj.storage.PrecomputedJoinStorage.CloseableIterator;
-import org.apache.rya.indexing.pcj.storage.PrecomputedJoinStorage.PCJStorageException;
 import org.apache.rya.indexing.pcj.storage.accumulo.AccumuloPcjStorage;
 import org.apache.rya.rdftriplestore.RyaSailRepository;
 import org.openrdf.model.Statement;
 import org.openrdf.query.BindingSet;
 import org.openrdf.query.MalformedQueryException;
-import org.openrdf.query.QueryEvaluationException;
 import org.openrdf.query.parser.ParsedQuery;
 import org.openrdf.query.parser.sparql.SPARQLParser;
 import org.openrdf.queryrender.sparql.SPARQLQueryRenderer;
 import org.openrdf.repository.RepositoryConnection;
 import org.openrdf.repository.RepositoryException;
-import org.openrdf.sail.SailException;
 
 import com.google.common.base.Optional;
 import com.google.common.collect.Sets;
@@ -181,7 +178,7 @@ public class FluoAndHistoricPcjsDemo implements Demo {
             // Tell the Fluo app to maintain it.
             new CreateFluoPcj().withRyaIntegration(pcjId, pcjStorage, fluoClient, accumuloConn, ryaTablePrefix);
 
-        } catch (MalformedQueryException | PcjException | RyaDAOException e) {
+        } catch (MalformedQueryException | PcjException | RyaDAOException | UnsupportedQueryException e) {
             throw new DemoExecutionException("Error while using Fluo to compute and export historic matches, so the demo can not continue. Exiting.", e);
         }
 

http://git-wip-us.apache.org/repos/asf/incubator-rya/blob/05147266/extras/rya.pcj.fluo/pcj.fluo.integration/src/test/java/org/apache/rya/indexing/pcj/fluo/api/GetPcjMetadataIT.java
----------------------------------------------------------------------
diff --git a/extras/rya.pcj.fluo/pcj.fluo.integration/src/test/java/org/apache/rya/indexing/pcj/fluo/api/GetPcjMetadataIT.java b/extras/rya.pcj.fluo/pcj.fluo.integration/src/test/java/org/apache/rya/indexing/pcj/fluo/api/GetPcjMetadataIT.java
index 263a19e..7676657 100644
--- a/extras/rya.pcj.fluo/pcj.fluo.integration/src/test/java/org/apache/rya/indexing/pcj/fluo/api/GetPcjMetadataIT.java
+++ b/extras/rya.pcj.fluo/pcj.fluo.integration/src/test/java/org/apache/rya/indexing/pcj/fluo/api/GetPcjMetadataIT.java
@@ -32,6 +32,7 @@ import org.apache.fluo.api.client.FluoFactory;
 import org.apache.rya.api.persist.RyaDAOException;
 import org.apache.rya.indexing.pcj.fluo.api.GetPcjMetadata.NotInAccumuloException;
 import org.apache.rya.indexing.pcj.fluo.api.GetPcjMetadata.NotInFluoException;
+import org.apache.rya.indexing.pcj.fluo.app.query.UnsupportedQueryException;
 import org.apache.rya.indexing.pcj.storage.PcjException;
 import org.apache.rya.indexing.pcj.storage.PcjMetadata;
 import org.apache.rya.indexing.pcj.storage.PrecomputedJoinStorage;
@@ -53,7 +54,7 @@ import com.google.common.collect.Sets;
 public class GetPcjMetadataIT extends RyaExportITBase {
 
     @Test
-    public void getMetadataByQueryId() throws RepositoryException, MalformedQueryException, SailException, QueryEvaluationException, PcjException, NotInFluoException, NotInAccumuloException, RyaDAOException {
+    public void getMetadataByQueryId() throws RepositoryException, MalformedQueryException, SailException, QueryEvaluationException, PcjException, NotInFluoException, NotInAccumuloException, RyaDAOException, UnsupportedQueryException {
         final String sparql =
                 "SELECT ?x " +
                   "WHERE { " +
@@ -82,7 +83,7 @@ public class GetPcjMetadataIT extends RyaExportITBase {
     }
 
     @Test
-    public void getAllMetadata() throws MalformedQueryException, SailException, QueryEvaluationException, PcjException, NotInFluoException, NotInAccumuloException, AccumuloException, AccumuloSecurityException, RyaDAOException {
+    public void getAllMetadata() throws MalformedQueryException, SailException, QueryEvaluationException, PcjException, NotInFluoException, NotInAccumuloException, AccumuloException, AccumuloSecurityException, RyaDAOException, UnsupportedQueryException {
         final Connector accumuloConn = super.getAccumuloConnector();
         final PrecomputedJoinStorage pcjStorage = new AccumuloPcjStorage(accumuloConn, getRyaInstanceName());
 

http://git-wip-us.apache.org/repos/asf/incubator-rya/blob/05147266/extras/rya.pcj.fluo/pcj.fluo.integration/src/test/java/org/apache/rya/indexing/pcj/fluo/api/ListQueryIdsIT.java
----------------------------------------------------------------------
diff --git a/extras/rya.pcj.fluo/pcj.fluo.integration/src/test/java/org/apache/rya/indexing/pcj/fluo/api/ListQueryIdsIT.java b/extras/rya.pcj.fluo/pcj.fluo.integration/src/test/java/org/apache/rya/indexing/pcj/fluo/api/ListQueryIdsIT.java
index e3914bd..3310690 100644
--- a/extras/rya.pcj.fluo/pcj.fluo.integration/src/test/java/org/apache/rya/indexing/pcj/fluo/api/ListQueryIdsIT.java
+++ b/extras/rya.pcj.fluo/pcj.fluo.integration/src/test/java/org/apache/rya/indexing/pcj/fluo/api/ListQueryIdsIT.java
@@ -18,7 +18,7 @@
  */
 package org.apache.rya.indexing.pcj.fluo.api;
 
-import static org.apache.rya.indexing.pcj.fluo.app.query.FluoQueryColumns.QUERY_ID;
+import static org.apache.rya.indexing.pcj.fluo.app.query.FluoQueryColumns.QUERY_NODE_ID;
 import static org.junit.Assert.assertEquals;
 
 import java.util.List;
@@ -49,10 +49,10 @@ public class ListQueryIdsIT extends RyaExportITBase {
         try(FluoClient fluoClient = FluoFactory.newClient(super.getFluoConfiguration())) {
             // Store a few SPARQL/Query ID pairs in the Fluo table.
             try(Transaction tx = fluoClient.newTransaction()) {
-                tx.set("SPARQL_3", QUERY_ID, "ID_3");
-                tx.set("SPARQL_1", QUERY_ID, "ID_1");
-                tx.set("SPARQL_4", QUERY_ID, "ID_4");
-                tx.set("SPARQL_2", QUERY_ID, "ID_2");
+                tx.set("SPARQL_3", QUERY_NODE_ID, "ID_3");
+                tx.set("SPARQL_1", QUERY_NODE_ID, "ID_1");
+                tx.set("SPARQL_4", QUERY_NODE_ID, "ID_4");
+                tx.set("SPARQL_2", QUERY_NODE_ID, "ID_2");
                 tx.commit();
             }
 

http://git-wip-us.apache.org/repos/asf/incubator-rya/blob/05147266/extras/rya.pcj.fluo/pcj.fluo.integration/src/test/java/org/apache/rya/indexing/pcj/fluo/app/query/FluoQueryMetadataDAOIT.java
----------------------------------------------------------------------
diff --git a/extras/rya.pcj.fluo/pcj.fluo.integration/src/test/java/org/apache/rya/indexing/pcj/fluo/app/query/FluoQueryMetadataDAOIT.java b/extras/rya.pcj.fluo/pcj.fluo.integration/src/test/java/org/apache/rya/indexing/pcj/fluo/app/query/FluoQueryMetadataDAOIT.java
index 315dddb..45492de 100644
--- a/extras/rya.pcj.fluo/pcj.fluo.integration/src/test/java/org/apache/rya/indexing/pcj/fluo/app/query/FluoQueryMetadataDAOIT.java
+++ b/extras/rya.pcj.fluo/pcj.fluo.integration/src/test/java/org/apache/rya/indexing/pcj/fluo/app/query/FluoQueryMetadataDAOIT.java
@@ -29,9 +29,9 @@ import org.apache.fluo.api.client.FluoClient;
 import org.apache.fluo.api.client.FluoFactory;
 import org.apache.fluo.api.client.Snapshot;
 import org.apache.fluo.api.client.Transaction;
+import org.apache.rya.api.client.CreatePCJ.ExportStrategy;
+import org.apache.rya.api.client.CreatePCJ.QueryType;
 import org.apache.rya.indexing.pcj.fluo.app.ConstructGraph;
-import org.apache.rya.indexing.pcj.fluo.app.IncrementalUpdateConstants.ExportStrategy;
-import org.apache.rya.indexing.pcj.fluo.app.IncrementalUpdateConstants.QueryType;
 import org.apache.rya.indexing.pcj.fluo.app.NodeType;
 import org.apache.rya.indexing.pcj.fluo.app.query.AggregationMetadata.AggregationElement;
 import org.apache.rya.indexing.pcj.fluo.app.query.AggregationMetadata.AggregationType;
@@ -148,11 +148,11 @@ public class FluoQueryMetadataDAOIT extends RyaExportITBase {
         // Create the object that will be serialized.
         String queryId = NodeType.generateNewFluoIdForType(NodeType.QUERY);
         final QueryMetadata.Builder builder = QueryMetadata.builder(queryId);
-        builder.setQueryType(QueryType.Projection);
+        builder.setQueryType(QueryType.PROJECTION);
         builder.setVarOrder(new VariableOrder("y;s;d"));
         builder.setSparql("sparql string");
         builder.setChildNodeId("childNodeId");
-        builder.setExportStrategies(new HashSet<>(Arrays.asList(ExportStrategy.Kafka)));
+        builder.setExportStrategies(new HashSet<>(Arrays.asList(ExportStrategy.KAFKA)));
         final QueryMetadata originalMetadata = builder.build();
 
         try(FluoClient fluoClient = FluoFactory.newClient(super.getFluoConfiguration())) {
@@ -338,7 +338,7 @@ public class FluoQueryMetadataDAOIT extends RyaExportITBase {
     }
 
     @Test
-    public void fluoQueryTest() throws MalformedQueryException {
+    public void fluoQueryTest() throws MalformedQueryException, UnsupportedQueryException {
         final FluoQueryMetadataDAO dao = new FluoQueryMetadataDAO();
 
         // Create the object that will be serialized.
@@ -357,7 +357,7 @@ public class FluoQueryMetadataDAOIT extends RyaExportITBase {
         builder.setFluoQueryId(NodeType.generateNewFluoIdForType(NodeType.QUERY));
         final FluoQuery originalQuery = builder.build();
 
-        assertEquals(QueryType.Projection, originalQuery.getQueryType());
+        assertEquals(QueryType.PROJECTION, originalQuery.getQueryType());
         assertEquals(false, originalQuery.getConstructQueryMetadata().isPresent());
         
         try(FluoClient fluoClient = FluoFactory.newClient(super.getFluoConfiguration())) {
@@ -379,7 +379,7 @@ public class FluoQueryMetadataDAOIT extends RyaExportITBase {
     }
     
     @Test
-    public void fluoConstructQueryTest() throws MalformedQueryException {
+    public void fluoConstructQueryTest() throws MalformedQueryException, UnsupportedQueryException {
         final FluoQueryMetadataDAO dao = new FluoQueryMetadataDAO();
 
         // Create the object that will be serialized.
@@ -398,7 +398,7 @@ public class FluoQueryMetadataDAOIT extends RyaExportITBase {
         builder.setFluoQueryId(NodeType.generateNewFluoIdForType(NodeType.QUERY));
         final FluoQuery originalQuery = builder.build();
         
-        assertEquals(QueryType.Construct, originalQuery.getQueryType());
+        assertEquals(QueryType.CONSTRUCT, originalQuery.getQueryType());
         assertEquals(true, originalQuery.getConstructQueryMetadata().isPresent());
 
         try(FluoClient fluoClient = FluoFactory.newClient(super.getFluoConfiguration())) {
@@ -421,7 +421,7 @@ public class FluoQueryMetadataDAOIT extends RyaExportITBase {
     
     
     @Test
-    public void fluoNestedQueryTest() throws MalformedQueryException {
+    public void fluoNestedQueryTest() throws MalformedQueryException, UnsupportedQueryException {
         final FluoQueryMetadataDAO dao = new FluoQueryMetadataDAO();
 
         // Create the object that will be serialized.
@@ -442,7 +442,7 @@ public class FluoQueryMetadataDAOIT extends RyaExportITBase {
         builder.setFluoQueryId(NodeType.generateNewFluoIdForType(NodeType.QUERY));
         final FluoQuery originalQuery = builder.build();
         
-        assertEquals(QueryType.Projection, originalQuery.getQueryType());
+        assertEquals(QueryType.PROJECTION, originalQuery.getQueryType());
 
         try(FluoClient fluoClient = FluoFactory.newClient(super.getFluoConfiguration())) {
             // Write it to the Fluo table.
@@ -463,7 +463,7 @@ public class FluoQueryMetadataDAOIT extends RyaExportITBase {
     }
     
     @Test
-    public void fluoNestedConstructQueryTest() throws MalformedQueryException {
+    public void fluoNestedConstructQueryTest() throws MalformedQueryException, UnsupportedQueryException {
         final FluoQueryMetadataDAO dao = new FluoQueryMetadataDAO();
 
         // Create the object that will be serialized.
@@ -488,7 +488,7 @@ public class FluoQueryMetadataDAOIT extends RyaExportITBase {
         builder.setFluoQueryId(NodeType.generateNewFluoIdForType(NodeType.QUERY));
         final FluoQuery originalQuery = builder.build();
         
-        assertEquals(QueryType.Construct, originalQuery.getQueryType());
+        assertEquals(QueryType.CONSTRUCT, originalQuery.getQueryType());
 
         try(FluoClient fluoClient = FluoFactory.newClient(super.getFluoConfiguration())) {
             // Write it to the Fluo table.

http://git-wip-us.apache.org/repos/asf/incubator-rya/blob/05147266/extras/rya.pcj.fluo/pcj.fluo.integration/src/test/java/org/apache/rya/indexing/pcj/fluo/integration/BatchIT.java
----------------------------------------------------------------------
diff --git a/extras/rya.pcj.fluo/pcj.fluo.integration/src/test/java/org/apache/rya/indexing/pcj/fluo/integration/BatchIT.java b/extras/rya.pcj.fluo/pcj.fluo.integration/src/test/java/org/apache/rya/indexing/pcj/fluo/integration/BatchIT.java
index 32d0e41..47a2f29 100644
--- a/extras/rya.pcj.fluo/pcj.fluo.integration/src/test/java/org/apache/rya/indexing/pcj/fluo/integration/BatchIT.java
+++ b/extras/rya.pcj.fluo/pcj.fluo.integration/src/test/java/org/apache/rya/indexing/pcj/fluo/integration/BatchIT.java
@@ -53,6 +53,7 @@ import org.apache.rya.indexing.pcj.fluo.app.query.FluoQuery;
 import org.apache.rya.indexing.pcj.fluo.app.query.FluoQueryColumns;
 import org.apache.rya.indexing.pcj.fluo.app.query.FluoQueryMetadataDAO;
 import org.apache.rya.indexing.pcj.fluo.app.query.JoinMetadata.JoinType;
+import org.apache.rya.indexing.pcj.fluo.app.query.UnsupportedQueryException;
 import org.apache.rya.indexing.pcj.fluo.app.util.FluoQueryUtils;
 import org.apache.rya.indexing.pcj.storage.PrecomputedJoinStorage;
 import org.apache.rya.indexing.pcj.storage.accumulo.AccumuloPcjStorage;
@@ -343,7 +344,7 @@ public class BatchIT extends RyaExportITBase {
         return statements;
     }
 
-    private List<String> getNodeIdStrings(FluoClient fluoClient, String queryId) {
+    private List<String> getNodeIdStrings(FluoClient fluoClient, String queryId) throws UnsupportedQueryException {
         List<String> nodeStrings;
         try (Snapshot sx = fluoClient.newSnapshot()) {
             FluoQuery query = dao.readFluoQuery(sx, queryId);

http://git-wip-us.apache.org/repos/asf/incubator-rya/blob/05147266/extras/rya.pcj.fluo/pcj.fluo.integration/src/test/java/org/apache/rya/indexing/pcj/fluo/integration/CreateDeleteIT.java
----------------------------------------------------------------------
diff --git a/extras/rya.pcj.fluo/pcj.fluo.integration/src/test/java/org/apache/rya/indexing/pcj/fluo/integration/CreateDeleteIT.java b/extras/rya.pcj.fluo/pcj.fluo.integration/src/test/java/org/apache/rya/indexing/pcj/fluo/integration/CreateDeleteIT.java
index 7c4caa4..a1d76cb 100644
--- a/extras/rya.pcj.fluo/pcj.fluo.integration/src/test/java/org/apache/rya/indexing/pcj/fluo/integration/CreateDeleteIT.java
+++ b/extras/rya.pcj.fluo/pcj.fluo.integration/src/test/java/org/apache/rya/indexing/pcj/fluo/integration/CreateDeleteIT.java
@@ -33,6 +33,7 @@ import org.apache.fluo.api.client.scanner.ColumnScanner;
 import org.apache.fluo.api.client.scanner.RowScanner;
 import org.apache.fluo.api.data.Bytes;
 import org.apache.fluo.api.data.Span;
+import org.apache.rya.api.client.CreatePCJ.ExportStrategy;
 import org.apache.rya.api.client.RyaClient;
 import org.apache.rya.api.client.accumulo.AccumuloRyaClientFactory;
 import org.apache.rya.indexing.pcj.fluo.api.DeleteFluoPcj;
@@ -79,7 +80,7 @@ public class CreateDeleteIT extends RyaExportITBase {
         try(FluoClient fluoClient = FluoFactory.newClient(super.getFluoConfiguration())) {
             // Ensure the data was loaded.
             final List<Bytes> rows = getFluoTableEntries(fluoClient);
-            assertEquals(20, rows.size());
+            assertEquals(18, rows.size());
 
             // Delete the PCJ from the Fluo application.
             new DeleteFluoPcj(1).deletePcj(fluoClient, pcjId);
@@ -111,7 +112,7 @@ public class CreateDeleteIT extends RyaExportITBase {
         try(FluoClient fluoClient = FluoFactory.newClient(super.getFluoConfiguration())) {
             // Ensure the data was loaded.
             final List<Bytes> rows = getFluoTableEntries(fluoClient);
-            assertEquals(12, rows.size());
+            assertEquals(10, rows.size());
 
             // Delete the PCJ from the Fluo application.
             new DeleteFluoPcj(1).deletePcj(fluoClient, pcjId);
@@ -130,7 +131,7 @@ public class CreateDeleteIT extends RyaExportITBase {
         // Register the PCJ with Rya.
         final RyaClient ryaClient = AccumuloRyaClientFactory.build(createConnectionDetails(), getAccumuloConnector());
 
-        final String pcjId = ryaClient.getCreatePCJ().createPCJ(getRyaInstanceName(), sparql);
+        final String pcjId = ryaClient.getCreatePCJ().createPCJ(getRyaInstanceName(), sparql, Sets.newHashSet(ExportStrategy.NO_OP_EXPORT));
 
         // Write the data to Rya.
         final SailRepositoryConnection ryaConn = super.getRyaSailRepository().getConnection();

http://git-wip-us.apache.org/repos/asf/incubator-rya/blob/05147266/extras/rya.pcj.fluo/pcj.fluo.integration/src/test/java/org/apache/rya/indexing/pcj/fluo/integration/KafkaExportIT.java
----------------------------------------------------------------------
diff --git a/extras/rya.pcj.fluo/pcj.fluo.integration/src/test/java/org/apache/rya/indexing/pcj/fluo/integration/KafkaExportIT.java b/extras/rya.pcj.fluo/pcj.fluo.integration/src/test/java/org/apache/rya/indexing/pcj/fluo/integration/KafkaExportIT.java
index f9f55d0..8911f56 100644
--- a/extras/rya.pcj.fluo/pcj.fluo.integration/src/test/java/org/apache/rya/indexing/pcj/fluo/integration/KafkaExportIT.java
+++ b/extras/rya.pcj.fluo/pcj.fluo.integration/src/test/java/org/apache/rya/indexing/pcj/fluo/integration/KafkaExportIT.java
@@ -92,6 +92,8 @@ public class KafkaExportIT extends KafkaExportITBase {
         // Create the PCJ in Fluo and load the statements into Rya.
         final String pcjId = loadData(sparql, statements);
 
+        FluoITHelper.printFluoTable(super.getFluoConfiguration());
+        
         // The expected results of the SPARQL query once the PCJ has been computed.
         final Set<BindingSet> expectedResult = new HashSet<>();
 
@@ -590,9 +592,9 @@ public class KafkaExportIT extends KafkaExportITBase {
         // Read all of the results from the Kafka topic.
         final Set<VisibilityBindingSet> results = new HashSet<>();
 
-        try(final KafkaConsumer<Integer, VisibilityBindingSet> consumer = makeConsumer(pcjId)) {
-            final ConsumerRecords<Integer, VisibilityBindingSet> records = consumer.poll(5000);
-            final Iterator<ConsumerRecord<Integer, VisibilityBindingSet>> recordIterator = records.iterator();
+        try(final KafkaConsumer<String, VisibilityBindingSet> consumer = makeConsumer(pcjId)) {
+            final ConsumerRecords<String, VisibilityBindingSet> records = consumer.poll(5000);
+            final Iterator<ConsumerRecord<String, VisibilityBindingSet>> recordIterator = records.iterator();
             while (recordIterator.hasNext()) {
                 results.add( recordIterator.next().value() );
             }
@@ -607,9 +609,9 @@ public class KafkaExportIT extends KafkaExportITBase {
         // Read the results from the Kafka topic. The last one has the final aggregation result.
         VisibilityBindingSet result = null;
 
-        try(final KafkaConsumer<Integer, VisibilityBindingSet> consumer = makeConsumer(pcjId)) {
-            final ConsumerRecords<Integer, VisibilityBindingSet> records = consumer.poll(5000);
-            final Iterator<ConsumerRecord<Integer, VisibilityBindingSet>> recordIterator = records.iterator();
+        try(final KafkaConsumer<String, VisibilityBindingSet> consumer = makeConsumer(pcjId)) {
+            final ConsumerRecords<String, VisibilityBindingSet> records = consumer.poll(5000);
+            final Iterator<ConsumerRecord<String, VisibilityBindingSet>> recordIterator = records.iterator();
             while (recordIterator.hasNext()) {
                 result = recordIterator.next().value();
             }
@@ -625,9 +627,9 @@ public class KafkaExportIT extends KafkaExportITBase {
         // The key in this map is a Binding Set containing only the group by variables.
         final Map<BindingSet, VisibilityBindingSet> results = new HashMap<>();
 
-        try(final KafkaConsumer<Integer, VisibilityBindingSet> consumer = makeConsumer(pcjId)) {
-            final ConsumerRecords<Integer, VisibilityBindingSet> records = consumer.poll(5000);
-            final Iterator<ConsumerRecord<Integer, VisibilityBindingSet>> recordIterator = records.iterator();
+        try(final KafkaConsumer<String, VisibilityBindingSet> consumer = makeConsumer(pcjId)) {
+            final ConsumerRecords<String, VisibilityBindingSet> records = consumer.poll(5000);
+            final Iterator<ConsumerRecord<String, VisibilityBindingSet>> recordIterator = records.iterator();
             while (recordIterator.hasNext()) {
                 final VisibilityBindingSet visBindingSet = recordIterator.next().value();
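
Because the helpers above now read records keyed by String rather than Integer, the matching consumer needs a String key deserializer. Below is a minimal sketch, written as if it lived inside the Kafka export test base so its existing imports (java.util.Properties, java.util.Collections, the Kafka consumer classes) are omitted; the group id, the offset policy, the topic-per-pcjId naming, and the use of KryoVisibilityBindingSetSerializer as the value deserializer are assumptions for illustration, not something this patch prescribes.

    private KafkaConsumer<String, VisibilityBindingSet> makeStringKeyedConsumer(final String pcjId) {
        final Properties props = new Properties();
        props.setProperty(ConsumerConfig.BOOTSTRAP_SERVERS_CONFIG, BROKERHOST + ":" + BROKERPORT);
        props.setProperty(ConsumerConfig.GROUP_ID_CONFIG, "kafka-export-it-sketch");  // assumed group id
        props.setProperty(ConsumerConfig.AUTO_OFFSET_RESET_CONFIG, "earliest");       // read from the start
        props.setProperty(ConsumerConfig.KEY_DESERIALIZER_CLASS_CONFIG, StringDeserializer.class.getName());
        // Assumption: the Kryo serializer introduced in this commit also handles deserialization.
        props.setProperty(ConsumerConfig.VALUE_DESERIALIZER_CLASS_CONFIG, KryoVisibilityBindingSetSerializer.class.getName());

        final KafkaConsumer<String, VisibilityBindingSet> consumer = new KafkaConsumer<>(props);
        consumer.subscribe(Collections.singletonList(pcjId));  // assumes one results topic per pcjId
        return consumer;
    }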
 

http://git-wip-us.apache.org/repos/asf/incubator-rya/blob/05147266/extras/rya.pcj.fluo/pcj.fluo.integration/src/test/java/org/apache/rya/indexing/pcj/fluo/integration/KafkaRyaSubGraphExportIT.java
----------------------------------------------------------------------
diff --git a/extras/rya.pcj.fluo/pcj.fluo.integration/src/test/java/org/apache/rya/indexing/pcj/fluo/integration/KafkaRyaSubGraphExportIT.java b/extras/rya.pcj.fluo/pcj.fluo.integration/src/test/java/org/apache/rya/indexing/pcj/fluo/integration/KafkaRyaSubGraphExportIT.java
index ca8de0d..b2944ca 100644
--- a/extras/rya.pcj.fluo/pcj.fluo.integration/src/test/java/org/apache/rya/indexing/pcj/fluo/integration/KafkaRyaSubGraphExportIT.java
+++ b/extras/rya.pcj.fluo/pcj.fluo.integration/src/test/java/org/apache/rya/indexing/pcj/fluo/integration/KafkaRyaSubGraphExportIT.java
@@ -33,13 +33,12 @@ import java.util.stream.Collectors;
 import org.apache.fluo.api.client.FluoClient;
 import org.apache.fluo.api.config.ObserverSpecification;
 import org.apache.fluo.core.client.FluoClientImpl;
+import org.apache.fluo.recipes.test.FluoITHelper;
 import org.apache.kafka.clients.consumer.ConsumerConfig;
 import org.apache.kafka.clients.consumer.ConsumerRecord;
 import org.apache.kafka.clients.consumer.ConsumerRecords;
 import org.apache.kafka.clients.consumer.KafkaConsumer;
-import org.apache.kafka.clients.producer.ProducerConfig;
 import org.apache.kafka.common.serialization.StringDeserializer;
-import org.apache.kafka.common.serialization.StringSerializer;
 import org.apache.rya.accumulo.AccumuloRyaDAO;
 import org.apache.rya.api.domain.RyaStatement;
 import org.apache.rya.api.domain.RyaSubGraph;
@@ -48,13 +47,14 @@ import org.apache.rya.api.domain.RyaURI;
 import org.apache.rya.api.resolver.RdfToRyaConversions;
 import org.apache.rya.indexing.pcj.fluo.ConstructGraphTestUtils;
 import org.apache.rya.indexing.pcj.fluo.api.CreateFluoPcj;
-import org.apache.rya.indexing.pcj.fluo.app.export.kafka.KafkaExportParameters;
+import org.apache.rya.indexing.pcj.fluo.app.export.kafka.KafkaSubGraphExporterParameters;
 import org.apache.rya.indexing.pcj.fluo.app.export.kafka.RyaSubGraphKafkaSerDe;
 import org.apache.rya.indexing.pcj.fluo.app.observers.AggregationObserver;
 import org.apache.rya.indexing.pcj.fluo.app.observers.ConstructQueryResultObserver;
 import org.apache.rya.indexing.pcj.fluo.app.observers.FilterObserver;
 import org.apache.rya.indexing.pcj.fluo.app.observers.JoinObserver;
 import org.apache.rya.indexing.pcj.fluo.app.observers.ProjectionObserver;
+import org.apache.rya.indexing.pcj.fluo.app.observers.QueryResultObserver;
 import org.apache.rya.indexing.pcj.fluo.app.observers.StatementPatternObserver;
 import org.apache.rya.indexing.pcj.fluo.app.observers.TripleObserver;
 import org.apache.rya.pcj.fluo.test.base.KafkaExportITBase;
@@ -88,22 +88,18 @@ public class KafkaRyaSubGraphExportIT extends KafkaExportITBase {
         observers.add(new ObserverSpecification(FilterObserver.class.getName()));
         observers.add(new ObserverSpecification(AggregationObserver.class.getName()));
         observers.add(new ObserverSpecification(ProjectionObserver.class.getName()));
+        observers.add(new ObserverSpecification(ConstructQueryResultObserver.class.getName()));
+        
 
         // Configure the export observer to export new PCJ results to the mini
         // accumulo cluster.
         final HashMap<String, String> exportParams = new HashMap<>();
 
-        final KafkaExportParameters kafkaParams = new KafkaExportParameters(exportParams);
-        kafkaParams.setExportToKafka(true);
-
-        // Configure the Kafka Producer
-        final Properties producerConfig = new Properties();
-        producerConfig.put(ProducerConfig.BOOTSTRAP_SERVERS_CONFIG, BROKERHOST + ":" + BROKERPORT);
-        producerConfig.put(ProducerConfig.KEY_SERIALIZER_CLASS_CONFIG, StringSerializer.class.getName());
-        producerConfig.put(ProducerConfig.VALUE_SERIALIZER_CLASS_CONFIG, RyaSubGraphKafkaSerDe.class.getName());
-        kafkaParams.addAllProducerConfig(producerConfig);
+        final KafkaSubGraphExporterParameters kafkaParams = new KafkaSubGraphExporterParameters(exportParams);
+        kafkaParams.setUseKafkaSubgraphExporter(true);
+        kafkaParams.setKafkaBootStrapServers(BROKERHOST + ":" + BROKERPORT);
 
-        final ObserverSpecification exportObserverConfig = new ObserverSpecification(ConstructQueryResultObserver.class.getName(),
+        final ObserverSpecification exportObserverConfig = new ObserverSpecification(QueryResultObserver.class.getName(),
                 exportParams);
         observers.add(exportObserverConfig);
 

http://git-wip-us.apache.org/repos/asf/incubator-rya/blob/05147266/extras/rya.pcj.fluo/pcj.fluo.integration/src/test/java/org/apache/rya/indexing/pcj/fluo/integration/QueryIT.java
----------------------------------------------------------------------
diff --git a/extras/rya.pcj.fluo/pcj.fluo.integration/src/test/java/org/apache/rya/indexing/pcj/fluo/integration/QueryIT.java b/extras/rya.pcj.fluo/pcj.fluo.integration/src/test/java/org/apache/rya/indexing/pcj/fluo/integration/QueryIT.java
index 6ecec02..0aefaca 100644
--- a/extras/rya.pcj.fluo/pcj.fluo.integration/src/test/java/org/apache/rya/indexing/pcj/fluo/integration/QueryIT.java
+++ b/extras/rya.pcj.fluo/pcj.fluo.integration/src/test/java/org/apache/rya/indexing/pcj/fluo/integration/QueryIT.java
@@ -34,9 +34,11 @@ import javax.xml.datatype.DatatypeFactory;
 import org.apache.accumulo.core.client.Connector;
 import org.apache.fluo.api.client.FluoClient;
 import org.apache.fluo.core.client.FluoClientImpl;
+import org.apache.rya.api.client.CreatePCJ.ExportStrategy;
 import org.apache.rya.api.client.RyaClient;
 import org.apache.rya.api.client.accumulo.AccumuloRyaClientFactory;
 import org.apache.rya.indexing.pcj.fluo.api.CreateFluoPcj;
+import org.apache.rya.indexing.pcj.fluo.app.query.UnsupportedQueryException;
 import org.apache.rya.indexing.pcj.storage.PeriodicQueryResultStorage;
 import org.apache.rya.indexing.pcj.storage.PrecomputedJoinStorage;
 import org.apache.rya.indexing.pcj.storage.PrecomputedJoinStorage.CloseableIterator;
@@ -877,7 +879,7 @@ public class QueryIT extends RyaExportITBase {
         runTest(query, statements, expectedResults, ExporterType.Periodic);
     }
 
-    @Test(expected= IllegalArgumentException.class)
+    @Test(expected= UnsupportedQueryException.class)
     public void nestedConstructPeriodicQueryWithAggregationAndGroupBy() throws Exception {
         String query = "prefix function: <http://org.apache.rya/function#> " // n
                 + "prefix time: <http://www.w3.org/2006/time#> " // n
@@ -924,7 +926,7 @@ public class QueryIT extends RyaExportITBase {
             PeriodicQueryResultStorage periodicStorage = new AccumuloPeriodicQueryResultStorage(accumuloConn, getRyaInstanceName());
             String periodicId = periodicStorage.createPeriodicQuery(sparql);
             try (FluoClient fluo = new FluoClientImpl(super.getFluoConfiguration())) {
-                new CreateFluoPcj().createPcj(periodicId, sparql, fluo);
+                new CreateFluoPcj().createPcj(periodicId, sparql, Sets.newHashSet(ExportStrategy.RYA), fluo);
             }
             addStatementsAndWait(statements);
             

http://git-wip-us.apache.org/repos/asf/incubator-rya/blob/05147266/extras/rya.pcj.fluo/pcj.fluo.test.base/src/main/java/org/apache/rya/pcj/fluo/test/base/KafkaExportITBase.java
----------------------------------------------------------------------
diff --git a/extras/rya.pcj.fluo/pcj.fluo.test.base/src/main/java/org/apache/rya/pcj/fluo/test/base/KafkaExportITBase.java b/extras/rya.pcj.fluo/pcj.fluo.test.base/src/main/java/org/apache/rya/pcj/fluo/test/base/KafkaExportITBase.java
index c828a20..ed9ce60 100644
--- a/extras/rya.pcj.fluo/pcj.fluo.test.base/src/main/java/org/apache/rya/pcj/fluo/test/base/KafkaExportITBase.java
+++ b/extras/rya.pcj.fluo/pcj.fluo.test.base/src/main/java/org/apache/rya/pcj/fluo/test/base/KafkaExportITBase.java
@@ -37,25 +37,25 @@ import org.apache.accumulo.core.client.Instance;
 import org.apache.accumulo.minicluster.MiniAccumuloCluster;
 import org.apache.fluo.api.config.ObserverSpecification;
 import org.apache.fluo.recipes.test.AccumuloExportITBase;
-import org.apache.fluo.recipes.test.FluoITHelper;
 import org.apache.kafka.clients.consumer.ConsumerConfig;
 import org.apache.kafka.clients.consumer.ConsumerRecord;
 import org.apache.kafka.clients.consumer.ConsumerRecords;
 import org.apache.kafka.clients.consumer.KafkaConsumer;
 import org.apache.kafka.clients.producer.KafkaProducer;
-import org.apache.kafka.clients.producer.ProducerConfig;
 import org.apache.kafka.clients.producer.ProducerRecord;
-import org.apache.kafka.common.serialization.StringSerializer;
+import org.apache.kafka.common.serialization.StringDeserializer;
 import org.apache.rya.accumulo.AccumuloRdfConfiguration;
 import org.apache.rya.accumulo.AccumuloRyaDAO;
+import org.apache.rya.api.client.CreatePCJ.ExportStrategy;
 import org.apache.rya.api.client.Install.InstallConfiguration;
 import org.apache.rya.api.client.RyaClient;
 import org.apache.rya.api.client.accumulo.AccumuloConnectionDetails;
 import org.apache.rya.api.client.accumulo.AccumuloRyaClientFactory;
 import org.apache.rya.indexing.accumulo.ConfigUtils;
 import org.apache.rya.indexing.external.PrecomputedJoinIndexerConfig;
-import org.apache.rya.indexing.pcj.fluo.app.export.kafka.KafkaExportParameters;
-import org.apache.rya.indexing.pcj.fluo.app.export.kafka.RyaSubGraphKafkaSerDe;
+import org.apache.rya.indexing.pcj.fluo.app.export.kafka.KafkaBindingSetExporterParameters;
+import org.apache.rya.indexing.pcj.fluo.app.export.kafka.KafkaSubGraphExporterParameters;
+import org.apache.rya.indexing.pcj.fluo.app.export.kafka.KryoVisibilityBindingSetSerializer;
 import org.apache.rya.indexing.pcj.fluo.app.observers.AggregationObserver;
 import org.apache.rya.indexing.pcj.fluo.app.observers.ConstructQueryResultObserver;
 import org.apache.rya.indexing.pcj.fluo.app.observers.FilterObserver;
@@ -74,6 +74,8 @@ import org.openrdf.model.Statement;
 import org.openrdf.repository.sail.SailRepositoryConnection;
 import org.openrdf.sail.Sail;
 
+import com.google.common.collect.Sets;
+
 import kafka.admin.AdminUtils;
 import kafka.admin.RackAwareMode;
 import kafka.server.KafkaConfig;
@@ -119,41 +121,20 @@ public class KafkaExportITBase extends AccumuloExportITBase {
         observers.add(new ObserverSpecification(FilterObserver.class.getName()));
         observers.add(new ObserverSpecification(AggregationObserver.class.getName()));
         observers.add(new ObserverSpecification(ProjectionObserver.class.getName()));
+        observers.add(new ObserverSpecification(ConstructQueryResultObserver.class.getName()));
 
         // Configure the export observer to export new PCJ results to the mini
         // accumulo cluster.
         final HashMap<String, String> exportParams = new HashMap<>();
-
-        final KafkaExportParameters kafkaParams = new KafkaExportParameters(exportParams);
-        kafkaParams.setExportToKafka(true);
-
-        // Configure the Kafka Producer
-        final Properties producerConfig = new Properties();
-        producerConfig.put(ProducerConfig.BOOTSTRAP_SERVERS_CONFIG, BROKERHOST + ":" + BROKERPORT);
-        producerConfig.put(ProducerConfig.KEY_SERIALIZER_CLASS_CONFIG, "org.apache.kafka.common.serialization.ByteArraySerializer");
-        producerConfig.put(ProducerConfig.VALUE_SERIALIZER_CLASS_CONFIG,
-                "org.apache.rya.indexing.pcj.fluo.app.export.kafka.KryoVisibilityBindingSetSerializer");
-        kafkaParams.addAllProducerConfig(producerConfig);
+        final KafkaBindingSetExporterParameters kafkaParams = new KafkaBindingSetExporterParameters(exportParams);
+        kafkaParams.setUseKafkaBindingSetExporter(true);
+        kafkaParams.setKafkaBootStrapServers(BROKERHOST + ":" + BROKERPORT);
+        
+        final KafkaSubGraphExporterParameters kafkaConstructParams = new KafkaSubGraphExporterParameters(exportParams);
+        kafkaConstructParams.setUseKafkaSubgraphExporter(true);
 
         final ObserverSpecification exportObserverConfig = new ObserverSpecification(QueryResultObserver.class.getName(), exportParams);
         observers.add(exportObserverConfig);
-        
-        //create construct query observer and tell it not to export to Kafka
-        //it will only add results back into Fluo
-        HashMap<String, String> constructParams = new HashMap<>();
-        final KafkaExportParameters kafkaConstructParams = new KafkaExportParameters(constructParams);
-        kafkaConstructParams.setExportToKafka(true);
-        
-        // Configure the Kafka Producer
-        final Properties constructProducerConfig = new Properties();
-        constructProducerConfig.put(ProducerConfig.BOOTSTRAP_SERVERS_CONFIG, BROKERHOST + ":" + BROKERPORT);
-        constructProducerConfig.put(ProducerConfig.KEY_SERIALIZER_CLASS_CONFIG, StringSerializer.class.getName());
-        constructProducerConfig.put(ProducerConfig.VALUE_SERIALIZER_CLASS_CONFIG, RyaSubGraphKafkaSerDe.class.getName());
-        kafkaConstructParams.addAllProducerConfig(constructProducerConfig);
-
-        final ObserverSpecification constructExportObserverConfig = new ObserverSpecification(ConstructQueryResultObserver.class.getName(),
-                constructParams);
-        observers.add(constructExportObserverConfig);
 
         // Add the observers to the Fluo Configuration.
         super.getFluoConfiguration().addObservers(observers);
@@ -323,21 +304,19 @@ public class KafkaExportITBase extends AccumuloExportITBase {
         consumer.close();
     }
 
-    protected KafkaConsumer<Integer, VisibilityBindingSet> makeConsumer(final String TopicName) {
+    protected KafkaConsumer<String, VisibilityBindingSet> makeConsumer(final String TopicName) {
         // setup consumer
         final Properties consumerProps = new Properties();
         consumerProps.setProperty(ConsumerConfig.BOOTSTRAP_SERVERS_CONFIG, BROKERHOST + ":" + BROKERPORT);
         consumerProps.setProperty(ConsumerConfig.GROUP_ID_CONFIG, "group0");
         consumerProps.setProperty(ConsumerConfig.CLIENT_ID_CONFIG, "consumer0");
-        consumerProps.setProperty(ConsumerConfig.KEY_DESERIALIZER_CLASS_CONFIG,
-                "org.apache.kafka.common.serialization.IntegerDeserializer");
-        consumerProps.setProperty(ConsumerConfig.VALUE_DESERIALIZER_CLASS_CONFIG,
-                "org.apache.rya.indexing.pcj.fluo.app.export.kafka.KryoVisibilityBindingSetSerializer");
+        consumerProps.setProperty(ConsumerConfig.KEY_DESERIALIZER_CLASS_CONFIG, StringDeserializer.class.getName());
+        consumerProps.setProperty(ConsumerConfig.VALUE_DESERIALIZER_CLASS_CONFIG, KryoVisibilityBindingSetSerializer.class.getName());
 
         // to make sure the consumer starts from the beginning of the topic
         consumerProps.put(ConsumerConfig.AUTO_OFFSET_RESET_CONFIG, "earliest");
 
-        final KafkaConsumer<Integer, VisibilityBindingSet> consumer = new KafkaConsumer<>(consumerProps);
+        final KafkaConsumer<String, VisibilityBindingSet> consumer = new KafkaConsumer<>(consumerProps);
         consumer.subscribe(Arrays.asList(TopicName));
         return consumer;
     }
@@ -353,7 +332,7 @@ public class KafkaExportITBase extends AccumuloExportITBase {
         final RyaClient ryaClient = AccumuloRyaClientFactory.build(new AccumuloConnectionDetails(ACCUMULO_USER,
                 ACCUMULO_PASSWORD.toCharArray(), accInstance.getInstanceName(), accInstance.getZooKeepers()), accumuloConn);
 
-        final String pcjId = ryaClient.getCreatePCJ().createPCJ(RYA_INSTANCE_NAME, sparql);
+        final String pcjId = ryaClient.getCreatePCJ().createPCJ(RYA_INSTANCE_NAME, sparql, Sets.newHashSet(ExportStrategy.KAFKA));
 
         // Write the data to Rya.
         final SailRepositoryConnection ryaConn = getRyaSailRepository().getConnection();
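
For readers following the serializer changes above, here is a minimal, self-contained sketch (not part of this commit; class name, broker address, group id, and topic name are placeholders) of a consumer wired the same way as the updated makeConsumer(...) helper: String keys and KryoVisibilityBindingSetSerializer values.

import java.util.Arrays;
import java.util.Properties;

import org.apache.kafka.clients.consumer.ConsumerConfig;
import org.apache.kafka.clients.consumer.ConsumerRecord;
import org.apache.kafka.clients.consumer.ConsumerRecords;
import org.apache.kafka.clients.consumer.KafkaConsumer;
import org.apache.kafka.common.serialization.StringDeserializer;
import org.apache.rya.indexing.pcj.fluo.app.export.kafka.KryoVisibilityBindingSetSerializer;
import org.apache.rya.indexing.pcj.storage.accumulo.VisibilityBindingSet;

public class ExportedResultReaderSketch {
    public static void main(final String[] args) {
        final Properties props = new Properties();
        props.setProperty(ConsumerConfig.BOOTSTRAP_SERVERS_CONFIG, "localhost:9092"); // placeholder broker
        props.setProperty(ConsumerConfig.GROUP_ID_CONFIG, "sketch-group");
        props.setProperty(ConsumerConfig.KEY_DESERIALIZER_CLASS_CONFIG, StringDeserializer.class.getName());
        props.setProperty(ConsumerConfig.VALUE_DESERIALIZER_CLASS_CONFIG, KryoVisibilityBindingSetSerializer.class.getName());
        props.setProperty(ConsumerConfig.AUTO_OFFSET_RESET_CONFIG, "earliest"); // read the topic from the start

        try (KafkaConsumer<String, VisibilityBindingSet> consumer = new KafkaConsumer<>(props)) {
            consumer.subscribe(Arrays.asList("placeholder-pcj-topic")); // placeholder topic name
            final ConsumerRecords<String, VisibilityBindingSet> records = consumer.poll(5000);
            for (final ConsumerRecord<String, VisibilityBindingSet> record : records) {
                System.out.println(record.value()); // each value is an exported VisibilityBindingSet
            }
        }
    }
}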

http://git-wip-us.apache.org/repos/asf/incubator-rya/blob/05147266/extras/rya.pcj.fluo/pcj.fluo.test.base/src/main/java/org/apache/rya/pcj/fluo/test/base/RyaExportITBase.java
----------------------------------------------------------------------
diff --git a/extras/rya.pcj.fluo/pcj.fluo.test.base/src/main/java/org/apache/rya/pcj/fluo/test/base/RyaExportITBase.java b/extras/rya.pcj.fluo/pcj.fluo.test.base/src/main/java/org/apache/rya/pcj/fluo/test/base/RyaExportITBase.java
index 9c5732f..1c02db3 100644
--- a/extras/rya.pcj.fluo/pcj.fluo.test.base/src/main/java/org/apache/rya/pcj/fluo/test/base/RyaExportITBase.java
+++ b/extras/rya.pcj.fluo/pcj.fluo.test.base/src/main/java/org/apache/rya/pcj/fluo/test/base/RyaExportITBase.java
@@ -65,7 +65,8 @@ public class RyaExportITBase extends FluoITBase {
         // Configure the export observer to export new PCJ results to the mini accumulo cluster.
         final HashMap<String, String> exportParams = new HashMap<>();
         final RyaExportParameters ryaParams = new RyaExportParameters(exportParams);
-        ryaParams.setExportToRya(true);
+        ryaParams.setUseRyaBindingSetExporter(true);
+        ryaParams.setUsePeriodicBindingSetExporter(true);
         ryaParams.setRyaInstanceName(getRyaInstanceName());
         ryaParams.setAccumuloInstanceName(super.getMiniAccumuloCluster().getInstanceName());
         ryaParams.setZookeeperServers(super.getMiniAccumuloCluster().getZooKeepers());

http://git-wip-us.apache.org/repos/asf/incubator-rya/blob/05147266/extras/rya.periodic.service/periodic.service.integration.tests/src/test/java/org/apache/rya/periodic/notification/application/PeriodicNotificationProviderIT.java
----------------------------------------------------------------------
diff --git a/extras/rya.periodic.service/periodic.service.integration.tests/src/test/java/org/apache/rya/periodic/notification/application/PeriodicNotificationProviderIT.java b/extras/rya.periodic.service/periodic.service.integration.tests/src/test/java/org/apache/rya/periodic/notification/application/PeriodicNotificationProviderIT.java
index 4d1bc75..cf24974 100644
--- a/extras/rya.periodic.service/periodic.service.integration.tests/src/test/java/org/apache/rya/periodic/notification/application/PeriodicNotificationProviderIT.java
+++ b/extras/rya.periodic.service/periodic.service.integration.tests/src/test/java/org/apache/rya/periodic/notification/application/PeriodicNotificationProviderIT.java
@@ -26,6 +26,7 @@ import org.apache.fluo.api.client.FluoClient;
 import org.apache.fluo.core.client.FluoClientImpl;
 import org.apache.fluo.recipes.test.AccumuloExportITBase;
 import org.apache.rya.indexing.pcj.fluo.api.CreateFluoPcj;
+import org.apache.rya.indexing.pcj.fluo.app.query.UnsupportedQueryException;
 import org.apache.rya.indexing.pcj.fluo.app.util.FluoQueryUtils;
 import org.apache.rya.periodic.notification.coordinator.PeriodicNotificationCoordinatorExecutor;
 import org.apache.rya.periodic.notification.notification.TimestampedNotification;
@@ -38,7 +39,7 @@ import org.junit.Assert;
 public class PeriodicNotificationProviderIT extends AccumuloExportITBase {
 
     @Test
-    public void testProvider() throws MalformedQueryException, InterruptedException {
+    public void testProvider() throws MalformedQueryException, InterruptedException, UnsupportedQueryException {
         
         String sparql = "prefix function: <http://org.apache.rya/function#> " // n
                 + "prefix time: <http://www.w3.org/2006/time#> " // n

http://git-wip-us.apache.org/repos/asf/incubator-rya/blob/05147266/extras/rya.periodic.service/periodic.service.integration.tests/src/test/java/org/apache/rya/periodic/notification/pruner/PeriodicNotificationBinPrunerIT.java
----------------------------------------------------------------------
diff --git a/extras/rya.periodic.service/periodic.service.integration.tests/src/test/java/org/apache/rya/periodic/notification/pruner/PeriodicNotificationBinPrunerIT.java b/extras/rya.periodic.service/periodic.service.integration.tests/src/test/java/org/apache/rya/periodic/notification/pruner/PeriodicNotificationBinPrunerIT.java
index 27acc9c..bb98b7f 100644
--- a/extras/rya.periodic.service/periodic.service.integration.tests/src/test/java/org/apache/rya/periodic/notification/pruner/PeriodicNotificationBinPrunerIT.java
+++ b/extras/rya.periodic.service/periodic.service.integration.tests/src/test/java/org/apache/rya/periodic/notification/pruner/PeriodicNotificationBinPrunerIT.java
@@ -38,12 +38,10 @@ import org.apache.fluo.api.data.Bytes;
 import org.apache.fluo.api.data.ColumnValue;
 import org.apache.fluo.api.data.Span;
 import org.apache.fluo.core.client.FluoClientImpl;
-import org.apache.fluo.recipes.test.FluoITHelper;
 import org.apache.rya.api.resolver.RdfToRyaConversions;
 import org.apache.rya.indexing.pcj.fluo.api.InsertTriples;
 import org.apache.rya.indexing.pcj.fluo.app.IncrementalUpdateConstants;
 import org.apache.rya.indexing.pcj.fluo.app.NodeType;
-import org.apache.rya.indexing.pcj.fluo.app.query.FluoQueryColumns;
 import org.apache.rya.indexing.pcj.fluo.app.util.PeriodicQueryUtil;
 import org.apache.rya.indexing.pcj.fluo.app.util.RowKeyUtil;
 import org.apache.rya.indexing.pcj.storage.PeriodicQueryResultStorage;
@@ -252,14 +250,14 @@ public class PeriodicNotificationBinPrunerIT extends RyaExportITBase {
         }
     }
     
-    private void compareFluoCounts(FluoClient client, String queryId, long bin) {
+    private void compareFluoCounts(FluoClient client, String pcjId, long bin) {
         QueryBindingSet bs = new QueryBindingSet();
         bs.addBinding(IncrementalUpdateConstants.PERIODIC_BIN_ID, new LiteralImpl(Long.toString(bin), XMLSchema.LONG));
         
         VariableOrder varOrder = new VariableOrder(IncrementalUpdateConstants.PERIODIC_BIN_ID);
         
         try(Snapshot sx = client.newSnapshot()) {
-            String fluoQueryId = sx.get(Bytes.of(queryId), FluoQueryColumns.PCJ_ID_QUERY_ID).toString();
+            String fluoQueryId = NodeType.generateNewIdForType(NodeType.QUERY, pcjId);
             Set<String> ids = new HashSet<>();
             PeriodicQueryUtil.getPeriodicQueryNodeAncestorIds(sx, fluoQueryId, ids);
             for(String id: ids) {

http://git-wip-us.apache.org/repos/asf/incubator-rya/blob/05147266/extras/rya.periodic.service/periodic.service.notification/src/main/java/org/apache/rya/periodic/notification/api/CreatePeriodicQuery.java
----------------------------------------------------------------------
diff --git a/extras/rya.periodic.service/periodic.service.notification/src/main/java/org/apache/rya/periodic/notification/api/CreatePeriodicQuery.java b/extras/rya.periodic.service/periodic.service.notification/src/main/java/org/apache/rya/periodic/notification/api/CreatePeriodicQuery.java
index 6aade52..60a3e7c 100644
--- a/extras/rya.periodic.service/periodic.service.notification/src/main/java/org/apache/rya/periodic/notification/api/CreatePeriodicQuery.java
+++ b/extras/rya.periodic.service/periodic.service.notification/src/main/java/org/apache/rya/periodic/notification/api/CreatePeriodicQuery.java
@@ -21,8 +21,10 @@ package org.apache.rya.periodic.notification.api;
 import java.util.Optional;
 
 import org.apache.fluo.api.client.FluoClient;
+import org.apache.rya.api.client.CreatePCJ.ExportStrategy;
 import org.apache.rya.indexing.pcj.fluo.api.CreateFluoPcj;
 import org.apache.rya.indexing.pcj.fluo.app.query.PeriodicQueryNode;
+import org.apache.rya.indexing.pcj.fluo.app.query.UnsupportedQueryException;
 import org.apache.rya.indexing.pcj.fluo.app.util.FluoQueryUtils;
 import org.apache.rya.indexing.pcj.fluo.app.util.PeriodicQueryUtil;
 import org.apache.rya.indexing.pcj.storage.PeriodicQueryResultStorage;
@@ -32,7 +34,7 @@ import org.apache.rya.periodic.notification.notification.PeriodicNotification;
 import org.openrdf.query.MalformedQueryException;
 import org.openrdf.query.algebra.evaluation.function.Function;
 
-import com.google.common.base.Preconditions;
+import com.google.common.collect.Sets;
 
 /**
  * Object that creates a Periodic Query.  A Periodic Query is any query
@@ -82,17 +84,22 @@ public class CreatePeriodicQuery {
             Optional<PeriodicQueryNode> optNode = PeriodicQueryUtil.getPeriodicNode(sparql);
             if(optNode.isPresent()) {
                 PeriodicQueryNode periodicNode = optNode.get();
+                String pcjId = FluoQueryUtils.createNewPcjId();
+               
+                //register query with Fluo
                 CreateFluoPcj createPcj = new CreateFluoPcj();
-                String queryId = createPcj.createPcj(sparql, fluoClient).getQueryId();
-                queryId = FluoQueryUtils.convertFluoQueryIdToPcjId(queryId);
-                periodicStorage.createPeriodicQuery(queryId, sparql);
-                PeriodicNotification notification = PeriodicNotification.builder().id(queryId).period(periodicNode.getPeriod())
+                createPcj.createPcj(pcjId, sparql, Sets.newHashSet(ExportStrategy.RYA), fluoClient);
+                
+                //register query with PeriodicResultStorage table
+                periodicStorage.createPeriodicQuery(pcjId, sparql);
+                //create notification
+                PeriodicNotification notification = PeriodicNotification.builder().id(pcjId).period(periodicNode.getPeriod())
                         .timeUnit(periodicNode.getUnit()).build();
                 return notification;
             } else {
                 throw new RuntimeException("Invalid PeriodicQuery.  Query must possess a PeriodicQuery Filter.");
             }
-        } catch (MalformedQueryException | PeriodicQueryStorageException e) {
+        } catch (MalformedQueryException | PeriodicQueryStorageException | UnsupportedQueryException e) {
             throw new RuntimeException(e);
         }
     }
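
To make the reworked flow easier to follow, the sketch below (illustrative only; the class name is hypothetical) condenses the registration steps from the hunk above into one method, assuming a FluoClient and a PeriodicQueryResultStorage are already available.

import java.util.Optional;

import org.apache.fluo.api.client.FluoClient;
import org.apache.rya.api.client.CreatePCJ.ExportStrategy;
import org.apache.rya.indexing.pcj.fluo.api.CreateFluoPcj;
import org.apache.rya.indexing.pcj.fluo.app.query.PeriodicQueryNode;
import org.apache.rya.indexing.pcj.fluo.app.util.FluoQueryUtils;
import org.apache.rya.indexing.pcj.fluo.app.util.PeriodicQueryUtil;
import org.apache.rya.indexing.pcj.storage.PeriodicQueryResultStorage;
import org.apache.rya.periodic.notification.notification.PeriodicNotification;

import com.google.common.collect.Sets;

public class PeriodicQueryRegistrationSketch {

    public static PeriodicNotification register(final FluoClient fluo,
            final PeriodicQueryResultStorage periodicStorage, final String sparql) throws Exception {
        final Optional<PeriodicQueryNode> node = PeriodicQueryUtil.getPeriodicNode(sparql);
        if (!node.isPresent()) {
            throw new IllegalArgumentException("Query must contain a PeriodicQuery filter.");
        }
        // Register with Fluo using the new pcjId + ExportStrategy signature.
        final String pcjId = FluoQueryUtils.createNewPcjId();
        new CreateFluoPcj().createPcj(pcjId, sparql, Sets.newHashSet(ExportStrategy.RYA), fluo);
        // Register the same id with the PeriodicQueryResultStorage table.
        periodicStorage.createPeriodicQuery(pcjId, sparql);
        // Build the notification that drives the periodic bin processing.
        return PeriodicNotification.builder().id(pcjId).period(node.get().getPeriod())
                .timeUnit(node.get().getUnit()).build();
    }
}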


[4/5] incubator-rya git commit: RYA-246-Query-Export-Strategy. Closes #213.

Posted by ca...@apache.org.
http://git-wip-us.apache.org/repos/asf/incubator-rya/blob/05147266/extras/rya.pcj.fluo/pcj.fluo.app/src/main/java/org/apache/rya/indexing/pcj/fluo/app/export/kafka/KafkaBindingSetExporterFactory.java
----------------------------------------------------------------------
diff --git a/extras/rya.pcj.fluo/pcj.fluo.app/src/main/java/org/apache/rya/indexing/pcj/fluo/app/export/kafka/KafkaBindingSetExporterFactory.java b/extras/rya.pcj.fluo/pcj.fluo.app/src/main/java/org/apache/rya/indexing/pcj/fluo/app/export/kafka/KafkaBindingSetExporterFactory.java
index 5507037..b796a6f 100644
--- a/extras/rya.pcj.fluo/pcj.fluo.app/src/main/java/org/apache/rya/indexing/pcj/fluo/app/export/kafka/KafkaBindingSetExporterFactory.java
+++ b/extras/rya.pcj.fluo/pcj.fluo.app/src/main/java/org/apache/rya/indexing/pcj/fluo/app/export/kafka/KafkaBindingSetExporterFactory.java
@@ -23,7 +23,8 @@ import org.apache.kafka.clients.producer.KafkaProducer;
 import org.apache.kafka.clients.producer.ProducerConfig;
 import org.apache.log4j.Logger;
 import org.apache.rya.indexing.pcj.fluo.app.export.IncrementalBindingSetExporter;
-import org.apache.rya.indexing.pcj.fluo.app.export.IncrementalBindingSetExporterFactory;
+import org.apache.rya.indexing.pcj.fluo.app.export.IncrementalResultExporter;
+import org.apache.rya.indexing.pcj.fluo.app.export.IncrementalResultExporterFactory;
 import org.apache.rya.indexing.pcj.storage.accumulo.VisibilityBindingSet;
 
 import com.google.common.base.Optional;
@@ -44,13 +45,13 @@ import com.google.common.base.Optional;
  * 
  * @see ProducerConfig
  */
-public class KafkaBindingSetExporterFactory implements IncrementalBindingSetExporterFactory {
+public class KafkaBindingSetExporterFactory implements IncrementalResultExporterFactory {
     private static final Logger log = Logger.getLogger(KafkaBindingSetExporterFactory.class);
     @Override
-    public Optional<IncrementalBindingSetExporter> build(Context context) throws IncrementalExporterFactoryException, ConfigurationException {
-        final KafkaExportParameters exportParams = new KafkaExportParameters(context.getObserverConfiguration().toMap());
-        log.debug("KafkaResultExporterFactory.build(): params.isExportToKafka()=" + exportParams.isExportToKafka());
-        if (exportParams.isExportToKafka()) {
+    public Optional<IncrementalResultExporter> build(Context context) throws IncrementalExporterFactoryException, ConfigurationException {
+        final KafkaBindingSetExporterParameters exportParams = new KafkaBindingSetExporterParameters(context.getObserverConfiguration().toMap());
+        log.debug("KafkaResultExporterFactory.build(): params.isExportToKafka()=" + exportParams.getUseKafkaBindingSetExporter());
+        if (exportParams.getUseKafkaBindingSetExporter()) {
             // Setup Kafka connection
             KafkaProducer<String, VisibilityBindingSet> producer = new KafkaProducer<String, VisibilityBindingSet>(exportParams.listAllConfig());
             // Create the exporter

http://git-wip-us.apache.org/repos/asf/incubator-rya/blob/05147266/extras/rya.pcj.fluo/pcj.fluo.app/src/main/java/org/apache/rya/indexing/pcj/fluo/app/export/kafka/KafkaBindingSetExporterParameters.java
----------------------------------------------------------------------
diff --git a/extras/rya.pcj.fluo/pcj.fluo.app/src/main/java/org/apache/rya/indexing/pcj/fluo/app/export/kafka/KafkaBindingSetExporterParameters.java b/extras/rya.pcj.fluo/pcj.fluo.app/src/main/java/org/apache/rya/indexing/pcj/fluo/app/export/kafka/KafkaBindingSetExporterParameters.java
new file mode 100644
index 0000000..4550a50
--- /dev/null
+++ b/extras/rya.pcj.fluo/pcj.fluo.app/src/main/java/org/apache/rya/indexing/pcj/fluo/app/export/kafka/KafkaBindingSetExporterParameters.java
@@ -0,0 +1,80 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *   http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied.  See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+package org.apache.rya.indexing.pcj.fluo.app.export.kafka;
+
+import java.util.Map;
+import java.util.Properties;
+
+import org.apache.kafka.clients.producer.ProducerConfig;
+import org.apache.kafka.common.serialization.StringSerializer;
+
+import com.google.common.base.Preconditions;
+
+
+public class KafkaBindingSetExporterParameters extends KafkaExportParameterBase {
+    
+    public static final String CONF_USE_KAFKA_BINDING_SET_EXPORTER = "pcj.fluo.export.kafka.bindingset.enabled";
+    public static final String CONF_KAFKA_BINDING_SET_SERIALIZER = "pcj.fluo.export.kafka.bindingset.serializer";
+
+    public KafkaBindingSetExporterParameters(final Map<String, String> params) {
+        super(params);
+    }
+    
+    /**
+     * Instructs the Fluo application to use the Kafka Binding Set Exporter
+     * and sets the appropriate Key/Value Serializer parameters for writing BindingSets to Kafka.
+     * @param useExporter
+     *            - {@code True} if the Fluo application should use the
+     *            {@link KafkaBindingSetExporter}; otherwise {@code false}.
+     */
+    public void setUseKafkaBindingSetExporter(final boolean useExporter) {
+        setBoolean(params, CONF_USE_KAFKA_BINDING_SET_EXPORTER, useExporter);
+    }
+
+    /**
+     * @return {@code True} if the Fluo application should use the {@link KafkaBindingSetExporter}; otherwise
+     *         {@code false}. Defaults to {@code false} if no value is present.
+     */
+    public boolean getUseKafkaBindingSetExporter() {
+        return getBoolean(params, CONF_USE_KAFKA_BINDING_SET_EXPORTER, false);
+    }
+    
+    /**
+     * 
+     * @param serializer - Used for Serializing BindingSets pushed to Kafka
+     */
+    public void setKafkaBindingSetSerializer(String serializer) {
+        params.put(CONF_KAFKA_BINDING_SET_SERIALIZER, Preconditions.checkNotNull(serializer));
+    }
+    
+    /**
+     * @return - Serializer used for Serializing BindingSets to Kafka
+     */
+    public String getKafkaBindingSetSerializer() {
+        return params.getOrDefault(CONF_KAFKA_BINDING_SET_SERIALIZER, KryoVisibilityBindingSetSerializer.class.getName());
+    }
+    
+    @Override
+    public Properties listAllConfig() {
+        Properties props = super.listAllConfig();
+        props.put(ProducerConfig.KEY_SERIALIZER_CLASS_CONFIG, StringSerializer.class.getName());
+        props.put(ProducerConfig.VALUE_SERIALIZER_CLASS_CONFIG, getKafkaBindingSetSerializer());
+        return props;
+    }
+}
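
A minimal usage sketch of the new parameters class (not from the commit; the class name and broker address are placeholders): the setter flags the exporter on, and listAllConfig() folds in the String key serializer plus the value serializer, which defaults to KryoVisibilityBindingSetSerializer when none is set.

import java.util.HashMap;
import java.util.Properties;

import org.apache.rya.indexing.pcj.fluo.app.export.kafka.KafkaBindingSetExporterParameters;

public class BindingSetExportConfigSketch {
    public static void main(final String[] args) {
        final HashMap<String, String> exportParams = new HashMap<>();

        final KafkaBindingSetExporterParameters kafkaParams = new KafkaBindingSetExporterParameters(exportParams);
        kafkaParams.setUseKafkaBindingSetExporter(true);        // enables KafkaBindingSetExporterFactory
        kafkaParams.setKafkaBootStrapServers("localhost:9092"); // placeholder broker

        // The flattened Properties carry the key/value serializers and can be handed to a KafkaProducer.
        final Properties producerConfig = kafkaParams.listAllConfig();
        System.out.println(producerConfig);
    }
}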

http://git-wip-us.apache.org/repos/asf/incubator-rya/blob/05147266/extras/rya.pcj.fluo/pcj.fluo.app/src/main/java/org/apache/rya/indexing/pcj/fluo/app/export/kafka/KafkaExportParameterBase.java
----------------------------------------------------------------------
diff --git a/extras/rya.pcj.fluo/pcj.fluo.app/src/main/java/org/apache/rya/indexing/pcj/fluo/app/export/kafka/KafkaExportParameterBase.java b/extras/rya.pcj.fluo/pcj.fluo.app/src/main/java/org/apache/rya/indexing/pcj/fluo/app/export/kafka/KafkaExportParameterBase.java
new file mode 100644
index 0000000..aab3929
--- /dev/null
+++ b/extras/rya.pcj.fluo/pcj.fluo.app/src/main/java/org/apache/rya/indexing/pcj/fluo/app/export/kafka/KafkaExportParameterBase.java
@@ -0,0 +1,86 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *   http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied.  See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+package org.apache.rya.indexing.pcj.fluo.app.export.kafka;
+
+import java.util.Map;
+import java.util.Optional;
+import java.util.Properties;
+
+import org.apache.fluo.api.observer.Observer;
+import org.apache.kafka.clients.producer.ProducerConfig;
+import org.apache.rya.indexing.pcj.fluo.app.export.ParametersBase;
+
+import jline.internal.Preconditions;
+
+/**
+ * Provides read/write functions to the parameters map that is passed into an
+ * {@link Observer#init(io.fluo.api.observer.Observer.Context)} method related
+ * to PCJ exporting to a Kafka topic.
+ * Remember: a setting doesn't count unless it is added to the params map.
+ */
+
+public class KafkaExportParameterBase extends ParametersBase {
+
+    public KafkaExportParameterBase(final Map<String, String> params) {
+        super(params);
+    }
+
+    /**
+     * Sets the bootstrap servers for reading from and writing to Kafka
+     * @param bootstrapServers - connect string for Kafka brokers
+     */
+    public void setKafkaBootStrapServers(String bootstrapServers) {
+        params.put(ProducerConfig.BOOTSTRAP_SERVERS_CONFIG, Preconditions.checkNotNull(bootstrapServers));
+    }
+    
+    /**
+     * @return Connect string for Kafka servers
+     */
+    public Optional<String> getKafkaBootStrapServers() {
+        return Optional.ofNullable(params.get(ProducerConfig.BOOTSTRAP_SERVERS_CONFIG));
+    }
+
+    /**
+     * Add the properties to the params, NOT keeping them separate from the other params.
+     * Guaranteed by Properties: Each key and its corresponding value in the property list is a string.
+     * 
+     * @param producerConfig
+     */
+    public void addAllProducerConfig(final Properties producerConfig) {
+        for (Object key : producerConfig.keySet().toArray()) {
+            Object value = producerConfig.getProperty(key.toString());
+            this.params.put(key.toString(), value.toString());
+        }
+    }
+
+    /**
+     * Collect all the properties
+     * 
+     * @return all the params (not just kafka producer Configuration) as a {@link Properties}
+     */
+    public Properties listAllConfig() {
+        Properties props = new Properties();
+        for (Object key : params.keySet().toArray()) {
+            Object value = params.get(key.toString());
+            props.put(key.toString(), value.toString());
+        }
+        return props;
+    }
+
+}
\ No newline at end of file
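
The base class keeps everything in the single observer params map rather than a separate producer config, so anything merged in through addAllProducerConfig(...) comes back out of listAllConfig(). A small sketch of that round trip (illustrative only; the class name, broker, and acks setting are placeholders):

import java.util.HashMap;
import java.util.Properties;

import org.apache.kafka.clients.producer.ProducerConfig;
import org.apache.rya.indexing.pcj.fluo.app.export.kafka.KafkaExportParameterBase;

public class ProducerConfigMergeSketch {
    public static void main(final String[] args) {
        final KafkaExportParameterBase base = new KafkaExportParameterBase(new HashMap<>());
        base.setKafkaBootStrapServers("localhost:9092"); // placeholder broker

        final Properties extra = new Properties();
        extra.setProperty(ProducerConfig.ACKS_CONFIG, "all"); // any standard producer setting
        base.addAllProducerConfig(extra);

        // Both the bootstrap servers and the merged acks setting appear in the flattened config.
        System.out.println(base.listAllConfig());
    }
}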

http://git-wip-us.apache.org/repos/asf/incubator-rya/blob/05147266/extras/rya.pcj.fluo/pcj.fluo.app/src/main/java/org/apache/rya/indexing/pcj/fluo/app/export/kafka/KafkaExportParameters.java
----------------------------------------------------------------------
diff --git a/extras/rya.pcj.fluo/pcj.fluo.app/src/main/java/org/apache/rya/indexing/pcj/fluo/app/export/kafka/KafkaExportParameters.java b/extras/rya.pcj.fluo/pcj.fluo.app/src/main/java/org/apache/rya/indexing/pcj/fluo/app/export/kafka/KafkaExportParameters.java
deleted file mode 100644
index 347a2e2..0000000
--- a/extras/rya.pcj.fluo/pcj.fluo.app/src/main/java/org/apache/rya/indexing/pcj/fluo/app/export/kafka/KafkaExportParameters.java
+++ /dev/null
@@ -1,86 +0,0 @@
-/*
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements.  See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership.  The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License.  You may obtain a copy of the License at
- *
- *   http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing,
- * software distributed under the License is distributed on an
- * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
- * KIND, either express or implied.  See the License for the
- * specific language governing permissions and limitations
- * under the License.
- */
-package org.apache.rya.indexing.pcj.fluo.app.export.kafka;
-
-import java.util.Map;
-import java.util.Properties;
-
-import org.apache.fluo.api.observer.Observer;
-import org.apache.rya.indexing.pcj.fluo.app.export.ParametersBase;
-
-/**
- * Provides read/write functions to the parameters map that is passed into an
- * {@link Observer#init(io.fluo.api.observer.Observer.Context)} method related
- * to PCJ exporting to a kafka topic.
- * Remember: if doesn't count unless it is added to params
- */
-
-public class KafkaExportParameters extends ParametersBase {
-
-    public static final String CONF_EXPORT_TO_KAFKA = "pcj.fluo.export.kafka.enabled";
-
-    public KafkaExportParameters(final Map<String, String> params) {
-        super(params);
-    }
-
-    /**
-     * @param isExportToKafka
-     *            - {@code True} if the Fluo application should export
-     *            to Kafka; otherwise {@code false}.
-     */
-    public void setExportToKafka(final boolean isExportToKafka) {
-        setBoolean(params, CONF_EXPORT_TO_KAFKA, isExportToKafka);
-    }
-
-    /**
-     * @return {@code True} if the Fluo application should export to Kafka; otherwise
-     *         {@code false}. Defaults to {@code false} if no value is present.
-     */
-    public boolean isExportToKafka() {
-        return getBoolean(params, CONF_EXPORT_TO_KAFKA, false);
-    }
-
-    /**
-     * Add the properties to the params, NOT keeping them separate from the other params.
-     * Guaranteed by Properties: Each key and its corresponding value in the property list is a string.
-     * 
-     * @param producerConfig
-     */
-    public void addAllProducerConfig(final Properties producerConfig) {
-        for (Object key : producerConfig.keySet().toArray()) {
-            Object value = producerConfig.getProperty(key.toString());
-            this.params.put(key.toString(), value.toString());
-        }
-    }
-
-    /**
-     * Collect all the properties
-     * 
-     * @return all the params (not just kafka producer Configuration) as a {@link Properties}
-     */
-    public Properties listAllConfig() {
-        Properties props = new Properties();
-        for (Object key : params.keySet().toArray()) {
-            Object value = params.get(key.toString());
-            props.put(key.toString(), value.toString());
-        }
-        return props;
-    }
-
-}
\ No newline at end of file

http://git-wip-us.apache.org/repos/asf/incubator-rya/blob/05147266/extras/rya.pcj.fluo/pcj.fluo.app/src/main/java/org/apache/rya/indexing/pcj/fluo/app/export/kafka/KafkaRyaSubGraphExporter.java
----------------------------------------------------------------------
diff --git a/extras/rya.pcj.fluo/pcj.fluo.app/src/main/java/org/apache/rya/indexing/pcj/fluo/app/export/kafka/KafkaRyaSubGraphExporter.java b/extras/rya.pcj.fluo/pcj.fluo.app/src/main/java/org/apache/rya/indexing/pcj/fluo/app/export/kafka/KafkaRyaSubGraphExporter.java
index fa27b46..da26329 100644
--- a/extras/rya.pcj.fluo/pcj.fluo.app/src/main/java/org/apache/rya/indexing/pcj/fluo/app/export/kafka/KafkaRyaSubGraphExporter.java
+++ b/extras/rya.pcj.fluo/pcj.fluo.app/src/main/java/org/apache/rya/indexing/pcj/fluo/app/export/kafka/KafkaRyaSubGraphExporter.java
@@ -19,6 +19,7 @@ package org.apache.rya.indexing.pcj.fluo.app.export.kafka;
  */
 import static com.google.common.base.Preconditions.checkNotNull;
 
+import java.util.Set;
 import java.util.concurrent.Future;
 import java.util.concurrent.TimeUnit;
 
@@ -26,8 +27,13 @@ import org.apache.kafka.clients.producer.KafkaProducer;
 import org.apache.kafka.clients.producer.ProducerRecord;
 import org.apache.kafka.clients.producer.RecordMetadata;
 import org.apache.log4j.Logger;
+import org.apache.rya.api.client.CreatePCJ.ExportStrategy;
+import org.apache.rya.api.client.CreatePCJ.QueryType;
 import org.apache.rya.api.domain.RyaSubGraph;
 import org.apache.rya.indexing.pcj.fluo.app.export.IncrementalBindingSetExporter.ResultExportException;
+
+import com.google.common.collect.Sets;
+
 import org.apache.rya.indexing.pcj.fluo.app.export.IncrementalRyaSubGraphExporter;
 
 /**
@@ -76,4 +82,14 @@ public class KafkaRyaSubGraphExporter implements IncrementalRyaSubGraphExporter
         producer.close(5, TimeUnit.SECONDS);
     }
 
+    @Override
+    public Set<QueryType> getQueryTypes() {
+        return Sets.newHashSet(QueryType.CONSTRUCT);
+    }
+
+    @Override
+    public ExportStrategy getExportStrategy() {
+        return ExportStrategy.KAFKA;
+    }
+
 }

http://git-wip-us.apache.org/repos/asf/incubator-rya/blob/05147266/extras/rya.pcj.fluo/pcj.fluo.app/src/main/java/org/apache/rya/indexing/pcj/fluo/app/export/kafka/KafkaRyaSubGraphExporterFactory.java
----------------------------------------------------------------------
diff --git a/extras/rya.pcj.fluo/pcj.fluo.app/src/main/java/org/apache/rya/indexing/pcj/fluo/app/export/kafka/KafkaRyaSubGraphExporterFactory.java b/extras/rya.pcj.fluo/pcj.fluo.app/src/main/java/org/apache/rya/indexing/pcj/fluo/app/export/kafka/KafkaRyaSubGraphExporterFactory.java
index 2c1e4c0..60e9294 100644
--- a/extras/rya.pcj.fluo/pcj.fluo.app/src/main/java/org/apache/rya/indexing/pcj/fluo/app/export/kafka/KafkaRyaSubGraphExporterFactory.java
+++ b/extras/rya.pcj.fluo/pcj.fluo.app/src/main/java/org/apache/rya/indexing/pcj/fluo/app/export/kafka/KafkaRyaSubGraphExporterFactory.java
@@ -21,10 +21,9 @@ import org.apache.fluo.api.observer.Observer.Context;
 import org.apache.kafka.clients.producer.KafkaProducer;
 import org.apache.log4j.Logger;
 import org.apache.rya.api.domain.RyaSubGraph;
-import org.apache.rya.indexing.pcj.fluo.app.export.IncrementalBindingSetExporterFactory.ConfigurationException;
-import org.apache.rya.indexing.pcj.fluo.app.export.IncrementalBindingSetExporterFactory.IncrementalExporterFactoryException;
+import org.apache.rya.indexing.pcj.fluo.app.export.IncrementalResultExporter;
+import org.apache.rya.indexing.pcj.fluo.app.export.IncrementalResultExporterFactory;
 import org.apache.rya.indexing.pcj.fluo.app.export.IncrementalRyaSubGraphExporter;
-import org.apache.rya.indexing.pcj.fluo.app.export.IncrementalRyaSubGraphExporterFactory;
 
 import com.google.common.base.Optional;
 
@@ -33,9 +32,11 @@ import com.google.common.base.Optional;
  * exporting {@link RyaSubGraph}s from the Rya Fluo application to Kafka.
  *
  */
-public class KafkaRyaSubGraphExporterFactory implements IncrementalRyaSubGraphExporterFactory {
+public class KafkaRyaSubGraphExporterFactory implements IncrementalResultExporterFactory {
 
     private static final Logger log = Logger.getLogger(KafkaRyaSubGraphExporterFactory.class);
+    public static final String CONF_USE_KAFKA_SUBGRAPH_EXPORTER = "pcj.fluo.export.kafka.subgraph.enabled";
+    public static final String CONF_KAFKA_SUBGRAPH_SERIALIZER = "pcj.fluo.export.kafka.subgraph.serializer";
     
     /**
      * Builds a {@link KafkaRyaSubGraphExporter}.
@@ -45,10 +46,10 @@ public class KafkaRyaSubGraphExporterFactory implements IncrementalRyaSubGraphEx
      * @throws ConfigurationException
      */
     @Override
-    public Optional<IncrementalRyaSubGraphExporter> build(Context context) throws IncrementalExporterFactoryException, ConfigurationException {
-        final KafkaExportParameters exportParams = new KafkaExportParameters(context.getObserverConfiguration().toMap());
-        log.debug("KafkaRyaSubGraphExporterFactory.build(): params.isExportToKafka()=" + exportParams.isExportToKafka());
-        if (exportParams.isExportToKafka()) {
+    public Optional<IncrementalResultExporter> build(Context context) throws IncrementalExporterFactoryException, ConfigurationException {
+        final KafkaSubGraphExporterParameters exportParams = new KafkaSubGraphExporterParameters(context.getObserverConfiguration().toMap());
+        log.debug("KafkaRyaSubGraphExporterFactory.build(): params.isExportToKafka()=" + exportParams.getUseKafkaSubgraphExporter());
+        if (exportParams.getUseKafkaSubgraphExporter()) {
             // Setup Kafka connection
             KafkaProducer<String, RyaSubGraph> producer = new KafkaProducer<String, RyaSubGraph>(exportParams.listAllConfig());
             // Create the exporter

http://git-wip-us.apache.org/repos/asf/incubator-rya/blob/05147266/extras/rya.pcj.fluo/pcj.fluo.app/src/main/java/org/apache/rya/indexing/pcj/fluo/app/export/kafka/KafkaSubGraphExporterParameters.java
----------------------------------------------------------------------
diff --git a/extras/rya.pcj.fluo/pcj.fluo.app/src/main/java/org/apache/rya/indexing/pcj/fluo/app/export/kafka/KafkaSubGraphExporterParameters.java b/extras/rya.pcj.fluo/pcj.fluo.app/src/main/java/org/apache/rya/indexing/pcj/fluo/app/export/kafka/KafkaSubGraphExporterParameters.java
new file mode 100644
index 0000000..1472fdd
--- /dev/null
+++ b/extras/rya.pcj.fluo/pcj.fluo.app/src/main/java/org/apache/rya/indexing/pcj/fluo/app/export/kafka/KafkaSubGraphExporterParameters.java
@@ -0,0 +1,81 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *   http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied.  See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+package org.apache.rya.indexing.pcj.fluo.app.export.kafka;
+
+import java.util.Map;
+import java.util.Properties;
+
+import org.apache.kafka.clients.producer.ProducerConfig;
+import org.apache.kafka.common.serialization.StringSerializer;
+
+import com.google.common.base.Preconditions;
+
+
+public class KafkaSubGraphExporterParameters extends KafkaExportParameterBase {
+
+    public static final String CONF_USE_KAFKA_SUBGRAPH_EXPORTER = "pcj.fluo.export.kafka.subgraph.enabled";
+    public static final String CONF_KAFKA_SUBGRAPH_SERIALIZER = "pcj.fluo.export.kafka.subgraph.serializer";
+    
+    public KafkaSubGraphExporterParameters(final Map<String, String> params) {
+        super(params);
+    }
+    
+    /**
+     * Instructs the Fluo application to use the Kafka SubGraph Exporter
+     * and sets the appropriate Key/Value Serializer parameters for writing RyaSubGraphs to Kafka.
+     * @param useExporter
+     *            - {@code True} if the Fluo application should use the
+     *            {@link KafkaRyaSubGraphExporter}; otherwise {@code false}.
+     */
+    public void setUseKafkaSubgraphExporter(final boolean useExporter) {
+        setBoolean(params, CONF_USE_KAFKA_SUBGRAPH_EXPORTER, useExporter);
+    }
+
+    /**
+     * @return {@code True} if the Fluo application should use the {@link KafkaRyaSubGraphExporter}; otherwise
+     *         {@code false}. Defaults to {@code false} if no value is present.
+     */
+    public boolean getUseKafkaSubgraphExporter() {
+        return getBoolean(params, CONF_USE_KAFKA_SUBGRAPH_EXPORTER, false);
+    }
+    
+    /**
+     * 
+     * @param serializer - Used for Serializing RyaSubGraphs pushed to Kafka
+     */
+    public void setKafkaSubGraphSerializer(String serializer) {
+        params.put(CONF_KAFKA_SUBGRAPH_SERIALIZER, Preconditions.checkNotNull(serializer));
+    }
+    
+    /**
+     * @return - Serializer used for Serializing RyaSubGraphs to Kafka
+     */
+    public String getKafkaSubGraphSerializer() {
+        return params.getOrDefault(CONF_KAFKA_SUBGRAPH_SERIALIZER, RyaSubGraphKafkaSerDe.class.getName());
+    }
+    
+    @Override
+    public Properties listAllConfig() {
+        Properties props = super.listAllConfig();
+        props.put(ProducerConfig.KEY_SERIALIZER_CLASS_CONFIG, StringSerializer.class.getName());
+        props.put(ProducerConfig.VALUE_SERIALIZER_CLASS_CONFIG, getKafkaSubGraphSerializer());
+        return props;
+    }
+    
+}
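
Because both parameter wrappers write into the same underlying map, one observer configuration can enable the binding set exporter and the subgraph exporter at once, which is what the updated KafkaExportITBase above does. A stripped-down sketch (not from the commit; class name and broker are placeholders):

import java.util.HashMap;

import org.apache.rya.indexing.pcj.fluo.app.export.kafka.KafkaBindingSetExporterParameters;
import org.apache.rya.indexing.pcj.fluo.app.export.kafka.KafkaSubGraphExporterParameters;

public class CombinedKafkaExportConfigSketch {
    public static void main(final String[] args) {
        final HashMap<String, String> exportParams = new HashMap<>();

        final KafkaBindingSetExporterParameters bindingSetParams = new KafkaBindingSetExporterParameters(exportParams);
        bindingSetParams.setUseKafkaBindingSetExporter(true);
        bindingSetParams.setKafkaBootStrapServers("localhost:9092"); // placeholder broker, shared by both exporters

        final KafkaSubGraphExporterParameters subGraphParams = new KafkaSubGraphExporterParameters(exportParams);
        subGraphParams.setUseKafkaSubgraphExporter(true); // writes into the same exportParams map

        System.out.println(exportParams); // this one map is what gets handed to the observers
    }
}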

http://git-wip-us.apache.org/repos/asf/incubator-rya/blob/05147266/extras/rya.pcj.fluo/pcj.fluo.app/src/main/java/org/apache/rya/indexing/pcj/fluo/app/export/rya/PeriodicBindingSetExporter.java
----------------------------------------------------------------------
diff --git a/extras/rya.pcj.fluo/pcj.fluo.app/src/main/java/org/apache/rya/indexing/pcj/fluo/app/export/rya/PeriodicBindingSetExporter.java b/extras/rya.pcj.fluo/pcj.fluo.app/src/main/java/org/apache/rya/indexing/pcj/fluo/app/export/rya/PeriodicBindingSetExporter.java
new file mode 100644
index 0000000..604462b
--- /dev/null
+++ b/extras/rya.pcj.fluo/pcj.fluo.app/src/main/java/org/apache/rya/indexing/pcj/fluo/app/export/rya/PeriodicBindingSetExporter.java
@@ -0,0 +1,71 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *   http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied.  See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+package org.apache.rya.indexing.pcj.fluo.app.export.rya;
+
+import static com.google.common.base.Preconditions.checkNotNull;
+
+import java.util.Collections;
+import java.util.Set;
+
+import org.apache.rya.api.client.CreatePCJ.ExportStrategy;
+import org.apache.rya.api.client.CreatePCJ.QueryType;
+import org.apache.rya.indexing.pcj.fluo.app.export.IncrementalBindingSetExporter;
+import org.apache.rya.indexing.pcj.storage.PeriodicQueryResultStorage;
+import org.apache.rya.indexing.pcj.storage.PeriodicQueryStorageException;
+import org.apache.rya.indexing.pcj.storage.accumulo.VisibilityBindingSet;
+
+import com.google.common.collect.Sets;
+
+public class PeriodicBindingSetExporter implements IncrementalBindingSetExporter {
+
+    private PeriodicQueryResultStorage periodicStorage;
+    
+    /**
+     * Constructs an instance of {@link PeriodicBindingSetExporter}.
+     *
+     * @param periodicStorage - The {@link PeriodicQueryResultStorage} the new results will be exported to. (not null)
+     */
+    public PeriodicBindingSetExporter(PeriodicQueryResultStorage periodicStorage) {
+        this.periodicStorage = checkNotNull(periodicStorage);
+    }
+    
+    @Override
+    public Set<QueryType> getQueryTypes() {
+        return Sets.newHashSet(QueryType.PERIODIC);
+    }
+
+    @Override
+    public ExportStrategy getExportStrategy() {
+        return ExportStrategy.RYA;
+    }
+
+    @Override
+    public void close() throws Exception {
+    }
+
+    @Override
+    public void export(String queryId, VisibilityBindingSet result) throws ResultExportException {
+        try {
+            periodicStorage.addPeriodicQueryResults(queryId, Collections.singleton(result));
+        } catch (PeriodicQueryStorageException e) {
+            throw new ResultExportException("Could not successfully export the BindingSet: " + result, e);
+        }
+    }
+
+}
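
Used directly, the new exporter just forwards each result into PeriodicQueryResultStorage. A short sketch (illustrative only; the class name is hypothetical and the storage, query id, and binding set are assumed to exist already):

import org.apache.rya.indexing.pcj.fluo.app.export.rya.PeriodicBindingSetExporter;
import org.apache.rya.indexing.pcj.storage.PeriodicQueryResultStorage;
import org.apache.rya.indexing.pcj.storage.accumulo.VisibilityBindingSet;

public class PeriodicExportSketch {
    public static void exportOne(final PeriodicQueryResultStorage storage, final String queryId,
            final VisibilityBindingSet result) throws Exception {
        final PeriodicBindingSetExporter exporter = new PeriodicBindingSetExporter(storage);
        // export(...) wraps the result in a singleton collection and calls
        // storage.addPeriodicQueryResults(queryId, ...), as shown above.
        exporter.export(queryId, result);
        exporter.close();
    }
}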

http://git-wip-us.apache.org/repos/asf/incubator-rya/blob/05147266/extras/rya.pcj.fluo/pcj.fluo.app/src/main/java/org/apache/rya/indexing/pcj/fluo/app/export/rya/PeriodicBindingSetExporterFactory.java
----------------------------------------------------------------------
diff --git a/extras/rya.pcj.fluo/pcj.fluo.app/src/main/java/org/apache/rya/indexing/pcj/fluo/app/export/rya/PeriodicBindingSetExporterFactory.java b/extras/rya.pcj.fluo/pcj.fluo.app/src/main/java/org/apache/rya/indexing/pcj/fluo/app/export/rya/PeriodicBindingSetExporterFactory.java
new file mode 100644
index 0000000..0a0b767
--- /dev/null
+++ b/extras/rya.pcj.fluo/pcj.fluo.app/src/main/java/org/apache/rya/indexing/pcj/fluo/app/export/rya/PeriodicBindingSetExporterFactory.java
@@ -0,0 +1,74 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *   http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied.  See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+package org.apache.rya.indexing.pcj.fluo.app.export.rya;
+
+import static com.google.common.base.Preconditions.checkNotNull;
+
+import org.apache.accumulo.core.client.AccumuloException;
+import org.apache.accumulo.core.client.AccumuloSecurityException;
+import org.apache.accumulo.core.client.Connector;
+import org.apache.accumulo.core.client.Instance;
+import org.apache.accumulo.core.client.ZooKeeperInstance;
+import org.apache.accumulo.core.client.security.tokens.PasswordToken;
+import org.apache.fluo.api.observer.Observer.Context;
+import org.apache.rya.indexing.pcj.fluo.app.export.IncrementalBindingSetExporter;
+import org.apache.rya.indexing.pcj.fluo.app.export.IncrementalResultExporter;
+import org.apache.rya.indexing.pcj.fluo.app.export.IncrementalResultExporterFactory;
+import org.apache.rya.indexing.pcj.storage.PeriodicQueryResultStorage;
+import org.apache.rya.indexing.pcj.storage.accumulo.AccumuloPeriodicQueryResultStorage;
+
+import com.google.common.base.Optional;
+
+public class PeriodicBindingSetExporterFactory implements IncrementalResultExporterFactory {
+
+    @Override
+    public Optional<IncrementalResultExporter> build(Context context) throws IncrementalExporterFactoryException, ConfigurationException {
+        checkNotNull(context);
+
+        // Wrap the context's parameters for parsing.
+        final RyaExportParameters params = new RyaExportParameters( context.getObserverConfiguration().toMap() );
+
+        if(params.getUsePeriodicBindingSetExporter()) {
+            // Setup Zookeeper connection info.
+            final String accumuloInstance = params.getAccumuloInstanceName().get();
+            final String zookeeperServers =  params.getZookeeperServers().get().replaceAll(";", ",");
+            final Instance inst = new ZooKeeperInstance(accumuloInstance, zookeeperServers);
+
+            try {
+                // Setup Accumulo connection info.
+                final String exporterUsername = params.getExporterUsername().get();
+                final String exporterPassword = params.getExporterPassword().get();
+                final Connector accumuloConn = inst.getConnector(exporterUsername, new PasswordToken(exporterPassword));
+
+                // Setup Rya PCJ Storage.
+                final String ryaInstanceName = params.getRyaInstanceName().get();
+                final PeriodicQueryResultStorage periodicStorage = new AccumuloPeriodicQueryResultStorage(accumuloConn, ryaInstanceName);
+                
+                // Make the exporter.
+                final IncrementalBindingSetExporter exporter = new PeriodicBindingSetExporter(periodicStorage);
+                return Optional.of(exporter);
+
+            } catch (final AccumuloException | AccumuloSecurityException e) {
+                throw new IncrementalExporterFactoryException("Could not initialize the Accumulo connector using the provided configuration.", e);
+            }
+        } else {
+            return Optional.absent();
+        }
+    }
+}

http://git-wip-us.apache.org/repos/asf/incubator-rya/blob/05147266/extras/rya.pcj.fluo/pcj.fluo.app/src/main/java/org/apache/rya/indexing/pcj/fluo/app/export/rya/RyaBindingSetExporter.java
----------------------------------------------------------------------
diff --git a/extras/rya.pcj.fluo/pcj.fluo.app/src/main/java/org/apache/rya/indexing/pcj/fluo/app/export/rya/RyaBindingSetExporter.java b/extras/rya.pcj.fluo/pcj.fluo.app/src/main/java/org/apache/rya/indexing/pcj/fluo/app/export/rya/RyaBindingSetExporter.java
index 54c39b7..8a9dbe4 100644
--- a/extras/rya.pcj.fluo/pcj.fluo.app/src/main/java/org/apache/rya/indexing/pcj/fluo/app/export/rya/RyaBindingSetExporter.java
+++ b/extras/rya.pcj.fluo/pcj.fluo.app/src/main/java/org/apache/rya/indexing/pcj/fluo/app/export/rya/RyaBindingSetExporter.java
@@ -22,55 +22,42 @@ import static com.google.common.base.Preconditions.checkNotNull;
 import static java.util.Objects.requireNonNull;
 
 import java.util.Collections;
+import java.util.Set;
 
-import org.apache.fluo.api.client.TransactionBase;
-import org.apache.rya.indexing.pcj.fluo.app.IncrementalUpdateConstants;
+import org.apache.rya.api.client.CreatePCJ.ExportStrategy;
+import org.apache.rya.api.client.CreatePCJ.QueryType;
 import org.apache.rya.indexing.pcj.fluo.app.export.IncrementalBindingSetExporter;
-import org.apache.rya.indexing.pcj.fluo.app.query.FluoQueryColumns;
-import org.apache.rya.indexing.pcj.storage.PeriodicQueryResultStorage;
-import org.apache.rya.indexing.pcj.storage.PeriodicQueryStorageException;
 import org.apache.rya.indexing.pcj.storage.PrecomputedJoinStorage;
 import org.apache.rya.indexing.pcj.storage.PrecomputedJoinStorage.PCJStorageException;
 import org.apache.rya.indexing.pcj.storage.accumulo.VisibilityBindingSet;
 
+import com.google.common.collect.Sets;
+
 /**
  * Incrementally exports SPARQL query results to Accumulo PCJ tables as they are defined by Rya.
  */
 public class RyaBindingSetExporter implements IncrementalBindingSetExporter {
 
     private final PrecomputedJoinStorage pcjStorage;
-    private final PeriodicQueryResultStorage periodicStorage;
 
     /**
      * Constructs an instance of {@link RyaBindingSetExporter}.
      *
      * @param pcjStorage - The PCJ storage the new results will be exported to. (not null)
      */
-    public RyaBindingSetExporter(final PrecomputedJoinStorage pcjStorage, PeriodicQueryResultStorage periodicStorage) {
+    public RyaBindingSetExporter(final PrecomputedJoinStorage pcjStorage) {
         this.pcjStorage = checkNotNull(pcjStorage);
-        this.periodicStorage = checkNotNull(periodicStorage);
     }
 
     @Override
-    public void export(
-            final TransactionBase fluoTx,
-            final String queryId,
-            final VisibilityBindingSet result) throws ResultExportException {
-        requireNonNull(fluoTx);
+    public void export(final String queryId, final VisibilityBindingSet result) throws ResultExportException {
         requireNonNull(queryId);
         requireNonNull(result);
 
-        // Look up the ID the PCJ represents within the PCJ Storage.
-        final String pcjId = fluoTx.gets(queryId, FluoQueryColumns.RYA_PCJ_ID);
-
         try {
-            if (result.hasBinding(IncrementalUpdateConstants.PERIODIC_BIN_ID)) {
-                periodicStorage.addPeriodicQueryResults(pcjId, Collections.singleton(result));
-            } else {
-                pcjStorage.addResults(pcjId, Collections.singleton(result));
-            }
-        } catch (final PCJStorageException | PeriodicQueryStorageException e) {
-            throw new ResultExportException("A result could not be exported to Rya.", e);
+            pcjStorage.addResults(queryId, Collections.singleton(result));
+        } catch (PCJStorageException e) {
+            throw new ResultExportException("Unable to successfully export the result: " + result, e);
         }
     }
 
@@ -78,4 +65,14 @@ public class RyaBindingSetExporter implements IncrementalBindingSetExporter {
     public void close() throws Exception {
         pcjStorage.close();
     }
+
+    @Override
+    public Set<QueryType> getQueryTypes() {
+        return Sets.newHashSet(QueryType.PROJECTION);
+    }
+
+    @Override
+    public ExportStrategy getExportStrategy() {
+        return ExportStrategy.RYA;
+    }
 }
\ No newline at end of file
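
With the periodic path split out into its own exporter, RyaBindingSetExporter now writes every result straight to the PCJ tables under the query id it is handed (which, per this change, appears to be the pcjId itself rather than a Fluo node id; that reading is an inference from the hunk above). A brief sketch with a hypothetical class name:

import org.apache.rya.indexing.pcj.fluo.app.export.rya.RyaBindingSetExporter;
import org.apache.rya.indexing.pcj.storage.PrecomputedJoinStorage;
import org.apache.rya.indexing.pcj.storage.accumulo.VisibilityBindingSet;

public class RyaExportSketch {
    public static void exportOne(final PrecomputedJoinStorage pcjStorage, final String pcjId,
            final VisibilityBindingSet result) throws Exception {
        final RyaBindingSetExporter exporter = new RyaBindingSetExporter(pcjStorage);
        exporter.export(pcjId, result); // goes directly to pcjStorage.addResults(pcjId, ...)
        exporter.close();               // closes the underlying PrecomputedJoinStorage
    }
}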

http://git-wip-us.apache.org/repos/asf/incubator-rya/blob/05147266/extras/rya.pcj.fluo/pcj.fluo.app/src/main/java/org/apache/rya/indexing/pcj/fluo/app/export/rya/RyaBindingSetExporterFactory.java
----------------------------------------------------------------------
diff --git a/extras/rya.pcj.fluo/pcj.fluo.app/src/main/java/org/apache/rya/indexing/pcj/fluo/app/export/rya/RyaBindingSetExporterFactory.java b/extras/rya.pcj.fluo/pcj.fluo.app/src/main/java/org/apache/rya/indexing/pcj/fluo/app/export/rya/RyaBindingSetExporterFactory.java
index 82ce9c6..a87243e 100644
--- a/extras/rya.pcj.fluo/pcj.fluo.app/src/main/java/org/apache/rya/indexing/pcj/fluo/app/export/rya/RyaBindingSetExporterFactory.java
+++ b/extras/rya.pcj.fluo/pcj.fluo.app/src/main/java/org/apache/rya/indexing/pcj/fluo/app/export/rya/RyaBindingSetExporterFactory.java
@@ -26,8 +26,10 @@ import org.apache.accumulo.core.client.Connector;
 import org.apache.accumulo.core.client.Instance;
 import org.apache.accumulo.core.client.ZooKeeperInstance;
 import org.apache.accumulo.core.client.security.tokens.PasswordToken;
+import org.apache.fluo.api.observer.Observer.Context;
 import org.apache.rya.indexing.pcj.fluo.app.export.IncrementalBindingSetExporter;
-import org.apache.rya.indexing.pcj.fluo.app.export.IncrementalBindingSetExporterFactory;
+import org.apache.rya.indexing.pcj.fluo.app.export.IncrementalResultExporter;
+import org.apache.rya.indexing.pcj.fluo.app.export.IncrementalResultExporterFactory;
 import org.apache.rya.indexing.pcj.storage.PeriodicQueryResultStorage;
 import org.apache.rya.indexing.pcj.storage.PrecomputedJoinStorage;
 import org.apache.rya.indexing.pcj.storage.accumulo.AccumuloPcjStorage;
@@ -35,21 +37,19 @@ import org.apache.rya.indexing.pcj.storage.accumulo.AccumuloPeriodicQueryResultS
 
 import com.google.common.base.Optional;
 
-import org.apache.fluo.api.observer.Observer.Context;
-
 /**
  * Creates instances of {@link RyaBindingSetExporter}.
  */
-public class RyaBindingSetExporterFactory implements IncrementalBindingSetExporterFactory {
+public class RyaBindingSetExporterFactory implements IncrementalResultExporterFactory {
 
     @Override
-    public Optional<IncrementalBindingSetExporter> build(final Context context) throws IncrementalExporterFactoryException, ConfigurationException {
+    public Optional<IncrementalResultExporter> build(final Context context) throws IncrementalExporterFactoryException, ConfigurationException {
         checkNotNull(context);
 
         // Wrap the context's parameters for parsing.
         final RyaExportParameters params = new RyaExportParameters( context.getObserverConfiguration().toMap() );
 
-        if(params.isExportToRya()) {
+        if(params.getUseRyaBindingSetExporter()) {
             // Setup Zookeeper connection info.
             final String accumuloInstance = params.getAccumuloInstanceName().get();
             final String zookeeperServers =  params.getZookeeperServers().get().replaceAll(";", ",");
@@ -64,10 +64,9 @@ public class RyaBindingSetExporterFactory implements IncrementalBindingSetExport
                 // Setup Rya PCJ Storage.
                 final String ryaInstanceName = params.getRyaInstanceName().get();
                 final PrecomputedJoinStorage pcjStorage = new AccumuloPcjStorage(accumuloConn, ryaInstanceName);
-                final PeriodicQueryResultStorage periodicStorage = new AccumuloPeriodicQueryResultStorage(accumuloConn, ryaInstanceName);
                 
                 // Make the exporter.
-                final IncrementalBindingSetExporter exporter = new RyaBindingSetExporter(pcjStorage, periodicStorage);
+                final IncrementalBindingSetExporter exporter = new RyaBindingSetExporter(pcjStorage);
                 return Optional.of(exporter);
 
             } catch (final AccumuloException | AccumuloSecurityException e) {

http://git-wip-us.apache.org/repos/asf/incubator-rya/blob/05147266/extras/rya.pcj.fluo/pcj.fluo.app/src/main/java/org/apache/rya/indexing/pcj/fluo/app/export/rya/RyaExportParameters.java
----------------------------------------------------------------------
diff --git a/extras/rya.pcj.fluo/pcj.fluo.app/src/main/java/org/apache/rya/indexing/pcj/fluo/app/export/rya/RyaExportParameters.java b/extras/rya.pcj.fluo/pcj.fluo.app/src/main/java/org/apache/rya/indexing/pcj/fluo/app/export/rya/RyaExportParameters.java
index a1ba5b8..aa5d3cd 100644
--- a/extras/rya.pcj.fluo/pcj.fluo.app/src/main/java/org/apache/rya/indexing/pcj/fluo/app/export/rya/RyaExportParameters.java
+++ b/extras/rya.pcj.fluo/pcj.fluo.app/src/main/java/org/apache/rya/indexing/pcj/fluo/app/export/rya/RyaExportParameters.java
@@ -38,7 +38,8 @@ import org.apache.fluo.api.observer.Observer;
 @DefaultAnnotation(NonNull.class)
 public class RyaExportParameters extends ParametersBase {
 
-    public static final String CONF_EXPORT_TO_RYA = "pcj.fluo.export.rya.enabled";
+    public static final String CONF_USE_RYA_BINDING_SET_EXPORTER = "pcj.fluo.export.rya.bindingset.enabled";
+    public static final String CONF_USE_PERIODIC_BINDING_SET_EXPORTER = "pcj.fluo.export.periodic.bindingset.enabled";
     public static final String CONF_ACCUMULO_INSTANCE_NAME = "pcj.fluo.export.rya.accumuloInstanceName";
     public static final String CONF_ZOOKEEPER_SERVERS = "pcj.fluo.export.rya.zookeeperServers";
     public static final String CONF_EXPORTER_USERNAME = "pcj.fluo.export.rya.exporterUsername";
@@ -57,19 +58,35 @@ public class RyaExportParameters extends ParametersBase {
     }
 
     /**
-     * @param isExportToRya - {@code True} if the Fluo application should export
-     *   to Rya; otherwise {@code false}.
+     * @param useExporter - {@code True} if the Fluo application should use the {@link RyaBindingSetExporter}; otherwise
+     *            {@code false}.
      */
-    public void setExportToRya(final boolean isExportToRya) {
-        setBoolean(params, CONF_EXPORT_TO_RYA, isExportToRya);
+    public void setUseRyaBindingSetExporter(final boolean useExporter) {
+        setBoolean(params, CONF_USE_RYA_BINDING_SET_EXPORTER, useExporter);
     }
 
     /**
-     * @return {@code True} if the Fluo application should export to Rya; otherwise
+     * @return {@code True} if the Fluo application should use the {@link RyaBindingSetExporter}; otherwise
      *   {@code false}. Defaults to {@code false} if no value is present.
      */
-    public boolean isExportToRya() {
-        return getBoolean(params, CONF_EXPORT_TO_RYA, false);
+    public boolean getUseRyaBindingSetExporter() {
+        return getBoolean(params, CONF_USE_RYA_BINDING_SET_EXPORTER, false);
+    }
+    
+    /**
+     * @param useExporter - {@code True} if the Fluo application should use the
+     *            {@link PeriodicBindingSetExporter}; otherwise {@code false}.
+     */
+    public void setUsePeriodicBindingSetExporter(final boolean useExporter) {
+        setBoolean(params, CONF_USE_PERIODIC_BINDING_SET_EXPORTER, useExporter);
+    }
+
+    /**
+     * @return {@code True} if the Fluo application should use the {@link PeriodicBindingSetExporter}; otherwise
+     *         {@code false}. Defaults to {@code false} if no value is present.
+     */
+    public boolean getUsePeriodicBindingSetExporter() {
+        return getBoolean(params, CONF_USE_PERIODIC_BINDING_SET_EXPORTER, false);
     }
 
     /**

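The single pcj.fluo.export.rya.enabled switch above is now split into independent flags for the Rya and periodic binding-set exporters. Below is a minimal sketch of toggling them through RyaExportParameters. It is not part of the commit; the bare HashMap stands in for the map handed to the Fluo observer configuration, and the remaining connection keys are omitted. Both flags default to false, so an observer that is not explicitly configured exports nothing through these exporters.

import java.util.HashMap;
import java.util.Map;

import org.apache.rya.indexing.pcj.fluo.app.export.rya.RyaExportParameters;

public class RyaExportParametersSketch {

    public static Map<String, String> enableRyaExport() {
        final Map<String, String> observerParams = new HashMap<>();
        final RyaExportParameters params = new RyaExportParameters(observerParams);

        // The old single "export to Rya" switch is now two independent exporter flags.
        params.setUseRyaBindingSetExporter(true);
        params.setUsePeriodicBindingSetExporter(false);

        return observerParams;
    }
}
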
http://git-wip-us.apache.org/repos/asf/incubator-rya/blob/05147266/extras/rya.pcj.fluo/pcj.fluo.app/src/main/java/org/apache/rya/indexing/pcj/fluo/app/export/rya/RyaSubGraphExportParameters.java
----------------------------------------------------------------------
diff --git a/extras/rya.pcj.fluo/pcj.fluo.app/src/main/java/org/apache/rya/indexing/pcj/fluo/app/export/rya/RyaSubGraphExportParameters.java b/extras/rya.pcj.fluo/pcj.fluo.app/src/main/java/org/apache/rya/indexing/pcj/fluo/app/export/rya/RyaSubGraphExportParameters.java
new file mode 100644
index 0000000..6a99a7e
--- /dev/null
+++ b/extras/rya.pcj.fluo/pcj.fluo.app/src/main/java/org/apache/rya/indexing/pcj/fluo/app/export/rya/RyaSubGraphExportParameters.java
@@ -0,0 +1,120 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *   http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied.  See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+package org.apache.rya.indexing.pcj.fluo.app.export.rya;
+
+import java.util.Map;
+import java.util.Optional;
+
+import org.apache.fluo.api.config.FluoConfiguration;
+
+import com.google.common.base.Preconditions;
+
+import edu.umd.cs.findbugs.annotations.Nullable;
+
+/**
+ * This class manages the parameters used to construct the RyaSubGraphExporter.
+ *
+ */
+public class RyaSubGraphExportParameters extends RyaExportParameters {
+
+    public static final String CONF_FLUO_INSTANCE = "pcj.fluo.export.rya.fluo.instance";
+    public static final String CONF_FLUO_INSTANCE_ZOOKEEPERS = "pcj.fluo.export.rya.fluo.instance.zookeepers";
+    public static final String CONF_FLUO_TABLE_NAME = "pcj.fluo.export.rya.fluo.table.name";
+    public static final String CONF_USE_RYA_SUBGRAPH_EXPORTER = "pcj.fluo.export.rya.subgraph.enabled";
+    
+    
+    public RyaSubGraphExportParameters(Map<String, String> params) {
+        super(params);
+    }
+    
+    /**
+     * @param useExporter - indicates whether to use the {@link RyaSubGraphExporter}
+     */
+    public void setUseRyaSubGraphExporter(boolean useExporter) {
+        setBoolean(params, CONF_USE_RYA_SUBGRAPH_EXPORTER, useExporter);
+    }
+    
+    /**
+     * @return boolean indicating whether to use the {@link RyaSubGraphExporter}
+     */
+    public boolean getUseRyaSubGraphExporter() {
+        return getBoolean(params, CONF_USE_RYA_SUBGRAPH_EXPORTER, false);
+    }
+    
+    /**
+     * @param fluoInstance - the Accumulo instance that Fluo is running on
+     */
+    public void setFluoInstanceName(String fluoInstance) {
+        params.put(CONF_FLUO_INSTANCE, Preconditions.checkNotNull(fluoInstance));
+    }
+    
+    /**
+     * @return the Accumulo instance that Fluo is running on
+     */
+    public Optional<String> getFluoInstanceName() {
+        return Optional.ofNullable(params.get(CONF_FLUO_INSTANCE));
+    }
+    
+    /**
+     * @param fluoTable - the name of the Accumulo Fluo table
+     */
+    public void setFluoTable(@Nullable String fluoTable) {
+        params.put(CONF_FLUO_TABLE_NAME, fluoTable);
+    }
+    
+    /**
+     * @return the name of the Accumulo Fluo table
+     */
+    public Optional<String> getFluoTable() {
+        return Optional.ofNullable(params.get(CONF_FLUO_TABLE_NAME));
+    }
+    
+    /**
+     * @param zookeepers - the zookeepers for the Fluo instance
+     */
+    public void setFluoZookeepers(@Nullable String zookeepers) {
+        params.put(CONF_FLUO_INSTANCE_ZOOKEEPERS, zookeepers);
+    }
+    
+    /**
+     * @return - the zookeepers for the Fluo instance
+     */
+    public Optional<String> getFluoZookeepers() {
+        return Optional.ofNullable(params.get(CONF_FLUO_INSTANCE_ZOOKEEPERS));
+    }
+    
+    /**
+     * Uses underlying parameter map to build a FluoConfiguration object
+     * @return - FluoConfiguration for creating a FluoClient
+     */
+    public FluoConfiguration getFluoConfiguration() {
+        final FluoConfiguration config = new FluoConfiguration();
+        config.setMiniStartAccumulo(false);
+        config.setAccumuloInstance(params.get(CONF_ACCUMULO_INSTANCE_NAME));
+        config.setAccumuloUser(params.get(CONF_EXPORTER_USERNAME));
+        config.setAccumuloPassword(params.get(CONF_EXPORTER_PASSWORD));
+        config.setInstanceZookeepers(params.get(CONF_FLUO_INSTANCE_ZOOKEEPERS));
+        config.setAccumuloZookeepers(params.get(CONF_ZOOKEEPER_SERVERS));
+
+        config.setApplicationName(params.get(CONF_FLUO_APP_NAME));
+        config.setAccumuloTable(params.get(CONF_FLUO_TABLE_NAME));
+        return config;
+    }
+    
+}

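The new RyaSubGraphExportParameters above carries the Fluo coordinates needed to re-ingest subgraphs. Below is a minimal sketch of populating it and reading the values back. It is not part of the commit; the string values are placeholders, and the inherited Accumulo keys (instance, user, password, app name) would also have to be present before getFluoConfiguration() is asked to assemble a usable FluoConfiguration.

import java.util.HashMap;
import java.util.Map;

import org.apache.rya.indexing.pcj.fluo.app.export.rya.RyaSubGraphExportParameters;

public class RyaSubGraphExportParametersSketch {

    public static void main(final String[] args) {
        final Map<String, String> observerParams = new HashMap<>();
        final RyaSubGraphExportParameters params = new RyaSubGraphExportParameters(observerParams);

        params.setUseRyaSubGraphExporter(true);
        params.setFluoInstanceName("accumuloInstance");   // placeholder value
        params.setFluoTable("fluoAppTable");              // placeholder value
        params.setFluoZookeepers("zoo1:2181,zoo2:2181");  // placeholder value

        // The getters return Optionals because none of the keys are required to be present.
        System.out.println(params.getFluoTable().orElse("<no table configured>"));
    }
}
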
http://git-wip-us.apache.org/repos/asf/incubator-rya/blob/05147266/extras/rya.pcj.fluo/pcj.fluo.app/src/main/java/org/apache/rya/indexing/pcj/fluo/app/export/rya/RyaSubGraphExporter.java
----------------------------------------------------------------------
diff --git a/extras/rya.pcj.fluo/pcj.fluo.app/src/main/java/org/apache/rya/indexing/pcj/fluo/app/export/rya/RyaSubGraphExporter.java b/extras/rya.pcj.fluo/pcj.fluo.app/src/main/java/org/apache/rya/indexing/pcj/fluo/app/export/rya/RyaSubGraphExporter.java
new file mode 100644
index 0000000..e33ea97
--- /dev/null
+++ b/extras/rya.pcj.fluo/pcj.fluo.app/src/main/java/org/apache/rya/indexing/pcj/fluo/app/export/rya/RyaSubGraphExporter.java
@@ -0,0 +1,106 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *   http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied.  See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+package org.apache.rya.indexing.pcj.fluo.app.export.rya;
+
+import static com.google.common.base.Preconditions.checkNotNull;
+
+import java.util.Collection;
+import java.util.Map;
+import java.util.Set;
+
+import org.apache.fluo.api.client.FluoClient;
+import org.apache.fluo.api.client.TransactionBase;
+import org.apache.fluo.api.data.Bytes;
+import org.apache.log4j.Logger;
+import org.apache.rya.api.RdfCloudTripleStoreConstants.TABLE_LAYOUT;
+import org.apache.rya.api.client.CreatePCJ.ExportStrategy;
+import org.apache.rya.api.client.CreatePCJ.QueryType;
+import org.apache.rya.api.domain.RyaStatement;
+import org.apache.rya.api.domain.RyaSubGraph;
+import org.apache.rya.api.resolver.triple.TripleRow;
+import org.apache.rya.api.resolver.triple.TripleRowResolverException;
+import org.apache.rya.api.resolver.triple.impl.WholeRowTripleResolver;
+import org.apache.rya.indexing.pcj.fluo.app.export.IncrementalBindingSetExporter.ResultExportException;
+import org.apache.rya.indexing.pcj.fluo.app.export.IncrementalRyaSubGraphExporter;
+import org.apache.rya.indexing.pcj.fluo.app.query.FluoQueryColumns;
+
+import com.google.common.base.Optional;
+import com.google.common.base.Preconditions;
+import com.google.common.collect.Sets;
+
+/**
+ * This exporter is used to import {@link RyaSubGraph}s back into Fluo. By ingesting
+ * RyaSubGraphs back into Fluo, queries can be chained together.
+ *
+ */
+public class RyaSubGraphExporter implements IncrementalRyaSubGraphExporter {
+
+    private static final Logger log = Logger.getLogger(RyaSubGraphExporter.class);
+    private static final WholeRowTripleResolver TRIPLE_RESOLVER = new WholeRowTripleResolver();
+    private final FluoClient fluo;
+    
+    public RyaSubGraphExporter(FluoClient fluo) {
+        this.fluo = Preconditions.checkNotNull(fluo);
+    }
+    
+    @Override
+    public Set<QueryType> getQueryTypes() {
+        return Sets.newHashSet(QueryType.CONSTRUCT);
+    }
+
+    @Override
+    public ExportStrategy getExportStrategy() {
+        return ExportStrategy.RYA;
+    }
+
+    @Override
+    public void close() throws Exception {
+        fluo.close();
+    }
+
+    @Override
+    public void export(String constructID, RyaSubGraph subgraph) throws ResultExportException {
+        insertTriples(fluo.newTransaction(), subgraph.getStatements());
+    }
+    
+    private void insertTriples(TransactionBase tx, final Collection<RyaStatement> triples) {
+        for (final RyaStatement triple : triples) {
+            Optional<byte[]> visibility = Optional.fromNullable(triple.getColumnVisibility());
+            try {
+                tx.set(Bytes.of(spoFormat(triple)), FluoQueryColumns.TRIPLES, Bytes.of(visibility.or(new byte[0])));
+            } catch (final TripleRowResolverException e) {
+                log.error("Could not convert a Triple into the SPO format: " + triple);
+            }
+        }
+    }
+
+    /**
+     * Converts a triple into a byte[] holding the Rya SPO representation of it.
+     *
+     * @param triple - The triple to convert. (not null)
+     * @return The Rya SPO representation of the triple.
+     * @throws TripleRowResolverException The triple could not be converted.
+     */
+    private static byte[] spoFormat(final RyaStatement triple) throws TripleRowResolverException {
+        checkNotNull(triple);
+        final Map<TABLE_LAYOUT, TripleRow> serialized = TRIPLE_RESOLVER.serialize(triple);
+        final TripleRow spoRow = serialized.get(TABLE_LAYOUT.SPO);
+        return spoRow.getRow();
+    }
+}

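RyaSubGraphExporter above writes each statement of a CONSTRUCT result back into FluoQueryColumns.TRIPLES so that queries can be chained. Below is a minimal sketch of wiring it to a FluoClient. It is not part of the commit; the FluoConfiguration, construct node id, and RyaSubGraph are assumed to be supplied elsewhere, e.g. by the parameters class above and a deserialized result.

import org.apache.fluo.api.client.FluoClient;
import org.apache.fluo.api.client.FluoFactory;
import org.apache.fluo.api.config.FluoConfiguration;

import org.apache.rya.api.domain.RyaSubGraph;
import org.apache.rya.indexing.pcj.fluo.app.export.rya.RyaSubGraphExporter;

public class RyaSubGraphExporterSketch {

    public static void reingest(final FluoConfiguration conf, final String constructNodeId,
            final RyaSubGraph subgraph) throws Exception {
        final FluoClient fluo = FluoFactory.newClient(conf);
        final RyaSubGraphExporter exporter = new RyaSubGraphExporter(fluo);
        try {
            // Each statement of the subgraph is written to FluoQueryColumns.TRIPLES in SPO form.
            exporter.export(constructNodeId, subgraph);
        } finally {
            exporter.close();  // also closes the FluoClient
        }
    }
}
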
http://git-wip-us.apache.org/repos/asf/incubator-rya/blob/05147266/extras/rya.pcj.fluo/pcj.fluo.app/src/main/java/org/apache/rya/indexing/pcj/fluo/app/export/rya/RyaSubGraphExporterFactory.java
----------------------------------------------------------------------
diff --git a/extras/rya.pcj.fluo/pcj.fluo.app/src/main/java/org/apache/rya/indexing/pcj/fluo/app/export/rya/RyaSubGraphExporterFactory.java b/extras/rya.pcj.fluo/pcj.fluo.app/src/main/java/org/apache/rya/indexing/pcj/fluo/app/export/rya/RyaSubGraphExporterFactory.java
new file mode 100644
index 0000000..25f60a5
--- /dev/null
+++ b/extras/rya.pcj.fluo/pcj.fluo.app/src/main/java/org/apache/rya/indexing/pcj/fluo/app/export/rya/RyaSubGraphExporterFactory.java
@@ -0,0 +1,58 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *   http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied.  See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+package org.apache.rya.indexing.pcj.fluo.app.export.rya;
+
+import org.apache.fluo.api.client.FluoClient;
+import org.apache.fluo.api.client.FluoFactory;
+import org.apache.fluo.api.config.FluoConfiguration;
+import org.apache.fluo.api.observer.Observer.Context;
+import org.apache.rya.indexing.pcj.fluo.app.export.IncrementalResultExporter;
+import org.apache.rya.indexing.pcj.fluo.app.export.IncrementalResultExporterFactory;
+
+import com.google.common.base.Optional;
+import com.google.common.base.Preconditions;
+
+/**
+ * Factory class for building {@link RyaSubGraphExporter}s.
+ *
+ */
+public class RyaSubGraphExporterFactory implements IncrementalResultExporterFactory {
+
+    @Override
+    public Optional<IncrementalResultExporter> build(Context context) throws IncrementalExporterFactoryException, ConfigurationException {
+        Preconditions.checkNotNull(context);
+        
+        RyaSubGraphExportParameters params = new RyaSubGraphExportParameters(context.getObserverConfiguration().toMap());
+
+        if (params.getUseRyaSubGraphExporter()) {
+            try {
+                //Get FluoConfiguration from params
+                FluoConfiguration conf = params.getFluoConfiguration();
+                FluoClient fluo = FluoFactory.newClient(conf);
+                
+                //Create exporter
+                RyaSubGraphExporter exporter = new RyaSubGraphExporter(fluo);
+                return Optional.of(exporter);
+            } catch (Exception e) {
+                throw new IncrementalExporterFactoryException("Could not initialize the RyaSubGraphExporter", e);
+            }
+        }
+        return Optional.absent();
+    }
+}

http://git-wip-us.apache.org/repos/asf/incubator-rya/blob/05147266/extras/rya.pcj.fluo/pcj.fluo.app/src/main/java/org/apache/rya/indexing/pcj/fluo/app/observers/AggregationObserver.java
----------------------------------------------------------------------
diff --git a/extras/rya.pcj.fluo/pcj.fluo.app/src/main/java/org/apache/rya/indexing/pcj/fluo/app/observers/AggregationObserver.java b/extras/rya.pcj.fluo/pcj.fluo.app/src/main/java/org/apache/rya/indexing/pcj/fluo/app/observers/AggregationObserver.java
index 1cb1594..6147fa8 100644
--- a/extras/rya.pcj.fluo/pcj.fluo.app/src/main/java/org/apache/rya/indexing/pcj/fluo/app/observers/AggregationObserver.java
+++ b/extras/rya.pcj.fluo/pcj.fluo.app/src/main/java/org/apache/rya/indexing/pcj/fluo/app/observers/AggregationObserver.java
@@ -28,7 +28,6 @@ import org.apache.rya.indexing.pcj.fluo.app.AggregationResultUpdater.ObjectSeria
 import org.apache.rya.indexing.pcj.fluo.app.BindingSetRow;
 import org.apache.rya.indexing.pcj.fluo.app.query.AggregationMetadata;
 import org.apache.rya.indexing.pcj.fluo.app.query.FluoQueryColumns;
-import org.apache.rya.indexing.pcj.fluo.app.query.FluoQueryMetadataDAO;
 import org.apache.rya.indexing.pcj.storage.accumulo.VisibilityBindingSet;
 import org.openrdf.query.BindingSet;
 
@@ -45,8 +44,6 @@ public class AggregationObserver extends BindingSetUpdater {
 
     private static final AggregationStateSerDe STATE_SERDE = new ObjectSerializationAggregationStateSerDe();
 
-    private final FluoQueryMetadataDAO queryDao = new FluoQueryMetadataDAO();
-
     @Override
     public ObservedColumn getObservedColumn() {
         return new ObservedColumn(FluoQueryColumns.AGGREGATION_BINDING_SET, NotificationType.STRONG);

http://git-wip-us.apache.org/repos/asf/incubator-rya/blob/05147266/extras/rya.pcj.fluo/pcj.fluo.app/src/main/java/org/apache/rya/indexing/pcj/fluo/app/observers/BindingSetUpdater.java
----------------------------------------------------------------------
diff --git a/extras/rya.pcj.fluo/pcj.fluo.app/src/main/java/org/apache/rya/indexing/pcj/fluo/app/observers/BindingSetUpdater.java b/extras/rya.pcj.fluo/pcj.fluo.app/src/main/java/org/apache/rya/indexing/pcj/fluo/app/observers/BindingSetUpdater.java
index 7d0fd5e..c0cfa1d 100644
--- a/extras/rya.pcj.fluo/pcj.fluo.app/src/main/java/org/apache/rya/indexing/pcj/fluo/app/observers/BindingSetUpdater.java
+++ b/extras/rya.pcj.fluo/pcj.fluo.app/src/main/java/org/apache/rya/indexing/pcj/fluo/app/observers/BindingSetUpdater.java
@@ -55,7 +55,7 @@ import edu.umd.cs.findbugs.annotations.NonNull;
 public abstract class BindingSetUpdater extends AbstractObserver {
     private static final Logger log = Logger.getLogger(BindingSetUpdater.class);
     // DAO
-    private final FluoQueryMetadataDAO queryDao = new FluoQueryMetadataDAO();
+    protected final FluoQueryMetadataDAO queryDao = new FluoQueryMetadataDAO();
 
     // Updaters
     private final JoinResultUpdater joinUpdater = new JoinResultUpdater();

http://git-wip-us.apache.org/repos/asf/incubator-rya/blob/05147266/extras/rya.pcj.fluo/pcj.fluo.app/src/main/java/org/apache/rya/indexing/pcj/fluo/app/observers/ConstructQueryResultObserver.java
----------------------------------------------------------------------
diff --git a/extras/rya.pcj.fluo/pcj.fluo.app/src/main/java/org/apache/rya/indexing/pcj/fluo/app/observers/ConstructQueryResultObserver.java b/extras/rya.pcj.fluo/pcj.fluo.app/src/main/java/org/apache/rya/indexing/pcj/fluo/app/observers/ConstructQueryResultObserver.java
index f0fef07..61e7244 100644
--- a/extras/rya.pcj.fluo/pcj.fluo.app/src/main/java/org/apache/rya/indexing/pcj/fluo/app/observers/ConstructQueryResultObserver.java
+++ b/extras/rya.pcj.fluo/pcj.fluo.app/src/main/java/org/apache/rya/indexing/pcj/fluo/app/observers/ConstructQueryResultObserver.java
@@ -1,4 +1,3 @@
-package org.apache.rya.indexing.pcj.fluo.app.observers;
 /*
  * Licensed to the Apache Software Foundation (ASF) under one
  * or more contributor license agreements.  See the NOTICE file
@@ -17,54 +16,20 @@ package org.apache.rya.indexing.pcj.fluo.app.observers;
  * specific language governing permissions and limitations
  * under the License.
  */
-import static com.google.common.base.Preconditions.checkNotNull;
-
-/*
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements.  See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership.  The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License.  You may obtain a copy of the License at
- *
- *   http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing,
- * software distributed under the License is distributed on an
- * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
- * KIND, either express or implied.  See the License for the
- * specific language governing permissions and limitations
- * under the License.
- */
-import java.io.UnsupportedEncodingException;
-import java.util.Collection;
-import java.util.HashMap;
-import java.util.Map;
-import java.util.Set;
+package org.apache.rya.indexing.pcj.fluo.app.observers;
 
 import org.apache.fluo.api.client.TransactionBase;
 import org.apache.fluo.api.data.Bytes;
 import org.apache.fluo.api.data.Column;
 import org.apache.fluo.api.observer.AbstractObserver;
 import org.apache.log4j.Logger;
-import org.apache.rya.accumulo.utils.VisibilitySimplifier;
-import org.apache.rya.api.RdfCloudTripleStoreConstants.TABLE_LAYOUT;
 import org.apache.rya.api.domain.RyaStatement;
-import org.apache.rya.api.domain.RyaSubGraph;
-import org.apache.rya.api.resolver.triple.TripleRow;
-import org.apache.rya.api.resolver.triple.TripleRowResolverException;
-import org.apache.rya.api.resolver.triple.impl.WholeRowTripleResolver;
-import org.apache.rya.indexing.pcj.fluo.app.export.IncrementalBindingSetExporterFactory.IncrementalExporterFactoryException;
+import org.apache.rya.indexing.pcj.fluo.app.BindingSetRow;
+import org.apache.rya.indexing.pcj.fluo.app.IncrementalUpdateConstants;
+import org.apache.rya.indexing.pcj.fluo.app.NodeType;
 import org.apache.rya.indexing.pcj.fluo.app.export.IncrementalRyaSubGraphExporter;
-import org.apache.rya.indexing.pcj.fluo.app.export.IncrementalRyaSubGraphExporterFactory;
-import org.apache.rya.indexing.pcj.fluo.app.export.kafka.KafkaRyaSubGraphExporterFactory;
-import org.apache.rya.indexing.pcj.fluo.app.export.kafka.RyaSubGraphKafkaSerDe;
 import org.apache.rya.indexing.pcj.fluo.app.query.FluoQueryColumns;
 
-import com.google.common.base.Optional;
-import com.google.common.collect.ImmutableSet;
-
 /**
  * Monitors the Column {@link FluoQueryColumns#CONSTRUCT_STATEMENTS} for new
  * Construct Query {@link RyaStatement}s and exports the results using the
@@ -74,49 +39,7 @@ import com.google.common.collect.ImmutableSet;
  */
 public class ConstructQueryResultObserver extends AbstractObserver {
 
-    private static final WholeRowTripleResolver TRIPLE_RESOLVER = new WholeRowTripleResolver();
     private static final Logger log = Logger.getLogger(ConstructQueryResultObserver.class);
-    private static final RyaSubGraphKafkaSerDe serializer = new RyaSubGraphKafkaSerDe();
-
-    /**
-     * We expect to see the same expressions a lot, so we cache the simplified
-     * forms.
-     */
-    private final Map<String, String> simplifiedVisibilities = new HashMap<>();
-
-    /**
-     * Builders for each type of result exporter we support.
-     */
-    private static final ImmutableSet<IncrementalRyaSubGraphExporterFactory> factories = ImmutableSet
-            .<IncrementalRyaSubGraphExporterFactory> builder().add(new KafkaRyaSubGraphExporterFactory()).build();
-
-    /**
-     * The exporters that are configured.
-     */
-    private ImmutableSet<IncrementalRyaSubGraphExporter> exporters = null;
-
-    /**
-     * Before running, determine which exporters are configured and set them up.
-     */
-    @Override
-    public void init(final Context context) {
-        final ImmutableSet.Builder<IncrementalRyaSubGraphExporter> exportersBuilder = ImmutableSet.builder();
-
-        for (final IncrementalRyaSubGraphExporterFactory builder : factories) {
-            try {
-                log.debug("ConstructQueryResultObserver.init(): for each exportersBuilder=" + builder);
-
-                final Optional<IncrementalRyaSubGraphExporter> exporter = builder.build(context);
-                if (exporter.isPresent()) {
-                    exportersBuilder.add(exporter.get());
-                }
-            } catch (final IncrementalExporterFactoryException e) {
-                log.error("Could not initialize a result exporter.", e);
-            }
-        }
-
-        exporters = exportersBuilder.build();
-    }
 
     @Override
     public ObservedColumn getObservedColumn() {
@@ -125,74 +48,20 @@ public class ConstructQueryResultObserver extends AbstractObserver {
 
     @Override
     public void process(TransactionBase tx, Bytes row, Column col) throws Exception {
+        
+        //Build row for parent that result will be written to
+        BindingSetRow bsRow = BindingSetRow.make(row);
+        String constructNodeId = bsRow.getNodeId();
+        String bsString = bsRow.getBindingSetString();
+        String parentNodeId = tx.get(Bytes.of(constructNodeId), FluoQueryColumns.CONSTRUCT_PARENT_NODE_ID).toString();
+        String rowString = parentNodeId + IncrementalUpdateConstants.NODEID_BS_DELIM + bsString;
+        
+        //Get NodeType of the parent node
+        NodeType parentType = NodeType.fromNodeId(parentNodeId).get();
+        //Get data for the ConstructQuery result
         Bytes bytes = tx.get(row, col);
-        RyaSubGraph subgraph = serializer.fromBytes(bytes.toArray());
-        Set<RyaStatement> statements = subgraph.getStatements();
-        if (statements.size() > 0) {
-            byte[] visibility = statements.iterator().next().getColumnVisibility();
-            visibility = simplifyVisibilities(visibility);
-            for(RyaStatement statement: statements) {
-                statement.setColumnVisibility(visibility);
-            }
-            subgraph.setStatements(statements);
-
-            for (IncrementalRyaSubGraphExporter exporter : exporters) {
-                exporter.export(row.toString(), subgraph);
-            }
-        }
-        //add generated triples back into Fluo for chaining queries together
-        insertTriples(tx, subgraph.getStatements());
-    }
-    
-    @Override
-    public void close() {
-        if(exporters != null) {
-            for(final IncrementalRyaSubGraphExporter exporter : exporters) {
-                try {
-                    exporter.close();
-                } catch(final Exception e) {
-                    log.warn("Problem encountered while closing one of the exporters.", e);
-                }
-            }
-        }
-    }
-
-    private byte[] simplifyVisibilities(byte[] visibilityBytes) throws UnsupportedEncodingException {
-        // Simplify the result's visibilities and cache new simplified
-        // visibilities
-        String visibility = new String(visibilityBytes, "UTF-8");
-        if (!simplifiedVisibilities.containsKey(visibility)) {
-            String simplified = VisibilitySimplifier.simplify(visibility);
-            simplifiedVisibilities.put(visibility, simplified);
-        }
-        return simplifiedVisibilities.get(visibility).getBytes("UTF-8");
+        //Write result to parent
+        tx.set(Bytes.of(rowString), parentType.getResultColumn(), bytes);
     }
-    
-    private void insertTriples(TransactionBase tx, final Collection<RyaStatement> triples) {
-
-        for (final RyaStatement triple : triples) {
-            Optional<byte[]> visibility = Optional.fromNullable(triple.getColumnVisibility());
-            try {
-                tx.set(Bytes.of(spoFormat(triple)), FluoQueryColumns.TRIPLES, Bytes.of(visibility.or(new byte[0])));
-            } catch (final TripleRowResolverException e) {
-                log.error("Could not convert a Triple into the SPO format: " + triple);
-            }
-        }
-    }
-    
-
-    /**
-     * Converts a triple into a byte[] holding the Rya SPO representation of it.
-     *
-     * @param triple - The triple to convert. (not null)
-     * @return The Rya SPO representation of the triple.
-     * @throws TripleRowResolverException The triple could not be converted.
-     */
-    public static byte[] spoFormat(final RyaStatement triple) throws TripleRowResolverException {
-        checkNotNull(triple);
-        final Map<TABLE_LAYOUT, TripleRow> serialized = TRIPLE_RESOLVER.serialize(triple);
-        final TripleRow spoRow = serialized.get(TABLE_LAYOUT.SPO);
-        return spoRow.getRow();
-    }
-
+   
 }

http://git-wip-us.apache.org/repos/asf/incubator-rya/blob/05147266/extras/rya.pcj.fluo/pcj.fluo.app/src/main/java/org/apache/rya/indexing/pcj/fluo/app/observers/FilterObserver.java
----------------------------------------------------------------------
diff --git a/extras/rya.pcj.fluo/pcj.fluo.app/src/main/java/org/apache/rya/indexing/pcj/fluo/app/observers/FilterObserver.java b/extras/rya.pcj.fluo/pcj.fluo.app/src/main/java/org/apache/rya/indexing/pcj/fluo/app/observers/FilterObserver.java
index ee03334..b4edfea 100644
--- a/extras/rya.pcj.fluo/pcj.fluo.app/src/main/java/org/apache/rya/indexing/pcj/fluo/app/observers/FilterObserver.java
+++ b/extras/rya.pcj.fluo/pcj.fluo.app/src/main/java/org/apache/rya/indexing/pcj/fluo/app/observers/FilterObserver.java
@@ -25,7 +25,6 @@ import org.apache.fluo.api.data.Bytes;
 import org.apache.rya.indexing.pcj.fluo.app.BindingSetRow;
 import org.apache.rya.indexing.pcj.fluo.app.query.FilterMetadata;
 import org.apache.rya.indexing.pcj.fluo.app.query.FluoQueryColumns;
-import org.apache.rya.indexing.pcj.fluo.app.query.FluoQueryMetadataDAO;
 import org.apache.rya.indexing.pcj.storage.accumulo.VisibilityBindingSet;
 import org.apache.rya.indexing.pcj.storage.accumulo.VisibilityBindingSetSerDe;
 import org.openrdf.query.BindingSet;
@@ -39,8 +38,6 @@ public class FilterObserver extends BindingSetUpdater {
 
     private static final VisibilityBindingSetSerDe BS_SERDE = new VisibilityBindingSetSerDe();
 
-    private final FluoQueryMetadataDAO queryDao = new FluoQueryMetadataDAO();
-
     @Override
     public ObservedColumn getObservedColumn() {
         return new ObservedColumn(FluoQueryColumns.FILTER_BINDING_SET, NotificationType.STRONG);

http://git-wip-us.apache.org/repos/asf/incubator-rya/blob/05147266/extras/rya.pcj.fluo/pcj.fluo.app/src/main/java/org/apache/rya/indexing/pcj/fluo/app/observers/JoinObserver.java
----------------------------------------------------------------------
diff --git a/extras/rya.pcj.fluo/pcj.fluo.app/src/main/java/org/apache/rya/indexing/pcj/fluo/app/observers/JoinObserver.java b/extras/rya.pcj.fluo/pcj.fluo.app/src/main/java/org/apache/rya/indexing/pcj/fluo/app/observers/JoinObserver.java
index 28e31d8..c56a98f 100644
--- a/extras/rya.pcj.fluo/pcj.fluo.app/src/main/java/org/apache/rya/indexing/pcj/fluo/app/observers/JoinObserver.java
+++ b/extras/rya.pcj.fluo/pcj.fluo.app/src/main/java/org/apache/rya/indexing/pcj/fluo/app/observers/JoinObserver.java
@@ -24,7 +24,6 @@ import org.apache.fluo.api.client.TransactionBase;
 import org.apache.fluo.api.data.Bytes;
 import org.apache.rya.indexing.pcj.fluo.app.BindingSetRow;
 import org.apache.rya.indexing.pcj.fluo.app.query.FluoQueryColumns;
-import org.apache.rya.indexing.pcj.fluo.app.query.FluoQueryMetadataDAO;
 import org.apache.rya.indexing.pcj.fluo.app.query.JoinMetadata;
 import org.apache.rya.indexing.pcj.storage.accumulo.VisibilityBindingSet;
 import org.apache.rya.indexing.pcj.storage.accumulo.VisibilityBindingSetSerDe;
@@ -39,8 +38,6 @@ public class JoinObserver extends BindingSetUpdater {
 
     private static final VisibilityBindingSetSerDe BS_SERDE = new VisibilityBindingSetSerDe();
 
-    private final FluoQueryMetadataDAO queryDao = new FluoQueryMetadataDAO();
-
     @Override
     public ObservedColumn getObservedColumn() {
         return new ObservedColumn(FluoQueryColumns.JOIN_BINDING_SET, NotificationType.STRONG);

http://git-wip-us.apache.org/repos/asf/incubator-rya/blob/05147266/extras/rya.pcj.fluo/pcj.fluo.app/src/main/java/org/apache/rya/indexing/pcj/fluo/app/observers/PeriodicQueryObserver.java
----------------------------------------------------------------------
diff --git a/extras/rya.pcj.fluo/pcj.fluo.app/src/main/java/org/apache/rya/indexing/pcj/fluo/app/observers/PeriodicQueryObserver.java b/extras/rya.pcj.fluo/pcj.fluo.app/src/main/java/org/apache/rya/indexing/pcj/fluo/app/observers/PeriodicQueryObserver.java
index e7072e7..7d96baa 100644
--- a/extras/rya.pcj.fluo/pcj.fluo.app/src/main/java/org/apache/rya/indexing/pcj/fluo/app/observers/PeriodicQueryObserver.java
+++ b/extras/rya.pcj.fluo/pcj.fluo.app/src/main/java/org/apache/rya/indexing/pcj/fluo/app/observers/PeriodicQueryObserver.java
@@ -25,7 +25,6 @@ import org.apache.fluo.api.data.Bytes;
 import org.apache.rya.indexing.pcj.fluo.app.BindingSetRow;
 import org.apache.rya.indexing.pcj.fluo.app.PeriodicQueryUpdater;
 import org.apache.rya.indexing.pcj.fluo.app.query.FluoQueryColumns;
-import org.apache.rya.indexing.pcj.fluo.app.query.FluoQueryMetadataDAO;
 import org.apache.rya.indexing.pcj.fluo.app.query.PeriodicQueryMetadata;
 import org.apache.rya.indexing.pcj.storage.accumulo.VisibilityBindingSet;
 import org.apache.rya.indexing.pcj.storage.accumulo.VisibilityBindingSetSerDe;
@@ -42,7 +41,6 @@ import org.apache.rya.indexing.pcj.storage.accumulo.VisibilityBindingSetSerDe;
 public class PeriodicQueryObserver extends BindingSetUpdater {
 
     private static final VisibilityBindingSetSerDe BS_SERDE = new VisibilityBindingSetSerDe();
-    private final FluoQueryMetadataDAO queryDao = new FluoQueryMetadataDAO();
 
     @Override
     public ObservedColumn getObservedColumn() {

http://git-wip-us.apache.org/repos/asf/incubator-rya/blob/05147266/extras/rya.pcj.fluo/pcj.fluo.app/src/main/java/org/apache/rya/indexing/pcj/fluo/app/observers/ProjectionObserver.java
----------------------------------------------------------------------
diff --git a/extras/rya.pcj.fluo/pcj.fluo.app/src/main/java/org/apache/rya/indexing/pcj/fluo/app/observers/ProjectionObserver.java b/extras/rya.pcj.fluo/pcj.fluo.app/src/main/java/org/apache/rya/indexing/pcj/fluo/app/observers/ProjectionObserver.java
index b712606..5d73b2e 100644
--- a/extras/rya.pcj.fluo/pcj.fluo.app/src/main/java/org/apache/rya/indexing/pcj/fluo/app/observers/ProjectionObserver.java
+++ b/extras/rya.pcj.fluo/pcj.fluo.app/src/main/java/org/apache/rya/indexing/pcj/fluo/app/observers/ProjectionObserver.java
@@ -25,7 +25,6 @@ import org.apache.fluo.api.data.Bytes;
 import org.apache.log4j.Logger;
 import org.apache.rya.indexing.pcj.fluo.app.BindingSetRow;
 import org.apache.rya.indexing.pcj.fluo.app.query.FluoQueryColumns;
-import org.apache.rya.indexing.pcj.fluo.app.query.FluoQueryMetadataDAO;
 import org.apache.rya.indexing.pcj.fluo.app.query.ProjectionMetadata;
 import org.apache.rya.indexing.pcj.storage.accumulo.VisibilityBindingSet;
 import org.apache.rya.indexing.pcj.storage.accumulo.VisibilityBindingSetSerDe;
@@ -37,7 +36,6 @@ public class ProjectionObserver extends BindingSetUpdater {
     private static final Logger log = Logger.getLogger(ProjectionObserver.class);
 
     private static final VisibilityBindingSetSerDe BS_SERDE = new VisibilityBindingSetSerDe();
-    private final FluoQueryMetadataDAO queryDao = new FluoQueryMetadataDAO();
 
     @Override
     public ObservedColumn getObservedColumn() {

http://git-wip-us.apache.org/repos/asf/incubator-rya/blob/05147266/extras/rya.pcj.fluo/pcj.fluo.app/src/main/java/org/apache/rya/indexing/pcj/fluo/app/observers/QueryResultObserver.java
----------------------------------------------------------------------
diff --git a/extras/rya.pcj.fluo/pcj.fluo.app/src/main/java/org/apache/rya/indexing/pcj/fluo/app/observers/QueryResultObserver.java b/extras/rya.pcj.fluo/pcj.fluo.app/src/main/java/org/apache/rya/indexing/pcj/fluo/app/observers/QueryResultObserver.java
index e6368ba..ba7beee 100644
--- a/extras/rya.pcj.fluo/pcj.fluo.app/src/main/java/org/apache/rya/indexing/pcj/fluo/app/observers/QueryResultObserver.java
+++ b/extras/rya.pcj.fluo/pcj.fluo.app/src/main/java/org/apache/rya/indexing/pcj/fluo/app/observers/QueryResultObserver.java
@@ -20,24 +20,24 @@ package org.apache.rya.indexing.pcj.fluo.app.observers;
 
 import static org.apache.rya.indexing.pcj.fluo.app.IncrementalUpdateConstants.NODEID_BS_DELIM;
 
-import java.util.HashMap;
-import java.util.Map;
-
 import org.apache.fluo.api.client.TransactionBase;
 import org.apache.fluo.api.data.Bytes;
 import org.apache.fluo.api.data.Column;
 import org.apache.fluo.api.observer.AbstractObserver;
 import org.apache.log4j.Logger;
-import org.apache.rya.accumulo.utils.VisibilitySimplifier;
+import org.apache.rya.indexing.pcj.fluo.app.export.ExporterManager;
 import org.apache.rya.indexing.pcj.fluo.app.export.IncrementalBindingSetExporter;
-import org.apache.rya.indexing.pcj.fluo.app.export.IncrementalBindingSetExporter.ResultExportException;
-import org.apache.rya.indexing.pcj.fluo.app.export.IncrementalBindingSetExporterFactory;
-import org.apache.rya.indexing.pcj.fluo.app.export.IncrementalBindingSetExporterFactory.IncrementalExporterFactoryException;
+import org.apache.rya.indexing.pcj.fluo.app.export.IncrementalResultExporter;
+import org.apache.rya.indexing.pcj.fluo.app.export.IncrementalResultExporterFactory;
+import org.apache.rya.indexing.pcj.fluo.app.export.IncrementalResultExporterFactory.IncrementalExporterFactoryException;
 import org.apache.rya.indexing.pcj.fluo.app.export.kafka.KafkaBindingSetExporterFactory;
+import org.apache.rya.indexing.pcj.fluo.app.export.kafka.KafkaRyaSubGraphExporterFactory;
+import org.apache.rya.indexing.pcj.fluo.app.export.rya.PeriodicBindingSetExporterFactory;
 import org.apache.rya.indexing.pcj.fluo.app.export.rya.RyaBindingSetExporterFactory;
+import org.apache.rya.indexing.pcj.fluo.app.export.rya.RyaSubGraphExporterFactory;
 import org.apache.rya.indexing.pcj.fluo.app.query.FluoQueryColumns;
-import org.apache.rya.indexing.pcj.storage.accumulo.VisibilityBindingSet;
-import org.apache.rya.indexing.pcj.storage.accumulo.VisibilityBindingSetSerDe;
+import org.apache.rya.indexing.pcj.fluo.app.query.FluoQueryMetadataDAO;
+import org.apache.rya.indexing.pcj.fluo.app.query.QueryMetadata;
 
 import com.google.common.base.Optional;
 import com.google.common.collect.ImmutableSet;
@@ -46,28 +46,23 @@ import com.google.common.collect.ImmutableSet;
  * Performs incremental result exporting to the configured destinations.
  */
 public class QueryResultObserver extends AbstractObserver {
+    
     private static final Logger log = Logger.getLogger(QueryResultObserver.class);
-
-    private static final VisibilityBindingSetSerDe BS_SERDE = new VisibilityBindingSetSerDe();
-
-    /**
-     * We expect to see the same expressions a lot, so we cache the simplified forms.
-     */
-    private final Map<String, String> simplifiedVisibilities = new HashMap<>();
-
+    private static final FluoQueryMetadataDAO dao = new FluoQueryMetadataDAO();
+    
     /**
-     * Builders for each type of result exporter we support.
+     * Builders for each type of {@link IncrementalResultExporter} we support.
      */
-    private static final ImmutableSet<IncrementalBindingSetExporterFactory> factories =
-            ImmutableSet.<IncrementalBindingSetExporterFactory>builder()
+    private static final ImmutableSet<IncrementalResultExporterFactory> factories =
+            ImmutableSet.<IncrementalResultExporterFactory>builder()
                 .add(new RyaBindingSetExporterFactory())
                 .add(new KafkaBindingSetExporterFactory())
+                .add(new KafkaRyaSubGraphExporterFactory())
+                .add(new RyaSubGraphExporterFactory())
+                .add(new PeriodicBindingSetExporterFactory())
                 .build();
-
-    /**
-     * The exporters that are configured.
-     */
-    private ImmutableSet<IncrementalBindingSetExporter> exporters = null;
+    
+    private ExporterManager exporterManager;
 
     @Override
     public ObservedColumn getObservedColumn() {
@@ -79,63 +74,46 @@ public class QueryResultObserver extends AbstractObserver {
      */
     @Override
     public void init(final Context context) {
-        final ImmutableSet.Builder<IncrementalBindingSetExporter> exportersBuilder = ImmutableSet.builder();
-
-        for(final IncrementalBindingSetExporterFactory builder : factories) {
+        
+        ExporterManager.Builder managerBuilder = ExporterManager.builder();
+        
+        for(final IncrementalResultExporterFactory builder : factories) {
             try {
                 log.debug("QueryResultObserver.init(): for each exportersBuilder=" + builder);
 
-                final Optional<IncrementalBindingSetExporter> exporter = builder.build(context);
+                final Optional<IncrementalResultExporter> exporter = builder.build(context);
                 if(exporter.isPresent()) {
-                    exportersBuilder.add(exporter.get());
+                    managerBuilder.addIncrementalResultExporter(exporter.get());
                 }
             } catch (final IncrementalExporterFactoryException e) {
                 log.error("Could not initialize a result exporter.", e);
             }
         }
-
-        exporters = exportersBuilder.build();
+        
+        exporterManager = managerBuilder.build();
     }
+    
 
     @Override
     public void process(final TransactionBase tx, final Bytes brow, final Column col) throws Exception {
         final String row = brow.toString();
 
-        // Read the SPARQL query and it Binding Set from the row id.
+        // Read the queryId from the row and get the QueryMetadata.
         final String queryId = row.split(NODEID_BS_DELIM)[0];
+        final QueryMetadata metadata = dao.readQueryMetadata(tx, queryId);
 
         // Read the Child Binding Set that will be exported.
         final Bytes valueBytes = tx.get(brow, col);
-        final VisibilityBindingSet result = BS_SERDE.deserialize(valueBytes);
         
-        // Simplify the result's visibilities.
-        final String visibility = result.getVisibility();
-        if(!simplifiedVisibilities.containsKey(visibility)) {
-            final String simplified = VisibilitySimplifier.simplify( visibility );
-            simplifiedVisibilities.put(visibility, simplified);
-        }
-        result.setVisibility( simplifiedVisibilities.get(visibility) );
-
-        // Export the result using each of the provided exporters.
-        for(final IncrementalBindingSetExporter exporter : exporters) {
-            try {
-                exporter.export(tx, queryId, result);
-            } catch (final ResultExportException e) {
-                log.error("Could not export a binding set for query '" + queryId + "'. Binding Set: " + result, e);
-            }
-        }
+        exporterManager.export(metadata.getQueryType(), metadata.getExportStrategies(), queryId, valueBytes);
     }
 
     @Override
     public void close() {
-        if(exporters != null) {
-            for(final IncrementalBindingSetExporter exporter : exporters) {
-                try {
-                    exporter.close();
-                } catch(final Exception e) {
-                    log.warn("Problem encountered while closing one of the exporters.", e);
-                }
-            }
+        try {
+            exporterManager.close();
+        } catch (Exception e) {
+            log.warn("Encountered problems closing the ExporterManager.", e);
         }
     }
 }
\ No newline at end of file

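QueryResultObserver above now delegates exporting to an ExporterManager built from whichever factories produce an exporter for the observer Context. Below is a condensed sketch of that wiring using just two of the factories. It is not part of the commit; the Context is assumed to already carry the export parameters shown earlier, and factories whose flag is unset return Optional.absent() and are skipped.

import org.apache.fluo.api.observer.Observer.Context;

import org.apache.rya.indexing.pcj.fluo.app.export.ExporterManager;
import org.apache.rya.indexing.pcj.fluo.app.export.IncrementalResultExporter;
import org.apache.rya.indexing.pcj.fluo.app.export.IncrementalResultExporterFactory;
import org.apache.rya.indexing.pcj.fluo.app.export.rya.RyaBindingSetExporterFactory;
import org.apache.rya.indexing.pcj.fluo.app.export.rya.RyaSubGraphExporterFactory;

import com.google.common.base.Optional;

public class ExporterManagerSketch {

    public static ExporterManager buildManager(final Context context) throws Exception {
        final IncrementalResultExporterFactory[] factories = {
                new RyaBindingSetExporterFactory(),
                new RyaSubGraphExporterFactory() };

        final ExporterManager.Builder builder = ExporterManager.builder();
        for (final IncrementalResultExporterFactory factory : factories) {
            // Each factory inspects the observer configuration and only builds an
            // exporter when its corresponding flag is enabled.
            final Optional<IncrementalResultExporter> exporter = factory.build(context);
            if (exporter.isPresent()) {
                builder.addIncrementalResultExporter(exporter.get());
            }
        }
        return builder.build();
    }
}
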
http://git-wip-us.apache.org/repos/asf/incubator-rya/blob/05147266/extras/rya.pcj.fluo/pcj.fluo.app/src/main/java/org/apache/rya/indexing/pcj/fluo/app/observers/StatementPatternObserver.java
----------------------------------------------------------------------
diff --git a/extras/rya.pcj.fluo/pcj.fluo.app/src/main/java/org/apache/rya/indexing/pcj/fluo/app/observers/StatementPatternObserver.java b/extras/rya.pcj.fluo/pcj.fluo.app/src/main/java/org/apache/rya/indexing/pcj/fluo/app/observers/StatementPatternObserver.java
index 69a651e..607267a 100644
--- a/extras/rya.pcj.fluo/pcj.fluo.app/src/main/java/org/apache/rya/indexing/pcj/fluo/app/observers/StatementPatternObserver.java
+++ b/extras/rya.pcj.fluo/pcj.fluo.app/src/main/java/org/apache/rya/indexing/pcj/fluo/app/observers/StatementPatternObserver.java
@@ -24,7 +24,6 @@ import org.apache.fluo.api.client.TransactionBase;
 import org.apache.fluo.api.data.Bytes;
 import org.apache.rya.indexing.pcj.fluo.app.BindingSetRow;
 import org.apache.rya.indexing.pcj.fluo.app.query.FluoQueryColumns;
-import org.apache.rya.indexing.pcj.fluo.app.query.FluoQueryMetadataDAO;
 import org.apache.rya.indexing.pcj.fluo.app.query.StatementPatternMetadata;
 import org.apache.rya.indexing.pcj.storage.accumulo.VisibilityBindingSet;
 import org.apache.rya.indexing.pcj.storage.accumulo.VisibilityBindingSetSerDe;
@@ -39,9 +38,6 @@ public class StatementPatternObserver extends BindingSetUpdater {
 
     private static final VisibilityBindingSetSerDe BS_SERDE = new VisibilityBindingSetSerDe();
 
-    // DAO
-    private final FluoQueryMetadataDAO queryDao = new FluoQueryMetadataDAO();
-
     @Override
     public ObservedColumn getObservedColumn() {
         return new ObservedColumn(FluoQueryColumns.STATEMENT_PATTERN_BINDING_SET, NotificationType.STRONG);


[5/5] incubator-rya git commit: RYA-246-Query-Export-Strategy. Closes #213.

Posted by ca...@apache.org.
RYA-246-Query-Export-Strategy. Closes #213.


Project: http://git-wip-us.apache.org/repos/asf/incubator-rya/repo
Commit: http://git-wip-us.apache.org/repos/asf/incubator-rya/commit/05147266
Tree: http://git-wip-us.apache.org/repos/asf/incubator-rya/tree/05147266
Diff: http://git-wip-us.apache.org/repos/asf/incubator-rya/diff/05147266

Branch: refs/heads/master
Commit: 0514726604757c03e66015edee742b0fbdcf1ca2
Parents: 82df3ad
Author: Caleb Meier <ca...@parsons.com>
Authored: Mon Aug 7 21:22:00 2017 -0700
Committer: Caleb Meier <ca...@parsons.com>
Committed: Fri Aug 25 12:34:42 2017 -0700

----------------------------------------------------------------------
 .../org/apache/rya/api/client/CreatePCJ.java    |  37 ++++
 .../api/client/accumulo/AccumuloCreatePCJ.java  |  27 ++-
 .../api/client/accumulo/AccumuloDeletePCJ.java  |   6 +-
 .../rya/api/client/accumulo/FluoITBase.java     |   2 +-
 .../src/main/java/RyaClientExample.java         |   2 +-
 .../storage/accumulo/ShiftVarOrderFactory.java  |   1 +
 .../indexing/pcj/fluo/api/CreateFluoPcj.java    | 146 +++++++++----
 .../indexing/pcj/fluo/api/DeleteFluoPcj.java    | 127 +----------
 .../indexing/pcj/fluo/api/GetPcjMetadata.java   |  10 +-
 .../indexing/pcj/fluo/api/GetQueryReport.java   |  30 +--
 .../rya/indexing/pcj/fluo/api/ListQueryIds.java |   2 +-
 .../fluo/app/IncrementalUpdateConstants.java    |   3 -
 .../pcj/fluo/app/export/ExporterManager.java    | 216 +++++++++++++++++++
 .../export/IncrementalBindingSetExporter.java   |   8 +-
 .../IncrementalBindingSetExporterFactory.java   | 104 ---------
 .../app/export/IncrementalResultExporter.java   |  42 ++++
 .../IncrementalResultExporterFactory.java       | 104 +++++++++
 .../export/IncrementalRyaSubGraphExporter.java  |   2 +-
 .../IncrementalRyaSubGraphExporterFactory.java  |  47 ----
 .../pcj/fluo/app/export/NoOpExporter.java       |  59 +++++
 .../export/kafka/KafkaBindingSetExporter.java   |  25 ++-
 .../kafka/KafkaBindingSetExporterFactory.java   |  13 +-
 .../KafkaBindingSetExporterParameters.java      |  80 +++++++
 .../export/kafka/KafkaExportParameterBase.java  |  86 ++++++++
 .../app/export/kafka/KafkaExportParameters.java |  86 --------
 .../export/kafka/KafkaRyaSubGraphExporter.java  |  16 ++
 .../kafka/KafkaRyaSubGraphExporterFactory.java  |  17 +-
 .../kafka/KafkaSubGraphExporterParameters.java  |  81 +++++++
 .../export/rya/PeriodicBindingSetExporter.java  |  71 ++++++
 .../rya/PeriodicBindingSetExporterFactory.java  |  74 +++++++
 .../app/export/rya/RyaBindingSetExporter.java   |  43 ++--
 .../rya/RyaBindingSetExporterFactory.java       |  15 +-
 .../app/export/rya/RyaExportParameters.java     |  33 ++-
 .../export/rya/RyaSubGraphExportParameters.java | 120 +++++++++++
 .../app/export/rya/RyaSubGraphExporter.java     | 106 +++++++++
 .../export/rya/RyaSubGraphExporterFactory.java  |  58 +++++
 .../fluo/app/observers/AggregationObserver.java |   3 -
 .../fluo/app/observers/BindingSetUpdater.java   |   2 +-
 .../observers/ConstructQueryResultObserver.java | 167 ++------------
 .../pcj/fluo/app/observers/FilterObserver.java  |   3 -
 .../pcj/fluo/app/observers/JoinObserver.java    |   3 -
 .../app/observers/PeriodicQueryObserver.java    |   2 -
 .../fluo/app/observers/ProjectionObserver.java  |   2 -
 .../fluo/app/observers/QueryResultObserver.java |  94 ++++----
 .../app/observers/StatementPatternObserver.java |   4 -
 .../indexing/pcj/fluo/app/query/FluoQuery.java  |  20 +-
 .../pcj/fluo/app/query/FluoQueryColumns.java    |  26 ---
 .../fluo/app/query/FluoQueryMetadataDAO.java    |  19 +-
 .../pcj/fluo/app/query/QueryMetadata.java       |   5 +-
 .../fluo/app/query/SparqlFluoQueryBuilder.java  |  24 ++-
 .../app/query/UnsupportedQueryException.java    |  41 ++++
 .../pcj/fluo/app/util/FluoQueryUtils.java       |   8 +
 .../export/rya/KafkaExportParametersTest.java   |  25 +--
 .../app/export/rya/RyaExportParametersTest.java |   6 +-
 .../fluo/app/query/PeriodicQueryUtilTest.java   |   2 +-
 .../app/query/QueryMetadataVisitorTest.java     |   2 +-
 .../pcj/fluo/client/PcjAdminClient.java         |   4 +
 .../pcj/fluo/client/PcjAdminClientCommand.java  |   4 +-
 .../fluo/client/command/NewQueryCommand.java    |   3 +-
 .../fluo/client/command/QueryReportCommand.java |   3 +-
 .../fluo/client/util/QueryReportRenderer.java   |  10 +-
 .../rya/indexing/pcj/fluo/demo/DemoDriver.java  |   2 +-
 .../pcj/fluo/demo/FluoAndHistoricPcjsDemo.java  |   7 +-
 .../indexing/pcj/fluo/api/GetPcjMetadataIT.java |   5 +-
 .../indexing/pcj/fluo/api/ListQueryIdsIT.java   |  10 +-
 .../fluo/app/query/FluoQueryMetadataDAOIT.java  |  24 +--
 .../indexing/pcj/fluo/integration/BatchIT.java  |   3 +-
 .../pcj/fluo/integration/CreateDeleteIT.java    |   7 +-
 .../pcj/fluo/integration/KafkaExportIT.java     |  20 +-
 .../integration/KafkaRyaSubGraphExportIT.java   |  22 +-
 .../indexing/pcj/fluo/integration/QueryIT.java  |   6 +-
 .../pcj/fluo/test/base/KafkaExportITBase.java   |  59 ++---
 .../rya/pcj/fluo/test/base/RyaExportITBase.java |   3 +-
 .../PeriodicNotificationProviderIT.java         |   3 +-
 .../pruner/PeriodicNotificationBinPrunerIT.java |   6 +-
 .../notification/api/CreatePeriodicQuery.java   |  19 +-
 .../pruner/PeriodicQueryPruner.java             |   9 +-
 .../recovery/PeriodicNotificationProvider.java  |   3 +-
 .../org/apache/rya/shell/RyaAdminCommands.java  |  21 +-
 .../apache/rya/shell/RyaAdminCommandsTest.java  |  12 +-
 80 files changed, 1698 insertions(+), 924 deletions(-)
----------------------------------------------------------------------


http://git-wip-us.apache.org/repos/asf/incubator-rya/blob/05147266/common/rya.api/src/main/java/org/apache/rya/api/client/CreatePCJ.java
----------------------------------------------------------------------
diff --git a/common/rya.api/src/main/java/org/apache/rya/api/client/CreatePCJ.java b/common/rya.api/src/main/java/org/apache/rya/api/client/CreatePCJ.java
index e03a1f1..6e92b28 100644
--- a/common/rya.api/src/main/java/org/apache/rya/api/client/CreatePCJ.java
+++ b/common/rya.api/src/main/java/org/apache/rya/api/client/CreatePCJ.java
@@ -18,6 +18,8 @@
  */
 package org.apache.rya.api.client;
 
+import java.util.Set;
+
 import edu.umd.cs.findbugs.annotations.DefaultAnnotation;
 import edu.umd.cs.findbugs.annotations.NonNull;
 
@@ -28,7 +30,41 @@ import edu.umd.cs.findbugs.annotations.NonNull;
 public interface CreatePCJ {
 
     /**
+     * Metadata enum used to indicate the type of query that is registered.  If
+     * the topmost node is a Construct QueryNode, then the type is Construct.  If the
+     * topmost node is a Projection QueryNode, then the type is Projection.  If the
+     * query contains a PeriodicQuery Filter anywhere within the query, then it is of type
+     * Periodic. 
+     *
+     */
+    public static enum QueryType{CONSTRUCT, PROJECTION, PERIODIC};
+    
+    /**
+     * Specifies how results will be exported from the Rya Fluo
+     * Application.
+     *
+     */
+    public static enum ExportStrategy{RYA, KAFKA, NO_OP_EXPORT};
+
+    
+    /**
+     * Designate a new PCJ that will be maintained by the target instance of Rya.
+     * Results will be exported according to the specified export strategies.
+     *
+     * @param instanceName - Indicates which Rya instance will create and maintain
+     *   the PCJ. (not null)
+     * @param sparql - The SPARQL query that will be maintained. (not null)
+     * @param strategies - The export strategies used to export results for this query
+     * @return The ID that was assigned to this newly created PCJ.
+     * @throws InstanceDoesNotExistException No instance of Rya exists for the provided name.
+     * @throws RyaClientException Something caused the command to fail.
+     */
+    public String createPCJ(final String instanceName, String sparql, Set<ExportStrategy> strategies) throws InstanceDoesNotExistException, RyaClientException;
+    
+    
+    /**
      * Designate a new PCJ that will be maintained by the target instance of Rya.
+     * Results will be exported to a Rya PCJ table.
      *
      * @param instanceName - Indicates which Rya instance will create and maintain
      *   the PCJ. (not null)
@@ -38,4 +74,5 @@ public interface CreatePCJ {
      * @throws RyaClientException Something caused the command to fail.
      */
     public String createPCJ(final String instanceName, String sparql) throws InstanceDoesNotExistException, RyaClientException;
+    
 }
\ No newline at end of file
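
For reference, a minimal usage sketch of the new overload. It assumes the CreatePCJ instance comes from a RyaClient implementation (such as the Accumulo one updated later in this patch), that Guava's Sets is on the classpath, and that the instance name and query below are placeholders:

    // Sketch only: the CreatePCJ instance is assumed to be supplied by a RyaClient implementation.
    static String registerPcj(final CreatePCJ client)
            throws InstanceDoesNotExistException, RyaClientException {
        final String sparql = "SELECT ?person ?friend WHERE { ?person <urn:knows> ?friend }";

        // Export this projection query's results to both the Rya PCJ table and a Kafka topic.
        return client.createPCJ("myRyaInstance", sparql,
                Sets.newHashSet(ExportStrategy.RYA, ExportStrategy.KAFKA));
    }

    // The original two-argument form is still available and defaults to the Rya export strategy:
    //     client.createPCJ("myRyaInstance", sparql);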

http://git-wip-us.apache.org/repos/asf/incubator-rya/blob/05147266/extras/indexing/src/main/java/org/apache/rya/api/client/accumulo/AccumuloCreatePCJ.java
----------------------------------------------------------------------
diff --git a/extras/indexing/src/main/java/org/apache/rya/api/client/accumulo/AccumuloCreatePCJ.java b/extras/indexing/src/main/java/org/apache/rya/api/client/accumulo/AccumuloCreatePCJ.java
index 644189a..6aef33c 100644
--- a/extras/indexing/src/main/java/org/apache/rya/api/client/accumulo/AccumuloCreatePCJ.java
+++ b/extras/indexing/src/main/java/org/apache/rya/api/client/accumulo/AccumuloCreatePCJ.java
@@ -20,6 +20,8 @@ package org.apache.rya.api.client.accumulo;
 
 import static java.util.Objects.requireNonNull;
 
+import java.util.Set;
+
 import org.apache.accumulo.core.client.Connector;
 import org.apache.fluo.api.client.FluoClient;
 import org.apache.rya.accumulo.instance.AccumuloRyaInstanceDetailsRepository;
@@ -39,6 +41,7 @@ import org.apache.rya.api.instance.RyaDetailsUpdater.RyaDetailsMutator;
 import org.apache.rya.api.instance.RyaDetailsUpdater.RyaDetailsMutator.CouldNotApplyMutationException;
 import org.apache.rya.api.persist.RyaDAOException;
 import org.apache.rya.indexing.pcj.fluo.api.CreateFluoPcj;
+import org.apache.rya.indexing.pcj.fluo.app.query.UnsupportedQueryException;
 import org.apache.rya.indexing.pcj.storage.PcjException;
 import org.apache.rya.indexing.pcj.storage.PrecomputedJoinStorage;
 import org.apache.rya.indexing.pcj.storage.PrecomputedJoinStorage.PCJStorageException;
@@ -49,6 +52,7 @@ import org.openrdf.repository.RepositoryException;
 import org.openrdf.sail.SailException;
 
 import com.google.common.base.Optional;
+import com.google.common.collect.Sets;
 
 import edu.umd.cs.findbugs.annotations.DefaultAnnotation;
 import edu.umd.cs.findbugs.annotations.NonNull;
@@ -73,7 +77,7 @@ public class AccumuloCreatePCJ extends AccumuloCommand implements CreatePCJ {
     }
 
     @Override
-    public String createPCJ(final String instanceName, final String sparql) throws InstanceDoesNotExistException, RyaClientException {
+    public String createPCJ(final String instanceName, final String sparql, Set<ExportStrategy> strategies) throws InstanceDoesNotExistException, RyaClientException {
         requireNonNull(instanceName);
         requireNonNull(sparql);
 
@@ -99,9 +103,14 @@ public class AccumuloCreatePCJ extends AccumuloCommand implements CreatePCJ {
             if(fluoDetailsHolder.isPresent()) {
                 final String fluoAppName = fluoDetailsHolder.get().getUpdateAppName();
                 try {
-                    updateFluoApp(instanceName, fluoAppName, pcjStorage, pcjId);
+                    updateFluoApp(instanceName, fluoAppName, pcjId, sparql, strategies);
                 } catch (RepositoryException | MalformedQueryException | SailException | QueryEvaluationException | PcjException | RyaDAOException e) {
                     throw new RyaClientException("Problem while initializing the Fluo application with the new PCJ.", e);
+                } catch (UnsupportedQueryException e) {
+                    throw new RyaClientException("The new PCJ could not be initialized because it either contains an unsupported query node "
+                            + "or an invalid ExportStrategy for the given QueryType.  Projection queries can be exported to either Rya or Kafka, "
+                            + "unless they contain an aggregation, in which case they can only be exported to Kafka.  Construct queries can be exported "
+                            + "to Rya and Kafka, and Periodic queries can only be exported to Rya.");
                 }
 
                 // Update the Rya Details to indicate the PCJ is being updated incrementally.
@@ -133,9 +142,16 @@ public class AccumuloCreatePCJ extends AccumuloCommand implements CreatePCJ {
         }
     }
 
-    private void updateFluoApp(final String ryaInstance, final String fluoAppName, final PrecomputedJoinStorage pcjStorage, final String pcjId) throws RepositoryException, MalformedQueryException, SailException, QueryEvaluationException, PcjException, RyaDAOException {
-        requireNonNull(pcjStorage);
+    @Override
+    public String createPCJ(String instanceName, String sparql) throws InstanceDoesNotExistException, RyaClientException {
+        return createPCJ(instanceName, sparql, Sets.newHashSet(ExportStrategy.RYA));
+    }
+    
+    
+    private void updateFluoApp(final String ryaInstance, final String fluoAppName, final String pcjId, String sparql, Set<ExportStrategy> strategies) throws RepositoryException, MalformedQueryException, SailException, QueryEvaluationException, PcjException, RyaDAOException, UnsupportedQueryException {
+        requireNonNull(sparql);
         requireNonNull(pcjId);
+        requireNonNull(strategies);
 
         // Connect to the Fluo application that is updating this instance's PCJs.
         final AccumuloConnectionDetails cd = super.getAccumuloConnectionDetails();
@@ -147,7 +163,8 @@ public class AccumuloCreatePCJ extends AccumuloCommand implements CreatePCJ {
                 fluoAppName);) {
             // Initialize the PCJ within the Fluo application.
             final CreateFluoPcj fluoCreatePcj = new CreateFluoPcj();
-            fluoCreatePcj.withRyaIntegration(pcjId, pcjStorage, fluoClient, getConnector(), ryaInstance);
+            fluoCreatePcj.withRyaIntegration(pcjId, sparql, strategies, fluoClient, getConnector(), ryaInstance);
         }
     }
+
 }
\ No newline at end of file

http://git-wip-us.apache.org/repos/asf/incubator-rya/blob/05147266/extras/indexing/src/main/java/org/apache/rya/api/client/accumulo/AccumuloDeletePCJ.java
----------------------------------------------------------------------
diff --git a/extras/indexing/src/main/java/org/apache/rya/api/client/accumulo/AccumuloDeletePCJ.java b/extras/indexing/src/main/java/org/apache/rya/api/client/accumulo/AccumuloDeletePCJ.java
index eb2b2d7..547254d 100644
--- a/extras/indexing/src/main/java/org/apache/rya/api/client/accumulo/AccumuloDeletePCJ.java
+++ b/extras/indexing/src/main/java/org/apache/rya/api/client/accumulo/AccumuloDeletePCJ.java
@@ -123,7 +123,11 @@ public class AccumuloDeletePCJ extends AccumuloCommand implements DeletePCJ {
                 cd.getZookeepers(),
                 fluoAppName)) {
             // Delete the PCJ from the Fluo App.
-            new DeleteFluoPcj(1000).deletePcj(fluoClient, pcjId);
+            try {
+                new DeleteFluoPcj(1000).deletePcj(fluoClient, pcjId);
+            } catch (Exception e) {
+                log.warn("PcjId corresponds to an invalid PCJ. The query cannot be deleted.");
+            }
         }
     }
 }
\ No newline at end of file

http://git-wip-us.apache.org/repos/asf/incubator-rya/blob/05147266/extras/indexing/src/test/java/org/apache/rya/api/client/accumulo/FluoITBase.java
----------------------------------------------------------------------
diff --git a/extras/indexing/src/test/java/org/apache/rya/api/client/accumulo/FluoITBase.java b/extras/indexing/src/test/java/org/apache/rya/api/client/accumulo/FluoITBase.java
index 113b397..695704b 100644
--- a/extras/indexing/src/test/java/org/apache/rya/api/client/accumulo/FluoITBase.java
+++ b/extras/indexing/src/test/java/org/apache/rya/api/client/accumulo/FluoITBase.java
@@ -199,7 +199,7 @@ public abstract class FluoITBase {
         final HashMap<String, String> params = new HashMap<>();
 
         final RyaExportParameters ryaParams = new RyaExportParameters(params);
-        ryaParams.setExportToRya(true);
+        ryaParams.setUseRyaBindingSetExporter(true);
         ryaParams.setAccumuloInstanceName(instanceName);
         ryaParams.setZookeeperServers(zookeepers);
         ryaParams.setExporterUsername(clusterInstance.getUsername());

http://git-wip-us.apache.org/repos/asf/incubator-rya/blob/05147266/extras/indexingExample/src/main/java/RyaClientExample.java
----------------------------------------------------------------------
diff --git a/extras/indexingExample/src/main/java/RyaClientExample.java b/extras/indexingExample/src/main/java/RyaClientExample.java
index 1b0450f..b0afd5a 100644
--- a/extras/indexingExample/src/main/java/RyaClientExample.java
+++ b/extras/indexingExample/src/main/java/RyaClientExample.java
@@ -249,7 +249,7 @@ public class RyaClientExample {
         // export observer.
         final HashMap<String, String> params = new HashMap<>();
         final RyaExportParameters ryaParams = new RyaExportParameters(params);
-        ryaParams.setExportToRya(true);
+        ryaParams.setUseRyaBindingSetExporter(true);
         ryaParams.setAccumuloInstanceName(instanceName);
         ryaParams.setZookeeperServers(zookeepers);
         ryaParams.setExporterUsername(username);

http://git-wip-us.apache.org/repos/asf/incubator-rya/blob/05147266/extras/rya.indexing.pcj/src/main/java/org/apache/rya/indexing/pcj/storage/accumulo/ShiftVarOrderFactory.java
----------------------------------------------------------------------
diff --git a/extras/rya.indexing.pcj/src/main/java/org/apache/rya/indexing/pcj/storage/accumulo/ShiftVarOrderFactory.java b/extras/rya.indexing.pcj/src/main/java/org/apache/rya/indexing/pcj/storage/accumulo/ShiftVarOrderFactory.java
index 26c4339..e297ec9 100644
--- a/extras/rya.indexing.pcj/src/main/java/org/apache/rya/indexing/pcj/storage/accumulo/ShiftVarOrderFactory.java
+++ b/extras/rya.indexing.pcj/src/main/java/org/apache/rya/indexing/pcj/storage/accumulo/ShiftVarOrderFactory.java
@@ -46,6 +46,7 @@ public class ShiftVarOrderFactory implements PcjVarOrderFactory {
         final Set<String> bindingNames = new SPARQLParser().parseQuery(sparql, null)
                 .getTupleExpr()
                 .getBindingNames();
+        
 
         return makeVarOrders( new VariableOrder(bindingNames) );
     }

http://git-wip-us.apache.org/repos/asf/incubator-rya/blob/05147266/extras/rya.pcj.fluo/pcj.fluo.api/src/main/java/org/apache/rya/indexing/pcj/fluo/api/CreateFluoPcj.java
----------------------------------------------------------------------
diff --git a/extras/rya.pcj.fluo/pcj.fluo.api/src/main/java/org/apache/rya/indexing/pcj/fluo/api/CreateFluoPcj.java b/extras/rya.pcj.fluo/pcj.fluo.api/src/main/java/org/apache/rya/indexing/pcj/fluo/api/CreateFluoPcj.java
index 150a256..501f1f5 100644
--- a/extras/rya.pcj.fluo/pcj.fluo.api/src/main/java/org/apache/rya/indexing/pcj/fluo/api/CreateFluoPcj.java
+++ b/extras/rya.pcj.fluo/pcj.fluo.api/src/main/java/org/apache/rya/indexing/pcj/fluo/api/CreateFluoPcj.java
@@ -27,7 +27,6 @@ import java.io.UnsupportedEncodingException;
 import java.util.HashSet;
 import java.util.List;
 import java.util.Set;
-import java.util.UUID;
 
 import org.apache.accumulo.core.client.AccumuloException;
 import org.apache.accumulo.core.client.AccumuloSecurityException;
@@ -38,6 +37,7 @@ import org.apache.fluo.api.client.Transaction;
 import org.apache.log4j.Logger;
 import org.apache.rya.accumulo.AccumuloRdfConfiguration;
 import org.apache.rya.accumulo.query.AccumuloRyaQueryEngine;
+import org.apache.rya.api.client.CreatePCJ.ExportStrategy;
 import org.apache.rya.api.domain.RyaStatement;
 import org.apache.rya.api.domain.RyaType;
 import org.apache.rya.api.domain.RyaURI;
@@ -50,6 +50,8 @@ import org.apache.rya.indexing.pcj.fluo.app.query.FluoQuery;
 import org.apache.rya.indexing.pcj.fluo.app.query.FluoQueryMetadataDAO;
 import org.apache.rya.indexing.pcj.fluo.app.query.SparqlFluoQueryBuilder;
 import org.apache.rya.indexing.pcj.fluo.app.query.StatementPatternMetadata;
+import org.apache.rya.indexing.pcj.fluo.app.query.UnsupportedQueryException;
+import org.apache.rya.indexing.pcj.fluo.app.util.FluoQueryUtils;
 import org.apache.rya.indexing.pcj.storage.PcjException;
 import org.apache.rya.indexing.pcj.storage.PcjMetadata;
 import org.apache.rya.indexing.pcj.storage.PrecomputedJoinStorage;
@@ -61,6 +63,7 @@ import org.openrdf.query.MalformedQueryException;
 import org.openrdf.query.algebra.StatementPattern;
 
 import com.google.common.base.Preconditions;
+import com.google.common.collect.Sets;
 
 import edu.umd.cs.findbugs.annotations.DefaultAnnotation;
 import edu.umd.cs.findbugs.annotations.NonNull;
@@ -128,60 +131,92 @@ public class CreateFluoPcj {
     /**
      * Tells the Fluo PCJ Updater application to maintain a new PCJ. This method
      * creates the FluoQuery (metadata) inside of Fluo so that results can be incrementally generated
-     * inside of Fluo.  This method assumes that the user will export the results to Kafka or
-     * some other external resource.  The export id is equivalent to the queryId that is returned,
-     * which is in contrast to the other createPcj methods in this class which accept an external pcjId
-     * that is used to identify the Accumulo table or Kafka topic for exporting results.
+     * inside of Fluo.  This method assumes that the user will export the results to Kafka 
+     * according to the Kafka {@link ExportStrategy}.  
      *
      * @param sparql - sparql query String to be registered with Fluo
      * @param fluo - A connection to the Fluo application that updates the PCJ index. (not null)
      * @return The metadata that was written to the Fluo application for the PCJ.
      * @throws MalformedQueryException The SPARQL query stored for the {@code pcjId} is malformed.
+     * @throws UnsupportedQueryException 
      * @throws PcjException The PCJ Metadata for {@code pcjId} could not be read from {@code pcjStorage}.
      */
-    public FluoQuery createPcj(String sparql, FluoClient fluo) throws MalformedQueryException {
+    public FluoQuery createPcj(String sparql, FluoClient fluo) throws MalformedQueryException, UnsupportedQueryException {
         Preconditions.checkNotNull(sparql);
         Preconditions.checkNotNull(fluo);
         
-        String pcjId = UUID.randomUUID().toString().replaceAll("-", "");
-        return createPcj(pcjId, sparql, fluo);
+        String pcjId = FluoQueryUtils.createNewPcjId();
+        return createPcj(pcjId, sparql, Sets.newHashSet(ExportStrategy.KAFKA), fluo);
     }
     
     /**
      * Tells the Fluo PCJ Updater application to maintain a new PCJ.  This method provides
      * no guarantees that a PCJ with the given pcjId exists outside of Fluo. This method merely
-     * creates the FluoQuery (metadata) inside of Fluo so that results and be incrementally generated
-     * inside of Fluo.  This method assumes that the user will export the results to Kafka or
-     * some other external resource.
+     * creates the FluoQuery (metadata) inside of Fluo so that results can be incrementally generated
+     * inside of Fluo.  Results are exported according to the Set of {@link ExportStrategy} enums.  If
+     * the Rya ExportStrategy is specified, care should be taken to verify that the PCJ table exists.
      *
      * @param pcjId - Identifies the PCJ that will be updated by the Fluo app. (not null)
      * @param sparql - sparql query String to be registered with Fluo
+     * @param strategies - ExportStrategies used to specify how final results will be handled
      * @param fluo - A connection to the Fluo application that updates the PCJ index. (not null)
      * @return The metadata that was written to the Fluo application for the PCJ.
-     * @throws PcjException The PCJ Metadata for {@code pcjId} could not be read from {@code pcjStorage}.
+     * @throws UnsupportedQueryException 
+     * @throws MalformedQueryException
      */
     public FluoQuery createPcj(
             final String pcjId,
             final String sparql,
-            final FluoClient fluo) throws MalformedQueryException {
+            final Set<ExportStrategy> strategies,
+            final FluoClient fluo) throws MalformedQueryException, UnsupportedQueryException {
         requireNonNull(pcjId);
         requireNonNull(sparql);
+        requireNonNull(strategies);
         requireNonNull(fluo);
 
-        FluoQuery fluoQuery = makeFluoQuery(sparql, pcjId);
+        FluoQuery fluoQuery = makeFluoQuery(sparql, pcjId, strategies);
         writeFluoQuery(fluo, fluoQuery, pcjId);
 
         return fluoQuery;
     }
     
-    private FluoQuery makeFluoQuery(String sparql, String pcjId) throws MalformedQueryException {
+    /**
+     * Tells the Fluo PCJ Updater application to maintain a new PCJ.  The method takes in an
+     * instance of {@link PrecomputedJoinStorage} to verify that a PCJ with the given pcjId exists.
+     * Results are exported to a PCJ table with the provided pcjId according to the Rya
+     * {@link ExportStrategy}.
+     *
+     * @param pcjId - Identifies the PCJ that will be updated by the Fluo app. (not null)
+     * @param pcjStorage - Provides access to the PCJ index. (not null)
+     * @param fluo - A connection to the Fluo application that updates the PCJ index. (not null)
+     * @return The metadata that was written to the Fluo application for the PCJ.
+     * @throws MalformedQueryException The SPARQL query stored for the {@code pcjId} is malformed.
+     * @throws PcjException The PCJ Metadata for {@code pcjId} could not be read from {@code pcjStorage}.
+     * @throws UnsupportedQueryException 
+     */
+    public FluoQuery createPcj(
+            final String pcjId,
+            final PrecomputedJoinStorage pcjStorage,
+            final FluoClient fluo) throws MalformedQueryException, PcjException, UnsupportedQueryException {
+        requireNonNull(pcjId);
+        requireNonNull(pcjStorage);
+        requireNonNull(fluo);
+
+        // Parse the query's structure for the metadata that will be written to fluo.
+        final PcjMetadata pcjMetadata = pcjStorage.getPcjMetadata(pcjId);
+        final String sparql = pcjMetadata.getSparql();
+        return createPcj(pcjId, sparql, Sets.newHashSet(ExportStrategy.RYA), fluo);
+    }
+    
+    private FluoQuery makeFluoQuery(String sparql, String pcjId, Set<ExportStrategy> strategies) throws MalformedQueryException, UnsupportedQueryException {
         
         String queryId = NodeType.generateNewIdForType(NodeType.QUERY, pcjId);
         
-        SparqlFluoQueryBuilder builder = new SparqlFluoQueryBuilder();
-        builder.setFluoQueryId(queryId);
-        builder.setSparql(sparql);
-        builder.setJoinBatchSize(joinBatchSize);
+        SparqlFluoQueryBuilder builder = new SparqlFluoQueryBuilder()
+                .setExportStrategies(strategies)
+                .setFluoQueryId(queryId)
+                .setSparql(sparql)
+                .setJoinBatchSize(joinBatchSize);
         
         return builder.build();
     }
@@ -195,56 +230,72 @@ public class CreateFluoPcj {
             tx.commit();
         }
     }
-
     
     /**
-     * Tells the Fluo PCJ Updater application to maintain a new PCJ.  The method takes in an
-     * instance of {@link PrecomputedJoinStorage} to verify that a PCJ with the given pcjId exists.
+     * Tells the Fluo PCJ Updater application to maintain a new PCJ.
+     * <p>
+     * This call scans Rya for Statement Pattern matches and inserts them into
+     * the Fluo application. It is assumed that results for any query registered
+     * using this method will be exported to Kafka according to the Kafka {@link ExportStrategy}.
      *
-     * @param pcjId - Identifies the PCJ that will be updated by the Fluo app. (not null)
-     * @param pcjStorage - Provides access to the PCJ index. (not null)
+     * @param sparql - sparql query that will be registered with Fluo. (not null)
      * @param fluo - A connection to the Fluo application that updates the PCJ index. (not null)
-     * @return The metadata that was written to the Fluo application for the PCJ.
+     * @param accumulo - Accumulo connector for connecting with Accumulo
+     * @param ryaInstance - Name of Rya instance to connect to
+     * @return The Fluo application's Query ID of the query that was created.
      * @throws MalformedQueryException The SPARQL query stored for the {@code pcjId} is malformed.
      * @throws PcjException The PCJ Metadata for {@code pcjId} could not be read from {@code pcjStorage}.
+     * @throws RyaDAOException Historic PCJ results could not be loaded because of a problem with {@code rya}.
+     * @throws UnsupportedQueryException 
      */
-    public FluoQuery createPcj(
-            final String pcjId,
-            final PrecomputedJoinStorage pcjStorage,
-            final FluoClient fluo) throws MalformedQueryException, PcjException {
-        requireNonNull(pcjId);
-        requireNonNull(pcjStorage);
+    public String withRyaIntegration(
+            final String sparql,
+            final FluoClient fluo,
+            final Connector accumulo,
+            final String ryaInstance ) throws MalformedQueryException, PcjException, RyaDAOException, UnsupportedQueryException {
+        requireNonNull(sparql);
         requireNonNull(fluo);
+        requireNonNull(accumulo);
+        requireNonNull(ryaInstance);
 
-        // Parse the query's structure for the metadata that will be written to fluo.
-        final PcjMetadata pcjMetadata = pcjStorage.getPcjMetadata(pcjId);
-        final String sparql = pcjMetadata.getSparql();
-        return createPcj(pcjId, sparql, fluo);
+        
+        // Write the SPARQL query's structure to the Fluo Application.
+        final FluoQuery fluoQuery = createPcj(sparql, fluo);
+        //import results already ingested into Rya that match query
+        importHistoricResultsIntoFluo(fluo, fluoQuery, accumulo, ryaInstance);
+        // return queryId to the caller for later monitoring from the export.
+        return fluoQuery.getQueryMetadata().getNodeId();
     }
     
+    
     /**
      * Tells the Fluo PCJ Updater application to maintain a new PCJ.
      * <p>
      * This call scans Rya for Statement Pattern matches and inserts them into
      * the Fluo application. This method does not verify that a PcjTable with the
-     * the given pcjId actually exists. It is assumed that results for any query registered
-     * using this method will be exported to Kafka or some other external service.
+     * given pcjId actually exists, so one should verify that the table exists before
+     * using the Rya ExportStrategy. Results will be exported according to the Set of
+     * {@link ExportStrategy} enums.
      *
      * @param pcjId - Identifies the PCJ that will be updated by the Fluo app. (not null)
      * @param sparql - sparql query that will registered with Fluo. (not null)
+     * @param strategies - ExportStrategies used to specify how final results will be handled
      * @param fluo - A connection to the Fluo application that updates the PCJ index. (not null)
-     * @param queryEngine - QueryEngine for a given Rya Instance, (not null)
+     * @param accumulo - Accumulo connector for connecting with Accumulo
+     * @param ryaInstance - name of Rya instance to connect to
      * @return The Fluo application's Query ID of the query that was created.
      * @throws MalformedQueryException The SPARQL query stored for the {@code pcjId} is malformed.
      * @throws PcjException The PCJ Metadata for {@code pcjId} could not be read from {@code pcjStorage}.
      * @throws RyaDAOException Historic PCJ results could not be loaded because of a problem with {@code rya}.
+     * @throws UnsupportedQueryException 
      */
     public String withRyaIntegration(
             final String pcjId,
             final String sparql,
+            final Set<ExportStrategy> strategies,
             final FluoClient fluo,
             final Connector accumulo,
-            final String ryaInstance ) throws MalformedQueryException, PcjException, RyaDAOException {
+            final String ryaInstance ) throws MalformedQueryException, PcjException, RyaDAOException, UnsupportedQueryException {
         requireNonNull(pcjId);
         requireNonNull(sparql);
         requireNonNull(fluo);
@@ -253,14 +304,13 @@ public class CreateFluoPcj {
 
         
         // Write the SPARQL query's structure to the Fluo Application.
-        final FluoQuery fluoQuery = createPcj(pcjId, sparql, fluo);
+        final FluoQuery fluoQuery = createPcj(pcjId, sparql, strategies, fluo);
         //import results already ingested into Rya that match query
         importHistoricResultsIntoFluo(fluo, fluoQuery, accumulo, ryaInstance);
         // return queryId to the caller for later monitoring from the export.
         return fluoQuery.getQueryMetadata().getNodeId();
     }
     
-
     /**
      * Tells the Fluo PCJ Updater application to maintain a new PCJ.
      * <p>
@@ -268,24 +318,26 @@ public class CreateFluoPcj {
      * the Fluo application. The Fluo application will then maintain the intermediate
      * results as new triples are inserted and export any new query results to the
      * {@code pcjId} within the provided {@code pcjStorage}.  This method requires that a
-     * PCJ table already exist for the query corresponding to the pcjId.  Results will be exported
-     * to this table.
+     * PCJ table already exist for the query corresponding to the pcjId.  By default, results will be exported
+     * to this table according to the Rya {@link ExportStrategy}.
      *
      * @param pcjId - Identifies the PCJ that will be updated by the Fluo app. (not null)
      * @param pcjStorage - Provides access to the PCJ index. (not null)
      * @param fluo - A connection to the Fluo application that updates the PCJ index. (not null)
-     * @param queryEngine - QueryEngine for a given Rya Instance, (not null)
+     * @param accumulo - Accumulo connector for connecting to Accumulo
+     * @param ryaInstance - name of Rya instance to connect to
      * @return The Fluo application's Query ID of the query that was created.
      * @throws MalformedQueryException The SPARQL query stored for the {@code pcjId} is malformed.
      * @throws PcjException The PCJ Metadata for {@code pcjId} could not be read from {@code pcjStorage}.
      * @throws RyaDAOException Historic PCJ results could not be loaded because of a problem with {@code rya}.
+     * @throws UnsupportedQueryException 
      */
     public String withRyaIntegration(
             final String pcjId,
             final PrecomputedJoinStorage pcjStorage,
             final FluoClient fluo,
             final Connector accumulo,
-            final String ryaInstance ) throws MalformedQueryException, PcjException, RyaDAOException {
+            final String ryaInstance ) throws MalformedQueryException, PcjException, RyaDAOException, UnsupportedQueryException {
         requireNonNull(pcjId);
         requireNonNull(pcjStorage);
         requireNonNull(fluo);
@@ -296,9 +348,11 @@ public class CreateFluoPcj {
         final PcjMetadata pcjMetadata = pcjStorage.getPcjMetadata(pcjId);
         final String sparql = pcjMetadata.getSparql();
         
-        return withRyaIntegration(pcjId, sparql, fluo, accumulo, ryaInstance);
+        return withRyaIntegration(pcjId, sparql, Sets.newHashSet(ExportStrategy.RYA), fluo, accumulo, ryaInstance);
     }
     
+    
+    
     private void importHistoricResultsIntoFluo(FluoClient fluo, FluoQuery fluoQuery, Connector accumulo, String ryaInstance)
             throws RyaDAOException {
         // Reuse the same set object while performing batch inserts.
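
As a rough sketch of the reworked CreateFluoPcj API (the FluoClient, Accumulo Connector, pcjId, and SPARQL text are assumed to be supplied by the caller, and "myRyaInstance" is a placeholder instance name):

    static String registerWithFluo(final FluoClient fluo, final Connector accumulo,
            final String pcjId, final String sparql) throws Exception {
        final CreateFluoPcj createFluoPcj = new CreateFluoPcj();

        // Registers a query whose results are exported with the Kafka strategy;
        // the generated query id doubles as the export id.
        final FluoQuery kafkaQuery = createFluoPcj.createPcj(sparql, fluo);

        // Registers a query against an existing PCJ table using the Rya strategy and
        // loads historic matches that are already stored in the Rya instance.
        return createFluoPcj.withRyaIntegration(pcjId, sparql,
                Sets.newHashSet(ExportStrategy.RYA), fluo, accumulo, "myRyaInstance");
    }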

http://git-wip-us.apache.org/repos/asf/incubator-rya/blob/05147266/extras/rya.pcj.fluo/pcj.fluo.api/src/main/java/org/apache/rya/indexing/pcj/fluo/api/DeleteFluoPcj.java
----------------------------------------------------------------------
diff --git a/extras/rya.pcj.fluo/pcj.fluo.api/src/main/java/org/apache/rya/indexing/pcj/fluo/api/DeleteFluoPcj.java b/extras/rya.pcj.fluo/pcj.fluo.api/src/main/java/org/apache/rya/indexing/pcj/fluo/api/DeleteFluoPcj.java
index 58a52fb..0d97b2f 100644
--- a/extras/rya.pcj.fluo/pcj.fluo.api/src/main/java/org/apache/rya/indexing/pcj/fluo/api/DeleteFluoPcj.java
+++ b/extras/rya.pcj.fluo/pcj.fluo.api/src/main/java/org/apache/rya/indexing/pcj/fluo/api/DeleteFluoPcj.java
@@ -21,7 +21,6 @@ package org.apache.rya.indexing.pcj.fluo.api;
 import static com.google.common.base.Preconditions.checkArgument;
 import static java.util.Objects.requireNonNull;
 
-import java.util.ArrayList;
 import java.util.Iterator;
 import java.util.List;
 
@@ -33,15 +32,10 @@ import org.apache.fluo.api.data.Column;
 import org.apache.fluo.api.data.RowColumnValue;
 import org.apache.fluo.api.data.Span;
 import org.apache.rya.indexing.pcj.fluo.app.NodeType;
-import org.apache.rya.indexing.pcj.fluo.app.query.AggregationMetadata;
-import org.apache.rya.indexing.pcj.fluo.app.query.ConstructQueryMetadata;
-import org.apache.rya.indexing.pcj.fluo.app.query.FilterMetadata;
-import org.apache.rya.indexing.pcj.fluo.app.query.FluoQueryColumns;
+import org.apache.rya.indexing.pcj.fluo.app.query.FluoQuery;
 import org.apache.rya.indexing.pcj.fluo.app.query.FluoQueryMetadataDAO;
-import org.apache.rya.indexing.pcj.fluo.app.query.JoinMetadata;
-import org.apache.rya.indexing.pcj.fluo.app.query.PeriodicQueryMetadata;
-import org.apache.rya.indexing.pcj.fluo.app.query.ProjectionMetadata;
-import org.apache.rya.indexing.pcj.fluo.app.query.QueryMetadata;
+import org.apache.rya.indexing.pcj.fluo.app.query.UnsupportedQueryException;
+import org.apache.rya.indexing.pcj.fluo.app.util.FluoQueryUtils;
 import org.openrdf.query.BindingSet;
 
 import edu.umd.cs.findbugs.annotations.DefaultAnnotation;
@@ -85,8 +79,9 @@ public class DeleteFluoPcj {
      *            Index. (not null)
      * @param pcjId - The PCJ ID for the query that will removed from the Fluo
      *            application. (not null)
+     * @throws UnsupportedQueryException 
      */
-    public void deletePcj(final FluoClient client, final String pcjId) {
+    public void deletePcj(final FluoClient client, final String pcjId) throws UnsupportedQueryException {
         requireNonNull(client);
         requireNonNull(pcjId);
 
@@ -109,84 +104,17 @@ public class DeleteFluoPcj {
      * @param tx - Transaction of a given Fluo table. (not null)
      * @param pcjId - Id of query. (not null)
      * @return list of Node IDs associated with the query {@code pcjId}.
+     * @throws UnsupportedQueryException 
      */
-    private List<String> getNodeIds(Transaction tx, String pcjId) {
+    private List<String> getNodeIds(Transaction tx, String pcjId) throws UnsupportedQueryException {
         requireNonNull(tx);
         requireNonNull(pcjId);
 
-        // Get the ID that tracks the query within the Fluo application.
-        final String queryId = getQueryIdFromPcjId(tx, pcjId);
-
-        // Get the query's children nodes.
-        final List<String> nodeIds = new ArrayList<>();
-        nodeIds.add(queryId);
-        getChildNodeIds(tx, queryId, nodeIds);
-        return nodeIds;
+        String queryId = NodeType.generateNewIdForType(NodeType.QUERY, pcjId);
+        FluoQuery fluoQuery = dao.readFluoQuery(tx, queryId);
+        return FluoQueryUtils.collectNodeIds(fluoQuery);
     }
 
-    /**
-     * Recursively navigate query tree to extract all of the nodeIds.
-     *
-     * @param tx - Transaction of a given Fluo table. (not null)
-     * @param nodeId - Current node in query tree. (not null)
-     * @param nodeIds - The Node IDs extracted from query tree. (not null)
-     */
-    private void getChildNodeIds(final Transaction tx, final String nodeId, final List<String> nodeIds) {
-        requireNonNull(tx);
-        requireNonNull(nodeId);
-        requireNonNull(nodeIds);
-
-        final NodeType type = NodeType.fromNodeId(nodeId).get();
-        switch (type) {
-            case QUERY:
-                final QueryMetadata queryMeta = dao.readQueryMetadata(tx, nodeId);
-                final String queryChild = queryMeta.getChildNodeId();
-                nodeIds.add(queryChild);
-                getChildNodeIds(tx, queryChild, nodeIds);
-                break;
-            case CONSTRUCT:
-                final ConstructQueryMetadata constructMeta = dao.readConstructQueryMetadata(tx, nodeId);
-                final String constructChild = constructMeta.getChildNodeId();
-                nodeIds.add(constructChild);
-                getChildNodeIds(tx, constructChild, nodeIds);
-                break;
-            case JOIN:
-                final JoinMetadata joinMeta = dao.readJoinMetadata(tx, nodeId);
-                final String lchild = joinMeta.getLeftChildNodeId();
-                final String rchild = joinMeta.getRightChildNodeId();
-                nodeIds.add(lchild);
-                nodeIds.add(rchild);
-                getChildNodeIds(tx, lchild, nodeIds);
-                getChildNodeIds(tx, rchild, nodeIds);
-                break;
-            case FILTER:
-                final FilterMetadata filterMeta = dao.readFilterMetadata(tx, nodeId);
-                final String filterChild = filterMeta.getChildNodeId();
-                nodeIds.add(filterChild);
-                getChildNodeIds(tx, filterChild, nodeIds);
-                break;
-            case AGGREGATION:
-                final AggregationMetadata aggMeta = dao.readAggregationMetadata(tx, nodeId);
-                final String aggChild = aggMeta.getChildNodeId();
-                nodeIds.add(aggChild);
-                getChildNodeIds(tx, aggChild, nodeIds);
-                break;
-            case PERIODIC_QUERY:
-                final PeriodicQueryMetadata periodicMeta = dao.readPeriodicQueryMetadata(tx, nodeId);
-                final String periodicChild = periodicMeta.getChildNodeId();
-                nodeIds.add(periodicChild);
-                getChildNodeIds(tx, periodicChild, nodeIds);
-                break;
-            case PROJECTION:
-                final ProjectionMetadata projectionMetadata = dao.readProjectionMetadata(tx, nodeId);
-                final String projectionChild = projectionMetadata.getChildNodeId();
-                nodeIds.add(projectionChild);
-                getChildNodeIds(tx, projectionChild, nodeIds);
-                break;
-            case STATEMENT_PATTERN:
-                break;
-        }
-    }
 
     /**
      * Deletes metadata for all nodeIds associated with a given queryId in a
@@ -203,8 +131,6 @@ public class DeleteFluoPcj {
         requireNonNull(pcjId);
 
         try (final Transaction typeTx = tx) {
-            deletePcjIdAndSparqlMetadata(typeTx, pcjId);
-
             for (final String nodeId : nodeIds) {
                 final NodeType type = NodeType.fromNodeId(nodeId).get();
                 deleteMetadataColumns(typeTx, nodeId, type.getMetaDataColumns());
@@ -232,24 +158,6 @@ public class DeleteFluoPcj {
     }
 
     /**
-     * Deletes high level query meta for converting from queryId to pcjId and
-     * vice versa, as well as converting from sparql to queryId.
-     *
-     * @param tx - Transaction the deletes will be performed with. (not null)
-     * @param pcjId - The PCJ whose metadata will be deleted. (not null)
-     */
-    private void deletePcjIdAndSparqlMetadata(final Transaction tx, final String pcjId) {
-        requireNonNull(tx);
-        requireNonNull(pcjId);
-
-        final String queryId = getQueryIdFromPcjId(tx, pcjId);
-        final String sparql = getSparqlFromQueryId(tx, queryId);
-        tx.delete(queryId, FluoQueryColumns.RYA_PCJ_ID);
-        tx.delete(sparql, FluoQueryColumns.QUERY_ID);
-        tx.delete(pcjId, FluoQueryColumns.PCJ_ID_QUERY_ID);
-    }
-
-    /**
      * Deletes all results (BindingSets or Statements) associated with the specified nodeId.
      *
      * @param nodeId - nodeId whose {@link BindingSet}s will be deleted. (not null)
@@ -294,19 +202,4 @@ public class DeleteFluoPcj {
         }
     }
 
-    private String getQueryIdFromPcjId(final Transaction tx, final String pcjId) {
-        requireNonNull(tx);
-        requireNonNull(pcjId);
-
-        final Bytes queryIdBytes = tx.get(Bytes.of(pcjId), FluoQueryColumns.PCJ_ID_QUERY_ID);
-        return queryIdBytes.toString();
-    }
-
-    private String getSparqlFromQueryId(final Transaction tx, final String queryId) {
-        requireNonNull(tx);
-        requireNonNull(queryId);
-
-        final QueryMetadata metadata = dao.readQueryMetadata(tx, queryId);
-        return metadata.getSparql();
-    }
 }
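
A small hedged sketch of the updated delete call, mirroring the handling added to AccumuloDeletePCJ above (the FluoClient and pcjId are assumed to be provided by the surrounding application):

    static void deleteQuery(final FluoClient fluo, final String pcjId) {
        try {
            // The batch size of 1000 matches the value AccumuloDeletePCJ passes above.
            new DeleteFluoPcj(1000).deletePcj(fluo, pcjId);
        } catch (final UnsupportedQueryException e) {
            // The stored metadata could not be read back as a FluoQuery, so nothing was removed.
            System.err.println("Could not delete PCJ " + pcjId + ": " + e.getMessage());
        }
    }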

http://git-wip-us.apache.org/repos/asf/incubator-rya/blob/05147266/extras/rya.pcj.fluo/pcj.fluo.api/src/main/java/org/apache/rya/indexing/pcj/fluo/api/GetPcjMetadata.java
----------------------------------------------------------------------
diff --git a/extras/rya.pcj.fluo/pcj.fluo.api/src/main/java/org/apache/rya/indexing/pcj/fluo/api/GetPcjMetadata.java b/extras/rya.pcj.fluo/pcj.fluo.api/src/main/java/org/apache/rya/indexing/pcj/fluo/api/GetPcjMetadata.java
index 061a1d5..d08cb73 100644
--- a/extras/rya.pcj.fluo/pcj.fluo.api/src/main/java/org/apache/rya/indexing/pcj/fluo/api/GetPcjMetadata.java
+++ b/extras/rya.pcj.fluo/pcj.fluo.api/src/main/java/org/apache/rya/indexing/pcj/fluo/api/GetPcjMetadata.java
@@ -24,15 +24,13 @@ import java.util.Collection;
 import java.util.HashMap;
 import java.util.Map;
 
-import org.apache.rya.indexing.pcj.fluo.app.query.FluoQueryColumns;
+import org.apache.fluo.api.client.FluoClient;
+import org.apache.fluo.api.client.Snapshot;
+import org.apache.rya.indexing.pcj.fluo.app.util.FluoQueryUtils;
 import org.apache.rya.indexing.pcj.storage.PcjException;
 import org.apache.rya.indexing.pcj.storage.PcjMetadata;
 import org.apache.rya.indexing.pcj.storage.PrecomputedJoinStorage;
 
-import org.apache.fluo.api.client.FluoClient;
-import org.apache.fluo.api.client.Snapshot;
-import org.apache.fluo.api.data.Bytes;
-
 /**
  * Get {@link PcjMetadata} for queries that are managed by the Fluo app.
  */
@@ -87,7 +85,7 @@ public class GetPcjMetadata {
         // Lookup the Rya PCJ ID associated with the query.
         String pcjId = null;
         try(Snapshot snap = fluo.newSnapshot() ) {
-            pcjId = snap.gets(queryId, FluoQueryColumns.RYA_PCJ_ID);
+            pcjId = FluoQueryUtils.convertFluoQueryIdToPcjId(queryId);
             if(pcjId == null) {
                 throw new NotInFluoException("Could not get the PcjMetadata for queryId '" + queryId +
                         "' because a Rya PCJ ID not stored in the Fluo table.");

http://git-wip-us.apache.org/repos/asf/incubator-rya/blob/05147266/extras/rya.pcj.fluo/pcj.fluo.api/src/main/java/org/apache/rya/indexing/pcj/fluo/api/GetQueryReport.java
----------------------------------------------------------------------
diff --git a/extras/rya.pcj.fluo/pcj.fluo.api/src/main/java/org/apache/rya/indexing/pcj/fluo/api/GetQueryReport.java b/extras/rya.pcj.fluo/pcj.fluo.api/src/main/java/org/apache/rya/indexing/pcj/fluo/api/GetQueryReport.java
index 1fb1485..ddbaaaf 100644
--- a/extras/rya.pcj.fluo/pcj.fluo.api/src/main/java/org/apache/rya/indexing/pcj/fluo/api/GetQueryReport.java
+++ b/extras/rya.pcj.fluo/pcj.fluo.api/src/main/java/org/apache/rya/indexing/pcj/fluo/api/GetQueryReport.java
@@ -25,27 +25,27 @@ import java.util.HashMap;
 import java.util.List;
 import java.util.Map;
 
-import edu.umd.cs.findbugs.annotations.Nullable;
-import edu.umd.cs.findbugs.annotations.DefaultAnnotation;
-import edu.umd.cs.findbugs.annotations.NonNull;
-import net.jcip.annotations.Immutable;
-
+import org.apache.fluo.api.client.FluoClient;
+import org.apache.fluo.api.client.Snapshot;
+import org.apache.fluo.api.client.SnapshotBase;
+import org.apache.fluo.api.client.scanner.ColumnScanner;
+import org.apache.fluo.api.client.scanner.RowScanner;
+import org.apache.fluo.api.data.Column;
+import org.apache.fluo.api.data.Span;
 import org.apache.rya.indexing.pcj.fluo.app.query.FilterMetadata;
 import org.apache.rya.indexing.pcj.fluo.app.query.FluoQuery;
 import org.apache.rya.indexing.pcj.fluo.app.query.FluoQueryColumns;
 import org.apache.rya.indexing.pcj.fluo.app.query.FluoQueryMetadataDAO;
 import org.apache.rya.indexing.pcj.fluo.app.query.JoinMetadata;
 import org.apache.rya.indexing.pcj.fluo.app.query.StatementPatternMetadata;
+import org.apache.rya.indexing.pcj.fluo.app.query.UnsupportedQueryException;
 
 import com.google.common.collect.ImmutableMap;
 
-import org.apache.fluo.api.client.FluoClient;
-import org.apache.fluo.api.client.Snapshot;
-import org.apache.fluo.api.client.SnapshotBase;
-import org.apache.fluo.api.client.scanner.ColumnScanner;
-import org.apache.fluo.api.client.scanner.RowScanner;
-import org.apache.fluo.api.data.Column;
-import org.apache.fluo.api.data.Span;
+import edu.umd.cs.findbugs.annotations.DefaultAnnotation;
+import edu.umd.cs.findbugs.annotations.NonNull;
+import edu.umd.cs.findbugs.annotations.Nullable;
+import net.jcip.annotations.Immutable;
 
 /**
  * Get a reports that indicates how many binding sets have been emitted for
@@ -63,8 +63,9 @@ public class GetQueryReport {
      * @param fluo - The connection to Fluo that will be used to fetch the metadata. (not null)
      * @return A map from Query ID to QueryReport that holds a report for all of
      *   the queries that are being managed within the fluo app.
+     * @throws UnsupportedQueryException 
      */
-    public Map<String, QueryReport> getAllQueryReports(final FluoClient fluo) {
+    public Map<String, QueryReport> getAllQueryReports(final FluoClient fluo) throws UnsupportedQueryException {
         checkNotNull(fluo);
 
         // Fetch the queries that are being managed by the Fluo.
@@ -85,8 +86,9 @@ public class GetQueryReport {
      * @param fluo - The connection to Fluo that will be used to fetch the metadata. (not null)
      * @param queryId - The ID of the query to fetch. (not null)
      * @return A report that was built for the query.
+     * @throws UnsupportedQueryException 
      */
-    public QueryReport getReport(final FluoClient fluo, final String queryId) {
+    public QueryReport getReport(final FluoClient fluo, final String queryId) throws UnsupportedQueryException {
         checkNotNull(fluo);
         checkNotNull(queryId);
 

http://git-wip-us.apache.org/repos/asf/incubator-rya/blob/05147266/extras/rya.pcj.fluo/pcj.fluo.api/src/main/java/org/apache/rya/indexing/pcj/fluo/api/ListQueryIds.java
----------------------------------------------------------------------
diff --git a/extras/rya.pcj.fluo/pcj.fluo.api/src/main/java/org/apache/rya/indexing/pcj/fluo/api/ListQueryIds.java b/extras/rya.pcj.fluo/pcj.fluo.api/src/main/java/org/apache/rya/indexing/pcj/fluo/api/ListQueryIds.java
index df1648b..e09d0c6 100644
--- a/extras/rya.pcj.fluo/pcj.fluo.api/src/main/java/org/apache/rya/indexing/pcj/fluo/api/ListQueryIds.java
+++ b/extras/rya.pcj.fluo/pcj.fluo.api/src/main/java/org/apache/rya/indexing/pcj/fluo/api/ListQueryIds.java
@@ -52,7 +52,7 @@ public class ListQueryIds {
 
         try(Snapshot snap = fluo.newSnapshot() ) {
             // Create an iterator that iterates over the QUERY_ID column.
-            final CellScanner cellScanner = snap.scanner().fetch( FluoQueryColumns.QUERY_ID).build();
+            final CellScanner cellScanner = snap.scanner().fetch( FluoQueryColumns.QUERY_NODE_ID).build();
 
             for (RowColumnValue rcv : cellScanner) {
             	queryIds.add(rcv.getsValue());

http://git-wip-us.apache.org/repos/asf/incubator-rya/blob/05147266/extras/rya.pcj.fluo/pcj.fluo.app/src/main/java/org/apache/rya/indexing/pcj/fluo/app/IncrementalUpdateConstants.java
----------------------------------------------------------------------
diff --git a/extras/rya.pcj.fluo/pcj.fluo.app/src/main/java/org/apache/rya/indexing/pcj/fluo/app/IncrementalUpdateConstants.java b/extras/rya.pcj.fluo/pcj.fluo.app/src/main/java/org/apache/rya/indexing/pcj/fluo/app/IncrementalUpdateConstants.java
index 4b6f44e..c090d37 100644
--- a/extras/rya.pcj.fluo/pcj.fluo.app/src/main/java/org/apache/rya/indexing/pcj/fluo/app/IncrementalUpdateConstants.java
+++ b/extras/rya.pcj.fluo/pcj.fluo.app/src/main/java/org/apache/rya/indexing/pcj/fluo/app/IncrementalUpdateConstants.java
@@ -39,9 +39,6 @@ public class IncrementalUpdateConstants {
     public static final String CONSTRUCT_PREFIX = "CONSTRUCT";
     public static final String PERIODIC_QUERY_PREFIX = "PERIODIC_QUERY";
     
-    public static enum QueryType{Construct, Projection, Periodic};
-    public static enum ExportStrategy{Rya, Kafka};
-    
     public static final String PERIODIC_BIN_ID = PeriodicQueryResultStorage.PeriodicBinId;
 
     public static final String URI_TYPE = "http://www.w3.org/2001/XMLSchema#anyURI";

http://git-wip-us.apache.org/repos/asf/incubator-rya/blob/05147266/extras/rya.pcj.fluo/pcj.fluo.app/src/main/java/org/apache/rya/indexing/pcj/fluo/app/export/ExporterManager.java
----------------------------------------------------------------------
diff --git a/extras/rya.pcj.fluo/pcj.fluo.app/src/main/java/org/apache/rya/indexing/pcj/fluo/app/export/ExporterManager.java b/extras/rya.pcj.fluo/pcj.fluo.app/src/main/java/org/apache/rya/indexing/pcj/fluo/app/export/ExporterManager.java
new file mode 100644
index 0000000..62f1271
--- /dev/null
+++ b/extras/rya.pcj.fluo/pcj.fluo.app/src/main/java/org/apache/rya/indexing/pcj/fluo/app/export/ExporterManager.java
@@ -0,0 +1,216 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *   http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied.  See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+package org.apache.rya.indexing.pcj.fluo.app.export;
+
+import java.io.UnsupportedEncodingException;
+import java.util.Collection;
+import java.util.HashMap;
+import java.util.Map;
+import java.util.Set;
+
+import org.apache.fluo.api.data.Bytes;
+import org.apache.rya.accumulo.utils.VisibilitySimplifier;
+import org.apache.rya.api.client.CreatePCJ.ExportStrategy;
+import org.apache.rya.api.client.CreatePCJ.QueryType;
+import org.apache.rya.api.domain.RyaStatement;
+import org.apache.rya.api.domain.RyaSubGraph;
+import org.apache.rya.indexing.pcj.fluo.app.export.IncrementalBindingSetExporter;
+import org.apache.rya.indexing.pcj.fluo.app.export.IncrementalBindingSetExporter.ResultExportException;
+import org.apache.rya.indexing.pcj.fluo.app.export.kafka.RyaSubGraphKafkaSerDe;
+import org.apache.rya.indexing.pcj.fluo.app.query.FluoQuery;
+import org.apache.rya.indexing.pcj.fluo.app.util.FluoQueryUtils;
+import org.apache.rya.indexing.pcj.storage.accumulo.VisibilityBindingSet;
+import org.apache.rya.indexing.pcj.storage.accumulo.VisibilityBindingSetSerDe;
+
+import com.google.common.base.Preconditions;
+
+/**
+ * This class manages all of the {@link IncrementalResultExporter}s for the Rya Fluo Application.
+ * It maps the {@link FluoQuery}'s {@link QueryType} and Set of {@link ExportStrategy} objects
+ * to the correct IncrementalResultExporter. 
+ *
+ */
+public class ExporterManager implements AutoCloseable {
+
+    private static final VisibilityBindingSetSerDe BS_SERDE = new VisibilityBindingSetSerDe();
+    private static final RyaSubGraphKafkaSerDe SG_SERDE = new RyaSubGraphKafkaSerDe();
+    private Map<String, String> simplifiedVisibilities = new HashMap<>();
+    
+    private Map<QueryType, Map<ExportStrategy, IncrementalResultExporter>> exporters;
+    
+    private ExporterManager(Map<QueryType, Map<ExportStrategy, IncrementalResultExporter>> exporters) {
+        this.exporters = Preconditions.checkNotNull(exporters);
+    }
+    
+    /**
+     * @return {@link Builder} for constructing an instance of an ExporterManager.
+     */
+    public static Builder builder() {
+        return new Builder();
+    }
+    
+    /**
+     * Maps the data to the correct {@link IncrementalResultExporter}s using the provided
+     * QueryType and ExportStrategies, then exports it.
+     * @param type - QueryType that produced the result
+     * @param strategies - ExportStrategies used to export the result
+     * @param queryId - Fluo Query Id for the query that produced the result
+     * @param data - Serialized result to be exported
+     * @throws ResultExportException 
+     */
+    public void export(QueryType type, Set<ExportStrategy> strategies, String queryId, Bytes data) throws ResultExportException {
+        
+        String pcjId = FluoQueryUtils.convertFluoQueryIdToPcjId(queryId);
+        
+        if(type == QueryType.CONSTRUCT) {
+            exportSubGraph(exporters.get(type), strategies, pcjId, data);
+        } else {
+            exportBindingSet(exporters.get(type), strategies, pcjId, data);
+        }
+        
+    }
+    
+    /**
+     * Exports BindingSet using the exporters for a given {@link QueryType}.
+     * @param exporters - exporters corresponding to a given queryType
+     * @param strategies - export strategies used to export results (possibly a subset of those in the exporters map)
+     * @param pcjId - id of the query whose results are being exported
+     * @param data - serialized BindingSet result
+     * @throws ResultExportException
+     */
+    private void exportBindingSet(Map<ExportStrategy, IncrementalResultExporter> exporters, Set<ExportStrategy> strategies, String pcjId, Bytes data) throws ResultExportException {
+        try {
+            VisibilityBindingSet bs = BS_SERDE.deserialize(data);
+            simplifyVisibilities(bs);
+            
+            for(ExportStrategy strategy: strategies) {
+                IncrementalBindingSetExporter exporter = (IncrementalBindingSetExporter) exporters.get(strategy);
+                exporter.export(pcjId, bs);
+            }
+        } catch (Exception e) {
+            throw new ResultExportException("Unable to deserialize or export the provided BindingSet", e);
+        }
+    }
+    
+    /**
+     * Exports RyaSubGraph using the exporters for a given {@link QueryType}.
+     * @param exporters - exporters corresponding to a given queryType
+     * @param strategies - export strategies used to export results (possibly a subset of those in the exporters map)
+     * @param pcjId - id of the query whose results are being exported
+     * @param data - serialized RyaSubGraph result
+     * @throws ResultExportException
+     */
+    private void exportSubGraph(Map<ExportStrategy, IncrementalResultExporter> exporters, Set<ExportStrategy> strategies, String pcjId, Bytes data) throws ResultExportException {
+        RyaSubGraph subGraph = SG_SERDE.fromBytes(data.toArray());
+        
+        try {
+            simplifyVisibilities(subGraph);
+        } catch (UnsupportedEncodingException e) {
+            throw new ResultExportException("Unable to decode the visibilities of the provided RyaSubGraph", e);
+        }
+        
+        for(ExportStrategy strategy: strategies) {
+            IncrementalRyaSubGraphExporter exporter = (IncrementalRyaSubGraphExporter) exporters.get(strategy);
+            exporter.export(pcjId, subGraph);
+        }
+    }
+    
+    private void simplifyVisibilities(VisibilityBindingSet result) {
+        // Simplify the result's visibilities.
+        final String visibility = result.getVisibility();
+        if(!simplifiedVisibilities.containsKey(visibility)) {
+            final String simplified = VisibilitySimplifier.simplify( visibility );
+            simplifiedVisibilities.put(visibility, simplified);
+        }
+        result.setVisibility( simplifiedVisibilities.get(visibility) );
+    }
+    
+    private void simplifyVisibilities(RyaSubGraph subgraph) throws UnsupportedEncodingException {
+        Set<RyaStatement> statements = subgraph.getStatements();
+        if (statements.size() > 0) {
+            byte[] visibilityBytes = statements.iterator().next().getColumnVisibility();
+            // Simplify the result's visibilities and cache new simplified
+            // visibilities
+            String visibility = new String(visibilityBytes, "UTF-8");
+            if (!simplifiedVisibilities.containsKey(visibility)) {
+                String simplified = VisibilitySimplifier.simplify(visibility);
+                simplifiedVisibilities.put(visibility, simplified);
+            }
+
+            for (RyaStatement statement : statements) {
+                statement.setColumnVisibility(simplifiedVisibilities.get(visibility).getBytes("UTF-8"));
+            }
+            
+            subgraph.setStatements(statements);
+        }
+    }
+    
+    public static class Builder {
+        
+        private Map<QueryType, Map<ExportStrategy, IncrementalResultExporter>> exporters = new HashMap<>();
+        
+        /**
+         * Add an {@link IncrementalResultExporter} to be used by this ExporterManager for exporting results
+         * @param exporter - IncrementalResultExporter for exporting query results
+         * @return - Builder for chaining method calls
+         */
+        public Builder addIncrementalResultExporter(IncrementalResultExporter exporter) {
+            
+            Set<QueryType> types = exporter.getQueryTypes();
+            ExportStrategy strategy = exporter.getExportStrategy();
+            
+            for (QueryType type : types) {
+                if (!exporters.containsKey(type)) {
+                    Map<ExportStrategy, IncrementalResultExporter> exportMap = new HashMap<>();
+                    exportMap.put(strategy, exporter);
+                    exporters.put(type, exportMap);
+                } else {
+                    Map<ExportStrategy, IncrementalResultExporter> exportMap = exporters.get(type);
+                    if (!exportMap.containsKey(strategy)) {
+                        exportMap.put(strategy, exporter);
+                    }
+                }
+            }
+            
+            return this;
+        }
+        
+        /**
+         * @return - ExporterManager for managing IncrementalResultExporters and exporting results
+         */
+        public ExporterManager build() {
+            // Adds a NoOpExporter in the event that the user does not want to export results.
+            addIncrementalResultExporter(new NoOpExporter());
+            return new ExporterManager(exporters);
+        }
+        
+    }
+
+    @Override
+    public void close() throws Exception {
+        
+        Collection<Map<ExportStrategy, IncrementalResultExporter>> values = exporters.values();
+        
+        for(Map<ExportStrategy, IncrementalResultExporter> map: values) {
+            for(IncrementalResultExporter exporter: map.values()) {
+                exporter.close();
+            }
+        }
+    }
+}
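
For orientation, a minimal wiring sketch for the new Builder follows (illustrative only, not part of this patch). It assumes the ExporterManager lives in the export package used by the classes above and that the KafkaBindingSetExporter constructor simply accepts the KafkaProducer it wraps; neither detail is spelled out in this excerpt.

    package org.apache.rya.indexing.pcj.fluo.app.export;   // assumed package, matching the classes above

    import org.apache.kafka.clients.producer.KafkaProducer;
    import org.apache.rya.indexing.pcj.fluo.app.export.kafka.KafkaBindingSetExporter;
    import org.apache.rya.indexing.pcj.storage.accumulo.VisibilityBindingSet;

    /**
     * Illustrative wiring only.
     */
    public final class ExporterWiringSketch {

        /**
         * Assembles an ExporterManager that routes PROJECTION results to Kafka.
         * build() silently registers the NoOpExporter as well, so queries created
         * with ExportStrategy.NO_OP_EXPORT are still handled.
         *
         * Assumption: the KafkaBindingSetExporter constructor takes the producer
         * it wraps; the constructor is not shown in this excerpt.
         */
        public static ExporterManager kafkaOnlyManager(final KafkaProducer<String, VisibilityBindingSet> producer) {
            return new ExporterManager.Builder()
                    .addIncrementalResultExporter(new KafkaBindingSetExporter(producer))
                    .build();
        }

        private ExporterWiringSketch() { }
    }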

http://git-wip-us.apache.org/repos/asf/incubator-rya/blob/05147266/extras/rya.pcj.fluo/pcj.fluo.app/src/main/java/org/apache/rya/indexing/pcj/fluo/app/export/IncrementalBindingSetExporter.java
----------------------------------------------------------------------
diff --git a/extras/rya.pcj.fluo/pcj.fluo.app/src/main/java/org/apache/rya/indexing/pcj/fluo/app/export/IncrementalBindingSetExporter.java b/extras/rya.pcj.fluo/pcj.fluo.app/src/main/java/org/apache/rya/indexing/pcj/fluo/app/export/IncrementalBindingSetExporter.java
index c2f4cb4..9877671 100644
--- a/extras/rya.pcj.fluo/pcj.fluo.app/src/main/java/org/apache/rya/indexing/pcj/fluo/app/export/IncrementalBindingSetExporter.java
+++ b/extras/rya.pcj.fluo/pcj.fluo.app/src/main/java/org/apache/rya/indexing/pcj/fluo/app/export/IncrementalBindingSetExporter.java
@@ -18,7 +18,6 @@
  */
 package org.apache.rya.indexing.pcj.fluo.app.export;
 
-import org.apache.fluo.api.client.TransactionBase;
 import org.apache.rya.indexing.pcj.storage.accumulo.VisibilityBindingSet;
 
 import edu.umd.cs.findbugs.annotations.DefaultAnnotation;
@@ -29,17 +28,16 @@ import edu.umd.cs.findbugs.annotations.NonNull;
  * other location.
  */
 @DefaultAnnotation(NonNull.class)
-public interface IncrementalBindingSetExporter extends AutoCloseable {
+public interface IncrementalBindingSetExporter extends IncrementalResultExporter {
 
     /**
      * Export a Binding Set that is a result of a SPARQL query that does not include a Group By clause.
      *
-     * @param tx - The Fluo transaction this export is a part of. (not null)
-     * @param queryId - The Fluo ID of the SPARQL query the binding set is a result of. (not null)
+     * @param queryId - The PCJ ID of the SPARQL query the binding set is a result of. (not null)
      * @param bindingSetString - The Binding Set as it was represented within the Fluo application. (not null)
      * @throws ResultExportException The result could not be exported.
      */
-    public void export(TransactionBase tx, String queryId, VisibilityBindingSet result) throws ResultExportException;
+    public void export(String queryId, VisibilityBindingSet result) throws ResultExportException;
 
     /**
      * A result could not be exported.

http://git-wip-us.apache.org/repos/asf/incubator-rya/blob/05147266/extras/rya.pcj.fluo/pcj.fluo.app/src/main/java/org/apache/rya/indexing/pcj/fluo/app/export/IncrementalBindingSetExporterFactory.java
----------------------------------------------------------------------
diff --git a/extras/rya.pcj.fluo/pcj.fluo.app/src/main/java/org/apache/rya/indexing/pcj/fluo/app/export/IncrementalBindingSetExporterFactory.java b/extras/rya.pcj.fluo/pcj.fluo.app/src/main/java/org/apache/rya/indexing/pcj/fluo/app/export/IncrementalBindingSetExporterFactory.java
deleted file mode 100644
index 1bf492a..0000000
--- a/extras/rya.pcj.fluo/pcj.fluo.app/src/main/java/org/apache/rya/indexing/pcj/fluo/app/export/IncrementalBindingSetExporterFactory.java
+++ /dev/null
@@ -1,104 +0,0 @@
-/*
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements.  See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership.  The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License.  You may obtain a copy of the License at
- *
- *   http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing,
- * software distributed under the License is distributed on an
- * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
- * KIND, either express or implied.  See the License for the
- * specific language governing permissions and limitations
- * under the License.
- */
-package org.apache.rya.indexing.pcj.fluo.app.export;
-
-import edu.umd.cs.findbugs.annotations.DefaultAnnotation;
-import edu.umd.cs.findbugs.annotations.NonNull;
-
-import com.google.common.base.Optional;
-
-import org.apache.fluo.api.observer.Observer.Context;
-
-/**
- * Builds instances of {@link IncrementalBindingSetExporter} using the provided
- * configurations.
- */
-@DefaultAnnotation(NonNull.class)
-public interface IncrementalBindingSetExporterFactory {
-
-    /**
-     * Builds an instance of {@link IncrementalBindingSetExporter} using the
-     * configurations that are provided.
-     *
-     * @param context - Contains the host application's configuration values
-     *   and any parameters that were provided at initialization. (not null)
-     * @return An exporter if configurations were found in the context; otherwise absent.
-     * @throws IncrementalExporterFactoryException A non-configuration related
-     *   problem has occurred and the exporter could not be created as a result.
-     * @throws ConfigurationException Thrown if configuration values were
-     *   provided, but an instance of the exporter could not be initialized
-     *   using them. This could be because they were improperly formatted,
-     *   a required field was missing, or some other configuration based problem.
-     */
-    public Optional<IncrementalBindingSetExporter> build(Context context) throws IncrementalExporterFactoryException, ConfigurationException;
-
-    /**
-     * Indicates a {@link IncrementalBindingSetExporter} could not be created by a
-     * {@link IncrementalBindingSetExporterFactory}.
-     */
-    public static class IncrementalExporterFactoryException extends Exception {
-        private static final long serialVersionUID = 1L;
-
-        /**
-         * Constructs an instance of {@link }.
-         *
-         * @param message - Explains why this exception is being thrown.
-         */
-        public IncrementalExporterFactoryException(final String message) {
-            super(message);
-        }
-
-        /**
-         * Constructs an instance of {@link }.
-         *
-         * @param message - Explains why this exception is being thrown.
-         * @param cause - The exception that caused this one to be thrown.
-         */
-        public IncrementalExporterFactoryException(final String message, final Throwable t) {
-            super(message, t);
-        }
-    }
-
-    /**
-     * The configuration could not be interpreted because required fields were
-     * missing or a value wasn't properly formatted.
-     */
-    public static class ConfigurationException extends IncrementalExporterFactoryException {
-        private static final long serialVersionUID = 1L;
-
-        /**
-         * Constructs an instance of {@link ConfigurationException}.
-         *
-         * @param message - Explains why this exception is being thrown.
-         */
-        public ConfigurationException(final String message) {
-            super(message);
-        }
-
-        /**
-         * Constructs an instance of {@link ConfigurationException}.
-         *
-         * @param message - Explains why this exception is being thrown.
-         * @param cause - The exception that caused this one to be thrown.
-         */
-        public ConfigurationException(final String message, final Throwable cause) {
-            super(message, cause);
-        }
-    }
-}
\ No newline at end of file

http://git-wip-us.apache.org/repos/asf/incubator-rya/blob/05147266/extras/rya.pcj.fluo/pcj.fluo.app/src/main/java/org/apache/rya/indexing/pcj/fluo/app/export/IncrementalResultExporter.java
----------------------------------------------------------------------
diff --git a/extras/rya.pcj.fluo/pcj.fluo.app/src/main/java/org/apache/rya/indexing/pcj/fluo/app/export/IncrementalResultExporter.java b/extras/rya.pcj.fluo/pcj.fluo.app/src/main/java/org/apache/rya/indexing/pcj/fluo/app/export/IncrementalResultExporter.java
new file mode 100644
index 0000000..e49a777
--- /dev/null
+++ b/extras/rya.pcj.fluo/pcj.fluo.app/src/main/java/org/apache/rya/indexing/pcj/fluo/app/export/IncrementalResultExporter.java
@@ -0,0 +1,42 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *   http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied.  See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+package org.apache.rya.indexing.pcj.fluo.app.export;
+
+import java.util.Set;
+
+import org.apache.rya.api.client.CreatePCJ.ExportStrategy;
+import org.apache.rya.api.client.CreatePCJ.QueryType;
+
+/**
+ * Common interface for the different incremental exporters used in the Rya Fluo Application.
+ *
+ */
+public interface IncrementalResultExporter extends AutoCloseable {
+
+    /**
+     * @return - A Set of {@link QueryType}s whose results this exporter handles
+     */
+    public Set<QueryType> getQueryTypes();
+    
+    /**
+     * @return - The {@link ExportStrategy} indicating where results are exported
+     */
+    public ExportStrategy getExportStrategy();
+    
+}
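
Because the routing metadata (query types and export strategy) now lives on each exporter, wrapping or composing exporters is straightforward. The sketch below is illustrative only and not part of this patch: a decorator that counts exported binding sets while delegating the IncrementalResultExporter methods to the wrapped exporter. Placement in the export package is assumed purely for brevity.

    package org.apache.rya.indexing.pcj.fluo.app.export;   // assumed placement next to the interfaces

    import java.util.Set;
    import java.util.concurrent.atomic.AtomicLong;

    import org.apache.rya.api.client.CreatePCJ.ExportStrategy;
    import org.apache.rya.api.client.CreatePCJ.QueryType;
    import org.apache.rya.indexing.pcj.storage.accumulo.VisibilityBindingSet;

    /**
     * Illustrative decorator: counts the binding sets that pass through another
     * exporter while delegating the IncrementalResultExporter metadata to it.
     */
    public class CountingBindingSetExporter implements IncrementalBindingSetExporter {

        private final IncrementalBindingSetExporter delegate;
        private final AtomicLong exported = new AtomicLong();

        public CountingBindingSetExporter(final IncrementalBindingSetExporter delegate) {
            this.delegate = delegate;
        }

        @Override
        public Set<QueryType> getQueryTypes() {
            return delegate.getQueryTypes();
        }

        @Override
        public ExportStrategy getExportStrategy() {
            return delegate.getExportStrategy();
        }

        @Override
        public void export(final String queryId, final VisibilityBindingSet result) throws ResultExportException {
            exported.incrementAndGet();
            delegate.export(queryId, result);
        }

        /** @return How many results have been exported through this decorator. */
        public long getExportedCount() {
            return exported.get();
        }

        @Override
        public void close() throws Exception {
            delegate.close();
        }
    }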

http://git-wip-us.apache.org/repos/asf/incubator-rya/blob/05147266/extras/rya.pcj.fluo/pcj.fluo.app/src/main/java/org/apache/rya/indexing/pcj/fluo/app/export/IncrementalResultExporterFactory.java
----------------------------------------------------------------------
diff --git a/extras/rya.pcj.fluo/pcj.fluo.app/src/main/java/org/apache/rya/indexing/pcj/fluo/app/export/IncrementalResultExporterFactory.java b/extras/rya.pcj.fluo/pcj.fluo.app/src/main/java/org/apache/rya/indexing/pcj/fluo/app/export/IncrementalResultExporterFactory.java
new file mode 100644
index 0000000..5bba4ab
--- /dev/null
+++ b/extras/rya.pcj.fluo/pcj.fluo.app/src/main/java/org/apache/rya/indexing/pcj/fluo/app/export/IncrementalResultExporterFactory.java
@@ -0,0 +1,104 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *   http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied.  See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+package org.apache.rya.indexing.pcj.fluo.app.export;
+
+import edu.umd.cs.findbugs.annotations.DefaultAnnotation;
+import edu.umd.cs.findbugs.annotations.NonNull;
+
+import com.google.common.base.Optional;
+
+import org.apache.fluo.api.observer.Observer.Context;
+
+/**
+ * Builds instances of {@link IncrementalResultExporter} using the provided
+ * configurations.
+ */
+@DefaultAnnotation(NonNull.class)
+public interface IncrementalResultExporterFactory {
+
+    /**
+     * Builds an instance of {@link IncrementalResultExporter} using the
+     * configurations that are provided.
+     *
+     * @param context - Contains the host application's configuration values
+     *   and any parameters that were provided at initialization. (not null)
+     * @return An exporter if configurations were found in the context; otherwise absent.
+     * @throws IncrementalExporterFactoryException A non-configuration related
+     *   problem has occurred and the exporter could not be created as a result.
+     * @throws ConfigurationException Thrown if configuration values were
+     *   provided, but an instance of the exporter could not be initialized
+     *   using them. This could be because they were improperly formatted,
+     *   a required field was missing, or some other configuration based problem.
+     */
+    public Optional<IncrementalResultExporter> build(Context context) throws IncrementalExporterFactoryException, ConfigurationException;
+
+    /**
+     * Indicates an {@link IncrementalResultExporter} could not be created by an
+     * {@link IncrementalResultExporterFactory}.
+     */
+    public static class IncrementalExporterFactoryException extends Exception {
+        private static final long serialVersionUID = 1L;
+
+        /**
+         * Constructs an instance of {@link IncrementalExporterFactoryException}.
+         *
+         * @param message - Explains why this exception is being thrown.
+         */
+        public IncrementalExporterFactoryException(final String message) {
+            super(message);
+        }
+
+        /**
+         * Constructs an instance of {@link IncrementalExporterFactoryException}.
+         *
+         * @param message - Explains why this exception is being thrown.
+         * @param cause - The exception that caused this one to be thrown.
+         */
+        public IncrementalExporterFactoryException(final String message, final Throwable cause) {
+            super(message, cause);
+        }
+    }
+
+    /**
+     * The configuration could not be interpreted because required fields were
+     * missing or a value wasn't properly formatted.
+     */
+    public static class ConfigurationException extends IncrementalExporterFactoryException {
+        private static final long serialVersionUID = 1L;
+
+        /**
+         * Constructs an instance of {@link ConfigurationException}.
+         *
+         * @param message - Explains why this exception is being thrown.
+         */
+        public ConfigurationException(final String message) {
+            super(message);
+        }
+
+        /**
+         * Constructs an instance of {@link ConfigurationException}.
+         *
+         * @param message - Explains why this exception is being thrown.
+         * @param cause - The exception that caused this one to be thrown.
+         */
+        public ConfigurationException(final String message, final Throwable cause) {
+            super(message, cause);
+        }
+    }
+}
\ No newline at end of file
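
A factory implementation only has to map an observer Context onto an Optional exporter. The trivial sketch below (illustrative, not part of this patch) always supplies the NoOpExporter; a real factory would read its configuration out of the Context and return Optional.absent() when it is not configured. Placement in the export package is assumed for brevity.

    package org.apache.rya.indexing.pcj.fluo.app.export;   // assumed placement next to the factory interface

    import org.apache.fluo.api.observer.Observer.Context;

    import com.google.common.base.Optional;

    /**
     * Illustrative factory that always supplies the NoOpExporter, regardless of
     * what the observer Context contains.
     */
    public class NoOpExporterFactory implements IncrementalResultExporterFactory {

        @Override
        public Optional<IncrementalResultExporter> build(final Context context)
                throws IncrementalExporterFactoryException, ConfigurationException {
            // A real factory would inspect the Context here and return
            // Optional.absent() when no configuration for its exporter is present.
            return Optional.<IncrementalResultExporter>of(new NoOpExporter());
        }
    }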

http://git-wip-us.apache.org/repos/asf/incubator-rya/blob/05147266/extras/rya.pcj.fluo/pcj.fluo.app/src/main/java/org/apache/rya/indexing/pcj/fluo/app/export/IncrementalRyaSubGraphExporter.java
----------------------------------------------------------------------
diff --git a/extras/rya.pcj.fluo/pcj.fluo.app/src/main/java/org/apache/rya/indexing/pcj/fluo/app/export/IncrementalRyaSubGraphExporter.java b/extras/rya.pcj.fluo/pcj.fluo.app/src/main/java/org/apache/rya/indexing/pcj/fluo/app/export/IncrementalRyaSubGraphExporter.java
index 797502c..7b7f084 100644
--- a/extras/rya.pcj.fluo/pcj.fluo.app/src/main/java/org/apache/rya/indexing/pcj/fluo/app/export/IncrementalRyaSubGraphExporter.java
+++ b/extras/rya.pcj.fluo/pcj.fluo.app/src/main/java/org/apache/rya/indexing/pcj/fluo/app/export/IncrementalRyaSubGraphExporter.java
@@ -25,7 +25,7 @@ import org.apache.rya.indexing.pcj.fluo.app.export.IncrementalBindingSetExporter
  * from the Rya-Fluo application to the core Rya tables.
  *
  */
-public interface IncrementalRyaSubGraphExporter extends AutoCloseable {
+public interface IncrementalRyaSubGraphExporter extends IncrementalResultExporter {
 
     /**
      * Export a RyaSubGraph that is the result of SPARQL Construct Query.

http://git-wip-us.apache.org/repos/asf/incubator-rya/blob/05147266/extras/rya.pcj.fluo/pcj.fluo.app/src/main/java/org/apache/rya/indexing/pcj/fluo/app/export/IncrementalRyaSubGraphExporterFactory.java
----------------------------------------------------------------------
diff --git a/extras/rya.pcj.fluo/pcj.fluo.app/src/main/java/org/apache/rya/indexing/pcj/fluo/app/export/IncrementalRyaSubGraphExporterFactory.java b/extras/rya.pcj.fluo/pcj.fluo.app/src/main/java/org/apache/rya/indexing/pcj/fluo/app/export/IncrementalRyaSubGraphExporterFactory.java
deleted file mode 100644
index ecbec09..0000000
--- a/extras/rya.pcj.fluo/pcj.fluo.app/src/main/java/org/apache/rya/indexing/pcj/fluo/app/export/IncrementalRyaSubGraphExporterFactory.java
+++ /dev/null
@@ -1,47 +0,0 @@
-package org.apache.rya.indexing.pcj.fluo.app.export;
-/*
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements.  See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership.  The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License.  You may obtain a copy of the License at
- *
- *   http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing,
- * software distributed under the License is distributed on an
- * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
- * KIND, either express or implied.  See the License for the
- * specific language governing permissions and limitations
- * under the License.
- */
-import org.apache.fluo.api.observer.Observer.Context;
-import org.apache.rya.indexing.pcj.fluo.app.export.IncrementalBindingSetExporterFactory.ConfigurationException;
-import org.apache.rya.indexing.pcj.fluo.app.export.IncrementalBindingSetExporterFactory.IncrementalExporterFactoryException;
-
-import com.google.common.base.Optional;
-
-/**
- * Builds instances of {@link IncrementalRyaSubGraphExporter} using the provided
- * configurations.
- */
-public interface IncrementalRyaSubGraphExporterFactory {
-
-    /**
-     * Builds an instance of {@link IncrementalRyaSubGraphExporter} using the
-     * configurations that are provided.
-     *
-     * @param context - Contains the host application's configuration values
-     *   and any parameters that were provided at initialization. (not null)
-     * @return An exporter if configurations were found in the context; otherwise absent.
-     * @throws IncrementalExporterFactoryException A non-configuration related
-     *   problem has occurred and the exporter could not be created as a result.
-     * @throws ConfigurationException Thrown if configuration values were
-     *   provided, but an instance of the exporter could not be initialized
-     *   using them. This could be because they were improperly formatted,
-     *   a required field was missing, or some other configuration based problem.
-     */
-    public Optional<IncrementalRyaSubGraphExporter> build(Context context) throws IncrementalExporterFactoryException, ConfigurationException;
-}

http://git-wip-us.apache.org/repos/asf/incubator-rya/blob/05147266/extras/rya.pcj.fluo/pcj.fluo.app/src/main/java/org/apache/rya/indexing/pcj/fluo/app/export/NoOpExporter.java
----------------------------------------------------------------------
diff --git a/extras/rya.pcj.fluo/pcj.fluo.app/src/main/java/org/apache/rya/indexing/pcj/fluo/app/export/NoOpExporter.java b/extras/rya.pcj.fluo/pcj.fluo.app/src/main/java/org/apache/rya/indexing/pcj/fluo/app/export/NoOpExporter.java
new file mode 100644
index 0000000..ab7f2ed
--- /dev/null
+++ b/extras/rya.pcj.fluo/pcj.fluo.app/src/main/java/org/apache/rya/indexing/pcj/fluo/app/export/NoOpExporter.java
@@ -0,0 +1,59 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *   http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied.  See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+package org.apache.rya.indexing.pcj.fluo.app.export;
+
+import java.util.Set;
+
+import org.apache.rya.api.client.CreatePCJ.ExportStrategy;
+import org.apache.rya.api.client.CreatePCJ.QueryType;
+import org.apache.rya.api.domain.RyaSubGraph;
+import org.apache.rya.indexing.pcj.storage.accumulo.VisibilityBindingSet;
+
+import com.google.common.collect.Sets;
+
+/**
+ * An exporter that discards all results. It can be specified if a user does not
+ * want their results exported from Fluo.
+ *
+ */
+public class NoOpExporter implements IncrementalBindingSetExporter, IncrementalRyaSubGraphExporter {
+
+    @Override
+    public Set<QueryType> getQueryTypes() {
+        return Sets.newHashSet(QueryType.CONSTRUCT, QueryType.PROJECTION);
+    }
+
+    @Override
+    public ExportStrategy getExportStrategy() {
+        return ExportStrategy.NO_OP_EXPORT;
+    }
+
+    @Override
+    public void close() throws Exception {
+    }
+
+    @Override
+    public void export(String constructID, RyaSubGraph subgraph) throws ResultExportException {
+    }
+
+    @Override
+    public void export(String queryId, VisibilityBindingSet result) throws ResultExportException {
+    }
+
+}

http://git-wip-us.apache.org/repos/asf/incubator-rya/blob/05147266/extras/rya.pcj.fluo/pcj.fluo.app/src/main/java/org/apache/rya/indexing/pcj/fluo/app/export/kafka/KafkaBindingSetExporter.java
----------------------------------------------------------------------
diff --git a/extras/rya.pcj.fluo/pcj.fluo.app/src/main/java/org/apache/rya/indexing/pcj/fluo/app/export/kafka/KafkaBindingSetExporter.java b/extras/rya.pcj.fluo/pcj.fluo.app/src/main/java/org/apache/rya/indexing/pcj/fluo/app/export/kafka/KafkaBindingSetExporter.java
index 7c4b3cc..0c26d65 100644
--- a/extras/rya.pcj.fluo/pcj.fluo.app/src/main/java/org/apache/rya/indexing/pcj/fluo/app/export/kafka/KafkaBindingSetExporter.java
+++ b/extras/rya.pcj.fluo/pcj.fluo.app/src/main/java/org/apache/rya/indexing/pcj/fluo/app/export/kafka/KafkaBindingSetExporter.java
@@ -20,18 +20,21 @@ package org.apache.rya.indexing.pcj.fluo.app.export.kafka;
 
 import static com.google.common.base.Preconditions.checkNotNull;
 
+import java.util.Set;
 import java.util.concurrent.Future;
 import java.util.concurrent.TimeUnit;
 
-import org.apache.fluo.api.client.TransactionBase;
 import org.apache.kafka.clients.producer.KafkaProducer;
 import org.apache.kafka.clients.producer.ProducerRecord;
 import org.apache.kafka.clients.producer.RecordMetadata;
 import org.apache.log4j.Logger;
+import org.apache.rya.api.client.CreatePCJ.ExportStrategy;
+import org.apache.rya.api.client.CreatePCJ.QueryType;
 import org.apache.rya.indexing.pcj.fluo.app.export.IncrementalBindingSetExporter;
-import org.apache.rya.indexing.pcj.fluo.app.query.FluoQueryColumns;
 import org.apache.rya.indexing.pcj.storage.accumulo.VisibilityBindingSet;
 
+import com.google.common.collect.Sets;
+
 /**
  * Incrementally exports SPARQL query results to Kafka topics.
  */
@@ -57,17 +60,15 @@ public class KafkaBindingSetExporter implements IncrementalBindingSetExporter {
      * Send the results to the topic using the queryID as the topicname
      */
     @Override
-    public void export(final TransactionBase fluoTx, final String queryId, final VisibilityBindingSet result) throws ResultExportException {
-        checkNotNull(fluoTx);
+    public void export(final String queryId, final VisibilityBindingSet result) throws ResultExportException {
         checkNotNull(queryId);
         checkNotNull(result);
         try {
-            final String pcjId = fluoTx.gets(queryId, FluoQueryColumns.RYA_PCJ_ID);
-            final String msg = "out to kafta topic: queryId=" + queryId + " pcjId=" + pcjId + " result=" + result;
+            final String msg = "Out to Kafka topic: " + queryId + ", Result: " + result;
             log.trace(msg);
 
             // Send the result to the topic whose name matches the PCJ ID.
-            final ProducerRecord<String, VisibilityBindingSet> rec = new ProducerRecord<>(pcjId, result);
+            final ProducerRecord<String, VisibilityBindingSet> rec = new ProducerRecord<>(queryId, result);
             final Future<RecordMetadata> future = producer.send(rec);
 
             // Don't let the export return until the result has been written to the topic. Otherwise we may lose results.
@@ -84,4 +85,14 @@ public class KafkaBindingSetExporter implements IncrementalBindingSetExporter {
     public void close() throws Exception {
         producer.close(5, TimeUnit.SECONDS);
     }
+
+    @Override
+    public Set<QueryType> getQueryTypes() {
+        return Sets.newHashSet(QueryType.PROJECTION);
+    }
+
+    @Override
+    public ExportStrategy getExportStrategy() {
+        return ExportStrategy.KAFKA;
+    }
 }
\ No newline at end of file
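
Since each binding set is now published to the Kafka topic named after the query ID, consuming results requires nothing Rya-specific. The sketch below is illustrative only: the broker address and consumer group are assumptions, and it reads values as raw bytes because the VisibilityBindingSet serializer configured on the producer side is not part of this diff.

    import java.util.Arrays;
    import java.util.Properties;

    import org.apache.kafka.clients.consumer.ConsumerRecord;
    import org.apache.kafka.clients.consumer.ConsumerRecords;
    import org.apache.kafka.clients.consumer.KafkaConsumer;

    /**
     * Illustrative consumer that tails the topic a query's results are exported to.
     */
    public class BindingSetTopicTailer {

        public static void main(final String[] args) {
            final String queryId = args[0];   // topic name == the query ID handed to export(...)

            final Properties props = new Properties();
            props.put("bootstrap.servers", "localhost:9092");   // assumed broker location
            props.put("group.id", "binding-set-tailer");        // illustrative consumer group
            props.put("key.deserializer", "org.apache.kafka.common.serialization.StringDeserializer");
            props.put("value.deserializer", "org.apache.kafka.common.serialization.ByteArrayDeserializer");

            try (KafkaConsumer<String, byte[]> consumer = new KafkaConsumer<>(props)) {
                consumer.subscribe(Arrays.asList(queryId));
                while (true) {
                    final ConsumerRecords<String, byte[]> records = consumer.poll(1000L);
                    for (final ConsumerRecord<String, byte[]> record : records) {
                        // Each value holds a serialized VisibilityBindingSet; deserialization is
                        // left to whichever serializer the producer was configured with.
                        System.out.println(record.topic() + ": " + record.value().length + " bytes");
                    }
                }
            }
        }
    }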