Posted to commits@nifi.apache.org by ma...@apache.org on 2015/04/29 18:21:08 UTC

[1/2] incubator-nifi git commit: NIFI-554: Catch FileNotFoundException and handle it when creating IndexSearcher

Repository: incubator-nifi
Updated Branches:
  refs/heads/develop 57e78bf72 -> 4baf48ae9


NIFI-554: Catch FileNotFoundException and handle it when creating IndexSearcher


Project: http://git-wip-us.apache.org/repos/asf/incubator-nifi/repo
Commit: http://git-wip-us.apache.org/repos/asf/incubator-nifi/commit/20831c87
Tree: http://git-wip-us.apache.org/repos/asf/incubator-nifi/tree/20831c87
Diff: http://git-wip-us.apache.org/repos/asf/incubator-nifi/diff/20831c87

Branch: refs/heads/develop
Commit: 20831c87fc695683c4a796d3906eef2ec939a311
Parents: 57e78bf
Author: Mark Payne <ma...@hotmail.com>
Authored: Wed Apr 29 08:31:13 2015 -0400
Committer: Mark Payne <ma...@hotmail.com>
Committed: Wed Apr 29 08:31:13 2015 -0400

----------------------------------------------------------------------
 .../PersistentProvenanceRepository.java         |  54 +++++-----
 .../nifi/provenance/lucene/LineageQuery.java    | 103 ++++++++++---------
 2 files changed, 82 insertions(+), 75 deletions(-)
----------------------------------------------------------------------
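For context before the hunks below: the heart of NIFI-554 is that LineageQuery now borrows its IndexSearcher from the IndexManager, returns it in a finally block, and treats a FileNotFoundException (nothing indexed yet, or the data has already aged off) as an empty result rather than a failure. The following is a minimal, self-contained sketch of that control flow, not the committed code; the borrowIndexSearcher/returnIndexSearcher method names come from the diff, but the cut-down IndexManager interface and the empty body of the inner try are stand-ins for illustration.

import java.io.File;
import java.io.FileNotFoundException;
import java.io.IOException;
import java.util.Collections;
import java.util.Set;

import org.apache.lucene.document.Document;
import org.apache.lucene.search.IndexSearcher;

public class BorrowedSearcherSketch {

    /** Cut-down stand-in for NiFi's IndexManager; only the two methods the diff uses. */
    interface IndexManager {
        IndexSearcher borrowIndexSearcher(File indexDirectory) throws IOException;
        void returnIndexSearcher(File indexDirectory, IndexSearcher searcher);
    }

    static Set<Document> searchOrEmpty(final IndexManager indexManager, final File indexDirectory) throws IOException {
        final IndexSearcher searcher;
        try {
            // Borrow a pooled searcher instead of opening FSDirectory/DirectoryReader directly.
            searcher = indexManager.borrowIndexSearcher(indexDirectory);
            try {
                // ... build the Lucene query and collect the matching documents here ...
                return Collections.emptySet();
            } finally {
                // Hand the searcher back even if the search throws.
                indexManager.returnIndexSearcher(indexDirectory, searcher);
            }
        } catch (final FileNotFoundException fnfe) {
            // Nothing has been indexed yet, or the data has already aged off: treat as "no hits".
            return Collections.emptySet();
        }
    }
}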


http://git-wip-us.apache.org/repos/asf/incubator-nifi/blob/20831c87/nifi/nifi-nar-bundles/nifi-provenance-repository-bundle/nifi-persistent-provenance-repository/src/main/java/org/apache/nifi/provenance/PersistentProvenanceRepository.java
----------------------------------------------------------------------
diff --git a/nifi/nifi-nar-bundles/nifi-provenance-repository-bundle/nifi-persistent-provenance-repository/src/main/java/org/apache/nifi/provenance/PersistentProvenanceRepository.java b/nifi/nifi-nar-bundles/nifi-provenance-repository-bundle/nifi-persistent-provenance-repository/src/main/java/org/apache/nifi/provenance/PersistentProvenanceRepository.java
index fe89a5e..214fc7c 100644
--- a/nifi/nifi-nar-bundles/nifi-provenance-repository-bundle/nifi-persistent-provenance-repository/src/main/java/org/apache/nifi/provenance/PersistentProvenanceRepository.java
+++ b/nifi/nifi-nar-bundles/nifi-provenance-repository-bundle/nifi-persistent-provenance-repository/src/main/java/org/apache/nifi/provenance/PersistentProvenanceRepository.java
@@ -699,7 +699,7 @@ public class PersistentProvenanceRepository implements ProvenanceEventRepository
                 if (bytesWrittenSinceRollover.get() >= configuration.getMaxEventFileCapacity()) {
                     try {
                         rollover(false);
-                    } catch (IOException e) {
+                    } catch (final IOException e) {
                         logger.error("Failed to Rollover Provenance Event Repository file due to {}", e.toString());
                         logger.error("", e);
                         eventReporter.reportEvent(Severity.ERROR, EVENT_CATEGORY, "Failed to Rollover Provenance Event Repository file due to " + e.toString());
@@ -1001,7 +1001,7 @@ public class PersistentProvenanceRepository implements ProvenanceEventRepository
                         if (fileRolledOver == null) {
                             return;
                         }
-                        File file = fileRolledOver;
+                        final File file = fileRolledOver;
 
                         // update our map of id to Path
                         // need lock to update the map, even though it's an AtomicReference, AtomicReference allows those doing a
@@ -1010,7 +1010,7 @@ public class PersistentProvenanceRepository implements ProvenanceEventRepository
                         writeLock.lock();
                         try {
                             final Long fileFirstEventId = Long.valueOf(LuceneUtil.substringBefore(fileRolledOver.getName(), "."));
-                            SortedMap<Long, Path> newIdToPathMap = new TreeMap<>(new PathMapComparator());
+                            final SortedMap<Long, Path> newIdToPathMap = new TreeMap<>(new PathMapComparator());
                             newIdToPathMap.putAll(idToPathMap.get());
                             newIdToPathMap.put(fileFirstEventId, file.toPath());
                             idToPathMap.set(newIdToPathMap);
@@ -1452,11 +1452,11 @@ public class PersistentProvenanceRepository implements ProvenanceEventRepository
                     try (final DirectoryReader directoryReader = DirectoryReader.open(FSDirectory.open(indexDirectory))) {
                         final IndexSearcher searcher = new IndexSearcher(directoryReader);
 
-                        TopDocs topDocs = searcher.search(luceneQuery, 10000000);
+                        final TopDocs topDocs = searcher.search(luceneQuery, 10000000);
                         logger.info("For {}, Top Docs has {} hits; reading Lucene results", indexDirectory, topDocs.scoreDocs.length);
 
                         if (topDocs.totalHits > 0) {
-                            for (ScoreDoc scoreDoc : topDocs.scoreDocs) {
+                            for (final ScoreDoc scoreDoc : topDocs.scoreDocs) {
                                 final int docId = scoreDoc.doc;
                                 final Document d = directoryReader.document(docId);
                                 localScoreDocs.add(d);
@@ -1649,16 +1649,16 @@ public class PersistentProvenanceRepository implements ProvenanceEventRepository
             }
 
             switch (event.getEventType()) {
-            case CLONE:
-            case FORK:
-            case JOIN:
-            case REPLAY:
-                return submitLineageComputation(event.getChildUuids(), LineageComputationType.EXPAND_CHILDREN, eventId, event.getEventTime(), Long.MAX_VALUE);
-            default:
-                final AsyncLineageSubmission submission = new AsyncLineageSubmission(LineageComputationType.EXPAND_CHILDREN, eventId, Collections.<String>emptyList(), 1);
-                lineageSubmissionMap.put(submission.getLineageIdentifier(), submission);
-                submission.getResult().setError("Event ID " + eventId + " indicates an event of type " + event.getEventType() + " so its children cannot be expanded");
-                return submission;
+                case CLONE:
+                case FORK:
+                case JOIN:
+                case REPLAY:
+                    return submitLineageComputation(event.getChildUuids(), LineageComputationType.EXPAND_CHILDREN, eventId, event.getEventTime(), Long.MAX_VALUE);
+                default:
+                    final AsyncLineageSubmission submission = new AsyncLineageSubmission(LineageComputationType.EXPAND_CHILDREN, eventId, Collections.<String>emptyList(), 1);
+                    lineageSubmissionMap.put(submission.getLineageIdentifier(), submission);
+                    submission.getResult().setError("Event ID " + eventId + " indicates an event of type " + event.getEventType() + " so its children cannot be expanded");
+                    return submission;
             }
         } catch (final IOException ioe) {
             final AsyncLineageSubmission submission = new AsyncLineageSubmission(LineageComputationType.EXPAND_CHILDREN, eventId, Collections.<String>emptyList(), 1);
@@ -1686,17 +1686,17 @@ public class PersistentProvenanceRepository implements ProvenanceEventRepository
             }
 
             switch (event.getEventType()) {
-            case JOIN:
-            case FORK:
-            case CLONE:
-            case REPLAY:
-                return submitLineageComputation(event.getParentUuids(), LineageComputationType.EXPAND_PARENTS, eventId, 0L, event.getEventTime());
-            default: {
-                final AsyncLineageSubmission submission = new AsyncLineageSubmission(LineageComputationType.EXPAND_PARENTS, eventId, Collections.<String>emptyList(), 1);
-                lineageSubmissionMap.put(submission.getLineageIdentifier(), submission);
-                submission.getResult().setError("Event ID " + eventId + " indicates an event of type " + event.getEventType() + " so its parents cannot be expanded");
-                return submission;
-            }
+                case JOIN:
+                case FORK:
+                case CLONE:
+                case REPLAY:
+                    return submitLineageComputation(event.getParentUuids(), LineageComputationType.EXPAND_PARENTS, eventId, 0L, event.getEventTime());
+                default: {
+                    final AsyncLineageSubmission submission = new AsyncLineageSubmission(LineageComputationType.EXPAND_PARENTS, eventId, Collections.<String>emptyList(), 1);
+                    lineageSubmissionMap.put(submission.getLineageIdentifier(), submission);
+                    submission.getResult().setError("Event ID " + eventId + " indicates an event of type " + event.getEventType() + " so its parents cannot be expanded");
+                    return submission;
+                }
             }
         } catch (final IOException ioe) {
             final AsyncLineageSubmission submission = new AsyncLineageSubmission(LineageComputationType.EXPAND_PARENTS, eventId, Collections.<String>emptyList(), 1);
@@ -1880,7 +1880,7 @@ public class PersistentProvenanceRepository implements ProvenanceEventRepository
             }
 
             try {
-                final Set<ProvenanceEventRecord> matchingRecords = LineageQuery.computeLineageForFlowFiles(PersistentProvenanceRepository.this, indexDir, null, flowFileUuids);
+                final Set<ProvenanceEventRecord> matchingRecords = LineageQuery.computeLineageForFlowFiles(PersistentProvenanceRepository.this, indexManager, indexDir, null, flowFileUuids);
                 final StandardLineageResult result = submission.getResult();
                 result.update(matchingRecords);
 

http://git-wip-us.apache.org/repos/asf/incubator-nifi/blob/20831c87/nifi/nifi-nar-bundles/nifi-provenance-repository-bundle/nifi-persistent-provenance-repository/src/main/java/org/apache/nifi/provenance/lucene/LineageQuery.java
----------------------------------------------------------------------
diff --git a/nifi/nifi-nar-bundles/nifi-provenance-repository-bundle/nifi-persistent-provenance-repository/src/main/java/org/apache/nifi/provenance/lucene/LineageQuery.java b/nifi/nifi-nar-bundles/nifi-provenance-repository-bundle/nifi-persistent-provenance-repository/src/main/java/org/apache/nifi/provenance/lucene/LineageQuery.java
index 3f75c00..5e4f69d 100644
--- a/nifi/nifi-nar-bundles/nifi-provenance-repository-bundle/nifi-persistent-provenance-repository/src/main/java/org/apache/nifi/provenance/lucene/LineageQuery.java
+++ b/nifi/nifi-nar-bundles/nifi-provenance-repository-bundle/nifi-persistent-provenance-repository/src/main/java/org/apache/nifi/provenance/lucene/LineageQuery.java
@@ -19,26 +19,23 @@ package org.apache.nifi.provenance.lucene;
 import static java.util.Objects.requireNonNull;
 
 import java.io.File;
+import java.io.FileNotFoundException;
 import java.io.IOException;
 import java.util.Collection;
+import java.util.Collections;
 import java.util.Set;
 import java.util.concurrent.TimeUnit;
 import java.util.concurrent.atomic.AtomicInteger;
 
-import org.apache.nifi.provenance.PersistentProvenanceRepository;
-import org.apache.nifi.provenance.ProvenanceEventRecord;
-import org.apache.nifi.provenance.SearchableFields;
-
-import org.apache.lucene.index.DirectoryReader;
-import org.apache.lucene.index.IndexReader;
 import org.apache.lucene.index.Term;
 import org.apache.lucene.search.BooleanClause.Occur;
 import org.apache.lucene.search.BooleanQuery;
 import org.apache.lucene.search.IndexSearcher;
 import org.apache.lucene.search.TermQuery;
 import org.apache.lucene.search.TopDocs;
-import org.apache.lucene.store.Directory;
-import org.apache.lucene.store.FSDirectory;
+import org.apache.nifi.provenance.PersistentProvenanceRepository;
+import org.apache.nifi.provenance.ProvenanceEventRecord;
+import org.apache.nifi.provenance.SearchableFields;
 import org.slf4j.Logger;
 import org.slf4j.LoggerFactory;
 
@@ -48,7 +45,7 @@ public class LineageQuery {
     public static final int MAX_LINEAGE_UUIDS = 100;
     private static final Logger logger = LoggerFactory.getLogger(LineageQuery.class);
 
-    public static Set<ProvenanceEventRecord> computeLineageForFlowFiles(final PersistentProvenanceRepository repo, final File indexDirectory,
+    public static Set<ProvenanceEventRecord> computeLineageForFlowFiles(final PersistentProvenanceRepository repo, final IndexManager indexManager, final File indexDirectory,
             final String lineageIdentifier, final Collection<String> flowFileUuids) throws IOException {
         if (requireNonNull(flowFileUuids).size() > MAX_LINEAGE_UUIDS) {
             throw new IllegalArgumentException(String.format("Cannot compute lineage for more than %s FlowFiles. This lineage contains %s.", MAX_LINEAGE_UUIDS, flowFileUuids.size()));
@@ -58,52 +55,62 @@ public class LineageQuery {
             throw new IllegalArgumentException("Must specify either Lineage Identifier or FlowFile UUIDs to compute lineage");
         }
 
-        try (final Directory fsDir = FSDirectory.open(indexDirectory);
-                final IndexReader indexReader = DirectoryReader.open(fsDir)) {
-
-            final IndexSearcher searcher = new IndexSearcher(indexReader);
-
-            // Create a query for all Events related to the FlowFiles of interest. We do this by adding all ID's as
-            // "SHOULD" clauses and then setting the minimum required to 1.
-            final BooleanQuery flowFileIdQuery;
-            if (flowFileUuids == null || flowFileUuids.isEmpty()) {
-                flowFileIdQuery = null;
-            } else {
-                flowFileIdQuery = new BooleanQuery();
-                for (final String flowFileUuid : flowFileUuids) {
-                    flowFileIdQuery.add(new TermQuery(new Term(SearchableFields.FlowFileUUID.getSearchableFieldName(), flowFileUuid)), Occur.SHOULD);
+        final IndexSearcher searcher;
+        try {
+            searcher = indexManager.borrowIndexSearcher(indexDirectory);
+            try {
+                // Create a query for all Events related to the FlowFiles of interest. We do this by adding all ID's as
+                // "SHOULD" clauses and then setting the minimum required to 1.
+                final BooleanQuery flowFileIdQuery;
+                if (flowFileUuids == null || flowFileUuids.isEmpty()) {
+                    flowFileIdQuery = null;
+                } else {
+                    flowFileIdQuery = new BooleanQuery();
+                    for (final String flowFileUuid : flowFileUuids) {
+                        flowFileIdQuery.add(new TermQuery(new Term(SearchableFields.FlowFileUUID.getSearchableFieldName(), flowFileUuid)), Occur.SHOULD);
+                    }
+                    flowFileIdQuery.setMinimumNumberShouldMatch(1);
                 }
-                flowFileIdQuery.setMinimumNumberShouldMatch(1);
-            }
-
-            BooleanQuery query;
-            if (lineageIdentifier == null) {
-                query = flowFileIdQuery;
-            } else {
-                final BooleanQuery lineageIdQuery = new BooleanQuery();
-                lineageIdQuery.add(new TermQuery(new Term(SearchableFields.LineageIdentifier.getSearchableFieldName(), lineageIdentifier)), Occur.MUST);
 
-                if (flowFileIdQuery == null) {
-                    query = lineageIdQuery;
+                BooleanQuery query;
+                if (lineageIdentifier == null) {
+                    query = flowFileIdQuery;
                 } else {
-                    query = new BooleanQuery();
-                    query.add(flowFileIdQuery, Occur.SHOULD);
-                    query.add(lineageIdQuery, Occur.SHOULD);
-                    query.setMinimumNumberShouldMatch(1);
+                    final BooleanQuery lineageIdQuery = new BooleanQuery();
+                    lineageIdQuery.add(new TermQuery(new Term(SearchableFields.LineageIdentifier.getSearchableFieldName(), lineageIdentifier)), Occur.MUST);
+
+                    if (flowFileIdQuery == null) {
+                        query = lineageIdQuery;
+                    } else {
+                        query = new BooleanQuery();
+                        query.add(flowFileIdQuery, Occur.SHOULD);
+                        query.add(lineageIdQuery, Occur.SHOULD);
+                        query.setMinimumNumberShouldMatch(1);
+                    }
                 }
-            }
 
-            final long searchStart = System.nanoTime();
-            final TopDocs uuidQueryTopDocs = searcher.search(query, MAX_QUERY_RESULTS);
-            final long searchEnd = System.nanoTime();
+                final long searchStart = System.nanoTime();
+                final TopDocs uuidQueryTopDocs = searcher.search(query, MAX_QUERY_RESULTS);
+                final long searchEnd = System.nanoTime();
+
+                final DocsReader docsReader = new DocsReader(repo.getConfiguration().getStorageDirectories());
+                final Set<ProvenanceEventRecord> recs = docsReader.read(uuidQueryTopDocs, searcher.getIndexReader(), repo.getAllLogFiles(), new AtomicInteger(0), Integer.MAX_VALUE);
+                final long readDocsEnd = System.nanoTime();
+                logger.debug("Finished Lineage Query; Lucene search took {} millis, reading records took {} millis",
+                        TimeUnit.NANOSECONDS.toMillis(searchEnd - searchStart), TimeUnit.NANOSECONDS.toMillis(readDocsEnd - searchEnd));
 
-            final DocsReader docsReader = new DocsReader(repo.getConfiguration().getStorageDirectories());
-            final Set<ProvenanceEventRecord> recs = docsReader.read(uuidQueryTopDocs, indexReader, repo.getAllLogFiles(), new AtomicInteger(0), Integer.MAX_VALUE);
-            final long readDocsEnd = System.nanoTime();
-            logger.debug("Finished Lineage Query; Lucene search took {} millis, reading records took {} millis",
-                    TimeUnit.NANOSECONDS.toMillis(searchEnd - searchStart), TimeUnit.NANOSECONDS.toMillis(readDocsEnd - searchEnd));
+                return recs;
+            } finally {
+                indexManager.returnIndexSearcher(indexDirectory, searcher);
+            }
+        } catch (final FileNotFoundException fnfe) {
+            // nothing has been indexed yet, or the data has already aged off
+            logger.warn("Attempted to search Provenance Index {} but could not find the file due to {}", indexDirectory, fnfe);
+            if ( logger.isDebugEnabled() ) {
+                logger.warn("", fnfe);
+            }
 
-            return recs;
+            return Collections.emptySet();
         }
     }
 


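The comment carried through the LineageQuery hunk above describes how the FlowFile UUID query is assembled: every UUID becomes a SHOULD clause and at least one clause must match. As a hedged, standalone illustration of that Lucene pattern (the "uuid" field name is a placeholder; the real code uses SearchableFields.FlowFileUUID.getSearchableFieldName(), and the mutable BooleanQuery API matches the Lucene version in use here):

import java.util.Arrays;
import java.util.Collection;

import org.apache.lucene.index.Term;
import org.apache.lucene.search.BooleanClause.Occur;
import org.apache.lucene.search.BooleanQuery;
import org.apache.lucene.search.TermQuery;

public class FlowFileUuidQuerySketch {

    /**
     * Each UUID becomes a SHOULD clause; requiring at least one clause to match turns
     * the query into "any event that references any of these FlowFiles".
     */
    static BooleanQuery buildUuidQuery(final Collection<String> flowFileUuids) {
        final BooleanQuery query = new BooleanQuery();
        for (final String uuid : flowFileUuids) {
            query.add(new TermQuery(new Term("uuid", uuid)), Occur.SHOULD);
        }
        query.setMinimumNumberShouldMatch(1);
        return query;
    }

    public static void main(final String[] args) {
        System.out.println(buildUuidQuery(Arrays.asList("uuid-1", "uuid-2")));
    }
}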
[2/2] incubator-nifi git commit: NIFI-555: Create index name based off of event time of first event in index, not based on creation time of index

Posted by ma...@apache.org.
NIFI-555: Create index name based off of event time of first event in index, not based on creation time of index


Project: http://git-wip-us.apache.org/repos/asf/incubator-nifi/repo
Commit: http://git-wip-us.apache.org/repos/asf/incubator-nifi/commit/4baf48ae
Tree: http://git-wip-us.apache.org/repos/asf/incubator-nifi/tree/4baf48ae
Diff: http://git-wip-us.apache.org/repos/asf/incubator-nifi/diff/4baf48ae

Branch: refs/heads/develop
Commit: 4baf48ae958ea5f084f50b0b74d7c0efe417a29f
Parents: 20831c8
Author: Mark Payne <ma...@hotmail.com>
Authored: Wed Apr 29 09:41:40 2015 -0400
Committer: Mark Payne <ma...@hotmail.com>
Committed: Wed Apr 29 09:41:40 2015 -0400

----------------------------------------------------------------------
 .../apache/nifi/provenance/IndexConfiguration.java  | 16 ++++++++--------
 .../provenance/PersistentProvenanceRepository.java  |  6 +++++-
 .../apache/nifi/provenance/lucene/LineageQuery.java |  4 ++--
 3 files changed, 15 insertions(+), 11 deletions(-)
----------------------------------------------------------------------
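Before the hunks below, a minimal sketch of the naming rule NIFI-555 adjusts: the index directory is named after the event time of the first record in the provenance log, and only when that time cannot be determined does it fall back to the caller-supplied timestamp (previously the fallback was System.currentTimeMillis()). This is an illustration only; getFirstEntryTime and the surrounding locking in IndexConfiguration are omitted, and the literal timestamp is just example data.

import java.io.File;

public class IndexNamingSketch {

    /**
     * Name the index directory after the first event's time so searches can pick the
     * right indices by time range; fall back to the supplied timestamp rather than
     * "now", since indexing may happen retroactively.
     */
    static File indexDirectoryFor(final File storageDirectory, final Long firstEntryTime, final long newIndexTimestamp) {
        final long timestamp = (firstEntryTime == null) ? newIndexTimestamp : firstEntryTime;
        return new File(storageDirectory, "index-" + timestamp);
    }

    public static void main(final String[] args) {
        // First entry time unknown, so the supplied timestamp (example value) is used.
        System.out.println(indexDirectoryFor(new File("/tmp/provenance"), null, 1430311273000L));
    }
}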


http://git-wip-us.apache.org/repos/asf/incubator-nifi/blob/4baf48ae/nifi/nifi-nar-bundles/nifi-provenance-repository-bundle/nifi-persistent-provenance-repository/src/main/java/org/apache/nifi/provenance/IndexConfiguration.java
----------------------------------------------------------------------
diff --git a/nifi/nifi-nar-bundles/nifi-provenance-repository-bundle/nifi-persistent-provenance-repository/src/main/java/org/apache/nifi/provenance/IndexConfiguration.java b/nifi/nifi-nar-bundles/nifi-provenance-repository-bundle/nifi-persistent-provenance-repository/src/main/java/org/apache/nifi/provenance/IndexConfiguration.java
index 3beab65..9ea793d 100644
--- a/nifi/nifi-nar-bundles/nifi-provenance-repository-bundle/nifi-persistent-provenance-repository/src/main/java/org/apache/nifi/provenance/IndexConfiguration.java
+++ b/nifi/nifi-nar-bundles/nifi-provenance-repository-bundle/nifi-persistent-provenance-repository/src/main/java/org/apache/nifi/provenance/IndexConfiguration.java
@@ -121,13 +121,13 @@ public class IndexConfiguration {
         }
     }
 
-    public File getWritableIndexDirectory(final File provenanceLogFile) {
+    public File getWritableIndexDirectory(final File provenanceLogFile, final long newIndexTimestamp) {
         lock.lock();
         try {
             final File storageDirectory = provenanceLogFile.getParentFile();
             List<File> indexDirectories = this.indexDirectoryMap.get(storageDirectory);
             if (indexDirectories == null) {
-                final File newDir = addNewIndex(storageDirectory, provenanceLogFile);
+                final File newDir = addNewIndex(storageDirectory, provenanceLogFile, newIndexTimestamp);
                 indexDirectories = new ArrayList<>();
                 indexDirectories.add(newDir);
                 indexDirectoryMap.put(storageDirectory, indexDirectories);
@@ -135,7 +135,7 @@ public class IndexConfiguration {
             }
 
             if (indexDirectories.isEmpty()) {
-                final File newDir = addNewIndex(storageDirectory, provenanceLogFile);
+                final File newDir = addNewIndex(storageDirectory, provenanceLogFile, newIndexTimestamp);
                 indexDirectories.add(newDir);
                 return newDir;
             }
@@ -143,7 +143,7 @@ public class IndexConfiguration {
             final File lastDir = indexDirectories.get(indexDirectories.size() - 1);
             final long size = getSize(lastDir);
             if (size > repoConfig.getDesiredIndexSize()) {
-                final File newDir = addNewIndex(storageDirectory, provenanceLogFile);
+                final File newDir = addNewIndex(storageDirectory, provenanceLogFile, newIndexTimestamp);
                 indexDirectories.add(newDir);
                 return newDir;
             } else {
@@ -154,14 +154,14 @@ public class IndexConfiguration {
         }
     }
 
-    private File addNewIndex(final File storageDirectory, final File provenanceLogFile) {
+    private File addNewIndex(final File storageDirectory, final File provenanceLogFile, final long newIndexTimestamp) {
         // Build the event time of the first record into the index's filename so that we can determine
         // which index files to look at when we perform a search. We use the timestamp of the first record
         // in the Provenance Log file, rather than the current time, because we may perform the Indexing
         // retroactively.
         Long firstEntryTime = getFirstEntryTime(provenanceLogFile);
         if (firstEntryTime == null) {
-            firstEntryTime = System.currentTimeMillis();
+            firstEntryTime = newIndexTimestamp;
         }
         return new File(storageDirectory, "index-" + firstEntryTime);
     }
@@ -222,7 +222,7 @@ public class IndexConfiguration {
                 }
             });
 
-            for (File indexDir : sortedIndexDirectories) {
+            for (final File indexDir : sortedIndexDirectories) {
                 // If the index was last modified before the start time, we know that it doesn't
                 // contain any data for us to query.
                 if (startTime != null && indexDir.lastModified() < startTime) {
@@ -282,7 +282,7 @@ public class IndexConfiguration {
             }
 
             boolean foundIndexCreatedLater = false;
-            for (File indexDir : sortedIndexDirectories) {
+            for (final File indexDir : sortedIndexDirectories) {
                 // If the index was last modified before the log file was created, we know the index doesn't include
                 // any data for the provenance log.
                 if (indexDir.lastModified() < firstEntryTime) {

http://git-wip-us.apache.org/repos/asf/incubator-nifi/blob/4baf48ae/nifi/nifi-nar-bundles/nifi-provenance-repository-bundle/nifi-persistent-provenance-repository/src/main/java/org/apache/nifi/provenance/PersistentProvenanceRepository.java
----------------------------------------------------------------------
diff --git a/nifi/nifi-nar-bundles/nifi-provenance-repository-bundle/nifi-persistent-provenance-repository/src/main/java/org/apache/nifi/provenance/PersistentProvenanceRepository.java b/nifi/nifi-nar-bundles/nifi-provenance-repository-bundle/nifi-persistent-provenance-repository/src/main/java/org/apache/nifi/provenance/PersistentProvenanceRepository.java
index 214fc7c..3bdd38f 100644
--- a/nifi/nifi-nar-bundles/nifi-provenance-repository-bundle/nifi-persistent-provenance-repository/src/main/java/org/apache/nifi/provenance/PersistentProvenanceRepository.java
+++ b/nifi/nifi-nar-bundles/nifi-provenance-repository-bundle/nifi-persistent-provenance-repository/src/main/java/org/apache/nifi/provenance/PersistentProvenanceRepository.java
@@ -1231,6 +1231,7 @@ public class PersistentProvenanceRepository implements ProvenanceEventRepository
                 }
             });
 
+            long earliestTimestamp = System.currentTimeMillis();
             for (final RecordReader reader : readers) {
                 StandardProvenanceEventRecord record = null;
 
@@ -1252,6 +1253,9 @@ public class PersistentProvenanceRepository implements ProvenanceEventRepository
                     continue;
                 }
 
+                if ( record.getEventTime() < earliestTimestamp ) {
+                    earliestTimestamp = record.getEventTime();
+                }
                 recordToReaderMap.put(record, reader);
             }
 
@@ -1262,7 +1266,7 @@ public class PersistentProvenanceRepository implements ProvenanceEventRepository
 
                 final IndexingAction indexingAction = new IndexingAction(this);
 
-                final File indexingDirectory = indexConfig.getWritableIndexDirectory(writerFile);
+                final File indexingDirectory = indexConfig.getWritableIndexDirectory(writerFile, earliestTimestamp);
                 final IndexWriter indexWriter = indexManager.borrowIndexWriter(indexingDirectory);
                 try {
                     long maxId = 0L;

http://git-wip-us.apache.org/repos/asf/incubator-nifi/blob/4baf48ae/nifi/nifi-nar-bundles/nifi-provenance-repository-bundle/nifi-persistent-provenance-repository/src/main/java/org/apache/nifi/provenance/lucene/LineageQuery.java
----------------------------------------------------------------------
diff --git a/nifi/nifi-nar-bundles/nifi-provenance-repository-bundle/nifi-persistent-provenance-repository/src/main/java/org/apache/nifi/provenance/lucene/LineageQuery.java b/nifi/nifi-nar-bundles/nifi-provenance-repository-bundle/nifi-persistent-provenance-repository/src/main/java/org/apache/nifi/provenance/lucene/LineageQuery.java
index 5e4f69d..502068b 100644
--- a/nifi/nifi-nar-bundles/nifi-provenance-repository-bundle/nifi-persistent-provenance-repository/src/main/java/org/apache/nifi/provenance/lucene/LineageQuery.java
+++ b/nifi/nifi-nar-bundles/nifi-provenance-repository-bundle/nifi-persistent-provenance-repository/src/main/java/org/apache/nifi/provenance/lucene/LineageQuery.java
@@ -96,8 +96,8 @@ public class LineageQuery {
                 final DocsReader docsReader = new DocsReader(repo.getConfiguration().getStorageDirectories());
                 final Set<ProvenanceEventRecord> recs = docsReader.read(uuidQueryTopDocs, searcher.getIndexReader(), repo.getAllLogFiles(), new AtomicInteger(0), Integer.MAX_VALUE);
                 final long readDocsEnd = System.nanoTime();
-                logger.debug("Finished Lineage Query; Lucene search took {} millis, reading records took {} millis",
-                        TimeUnit.NANOSECONDS.toMillis(searchEnd - searchStart), TimeUnit.NANOSECONDS.toMillis(readDocsEnd - searchEnd));
+                logger.debug("Finished Lineage Query against {}; Lucene search took {} millis, reading records took {} millis",
+                        indexDirectory, TimeUnit.NANOSECONDS.toMillis(searchEnd - searchStart), TimeUnit.NANOSECONDS.toMillis(readDocsEnd - searchEnd));
 
                 return recs;
             } finally {
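Taken together, the repository-side change above amounts to tracking the earliest event time seen while iterating the rolled-over records and handing that value to getWritableIndexDirectory(writerFile, earliestTimestamp) as the fallback index timestamp. A hedged, self-contained sketch of that scan follows; the List<Long> of event times stands in for the StandardProvenanceEventRecord stream read from the RecordReaders.

import java.util.Arrays;
import java.util.List;

public class EarliestTimestampSketch {

    /**
     * Start from "now" and lower the value whenever an older event is seen, so the result
     * is the event time of the earliest record, or the current time if no records exist.
     */
    static long earliestTimestamp(final List<Long> eventTimes) {
        long earliest = System.currentTimeMillis();
        for (final long eventTime : eventTimes) {
            if (eventTime < earliest) {
                earliest = eventTime;
            }
        }
        return earliest;
    }

    public static void main(final String[] args) {
        // Prints 500, the earliest of the example event times.
        System.out.println(earliestTimestamp(Arrays.asList(1000L, 500L, 2000L)));
    }
}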