Posted to commits@lucene.apache.org by mi...@apache.org on 2014/11/30 12:07:19 UTC

svn commit: r1642535 [12/19] - in /lucene/dev/branches/lucene6005/lucene: analysis/common/src/java/org/apache/lucene/collation/ analysis/common/src/test/org/apache/lucene/analysis/core/ analysis/common/src/test/org/apache/lucene/analysis/miscellaneous/...

Modified: lucene/dev/branches/lucene6005/lucene/facet/src/java/org/apache/lucene/facet/range/DoubleRange.java
URL: http://svn.apache.org/viewvc/lucene/dev/branches/lucene6005/lucene/facet/src/java/org/apache/lucene/facet/range/DoubleRange.java?rev=1642535&r1=1642534&r2=1642535&view=diff
==============================================================================
--- lucene/dev/branches/lucene6005/lucene/facet/src/java/org/apache/lucene/facet/range/DoubleRange.java (original)
+++ lucene/dev/branches/lucene6005/lucene/facet/src/java/org/apache/lucene/facet/range/DoubleRange.java Sun Nov 30 11:07:09 2014
@@ -20,6 +20,7 @@ package org.apache.lucene.facet.range;
 import java.io.IOException;
 import java.util.Collections;
 
+import org.apache.lucene.document.Document;
 import org.apache.lucene.index.LeafReaderContext;
 import org.apache.lucene.queries.function.FunctionValues;
 import org.apache.lucene.queries.function.ValueSource;
@@ -27,7 +28,6 @@ import org.apache.lucene.search.DocIdSet
 import org.apache.lucene.search.DocIdSetIterator;
 import org.apache.lucene.search.Filter;
 import org.apache.lucene.util.Bits;
-import org.apache.lucene.util.NumericUtils;
 
 /** Represents a range over double values.
  *
@@ -91,8 +91,8 @@ public final class DoubleRange extends R
 
   LongRange toLongRange() {
     return new LongRange(label,
-                         NumericUtils.doubleToSortableLong(minIncl), true,
-                         NumericUtils.doubleToSortableLong(maxIncl), true);
+                         Document.doubleToSortableLong(minIncl), true,
+                         Document.doubleToSortableLong(maxIncl), true);
   }
 
   @Override

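For reference, the conversion this hunk relies on is the order-preserving double-to-long encoding; on this branch it apparently moves from NumericUtils onto Document (DoubleRangeFacetCounts below gets the same change). A minimal sketch of what the encoding does, written against the stock NumericUtils helpers and assuming Document.doubleToSortableLong behaves identically:

    import org.apache.lucene.util.NumericUtils;

    public class SortableLongSketch {
      public static void main(String[] args) {
        // doubleToSortableLong is an order-preserving bijection, which is why
        // double ranges can be counted with the long-range machinery
        // (LongRange / LongRangeCounter).
        long lo = NumericUtils.doubleToSortableLong(-1.5);
        long hi = NumericUtils.doubleToSortableLong(2.5);
        System.out.println(lo < hi);                               // true
        System.out.println(NumericUtils.sortableLongToDouble(hi)); // 2.5
      }
    }
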
Modified: lucene/dev/branches/lucene6005/lucene/facet/src/java/org/apache/lucene/facet/range/DoubleRangeFacetCounts.java
URL: http://svn.apache.org/viewvc/lucene/dev/branches/lucene6005/lucene/facet/src/java/org/apache/lucene/facet/range/DoubleRangeFacetCounts.java?rev=1642535&r1=1642534&r2=1642535&view=diff
==============================================================================
--- lucene/dev/branches/lucene6005/lucene/facet/src/java/org/apache/lucene/facet/range/DoubleRangeFacetCounts.java (original)
+++ lucene/dev/branches/lucene6005/lucene/facet/src/java/org/apache/lucene/facet/range/DoubleRangeFacetCounts.java Sun Nov 30 11:07:09 2014
@@ -21,8 +21,7 @@ import java.io.IOException;
 import java.util.Collections;
 import java.util.List;
 
-import org.apache.lucene.document.DoubleDocValuesField; // javadocs
-import org.apache.lucene.document.FloatDocValuesField; // javadocs
+import org.apache.lucene.document.Document;
 import org.apache.lucene.facet.Facets;
 import org.apache.lucene.facet.FacetsCollector.MatchingDocs;
 import org.apache.lucene.facet.FacetsCollector;
@@ -31,10 +30,9 @@ import org.apache.lucene.queries.functio
 import org.apache.lucene.queries.function.valuesource.DoubleFieldSource;
 import org.apache.lucene.queries.function.valuesource.FloatFieldSource; // javadocs
 import org.apache.lucene.search.DocIdSet;
+import org.apache.lucene.search.DocIdSetIterator;
 import org.apache.lucene.search.Filter;
 import org.apache.lucene.util.Bits;
-import org.apache.lucene.search.DocIdSetIterator;
-import org.apache.lucene.util.NumericUtils;
 
 /** {@link Facets} implementation that computes counts for
  *  dynamic double ranges from a provided {@link
@@ -46,9 +44,9 @@ import org.apache.lucene.util.NumericUti
  *  etc.).
  *
  *  <p> If you had indexed your field using {@link
- *  FloatDocValuesField} then pass {@link FloatFieldSource}
+ *  org.apache.lucene.document.Document#addFloat} then pass {@link FloatFieldSource}
  *  as the {@link ValueSource}; if you used {@link
- *  DoubleDocValuesField} then pass {@link
+ *  org.apache.lucene.document.Document#addDouble} then pass {@link
  *  DoubleFieldSource} (this is the default used when you
 *  pass just the field name).
  *
@@ -85,8 +83,8 @@ public class DoubleRangeFacetCounts exte
     for(int i=0;i<ranges.length;i++) {
       DoubleRange range = ranges[i];
       longRanges[i] =  new LongRange(range.label,
-                                     NumericUtils.doubleToSortableLong(range.minIncl), true,
-                                     NumericUtils.doubleToSortableLong(range.maxIncl), true);
+                                     Document.doubleToSortableLong(range.minIncl), true,
+                                     Document.doubleToSortableLong(range.maxIncl), true);
     }
 
     LongRangeCounter counter = new LongRangeCounter(longRanges);
@@ -121,7 +119,7 @@ public class DoubleRangeFacetCounts exte
         }
         // Skip missing docs:
         if (fv.exists(doc)) {
-          counter.add(NumericUtils.doubleToSortableLong(fv.doubleVal(doc)));
+          counter.add(Document.doubleToSortableLong(fv.doubleVal(doc)));
         } else {
           missingCount++;
         }

Modified: lucene/dev/branches/lucene6005/lucene/facet/src/java/org/apache/lucene/facet/range/Range.java
URL: http://svn.apache.org/viewvc/lucene/dev/branches/lucene6005/lucene/facet/src/java/org/apache/lucene/facet/range/Range.java?rev=1642535&r1=1642534&r2=1642535&view=diff
==============================================================================
--- lucene/dev/branches/lucene6005/lucene/facet/src/java/org/apache/lucene/facet/range/Range.java (original)
+++ lucene/dev/branches/lucene6005/lucene/facet/src/java/org/apache/lucene/facet/range/Range.java Sun Nov 30 11:07:09 2014
@@ -17,12 +17,12 @@ package org.apache.lucene.facet.range;
  * limitations under the License.
  */
 
+import org.apache.lucene.document.FieldTypes;
 import org.apache.lucene.facet.DrillDownQuery; // javadocs
 import org.apache.lucene.facet.DrillSideways; // javadocs
 import org.apache.lucene.queries.function.ValueSource;
 import org.apache.lucene.search.Filter;
 import org.apache.lucene.search.FilteredQuery; // javadocs
-import org.apache.lucene.search.NumericRangeFilter; // javadocs
 
 /** Base class for a single labeled range.
  *
@@ -48,7 +48,7 @@ public abstract class Range {
    *  FilteredQuery#QUERY_FIRST_FILTER_STRATEGY}.  If the
    *  {@link ValueSource} is static, e.g. an indexed numeric
    *  field, then it may be more efficient to use {@link
-   *  NumericRangeFilter}.  The provided fastMatchFilter,
+   *  FieldTypes#newRangeFilter}.  The provided fastMatchFilter,
    *  if non-null, will first be consulted, and only if
    *  that is set for each document will the range then be
    *  checked. */
@@ -61,7 +61,7 @@ public abstract class Range {
    *  {@link FilteredQuery} using its {@link
    *  FilteredQuery#QUERY_FIRST_FILTER_STRATEGY}.  If the
    *  {@link ValueSource} is static, e.g. an indexed numeric
-   *  field, then it may be more efficient to use {@link NumericRangeFilter}. */
+   *  field, then it may be more efficient to use {@link FieldTypes#newRangeFilter}. */
   public Filter getFilter(ValueSource valueSource) {
     return getFilter(null, valueSource);
   }

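The javadoc now points to FieldTypes#newRangeFilter for statically indexed numeric fields; that method's signature is not part of this diff, so the sketch below sticks to the ValueSource-based getFilter shown above. The field name "price" and the range bounds are illustrative only:

    import org.apache.lucene.facet.range.DoubleRange;
    import org.apache.lucene.queries.function.valuesource.DoubleFieldSource;
    import org.apache.lucene.search.Filter;

    public class RangeFilterSketch {
      public static void main(String[] args) {
        // Dynamic range over a double ValueSource; the returned Filter is meant
        // to be wrapped in a FilteredQuery with QUERY_FIRST_FILTER_STRATEGY.
        DoubleRange cheap = new DoubleRange("cheap", 0.0, true, 10.0, false);
        Filter filter = cheap.getFilter(new DoubleFieldSource("price"));
        System.out.println(filter);
      }
    }
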
Modified: lucene/dev/branches/lucene6005/lucene/facet/src/java/org/apache/lucene/facet/sortedset/SortedSetDocValuesFacetField.java
URL: http://svn.apache.org/viewvc/lucene/dev/branches/lucene6005/lucene/facet/src/java/org/apache/lucene/facet/sortedset/SortedSetDocValuesFacetField.java?rev=1642535&r1=1642534&r2=1642535&view=diff
==============================================================================
--- lucene/dev/branches/lucene6005/lucene/facet/src/java/org/apache/lucene/facet/sortedset/SortedSetDocValuesFacetField.java (original)
+++ lucene/dev/branches/lucene6005/lucene/facet/src/java/org/apache/lucene/facet/sortedset/SortedSetDocValuesFacetField.java Sun Nov 30 11:07:09 2014
@@ -17,20 +17,29 @@ package org.apache.lucene.facet.sortedse
  * limitations under the License.
  */
 
-import org.apache.lucene.document.Field;
-import org.apache.lucene.document.FieldType;
+import org.apache.lucene.analysis.Analyzer;
+import org.apache.lucene.analysis.TokenStream;
 import org.apache.lucene.facet.FacetField;
+import org.apache.lucene.index.DocValuesType;
 import org.apache.lucene.index.IndexOptions;
+import org.apache.lucene.index.IndexableField;
+import org.apache.lucene.index.IndexableFieldType;
+import org.apache.lucene.util.BytesRef;
 
 /** Add an instance of this to your Document for every facet
  *  label to be indexed via SortedSetDocValues. */
-public class SortedSetDocValuesFacetField extends Field {
+public class SortedSetDocValuesFacetField implements IndexableField {
   
-  /** Indexed {@link FieldType}. */
-  public static final FieldType TYPE = new FieldType();
-  static {
-    TYPE.setIndexOptions(IndexOptions.DOCS_AND_FREQS_AND_POSITIONS);
-    TYPE.freeze();
+  public static final IndexableFieldType TYPE = new IndexableFieldType() {
+    };
+
+  @Override
+  public String name() {
+    return "dummy";
+  }
+
+  public IndexableFieldType fieldType() {
+    return TYPE;
   }
 
   /** Dimension. */
@@ -41,7 +50,6 @@ public class SortedSetDocValuesFacetFiel
 
   /** Sole constructor. */
   public SortedSetDocValuesFacetField(String dim, String label) {
-    super("dummy", TYPE);
     FacetField.verifyLabel(label);
     FacetField.verifyLabel(dim);
     this.dim = dim;

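Since the class no longer extends Field, it is added to the branch's Document like any other IndexableField; the tests further down show the pattern. A minimal indexing sketch, assuming the lucene6005 branch API visible in this commit (IndexWriter.getFieldTypes, newDocument, FacetsConfig.build); the dimension name "a" mirrors the tests, and the analyzer/directory choices are illustrative:

    import org.apache.lucene.analysis.core.WhitespaceAnalyzer;
    import org.apache.lucene.document.Document;
    import org.apache.lucene.document.FieldTypes;
    import org.apache.lucene.facet.FacetsConfig;
    import org.apache.lucene.facet.sortedset.SortedSetDocValuesFacetField;
    import org.apache.lucene.index.IndexWriter;
    import org.apache.lucene.index.IndexWriterConfig;
    import org.apache.lucene.store.Directory;
    import org.apache.lucene.store.RAMDirectory;

    public class SortedSetFacetIndexSketch {
      public static void main(String[] args) throws Exception {
        Directory dir = new RAMDirectory();
        IndexWriter writer = new IndexWriter(dir, new IndexWriterConfig(new WhitespaceAnalyzer()));

        // Schema-like settings now live on FieldTypes; the tests declare the
        // facet dimension multi-valued there as well as on FacetsConfig.
        FieldTypes fieldTypes = writer.getFieldTypes();
        fieldTypes.setMultiValued("a");

        FacetsConfig config = new FacetsConfig();
        config.setMultiValued("a", true);

        Document doc = writer.newDocument();      // branch API: no "new Document()"
        doc.add(new SortedSetDocValuesFacetField("a", "foo"));
        doc.add(new SortedSetDocValuesFacetField("a", "bar"));
        writer.addDocument(config.build(doc));    // build() rewrites the facet fields

        writer.close();
        dir.close();
      }
    }
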
Modified: lucene/dev/branches/lucene6005/lucene/facet/src/java/org/apache/lucene/facet/taxonomy/AssociationFacetField.java
URL: http://svn.apache.org/viewvc/lucene/dev/branches/lucene6005/lucene/facet/src/java/org/apache/lucene/facet/taxonomy/AssociationFacetField.java?rev=1642535&r1=1642534&r2=1642535&view=diff
==============================================================================
--- lucene/dev/branches/lucene6005/lucene/facet/src/java/org/apache/lucene/facet/taxonomy/AssociationFacetField.java (original)
+++ lucene/dev/branches/lucene6005/lucene/facet/src/java/org/apache/lucene/facet/taxonomy/AssociationFacetField.java Sun Nov 30 11:07:09 2014
@@ -19,12 +19,14 @@ package org.apache.lucene.facet.taxonomy
 
 import java.util.Arrays;
 
-import org.apache.lucene.document.Document; // javadocs
-import org.apache.lucene.document.Field;
-import org.apache.lucene.document.FieldType;
+import org.apache.lucene.analysis.Analyzer;
+import org.apache.lucene.analysis.TokenStream;
 import org.apache.lucene.facet.FacetField;
 import org.apache.lucene.facet.Facets;
+import org.apache.lucene.index.DocValuesType;
 import org.apache.lucene.index.IndexOptions;
+import org.apache.lucene.index.IndexableField;
+import org.apache.lucene.index.IndexableFieldType;
 import org.apache.lucene.util.BytesRef;
 
 /** Add an instance of this to your {@link Document} to add
@@ -36,15 +38,20 @@ import org.apache.lucene.util.BytesRef;
  *  Facets} implementations.
  * 
  *  @lucene.experimental */
-public class AssociationFacetField extends Field {
+public class AssociationFacetField implements IndexableField {
   
-  /** Indexed {@link FieldType}. */
-  public static final FieldType TYPE = new FieldType();
-  static {
-    TYPE.setIndexOptions(IndexOptions.DOCS_AND_FREQS_AND_POSITIONS);
-    TYPE.freeze();
-  }
+  public static final IndexableFieldType TYPE = new IndexableFieldType() {
+    };
   
+  @Override
+  public String name() {
+    return "dummy";
+  }
+
+  public IndexableFieldType fieldType() {
+    return TYPE;
+  }
+
   /** Dimension for this field. */
   public final String dim;
 
@@ -57,7 +64,6 @@ public class AssociationFacetField exten
   /** Creates this from {@code dim} and {@code path} and an
    *  association */
   public AssociationFacetField(BytesRef assoc, String dim, String... path) {
-    super("dummy", TYPE);
     FacetField.verifyLabel(dim);
     for(String label : path) {
       FacetField.verifyLabel(label);

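Usage stays the same from the caller's point of view even though the class now implements IndexableField directly: the association field is added to the branch Document and run through FacetsConfig.build with a taxonomy writer, exactly as the tests below do. A minimal sketch, assuming the branch API shown in this commit (newDocument, config.build); the dim/path/value mirror the tests and the directory/analyzer choices are illustrative:

    import org.apache.lucene.analysis.core.WhitespaceAnalyzer;
    import org.apache.lucene.document.Document;
    import org.apache.lucene.facet.FacetsConfig;
    import org.apache.lucene.facet.taxonomy.IntAssociationFacetField;
    import org.apache.lucene.facet.taxonomy.directory.DirectoryTaxonomyWriter;
    import org.apache.lucene.index.IndexWriter;
    import org.apache.lucene.index.IndexWriterConfig;
    import org.apache.lucene.store.Directory;
    import org.apache.lucene.store.RAMDirectory;

    public class AssociationFacetSketch {
      public static void main(String[] args) throws Exception {
        Directory indexDir = new RAMDirectory();
        Directory taxoDir = new RAMDirectory();
        IndexWriter writer = new IndexWriter(indexDir, new IndexWriterConfig(new WhitespaceAnalyzer()));
        DirectoryTaxonomyWriter taxoWriter = new DirectoryTaxonomyWriter(taxoDir);

        FacetsConfig config = new FacetsConfig();

        Document doc = writer.newDocument();                 // branch API
        doc.add(new IntAssociationFacetField(14, "a", "x")); // same dim/path as the tests
        writer.addDocument(config.build(taxoWriter, doc));   // taxonomy-backed build

        writer.close();
        taxoWriter.close();
        indexDir.close();
        taxoDir.close();
      }
    }
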
Modified: lucene/dev/branches/lucene6005/lucene/facet/src/java/org/apache/lucene/facet/taxonomy/FloatAssociationFacetField.java
URL: http://svn.apache.org/viewvc/lucene/dev/branches/lucene6005/lucene/facet/src/java/org/apache/lucene/facet/taxonomy/FloatAssociationFacetField.java?rev=1642535&r1=1642534&r2=1642535&view=diff
==============================================================================
--- lucene/dev/branches/lucene6005/lucene/facet/src/java/org/apache/lucene/facet/taxonomy/FloatAssociationFacetField.java (original)
+++ lucene/dev/branches/lucene6005/lucene/facet/src/java/org/apache/lucene/facet/taxonomy/FloatAssociationFacetField.java Sun Nov 30 11:07:09 2014
@@ -19,7 +19,6 @@ package org.apache.lucene.facet.taxonomy
 
 import java.util.Arrays;
 
-import org.apache.lucene.document.Document;
 import org.apache.lucene.util.BytesRef;
 
 /** Add an instance of this to your {@link Document} to add

Modified: lucene/dev/branches/lucene6005/lucene/facet/src/java/org/apache/lucene/facet/taxonomy/IntAssociationFacetField.java
URL: http://svn.apache.org/viewvc/lucene/dev/branches/lucene6005/lucene/facet/src/java/org/apache/lucene/facet/taxonomy/IntAssociationFacetField.java?rev=1642535&r1=1642534&r2=1642535&view=diff
==============================================================================
--- lucene/dev/branches/lucene6005/lucene/facet/src/java/org/apache/lucene/facet/taxonomy/IntAssociationFacetField.java (original)
+++ lucene/dev/branches/lucene6005/lucene/facet/src/java/org/apache/lucene/facet/taxonomy/IntAssociationFacetField.java Sun Nov 30 11:07:09 2014
@@ -19,7 +19,6 @@ package org.apache.lucene.facet.taxonomy
 
 import java.util.Arrays;
 
-import org.apache.lucene.document.Document;
 import org.apache.lucene.util.BytesRef;
 
 /** Add an instance of this to your {@link Document} to add

Modified: lucene/dev/branches/lucene6005/lucene/facet/src/java/org/apache/lucene/facet/taxonomy/directory/DirectoryTaxonomyReader.java
URL: http://svn.apache.org/viewvc/lucene/dev/branches/lucene6005/lucene/facet/src/java/org/apache/lucene/facet/taxonomy/directory/DirectoryTaxonomyReader.java?rev=1642535&r1=1642534&r2=1642535&view=diff
==============================================================================
--- lucene/dev/branches/lucene6005/lucene/facet/src/java/org/apache/lucene/facet/taxonomy/directory/DirectoryTaxonomyReader.java (original)
+++ lucene/dev/branches/lucene6005/lucene/facet/src/java/org/apache/lucene/facet/taxonomy/directory/DirectoryTaxonomyReader.java Sun Nov 30 11:07:09 2014
@@ -5,7 +5,7 @@ import java.util.Map;
 import java.util.logging.Level;
 import java.util.logging.Logger;
 
-import org.apache.lucene.document.Document2;
+import org.apache.lucene.document.Document;
 import org.apache.lucene.facet.FacetsConfig;
 import org.apache.lucene.facet.taxonomy.FacetLabel;
 import org.apache.lucene.facet.taxonomy.LRUHashMap;
@@ -312,7 +312,7 @@ public class DirectoryTaxonomyReader ext
       }
     }
     
-    Document2 doc = indexReader.document(ordinal);
+    Document doc = indexReader.document(ordinal);
     FacetLabel ret = new FacetLabel(FacetsConfig.stringToPath(doc.getString(Consts.FULL)));
     synchronized (categoryCache) {
       categoryCache.put(catIDInteger, ret);

Modified: lucene/dev/branches/lucene6005/lucene/facet/src/java/org/apache/lucene/facet/taxonomy/directory/DirectoryTaxonomyWriter.java
URL: http://svn.apache.org/viewvc/lucene/dev/branches/lucene6005/lucene/facet/src/java/org/apache/lucene/facet/taxonomy/directory/DirectoryTaxonomyWriter.java?rev=1642535&r1=1642534&r2=1642535&view=diff
==============================================================================
--- lucene/dev/branches/lucene6005/lucene/facet/src/java/org/apache/lucene/facet/taxonomy/directory/DirectoryTaxonomyWriter.java (original)
+++ lucene/dev/branches/lucene6005/lucene/facet/src/java/org/apache/lucene/facet/taxonomy/directory/DirectoryTaxonomyWriter.java Sun Nov 30 11:07:09 2014
@@ -15,10 +15,7 @@ import org.apache.lucene.analysis.TokenS
 import org.apache.lucene.analysis.tokenattributes.CharTermAttribute;
 import org.apache.lucene.analysis.tokenattributes.PositionIncrementAttribute;
 import org.apache.lucene.document.Document;
-import org.apache.lucene.document.Field;
-import org.apache.lucene.document.FieldType;
-import org.apache.lucene.document.StringField;
-import org.apache.lucene.document.TextField;
+import org.apache.lucene.document.FieldTypes;
 import org.apache.lucene.facet.FacetsConfig;
 import org.apache.lucene.facet.taxonomy.FacetLabel;
 import org.apache.lucene.facet.taxonomy.TaxonomyReader;
@@ -31,8 +28,8 @@ import org.apache.lucene.index.Directory
 import org.apache.lucene.index.DocsEnum;
 import org.apache.lucene.index.IndexReader;
 import org.apache.lucene.index.IndexWriter;
-import org.apache.lucene.index.IndexWriterConfig;
 import org.apache.lucene.index.IndexWriterConfig.OpenMode;
+import org.apache.lucene.index.IndexWriterConfig;
 import org.apache.lucene.index.LeafReader;
 import org.apache.lucene.index.LeafReaderContext;
 import org.apache.lucene.index.LogByteSizeMergePolicy;
@@ -101,8 +98,6 @@ public class DirectoryTaxonomyWriter imp
   private long indexEpoch;
 
   private SinglePositionTokenStream parentStream = new SinglePositionTokenStream(Consts.PAYLOAD_PARENT);
-  private Field parentStreamField;
-  private Field fullPathField;
   private int cacheMissesUntilFill = 11;
   private boolean shouldFillCache = true;
   
@@ -189,12 +184,10 @@ public class DirectoryTaxonomyWriter imp
     if (openMode == OpenMode.CREATE) {
       ++indexEpoch;
     }
+    FieldTypes fieldTypes = indexWriter.getFieldTypes();
+    fieldTypes.disableNorms(Consts.FIELD_PAYLOADS);
+    fieldTypes.disableStored(Consts.FIELD_PAYLOADS);
     
-    FieldType ft = new FieldType(TextField.TYPE_NOT_STORED);
-    ft.setOmitNorms(true);
-    parentStreamField = new Field(Consts.FIELD_PAYLOADS, parentStream, ft);
-    fullPathField = new StringField(Consts.FULL, "", Field.Store.YES);
-
     nextID = indexWriter.maxDoc();
 
     if (cache == null) {
@@ -484,12 +477,11 @@ public class DirectoryTaxonomyWriter imp
     // we write here (e.g., to write parent+2), and need to do a workaround
     // in the reader (which knows that anyway only category 0 has a parent
     // -1).    
-    parentStream.set(Math.max(parent + 1, 1));
-    Document d = new Document();
-    d.add(parentStreamField);
 
-    fullPathField.setStringValue(FacetsConfig.pathToString(categoryPath.components, categoryPath.length));
-    d.add(fullPathField);
+    Document d = indexWriter.newDocument();
+    parentStream.set(Math.max(parent + 1, 1));
+    d.addLargeText(Consts.FIELD_PAYLOADS, parentStream);
+    d.addAtom(Consts.FULL, FacetsConfig.pathToString(categoryPath.components, categoryPath.length));
 
     // Note that we do not pass an Analyzer here because the fields that are
     // added to the Document are untokenized or contain their own TokenStream.

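The reusable parentStreamField/fullPathField instances are gone: per-field settings are declared once on FieldTypes and each category document is built with the typed add methods. A condensed sketch of the new pattern, assuming the branch API in the hunk; the field names are parameters here (the real ones come from Consts), and the commit relies on addAtom keeping the value stored so the reader's getString(Consts.FULL) continues to work:

    import java.io.IOException;

    import org.apache.lucene.analysis.TokenStream;
    import org.apache.lucene.document.Document;
    import org.apache.lucene.document.FieldTypes;
    import org.apache.lucene.index.IndexWriter;

    public class TaxonomyDocSketch {
      // One-time, schema-like configuration replaces the reused FieldType:
      static void configure(IndexWriter indexWriter, String payloadField) {
        FieldTypes fieldTypes = indexWriter.getFieldTypes();
        fieldTypes.disableNorms(payloadField);
        fieldTypes.disableStored(payloadField);
      }

      // Per-category document, replacing the reused Field instances:
      static void addCategory(IndexWriter indexWriter, String payloadField, String pathField,
                              TokenStream parentStream, String fullPath) throws IOException {
        Document d = indexWriter.newDocument();
        d.addLargeText(payloadField, parentStream);  // TokenStream-valued text field
        d.addAtom(pathField, fullPath);              // untokenized atom, read back via getString()
        indexWriter.addDocument(d);
      }
    }
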
Modified: lucene/dev/branches/lucene6005/lucene/facet/src/test/org/apache/lucene/facet/TestDrillDownQuery.java
URL: http://svn.apache.org/viewvc/lucene/dev/branches/lucene6005/lucene/facet/src/test/org/apache/lucene/facet/TestDrillDownQuery.java?rev=1642535&r1=1642534&r2=1642535&view=diff
==============================================================================
--- lucene/dev/branches/lucene6005/lucene/facet/src/test/org/apache/lucene/facet/TestDrillDownQuery.java (original)
+++ lucene/dev/branches/lucene6005/lucene/facet/src/test/org/apache/lucene/facet/TestDrillDownQuery.java Sun Nov 30 11:07:09 2014
@@ -22,11 +22,8 @@ import java.util.Random;
 
 import org.apache.lucene.analysis.MockAnalyzer;
 import org.apache.lucene.analysis.MockTokenizer;
-import org.apache.lucene.document.Document2;
 import org.apache.lucene.document.Document;
-import org.apache.lucene.document.Field;
 import org.apache.lucene.document.FieldTypes;
-import org.apache.lucene.document.TextField;
 import org.apache.lucene.facet.taxonomy.TaxonomyWriter;
 import org.apache.lucene.facet.taxonomy.directory.DirectoryTaxonomyReader;
 import org.apache.lucene.facet.taxonomy.directory.DirectoryTaxonomyWriter;
@@ -93,7 +90,7 @@ public class TestDrillDownQuery extends 
     config.setRequireDimCount("b", true);
 
     for (int i = 0; i < 100; i++) {
-      Document2 doc = writer.newDocument();
+      Document doc = writer.newDocument();
       if (i % 2 == 0) { // 50
         doc.addLargeText("content", "foo");
       }

Modified: lucene/dev/branches/lucene6005/lucene/facet/src/test/org/apache/lucene/facet/TestDrillSideways.java
URL: http://svn.apache.org/viewvc/lucene/dev/branches/lucene6005/lucene/facet/src/test/org/apache/lucene/facet/TestDrillSideways.java?rev=1642535&r1=1642534&r2=1642535&view=diff
==============================================================================
--- lucene/dev/branches/lucene6005/lucene/facet/src/test/org/apache/lucene/facet/TestDrillSideways.java (original)
+++ lucene/dev/branches/lucene6005/lucene/facet/src/test/org/apache/lucene/facet/TestDrillSideways.java Sun Nov 30 11:07:09 2014
@@ -28,12 +28,8 @@ import java.util.Map;
 import java.util.Set;
 
 import org.apache.lucene.analysis.MockAnalyzer;
-import org.apache.lucene.document.Document2;
 import org.apache.lucene.document.Document;
-import org.apache.lucene.document.Field;
 import org.apache.lucene.document.FieldTypes;
-import org.apache.lucene.document.SortedDocValuesField;
-import org.apache.lucene.document.StringField;
 import org.apache.lucene.facet.DrillSideways.DrillSidewaysResult;
 import org.apache.lucene.facet.sortedset.DefaultSortedSetDocValuesReaderState;
 import org.apache.lucene.facet.sortedset.SortedSetDocValuesFacetField;
@@ -83,7 +79,7 @@ public class TestDrillSideways extends F
 
     RandomIndexWriter writer = new RandomIndexWriter(random(), dir);
 
-    Document2 doc = writer.newDocument();
+    Document doc = writer.newDocument();
     doc.add(new FacetField("Author", "Bob"));
     doc.add(new FacetField("Publish Date", "2010", "10", "15"));
     writer.addDocument(config.build(taxoWriter, doc));
@@ -260,7 +256,7 @@ public class TestDrillSideways extends F
     FacetsConfig config = new FacetsConfig();
     config.setHierarchical("Publish Date", true);
 
-    Document2 doc = writer.newDocument();
+    Document doc = writer.newDocument();
     doc.add(new FacetField("Author", "Bob"));
     doc.add(new FacetField("Publish Date", "2010", "10", "15"));
     writer.addDocument(config.build(taxoWriter, doc));
@@ -315,7 +311,7 @@ public class TestDrillSideways extends F
     FacetsConfig config = new FacetsConfig();
     config.setHierarchical("dim", true);
 
-    Document2 doc = writer.newDocument();
+    Document doc = writer.newDocument();
     doc.add(new FacetField("dim", "a", "x"));
     writer.addDocument(config.build(taxoWriter, doc));
 
@@ -503,7 +499,7 @@ public class TestDrillSideways extends F
     }
 
     for(Doc rawDoc : docs) {
-      Document2 doc = w.newDocument();
+      Document doc = w.newDocument();
       doc.addAtom("id", rawDoc.id);
       doc.addAtom("content", rawDoc.contentToken);
 

Modified: lucene/dev/branches/lucene6005/lucene/facet/src/test/org/apache/lucene/facet/TestFacetsConfig.java
URL: http://svn.apache.org/viewvc/lucene/dev/branches/lucene6005/lucene/facet/src/test/org/apache/lucene/facet/TestFacetsConfig.java?rev=1642535&r1=1642534&r2=1642535&view=diff
==============================================================================
--- lucene/dev/branches/lucene6005/lucene/facet/src/test/org/apache/lucene/facet/TestFacetsConfig.java (original)
+++ lucene/dev/branches/lucene6005/lucene/facet/src/test/org/apache/lucene/facet/TestFacetsConfig.java Sun Nov 30 11:07:09 2014
@@ -62,7 +62,7 @@ public class TestFacetsConfig extends Fa
     IndexWriter indexWriter = new IndexWriter(indexDir, newIndexWriterConfig(new MockAnalyzer(random())));
     DirectoryTaxonomyWriter taxoWriter = new DirectoryTaxonomyWriter(taxoDir);
     FacetsConfig facetsConfig = new FacetsConfig();
-    Document doc = new Document();
+    Document doc = indexWriter.newDocument();
     doc.add(new FacetField("a", "b"));
     doc = facetsConfig.build(taxoWriter, doc);
     // these two addDocument() used to fail

Modified: lucene/dev/branches/lucene6005/lucene/facet/src/test/org/apache/lucene/facet/TestMultipleIndexFields.java
URL: http://svn.apache.org/viewvc/lucene/dev/branches/lucene6005/lucene/facet/src/test/org/apache/lucene/facet/TestMultipleIndexFields.java?rev=1642535&r1=1642534&r2=1642535&view=diff
==============================================================================
--- lucene/dev/branches/lucene6005/lucene/facet/src/test/org/apache/lucene/facet/TestMultipleIndexFields.java (original)
+++ lucene/dev/branches/lucene6005/lucene/facet/src/test/org/apache/lucene/facet/TestMultipleIndexFields.java Sun Nov 30 11:07:09 2014
@@ -24,16 +24,14 @@ import java.util.Map;
 import org.apache.lucene.analysis.MockAnalyzer;
 import org.apache.lucene.analysis.MockTokenizer;
 import org.apache.lucene.document.Document;
-import org.apache.lucene.document.Field;
-import org.apache.lucene.document.TextField;
 import org.apache.lucene.facet.taxonomy.TaxonomyReader;
 import org.apache.lucene.facet.taxonomy.TaxonomyWriter;
 import org.apache.lucene.facet.taxonomy.directory.DirectoryTaxonomyReader;
 import org.apache.lucene.facet.taxonomy.directory.DirectoryTaxonomyWriter;
-import org.apache.lucene.index.LeafReader;
-import org.apache.lucene.index.LeafReaderContext;
 import org.apache.lucene.index.IndexReader;
 import org.apache.lucene.index.IndexWriterConfig.OpenMode;
+import org.apache.lucene.index.LeafReader;
+import org.apache.lucene.index.LeafReaderContext;
 import org.apache.lucene.index.RandomIndexWriter;
 import org.apache.lucene.search.IndexSearcher;
 import org.apache.lucene.search.MatchAllDocsQuery;
@@ -291,10 +289,10 @@ public class TestMultipleIndexFields ext
 
   private void seedIndex(TaxonomyWriter tw, RandomIndexWriter iw, FacetsConfig config) throws IOException {
     for (FacetField ff : CATEGORIES) {
-      Document doc = new Document();
+      Document doc = iw.newDocument();
       doc.add(ff);
-      doc.add(new TextField("content", "alpha", Field.Store.YES));
+      doc.addLargeText("content", "alpha");
       iw.addDocument(config.build(tw, doc));
     }
   }
-}
\ No newline at end of file
+}

Modified: lucene/dev/branches/lucene6005/lucene/facet/src/test/org/apache/lucene/facet/TestRandomSamplingFacetsCollector.java
URL: http://svn.apache.org/viewvc/lucene/dev/branches/lucene6005/lucene/facet/src/test/org/apache/lucene/facet/TestRandomSamplingFacetsCollector.java?rev=1642535&r1=1642534&r2=1642535&view=diff
==============================================================================
--- lucene/dev/branches/lucene6005/lucene/facet/src/test/org/apache/lucene/facet/TestRandomSamplingFacetsCollector.java (original)
+++ lucene/dev/branches/lucene6005/lucene/facet/src/test/org/apache/lucene/facet/TestRandomSamplingFacetsCollector.java Sun Nov 30 11:07:09 2014
@@ -4,8 +4,6 @@ import java.util.List;
 import java.util.Random;
 
 import org.apache.lucene.document.Document;
-import org.apache.lucene.document.Field.Store;
-import org.apache.lucene.document.StringField;
 import org.apache.lucene.facet.FacetsCollector.MatchingDocs;
 import org.apache.lucene.facet.taxonomy.FastTaxonomyFacetCounts;
 import org.apache.lucene.facet.taxonomy.TaxonomyReader;
@@ -63,10 +61,11 @@ public class TestRandomSamplingFacetsCol
     final int numCategories = 10;
     int numDocs = atLeast(10000);
     for (int i = 0; i < numDocs; i++) {
-      Document doc = new Document();
-      doc.add(new StringField("EvenOdd", (i % 2 == 0) ? "even" : "odd", Store.NO));
+      Document doc = writer.newDocument();
+      doc.addAtom("EvenOdd", (i % 2 == 0) ? "even" : "odd");
       doc.add(new FacetField("iMod10", Integer.toString(i % numCategories)));
-      writer.addDocument(config.build(taxoWriter, doc));
+      Document built = config.build(taxoWriter, doc);
+      writer.addDocument(built);
     }
     
     // NRT open
@@ -140,7 +139,7 @@ public class TestRandomSamplingFacetsCol
       LabelAndValue amortized = amortized10Result.labelValues[i];
       LabelAndValue sampled = random10Result.labelValues[i];
       // since numDocs may not divide by 10 exactly, allow for some slack in the amortized count 
-      assertEquals(amortized.value.floatValue(), Math.min(5 * sampled.value.floatValue(), numDocs / 10.f), 1.0);
+      assertEquals(Math.min(5 * sampled.value.floatValue(), numDocs / 10.f), amortized.value.floatValue(), 1.0);
     }
     
     IOUtils.close(searcher.getIndexReader(), taxoReader, dir, taxoDir);

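The last hunk only swaps the first two arguments: JUnit's floating-point assertEquals is (expected, actual, delta), so the amortized-count bound now sits in the expected slot and a failure message reads the right way around. For illustration:

    import static org.junit.Assert.assertEquals;
    import org.junit.Test;

    public class AssertOrderSketch {
      @Test
      public void expectedComesFirst() {
        double measured = 4.6;
        // assertEquals(expected, actual, delta): the expected bound goes first so a
        // failure would report "expected:<5.0> but was:<4.6>" rather than the reverse.
        assertEquals(5.0, measured, 1.0);
      }
    }
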
Modified: lucene/dev/branches/lucene6005/lucene/facet/src/test/org/apache/lucene/facet/range/TestRangeFacetCounts.java
URL: http://svn.apache.org/viewvc/lucene/dev/branches/lucene6005/lucene/facet/src/test/org/apache/lucene/facet/range/TestRangeFacetCounts.java?rev=1642535&r1=1642534&r2=1642535&view=diff
==============================================================================
--- lucene/dev/branches/lucene6005/lucene/facet/src/test/org/apache/lucene/facet/range/TestRangeFacetCounts.java (original)
+++ lucene/dev/branches/lucene6005/lucene/facet/src/test/org/apache/lucene/facet/range/TestRangeFacetCounts.java Sun Nov 30 11:07:09 2014
@@ -22,13 +22,8 @@ import java.util.HashMap;
 import java.util.Map;
 import java.util.concurrent.atomic.AtomicBoolean;
 
-import org.apache.lucene.document.Document2;
 import org.apache.lucene.document.Document;
-import org.apache.lucene.document.DoubleDocValuesField;
-import org.apache.lucene.document.Field;
 import org.apache.lucene.document.FieldTypes;
-import org.apache.lucene.document.FloatDocValuesField;
-import org.apache.lucene.document.NumericDocValuesField;
 import org.apache.lucene.facet.DrillDownQuery;
 import org.apache.lucene.facet.DrillSideways.DrillSidewaysResult;
 import org.apache.lucene.facet.DrillSideways;
@@ -62,8 +57,6 @@ import org.apache.lucene.search.Filter;
 import org.apache.lucene.search.FilterCachingPolicy;
 import org.apache.lucene.search.IndexSearcher;
 import org.apache.lucene.search.MatchAllDocsQuery;
-import org.apache.lucene.search.NumericRangeFilter;
-import org.apache.lucene.search.NumericRangeQuery;
 import org.apache.lucene.search.QueryWrapperFilter;
 import org.apache.lucene.store.Directory;
 import org.apache.lucene.util.BitDocIdSet;
@@ -78,13 +71,13 @@ public class TestRangeFacetCounts extend
     Directory d = newDirectory();
     RandomIndexWriter w = new RandomIndexWriter(random(), d);
     for(long l=0;l<100;l++) {
-      Document2 doc = w.newDocument();
+      Document doc = w.newDocument();
       doc.addLong("field", l);
       w.addDocument(doc);
     }
 
     // Also add Long.MAX_VALUE
-    Document2 doc = w.newDocument();
+    Document doc = w.newDocument();
     doc.addLong("field", Long.MAX_VALUE);
     w.addDocument(doc);
 
@@ -143,7 +136,7 @@ public class TestRangeFacetCounts extend
     Directory d = newDirectory();
     RandomIndexWriter w = new RandomIndexWriter(random(), d);
 
-    Document2 doc = w.newDocument();
+    Document doc = w.newDocument();
     doc.addLong("field", Long.MIN_VALUE);
     w.addDocument(doc);
 
@@ -182,11 +175,11 @@ public class TestRangeFacetCounts extend
     Directory d = newDirectory();
     RandomIndexWriter w = new RandomIndexWriter(random(), d);
     for(long l=0;l<100;l++) {
-      Document2 doc = w.newDocument();
+      Document doc = w.newDocument();
       doc.addLong("field", l);
       w.addDocument(doc);
     }
-    Document2 doc = w.newDocument();
+    Document doc = w.newDocument();
     doc.addLong("field", Long.MAX_VALUE);
     w.addDocument(doc);
 
@@ -222,7 +215,7 @@ public class TestRangeFacetCounts extend
     FacetsConfig config = new FacetsConfig();
 
     for (long l = 0; l < 100; l++) {
-      Document2 doc = w.newDocument();
+      Document doc = w.newDocument();
       // For computing range facet counts and drill down by numeric range:
       doc.addLong("field", l);
 
@@ -316,7 +309,7 @@ public class TestRangeFacetCounts extend
     Directory d = newDirectory();
     RandomIndexWriter w = new RandomIndexWriter(random(), d);
     for(long l=0;l<100;l++) {
-      Document2 doc = w.newDocument();
+      Document doc = w.newDocument();
       doc.addDouble("field", l);
       w.addDocument(doc);
     }
@@ -344,7 +337,7 @@ public class TestRangeFacetCounts extend
     Directory d = newDirectory();
     RandomIndexWriter w = new RandomIndexWriter(random(), d);
     for(long l=0;l<100;l++) {
-      Document2 doc = w.newDocument();
+      Document doc = w.newDocument();
       doc.addFloat("field", l);
       w.addDocument(doc);
     }
@@ -381,7 +374,7 @@ public class TestRangeFacetCounts extend
     long minValue = Long.MAX_VALUE;
     long maxValue = Long.MIN_VALUE;
     for(int i=0;i<numDocs;i++) {
-      Document2 doc = w.newDocument();
+      Document doc = w.newDocument();
       long v = random().nextLong();
       values[i] = v;
       doc.addLong("field", v);
@@ -526,7 +519,7 @@ public class TestRangeFacetCounts extend
     float minValue = Float.POSITIVE_INFINITY;
     float maxValue = Float.NEGATIVE_INFINITY;
     for(int i=0;i<numDocs;i++) {
-      Document2 doc = w.newDocument();
+      Document doc = w.newDocument();
       float v = random().nextFloat();
       values[i] = v;
       doc.addFloat("field", v);
@@ -685,7 +678,7 @@ public class TestRangeFacetCounts extend
     double minValue = Double.POSITIVE_INFINITY;
     double maxValue = Double.NEGATIVE_INFINITY;
     for(int i=0;i<numDocs;i++) {
-      Document2 doc = w.newDocument();
+      Document doc = w.newDocument();
       double v = random().nextDouble();
       values[i] = v;
       doc.addDouble("field", v);
@@ -830,7 +823,7 @@ public class TestRangeFacetCounts extend
         w.addDocument(w.newDocument());
         continue;
       }
-      Document2 doc = w.newDocument();
+      Document doc = w.newDocument();
       doc.addLong("field", l);
       w.addDocument(doc);
     }
@@ -859,7 +852,7 @@ public class TestRangeFacetCounts extend
     Directory dir = newDirectory();
     RandomIndexWriter writer = new RandomIndexWriter(random(), dir);
     
-    Document2 doc = writer.newDocument();
+    Document doc = writer.newDocument();
     writer.addDocument(doc);
     writer.addDocument(doc);
     writer.addDocument(doc);

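These tests now index numeric fields with the typed addLong/addFloat/addDouble calls instead of doc-values Field subclasses. An end-to-end sketch of the long case, assuming the branch API for newDocument/addLong and stock facet classes for the counting side (directory, analyzer, and range labels are illustrative):

    import org.apache.lucene.analysis.core.WhitespaceAnalyzer;
    import org.apache.lucene.document.Document;
    import org.apache.lucene.facet.Facets;
    import org.apache.lucene.facet.FacetsCollector;
    import org.apache.lucene.facet.range.LongRange;
    import org.apache.lucene.facet.range.LongRangeFacetCounts;
    import org.apache.lucene.index.DirectoryReader;
    import org.apache.lucene.index.IndexWriter;
    import org.apache.lucene.index.IndexWriterConfig;
    import org.apache.lucene.search.IndexSearcher;
    import org.apache.lucene.search.MatchAllDocsQuery;
    import org.apache.lucene.store.Directory;
    import org.apache.lucene.store.RAMDirectory;

    public class LongRangeFacetSketch {
      public static void main(String[] args) throws Exception {
        Directory dir = new RAMDirectory();
        IndexWriter w = new IndexWriter(dir, new IndexWriterConfig(new WhitespaceAnalyzer()));
        for (long l = 0; l < 100; l++) {
          Document doc = w.newDocument();   // branch API: typed field adds, no Field instances
          doc.addLong("field", l);
          w.addDocument(doc);
        }

        DirectoryReader r = DirectoryReader.open(w, true);
        IndexSearcher searcher = new IndexSearcher(r);
        FacetsCollector fc = new FacetsCollector();
        searcher.search(new MatchAllDocsQuery(), fc);

        Facets facets = new LongRangeFacetCounts("field", fc,
            new LongRange("less than 10", 0L, true, 10L, false),
            new LongRange("10 or over", 10L, true, Long.MAX_VALUE, true));
        System.out.println(facets.getTopChildren(10, "field"));

        r.close();
        w.close();
        dir.close();
      }
    }
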
Modified: lucene/dev/branches/lucene6005/lucene/facet/src/test/org/apache/lucene/facet/sortedset/TestSortedSetDocValuesFacets.java
URL: http://svn.apache.org/viewvc/lucene/dev/branches/lucene6005/lucene/facet/src/test/org/apache/lucene/facet/sortedset/TestSortedSetDocValuesFacets.java?rev=1642535&r1=1642534&r2=1642535&view=diff
==============================================================================
--- lucene/dev/branches/lucene6005/lucene/facet/src/test/org/apache/lucene/facet/sortedset/TestSortedSetDocValuesFacets.java (original)
+++ lucene/dev/branches/lucene6005/lucene/facet/src/test/org/apache/lucene/facet/sortedset/TestSortedSetDocValuesFacets.java Sun Nov 30 11:07:09 2014
@@ -22,9 +22,8 @@ import java.util.HashMap;
 import java.util.List;
 import java.util.Map;
 
-import org.apache.lucene.document.Document2;
 import org.apache.lucene.document.Document;
-import org.apache.lucene.document.Field;
+import org.apache.lucene.document.FieldTypes;
 import org.apache.lucene.facet.DrillDownQuery;
 import org.apache.lucene.facet.FacetResult;
 import org.apache.lucene.facet.FacetTestCase;
@@ -56,7 +55,7 @@ public class TestSortedSetDocValuesFacet
     config.setMultiValued("a", true);
     RandomIndexWriter writer = new RandomIndexWriter(random(), dir);
 
-    Document2 doc = writer.newDocument();
+    Document doc = writer.newDocument();
     doc.add(new SortedSetDocValuesFacetField("a", "foo"));
     doc.add(new SortedSetDocValuesFacetField("a", "bar"));
     doc.add(new SortedSetDocValuesFacetField("a", "zoo"));
@@ -102,21 +101,23 @@ public class TestSortedSetDocValuesFacet
     Directory dir = newDirectory();
 
     RandomIndexWriter writer = new RandomIndexWriter(random(), dir);
+    FieldTypes fieldTypes = writer.getFieldTypes();
+    fieldTypes.setMultiValued("a");
 
     FacetsConfig config = new FacetsConfig();
 
-    Document doc = new Document();
+    Document doc = writer.newDocument();
     doc.add(new SortedSetDocValuesFacetField("a", "foo"));
     writer.addDocument(config.build(doc));
 
     IndexReader r = writer.getReader();
     SortedSetDocValuesReaderState state = new DefaultSortedSetDocValuesReaderState(r);
 
-    doc = new Document();
+    doc = writer.newDocument();
     doc.add(new SortedSetDocValuesFacetField("a", "bar"));
     writer.addDocument(config.build(doc));
 
-    doc = new Document();
+    doc = writer.newDocument();
     doc.add(new SortedSetDocValuesFacetField("a", "baz"));
     writer.addDocument(config.build(doc));
 
@@ -144,10 +145,14 @@ public class TestSortedSetDocValuesFacet
     Directory dir = newDirectory();
 
     RandomIndexWriter writer = new RandomIndexWriter(random(), dir);
+    FieldTypes fieldTypes = writer.getFieldTypes();
+    fieldTypes.setMultiValued("a");
+    fieldTypes.setMultiValued("b");
+    fieldTypes.setMultiValued("c");
 
     FacetsConfig config = new FacetsConfig();
 
-    Document doc = new Document();
+    Document doc = writer.newDocument();
     doc.add(new SortedSetDocValuesFacetField("a", "foo1"));
     writer.addDocument(config.build(doc));
 
@@ -155,7 +160,7 @@ public class TestSortedSetDocValuesFacet
       writer.commit();
     }
 
-    doc = new Document();
+    doc = writer.newDocument();
     doc.add(new SortedSetDocValuesFacetField("a", "foo2"));
     doc.add(new SortedSetDocValuesFacetField("b", "bar1"));
     writer.addDocument(config.build(doc));
@@ -164,7 +169,7 @@ public class TestSortedSetDocValuesFacet
       writer.commit();
     }
 
-    doc = new Document();
+    doc = writer.newDocument();
     doc.add(new SortedSetDocValuesFacetField("a", "foo3"));
     doc.add(new SortedSetDocValuesFacetField("b", "bar2"));
     doc.add(new SortedSetDocValuesFacetField("c", "baz1"));
@@ -198,18 +203,21 @@ public class TestSortedSetDocValuesFacet
 
     RandomIndexWriter writer = new RandomIndexWriter(random(), dir);
 
+    FieldTypes fieldTypes = writer.getFieldTypes();
+    fieldTypes.setMultiValued("a");
+
     FacetsConfig config = new FacetsConfig();
 
-    Document doc = new Document();
+    Document doc = writer.newDocument();
     doc.add(new SortedSetDocValuesFacetField("a", "foo1"));
     writer.addDocument(config.build(doc));
     writer.commit();
 
-    doc = new Document();
+    doc = writer.newDocument();
     writer.addDocument(config.build(doc));
     writer.commit();
 
-    doc = new Document();
+    doc = writer.newDocument();
     doc.add(new SortedSetDocValuesFacetField("a", "foo2"));
     writer.addDocument(config.build(doc));
     writer.commit();
@@ -237,15 +245,18 @@ public class TestSortedSetDocValuesFacet
 
     RandomIndexWriter writer = new RandomIndexWriter(random(), dir);
 
+    FieldTypes fieldTypes = writer.getFieldTypes();
+    fieldTypes.setMultiValued("a");
+
     FacetsConfig config = new FacetsConfig();
 
-    Document doc = new Document();
+    Document doc = writer.newDocument();
     doc.add(new SortedSetDocValuesFacetField("a", "foo1"));
     writer.addDocument(config.build(doc));
 
     writer.commit();
 
-    doc = new Document();
+    doc = writer.newDocument();
     doc.add(new SortedSetDocValuesFacetField("a", "foo2"));
     writer.addDocument(config.build(doc));
 
@@ -273,13 +284,18 @@ public class TestSortedSetDocValuesFacet
     Directory taxoDir = newDirectory();
 
     RandomIndexWriter w = new RandomIndexWriter(random(), indexDir);
+    FieldTypes fieldTypes = w.getFieldTypes();
+
     FacetsConfig config = new FacetsConfig();
     int numDocs = atLeast(1000);
     int numDims = TestUtil.nextInt(random(), 1, 7);
+    for(int dim=0;dim<numDims;dim++) {
+      fieldTypes.setMultiValued("dim" + dim);
+    }
     List<TestDoc> testDocs = getRandomDocs(tokens, numDocs, numDims);
     for(TestDoc testDoc : testDocs) {
-      Document doc = new Document();
-      doc.add(newStringField("content", testDoc.content, Field.Store.NO));
+      Document doc = w.newDocument();
+      doc.addAtom("content", testDoc.content);
       for(int j=0;j<numDims;j++) {
         if (testDoc.dims[j] != null) {
           doc.add(new SortedSetDocValuesFacetField("dim" + j, testDoc.dims[j]));

Modified: lucene/dev/branches/lucene6005/lucene/facet/src/test/org/apache/lucene/facet/taxonomy/TestCachedOrdinalsReader.java
URL: http://svn.apache.org/viewvc/lucene/dev/branches/lucene6005/lucene/facet/src/test/org/apache/lucene/facet/taxonomy/TestCachedOrdinalsReader.java?rev=1642535&r1=1642534&r2=1642535&view=diff
==============================================================================
--- lucene/dev/branches/lucene6005/lucene/facet/src/test/org/apache/lucene/facet/taxonomy/TestCachedOrdinalsReader.java (original)
+++ lucene/dev/branches/lucene6005/lucene/facet/src/test/org/apache/lucene/facet/taxonomy/TestCachedOrdinalsReader.java Sun Nov 30 11:07:09 2014
@@ -25,10 +25,10 @@ import org.apache.lucene.facet.FacetFiel
 import org.apache.lucene.facet.FacetTestCase;
 import org.apache.lucene.facet.FacetsConfig;
 import org.apache.lucene.facet.taxonomy.directory.DirectoryTaxonomyWriter;
-import org.apache.lucene.index.LeafReaderContext;
 import org.apache.lucene.index.DirectoryReader;
 import org.apache.lucene.index.IndexWriter;
 import org.apache.lucene.index.IndexWriterConfig;
+import org.apache.lucene.index.LeafReaderContext;
 import org.apache.lucene.store.Directory;
 import org.apache.lucene.util.IOUtils;
 import org.junit.Test;
@@ -45,10 +45,10 @@ public class TestCachedOrdinalsReader ex
     DirectoryTaxonomyWriter taxoWriter = new DirectoryTaxonomyWriter(taxoDir);
     FacetsConfig config = new FacetsConfig();
     
-    Document doc = new Document();
+    Document doc = writer.newDocument();
     doc.add(new FacetField("A", "1"));
     writer.addDocument(config.build(taxoWriter, doc));
-    doc = new Document();
+    doc = writer.newDocument();
     doc.add(new FacetField("A", "2"));
     writer.addDocument(config.build(taxoWriter, doc));
     

Modified: lucene/dev/branches/lucene6005/lucene/facet/src/test/org/apache/lucene/facet/taxonomy/TestOrdinalMappingLeafReader.java
URL: http://svn.apache.org/viewvc/lucene/dev/branches/lucene6005/lucene/facet/src/test/org/apache/lucene/facet/taxonomy/TestOrdinalMappingLeafReader.java?rev=1642535&r1=1642534&r2=1642535&view=diff
==============================================================================
--- lucene/dev/branches/lucene6005/lucene/facet/src/test/org/apache/lucene/facet/taxonomy/TestOrdinalMappingLeafReader.java (original)
+++ lucene/dev/branches/lucene6005/lucene/facet/src/test/org/apache/lucene/facet/taxonomy/TestOrdinalMappingLeafReader.java Sun Nov 30 11:07:09 2014
@@ -2,8 +2,8 @@ package org.apache.lucene.facet.taxonomy
 
 import java.io.IOException;
 
-import org.apache.lucene.document.BinaryDocValuesField;
 import org.apache.lucene.document.Document;
+import org.apache.lucene.document.FieldTypes;
 import org.apache.lucene.facet.FacetField;
 import org.apache.lucene.facet.FacetResult;
 import org.apache.lucene.facet.FacetTestCase;
@@ -12,8 +12,8 @@ import org.apache.lucene.facet.FacetsCol
 import org.apache.lucene.facet.FacetsConfig;
 import org.apache.lucene.facet.LabelAndValue;
 import org.apache.lucene.facet.taxonomy.directory.DirectoryTaxonomyReader;
-import org.apache.lucene.facet.taxonomy.directory.DirectoryTaxonomyWriter;
 import org.apache.lucene.facet.taxonomy.directory.DirectoryTaxonomyWriter.MemoryOrdinalMap;
+import org.apache.lucene.facet.taxonomy.directory.DirectoryTaxonomyWriter;
 import org.apache.lucene.index.BinaryDocValues;
 import org.apache.lucene.index.DirectoryReader;
 import org.apache.lucene.index.IndexWriter;
@@ -105,7 +105,9 @@ public class TestOrdinalMappingLeafReade
     assertEquals(NUM_DOCS * 2, idResult.value); // each "id" appears twice
     
     BinaryDocValues bdv = MultiDocValues.getBinaryValues(indexReader, "bdv");
+    assertNotNull(bdv);
     BinaryDocValues cbdv = MultiDocValues.getBinaryValues(indexReader, "cbdv");
+    assertNotNull(cbdv);
     for (int i = 0; i < indexReader.maxDoc(); i++) {
       assertEquals(Integer.parseInt(cbdv.get(i).utf8ToString()), Integer.parseInt(bdv.get(i).utf8ToString())*2);
     }
@@ -115,10 +117,13 @@ public class TestOrdinalMappingLeafReade
   private void buildIndexWithFacets(Directory indexDir, Directory taxoDir, boolean asc) throws IOException {
     IndexWriterConfig config = newIndexWriterConfig(null);
     RandomIndexWriter writer = new RandomIndexWriter(random(), indexDir, config);
+    FieldTypes fieldTypes = writer.getFieldTypes();
+    fieldTypes.disableSorting("bdv");
+    fieldTypes.disableSorting("cbdv");
     
     DirectoryTaxonomyWriter taxonomyWriter = new DirectoryTaxonomyWriter(taxoDir);
     for (int i = 1; i <= NUM_DOCS; i++) {
-      Document doc = new Document();
+      Document doc = writer.newDocument();
       for (int j = i; j <= NUM_DOCS; j++) {
         int facetValue = asc ? j: NUM_DOCS - j;
         doc.add(new FacetField("tag", Integer.toString(facetValue)));
@@ -127,8 +132,8 @@ public class TestOrdinalMappingLeafReade
       doc.add(new FacetField("id", Integer.toString(i)));
       
       // make sure OrdinalMappingLeafReader ignores non-facet BinaryDocValues fields
-      doc.add(new BinaryDocValuesField("bdv", new BytesRef(Integer.toString(i))));
-      doc.add(new BinaryDocValuesField("cbdv", new BytesRef(Integer.toString(i*2))));
+      doc.addBinary("bdv", new BytesRef(Integer.toString(i)));
+      doc.addBinary("cbdv", new BytesRef(Integer.toString(i*2)));
       writer.addDocument(facetConfig.build(taxonomyWriter, doc));
     }
     taxonomyWriter.commit();

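Here addBinary replaces BinaryDocValuesField, and the new disableSorting calls are what the test uses so these fields end up as plain binary doc values (that reading of disableSorting is an inference from this diff, not documented in it). A small sketch of the pattern, assuming the branch API shown above:

    import java.io.IOException;

    import org.apache.lucene.document.Document;
    import org.apache.lucene.document.FieldTypes;
    import org.apache.lucene.index.IndexWriter;
    import org.apache.lucene.util.BytesRef;

    public class BinaryDocValuesSketch {
      static void indexBinary(IndexWriter writer, String field, int value) throws IOException {
        // Declared once per field, before documents are added:
        FieldTypes fieldTypes = writer.getFieldTypes();
        fieldTypes.disableSorting(field);

        Document doc = writer.newDocument();                          // branch API
        doc.addBinary(field, new BytesRef(Integer.toString(value)));  // was: new BinaryDocValuesField(...)
        writer.addDocument(doc);
      }
    }
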
Modified: lucene/dev/branches/lucene6005/lucene/facet/src/test/org/apache/lucene/facet/taxonomy/TestSearcherTaxonomyManager.java
URL: http://svn.apache.org/viewvc/lucene/dev/branches/lucene6005/lucene/facet/src/test/org/apache/lucene/facet/taxonomy/TestSearcherTaxonomyManager.java?rev=1642535&r1=1642534&r2=1642535&view=diff
==============================================================================
--- lucene/dev/branches/lucene6005/lucene/facet/src/test/org/apache/lucene/facet/taxonomy/TestSearcherTaxonomyManager.java (original)
+++ lucene/dev/branches/lucene6005/lucene/facet/src/test/org/apache/lucene/facet/taxonomy/TestSearcherTaxonomyManager.java Sun Nov 30 11:07:09 2014
@@ -70,7 +70,7 @@ public class TestSearcherTaxonomyManager
         Set<String> seen = new HashSet<>();
         List<String> paths = new ArrayList<>();
         while (true) {
-          Document doc = new Document();
+          Document doc = w.newDocument();
           int numPaths = TestUtil.nextInt(random(), 1, 5);
           for(int i=0;i<numPaths;i++) {
             String path;
@@ -271,7 +271,7 @@ public class TestSearcherTaxonomyManager
     tw2.close();
 
     SearcherTaxonomyManager mgr = new SearcherTaxonomyManager(w, true, null, tw);
-    w.addDocument(new Document());
+    w.addDocument(w.newDocument());
     tw.replaceTaxonomy(taxoDir2);
     taxoDir2.close();
 
@@ -307,7 +307,7 @@ public class TestSearcherTaxonomyManager
       mgr.release(pair);
     }
     
-    w.addDocument(new Document());
+    w.addDocument(w.newDocument());
     tw.replaceTaxonomy(taxoDir2);
     taxoDir2.close();
     w.commit();

Modified: lucene/dev/branches/lucene6005/lucene/facet/src/test/org/apache/lucene/facet/taxonomy/TestTaxonomyFacetAssociations.java
URL: http://svn.apache.org/viewvc/lucene/dev/branches/lucene6005/lucene/facet/src/test/org/apache/lucene/facet/taxonomy/TestTaxonomyFacetAssociations.java?rev=1642535&r1=1642534&r2=1642535&view=diff
==============================================================================
--- lucene/dev/branches/lucene6005/lucene/facet/src/test/org/apache/lucene/facet/taxonomy/TestTaxonomyFacetAssociations.java (original)
+++ lucene/dev/branches/lucene6005/lucene/facet/src/test/org/apache/lucene/facet/taxonomy/TestTaxonomyFacetAssociations.java Sun Nov 30 11:07:09 2014
@@ -18,7 +18,6 @@ package org.apache.lucene.facet.taxonomy
  */
 
 
-import org.apache.lucene.document.Document2;
 import org.apache.lucene.document.Document;
 import org.apache.lucene.facet.DrillDownQuery;
 import org.apache.lucene.facet.FacetTestCase;
@@ -65,7 +64,7 @@ public class TestTaxonomyFacetAssociatio
 
     // index documents, 50% have only 'b' and all have 'a'
     for (int i = 0; i < 110; i++) {
-      Document2 doc = writer.newDocument();
+      Document doc = writer.newDocument();
       // every 11th document is added empty, this used to cause the association
       // aggregators to go into an infinite loop
       if (i % 11 != 0) {
@@ -168,7 +167,7 @@ public class TestTaxonomyFacetAssociatio
     FacetsConfig config = new FacetsConfig();
     RandomIndexWriter writer = new RandomIndexWriter(random(), dir);
 
-    Document doc = new Document();
+    Document doc = writer.newDocument();
     doc.add(new IntAssociationFacetField(14, "a", "x"));
     doc.add(new FloatAssociationFacetField(55.0f, "b", "y"));
     try {
@@ -190,7 +189,7 @@ public class TestTaxonomyFacetAssociatio
     config.setHierarchical("a", true);
     RandomIndexWriter writer = new RandomIndexWriter(random(), dir);
 
-    Document doc = new Document();
+    Document doc = writer.newDocument();
     doc.add(new IntAssociationFacetField(14, "a", "x"));
     try {
       writer.addDocument(config.build(taxoWriter, doc));
@@ -211,7 +210,7 @@ public class TestTaxonomyFacetAssociatio
     config.setRequireDimCount("a", true);
     RandomIndexWriter writer = new RandomIndexWriter(random(), dir);
 
-    Document doc = new Document();
+    Document doc = writer.newDocument();
     doc.add(new IntAssociationFacetField(14, "a", "x"));
     try {
       writer.addDocument(config.build(taxoWriter, doc));

Modified: lucene/dev/branches/lucene6005/lucene/facet/src/test/org/apache/lucene/facet/taxonomy/TestTaxonomyFacetCounts.java
URL: http://svn.apache.org/viewvc/lucene/dev/branches/lucene6005/lucene/facet/src/test/org/apache/lucene/facet/taxonomy/TestTaxonomyFacetCounts.java?rev=1642535&r1=1642534&r2=1642535&view=diff
==============================================================================
--- lucene/dev/branches/lucene6005/lucene/facet/src/test/org/apache/lucene/facet/taxonomy/TestTaxonomyFacetCounts.java (original)
+++ lucene/dev/branches/lucene6005/lucene/facet/src/test/org/apache/lucene/facet/taxonomy/TestTaxonomyFacetCounts.java Sun Nov 30 11:07:09 2014
@@ -27,10 +27,7 @@ import java.util.Map;
 import java.util.Set;
 
 import org.apache.lucene.analysis.MockAnalyzer;
-import org.apache.lucene.document.Document2;
 import org.apache.lucene.document.Document;
-import org.apache.lucene.document.Field;
-import org.apache.lucene.document.StringField;
 import org.apache.lucene.facet.DrillDownQuery;
 import org.apache.lucene.facet.FacetField;
 import org.apache.lucene.facet.FacetResult;
@@ -73,7 +70,7 @@ public class TestTaxonomyFacetCounts ext
 
     RandomIndexWriter writer = new RandomIndexWriter(random(), dir);
 
-    Document2 doc = writer.newDocument();
+    Document doc = writer.newDocument();
     doc.add(new FacetField("Author", "Bob"));
     doc.add(new FacetField("Publish Date", "2010", "10", "15"));
     writer.addDocument(config.build(taxoWriter, doc));
@@ -158,7 +155,7 @@ public class TestTaxonomyFacetCounts ext
     RandomIndexWriter writer = new RandomIndexWriter(random(), dir);
     FacetsConfig config = new FacetsConfig();
 
-    Document2 doc = writer.newDocument();
+    Document doc = writer.newDocument();
     doc.add(new FacetField("a", "foo1"));
     writer.addDocument(config.build(taxoWriter, doc));
 
@@ -216,7 +213,7 @@ public class TestTaxonomyFacetCounts ext
     config.setIndexFieldName("a", "$facets2");
     RandomIndexWriter writer = new RandomIndexWriter(random(), dir);
 
-    Document2 doc = writer.newDocument();
+    Document doc = writer.newDocument();
     doc.add(new FacetField("a", "foo1"));
     writer.addDocument(config.build(taxoWriter, doc));
 
@@ -280,8 +277,8 @@ public class TestTaxonomyFacetCounts ext
     RandomIndexWriter writer = new RandomIndexWriter(random(), dir, iwc);
     FacetsConfig config = new FacetsConfig();
 
-    Document2 doc = writer.newDocument();
-    doc.add(newTextField("field", "text", Field.Store.NO));
+    Document doc = writer.newDocument();
+    doc.addLargeText("field", "text");
     doc.add(new FacetField("a", "path"));
     writer.addDocument(config.build(taxoWriter, doc));
     writer.close();
@@ -297,8 +294,8 @@ public class TestTaxonomyFacetCounts ext
     config.setMultiValued("a", true);
     RandomIndexWriter writer = new RandomIndexWriter(random(), dir);
 
-    Document2 doc = writer.newDocument();
-    doc.add(newTextField("field", "text", Field.Store.NO));
+    Document doc = writer.newDocument();
+    doc.addLargeText("field", "text");
     doc.add(new FacetField("a", "path", "x"));
     doc.add(new FacetField("a", "path", "y"));
     writer.addDocument(config.build(taxoWriter, doc));
@@ -343,8 +340,8 @@ public class TestTaxonomyFacetCounts ext
     FacetsConfig config = new FacetsConfig();
     config.setMultiValued("dim", true);
 
-    Document2 doc = writer.newDocument();
-    doc.add(newTextField("field", "text", Field.Store.NO));
+    Document doc = writer.newDocument();
+    doc.addLargeText("field", "text");
     doc.add(new FacetField("dim", "test\u001Fone"));
     doc.add(new FacetField("dim", "test\u001Etwo"));
     writer.addDocument(config.build(taxoWriter, doc));
@@ -384,8 +381,8 @@ public class TestTaxonomyFacetCounts ext
     config.setHierarchical("dim3", true);
     config.setRequireDimCount("dim3", true);
 
-    Document2 doc = writer.newDocument();
-    doc.add(newTextField("field", "text", Field.Store.NO));
+    Document doc = writer.newDocument();
+    doc.addLargeText("field", "text");
     doc.add(new FacetField("dim", "a"));
     doc.add(new FacetField("dim2", "a"));
     doc.add(new FacetField("dim2", "b"));
@@ -433,8 +430,8 @@ public class TestTaxonomyFacetCounts ext
     
     int numLabels = TestUtil.nextInt(random(), 40000, 100000);
     
-    Document2 doc = writer.newDocument();
-    doc.add(newTextField("field", "text", Field.Store.NO));
+    Document doc = writer.newDocument();
+    doc.addLargeText("field", "text");
     for (int i = 0; i < numLabels; i++) {
       doc.add(new FacetField("dim", "" + i));
     }
@@ -478,8 +475,8 @@ public class TestTaxonomyFacetCounts ext
     RandomIndexWriter writer = new RandomIndexWriter(random(), dir);
     FacetsConfig config = new FacetsConfig();
 
-    Document2 doc = writer.newDocument();
-    doc.add(newTextField("field", "text", Field.Store.NO));
+    Document doc = writer.newDocument();
+    doc.addLargeText("field", "text");
     doc.add(new FacetField("a", "path", "other"));
     try {
       config.build(taxoWriter, doc);
@@ -500,8 +497,8 @@ public class TestTaxonomyFacetCounts ext
     RandomIndexWriter writer = new RandomIndexWriter(random(), dir);
     FacetsConfig config = new FacetsConfig();
 
-    Document2 doc = writer.newDocument();
-    doc.add(newTextField("field", "text", Field.Store.NO));
+    Document doc = writer.newDocument();
+    doc.addLargeText("field", "text");
     doc.add(new FacetField("a", "path"));
     doc.add(new FacetField("a", "path2"));
     try {
@@ -524,7 +521,7 @@ public class TestTaxonomyFacetCounts ext
     config.setIndexFieldName("b", "$b");
     
     for(int i = atLeast(30); i > 0; --i) {
-      Document2 doc = iw.newDocument();
+      Document doc = iw.newDocument();
       doc.addAtom("f", "v");
       doc.add(new FacetField("a", "1"));
       doc.add(new FacetField("b", "1"));
@@ -553,7 +550,7 @@ public class TestTaxonomyFacetCounts ext
     IndexWriter iw = new IndexWriter(indexDir, newIndexWriterConfig(new MockAnalyzer(random())));
     FacetsConfig config = new FacetsConfig();
     for(int i = atLeast(30); i > 0; --i) {
-      Document2 doc = iw.newDocument();
+      Document doc = iw.newDocument();
       doc.add(new FacetField("a", "1"));
       doc.add(new FacetField("b", "1"));
       iw.addDocument(config.build(taxoWriter, doc));
@@ -582,7 +579,7 @@ public class TestTaxonomyFacetCounts ext
     IndexWriter iw = new IndexWriter(indexDir, newIndexWriterConfig(new MockAnalyzer(random())));
     FacetsConfig config = new FacetsConfig();
 
-    Document2 doc = iw.newDocument();
+    Document doc = iw.newDocument();
     doc.add(new FacetField("a", "1"));
     doc.add(new FacetField("b", "1"));
     iw.addDocument(config.build(taxoWriter, doc));
@@ -611,7 +608,7 @@ public class TestTaxonomyFacetCounts ext
     IndexWriter iw = new IndexWriter(indexDir, newIndexWriterConfig(new MockAnalyzer(random())));
     FacetsConfig config = new FacetsConfig();
     for (int i = 0; i < 10; i++) {
-      Document2 doc = iw.newDocument();
+      Document doc = iw.newDocument();
       doc.add(new FacetField("a", Integer.toString(i)));
       iw.addDocument(config.build(taxoWriter, doc));
     }
@@ -631,7 +628,7 @@ public class TestTaxonomyFacetCounts ext
 
   private void indexTwoDocs(TaxonomyWriter taxoWriter, IndexWriter indexWriter, FacetsConfig config, boolean withContent) throws Exception {
     for (int i = 0; i < 2; i++) {
-      Document2 doc = indexWriter.newDocument();
+      Document doc = indexWriter.newDocument();
       if (withContent) {
         doc.addAtom("f", "a");
       }
@@ -697,7 +694,7 @@ public class TestTaxonomyFacetCounts ext
     int numDims = TestUtil.nextInt(random(), 1, 7);
     List<TestDoc> testDocs = getRandomDocs(tokens, numDocs, numDims);
     for(TestDoc testDoc : testDocs) {
-      Document2 doc = w.newDocument();
+      Document doc = w.newDocument();
       doc.addAtom("content", testDoc.content);
       for(int j=0;j<numDims;j++) {
         if (testDoc.dims[j] != null) {

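[Sketch] The hunks above walk TestTaxonomyFacetCounts over to the branch's writer-created Document API (newDocument, addLargeText, addAtom) while leaving the FacetField/FacetsConfig plumbing alone. A minimal sketch of the indexing pattern the converted test follows, assuming the branch API exactly as it appears in these hunks; field names and values here are illustrative:

    import java.io.IOException;

    import org.apache.lucene.document.Document;
    import org.apache.lucene.facet.FacetField;
    import org.apache.lucene.facet.FacetsConfig;
    import org.apache.lucene.facet.taxonomy.TaxonomyWriter;
    import org.apache.lucene.index.IndexWriter;

    /** Sketch only: one multi-valued faceted document, indexed the way the converted test does it. */
    class FacetIndexingSketch {
      static void indexOne(IndexWriter writer, TaxonomyWriter taxoWriter) throws IOException {
        FacetsConfig config = new FacetsConfig();
        config.setMultiValued("dim", true);                 // same config call as the test

        Document doc = writer.newDocument();                // documents now come from the writer
        doc.addLargeText("field", "text");                  // replaces newTextField(..., Field.Store.NO)
        doc.add(new FacetField("dim", "value one"));        // FacetField is added exactly as before
        doc.add(new FacetField("dim", "value two"));
        writer.addDocument(config.build(taxoWriter, doc));  // FacetsConfig still rewrites the facet fields
      }
    }
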
Modified: lucene/dev/branches/lucene6005/lucene/facet/src/test/org/apache/lucene/facet/taxonomy/TestTaxonomyFacetCounts2.java
URL: http://svn.apache.org/viewvc/lucene/dev/branches/lucene6005/lucene/facet/src/test/org/apache/lucene/facet/taxonomy/TestTaxonomyFacetCounts2.java?rev=1642535&r1=1642534&r2=1642535&view=diff
==============================================================================
--- lucene/dev/branches/lucene6005/lucene/facet/src/test/org/apache/lucene/facet/taxonomy/TestTaxonomyFacetCounts2.java (original)
+++ lucene/dev/branches/lucene6005/lucene/facet/src/test/org/apache/lucene/facet/taxonomy/TestTaxonomyFacetCounts2.java Sun Nov 30 11:07:09 2014
@@ -28,8 +28,6 @@ import java.util.Random;
 
 import org.apache.lucene.analysis.MockAnalyzer;
 import org.apache.lucene.document.Document;
-import org.apache.lucene.document.Field.Store;
-import org.apache.lucene.document.StringField;
 import org.apache.lucene.facet.FacetField;
 import org.apache.lucene.facet.FacetResult;
 import org.apache.lucene.facet.FacetTestCase;
@@ -118,10 +116,10 @@ public class TestTaxonomyFacetCounts2 ex
   }
 
   private static void addField(Document doc) {
-    doc.add(new StringField(A.field(), A.text(), Store.NO));
+    doc.addAtom(A.field(), A.text());
   }
 
-  private static void addFacets(Document doc, FacetsConfig config, boolean updateTermExpectedCounts) 
+  private static void addFacets(Document doc, FacetsConfig config, boolean updateTermExpectedCounts)
       throws IOException {
     List<FacetField> docCategories = randomCategories(random());
     for (FacetField ff : docCategories) {
@@ -155,7 +153,7 @@ public class TestTaxonomyFacetCounts2 ex
   private static void indexDocsNoFacets(IndexWriter indexWriter) throws IOException {
     int numDocs = atLeast(2);
     for (int i = 0; i < numDocs; i++) {
-      Document doc = new Document();
+      Document doc = indexWriter.newDocument();
       addField(doc);
       indexWriter.addDocument(doc);
     }
@@ -168,7 +166,7 @@ public class TestTaxonomyFacetCounts2 ex
     int numDocs = atLeast(random, 2);
     FacetsConfig config = getConfig();
     for (int i = 0; i < numDocs; i++) {
-      Document doc = new Document();
+      Document doc = indexWriter.newDocument();
       addFacets(doc, config, false);
       indexWriter.addDocument(config.build(taxoWriter, doc));
     }
@@ -181,7 +179,7 @@ public class TestTaxonomyFacetCounts2 ex
     int numDocs = atLeast(random, 2);
     FacetsConfig config = getConfig();
     for (int i = 0; i < numDocs; i++) {
-      Document doc = new Document();
+      Document doc = indexWriter.newDocument();
       addFacets(doc, config, true);
       addField(doc);
       indexWriter.addDocument(config.build(taxoWriter, doc));
@@ -195,7 +193,7 @@ public class TestTaxonomyFacetCounts2 ex
     int numDocs = atLeast(random, 2);
     FacetsConfig config = getConfig();
     for (int i = 0; i < numDocs; i++) {
-      Document doc = new Document();
+      Document doc = indexWriter.newDocument();
       boolean hasContent = random.nextBoolean();
       if (hasContent) {
         addField(doc);

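[Sketch] TestTaxonomyFacetCounts2 gets the same treatment: new Document() becomes indexWriter.newDocument(), and the StringField inside addField() collapses to a single addAtom call. A small sketch of that replacement, limited to the calls visible in the hunk; the id value is illustrative:

    import java.io.IOException;

    import org.apache.lucene.document.Document;
    import org.apache.lucene.index.IndexWriter;

    /** Sketch only: un-analyzed (atom) field on the branch Document. */
    class AtomFieldSketch {
      static void indexId(IndexWriter indexWriter, String id) throws IOException {
        // trunk style was: doc.add(new StringField("id", id, Store.NO));
        Document doc = indexWriter.newDocument();  // replaces "new Document()"
        doc.addAtom("id", id);                     // one call stands in for the old StringField
        indexWriter.addDocument(doc);
      }
    }
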
Modified: lucene/dev/branches/lucene6005/lucene/facet/src/test/org/apache/lucene/facet/taxonomy/TestTaxonomyFacetSumValueSource.java
URL: http://svn.apache.org/viewvc/lucene/dev/branches/lucene6005/lucene/facet/src/test/org/apache/lucene/facet/taxonomy/TestTaxonomyFacetSumValueSource.java?rev=1642535&r1=1642534&r2=1642535&view=diff
==============================================================================
--- lucene/dev/branches/lucene6005/lucene/facet/src/test/org/apache/lucene/facet/taxonomy/TestTaxonomyFacetSumValueSource.java (original)
+++ lucene/dev/branches/lucene6005/lucene/facet/src/test/org/apache/lucene/facet/taxonomy/TestTaxonomyFacetSumValueSource.java Sun Nov 30 11:07:09 2014
@@ -24,12 +24,7 @@ import java.util.List;
 import java.util.Map;
 
 import org.apache.lucene.analysis.MockAnalyzer;
-import org.apache.lucene.document.Document2;
 import org.apache.lucene.document.Document;
-import org.apache.lucene.document.Field;
-import org.apache.lucene.document.FloatDocValuesField;
-import org.apache.lucene.document.NumericDocValuesField;
-import org.apache.lucene.document.StringField;
 import org.apache.lucene.facet.FacetField;
 import org.apache.lucene.facet.FacetResult;
 import org.apache.lucene.facet.FacetTestCase;
@@ -79,28 +74,28 @@ public class TestTaxonomyFacetSumValueSo
 
     // Reused across documents, to add the necessary facet
     // fields:
-    Document doc = new Document();
-    doc.add(new NumericDocValuesField("num", 10));
+    Document doc = writer.newDocument();
+    doc.addInt("num", 10);
     doc.add(new FacetField("Author", "Bob"));
     writer.addDocument(config.build(taxoWriter, doc));
 
-    doc = new Document();
-    doc.add(new NumericDocValuesField("num", 20));
+    doc = writer.newDocument();
+    doc.addInt("num", 20);
     doc.add(new FacetField("Author", "Lisa"));
     writer.addDocument(config.build(taxoWriter, doc));
 
-    doc = new Document();
-    doc.add(new NumericDocValuesField("num", 30));
+    doc = writer.newDocument();
+    doc.addInt("num", 30);
     doc.add(new FacetField("Author", "Lisa"));
     writer.addDocument(config.build(taxoWriter, doc));
 
-    doc = new Document();
-    doc.add(new NumericDocValuesField("num", 40));
+    doc = writer.newDocument();
+    doc.addInt("num", 40);
     doc.add(new FacetField("Author", "Susan"));
     writer.addDocument(config.build(taxoWriter, doc));
 
-    doc = new Document();
-    doc.add(new NumericDocValuesField("num", 45));
+    doc = writer.newDocument();
+    doc.addInt("num", 45);
     doc.add(new FacetField("Author", "Frank"));
     writer.addDocument(config.build(taxoWriter, doc));
 
@@ -144,8 +139,8 @@ public class TestTaxonomyFacetSumValueSo
     RandomIndexWriter writer = new RandomIndexWriter(random(), dir);
     FacetsConfig config = new FacetsConfig();
 
-    Document doc = new Document();
-    doc.add(new NumericDocValuesField("num", 10));
+    Document doc = writer.newDocument();
+    doc.addInt("num", 10);
     doc.add(new FacetField("a", "foo1"));
     writer.addDocument(config.build(taxoWriter, doc));
 
@@ -153,8 +148,8 @@ public class TestTaxonomyFacetSumValueSo
       writer.commit();
     }
 
-    doc = new Document();
-    doc.add(new NumericDocValuesField("num", 20));
+    doc = writer.newDocument();
+    doc.addInt("num", 20);
     doc.add(new FacetField("a", "foo2"));
     doc.add(new FacetField("b", "bar1"));
     writer.addDocument(config.build(taxoWriter, doc));
@@ -163,8 +158,8 @@ public class TestTaxonomyFacetSumValueSo
       writer.commit();
     }
 
-    doc = new Document();
-    doc.add(new NumericDocValuesField("num", 30));
+    doc = writer.newDocument();
+    doc.addInt("num", 30);
     doc.add(new FacetField("a", "foo3"));
     doc.add(new FacetField("b", "bar2"));
     doc.add(new FacetField("c", "baz1"));
@@ -208,8 +203,8 @@ public class TestTaxonomyFacetSumValueSo
 
     RandomIndexWriter writer = new RandomIndexWriter(random(), dir);
 
-    Document doc = new Document();
-    doc.add(new NumericDocValuesField("num", 10));
+    Document doc = writer.newDocument();
+    doc.addInt("num", 10);
     doc.add(new FacetField("a", "foo1"));
     writer.addDocument(config.build(taxoWriter, doc));
 
@@ -257,9 +252,9 @@ public class TestTaxonomyFacetSumValueSo
     FacetsConfig config = new FacetsConfig();
 
     for(int i = atLeast(30); i > 0; --i) {
-      Document doc = new Document();
+      Document doc = iw.newDocument();
       if (random().nextBoolean()) { // don't match all documents
-        doc.add(new StringField("f", "v", Field.Store.NO));
+        doc.addAtom("f", "v");
       }
       doc.add(new FacetField("dim", "a"));
       iw.addDocument(config.build(taxoWriter, doc));
@@ -291,8 +286,8 @@ public class TestTaxonomyFacetSumValueSo
     IndexWriter iw = new IndexWriter(indexDir, newIndexWriterConfig(new MockAnalyzer(random())));
     FacetsConfig config = new FacetsConfig();
     for (int i = 0; i < 4; i++) {
-      Document doc = new Document();
-      doc.add(new NumericDocValuesField("price", (i+1)));
+      Document doc = iw.newDocument();
+      doc.addInt("price", (i+1));
       doc.add(new FacetField("a", Integer.toString(i % 2)));
       iw.addDocument(config.build(taxoWriter, doc));
     }
@@ -318,8 +313,8 @@ public class TestTaxonomyFacetSumValueSo
 
     FacetsConfig config = new FacetsConfig();
     for (int i = 0; i < 4; i++) {
-      Document doc = new Document();
-      doc.add(new NumericDocValuesField("price", (i+1)));
+      Document doc = iw.newDocument();
+      doc.addInt("price", (i+1));
       doc.add(new FacetField("a", Integer.toString(i % 2)));
       iw.addDocument(config.build(taxoWriter, doc));
     }
@@ -372,8 +367,8 @@ public class TestTaxonomyFacetSumValueSo
     //config.setRequireDimCount("a", true);
     
     for (int i = 0; i < 4; i++) {
-      Document doc = new Document();
-      doc.add(new NumericDocValuesField("price", (i+1)));
+      Document doc = iw.newDocument();
+      doc.addInt("price", (i+1));
       doc.add(new FacetField("a", Integer.toString(i % 2), "1"));
       iw.addDocument(config.build(taxoWriter, doc));
     }
@@ -402,8 +397,8 @@ public class TestTaxonomyFacetSumValueSo
     config.setIndexFieldName("b", "$b");
     
     for(int i = atLeast(30); i > 0; --i) {
-      Document doc = new Document();
-      doc.add(new StringField("f", "v", Field.Store.NO));
+      Document doc = iw.newDocument();
+      doc.addAtom("f", "v");
       doc.add(new FacetField("a", "1"));
       doc.add(new FacetField("b", "1"));
       iw.addDocument(config.build(taxoWriter, doc));
@@ -436,10 +431,10 @@ public class TestTaxonomyFacetSumValueSo
     int numDims = TestUtil.nextInt(random(), 1, 7);
     List<TestDoc> testDocs = getRandomDocs(tokens, numDocs, numDims);
     for(TestDoc testDoc : testDocs) {
-      Document doc = new Document();
-      doc.add(newStringField("content", testDoc.content, Field.Store.NO));
+      Document doc = w.newDocument();
+      doc.addAtom("content", testDoc.content);
       testDoc.value = random().nextFloat();
-      doc.add(new FloatDocValuesField("value", testDoc.value));
+      doc.addFloat("value", testDoc.value);
       for(int j=0;j<numDims;j++) {
         if (testDoc.dims[j] != null) {
           doc.add(new FacetField("dim" + j, testDoc.dims[j]));

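[Sketch] In TestTaxonomyFacetSumValueSource the interesting change is the numeric fields: NumericDocValuesField and FloatDocValuesField give way to addInt and addFloat on the branch Document. A sketch of the price-indexing loop under that assumption, using only calls shown in the diff:

    import java.io.IOException;

    import org.apache.lucene.document.Document;
    import org.apache.lucene.facet.FacetField;
    import org.apache.lucene.facet.FacetsConfig;
    import org.apache.lucene.facet.taxonomy.TaxonomyWriter;
    import org.apache.lucene.index.IndexWriter;

    /** Sketch only: numeric per-document values for ValueSource aggregation, branch style. */
    class ValueSourceFacetSketch {
      static void indexPrices(IndexWriter iw, TaxonomyWriter taxoWriter, FacetsConfig config)
          throws IOException {
        for (int i = 0; i < 4; i++) {
          Document doc = iw.newDocument();
          doc.addInt("price", i + 1);                           // was: new NumericDocValuesField("price", i + 1)
          doc.add(new FacetField("a", Integer.toString(i % 2)));
          iw.addDocument(config.build(taxoWriter, doc));
        }
      }
    }
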
Modified: lucene/dev/branches/lucene6005/lucene/facet/src/test/org/apache/lucene/facet/taxonomy/directory/TestConcurrentFacetedIndexing.java
URL: http://svn.apache.org/viewvc/lucene/dev/branches/lucene6005/lucene/facet/src/test/org/apache/lucene/facet/taxonomy/directory/TestConcurrentFacetedIndexing.java?rev=1642535&r1=1642534&r2=1642535&view=diff
==============================================================================
--- lucene/dev/branches/lucene6005/lucene/facet/src/test/org/apache/lucene/facet/taxonomy/directory/TestConcurrentFacetedIndexing.java (original)
+++ lucene/dev/branches/lucene6005/lucene/facet/src/test/org/apache/lucene/facet/taxonomy/directory/TestConcurrentFacetedIndexing.java Sun Nov 30 11:07:09 2014
@@ -10,9 +10,9 @@ import org.apache.lucene.facet.FacetFiel
 import org.apache.lucene.facet.FacetTestCase;
 import org.apache.lucene.facet.FacetsConfig;
 import org.apache.lucene.facet.taxonomy.FacetLabel;
-import org.apache.lucene.facet.taxonomy.writercache.TaxonomyWriterCache;
 import org.apache.lucene.facet.taxonomy.writercache.Cl2oTaxonomyWriterCache;
 import org.apache.lucene.facet.taxonomy.writercache.LruTaxonomyWriterCache;
+import org.apache.lucene.facet.taxonomy.writercache.TaxonomyWriterCache;
 import org.apache.lucene.index.IndexWriter;
 import org.apache.lucene.index.IndexWriterConfig.OpenMode;
 import org.apache.lucene.store.Directory;
@@ -100,7 +100,7 @@ public class TestConcurrentFacetedIndexi
           Random random = random();
           while (numDocs.decrementAndGet() > 0) {
             try {
-              Document doc = new Document();
+              Document doc = iw.newDocument();
               int numCats = random.nextInt(3) + 1; // 1-3
               while (numCats-- > 0) {
                 FacetField ff = newCategory();

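[Sketch] TestConcurrentFacetedIndexing now asks the shared IndexWriter for each new Document from its worker threads. A rough sketch of the per-thread loop; the category names are made up here, since the test's newCategory() helper is not part of this hunk:

    import java.io.IOException;
    import java.util.Random;

    import org.apache.lucene.document.Document;
    import org.apache.lucene.facet.FacetField;
    import org.apache.lucene.facet.FacetsConfig;
    import org.apache.lucene.facet.taxonomy.TaxonomyWriter;
    import org.apache.lucene.index.IndexWriter;

    /** Sketch only: the per-thread indexing loop, with illustrative category names. */
    class ConcurrentFacetSketch {
      static void indexFromWorker(IndexWriter iw, TaxonomyWriter tw, FacetsConfig config,
                                  Random random, int numDocs) throws IOException {
        for (int i = 0; i < numDocs; i++) {
          Document doc = iw.newDocument();      // the converted test calls this from several threads
          int numCats = random.nextInt(3) + 1;  // 1-3 categories, as in the test
          for (int c = 0; c < numCats; c++) {
            doc.add(new FacetField("dim", Integer.toString(random.nextInt(10))));  // illustrative category
          }
          iw.addDocument(config.build(tw, doc));
        }
      }
    }
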
Modified: lucene/dev/branches/lucene6005/lucene/facet/src/test/org/apache/lucene/facet/taxonomy/directory/TestDirectoryTaxonomyWriter.java
URL: http://svn.apache.org/viewvc/lucene/dev/branches/lucene6005/lucene/facet/src/test/org/apache/lucene/facet/taxonomy/directory/TestDirectoryTaxonomyWriter.java?rev=1642535&r1=1642534&r2=1642535&view=diff
==============================================================================
--- lucene/dev/branches/lucene6005/lucene/facet/src/test/org/apache/lucene/facet/taxonomy/directory/TestDirectoryTaxonomyWriter.java (original)
+++ lucene/dev/branches/lucene6005/lucene/facet/src/test/org/apache/lucene/facet/taxonomy/directory/TestDirectoryTaxonomyWriter.java Sun Nov 30 11:07:09 2014
@@ -8,7 +8,6 @@ import java.util.concurrent.ConcurrentHa
 import java.util.concurrent.atomic.AtomicInteger;
 
 import org.apache.lucene.analysis.MockAnalyzer;
-import org.apache.lucene.document.Document2;
 import org.apache.lucene.document.Document;
 import org.apache.lucene.facet.DrillDownQuery;
 import org.apache.lucene.facet.FacetField;
@@ -449,7 +448,7 @@ public class TestDirectoryTaxonomyWriter
     FacetField ff = new FacetField("dim", bigs);
     FacetLabel cp = new FacetLabel("dim", bigs);
     ordinal = taxoWriter.addCategory(cp);
-    Document2 doc = indexWriter.newDocument();
+    Document doc = indexWriter.newDocument();
     doc.add(ff);
     indexWriter.addDocument(config.build(taxoWriter, doc));
 

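[Sketch] TestDirectoryTaxonomyWriter keeps the addCategory/FacetLabel flow and only swaps Document2 for the writer-created Document. A compact sketch of that flow, assuming the API as shown; the label argument stands in for the test's generated path component:

    import java.io.IOException;

    import org.apache.lucene.document.Document;
    import org.apache.lucene.facet.FacetField;
    import org.apache.lucene.facet.FacetsConfig;
    import org.apache.lucene.facet.taxonomy.FacetLabel;
    import org.apache.lucene.facet.taxonomy.TaxonomyWriter;
    import org.apache.lucene.index.IndexWriter;

    /** Sketch only: register a category ordinal, then index a document that uses the same path. */
    class TaxonomyWriterSketch {
      static int addAndIndex(IndexWriter indexWriter, TaxonomyWriter taxoWriter,
                             FacetsConfig config, String label) throws IOException {
        int ordinal = taxoWriter.addCategory(new FacetLabel("dim", label));  // ordinal assigned by the taxonomy
        Document doc = indexWriter.newDocument();
        doc.add(new FacetField("dim", label));
        indexWriter.addDocument(config.build(taxoWriter, doc));
        return ordinal;
      }
    }
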
Modified: lucene/dev/branches/lucene6005/lucene/grouping/src/test/org/apache/lucene/search/grouping/AllGroupHeadsCollectorTest.java
URL: http://svn.apache.org/viewvc/lucene/dev/branches/lucene6005/lucene/grouping/src/test/org/apache/lucene/search/grouping/AllGroupHeadsCollectorTest.java?rev=1642535&r1=1642534&r2=1642535&view=diff
==============================================================================
--- lucene/dev/branches/lucene6005/lucene/grouping/src/test/org/apache/lucene/search/grouping/AllGroupHeadsCollectorTest.java (original)
+++ lucene/dev/branches/lucene6005/lucene/grouping/src/test/org/apache/lucene/search/grouping/AllGroupHeadsCollectorTest.java Sun Nov 30 11:07:09 2014
@@ -28,12 +28,7 @@ import java.util.Locale;
 import java.util.Map;
 
 import org.apache.lucene.analysis.MockAnalyzer;
-import org.apache.lucene.document.BinaryDocValuesField;
-import org.apache.lucene.document.Document2;
 import org.apache.lucene.document.Document;
-import org.apache.lucene.document.Field;
-import org.apache.lucene.document.NumericDocValuesField;
-import org.apache.lucene.document.SortedDocValuesField;
 import org.apache.lucene.index.DirectoryReader;
 import org.apache.lucene.index.DocValuesType;
 import org.apache.lucene.index.IndexReader;
@@ -66,69 +61,68 @@ public class AllGroupHeadsCollectorTest 
         random(),
         dir,
         newIndexWriterConfig(new MockAnalyzer(random())).setMergePolicy(newLogMergePolicy()));
-    DocValuesType valueType = DocValuesType.SORTED;
 
     // 0
-    Document doc = new Document();
-    addGroupField(doc, groupField, "author1", valueType);
-    doc.add(newTextField("content", "random text", Field.Store.NO));
-    doc.add(new NumericDocValuesField("id_1", 1));
-    doc.add(new SortedDocValuesField("id_2", new BytesRef("1")));
+    Document doc = w.newDocument();
+    addGroupField(doc, groupField, "author1");
+    doc.addLargeText("content", "random text");
+    doc.addInt("id_1", 1);
+    doc.addAtom("id_2", new BytesRef("1"));
     w.addDocument(doc);
 
     // 1
-    doc = new Document();
-    addGroupField(doc, groupField, "author1", valueType);
-    doc.add(newTextField("content", "some more random text blob", Field.Store.NO));
-    doc.add(new NumericDocValuesField("id_1", 2));
-    doc.add(new SortedDocValuesField("id_2", new BytesRef("2")));
+    doc = w.newDocument();
+    addGroupField(doc, groupField, "author1");
+    doc.addLargeText("content", "some more random text blob");
+    doc.addInt("id_1", 2);
+    doc.addAtom("id_2", new BytesRef("2"));
     w.addDocument(doc);
 
     // 2
-    doc = new Document();
-    addGroupField(doc, groupField, "author1", valueType);
-    doc.add(newTextField("content", "some more random textual data", Field.Store.NO));
-    doc.add(new NumericDocValuesField("id_1", 3));
-    doc.add(new SortedDocValuesField("id_2", new BytesRef("3")));
+    doc = w.newDocument();
+    addGroupField(doc, groupField, "author1");
+    doc.addLargeText("content", "some more random textual data");
+    doc.addInt("id_1", 3);
+    doc.addAtom("id_2", new BytesRef("3"));
     w.addDocument(doc);
     w.commit(); // To ensure a second segment
 
     // 3
-    doc = new Document();
-    addGroupField(doc, groupField, "author2", valueType);
-    doc.add(newTextField("content", "some random text", Field.Store.NO));
-    doc.add(new NumericDocValuesField("id_1", 4));
-    doc.add(new SortedDocValuesField("id_2", new BytesRef("4")));
+    doc = w.newDocument();
+    addGroupField(doc, groupField, "author2");
+    doc.addLargeText("content", "some random text");
+    doc.addInt("id_1", 4);
+    doc.addAtom("id_2", new BytesRef("4"));
     w.addDocument(doc);
 
     // 4
-    doc = new Document();
-    addGroupField(doc, groupField, "author3", valueType);
-    doc.add(newTextField("content", "some more random text", Field.Store.NO));
-    doc.add(new NumericDocValuesField("id_1", 5));
-    doc.add(new SortedDocValuesField("id_2", new BytesRef("5")));
+    doc = w.newDocument();
+    addGroupField(doc, groupField, "author3");
+    doc.addLargeText("content", "some more random text");
+    doc.addInt("id_1", 5);
+    doc.addAtom("id_2", new BytesRef("5"));
     w.addDocument(doc);
 
     // 5
-    doc = new Document();
-    addGroupField(doc, groupField, "author3", valueType);
-    doc.add(newTextField("content", "random blob", Field.Store.NO));
-    doc.add(new NumericDocValuesField("id_1", 6));
-    doc.add(new SortedDocValuesField("id_2", new BytesRef("6")));
+    doc = w.newDocument();
+    addGroupField(doc, groupField, "author3");
+    doc.addLargeText("content", "random blob");
+    doc.addInt("id_1", 6);
+    doc.addAtom("id_2", new BytesRef("6"));
     w.addDocument(doc);
 
     // 6 -- no author field
-    doc = new Document();
-    doc.add(newTextField("content", "random word stuck in alot of other text", Field.Store.NO));
-    doc.add(new NumericDocValuesField("id_1", 6));
-    doc.add(new SortedDocValuesField("id_2", new BytesRef("6")));
+    doc = w.newDocument();
+    doc.addLargeText("content", "random word stuck in alot of other text");
+    doc.addInt("id_1", 6);
+    doc.addAtom("id_2", new BytesRef("6"));
     w.addDocument(doc);
 
     // 7 -- no author field
-    doc = new Document();
-    doc.add(newTextField("content", "random word stuck in alot of other text", Field.Store.NO));
-    doc.add(new NumericDocValuesField("id_1", 7));
-    doc.add(new SortedDocValuesField("id_2", new BytesRef("7")));
+    doc = w.newDocument();
+    doc.addLargeText("content", "random word stuck in alot of other text");
+    doc.addInt("id_1", 7);
+    doc.addAtom("id_2", new BytesRef("7"));
     w.addDocument(doc);
 
     IndexReader reader = w.getReader();
@@ -246,7 +240,7 @@ public class AllGroupHeadsCollectorTest 
 
         groupDocs[i] = groupDoc;
 
-        Document2 doc = w.newDocument();
+        Document doc = w.newDocument();
         if (groupDoc.group != null) {
           doc.addAtom("group", new BytesRef(groupDoc.group.utf8ToString()));
         }
@@ -494,19 +488,8 @@ public class AllGroupHeadsCollectorTest 
     return collector;
   }
 
-  private void addGroupField(Document doc, String groupField, String value, DocValuesType valueType) {
-    Field valuesField = null;
-    switch(valueType) {
-      case BINARY:
-        valuesField = new BinaryDocValuesField(groupField, new BytesRef(value));
-        break;
-      case SORTED:
-        valuesField = new SortedDocValuesField(groupField, new BytesRef(value));
-        break;
-      default:
-        fail("unhandled type");
-    }
-    doc.add(valuesField);
+  private void addGroupField(Document doc, String groupField, String value) {
+    doc.addBinary(groupField, new BytesRef(value));
   }
 
   private static class GroupDoc {

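[Sketch] AllGroupsCollectorTest shows the grouping-side pattern: a single addAtom call where the old test added both a TextField and a SortedDocValuesField for the group field, plus addLargeText and addStored for the body and id. A sketch of one such document, assuming the branch defaults implied by the hunk; the author/content/id values are illustrative:

    import java.io.IOException;

    import org.apache.lucene.document.Document;
    import org.apache.lucene.index.RandomIndexWriter;

    /** Sketch only: one grouped document, field for field as in the converted test. */
    class GroupingDocSketch {
      static void addBook(RandomIndexWriter w, String author, String content, String id)
          throws IOException {
        Document doc = w.newDocument();
        doc.addAtom("author", author);         // one call replaces TextField + SortedDocValuesField
        doc.addLargeText("content", content);  // was: new TextField("content", ..., Field.Store.YES)
        doc.addStored("id", id);               // was: new Field("id", ..., stored-only FieldType)
        w.addDocument(doc);
      }
    }
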
Modified: lucene/dev/branches/lucene6005/lucene/grouping/src/test/org/apache/lucene/search/grouping/AllGroupsCollectorTest.java
URL: http://svn.apache.org/viewvc/lucene/dev/branches/lucene6005/lucene/grouping/src/test/org/apache/lucene/search/grouping/AllGroupsCollectorTest.java?rev=1642535&r1=1642534&r2=1642535&view=diff
==============================================================================
--- lucene/dev/branches/lucene6005/lucene/grouping/src/test/org/apache/lucene/search/grouping/AllGroupsCollectorTest.java (original)
+++ lucene/dev/branches/lucene6005/lucene/grouping/src/test/org/apache/lucene/search/grouping/AllGroupsCollectorTest.java Sun Nov 30 11:07:09 2014
@@ -17,12 +17,10 @@ package org.apache.lucene.search.groupin
  * limitations under the License.
  */
 
+import java.util.HashMap;
+
 import org.apache.lucene.analysis.MockAnalyzer;
 import org.apache.lucene.document.Document;
-import org.apache.lucene.document.Field;
-import org.apache.lucene.document.FieldType;
-import org.apache.lucene.document.SortedDocValuesField;
-import org.apache.lucene.document.TextField;
 import org.apache.lucene.index.RandomIndexWriter;
 import org.apache.lucene.index.Term;
 import org.apache.lucene.queries.function.ValueSource;
@@ -32,18 +30,13 @@ import org.apache.lucene.search.TermQuer
 import org.apache.lucene.search.grouping.function.FunctionAllGroupsCollector;
 import org.apache.lucene.search.grouping.term.TermAllGroupsCollector;
 import org.apache.lucene.store.Directory;
-import org.apache.lucene.util.BytesRef;
 import org.apache.lucene.util.LuceneTestCase;
 
-import java.util.HashMap;
-
 public class AllGroupsCollectorTest extends LuceneTestCase {
 
   public void testTotalGroupCount() throws Exception {
 
     final String groupField = "author";
-    FieldType customType = new FieldType();
-    customType.setStored(true);
 
     Directory dir = newDirectory();
     RandomIndexWriter w = new RandomIndexWriter(
@@ -52,52 +45,52 @@ public class AllGroupsCollectorTest exte
         newIndexWriterConfig(new MockAnalyzer(random())).setMergePolicy(newLogMergePolicy()));
 
     // 0
-    Document doc = new Document();
+    Document doc = w.newDocument();
     addGroupField(doc, groupField, "author1");
-    doc.add(new TextField("content", "random text", Field.Store.YES));
-    doc.add(new Field("id", "1", customType));
+    doc.addLargeText("content", "random text");
+    doc.addStored("id", "1");
     w.addDocument(doc);
 
     // 1
-    doc = new Document();
+    doc = w.newDocument();
     addGroupField(doc, groupField, "author1");
-    doc.add(new TextField("content", "some more random text blob", Field.Store.YES));
-    doc.add(new Field("id", "2", customType));
+    doc.addLargeText("content", "some more random text blob");
+    doc.addStored("id", "2");
     w.addDocument(doc);
 
     // 2
-    doc = new Document();
+    doc = w.newDocument();
     addGroupField(doc, groupField, "author1");
-    doc.add(new TextField("content", "some more random textual data", Field.Store.YES));
-    doc.add(new Field("id", "3", customType));
+    doc.addLargeText("content", "some more random textual data");
+    doc.addStored("id", "3");
     w.addDocument(doc);
     w.commit(); // To ensure a second segment
 
     // 3
-    doc = new Document();
+    doc = w.newDocument();
     addGroupField(doc, groupField, "author2");
-    doc.add(new TextField("content", "some random text", Field.Store.YES));
-    doc.add(new Field("id", "4", customType));
+    doc.addLargeText("content", "some random text");
+    doc.addStored("id", "4");
     w.addDocument(doc);
 
     // 4
-    doc = new Document();
+    doc = w.newDocument();
     addGroupField(doc, groupField, "author3");
-    doc.add(new TextField("content", "some more random text", Field.Store.YES));
-    doc.add(new Field("id", "5", customType));
+    doc.addLargeText("content", "some more random text");
+    doc.addStored("id", "5");
     w.addDocument(doc);
 
     // 5
-    doc = new Document();
+    doc = w.newDocument();
     addGroupField(doc, groupField, "author3");
-    doc.add(new TextField("content", "random blob", Field.Store.YES));
-    doc.add(new Field("id", "6", customType));
+    doc.addLargeText("content", "random blob");
+    doc.addStored("id", "6");
     w.addDocument(doc);
 
     // 6 -- no author field
-    doc = new Document();
-    doc.add(new TextField("content", "random word stuck in alot of other text", Field.Store.YES));
-    doc.add(new Field("id", "6", customType));
+    doc = w.newDocument();
+    doc.addLargeText("content", "random word stuck in alot of other text");
+    doc.addStored("id", "6");
     w.addDocument(doc);
 
     IndexSearcher indexSearcher = newSearcher(w.getReader());
@@ -120,8 +113,7 @@ public class AllGroupsCollectorTest exte
   }
 
   private void addGroupField(Document doc, String groupField, String value) {
-    doc.add(new TextField(groupField, value, Field.Store.YES));
-    doc.add(new SortedDocValuesField(groupField, new BytesRef(value)));
+    doc.addAtom(groupField, value);
   }
 
   private AbstractAllGroupsCollector<?> createRandomCollector(String groupField) {

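[Sketch] DistinctValuesCollectorTest also exercises the BytesRef overload of addAtom and reads stored values back through the reader's Document. A sketch combining both halves as the converted test does, with illustrative field values; the dump at the end mirrors the VERBOSE loop in the hunk:

    import java.io.IOException;
    import java.util.Locale;

    import org.apache.lucene.document.Document;
    import org.apache.lucene.index.DirectoryReader;
    import org.apache.lucene.index.RandomIndexWriter;
    import org.apache.lucene.util.BytesRef;

    /** Sketch only: index with the BytesRef addAtom overload, then read stored values back. */
    class DistinctValuesSketch {
      static void indexAndDump(RandomIndexWriter w, String author, String content, int i)
          throws IOException {
        Document doc = w.newDocument();
        doc.addAtom("id", String.format(Locale.ROOT, "%09d", i));  // replaces StringField + SortedDocValuesField
        if (author != null) {
          doc.addAtom("author", new BytesRef(author));             // BytesRef overload, as in the addField helper
        }
        doc.addLargeText("content", content);
        w.addDocument(doc);

        DirectoryReader reader = w.getReader();
        for (int docID = 0; docID < reader.maxDoc(); docID++) {
          Document stored = reader.document(docID);                // the reader hands back the same Document type
          System.out.println("docID=" + docID + " id=" + stored.getString("id"));
        }
        reader.close();
      }
    }
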
Modified: lucene/dev/branches/lucene6005/lucene/grouping/src/test/org/apache/lucene/search/grouping/DistinctValuesCollectorTest.java
URL: http://svn.apache.org/viewvc/lucene/dev/branches/lucene6005/lucene/grouping/src/test/org/apache/lucene/search/grouping/DistinctValuesCollectorTest.java?rev=1642535&r1=1642534&r2=1642535&view=diff
==============================================================================
--- lucene/dev/branches/lucene6005/lucene/grouping/src/test/org/apache/lucene/search/grouping/DistinctValuesCollectorTest.java (original)
+++ lucene/dev/branches/lucene6005/lucene/grouping/src/test/org/apache/lucene/search/grouping/DistinctValuesCollectorTest.java Sun Nov 30 11:07:09 2014
@@ -32,12 +32,7 @@ import java.util.Random;
 import java.util.Set;
 
 import org.apache.lucene.analysis.MockAnalyzer;
-import org.apache.lucene.document.Document2;
 import org.apache.lucene.document.Document;
-import org.apache.lucene.document.Field;
-import org.apache.lucene.document.SortedDocValuesField;
-import org.apache.lucene.document.StringField;
-import org.apache.lucene.document.TextField;
 import org.apache.lucene.index.DirectoryReader;
 import org.apache.lucene.index.RandomIndexWriter;
 import org.apache.lucene.index.Term;
@@ -71,58 +66,58 @@ public class DistinctValuesCollectorTest
         random,
         dir,
         newIndexWriterConfig(new MockAnalyzer(random)).setMergePolicy(newLogMergePolicy()));
-    Document doc = new Document();
+    Document doc = w.newDocument();
     addField(doc, groupField, "1");
     addField(doc, countField, "1");
-    doc.add(new TextField("content", "random text", Field.Store.NO));
-    doc.add(new StringField("id", "1", Field.Store.NO));
+    doc.addLargeText("content", "random text");
+    doc.addAtom("id", "1");
     w.addDocument(doc);
 
     // 1
-    doc = new Document();
+    doc = w.newDocument();
     addField(doc, groupField, "1");
     addField(doc, countField, "1");
-    doc.add(new TextField("content", "some more random text blob", Field.Store.NO));
-    doc.add(new StringField("id", "2", Field.Store.NO));
+    doc.addLargeText("content", "some more random text blob");
+    doc.addAtom("id", "2");
     w.addDocument(doc);
 
     // 2
-    doc = new Document();
+    doc = w.newDocument();
     addField(doc, groupField, "1");
     addField(doc, countField, "2");
-    doc.add(new TextField("content", "some more random textual data", Field.Store.NO));
-    doc.add(new StringField("id", "3", Field.Store.NO));
+    doc.addLargeText("content", "some more random textual data");
+    doc.addAtom("id", "3");
     w.addDocument(doc);
     w.commit(); // To ensure a second segment
 
     // 3 -- no count field
-    doc = new Document();
+    doc = w.newDocument();
     addField(doc, groupField, "2");
-    doc.add(new TextField("content", "some random text", Field.Store.NO));
-    doc.add(new StringField("id", "4", Field.Store.NO));
+    doc.addLargeText("content", "some random text");
+    doc.addAtom("id", "4");
     w.addDocument(doc);
 
     // 4
-    doc = new Document();
+    doc = w.newDocument();
     addField(doc, groupField, "3");
     addField(doc, countField, "1");
-    doc.add(new TextField("content", "some more random text", Field.Store.NO));
-    doc.add(new StringField("id", "5", Field.Store.NO));
+    doc.addLargeText("content", "some more random text");
+    doc.addAtom("id", "5");
     w.addDocument(doc);
 
     // 5
-    doc = new Document();
+    doc = w.newDocument();
     addField(doc, groupField, "3");
     addField(doc, countField, "1");
-    doc.add(new TextField("content", "random blob", Field.Store.NO));
-    doc.add(new StringField("id", "6", Field.Store.NO));
+    doc.addLargeText("content", "random blob");
+    doc.addAtom("id", "6");
     w.addDocument(doc);
 
     // 6 -- no author field
-    doc = new Document();
-    doc.add(new TextField("content", "random word stuck in alot of other text", Field.Store.YES));
+    doc = w.newDocument();
+    doc.addLargeText("content", "random word stuck in alot of other text");
     addField(doc, countField, "1");
-    doc.add(new StringField("id", "6", Field.Store.NO));
+    doc.addAtom("id", "6");
     w.addDocument(doc);
 
     IndexSearcher indexSearcher = newSearcher(w.getReader());
@@ -348,7 +343,7 @@ public class DistinctValuesCollectorTest
   }
 
   private void addField(Document doc, String field, String value) {
-    doc.add(new SortedDocValuesField(field, new BytesRef(value)));
+    doc.addAtom(field, new BytesRef(value));
   }
 
   @SuppressWarnings({"unchecked","rawtypes"})
@@ -438,23 +433,22 @@ public class DistinctValuesCollectorTest
       }
       countsVals.add(countValue);
 
-      Document doc = new Document();
-      doc.add(new StringField("id", String.format(Locale.ROOT, "%09d", i), Field.Store.YES));
-      doc.add(new SortedDocValuesField("id", new BytesRef(String.format(Locale.ROOT, "%09d", i))));
+      Document doc = w.newDocument();
+      doc.addAtom("id", String.format(Locale.ROOT, "%09d", i));
       if (groupValue != null) {
         addField(doc, groupField, groupValue);
       }
       if (countValue != null) {
         addField(doc, countField, countValue);
       }
-      doc.add(new TextField("content", content, Field.Store.YES));
+      doc.addLargeText("content", content);
       w.addDocument(doc);
     }
 
     DirectoryReader reader = w.getReader();
     if (VERBOSE) {
       for(int docID=0;docID<reader.maxDoc();docID++) {
-        Document2 doc = reader.document(docID);
+        Document doc = reader.document(docID);
         System.out.println("docID=" + docID + " id=" + doc.getString("id") + " content=" + doc.getString("content") + " author=" + doc.getString("author") + " publisher=" + doc.getString("publisher"));
       }
     }