Posted to commits@lucene.apache.org by sa...@apache.org on 2015/06/19 00:11:48 UTC

svn commit: r1686329 - in /lucene/dev/branches/branch_5x: ./ solr/ solr/core/ solr/core/src/java/org/apache/solr/rest/schema/ solr/core/src/java/org/apache/solr/schema/ solr/core/src/test/org/apache/solr/rest/schema/

Author: sarowe
Date: Thu Jun 18 22:11:48 2015
New Revision: 1686329

URL: http://svn.apache.org/r1686329
Log:
SOLR-7697: Schema API doesn't take class or luceneMatchVersion attributes into account for the analyzer when adding a new field type. (merged trunk r1686327)

Modified:
    lucene/dev/branches/branch_5x/   (props changed)
    lucene/dev/branches/branch_5x/solr/   (props changed)
    lucene/dev/branches/branch_5x/solr/CHANGES.txt   (contents, props changed)
    lucene/dev/branches/branch_5x/solr/core/   (props changed)
    lucene/dev/branches/branch_5x/solr/core/src/java/org/apache/solr/rest/schema/FieldTypeXmlAdapter.java
    lucene/dev/branches/branch_5x/solr/core/src/java/org/apache/solr/schema/FieldType.java
    lucene/dev/branches/branch_5x/solr/core/src/test/org/apache/solr/rest/schema/TestBulkSchemaAPI.java

Modified: lucene/dev/branches/branch_5x/solr/CHANGES.txt
URL: http://svn.apache.org/viewvc/lucene/dev/branches/branch_5x/solr/CHANGES.txt?rev=1686329&r1=1686328&r2=1686329&view=diff
==============================================================================
--- lucene/dev/branches/branch_5x/solr/CHANGES.txt (original)
+++ lucene/dev/branches/branch_5x/solr/CHANGES.txt Thu Jun 18 22:11:48 2015
@@ -108,6 +108,9 @@ Bug Fixes
 
 * SOLR-7689: ReRankQuery rewrite method can change the QueryResultKey causing cache misses.
   (Emad Nashed, Yonik Seeley, Joel Bernstein)
+  
+* SOLR-7697: Schema API doesn't take class or luceneMatchVersion attributes into
+  account for the analyzer when adding a new field type. (Marius Grama, Steve Rowe)
 
 Optimizations
 ----------------------

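For illustration only (not part of this commit): a minimal Java sketch of the kind of Schema API
request this fix is about, posting an add-field-type command whose analyzer is specified by class
and luceneMatchVersion. The host, port, and collection name ("gettingstarted") are assumptions for
the example; the payload mirrors the new test added below.

import java.io.OutputStream;
import java.net.HttpURLConnection;
import java.net.URL;
import java.nio.charset.StandardCharsets;

public class AddFieldTypeWithAnalyzerClass {
  public static void main(String[] args) throws Exception {
    // Schema API command: add a field type whose analyzer is given as a class
    // (plus luceneMatchVersion) rather than as tokenizer/filter components.
    String payload =
        "{\"add-field-type\":{"
      +   "\"name\":\"myNewTextFieldWithAnalyzerClass\","
      +   "\"class\":\"solr.TextField\","
      +   "\"analyzer\":{"
      +     "\"luceneMatchVersion\":\"5.0.0\","
      +     "\"class\":\"org.apache.lucene.analysis.core.WhitespaceAnalyzer\""
      +   "}}}";

    HttpURLConnection conn = (HttpURLConnection) new URL(
        "http://localhost:8983/solr/gettingstarted/schema?wt=json").openConnection();
    conn.setRequestMethod("POST");
    conn.setRequestProperty("Content-Type", "application/json");
    conn.setDoOutput(true);
    try (OutputStream out = conn.getOutputStream()) {
      out.write(payload.getBytes(StandardCharsets.UTF_8));
    }
    // Before this fix the Schema API did not take the analyzer's class or luceneMatchVersion
    // into account; with it, this request succeeds and both attributes are preserved.
    System.out.println("HTTP " + conn.getResponseCode());
  }
}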
Modified: lucene/dev/branches/branch_5x/solr/core/src/java/org/apache/solr/rest/schema/FieldTypeXmlAdapter.java
URL: http://svn.apache.org/viewvc/lucene/dev/branches/branch_5x/solr/core/src/java/org/apache/solr/rest/schema/FieldTypeXmlAdapter.java?rev=1686329&r1=1686328&r2=1686329&view=diff
==============================================================================
--- lucene/dev/branches/branch_5x/solr/core/src/java/org/apache/solr/rest/schema/FieldTypeXmlAdapter.java (original)
+++ lucene/dev/branches/branch_5x/solr/core/src/java/org/apache/solr/rest/schema/FieldTypeXmlAdapter.java Thu Jun 18 22:11:48 2015
@@ -83,33 +83,45 @@ public class FieldTypeXmlAdapter {
   }
   
   @SuppressWarnings("unchecked")
-  protected static Element createAnalyzerElement(Document doc, String type, Map<String,?> json) {
-    Element analyzer = doc.createElement("analyzer");
+  protected static Element createAnalyzerElement(Document doc, String type, Map<String,?> analyzer) {
+    Element analyzerElem = appendAttrs(doc.createElement("analyzer"), analyzer);
     if (type != null)
-      analyzer.setAttribute("type", type);
+      analyzerElem.setAttribute("type", type);
+
+    List<Map<String,?>> charFilters = (List<Map<String,?>>)analyzer.get("charFilters");
+    Map<String,?> tokenizer = (Map<String,?>)analyzer.get("tokenizer");
+    List<Map<String,?>> filters = (List<Map<String,?>>)analyzer.get("filters");
+
+    if (analyzer.get("class") == null) {
+      if (charFilters != null)
+        appendFilterElements(doc, analyzerElem, "charFilter", charFilters);
+
+      if (tokenizer == null)
+        throw new SolrException(ErrorCode.BAD_REQUEST, "Analyzer must define a tokenizer!");
+
+      if (tokenizer.get("class") == null)
+        throw new SolrException(ErrorCode.BAD_REQUEST, "Every tokenizer must define a class property!");
+
+      analyzerElem.appendChild(appendAttrs(doc.createElement("tokenizer"), tokenizer));
+
+      if (filters != null)
+        appendFilterElements(doc, analyzerElem, "filter", filters);
+
+    } else { // When analyzer class is specified: char filters, tokenizers, and filters are disallowed
+      if (charFilters != null)
+        throw new SolrException
+            (ErrorCode.BAD_REQUEST, "An analyzer with a class property may not define any char filters!");
+
+      if (tokenizer != null)
+        throw new SolrException
+            (ErrorCode.BAD_REQUEST, "An analyzer with a class property may not define a tokenizer!");
+
+      if (filters != null)
+        throw new SolrException
+            (ErrorCode.BAD_REQUEST, "An analyzer with a class property may not define any filters!");
+    }
     
-    // charFilter(s)
-    List<Map<String,?>> charFilters = (List<Map<String,?>>)json.get("charFilters");
-    if (charFilters != null)
-      appendFilterElements(doc, analyzer, "charFilter", charFilters);
-    
-    // tokenizer
-    Map<String,?> tokenizerJson = (Map<String,?>)json.get("tokenizer");
-    if (tokenizerJson == null)
-      throw new SolrException(ErrorCode.BAD_REQUEST, "Analyzer must define a tokenizer!");
-    
-    String tokClass = (String)tokenizerJson.get("class");
-    if (tokClass == null)
-      throw new SolrException(ErrorCode.BAD_REQUEST, "Every tokenizer must define a class property!");
-    
-    analyzer.appendChild(appendAttrs(doc.createElement("tokenizer"), tokenizerJson));
-    
-    // filter(s)
-    List<Map<String,?>> filters = (List<Map<String,?>>)json.get("filters");
-    if (filters != null)
-      appendFilterElements(doc, analyzer, "filter", filters);
-    
-    return analyzer;
+    return analyzerElem;
   }
   
   protected static void appendFilterElements(Document doc, Element analyzer, String filterName, List<Map<String,?>> filters) {

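A standalone DOM sketch (not Solr code) of the apparent effect of the change above: for a
class-based analyzer spec, appendAttrs() now copies the analyzer map's entries onto the
<analyzer> element itself, so class and luceneMatchVersion survive the JSON-to-XML conversion.
The "index" analyzer type is an illustrative assumption.

import javax.xml.parsers.DocumentBuilderFactory;
import org.w3c.dom.Document;
import org.w3c.dom.Element;

public class ClassBasedAnalyzerElementSketch {
  public static void main(String[] args) throws Exception {
    Document doc = DocumentBuilderFactory.newInstance().newDocumentBuilder().newDocument();

    // Rough equivalent of appendAttrs(doc.createElement("analyzer"), analyzer) for a spec like
    // {"class":"org.apache.lucene.analysis.core.WhitespaceAnalyzer","luceneMatchVersion":"5.0.0"}
    Element analyzerElem = doc.createElement("analyzer");
    analyzerElem.setAttribute("class", "org.apache.lucene.analysis.core.WhitespaceAnalyzer");
    analyzerElem.setAttribute("luceneMatchVersion", "5.0.0");
    analyzerElem.setAttribute("type", "index"); // optional analyzer type, if one was given
    doc.appendChild(analyzerElem);

    // Resulting element, with no tokenizer/filter children (those are rejected for a
    // class-based analyzer):
    //   <analyzer class="org.apache.lucene.analysis.core.WhitespaceAnalyzer"
    //             luceneMatchVersion="5.0.0" type="index"/>
  }
}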
Modified: lucene/dev/branches/branch_5x/solr/core/src/java/org/apache/solr/schema/FieldType.java
URL: http://svn.apache.org/viewvc/lucene/dev/branches/branch_5x/solr/core/src/java/org/apache/solr/schema/FieldType.java?rev=1686329&r1=1686328&r2=1686329&view=diff
==============================================================================
--- lucene/dev/branches/branch_5x/solr/core/src/java/org/apache/solr/schema/FieldType.java (original)
+++ lucene/dev/branches/branch_5x/solr/core/src/java/org/apache/solr/schema/FieldType.java Thu Jun 18 22:11:48 2015
@@ -52,6 +52,7 @@ import org.apache.lucene.util.BytesRef;
 import org.apache.lucene.util.BytesRefBuilder;
 import org.apache.lucene.util.CharsRef;
 import org.apache.lucene.util.CharsRefBuilder;
+import org.apache.lucene.util.Version;
 import org.apache.solr.analysis.SolrAnalyzer;
 import org.apache.solr.analysis.TokenizerChain;
 import org.apache.solr.common.SolrException;
@@ -870,9 +871,9 @@ public abstract class FieldType extends
   }
 
   /** 
-   * Returns a description of the given analyzer, by either reporting the Analyzer name
-   * if it's not a TokenizerChain, or if it is, querying each analysis factory for its
-   * name and args.
+   * Returns a description of the given analyzer, by either reporting the Analyzer class
+   * name (and optionally luceneMatchVersion) if it's not a TokenizerChain, or if it is,
+   * querying each analysis factory for its name and args.
    */
   protected static SimpleOrderedMap<Object> getAnalyzerProperties(Analyzer analyzer) {
     SimpleOrderedMap<Object> analyzerProps = new SimpleOrderedMap<>();
@@ -950,6 +951,9 @@ public abstract class FieldType extends
       }
     } else { // analyzer is not instanceof TokenizerChain
       analyzerProps.add(CLASS_NAME, analyzer.getClass().getName());
+      if (analyzer.getVersion() != Version.LATEST) {
+        analyzerProps.add(LUCENE_MATCH_VERSION_PARAM, analyzer.getVersion().toString());
+      }
     }
     return analyzerProps;
   }

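A minimal sketch (not from this commit) of the condition the new getAnalyzerProperties() code
checks, assuming Lucene 5.x where Analyzer exposes getVersion()/setVersion() and the deprecated
Version.LUCENE_5_0_0 constant is still available: luceneMatchVersion is only reported when the
analyzer's version differs from Version.LATEST.

import org.apache.lucene.analysis.Analyzer;
import org.apache.lucene.analysis.core.WhitespaceAnalyzer;
import org.apache.lucene.util.Version;

public class AnalyzerVersionSketch {
  public static void main(String[] args) {
    Analyzer analyzer = new WhitespaceAnalyzer();

    // Freshly constructed analyzers report Version.LATEST, so no luceneMatchVersion
    // property would be added for them.
    System.out.println(analyzer.getVersion() == Version.LATEST); // true

    // Pinning the analyzer to an older version (as happens when the analyzer element carries
    // a luceneMatchVersion attribute) makes the check fire and the property gets reported.
    analyzer.setVersion(Version.LUCENE_5_0_0);
    System.out.println(analyzer.getVersion()); // 5.0.0
  }
}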
Modified: lucene/dev/branches/branch_5x/solr/core/src/test/org/apache/solr/rest/schema/TestBulkSchemaAPI.java
URL: http://svn.apache.org/viewvc/lucene/dev/branches/branch_5x/solr/core/src/test/org/apache/solr/rest/schema/TestBulkSchemaAPI.java?rev=1686329&r1=1686328&r2=1686329&view=diff
==============================================================================
--- lucene/dev/branches/branch_5x/solr/core/src/test/org/apache/solr/rest/schema/TestBulkSchemaAPI.java (original)
+++ lucene/dev/branches/branch_5x/solr/core/src/test/org/apache/solr/rest/schema/TestBulkSchemaAPI.java Thu Jun 18 22:11:48 2015
@@ -91,6 +91,67 @@ public class TestBulkSchemaAPI extends R
     assertTrue (((String)errorList.get(0)).contains("is a required field"));
 
   }
+  
+  public void testAnalyzerClass() throws Exception {
+
+    String addFieldTypeAnalyzerWithClass = "{\n" +
+        "'add-field-type' : {" +
+        "    'name' : 'myNewTextFieldWithAnalyzerClass',\n" +
+        "    'class':'solr.TextField',\n" +
+        "    'analyzer' : {\n" +
+        "        'luceneMatchVersion':'5.0.0',\n" +
+        "        'class':'org.apache.lucene.analysis.core.WhitespaceAnalyzer'\n";
+    String charFilters =
+        "        'charFilters' : [{\n" +
+        "            'class':'solr.PatternReplaceCharFilterFactory',\n" +
+        "            'replacement':'$1$1',\n" +
+        "            'pattern':'([a-zA-Z])\\\\\\\\1+'\n" +
+        "        }],\n";
+    String tokenizer =
+        "        'tokenizer' : { 'class':'solr.WhitespaceTokenizerFactory' },\n";
+    String filters =
+        "        'filters' : [{ 'class':'solr.ASCIIFoldingFilterFactory' }]\n";
+    String suffix =
+        "    }\n"+
+        "}}";
+
+    String response = restTestHarness.post("/schema?wt=json",
+        json(addFieldTypeAnalyzerWithClass + ',' + charFilters + tokenizer + filters + suffix));
+    Map map = (Map)ObjectBuilder.getVal(new JSONParser(new StringReader(response)));
+    List list = (List)map.get("errors");
+    List errorList = (List)((Map)list.get(0)).get("errorMessages");
+    assertEquals(1, errorList.size());
+    assertTrue (((String)errorList.get(0)).contains
+        ("An analyzer with a class property may not define any char filters!"));
+
+    response = restTestHarness.post("/schema?wt=json",
+        json(addFieldTypeAnalyzerWithClass + ',' + tokenizer + filters + suffix));
+    map = (Map)ObjectBuilder.getVal(new JSONParser(new StringReader(response)));
+    list = (List)map.get("errors");
+    errorList = (List)((Map)list.get(0)).get("errorMessages");
+    assertEquals(1, errorList.size());
+    assertTrue (((String)errorList.get(0)).contains
+        ("An analyzer with a class property may not define a tokenizer!"));
+
+    response = restTestHarness.post("/schema?wt=json",
+        json(addFieldTypeAnalyzerWithClass + ',' + filters + suffix));
+    map = (Map)ObjectBuilder.getVal(new JSONParser(new StringReader(response)));
+    list = (List)map.get("errors");
+    errorList = (List)((Map)list.get(0)).get("errorMessages");
+    assertEquals(1, errorList.size());
+    assertTrue (((String)errorList.get(0)).contains
+        ("An analyzer with a class property may not define any filters!"));
+
+    response = restTestHarness.post("/schema?wt=json", json(addFieldTypeAnalyzerWithClass + suffix));
+    map = (Map)ObjectBuilder.getVal(new JSONParser(new StringReader(response)));
+    assertNull(response, map.get("errors"));
+
+    map = getObj(restTestHarness, "myNewTextFieldWithAnalyzerClass", "fieldTypes");
+    assertNotNull(map);
+    Map analyzer = (Map)map.get("analyzer");
+    assertEquals("org.apache.lucene.analysis.core.WhitespaceAnalyzer", String.valueOf(analyzer.get("class")));
+    assertEquals("5.0.0", String.valueOf(analyzer.get("luceneMatchVersion")));
+  }
 
 
   public void testMultipleCommands() throws Exception{
@@ -192,6 +253,16 @@ public class TestBulkSchemaAPI extends R
         "                       'stored':true,\n" +
         "                       'indexed':true\n" +
         "                       },\n" +
+        "          'add-field-type' : {" +
+        "                       'name' : 'myWhitespaceTxtField',\n" +
+        "                       'class':'solr.TextField',\n" +
+        "                       'analyzer' : {'class' : 'org.apache.lucene.analysis.core.WhitespaceAnalyzer'}\n" +
+        "                       },\n"+
+        "          'add-field' : {\n" +
+        "                       'name':'a5',\n" +
+        "                       'type': 'myWhitespaceTxtField',\n" +
+        "                       'stored':true\n" +
+        "                       },\n" +
         "          'delete-field' : {'name':'wdf_nocase'},\n" +
         "          'delete-field-type' : {'name':'wdf_nocase'},\n" +
         "          'delete-dynamic-field' : {'name':'*_tt'},\n" +
@@ -269,6 +340,13 @@ public class TestBulkSchemaAPI extends R
     assertNotNull("field a3 not created", m);
     assertEquals("myNewTxtField", m.get("type"));
 
+    m = getObj(harness, "myWhitespaceTxtField", "fieldTypes");
+    assertNotNull(m);
+
+    m = getObj(harness, "a5", "fields");
+    assertNotNull("field a5 not created", m);
+    assertEquals("myWhitespaceTxtField", m.get("type"));
+
     m = getObj(harness, "wdf_nocase", "fields");
     assertNull("field 'wdf_nocase' not deleted", m);