You are viewing a plain text version of this content. The canonical link for it is here.
Posted to commits@lucene.apache.org by si...@apache.org on 2010/10/20 14:44:39 UTC
svn commit: r1025539 [4/10] - in /lucene/dev/branches/docvalues: ./ lucene/
lucene/contrib/
lucene/contrib/benchmark/src/java/org/apache/lucene/benchmark/byTask/tasks/
lucene/contrib/highlighter/src/test/
lucene/contrib/instantiated/src/test/org/apache...
Modified: lucene/dev/branches/docvalues/modules/analysis/common/src/java/org/apache/lucene/analysis/standard/UAX29Tokenizer.java
URL: http://svn.apache.org/viewvc/lucene/dev/branches/docvalues/modules/analysis/common/src/java/org/apache/lucene/analysis/standard/UAX29Tokenizer.java?rev=1025539&r1=1025538&r2=1025539&view=diff
==============================================================================
--- lucene/dev/branches/docvalues/modules/analysis/common/src/java/org/apache/lucene/analysis/standard/UAX29Tokenizer.java (original)
+++ lucene/dev/branches/docvalues/modules/analysis/common/src/java/org/apache/lucene/analysis/standard/UAX29Tokenizer.java Wed Oct 20 12:44:28 2010
@@ -1,4 +1,4 @@
-/* The following code was generated by JFlex 1.5.0-SNAPSHOT on 10/3/10 9:07 AM */
+/* The following code was generated by JFlex 1.5.0-SNAPSHOT on 10/13/10 12:12 AM */
package org.apache.lucene.analysis.standard;
@@ -85,107 +85,109 @@ public final class UAX29Tokenizer extend
"\1\0\u01ca\1\4\0\14\1\16\0\5\1\7\0\1\1\1\0\1\1"+
"\21\0\160\2\5\1\1\0\2\1\2\0\4\1\1\6\7\0\1\1"+
"\1\5\3\1\1\0\1\1\1\0\24\1\1\0\123\1\1\0\213\1"+
- "\1\0\7\2\234\1\13\0\46\1\2\0\1\1\7\0\47\1\1\0"+
+ "\1\0\7\2\236\1\11\0\46\1\2\0\1\1\7\0\47\1\1\0"+
"\1\6\7\0\55\2\1\0\1\2\1\0\2\2\1\0\2\2\1\0"+
"\1\2\10\0\33\1\5\0\4\1\1\5\13\0\4\2\10\0\2\6"+
- "\2\0\13\2\6\0\52\1\24\2\1\0\12\3\1\0\1\3\1\6"+
- "\1\0\2\1\1\2\143\1\1\0\1\1\17\2\2\1\2\2\1\0"+
- "\4\2\2\1\12\3\3\1\2\0\1\1\17\0\1\2\1\1\1\2"+
- "\36\1\33\2\2\0\131\1\13\2\1\1\16\0\12\3\41\1\11\2"+
- "\2\1\2\0\1\6\1\0\1\1\5\0\26\1\4\2\1\1\11\2"+
- "\1\1\3\2\1\1\5\2\322\0\4\2\66\1\2\0\1\2\1\1"+
- "\21\2\1\0\1\1\5\2\2\0\12\1\2\2\2\0\12\3\1\0"+
- "\2\1\6\0\7\1\1\0\3\2\1\0\10\1\2\0\2\1\2\0"+
- "\26\1\1\0\7\1\1\0\1\1\3\0\4\1\2\0\1\2\1\1"+
- "\7\2\2\0\2\2\2\0\3\2\1\1\10\0\1\2\4\0\2\1"+
- "\1\0\3\1\2\2\2\0\12\3\2\1\17\0\3\2\1\0\6\1"+
- "\4\0\2\1\2\0\26\1\1\0\7\1\1\0\2\1\1\0\2\1"+
- "\1\0\2\1\2\0\1\2\1\0\5\2\4\0\2\2\2\0\3\2"+
- "\3\0\1\2\7\0\4\1\1\0\1\1\7\0\12\3\2\2\3\1"+
- "\1\2\13\0\3\2\1\0\11\1\1\0\3\1\1\0\26\1\1\0"+
- "\7\1\1\0\2\1\1\0\5\1\2\0\1\2\1\1\10\2\1\0"+
- "\3\2\1\0\3\2\2\0\1\1\17\0\2\1\2\2\2\0\12\3"+
- "\21\0\3\2\1\0\10\1\2\0\2\1\2\0\26\1\1\0\7\1"+
- "\1\0\2\1\1\0\5\1\2\0\1\2\1\1\7\2\2\0\2\2"+
- "\2\0\3\2\10\0\2\2\4\0\2\1\1\0\3\1\2\2\2\0"+
- "\12\3\1\0\1\1\20\0\1\2\1\1\1\0\6\1\3\0\3\1"+
- "\1\0\4\1\3\0\2\1\1\0\1\1\1\0\2\1\3\0\2\1"+
- "\3\0\3\1\3\0\14\1\4\0\5\2\3\0\3\2\1\0\4\2"+
- "\2\0\1\1\6\0\1\2\16\0\12\3\21\0\3\2\1\0\10\1"+
- "\1\0\3\1\1\0\27\1\1\0\12\1\1\0\5\1\3\0\1\1"+
- "\7\2\1\0\3\2\1\0\4\2\7\0\2\2\1\0\2\1\6\0"+
- "\2\1\2\2\2\0\12\3\22\0\2\2\1\0\10\1\1\0\3\1"+
- "\1\0\27\1\1\0\12\1\1\0\5\1\2\0\1\2\1\1\7\2"+
- "\1\0\3\2\1\0\4\2\7\0\2\2\7\0\1\1\1\0\2\1"+
- "\2\2\2\0\12\3\22\0\2\2\1\0\10\1\1\0\3\1\1\0"+
- "\27\1\1\0\20\1\3\0\1\1\7\2\1\0\3\2\1\0\4\2"+
- "\11\0\1\2\10\0\2\1\2\2\2\0\12\3\12\0\6\1\2\0"+
- "\2\2\1\0\22\1\3\0\30\1\1\0\11\1\1\0\1\1\2\0"+
- "\7\1\3\0\1\2\4\0\6\2\1\0\1\2\1\0\10\2\22\0"+
- "\2\2\15\0\60\11\1\12\2\11\7\12\5\0\7\11\10\12\1\0"+
- "\12\3\47\0\2\11\1\0\1\11\2\0\2\11\1\0\1\11\2\0"+
- "\1\11\6\0\4\11\1\0\7\11\1\0\3\11\1\0\1\11\1\0"+
- "\1\11\2\0\2\11\1\0\4\11\1\12\2\11\6\12\1\0\2\12"+
- "\1\11\2\0\5\11\1\0\1\11\1\0\6\12\2\0\12\3\2\0"+
- "\2\11\42\0\1\1\27\0\2\2\6\0\12\3\13\0\1\2\1\0"+
- "\1\2\1\0\1\2\4\0\2\2\10\1\1\0\44\1\4\0\24\2"+
- "\1\0\2\2\4\1\4\0\10\2\1\0\44\2\11\0\1\2\71\0"+
- "\53\11\24\12\1\11\12\3\6\0\6\11\4\12\4\11\3\12\1\11"+
- "\3\12\2\11\7\12\3\11\4\12\15\11\14\12\1\11\1\12\12\3"+
- "\4\12\2\11\46\1\12\0\53\1\1\0\1\1\3\0\u0149\1\1\0"+
- "\4\1\2\0\7\1\1\0\1\1\1\0\4\1\2\0\51\1\1\0"+
- "\4\1\2\0\41\1\1\0\4\1\2\0\7\1\1\0\1\1\1\0"+
- "\4\1\2\0\17\1\1\0\71\1\1\0\4\1\2\0\103\1\4\0"+
- "\1\2\40\0\20\1\20\0\125\1\14\0\u026c\1\2\0\21\1\1\0"+
- "\32\1\5\0\113\1\3\0\3\1\17\0\15\1\1\0\4\1\3\2"+
- "\13\0\22\1\3\2\13\0\22\1\2\2\14\0\15\1\1\0\3\1"+
- "\1\0\2\2\14\0\64\11\40\12\3\0\1\11\4\0\1\11\1\12"+
- "\2\0\12\3\41\0\3\2\2\0\12\3\6\0\130\1\10\0\51\1"+
- "\1\2\1\1\5\0\106\1\12\0\35\1\3\0\14\2\4\0\14\2"+
- "\12\0\12\3\36\11\2\0\5\11\13\0\54\11\4\0\21\12\7\11"+
- "\2\12\6\0\13\3\3\0\2\11\40\0\27\1\5\2\4\0\65\11"+
- "\12\12\1\0\35\12\2\0\1\2\12\3\6\0\12\3\6\0\16\11"+
- "\122\0\5\2\57\1\21\2\7\1\4\0\12\3\21\0\11\2\14\0"+
- "\3\2\36\1\12\2\3\0\2\1\12\3\106\0\44\1\24\2\10\0"+
- "\12\3\3\0\3\1\12\3\44\1\122\0\3\2\1\0\25\2\4\1"+
- "\1\2\4\1\1\2\15\0\300\1\47\2\26\0\3\2\u0116\1\2\0"+
- "\6\1\2\0\46\1\2\0\6\1\2\0\10\1\1\0\1\1\1\0"+
- "\1\1\1\0\1\1\1\0\37\1\2\0\65\1\1\0\7\1\1\0"+
- "\1\1\3\0\3\1\1\0\7\1\3\0\4\1\2\0\6\1\4\0"+
- "\15\1\5\0\3\1\1\0\7\1\17\0\4\2\10\0\2\7\12\0"+
- "\1\7\2\0\1\5\2\0\5\2\20\0\2\10\3\0\1\6\17\0"+
- "\1\10\13\0\5\2\5\0\6\2\1\0\1\1\15\0\1\1\20\0"+
- "\5\1\73\0\41\2\21\0\1\1\4\0\1\1\2\0\12\1\1\0"+
- "\1\1\3\0\5\1\6\0\1\1\1\0\1\1\1\0\1\1\1\0"+
- "\4\1\1\0\13\1\2\0\4\1\5\0\5\1\4\0\1\1\21\0"+
- "\51\1\u032d\0\64\1\u0716\0\57\1\1\0\57\1\1\0\205\1\6\0"+
- "\4\1\3\2\16\0\46\1\12\0\66\1\11\0\1\1\20\0\27\1"+
- "\11\0\7\1\1\0\7\1\1\0\7\1\1\0\7\1\1\0\7\1"+
- "\1\0\7\1\1\0\7\1\1\0\7\1\1\0\40\2\57\0\1\1"+
- "\120\0\32\13\1\0\131\13\14\0\326\13\57\0\1\1\1\0\1\13"+
- "\31\0\11\13\6\2\1\0\5\4\2\0\3\13\1\1\1\1\4\0"+
- "\126\14\2\0\2\2\2\4\3\14\133\4\1\0\4\4\5\0\51\1"+
- "\3\0\136\1\21\0\30\1\70\0\20\4\320\0\57\4\1\0\130\4"+
- "\250\0\u19b6\13\112\0\u51cc\13\64\0\u048d\1\103\0\56\1\2\0\u010d\1"+
- "\3\0\20\1\12\3\2\1\24\0\40\1\2\0\15\1\4\2\11\0"+
- "\2\2\1\0\31\1\10\0\120\1\2\2\45\0\11\1\2\0\147\1"+
- "\2\0\2\1\156\0\7\1\1\2\3\1\1\2\4\1\1\2\27\1"+
- "\5\2\30\0\64\1\14\0\2\2\62\1\21\2\13\0\12\3\6\0"+
- "\22\2\6\1\3\0\1\1\4\0\12\3\34\1\10\2\2\0\27\1"+
- "\15\2\14\0\35\1\3\0\4\2\57\1\16\2\16\0\1\1\12\3"+
- "\46\0\51\1\16\2\11\0\3\1\1\2\10\1\2\2\2\0\12\3"+
- "\6\0\33\11\1\12\4\0\60\11\1\12\1\11\3\12\2\11\2\12"+
- "\5\11\2\12\1\11\1\12\1\11\30\0\5\11\340\0\43\1\10\2"+
- "\1\0\2\2\2\0\12\3\6\0\u2ba4\1\14\0\27\1\4\0\61\1"+
- "\u2104\0\u012e\13\2\0\76\13\2\0\152\13\46\0\7\1\14\0\5\1"+
- "\5\0\1\1\1\2\12\1\1\0\15\1\1\0\5\1\1\0\1\1"+
- "\1\0\2\1\1\0\2\1\1\0\154\1\41\0\u016b\1\22\0\100\1"+
- "\2\0\66\1\50\0\14\1\4\0\20\2\1\6\2\0\1\5\1\6"+
- "\13\0\7\2\14\0\2\10\30\0\3\10\1\6\1\0\1\7\1\0"+
- "\1\6\1\5\32\0\5\1\1\0\207\1\2\0\1\2\7\0\1\7"+
- "\4\0\1\6\1\0\1\7\1\0\12\3\1\5\1\6\5\0\32\1"+
- "\4\0\1\10\1\0\32\1\13\0\70\4\2\2\37\1\3\0\6\1"+
- "\2\0\6\1\2\0\6\1\2\0\3\1\34\0\3\2\4\0";
+ "\2\0\13\2\5\0\53\1\25\2\12\3\1\0\1\3\1\6\1\0"+
+ "\2\1\1\2\143\1\1\0\1\1\10\2\1\0\6\2\2\1\2\2"+
+ "\1\0\4\2\2\1\12\3\3\1\2\0\1\1\17\0\1\2\1\1"+
+ "\1\2\36\1\33\2\2\0\131\1\13\2\1\1\16\0\12\3\41\1"+
+ "\11\2\2\1\2\0\1\6\1\0\1\1\5\0\26\1\4\2\1\1"+
+ "\11\2\1\1\3\2\1\1\5\2\22\0\31\1\3\2\244\0\4\2"+
+ "\66\1\3\2\1\1\22\2\1\1\7\2\12\1\2\2\2\0\12\3"+
+ "\1\0\7\1\1\0\7\1\1\0\3\2\1\0\10\1\2\0\2\1"+
+ "\2\0\26\1\1\0\7\1\1\0\1\1\3\0\4\1\2\0\1\2"+
+ "\1\1\7\2\2\0\2\2\2\0\3\2\1\1\10\0\1\2\4\0"+
+ "\2\1\1\0\3\1\2\2\2\0\12\3\2\1\17\0\3\2\1\0"+
+ "\6\1\4\0\2\1\2\0\26\1\1\0\7\1\1\0\2\1\1\0"+
+ "\2\1\1\0\2\1\2\0\1\2\1\0\5\2\4\0\2\2\2\0"+
+ "\3\2\3\0\1\2\7\0\4\1\1\0\1\1\7\0\12\3\2\2"+
+ "\3\1\1\2\13\0\3\2\1\0\11\1\1\0\3\1\1\0\26\1"+
+ "\1\0\7\1\1\0\2\1\1\0\5\1\2\0\1\2\1\1\10\2"+
+ "\1\0\3\2\1\0\3\2\2\0\1\1\17\0\2\1\2\2\2\0"+
+ "\12\3\21\0\3\2\1\0\10\1\2\0\2\1\2\0\26\1\1\0"+
+ "\7\1\1\0\2\1\1\0\5\1\2\0\1\2\1\1\7\2\2\0"+
+ "\2\2\2\0\3\2\10\0\2\2\4\0\2\1\1\0\3\1\2\2"+
+ "\2\0\12\3\1\0\1\1\20\0\1\2\1\1\1\0\6\1\3\0"+
+ "\3\1\1\0\4\1\3\0\2\1\1\0\1\1\1\0\2\1\3\0"+
+ "\2\1\3\0\3\1\3\0\14\1\4\0\5\2\3\0\3\2\1\0"+
+ "\4\2\2\0\1\1\6\0\1\2\16\0\12\3\21\0\3\2\1\0"+
+ "\10\1\1\0\3\1\1\0\27\1\1\0\12\1\1\0\5\1\3\0"+
+ "\1\1\7\2\1\0\3\2\1\0\4\2\7\0\2\2\1\0\2\1"+
+ "\6\0\2\1\2\2\2\0\12\3\22\0\2\2\1\0\10\1\1\0"+
+ "\3\1\1\0\27\1\1\0\12\1\1\0\5\1\2\0\1\2\1\1"+
+ "\7\2\1\0\3\2\1\0\4\2\7\0\2\2\7\0\1\1\1\0"+
+ "\2\1\2\2\2\0\12\3\1\0\2\1\17\0\2\2\1\0\10\1"+
+ "\1\0\3\1\1\0\51\1\2\0\1\1\7\2\1\0\3\2\1\0"+
+ "\4\2\1\1\10\0\1\2\10\0\2\1\2\2\2\0\12\3\12\0"+
+ "\6\1\2\0\2\2\1\0\22\1\3\0\30\1\1\0\11\1\1\0"+
+ "\1\1\2\0\7\1\3\0\1\2\4\0\6\2\1\0\1\2\1\0"+
+ "\10\2\22\0\2\2\15\0\60\11\1\12\2\11\7\12\5\0\7\11"+
+ "\10\12\1\0\12\3\47\0\2\11\1\0\1\11\2\0\2\11\1\0"+
+ "\1\11\2\0\1\11\6\0\4\11\1\0\7\11\1\0\3\11\1\0"+
+ "\1\11\1\0\1\11\2\0\2\11\1\0\4\11\1\12\2\11\6\12"+
+ "\1\0\2\12\1\11\2\0\5\11\1\0\1\11\1\0\6\12\2\0"+
+ "\12\3\2\0\2\11\42\0\1\1\27\0\2\2\6\0\12\3\13\0"+
+ "\1\2\1\0\1\2\1\0\1\2\4\0\2\2\10\1\1\0\44\1"+
+ "\4\0\24\2\1\0\2\2\5\1\13\2\1\0\44\2\11\0\1\2"+
+ "\71\0\53\11\24\12\1\11\12\3\6\0\6\11\4\12\4\11\3\12"+
+ "\1\11\3\12\2\11\7\12\3\11\4\12\15\11\14\12\1\11\1\12"+
+ "\12\3\4\12\2\11\46\1\12\0\53\1\1\0\1\1\3\0\u0149\1"+
+ "\1\0\4\1\2\0\7\1\1\0\1\1\1\0\4\1\2\0\51\1"+
+ "\1\0\4\1\2\0\41\1\1\0\4\1\2\0\7\1\1\0\1\1"+
+ "\1\0\4\1\2\0\17\1\1\0\71\1\1\0\4\1\2\0\103\1"+
+ "\2\0\3\2\40\0\20\1\20\0\125\1\14\0\u026c\1\2\0\21\1"+
+ "\1\0\32\1\5\0\113\1\3\0\3\1\17\0\15\1\1\0\4\1"+
+ "\3\2\13\0\22\1\3\2\13\0\22\1\2\2\14\0\15\1\1\0"+
+ "\3\1\1\0\2\2\14\0\64\11\40\12\3\0\1\11\4\0\1\11"+
+ "\1\12\2\0\12\3\41\0\3\2\2\0\12\3\6\0\130\1\10\0"+
+ "\51\1\1\2\1\1\5\0\106\1\12\0\35\1\3\0\14\2\4\0"+
+ "\14\2\12\0\12\3\36\11\2\0\5\11\13\0\54\11\4\0\21\12"+
+ "\7\11\2\12\6\0\12\3\1\11\3\0\2\11\40\0\27\1\5\2"+
+ "\4\0\65\11\12\12\1\0\35\12\2\0\1\2\12\3\6\0\12\3"+
+ "\6\0\16\11\122\0\5\2\57\1\21\2\7\1\4\0\12\3\21\0"+
+ "\11\2\14\0\3\2\36\1\12\2\3\0\2\1\12\3\6\0\46\1"+
+ "\16\2\14\0\44\1\24\2\10\0\12\3\3\0\3\1\12\3\44\1"+
+ "\122\0\3\2\1\0\25\2\4\1\1\2\4\1\1\2\15\0\300\1"+
+ "\47\2\25\0\4\2\u0116\1\2\0\6\1\2\0\46\1\2\0\6\1"+
+ "\2\0\10\1\1\0\1\1\1\0\1\1\1\0\1\1\1\0\37\1"+
+ "\2\0\65\1\1\0\7\1\1\0\1\1\3\0\3\1\1\0\7\1"+
+ "\3\0\4\1\2\0\6\1\4\0\15\1\5\0\3\1\1\0\7\1"+
+ "\17\0\4\2\10\0\2\7\12\0\1\7\2\0\1\5\2\0\5\2"+
+ "\20\0\2\10\3\0\1\6\17\0\1\10\13\0\5\2\5\0\6\2"+
+ "\1\0\1\1\15\0\1\1\20\0\15\1\63\0\41\2\21\0\1\1"+
+ "\4\0\1\1\2\0\12\1\1\0\1\1\3\0\5\1\6\0\1\1"+
+ "\1\0\1\1\1\0\1\1\1\0\4\1\1\0\13\1\2\0\4\1"+
+ "\5\0\5\1\4\0\1\1\21\0\51\1\u032d\0\64\1\u0716\0\57\1"+
+ "\1\0\57\1\1\0\205\1\6\0\4\1\3\2\16\0\46\1\12\0"+
+ "\66\1\11\0\1\1\17\0\1\2\27\1\11\0\7\1\1\0\7\1"+
+ "\1\0\7\1\1\0\7\1\1\0\7\1\1\0\7\1\1\0\7\1"+
+ "\1\0\7\1\1\0\40\2\57\0\1\1\120\0\32\13\1\0\131\13"+
+ "\14\0\326\13\57\0\1\1\1\0\1\13\31\0\11\13\6\2\1\0"+
+ "\5\4\2\0\3\13\1\1\1\1\4\0\126\14\2\0\2\2\2\4"+
+ "\3\14\133\4\1\0\4\4\5\0\51\1\3\0\136\1\21\0\33\1"+
+ "\65\0\20\4\320\0\57\4\1\0\130\4\250\0\u19b6\13\112\0\u51cc\13"+
+ "\64\0\u048d\1\103\0\56\1\2\0\u010d\1\3\0\20\1\12\3\2\1"+
+ "\24\0\57\1\4\2\11\0\2\2\1\0\31\1\10\0\120\1\2\2"+
+ "\45\0\11\1\2\0\147\1\2\0\4\1\1\0\2\1\16\0\12\1"+
+ "\120\0\10\1\1\2\3\1\1\2\4\1\1\2\27\1\5\2\30\0"+
+ "\64\1\14\0\2\2\62\1\21\2\13\0\12\3\6\0\22\2\6\1"+
+ "\3\0\1\1\4\0\12\3\34\1\10\2\2\0\27\1\15\2\14\0"+
+ "\35\1\3\0\4\2\57\1\16\2\16\0\1\1\12\3\46\0\51\1"+
+ "\16\2\11\0\3\1\1\2\10\1\2\2\2\0\12\3\6\0\33\11"+
+ "\1\12\4\0\60\11\1\12\1\11\3\12\2\11\2\12\5\11\2\12"+
+ "\1\11\1\12\1\11\30\0\5\11\41\0\6\1\2\0\6\1\2\0"+
+ "\6\1\11\0\7\1\1\0\7\1\221\0\43\1\10\2\1\0\2\2"+
+ "\2\0\12\3\6\0\u2ba4\1\14\0\27\1\4\0\61\1\u2104\0\u012e\13"+
+ "\2\0\76\13\2\0\152\13\46\0\7\1\14\0\5\1\5\0\1\1"+
+ "\1\2\12\1\1\0\15\1\1\0\5\1\1\0\1\1\1\0\2\1"+
+ "\1\0\2\1\1\0\154\1\41\0\u016b\1\22\0\100\1\2\0\66\1"+
+ "\50\0\14\1\4\0\20\2\1\6\2\0\1\5\1\6\13\0\7\2"+
+ "\14\0\2\10\30\0\3\10\1\6\1\0\1\7\1\0\1\6\1\5"+
+ "\32\0\5\1\1\0\207\1\2\0\1\2\7\0\1\7\4\0\1\6"+
+ "\1\0\1\7\1\0\12\3\1\5\1\6\5\0\32\1\4\0\1\10"+
+ "\1\0\32\1\13\0\70\4\2\2\37\1\3\0\6\1\2\0\6\1"+
+ "\2\0\6\1\2\0\3\1\34\0\3\2\4\0";
/**
* Translates characters to character classes
@@ -530,7 +532,7 @@ public final class UAX29Tokenizer extend
char [] map = new char[0x10000];
int i = 0; /* index in packed string */
int j = 0; /* index in unpacked array */
- while (i < 2138) {
+ while (i < 2174) {
int count = packed.charAt(i++);
char value = packed.charAt(i++);
do map[j++] = value; while (--count > 0);
@@ -803,28 +805,28 @@ public final class UAX29Tokenizer extend
zzMarkedPos = zzMarkedPosL;
switch (zzAction < 0 ? zzAction : ZZ_ACTION[zzAction]) {
- case 2:
- { if (populateAttributes(WORD_TYPE)) return true;
+ case 5:
+ { if (populateAttributes(IDEOGRAPHIC_TYPE)) return true;
}
case 7: break;
- case 6:
- { if (populateAttributes(HIRAGANA_TYPE)) return true;
+ case 1:
+ { /* Not numeric, word, ideographic, hiragana, or SE Asian -- ignore it. */
}
case 8: break;
- case 5:
- { if (populateAttributes(IDEOGRAPHIC_TYPE)) return true;
+ case 3:
+ { if (populateAttributes(NUMERIC_TYPE)) return true;
}
case 9: break;
- case 4:
- { if (populateAttributes(SOUTH_EAST_ASIAN_TYPE)) return true;
+ case 6:
+ { if (populateAttributes(HIRAGANA_TYPE)) return true;
}
case 10: break;
- case 3:
- { if (populateAttributes(NUMERIC_TYPE)) return true;
+ case 4:
+ { if (populateAttributes(SOUTH_EAST_ASIAN_TYPE)) return true;
}
case 11: break;
- case 1:
- { /* Not numeric, word, ideographic, hiragana, or SE Asian -- ignore it. */
+ case 2:
+ { if (populateAttributes(WORD_TYPE)) return true;
}
case 12: break;
default:
Modified: lucene/dev/branches/docvalues/modules/analysis/common/src/java/org/apache/lucene/analysis/standard/UAX29Tokenizer.jflex
URL: http://svn.apache.org/viewvc/lucene/dev/branches/docvalues/modules/analysis/common/src/java/org/apache/lucene/analysis/standard/UAX29Tokenizer.jflex?rev=1025539&r1=1025538&r2=1025539&view=diff
==============================================================================
--- lucene/dev/branches/docvalues/modules/analysis/common/src/java/org/apache/lucene/analysis/standard/UAX29Tokenizer.jflex (original)
+++ lucene/dev/branches/docvalues/modules/analysis/common/src/java/org/apache/lucene/analysis/standard/UAX29Tokenizer.jflex Wed Oct 20 12:44:28 2010
@@ -53,7 +53,7 @@ import org.apache.lucene.util.AttributeS
*/
%%
-%unicode 5.2
+%unicode 6.0
%final
%public
%apiprivate
@@ -247,7 +247,7 @@ ExtendNumLetEx = \p{WB:ExtendNumLet}
// annex. That means that satisfactory treatment of languages like Chinese
// or Thai requires special handling.
//
-// In Unicode 5.2, only one character has the \p{Line_Break = Contingent_Break}
+// In Unicode 6.0, only one character has the \p{Line_Break = Contingent_Break}
// property: U+FFFC OBJECT REPLACEMENT CHARACTER.
//
// In the ICU implementation of UAX#29, \p{Line_Break = Complex_Context}
Modified: lucene/dev/branches/docvalues/modules/analysis/common/src/test/org/apache/lucene/analysis/core/TestStandardAnalyzer.java
URL: http://svn.apache.org/viewvc/lucene/dev/branches/docvalues/modules/analysis/common/src/test/org/apache/lucene/analysis/core/TestStandardAnalyzer.java?rev=1025539&r1=1025538&r2=1025539&view=diff
==============================================================================
--- lucene/dev/branches/docvalues/modules/analysis/common/src/test/org/apache/lucene/analysis/core/TestStandardAnalyzer.java (original)
+++ lucene/dev/branches/docvalues/modules/analysis/common/src/test/org/apache/lucene/analysis/core/TestStandardAnalyzer.java Wed Oct 20 12:44:28 2010
@@ -394,7 +394,7 @@ public class TestStandardAnalyzer extend
}
public void testUnicodeWordBreaks() throws Exception {
- WordBreakTestUnicode_5_2_0 wordBreakTest = new WordBreakTestUnicode_5_2_0();
+ WordBreakTestUnicode_6_0_0 wordBreakTest = new WordBreakTestUnicode_6_0_0();
wordBreakTest.test(a);
}
}
Modified: lucene/dev/branches/docvalues/modules/analysis/common/src/test/org/apache/lucene/analysis/core/TestUAX29Tokenizer.java
URL: http://svn.apache.org/viewvc/lucene/dev/branches/docvalues/modules/analysis/common/src/test/org/apache/lucene/analysis/core/TestUAX29Tokenizer.java?rev=1025539&r1=1025538&r2=1025539&view=diff
==============================================================================
--- lucene/dev/branches/docvalues/modules/analysis/common/src/test/org/apache/lucene/analysis/core/TestUAX29Tokenizer.java (original)
+++ lucene/dev/branches/docvalues/modules/analysis/common/src/test/org/apache/lucene/analysis/core/TestUAX29Tokenizer.java Wed Oct 20 12:44:28 2010
@@ -198,7 +198,7 @@ public class TestUAX29Tokenizer extends
}
public void testUnicodeWordBreaks() throws Exception {
- WordBreakTestUnicode_5_2_0 wordBreakTest = new WordBreakTestUnicode_5_2_0();
+ WordBreakTestUnicode_6_0_0 wordBreakTest = new WordBreakTestUnicode_6_0_0();
wordBreakTest.test(a);
}
}
Modified: lucene/dev/branches/docvalues/modules/analysis/common/src/test/org/apache/lucene/analysis/core/generateJavaUnicodeWordBreakTest.pl
URL: http://svn.apache.org/viewvc/lucene/dev/branches/docvalues/modules/analysis/common/src/test/org/apache/lucene/analysis/core/generateJavaUnicodeWordBreakTest.pl?rev=1025539&r1=1025538&r2=1025539&view=diff
==============================================================================
--- lucene/dev/branches/docvalues/modules/analysis/common/src/test/org/apache/lucene/analysis/core/generateJavaUnicodeWordBreakTest.pl (original)
+++ lucene/dev/branches/docvalues/modules/analysis/common/src/test/org/apache/lucene/analysis/core/generateJavaUnicodeWordBreakTest.pl Wed Oct 20 12:44:28 2010
@@ -61,6 +61,7 @@ package org.apache.lucene.analysis.core;
import org.apache.lucene.analysis.Analyzer;
import org.apache.lucene.analysis.BaseTokenStreamTestCase;
+import org.junit.Ignore;
/**
* This class was automatically generated by ${script_name}
@@ -81,6 +82,7 @@ import org.apache.lucene.analysis.BaseTo
* \\p{WordBreak = Numeric} (Excludes full-width Arabic digits)
* [\\uFF10-\\uFF19] (Full-width Arabic digits)
*/
+\@Ignore
public class ${class_name} extends BaseTokenStreamTestCase {
public void test(Analyzer analyzer) throws Exception {
Propchange: lucene/dev/branches/docvalues/modules/analysis/common/src/test/org/apache/lucene/analysis/miscellaneous/TestISOLatin1AccentFilter.java
------------------------------------------------------------------------------
--- svn:mergeinfo (original)
+++ svn:mergeinfo Wed Oct 20 12:44:28 2010
@@ -1,5 +1,6 @@
/lucene/dev/branches/branch_3x/modules/analysis/common/src/test/org/apache/lucene/analysis/miscellaneous/TestISOLatin1AccentFilter.java:980654,982195,987811,988512
/lucene/dev/branches/preflexfixes/modules/analysis/common/src/test/org/apache/lucene/analysis/miscellaneous/TestISOLatin1AccentFilter.java:967125-979432
+/lucene/dev/trunk/modules/analysis/common/src/test/org/apache/lucene/analysis/miscellaneous/TestISOLatin1AccentFilter.java:1021635-1024556,1025532-1025536
/lucene/java/branches/flex_1458/src/test/org/apache/lucene/analysis/TestISOLatin1AccentFilter.java:824912-931101
/lucene/java/branches/lucene_2_4/src/test/org/apache/lucene/analysis/TestISOLatin1AccentFilter.java:748824
/lucene/java/branches/lucene_2_9/src/test/org/apache/lucene/analysis/TestISOLatin1AccentFilter.java:829134,829881,831036,896850,909334
Modified: lucene/dev/branches/docvalues/modules/analysis/common/src/test/org/apache/lucene/analysis/th/TestThaiAnalyzer.java
URL: http://svn.apache.org/viewvc/lucene/dev/branches/docvalues/modules/analysis/common/src/test/org/apache/lucene/analysis/th/TestThaiAnalyzer.java?rev=1025539&r1=1025538&r2=1025539&view=diff
==============================================================================
--- lucene/dev/branches/docvalues/modules/analysis/common/src/test/org/apache/lucene/analysis/th/TestThaiAnalyzer.java (original)
+++ lucene/dev/branches/docvalues/modules/analysis/common/src/test/org/apache/lucene/analysis/th/TestThaiAnalyzer.java Wed Oct 20 12:44:28 2010
@@ -19,7 +19,6 @@ package org.apache.lucene.analysis.th;
import org.apache.lucene.analysis.BaseTokenStreamTestCase;
import org.apache.lucene.util.Version;
-import org.junit.Assume;
/**
* Test case for ThaiAnalyzer, modified from TestFrenchAnalyzer
@@ -33,7 +32,7 @@ public class TestThaiAnalyzer extends Ba
* testcase for offsets
*/
public void testOffsets() throws Exception {
- Assume.assumeTrue(ThaiWordFilter.DBBI_AVAILABLE);
+ assumeTrue("JRE does not support Thai dictionary-based BreakIterator", ThaiWordFilter.DBBI_AVAILABLE);
assertAnalyzesTo(new ThaiAnalyzer(TEST_VERSION_CURRENT), "à¸à¸²à¸£à¸à¸µà¹à¹à¸à¹à¸à¹à¸à¸à¹à¸ªà¸à¸à¸§à¹à¸²à¸à¸²à¸à¸à¸µ",
new String[] { "à¸à¸²à¸£", "à¸à¸µà¹", "à¹à¸à¹", "à¸à¹à¸à¸", "à¹à¸ªà¸à¸", "วà¹à¸²", "à¸à¸²à¸", "à¸à¸µ" },
new int[] { 0, 3, 6, 9, 13, 17, 20, 23 },
@@ -41,6 +40,7 @@ public class TestThaiAnalyzer extends Ba
}
public void testTokenType() throws Exception {
+ assumeTrue("JRE does not support Thai dictionary-based BreakIterator", ThaiWordFilter.DBBI_AVAILABLE);
assertAnalyzesTo(new ThaiAnalyzer(TEST_VERSION_CURRENT), "à¸à¸²à¸£à¸à¸µà¹à¹à¸à¹à¸à¹à¸à¸à¹à¸ªà¸à¸à¸§à¹à¸²à¸à¸²à¸à¸à¸µ à¹à¹à¹",
new String[] { "à¸à¸²à¸£", "à¸à¸µà¹", "à¹à¸à¹", "à¸à¹à¸à¸", "à¹à¸ªà¸à¸", "วà¹à¸²", "à¸à¸²à¸", "à¸à¸µ", "à¹à¹à¹" },
new String[] { "<SOUTHEAST_ASIAN>", "<SOUTHEAST_ASIAN>",
@@ -56,7 +56,7 @@ public class TestThaiAnalyzer extends Ba
*/
@Deprecated
public void testBuggyTokenType30() throws Exception {
- Assume.assumeTrue(ThaiWordFilter.DBBI_AVAILABLE);
+ assumeTrue("JRE does not support Thai dictionary-based BreakIterator", ThaiWordFilter.DBBI_AVAILABLE);
assertAnalyzesTo(new ThaiAnalyzer(Version.LUCENE_30), "à¸à¸²à¸£à¸à¸µà¹à¹à¸à¹à¸à¹à¸à¸à¹à¸ªà¸à¸à¸§à¹à¸²à¸à¸²à¸à¸à¸µ à¹à¹à¹",
new String[] { "à¸à¸²à¸£", "à¸à¸µà¹", "à¹à¸à¹", "à¸à¹à¸à¸", "à¹à¸ªà¸à¸", "วà¹à¸²", "à¸à¸²à¸", "à¸à¸µ", "à¹à¹à¹" },
new String[] { "<ALPHANUM>", "<ALPHANUM>", "<ALPHANUM>",
@@ -67,7 +67,7 @@ public class TestThaiAnalyzer extends Ba
/** @deprecated testing backwards behavior */
@Deprecated
public void testAnalyzer30() throws Exception {
- Assume.assumeTrue(ThaiWordFilter.DBBI_AVAILABLE);
+ assumeTrue("JRE does not support Thai dictionary-based BreakIterator", ThaiWordFilter.DBBI_AVAILABLE);
ThaiAnalyzer analyzer = new ThaiAnalyzer(Version.LUCENE_30);
assertAnalyzesTo(analyzer, "", new String[] {});
@@ -93,7 +93,7 @@ public class TestThaiAnalyzer extends Ba
* Test that position increments are adjusted correctly for stopwords.
*/
public void testPositionIncrements() throws Exception {
- Assume.assumeTrue(ThaiWordFilter.DBBI_AVAILABLE);
+ assumeTrue("JRE does not support Thai dictionary-based BreakIterator", ThaiWordFilter.DBBI_AVAILABLE);
ThaiAnalyzer analyzer = new ThaiAnalyzer(TEST_VERSION_CURRENT);
assertAnalyzesTo(new ThaiAnalyzer(TEST_VERSION_CURRENT), "à¸à¸²à¸£à¸à¸µà¹à¹à¸à¹à¸à¹à¸à¸ the à¹à¸ªà¸à¸à¸§à¹à¸²à¸à¸²à¸à¸à¸µ",
@@ -111,7 +111,7 @@ public class TestThaiAnalyzer extends Ba
}
public void testReusableTokenStream() throws Exception {
- Assume.assumeTrue(ThaiWordFilter.DBBI_AVAILABLE);
+ assumeTrue("JRE does not support Thai dictionary-based BreakIterator", ThaiWordFilter.DBBI_AVAILABLE);
ThaiAnalyzer analyzer = new ThaiAnalyzer(TEST_VERSION_CURRENT);
assertAnalyzesToReuse(analyzer, "", new String[] {});
@@ -129,6 +129,7 @@ public class TestThaiAnalyzer extends Ba
/** @deprecated, for version back compat */
@Deprecated
public void testReusableTokenStream30() throws Exception {
+ assumeTrue("JRE does not support Thai dictionary-based BreakIterator", ThaiWordFilter.DBBI_AVAILABLE);
ThaiAnalyzer analyzer = new ThaiAnalyzer(Version.LUCENE_30);
assertAnalyzesToReuse(analyzer, "", new String[] {});
Propchange: lucene/dev/branches/docvalues/solr/
------------------------------------------------------------------------------
--- svn:mergeinfo (original)
+++ svn:mergeinfo Wed Oct 20 12:44:28 2010
@@ -1,5 +1,6 @@
/lucene/dev/branches/branch_3x/solr:949730,957490*,961612,979161,980654,982195,987811,988512
/lucene/dev/branches/preflexfixes/solr:967125-979432
+/lucene/dev/trunk/solr:1021635-1024556,1025532-1025536
/lucene/java/branches/lucene_2_4/solr:748824
/lucene/java/branches/lucene_2_9/solr:817269-818600,825998,829134,829881,831036,896850,909334
/lucene/java/branches/lucene_2_9_back_compat_tests/solr:818601-821336
Modified: lucene/dev/branches/docvalues/solr/CHANGES.txt
URL: http://svn.apache.org/viewvc/lucene/dev/branches/docvalues/solr/CHANGES.txt?rev=1025539&r1=1025538&r2=1025539&view=diff
==============================================================================
--- lucene/dev/branches/docvalues/solr/CHANGES.txt (original)
+++ lucene/dev/branches/docvalues/solr/CHANGES.txt Wed Oct 20 12:44:28 2010
@@ -291,6 +291,10 @@ New Features
* SOLR-2010: Added ability to verify that spell checking collations have
actual results in the index. (James Dyer via gsingers)
+
+* SOLR-1873: SolrCloud - added shared/central config and core/shard management via zookeeper,
+ built-in load balancing, and infrastructure for future SolrCloud work.
+ (yonik, Mark Miller)
Optimizations
----------------------
@@ -519,6 +523,13 @@ Bug Fixes
* SOLR-2148: Highlighter doesn't support q.alt. (koji)
+* SOLR-2139: Wrong cast from string to float (Igor Rodionov via koji)
+
+* SOLR-2157: Suggester should return alpha-sorted results when onlyMorePopular=false (ab)
+
+* SOLR-1794: Dataimport of CLOB fields fails when getCharacterStream() is
+ defined in a superclass. (Gunnar Gauslaa Bergem via rmuir)
+
Other Changes
----------------------
Modified: lucene/dev/branches/docvalues/solr/build.xml
URL: http://svn.apache.org/viewvc/lucene/dev/branches/docvalues/solr/build.xml?rev=1025539&r1=1025538&r2=1025539&view=diff
==============================================================================
--- lucene/dev/branches/docvalues/solr/build.xml (original)
+++ lucene/dev/branches/docvalues/solr/build.xml Wed Oct 20 12:44:28 2010
@@ -435,7 +435,7 @@
<sysproperty key="tests.multiplier" value="${tests.multiplier}"/>
<sysproperty key="tests.iter" value="${tests.iter}"/>
<sysproperty key="tests.seed" value="${tests.seed}"/>
- <sysproperty key="jetty.insecurerandom" value="1"/>
+ <sysproperty key="jetty.testMode" value="1"/>
<sysproperty key="tempDir" file="@{tempDir}/@{threadNum}"/>
<sysproperty key="testmethod" value="${testmethod}"/>
<!-- TODO: why is this unconditionally set to "" above? disable for now
Modified: lucene/dev/branches/docvalues/solr/contrib/clustering/build.xml
URL: http://svn.apache.org/viewvc/lucene/dev/branches/docvalues/solr/contrib/clustering/build.xml?rev=1025539&r1=1025538&r2=1025539&view=diff
==============================================================================
--- lucene/dev/branches/docvalues/solr/contrib/clustering/build.xml (original)
+++ lucene/dev/branches/docvalues/solr/contrib/clustering/build.xml Wed Oct 20 12:44:28 2010
@@ -116,7 +116,7 @@
<sysproperty key="tests.multiplier" value="${tests.multiplier}"/>
<sysproperty key="tests.seed" value="${tests.seed}"/>
<sysproperty key="tests.iter" value="${tests.iter}"/>
- <sysproperty key="jetty.insecurerandom" value="1"/>
+ <sysproperty key="jetty.testMode" value="1"/>
<sysproperty key="tempDir" file="${junit.output.dir}"/>
<sysproperty key="testmethod" value="${testmethod}"/>
<jvmarg line="${args}"/>
Modified: lucene/dev/branches/docvalues/solr/contrib/clustering/src/test/java/org/apache/solr/handler/clustering/ClusteringComponentTest.java
URL: http://svn.apache.org/viewvc/lucene/dev/branches/docvalues/solr/contrib/clustering/src/test/java/org/apache/solr/handler/clustering/ClusteringComponentTest.java?rev=1025539&r1=1025538&r2=1025539&view=diff
==============================================================================
--- lucene/dev/branches/docvalues/solr/contrib/clustering/src/test/java/org/apache/solr/handler/clustering/ClusteringComponentTest.java (original)
+++ lucene/dev/branches/docvalues/solr/contrib/clustering/src/test/java/org/apache/solr/handler/clustering/ClusteringComponentTest.java Wed Oct 20 12:44:28 2010
@@ -24,6 +24,7 @@ import org.apache.solr.core.SolrCore;
import org.apache.solr.handler.component.QueryComponent;
import org.apache.solr.handler.component.SearchComponent;
import org.apache.solr.request.LocalSolrQueryRequest;
+import org.apache.solr.request.SolrQueryRequest;
import org.apache.solr.response.SolrQueryResponse;
import org.apache.solr.request.SolrRequestHandler;
import org.junit.Test;
@@ -54,12 +55,13 @@ public class ClusteringComponentTest ext
SolrQueryResponse rsp;
rsp = new SolrQueryResponse();
rsp.add("responseHeader", new SimpleOrderedMap());
- handler.handleRequest(new LocalSolrQueryRequest(core, params), rsp);
+ SolrQueryRequest req = new LocalSolrQueryRequest(core, params);
+ handler.handleRequest(req, rsp);
NamedList values = rsp.getValues();
Object clusters = values.get("clusters");
//System.out.println("Clusters: " + clusters);
assertTrue("clusters is null and it shouldn't be", clusters != null);
-
+ req.close();
params = new ModifiableSolrParams();
params.add(ClusteringComponent.COMPONENT_NAME, "true");
@@ -71,13 +73,13 @@ public class ClusteringComponentTest ext
rsp = new SolrQueryResponse();
rsp.add("responseHeader", new SimpleOrderedMap());
- handler.handleRequest(new LocalSolrQueryRequest(core, params), rsp);
+ req = new LocalSolrQueryRequest(core, params);
+ handler.handleRequest(req, rsp);
values = rsp.getValues();
clusters = values.get("clusters");
//System.out.println("Clusters: " + clusters);
assertTrue("clusters is null and it shouldn't be", clusters != null);
-
-
+ req.close();
}
}
Modified: lucene/dev/branches/docvalues/solr/contrib/dataimporthandler/build.xml
URL: http://svn.apache.org/viewvc/lucene/dev/branches/docvalues/solr/contrib/dataimporthandler/build.xml?rev=1025539&r1=1025538&r2=1025539&view=diff
==============================================================================
--- lucene/dev/branches/docvalues/solr/contrib/dataimporthandler/build.xml (original)
+++ lucene/dev/branches/docvalues/solr/contrib/dataimporthandler/build.xml Wed Oct 20 12:44:28 2010
@@ -168,7 +168,7 @@
<sysproperty key="tests.multiplier" value="${tests.multiplier}"/>
<sysproperty key="tests.iter" value="${tests.iter}"/>
<sysproperty key="tests.seed" value="${tests.seed}"/>
- <sysproperty key="jetty.insecurerandom" value="1"/>
+ <sysproperty key="jetty.testMode" value="1"/>
<sysproperty key="tempDir" file="${tempDir}"/>
<sysproperty key="testmethod" value="${testmethod}"/>
<jvmarg line="${args}"/>
@@ -226,7 +226,7 @@
<sysproperty key="tests.multiplier" value="${tests.multiplier}"/>
<sysproperty key="tests.iter" value="${tests.iter}"/>
<sysproperty key="tests.seed" value="${tests.seed}"/>
- <sysproperty key="jetty.insecurerandom" value="1"/>
+ <sysproperty key="jetty.testMode" value="1"/>
<sysproperty key="tempDir" file="${tempDir}"/>
<sysproperty key="testmethod" value="${testmethod}"/>
<jvmarg line="${args}"/>
Modified: lucene/dev/branches/docvalues/solr/contrib/dataimporthandler/src/extras/test/java/org/apache/solr/handler/dataimport/TestMailEntityProcessor.java
URL: http://svn.apache.org/viewvc/lucene/dev/branches/docvalues/solr/contrib/dataimporthandler/src/extras/test/java/org/apache/solr/handler/dataimport/TestMailEntityProcessor.java?rev=1025539&r1=1025538&r2=1025539&view=diff
==============================================================================
--- lucene/dev/branches/docvalues/solr/contrib/dataimporthandler/src/extras/test/java/org/apache/solr/handler/dataimport/TestMailEntityProcessor.java (original)
+++ lucene/dev/branches/docvalues/solr/contrib/dataimporthandler/src/extras/test/java/org/apache/solr/handler/dataimport/TestMailEntityProcessor.java Wed Oct 20 12:44:28 2010
@@ -16,9 +16,6 @@
*/
package org.apache.solr.handler.dataimport;
-import junit.framework.Assert;
-
-import org.apache.solr.SolrTestCaseJ4;
import org.apache.solr.common.SolrInputDocument;
import org.junit.Ignore;
import org.junit.Test;
@@ -47,7 +44,7 @@ import java.util.Map;
* @see org.apache.solr.handler.dataimport.MailEntityProcessor
* @since solr 1.4
*/
-public class TestMailEntityProcessor extends SolrTestCaseJ4 {
+public class TestMailEntityProcessor extends AbstractDataImportHandlerTestCase {
// Credentials
private static final String user = "user";
@@ -58,7 +55,7 @@ public class TestMailEntityProcessor ext
private static Map<String, String> paramMap = new HashMap<String, String>();
@Test
- @Ignore
+ @Ignore("Needs a Mock Mail Server to work")
public void testConnection() {
// also tests recurse = false and default settings
paramMap.put("folders", "top2");
@@ -72,11 +69,11 @@ public class TestMailEntityProcessor ext
rp.command = "full-import";
SolrWriterImpl swi = new SolrWriterImpl();
di.runCmd(rp, swi);
- Assert.assertEquals("top1 did not return 2 messages", swi.docs.size(), 2);
+ assertEquals("top1 did not return 2 messages", swi.docs.size(), 2);
}
@Test
- @Ignore
+ @Ignore("Needs a Mock Mail Server to work")
public void testRecursion() {
paramMap.put("folders", "top2");
paramMap.put("recurse", "true");
@@ -89,11 +86,11 @@ public class TestMailEntityProcessor ext
rp.command = "full-import";
SolrWriterImpl swi = new SolrWriterImpl();
di.runCmd(rp, swi);
- Assert.assertEquals("top2 and its children did not return 8 messages", swi.docs.size(), 8);
+ assertEquals("top2 and its children did not return 8 messages", swi.docs.size(), 8);
}
@Test
- @Ignore
+ @Ignore("Needs a Mock Mail Server to work")
public void testExclude() {
paramMap.put("folders", "top2");
paramMap.put("recurse", "true");
@@ -107,11 +104,11 @@ public class TestMailEntityProcessor ext
rp.command = "full-import";
SolrWriterImpl swi = new SolrWriterImpl();
di.runCmd(rp, swi);
- Assert.assertEquals("top2 and its direct children did not return 5 messages", swi.docs.size(), 5);
+ assertEquals("top2 and its direct children did not return 5 messages", swi.docs.size(), 5);
}
@Test
- @Ignore
+ @Ignore("Needs a Mock Mail Server to work")
public void testInclude() {
paramMap.put("folders", "top2");
paramMap.put("recurse", "true");
@@ -125,11 +122,11 @@ public class TestMailEntityProcessor ext
rp.command = "full-import";
SolrWriterImpl swi = new SolrWriterImpl();
di.runCmd(rp, swi);
- Assert.assertEquals("top2 and its direct children did not return 3 messages", swi.docs.size(), 3);
+ assertEquals("top2 and its direct children did not return 3 messages", swi.docs.size(), 3);
}
@Test
- @Ignore
+ @Ignore("Needs a Mock Mail Server to work")
public void testIncludeAndExclude() {
paramMap.put("folders", "top1,top2");
paramMap.put("recurse", "true");
@@ -144,11 +141,11 @@ public class TestMailEntityProcessor ext
rp.command = "full-import";
SolrWriterImpl swi = new SolrWriterImpl();
di.runCmd(rp, swi);
- Assert.assertEquals("top2 and its direct children did not return 3 messages", swi.docs.size(), 3);
+ assertEquals("top2 and its direct children did not return 3 messages", swi.docs.size(), 3);
}
@Test
- @Ignore
+ @Ignore("Needs a Mock Mail Server to work")
public void testFetchTimeSince() throws ParseException {
paramMap.put("folders", "top1/child11");
paramMap.put("recurse", "true");
@@ -162,7 +159,7 @@ public class TestMailEntityProcessor ext
rp.command = "full-import";
SolrWriterImpl swi = new SolrWriterImpl();
di.runCmd(rp, swi);
- Assert.assertEquals("top2 and its direct children did not return 3 messages", swi.docs.size(), 3);
+ assertEquals("top2 and its direct children did not return 3 messages", swi.docs.size(), 3);
}
private String getConfigFromMap(Map<String, String> params) {
Modified: lucene/dev/branches/docvalues/solr/contrib/dataimporthandler/src/extras/test/java/org/apache/solr/handler/dataimport/TestTikaEntityProcessor.java
URL: http://svn.apache.org/viewvc/lucene/dev/branches/docvalues/solr/contrib/dataimporthandler/src/extras/test/java/org/apache/solr/handler/dataimport/TestTikaEntityProcessor.java?rev=1025539&r1=1025538&r2=1025539&view=diff
==============================================================================
--- lucene/dev/branches/docvalues/solr/contrib/dataimporthandler/src/extras/test/java/org/apache/solr/handler/dataimport/TestTikaEntityProcessor.java (original)
+++ lucene/dev/branches/docvalues/solr/contrib/dataimporthandler/src/extras/test/java/org/apache/solr/handler/dataimport/TestTikaEntityProcessor.java Wed Oct 20 12:44:28 2010
@@ -16,31 +16,16 @@
*/
package org.apache.solr.handler.dataimport;
-import org.junit.After;
-import org.junit.Before;
+import org.junit.BeforeClass;
/**Testcase for TikaEntityProcessor
* @version $Id$
* @since solr 1.5
*/
public class TestTikaEntityProcessor extends AbstractDataImportHandlerTestCase {
-
- @Before
- public void setUp() throws Exception {
- super.setUp();
- }
-
- @After
- public void tearDown() throws Exception {
- super.tearDown();
- }
-
- public String getSchemaFile() {
- return "dataimport-schema-no-unique-key.xml";
- }
-
- public String getSolrConfigFile() {
- return "dataimport-solrconfig.xml";
+ @BeforeClass
+ public static void beforeClass() throws Exception {
+ initCore("dataimport-solrconfig.xml", "dataimport-schema-no-unique-key.xml");
}
public void testIndexingWithTikaEntityProcessor() throws Exception {
@@ -55,7 +40,7 @@ public class TestTikaEntityProcessor ext
" </entity>" +
" </document>" +
"</dataConfig>";
- super.runFullImport(conf);
+ runFullImport(conf);
assertQ(req("*:*"), "//*[@numFound='1']");
}
}
Modified: lucene/dev/branches/docvalues/solr/contrib/dataimporthandler/src/main/java/org/apache/solr/handler/dataimport/FieldReaderDataSource.java
URL: http://svn.apache.org/viewvc/lucene/dev/branches/docvalues/solr/contrib/dataimporthandler/src/main/java/org/apache/solr/handler/dataimport/FieldReaderDataSource.java?rev=1025539&r1=1025538&r2=1025539&view=diff
==============================================================================
--- lucene/dev/branches/docvalues/solr/contrib/dataimporthandler/src/main/java/org/apache/solr/handler/dataimport/FieldReaderDataSource.java (original)
+++ lucene/dev/branches/docvalues/solr/contrib/dataimporthandler/src/main/java/org/apache/solr/handler/dataimport/FieldReaderDataSource.java Wed Oct 20 12:44:28 2010
@@ -22,11 +22,9 @@ import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import java.io.*;
-import java.lang.reflect.InvocationTargetException;
-import java.lang.reflect.Method;
-import java.lang.reflect.Modifier;
import java.sql.Blob;
import java.sql.Clob;
+import java.sql.SQLException;
import java.util.Properties;
/**
@@ -83,16 +81,7 @@ public class FieldReaderDataSource exten
} else if (o instanceof Blob) {
Blob blob = (Blob) o;
try {
- //Most of the JDBC drivers have getBinaryStream defined as public
- // so let us just check it
- Method m = blob.getClass().getDeclaredMethod("getBinaryStream");
- if (Modifier.isPublic(m.getModifiers())) {
- return getReader(m, blob);
- } else {
- // force invoke
- m.setAccessible(true);
- return getReader(m, blob);
- }
+ return getReader(blob);
} catch (Exception e) {
LOG.info("Unable to get data from BLOB");
return null;
@@ -106,27 +95,19 @@ public class FieldReaderDataSource exten
static Reader readCharStream(Clob clob) {
try {
- Method m = clob.getClass().getDeclaredMethod("getCharacterStream");
- if (Modifier.isPublic(m.getModifiers())) {
- return (Reader) m.invoke(clob);
- } else {
- // force invoke
- m.setAccessible(true);
- return (Reader) m.invoke(clob);
- }
+ return clob.getCharacterStream();
} catch (Exception e) {
wrapAndThrow(SEVERE, e,"Unable to get reader from clob");
return null;//unreachable
}
}
- private Reader getReader(Method m, Blob blob)
- throws IllegalAccessException, InvocationTargetException, UnsupportedEncodingException {
- InputStream is = (InputStream) m.invoke(blob);
+ private Reader getReader(Blob blob)
+ throws SQLException, UnsupportedEncodingException {
if (encoding == null) {
- return (new InputStreamReader(is));
+ return (new InputStreamReader(blob.getBinaryStream()));
} else {
- return (new InputStreamReader(is, encoding));
+ return (new InputStreamReader(blob.getBinaryStream(), encoding));
}
}
Modified: lucene/dev/branches/docvalues/solr/contrib/dataimporthandler/src/test/java/org/apache/solr/handler/dataimport/AbstractDataImportHandlerTestCase.java
URL: http://svn.apache.org/viewvc/lucene/dev/branches/docvalues/solr/contrib/dataimporthandler/src/test/java/org/apache/solr/handler/dataimport/AbstractDataImportHandlerTestCase.java?rev=1025539&r1=1025538&r2=1025539&view=diff
==============================================================================
--- lucene/dev/branches/docvalues/solr/contrib/dataimporthandler/src/test/java/org/apache/solr/handler/dataimport/AbstractDataImportHandlerTestCase.java (original)
+++ lucene/dev/branches/docvalues/solr/contrib/dataimporthandler/src/test/java/org/apache/solr/handler/dataimport/AbstractDataImportHandlerTestCase.java Wed Oct 20 12:44:28 2010
@@ -16,11 +16,14 @@
*/
package org.apache.solr.handler.dataimport;
+import org.apache.solr.SolrTestCaseJ4;
import org.apache.solr.core.SolrCore;
import org.apache.solr.request.LocalSolrQueryRequest;
-import org.apache.solr.util.AbstractSolrTestCase;
import org.apache.solr.common.util.NamedList;
+import org.junit.After;
+import org.junit.Before;
+import java.io.FileOutputStream;
import java.io.IOException;
import java.io.File;
import java.util.HashMap;
@@ -38,14 +41,16 @@ import java.util.Map;
* @since solr 1.3
*/
public abstract class AbstractDataImportHandlerTestCase extends
- AbstractSolrTestCase {
+ SolrTestCaseJ4 {
@Override
+ @Before
public void setUp() throws Exception {
super.setUp();
}
@Override
+ @After
public void tearDown() throws Exception {
// remove dataimport.properties
File f = new File("solr/conf/dataimport.properties");
@@ -140,6 +145,29 @@ public abstract class AbstractDataImport
return result;
}
+ public static File createFile(File tmpdir, String name, byte[] content,
+ boolean changeModifiedTime) throws IOException {
+ File file = new File(tmpdir.getAbsolutePath() + File.separator + name);
+ file.deleteOnExit();
+ FileOutputStream f = new FileOutputStream(file);
+ f.write(content);
+ f.close();
+ if (changeModifiedTime)
+ file.setLastModified(System.currentTimeMillis() - 3600000);
+ return file;
+ }
+
+ public static Map<String, String> getField(String col, String type,
+ String re, String srcCol, String splitBy) {
+ HashMap<String, String> vals = new HashMap<String, String>();
+ vals.put("column", col);
+ vals.put("type", type);
+ vals.put("regex", re);
+ vals.put("sourceColName", srcCol);
+ vals.put("splitBy", splitBy);
+ return vals;
+ }
+
static class TestContext extends Context {
private final Map<String, String> entityAttrs;
private final Context delegate;
Modified: lucene/dev/branches/docvalues/solr/contrib/dataimporthandler/src/test/java/org/apache/solr/handler/dataimport/TestCachedSqlEntityProcessor.java
URL: http://svn.apache.org/viewvc/lucene/dev/branches/docvalues/solr/contrib/dataimporthandler/src/test/java/org/apache/solr/handler/dataimport/TestCachedSqlEntityProcessor.java?rev=1025539&r1=1025538&r2=1025539&view=diff
==============================================================================
--- lucene/dev/branches/docvalues/solr/contrib/dataimporthandler/src/test/java/org/apache/solr/handler/dataimport/TestCachedSqlEntityProcessor.java (original)
+++ lucene/dev/branches/docvalues/solr/contrib/dataimporthandler/src/test/java/org/apache/solr/handler/dataimport/TestCachedSqlEntityProcessor.java Wed Oct 20 12:44:28 2010
@@ -16,8 +16,6 @@
*/
package org.apache.solr.handler.dataimport;
-import org.apache.solr.SolrTestCaseJ4;
-import org.junit.Assert;
import org.junit.Test;
import java.util.ArrayList;
@@ -33,25 +31,23 @@ import java.util.Map;
* @version $Id$
* @since solr 1.3
*/
-public class TestCachedSqlEntityProcessor extends SolrTestCaseJ4 {
+public class TestCachedSqlEntityProcessor extends AbstractDataImportHandlerTestCase {
@Test
public void withoutWhereClause() {
List fields = new ArrayList();
- fields.add(AbstractDataImportHandlerTestCase.createMap("column", "id"));
- fields.add(AbstractDataImportHandlerTestCase.createMap("column", "desc"));
+ fields.add(createMap("column", "id"));
+ fields.add(createMap("column", "desc"));
String q = "select * from x where id=${x.id}";
- Map<String, String> entityAttrs = AbstractDataImportHandlerTestCase.createMap(
- "query", q);
+ Map<String, String> entityAttrs = createMap("query", q);
MockDataSource ds = new MockDataSource();
VariableResolverImpl vr = new VariableResolverImpl();
- vr.addNamespace("x", AbstractDataImportHandlerTestCase.createMap("id", 1));
- Context context = AbstractDataImportHandlerTestCase.getContext(null, vr, ds, Context.FULL_DUMP, fields, entityAttrs);
+ vr.addNamespace("x", createMap("id", 1));
+ Context context = getContext(null, vr, ds, Context.FULL_DUMP, fields, entityAttrs);
List<Map<String, Object>> rows = new ArrayList<Map<String, Object>>();
- rows.add(AbstractDataImportHandlerTestCase.createMap("id", 1, "desc", "one"));
- rows.add(AbstractDataImportHandlerTestCase.createMap("id", 1, "desc",
- "another one"));
+ rows.add(createMap("id", 1, "desc", "one"));
+ rows.add(createMap("id", 1, "desc", "another one"));
MockDataSource.setIterator(vr.replaceTokens(q), rows.iterator());
EntityProcessor csep = new EntityProcessorWrapper( new CachedSqlEntityProcessor(), null);
csep.init(context);
@@ -62,7 +58,7 @@ public class TestCachedSqlEntityProcesso
break;
rows.add(r);
}
- Assert.assertEquals(2, rows.size());
+ assertEquals(2, rows.size());
ds.close();
csep.init(context);
rows = new ArrayList<Map<String, Object>>();
@@ -72,28 +68,27 @@ public class TestCachedSqlEntityProcesso
break;
rows.add(r);
}
- Assert.assertEquals(2, rows.size());
- Assert.assertEquals(2, rows.get(0).size());
- Assert.assertEquals(2, rows.get(1).size());
+ assertEquals(2, rows.size());
+ assertEquals(2, rows.get(0).size());
+ assertEquals(2, rows.get(1).size());
}
@Test
public void withoutWhereClauseWithTransformers() {
List fields = new ArrayList();
- fields.add(AbstractDataImportHandlerTestCase.createMap("column", "id"));
- fields.add(AbstractDataImportHandlerTestCase.createMap("column", "desc"));
+ fields.add(createMap("column", "id"));
+ fields.add(createMap("column", "desc"));
String q = "select * from x where id=${x.id}";
- Map<String, String> entityAttrs = AbstractDataImportHandlerTestCase.createMap(
+ Map<String, String> entityAttrs = createMap(
"query", q, "transformer", UppercaseTransformer.class.getName());
MockDataSource ds = new MockDataSource();
VariableResolverImpl vr = new VariableResolverImpl();
- vr.addNamespace("x", AbstractDataImportHandlerTestCase.createMap("id", 1));
- Context context = AbstractDataImportHandlerTestCase.getContext(null, vr, ds, Context.FULL_DUMP, fields, entityAttrs);
+ vr.addNamespace("x", createMap("id", 1));
+ Context context = getContext(null, vr, ds, Context.FULL_DUMP, fields, entityAttrs);
List<Map<String, Object>> rows = new ArrayList<Map<String, Object>>();
- rows.add(AbstractDataImportHandlerTestCase.createMap("id", 1, "desc", "one"));
- rows.add(AbstractDataImportHandlerTestCase.createMap("id", 1, "desc",
- "another one"));
+ rows.add(createMap("id", 1, "desc", "one"));
+ rows.add(createMap("id", 1, "desc", "another one"));
MockDataSource.setIterator(vr.replaceTokens(q), rows.iterator());
EntityProcessor csep = new EntityProcessorWrapper( new CachedSqlEntityProcessor(), null);
csep.init(context);
@@ -104,7 +99,7 @@ public class TestCachedSqlEntityProcesso
break;
rows.add(r);
}
- Assert.assertEquals(2, rows.size());
+ assertEquals(2, rows.size());
ds.close();
csep.init(context);
rows = new ArrayList<Map<String, Object>>();
@@ -113,30 +108,29 @@ public class TestCachedSqlEntityProcesso
if (r == null)
break;
rows.add(r);
- Assert.assertEquals(r.get("desc").toString().toUpperCase(Locale.ENGLISH), r.get("desc"));
+ assertEquals(r.get("desc").toString().toUpperCase(Locale.ENGLISH), r.get("desc"));
}
- Assert.assertEquals(2, rows.size());
- Assert.assertEquals(2, rows.get(0).size());
- Assert.assertEquals(2, rows.get(1).size());
+ assertEquals(2, rows.size());
+ assertEquals(2, rows.get(0).size());
+ assertEquals(2, rows.get(1).size());
}
@Test
public void withoutWhereClauseWithMultiRowTransformer() {
List fields = new ArrayList();
- fields.add(AbstractDataImportHandlerTestCase.createMap("column", "id"));
- fields.add(AbstractDataImportHandlerTestCase.createMap("column", "desc"));
+ fields.add(createMap("column", "id"));
+ fields.add(createMap("column", "desc"));
String q = "select * from x where id=${x.id}";
- Map<String, String> entityAttrs = AbstractDataImportHandlerTestCase.createMap(
+ Map<String, String> entityAttrs = createMap(
"query", q, "transformer", DoubleTransformer.class.getName());
MockDataSource ds = new MockDataSource();
VariableResolverImpl vr = new VariableResolverImpl();
- vr.addNamespace("x", AbstractDataImportHandlerTestCase.createMap("id", 1));
- Context context = AbstractDataImportHandlerTestCase.getContext(null, vr, ds, Context.FULL_DUMP, fields, entityAttrs);
+ vr.addNamespace("x", createMap("id", 1));
+ Context context = getContext(null, vr, ds, Context.FULL_DUMP, fields, entityAttrs);
List<Map<String, Object>> rows = new ArrayList<Map<String, Object>>();
- rows.add(AbstractDataImportHandlerTestCase.createMap("id", 1, "desc", "one"));
- rows.add(AbstractDataImportHandlerTestCase.createMap("id", 1, "desc",
- "another one"));
+ rows.add(createMap("id", 1, "desc", "one"));
+ rows.add(createMap("id", 1, "desc", "another one"));
MockDataSource.setIterator(vr.replaceTokens(q), rows.iterator());
EntityProcessor csep = new EntityProcessorWrapper( new CachedSqlEntityProcessor(), null);
csep.init(context);
@@ -147,7 +141,7 @@ public class TestCachedSqlEntityProcesso
break;
rows.add(r);
}
- Assert.assertEquals(4, rows.size());
+ assertEquals(4, rows.size());
ds.close();
csep.init(context);
rows = new ArrayList<Map<String, Object>>();
@@ -157,9 +151,9 @@ public class TestCachedSqlEntityProcesso
break;
rows.add(r);
}
- Assert.assertEquals(4, rows.size());
- Assert.assertEquals(2, rows.get(0).size());
- Assert.assertEquals(2, rows.get(1).size());
+ assertEquals(4, rows.size());
+ assertEquals(2, rows.get(0).size());
+ assertEquals(2, rows.get(1).size());
}
public static class DoubleTransformer extends Transformer {
@@ -190,43 +184,42 @@ public class TestCachedSqlEntityProcesso
@Test
public void withWhereClause() {
List fields = new ArrayList();
- fields.add(AbstractDataImportHandlerTestCase.createMap("column", "id"));
- fields.add(AbstractDataImportHandlerTestCase.createMap("column", "desc"));
+ fields.add(createMap("column", "id"));
+ fields.add(createMap("column", "desc"));
String q = "select * from x";
- Map<String, String> entityAttrs = AbstractDataImportHandlerTestCase.createMap(
+ Map<String, String> entityAttrs = createMap(
"query", q, EntityProcessorBase.CACHE_KEY,"id", EntityProcessorBase.CACHE_LOOKUP ,"x.id");
MockDataSource ds = new MockDataSource();
VariableResolverImpl vr = new VariableResolverImpl();
- Map xNamespace = AbstractDataImportHandlerTestCase.createMap("id", 0);
+ Map xNamespace = createMap("id", 0);
vr.addNamespace("x", xNamespace);
- Context context = AbstractDataImportHandlerTestCase.getContext(null, vr, ds, Context.FULL_DUMP, fields, entityAttrs);
+ Context context = getContext(null, vr, ds, Context.FULL_DUMP, fields, entityAttrs);
doWhereTest(q, context, ds, xNamespace);
}
@Test
public void withKeyAndLookup() {
List fields = new ArrayList();
- fields.add(AbstractDataImportHandlerTestCase.createMap("column", "id"));
- fields.add(AbstractDataImportHandlerTestCase.createMap("column", "desc"));
+ fields.add(createMap("column", "id"));
+ fields.add(createMap("column", "desc"));
String q = "select * from x";
- Map<String, String> entityAttrs = AbstractDataImportHandlerTestCase.createMap("query", q, "where", "id=x.id");
+ Map<String, String> entityAttrs = createMap("query", q, "where", "id=x.id");
MockDataSource ds = new MockDataSource();
VariableResolverImpl vr = new VariableResolverImpl();
- Map xNamespace = AbstractDataImportHandlerTestCase.createMap("id", 0);
+ Map xNamespace = createMap("id", 0);
vr.addNamespace("x", xNamespace);
- Context context = AbstractDataImportHandlerTestCase.getContext(null, vr, ds, Context.FULL_DUMP, fields, entityAttrs);
+ Context context = getContext(null, vr, ds, Context.FULL_DUMP, fields, entityAttrs);
doWhereTest(q, context, ds, xNamespace);
}
private void doWhereTest(String q, Context context, MockDataSource ds, Map xNamespace) {
List<Map<String, Object>> rows = new ArrayList<Map<String, Object>>();
- rows.add(AbstractDataImportHandlerTestCase.createMap("id", 1, "desc", "one"));
- rows.add(AbstractDataImportHandlerTestCase.createMap("id", 2, "desc", "two"));
- rows.add(AbstractDataImportHandlerTestCase.createMap("id", 2, "desc",
- "another two"));
- rows.add(AbstractDataImportHandlerTestCase.createMap("id", 3, "desc", "three"));
- rows.add(AbstractDataImportHandlerTestCase.createMap("id", 3, "desc", "another three"));
- rows.add(AbstractDataImportHandlerTestCase.createMap("id", 3, "desc", "another another three"));
+ rows.add(createMap("id", 1, "desc", "one"));
+ rows.add(createMap("id", 2, "desc", "two"));
+ rows.add(createMap("id", 2, "desc", "another two"));
+ rows.add(createMap("id", 3, "desc", "three"));
+ rows.add(createMap("id", 3, "desc", "another three"));
+ rows.add(createMap("id", 3, "desc", "another another three"));
MockDataSource.setIterator(q, rows.iterator());
EntityProcessor csep = new EntityProcessorWrapper(new CachedSqlEntityProcessor(), null);
csep.init(context);
@@ -237,7 +230,7 @@ public class TestCachedSqlEntityProcesso
break;
rows.add(r);
}
- Assert.assertEquals(0, rows.size());
+ assertEquals(0, rows.size());
ds.close();
csep.init(context);
@@ -249,7 +242,7 @@ public class TestCachedSqlEntityProcesso
break;
rows.add(r);
}
- Assert.assertEquals(2, rows.size());
+ assertEquals(2, rows.size());
csep.init(context);
rows = new ArrayList<Map<String, Object>>();
@@ -260,6 +253,6 @@ public class TestCachedSqlEntityProcesso
break;
rows.add(r);
}
- Assert.assertEquals(3, rows.size());
+ assertEquals(3, rows.size());
}
}
Modified: lucene/dev/branches/docvalues/solr/contrib/dataimporthandler/src/test/java/org/apache/solr/handler/dataimport/TestClobTransformer.java
URL: http://svn.apache.org/viewvc/lucene/dev/branches/docvalues/solr/contrib/dataimporthandler/src/test/java/org/apache/solr/handler/dataimport/TestClobTransformer.java?rev=1025539&r1=1025538&r2=1025539&view=diff
==============================================================================
--- lucene/dev/branches/docvalues/solr/contrib/dataimporthandler/src/test/java/org/apache/solr/handler/dataimport/TestClobTransformer.java (original)
+++ lucene/dev/branches/docvalues/solr/contrib/dataimporthandler/src/test/java/org/apache/solr/handler/dataimport/TestClobTransformer.java Wed Oct 20 12:44:28 2010
@@ -16,9 +16,6 @@
*/
package org.apache.solr.handler.dataimport;
-import junit.framework.Assert;
-
-import org.apache.solr.SolrTestCaseJ4;
import org.junit.Test;
import java.io.StringReader;
@@ -35,7 +32,7 @@ import java.util.*;
* @see org.apache.solr.handler.dataimport.ClobTransformer
* @since solr 1.4
*/
-public class TestClobTransformer extends SolrTestCaseJ4 {
+public class TestClobTransformer extends AbstractDataImportHandlerTestCase {
@Test
public void simple() throws Exception {
List<Map<String, String>> flds = new ArrayList<Map<String, String>>();
@@ -45,7 +42,7 @@ public class TestClobTransformer extends
f.put(ClobTransformer.CLOB, "true");
f.put(DataImporter.NAME, "description");
flds.add(f);
- Context ctx = AbstractDataImportHandlerTestCase.getContext(null, new VariableResolverImpl(), null, Context.FULL_DUMP, flds, Collections.EMPTY_MAP);
+ Context ctx = getContext(null, new VariableResolverImpl(), null, Context.FULL_DUMP, flds, Collections.EMPTY_MAP);
Transformer t = new ClobTransformer();
Map<String, Object> row = new HashMap<String, Object>();
Clob clob = (Clob) Proxy.newProxyInstance(this.getClass().getClassLoader(), new Class[]{Clob.class}, new InvocationHandler() {
@@ -59,6 +56,6 @@ public class TestClobTransformer extends
row.put("dsc", clob);
t.transformRow(row, ctx);
- Assert.assertEquals("hello!", row.get("dsc"));
+ assertEquals("hello!", row.get("dsc"));
}
}
Modified: lucene/dev/branches/docvalues/solr/contrib/dataimporthandler/src/test/java/org/apache/solr/handler/dataimport/TestContentStreamDataSource.java
URL: http://svn.apache.org/viewvc/lucene/dev/branches/docvalues/solr/contrib/dataimporthandler/src/test/java/org/apache/solr/handler/dataimport/TestContentStreamDataSource.java?rev=1025539&r1=1025538&r2=1025539&view=diff
==============================================================================
--- lucene/dev/branches/docvalues/solr/contrib/dataimporthandler/src/test/java/org/apache/solr/handler/dataimport/TestContentStreamDataSource.java (original)
+++ lucene/dev/branches/docvalues/solr/contrib/dataimporthandler/src/test/java/org/apache/solr/handler/dataimport/TestContentStreamDataSource.java Wed Oct 20 12:44:28 2010
@@ -16,9 +16,7 @@
*/
package org.apache.solr.handler.dataimport;
-import junit.framework.TestCase;
import org.apache.commons.io.FileUtils;
-import org.apache.solr.SolrTestCaseJ4;
import org.apache.solr.client.solrj.embedded.JettySolrRunner;
import org.apache.solr.client.solrj.impl.CommonsHttpSolrServer;
import org.apache.solr.client.solrj.request.DirectXmlRequest;
@@ -26,11 +24,10 @@ import org.apache.solr.client.solrj.resp
import org.apache.solr.common.SolrDocument;
import org.apache.solr.common.SolrDocumentList;
import org.apache.solr.common.params.ModifiableSolrParams;
-import org.apache.solr.util.AbstractSolrTestCase;
+
import org.junit.After;
import org.junit.Before;
import org.junit.Test;
-import static org.junit.Assert.*;
import java.io.File;
import java.util.List;
@@ -41,7 +38,7 @@ import java.util.List;
* @version $Id$
* @since solr 1.4
*/
-public class TestContentStreamDataSource extends SolrTestCaseJ4 {
+public class TestContentStreamDataSource extends AbstractDataImportHandlerTestCase {
private static final String CONF_DIR = "." + File.separator + "solr" + File.separator + "conf" + File.separator;
SolrInstance instance = null;
JettySolrRunner jetty;
@@ -119,7 +116,7 @@ public class TestContentStreamDataSource
public void setUp() throws Exception {
- File home = new File(SolrTestCaseJ4.TEMP_DIR,
+ File home = new File(TEMP_DIR,
getClass().getName() + "-" + System.currentTimeMillis());
@@ -141,7 +138,7 @@ public class TestContentStreamDataSource
}
public void tearDown() throws Exception {
- AbstractSolrTestCase.recurseDelete(homeDir);
+ recurseDelete(homeDir);
}
}
Modified: lucene/dev/branches/docvalues/solr/contrib/dataimporthandler/src/test/java/org/apache/solr/handler/dataimport/TestDataConfig.java
URL: http://svn.apache.org/viewvc/lucene/dev/branches/docvalues/solr/contrib/dataimporthandler/src/test/java/org/apache/solr/handler/dataimport/TestDataConfig.java?rev=1025539&r1=1025538&r2=1025539&view=diff
==============================================================================
--- lucene/dev/branches/docvalues/solr/contrib/dataimporthandler/src/test/java/org/apache/solr/handler/dataimport/TestDataConfig.java (original)
+++ lucene/dev/branches/docvalues/solr/contrib/dataimporthandler/src/test/java/org/apache/solr/handler/dataimport/TestDataConfig.java Wed Oct 20 12:44:28 2010
@@ -16,7 +16,7 @@
*/
package org.apache.solr.handler.dataimport;
-import junit.framework.Assert;
+import org.junit.BeforeClass;
import org.junit.Test;
import org.w3c.dom.Document;
@@ -34,25 +34,9 @@ import java.util.List;
* @since solr 1.3
*/
public class TestDataConfig extends AbstractDataImportHandlerTestCase {
-
- @Override
- public void setUp() throws Exception {
- super.setUp();
- }
-
- @Override
- public void tearDown() throws Exception {
- super.tearDown();
- }
-
- @Override
- public String getSchemaFile() {
- return "dataimport-schema.xml";
- }
-
- @Override
- public String getSolrConfigFile() {
- return "dataimport-nodatasource-solrconfig.xml";
+ @BeforeClass
+ public static void beforeClass() throws Exception {
+ initCore("dataimport-nodatasource-solrconfig.xml", "dataimport-schema.xml");
}
@Test
@@ -62,20 +46,20 @@ public class TestDataConfig extends Abst
rows.add(createMap("id", "1", "desc", "one"));
MockDataSource.setIterator("select * from x", rows.iterator());
- super.runFullImport(loadDataConfig("data-config-with-datasource.xml"));
+ runFullImport(loadDataConfig("data-config-with-datasource.xml"));
assertQ(req("id:1"), "//*[@numFound='1']");
}
@Test
- public void basic() throws Exception {
+ public void testBasic() throws Exception {
javax.xml.parsers.DocumentBuilder builder = DocumentBuilderFactory
.newInstance().newDocumentBuilder();
Document doc = builder.parse(new ByteArrayInputStream(xml.getBytes()));
DataConfig dc = new DataConfig();
dc.readFromXml(doc.getDocumentElement());
- Assert.assertEquals("atrimlisting", dc.document.entities.get(0).name);
+ assertEquals("atrimlisting", dc.document.entities.get(0).name);
}
private static final String xml = "<dataConfig>\n"
Modified: lucene/dev/branches/docvalues/solr/contrib/dataimporthandler/src/test/java/org/apache/solr/handler/dataimport/TestDateFormatTransformer.java
URL: http://svn.apache.org/viewvc/lucene/dev/branches/docvalues/solr/contrib/dataimporthandler/src/test/java/org/apache/solr/handler/dataimport/TestDateFormatTransformer.java?rev=1025539&r1=1025538&r2=1025539&view=diff
==============================================================================
--- lucene/dev/branches/docvalues/solr/contrib/dataimporthandler/src/test/java/org/apache/solr/handler/dataimport/TestDateFormatTransformer.java (original)
+++ lucene/dev/branches/docvalues/solr/contrib/dataimporthandler/src/test/java/org/apache/solr/handler/dataimport/TestDateFormatTransformer.java Wed Oct 20 12:44:28 2010
@@ -16,8 +16,6 @@
*/
package org.apache.solr.handler.dataimport;
-import org.apache.solr.SolrTestCaseJ4;
-import org.junit.Assert;
import org.junit.Test;
import java.text.SimpleDateFormat;
@@ -31,40 +29,37 @@ import java.util.*;
* @version $Id$
* @since solr 1.3
*/
-public class TestDateFormatTransformer extends SolrTestCaseJ4 {
+public class TestDateFormatTransformer extends AbstractDataImportHandlerTestCase {
@Test
@SuppressWarnings("unchecked")
public void testTransformRow_SingleRow() throws Exception {
List fields = new ArrayList();
- fields.add(AbstractDataImportHandlerTestCase.createMap(DataImporter.COLUMN,
- "lastModified"));
- fields.add(AbstractDataImportHandlerTestCase.createMap(DataImporter.COLUMN,
+ fields.add(createMap(DataImporter.COLUMN, "lastModified"));
+ fields.add(createMap(DataImporter.COLUMN,
"dateAdded", RegexTransformer.SRC_COL_NAME, "lastModified",
DateFormatTransformer.DATE_TIME_FMT, "MM/dd/yyyy"));
SimpleDateFormat format = new SimpleDateFormat("MM/dd/yyyy");
Date now = format.parse(format.format(new Date()));
- Map row = AbstractDataImportHandlerTestCase.createMap("lastModified", format
- .format(now));
+ Map row = createMap("lastModified", format.format(now));
VariableResolverImpl resolver = new VariableResolverImpl();
resolver.addNamespace("e", row);
- Context context = AbstractDataImportHandlerTestCase.getContext(null, resolver,
+ Context context = getContext(null, resolver,
null, Context.FULL_DUMP, fields, null);
new DateFormatTransformer().transformRow(row, context);
- Assert.assertEquals(now, row.get("dateAdded"));
+ assertEquals(now, row.get("dateAdded"));
}
@Test
@SuppressWarnings("unchecked")
public void testTransformRow_MultipleRows() throws Exception {
List fields = new ArrayList();
- fields.add(AbstractDataImportHandlerTestCase.createMap(DataImporter.COLUMN,
- "lastModified"));
- fields.add(AbstractDataImportHandlerTestCase.createMap(DataImporter.COLUMN,
+ fields.add(createMap(DataImporter.COLUMN, "lastModified"));
+ fields.add(createMap(DataImporter.COLUMN,
"dateAdded", RegexTransformer.SRC_COL_NAME, "lastModified",
DateFormatTransformer.DATE_TIME_FMT, "MM/dd/yyyy hh:mm:ss.SSS"));
@@ -81,13 +76,13 @@ public class TestDateFormatTransformer e
VariableResolverImpl resolver = new VariableResolverImpl();
resolver.addNamespace("e", row);
- Context context = AbstractDataImportHandlerTestCase.getContext(null, resolver,
+ Context context = getContext(null, resolver,
null, Context.FULL_DUMP, fields, null);
new DateFormatTransformer().transformRow(row, context);
List output = new ArrayList();
output.add(now1);
output.add(now2);
- Assert.assertEquals(output, row.get("dateAdded"));
+ assertEquals(output, row.get("dateAdded"));
}
}
Modified: lucene/dev/branches/docvalues/solr/contrib/dataimporthandler/src/test/java/org/apache/solr/handler/dataimport/TestDocBuilder.java
URL: http://svn.apache.org/viewvc/lucene/dev/branches/docvalues/solr/contrib/dataimporthandler/src/test/java/org/apache/solr/handler/dataimport/TestDocBuilder.java?rev=1025539&r1=1025538&r2=1025539&view=diff
==============================================================================
--- lucene/dev/branches/docvalues/solr/contrib/dataimporthandler/src/test/java/org/apache/solr/handler/dataimport/TestDocBuilder.java (original)
+++ lucene/dev/branches/docvalues/solr/contrib/dataimporthandler/src/test/java/org/apache/solr/handler/dataimport/TestDocBuilder.java Wed Oct 20 12:44:28 2010
@@ -16,10 +16,9 @@
*/
package org.apache.solr.handler.dataimport;
-import org.apache.solr.SolrTestCaseJ4;
import org.apache.solr.common.SolrInputDocument;
-import static org.apache.solr.handler.dataimport.AbstractDataImportHandlerTestCase.createMap;
-import org.junit.Assert;
+
+import org.junit.After;
import org.junit.Test;
import java.util.*;
@@ -32,183 +31,155 @@ import java.util.*;
* @version $Id$
* @since solr 1.3
*/
-public class TestDocBuilder extends SolrTestCaseJ4 {
+public class TestDocBuilder extends AbstractDataImportHandlerTestCase {
+ @After
+ public void tearDown() throws Exception {
+ MockDataSource.clearCache();
+ super.tearDown();
+ }
+
@Test
public void loadClass() throws Exception {
Class clz = DocBuilder.loadClass("RegexTransformer", null);
- Assert.assertNotNull(clz);
+ assertNotNull(clz);
}
@Test
public void singleEntityNoRows() {
- try {
- DataImporter di = new DataImporter();
- di.loadAndInit(dc_singleEntity);
- DataConfig cfg = di.getConfig();
- DataConfig.Entity ent = cfg.document.entities.get(0);
- MockDataSource.setIterator("select * from x", new ArrayList().iterator());
- ent.dataSrc = new MockDataSource();
- ent.isDocRoot = true;
- DataImporter.RequestParams rp = new DataImporter.RequestParams();
- rp.command = "full-import";
- SolrWriterImpl swi = new SolrWriterImpl();
- di.runCmd(rp, swi);
- Assert.assertEquals(Boolean.TRUE, swi.deleteAllCalled);
- Assert.assertEquals(Boolean.TRUE, swi.commitCalled);
- Assert.assertEquals(0, swi.docs.size());
- Assert.assertEquals(1, di.getDocBuilder().importStatistics.queryCount
- .get());
- Assert
- .assertEquals(0, di.getDocBuilder().importStatistics.docCount.get());
- Assert.assertEquals(0, di.getDocBuilder().importStatistics.rowsCount
- .get());
- } finally {
- MockDataSource.clearCache();
- }
+ DataImporter di = new DataImporter();
+ di.loadAndInit(dc_singleEntity);
+ DataConfig cfg = di.getConfig();
+ DataConfig.Entity ent = cfg.document.entities.get(0);
+ MockDataSource.setIterator("select * from x", new ArrayList().iterator());
+ ent.dataSrc = new MockDataSource();
+ ent.isDocRoot = true;
+ DataImporter.RequestParams rp = new DataImporter.RequestParams();
+ rp.command = "full-import";
+ SolrWriterImpl swi = new SolrWriterImpl();
+ di.runCmd(rp, swi);
+ assertEquals(Boolean.TRUE, swi.deleteAllCalled);
+ assertEquals(Boolean.TRUE, swi.commitCalled);
+ assertEquals(0, swi.docs.size());
+ assertEquals(1, di.getDocBuilder().importStatistics.queryCount.get());
+ assertEquals(0, di.getDocBuilder().importStatistics.docCount.get());
+ assertEquals(0, di.getDocBuilder().importStatistics.rowsCount.get());
}
@Test
public void testDeltaImportNoRows_MustNotCommit() {
- try {
- DataImporter di = new DataImporter();
- di.loadAndInit(dc_deltaConfig);
- DataConfig cfg = di.getConfig();
- DataConfig.Entity ent = cfg.document.entities.get(0);
- MockDataSource.setIterator("select * from x", new ArrayList().iterator());
- MockDataSource.setIterator("select id from x", new ArrayList().iterator());
- ent.dataSrc = new MockDataSource();
- ent.isDocRoot = true;
- DataImporter.RequestParams rp = new DataImporter.RequestParams(createMap("command", "delta-import"));
- SolrWriterImpl swi = new SolrWriterImpl();
- di.runCmd(rp, swi);
- Assert.assertEquals(Boolean.FALSE, swi.deleteAllCalled);
- Assert.assertEquals(Boolean.FALSE, swi.commitCalled);
- Assert.assertEquals(0, swi.docs.size());
- Assert.assertEquals(1, di.getDocBuilder().importStatistics.queryCount.get());
- Assert.assertEquals(0, di.getDocBuilder().importStatistics.docCount.get());
- Assert.assertEquals(0, di.getDocBuilder().importStatistics.rowsCount.get());
- } finally {
- MockDataSource.clearCache();
- }
+ DataImporter di = new DataImporter();
+ di.loadAndInit(dc_deltaConfig);
+ DataConfig cfg = di.getConfig();
+ DataConfig.Entity ent = cfg.document.entities.get(0);
+ MockDataSource.setIterator("select * from x", new ArrayList().iterator());
+ MockDataSource.setIterator("select id from x", new ArrayList().iterator());
+ ent.dataSrc = new MockDataSource();
+ ent.isDocRoot = true;
+ DataImporter.RequestParams rp = new DataImporter.RequestParams(createMap("command", "delta-import"));
+ SolrWriterImpl swi = new SolrWriterImpl();
+ di.runCmd(rp, swi);
+ assertEquals(Boolean.FALSE, swi.deleteAllCalled);
+ assertEquals(Boolean.FALSE, swi.commitCalled);
+ assertEquals(0, swi.docs.size());
+ assertEquals(1, di.getDocBuilder().importStatistics.queryCount.get());
+ assertEquals(0, di.getDocBuilder().importStatistics.docCount.get());
+ assertEquals(0, di.getDocBuilder().importStatistics.rowsCount.get());
}
@Test
public void singleEntityOneRow() {
- try {
- DataImporter di = new DataImporter();
- di.loadAndInit(dc_singleEntity);
- DataConfig cfg = di.getConfig();
- DataConfig.Entity ent = cfg.document.entities.get(0);
- List l = new ArrayList();
- l.add(createMap("id", 1, "desc", "one"));
- MockDataSource.setIterator("select * from x", l.iterator());
- ent.dataSrc = new MockDataSource();
- ent.isDocRoot = true;
- DataImporter.RequestParams rp = new DataImporter.RequestParams();
- rp.command = "full-import";
- SolrWriterImpl swi = new SolrWriterImpl();
- di.runCmd(rp, swi);
- Assert.assertEquals(Boolean.TRUE, swi.deleteAllCalled);
- Assert.assertEquals(Boolean.TRUE, swi.commitCalled);
- Assert.assertEquals(1, swi.docs.size());
- Assert.assertEquals(1, di.getDocBuilder().importStatistics.queryCount
- .get());
- Assert
- .assertEquals(1, di.getDocBuilder().importStatistics.docCount.get());
- Assert.assertEquals(1, di.getDocBuilder().importStatistics.rowsCount
- .get());
-
- for (int i = 0; i < l.size(); i++) {
- Map<String, Object> map = (Map<String, Object>) l.get(i);
- SolrInputDocument doc = swi.docs.get(i);
- for (Map.Entry<String, Object> entry : map.entrySet()) {
- Assert.assertEquals(entry.getValue(), doc.getFieldValue(entry
- .getKey()));
- }
+ DataImporter di = new DataImporter();
+ di.loadAndInit(dc_singleEntity);
+ DataConfig cfg = di.getConfig();
+ DataConfig.Entity ent = cfg.document.entities.get(0);
+ List l = new ArrayList();
+ l.add(createMap("id", 1, "desc", "one"));
+ MockDataSource.setIterator("select * from x", l.iterator());
+ ent.dataSrc = new MockDataSource();
+ ent.isDocRoot = true;
+ DataImporter.RequestParams rp = new DataImporter.RequestParams();
+ rp.command = "full-import";
+ SolrWriterImpl swi = new SolrWriterImpl();
+ di.runCmd(rp, swi);
+ assertEquals(Boolean.TRUE, swi.deleteAllCalled);
+ assertEquals(Boolean.TRUE, swi.commitCalled);
+ assertEquals(1, swi.docs.size());
+ assertEquals(1, di.getDocBuilder().importStatistics.queryCount.get());
+ assertEquals(1, di.getDocBuilder().importStatistics.docCount.get());
+ assertEquals(1, di.getDocBuilder().importStatistics.rowsCount.get());
+
+ for (int i = 0; i < l.size(); i++) {
+ Map<String, Object> map = (Map<String, Object>) l.get(i);
+ SolrInputDocument doc = swi.docs.get(i);
+ for (Map.Entry<String, Object> entry : map.entrySet()) {
+ assertEquals(entry.getValue(), doc.getFieldValue(entry.getKey()));
}
- } finally {
- MockDataSource.clearCache();
}
}
@Test
public void testImportCommand() {
- try {
- DataImporter di = new DataImporter();
- di.loadAndInit(dc_singleEntity);
- DataConfig cfg = di.getConfig();
- DataConfig.Entity ent = cfg.document.entities.get(0);
- List l = new ArrayList();
- l.add(createMap("id", 1, "desc", "one"));
- MockDataSource.setIterator("select * from x", l.iterator());
- ent.dataSrc = new MockDataSource();
- ent.isDocRoot = true;
- DataImporter.RequestParams rp = new DataImporter.RequestParams(createMap("command", "import"));
- SolrWriterImpl swi = new SolrWriterImpl();
- di.runCmd(rp, swi);
- Assert.assertEquals(Boolean.FALSE, swi.deleteAllCalled);
- Assert.assertEquals(Boolean.TRUE, swi.commitCalled);
- Assert.assertEquals(1, swi.docs.size());
- Assert.assertEquals(1, di.getDocBuilder().importStatistics.queryCount
- .get());
- Assert
- .assertEquals(1, di.getDocBuilder().importStatistics.docCount.get());
- Assert.assertEquals(1, di.getDocBuilder().importStatistics.rowsCount
- .get());
-
- for (int i = 0; i < l.size(); i++) {
- Map<String, Object> map = (Map<String, Object>) l.get(i);
- SolrInputDocument doc = swi.docs.get(i);
- for (Map.Entry<String, Object> entry : map.entrySet()) {
- Assert.assertEquals(entry.getValue(), doc.getFieldValue(entry
- .getKey()));
- }
+ DataImporter di = new DataImporter();
+ di.loadAndInit(dc_singleEntity);
+ DataConfig cfg = di.getConfig();
+ DataConfig.Entity ent = cfg.document.entities.get(0);
+ List l = new ArrayList();
+ l.add(createMap("id", 1, "desc", "one"));
+ MockDataSource.setIterator("select * from x", l.iterator());
+ ent.dataSrc = new MockDataSource();
+ ent.isDocRoot = true;
+ DataImporter.RequestParams rp = new DataImporter.RequestParams(createMap("command", "import"));
+ SolrWriterImpl swi = new SolrWriterImpl();
+ di.runCmd(rp, swi);
+ assertEquals(Boolean.FALSE, swi.deleteAllCalled);
+ assertEquals(Boolean.TRUE, swi.commitCalled);
+ assertEquals(1, swi.docs.size());
+ assertEquals(1, di.getDocBuilder().importStatistics.queryCount.get());
+ assertEquals(1, di.getDocBuilder().importStatistics.docCount.get());
+ assertEquals(1, di.getDocBuilder().importStatistics.rowsCount.get());
+
+ for (int i = 0; i < l.size(); i++) {
+ Map<String, Object> map = (Map<String, Object>) l.get(i);
+ SolrInputDocument doc = swi.docs.get(i);
+ for (Map.Entry<String, Object> entry : map.entrySet()) {
+ assertEquals(entry.getValue(), doc.getFieldValue(entry.getKey()));
}
- } finally {
- MockDataSource.clearCache();
}
}
@Test
public void singleEntityMultipleRows() {
- try {
- DataImporter di = new DataImporter();
- di.loadAndInit(dc_singleEntity);
- DataConfig cfg = di.getConfig();
- DataConfig.Entity ent = cfg.document.entities.get(0);
- ent.isDocRoot = true;
- DataImporter.RequestParams rp = new DataImporter.RequestParams();
- rp.command = "full-import";
- List l = new ArrayList();
- l.add(createMap("id", 1, "desc", "one"));
- l.add(createMap("id", 2, "desc", "two"));
- l.add(createMap("id", 3, "desc", "three"));
-
- MockDataSource.setIterator("select * from x", l.iterator());
- ent.dataSrc = new MockDataSource();
- SolrWriterImpl swi = new SolrWriterImpl();
- di.runCmd(rp, swi);
- Assert.assertEquals(Boolean.TRUE, swi.deleteAllCalled);
- Assert.assertEquals(Boolean.TRUE, swi.commitCalled);
- Assert.assertEquals(3, swi.docs.size());
- for (int i = 0; i < l.size(); i++) {
- Map<String, Object> map = (Map<String, Object>) l.get(i);
- SolrInputDocument doc = swi.docs.get(i);
- for (Map.Entry<String, Object> entry : map.entrySet()) {
- Assert.assertEquals(entry.getValue(), doc.getFieldValue(entry.getKey()));
- }
- Assert.assertEquals(map.get("desc"), doc.getFieldValue("desc_s"));
+ DataImporter di = new DataImporter();
+ di.loadAndInit(dc_singleEntity);
+ DataConfig cfg = di.getConfig();
+ DataConfig.Entity ent = cfg.document.entities.get(0);
+ ent.isDocRoot = true;
+ DataImporter.RequestParams rp = new DataImporter.RequestParams();
+ rp.command = "full-import";
+ List l = new ArrayList();
+ l.add(createMap("id", 1, "desc", "one"));
+ l.add(createMap("id", 2, "desc", "two"));
+ l.add(createMap("id", 3, "desc", "three"));
+
+ MockDataSource.setIterator("select * from x", l.iterator());
+ ent.dataSrc = new MockDataSource();
+ SolrWriterImpl swi = new SolrWriterImpl();
+ di.runCmd(rp, swi);
+ assertEquals(Boolean.TRUE, swi.deleteAllCalled);
+ assertEquals(Boolean.TRUE, swi.commitCalled);
+ assertEquals(3, swi.docs.size());
+ for (int i = 0; i < l.size(); i++) {
+ Map<String, Object> map = (Map<String, Object>) l.get(i);
+ SolrInputDocument doc = swi.docs.get(i);
+ for (Map.Entry<String, Object> entry : map.entrySet()) {
+ assertEquals(entry.getValue(), doc.getFieldValue(entry.getKey()));
}
- Assert.assertEquals(1, di.getDocBuilder().importStatistics.queryCount
- .get());
- Assert
- .assertEquals(3, di.getDocBuilder().importStatistics.docCount.get());
- Assert.assertEquals(3, di.getDocBuilder().importStatistics.rowsCount
- .get());
- } finally {
- MockDataSource.clearCache();
+ assertEquals(map.get("desc"), doc.getFieldValue("desc_s"));
}
+ assertEquals(1, di.getDocBuilder().importStatistics.queryCount.get());
+ assertEquals(3, di.getDocBuilder().importStatistics.docCount.get());
+ assertEquals(3, di.getDocBuilder().importStatistics.rowsCount.get());
}
static class SolrWriterImpl extends SolrWriter {