Posted to commits@lucenenet.apache.org by sy...@apache.org on 2016/10/02 15:02:32 UTC

[06/10] lucenenet git commit: Fixed bug in Analysis.Util.TestCharTokenizers.TestReadSupplementaryChars() that caused the test to fail randomly. Empty array elements produced by doubled space characters were not being excluded by the string.Split() call.

Fixed bug in Analysis.Util.TestCharTokenizers.TestReadSupplementaryChars() that caused the test to fail randomly. Empty array elements produced by doubled space characters were not being excluded by the string.Split() call, so the expected token array occasionally contained an empty string that the tokenizer never emits.
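
For context, the random failures trace to how string.Split() treats consecutive
delimiters: a doubled space produces an empty entry in the expected-token array,
and LowerCaseTokenizer never emits an empty token. A minimal standalone C# sketch
of the two Split overloads (the input string below is illustrative only; the test
itself builds its input randomly):

    using System;

    class SplitBehaviorDemo
    {
        static void Main()
        {
            // Illustrative input containing a doubled space.
            string text = "odd  even";

            // Split(' ') keeps the empty element between the two spaces.
            string[] withEmpties = text.Split(' ');
            Console.WriteLine(string.Join("|", withEmpties));    // odd||even

            // The overload used in the fix drops empty entries, so the expected
            // array lines up with what the tokenizer actually produces.
            string[] withoutEmpties = text.Split(
                new char[] { ' ' }, StringSplitOptions.RemoveEmptyEntries);
            Console.WriteLine(string.Join("|", withoutEmpties)); // odd|even
        }
    }

With RemoveEmptyEntries the expected array is the same no matter how many spaces
the randomly built input happens to place between words, which removes the
intermittent mismatch.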


Project: http://git-wip-us.apache.org/repos/asf/lucenenet/repo
Commit: http://git-wip-us.apache.org/repos/asf/lucenenet/commit/67cf947c
Tree: http://git-wip-us.apache.org/repos/asf/lucenenet/tree/67cf947c
Diff: http://git-wip-us.apache.org/repos/asf/lucenenet/diff/67cf947c

Branch: refs/heads/master
Commit: 67cf947ce644e9953b76592eabd9ea343ec76b61
Parents: 220e0fb
Author: Shad Storhaug <sh...@shadstorhaug.com>
Authored: Fri Sep 30 18:32:51 2016 +0700
Committer: Shad Storhaug <sh...@shadstorhaug.com>
Committed: Sun Oct 2 21:44:07 2016 +0700

----------------------------------------------------------------------
 .../Analysis/Util/TestCharTokenizers.cs                            | 2 +-
 1 file changed, 1 insertion(+), 1 deletion(-)
----------------------------------------------------------------------


http://git-wip-us.apache.org/repos/asf/lucenenet/blob/67cf947c/src/Lucene.Net.Tests.Analysis.Common/Analysis/Util/TestCharTokenizers.cs
----------------------------------------------------------------------
diff --git a/src/Lucene.Net.Tests.Analysis.Common/Analysis/Util/TestCharTokenizers.cs b/src/Lucene.Net.Tests.Analysis.Common/Analysis/Util/TestCharTokenizers.cs
index 7c68fbc..78a5772 100644
--- a/src/Lucene.Net.Tests.Analysis.Common/Analysis/Util/TestCharTokenizers.cs
+++ b/src/Lucene.Net.Tests.Analysis.Common/Analysis/Util/TestCharTokenizers.cs
@@ -57,7 +57,7 @@ namespace Lucene.Net.Analysis.Util
             // internal buffer size is 1024 make sure we have a surrogate pair right at the border
             builder.Insert(1023, "\ud801\udc1c");
             var tokenizer = new LowerCaseTokenizer(TEST_VERSION_CURRENT, new StringReader(builder.ToString()));
-            AssertTokenStreamContents(tokenizer, builder.ToString().ToLowerInvariant().Split(' '));
+            AssertTokenStreamContents(tokenizer, builder.ToString().ToLowerInvariant().Split(new char[] { ' ' }, StringSplitOptions.RemoveEmptyEntries));
         }
 
         /*
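
For readers less familiar with the surrogate-pair detail in the hunk above: the
literal "\ud801\udc1c" encodes the single supplementary code point U+1041C as two
UTF-16 code units, and Insert(1023, ...) puts the high surrogate at the end of one
1024-char read and the low surrogate at the start of the next. A small standalone
sketch (not part of the commit) of that arithmetic:

    using System;

    class SurrogatePairDemo
    {
        static void Main()
        {
            // The same supplementary character the test inserts at the buffer border.
            string supplementary = "\ud801\udc1c";

            Console.WriteLine(supplementary.Length);                   // 2 (one surrogate pair)
            Console.WriteLine(char.IsHighSurrogate(supplementary[0])); // True
            Console.WriteLine(char.IsLowSurrogate(supplementary[1]));  // True

            // The pair decodes to a single code point above the BMP.
            int codePoint = char.ConvertToUtf32(supplementary, 0);
            Console.WriteLine(codePoint.ToString("X"));                // 1041C

            // Inserting at index 1023 places the high surrogate as the last char
            // of a 1024-char buffer and the low surrogate at index 1024, which is
            // the border condition the test comment above refers to.
        }
    }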