You are viewing a plain text version of this content. The canonical link for it is here.
Posted to commits@lucenenet.apache.org by ni...@apache.org on 2020/07/21 09:48:38 UTC
[lucenenet] 04/24: BREAKING: Lucene.Net.Analysis.Common.Analysis.Compound: Changed protected m_tokens field from LinkedList to Queue for better throughput
This is an automated email from the ASF dual-hosted git repository.
nightowl888 pushed a commit to branch master
in repository https://gitbox.apache.org/repos/asf/lucenenet.git
commit 626ab8232ccb9b5a1320cf3bcc6faa90c91b743f
Author: Shad Storhaug <sh...@shadstorhaug.com>
AuthorDate: Fri Jul 17 09:00:56 2020 +0700
BREAKING: Lucene.Net.Analysis.Common.Analysis.Compound: Changed protected m_tokens field from LinkedList to Queue for better throughput
---
.../Analysis/Compound/CompoundWordTokenFilterBase.cs | 7 +++----
.../Analysis/Compound/DictionaryCompoundWordTokenFilter.cs | 4 ++--
.../Analysis/Compound/HyphenationCompoundWordTokenFilter.cs | 6 +++---
3 files changed, 8 insertions(+), 9 deletions(-)
diff --git a/src/Lucene.Net.Analysis.Common/Analysis/Compound/CompoundWordTokenFilterBase.cs b/src/Lucene.Net.Analysis.Common/Analysis/Compound/CompoundWordTokenFilterBase.cs
index a38ffad..2c028e5 100644
--- a/src/Lucene.Net.Analysis.Common/Analysis/Compound/CompoundWordTokenFilterBase.cs
+++ b/src/Lucene.Net.Analysis.Common/Analysis/Compound/CompoundWordTokenFilterBase.cs
@@ -56,7 +56,7 @@ namespace Lucene.Net.Analysis.Compound
protected readonly LuceneVersion m_matchVersion;
protected readonly CharArraySet m_dictionary;
- protected readonly LinkedList<CompoundToken> m_tokens;
+ protected readonly Queue<CompoundToken> m_tokens;
protected readonly int m_minWordSize;
protected readonly int m_minSubwordSize;
protected readonly int m_maxSubwordSize;
@@ -86,7 +86,7 @@ namespace Lucene.Net.Analysis.Compound
posIncAtt = AddAttribute<IPositionIncrementAttribute>();
this.m_matchVersion = matchVersion;
- this.m_tokens = new LinkedList<CompoundToken>();
+ this.m_tokens = new Queue<CompoundToken>();
if (minWordSize < 0)
{
throw new ArgumentException("minWordSize cannot be negative");
@@ -111,8 +111,7 @@ namespace Lucene.Net.Analysis.Compound
if (m_tokens.Count > 0)
{
Debug.Assert(current != null);
- CompoundToken token = m_tokens.First.Value;
- m_tokens.Remove(token);
+ CompoundToken token = m_tokens.Dequeue();
RestoreState(current); // keep all other attributes untouched
m_termAtt.SetEmpty().Append(token.Text);
m_offsetAtt.SetOffset(token.StartOffset, token.EndOffset);
diff --git a/src/Lucene.Net.Analysis.Common/Analysis/Compound/DictionaryCompoundWordTokenFilter.cs b/src/Lucene.Net.Analysis.Common/Analysis/Compound/DictionaryCompoundWordTokenFilter.cs
index 76f6077..0cc0505 100644
--- a/src/Lucene.Net.Analysis.Common/Analysis/Compound/DictionaryCompoundWordTokenFilter.cs
+++ b/src/Lucene.Net.Analysis.Common/Analysis/Compound/DictionaryCompoundWordTokenFilter.cs
@@ -120,13 +120,13 @@ namespace Lucene.Net.Analysis.Compound
}
else
{
- m_tokens.AddLast(new CompoundToken(this, i, j));
+ m_tokens.Enqueue(new CompoundToken(this, i, j));
}
}
}
if (this.m_onlyLongestMatch && longestMatchToken != null)
{
- m_tokens.AddLast(longestMatchToken);
+ m_tokens.Enqueue(longestMatchToken);
}
}
}
diff --git a/src/Lucene.Net.Analysis.Common/Analysis/Compound/HyphenationCompoundWordTokenFilter.cs b/src/Lucene.Net.Analysis.Common/Analysis/Compound/HyphenationCompoundWordTokenFilter.cs
index 0e263ed..e7a7726 100644
--- a/src/Lucene.Net.Analysis.Common/Analysis/Compound/HyphenationCompoundWordTokenFilter.cs
+++ b/src/Lucene.Net.Analysis.Common/Analysis/Compound/HyphenationCompoundWordTokenFilter.cs
@@ -250,7 +250,7 @@ namespace Lucene.Net.Analysis.Compound
}
else
{
- m_tokens.AddLast(new CompoundToken(this, start, partLength));
+ m_tokens.Enqueue(new CompoundToken(this, start, partLength));
}
}
else if (m_dictionary.Contains(m_termAtt.Buffer, start, partLength - 1))
@@ -275,13 +275,13 @@ namespace Lucene.Net.Analysis.Compound
}
else
{
- m_tokens.AddLast(new CompoundToken(this, start, partLength - 1));
+ m_tokens.Enqueue(new CompoundToken(this, start, partLength - 1));
}
}
}
if (this.m_onlyLongestMatch && longestMatchToken != null)
{
- m_tokens.AddLast(longestMatchToken);
+ m_tokens.Enqueue(longestMatchToken);
}
}
}