/// A basic 'positive' Unit test class for the FieldCacheRangeFilter class.
///
- ///
+ ///
/// NOTE: at the moment, this class only tests for 'positive' results,
/// it does not verify the results to ensure there are no 'false positives',
/// nor does it adequately test 'negative' results. It also does not test
@@ -47,8 +47,8 @@ public class TestFieldCacheRangeFilter : BaseTestRangeFilter
{
///
/// LUCENENET specific. Ensure we have an infostream attached to the default FieldCache
- /// when running the tests. In Java, this was done in the Core.Search.TestFieldCache.TestInfoStream()
- /// method (which polluted the state of these tests), but we need to make the tests self-contained
+ /// when running the tests. In Java, this was done in the Core.Search.TestFieldCache.TestInfoStream()
+ /// method (which polluted the state of these tests), but we need to make the tests self-contained
/// so they can be run correctly regardless of order. Not setting the InfoStream skips an execution
/// path within these tests, so we should do it to make sure we test all of the code.
///
@@ -501,7 +501,7 @@ public virtual void TestFieldCacheRangeFilterFloats()
int numDocs = reader.NumDocs;
float minIdO = Convert.ToSingle(minId + .5f);
- float medIdO = Convert.ToSingle((float)minIdO + ((maxId - minId)) / 2.0f);
+ float medIdO = Convert.ToSingle(minIdO + ((maxId - minId)) / 2.0f);
ScoreDoc[] result;
Query q = new TermQuery(new Term("body", "body"));
@@ -598,4 +598,4 @@ public virtual void TestSparseIndex()
dir.Dispose();
}
}
-}
\ No newline at end of file
+}
diff --git a/src/Lucene.Net.Tests/Search/TestFieldCacheRewriteMethod.cs b/src/Lucene.Net.Tests/Search/TestFieldCacheRewriteMethod.cs
index 926c4ce9db..d045771dcb 100644
--- a/src/Lucene.Net.Tests/Search/TestFieldCacheRewriteMethod.cs
+++ b/src/Lucene.Net.Tests/Search/TestFieldCacheRewriteMethod.cs
@@ -34,10 +34,10 @@ public class TestFieldCacheRewriteMethod : TestRegexpRandom2
protected internal override void AssertSame(string regexp)
{
RegexpQuery fieldCache = new RegexpQuery(new Term(fieldName, regexp), RegExpSyntax.NONE);
- fieldCache.MultiTermRewriteMethod = (new FieldCacheRewriteMethod());
+ fieldCache.MultiTermRewriteMethod = new FieldCacheRewriteMethod();
RegexpQuery filter = new RegexpQuery(new Term(fieldName, regexp), RegExpSyntax.NONE);
- filter.MultiTermRewriteMethod = (MultiTermQuery.CONSTANT_SCORE_FILTER_REWRITE);
+ filter.MultiTermRewriteMethod = MultiTermQuery.CONSTANT_SCORE_FILTER_REWRITE;
TopDocs fieldCacheDocs = searcher1.Search(fieldCache, 25);
TopDocs filterDocs = searcher2.Search(filter, 25);
@@ -54,19 +54,16 @@ public virtual void TestEquals()
Assert.AreEqual(a1, a2);
Assert.IsFalse(a1.Equals(b));
- a1.MultiTermRewriteMethod = (new FieldCacheRewriteMethod());
- a2.MultiTermRewriteMethod = (new FieldCacheRewriteMethod());
- b.MultiTermRewriteMethod = (new FieldCacheRewriteMethod());
+ a1.MultiTermRewriteMethod = new FieldCacheRewriteMethod();
+ a2.MultiTermRewriteMethod = new FieldCacheRewriteMethod();
+ b.MultiTermRewriteMethod = new FieldCacheRewriteMethod();
Assert.AreEqual(a1, a2);
Assert.IsFalse(a1.Equals(b));
QueryUtils.Check(a1);
}
-
-
- #region TestSnapshotDeletionPolicy
// LUCENENET NOTE: Tests in a base class are not pulled into the correct
- // context in Visual Studio. This fixes that with the minimum amount of code necessary
+ // context in Visual Studio or Azure DevOps. This fixes that with the minimum amount of code necessary
// to run them in the correct context without duplicating all of the tests.
///
@@ -76,7 +73,5 @@ public override void TestRegexps()
{
base.TestRegexps();
}
-
- #endregion
}
-}
\ No newline at end of file
+}
diff --git a/src/Lucene.Net.Tests/Search/TestFieldCacheTermsFilter.cs b/src/Lucene.Net.Tests/Search/TestFieldCacheTermsFilter.cs
index 6c4321e9ef..cebc3bf6a7 100644
--- a/src/Lucene.Net.Tests/Search/TestFieldCacheTermsFilter.cs
+++ b/src/Lucene.Net.Tests/Search/TestFieldCacheTermsFilter.cs
@@ -32,7 +32,7 @@ namespace Lucene.Net.Search
///
/// A basic unit test for FieldCacheTermsFilter
///
- ///
+ ///
[TestFixture]
public class TestFieldCacheTermsFilter : LuceneTestCase
{
@@ -77,4 +77,4 @@ public virtual void TestMissingTerms()
rd.Dispose();
}
}
-}
\ No newline at end of file
+}
diff --git a/src/Lucene.Net.Tests/Search/TestFieldValueFilter.cs b/src/Lucene.Net.Tests/Search/TestFieldValueFilter.cs
index 753332e1ce..bdd6da0036 100644
--- a/src/Lucene.Net.Tests/Search/TestFieldValueFilter.cs
+++ b/src/Lucene.Net.Tests/Search/TestFieldValueFilter.cs
@@ -32,7 +32,6 @@ namespace Lucene.Net.Search
using RandomIndexWriter = Lucene.Net.Index.RandomIndexWriter;
using Term = Lucene.Net.Index.Term;
- ///
[TestFixture]
public class TestFieldValueFilter : LuceneTestCase
{
@@ -124,4 +123,4 @@ private int[] BuildIndex(RandomIndexWriter writer, int docs)
return docStates;
}
}
-}
\ No newline at end of file
+}
diff --git a/src/Lucene.Net.Tests/Search/TestFilteredQuery.cs b/src/Lucene.Net.Tests/Search/TestFilteredQuery.cs
index a5c28bdce1..6b45085e01 100644
--- a/src/Lucene.Net.Tests/Search/TestFilteredQuery.cs
+++ b/src/Lucene.Net.Tests/Search/TestFilteredQuery.cs
@@ -46,7 +46,7 @@ namespace Lucene.Net.Search
///
/// FilteredQuery JUnit tests.
///
- /// Created: Apr 21, 2004 1:21:46 PM
+ /// Created: Apr 21, 2004 1:21:46 PM
///
///
/// @since 1.4
@@ -109,10 +109,6 @@ private static Filter NewStaticFilterB()
private sealed class FilterAnonymousClass : Filter
{
- public FilterAnonymousClass()
- {
- }
-
public override DocIdSet GetDocIdSet(AtomicReaderContext context, IBits acceptDocs)
{
if (acceptDocs is null)
@@ -212,10 +208,6 @@ private static Filter NewStaticFilterA()
private sealed class FilterAnonymousClass2 : Filter
{
- public FilterAnonymousClass2()
- {
- }
-
public override DocIdSet GetDocIdSet(AtomicReaderContext context, IBits acceptDocs)
{
Assert.IsNull(acceptDocs, "acceptDocs should be null, as we have an index without deletions");
@@ -458,10 +450,6 @@ private static FilteredQuery.FilterStrategy RandomFilterStrategy(Random random,
private sealed class RandomAccessFilterStrategyAnonymousClass : FilteredQuery.RandomAccessFilterStrategy
{
- public RandomAccessFilterStrategyAnonymousClass()
- {
- }
-
protected override bool UseRandomAccess(IBits bits, int firstFilterDoc)
{
return true;
@@ -495,7 +483,7 @@ public virtual void TestQueryFirstFilterStrategy()
writer.Dispose();
IndexSearcher searcher = NewSearcher(reader);
- Query query = new FilteredQuery(new TermQuery(new Term("field", "0")), new FilterAnonymousClass3(this, reader), FilteredQuery.QUERY_FIRST_FILTER_STRATEGY);
+ Query query = new FilteredQuery(new TermQuery(new Term("field", "0")), new FilterAnonymousClass3(), FilteredQuery.QUERY_FIRST_FILTER_STRATEGY);
TopDocs search = searcher.Search(query, 10);
Assert.AreEqual(totalDocsWithZero, search.TotalHits);
@@ -504,16 +492,6 @@ public virtual void TestQueryFirstFilterStrategy()
private sealed class FilterAnonymousClass3 : Filter
{
- private readonly TestFilteredQuery outerInstance;
-
- private IndexReader reader;
-
- public FilterAnonymousClass3(TestFilteredQuery outerInstance, IndexReader reader)
- {
- this.outerInstance = outerInstance;
- this.reader = reader;
- }
-
public override DocIdSet GetDocIdSet(AtomicReaderContext context, IBits acceptDocs)
{
bool nullBitset = Random.Next(10) == 5;
@@ -529,20 +507,17 @@ public override DocIdSet GetDocIdSet(AtomicReaderContext context, IBits acceptDo
{
bitSet.Set(d);
}
- return new DocIdSetAnonymousClass(this, nullBitset, reader, bitSet);
+ return new DocIdSetAnonymousClass(nullBitset, reader, bitSet);
}
private sealed class DocIdSetAnonymousClass : DocIdSet
{
- private readonly FilterAnonymousClass3 outerInstance;
-
private readonly bool nullBitset;
private readonly AtomicReader reader;
private readonly BitSet bitSet;
- public DocIdSetAnonymousClass(FilterAnonymousClass3 outerInstance, bool nullBitset, AtomicReader reader, BitSet bitSet)
+ public DocIdSetAnonymousClass(bool nullBitset, AtomicReader reader, BitSet bitSet)
{
- this.outerInstance = outerInstance;
this.nullBitset = nullBitset;
this.reader = reader;
this.bitSet = bitSet;
@@ -613,7 +588,7 @@ public virtual void TestLeapFrogStrategy()
writer.Dispose();
bool queryFirst = Random.NextBoolean();
IndexSearcher searcher = NewSearcher(reader);
- Query query = new FilteredQuery(new TermQuery(new Term("field", "0")), new FilterAnonymousClass4(this, queryFirst), queryFirst ? FilteredQuery.LEAP_FROG_QUERY_FIRST_STRATEGY : Random
+ Query query = new FilteredQuery(new TermQuery(new Term("field", "0")), new FilterAnonymousClass4(queryFirst), queryFirst ? FilteredQuery.LEAP_FROG_QUERY_FIRST_STRATEGY : Random
.NextBoolean() ? FilteredQuery.RANDOM_ACCESS_FILTER_STRATEGY : FilteredQuery.LEAP_FROG_FILTER_FIRST_STRATEGY); // if filterFirst, we can use random here since bits are null
TopDocs search = searcher.Search(query, 10);
@@ -623,13 +598,10 @@ public virtual void TestLeapFrogStrategy()
private sealed class FilterAnonymousClass4 : Filter
{
- private readonly TestFilteredQuery outerInstance;
-
private readonly bool queryFirst;
- public FilterAnonymousClass4(TestFilteredQuery outerInstance, bool queryFirst)
+ public FilterAnonymousClass4(bool queryFirst)
{
- this.outerInstance = outerInstance;
this.queryFirst = queryFirst;
}
@@ -701,4 +673,4 @@ public override long GetCost()
}
}
}
-}
\ No newline at end of file
+}
diff --git a/src/Lucene.Net.Tests/Search/TestIndexSearcher.cs b/src/Lucene.Net.Tests/Search/TestIndexSearcher.cs
index d3919c80b2..fad464498d 100644
--- a/src/Lucene.Net.Tests/Search/TestIndexSearcher.cs
+++ b/src/Lucene.Net.Tests/Search/TestIndexSearcher.cs
@@ -68,13 +68,34 @@ public override void TearDown()
[Test]
public virtual void TestHugeN()
{
+ // LUCENENET: this differs from Java
TaskScheduler service = new LimitedConcurrencyLevelTaskScheduler(4);
- IndexSearcher[] searchers = new IndexSearcher[] { new IndexSearcher(reader), new IndexSearcher(reader, service) };
- Query[] queries = new Query[] { new MatchAllDocsQuery(), new TermQuery(new Term("field", "1")) };
- Sort[] sorts = new Sort[] { null, new Sort(new SortField("field2", SortFieldType.STRING)) };
- Filter[] filters = new Filter[] { null, new QueryWrapperFilter(new TermQuery(new Term("field2", "true"))) };
- ScoreDoc[] afters = new ScoreDoc[] { null, new FieldDoc(0, 0f, new object[] { new BytesRef("boo!") }) };
+ IndexSearcher[] searchers = new IndexSearcher[]
+ {
+ new IndexSearcher(reader),
+ new IndexSearcher(reader, service)
+ };
+ Query[] queries = new Query[]
+ {
+ new MatchAllDocsQuery(),
+ new TermQuery(new Term("field", "1"))
+ };
+ Sort[] sorts = new Sort[]
+ {
+ null,
+ new Sort(new SortField("field2", SortFieldType.STRING))
+ };
+ Filter[] filters = new Filter[]
+ {
+ null,
+ new QueryWrapperFilter(new TermQuery(new Term("field2", "true")))
+ };
+ ScoreDoc[] afters = new ScoreDoc[]
+ {
+ null,
+ new FieldDoc(0, 0f, new object[] { new BytesRef("boo!") })
+ };
foreach (IndexSearcher searcher in searchers)
{
@@ -140,4 +161,4 @@ public virtual void TestSearchAfterPassedMaxDoc()
}
}
}
-}
\ No newline at end of file
+}
diff --git a/src/Lucene.Net.Tests/Search/TestLiveFieldValues.cs b/src/Lucene.Net.Tests/Search/TestLiveFieldValues.cs
index 39da0803c5..3c085e967c 100644
--- a/src/Lucene.Net.Tests/Search/TestLiveFieldValues.cs
+++ b/src/Lucene.Net.Tests/Search/TestLiveFieldValues.cs
@@ -9,7 +9,10 @@
using JCG = J2N.Collections.Generic;
using Assert = Lucene.Net.TestFramework.Assert;
using Console = Lucene.Net.Util.SystemConsole;
+
+#if !FEATURE_RANDOM_NEXTINT64_NEXTSINGLE
using RandomizedTesting.Generators;
+#endif
namespace Lucene.Net.Search
{
@@ -245,4 +248,4 @@ public override void Run()
}
}
}
-}
\ No newline at end of file
+}
diff --git a/src/Lucene.Net.Tests/Search/TestMinShouldMatch2.cs b/src/Lucene.Net.Tests/Search/TestMinShouldMatch2.cs
index 6559f79cd1..b202e9b1d5 100644
--- a/src/Lucene.Net.Tests/Search/TestMinShouldMatch2.cs
+++ b/src/Lucene.Net.Tests/Search/TestMinShouldMatch2.cs
@@ -2,11 +2,9 @@
using Lucene.Net.Diagnostics;
using Lucene.Net.Documents;
using Lucene.Net.Index;
-using Lucene.Net.Support;
using Lucene.Net.Util;
using NUnit.Framework;
using System.Collections.Generic;
-using System.Linq;
using Assert = Lucene.Net.TestFramework.Assert;
using JCG = J2N.Collections.Generic;
@@ -101,12 +99,15 @@ public override void BeforeClass()
r = DirectoryReader.Open(dir);
atomicReader = GetOnlySegmentReader(r);
searcher = new IndexSearcher(atomicReader);
- searcher.Similarity = new DefaultSimilarityAnonymousClass();
+ searcher.Similarity = DefaultSimilarityAnonymousClass.Default;
}
private sealed class DefaultSimilarityAnonymousClass : DefaultSimilarity
{
- public DefaultSimilarityAnonymousClass()
+ // LUCENENET: making a static readonly instance with private constructor for reduced allocations
+ public static readonly DefaultSimilarityAnonymousClass Default = new DefaultSimilarityAnonymousClass();
+
+ private DefaultSimilarityAnonymousClass()
{
}
@@ -371,7 +372,7 @@ internal SlowMinShouldMatchScorer(BooleanWeight weight, AtomicReader reader, Ind
if (Debugging.AssertsEnabled) Debugging.Assert(success); // no dups
TermContext context = TermContext.Build(reader.Context, term);
SimWeight w = weight.Similarity.ComputeWeight(1f, searcher.CollectionStatistics("field"), searcher.TermStatistics(term, context));
- var dummy = w.GetValueForNormalization(); // ignored
+ _ = w.GetValueForNormalization(); // ignored
w.Normalize(1F, 1F);
sims[(int)ord] = weight.Similarity.GetSimScorer(w, (AtomicReaderContext)reader.Context);
}
@@ -428,4 +429,4 @@ public override long GetCost()
}
}
}
-}
\ No newline at end of file
+}
diff --git a/src/Lucene.Net.Tests/Search/TestMultiPhraseQuery.cs b/src/Lucene.Net.Tests/Search/TestMultiPhraseQuery.cs
index fd502af60c..135462d8b5 100644
--- a/src/Lucene.Net.Tests/Search/TestMultiPhraseQuery.cs
+++ b/src/Lucene.Net.Tests/Search/TestMultiPhraseQuery.cs
@@ -3,7 +3,6 @@
using NUnit.Framework;
using System;
using System.Collections.Generic;
-using System.Linq;
using Assert = Lucene.Net.TestFramework.Assert;
using Console = Lucene.Net.Util.SystemConsole;
@@ -354,7 +353,7 @@ private void Add(string s, string type, RandomIndexWriter writer)
[Test]
public virtual void TestEmptyToString()
{
- (new MultiPhraseQuery()).ToString();
+ _ = new MultiPhraseQuery().ToString();
}
[Test]
@@ -367,7 +366,7 @@ public virtual void TestCustomIDF()
IndexReader reader = writer.GetReader();
IndexSearcher searcher = NewSearcher(reader);
- searcher.Similarity = new DefaultSimilarityAnonymousClass(this);
+ searcher.Similarity = new DefaultSimilarityAnonymousClass();
MultiPhraseQuery query = new MultiPhraseQuery();
query.Add(new Term[] { new Term("body", "this"), new Term("body", "that") });
@@ -382,13 +381,6 @@ public virtual void TestCustomIDF()
private sealed class DefaultSimilarityAnonymousClass : DefaultSimilarity
{
- private readonly TestMultiPhraseQuery outerInstance;
-
- public DefaultSimilarityAnonymousClass(TestMultiPhraseQuery outerInstance)
- {
- this.outerInstance = outerInstance;
- }
-
public override Explanation IdfExplain(CollectionStatistics collectionStats, TermStatistics[] termStats)
{
return new Explanation(10f, "just a test");
@@ -462,13 +454,45 @@ private static Token MakeToken(string text, int posIncr)
return t;
}
- private static readonly Token[] INCR_0_DOC_TOKENS = new Token[] { MakeToken("x", 1), MakeToken("a", 1), MakeToken("1", 0), MakeToken("m", 1), MakeToken("b", 1), MakeToken("1", 0), MakeToken("n", 1), MakeToken("c", 1), MakeToken("y", 1) };
-
- private static readonly Token[] INCR_0_QUERY_TOKENS_AND = new Token[] { MakeToken("a", 1), MakeToken("1", 0), MakeToken("b", 1), MakeToken("1", 0), MakeToken("c", 1) };
-
- private static readonly Token[][] INCR_0_QUERY_TOKENS_AND_OR_MATCH = new Token[][] { new Token[] { MakeToken("a", 1) }, new Token[] { MakeToken("x", 1), MakeToken("1", 0) }, new Token[] { MakeToken("b", 2) }, new Token[] { MakeToken("x", 2), MakeToken("1", 0) }, new Token[] { MakeToken("c", 3) } };
-
- private static readonly Token[][] INCR_0_QUERY_TOKENS_AND_OR_NO_MATCHN = new Token[][] { new Token[] { MakeToken("x", 1) }, new Token[] { MakeToken("a", 1), MakeToken("1", 0) }, new Token[] { MakeToken("x", 2) }, new Token[] { MakeToken("b", 2), MakeToken("1", 0) }, new Token[] { MakeToken("c", 3) } };
+ private static readonly Token[] INCR_0_DOC_TOKENS = new Token[]
+ {
+ MakeToken("x", 1),
+ MakeToken("a", 1),
+ MakeToken("1", 0),
+ MakeToken("m", 1), // not existing, relying on slop=2
+ MakeToken("b", 1),
+ MakeToken("1", 0),
+ MakeToken("n", 1), // not existing, relying on slop=2
+ MakeToken("c", 1),
+ MakeToken("y", 1)
+ };
+
+ private static readonly Token[] INCR_0_QUERY_TOKENS_AND = new Token[]
+ {
+ MakeToken("a", 1),
+ MakeToken("1", 0),
+ MakeToken("b", 1),
+ MakeToken("1", 0),
+ MakeToken("c", 1)
+ };
+
+ private static readonly Token[][] INCR_0_QUERY_TOKENS_AND_OR_MATCH = new Token[][]
+ {
+ new[] { MakeToken("a", 1) },
+ new[] { MakeToken("x", 1), MakeToken("1", 0) },
+ new[] { MakeToken("b", 2) },
+ new[] { MakeToken("x", 2), MakeToken("1", 0) },
+ new[] { MakeToken("c", 3) }
+ };
+
+ private static readonly Token[][] INCR_0_QUERY_TOKENS_AND_OR_NO_MATCHN = new Token[][]
+ {
+ new[] { MakeToken("x", 1) },
+ new[] { MakeToken("a", 1), MakeToken("1", 0) },
+ new[] { MakeToken("x", 2) },
+ new[] { MakeToken("b", 2), MakeToken("1", 0) },
+ new[] { MakeToken("c", 3) }
+ };
///
/// using query parser, MPQ will be created, and will not be strict about having all query terms
@@ -626,4 +650,4 @@ public virtual void TestNegativeSlop()
}
}
}
-}
\ No newline at end of file
+}
diff --git a/src/Lucene.Net.Tests/Search/TestMultiTermConstantScore.cs b/src/Lucene.Net.Tests/Search/TestMultiTermConstantScore.cs
index 024551c898..621313f030 100644
--- a/src/Lucene.Net.Tests/Search/TestMultiTermConstantScore.cs
+++ b/src/Lucene.Net.Tests/Search/TestMultiTermConstantScore.cs
@@ -50,10 +50,6 @@ public class TestMultiTermConstantScore : BaseTestRangeFilter
internal /*static*/ Directory small;
internal /*static*/ IndexReader reader;
- ///
- /// LUCENENET specific
- /// Is non-static because NewIndexWriterConfig is no longer static.
- ///
[OneTimeSetUp]
public override void BeforeClass()
{
@@ -95,7 +91,7 @@ public override void AfterClass()
public static Query Csrq(string f, string l, string h, bool il, bool ih)
{
TermRangeQuery query = TermRangeQuery.NewStringRange(f, l, h, il, ih);
- query.MultiTermRewriteMethod = (MultiTermQuery.CONSTANT_SCORE_FILTER_REWRITE);
+ query.MultiTermRewriteMethod = MultiTermQuery.CONSTANT_SCORE_FILTER_REWRITE;
if (Verbose)
{
Console.WriteLine("TEST: query=" + query);
@@ -106,7 +102,7 @@ public static Query Csrq(string f, string l, string h, bool il, bool ih)
public static Query Csrq(string f, string l, string h, bool il, bool ih, MultiTermQuery.RewriteMethod method)
{
TermRangeQuery query = TermRangeQuery.NewStringRange(f, l, h, il, ih);
- query.MultiTermRewriteMethod = (method);
+ query.MultiTermRewriteMethod = method;
if (Verbose)
{
Console.WriteLine("TEST: query=" + query + " method=" + method);
@@ -119,7 +115,7 @@ public static Query Csrq(string f, string l, string h, bool il, bool ih, MultiTe
public static Query Cspq(Term prefix)
{
PrefixQuery query = new PrefixQuery(prefix);
- query.MultiTermRewriteMethod = (MultiTermQuery.CONSTANT_SCORE_FILTER_REWRITE);
+ query.MultiTermRewriteMethod = MultiTermQuery.CONSTANT_SCORE_FILTER_REWRITE;
return query;
}
@@ -128,7 +124,7 @@ public static Query Cspq(Term prefix)
public static Query Cswcq(Term wild)
{
WildcardQuery query = new WildcardQuery(wild);
- query.MultiTermRewriteMethod = (MultiTermQuery.CONSTANT_SCORE_FILTER_REWRITE);
+ query.MultiTermRewriteMethod = MultiTermQuery.CONSTANT_SCORE_FILTER_REWRITE;
return query;
}
@@ -242,7 +238,7 @@ public virtual void TestBoost()
search.Similarity = new DefaultSimilarity();
Query q = Csrq("data", "1", "6", T, T);
q.Boost = 100;
- search.Search(q, null, new CollectorAnonymousClass(this));
+ search.Search(q, null, new CollectorAnonymousClass());
//
// Ensure that boosting works to score one clause of a query higher
@@ -287,15 +283,7 @@ public virtual void TestBoost()
private sealed class CollectorAnonymousClass : ICollector
{
- private readonly TestMultiTermConstantScore outerInstance;
-
- public CollectorAnonymousClass(TestMultiTermConstantScore outerInstance)
- {
- this.outerInstance = outerInstance;
- @base = 0;
- }
-
- private int @base;
+ private int @base = 0;
private Scorer scorer;
public void SetScorer(Scorer scorer)
@@ -544,4 +532,4 @@ public virtual void TestRangeQueryRand()
assertEquals("max,nul,T,T", 1, result.Length);
}
}
-}
\ No newline at end of file
+}
diff --git a/src/Lucene.Net.Tests/Search/TestMultiTermQueryRewrites.cs b/src/Lucene.Net.Tests/Search/TestMultiTermQueryRewrites.cs
index 78a243ec48..d9533117ea 100644
--- a/src/Lucene.Net.Tests/Search/TestMultiTermQueryRewrites.cs
+++ b/src/Lucene.Net.Tests/Search/TestMultiTermQueryRewrites.cs
@@ -137,7 +137,7 @@ private void CheckBooleanQueryOrder(Query q)
private void CheckDuplicateTerms(MultiTermQuery.RewriteMethod method)
{
MultiTermQuery mtq = TermRangeQuery.NewStringRange("data", "2", "7", true, true);
- mtq.MultiTermRewriteMethod = (method);
+ mtq.MultiTermRewriteMethod = method;
Query q1 = searcher.Rewrite(mtq);
Query q2 = multiSearcher.Rewrite(mtq);
Query q3 = multiSearcherDupls.Rewrite(mtq);
@@ -184,8 +184,8 @@ private void CheckBooleanQueryBoosts(BooleanQuery bq)
private void CheckBoosts(MultiTermQuery.RewriteMethod method)
{
- MultiTermQuery mtq = new MultiTermQueryAnonymousClass(this);
- mtq.MultiTermRewriteMethod = (method);
+ MultiTermQuery mtq = new MultiTermQueryAnonymousClass();
+ mtq.MultiTermRewriteMethod = method;
Query q1 = searcher.Rewrite(mtq);
Query q2 = multiSearcher.Rewrite(mtq);
Query q3 = multiSearcherDupls.Rewrite(mtq);
@@ -205,27 +205,21 @@ private void CheckBoosts(MultiTermQuery.RewriteMethod method)
private sealed class MultiTermQueryAnonymousClass : MultiTermQuery
{
- private readonly TestMultiTermQueryRewrites outerInstance;
-
- public MultiTermQueryAnonymousClass(TestMultiTermQueryRewrites outerInstance)
+ public MultiTermQueryAnonymousClass()
: base("data")
{
- this.outerInstance = outerInstance;
}
protected override TermsEnum GetTermsEnum(Terms terms, AttributeSource atts)
{
- return new TermRangeTermsEnumAnonymousClass(this, terms.GetEnumerator(), new BytesRef("2"), new BytesRef("7"));
+ return new TermRangeTermsEnumAnonymousClass(terms.GetEnumerator(), new BytesRef("2"), new BytesRef("7"));
}
private sealed class TermRangeTermsEnumAnonymousClass : TermRangeTermsEnum
{
- private readonly MultiTermQueryAnonymousClass outerInstance;
-
- public TermRangeTermsEnumAnonymousClass(MultiTermQueryAnonymousClass outerInstance, TermsEnum iterator, BytesRef bref1, BytesRef bref2)
+ public TermRangeTermsEnumAnonymousClass(TermsEnum iterator, BytesRef bref1, BytesRef bref2)
: base(iterator, bref1, bref2, true, true)
{
- this.outerInstance = outerInstance;
boostAtt = Attributes.AddAttribute<IBoostAttribute>();
}
@@ -305,4 +299,4 @@ public virtual void TestMaxClauseLimitations()
CheckNoMaxClauseLimitation(new MultiTermQuery.TopTermsBoostOnlyBooleanQueryRewrite(1024));
}
}
-}
\ No newline at end of file
+}
diff --git a/src/Lucene.Net.Tests/Search/TestMultiThreadTermVectors.cs b/src/Lucene.Net.Tests/Search/TestMultiThreadTermVectors.cs
index 4d567936ec..0d06ec9d6a 100644
--- a/src/Lucene.Net.Tests/Search/TestMultiThreadTermVectors.cs
+++ b/src/Lucene.Net.Tests/Search/TestMultiThreadTermVectors.cs
@@ -2,10 +2,8 @@
using Lucene.Net.Diagnostics;
using Lucene.Net.Documents;
using Lucene.Net.Index.Extensions;
-using Lucene.Net.Support.Threading;
using NUnit.Framework;
using System;
-using System.IO;
using System.Text;
using System.Threading;
using Console = Lucene.Net.Util.SystemConsole;
@@ -60,7 +58,7 @@ public override void SetUp()
customType.StoreTermVectors = true;
for (int i = 0; i < numDocs; i++)
{
- Documents.Document doc = new Documents.Document();
+ Document doc = new Document();
Field fld = NewField("field", English.Int32ToEnglish(i), customType);
doc.Add(fld);
writer.AddDocument(doc);
@@ -98,8 +96,7 @@ public virtual void Test()
{
try
{
- ///
- /// close the opened reader
+ // close the opened reader
reader.Dispose();
}
catch (Exception ioe) when (ioe.IsIOException())
@@ -240,4 +237,4 @@ private void VerifyVector(TermsEnum vector, int num)
}
}
}
-}
\ No newline at end of file
+}
diff --git a/src/Lucene.Net.Tests/Search/TestNot.cs b/src/Lucene.Net.Tests/Search/TestNot.cs
index 0a267ba864..0d434cd680 100644
--- a/src/Lucene.Net.Tests/Search/TestNot.cs
+++ b/src/Lucene.Net.Tests/Search/TestNot.cs
@@ -31,8 +31,6 @@ namespace Lucene.Net.Search
///
/// Similarity unit test.
- ///
- ///
///
[TestFixture]
public class TestNot : LuceneTestCase
@@ -62,4 +60,4 @@ public virtual void TestNot_Mem()
store.Dispose();
}
}
-}
\ No newline at end of file
+}
diff --git a/src/Lucene.Net.Tests/Search/TestNumericRangeQuery32.cs b/src/Lucene.Net.Tests/Search/TestNumericRangeQuery32.cs
index 9560992ca4..a48c6c4e31 100644
--- a/src/Lucene.Net.Tests/Search/TestNumericRangeQuery32.cs
+++ b/src/Lucene.Net.Tests/Search/TestNumericRangeQuery32.cs
@@ -74,7 +74,10 @@ public override void BeforeClass()
noDocs = AtLeast(4096);
distance = (1 << 30) / noDocs;
directory = NewDirectory();
- RandomIndexWriter writer = new RandomIndexWriter(Random, directory, NewIndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer(Random)).SetMaxBufferedDocs(TestUtil.NextInt32(Random, 100, 1000)).SetMergePolicy(NewLogMergePolicy()));
+ RandomIndexWriter writer = new RandomIndexWriter(Random, directory,
+ NewIndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer(Random))
+ .SetMaxBufferedDocs(TestUtil.NextInt32(Random, 100, 1000))
+ .SetMergePolicy(NewLogMergePolicy()));
FieldType storedInt = new FieldType(Int32Field.TYPE_NOT_STORED);
storedInt.IsStored = true;
@@ -103,7 +106,14 @@ public override void BeforeClass()
FieldType unstoredInt2 = new FieldType(unstoredInt);
unstoredInt2.NumericPrecisionStep = 2;
- Int32Field field8 = new Int32Field("field8", 0, storedInt8), field4 = new Int32Field("field4", 0, storedInt4), field2 = new Int32Field("field2", 0, storedInt2), fieldNoTrie = new Int32Field("field" + int.MaxValue, 0, storedIntNone), ascfield8 = new Int32Field("ascfield8", 0, unstoredInt8), ascfield4 = new Int32Field("ascfield4", 0, unstoredInt4), ascfield2 = new Int32Field("ascfield2", 0, unstoredInt2);
+ Int32Field
+ field8 = new Int32Field("field8", 0, storedInt8),
+ field4 = new Int32Field("field4", 0, storedInt4),
+ field2 = new Int32Field("field2", 0, storedInt2),
+ fieldNoTrie = new Int32Field("field" + int.MaxValue, 0, storedIntNone),
+ ascfield8 = new Int32Field("ascfield8", 0, unstoredInt8),
+ ascfield4 = new Int32Field("ascfield4", 0, unstoredInt4),
+ ascfield2 = new Int32Field("ascfield2", 0, unstoredInt2);
Document doc = new Document();
// add fields, that have a distance to test general functionality
@@ -225,11 +235,11 @@ public virtual void TestInverseRange()
{
AtomicReaderContext context = (AtomicReaderContext)SlowCompositeReaderWrapper.Wrap(reader).Context;
NumericRangeFilter<int> f = NumericRangeFilter.NewInt32Range("field8", 8, 1000, -1000, true, true);
- Assert.IsNull(f.GetDocIdSet(context, (context.AtomicReader).LiveDocs), "A inverse range should return the null instance");
+ Assert.IsNull(f.GetDocIdSet(context, context.AtomicReader.LiveDocs), "A inverse range should return the null instance");
f = NumericRangeFilter.NewInt32Range("field8", 8, int.MaxValue, null, false, false);
- Assert.IsNull(f.GetDocIdSet(context, (context.AtomicReader).LiveDocs), "A exclusive range starting with Integer.MAX_VALUE should return the null instance");
+ Assert.IsNull(f.GetDocIdSet(context, context.AtomicReader.LiveDocs), "A exclusive range starting with Integer.MAX_VALUE should return the null instance");
f = NumericRangeFilter.NewInt32Range("field8", 8, null, int.MinValue, false, false);
- Assert.IsNull(f.GetDocIdSet(context, (context.AtomicReader).LiveDocs), "A exclusive range ending with Integer.MIN_VALUE should return the null instance");
+ Assert.IsNull(f.GetDocIdSet(context, context.AtomicReader.LiveDocs), "A exclusive range ending with Integer.MIN_VALUE should return the null instance");
}
[Test]
@@ -707,4 +717,4 @@ public virtual void TestEqualsAndHash()
Assert.IsFalse(q2.Equals(q1));
}
}
-}
\ No newline at end of file
+}
diff --git a/src/Lucene.Net.Tests/Search/TestNumericRangeQuery64.cs b/src/Lucene.Net.Tests/Search/TestNumericRangeQuery64.cs
index 1bb227cb6c..71b7f7c72b 100644
--- a/src/Lucene.Net.Tests/Search/TestNumericRangeQuery64.cs
+++ b/src/Lucene.Net.Tests/Search/TestNumericRangeQuery64.cs
@@ -74,7 +74,10 @@ public override void BeforeClass()
noDocs = AtLeast(4096);
distance = (1L << 60) / noDocs;
directory = NewDirectory();
- RandomIndexWriter writer = new RandomIndexWriter(Random, directory, NewIndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer(Random)).SetMaxBufferedDocs(TestUtil.NextInt32(Random, 100, 1000)).SetMergePolicy(NewLogMergePolicy()));
+ RandomIndexWriter writer = new RandomIndexWriter(Random, directory,
+ NewIndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer(Random))
+ .SetMaxBufferedDocs(TestUtil.NextInt32(Random, 100, 1000))
+ .SetMergePolicy(NewLogMergePolicy()));
FieldType storedLong = new FieldType(Int64Field.TYPE_NOT_STORED);
storedLong.IsStored = true;
@@ -109,7 +112,16 @@ public override void BeforeClass()
FieldType unstoredLong2 = new FieldType(unstoredLong);
unstoredLong2.NumericPrecisionStep = 2;
- Int64Field field8 = new Int64Field("field8", 0L, storedLong8), field6 = new Int64Field("field6", 0L, storedLong6), field4 = new Int64Field("field4", 0L, storedLong4), field2 = new Int64Field("field2", 0L, storedLong2), fieldNoTrie = new Int64Field("field" + int.MaxValue, 0L, storedLongNone), ascfield8 = new Int64Field("ascfield8", 0L, unstoredLong8), ascfield6 = new Int64Field("ascfield6", 0L, unstoredLong6), ascfield4 = new Int64Field("ascfield4", 0L, unstoredLong4), ascfield2 = new Int64Field("ascfield2", 0L, unstoredLong2);
+ Int64Field
+ field8 = new Int64Field("field8", 0L, storedLong8),
+ field6 = new Int64Field("field6", 0L, storedLong6),
+ field4 = new Int64Field("field4", 0L, storedLong4),
+ field2 = new Int64Field("field2", 0L, storedLong2),
+ fieldNoTrie = new Int64Field("field" + int.MaxValue, 0L, storedLongNone),
+ ascfield8 = new Int64Field("ascfield8", 0L, unstoredLong8),
+ ascfield6 = new Int64Field("ascfield6", 0L, unstoredLong6),
+ ascfield4 = new Int64Field("ascfield4", 0L, unstoredLong4),
+ ascfield2 = new Int64Field("ascfield2", 0L, unstoredLong2);
Document doc = new Document();
// add fields, that have a distance to test general functionality
@@ -240,11 +252,11 @@ public virtual void TestInverseRange()
{
AtomicReaderContext context = (AtomicReaderContext)SlowCompositeReaderWrapper.Wrap(searcher.IndexReader).Context;
NumericRangeFilter<long> f = NumericRangeFilter.NewInt64Range("field8", 8, 1000L, -1000L, true, true);
- Assert.IsNull(f.GetDocIdSet(context, (context.AtomicReader).LiveDocs), "A inverse range should return the null instance");
+ Assert.IsNull(f.GetDocIdSet(context, context.AtomicReader.LiveDocs), "A inverse range should return the null instance");
f = NumericRangeFilter.NewInt64Range("field8", 8, long.MaxValue, null, false, false);
- Assert.IsNull(f.GetDocIdSet(context, (context.AtomicReader).LiveDocs), "A exclusive range starting with Long.MAX_VALUE should return the null instance");
+ Assert.IsNull(f.GetDocIdSet(context, context.AtomicReader.LiveDocs), "A exclusive range starting with Long.MAX_VALUE should return the null instance");
f = NumericRangeFilter.NewInt64Range("field8", 8, null, long.MinValue, false, false);
- Assert.IsNull(f.GetDocIdSet(context, (context.AtomicReader).LiveDocs), "A exclusive range ending with Long.MIN_VALUE should return the null instance");
+ Assert.IsNull(f.GetDocIdSet(context, context.AtomicReader.LiveDocs), "A exclusive range ending with Long.MIN_VALUE should return the null instance");
}
[Test]
@@ -754,4 +766,4 @@ public virtual void TestEqualsAndHash()
// difference to int range is tested in TestNumericRangeQuery32
}
}
-}
\ No newline at end of file
+}
diff --git a/src/Lucene.Net.Tests/Search/TestPhraseQuery.cs b/src/Lucene.Net.Tests/Search/TestPhraseQuery.cs
index 0cb04df2b7..9f00d9473e 100644
--- a/src/Lucene.Net.Tests/Search/TestPhraseQuery.cs
+++ b/src/Lucene.Net.Tests/Search/TestPhraseQuery.cs
@@ -30,15 +30,15 @@ namespace Lucene.Net.Search
* See the License for the specific language governing permissions and
* limitations under the License.
*/
-
+
using DefaultSimilarity = Lucene.Net.Search.Similarities.DefaultSimilarity;
using Directory = Lucene.Net.Store.Directory;
using OpenMode = Lucene.Net.Index.OpenMode;
///
- /// Tests .
+ /// Tests .
///
- ///
+ ///
/*
* Remove ThreadLeaks and run with (Eclipse or command line):
* -ea -Drt.seed=AFD1E7E84B35D2B1
@@ -49,7 +49,8 @@ namespace Lucene.Net.Search
public class TestPhraseQuery : LuceneTestCase
{
///
- /// threshold for comparing floats
+ /// threshold for comparing floats
+ ///
public const float SCORE_COMP_THRESH = 1e-6f;
private static IndexSearcher searcher;
@@ -66,7 +67,7 @@ public override void BeforeClass()
Analyzer analyzer = new AnalyzerAnonymousClass();
RandomIndexWriter writer = new RandomIndexWriter(Random, directory, analyzer);
- Documents.Document doc = new Documents.Document();
+ Document doc = new Document();
doc.Add(NewTextField("field", "one two three four five", Field.Store.YES));
doc.Add(NewTextField("repeated", "this is a repeated field - first part", Field.Store.YES));
IIndexableField repeatedField = NewTextField("repeated", "second part of a repeated field", Field.Store.YES);
@@ -74,11 +75,11 @@ public override void BeforeClass()
doc.Add(NewTextField("palindrome", "one two three two one", Field.Store.YES));
writer.AddDocument(doc);
- doc = new Documents.Document();
+ doc = new Document();
doc.Add(NewTextField("nonexist", "phrase exist notexist exist found", Field.Store.YES));
writer.AddDocument(doc);
- doc = new Documents.Document();
+ doc = new Document();
doc.Add(NewTextField("nonexist", "phrase exist notexist exist found", Field.Store.YES));
writer.AddDocument(doc);
@@ -90,10 +91,6 @@ public override void BeforeClass()
private sealed class AnalyzerAnonymousClass : Analyzer
{
- public AnalyzerAnonymousClass()
- {
- }
-
protected internal override TokenStreamComponents CreateComponents(string fieldName, TextReader reader)
{
return new TokenStreamComponents(new MockTokenizer(reader, MockTokenizer.WHITESPACE, false));
@@ -247,7 +244,7 @@ public virtual void TestPhraseQueryWithStopAnalyzer()
Directory directory = NewDirectory();
Analyzer stopAnalyzer = new MockAnalyzer(Random, MockTokenizer.SIMPLE, true, MockTokenFilter.ENGLISH_STOPSET);
RandomIndexWriter writer = new RandomIndexWriter(Random, directory, NewIndexWriterConfig(TEST_VERSION_CURRENT, stopAnalyzer));
- Documents.Document doc = new Documents.Document();
+ Document doc = new Document();
doc.Add(NewTextField("field", "the stop words are here", Field.Store.YES));
writer.AddDocument(doc);
IndexReader reader = writer.GetReader();
@@ -273,11 +270,11 @@ public virtual void TestPhraseQueryInConjunctionScorer()
Directory directory = NewDirectory();
RandomIndexWriter writer = new RandomIndexWriter(Random, directory);
- Documents.Document doc = new Documents.Document();
+ Document doc = new Document();
doc.Add(NewTextField("source", "marketing info", Field.Store.YES));
writer.AddDocument(doc);
- doc = new Documents.Document();
+ doc = new Document();
doc.Add(NewTextField("contents", "foobar", Field.Store.YES));
doc.Add(NewTextField("source", "marketing info", Field.Store.YES));
writer.AddDocument(doc);
@@ -305,15 +302,15 @@ public virtual void TestPhraseQueryInConjunctionScorer()
reader.Dispose();
writer = new RandomIndexWriter(Random, directory, NewIndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer(Random)).SetOpenMode(OpenMode.CREATE));
- doc = new Documents.Document();
+ doc = new Document();
doc.Add(NewTextField("contents", "map entry woo", Field.Store.YES));
writer.AddDocument(doc);
- doc = new Documents.Document();
+ doc = new Document();
doc.Add(NewTextField("contents", "woo map entry", Field.Store.YES));
writer.AddDocument(doc);
- doc = new Documents.Document();
+ doc = new Document();
doc.Add(NewTextField("contents", "map foobarword entry woo", Field.Store.YES));
writer.AddDocument(doc);
@@ -355,15 +352,15 @@ public virtual void TestSlopScoring()
Directory directory = NewDirectory();
RandomIndexWriter writer = new RandomIndexWriter(Random, directory, NewIndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer(Random)).SetMergePolicy(NewLogMergePolicy()).SetSimilarity(new DefaultSimilarity()));
- Documents.Document doc = new Documents.Document();
+ Document doc = new Document();
doc.Add(NewTextField("field", "foo firstname lastname foo", Field.Store.YES));
writer.AddDocument(doc);
- Documents.Document doc2 = new Documents.Document();
+ Document doc2 = new Document();
doc2.Add(NewTextField("field", "foo firstname zzz lastname foo", Field.Store.YES));
writer.AddDocument(doc2);
- Documents.Document doc3 = new Documents.Document();
+ Document doc3 = new Document();
doc3.Add(NewTextField("field", "foo firstname zzz yyy lastname foo", Field.Store.YES));
writer.AddDocument(doc3);
@@ -632,7 +629,7 @@ public virtual void TestRandomPhrases()
RandomIndexWriter w = new RandomIndexWriter(Random, dir, NewIndexWriterConfig(TEST_VERSION_CURRENT, analyzer).SetMergePolicy(NewLogMergePolicy()));
IList<IList<string>> docs = new JCG.List<IList<string>>();
- Documents.Document d = new Documents.Document();
+ Document d = new Document();
Field f = NewTextField("f", "", Field.Store.NO);
d.Add(f);
@@ -759,4 +756,4 @@ public virtual void TestNegativeSlop()
}
}
}
-}
\ No newline at end of file
+}
diff --git a/src/Lucene.Net.Tests/Search/TestPositionIncrement.cs b/src/Lucene.Net.Tests/Search/TestPositionIncrement.cs
index bb421b1869..446d4364c3 100644
--- a/src/Lucene.Net.Tests/Search/TestPositionIncrement.cs
+++ b/src/Lucene.Net.Tests/Search/TestPositionIncrement.cs
@@ -48,8 +48,6 @@ namespace Lucene.Net.Search
///
/// Term position unit test.
- ///
- ///
///
[TestFixture]
public class TestPositionIncrement : LuceneTestCase
@@ -60,7 +58,7 @@ public class TestPositionIncrement : LuceneTestCase
[Test]
public virtual void TestSetPosition()
{
- Analyzer analyzer = new AnalyzerAnonymousClass(this);
+ Analyzer analyzer = new AnalyzerAnonymousClass();
Directory store = NewDirectory();
RandomIndexWriter writer = new RandomIndexWriter(Random, store, analyzer);
Document d = new Document();
@@ -168,13 +166,6 @@ public virtual void TestSetPosition()
private sealed class AnalyzerAnonymousClass : Analyzer
{
- private readonly TestPositionIncrement outerInstance;
-
- public AnalyzerAnonymousClass(TestPositionIncrement outerInstance)
- {
- this.outerInstance = outerInstance;
- }
-
protected internal override TokenStreamComponents CreateComponents(string fieldName, TextReader reader)
{
return new TokenStreamComponents(new TokenizerAnonymousClass(reader));
@@ -318,4 +309,4 @@ public virtual void TestPayloadsPos0()
dir.Dispose();
}
}
-}
\ No newline at end of file
+}
diff --git a/src/Lucene.Net.Tests/Search/TestPositiveScoresOnlyCollector.cs b/src/Lucene.Net.Tests/Search/TestPositiveScoresOnlyCollector.cs
index 33cb9b72db..69d2f11b21 100644
--- a/src/Lucene.Net.Tests/Search/TestPositiveScoresOnlyCollector.cs
+++ b/src/Lucene.Net.Tests/Search/TestPositiveScoresOnlyCollector.cs
@@ -65,7 +65,9 @@ public override long GetCost()
}
// The scores must have positive as well as negative values
- private static readonly float[] scores = new float[] { 0.7767749f, -1.7839992f, 8.9925785f, 7.9608946f, -0.07948637f, 2.6356435f, 7.4950366f, 7.1490803f, -8.108544f, 4.961808f, 2.2423935f, -7.285586f, 4.6699767f };
+ private static readonly float[] scores = new float[] { 0.7767749f, -1.7839992f,
+ 8.9925785f, 7.9608946f, -0.07948637f, 2.6356435f, 7.4950366f, 7.1490803f,
+ -8.108544f, 4.961808f, 2.2423935f, -7.285586f, 4.6699767f };
[Test]
public virtual void TestNegativeScores()
@@ -90,7 +92,7 @@ public virtual void TestNegativeScores()
IndexReader ir = writer.GetReader();
writer.Dispose();
IndexSearcher searcher = NewSearcher(ir);
- Weight fake = (new TermQuery(new Term("fake", "weight"))).CreateWeight(searcher);
+ Weight fake = new TermQuery(new Term("fake", "weight")).CreateWeight(searcher);
Scorer s = new SimpleScorer(fake);
TopDocsCollector<ScoreDoc> tdc = TopScoreDocCollector.Create(scores.Length, true);
ICollector c = new PositiveScoresOnlyCollector(tdc);
@@ -110,4 +112,4 @@ public virtual void TestNegativeScores()
directory.Dispose();
}
}
-}
\ No newline at end of file
+}
diff --git a/src/Lucene.Net.Tests/Search/TestPrefixFilter.cs b/src/Lucene.Net.Tests/Search/TestPrefixFilter.cs
index e9c70e4a54..2d3a7d93fd 100644
--- a/src/Lucene.Net.Tests/Search/TestPrefixFilter.cs
+++ b/src/Lucene.Net.Tests/Search/TestPrefixFilter.cs
@@ -30,8 +30,7 @@ namespace Lucene.Net.Search
using Term = Lucene.Net.Index.Term;
///
- /// Tests class.
- ///
+ /// Tests class.
///
[TestFixture]
public class TestPrefixFilter : LuceneTestCase
@@ -111,4 +110,4 @@ public virtual void TestPrefixFilter_Mem()
directory.Dispose();
}
}
-}
\ No newline at end of file
+}
diff --git a/src/Lucene.Net.Tests/Search/TestPrefixQuery.cs b/src/Lucene.Net.Tests/Search/TestPrefixQuery.cs
index 30ca4300ab..f69622f2c2 100644
--- a/src/Lucene.Net.Tests/Search/TestPrefixQuery.cs
+++ b/src/Lucene.Net.Tests/Search/TestPrefixQuery.cs
@@ -32,8 +32,7 @@ namespace Lucene.Net.Search
using Terms = Lucene.Net.Index.Terms;
///
- /// Tests class.
- ///
+ /// Tests class.
///
[TestFixture]
public class TestPrefixQuery : LuceneTestCase
@@ -72,4 +71,4 @@ public virtual void TestPrefixQuery_Mem()
directory.Dispose();
}
}
-}
\ No newline at end of file
+}
diff --git a/src/Lucene.Net.Tests/Search/TestPrefixRandom.cs b/src/Lucene.Net.Tests/Search/TestPrefixRandom.cs
index b094f1345d..7e664801ca 100644
--- a/src/Lucene.Net.Tests/Search/TestPrefixRandom.cs
+++ b/src/Lucene.Net.Tests/Search/TestPrefixRandom.cs
@@ -56,7 +56,9 @@ public override void SetUp()
{
base.SetUp();
dir = NewDirectory();
- RandomIndexWriter writer = new RandomIndexWriter(Random, dir, NewIndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer(Random, MockTokenizer.KEYWORD, false)).SetMaxBufferedDocs(TestUtil.NextInt32(Random, 50, 1000)));
+ RandomIndexWriter writer = new RandomIndexWriter(Random, dir,
+ NewIndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer(Random, MockTokenizer.KEYWORD, false))
+ .SetMaxBufferedDocs(TestUtil.NextInt32(Random, 50, 1000)));
var doc = new Document();
Field field = NewStringField("field", "", Field.Store.NO);
@@ -88,32 +90,26 @@ public override void TearDown()
/// a stupid prefix query that just blasts thru the terms
private class DumbPrefixQuery : MultiTermQuery
{
- private readonly TestPrefixRandom outerInstance;
-
private readonly BytesRef prefix;
- internal DumbPrefixQuery(TestPrefixRandom outerInstance, Term term)
+ internal DumbPrefixQuery(Term term)
: base(term.Field)
{
- this.outerInstance = outerInstance;
prefix = term.Bytes;
}
protected override TermsEnum GetTermsEnum(Terms terms, AttributeSource atts)
{
- return new SimplePrefixTermsEnum(this, terms.GetEnumerator(), prefix);
+ return new SimplePrefixTermsEnum(terms.GetEnumerator(), prefix);
}
private class SimplePrefixTermsEnum : FilteredTermsEnum
{
- private readonly TestPrefixRandom.DumbPrefixQuery outerInstance;
-
private readonly BytesRef prefix;
- internal SimplePrefixTermsEnum(TestPrefixRandom.DumbPrefixQuery outerInstance, TermsEnum tenum, BytesRef prefix)
+ internal SimplePrefixTermsEnum(TermsEnum tenum, BytesRef prefix)
: base(tenum)
{
- this.outerInstance = outerInstance;
this.prefix = prefix;
SetInitialSeekTerm(new BytesRef(""));
}
@@ -149,11 +145,11 @@ public virtual void TestPrefixes()
private void AssertSame(string prefix)
{
PrefixQuery smart = new PrefixQuery(new Term("field", prefix));
- DumbPrefixQuery dumb = new DumbPrefixQuery(this, new Term("field", prefix));
+ DumbPrefixQuery dumb = new DumbPrefixQuery(new Term("field", prefix));
TopDocs smartDocs = searcher.Search(smart, 25);
TopDocs dumbDocs = searcher.Search(dumb, 25);
CheckHits.CheckEqual(smart, smartDocs.ScoreDocs, dumbDocs.ScoreDocs);
}
}
-}
\ No newline at end of file
+}
diff --git a/src/Lucene.Net.Tests/Search/TestQueryRescorer.cs b/src/Lucene.Net.Tests/Search/TestQueryRescorer.cs
index b567d6af8f..cf069d9e9b 100644
--- a/src/Lucene.Net.Tests/Search/TestQueryRescorer.cs
+++ b/src/Lucene.Net.Tests/Search/TestQueryRescorer.cs
@@ -4,6 +4,7 @@
using RandomizedTesting.Generators;
using System;
using System.Collections.Generic;
+using System.Globalization;
using System.Text;
using Assert = Lucene.Net.TestFramework.Assert;
@@ -151,7 +152,7 @@ public virtual void TestCustomCombine()
pq.Add(new Term("field", "wizard"));
pq.Add(new Term("field", "oz"));
- TopDocs hits2 = new QueryRescorerAnonymousClass(this, pq)
+ TopDocs hits2 = new QueryRescorerAnonymousClass(pq)
.Rescore(searcher, hits, 10);
// Resorting didn't change the order:
@@ -165,12 +166,9 @@ public virtual void TestCustomCombine()
private sealed class QueryRescorerAnonymousClass : QueryRescorer
{
- private readonly TestQueryRescorer outerInstance;
-
- public QueryRescorerAnonymousClass(TestQueryRescorer outerInstance, PhraseQuery pq)
+ public QueryRescorerAnonymousClass(PhraseQuery pq)
: base(pq)
{
- this.outerInstance = outerInstance;
}
protected override float Combine(float firstPassScore, bool secondPassMatches, float secondPassScore)
@@ -218,7 +216,7 @@ public virtual void TestExplain()
pq.Add(new Term("field", "wizard"));
pq.Add(new Term("field", "oz"));
- Rescorer rescorer = new QueryRescorerAnonymousClass2(this, pq);
+ Rescorer rescorer = new QueryRescorerAnonymousClass2(pq);
TopDocs hits2 = rescorer.Rescore(searcher, hits, 10);
@@ -253,12 +251,9 @@ public virtual void TestExplain()
private sealed class QueryRescorerAnonymousClass2 : QueryRescorer
{
- private readonly TestQueryRescorer outerInstance;
-
- public QueryRescorerAnonymousClass2(TestQueryRescorer outerInstance, PhraseQuery pq)
+ public QueryRescorerAnonymousClass2(PhraseQuery pq)
: base(pq)
{
- this.outerInstance = outerInstance;
}
protected override float Combine(float firstPassScore, bool secondPassMatches, float secondPassScore)
@@ -363,7 +358,7 @@ public virtual void TestRandom()
//System.out.println("numHits=" + numHits + " reverse=" + reverse);
TopDocs hits = s.Search(new TermQuery(new Term("field", "a")), numHits);
- TopDocs hits2 = new QueryRescorerAnonymousClass3(this, new FixedScoreQuery(idToNum, reverse))
+ TopDocs hits2 = new QueryRescorerAnonymousClass3(new FixedScoreQuery(idToNum, reverse))
.Rescore(s, hits, numHits);
int[] expected = new int[numHits];
@@ -379,8 +374,8 @@ public virtual void TestRandom()
{
try
{
- int av = idToNum[Convert.ToInt32(r.Document(a).Get("id"))];
- int bv = idToNum[Convert.ToInt32(r.Document(b).Get("id"))];
+ int av = idToNum[Convert.ToInt32(r.Document(a).Get("id"), CultureInfo.InvariantCulture)];
+ int bv = idToNum[Convert.ToInt32(r.Document(b).Get("id"), CultureInfo.InvariantCulture)];
if (av < bv)
{
return -reverseInt;
@@ -406,7 +401,7 @@ public virtual void TestRandom()
for (int i = 0; i < numHits; i++)
{
//System.out.println("expected=" + expected[i] + " vs " + hits2.ScoreDocs[i].Doc + " v=" + idToNum[Integer.parseInt(r.Document(expected[i]).Get("id"))]);
- if ((int)expected[i] != hits2.ScoreDocs[i].Doc)
+ if (expected[i] != hits2.ScoreDocs[i].Doc)
{
//System.out.println(" diff!");
fail = true;
@@ -420,12 +415,9 @@ public virtual void TestRandom()
private sealed class QueryRescorerAnonymousClass3 : QueryRescorer
{
- private readonly TestQueryRescorer outerInstance;
-
- public QueryRescorerAnonymousClass3(TestQueryRescorer outerInstance, FixedScoreQuery fixedScoreQuery)
+ public QueryRescorerAnonymousClass3(FixedScoreQuery fixedScoreQuery)
: base(fixedScoreQuery)
{
- this.outerInstance = outerInstance;
}
protected override float Combine(float firstPassScore, bool secondPassMatches, float secondPassScore)
@@ -433,7 +425,7 @@ protected override float Combine(float firstPassScore, bool secondPassMatches, f
return secondPassScore;
}
}
-
+
///
/// Just assigns score == idToNum[doc("id")] for each doc.
private class FixedScoreQuery : Query
@@ -566,7 +558,7 @@ public override object Clone()
public override int GetHashCode()
{
- int PRIME = 31;
+ const int PRIME = 31; // LUCENENET: made const
int hash = base.GetHashCode();
if (reverse)
{
@@ -577,4 +569,4 @@ public override int GetHashCode()
}
}
}
-}
\ No newline at end of file
+}
diff --git a/src/Lucene.Net.Tests/Search/TestRegexpQuery.cs b/src/Lucene.Net.Tests/Search/TestRegexpQuery.cs
index ff49a59e66..cc54e3cf12 100644
--- a/src/Lucene.Net.Tests/Search/TestRegexpQuery.cs
+++ b/src/Lucene.Net.Tests/Search/TestRegexpQuery.cs
@@ -115,23 +115,20 @@ public virtual void TestRegexComplement()
[Test]
public virtual void TestCustomProvider()
{
- IAutomatonProvider myProvider = new AutomatonProviderAnonymousClass(this);
+ IAutomatonProvider myProvider = new AutomatonProviderAnonymousClass();
RegexpQuery query = new RegexpQuery(NewTerm("<quickBrown>"), RegExpSyntax.ALL, myProvider);
Assert.AreEqual(1, searcher.Search(query, 5).TotalHits);
}
private sealed class AutomatonProviderAnonymousClass : IAutomatonProvider
{
- private readonly TestRegexpQuery outerInstance;
-
- public AutomatonProviderAnonymousClass(TestRegexpQuery outerInstance)
- {
- this.outerInstance = outerInstance;
- quickBrownAutomaton = BasicOperations.Union(new Automaton[] { BasicAutomata.MakeString("quick"), BasicAutomata.MakeString("brown"), BasicAutomata.MakeString("bob") });
- }
-
// automaton that matches quick or brown
- private Automaton quickBrownAutomaton;
+ private Automaton quickBrownAutomaton = BasicOperations.Union(new Automaton[]
+ {
+ BasicAutomata.MakeString("quick"),
+ BasicAutomata.MakeString("brown"),
+ BasicAutomata.MakeString("bob")
+ });
public Automaton GetAutomaton(string name)
{
@@ -158,4 +155,4 @@ public virtual void TestBacktracking()
Assert.AreEqual(1, RegexQueryNrHits("4934[314]"));
}
}
-}
\ No newline at end of file
+}
diff --git a/src/Lucene.Net.Tests/Search/TestRegexpRandom.cs b/src/Lucene.Net.Tests/Search/TestRegexpRandom.cs
index bc512c1d55..060ba58eee 100644
--- a/src/Lucene.Net.Tests/Search/TestRegexpRandom.cs
+++ b/src/Lucene.Net.Tests/Search/TestRegexpRandom.cs
@@ -52,7 +52,9 @@ public override void SetUp()
{
base.SetUp();
dir = NewDirectory();
- RandomIndexWriter writer = new RandomIndexWriter(Random, dir, NewIndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer(Random)).SetMaxBufferedDocs(TestUtil.NextInt32(Random, 50, 1000)));
+ RandomIndexWriter writer = new RandomIndexWriter(Random, dir,
+ NewIndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer(Random))
+ .SetMaxBufferedDocs(TestUtil.NextInt32(Random, 50, 1000)));
Document doc = new Document();
FieldType customType = new FieldType(TextField.TYPE_STORED);
@@ -154,4 +156,4 @@ public virtual void TestRegexps()
}
}
}
-}
\ No newline at end of file
+}
diff --git a/src/Lucene.Net.Tests/Search/TestRegexpRandom2.cs b/src/Lucene.Net.Tests/Search/TestRegexpRandom2.cs
index 4104056f31..7730df0664 100644
--- a/src/Lucene.Net.Tests/Search/TestRegexpRandom2.cs
+++ b/src/Lucene.Net.Tests/Search/TestRegexpRandom2.cs
@@ -68,7 +68,9 @@ public override void SetUp()
base.SetUp();
dir = NewDirectory();
fieldName = Random.NextBoolean() ? "field" : ""; // sometimes use an empty string as field name
- RandomIndexWriter writer = new RandomIndexWriter(Random, dir, NewIndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer(Random, MockTokenizer.KEYWORD, false)).SetMaxBufferedDocs(TestUtil.NextInt32(Random, 50, 1000)));
+ RandomIndexWriter writer = new RandomIndexWriter(Random, dir,
+ NewIndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer(Random, MockTokenizer.KEYWORD, false))
+ .SetMaxBufferedDocs(TestUtil.NextInt32(Random, 50, 1000)));
Document doc = new Document();
Field field = NewStringField(fieldName, "", Field.Store.NO);
doc.Add(field);
@@ -127,16 +129,12 @@ protected override TermsEnum GetTermsEnum(Terms terms, AttributeSource atts)
private sealed class SimpleAutomatonTermsEnum : FilteredTermsEnum
{
- private readonly TestRegexpRandom2.DumbRegexpQuery outerInstance;
-
private CharacterRunAutomaton runAutomaton;
private readonly CharsRef utf16 = new CharsRef(10);
internal SimpleAutomatonTermsEnum(TestRegexpRandom2.DumbRegexpQuery outerInstance, TermsEnum tenum)
: base(tenum)
{
- this.outerInstance = outerInstance;
-
runAutomaton = new CharacterRunAutomaton(outerInstance.automaton);
SetInitialSeekTerm(new BytesRef(""));
}
@@ -188,4 +186,4 @@ protected internal virtual void AssertSame(string regexp)
CheckHits.CheckEqual(smart, smartDocs.ScoreDocs, dumbDocs.ScoreDocs);
}
}
-}
\ No newline at end of file
+}
diff --git a/src/Lucene.Net.Tests/Search/TestSameScoresWithThreads.cs b/src/Lucene.Net.Tests/Search/TestSameScoresWithThreads.cs
index 39b152182f..13ce75c65f 100644
--- a/src/Lucene.Net.Tests/Search/TestSameScoresWithThreads.cs
+++ b/src/Lucene.Net.Tests/Search/TestSameScoresWithThreads.cs
@@ -96,7 +96,7 @@ public virtual void Test()
ThreadJob[] threads = new ThreadJob[numThreads];
for (int threadID = 0; threadID < numThreads; threadID++)
{
- ThreadJob thread = new ThreadAnonymousClass(this, s, answers, startingGun);
+ ThreadJob thread = new ThreadAnonymousClass(s, answers, startingGun);
threads[threadID] = thread;
thread.Start();
}
@@ -112,15 +112,12 @@ public virtual void Test()
private sealed class ThreadAnonymousClass : ThreadJob
{
- private readonly TestSameScoresWithThreads outerInstance;
-
private readonly IndexSearcher s;
private readonly IDictionary<string, TopDocs> answers;
private readonly CountdownEvent startingGun;
- public ThreadAnonymousClass(TestSameScoresWithThreads outerInstance, IndexSearcher s, IDictionary<string, TopDocs> answers, CountdownEvent startingGun)
+ public ThreadAnonymousClass(IndexSearcher s, IDictionary<string, TopDocs> answers, CountdownEvent startingGun)
{
- this.outerInstance = outerInstance;
this.s = s;
this.answers = answers;
this.startingGun = startingGun;
@@ -157,4 +154,4 @@ public override void Run()
}
}
}
-}
\ No newline at end of file
+}
diff --git a/src/Lucene.Net.Tests/Search/TestScoreCachingWrappingScorer.cs b/src/Lucene.Net.Tests/Search/TestScoreCachingWrappingScorer.cs
index 5bf8287a10..2c8fd50ebe 100644
--- a/src/Lucene.Net.Tests/Search/TestScoreCachingWrappingScorer.cs
+++ b/src/Lucene.Net.Tests/Search/TestScoreCachingWrappingScorer.cs
@@ -108,7 +108,9 @@ public void SetScorer(Scorer scorer)
public bool AcceptsDocsOutOfOrder => true;
}
- private static readonly float[] scores = new float[] { 0.7767749f, 1.7839992f, 8.9925785f, 7.9608946f, 0.07948637f, 2.6356435f, 7.4950366f, 7.1490803f, 8.108544f, 4.961808f, 2.2423935f, 7.285586f, 4.6699767f };
+ private static readonly float[] scores = new float[] { 0.7767749f, 1.7839992f,
+ 8.9925785f, 7.9608946f, 0.07948637f, 2.6356435f, 7.4950366f, 7.1490803f,
+ 8.108544f, 4.961808f, 2.2423935f, 7.285586f, 4.6699767f };
[Test]
public virtual void TestGetScores()
@@ -119,7 +121,7 @@ public virtual void TestGetScores()
IndexReader ir = writer.GetReader();
writer.Dispose();
IndexSearcher searcher = NewSearcher(ir);
- Weight fake = (new TermQuery(new Term("fake", "weight"))).CreateWeight(searcher);
+ Weight fake = new TermQuery(new Term("fake", "weight")).CreateWeight(searcher);
Scorer s = new SimpleScorer(fake);
ScoreCachingCollector scc = new ScoreCachingCollector(scores.Length);
scc.SetScorer(s);
@@ -139,4 +141,4 @@ public virtual void TestGetScores()
directory.Dispose();
}
}
-}
\ No newline at end of file
+}
diff --git a/src/Lucene.Net.Tests/Search/TestScorerPerf.cs b/src/Lucene.Net.Tests/Search/TestScorerPerf.cs
index 8396c01b23..15ae8cce01 100644
--- a/src/Lucene.Net.Tests/Search/TestScorerPerf.cs
+++ b/src/Lucene.Net.Tests/Search/TestScorerPerf.cs
@@ -45,7 +45,7 @@ public class TestScorerPerf : LuceneTestCase
internal bool validate = true; // set to false when doing performance testing
internal BitSet[] sets;
- internal Term[] terms;
+ //internal Term[] terms; // LUCENENET: see commented-out code below
internal IndexSearcher s;
internal IndexReader r;
internal Directory d;
@@ -63,34 +63,35 @@ public virtual void CreateDummySearcher()
s = NewSearcher(r);
}
- public virtual void CreateRandomTerms(int nDocs, int nTerms, double power, Directory dir)
- {
- int[] freq = new int[nTerms];
- terms = new Term[nTerms];
- for (int i = 0; i < nTerms; i++)
- {
- int f = (nTerms + 1) - i; // make first terms less frequent
- freq[i] = (int)Math.Ceiling(Math.Pow(f, power));
- terms[i] = new Term("f", char.ToString((char)('A' + i)));
- }
-
- IndexWriter iw = new IndexWriter(dir, NewIndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer(Random)).SetOpenMode(OpenMode.CREATE));
- for (int i = 0; i < nDocs; i++)
- {
- Document d = new Document();
- for (int j = 0; j < nTerms; j++)
- {
- if (Random.Next(freq[j]) == 0)
- {
- d.Add(NewStringField("f", terms[j].Text, Field.Store.NO));
- //System.out.println(d);
- }
- }
- iw.AddDocument(d);
- }
- iw.ForceMerge(1);
- iw.Dispose();
- }
+ // LUCENENET: unused so commented out here, only used by commented-out code below
+ // public virtual void CreateRandomTerms(int nDocs, int nTerms, double power, Directory dir)
+ // {
+ // int[] freq = new int[nTerms];
+ // terms = new Term[nTerms];
+ // for (int i = 0; i < nTerms; i++)
+ // {
+ // int f = (nTerms + 1) - i; // make first terms less frequent
+ // freq[i] = (int)Math.Ceiling(Math.Pow(f, power));
+ // terms[i] = new Term("f", char.ToString((char)('A' + i)));
+ // }
+ //
+ // IndexWriter iw = new IndexWriter(dir, NewIndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer(Random)).SetOpenMode(OpenMode.CREATE));
+ // for (int i = 0; i < nDocs; i++)
+ // {
+ // Document d = new Document();
+ // for (int j = 0; j < nTerms; j++)
+ // {
+ // if (Random.Next(freq[j]) == 0)
+ // {
+ // d.Add(NewStringField("f", terms[j].Text, Field.Store.NO));
+ // //System.out.println(d);
+ // }
+ // }
+ // iw.AddDocument(d);
+ // }
+ // iw.ForceMerge(1);
+ // iw.Dispose();
+ // }
public virtual BitSet RandBitSet(int sz, int numBitsToSet)
{
@@ -264,115 +265,116 @@ public virtual int DoNestedConjunctions(int iter, int maxOuterClauses, int maxCl
return ret;
}
- public virtual int DoTermConjunctions(IndexSearcher s, int termsInIndex, int maxClauses, int iter)
- {
- int ret = 0;
-
- long nMatches = 0;
- for (int i = 0; i < iter; i++)
- {
- int nClauses = Random.Next(maxClauses - 1) + 2; // min 2 clauses
- BooleanQuery bq = new BooleanQuery();
- BitSet termflag = new BitSet(termsInIndex);
- for (int j = 0; j < nClauses; j++)
- {
- int tnum;
- // don't pick same clause twice
- tnum = Random.Next(termsInIndex);
- if (termflag.Get(tnum))
- {
- tnum = termflag.NextClearBit(tnum);
- }
- if (tnum < 0 || tnum >= termsInIndex)
- {
- tnum = termflag.NextClearBit(0);
- }
- termflag.Set(tnum);
- Query tq = new TermQuery(terms[tnum]);
- bq.Add(tq, Occur.MUST);
- }
-
- CountingHitCollector hc = new CountingHitCollector();
- s.Search(bq, hc);
- nMatches += hc.Count;
- ret += hc.Sum;
- }
- if (Verbose)
- {
- Console.WriteLine("Average number of matches=" + (nMatches / iter));
- }
-
- return ret;
- }
-
- public virtual int DoNestedTermConjunctions(IndexSearcher s, int termsInIndex, int maxOuterClauses, int maxClauses, int iter)
- {
- int ret = 0;
- long nMatches = 0;
- for (int i = 0; i < iter; i++)
- {
- int oClauses = Random.Next(maxOuterClauses - 1) + 2;
- BooleanQuery oq = new BooleanQuery();
- for (int o = 0; o < oClauses; o++)
- {
- int nClauses = Random.Next(maxClauses - 1) + 2; // min 2 clauses
- BooleanQuery bq = new BooleanQuery();
- BitSet termflag = new BitSet(termsInIndex);
- for (int j = 0; j < nClauses; j++)
- {
- int tnum;
- // don't pick same clause twice
- tnum = Random.Next(termsInIndex);
- if (termflag.Get(tnum))
- {
- tnum = termflag.NextClearBit(tnum);
- }
- if (tnum < 0 || tnum >= 25)
- {
- tnum = termflag.NextClearBit(0);
- }
- termflag.Set(tnum);
- Query tq = new TermQuery(terms[tnum]);
- bq.Add(tq, Occur.MUST);
- } // inner
-
- oq.Add(bq, Occur.MUST);
- } // outer
-
- CountingHitCollector hc = new CountingHitCollector();
- s.Search(oq, hc);
- nMatches += hc.Count;
- ret += hc.Sum;
- }
- if (Verbose)
- {
- Console.WriteLine("Average number of matches=" + (nMatches / iter));
- }
- return ret;
- }
-
- public virtual int DoSloppyPhrase(IndexSearcher s, int termsInIndex, int maxClauses, int iter)
- {
- int ret = 0;
-
- for (int i = 0; i < iter; i++)
- {
- int nClauses = Random.Next(maxClauses - 1) + 2; // min 2 clauses
- PhraseQuery q = new PhraseQuery();
- for (int j = 0; j < nClauses; j++)
- {
- int tnum = Random.Next(termsInIndex);
- q.Add(new Term("f", char.ToString((char)(tnum + 'A'))), j);
- }
- q.Slop = termsInIndex; // this could be random too
-
- CountingHitCollector hc = new CountingHitCollector();
- s.Search(q, hc);
- ret += hc.Sum;
- }
-
- return ret;
- }
+ // LUCENENET: unused so commented out here, only used by commented-out code below
+ // public virtual int DoTermConjunctions(IndexSearcher s, int termsInIndex, int maxClauses, int iter)
+ // {
+ // int ret = 0;
+ //
+ // long nMatches = 0;
+ // for (int i = 0; i < iter; i++)
+ // {
+ // int nClauses = Random.Next(maxClauses - 1) + 2; // min 2 clauses
+ // BooleanQuery bq = new BooleanQuery();
+ // BitSet termflag = new BitSet(termsInIndex);
+ // for (int j = 0; j < nClauses; j++)
+ // {
+ // int tnum;
+ // // don't pick same clause twice
+ // tnum = Random.Next(termsInIndex);
+ // if (termflag.Get(tnum))
+ // {
+ // tnum = termflag.NextClearBit(tnum);
+ // }
+ // if (tnum < 0 || tnum >= termsInIndex)
+ // {
+ // tnum = termflag.NextClearBit(0);
+ // }
+ // termflag.Set(tnum);
+ // Query tq = new TermQuery(terms[tnum]);
+ // bq.Add(tq, Occur.MUST);
+ // }
+ //
+ // CountingHitCollector hc = new CountingHitCollector();
+ // s.Search(bq, hc);
+ // nMatches += hc.Count;
+ // ret += hc.Sum;
+ // }
+ // if (Verbose)
+ // {
+ // Console.WriteLine("Average number of matches=" + (nMatches / iter));
+ // }
+ //
+ // return ret;
+ // }
+ //
+ // public virtual int DoNestedTermConjunctions(IndexSearcher s, int termsInIndex, int maxOuterClauses, int maxClauses, int iter)
+ // {
+ // int ret = 0;
+ // long nMatches = 0;
+ // for (int i = 0; i < iter; i++)
+ // {
+ // int oClauses = Random.Next(maxOuterClauses - 1) + 2;
+ // BooleanQuery oq = new BooleanQuery();
+ // for (int o = 0; o < oClauses; o++)
+ // {
+ // int nClauses = Random.Next(maxClauses - 1) + 2; // min 2 clauses
+ // BooleanQuery bq = new BooleanQuery();
+ // BitSet termflag = new BitSet(termsInIndex);
+ // for (int j = 0; j < nClauses; j++)
+ // {
+ // int tnum;
+ // // don't pick same clause twice
+ // tnum = Random.Next(termsInIndex);
+ // if (termflag.Get(tnum))
+ // {
+ // tnum = termflag.NextClearBit(tnum);
+ // }
+ // if (tnum < 0 || tnum >= 25)
+ // {
+ // tnum = termflag.NextClearBit(0);
+ // }
+ // termflag.Set(tnum);
+ // Query tq = new TermQuery(terms[tnum]);
+ // bq.Add(tq, Occur.MUST);
+ // } // inner
+ //
+ // oq.Add(bq, Occur.MUST);
+ // } // outer
+ //
+ // CountingHitCollector hc = new CountingHitCollector();
+ // s.Search(oq, hc);
+ // nMatches += hc.Count;
+ // ret += hc.Sum;
+ // }
+ // if (Verbose)
+ // {
+ // Console.WriteLine("Average number of matches=" + (nMatches / iter));
+ // }
+ // return ret;
+ // }
+ //
+ // public virtual int DoSloppyPhrase(IndexSearcher s, int termsInIndex, int maxClauses, int iter)
+ // {
+ // int ret = 0;
+ //
+ // for (int i = 0; i < iter; i++)
+ // {
+ // int nClauses = Random.Next(maxClauses - 1) + 2; // min 2 clauses
+ // PhraseQuery q = new PhraseQuery();
+ // for (int j = 0; j < nClauses; j++)
+ // {
+ // int tnum = Random.Next(termsInIndex);
+ // q.Add(new Term("f", char.ToString((char)(tnum + 'A'))), j);
+ // }
+ // q.Slop = termsInIndex; // this could be random too
+ //
+ // CountingHitCollector hc = new CountingHitCollector();
+ // s.Search(q, hc);
+ // ret += hc.Sum;
+ // }
+ //
+ // return ret;
+ // }
[Test]
public virtual void TestConjunctions()
@@ -474,4 +476,4 @@ public virtual void TestConjunctions()
/// **
*/
}
-}
\ No newline at end of file
+}
diff --git a/src/Lucene.Net.Tests/Search/TestSearchWithThreads.cs b/src/Lucene.Net.Tests/Search/TestSearchWithThreads.cs
index 072ae6924b..1060663104 100644
--- a/src/Lucene.Net.Tests/Search/TestSearchWithThreads.cs
+++ b/src/Lucene.Net.Tests/Search/TestSearchWithThreads.cs
@@ -95,7 +95,7 @@ public virtual void Test()
for (int threadID = 0; threadID < NUM_SEARCH_THREADS; threadID++)
{
threads[threadID] = new ThreadAnonymousClass(this, s, failed, netSearch);
- threads[threadID].IsBackground = (true);
+ threads[threadID].IsBackground = true;
}
foreach (ThreadJob t in threads)
@@ -156,10 +156,10 @@ public override void Run()
}
catch (Exception exc) when (exc.IsException())
{
- failed.Value = (true);
+ failed.Value = true;
throw RuntimeException.Create(exc);
}
}
}
}
-}
\ No newline at end of file
+}
diff --git a/src/Lucene.Net.Tests/Search/TestSearcherManager.cs b/src/Lucene.Net.Tests/Search/TestSearcherManager.cs
index a3f7ce4de8..2f1449d242 100644
--- a/src/Lucene.Net.Tests/Search/TestSearcherManager.cs
+++ b/src/Lucene.Net.Tests/Search/TestSearcherManager.cs
@@ -122,7 +122,7 @@ public override IndexSearcher NewSearcher(IndexReader r)
protected override void DoSearching(TaskScheduler es, long stopTime)
{
ThreadJob reopenThread = new ThreadAnonymousClass(this, stopTime);
- reopenThread.IsBackground = (true);
+ reopenThread.IsBackground = true;
reopenThread.Start();
RunSearchThreads(stopTime);
@@ -277,7 +277,7 @@ public virtual void TestIntermediateClose()
AtomicBoolean triedReopen = new AtomicBoolean(false);
//TaskScheduler es = Random().NextBoolean() ? null : Executors.newCachedThreadPool(new NamedThreadFactory("testIntermediateClose"));
TaskScheduler es = Random.NextBoolean() ? null : TaskScheduler.Default;
- SearcherFactory factory = new SearcherFactoryAnonymousClass2(this, awaitEnterWarm, awaitClose, triedReopen, es);
+ SearcherFactory factory = new SearcherFactoryAnonymousClass2(awaitEnterWarm, awaitClose, triedReopen, es);
SearcherManager searcherManager = Random.NextBoolean() ? new SearcherManager(dir, factory) : new SearcherManager(writer, Random.NextBoolean(), factory);
if (Verbose)
{
@@ -296,7 +296,7 @@ public virtual void TestIntermediateClose()
writer.Commit();
AtomicBoolean success = new AtomicBoolean(false);
Exception[] exc = new Exception[1];
- ThreadJob thread = new ThreadJob(() => new RunnableAnonymousClass(this, triedReopen, searcherManager, success, exc).Run());
+ ThreadJob thread = new ThreadJob(() => new RunnableAnonymousClass(triedReopen, searcherManager, success, exc).Run());
thread.Start();
if (Verbose)
{
@@ -333,16 +333,13 @@ public virtual void TestIntermediateClose()
private sealed class SearcherFactoryAnonymousClass2 : SearcherFactory
{
- private readonly TestSearcherManager outerInstance;
-
private CountdownEvent awaitEnterWarm;
private CountdownEvent awaitClose;
private AtomicBoolean triedReopen;
private TaskScheduler es;
- public SearcherFactoryAnonymousClass2(TestSearcherManager outerInstance, CountdownEvent awaitEnterWarm, CountdownEvent awaitClose, AtomicBoolean triedReopen, TaskScheduler es)
+ public SearcherFactoryAnonymousClass2(CountdownEvent awaitEnterWarm, CountdownEvent awaitClose, AtomicBoolean triedReopen, TaskScheduler es)
{
- this.outerInstance = outerInstance;
this.awaitEnterWarm = awaitEnterWarm;
this.awaitClose = awaitClose;
this.triedReopen = triedReopen;
@@ -369,16 +366,13 @@ public override IndexSearcher NewSearcher(IndexReader r)
private sealed class RunnableAnonymousClass //: IThreadRunnable
{
- private readonly TestSearcherManager outerInstance;
-
private AtomicBoolean triedReopen;
private SearcherManager searcherManager;
private AtomicBoolean success;
private Exception[] exc;
- public RunnableAnonymousClass(TestSearcherManager outerInstance, AtomicBoolean triedReopen, SearcherManager searcherManager, AtomicBoolean success, Exception[] exc)
+ public RunnableAnonymousClass(AtomicBoolean triedReopen, SearcherManager searcherManager, AtomicBoolean success, Exception[] exc)
{
- this.outerInstance = outerInstance;
this.triedReopen = triedReopen;
this.searcherManager = searcherManager;
this.success = success;
@@ -389,13 +383,13 @@ public void Run()
{
try
{
- triedReopen.Value = (true);
+ triedReopen.Value = true;
if (Verbose)
{
Console.WriteLine("NOW call maybeReopen");
}
searcherManager.MaybeRefresh();
- success.Value = (true);
+ success.Value = true;
}
catch (Exception e) when (e.IsAlreadyClosedException())
{
@@ -410,7 +404,7 @@ public void Run()
}
exc[0] = e;
// use success as the barrier here to make sure we see the write
- success.Value = (false);
+ success.Value = false;
}
}
}
@@ -504,7 +498,7 @@ public virtual void TestListenerCalled()
IndexWriter iw = new IndexWriter(dir, new IndexWriterConfig(TEST_VERSION_CURRENT, null));
AtomicBoolean afterRefreshCalled = new AtomicBoolean(false);
SearcherManager sm = new SearcherManager(iw, false, new SearcherFactory());
- sm.AddListener(new RefreshListenerAnonymousClass(this, afterRefreshCalled));
+ sm.AddListener(new RefreshListenerAnonymousClass(afterRefreshCalled));
iw.AddDocument(new Document());
iw.Commit();
assertFalse(afterRefreshCalled);
@@ -517,13 +511,10 @@ public virtual void TestListenerCalled()
private sealed class RefreshListenerAnonymousClass : ReferenceManager.IRefreshListener
{
- private readonly TestSearcherManager outerInstance;
-
private AtomicBoolean afterRefreshCalled;
- public RefreshListenerAnonymousClass(TestSearcherManager outerInstance, AtomicBoolean afterRefreshCalled)
+ public RefreshListenerAnonymousClass(AtomicBoolean afterRefreshCalled)
{
- this.outerInstance = outerInstance;
this.afterRefreshCalled = afterRefreshCalled;
}
@@ -535,7 +526,7 @@ public void AfterRefresh(bool didRefresh)
{
if (didRefresh)
{
- afterRefreshCalled.Value = (true);
+ afterRefreshCalled.Value = true;
}
}
}
@@ -550,7 +541,7 @@ public virtual void TestEvilSearcherFactory()
IndexReader other = DirectoryReader.Open(dir);
- SearcherFactory theEvilOne = new SearcherFactoryAnonymousClass3(this, other);
+ SearcherFactory theEvilOne = new SearcherFactoryAnonymousClass3(other);
try
{
@@ -575,13 +566,10 @@ public virtual void TestEvilSearcherFactory()
private sealed class SearcherFactoryAnonymousClass3 : SearcherFactory
{
- private readonly TestSearcherManager outerInstance;
-
private IndexReader other;
- public SearcherFactoryAnonymousClass3(TestSearcherManager outerInstance, IndexReader other)
+ public SearcherFactoryAnonymousClass3(IndexReader other)
{
- this.outerInstance = outerInstance;
this.other = other;
}
@@ -602,7 +590,7 @@ public virtual void TestMaybeRefreshBlockingLock()
SearcherManager sm = new SearcherManager(dir, null);
- ThreadJob t = new ThreadAnonymousClass2(this, sm);
+ ThreadJob t = new ThreadAnonymousClass2(sm);
t.Start();
t.Join();
@@ -615,13 +603,10 @@ public virtual void TestMaybeRefreshBlockingLock()
private sealed class ThreadAnonymousClass2 : ThreadJob
{
- private readonly TestSearcherManager outerInstance;
-
private SearcherManager sm;
- public ThreadAnonymousClass2(TestSearcherManager outerInstance, SearcherManager sm)
+ public ThreadAnonymousClass2(SearcherManager sm)
{
- this.outerInstance = outerInstance;
this.sm = sm;
}
@@ -639,4 +624,4 @@ public override void Run()
}
}
}
-}
\ No newline at end of file
+}
diff --git a/src/Lucene.Net.Tests/Search/TestShardSearching.cs b/src/Lucene.Net.Tests/Search/TestShardSearching.cs
index 0cefa1b93f..219aba5efd 100644
--- a/src/Lucene.Net.Tests/Search/TestShardSearching.cs
+++ b/src/Lucene.Net.Tests/Search/TestShardSearching.cs
@@ -1,7 +1,6 @@
using J2N.Collections.Generic.Extensions;
using NUnit.Framework;
using RandomizedTesting.Generators;
-using System;
using System.Collections.Generic;
using Assert = Lucene.Net.TestFramework.Assert;
using Console = Lucene.Net.Util.SystemConsole;
@@ -488,4 +487,4 @@ private PreviousSearchState AssertSame(IndexSearcher mockSearcher, NodeState.Sha
}
}
}
-}
\ No newline at end of file
+}
diff --git a/src/Lucene.Net.Tests/Search/TestSimilarity.cs b/src/Lucene.Net.Tests/Search/TestSimilarity.cs
index f148c354b4..2eaa86ea09 100644
--- a/src/Lucene.Net.Tests/Search/TestSimilarity.cs
+++ b/src/Lucene.Net.Tests/Search/TestSimilarity.cs
@@ -36,8 +36,6 @@ namespace Lucene.Net.Search
///
/// Similarity unit test.
- ///
- ///
///
[TestFixture]
public class TestSimilarity : LuceneTestCase
@@ -104,23 +102,23 @@ public virtual void TestSimilarity_Mem()
Term b = new Term("field", "b");
Term c = new Term("field", "c");
- searcher.Search(new TermQuery(b), new CollectorAnonymousClass(this));
+ searcher.Search(new TermQuery(b), new CollectorAnonymousClass());
BooleanQuery bq = new BooleanQuery();
bq.Add(new TermQuery(a), Occur.SHOULD);
bq.Add(new TermQuery(b), Occur.SHOULD);
//System.out.println(bq.toString("field"));
- searcher.Search(bq, new CollectorAnonymousClass2(this));
+ searcher.Search(bq, new CollectorAnonymousClass2());
PhraseQuery pq = new PhraseQuery();
pq.Add(a);
pq.Add(c);
//System.out.println(pq.toString("field"));
- searcher.Search(pq, new CollectorAnonymousClass3(this));
+ searcher.Search(pq, new CollectorAnonymousClass3());
pq.Slop = 2;
//System.out.println(pq.toString("field"));
- searcher.Search(pq, new CollectorAnonymousClass4(this));
+ searcher.Search(pq, new CollectorAnonymousClass4());
reader.Dispose();
store.Dispose();
@@ -128,13 +126,6 @@ public virtual void TestSimilarity_Mem()
private sealed class CollectorAnonymousClass : ICollector
{
- private readonly TestSimilarity outerInstance;
-
- public CollectorAnonymousClass(TestSimilarity outerInstance)
- {
- this.outerInstance = outerInstance;
- }
-
private Scorer scorer;
public void SetScorer(Scorer scorer)
@@ -156,15 +147,7 @@ public void SetNextReader(AtomicReaderContext context)
private sealed class CollectorAnonymousClass2 : ICollector
{
- private readonly TestSimilarity outerInstance;
-
- public CollectorAnonymousClass2(TestSimilarity outerInstance)
- {
- this.outerInstance = outerInstance;
- @base = 0;
- }
-
- private int @base;
+ private int @base = 0;
private Scorer scorer;
public void SetScorer(Scorer scorer)
@@ -188,13 +171,6 @@ public void SetNextReader(AtomicReaderContext context)
private sealed class CollectorAnonymousClass3 : ICollector
{
- private readonly TestSimilarity outerInstance;
-
- public CollectorAnonymousClass3(TestSimilarity outerInstance)
- {
- this.outerInstance = outerInstance;
- }
-
private Scorer scorer;
public void SetScorer(Scorer scorer)
@@ -217,13 +193,6 @@ public void SetNextReader(AtomicReaderContext context)
private sealed class CollectorAnonymousClass4 : ICollector
{
- private readonly TestSimilarity outerInstance;
-
- public CollectorAnonymousClass4(TestSimilarity outerInstance)
- {
- this.outerInstance = outerInstance;
- }
-
private Scorer scorer;
public void SetScorer(Scorer scorer)
@@ -244,4 +213,4 @@ public void SetNextReader(AtomicReaderContext context)
public bool AcceptsDocsOutOfOrder => true;
}
}
-}
\ No newline at end of file
+}
diff --git a/src/Lucene.Net.Tests/Search/TestSimilarityProvider.cs b/src/Lucene.Net.Tests/Search/TestSimilarityProvider.cs
index 5df7c9f39c..42211ca509 100644
--- a/src/Lucene.Net.Tests/Search/TestSimilarityProvider.cs
+++ b/src/Lucene.Net.Tests/Search/TestSimilarityProvider.cs
@@ -53,7 +53,7 @@ public override void SetUp()
{
base.SetUp();
directory = NewDirectory();
- PerFieldSimilarityWrapper sim = new ExampleSimilarityProvider(this);
+ PerFieldSimilarityWrapper sim = new ExampleSimilarityProvider();
IndexWriterConfig iwc = NewIndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer(Random)).SetSimilarity(sim);
RandomIndexWriter iw = new RandomIndexWriter(Random, directory, iwc);
Document doc = new Document();
@@ -105,17 +105,8 @@ public virtual void TestBasics()
private class ExampleSimilarityProvider : PerFieldSimilarityWrapper
{
- private readonly TestSimilarityProvider outerInstance;
-
- public ExampleSimilarityProvider(TestSimilarityProvider outerInstance)
- {
- this.outerInstance = outerInstance;
- sim1 = new Sim1(outerInstance);
- sim2 = new Sim2(outerInstance);
- }
-
- private readonly Similarity sim1;
- private readonly Similarity sim2;
+ private readonly Similarity sim1 = new Sim1();
+ private readonly Similarity sim2 = new Sim2();
public override Similarity Get(string field)
{
@@ -132,13 +123,6 @@ public override Similarity Get(string field)
private class Sim1 : TFIDFSimilarity
{
- private readonly TestSimilarityProvider outerInstance;
-
- public Sim1(TestSimilarityProvider outerInstance)
- {
- this.outerInstance = outerInstance;
- }
-
public override long EncodeNormValue(float f)
{
return (long)f;
@@ -187,13 +171,6 @@ public override float ScorePayload(int doc, int start, int end, BytesRef payload
private class Sim2 : TFIDFSimilarity
{
- private readonly TestSimilarityProvider outerInstance;
-
- public Sim2(TestSimilarityProvider outerInstance)
- {
- this.outerInstance = outerInstance;
- }
-
public override long EncodeNormValue(float f)
{
return (long)f;
@@ -240,4 +217,4 @@ public override float ScorePayload(int doc, int start, int end, BytesRef payload
}
}
}
-}
\ No newline at end of file
+}
diff --git a/src/Lucene.Net.Tests/Search/TestSimpleExplanationsOfNonMatches.cs b/src/Lucene.Net.Tests/Search/TestSimpleExplanationsOfNonMatches.cs
index 486178f2a3..6ed12b0118 100644
--- a/src/Lucene.Net.Tests/Search/TestSimpleExplanationsOfNonMatches.cs
+++ b/src/Lucene.Net.Tests/Search/TestSimpleExplanationsOfNonMatches.cs
@@ -1,5 +1,3 @@
-using NUnit.Framework;
-
namespace Lucene.Net.Search
{
/*
@@ -28,10 +26,10 @@ public class TestSimpleExplanationsOfNonMatches : TestSimpleExplanations
///
/// Overrides superclass to ignore matches and focus on non-matches
///
- ///
+ ///
public override void Qtest(Query q, int[] expDocNrs)
{
CheckHits.CheckNoMatchExplanations(q, FIELD, searcher, expDocNrs);
}
}
-}
\ No newline at end of file
+}
diff --git a/src/Lucene.Net.Tests/Search/TestSloppyPhraseQuery.cs b/src/Lucene.Net.Tests/Search/TestSloppyPhraseQuery.cs
index d26721dab2..17ef5195fa 100644
--- a/src/Lucene.Net.Tests/Search/TestSloppyPhraseQuery.cs
+++ b/src/Lucene.Net.Tests/Search/TestSloppyPhraseQuery.cs
@@ -198,7 +198,7 @@ private static Document MakeDocument(string docText)
return doc;
}
-
+
private static PhraseQuery MakePhraseQuery(string terms)
{
@@ -339,7 +339,36 @@ public virtual void TestInfiniteFreq1()
[Test]
public virtual void TestInfiniteFreq2()
{
- string document = "So much fun to be had in my head " + "No more sunshine " + "So much fun just lying in my bed " + "No more sunshine " + "I can't face the sunlight and the dirt outside " + "Wanna stay in 666 where this darkness don't lie " + "Drug drug druggy " + "Got a feeling sweet like honey " + "Drug drug druggy " + "Need sensation like my baby " + "Show me your scars you're so aware " + "I'm not barbaric I just care " + "Drug drug drug " + "I need a reflection to prove I exist " + "No more sunshine " + "I am a victim of designer blitz " + "No more sunshine " + "Dance like a robot when you're chained at the knee " + "The C.I.A say you're all they'll ever need " + "Drug drug druggy " + "Got a feeling sweet like honey " + "Drug drug druggy " + "Need sensation like my baby " + "Snort your lines you're so aware " + "I'm not barbaric I just care " + "Drug drug druggy " + "Got a feeling sweet like honey " + "Drug drug druggy " + "Need sensation like my baby";
+ string document =
+ "So much fun to be had in my head " +
+ "No more sunshine " +
+ "So much fun just lying in my bed " +
+ "No more sunshine " +
+ "I can't face the sunlight and the dirt outside " +
+ "Wanna stay in 666 where this darkness don't lie " +
+ "Drug drug druggy " +
+ "Got a feeling sweet like honey " +
+ "Drug drug druggy " +
+ "Need sensation like my baby " +
+ "Show me your scars you're so aware " +
+ "I'm not barbaric I just care " +
+ "Drug drug drug " +
+ "I need a reflection to prove I exist " +
+ "No more sunshine " +
+ "I am a victim of designer blitz " +
+ "No more sunshine " +
+ "Dance like a robot when you're chained at the knee " +
+ "The C.I.A say you're all they'll ever need " +
+ "Drug drug druggy " +
+ "Got a feeling sweet like honey " +
+ "Drug drug druggy " +
+ "Need sensation like my baby " +
+ "Snort your lines you're so aware " +
+ "I'm not barbaric I just care " +
+ "Drug drug druggy " +
+ "Got a feeling sweet like honey " +
+ "Drug drug druggy " +
+ "Need sensation like my baby";
Directory dir = NewDirectory();
@@ -362,4 +391,4 @@ public virtual void TestInfiniteFreq2()
dir.Dispose();
}
}
-}
\ No newline at end of file
+}
diff --git a/src/Lucene.Net.Tests/Search/TestSloppyPhraseQuery2.cs b/src/Lucene.Net.Tests/Search/TestSloppyPhraseQuery2.cs
index 02529ac3a7..59227adb88 100644
--- a/src/Lucene.Net.Tests/Search/TestSloppyPhraseQuery2.cs
+++ b/src/Lucene.Net.Tests/Search/TestSloppyPhraseQuery2.cs
@@ -1,7 +1,10 @@
using NUnit.Framework;
-using RandomizedTesting.Generators;
using System;
+#if !FEATURE_RANDOM_NEXTINT64_NEXTSINGLE
+using RandomizedTesting.Generators;
+#endif
+
namespace Lucene.Net.Search
{
/*
@@ -244,4 +247,4 @@ private MultiPhraseQuery RandomPhraseQuery(long seed)
return pq;
}
}
-}
\ No newline at end of file
+}
diff --git a/src/Lucene.Net.Tests/Search/TestSort.cs b/src/Lucene.Net.Tests/Search/TestSort.cs
index 7abd5dd7b3..7c7cee1716 100644
--- a/src/Lucene.Net.Tests/Search/TestSort.cs
+++ b/src/Lucene.Net.Tests/Search/TestSort.cs
@@ -68,8 +68,8 @@ public class TestSort : LuceneTestCase
{
///
/// LUCENENET specific. Ensure we have an infostream attached to the default FieldCache
- /// when running the tests. In Java, this was done in the Core.Search.TestFieldCache.TestInfoStream()
- /// method (which polluted the state of these tests), but we need to make the tests self-contained
+ /// when running the tests. In Java, this was done in the Core.Search.TestFieldCache.TestInfoStream()
+ /// method (which polluted the state of these tests), but we need to make the tests self-contained
/// so they can be run correctly regardless of order. Not setting the InfoStream skips an execution
/// path within these tests, so we should do it to make sure we test all of the code.
///
@@ -1602,7 +1602,7 @@ public virtual void TestCustomIntParser()
iw.Dispose();
IndexSearcher searcher = NewSearcher(ir);
- Sort sort = new Sort(new SortField("parser", new IntParserAnonymousClass(this)), SortField.FIELD_DOC);
+ Sort sort = new Sort(new SortField("parser", new IntParserAnonymousClass()), SortField.FIELD_DOC);
TopDocs td = searcher.Search(new MatchAllDocsQuery(), 10, sort);
@@ -1620,13 +1620,6 @@ public virtual void TestCustomIntParser()
private sealed class IntParserAnonymousClass : FieldCache.IInt32Parser
{
- private readonly TestSort outerInstance;
-
- public IntParserAnonymousClass(TestSort outerInstance)
- {
- this.outerInstance = outerInstance;
- }
-
///
/// NOTE: This was parseInt() in Lucene
///
@@ -1663,7 +1656,7 @@ public virtual void TestCustomByteParser()
iw.Dispose();
IndexSearcher searcher = NewSearcher(ir);
- Sort sort = new Sort(new SortField("parser", new ByteParserAnonymousClass(this)), SortField.FIELD_DOC);
+ Sort sort = new Sort(new SortField("parser", new ByteParserAnonymousClass()), SortField.FIELD_DOC);
TopDocs td = searcher.Search(new MatchAllDocsQuery(), 10, sort);
@@ -1683,13 +1676,6 @@ public virtual void TestCustomByteParser()
private sealed class ByteParserAnonymousClass : FieldCache.IByteParser
#pragma warning restore 612, 618
{
- private readonly TestSort outerInstance;
-
- public ByteParserAnonymousClass(TestSort outerInstance)
- {
- this.outerInstance = outerInstance;
- }
-
public byte ParseByte(BytesRef term)
{
return (byte)(term.Bytes[term.Offset] - 'A');
@@ -1723,7 +1709,7 @@ public virtual void TestCustomShortParser()
iw.Dispose();
IndexSearcher searcher = NewSearcher(ir);
- Sort sort = new Sort(new SortField("parser", new ShortParserAnonymousClass(this)), SortField.FIELD_DOC);
+ Sort sort = new Sort(new SortField("parser", new ShortParserAnonymousClass()), SortField.FIELD_DOC);
TopDocs td = searcher.Search(new MatchAllDocsQuery(), 10, sort);
@@ -1743,13 +1729,6 @@ public virtual void TestCustomShortParser()
private sealed class ShortParserAnonymousClass : FieldCache.IInt16Parser
#pragma warning restore 612, 618
{
- private readonly TestSort outerInstance;
-
- public ShortParserAnonymousClass(TestSort outerInstance)
- {
- this.outerInstance = outerInstance;
- }
-
///
/// NOTE: This was parseShort() in Lucene
///
@@ -1786,7 +1765,7 @@ public virtual void TestCustomLongParser()
iw.Dispose();
IndexSearcher searcher = NewSearcher(ir);
- Sort sort = new Sort(new SortField("parser", new LongParserAnonymousClass(this)), SortField.FIELD_DOC);
+ Sort sort = new Sort(new SortField("parser", new LongParserAnonymousClass()), SortField.FIELD_DOC);
TopDocs td = searcher.Search(new MatchAllDocsQuery(), 10, sort);
@@ -1804,13 +1783,6 @@ public virtual void TestCustomLongParser()
private sealed class LongParserAnonymousClass : FieldCache.IInt64Parser
{
- private readonly TestSort outerInstance;
-
- public LongParserAnonymousClass(TestSort outerInstance)
- {
- this.outerInstance = outerInstance;
- }
-
///
/// NOTE: This was parseLong() in Lucene
///
@@ -1847,7 +1819,7 @@ public virtual void TestCustomFloatParser()
iw.Dispose();
IndexSearcher searcher = NewSearcher(ir);
- Sort sort = new Sort(new SortField("parser", new FloatParserAnonymousClass(this)), SortField.FIELD_DOC);
+ Sort sort = new Sort(new SortField("parser", new FloatParserAnonymousClass()), SortField.FIELD_DOC);
TopDocs td = searcher.Search(new MatchAllDocsQuery(), 10, sort);
@@ -1865,13 +1837,6 @@ public virtual void TestCustomFloatParser()
private sealed class FloatParserAnonymousClass : FieldCache.ISingleParser
{
- private readonly TestSort outerInstance;
-
- public FloatParserAnonymousClass(TestSort outerInstance)
- {
- this.outerInstance = outerInstance;
- }
-
///
/// NOTE: This was parseFloat() in Lucene
///
@@ -1908,7 +1873,7 @@ public virtual void TestCustomDoubleParser()
iw.Dispose();
IndexSearcher searcher = NewSearcher(ir);
- Sort sort = new Sort(new SortField("parser", new DoubleParserAnonymousClass(this)), SortField.FIELD_DOC);
+ Sort sort = new Sort(new SortField("parser", new DoubleParserAnonymousClass()), SortField.FIELD_DOC);
TopDocs td = searcher.Search(new MatchAllDocsQuery(), 10, sort);
@@ -1926,13 +1891,6 @@ public virtual void TestCustomDoubleParser()
private sealed class DoubleParserAnonymousClass : FieldCache.IDoubleParser
{
- private readonly TestSort outerInstance;
-
- public DoubleParserAnonymousClass(TestSort outerInstance)
- {
- this.outerInstance = outerInstance;
- }
-
public double ParseDouble(BytesRef term)
{
return Math.Pow(term.Bytes[term.Offset], (term.Bytes[term.Offset] - 'A'));
diff --git a/src/Lucene.Net.Tests/Search/TestSortRandom.cs b/src/Lucene.Net.Tests/Search/TestSortRandom.cs
index f69526518e..973d7e85e6 100644
--- a/src/Lucene.Net.Tests/Search/TestSortRandom.cs
+++ b/src/Lucene.Net.Tests/Search/TestSortRandom.cs
@@ -73,7 +73,7 @@ public virtual void TestRandomStringSort()
// 10% of the time, the document is missing the value:
BytesRef br;
- if (LuceneTestCase.Random.Next(10) != 7)
+ if (Random.Next(10) != 7)
{
string s;
if (random.NextBoolean())
@@ -217,7 +217,7 @@ public virtual void TestRandomStringSort()
// Compute expected results:
var expected = f.matchValues.ToList();
-
+
expected.Sort(Comparer<BytesRef>.Create((a,b) =>
{
if (a is null)
@@ -350,4 +350,4 @@ public override DocIdSet GetDocIdSet(AtomicReaderContext context, IBits acceptDo
}
}
}
-}
\ No newline at end of file
+}
diff --git a/src/Lucene.Net.Tests/Search/TestSortRescorer.cs b/src/Lucene.Net.Tests/Search/TestSortRescorer.cs
index d409fd5b61..babbd7746a 100644
--- a/src/Lucene.Net.Tests/Search/TestSortRescorer.cs
+++ b/src/Lucene.Net.Tests/Search/TestSortRescorer.cs
@@ -189,12 +189,12 @@ public virtual void TestRandom()
bool fail = false;
for (int i = 0; i < numHits; i++)
{
- fail |= (int)expected[i] != hits2.ScoreDocs[i].Doc;
+ fail |= expected[i] != hits2.ScoreDocs[i].Doc;
}
Assert.IsFalse(fail);
r.Dispose();
dir.Dispose();
- }
+ }
}
-}
\ No newline at end of file
+}
diff --git a/src/Lucene.Net.Tests/Search/TestSubScorerFreqs.cs b/src/Lucene.Net.Tests/Search/TestSubScorerFreqs.cs
index dfc981d373..13f45234cd 100644
--- a/src/Lucene.Net.Tests/Search/TestSubScorerFreqs.cs
+++ b/src/Lucene.Net.Tests/Search/TestSubScorerFreqs.cs
@@ -47,11 +47,11 @@ public override void BeforeClass() // LUCENENET specific - renamed from MakeInde
int num = AtLeast(31);
for (int i = 0; i < num; i++)
{
- Documents.Document doc = new Documents.Document();
+ Document doc = new Document();
doc.Add(NewTextField("f", "a b c d b c d c d d", Field.Store.NO));
w.AddDocument(doc);
- doc = new Documents.Document();
+ doc = new Document();
doc.Add(NewTextField("f", "a b c d", Field.Store.NO));
w.AddDocument(doc);
}
@@ -228,4 +228,4 @@ public virtual void TestPhraseQuery()
}
}
}
-}
\ No newline at end of file
+}
diff --git a/src/Lucene.Net.Tests/Search/TestTermScorer.cs b/src/Lucene.Net.Tests/Search/TestTermScorer.cs
index cca779b45f..34491b4642 100644
--- a/src/Lucene.Net.Tests/Search/TestTermScorer.cs
+++ b/src/Lucene.Net.Tests/Search/TestTermScorer.cs
@@ -52,7 +52,10 @@ public override void SetUp()
base.SetUp();
directory = NewDirectory();
- RandomIndexWriter writer = new RandomIndexWriter(Random, directory, NewIndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer(Random)).SetMergePolicy(NewLogMergePolicy()).SetSimilarity(new DefaultSimilarity()));
+ RandomIndexWriter writer = new RandomIndexWriter(Random, directory,
+ NewIndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer(Random))
+ .SetMergePolicy(NewLogMergePolicy())
+ .SetSimilarity(new DefaultSimilarity()));
for (int i = 0; i < values.Length; i++)
{
Document doc = new Document();
@@ -82,13 +85,13 @@ public virtual void Test()
Weight weight = indexSearcher.CreateNormalizedWeight(termQuery);
Assert.IsTrue(indexSearcher.TopReaderContext is AtomicReaderContext);
AtomicReaderContext context = (AtomicReaderContext)indexSearcher.TopReaderContext;
- BulkScorer ts = weight.GetBulkScorer(context, true, (context.AtomicReader).LiveDocs);
+ BulkScorer ts = weight.GetBulkScorer(context, true, context.AtomicReader.LiveDocs);
// we have 2 documents with the term all in them, one document for all the
// other values
IList<TestHit> docs = new JCG.List<TestHit>();
// must call next first
- ts.Score(new CollectorAnonymousClass(this, context, docs));
+ ts.Score(new CollectorAnonymousClass(this, docs));
Assert.IsTrue(docs.Count == 2, "docs Size: " + docs.Count + " is not: " + 2);
TestHit doc0 = docs[0];
TestHit doc5 = docs[1];
@@ -110,13 +113,11 @@ private sealed class CollectorAnonymousClass : ICollector
{
private readonly TestTermScorer outerInstance;
- private AtomicReaderContext context;
private readonly IList<TestHit> docs;
- public CollectorAnonymousClass(TestTermScorer outerInstance, AtomicReaderContext context, IList<TestHit> docs)
+ public CollectorAnonymousClass(TestTermScorer outerInstance, IList<TestHit> docs)
{
this.outerInstance = outerInstance;
- this.context = context;
this.docs = docs;
@base = 0;
}
@@ -133,7 +134,7 @@ public void Collect(int doc)
{
float score = scorer.GetScore();
doc = doc + @base;
- docs.Add(new TestHit(outerInstance, doc, score));
+ docs.Add(new TestHit(doc, score));
Assert.IsTrue(score > 0, "score " + score + " is not greater than 0");
Assert.IsTrue(doc == 0 || doc == 5, "Doc: " + doc + " does not equal 0 or doc does not equal 5");
}
@@ -180,14 +181,11 @@ public virtual void TestAdvance()
private class TestHit
{
- private readonly TestTermScorer outerInstance;
-
public int Doc { get; }
public float Score { get; }
- public TestHit(TestTermScorer outerInstance, int doc, float score)
+ public TestHit(int doc, float score)
{
- this.outerInstance = outerInstance;
this.Doc = doc;
this.Score = score;
}
@@ -198,4 +196,4 @@ public override string ToString()
}
}
}
-}
\ No newline at end of file
+}
diff --git a/src/Lucene.Net.Tests/Search/TestTimeLimitingCollector.cs b/src/Lucene.Net.Tests/Search/TestTimeLimitingCollector.cs
index 2abe196abb..20fa9066ac 100644
--- a/src/Lucene.Net.Tests/Search/TestTimeLimitingCollector.cs
+++ b/src/Lucene.Net.Tests/Search/TestTimeLimitingCollector.cs
@@ -39,22 +39,22 @@ namespace Lucene.Net.Search
///
public class TestTimeLimitingCollector : LuceneTestCase
{
- private static readonly int SLOW_DOWN = 3;
- private static readonly long TIME_ALLOWED = 17 * SLOW_DOWN; // so searches can find about 17 docs.
+ private const int SLOW_DOWN = 3;
+ private const long TIME_ALLOWED = 17 * SLOW_DOWN; // so searches can find about 17 docs.
- // max time allowed is relaxed for multithreading tests.
- // the multithread case fails when setting this to 1 (no slack) and launching many threads (>2000).
+ // max time allowed is relaxed for multithreading tests.
+ // the multithread case fails when setting this to 1 (no slack) and launching many threads (>2000).
// but this is not a real failure, just noise.
- private static readonly double MULTI_THREAD_SLACK = 7;
+ private const double MULTI_THREAD_SLACK = 7;
- private static readonly int N_DOCS = 3000;
- private static readonly int N_THREADS = 50;
+ private const int N_DOCS = 3000;
+ private const int N_THREADS = 50;
private IndexSearcher searcher;
private Directory directory;
private IndexReader reader;
- private readonly string FIELD_NAME = "body";
+ private const string FIELD_NAME = "body";
private Query query;
private Counter counter;
private TimeLimitingCollector.TimerThread counterThread;
@@ -67,7 +67,7 @@ public class TestTimeLimitingCollector : LuceneTestCase
public override void SetUp()
{
base.SetUp();
- counter = Lucene.Net.Util.Counter.NewCounter(true);
+ counter = Counter.NewCounter(true);
counterThread = new TimeLimitingCollector.TimerThread(counter);
counterThread.Start();
string[] docText = {
@@ -167,7 +167,7 @@ private void DoTestSearch()
private ICollector CreateTimedCollector(MyHitCollector hc, long timeAllowed, bool greedy)
{
TimeLimitingCollector res = new TimeLimitingCollector(hc, counter, timeAllowed);
- res.IsGreedy = (greedy); // set to true to make sure at least one doc is collected.
+ res.IsGreedy = greedy; // set to true to make sure at least one doc is collected.
return res;
}
@@ -252,7 +252,7 @@ private long MaxTime(bool multiThreaded)
long res = 2 * counterThread.Resolution + TIME_ALLOWED + SLOW_DOWN; // some slack for less noise in this test
if (multiThreaded)
{
- res = (long)(res * MULTI_THREAD_SLACK); // larger slack
+ res = (long)(res * MULTI_THREAD_SLACK); // larger slack
}
return res;
}
@@ -272,7 +272,7 @@ private string MaxTimeStr(bool multiThreaded)
}
/**
- * Test timeout behavior when resolution is modified.
+ * Test timeout behavior when resolution is modified.
*/
[Test]
public void TestModifyResolution()
@@ -301,7 +301,7 @@ public void TestModifyResolution()
}
}
- /**
+ /**
* Test correctness with multiple searching threads.
*/
[Test]
@@ -310,7 +310,7 @@ public void TestSearchMultiThreaded()
DoTestMultiThreads(false);
}
- /**
+ /**
* Test correctness with multiple searching threads.
*/
[Test]
@@ -345,6 +345,7 @@ private sealed class ThreadAnonymousClass : ThreadJob
private readonly OpenBitSet success;
private readonly bool withTimeout;
private readonly int num;
+
public ThreadAnonymousClass(TestTimeLimitingCollector outerInstance, OpenBitSet success, bool withTimeout, int num)
{
this.outerInstance = outerInstance;
@@ -352,6 +353,7 @@ public ThreadAnonymousClass(TestTimeLimitingCollector outerInstance, OpenBitSet
this.withTimeout = withTimeout;
this.num = num;
}
+
public override void Run()
{
if (withTimeout)
@@ -362,6 +364,8 @@ public override void Run()
{
outerInstance.DoTestSearch();
}
+
+ // LUCENENET: using UninterruptableMonitor instead of lock, see UninterruptableMonitor docs
UninterruptableMonitor.Enter(success);
try
{
diff --git a/src/Lucene.Net.Tests/Search/TestTopDocsCollector.cs b/src/Lucene.Net.Tests/Search/TestTopDocsCollector.cs
index d420f07bc2..48dc289c65 100644
--- a/src/Lucene.Net.Tests/Search/TestTopDocsCollector.cs
+++ b/src/Lucene.Net.Tests/Search/TestTopDocsCollector.cs
@@ -85,7 +85,14 @@ public override void SetScorer(Scorer scorer)
// Scores array to be used by MyTopDocsCollector. If it is changed, MAX_SCORE
// must also change.
- private static readonly float[] scores = new float[] { 0.7767749f, 1.7839992f, 8.9925785f, 7.9608946f, 0.07948637f, 2.6356435f, 7.4950366f, 7.1490803f, 8.108544f, 4.961808f, 2.2423935f, 7.285586f, 4.6699767f, 2.9655676f, 6.953706f, 5.383931f, 6.9916306f, 8.365894f, 7.888485f, 8.723962f, 3.1796896f, 0.39971232f, 1.3077754f, 6.8489285f, 9.17561f, 5.060466f, 7.9793315f, 8.601509f, 4.1858315f, 0.28146625f };
+ private static readonly float[] scores = new float[]
+ {
+ 0.7767749f, 1.7839992f, 8.9925785f, 7.9608946f, 0.07948637f, 2.6356435f,
+ 7.4950366f, 7.1490803f, 8.108544f, 4.961808f, 2.2423935f, 7.285586f, 4.6699767f,
+ 2.9655676f, 6.953706f, 5.383931f, 6.9916306f, 8.365894f, 7.888485f, 8.723962f,
+ 3.1796896f, 0.39971232f, 1.3077754f, 6.8489285f, 9.17561f, 5.060466f, 7.9793315f,
+ 8.601509f, 4.1858315f, 0.28146625f
+ };
private const float MAX_SCORE = 9.17561f;
@@ -227,4 +234,4 @@ public virtual void TestResultsOrder()
}
}
}
-}
\ No newline at end of file
+}
diff --git a/src/Lucene.Net.Tests/Search/TestTopFieldCollector.cs b/src/Lucene.Net.Tests/Search/TestTopFieldCollector.cs
index 82b89eed96..c8c4969257 100644
--- a/src/Lucene.Net.Tests/Search/TestTopFieldCollector.cs
+++ b/src/Lucene.Net.Tests/Search/TestTopFieldCollector.cs
@@ -178,8 +178,28 @@ public virtual void TestOutOfOrderDocsScoringSort()
{
// Two Sort criteria to instantiate the multi/single comparers.
Sort[] sort = new Sort[] { new Sort(SortField.FIELD_DOC), new Sort() };
- bool[][] tfcOptions = new bool[][] { new bool[] { false, false, false }, new bool[] { false, false, true }, new bool[] { false, true, false }, new bool[] { false, true, true }, new bool[] { true, false, false }, new bool[] { true, false, true }, new bool[] { true, true, false }, new bool[] { true, true, true } };
- string[] actualTFCClasses = new string[] { "OutOfOrderOneComparerNonScoringCollector", "OutOfOrderOneComparerScoringMaxScoreCollector", "OutOfOrderOneComparerScoringNoMaxScoreCollector", "OutOfOrderOneComparerScoringMaxScoreCollector", "OutOfOrderOneComparerNonScoringCollector", "OutOfOrderOneComparerScoringMaxScoreCollector", "OutOfOrderOneComparerScoringNoMaxScoreCollector", "OutOfOrderOneComparerScoringMaxScoreCollector" };
+ bool[][] tfcOptions = new bool[][]
+ {
+ new bool[] { false, false, false },
+ new bool[] { false, false, true },
+ new bool[] { false, true, false },
+ new bool[] { false, true, true },
+ new bool[] { true, false, false },
+ new bool[] { true, false, true },
+ new bool[] { true, true, false },
+ new bool[] { true, true, true }
+ };
+ string[] actualTFCClasses = new string[]
+ {
+ "OutOfOrderOneComparerNonScoringCollector",
+ "OutOfOrderOneComparerScoringMaxScoreCollector",
+ "OutOfOrderOneComparerScoringNoMaxScoreCollector",
+ "OutOfOrderOneComparerScoringMaxScoreCollector",
+ "OutOfOrderOneComparerNonScoringCollector",
+ "OutOfOrderOneComparerScoringMaxScoreCollector",
+ "OutOfOrderOneComparerScoringNoMaxScoreCollector",
+ "OutOfOrderOneComparerScoringMaxScoreCollector"
+ };
BooleanQuery bq = new BooleanQuery();
// Add a Query with SHOULD, since bw.Scorer() returns BooleanScorer2
@@ -211,8 +231,28 @@ public virtual void TestOutOfOrderDocsScoringSortMulti()
{
// Two Sort criteria to instantiate the multi/single comparers.
Sort[] sort = new Sort[] { new Sort(SortField.FIELD_DOC, SortField.FIELD_SCORE) };
- bool[][] tfcOptions = new bool[][] { new bool[] { false, false, false }, new bool[] { false, false, true }, new bool[] { false, true, false }, new bool[] { false, true, true }, new bool[] { true, false, false }, new bool[] { true, false, true }, new bool[] { true, true, false }, new bool[] { true, true, true } };
- string[] actualTFCClasses = new string[] { "OutOfOrderMultiComparerNonScoringCollector", "OutOfOrderMultiComparerScoringMaxScoreCollector", "OutOfOrderMultiComparerScoringNoMaxScoreCollector", "OutOfOrderMultiComparerScoringMaxScoreCollector", "OutOfOrderMultiComparerNonScoringCollector", "OutOfOrderMultiComparerScoringMaxScoreCollector", "OutOfOrderMultiComparerScoringNoMaxScoreCollector", "OutOfOrderMultiComparerScoringMaxScoreCollector" };
+ bool[][] tfcOptions = new bool[][]
+ {
+ new bool[] { false, false, false },
+ new bool[] { false, false, true },
+ new bool[] { false, true, false },
+ new bool[] { false, true, true },
+ new bool[] { true, false, false },
+ new bool[] { true, false, true },
+ new bool[] { true, true, false },
+ new bool[] { true, true, true }
+ };
+ string[] actualTFCClasses = new string[]
+ {
+ "OutOfOrderMultiComparerNonScoringCollector",
+ "OutOfOrderMultiComparerScoringMaxScoreCollector",
+ "OutOfOrderMultiComparerScoringNoMaxScoreCollector",
+ "OutOfOrderMultiComparerScoringMaxScoreCollector",
+ "OutOfOrderMultiComparerNonScoringCollector",
+ "OutOfOrderMultiComparerScoringMaxScoreCollector",
+ "OutOfOrderMultiComparerScoringNoMaxScoreCollector",
+ "OutOfOrderMultiComparerScoringMaxScoreCollector"
+ };
BooleanQuery bq = new BooleanQuery();
// Add a Query with SHOULD, since bw.Scorer() returns BooleanScorer2
@@ -252,4 +292,4 @@ public virtual void TestSortWithScoreAndMaxScoreTrackingNoResults()
}
}
}
-}
\ No newline at end of file
+}
diff --git a/src/Lucene.Net.Tests/Search/TestWildcard.cs b/src/Lucene.Net.Tests/Search/TestWildcard.cs
index 4c9e5cc4be..b0d9762d9e 100644
--- a/src/Lucene.Net.Tests/Search/TestWildcard.cs
+++ b/src/Lucene.Net.Tests/Search/TestWildcard.cs
@@ -292,18 +292,91 @@ public virtual void TestParsingAndSearching()
string[] docs = new string[] { "\\ abcdefg1", "\\x00079 hijklmn1", "\\\\ opqrstu1" };
// queries that should find all docs
- Query[] matchAll = new Query[] { new WildcardQuery(new Term(field, "*")), new WildcardQuery(new Term(field, "*1")), new WildcardQuery(new Term(field, "**1")), new WildcardQuery(new Term(field, "*?")), new WildcardQuery(new Term(field, "*?1")), new WildcardQuery(new Term(field, "?*1")), new WildcardQuery(new Term(field, "**")), new WildcardQuery(new Term(field, "***")), new WildcardQuery(new Term(field, "\\\\*")) };
+ Query[] matchAll = new Query[]
+ {
+ new WildcardQuery(new Term(field, "*")),
+ new WildcardQuery(new Term(field, "*1")),
+ new WildcardQuery(new Term(field, "**1")),
+ new WildcardQuery(new Term(field, "*?")),
+ new WildcardQuery(new Term(field, "*?1")),
+ new WildcardQuery(new Term(field, "?*1")),
+ new WildcardQuery(new Term(field, "**")),
+ new WildcardQuery(new Term(field, "***")),
+ new WildcardQuery(new Term(field, "\\\\*"))
+ };
// queries that should find no docs
- Query[] matchNone = new Query[] { new WildcardQuery(new Term(field, "a*h")), new WildcardQuery(new Term(field, "a?h")), new WildcardQuery(new Term(field, "*a*h")), new WildcardQuery(new Term(field, "?a")), new WildcardQuery(new Term(field, "a?")) };
-
- PrefixQuery[][] matchOneDocPrefix = new PrefixQuery[][] { new PrefixQuery[] { new PrefixQuery(new Term(field, "a")), new PrefixQuery(new Term(field, "ab")), new PrefixQuery(new Term(field, "abc")) }, new PrefixQuery[] { new PrefixQuery(new Term(field, "h")), new PrefixQuery(new Term(field, "hi")), new PrefixQuery(new Term(field, "hij")), new PrefixQuery(new Term(field, "\\x0007")) }, new PrefixQuery[] { new PrefixQuery(new Term(field, "o")), new PrefixQuery(new Term(field, "op")), new PrefixQuery(new Term(field, "opq")), new PrefixQuery(new Term(field, "\\\\")) } };
+ Query[] matchNone = new Query[]
+ {
+ new WildcardQuery(new Term(field, "a*h")),
+ new WildcardQuery(new Term(field, "a?h")),
+ new WildcardQuery(new Term(field, "*a*h")),
+ new WildcardQuery(new Term(field, "?a")),
+ new WildcardQuery(new Term(field, "a?"))
+ };
+
+ PrefixQuery[][] matchOneDocPrefix = new PrefixQuery[][]
+ {
+ new[]
+ {
+ new PrefixQuery(new Term(field, "a")),
+ new PrefixQuery(new Term(field, "ab")),
+ new PrefixQuery(new Term(field, "abc")) // these should find only doc 0
+ },
+ new[]
+ {
+ new PrefixQuery(new Term(field, "h")),
+ new PrefixQuery(new Term(field, "hi")),
+ new PrefixQuery(new Term(field, "hij")),
+ new PrefixQuery(new Term(field, "\\x0007")) // these should find only doc 1
+ },
+ new[]
+ {
+ new PrefixQuery(new Term(field, "o")),
+ new PrefixQuery(new Term(field, "op")),
+ new PrefixQuery(new Term(field, "opq")),
+ new PrefixQuery(new Term(field, "\\\\")) // these should find only doc 2
+ }
+ };
- WildcardQuery[][] matchOneDocWild = new WildcardQuery[][] { new WildcardQuery[] { new WildcardQuery(new Term(field, "*a*")), new WildcardQuery(new Term(field, "*ab*")), new WildcardQuery(new Term(field, "*abc**")), new WildcardQuery(new Term(field, "ab*e*")), new WildcardQuery(new Term(field, "*g?")), new WildcardQuery(new Term(field, "*f?1")) }, new WildcardQuery[] { new WildcardQuery(new Term(field, "*h*")), new WildcardQuery(new Term(field, "*hi*")), new WildcardQuery(new Term(field, "*hij**")), new WildcardQuery(new Term(field, "hi*k*")), new WildcardQuery(new Term(field, "*n?")), new WildcardQuery(new Term(field, "*m?1")), new WildcardQuery(new Term(field, "hij**")) }, new WildcardQuery[] { new WildcardQuery(new Term(field, "*o*")), new WildcardQuery(new Term(field, "*op*")), new WildcardQuery(new Term(field, "*opq**")), new WildcardQuery(new Term(field, "op*q*")), new WildcardQuery(new Term(field, "*u?")), new WildcardQuery(new Term(field, "*t?1")), new WildcardQuery(new Term(field, "opq**")) } };
+ WildcardQuery[][] matchOneDocWild = new WildcardQuery[][]
+ {
+ new[]
+ {
+ new WildcardQuery(new Term(field, "*a*")), // these should find only doc 0
+ new WildcardQuery(new Term(field, "*ab*")),
+ new WildcardQuery(new Term(field, "*abc**")),
+ new WildcardQuery(new Term(field, "ab*e*")),
+ new WildcardQuery(new Term(field, "*g?")),
+ new WildcardQuery(new Term(field, "*f?1"))
+ },
+ new[]
+ {
+ new WildcardQuery(new Term(field, "*h*")), // these should find only doc 1
+ new WildcardQuery(new Term(field, "*hi*")),
+ new WildcardQuery(new Term(field, "*hij**")),
+ new WildcardQuery(new Term(field, "hi*k*")),
+ new WildcardQuery(new Term(field, "*n?")),
+ new WildcardQuery(new Term(field, "*m?1")),
+ new WildcardQuery(new Term(field, "hij**"))
+ },
+ new[]
+ {
+ new WildcardQuery(new Term(field, "*o*")), // these should find only doc 2
+ new WildcardQuery(new Term(field, "*op*")),
+ new WildcardQuery(new Term(field, "*opq**")),
+ new WildcardQuery(new Term(field, "op*q*")),
+ new WildcardQuery(new Term(field, "*u?")),
+ new WildcardQuery(new Term(field, "*t?1")),
+ new WildcardQuery(new Term(field, "opq**"))
+ }
+ };
// prepare the index
Directory dir = NewDirectory();
- RandomIndexWriter iw = new RandomIndexWriter(Random, dir, NewIndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer(Random)).SetMergePolicy(NewLogMergePolicy()));
+ RandomIndexWriter iw = new RandomIndexWriter(Random, dir,
+ NewIndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer(Random))
+ .SetMergePolicy(NewLogMergePolicy()));
for (int i = 0; i < docs.Length; i++)
{
Document doc = new Document();
@@ -373,4 +446,4 @@ public virtual void TestParsingAndSearching()
dir.Dispose();
}
}
-}
\ No newline at end of file
+}
diff --git a/src/Lucene.Net.Tests/Search/TestWildcardRandom.cs b/src/Lucene.Net.Tests/Search/TestWildcardRandom.cs
index bd23d1cb2e..61d7b552df 100644
--- a/src/Lucene.Net.Tests/Search/TestWildcardRandom.cs
+++ b/src/Lucene.Net.Tests/Search/TestWildcardRandom.cs
@@ -1,7 +1,6 @@
using Lucene.Net.Documents;
using Lucene.Net.Index.Extensions;
using NUnit.Framework;
-using System;
using System.Text;
using Assert = Lucene.Net.TestFramework.Assert;
using Console = Lucene.Net.Util.SystemConsole;
@@ -53,7 +52,9 @@ public override void SetUp()
{
base.SetUp();
dir = NewDirectory();
- RandomIndexWriter writer = new RandomIndexWriter(Random, dir, NewIndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer(Random)).SetMaxBufferedDocs(TestUtil.NextInt32(Random, 50, 1000)));
+ RandomIndexWriter writer = new RandomIndexWriter(Random, dir,
+ NewIndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer(Random))
+ .SetMaxBufferedDocs(TestUtil.NextInt32(Random, 50, 1000)));
Document doc = new Document();
Field field = NewStringField("field", "", Field.Store.NO);
@@ -157,4 +158,4 @@ public virtual void TestWildcards()
}
}
}
-}
\ No newline at end of file
+}
diff --git a/src/Lucene.Net.Tests/Store/TestBufferedIndexInput.cs b/src/Lucene.Net.Tests/Store/TestBufferedIndexInput.cs
index 685d97ce2f..ce092ad5e5 100644
--- a/src/Lucene.Net.Tests/Store/TestBufferedIndexInput.cs
+++ b/src/Lucene.Net.Tests/Store/TestBufferedIndexInput.cs
@@ -42,15 +42,16 @@ namespace Lucene.Net.Store
[TestFixture]
public class TestBufferedIndexInput : LuceneTestCase
{
- private static void WriteBytes(FileInfo aFile, long size)
- {
- using FileStream ostream = new FileStream(aFile.FullName, FileMode.Create);
- for (int i = 0; i < size; i++)
- {
- ostream.WriteByte(Byten(i));
- }
- ostream.Flush();
- }
+ // LUCENENET: unused, commenting out until it is needed
+ // private static void WriteBytes(FileInfo aFile, long size)
+ // {
+ // using FileStream ostream = new FileStream(aFile.FullName, FileMode.Create);
+ // for (int i = 0; i < size; i++)
+ // {
+ // ostream.WriteByte(Byten(i));
+ // }
+ // ostream.Flush();
+ // }
private const long TEST_FILE_LENGTH = 100 * 1024;
@@ -79,17 +80,18 @@ public virtual void TestReadBytes()
RunReadBytes(input, BufferedIndexInput.BUFFER_SIZE, Random);
}
- private void RunReadBytesAndClose(IndexInput input, int bufferSize, Random r)
- {
- try
- {
- RunReadBytes(input, bufferSize, r);
- }
- finally
- {
- input.Dispose();
- }
- }
+ // LUCENENET: unused, commenting out until it is needed
+ // private void RunReadBytesAndClose(IndexInput input, int bufferSize, Random r)
+ // {
+ // try
+ // {
+ // RunReadBytes(input, bufferSize, r);
+ // }
+ // finally
+ // {
+ // input.Dispose();
+ // }
+ // }
private void RunReadBytes(IndexInput input, int bufferSize, Random r)
{
@@ -155,7 +157,7 @@ private void CheckReadBytes(IndexInput input, int size, int pos)
Assert.AreEqual(pos + size, input.Position); // LUCENENET specific: Renamed from getFilePointer() to match FileStream
for (int i = 0; i < size; i++)
{
- Assert.AreEqual(Byten(pos + i), (byte)buffer[offset + i], "pos=" + i + " filepos=" + (pos + i));
+ Assert.AreEqual(Byten(pos + i), buffer[offset + i], "pos=" + i + " filepos=" + (pos + i));
}
}
@@ -262,7 +264,7 @@ public virtual void TestSetBufferSize()
new IndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer(Random))
.SetOpenMode(OpenMode.CREATE)
.SetMergePolicy(NewLogMergePolicy(false)));
-
+
for (int i = 0; i < 37; i++)
{
var doc = new Document();
@@ -387,4 +389,4 @@ public override long FileLength(string name)
}
}
}
-}
\ No newline at end of file
+}
diff --git a/src/Lucene.Net.Tests/Store/TestCopyBytes.cs b/src/Lucene.Net.Tests/Store/TestCopyBytes.cs
index 465209b201..4ff093a951 100644
--- a/src/Lucene.Net.Tests/Store/TestCopyBytes.cs
+++ b/src/Lucene.Net.Tests/Store/TestCopyBytes.cs
@@ -3,7 +3,6 @@
using NUnit.Framework;
using RandomizedTesting.Generators;
using System;
-using System.IO;
using Assert = Lucene.Net.TestFramework.Assert;
using Console = Lucene.Net.Util.SystemConsole;
@@ -200,4 +199,4 @@ public override void Run()
}
}
}
-}
\ No newline at end of file
+}
diff --git a/src/Lucene.Net.Tests/Store/TestDirectory.cs b/src/Lucene.Net.Tests/Store/TestDirectory.cs
index 02339d6fd1..6c47cc70a7 100644
--- a/src/Lucene.Net.Tests/Store/TestDirectory.cs
+++ b/src/Lucene.Net.Tests/Store/TestDirectory.cs
@@ -41,7 +41,12 @@ public class TestDirectory : LuceneTestCase
public virtual void TestDetectClose()
{
DirectoryInfo tempDir = CreateTempDir(GetType().Name);
- Directory[] dirs = new Directory[] { new RAMDirectory(), new SimpleFSDirectory(tempDir), new NIOFSDirectory(tempDir) };
+ Directory[] dirs = new Directory[]
+ {
+ new RAMDirectory(),
+ new SimpleFSDirectory(tempDir),
+ new NIOFSDirectory(tempDir)
+ };
foreach (Directory dir in dirs)
{
@@ -62,7 +67,12 @@ public virtual void TestDetectClose()
public virtual void TestDoubleDispose()
{
DirectoryInfo tempDir = CreateTempDir(GetType().Name);
- Directory[] dirs = new Directory[] { new RAMDirectory(), new SimpleFSDirectory(tempDir), new NIOFSDirectory(tempDir) };
+ Directory[] dirs = new Directory[]
+ {
+ new RAMDirectory(),
+ new SimpleFSDirectory(tempDir),
+ new NIOFSDirectory(tempDir)
+ };
foreach (Directory dir in dirs)
{
@@ -120,7 +130,8 @@ public override void Run()
try
{
- using (IndexOutput output = outerBDWrapper.CreateOutput(fileName, NewIOContext(Random))) { }
+ // LUCENENET: using statement instead of manual close/Dispose call
+ using (IndexOutput _ = outerBDWrapper.CreateOutput(fileName, NewIOContext(Random))) { }
Assert.IsTrue(SlowFileExists(outerBDWrapper, fileName));
}
catch (Exception e) when (e.IsIOException())
@@ -347,7 +358,7 @@ public virtual void TestCopySubdir()
//(new File(path, "subdir")).mkdirs();
System.IO.Directory.CreateDirectory(new DirectoryInfo(Path.Combine(path.FullName, "subdir")).FullName);
Directory fsDir = new SimpleFSDirectory(path, null);
- Assert.AreEqual(0, (new RAMDirectory(fsDir, NewIOContext(Random))).ListAll().Length);
+ Assert.AreEqual(0, new RAMDirectory(fsDir, NewIOContext(Random)).ListAll().Length);
}
finally
{
@@ -526,4 +537,4 @@ private static void Search(Index.IndexReader r, int times)
}
}
}
-}
\ No newline at end of file
+}
diff --git a/src/Lucene.Net.Tests/Store/TestFileSwitchDirectory.cs b/src/Lucene.Net.Tests/Store/TestFileSwitchDirectory.cs
index a4438d7e81..64f726998e 100644
--- a/src/Lucene.Net.Tests/Store/TestFileSwitchDirectory.cs
+++ b/src/Lucene.Net.Tests/Store/TestFileSwitchDirectory.cs
@@ -58,7 +58,9 @@ public virtual void TestBasic()
// for now we wire Lucene40Codec because we rely upon its specific impl
bool oldValue = OldFormatImpersonationIsActive;
OldFormatImpersonationIsActive = true;
- IndexWriter writer = new IndexWriter(fsd, (new IndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer(Random))).SetMergePolicy(NewLogMergePolicy(false)).SetCodec(Codec.ForName("Lucene40")).SetUseCompoundFile(false));
+ IndexWriter writer = new IndexWriter(fsd,
+ new IndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer(Random))
+ .SetMergePolicy(NewLogMergePolicy(false)).SetCodec(Codec.ForName("Lucene40")).SetUseCompoundFile(false));
TestIndexWriterReader.CreateIndexNoClose(true, "ram", writer);
IndexReader reader = DirectoryReader.Open(writer, true);
Assert.AreEqual(100, reader.MaxDoc);
@@ -136,7 +138,7 @@ private static bool ContainsFile(Directory directory, string file) // LUCENENET
public virtual void TestDirectoryFilter()
{
Directory dir = NewFSSwitchDirectory(Collections.EmptySet());
- string name = "file";
+ const string name = "file"; // LUCENENET: made const
try
{
dir.CreateOutput(name, NewIOContext(Random)).Dispose();
@@ -187,4 +189,4 @@ private void CreateSequenceFile(Directory dir, string name, sbyte start, int siz
os.Dispose();
}
}
-}
\ No newline at end of file
+}
diff --git a/src/Lucene.Net.Tests/Store/TestHugeRamFile.cs b/src/Lucene.Net.Tests/Store/TestHugeRamFile.cs
index 8c23639acd..3c1877c2db 100644
--- a/src/Lucene.Net.Tests/Store/TestHugeRamFile.cs
+++ b/src/Lucene.Net.Tests/Store/TestHugeRamFile.cs
@@ -29,7 +29,8 @@ namespace Lucene.Net.Store
[TestFixture]
public class TestHugeRamFile : LuceneTestCase
{
- private static readonly long MAX_VALUE = (long)2 * (long)int.MaxValue;
+ // LUCENENET: made const, using long literal instead of cast to long
+ private const long MAX_VALUE = 2L * int.MaxValue;
///
/// Fake a huge ram file by using the same byte buffer for all
@@ -121,4 +122,4 @@ public virtual void TestHugeFile()
}
}
}
-}
\ No newline at end of file
+}
diff --git a/src/Lucene.Net.Tests/Store/TestLock.cs b/src/Lucene.Net.Tests/Store/TestLock.cs
index d00075fd1b..4409611dd7 100644
--- a/src/Lucene.Net.Tests/Store/TestLock.cs
+++ b/src/Lucene.Net.Tests/Store/TestLock.cs
@@ -1,6 +1,5 @@
using NUnit.Framework;
using System;
-using System.IO;
using Assert = Lucene.Net.TestFramework.Assert;
namespace Lucene.Net.Store
@@ -30,7 +29,7 @@ public class TestLock : LuceneTestCase
[Test]
public virtual void TestObtain()
{
- LockMock @lock = new LockMock(this);
+ LockMock @lock = new LockMock();
Lock.LOCK_POLL_INTERVAL = 10;
try
@@ -46,13 +45,6 @@ public virtual void TestObtain()
private class LockMock : Lock
{
- private readonly TestLock outerInstance;
-
- public LockMock(TestLock outerInstance)
- {
- this.outerInstance = outerInstance;
- }
-
public int LockAttempts;
public override bool Obtain()
@@ -72,4 +64,4 @@ public override bool IsLocked()
}
}
}
-}
\ No newline at end of file
+}
diff --git a/src/Lucene.Net.Tests/Store/TestLockFactory.cs b/src/Lucene.Net.Tests/Store/TestLockFactory.cs
index 98b576f38a..70d8c1d554 100644
--- a/src/Lucene.Net.Tests/Store/TestLockFactory.cs
+++ b/src/Lucene.Net.Tests/Store/TestLockFactory.cs
@@ -1,7 +1,6 @@
using J2N.Threading;
using Lucene.Net.Documents;
using Lucene.Net.Index.Extensions;
-using Lucene.Net.Support.Threading;
using NUnit.Framework;
using System;
using System.Collections.Generic;
@@ -52,7 +51,7 @@ public class TestLockFactory : LuceneTestCase
public virtual void TestCustomLockFactory()
{
Directory dir = new MockDirectoryWrapper(Random, new RAMDirectory());
- MockLockFactory lf = new MockLockFactory(this);
+ MockLockFactory lf = new MockLockFactory();
dir.SetLockFactory(lf);
// Lock prefix should have been set:
@@ -175,12 +174,12 @@ public virtual void _testStressLocks(LockFactory lockFactory, DirectoryInfo inde
Directory dir = NewFSDirectory(indexDir, lockFactory);
// First create a 1 doc index:
- IndexWriter w = new IndexWriter(dir, (new IndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer(Random))).SetOpenMode(OpenMode.CREATE));
+ IndexWriter w = new IndexWriter(dir, new IndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer(Random)).SetOpenMode(OpenMode.CREATE));
AddDoc(w);
w.Dispose();
WriterThread writer = new WriterThread(this, 100, dir);
- SearcherThread searcher = new SearcherThread(this, 100, dir);
+ SearcherThread searcher = new SearcherThread(100, dir);
writer.Start();
searcher.Start();
@@ -233,7 +232,7 @@ public virtual void TestNativeFSLockFactoryLockExists()
var lockFile = new FileInfo(Path.Combine(tempDir.FullName, "test.lock"));
using (lockFile.Create()){};
- var l = (new NativeFSLockFactory(tempDir)).MakeLock("test.lock");
+ var l = new NativeFSLockFactory(tempDir).MakeLock("test.lock");
Assert.IsTrue(l.Obtain(), "failed to obtain lock, got exception: {0}", l.FailureReason);
l.Dispose();
Assert.IsFalse(l.IsLocked(), "failed to release lock, got exception: {0}", l.FailureReason);
@@ -310,7 +309,7 @@ public override void Run()
{
try
{
- writer = new IndexWriter(dir, (new IndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer(Random))).SetOpenMode(OpenMode.APPEND));
+ writer = new IndexWriter(dir, new IndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer(Random)).SetOpenMode(OpenMode.APPEND));
}
catch (Exception e) when (e.IsIOException())
{
@@ -369,15 +368,12 @@ public override void Run()
private class SearcherThread : ThreadJob
{
- private readonly TestLockFactory outerInstance;
-
private readonly Directory dir;
private readonly int numIteration;
public bool HitException { get; private set; } = false;
- public SearcherThread(TestLockFactory outerInstance, int numIteration, Directory dir)
+ public SearcherThread(int numIteration, Directory dir)
{
- this.outerInstance = outerInstance;
this.numIteration = numIteration;
this.dir = dir;
}
@@ -430,13 +426,6 @@ public override void Run()
public class MockLockFactory : LockFactory
{
- private readonly TestLockFactory outerInstance;
-
- public MockLockFactory(TestLockFactory outerInstance)
- {
- this.outerInstance = outerInstance;
- }
-
public bool LockPrefixSet;
public IDictionary<string, Lock> LocksCreated = /*CollectionsHelper.SynchronizedMap(*/new Dictionary<string, Lock>()/*)*/;
public int MakeLockCount = 0;
@@ -454,7 +443,7 @@ public override Lock MakeLock(string lockName)
{
lock (this)
{
- Lock @lock = new MockLock(this);
+ Lock @lock = new MockLock();
LocksCreated[lockName] = @lock;
MakeLockCount++;
return @lock;
@@ -467,13 +456,6 @@ public override void ClearLock(string specificLockName)
public class MockLock : Lock
{
- private readonly TestLockFactory.MockLockFactory outerInstance;
-
- public MockLock(TestLockFactory.MockLockFactory outerInstance)
- {
- this.outerInstance = outerInstance;
- }
-
public int LockAttempts;
public override bool Obtain()
@@ -501,4 +483,4 @@ private void AddDoc(IndexWriter writer)
writer.AddDocument(doc);
}
}
-}
\ No newline at end of file
+}
diff --git a/src/Lucene.Net.Tests/Store/TestMockDirectoryWrapper.cs b/src/Lucene.Net.Tests/Store/TestMockDirectoryWrapper.cs
index 7735dc40f5..dd2cdb537a 100644
--- a/src/Lucene.Net.Tests/Store/TestMockDirectoryWrapper.cs
+++ b/src/Lucene.Net.Tests/Store/TestMockDirectoryWrapper.cs
@@ -1,6 +1,5 @@
using NUnit.Framework;
using System;
-using System.IO;
using Assert = Lucene.Net.TestFramework.Assert;
namespace Lucene.Net.Store
@@ -111,4 +110,4 @@ public void TestDiskFull()
dir.Dispose();
}
}
-}
\ No newline at end of file
+}
diff --git a/src/Lucene.Net.Tests/Store/TestMultiMMap.cs b/src/Lucene.Net.Tests/Store/TestMultiMMap.cs
index 0318c1f148..426f9bbc81 100644
--- a/src/Lucene.Net.Tests/Store/TestMultiMMap.cs
+++ b/src/Lucene.Net.Tests/Store/TestMultiMMap.cs
@@ -426,7 +426,7 @@ public void TestDisposeIndexInput()
File.WriteAllText(fileName, string.Empty, new UTF8Encoding(encoderShouldEmitUTF8Identifier: false) /* No BOM */);
MMapDirectory mmapDir = new MMapDirectory(dir);
- using (var indexInput = mmapDir.OpenInput(name, NewIOContext(Random)))
+ using (var _ = mmapDir.OpenInput(name, NewIOContext(Random)))
{
} // Dispose
@@ -434,4 +434,4 @@ public void TestDisposeIndexInput()
File.Delete(fileName);
}
}
-}
\ No newline at end of file
+}
diff --git a/src/Lucene.Net.Tests/Store/TestNRTCachingDirectory.cs b/src/Lucene.Net.Tests/Store/TestNRTCachingDirectory.cs
index 900da936b4..8e097ef569 100644
--- a/src/Lucene.Net.Tests/Store/TestNRTCachingDirectory.cs
+++ b/src/Lucene.Net.Tests/Store/TestNRTCachingDirectory.cs
@@ -127,7 +127,7 @@ public virtual void VerifyCompiles()
Directory fsDir = FSDirectory.Open(new DirectoryInfo("/path/to/index"));
NRTCachingDirectory cachedFSDir = new NRTCachingDirectory(fsDir, 2.0, 25.0);
IndexWriterConfig conf = new IndexWriterConfig(TEST_VERSION_CURRENT, analyzer);
- IndexWriter writer = new IndexWriter(cachedFSDir, conf);
+ IndexWriter _ = new IndexWriter(cachedFSDir, conf); // LUCENENET: discarding unused variable, was `writer`
}
[Test]
@@ -232,4 +232,4 @@ private void CreateSequenceFile(Directory dir, string name, sbyte start, int siz
os.Dispose();
}
}
-}
\ No newline at end of file
+}
diff --git a/src/Lucene.Net.Tests/Store/TestRAMDirectory.cs b/src/Lucene.Net.Tests/Store/TestRAMDirectory.cs
index 58d11a2e9b..730f3dc017 100644
--- a/src/Lucene.Net.Tests/Store/TestRAMDirectory.cs
+++ b/src/Lucene.Net.Tests/Store/TestRAMDirectory.cs
@@ -1,7 +1,6 @@
using J2N.Threading;
using Lucene.Net.Documents;
using Lucene.Net.Index.Extensions;
-using Lucene.Net.Support.IO;
using NUnit.Framework;
using System;
using System.Collections.Generic;
@@ -63,7 +62,7 @@ public override void SetUp()
indexDir = new DirectoryInfo(Path.Combine(tempDir, "RAMDirIndex"));
Directory dir = NewFSDirectory(indexDir);
- IndexWriter writer = new IndexWriter(dir, (new IndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer(Random))).SetOpenMode(OpenMode.CREATE));
+ IndexWriter writer = new IndexWriter(dir, new IndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer(Random)).SetOpenMode(OpenMode.CREATE));
// add some documents
Document doc = null;
for (int i = 0; i < docsToAdd; i++)
@@ -117,7 +116,7 @@ public virtual void TestRAMDirectorySize()
MockDirectoryWrapper ramDir = new MockDirectoryWrapper(Random, new RAMDirectory(dir, NewIOContext(Random)));
dir.Dispose();
- IndexWriter writer = new IndexWriter(ramDir, (new IndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer(Random))).SetOpenMode(OpenMode.APPEND));
+ IndexWriter writer = new IndexWriter(ramDir, new IndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer(Random)).SetOpenMode(OpenMode.APPEND));
writer.ForceMerge(1);
Assert.AreEqual(ramDir.GetSizeInBytes(), ramDir.GetRecomputedSizeInBytes());
@@ -259,4 +258,4 @@ public virtual void TestSeekToEOFThenBack()
dir.Dispose();
}
}
-}
\ No newline at end of file
+}
diff --git a/src/Lucene.Net.Tests/Support/BigObject.cs b/src/Lucene.Net.Tests/Support/BigObject.cs
deleted file mode 100644
index 20622d0fcc..0000000000
--- a/src/Lucene.Net.Tests/Support/BigObject.cs
+++ /dev/null
@@ -1,31 +0,0 @@
-namespace Lucene.Net.Support
-{
- /*
- * Licensed to the Apache Software Foundation (ASF) under one or more
- * contributor license agreements. See the NOTICE file distributed with
- * this work for additional information regarding copyright ownership.
- * The ASF licenses this file to You under the Apache License, Version 2.0
- * (the "License"); you may not use this file except in compliance with
- * the License. You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
- internal class BigObject
- {
- public int i = 0;
- public byte[] buf = null;
-
- public BigObject(int i)
- {
- this.i = i;
- buf = new byte[1024 * 1024]; //1MB
- }
- }
-}
diff --git a/src/Lucene.Net.Tests/Support/Codecs/TestDefaultDocValuesFormatFactory.cs b/src/Lucene.Net.Tests/Support/Codecs/TestDefaultDocValuesFormatFactory.cs
index d4e086dce4..bc3cc3c336 100644
--- a/src/Lucene.Net.Tests/Support/Codecs/TestDefaultDocValuesFormatFactory.cs
+++ b/src/Lucene.Net.Tests/Support/Codecs/TestDefaultDocValuesFormatFactory.cs
@@ -1,7 +1,6 @@
using Lucene.Net.Util;
using NUnit.Framework;
using System;
-using System.Reflection;
using Lucene.Net.Index;
namespace Lucene.Net.Codecs
@@ -34,7 +33,6 @@ public void TestScanLucene()
assertEquals(7, docValuesFormats.Count);
-
assertTrue(docValuesFormats.Contains("Lucene45"));
assertTrue(docValuesFormats.Contains("Lucene42"));
assertTrue(docValuesFormats.Contains("Lucene40"));
diff --git a/src/Lucene.Net.Tests/Support/Codecs/TestDefaultPostingsFormatFactory.cs b/src/Lucene.Net.Tests/Support/Codecs/TestDefaultPostingsFormatFactory.cs
index 34da52fb17..72e3deddee 100644
--- a/src/Lucene.Net.Tests/Support/Codecs/TestDefaultPostingsFormatFactory.cs
+++ b/src/Lucene.Net.Tests/Support/Codecs/TestDefaultPostingsFormatFactory.cs
@@ -1,7 +1,6 @@
using Lucene.Net.Util;
using NUnit.Framework;
using System;
-using System.Reflection;
using Lucene.Net.Index;
namespace Lucene.Net.Codecs
diff --git a/src/Lucene.Net.Tests/Support/CollisionTester.cs b/src/Lucene.Net.Tests/Support/CollisionTester.cs
deleted file mode 100644
index b63d3fc5dc..0000000000
--- a/src/Lucene.Net.Tests/Support/CollisionTester.cs
+++ /dev/null
@@ -1,46 +0,0 @@
-namespace Lucene.Net.Support
-{
- /*
- * Licensed to the Apache Software Foundation (ASF) under one or more
- * contributor license agreements. See the NOTICE file distributed with
- * this work for additional information regarding copyright ownership.
- * The ASF licenses this file to You under the Apache License, Version 2.0
- * (the "License"); you may not use this file except in compliance with
- * the License. You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
- internal class CollisionTester
- {
- int id;
- int hashCode;
-
- public CollisionTester(int id, int hashCode)
- {
- this.id = id;
- this.hashCode = hashCode;
- }
-
- public override int GetHashCode()
- {
- return hashCode;
- }
-
- public override bool Equals(object obj)
- {
- if (obj is CollisionTester)
- {
- return this.id == ((CollisionTester)obj).id;
- }
- else
- return base.Equals(obj);
- }
- }
-}
diff --git a/src/Lucene.Net.Tests/Support/Diagnostics/TestDebugging.cs b/src/Lucene.Net.Tests/Support/Diagnostics/TestDebugging.cs
index bc43a45ec9..2476140908 100644
--- a/src/Lucene.Net.Tests/Support/Diagnostics/TestDebugging.cs
+++ b/src/Lucene.Net.Tests/Support/Diagnostics/TestDebugging.cs
@@ -96,7 +96,7 @@ public void TestConditionFalse()
}
[Test, LuceneNetSpecific]
- [TestCaseSource(typeof(TestDebugging), "GetMessageFormatCases", new object[] { 1 })]
+ [TestCaseSource(typeof(TestDebugging), nameof(GetMessageFormatCases), new object[] { 1 })]
public void TestMessageFormatting_1Parameter(string expectedMessage, string messageFormat, object p0)
{
TestWithAsserts(enabled: true, () =>
@@ -107,7 +107,7 @@ public void TestMessageFormatting_1Parameter(string expectedMessage, string mess
}
[Test, LuceneNetSpecific]
- [TestCaseSource(typeof(TestDebugging), "GetMessageFormatCases", new object[] { 2 })]
+ [TestCaseSource(typeof(TestDebugging), nameof(GetMessageFormatCases), new object[] { 2 })]
public void TestMessageFormatting_2Parameters(string expectedMessage, string messageFormat, object p0, object p1)
{
TestWithAsserts(enabled: true, () =>
@@ -118,7 +118,7 @@ public void TestMessageFormatting_2Parameters(string expectedMessage, string mes
}
[Test, LuceneNetSpecific]
- [TestCaseSource(typeof(TestDebugging), "GetMessageFormatCases", new object[] { 3 })]
+ [TestCaseSource(typeof(TestDebugging), nameof(GetMessageFormatCases), new object[] { 3 })]
public void TestMessageFormatting_3Parameters(string expectedMessage, string messageFormat, object p0, object p1, object p2)
{
TestWithAsserts(enabled: true, () =>
@@ -129,7 +129,7 @@ public void TestMessageFormatting_3Parameters(string expectedMessage, string mes
}
[Test, LuceneNetSpecific]
- [TestCaseSource(typeof(TestDebugging), "GetMessageFormatCases", new object[] { 4 })]
+ [TestCaseSource(typeof(TestDebugging), nameof(GetMessageFormatCases), new object[] { 4 })]
public void TestMessageFormatting_4Parameters(string expectedMessage, string messageFormat, object p0, object p1, object p2, object p3)
{
TestWithAsserts(enabled: true, () =>
@@ -140,7 +140,7 @@ public void TestMessageFormatting_4Parameters(string expectedMessage, string mes
}
[Test, LuceneNetSpecific]
- [TestCaseSource(typeof(TestDebugging), "GetMessageFormatCases", new object[] { 5 })]
+ [TestCaseSource(typeof(TestDebugging), nameof(GetMessageFormatCases), new object[] { 5 })]
public void TestMessageFormatting_5Parameters(string expectedMessage, string messageFormat, object p0, object p1, object p2, object p3, object p4)
{
TestWithAsserts(enabled: true, () =>
@@ -151,7 +151,7 @@ public void TestMessageFormatting_5Parameters(string expectedMessage, string mes
}
[Test, LuceneNetSpecific]
- [TestCaseSource(typeof(TestDebugging), "GetMessageFormatCases", new object[] { 6 })]
+ [TestCaseSource(typeof(TestDebugging), nameof(GetMessageFormatCases), new object[] { 6 })]
public void TestMessageFormatting_6Parameters(string expectedMessage, string messageFormat, object p0, object p1, object p2, object p3, object p4, object p5)
{
TestWithAsserts(enabled: true, () =>
@@ -162,7 +162,7 @@ public void TestMessageFormatting_6Parameters(string expectedMessage, string mes
}
[Test, LuceneNetSpecific]
- [TestCaseSource(typeof(TestDebugging), "GetMessageFormatCases", new object[] { 7 })]
+ [TestCaseSource(typeof(TestDebugging), nameof(GetMessageFormatCases), new object[] { 7 })]
public void TestMessageFormatting_7Parameters(string expectedMessage, string messageFormat, object p0, object p1, object p2, object p3, object p4, object p5, object p6)
{
TestWithAsserts(enabled: true, () =>
diff --git a/src/Lucene.Net.Tests/Support/Document/Extensions/TestDocumentExtensions.cs b/src/Lucene.Net.Tests/Support/Document/Extensions/TestDocumentExtensions.cs
index 4c1f0ecc88..5ce9699bb2 100644
--- a/src/Lucene.Net.Tests/Support/Document/Extensions/TestDocumentExtensions.cs
+++ b/src/Lucene.Net.Tests/Support/Document/Extensions/TestDocumentExtensions.cs
@@ -39,7 +39,7 @@ public void TestGetField()
new BinaryDocValuesField("someOtherName", new BytesRef("Foobar2")),
target
};
-
+
BinaryDocValuesField field = document.GetField<BinaryDocValuesField>("theName");
Assert.AreSame(target, field);
@@ -95,7 +95,7 @@ public void TestAddBinaryDocValuesField()
public void TestAddDoubleDocValuesField()
{
DoubleDocValuesField field = null;
- double value = 123.456d;
+ const double value = 123.456d;
AssertDocumentExtensionAddsToDocument(document => field = document.AddDoubleDocValuesField("theName", value));
Assert.AreEqual("theName", field.Name);
Assert.AreEqual(J2N.BitConversion.DoubleToRawInt64Bits(value), field.GetDoubleValueOrDefault());
@@ -106,8 +106,8 @@ public void TestAddDoubleDocValuesField()
public void TestAddDoubleField_Stored()
{
DoubleField field = null;
- double value = 123.456d;
- var stored = Field.Store.YES;
+ const double value = 123.456d;
+ const Field.Store stored = Field.Store.YES;
AssertDocumentExtensionAddsToDocument(document => field = document.AddDoubleField("theName", value, stored));
Assert.AreEqual("theName", field.Name);
Assert.AreEqual(value, field.GetDoubleValueOrDefault(), 0.0000001d); // We don't really care about precision, just checking to see if the value got passed through
@@ -119,7 +119,7 @@ public void TestAddDoubleField_Stored()
public void TestAddDoubleField_FieldType()
{
DoubleField field = null;
- double value = 123.456d;
+ const double value = 123.456d;
var fieldType = new FieldType
{
IsIndexed = true,
@@ -140,7 +140,7 @@ public void TestAddDoubleField_FieldType()
public void TestAddSingleDocValuesField()
{
SingleDocValuesField field = null;
- float value = 123.456f;
+ const float value = 123.456f;
AssertDocumentExtensionAddsToDocument(document => field = document.AddSingleDocValuesField("theName", value));
Assert.AreEqual("theName", field.Name);
Assert.AreEqual(J2N.BitConversion.SingleToRawInt32Bits(value), field.GetSingleValueOrDefault());
@@ -151,8 +151,8 @@ public void TestAddSingleDocValuesField()
public void TestAddSingleField_Stored()
{
SingleField field = null;
- float value = 123.456f;
- var stored = Field.Store.YES;
+ const float value = 123.456f;
+ const Field.Store stored = Field.Store.YES;
AssertDocumentExtensionAddsToDocument(document => field = document.AddSingleField("theName", value, stored));
Assert.AreEqual("theName", field.Name);
Assert.AreEqual(value, field.GetSingleValueOrDefault(), 0.0000001f); // We don't really care about precision, just checking to see if the value got passed through
@@ -164,7 +164,7 @@ public void TestAddSingleField_Stored()
public void TestAddSingleField_FieldType()
{
SingleField field = null;
- float value = 123.456f;
+ const float value = 123.456f;
var fieldType = new FieldType
{
IsIndexed = true,
@@ -187,8 +187,8 @@ public void TestAddSingleField_FieldType()
public void TestAddInt32Field_Stored()
{
Int32Field field = null;
- int value = 123;
- var stored = Field.Store.YES;
+ const int value = 123;
+ const Field.Store stored = Field.Store.YES;
AssertDocumentExtensionAddsToDocument(document => field = document.AddInt32Field("theName", value, stored));
Assert.AreEqual("theName", field.Name);
Assert.AreEqual(value, field.GetInt32ValueOrDefault());
@@ -200,7 +200,7 @@ public void TestAddInt32Field_Stored()
public void TestAddInt32Field_FieldType()
{
Int32Field field = null;
- int value = 123;
+ const int value = 123;
var fieldType = new FieldType
{
IsIndexed = true,
@@ -223,8 +223,8 @@ public void TestAddInt32Field_FieldType()
public void TestAddInt64Field_Stored()
{
Int64Field field = null;
- long value = 123;
- var stored = Field.Store.YES;
+ const long value = 123;
+ const Field.Store stored = Field.Store.YES;
AssertDocumentExtensionAddsToDocument(document => field = document.AddInt64Field("theName", value, stored));
Assert.AreEqual("theName", field.Name);
Assert.AreEqual(value, field.GetInt64ValueOrDefault());
@@ -236,7 +236,7 @@ public void TestAddInt64Field_Stored()
public void TestAddInt64Field_FieldType()
{
Int64Field field = null;
- long value = 123;
+ const long value = 123;
var fieldType = new FieldType
{
IsIndexed = true,
@@ -257,7 +257,7 @@ public void TestAddInt64Field_FieldType()
public void TestAddNumericDocValuesField()
{
NumericDocValuesField field = null;
- long value = 123;
+ const long value = 123;
AssertDocumentExtensionAddsToDocument(document => field = document.AddNumericDocValuesField("theName", value));
Assert.AreEqual("theName", field.Name);
Assert.AreEqual(value, field.GetInt64ValueOrDefault());
@@ -293,8 +293,8 @@ public void TestAddStoredField_ByteArray_WithOffset()
{
StoredField field = null;
byte[] bytes = Encoding.UTF8.GetBytes("FoobarAgain");
- int offset = 3;
- int length = 3;
+ const int offset = 3;
+ const int length = 3;
AssertDocumentExtensionAddsToDocument(document => field = document.AddStoredField("theName", bytes, offset, length));
Assert.AreEqual("theName", field.Name);
Assert.AreEqual(bytes, field.GetBinaryValue().Bytes);
@@ -318,7 +318,7 @@ public void TestAddStoredField_BytesRef()
public void TestAddStoredField_String()
{
StoredField field = null;
- string value = "Foobar";
+ const string value = "Foobar";
AssertDocumentExtensionAddsToDocument(document => field = document.AddStoredField("theName", value));
Assert.AreEqual("theName", field.Name);
Assert.AreEqual(value, field.GetStringValue());
@@ -329,7 +329,7 @@ public void TestAddStoredField_String()
public void TestAddStoredField_Int32()
{
StoredField field = null;
- int value = 123;
+ const int value = 123;
AssertDocumentExtensionAddsToDocument(document => field = document.AddStoredField("theName", value));
Assert.AreEqual("theName", field.Name);
Assert.AreEqual(value, field.GetInt32ValueOrDefault());
@@ -340,7 +340,7 @@ public void TestAddStoredField_Int32()
public void TestAddStoredField_Single()
{
StoredField field = null;
- float value = 123.456f;
+ const float value = 123.456f;
AssertDocumentExtensionAddsToDocument(document => field = document.AddStoredField("theName", value));
Assert.AreEqual("theName", field.Name);
Assert.AreEqual(value, field.GetSingleValueOrDefault(), 0.0000001f); // We don't really care about precision, just checking to see if the value got passed through
@@ -351,7 +351,7 @@ public void TestAddStoredField_Single()
public void TestAddStoredField_Int64()
{
StoredField field = null;
- long value = 123;
+ const long value = 123;
AssertDocumentExtensionAddsToDocument(document => field = document.AddStoredField("theName", value));
Assert.AreEqual("theName", field.Name);
Assert.AreEqual(value, field.GetInt64ValueOrDefault());
@@ -362,7 +362,7 @@ public void TestAddStoredField_Int64()
public void TestAddStoredField_Double()
{
StoredField field = null;
- double value = 123.456d;
+ const double value = 123.456d;
AssertDocumentExtensionAddsToDocument(document => field = document.AddStoredField("theName", value));
Assert.AreEqual("theName", field.Name);
Assert.AreEqual(value, field.GetDoubleValueOrDefault(), 0.0000001d); // We don't really care about precision, just checking to see if the value got passed through
@@ -373,7 +373,7 @@ public void TestAddStoredField_Double()
public void TestAddStringField()
{
StringField field = null;
- string value = "Foobar";
+ const string value = "Foobar";
AssertDocumentExtensionAddsToDocument(document => field = document.AddStringField("theName", value, Field.Store.YES));
Assert.AreEqual("theName", field.Name);
Assert.AreEqual(value, field.GetStringValue());
@@ -396,7 +396,7 @@ public void TestAddTextField_TextReader()
public void TestAddTextField_Stored()
{
TextField field = null;
- string value = "Foobar";
+ const string value = "Foobar";
AssertDocumentExtensionAddsToDocument(document => field = document.AddTextField("theName", value, Field.Store.YES));
Assert.AreEqual("theName", field.Name);
Assert.AreEqual(value, field.GetStringValue());
@@ -426,4 +426,4 @@ private void AssertDocumentExtensionAddsToDocument(Func extensio
Assert.Throws(() => extension(document));
}
}
-}
\ No newline at end of file
+}
diff --git a/src/Lucene.Net.Tests/Support/IO/TestFileSupport.cs b/src/Lucene.Net.Tests/Support/IO/TestFileSupport.cs
index 4cce353a75..fe3463ce24 100644
--- a/src/Lucene.Net.Tests/Support/IO/TestFileSupport.cs
+++ b/src/Lucene.Net.Tests/Support/IO/TestFileSupport.cs
@@ -27,7 +27,7 @@ namespace Lucene.Net.Support.IO
public class TestFileSupport : LuceneTestCase
{
- private static String platformId = RuntimeInformation.FrameworkDescription.Replace('.', '-');
+ private readonly static string platformId = RuntimeInformation.FrameworkDescription.Replace('.', '-');
/** Location to store tests in */
private DirectoryInfo tempDirectory;
@@ -68,15 +68,15 @@ public void TestCreateRandomFileAsStream()
using (var file2 = FileSupport.CreateTempFileAsStream("foo", "bar", dir, new FileStreamOptions { Access = FileAccess.Write }))
{
Assert.AreNotEqual(file1.Name, file2.Name);
- }
+ }
}
[Test, LuceneNetSpecific]
public void TestGetCanonicalPath()
{
// Should work for Unix/Windows.
- String dots = "..";
- String @base = tempDirectory.GetCanonicalPath();
+ const string dots = "..";
+ string @base = tempDirectory.GetCanonicalPath();
@base = addTrailingSlash(@base);
FileInfo f = new FileInfo(Path.Combine(@base, "temp.tst"));
@@ -126,13 +126,13 @@ public void TestGetCanonicalPath()
DirectoryInfo testdir = new DirectoryInfo(Path.Combine(@base, "long-" + platformId));
testdir.Create();
FileInfo f1 = new FileInfo(Path.Combine(testdir.FullName, "longfilename" + platformId + ".tst"));
- using (FileStream fos = new FileStream(f1.FullName, FileMode.CreateNew, FileAccess.Write))
+ using (FileStream _ = new FileStream(f1.FullName, FileMode.CreateNew, FileAccess.Write))
{ }
FileInfo f2 = null, f3 = null;
DirectoryInfo dir2 = null;
try
{
- String dirName1 = f1.GetCanonicalPath();
+ string dirName1 = f1.GetCanonicalPath();
FileInfo f4 = new FileInfo(Path.Combine(testdir.FullName, "longfi~1.tst"));
/*
* If the "short file name" doesn't exist, then assume that the
@@ -140,7 +140,7 @@ public void TestGetCanonicalPath()
*/
if (f4.Exists)
{
- String dirName2 = f4.GetCanonicalPath();
+ string dirName2 = f4.GetCanonicalPath();
assertEquals("Test 6: Incorrect Path Returned.", dirName1,
dirName2);
dir2 = new DirectoryInfo(Path.Combine(testdir.FullName, "longdirectory" + platformId));
@@ -163,7 +163,7 @@ public void TestGetCanonicalPath()
f2 = new FileInfo(testdir.FullName + Path.DirectorySeparatorChar + "longdirectory"
+ platformId + Path.DirectorySeparatorChar + "Test" + Path.DirectorySeparatorChar + dots
+ Path.DirectorySeparatorChar + "longfilename.tst");
- using (FileStream fos2 = new FileStream(f2.FullName, FileMode.CreateNew, FileAccess.Write))
+ using (FileStream _ = new FileStream(f2.FullName, FileMode.CreateNew, FileAccess.Write))
{ }
dirName1 = f2.GetCanonicalPath();
f3 = new FileInfo(testdir.FullName + Path.DirectorySeparatorChar + "longdi~1"
@@ -177,20 +177,14 @@ public void TestGetCanonicalPath()
finally
{
f1.Delete();
- if (f2 != null)
- {
- f2.Delete();
- }
- if (dir2 != null)
- {
- dir2.Delete();
- }
+ f2?.Delete();
+ dir2?.Delete();
testdir.Delete();
}
}
}
- private static String addTrailingSlash(String path)
+ private static string addTrailingSlash(string path)
{
if (Path.DirectorySeparatorChar == path[path.Length - 1])
{
@@ -205,8 +199,8 @@ public void TestGetCanonicalPathDriveLetterNormalization()
bool onWindows = (Path.DirectorySeparatorChar == '\\');
if (onWindows)
{
- var path = @"f:\testing\on\Windows";
- var expected = @"F:\testing\on\Windows";
+ const string path = @"f:\testing\on\Windows";
+ const string expected = @"F:\testing\on\Windows";
var dir = new DirectoryInfo(path);
diff --git a/src/Lucene.Net.Tests/Support/SmallObject.cs b/src/Lucene.Net.Tests/Support/SmallObject.cs
deleted file mode 100644
index f255145e74..0000000000
--- a/src/Lucene.Net.Tests/Support/SmallObject.cs
+++ /dev/null
@@ -1,29 +0,0 @@
-namespace Lucene.Net.Support
-{
- /*
- * Licensed to the Apache Software Foundation (ASF) under one or more
- * contributor license agreements. See the NOTICE file distributed with
- * this work for additional information regarding copyright ownership.
- * The ASF licenses this file to You under the Apache License, Version 2.0
- * (the "License"); you may not use this file except in compliance with
- * the License. You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
- internal class SmallObject
- {
- public int i = 0;
-
- public SmallObject(int i)
- {
- this.i = i;
- }
- }
-}
diff --git a/src/Lucene.Net.Tests/Support/TestCRC32.cs b/src/Lucene.Net.Tests/Support/TestCRC32.cs
index b0e352676d..9be6dabc6e 100644
--- a/src/Lucene.Net.Tests/Support/TestCRC32.cs
+++ b/src/Lucene.Net.Tests/Support/TestCRC32.cs
@@ -1,6 +1,5 @@
using Lucene.Net.Attributes;
using NUnit.Framework;
-using System;
using Assert = Lucene.Net.TestFramework.Assert;
namespace Lucene.Net.Support
@@ -25,8 +24,6 @@ namespace Lucene.Net.Support
[TestFixture]
public class TestCRC32
{
- ///
- ///
[Test, LuceneNetSpecific]
public virtual void TestCRC32_()
{
@@ -37,8 +34,8 @@ public virtual void TestCRC32_()
IChecksum digest = new CRC32();
digest.Update(b, 0, b.Length);
- Int64 expected = 688229491;
+ const long expected = 688229491;
Assert.AreEqual(expected, digest.Value);
}
}
-}
\ No newline at end of file
+}
diff --git a/src/Lucene.Net.Tests/Support/TestCase.cs b/src/Lucene.Net.Tests/Support/TestCase.cs
deleted file mode 100644
index 2ce1a8613f..0000000000
--- a/src/Lucene.Net.Tests/Support/TestCase.cs
+++ /dev/null
@@ -1,50 +0,0 @@
-namespace Lucene.Net
-{
- /*
- * Licensed to the Apache Software Foundation (ASF) under one or more
- * contributor license agreements. See the NOTICE file distributed with
- * this work for additional information regarding copyright ownership.
- * The ASF licenses this file to You under the Apache License, Version 2.0
- * (the "License"); you may not use this file except in compliance with
- * the License. You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
- ///
- /// Support for junit.framework.TestCase.getName().
- /// {{Lucene.Net-2.9.1}} Move to another location after LUCENENET-266
- ///
- public static class TestCase // LUCENENET specific: CA1052 Static holder types should be Static or NotInheritable
- {
- public static string GetName()
- {
- return GetTestCaseName(false);
- }
-
- public static string GetFullName()
- {
- return GetTestCaseName(true);
- }
-
- static string GetTestCaseName(bool fullName)
- {
- System.Diagnostics.StackTrace stackTrace = new System.Diagnostics.StackTrace();
- for (int i = 0; i < stackTrace.FrameCount; i++)
- {
- System.Reflection.MethodBase method = stackTrace.GetFrame(i).GetMethod();
- object[] testAttrs = method.GetCustomAttributes(typeof(NUnit.Framework.TestAttribute), false);
- if (testAttrs != null && testAttrs.Length > 0)
- if (fullName) return method.DeclaringType.FullName + "." + method.Name;
- else return method.Name;
- }
- return "GetTestCaseName[UnknownTestMethod]";
- }
- }
-}
diff --git a/src/Lucene.Net.Tests/Support/TestEnumerableExtensions.cs b/src/Lucene.Net.Tests/Support/TestEnumerableExtensions.cs
index dd99e5f348..46a70350c6 100644
--- a/src/Lucene.Net.Tests/Support/TestEnumerableExtensions.cs
+++ b/src/Lucene.Net.Tests/Support/TestEnumerableExtensions.cs
@@ -1,8 +1,6 @@
using Lucene.Net.Attributes;
using Lucene.Net.Util;
using NUnit.Framework;
-using System;
-using System.Collections;
using System.Collections.Generic;
using System.Linq;
using Assert = Lucene.Net.TestFramework.Assert;
diff --git a/src/Lucene.Net.Tests/Support/TestIDisposable.cs b/src/Lucene.Net.Tests/Support/TestIDisposable.cs
index 3af48634d3..9330a11092 100644
--- a/src/Lucene.Net.Tests/Support/TestIDisposable.cs
+++ b/src/Lucene.Net.Tests/Support/TestIDisposable.cs
@@ -43,8 +43,8 @@ public void TestReadersWriters()
IndexWriter writer;
IndexReader reader;
IndexWriterConfig conf = new IndexWriterConfig(
- Util.LuceneVersion.LUCENE_CURRENT,
- new WhitespaceAnalyzer(Util.LuceneVersion.LUCENE_CURRENT));
+ LuceneVersion.LUCENE_CURRENT,
+ new WhitespaceAnalyzer(LuceneVersion.LUCENE_CURRENT));
using (writer = new IndexWriter(dir, conf /*new WhitespaceAnalyzer(), true, IndexWriter.MaxFieldLength.UNLIMITED)*/))
{
@@ -60,7 +60,7 @@ public void TestReadersWriters()
Assert.Throws(() => reader.RemoveReaderDisposedListener(null), "IndexReader shouldn't be open here");
}
-
+
Assert.Throws(() => writer.AddDocument(doc), "IndexWriter shouldn't be open here");
Assert.IsTrue(dir.IsOpen, "RAMDirectory");
diff --git a/src/Lucene.Net.Tests/Support/TestOSClass.cs b/src/Lucene.Net.Tests/Support/TestOSClass.cs
index b7e7f88790..193a7d7dc2 100644
--- a/src/Lucene.Net.Tests/Support/TestOSClass.cs
+++ b/src/Lucene.Net.Tests/Support/TestOSClass.cs
@@ -29,17 +29,17 @@ public class TestOSClass
public void TestFSDirectorySync()
{
DirectoryInfo path = new DirectoryInfo(Path.Combine(Path.GetTempPath(), "testsync"));
- Lucene.Net.Store.Directory directory = new Lucene.Net.Store.SimpleFSDirectory(path, null);
+ Lucene.Net.Store.Directory directory = new Store.SimpleFSDirectory(path, null);
try
{
- Lucene.Net.Store.IndexOutput io = directory.CreateOutput("syncfile", new Store.IOContext());
+ Store.IndexOutput io = directory.CreateOutput("syncfile", new Store.IOContext());
io.Dispose();
directory.Sync(new string[] { "syncfile" });
}
finally
{
directory.Dispose();
- Lucene.Net.Util.TestUtil.Rm(path);
+ Util.TestUtil.Rm(path);
}
}
}
diff --git a/src/Lucene.Net.Tests/Support/TestOldPatches.cs b/src/Lucene.Net.Tests/Support/TestOldPatches.cs
index 8398cae8fb..dda06535aa 100644
--- a/src/Lucene.Net.Tests/Support/TestOldPatches.cs
+++ b/src/Lucene.Net.Tests/Support/TestOldPatches.cs
@@ -1,6 +1,5 @@
using Lucene.Net.Analysis;
using Lucene.Net.Index;
-using Lucene.Net.Search;
using Lucene.Net.Util;
using NUnit.Framework;
using System.IO;
@@ -64,13 +63,13 @@ public class TestOldPatches : LuceneTestCase
// queryPreSerialized.Add(new Lucene.Net.Search.TermQuery(new Lucene.Net.Index.Term("country", "Russia")), Occur.MUST);
// queryPreSerialized.Add(new Lucene.Net.Search.TermQuery(new Lucene.Net.Index.Term("country", "France")), Occur.MUST);
- // //now serialize it
+ // //now serialize it
// System.Runtime.Serialization.Formatters.Binary.BinaryFormatter serializer = new System.Runtime.Serialization.Formatters.Binary.BinaryFormatter();
// System.IO.MemoryStream memoryStream = new System.IO.MemoryStream();
//#pragma warning disable SYSLIB0011 // Type or member is obsolete (BinaryFormatter)
// serializer.Serialize(memoryStream, queryPreSerialized);
- // //now deserialize
+ // //now deserialize
// memoryStream.Seek(0, System.IO.SeekOrigin.Begin);
// Lucene.Net.Search.BooleanQuery queryPostSerialized = (Lucene.Net.Search.BooleanQuery)serializer.Deserialize(memoryStream);
//#pragma warning restore SYSLIB0011 // Type or member is obsolete (BinaryFormatter)
@@ -102,7 +101,7 @@ public class TestOldPatches : LuceneTestCase
// wr.AddDocument(doc);
// wr.Dispose();
- // //now serialize it
+ // //now serialize it
// System.Runtime.Serialization.Formatters.Binary.BinaryFormatter serializer = new System.Runtime.Serialization.Formatters.Binary.BinaryFormatter();
// System.IO.MemoryStream memoryStream = new System.IO.MemoryStream();
// serializer.Serialize(memoryStream, ramDIR);
@@ -111,7 +110,7 @@ public class TestOldPatches : LuceneTestCase
// ramDIR.Dispose();
// ramDIR = null;
- // //now deserialize
+ // //now deserialize
// memoryStream.Seek(0, System.IO.SeekOrigin.Begin);
// Lucene.Net.Store.RAMDirectory ramDIR2 = (Lucene.Net.Store.RAMDirectory)serializer.Deserialize(memoryStream);
@@ -145,10 +144,10 @@ public class TestOldPatches : LuceneTestCase
public void Test_Index_ReusableStringReader()
{
var conf = new IndexWriterConfig(Version.LUCENE_CURRENT, new TestAnalyzer());
- Lucene.Net.Index.IndexWriter wr = new Lucene.Net.Index.IndexWriter(new Lucene.Net.Store.RAMDirectory(), conf /*new TestAnalyzer(), true, IndexWriter.MaxFieldLength.UNLIMITED*/);
+ IndexWriter wr = new IndexWriter(new Store.RAMDirectory(), conf /*new TestAnalyzer(), true, IndexWriter.MaxFieldLength.UNLIMITED*/);
- Lucene.Net.Documents.Document doc = new Lucene.Net.Documents.Document();
- Lucene.Net.Documents.Field f1 = new Lucene.Net.Documents.Field("f1", TEST_STRING, Lucene.Net.Documents.Field.Store.YES, Lucene.Net.Documents.Field.Index.ANALYZED);
+ Documents.Document doc = new Documents.Document();
+ Documents.Field f1 = new Documents.Field("f1", TEST_STRING, Documents.Field.Store.YES, Documents.Field.Index.ANALYZED);
doc.Add(f1);
wr.AddDocument(doc);
@@ -157,7 +156,7 @@ public void Test_Index_ReusableStringReader()
private const string TEST_STRING = "First Line\nSecond Line";
- private class TestAnalyzer : Lucene.Net.Analysis.Analyzer
+ private class TestAnalyzer : Analyzer
{
public TestAnalyzer()
//: base(new TestReuseStrategy())
@@ -195,13 +194,13 @@ protected internal override TextReader InitReader(string fieldName, TextReader r
// }
//}
- private class TestTokenizer : Lucene.Net.Analysis.Tokenizer
+ private class TestTokenizer : Tokenizer
{
- public TestTokenizer(System.IO.TextReader reader)
+ public TestTokenizer(TextReader reader)
: base(reader)
{
- //Caution: "Reader" is actually of type "ReusableStringReader" and some
- //methods (for ex. "ReadToEnd", "Peek", "ReadLine") is not implemented.
+ //Caution: "Reader" is actually of type "ReusableStringReader" and some
+ //methods (for ex. "ReadToEnd", "Peek", "ReadLine") are not implemented.
Assert.AreEqual("ReusableStringReader", reader.GetType().Name);
Assert.AreEqual("First Line", reader.ReadLine(), "\"ReadLine\" method is not implemented");
@@ -233,7 +232,7 @@ public override sealed bool IncrementToken()
// Assert.IsFalse(reader.IsCurrent());
- // int resCount1 = new IndexSearcher(reader).Search(new TermQuery(new Term("TEST", "mytest")),100).TotalHits;
+ // int resCount1 = new IndexSearcher(reader).Search(new TermQuery(new Term("TEST", "mytest")),100).TotalHits;
// Assert.AreEqual(1, resCount1);
// writer.Commit();
@@ -257,7 +256,7 @@ public override sealed bool IncrementToken()
// LUCENENET TODO: Should IndexSearcher really implement MarshalByrefObj?
////-------------------------------------------
//int ANYPORT = 0;
- //[Test]
+ //[Test]
//[Description("LUCENENET-100")]
//public void Test_Search_FieldDoc()
//{
@@ -276,13 +275,13 @@ public override sealed bool IncrementToken()
// var reader = DirectoryReader.Open(LUCENENET_100_Dir);
// Lucene.Net.Search.IndexSearcher indexSearcher = new Lucene.Net.Search.IndexSearcher(reader);
// System.Runtime.Remoting.RemotingServices.Marshal(indexSearcher, "Searcher");
-
+
// LUCENENET_100_ClientSearch();
// //Wait Client to finish
// while (LUCENENET_100_testFinished == false) System.Threading.Thread.Sleep(10);
-
+
// if (LUCENENET_100_Exception != null) throw LUCENENET_100_Exception;
//}
diff --git a/src/Lucene.Net.Tests/Support/TestToStringUtils.cs b/src/Lucene.Net.Tests/Support/TestToStringUtils.cs
index 5c8ae79ca3..263b42c8c5 100644
--- a/src/Lucene.Net.Tests/Support/TestToStringUtils.cs
+++ b/src/Lucene.Net.Tests/Support/TestToStringUtils.cs
@@ -2,7 +2,9 @@
using Lucene.Net.Util;
using NUnit.Framework;
using System.Globalization;
+#if !FEATURE_CULTUREINFO_CURRENTCULTURE_SETTER
using System.Threading;
+#endif
namespace Lucene.Net.Support
{
@@ -25,13 +27,14 @@ namespace Lucene.Net.Support
///
/// This test was added for .NET compatibility - LUCENENET specific
- ///
+ ///
/// It tests the Lucene.Net.Util.ToStringUtils which was untested in the Java counterpart,
/// but required some help to ensure .NET compatibility.
///
public class TestToStringUtils : LuceneTestCase
{
- CultureInfo originalCulture;
+ private CultureInfo originalCulture;
+
public override void SetUp()
{
base.SetUp();
@@ -54,12 +57,12 @@ public override void TearDown()
[Test, LuceneNetSpecific]
public void TestBoost()
{
- float boostNormal = 1f;
- float boostFractional = 2.5f;
- float boostNonFractional = 5f;
- float boostLong = 1.111111111f;
- float boostZeroNonFractional = 0f;
- float boostZeroFractional = 0.123f;
+ const float boostNormal = 1f;
+ const float boostFractional = 2.5f;
+ const float boostNonFractional = 5f;
+ const float boostLong = 1.111111111f;
+ const float boostZeroNonFractional = 0f;
+ const float boostZeroFractional = 0.123f;
var cultures = CultureInfo.GetCultures(CultureTypes.SpecificCultures | CultureTypes.NeutralCultures);
diff --git a/src/Lucene.Net.Tests/Support/Threading/TestUninterruptableMonitor.cs b/src/Lucene.Net.Tests/Support/Threading/TestUninterruptableMonitor.cs
index 1a28eaefd6..ff82cb8323 100644
--- a/src/Lucene.Net.Tests/Support/Threading/TestUninterruptableMonitor.cs
+++ b/src/Lucene.Net.Tests/Support/Threading/TestUninterruptableMonitor.cs
@@ -31,9 +31,9 @@ namespace Lucene.Net.Support
[TestFixture]
public class TestUninterruptableMonitor : LuceneTestCase
{
- private class TransactionlThreadInterrupt : ThreadJob
+ private class TransactionalThreadInterrupt : ThreadJob
{
- private static AtomicInt32 transactionNumber = new AtomicInt32(0);
+ private readonly static AtomicInt32 transactionNumber = new AtomicInt32(0);
// Share locks between threads
private static readonly object lock1 = new object();
@@ -45,7 +45,7 @@ private class TransactionlThreadInterrupt : ThreadJob
internal volatile bool allowInterrupt = false;
internal volatile bool transactionInProgress = false;
-
+
public override void Run()
{
@@ -130,7 +130,7 @@ private void TransactionalMethod()
Console.WriteLine("sleeping...");
}
- // Use SpinWait instead of Sleep to demonstrate the
+ // Use SpinWait instead of Sleep to demonstrate the
// effect of calling Interrupt on a running thread.
Thread.SpinWait(1000000);
@@ -143,7 +143,7 @@ private void TransactionalMethod()
Console.WriteLine("sleeping...");
}
- // Use SpinWait instead of Sleep to demonstrate the
+ // Use SpinWait instead of Sleep to demonstrate the
// effect of calling Interrupt on a running thread.
Thread.SpinWait(1000000);
}
@@ -167,7 +167,7 @@ private void TransactionalMethod()
Console.WriteLine("sleeping...");
}
- // Use SpinWait instead of Sleep to demonstrate the
+ // Use SpinWait instead of Sleep to demonstrate the
// effect of calling Interrupt on a running thread.
Thread.SpinWait(1000000);
}
@@ -240,8 +240,8 @@ private string GetToStringFrom(Exception exception)
[Ignore("Lucene.NET does not support Thread.Interrupt(). See https://github.com/apache/lucenenet/issues/526.")]
public virtual void TestThreadInterrupt()
{
- TransactionlThreadInterrupt t = new TransactionlThreadInterrupt();
- t.IsBackground = (true);
+ TransactionalThreadInterrupt t = new TransactionalThreadInterrupt();
+ t.IsBackground = true;
t.Start();
// issue 300 interrupts to child thread
@@ -274,12 +274,12 @@ public virtual void TestThreadInterrupt()
[Ignore("Lucene.NET does not support Thread.Interrupt(). See https://github.com/apache/lucenenet/issues/526.")]
public virtual void TestTwoThreadsInterrupt()
{
- TransactionlThreadInterrupt t1 = new TransactionlThreadInterrupt();
- t1.IsBackground = (true);
+ TransactionalThreadInterrupt t1 = new TransactionalThreadInterrupt();
+ t1.IsBackground = true;
t1.Start();
- TransactionlThreadInterrupt t2 = new TransactionlThreadInterrupt();
- t2.IsBackground = (true);
+ TransactionalThreadInterrupt t2 = new TransactionalThreadInterrupt();
+ t2.IsBackground = true;
t2.Start();
// issue 300 interrupts to child thread
@@ -290,7 +290,7 @@ public virtual void TestTwoThreadsInterrupt()
// TODO: would be nice to also sometimes interrupt the
// CMS merge threads too ...
Thread.Sleep(10);
- TransactionlThreadInterrupt t = Random.NextBoolean() ? t1 : t2;
+ TransactionalThreadInterrupt t = Random.NextBoolean() ? t1 : t2;
if (t.allowInterrupt)
{
i++;
@@ -312,4 +312,4 @@ public virtual void TestTwoThreadsInterrupt()
Assert.IsFalse(t2.transactionInProgress);
}
}
-}
\ No newline at end of file
+}
diff --git a/src/Lucene.Net.Tests/Util/TestNumericUtils.cs b/src/Lucene.Net.Tests/Util/TestNumericUtils.cs
index 3c2b3d4b4e..6cec59004a 100644
--- a/src/Lucene.Net.Tests/Util/TestNumericUtils.cs
+++ b/src/Lucene.Net.Tests/Util/TestNumericUtils.cs
@@ -1,4 +1,5 @@
-using J2N.Numerics;
+#if FEATURE_UTIL_TESTS
+using J2N.Numerics;
using J2N.Text;
using Lucene.Net.Support;
using NUnit.Framework;
@@ -6,6 +7,7 @@
using System;
using System.Collections.Generic;
using Assert = Lucene.Net.TestFramework.Assert;
+#endif
namespace Lucene.Net.Util
{
diff --git a/src/Lucene.Net/Support/ConcurrentDictionaryWrapper.cs b/src/Lucene.Net/Support/ConcurrentDictionaryWrapper.cs
deleted file mode 100644
index 278c4aaf19..0000000000
--- a/src/Lucene.Net/Support/ConcurrentDictionaryWrapper.cs
+++ /dev/null
@@ -1,240 +0,0 @@
-using J2N.Collections.Generic.Extensions;
-using System;
-using System.Collections.Generic;
-using System.Threading;
-
-namespace Lucene.Net.Support
-{
- /*
- * Licensed to the Apache Software Foundation (ASF) under one or more
- * contributor license agreements. See the NOTICE file distributed with
- * this work for additional information regarding copyright ownership.
- * The ASF licenses this file to You under the Apache License, Version 2.0
- * (the "License"); you may not use this file except in compliance with
- * the License. You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
- internal class ConcurrentDictionaryWrapper<TKey, TValue> : IDictionary<TKey, TValue>
- {
- private readonly ReaderWriterLockSlim _lock = new ReaderWriterLockSlim(LockRecursionPolicy.SupportsRecursion);
- private readonly IDictionary<TKey, TValue> _dict;
-
- public ConcurrentDictionaryWrapper(IDictionary<TKey, TValue> wrapped)
- {
- this._dict = wrapped;
- }
-
- public void Add(TKey key, TValue value)
- {
- _lock.EnterWriteLock();
- try
- {
- _dict.Add(key, value);
- }
- finally
- {
- _lock.ExitWriteLock();
- }
- }
-
- public bool ContainsKey(TKey key)
- {
- _lock.EnterReadLock();
- try
- {
- return _dict.ContainsKey(key);
- }
- finally
- {
- _lock.ExitReadLock();
- }
- }
-
- public ICollection<TKey> Keys
- {
- get
- {
- _lock.EnterReadLock();
- try
- {
- return _dict.Keys.AsReadOnly();
- }
- finally
- {
- _lock.ExitReadLock();
- }
- }
- }
-
- public bool Remove(TKey key)
- {
- _lock.EnterWriteLock();
- try
- {
- return _dict.Remove(key);
- }
- finally
- {
- _lock.ExitWriteLock();
- }
- }
-
- public bool TryGetValue(TKey key, out TValue value)
- {
- _lock.EnterReadLock();
- try
- {
- return _dict.TryGetValue(key, out value);
- }
- finally
- {
- _lock.ExitReadLock();
- }
- }
-
- public ICollection<TValue> Values
- {
- get
- {
- _lock.EnterReadLock();
- try
- {
- return _dict.Values.AsReadOnly();
- }
- finally
- {
- _lock.ExitReadLock();
- }
- }
- }
-
- public TValue this[TKey key]
- {
- get
- {
- _lock.EnterReadLock();
- try
- {
- return _dict.TryGetValue(key, out TValue result) ? result : default;
- }
- finally
- {
- _lock.ExitReadLock();
- }
- }
- set
- {
- _lock.EnterWriteLock();
- try
- {
- _dict[key] = value;
- }
- finally
- {
- _lock.ExitWriteLock();
- }
- }
- }
-
- public void Add(KeyValuePair<TKey, TValue> item)
- {
- _lock.EnterWriteLock();
- try
- {
- _dict.Add(item);
- }
- finally
- {
- _lock.ExitWriteLock();
- }
- }
-
- public void Clear()
- {
- _lock.EnterWriteLock();
- try
- {
- _dict.Clear();
- }
- finally
- {
- _lock.ExitWriteLock();
- }
- }
-
- public bool Contains(KeyValuePair<TKey, TValue> item)
- {
- _lock.EnterReadLock();
- try
- {
- return _dict.Contains(item);
- }
- finally
- {
- _lock.ExitReadLock();
- }
- }
-
- public void CopyTo(KeyValuePair<TKey, TValue>[] array, int arrayIndex)
- {
- _lock.EnterReadLock();
- try
- {
- _dict.CopyTo(array, arrayIndex);
- }
- finally
- {
- _lock.ExitReadLock();
- }
- }
-
- public int Count
- {
- get
- {
- _lock.EnterReadLock();
- try
- {
- return _dict.Count;
- }
- finally
- {
- _lock.ExitReadLock();
- }
- }
- }
-
- public bool IsReadOnly => _dict.IsReadOnly;
-
- public bool Remove(KeyValuePair<TKey, TValue> item)
- {
- _lock.EnterWriteLock();
- try
- {
- return _dict.Remove(item);
- }
- finally
- {
- _lock.ExitWriteLock();
- }
- }
-
- public IEnumerator<KeyValuePair<TKey, TValue>> GetEnumerator()
- {
- throw new NotSupportedException();
- }
-
- System.Collections.IEnumerator System.Collections.IEnumerable.GetEnumerator()
- {
- throw new NotSupportedException();
- }
- }
-}
\ No newline at end of file
diff --git a/src/Lucene.Net/Support/DictionaryExtensions.cs b/src/Lucene.Net/Support/DictionaryExtensions.cs
index e65fcd4f8b..84cbf33599 100644
--- a/src/Lucene.Net/Support/DictionaryExtensions.cs
+++ b/src/Lucene.Net/Support/DictionaryExtensions.cs
@@ -88,26 +88,5 @@ public static TValue Put(this IDictionary dictionary
dictionary[key] = value;
return oldValue;
}
-
- /// <summary>
- /// Returns a concurrent wrapper for the current <see cref="IDictionary{TKey, TValue}"/>.
- /// </summary>
- /// <typeparam name="TKey">The type of keys in the dictionary.</typeparam>
- /// <typeparam name="TValue">The type of values in the dictionary.</typeparam>
- /// <param name="dictionary">The collection to make concurrent (thread-safe).</param>
- /// <returns>An object that acts as a read-only wrapper around the current <see cref="IDictionary{TKey, TValue}"/>.</returns>
- /// <exception cref="ArgumentNullException"><paramref name="dictionary"/> is null.</exception>
- /// <remarks>
- /// To synchronize any modifications to the object, expose it only through this wrapper.
- /// <para/>
- /// The set returned uses simple locking and may not be the most performant solution, but it provides a quick
- /// way to make any set thread-safe.
- /// <para/>
- /// This method is an O(1) operation.
- /// </remarks>
- internal static IDictionary<TKey, TValue> AsConcurrent<TKey, TValue>(this IDictionary<TKey, TValue> dictionary)
- {
- return new ConcurrentDictionaryWrapper<TKey, TValue>(dictionary);
- }
}
-}
\ No newline at end of file
+}