From 096d8121f01a15625e409b7499ace034d8a2910e Mon Sep 17 00:00:00 2001 From: Paul Irwin Date: Sat, 2 Nov 2024 13:02:47 -0600 Subject: [PATCH] WIP Test review E-I through TestBinaryDocValuesUpdates, #259 --- .../Index/Test2BBinaryDocValues.cs | 31 +-- src/Lucene.Net.Tests/Index/Test2BDocs.cs | 4 +- src/Lucene.Net.Tests/Index/Test2BPositions.cs | 11 +- src/Lucene.Net.Tests/Index/Test2BPostings.cs | 21 +- .../Index/Test2BPostingsBytes.cs | 31 +-- .../Index/Test2BSortedDocValues.cs | 29 +-- src/Lucene.Net.Tests/Index/Test2BTerms.cs | 31 +-- .../Index/Test4GBStoredFields.cs | 16 +- src/Lucene.Net.Tests/Index/TestAddIndexes.cs | 214 +++++++++++++----- .../Index/TestAllFilesHaveChecksumFooter.cs | 5 +- .../Index/TestAllFilesHaveCodecHeader.cs | 5 +- .../Index/TestAtomicUpdate.cs | 17 +- .../Index/TestBackwardsCompatibility.cs | 55 +++-- .../Index/TestBackwardsCompatibility3x.cs | 73 +++--- .../Index/TestBagOfPositions.cs | 15 +- .../Index/TestBagOfPostings.cs | 12 +- .../Index/TestBinaryDocValuesUpdates.cs | 79 ++----- 17 files changed, 366 insertions(+), 283 deletions(-) diff --git a/src/Lucene.Net.Tests/Index/Test2BBinaryDocValues.cs b/src/Lucene.Net.Tests/Index/Test2BBinaryDocValues.cs index f400c47841..a212007f2a 100644 --- a/src/Lucene.Net.Tests/Index/Test2BBinaryDocValues.cs +++ b/src/Lucene.Net.Tests/Index/Test2BBinaryDocValues.cs @@ -49,13 +49,14 @@ public virtual void TestFixedBinary() { ((MockDirectoryWrapper)dir).Throttling = Throttling.NEVER; } - var config = new IndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer(Random)) - .SetMaxBufferedDocs(IndexWriterConfig.DISABLE_AUTO_FLUSH) - .SetRAMBufferSizeMB(256.0) - .SetMergeScheduler(new ConcurrentMergeScheduler()) - .SetMergePolicy(NewLogMergePolicy(false, 10)) - .SetOpenMode(OpenMode.CREATE); - IndexWriter w = new IndexWriter(dir, config); + + IndexWriter w = new IndexWriter(dir, + new IndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer(Random)) + .SetMaxBufferedDocs(IndexWriterConfig.DISABLE_AUTO_FLUSH) + .SetRAMBufferSizeMB(256.0) + .SetMergeScheduler(new ConcurrentMergeScheduler()) + .SetMergePolicy(NewLogMergePolicy(false, 10)) + .SetOpenMode(OpenMode.CREATE)); Document doc = new Document(); var bytes = new byte[4]; @@ -116,13 +117,13 @@ public virtual void TestVariableBinary() ((MockDirectoryWrapper)dir).Throttling = Throttling.NEVER; } - var config = new IndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer(Random)) - .SetMaxBufferedDocs(IndexWriterConfig.DISABLE_AUTO_FLUSH) - .SetRAMBufferSizeMB(256.0) - .SetMergeScheduler(new ConcurrentMergeScheduler()) - .SetMergePolicy(NewLogMergePolicy(false, 10)) - .SetOpenMode(OpenMode.CREATE); - IndexWriter w = new IndexWriter(dir, config); + IndexWriter w = new IndexWriter(dir, + new IndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer(Random)) + .SetMaxBufferedDocs(IndexWriterConfig.DISABLE_AUTO_FLUSH) + .SetRAMBufferSizeMB(256.0) + .SetMergeScheduler(new ConcurrentMergeScheduler()) + .SetMergePolicy(NewLogMergePolicy(false, 10)) + .SetOpenMode(OpenMode.CREATE)); Document doc = new Document(); var bytes = new byte[4]; @@ -172,4 +173,4 @@ public virtual void TestVariableBinary() dir.Dispose(); } } -} \ No newline at end of file +} diff --git a/src/Lucene.Net.Tests/Index/Test2BDocs.cs b/src/Lucene.Net.Tests/Index/Test2BDocs.cs index 8b9d224fca..aa689a6022 100644 --- a/src/Lucene.Net.Tests/Index/Test2BDocs.cs +++ b/src/Lucene.Net.Tests/Index/Test2BDocs.cs @@ -63,7 +63,7 @@ public virtual void TestOverflow() Arrays.Fill(subReaders, ir); try { - new MultiReader(subReaders); 
+ _ = new MultiReader(subReaders); // LUCENENET-specific: discard result
 Assert.Fail();
 }
 catch (Exception expected) when (expected.IsIllegalArgumentException())
@@ -97,4 +97,4 @@ public virtual void TestExactlyAtLimit()
 dir2.Dispose();
 }
 }
-}
\ No newline at end of file
+}
diff --git a/src/Lucene.Net.Tests/Index/Test2BPositions.cs b/src/Lucene.Net.Tests/Index/Test2BPositions.cs
index aeed283dd3..fadad9dffb 100644
--- a/src/Lucene.Net.Tests/Index/Test2BPositions.cs
+++ b/src/Lucene.Net.Tests/Index/Test2BPositions.cs
@@ -42,8 +42,8 @@ namespace Lucene.Net.Index
 [SuppressCodecs("SimpleText", "Memory", "Direct")]
 [TestFixture]
 public class Test2BPositions : LuceneTestCase
- // uses lots of space and takes a few minutes
 {
+ // uses lots of space and takes a few minutes
 [Ignore("Very slow. Enable manually by removing Ignore.")]
 [Test]
 public virtual void Test()
@@ -75,7 +75,7 @@ public virtual void Test()
 Field field = new Field("field", new MyTokenStream(), ft);
 doc.Add(field);
- int numDocs = (int.MaxValue / 26) + 1;
+ const int numDocs = (int.MaxValue / 26) + 1;
 for (int i = 0; i < numDocs; i++)
 {
 w.AddDocument(doc);
@@ -91,10 +91,11 @@ public virtual void Test()
 public sealed class MyTokenStream : TokenStream
 {
- internal readonly ICharTermAttribute termAtt;
- internal readonly IPositionIncrementAttribute posIncAtt;
+ private readonly ICharTermAttribute termAtt;
+ private readonly IPositionIncrementAttribute posIncAtt;
 internal int index;
+ // LUCENENET-specific: must call AddAttribute from ctor in .NET
 public MyTokenStream()
 {
 termAtt = AddAttribute<ICharTermAttribute>();
@@ -121,4 +122,4 @@ public override void Reset()
 }
 }
 }
-}
\ No newline at end of file
+}
diff --git a/src/Lucene.Net.Tests/Index/Test2BPostings.cs b/src/Lucene.Net.Tests/Index/Test2BPostings.cs
index 19640a9e3b..a192373560 100644
--- a/src/Lucene.Net.Tests/Index/Test2BPostings.cs
+++ b/src/Lucene.Net.Tests/Index/Test2BPostings.cs
@@ -54,14 +54,14 @@ public virtual void Test()
 ((MockDirectoryWrapper)dir).Throttling = Throttling.NEVER;
 }
- var config = new IndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer(Random))
- .SetMaxBufferedDocs(IndexWriterConfig.DISABLE_AUTO_FLUSH)
- .SetRAMBufferSizeMB(256.0)
- .SetMergeScheduler(new ConcurrentMergeScheduler())
- .SetMergePolicy(NewLogMergePolicy(false, 10))
- .SetOpenMode(OpenMode.CREATE);
+ var iwc = new IndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer(Random))
+ .SetMaxBufferedDocs(IndexWriterConfig.DISABLE_AUTO_FLUSH)
+ .SetRAMBufferSizeMB(256.0)
+ .SetMergeScheduler(new ConcurrentMergeScheduler())
+ .SetMergePolicy(NewLogMergePolicy(false, 10))
+ .SetOpenMode(OpenMode.CREATE);
- IndexWriter w = new IndexWriter(dir, config);
+ IndexWriter w = new IndexWriter(dir, iwc);
 MergePolicy mp = w.Config.MergePolicy;
 if (mp is LogByteSizeMergePolicy)
@@ -77,7 +77,7 @@ public virtual void Test()
 Field field = new Field("field", new MyTokenStream(), ft);
 doc.Add(field);
- int numDocs = (int.MaxValue / 26) + 1;
+ const int numDocs = (int.MaxValue / 26) + 1;
 for (int i = 0; i < numDocs; i++)
 {
 w.AddDocument(doc);
@@ -93,9 +93,10 @@ public virtual void Test()
 public sealed class MyTokenStream : TokenStream
 {
- internal readonly ICharTermAttribute termAtt;
+ private readonly ICharTermAttribute termAtt;
 internal int index;
+ // LUCENENET-specific: must call AddAttribute from ctor in .NET
 public MyTokenStream()
 {
 termAtt = AddAttribute<ICharTermAttribute>();
@@ -119,4 +120,4 @@ public override void Reset()
 }
 }
 }
-}
\ No newline at end of file
+}
diff --git a/src/Lucene.Net.Tests/Index/Test2BPostingsBytes.cs
b/src/Lucene.Net.Tests/Index/Test2BPostingsBytes.cs
index b79695a287..d525138939 100644
--- a/src/Lucene.Net.Tests/Index/Test2BPostingsBytes.cs
+++ b/src/Lucene.Net.Tests/Index/Test2BPostingsBytes.cs
@@ -41,13 +41,13 @@ namespace Lucene.Net.Index
 /// so you get > Integer.MAX_VALUE postings data for the term
 /// @lucene.experimental
 ///
+ // disable Lucene3x: older lucene formats always had this issue.
 [SuppressCodecs("SimpleText", "Memory", "Direct", "Lucene3x")]
 [TestFixture]
 public class Test2BPostingsBytes : LuceneTestCase
- // disable Lucene3x: older lucene formats always had this issue.
- // @Absurd @Ignore takes ~20GB-30GB of space and 10 minutes.
- // with some codecs needs more heap space as well.
 {
+ // @Absurd @Ignore takes ~20GB-30GB of space and 10 minutes.
+ // with some codecs needs more heap space as well.
 [Ignore("Very slow. Enable manually by removing Ignore.")]
 [Test]
 public virtual void Test()
@@ -58,13 +58,13 @@ public virtual void Test()
 ((MockDirectoryWrapper)dir).Throttling = Throttling.NEVER;
 }
- var config = new IndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer(Random))
- .SetMaxBufferedDocs(IndexWriterConfig.DISABLE_AUTO_FLUSH)
- .SetRAMBufferSizeMB(256.0)
- .SetMergeScheduler(new ConcurrentMergeScheduler())
- .SetMergePolicy(NewLogMergePolicy(false, 10))
- .SetOpenMode(OpenMode.CREATE);
- IndexWriter w = new IndexWriter(dir, config);
+ IndexWriter w = new IndexWriter(dir,
+ new IndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer(Random))
+ .SetMaxBufferedDocs(IndexWriterConfig.DISABLE_AUTO_FLUSH)
+ .SetRAMBufferSizeMB(256.0)
+ .SetMergeScheduler(new ConcurrentMergeScheduler())
+ .SetMergePolicy(NewLogMergePolicy(false, 10))
+ .SetOpenMode(OpenMode.CREATE));
 MergePolicy mp = w.Config.MergePolicy;
 if (mp is LogByteSizeMergePolicy)
@@ -106,7 +106,8 @@ public virtual void Test()
 {
 ((MockDirectoryWrapper)dir2).Throttling = Throttling.NEVER;
 }
- IndexWriter w2 = new IndexWriter(dir2, new IndexWriterConfig(TEST_VERSION_CURRENT, null));
+ IndexWriter w2 = new IndexWriter(dir2,
+ new IndexWriterConfig(TEST_VERSION_CURRENT, null));
 w2.AddIndexes(mr);
 w2.ForceMerge(1);
 w2.Dispose();
@@ -121,7 +122,8 @@ public virtual void Test()
 {
 ((MockDirectoryWrapper)dir3).Throttling = Throttling.NEVER;
 }
- IndexWriter w3 = new IndexWriter(dir3, new IndexWriterConfig(TEST_VERSION_CURRENT, null));
+ IndexWriter w3 = new IndexWriter(dir3,
+ new IndexWriterConfig(TEST_VERSION_CURRENT, null));
 w3.AddIndexes(mr);
 w3.ForceMerge(1);
 w3.Dispose();
@@ -134,10 +136,11 @@ public virtual void Test()
 public sealed class MyTokenStream : TokenStream
 {
- internal readonly ICharTermAttribute termAtt;
+ private readonly ICharTermAttribute termAtt;
 internal int index;
 internal int n;
+ // LUCENENET-specific: must call AddAttribute from ctor in .NET
 public MyTokenStream()
 {
 termAtt = AddAttribute<ICharTermAttribute>();
@@ -162,4 +165,4 @@ public override void Reset()
 }
 }
 }
-}
\ No newline at end of file
+}
diff --git a/src/Lucene.Net.Tests/Index/Test2BSortedDocValues.cs b/src/Lucene.Net.Tests/Index/Test2BSortedDocValues.cs
index 5af76e5c36..70cd900834 100644
--- a/src/Lucene.Net.Tests/Index/Test2BSortedDocValues.cs
+++ b/src/Lucene.Net.Tests/Index/Test2BSortedDocValues.cs
@@ -48,12 +48,13 @@ public virtual void TestFixedSorted()
 ((MockDirectoryWrapper)dir).Throttling = Throttling.NEVER;
 }
- IndexWriter w = new IndexWriter(dir, new IndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer(Random))
- .SetMaxBufferedDocs(IndexWriterConfig.DISABLE_AUTO_FLUSH)
- .SetRAMBufferSizeMB(256.0)
- .SetMergeScheduler(new
ConcurrentMergeScheduler())
- .SetMergePolicy(NewLogMergePolicy(false, 10))
- .SetOpenMode(OpenMode.CREATE));
+ IndexWriter w = new IndexWriter(dir,
+ new IndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer(Random))
+ .SetMaxBufferedDocs(IndexWriterConfig.DISABLE_AUTO_FLUSH)
+ .SetRAMBufferSizeMB(256.0)
+ .SetMergeScheduler(new ConcurrentMergeScheduler())
+ .SetMergePolicy(NewLogMergePolicy(false, 10))
+ .SetOpenMode(OpenMode.CREATE));
 Document doc = new Document();
 var bytes = new byte[2];
@@ -110,13 +111,13 @@ public virtual void Test2BOrds()
 ((MockDirectoryWrapper)dir).Throttling = Throttling.NEVER;
 }
- var config = new IndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer(Random))
- .SetMaxBufferedDocs(IndexWriterConfig.DISABLE_AUTO_FLUSH)
- .SetRAMBufferSizeMB(256.0)
- .SetMergeScheduler(new ConcurrentMergeScheduler())
- .SetMergePolicy(NewLogMergePolicy(false, 10))
- .SetOpenMode(OpenMode.CREATE);
- IndexWriter w = new IndexWriter(dir, config);
+ IndexWriter w = new IndexWriter(dir,
+ new IndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer(Random))
+ .SetMaxBufferedDocs(IndexWriterConfig.DISABLE_AUTO_FLUSH)
+ .SetRAMBufferSizeMB(256.0)
+ .SetMergeScheduler(new ConcurrentMergeScheduler())
+ .SetMergePolicy(NewLogMergePolicy(false, 10))
+ .SetOpenMode(OpenMode.CREATE));
 Document doc = new Document();
 var bytes = new byte[4];
@@ -169,4 +170,4 @@ public virtual void Test2BOrds()
 // TODO: variable
 }
-}
\ No newline at end of file
+}
diff --git a/src/Lucene.Net.Tests/Index/Test2BTerms.cs b/src/Lucene.Net.Tests/Index/Test2BTerms.cs
index 698ad79fe6..e55c2735fd 100644
--- a/src/Lucene.Net.Tests/Index/Test2BTerms.cs
+++ b/src/Lucene.Net.Tests/Index/Test2BTerms.cs
@@ -59,12 +59,12 @@ public class Test2BTerms : LuceneTestCase
 private sealed class MyTokenStream : TokenStream
 {
- internal readonly int tokensPerDoc;
- internal int tokenCount;
 public readonly IList<BytesRef> savedTerms = new JCG.List<BytesRef>();
- internal int nextSave;
- internal long termCounter;
- internal readonly Random random;
+ private readonly int tokensPerDoc;
+ private int tokenCount;
+ private int nextSave;
+ private long termCounter;
+ private readonly Random random;
 public MyTokenStream(Random random, int tokensPerDoc)
 : base(new MyAttributeFactory(AttributeFactory.DEFAULT_ATTRIBUTE_FACTORY))
@@ -140,7 +140,7 @@ public override object Clone()
 private sealed class MyAttributeFactory : AttributeFactory
 {
- internal readonly AttributeFactory @delegate;
+ private readonly AttributeFactory @delegate;
 public MyAttributeFactory(AttributeFactory @delegate)
 {
@@ -172,7 +172,7 @@ public virtual void Test2BTerms_Mem()
 throw RuntimeException.Create("this test cannot run with PreFlex codec");
 }
 Console.WriteLine("Starting Test2B");
- long TERM_COUNT = ((long)int.MaxValue) + 100000000;
+ const long TERM_COUNT = ((long)int.MaxValue) + 100000000;
 int TERMS_PER_DOC = TestUtil.NextInt32(Random, 100000, 1000000);
@@ -188,12 +188,13 @@ public virtual void Test2BTerms_Mem()
 if (true)
 {
- IndexWriter w = new IndexWriter(dir, new IndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer(Random))
- .SetMaxBufferedDocs(IndexWriterConfig.DISABLE_AUTO_FLUSH)
- .SetRAMBufferSizeMB(256.0)
- .SetMergeScheduler(new ConcurrentMergeScheduler())
- .SetMergePolicy(NewLogMergePolicy(false, 10))
- .SetOpenMode(OpenMode.CREATE));
+ IndexWriter w = new IndexWriter(dir,
+ new IndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer(Random))
+ .SetMaxBufferedDocs(IndexWriterConfig.DISABLE_AUTO_FLUSH)
+ .SetRAMBufferSizeMB(256.0)
+ .SetMergeScheduler(new
ConcurrentMergeScheduler())
+ .SetMergePolicy(NewLogMergePolicy(false, 10))
+ .SetOpenMode(OpenMode.CREATE));
 MergePolicy mp = w.Config.MergePolicy;
 if (mp is LogByteSizeMergePolicy)
@@ -202,7 +203,7 @@ public virtual void Test2BTerms_Mem()
 ((LogByteSizeMergePolicy)mp).MaxMergeMB = 1024 * 1024 * 1024;
 }
- Documents.Document doc = new Documents.Document();
+ Document doc = new Document();
 MyTokenStream ts = new MyTokenStream(Random, TERMS_PER_DOC);
 FieldType customType = new FieldType(TextField.TYPE_NOT_STORED);
@@ -311,4 +312,4 @@ private void TestSavedTerms(IndexReader r, IList<BytesRef> terms)
 Assert.IsFalse(failed);
 }
 }
-}
\ No newline at end of file
+}
diff --git a/src/Lucene.Net.Tests/Index/Test4GBStoredFields.cs b/src/Lucene.Net.Tests/Index/Test4GBStoredFields.cs
index f4702c9e45..7a4ae203f8 100644
--- a/src/Lucene.Net.Tests/Index/Test4GBStoredFields.cs
+++ b/src/Lucene.Net.Tests/Index/Test4GBStoredFields.cs
@@ -55,13 +55,13 @@ public virtual void Test()
 MockDirectoryWrapper dir = new MockDirectoryWrapper(Random, new MMapDirectory(CreateTempDir("4GBStoredFields")));
 dir.Throttling = Throttling.NEVER;
- var config = new IndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer(Random))
- .SetMaxBufferedDocs(IndexWriterConfig.DISABLE_AUTO_FLUSH)
- .SetRAMBufferSizeMB(256.0)
- .SetMergeScheduler(new ConcurrentMergeScheduler())
- .SetMergePolicy(NewLogMergePolicy(false, 10))
- .SetOpenMode(OpenMode.CREATE);
- IndexWriter w = new IndexWriter(dir, config);
+ IndexWriter w = new IndexWriter(dir,
+ new IndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer(Random))
+ .SetMaxBufferedDocs(IndexWriterConfig.DISABLE_AUTO_FLUSH)
+ .SetRAMBufferSizeMB(256.0)
+ .SetMergeScheduler(new ConcurrentMergeScheduler())
+ .SetMergePolicy(NewLogMergePolicy(false, 10))
+ .SetOpenMode(OpenMode.CREATE));
 MergePolicy mp = w.Config.MergePolicy;
 if (mp is LogByteSizeMergePolicy)
@@ -129,4 +129,4 @@ public virtual void Test()
 dir.Dispose();
 }
 }
-}
\ No newline at end of file
+}
diff --git a/src/Lucene.Net.Tests/Index/TestAddIndexes.cs b/src/Lucene.Net.Tests/Index/TestAddIndexes.cs
index 0e3bd371b4..860a2b3798 100644
--- a/src/Lucene.Net.Tests/Index/TestAddIndexes.cs
+++ b/src/Lucene.Net.Tests/Index/TestAddIndexes.cs
@@ -68,14 +68,21 @@ public virtual void TestSimpleCase()
 IndexWriter writer = null;
- writer = NewWriter(dir, NewIndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer(Random)).SetOpenMode(OpenMode.CREATE));
+ writer = NewWriter(dir, NewIndexWriterConfig(TEST_VERSION_CURRENT,
+ new MockAnalyzer(Random))
+ .SetOpenMode(OpenMode.CREATE));
 // add 100 documents
 AddDocs(writer, 100);
 Assert.AreEqual(100, writer.MaxDoc);
 writer.Dispose();
 TestUtil.CheckIndex(dir);
- writer = NewWriter(aux, NewIndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer(Random)).SetOpenMode(OpenMode.CREATE).SetMergePolicy(NewLogMergePolicy(false)));
+ writer = NewWriter(
+ aux,
+ NewIndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer(Random))
+ .SetOpenMode(OpenMode.CREATE)
+ .SetMergePolicy(NewLogMergePolicy(false))
+ );
 // add 40 documents in separate files
 AddDocs(writer, 40);
 Assert.AreEqual(40, writer.MaxDoc);
 writer.Dispose();
@@ -295,11 +302,23 @@ public virtual void TestAddSelf()
 Assert.AreEqual(100, writer.MaxDoc);
 writer.Dispose();
- writer = NewWriter(aux, NewIndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer(Random)).SetOpenMode(OpenMode.CREATE).SetMaxBufferedDocs(1000).SetMergePolicy(NewLogMergePolicy(false)));
+ writer = NewWriter(
+ aux,
+ NewIndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer(Random))
+
.SetOpenMode(OpenMode.CREATE) + .SetMaxBufferedDocs(1000) + .SetMergePolicy(NewLogMergePolicy(false)) + ); // add 140 documents in separate files AddDocs(writer, 40); writer.Dispose(); - writer = NewWriter(aux, NewIndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer(Random)).SetOpenMode(OpenMode.CREATE).SetMaxBufferedDocs(1000).SetMergePolicy(NewLogMergePolicy(false))); + writer = NewWriter( + aux, + NewIndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer(Random)) + .SetOpenMode(OpenMode.CREATE) + .SetMaxBufferedDocs(1000) + .SetMergePolicy(NewLogMergePolicy(false)) + ); AddDocs(writer, 100); writer.Dispose(); @@ -335,7 +354,13 @@ public virtual void TestNoTailSegments() SetUpDirs(dir, aux); - IndexWriter writer = NewWriter(dir, NewIndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer(Random)).SetOpenMode(OpenMode.APPEND).SetMaxBufferedDocs(10).SetMergePolicy(NewLogMergePolicy(4))); + IndexWriter writer = NewWriter( + dir, + NewIndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer(Random)) + .SetOpenMode(OpenMode.APPEND) + .SetMaxBufferedDocs(10) + .SetMergePolicy(NewLogMergePolicy(4)) + ); AddDocs(writer, 10); writer.AddIndexes(aux); @@ -360,7 +385,12 @@ public virtual void TestNoCopySegments() SetUpDirs(dir, aux); - IndexWriter writer = NewWriter(dir, NewIndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer(Random)).SetOpenMode(OpenMode.APPEND).SetMaxBufferedDocs(9).SetMergePolicy(NewLogMergePolicy(4))); + IndexWriter writer = NewWriter( + dir, + NewIndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer(Random)) + .SetOpenMode(OpenMode.APPEND) + .SetMaxBufferedDocs(9) + .SetMergePolicy(NewLogMergePolicy(4))); AddDocs(writer, 2); writer.AddIndexes(aux); @@ -385,7 +415,13 @@ public virtual void TestNoMergeAfterCopy() SetUpDirs(dir, aux); - IndexWriter writer = NewWriter(dir, NewIndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer(Random)).SetOpenMode(OpenMode.APPEND).SetMaxBufferedDocs(10).SetMergePolicy(NewLogMergePolicy(4))); + IndexWriter writer = NewWriter( + dir, + NewIndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer(Random)) + .SetOpenMode(OpenMode.APPEND) + .SetMaxBufferedDocs(10) + .SetMergePolicy(NewLogMergePolicy(4)) + ); writer.AddIndexes(aux, new MockDirectoryWrapper(Random, new RAMDirectory(aux, NewIOContext(Random)))); Assert.AreEqual(1060, writer.MaxDoc); @@ -409,7 +445,8 @@ public virtual void TestMergeAfterCopy() SetUpDirs(dir, aux, true); - IndexWriterConfig dontMergeConfig = (new IndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer(Random))).SetMergePolicy(NoMergePolicy.COMPOUND_FILES); + IndexWriterConfig dontMergeConfig = new IndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer(Random)) + .SetMergePolicy(NoMergePolicy.COMPOUND_FILES); IndexWriter writer = new IndexWriter(aux, dontMergeConfig); for (int i = 0; i < 20; i++) { @@ -420,7 +457,13 @@ public virtual void TestMergeAfterCopy() Assert.AreEqual(10, reader.NumDocs); reader.Dispose(); - writer = NewWriter(dir, NewIndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer(Random)).SetOpenMode(OpenMode.APPEND).SetMaxBufferedDocs(4).SetMergePolicy(NewLogMergePolicy(4))); + writer = NewWriter( + dir, + NewIndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer(Random)) + .SetOpenMode(OpenMode.APPEND) + .SetMaxBufferedDocs(4) + .SetMergePolicy(NewLogMergePolicy(4)) + ); if (Verbose) { @@ -446,13 +489,20 @@ public virtual void TestMoreMerges() SetUpDirs(dir, aux, true); - IndexWriter writer = NewWriter(aux2, NewIndexWriterConfig(TEST_VERSION_CURRENT, new 
MockAnalyzer(Random)).SetOpenMode(OpenMode.CREATE).SetMaxBufferedDocs(100).SetMergePolicy(NewLogMergePolicy(10))); + IndexWriter writer = NewWriter( + aux2, + NewIndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer(Random)) + .SetOpenMode(OpenMode.CREATE) + .SetMaxBufferedDocs(100) + .SetMergePolicy(NewLogMergePolicy(10)) + ); writer.AddIndexes(aux); Assert.AreEqual(30, writer.MaxDoc); Assert.AreEqual(3, writer.SegmentCount); writer.Dispose(); - IndexWriterConfig dontMergeConfig = (new IndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer(Random))).SetMergePolicy(NoMergePolicy.COMPOUND_FILES); + IndexWriterConfig dontMergeConfig = new IndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer(Random)) + .SetMergePolicy(NoMergePolicy.COMPOUND_FILES); writer = new IndexWriter(aux, dontMergeConfig); for (int i = 0; i < 27; i++) { @@ -463,7 +513,8 @@ public virtual void TestMoreMerges() Assert.AreEqual(3, reader.NumDocs); reader.Dispose(); - dontMergeConfig = (new IndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer(Random))).SetMergePolicy(NoMergePolicy.COMPOUND_FILES); + dontMergeConfig = new IndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer(Random)) + .SetMergePolicy(NoMergePolicy.COMPOUND_FILES); writer = new IndexWriter(aux2, dontMergeConfig); for (int i = 0; i < 8; i++) { @@ -474,7 +525,13 @@ public virtual void TestMoreMerges() Assert.AreEqual(22, reader.NumDocs); reader.Dispose(); - writer = NewWriter(dir, NewIndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer(Random)).SetOpenMode(OpenMode.APPEND).SetMaxBufferedDocs(6).SetMergePolicy(NewLogMergePolicy(4))); + writer = NewWriter( + dir, + NewIndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer(Random)) + .SetOpenMode(OpenMode.APPEND) + .SetMaxBufferedDocs(6) + .SetMergePolicy(NewLogMergePolicy(4)) + ); writer.AddIndexes(aux, aux2); Assert.AreEqual(1040, writer.MaxDoc); @@ -485,14 +542,16 @@ public virtual void TestMoreMerges() aux2.Dispose(); } - private IndexWriter NewWriter(Directory dir, IndexWriterConfig conf) + // LUCENENET-specific: made static + private static IndexWriter NewWriter(Directory dir, IndexWriterConfig conf) { conf.SetMergePolicy(new LogDocMergePolicy()); IndexWriter writer = new IndexWriter(dir, conf); return writer; } - private void AddDocs(IndexWriter writer, int numDocs) + // LUCENENET-specific: made static + private static void AddDocs(IndexWriter writer, int numDocs) { for (int i = 0; i < numDocs; i++) { @@ -502,7 +561,8 @@ private void AddDocs(IndexWriter writer, int numDocs) } } - private void AddDocs2(IndexWriter writer, int numDocs) + // LUCENENET-specific: made static + private static void AddDocs2(IndexWriter writer, int numDocs) { for (int i = 0; i < numDocs; i++) { @@ -512,7 +572,8 @@ private void AddDocs2(IndexWriter writer, int numDocs) } } - private void VerifyNumDocs(Directory dir, int numDocs) + // LUCENENET-specific: made static + private static void VerifyNumDocs(Directory dir, int numDocs) { IndexReader reader = DirectoryReader.Open(dir); Assert.AreEqual(numDocs, reader.MaxDoc); @@ -520,7 +581,8 @@ private void VerifyNumDocs(Directory dir, int numDocs) reader.Dispose(); } - private void VerifyTermDocs(Directory dir, Term term, int numDocs) + // LUCENENET-specific: made static + private static void VerifyTermDocs(Directory dir, Term term, int numDocs) { IndexReader reader = DirectoryReader.Open(dir); DocsEnum docsEnum = TestUtil.Docs(Random, reader, term.Field, term.Bytes, null, null, DocsFlags.NONE); @@ -542,7 +604,7 @@ private void SetUpDirs(Directory dir, Directory aux, 
bool withID) { IndexWriter writer = null; - writer = NewWriter(dir, (IndexWriterConfig)NewIndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer(Random)).SetOpenMode(OpenMode.CREATE).SetMaxBufferedDocs(1000)); + writer = NewWriter(dir, NewIndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer(Random)).SetOpenMode(OpenMode.CREATE).SetMaxBufferedDocs(1000)); // add 1000 documents in 1 segment if (withID) { @@ -556,7 +618,13 @@ private void SetUpDirs(Directory dir, Directory aux, bool withID) Assert.AreEqual(1, writer.SegmentCount); writer.Dispose(); - writer = NewWriter(aux, NewIndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer(Random)).SetOpenMode(OpenMode.CREATE).SetMaxBufferedDocs(1000).SetMergePolicy(NewLogMergePolicy(false, 10))); + writer = NewWriter( + aux, + NewIndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer(Random)) + .SetOpenMode(OpenMode.CREATE) + .SetMaxBufferedDocs(1000) + .SetMergePolicy(NewLogMergePolicy(false, 10)) + ); // add 30 documents in 3 segments for (int i = 0; i < 3; i++) { @@ -569,7 +637,13 @@ private void SetUpDirs(Directory dir, Directory aux, bool withID) AddDocs(writer, 10); } writer.Dispose(); - writer = NewWriter(aux, NewIndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer(Random)).SetOpenMode(OpenMode.APPEND).SetMaxBufferedDocs(1000).SetMergePolicy(NewLogMergePolicy(false, 10))); + writer = NewWriter( + aux, + NewIndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer(Random)) + .SetOpenMode(OpenMode.APPEND) + .SetMaxBufferedDocs(1000) + .SetMergePolicy(NewLogMergePolicy(false, 10)) + ); } Assert.AreEqual(30, writer.MaxDoc); Assert.AreEqual(3, writer.SegmentCount); @@ -584,7 +658,9 @@ public virtual void TestHangOnClose() LogByteSizeMergePolicy lmp = new LogByteSizeMergePolicy(); lmp.NoCFSRatio = 0.0; lmp.MergeFactor = 100; - IndexWriter writer = new IndexWriter(dir, NewIndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer(Random)).SetMaxBufferedDocs(5).SetMergePolicy(lmp)); + IndexWriter writer = new IndexWriter(dir, + NewIndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer(Random)) + .SetMaxBufferedDocs(5).SetMergePolicy(lmp)); Document doc = new Document(); FieldType customType = new FieldType(TextField.TYPE_STORED); @@ -615,7 +691,9 @@ public virtual void TestHangOnClose() lmp.MinMergeMB = 0.0001; lmp.NoCFSRatio = 0.0; lmp.MergeFactor = 4; - writer = new IndexWriter(dir2, NewIndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer(Random)).SetMergeScheduler(new SerialMergeScheduler()).SetMergePolicy(lmp)); + writer = new IndexWriter(dir2, + NewIndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer(Random)) + .SetMergeScheduler(new SerialMergeScheduler()).SetMergePolicy(lmp)); writer.AddIndexes(dir); writer.Dispose(); dir.Dispose(); @@ -624,7 +702,8 @@ public virtual void TestHangOnClose() // TODO: these are also in TestIndexWriter... add a simple doc-writing method // like this to LuceneTestCase? 
- private void AddDoc(IndexWriter writer) + // LUCENENET specific - made static + private static void AddDoc(IndexWriter writer) { Document doc = new Document(); doc.Add(NewTextField("content", "aaa", Field.Store.NO)); @@ -643,14 +722,16 @@ private abstract class RunAddIndexesThreads internal const int NUM_THREADS = 5; internal readonly ThreadJob[] threads = new ThreadJob[NUM_THREADS]; - public RunAddIndexesThreads(TestAddIndexes outerInstance, int numCopy) + public RunAddIndexesThreads(int numCopy) { NUM_COPY = numCopy; dir = new MockDirectoryWrapper(Random, new RAMDirectory()); - IndexWriter writer = new IndexWriter(dir, (IndexWriterConfig)new IndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer(Random)).SetMaxBufferedDocs(2)); + IndexWriter writer = new IndexWriter(dir, + new IndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer(Random)) + .SetMaxBufferedDocs(2)); for (int i = 0; i < NUM_INIT_DOCS; i++) { - outerInstance.AddDoc(writer); + AddDoc(writer); } writer.Dispose(); @@ -665,7 +746,7 @@ public RunAddIndexesThreads(TestAddIndexes outerInstance, int numCopy) } } - internal virtual void LaunchThreads(int numIter) + internal void LaunchThreads(int numIter) { for (int i = 0; i < NUM_THREADS; i++) { @@ -718,7 +799,7 @@ public override void Run() } } - internal virtual void JoinThreads() + internal void JoinThreads() { for (int i = 0; i < NUM_THREADS; i++) { @@ -726,13 +807,13 @@ internal virtual void JoinThreads() } } - internal virtual void Close(bool doWait) + internal void Close(bool doWait) { didClose = true; writer2.Dispose(doWait); } - internal virtual void CloseDir() + internal void CloseDir() { for (int i = 0; i < NUM_COPY; i++) { @@ -748,8 +829,8 @@ internal virtual void CloseDir() private class CommitAndAddIndexes : RunAddIndexesThreads { - public CommitAndAddIndexes(TestAddIndexes outerInstance, int numCopy) - : base(outerInstance, numCopy) + public CommitAndAddIndexes(int numCopy) + : base(numCopy) { } @@ -824,7 +905,7 @@ public virtual void TestAddIndexesWithThreads() { int NUM_ITER = TestNightly ? 
15 : 5; const int NUM_COPY = 3; - CommitAndAddIndexes c = new CommitAndAddIndexes(this, NUM_COPY); + CommitAndAddIndexes c = new CommitAndAddIndexes(NUM_COPY); c.LaunchThreads(NUM_ITER); for (int i = 0; i < 100; i++) @@ -851,8 +932,8 @@ public virtual void TestAddIndexesWithThreads() private class CommitAndAddIndexes2 : CommitAndAddIndexes { - public CommitAndAddIndexes2(TestAddIndexes outerInstance, int numCopy) - : base(outerInstance, numCopy) + public CommitAndAddIndexes2(int numCopy) + : base(numCopy) { } @@ -879,7 +960,7 @@ internal override void Handle(Exception t) public virtual void TestAddIndexesWithClose() { const int NUM_COPY = 3; - CommitAndAddIndexes2 c = new CommitAndAddIndexes2(this, NUM_COPY); + CommitAndAddIndexes2 c = new CommitAndAddIndexes2(NUM_COPY); //c.writer2.setInfoStream(System.out); c.LaunchThreads(-1); @@ -896,8 +977,8 @@ public virtual void TestAddIndexesWithClose() private class CommitAndAddIndexes3 : RunAddIndexesThreads { - public CommitAndAddIndexes3(TestAddIndexes outerInstance, int numCopy) - : base(outerInstance, numCopy) + public CommitAndAddIndexes3(int numCopy) + : base(numCopy) { } @@ -990,7 +1071,7 @@ internal override void Handle(Exception t) public virtual void TestAddIndexesWithCloseNoWait() { const int NUM_COPY = 50; - CommitAndAddIndexes3 c = new CommitAndAddIndexes3(this, NUM_COPY); + CommitAndAddIndexes3 c = new CommitAndAddIndexes3(NUM_COPY); c.LaunchThreads(-1); Thread.Sleep(TestUtil.NextInt32(Random, 10, 500)); @@ -1019,7 +1100,7 @@ public virtual void TestAddIndexesWithCloseNoWait() public virtual void TestAddIndexesWithRollback() { int NUM_COPY = TestNightly ? 50 : 5; - CommitAndAddIndexes3 c = new CommitAndAddIndexes3(this, NUM_COPY); + CommitAndAddIndexes3 c = new CommitAndAddIndexes3(NUM_COPY); c.LaunchThreads(-1); Thread.Sleep(TestUtil.NextInt32(Random, 10, 500)); @@ -1055,6 +1136,7 @@ public virtual void TestExistingDeletes() writer.Dispose(); } + // LUCENENET-specific: renamed to avoid conflict with variables above IndexWriterConfig conf_ = new IndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer(Random)); IndexWriter writer_ = new IndexWriter(dirs[0], conf_); @@ -1080,7 +1162,8 @@ public virtual void TestExistingDeletes() } // just like addDocs but with ID, starting from docStart - private void AddDocsWithID(IndexWriter writer, int numDocs, int docStart) + // LUCENENET-specific: made static + private static void AddDocsWithID(IndexWriter writer, int numDocs, int docStart) { for (int i = 0; i < numDocs; i++) { @@ -1102,7 +1185,8 @@ public virtual void TestSimpleCaseCustomCodec() Codec codec = new CustomPerFieldCodec(); IndexWriter writer = null; - writer = NewWriter(dir, NewIndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer(Random)).SetOpenMode(OpenMode.CREATE).SetCodec(codec)); + writer = NewWriter(dir, NewIndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer(Random)) + .SetOpenMode(OpenMode.CREATE).SetCodec(codec)); // add 100 documents AddDocsWithID(writer, 100, 0); Assert.AreEqual(100, writer.MaxDoc); @@ -1110,14 +1194,26 @@ public virtual void TestSimpleCaseCustomCodec() writer.Dispose(); TestUtil.CheckIndex(dir); - writer = NewWriter(aux, NewIndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer(Random)).SetOpenMode(OpenMode.CREATE).SetCodec(codec).SetMaxBufferedDocs(10).SetMergePolicy(NewLogMergePolicy(false))); + writer = NewWriter( + aux, + NewIndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer(Random)) + .SetOpenMode(OpenMode.CREATE) + .SetCodec(codec) + .SetMaxBufferedDocs(10) + 
.SetMergePolicy(NewLogMergePolicy(false)) + ); // add 40 documents in separate files AddDocs(writer, 40); Assert.AreEqual(40, writer.MaxDoc); writer.Commit(); writer.Dispose(); - writer = NewWriter(aux2, NewIndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer(Random)).SetOpenMode(OpenMode.CREATE).SetCodec(codec)); + writer = NewWriter( + aux2, + NewIndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer(Random)) + .SetOpenMode(OpenMode.CREATE) + .SetCodec(codec) + ); // add 40 documents in compound files AddDocs2(writer, 50); Assert.AreEqual(50, writer.MaxDoc); @@ -1125,7 +1221,12 @@ public virtual void TestSimpleCaseCustomCodec() writer.Dispose(); // test doc count before segments are merged - writer = NewWriter(dir, NewIndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer(Random)).SetOpenMode(OpenMode.APPEND).SetCodec(codec)); + writer = NewWriter( + dir, + NewIndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer(Random)) + .SetOpenMode(OpenMode.APPEND) + .SetCodec(codec) + ); Assert.AreEqual(100, writer.MaxDoc); writer.AddIndexes(aux, aux2); Assert.AreEqual(190, writer.MaxDoc); @@ -1138,16 +1239,9 @@ public virtual void TestSimpleCaseCustomCodec() private sealed class CustomPerFieldCodec : Lucene46Codec { - internal readonly PostingsFormat simpleTextFormat; - internal readonly PostingsFormat defaultFormat; - internal readonly PostingsFormat mockSepFormat; - - public CustomPerFieldCodec() - { - simpleTextFormat = Codecs.PostingsFormat.ForName("SimpleText"); - defaultFormat = Codecs.PostingsFormat.ForName("Lucene41"); - mockSepFormat = Codecs.PostingsFormat.ForName("MockSep"); - } + private readonly PostingsFormat simpleTextFormat = PostingsFormat.ForName("SimpleText"); + private readonly PostingsFormat defaultFormat = PostingsFormat.ForName("Lucene41"); + private readonly PostingsFormat mockSepFormat = PostingsFormat.ForName("MockSep"); public override PostingsFormat GetPostingsFormatForField(string field) { @@ -1186,7 +1280,7 @@ public virtual void TestNonCFSLeftovers() IndexReader[] readers = new IndexReader[] { DirectoryReader.Open(dirs[0]), DirectoryReader.Open(dirs[1]) }; Directory dir = new MockDirectoryWrapper(Random, new RAMDirectory()); - IndexWriterConfig conf = (new IndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer(Random))).SetMergePolicy(NewLogMergePolicy(true)); + IndexWriterConfig conf = new IndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer(Random)).SetMergePolicy(NewLogMergePolicy(true)); MergePolicy lmp = conf.MergePolicy; // Force creation of CFS: lmp.NoCFSRatio = 1.0; @@ -1380,9 +1474,7 @@ public virtual void TestLocksBlock() w2.AddIndexes(src); Assert.Fail("did not hit expected exception"); } -#pragma warning disable 168 - catch (LockObtainFailedException lofe) -#pragma warning restore 168 + catch (LockObtainFailedException /*lofe*/) { // expected } @@ -1390,4 +1482,4 @@ public virtual void TestLocksBlock() IOUtils.Dispose(w1, w2, src, dest); } } -} \ No newline at end of file +} diff --git a/src/Lucene.Net.Tests/Index/TestAllFilesHaveChecksumFooter.cs b/src/Lucene.Net.Tests/Index/TestAllFilesHaveChecksumFooter.cs index 9ac3c443ae..64a0e39fb3 100644 --- a/src/Lucene.Net.Tests/Index/TestAllFilesHaveChecksumFooter.cs +++ b/src/Lucene.Net.Tests/Index/TestAllFilesHaveChecksumFooter.cs @@ -75,7 +75,8 @@ public virtual void Test() dir.Dispose(); } - private void CheckHeaders(Directory dir) + // LUCENENET-specific: made static + private static void CheckHeaders(Directory dir) { foreach (string file in dir.ListAll()) { @@ -111,4 +112,4 @@ private void 
CheckHeaders(Directory dir) } } } -} \ No newline at end of file +} diff --git a/src/Lucene.Net.Tests/Index/TestAllFilesHaveCodecHeader.cs b/src/Lucene.Net.Tests/Index/TestAllFilesHaveCodecHeader.cs index 4c86228d8d..c965f96a28 100644 --- a/src/Lucene.Net.Tests/Index/TestAllFilesHaveCodecHeader.cs +++ b/src/Lucene.Net.Tests/Index/TestAllFilesHaveCodecHeader.cs @@ -76,7 +76,8 @@ public virtual void Test() dir.Dispose(); } - private void CheckHeaders(Directory dir) + // LUCENENET specific - made static + private static void CheckHeaders(Directory dir) { foreach (string file in dir.ListAll()) { @@ -117,4 +118,4 @@ private void CheckHeaders(Directory dir) } } } -} \ No newline at end of file +} diff --git a/src/Lucene.Net.Tests/Index/TestAtomicUpdate.cs b/src/Lucene.Net.Tests/Index/TestAtomicUpdate.cs index c45c620b75..48410fe188 100644 --- a/src/Lucene.Net.Tests/Index/TestAtomicUpdate.cs +++ b/src/Lucene.Net.Tests/Index/TestAtomicUpdate.cs @@ -37,8 +37,8 @@ private abstract class TimedThread : ThreadJob { internal volatile bool failed; internal int count; - internal static float RUN_TIME_MSEC = AtLeast(500); - internal TimedThread[] allThreads; + private static float RUN_TIME_MSEC = AtLeast(500); + private TimedThread[] allThreads; public abstract void DoWork(); @@ -73,7 +73,7 @@ public override void Run() } } - internal virtual bool AnyErrors() + private bool AnyErrors() { for (int i = 0; i < allThreads.Length; i++) { @@ -101,7 +101,7 @@ public override void DoWork() // Update all 100 docs... for (int i = 0; i < 100; i++) { - Documents.Document d = new Documents.Document(); + Document d = new Document(); d.Add(new StringField("id", Convert.ToString(i), Field.Store.YES)); d.Add(new TextField("contents", English.Int32ToEnglish(i + 10 * count), Field.Store.NO)); writer.UpdateDocument(new Term("id", Convert.ToString(i)), d); @@ -136,14 +136,15 @@ public virtual void RunTest(Directory directory) { TimedThread[] threads = new TimedThread[4]; - IndexWriterConfig conf = (new IndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer(Random))).SetMaxBufferedDocs(7); + IndexWriterConfig conf = new IndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer(Random)) + .SetMaxBufferedDocs(7); ((TieredMergePolicy)conf.MergePolicy).MaxMergeAtOnce = 3; IndexWriter writer = RandomIndexWriter.MockIndexWriter(directory, conf, Random); // Establish a base index of 100 docs: for (int i = 0; i < 100; i++) { - Documents.Document d = new Documents.Document(); + Document d = new Document(); d.Add(NewStringField("id", Convert.ToString(i), Field.Store.YES)); d.Add(NewTextField("contents", English.Int32ToEnglish(i), Field.Store.NO)); if ((i - 1) % 7 == 0) @@ -213,7 +214,7 @@ public virtual void TestAtomicUpdates() { RunTest(directory); } - System.IO.Directory.Delete(dirPath.FullName, true); + TestUtil.Rm(dirPath); } } -} \ No newline at end of file +} diff --git a/src/Lucene.Net.Tests/Index/TestBackwardsCompatibility.cs b/src/Lucene.Net.Tests/Index/TestBackwardsCompatibility.cs index cc8213f30b..b5b7c8787e 100644 --- a/src/Lucene.Net.Tests/Index/TestBackwardsCompatibility.cs +++ b/src/Lucene.Net.Tests/Index/TestBackwardsCompatibility.cs @@ -158,18 +158,18 @@ public void testCreateMoreTermsIndex() throws Exception { } */ - internal static readonly string[] oldNames = new string[] { + internal static readonly string[] oldNames = { "40.cfs", "40.nocfs", "41.cfs", "41.nocfs", "42.cfs", "42.nocfs", "45.cfs", "45.nocfs", "461.cfs", "461.nocfs" }; - internal readonly string[] unsupportedNames = new string[] { + internal readonly 
string[] unsupportedNames = { "19.cfs", "19.nocfs", "20.cfs", "20.nocfs", "21.cfs", "21.nocfs", "22.cfs", "22.nocfs", "23.cfs", "23.nocfs", "24.cfs", "24.nocfs", "29.cfs", "29.nocfs" }; - internal static readonly string[] oldSingleSegmentNames = new string[] { + internal static readonly string[] oldSingleSegmentNames = { "40.optimized.cfs", "40.optimized.nocfs" }; @@ -178,7 +178,7 @@ public void testCreateMoreTermsIndex() throws Exception { /// /// Randomizes the use of some of hte constructor variations /// - private IndexUpgrader NewIndexUpgrader(Directory dir) + private static IndexUpgrader NewIndexUpgrader(Directory dir) { bool streamType = Random.NextBoolean(); int choice = TestUtil.NextInt32(Random, 0, 2); @@ -256,9 +256,7 @@ public virtual void TestUnsupportedOldIndexes() reader = DirectoryReader.Open(dir); Assert.Fail("DirectoryReader.open should not pass for " + unsupportedNames[i]); } -#pragma warning disable 168 - catch (IndexFormatTooOldException e) -#pragma warning restore 168 + catch (IndexFormatTooOldException /*e*/) { // pass } @@ -310,7 +308,7 @@ public virtual void TestUnsupportedOldIndexes() indexStatus = checker.DoCheckIndex(); } Assert.IsFalse(indexStatus.Clean); - Assert.IsTrue(sb.ToString().Contains(typeof(IndexFormatTooOldException).Name)); + Assert.IsTrue(sb.ToString().Contains(nameof(IndexFormatTooOldException))); dir.Dispose(); TestUtil.Rm(oldIndxeDir); @@ -568,7 +566,8 @@ public virtual void SearchIndex(Directory dir, string oldName) reader.Dispose(); } - private int Compare(string name, string v) + // LUCENENET-specific: made static + private static int Compare(string name, string v) { int v0 = Convert.ToInt32(name.Substring(0, 2)); int v1 = Convert.ToInt32(v); @@ -656,7 +655,8 @@ public virtual DirectoryInfo CreateIndex(string dirName, bool doCFS, bool fullyM mp.NoCFSRatio = doCFS ? 1.0 : 0.0; mp.MaxCFSSegmentSizeMB = double.PositiveInfinity; // TODO: remove randomness - IndexWriterConfig conf = (new IndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer(Random))).SetUseCompoundFile(doCFS).SetMaxBufferedDocs(10).SetMergePolicy(mp); + IndexWriterConfig conf = new IndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer(Random)) + .SetUseCompoundFile(doCFS).SetMaxBufferedDocs(10).SetMergePolicy(mp); IndexWriter writer = new IndexWriter(dir, conf); for (int i = 0; i < 35; i++) @@ -676,12 +676,14 @@ public virtual DirectoryInfo CreateIndex(string dirName, bool doCFS, bool fullyM mp = new LogByteSizeMergePolicy(); mp.NoCFSRatio = doCFS ? 1.0 : 0.0; // TODO: remove randomness - conf = (new IndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer(Random))).SetUseCompoundFile(doCFS).SetMaxBufferedDocs(10).SetMergePolicy(mp); + conf = new IndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer(Random)) + .SetUseCompoundFile(doCFS).SetMaxBufferedDocs(10).SetMergePolicy(mp); writer = new IndexWriter(dir, conf); AddNoProxDoc(writer); writer.Dispose(); - conf = (new IndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer(Random))).SetUseCompoundFile(doCFS).SetMaxBufferedDocs(10).SetMergePolicy(doCFS ? NoMergePolicy.COMPOUND_FILES : NoMergePolicy.NO_COMPOUND_FILES); + conf = new IndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer(Random)) + .SetUseCompoundFile(doCFS).SetMaxBufferedDocs(10).SetMergePolicy(doCFS ? 
NoMergePolicy.COMPOUND_FILES : NoMergePolicy.NO_COMPOUND_FILES); writer = new IndexWriter(dir, conf); Term searchTerm = new Term("id", "7"); writer.DeleteDocuments(searchTerm); @@ -693,7 +695,8 @@ public virtual DirectoryInfo CreateIndex(string dirName, bool doCFS, bool fullyM return indexDir; } - private void AddDoc(IndexWriter writer, int id) + // LUCENENET-specific: made static + private static void AddDoc(IndexWriter writer, int id) { Document doc = new Document(); doc.Add(new TextField("content", "aaa", Field.Store.NO)); @@ -746,7 +749,8 @@ private void AddDoc(IndexWriter writer, int id) writer.AddDocument(doc); } - private void AddNoProxDoc(IndexWriter writer) + // LUCENENET-specific: made static + private static void AddNoProxDoc(IndexWriter writer) { Document doc = new Document(); FieldType customType = new FieldType(TextField.TYPE_STORED); @@ -761,7 +765,8 @@ private void AddNoProxDoc(IndexWriter writer) writer.AddDocument(doc); } - private int CountDocs(DocsEnum docs) + // LUCENENET-specific: made static + private static int CountDocs(DocsEnum docs) { int count = 0; while ((docs.NextDoc()) != DocIdSetIterator.NO_MORE_DOCS) @@ -854,7 +859,6 @@ public virtual void TestNumericFields() { foreach (string name in oldNames) { - Directory dir = oldIndexDirs[name]; IndexReader reader = DirectoryReader.Open(dir); IndexSearcher searcher = NewSearcher(reader); @@ -873,6 +877,7 @@ public virtual void TestNumericFields() } // check that also lower-precision fields are ok + // LUCENENET-specific: renamed to hits_ to avoid conflict with local variable ScoreDoc[] hits_ = searcher.Search(NumericRangeQuery.NewInt32Range("trieInt", 4, int.MinValue, int.MaxValue, false, false), 100).ScoreDocs; Assert.AreEqual(34, hits_.Length, "wrong number of hits"); @@ -899,7 +904,8 @@ public virtual void TestNumericFields() } } - private int CheckAllSegmentsUpgraded(Directory dir) + // LUCENENET-specific: made static + private static int CheckAllSegmentsUpgraded(Directory dir) { SegmentInfos infos = new SegmentInfos(); infos.Read(dir); @@ -914,7 +920,8 @@ private int CheckAllSegmentsUpgraded(Directory dir) return infos.Count; } - private int GetNumberOfSegments(Directory dir) + // LUCENENET-specific: made static + private static int GetNumberOfSegments(Directory dir) { SegmentInfos infos = new SegmentInfos(); infos.Read(dir); @@ -947,7 +954,6 @@ public virtual void TestUpgradeOldIndex() [Slow] public virtual void TestCommandLineArgs() { - foreach (string name in oldIndexDirs.Keys) { DirectoryInfo dir = CreateTempDir(name); @@ -1022,8 +1028,9 @@ public virtual void TestUpgradeOldSingleSegmentIndexWithAdditions() for (int i = 0; i < 3; i++) { // only use Log- or TieredMergePolicy, to make document addition predictable and not suddenly merge: - MergePolicy mp = Random.NextBoolean() ? (MergePolicy)NewLogMergePolicy() : NewTieredMergePolicy(); - IndexWriterConfig iwc = (new IndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer(Random))).SetMergePolicy(mp); + MergePolicy mp = Random.NextBoolean() ? NewLogMergePolicy() : NewTieredMergePolicy(); + IndexWriterConfig iwc = new IndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer(Random)) + .SetMergePolicy(mp); IndexWriter w = new IndexWriter(ramDir, iwc); // add few more docs: for (int j = 0; j < RandomMultiplier * Random.Next(30); j++) @@ -1035,8 +1042,10 @@ public virtual void TestUpgradeOldSingleSegmentIndexWithAdditions() // add dummy segments (which are all in current // version) to single segment index - MergePolicy mp_ = Random.NextBoolean() ? 
(MergePolicy)NewLogMergePolicy() : NewTieredMergePolicy(); - IndexWriterConfig iwc_ = (new IndexWriterConfig(TEST_VERSION_CURRENT, null)).SetMergePolicy(mp_); + // LUCENENET-specific: renamed variables to avoid conflict with ones above + MergePolicy mp_ = Random.NextBoolean() ? NewLogMergePolicy() : NewTieredMergePolicy(); + IndexWriterConfig iwc_ = new IndexWriterConfig(TEST_VERSION_CURRENT, null) + .SetMergePolicy(mp_); IndexWriter iw = new IndexWriter(dir, iwc_); iw.AddIndexes(ramDir); iw.Dispose(false); diff --git a/src/Lucene.Net.Tests/Index/TestBackwardsCompatibility3x.cs b/src/Lucene.Net.Tests/Index/TestBackwardsCompatibility3x.cs index 9ac0bf703e..f95828c918 100644 --- a/src/Lucene.Net.Tests/Index/TestBackwardsCompatibility3x.cs +++ b/src/Lucene.Net.Tests/Index/TestBackwardsCompatibility3x.cs @@ -36,7 +36,6 @@ namespace Lucene.Net.Index using BytesRef = Lucene.Net.Util.BytesRef; using Constants = Lucene.Net.Util.Constants; using Directory = Lucene.Net.Store.Directory; - //using IndexOptions = Lucene.Net.Index.IndexOptions; using DocIdSetIterator = Lucene.Net.Search.DocIdSetIterator; using Document = Lucene.Net.Documents.Document; using DoubleDocValuesField = Lucene.Net.Documents.DoubleDocValuesField; @@ -113,18 +112,18 @@ public void testCreateSingleSegmentNoCFS() throws IOException { // LUCENENET specific to load resources for this type internal const string CURRENT_RESOURCE_DIRECTORY = "Lucene.Net.Tests.Index."; - internal static readonly string[] oldNames = new string[] { + internal static readonly string[] oldNames = { "30.cfs", "30.nocfs", "31.cfs", "31.nocfs", "32.cfs", "32.nocfs", "34.cfs", "34.nocfs" }; - internal readonly string[] unsupportedNames = new string[] { + internal readonly string[] unsupportedNames = { "19.cfs", "19.nocfs", "20.cfs", "20.nocfs", "21.cfs", "21.nocfs", "22.cfs", "22.nocfs", "23.cfs", "23.nocfs", "24.cfs", "24.nocfs", "29.cfs", "29.nocfs" }; - internal static readonly string[] oldSingleSegmentNames = new string[] { + internal static readonly string[] oldSingleSegmentNames = { "31.optimized.cfs", "31.optimized.nocfs" }; @@ -189,9 +188,7 @@ public virtual void TestUnsupportedOldIndexes() reader = DirectoryReader.Open(dir); Assert.Fail("DirectoryReader.open should not pass for " + unsupportedNames[i]); } -#pragma warning disable 168 - catch (IndexFormatTooOldException e) -#pragma warning restore 168 + catch (IndexFormatTooOldException /*e*/) { // pass } @@ -206,7 +203,8 @@ public virtual void TestUnsupportedOldIndexes() try { - writer = new IndexWriter(dir, NewIndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer(Random))); + writer = new IndexWriter(dir, + NewIndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer(Random))); Assert.Fail("IndexWriter creation should not pass for " + unsupportedNames[i]); } catch (IndexFormatTooOldException e) @@ -240,7 +238,7 @@ public virtual void TestUnsupportedOldIndexes() CheckIndex.Status indexStatus = checker.DoCheckIndex(); Assert.IsFalse(indexStatus.Clean); checker.InfoStream.Flush(); - Assert.IsTrue(bos.ToString().Contains(typeof(IndexFormatTooOldException).Name)); + Assert.IsTrue(bos.ToString().Contains(nameof(IndexFormatTooOldException))); dir.Dispose(); } @@ -256,7 +254,8 @@ public virtual void TestFullyMergeOldIndex() Console.WriteLine("\nTEST: index=" + name); } Directory dir = NewDirectory(oldIndexDirs[name]); - IndexWriter w = new IndexWriter(dir, new IndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer(Random))); + IndexWriter w = new IndexWriter(dir, + new 
IndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer(Random))); w.ForceMerge(1); w.Dispose(); @@ -274,7 +273,8 @@ public virtual void TestAddOldIndexes() Console.WriteLine("\nTEST: old index " + name); } Directory targetDir = NewDirectory(); - IndexWriter w = new IndexWriter(targetDir, NewIndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer(Random))); + IndexWriter w = new IndexWriter(targetDir, + NewIndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer(Random))); w.AddIndexes(oldIndexDirs[name]); if (Verbose) { @@ -294,7 +294,8 @@ public virtual void TestAddOldIndexesReader() IndexReader reader = DirectoryReader.Open(oldIndexDirs[name]); Directory targetDir = NewDirectory(); - IndexWriter w = new IndexWriter(targetDir, NewIndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer(Random))); + IndexWriter w = new IndexWriter(targetDir, + NewIndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer(Random))); w.AddIndexes(reader); w.Dispose(); reader.Dispose(); @@ -388,7 +389,8 @@ public virtual void TestDeleteOldIndex() } } - private void DoTestHits(ScoreDoc[] hits, int expectedCount, IndexReader reader) + // LUCENENET-specific: made static + private static void DoTestHits(ScoreDoc[] hits, int expectedCount, IndexReader reader) { int hitCount = hits.Length; Assert.AreEqual(expectedCount, hitCount, "wrong number of hits"); @@ -531,7 +533,8 @@ public virtual void SearchIndex(Directory dir, string oldName) reader.Dispose(); } - private int Compare(string name, string v) + // LUCENENET specific - made static + private static int Compare(string name, string v) { int v0 = Convert.ToInt32(name.Substring(0, 2)); int v1 = Convert.ToInt32(v); @@ -619,7 +622,8 @@ public virtual DirectoryInfo CreateIndex(string dirName, bool doCFS, bool fullyM mp.NoCFSRatio = doCFS ? 1.0 : 0.0; mp.MaxCFSSegmentSizeMB = double.PositiveInfinity; // TODO: remove randomness - IndexWriterConfig conf = (new IndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer(Random))).SetMaxBufferedDocs(10).SetMergePolicy(mp).SetUseCompoundFile(doCFS); + IndexWriterConfig conf = new IndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer(Random)) + .SetMaxBufferedDocs(10).SetMergePolicy(mp).SetUseCompoundFile(doCFS); IndexWriter writer = new IndexWriter(dir, conf); for (int i = 0; i < 35; i++) @@ -639,12 +643,15 @@ public virtual DirectoryInfo CreateIndex(string dirName, bool doCFS, bool fullyM mp = new LogByteSizeMergePolicy(); mp.NoCFSRatio = doCFS ? 1.0 : 0.0; // TODO: remove randomness - conf = (new IndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer(Random))).SetMaxBufferedDocs(10).SetMergePolicy(mp).SetUseCompoundFile(doCFS); + conf = new IndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer(Random)) + .SetMaxBufferedDocs(10).SetMergePolicy(mp).SetUseCompoundFile(doCFS); writer = new IndexWriter(dir, conf); AddNoProxDoc(writer); writer.Dispose(); - writer = new IndexWriter(dir, conf.SetMergePolicy(doCFS ? NoMergePolicy.COMPOUND_FILES : NoMergePolicy.NO_COMPOUND_FILES)); + writer = new IndexWriter(dir, + conf.SetMergePolicy(doCFS ? 
NoMergePolicy.COMPOUND_FILES : NoMergePolicy.NO_COMPOUND_FILES)
+ );
 Term searchTerm = new Term("id", "7");
 writer.DeleteDocuments(searchTerm);
 writer.Dispose();
@@ -655,7 +662,8 @@ public virtual DirectoryInfo CreateIndex(string dirName, bool doCFS, bool fullyM
 return indexDir;
 }
- private void AddDoc(IndexWriter writer, int id)
+ // LUCENENET specific - made static
+ private static void AddDoc(IndexWriter writer, int id)
 {
 Document doc = new Document();
 doc.Add(new TextField("content", "aaa", Field.Store.NO));
@@ -707,7 +715,8 @@ private void AddDoc(IndexWriter writer, int id)
 writer.AddDocument(doc);
 }
- private void AddNoProxDoc(IndexWriter writer)
+ // LUCENENET specific - made static
+ private static void AddNoProxDoc(IndexWriter writer)
 {
 Document doc = new Document();
 FieldType customType = new FieldType(TextField.TYPE_STORED);
@@ -722,7 +731,8 @@ private void AddNoProxDoc(IndexWriter writer)
 writer.AddDocument(doc);
 }
- private int CountDocs(DocsEnum docs)
+ // LUCENENET specific - made static
+ private static int CountDocs(DocsEnum docs)
 {
 int count = 0;
 while ((docs.NextDoc()) != DocIdSetIterator.NO_MORE_DOCS)
@@ -816,7 +826,6 @@ public virtual void TestNumericFields()
 {
 foreach (string name in oldNames)
 {
-
 Directory dir = oldIndexDirs[name];
 IndexReader reader = DirectoryReader.Open(dir);
 IndexSearcher searcher = new IndexSearcher(reader);
@@ -861,7 +870,8 @@ public virtual void TestNumericFields()
 }
 }
- private int CheckAllSegmentsUpgraded(Directory dir)
+ // LUCENENET specific - made static
+ private static int CheckAllSegmentsUpgraded(Directory dir)
 {
 SegmentInfos infos = new SegmentInfos();
 infos.Read(dir);
@@ -876,7 +886,8 @@ private int CheckAllSegmentsUpgraded(Directory dir)
 return infos.Count;
 }
- private int GetNumberOfSegments(Directory dir)
+ // LUCENENET specific - made static
+ private static int GetNumberOfSegments(Directory dir)
 {
 SegmentInfos infos = new SegmentInfos();
 infos.Read(dir);
@@ -897,7 +908,8 @@ public virtual void TestUpgradeOldIndex()
 }
 Directory dir = NewDirectory(oldIndexDirs[name]);
- (new IndexUpgrader(dir, NewIndexWriterConfig(TEST_VERSION_CURRENT, null), false)).Upgrade();
+ new IndexUpgrader(dir, NewIndexWriterConfig(TEST_VERSION_CURRENT, null), false)
+ .Upgrade();
 CheckAllSegmentsUpgraded(dir);
@@ -924,8 +936,9 @@ public virtual void TestUpgradeOldSingleSegmentIndexWithAdditions()
 for (int i = 0; i < 3; i++)
 {
 // only use Log- or TieredMergePolicy, to make document addition predictable and not suddenly merge:
- MergePolicy mp = Random.NextBoolean() ? (MergePolicy)NewLogMergePolicy() : NewTieredMergePolicy();
- IndexWriterConfig iwc = (new IndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer(Random))).SetMergePolicy(mp);
+ MergePolicy mp = Random.NextBoolean() ? NewLogMergePolicy() : NewTieredMergePolicy();
+ IndexWriterConfig iwc = new IndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer(Random))
+ .SetMergePolicy(mp);
 IndexWriter w = new IndexWriter(ramDir, iwc);
 // add few more docs:
 for (int j = 0; j < RandomMultiplier * Random.Next(30); j++)
@@ -937,8 +950,9 @@ public virtual void TestUpgradeOldSingleSegmentIndexWithAdditions()
 // add dummy segments (which are all in current
 // version) to single segment index
- MergePolicy mp_ = Random.NextBoolean() ? (MergePolicy)NewLogMergePolicy() : NewTieredMergePolicy();
- IndexWriterConfig iwc_ = (new IndexWriterConfig(TEST_VERSION_CURRENT, null)).SetMergePolicy(mp_);
+ MergePolicy mp_ = Random.NextBoolean() ?
+                IndexWriterConfig iwc_ = new IndexWriterConfig(TEST_VERSION_CURRENT, null)
+                    .SetMergePolicy(mp_);
                 IndexWriter w_ = new IndexWriter(dir, iwc_);
                 w_.AddIndexes(ramDir);
                 w_.Dispose(false);
@@ -946,7 +960,8 @@ public virtual void TestUpgradeOldSingleSegmentIndexWithAdditions()

                 // determine count of segments in modified index
                 int origSegCount = GetNumberOfSegments(dir);

-                (new IndexUpgrader(dir, NewIndexWriterConfig(TEST_VERSION_CURRENT, null), false)).Upgrade();
+                new IndexUpgrader(dir, NewIndexWriterConfig(TEST_VERSION_CURRENT, null), false)
+                    .Upgrade();
                 int segCount = CheckAllSegmentsUpgraded(dir);

                 Assert.AreEqual(origSegCount, segCount, "Index must still contain the same number of segments, as only one segment was upgraded and nothing else merged");
diff --git a/src/Lucene.Net.Tests/Index/TestBagOfPositions.cs b/src/Lucene.Net.Tests/Index/TestBagOfPositions.cs
index a0337168f9..b81ba92038 100644
--- a/src/Lucene.Net.Tests/Index/TestBagOfPositions.cs
+++ b/src/Lucene.Net.Tests/Index/TestBagOfPositions.cs
@@ -33,7 +33,6 @@ namespace Lucene.Net.Index
     * limitations under the License.
     */

-    using BytesRef = Lucene.Net.Util.BytesRef;
     using Directory = Lucene.Net.Store.Directory;
     using Document = Documents.Document;
     using Field = Field;
@@ -51,7 +50,6 @@ namespace Lucene.Net.Index
     // Lucene3x doesnt have totalTermFreq, so the test isn't interesting there.
     [TestFixture]
     public class TestBagOfPositions : LuceneTestCase
-
     {
         [Test]
         [Slow]
@@ -128,7 +126,7 @@ public virtual void Test()
                 Document document = new Document();
                 Field field = new Field("field", "", fieldType);
                 document.Add(field);
-                threads[threadID] = new ThreadAnonymousClass(this, numTerms, maxTermsPerDoc, postings, iw, startingGun, threadRandom, document, field);
+                threads[threadID] = new ThreadAnonymousClass(maxTermsPerDoc, postings, iw, startingGun, threadRandom, document, field);
                 threads[threadID].Start();
             }
             startingGun.Signal();
@@ -160,9 +158,6 @@ public virtual void Test()

         private sealed class ThreadAnonymousClass : ThreadJob
         {
-            private readonly TestBagOfPositions outerInstance;
-
-            private readonly int numTerms;
             private readonly int maxTermsPerDoc;
             private readonly ConcurrentQueue<string> postings;
             private readonly RandomIndexWriter iw;
@@ -171,10 +166,8 @@ private sealed class ThreadAnonymousClass : ThreadJob
             private readonly Document document;
             private readonly Field field;

-            public ThreadAnonymousClass(TestBagOfPositions outerInstance, int numTerms, int maxTermsPerDoc, ConcurrentQueue<string> postings, RandomIndexWriter iw, CountdownEvent startingGun, Random threadRandom, Document document, Field field)
+            public ThreadAnonymousClass(int maxTermsPerDoc, ConcurrentQueue<string> postings, RandomIndexWriter iw, CountdownEvent startingGun, Random threadRandom, Document document, Field field)
             {
-                this.outerInstance = outerInstance;
-                this.numTerms = numTerms;
                 this.maxTermsPerDoc = maxTermsPerDoc;
                 this.postings = postings;
                 this.iw = iw;
@@ -189,7 +182,7 @@ public override void Run()
                 try
                 {
                     startingGun.Wait();
-                    while (!(postings.Count == 0))
+                    while (!postings.IsEmpty)
                     {
                         StringBuilder text = new StringBuilder();
                         int numTerms = threadRandom.Next(maxTermsPerDoc);
@@ -213,4 +206,4 @@ public override void Run()
             }
         }
     }
-}
\ No newline at end of file
+}
diff --git a/src/Lucene.Net.Tests/Index/TestBagOfPostings.cs b/src/Lucene.Net.Tests/Index/TestBagOfPostings.cs
index 8d9ea475eb..87b8324853 100644
--- a/src/Lucene.Net.Tests/Index/TestBagOfPostings.cs
+++ b/src/Lucene.Net.Tests/Index/TestBagOfPostings.cs
@@ -32,7 +32,6 @@ namespace Lucene.Net.Index
     * limitations under the License.
     */

-    using BytesRef = Lucene.Net.Util.BytesRef;
     using Directory = Lucene.Net.Store.Directory;
     using Document = Documents.Document;
     using Field = Field;
@@ -102,7 +101,7 @@ public virtual void Test()

             for (int threadID = 0; threadID < threadCount; threadID++)
             {
-                threads[threadID] = new ThreadAnonymousClass(this, maxTermsPerDoc, postings, iw, startingGun);
+                threads[threadID] = new ThreadAnonymousClass(maxTermsPerDoc, postings, iw, startingGun);
                 threads[threadID].Start();
             }
             startingGun.Signal();
@@ -140,16 +139,13 @@ public virtual void Test()

         private sealed class ThreadAnonymousClass : ThreadJob
         {
-            private readonly TestBagOfPostings outerInstance;
-
             private readonly int maxTermsPerDoc;
             private readonly ConcurrentQueue<string> postings;
             private readonly RandomIndexWriter iw;
             private readonly CountdownEvent startingGun;

-            public ThreadAnonymousClass(TestBagOfPostings outerInstance, int maxTermsPerDoc, ConcurrentQueue<string> postings, RandomIndexWriter iw, CountdownEvent startingGun)
+            public ThreadAnonymousClass(int maxTermsPerDoc, ConcurrentQueue<string> postings, RandomIndexWriter iw, CountdownEvent startingGun)
             {
-                this.outerInstance = outerInstance;
                 this.maxTermsPerDoc = maxTermsPerDoc;
                 this.postings = postings;
                 this.iw = iw;
@@ -164,7 +160,7 @@ public override void Run()
                     Field field = NewTextField("field", "", Field.Store.NO);
                     document.Add(field);
                     startingGun.Wait();
-                    while (!(postings.Count == 0))
+                    while (!postings.IsEmpty)
                     {
                         StringBuilder text = new StringBuilder();
                         ISet<string> visited = new JCG.HashSet<string>();
@@ -195,4 +191,4 @@ public override void Run()
             }
         }
     }
-}
\ No newline at end of file
+}
diff --git a/src/Lucene.Net.Tests/Index/TestBinaryDocValuesUpdates.cs b/src/Lucene.Net.Tests/Index/TestBinaryDocValuesUpdates.cs
index 838cc8f43e..bf79d132fb 100644
--- a/src/Lucene.Net.Tests/Index/TestBinaryDocValuesUpdates.cs
+++ b/src/Lucene.Net.Tests/Index/TestBinaryDocValuesUpdates.cs
@@ -97,7 +97,8 @@ internal static BytesRef ToBytes(long value)
             return bytes;
         }

-        private Document Doc(int id)
+        // LUCENENET specific - made static
+        private static Document Doc(int id)
         {
             Document doc = new Document();
             doc.Add(new StringField("id", "doc-" + id, Store.NO));
@@ -111,7 +112,8 @@ public virtual void TestUpdatesAreFlushed()
         {
             Directory dir = NewDirectory();
-            IndexWriter writer = new IndexWriter(dir, (IndexWriterConfig)NewIndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer(Random, MockTokenizer.WHITESPACE, false)).SetRAMBufferSizeMB(0.00000001));
+            IndexWriter writer = new IndexWriter(dir, NewIndexWriterConfig(
+                TEST_VERSION_CURRENT, new MockAnalyzer(Random, MockTokenizer.WHITESPACE, false)).SetRAMBufferSizeMB(0.00000001));
             writer.AddDocument(Doc(0)); // val=1
             writer.AddDocument(Doc(1)); // val=2
             writer.AddDocument(Doc(3)); // val=2
@@ -152,8 +154,8 @@ public virtual void TestSimple()
             {
                 writer.Dispose();
                 reader = DirectoryReader.Open(dir);
-            } // NRT
-            else
+            }
+            else // NRT
             {
                 reader = DirectoryReader.Open(writer, true);
                 writer.Dispose();
@@ -178,7 +180,7 @@ public virtual void TestUpdateFewSegments()
             conf.SetMaxBufferedDocs(2); // generate few segments
             conf.SetMergePolicy(NoMergePolicy.COMPOUND_FILES); // prevent merges for this test
             IndexWriter writer = new IndexWriter(dir, conf);
-            int numDocs = 10;
+            const int numDocs = 10;
             long[] expectedValues = new long[numDocs];
             for (int i = 0; i < numDocs; i++)
             {
@@ -203,8 +205,8 @@ public virtual void TestUpdateFewSegments()
             {
                 writer.Dispose();
                 reader = DirectoryReader.Open(dir);
-            } // NRT
-            else
+            }
+            else // NRT
             {
                 reader = DirectoryReader.Open(writer, true);
                 writer.Dispose();
@@ -302,8 +304,8 @@ public virtual void TestUpdatesAndDeletes()
             {
                 writer.Dispose();
                 reader = DirectoryReader.Open(dir);
-            } // NRT
-            else
+            }
+            else // NRT
             {
                 reader = DirectoryReader.Open(writer, true);
                 writer.Dispose();
@@ -355,8 +357,8 @@ public virtual void TestUpdatesWithDeletes()
             {
                 writer.Dispose();
                 reader = DirectoryReader.Open(dir);
-            } // NRT
-            else
+            }
+            else // NRT
             {
                 reader = DirectoryReader.Open(writer, true);
                 writer.Dispose();
@@ -395,8 +397,8 @@ public virtual void TestUpdateAndDeleteSameDocument()
             {
                 writer.Dispose();
                 reader = DirectoryReader.Open(dir);
-            } // NRT
-            else
+            }
+            else // NRT
             {
                 reader = DirectoryReader.Open(writer, true);
                 writer.Dispose();
@@ -669,7 +671,7 @@ public virtual void TestDifferentDVFormatPerField()
         {
             Directory dir = NewDirectory();
             IndexWriterConfig conf = NewIndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer(Random));
-            conf.SetCodec(new Lucene46CodecAnonymousClass(this));
+            conf.SetCodec(new Lucene46CodecAnonymousClass());
             IndexWriter writer = new IndexWriter(dir, conf);

             Document doc = new Document();
@@ -702,13 +704,6 @@ public virtual void TestDifferentDVFormatPerField()

         private sealed class Lucene46CodecAnonymousClass : Lucene46Codec
         {
-            private readonly TestBinaryDocValuesUpdates outerInstance;
-
-            public Lucene46CodecAnonymousClass(TestBinaryDocValuesUpdates outerInstance)
-            {
-                this.outerInstance = outerInstance;
-            }
-
             public override DocValuesFormat GetDocValuesFormatForField(string field)
             {
                 return new Lucene45DocValuesFormat();
@@ -1132,6 +1127,7 @@ public virtual void TestUpdateOldSegments()
             };

             Directory dir = NewDirectory();
+            bool oldValue = OldFormatImpersonationIsActive;

             // create a segment with an old Codec
             IndexWriterConfig conf = NewIndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer(Random));
             conf.SetCodec(oldCodecs[Random.Next(oldCodecs.Length)]);
@@ -1142,46 +1138,17 @@ public virtual void TestUpdateOldSegments()
             doc.Add(new BinaryDocValuesField("f", ToBytes(5L)));
             writer.AddDocument(doc);
             writer.Dispose();
-            dir.Dispose();
-        }

-        [Test, LuceneNetSpecific]
-        public virtual void TestUpdateOldSegments_OldFormatNotActive()
-        {
-            bool oldValue = OldFormatImpersonationIsActive;
-
-            OldFormatImpersonationIsActive = false;
-
-            Codec[] oldCodecs = new Codec[] {
-                new Lucene40RWCodec(),
-                new Lucene41RWCodec(),
-                new Lucene42RWCodec(),
-                new Lucene45RWCodec()
-            };
-
-            Directory dir = NewDirectory();
-            Document doc = new Document();
-            doc.Add(new StringField("id", "doc", Store.NO));
-            doc.Add(new BinaryDocValuesField("f", ToBytes(5L)));
-
-            var conf = NewIndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer(Random));
-            conf.SetCodec(oldCodecs[Random.Next(oldCodecs.Length)]);
-
-            var writer = new IndexWriter(dir, conf);
-            writer.AddDocument(doc);
+            conf = NewIndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer(Random));
+            writer = new IndexWriter(dir, conf);
             writer.UpdateBinaryDocValue(new Term("id", "doc"), "f", ToBytes(4L));
-
+            OldFormatImpersonationIsActive = false;
             try
             {
                 writer.Dispose();
                 Assert.Fail("should not have succeeded to update a segment written with an old Codec");
             }
             catch (Exception e) when (e.IsUnsupportedOperationException())
             {
                 writer.Rollback();
             }
             finally
             {
                 OldFormatImpersonationIsActive = oldValue;
             }