WIP Test review E-I through TestBinaryDocValuesUpdates, apache#259
paulirwin committed Nov 10, 2024
1 parent 974db69 commit 096d812
Showing 17 changed files with 366 additions and 283 deletions.
31 changes: 16 additions & 15 deletions src/Lucene.Net.Tests/Index/Test2BBinaryDocValues.cs
@@ -49,13 +49,14 @@ public virtual void TestFixedBinary()
{
((MockDirectoryWrapper)dir).Throttling = Throttling.NEVER;
}
var config = new IndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer(Random))
.SetMaxBufferedDocs(IndexWriterConfig.DISABLE_AUTO_FLUSH)
.SetRAMBufferSizeMB(256.0)
.SetMergeScheduler(new ConcurrentMergeScheduler())
.SetMergePolicy(NewLogMergePolicy(false, 10))
.SetOpenMode(OpenMode.CREATE);
IndexWriter w = new IndexWriter(dir, config);

IndexWriter w = new IndexWriter(dir,
new IndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer(Random))
.SetMaxBufferedDocs(IndexWriterConfig.DISABLE_AUTO_FLUSH)
.SetRAMBufferSizeMB(256.0)
.SetMergeScheduler(new ConcurrentMergeScheduler())
.SetMergePolicy(NewLogMergePolicy(false, 10))
.SetOpenMode(OpenMode.CREATE));

Document doc = new Document();
var bytes = new byte[4];
@@ -116,13 +117,13 @@ public virtual void TestVariableBinary()
((MockDirectoryWrapper)dir).Throttling = Throttling.NEVER;
}

var config = new IndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer(Random))
.SetMaxBufferedDocs(IndexWriterConfig.DISABLE_AUTO_FLUSH)
.SetRAMBufferSizeMB(256.0)
.SetMergeScheduler(new ConcurrentMergeScheduler())
.SetMergePolicy(NewLogMergePolicy(false, 10))
.SetOpenMode(OpenMode.CREATE);
IndexWriter w = new IndexWriter(dir, config);
IndexWriter w = new IndexWriter(dir,
new IndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer(Random))
.SetMaxBufferedDocs(IndexWriterConfig.DISABLE_AUTO_FLUSH)
.SetRAMBufferSizeMB(256.0)
.SetMergeScheduler(new ConcurrentMergeScheduler())
.SetMergePolicy(NewLogMergePolicy(false, 10))
.SetOpenMode(OpenMode.CREATE));

Document doc = new Document();
var bytes = new byte[4];
@@ -172,4 +173,4 @@ public virtual void TestVariableBinary()
dir.Dispose();
}
}
}
}
4 changes: 2 additions & 2 deletions src/Lucene.Net.Tests/Index/Test2BDocs.cs
@@ -63,7 +63,7 @@ public virtual void TestOverflow()
Arrays.Fill(subReaders, ir);
try
{
new MultiReader(subReaders);
_ = new MultiReader(subReaders); // LUCENENET-specific: discard result
Assert.Fail();
}
catch (Exception expected) when (expected.IsIllegalArgumentException())
@@ -97,4 +97,4 @@ public virtual void TestExactlyAtLimit()
dir2.Dispose();
}
}
}
}
11 changes: 6 additions & 5 deletions src/Lucene.Net.Tests/Index/Test2BPositions.cs
@@ -42,8 +42,8 @@ namespace Lucene.Net.Index
[SuppressCodecs("SimpleText", "Memory", "Direct")]
[TestFixture]
public class Test2BPositions : LuceneTestCase
// uses lots of space and takes a few minutes
{
// uses lots of space and takes a few minutes
[Ignore("Very slow. Enable manually by removing Ignore.")]
[Test]
public virtual void Test()
@@ -75,7 +75,7 @@ public virtual void Test()
Field field = new Field("field", new MyTokenStream(), ft);
doc.Add(field);

int numDocs = (int.MaxValue / 26) + 1;
const int numDocs = (int.MaxValue / 26) + 1;
for (int i = 0; i < numDocs; i++)
{
w.AddDocument(doc);
@@ -91,10 +91,11 @@ public virtual void Test()

public sealed class MyTokenStream : TokenStream
{
internal readonly ICharTermAttribute termAtt;
internal readonly IPositionIncrementAttribute posIncAtt;
private readonly ICharTermAttribute termAtt;
private readonly IPositionIncrementAttribute posIncAtt;
internal int index;

// LUCENENET-specific: must call AddAttribute from ctor in .NET
public MyTokenStream()
{
termAtt = AddAttribute<ICharTermAttribute>();
@@ -121,4 +122,4 @@ public override void Reset()
}
}
}
}
}
21 changes: 11 additions & 10 deletions src/Lucene.Net.Tests/Index/Test2BPostings.cs
@@ -54,14 +54,14 @@ public virtual void Test()
((MockDirectoryWrapper)dir).Throttling = Throttling.NEVER;
}

var config = new IndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer(Random))
.SetMaxBufferedDocs(IndexWriterConfig.DISABLE_AUTO_FLUSH)
.SetRAMBufferSizeMB(256.0)
.SetMergeScheduler(new ConcurrentMergeScheduler())
.SetMergePolicy(NewLogMergePolicy(false, 10))
.SetOpenMode(OpenMode.CREATE);
var iwc = new IndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer(Random))
.SetMaxBufferedDocs(IndexWriterConfig.DISABLE_AUTO_FLUSH)
.SetRAMBufferSizeMB(256.0)
.SetMergeScheduler(new ConcurrentMergeScheduler())
.SetMergePolicy(NewLogMergePolicy(false, 10))
.SetOpenMode(OpenMode.CREATE);

IndexWriter w = new IndexWriter(dir, config);
IndexWriter w = new IndexWriter(dir, iwc);

MergePolicy mp = w.Config.MergePolicy;
if (mp is LogByteSizeMergePolicy)
@@ -77,7 +77,7 @@ public virtual void Test()
Field field = new Field("field", new MyTokenStream(), ft);
doc.Add(field);

int numDocs = (int.MaxValue / 26) + 1;
const int numDocs = (int.MaxValue / 26) + 1;
for (int i = 0; i < numDocs; i++)
{
w.AddDocument(doc);
@@ -93,9 +93,10 @@ public virtual void Test()

public sealed class MyTokenStream : TokenStream
{
internal readonly ICharTermAttribute termAtt;
private readonly ICharTermAttribute termAtt;
internal int index;

// LUCENENET-specific: must call AddAttribute from ctor in .NET
public MyTokenStream()
{
termAtt = AddAttribute<ICharTermAttribute>();
@@ -119,4 +120,4 @@ public override void Reset()
}
}
}
}
}
31 changes: 17 additions & 14 deletions src/Lucene.Net.Tests/Index/Test2BPostingsBytes.cs
@@ -41,13 +41,13 @@ namespace Lucene.Net.Index
/// so you get > Integer.MAX_VALUE postings data for the term
/// @lucene.experimental
/// </summary>
// disable Lucene3x: older lucene formats always had this issue.
[SuppressCodecs("SimpleText", "Memory", "Direct", "Lucene3x")]
[TestFixture]
public class Test2BPostingsBytes : LuceneTestCase
// disable Lucene3x: older lucene formats always had this issue.
// @Absurd @Ignore takes ~20GB-30GB of space and 10 minutes.
// with some codecs needs more heap space as well.
{
// @Absurd @Ignore takes ~20GB-30GB of space and 10 minutes.
// with some codecs needs more heap space as well.
[Ignore("Very slow. Enable manually by removing Ignore.")]
[Test]
public virtual void Test()
@@ -58,13 +58,13 @@ public virtual void Test()
((MockDirectoryWrapper)dir).Throttling = Throttling.NEVER;
}

var config = new IndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer(Random))
.SetMaxBufferedDocs(IndexWriterConfig.DISABLE_AUTO_FLUSH)
.SetRAMBufferSizeMB(256.0)
.SetMergeScheduler(new ConcurrentMergeScheduler())
.SetMergePolicy(NewLogMergePolicy(false, 10))
.SetOpenMode(OpenMode.CREATE);
IndexWriter w = new IndexWriter(dir, config);
IndexWriter w = new IndexWriter(dir,
new IndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer(Random))
.SetMaxBufferedDocs(IndexWriterConfig.DISABLE_AUTO_FLUSH)
.SetRAMBufferSizeMB(256.0)
.SetMergeScheduler(new ConcurrentMergeScheduler())
.SetMergePolicy(NewLogMergePolicy(false, 10))
.SetOpenMode(OpenMode.CREATE));

MergePolicy mp = w.Config.MergePolicy;
if (mp is LogByteSizeMergePolicy)
@@ -106,7 +106,8 @@ public virtual void Test()
{
((MockDirectoryWrapper)dir2).Throttling = Throttling.NEVER;
}
IndexWriter w2 = new IndexWriter(dir2, new IndexWriterConfig(TEST_VERSION_CURRENT, null));
IndexWriter w2 = new IndexWriter(dir2,
new IndexWriterConfig(TEST_VERSION_CURRENT, null));
w2.AddIndexes(mr);
w2.ForceMerge(1);
w2.Dispose();
@@ -121,7 +122,8 @@ public virtual void Test()
{
((MockDirectoryWrapper)dir3).Throttling = Throttling.NEVER;
}
IndexWriter w3 = new IndexWriter(dir3, new IndexWriterConfig(TEST_VERSION_CURRENT, null));
IndexWriter w3 = new IndexWriter(dir3,
new IndexWriterConfig(TEST_VERSION_CURRENT, null));
w3.AddIndexes(mr);
w3.ForceMerge(1);
w3.Dispose();
@@ -134,10 +136,11 @@ public virtual void Test()

public sealed class MyTokenStream : TokenStream
{
internal readonly ICharTermAttribute termAtt;
private readonly ICharTermAttribute termAtt;
internal int index;
internal int n;

// LUCENENET-specific: must call AddAttribute from ctor in .NET
public MyTokenStream()
{
termAtt = AddAttribute<ICharTermAttribute>();
@@ -162,4 +165,4 @@ public override void Reset()
}
}
}
}
}
29 changes: 15 additions & 14 deletions src/Lucene.Net.Tests/Index/Test2BSortedDocValues.cs
@@ -48,12 +48,13 @@ public virtual void TestFixedSorted()
((MockDirectoryWrapper)dir).Throttling = Throttling.NEVER;
}

IndexWriter w = new IndexWriter(dir, new IndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer(Random))
.SetMaxBufferedDocs(IndexWriterConfig.DISABLE_AUTO_FLUSH)
.SetRAMBufferSizeMB(256.0)
.SetMergeScheduler(new ConcurrentMergeScheduler())
.SetMergePolicy(NewLogMergePolicy(false, 10))
.SetOpenMode(OpenMode.CREATE));
IndexWriter w = new IndexWriter(dir,
new IndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer(Random))
.SetMaxBufferedDocs(IndexWriterConfig.DISABLE_AUTO_FLUSH)
.SetRAMBufferSizeMB(256.0)
.SetMergeScheduler(new ConcurrentMergeScheduler())
.SetMergePolicy(NewLogMergePolicy(false, 10))
.SetOpenMode(OpenMode.CREATE));

Document doc = new Document();
var bytes = new byte[2];
@@ -110,13 +111,13 @@ public virtual void Test2BOrds()
((MockDirectoryWrapper)dir).Throttling = Throttling.NEVER;
}

var config = new IndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer(Random))
.SetMaxBufferedDocs(IndexWriterConfig.DISABLE_AUTO_FLUSH)
.SetRAMBufferSizeMB(256.0)
.SetMergeScheduler(new ConcurrentMergeScheduler())
.SetMergePolicy(NewLogMergePolicy(false, 10))
.SetOpenMode(OpenMode.CREATE);
IndexWriter w = new IndexWriter(dir, config);
IndexWriter w = new IndexWriter(dir,
new IndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer(Random))
.SetMaxBufferedDocs(IndexWriterConfig.DISABLE_AUTO_FLUSH)
.SetRAMBufferSizeMB(256.0)
.SetMergeScheduler(new ConcurrentMergeScheduler())
.SetMergePolicy(NewLogMergePolicy(false, 10))
.SetOpenMode(OpenMode.CREATE));

Document doc = new Document();
var bytes = new byte[4];
@@ -169,4 +170,4 @@ public virtual void Test2BOrds()

// TODO: variable
}
}
}
31 changes: 16 additions & 15 deletions src/Lucene.Net.Tests/Index/Test2BTerms.cs
@@ -59,12 +59,12 @@ public class Test2BTerms : LuceneTestCase

private sealed class MyTokenStream : TokenStream
{
internal readonly int tokensPerDoc;
internal int tokenCount;
private readonly int tokensPerDoc;
private int tokenCount;
public readonly IList<BytesRef> savedTerms = new JCG.List<BytesRef>();
internal int nextSave;
internal long termCounter;
internal readonly Random random;
private int nextSave;
private long termCounter;
private readonly Random random;

public MyTokenStream(Random random, int tokensPerDoc)
: base(new MyAttributeFactory(AttributeFactory.DEFAULT_ATTRIBUTE_FACTORY))
@@ -140,7 +140,7 @@ public override object Clone()

private sealed class MyAttributeFactory : AttributeFactory
{
internal readonly AttributeFactory @delegate;
private readonly AttributeFactory @delegate;

public MyAttributeFactory(AttributeFactory @delegate)
{
@@ -172,7 +172,7 @@ public virtual void Test2BTerms_Mem()
throw RuntimeException.Create("this test cannot run with PreFlex codec");
}
Console.WriteLine("Starting Test2B");
long TERM_COUNT = ((long)int.MaxValue) + 100000000;
const long TERM_COUNT = ((long)int.MaxValue) + 100000000;

int TERMS_PER_DOC = TestUtil.NextInt32(Random, 100000, 1000000);

@@ -188,12 +188,13 @@ public virtual void Test2BTerms_Mem()

if (true)
{
IndexWriter w = new IndexWriter(dir, new IndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer(Random))
.SetMaxBufferedDocs(IndexWriterConfig.DISABLE_AUTO_FLUSH)
.SetRAMBufferSizeMB(256.0)
.SetMergeScheduler(new ConcurrentMergeScheduler())
.SetMergePolicy(NewLogMergePolicy(false, 10))
.SetOpenMode(OpenMode.CREATE));
IndexWriter w = new IndexWriter(dir,
new IndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer(Random))
.SetMaxBufferedDocs(IndexWriterConfig.DISABLE_AUTO_FLUSH)
.SetRAMBufferSizeMB(256.0)
.SetMergeScheduler(new ConcurrentMergeScheduler())
.SetMergePolicy(NewLogMergePolicy(false, 10))
.SetOpenMode(OpenMode.CREATE));

MergePolicy mp = w.Config.MergePolicy;
if (mp is LogByteSizeMergePolicy)
@@ -202,7 +203,7 @@ public virtual void Test2BTerms_Mem()
((LogByteSizeMergePolicy)mp).MaxMergeMB = 1024 * 1024 * 1024;
}

Documents.Document doc = new Documents.Document();
Document doc = new Document();
MyTokenStream ts = new MyTokenStream(Random, TERMS_PER_DOC);

FieldType customType = new FieldType(TextField.TYPE_NOT_STORED);
@@ -311,4 +312,4 @@ private void TestSavedTerms(IndexReader r, IList<BytesRef> terms)
Assert.IsFalse(failed);
}
}
}
}
16 changes: 8 additions & 8 deletions src/Lucene.Net.Tests/Index/Test4GBStoredFields.cs
@@ -55,13 +55,13 @@ public virtual void Test()
MockDirectoryWrapper dir = new MockDirectoryWrapper(Random, new MMapDirectory(CreateTempDir("4GBStoredFields")));
dir.Throttling = Throttling.NEVER;

var config = new IndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer(Random))
.SetMaxBufferedDocs(IndexWriterConfig.DISABLE_AUTO_FLUSH)
.SetRAMBufferSizeMB(256.0)
.SetMergeScheduler(new ConcurrentMergeScheduler())
.SetMergePolicy(NewLogMergePolicy(false, 10))
.SetOpenMode(OpenMode.CREATE);
IndexWriter w = new IndexWriter(dir, config);
IndexWriter w = new IndexWriter(dir,
new IndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer(Random))
.SetMaxBufferedDocs(IndexWriterConfig.DISABLE_AUTO_FLUSH)
.SetRAMBufferSizeMB(256.0)
.SetMergeScheduler(new ConcurrentMergeScheduler())
.SetMergePolicy(NewLogMergePolicy(false, 10))
.SetOpenMode(OpenMode.CREATE));

MergePolicy mp = w.Config.MergePolicy;
if (mp is LogByteSizeMergePolicy)
@@ -129,4 +129,4 @@ public virtual void Test()
dir.Dispose();
}
}
}
}