Skip to content

Commit

Permalink
ShingleFilter produces invalid queries
Browse files Browse the repository at this point in the history
  • Loading branch information
tohidemyname committed Jun 5, 2024
1 parent 38a7b53 commit 3dd6a9b
Show file tree
Hide file tree
Showing 2 changed files with 108 additions and 4 deletions.
13 changes: 11 additions & 2 deletions src/Lucene.Net.Analysis.Common/Analysis/Shingle/ShingleFilter.cs
Original file line number Diff line number Diff line change
Expand Up @@ -366,7 +366,16 @@ public override bool IncrementToken()
noShingleOutput = false;
}
offsetAtt.SetOffset(offsetAtt.StartOffset, nextToken.offsetAtt.EndOffset);
posLenAtt.PositionLength = builtGramSize;
// posLenAtt.PositionLength = builtGramSize;
if (outputUnigrams)
{
posLenAtt.PositionLength = builtGramSize;
}
else
{
// position length for this token is the number of positions created by shingles of smaller size.
posLenAtt.PositionLength = Math.Max(1, (builtGramSize - minShingleSize) + 1);
}
isOutputHere = true;
gramSize.Advance();
tokenAvailable = true;
Expand Down Expand Up @@ -685,4 +694,4 @@ public InputWindowToken(AttributeSource attSource)
}
}
}
}
}
Original file line number Diff line number Diff line change
@@ -1,4 +1,4 @@
// Lucene version compatibility level 4.8.1
// Lucene version compatibility level 4.8.1
using Lucene.Net.Analysis.Core;
using Lucene.Net.Analysis.TokenAttributes;
using NUnit.Framework;
Expand Down Expand Up @@ -408,6 +408,101 @@ public virtual void TestPositionIncrementGreaterThanNWithoutUnigrams()
this.shingleFilterTest(2, 3, TEST_TOKEN_POS_INCR_GREATER_THAN_N, TRI_GRAM_TOKENS_POS_INCR_GREATER_THAN_N_WITHOUT_UNIGRAMS, TRI_GRAM_POSITION_INCREMENTS_POS_INCR_GREATER_THAN_N_WITHOUT_UNIGRAMS, TRI_GRAM_TYPES_POS_INCR_GREATER_THAN_N_WITHOUT_UNIGRAMS, false);
}



[Test]
public virtual void testPositionLength()
{
    // Verifies the PositionLength attribute emitted by ShingleFilter when
    // unigrams are disabled: for each starting position, the shortest shingle
    // gets PositionLength 1 and each successively longer shingle gets
    // (builtGramSize - minShingleSize) + 1.

    // Case 1: min == max == 4 — exactly one shingle per position, so every
    // token has position increment 1 and position length 1.
    Analyzer a = Analyzer.NewAnonymous(createComponents: (fieldName, reader) =>
    {
        MockBytesAttributeFactory factory = new MockBytesAttributeFactory();
        Tokenizer tokenizer = new MockTokenizer(factory, reader, MockTokenizer.WHITESPACE, false, MockTokenizer.DEFAULT_MAX_TOKEN_LENGTH);
        ShingleFilter filter = new ShingleFilter(tokenizer, 4, 4);
        filter.SetOutputUnigrams(false);
        return new TokenStreamComponents(tokenizer, filter);
    });

    AssertTokenStreamContents(a.GetTokenStream("", "to be or not to be"),
        new String[] { "to be or not", "be or not to", "or not to be" },
        new int[] { 0, 3, 6 },
        new int[] { 12, 15, 18 },
        null,
        new int[] { 1, 1, 1 },
        new int[] { 1, 1, 1 },
        18,
        // offsets are correct but assertTokenStreamContents does not handle multiple terms with different offsets
        // finishing at the same position
        false);

    // Case 2: min=2, max=4 — up to three shingles per starting position
    // (lengths 2..4), with position lengths 1, 2, 3 respectively.
    a = Analyzer.NewAnonymous(createComponents: (fieldName, reader) =>
    {
        MockBytesAttributeFactory factory = new MockBytesAttributeFactory();
        Tokenizer tokenizer = new MockTokenizer(factory, reader, MockTokenizer.WHITESPACE, false, MockTokenizer.DEFAULT_MAX_TOKEN_LENGTH);
        ShingleFilter filter = new ShingleFilter(tokenizer, 2, 4);
        filter.SetOutputUnigrams(false);
        return new TokenStreamComponents(tokenizer, filter);
    });

    AssertTokenStreamContents(a.GetTokenStream("", "to be or not to be"),
        new String[] { "to be", "to be or", "to be or not", "be or", "be or not", "be or not to", "or not", "or not to",
            "or not to be", "not to", "not to be", "to be" },
        new int[] { 0, 0, 0, 3, 3, 3, 6, 6, 6, 9, 9, 13 },
        new int[] { 5, 8, 12, 8, 12, 15, 12, 15, 18, 15, 18, 18 },
        null,
        new int[] { 1, 0, 0, 1, 0, 0, 1, 0, 0, 1, 0, 1 },
        new int[] { 1, 2, 3, 1, 2, 3, 1, 2, 3, 1, 2, 1 },
        18,
        // offsets are correct but assertTokenStreamContents does not handle multiple terms with different offsets
        // finishing at the same position
        false);

    // Case 3: min=3, max=4 — 7 expected tokens, so all parallel arrays must
    // have exactly 7 elements (the original had a spurious 8th element in the
    // position-increment and position-length arrays).
    a = Analyzer.NewAnonymous(createComponents: (fieldName, reader) =>
    {
        MockBytesAttributeFactory factory = new MockBytesAttributeFactory();
        Tokenizer tokenizer = new MockTokenizer(factory, reader, MockTokenizer.WHITESPACE, false, MockTokenizer.DEFAULT_MAX_TOKEN_LENGTH);
        ShingleFilter filter = new ShingleFilter(tokenizer, 3, 4);
        filter.SetOutputUnigrams(false);
        return new TokenStreamComponents(tokenizer, filter);
    });

    AssertTokenStreamContents(a.GetTokenStream("", "to be or not to be"),
        new String[] { "to be or", "to be or not", "be or not", "be or not to", "or not to",
            "or not to be", "not to be" },
        new int[] { 0, 0, 3, 3, 6, 6, 9 },
        new int[] { 8, 12, 12, 15, 15, 18, 18 },
        null,
        new int[] { 1, 0, 1, 0, 1, 0, 1 },
        new int[] { 1, 2, 1, 2, 1, 2, 1 },
        18,
        // offsets are correct but assertTokenStreamContents does not handle multiple terms with different offsets
        // finishing at the same position
        false);

    // Case 4: min=3, max=5 — 9 expected tokens, so all parallel arrays must
    // have exactly 9 elements (the original had 10-element start-offset and
    // position-increment arrays).
    a = Analyzer.NewAnonymous(createComponents: (fieldName, reader) =>
    {
        MockBytesAttributeFactory factory = new MockBytesAttributeFactory();
        Tokenizer tokenizer = new MockTokenizer(factory, reader, MockTokenizer.WHITESPACE, false, MockTokenizer.DEFAULT_MAX_TOKEN_LENGTH);
        ShingleFilter filter = new ShingleFilter(tokenizer, 3, 5);
        filter.SetOutputUnigrams(false);
        return new TokenStreamComponents(tokenizer, filter);
    });

    AssertTokenStreamContents(a.GetTokenStream("", "to be or not to be"),
        new String[] { "to be or", "to be or not", "to be or not to", "be or not", "be or not to",
            "be or not to be", "or not to", "or not to be", "not to be" },
        new int[] { 0, 0, 0, 3, 3, 3, 6, 6, 9 },
        new int[] { 8, 12, 15, 12, 15, 18, 15, 18, 18 },
        null,
        new int[] { 1, 0, 0, 1, 0, 0, 1, 0, 1 },
        new int[] { 1, 2, 3, 1, 2, 3, 1, 2, 1 },
        18,
        // offsets are correct but assertTokenStreamContents does not handle multiple terms with different offsets
        // finishing at the same position
        false);
}

[Test]
public virtual void TestReset()
{
Expand Down Expand Up @@ -618,4 +713,4 @@ public virtual void TestTwoTrailingHolesTriShingleWithTokenFiller()
AssertTokenStreamContents(filter, new string[] { "purple", "purplewizard", "purplewizard", "wizard", "wizard", "wizard" }, new int[] { 0, 0, 0, 7, 7, 7 }, new int[] { 6, 13, 20, 13, 20, 20 }, new int[] { 1, 0, 0, 1, 0, 0 }, 20);
}
}
}
}

0 comments on commit 3dd6a9b

Please sign in to comment.