Imported Upstream version 3.6.0

Former-commit-id: da6be194a6b1221998fc28233f2503bd61dd9d14
Author: Jo Shields
Date: 2014-08-13 10:39:27 +01:00
Commit: a575963da9
50588 changed files with 8155799 additions and 0 deletions


@@ -0,0 +1,65 @@
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
using Lucene.Net.Spatial.BBox;
using Spatial4n.Core.Context;
using NUnit.Framework;
using Spatial4n.Core.Shapes;
namespace Lucene.Net.Contrib.Spatial.Test.BBox
{
public class TestBBoxStrategy : StrategyTestCase
{
[SetUp]
public override void SetUp()
{
base.SetUp();
this.ctx = SpatialContext.GEO;
this.strategy = new BBoxStrategy(ctx, "bbox");
}
protected override Shape convertShapeFromGetDocuments(Spatial4n.Core.Shapes.Shape shape)
{
return shape.GetBoundingBox();
}
[Test]
public void testBasicOperations()
{
getAddAndVerifyIndexedDocuments(DATA_SIMPLE_BBOX);
executeQueries(SpatialMatchConcern.EXACT, QTEST_Simple_Queries_BBox);
}
[Test]
public void testStatesBBox()
{
getAddAndVerifyIndexedDocuments(DATA_STATES_BBOX);
executeQueries(SpatialMatchConcern.FILTER, QTEST_States_IsWithin_BBox);
executeQueries(SpatialMatchConcern.FILTER, QTEST_States_Intersects_BBox);
}
[Test]
public void testCitiesIntersectsBBox()
{
getAddAndVerifyIndexedDocuments(DATA_WORLD_CITIES_POINTS);
executeQueries(SpatialMatchConcern.FILTER, QTEST_Cities_Intersects_BBox);
}
}
}


@@ -0,0 +1,257 @@
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
using System;
using System.Collections.Generic;
using System.IO;
using System.Linq;
using System.Text;
using Lucene.Net.Index;
using Lucene.Net.Search;
using Lucene.Net.Util;
namespace Lucene.Net.Contrib.Spatial.Test
{
/// <summary>
/// Utility class for asserting expected hits in tests.
///
/// Taken from apache.lucene.search
/// </summary>
public class CheckHits : LuceneTestCase
{
/*
* Asserts that the explanation value for every document matching a
* query corresponds with the true score. Optionally does "deep"
* testing of the explanation details.
*
* @see ExplanationAsserter
* @param query the query to test
* @param searcher the searcher to test the query against
* @param defaultFieldName used for displaying the query in assertion messages
* @param deep indicates whether a deep comparison of sub-Explanation details should be executed
*/
public static void checkExplanations(Query query,
String defaultFieldName,
IndexSearcher searcher,
bool deep = false)
{
searcher.Search(query,
new ExplanationAsserter
(query, defaultFieldName, searcher, deep));
}
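// A hypothetical usage sketch (query, field name and searcher below are assumed, not taken
// from this file): after indexing, a test could verify that Explain() agrees with the scores
// actually collected for a query, e.g.:
//   CheckHits.checkExplanations(new TermQuery(new Term("field", "value")), "field", searcher, deep: true);
// The ExplanationAsserter below visits every hit and compares Explanation.Value to Scorer.Score().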
public class ExplanationAsserter : Collector
{
/*
* Some explains methods calculate their values through a slightly
* different order of operations from the actual scoring method ...
* this allows for a small amount of relative variation
*/
public static float EXPLAIN_SCORE_TOLERANCE_DELTA = 0.001f;
/*
* In general we use a relative epsilon, but some tests do crazy things
* like boost documents with 0, creating tiny tiny scores where the
* relative difference is large but the absolute difference is tiny.
* we ensure that the epsilon is always at least this big.
*/
public static float EXPLAIN_SCORE_TOLERANCE_MINIMUM = 1e-6f;
private Query q;
private IndexSearcher s;
private String d;
private bool deep;
private Scorer scorer;
private int @base = 0;
/* Constructs an instance which does shallow tests on the Explanation */
public ExplanationAsserter(Query q, String defaultFieldName, IndexSearcher s)
: this(q, defaultFieldName, s, false)
{
}
public ExplanationAsserter(Query q, String defaultFieldName, IndexSearcher s, bool deep)
{
this.q = q;
this.s = s;
this.d = q.ToString(defaultFieldName);
this.deep = deep;
}
public override void SetScorer(Scorer scorer)
{
this.scorer = scorer;
}
public override void Collect(int doc)
{
Explanation exp = null;
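// Collect() receives segment-relative doc ids; adding the docBase recorded in
// SetNextReader() converts them to the top-level ids that IndexSearcher.Explain() expects.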
doc = doc + @base;
try
{
exp = s.Explain(q, doc);
}
catch (IOException e)
{
throw new Exception
("exception in hitcollector of [[" + d + "]] for #" + doc, e);
}
assertNotNull("Explanation of [[" + d + "]] for #" + doc + " is null", exp);
verifyExplanation(d, doc, scorer.Score(), deep, exp);
assertTrue("Explanation of [[" + d + "]] for #" + doc +
" does not indicate match: " + exp.ToString(), exp.IsMatch);
}
public override void SetNextReader(IndexReader reader, int docBase)
{
@base = docBase;
}
public override bool AcceptsDocsOutOfOrder
{
get { return true; }
}
/*
* Assert that an explanation has the expected score, and optionally that its
* sub-details max/sum/factor match to that score.
*
* @param q String representation of the query for assertion messages
* @param doc Document ID for assertion messages
* @param score Real score value of doc with query q
* @param deep indicates whether a deep comparison of sub-Explanation details should be executed
* @param expl The Explanation to match against score
*/
public static void verifyExplanation(String q,
int doc,
float score,
bool deep,
Explanation expl)
{
float value = expl.Value;
assertEquals(q + ": score(doc=" + doc + ")=" + score +
" != explanationScore=" + value + " Explanation: " + expl,
score, value, explainToleranceDelta(score, value));
if (!deep) return;
var detail = expl.GetDetails();
// TODO: can we improve this entire method? It's really geared to work only with TF/IDF
if (expl.Description.EndsWith("computed from:"))
{
return; // something more complicated.
}
if (detail != null)
{
if (detail.Length == 1)
{
// simple containment, unless it's a freq of: (which lets a query explain how the freq is calculated),
// just verify contained expl has same score
if (!expl.Description.EndsWith("with freq of:"))
verifyExplanation(q, doc, score, deep, detail[0]);
}
else
{
// explanation must either:
// - end with one of: "product of:", "sum of:", "max of:", or
// - have "max plus <x> times others" (where <x> is float).
float x = 0;
String descr = expl.Description.ToLowerInvariant();
bool productOf = descr.EndsWith("product of:");
bool sumOf = descr.EndsWith("sum of:");
bool maxOf = descr.EndsWith("max of:");
bool maxTimesOthers = false;
if (!(productOf || sumOf || maxOf))
{
// maybe 'max plus x times others'
int k1 = descr.IndexOf("max plus ");
if (k1 >= 0)
{
k1 += "max plus ".Length;
int k2 = descr.IndexOf(" ", k1);
try
{
// .NET Substring takes a length rather than an end index, so convert the end index k2.
x = float.Parse(descr.Substring(k1, k2 - k1).Trim());
if (descr.Substring(k2).Trim().Equals("times others of:"))
{
maxTimesOthers = true;
}
}
catch (FormatException)
{
// not a parsable number; leave maxTimesOthers false and let the assertion below fail
}
}
}
// TODO: this is a TERRIBLE assertion!!!!
assertTrue(
q + ": multi valued explanation description=\"" + descr
+ "\" must be 'max of plus x times others' or end with 'product of'"
+ " or 'sum of:' or 'max of:' - " + expl,
productOf || sumOf || maxOf || maxTimesOthers);
float sum = 0;
float product = 1;
float max = 0;
for (int i = 0; i < detail.Length; i++)
{
float dval = detail[i].Value;
verifyExplanation(q, doc, dval, deep, detail[i]);
product *= dval;
sum += dval;
max = Math.Max(max, dval);
}
float combined = 0;
if (productOf)
{
combined = product;
}
else if (sumOf)
{
combined = sum;
}
else if (maxOf)
{
combined = max;
}
else if (maxTimesOthers)
{
combined = max + x * (sum - max);
}
else
{
assertTrue("should never get here!", false);
}
assertEquals(q + ": actual subDetails combined==" + combined +
" != value=" + value + " Explanation: " + expl,
combined, value, explainToleranceDelta(combined, value));
}
}
}
/* returns a reasonable epsilon for comparing two floats,
* where minor differences are acceptable such as score vs. explain */
public static float explainToleranceDelta(float f1, float f2)
{
return Math.Max(EXPLAIN_SCORE_TOLERANCE_MINIMUM, Math.Max(Math.Abs(f1), Math.Abs(f2)) * EXPLAIN_SCORE_TOLERANCE_DELTA);
}
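// Worked example (illustrative only): for score=2.000f and explanationScore=2.001f the relative
// term is max(2.000, 2.001) * 0.001 ~ 0.002, which exceeds the 1e-6 floor, so the assertEquals
// above tolerates the 0.001 difference; for two near-zero scores the 1e-6 minimum keeps the
// comparison from demanding bit-for-bit equality.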
}
}
}


@@ -0,0 +1,126 @@
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
using System;
using System.Collections.Generic;
using Lucene.Net.Analysis;
using Lucene.Net.Documents;
using Lucene.Net.Index;
using Lucene.Net.Search;
using Lucene.Net.Spatial.Util;
using Lucene.Net.Store;
using Lucene.Net.Util;
using NUnit.Framework;
namespace Lucene.Net.Contrib.Spatial.Test.Compatibility
{
public class TermsFilterTest : LuceneTestCase
{
[Test]
public void testCachability()
{
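// The point of this test: TermsFilter must implement value-based Equals/GetHashCode over its
// term set, so a HashSet<Filter> used as a cache treats two filters with the same terms
// (including duplicates) as the same key, and a filter with an extra term as a different key.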
TermsFilter a = new TermsFilter();
a.AddTerm(new Term("field1", "a"));
a.AddTerm(new Term("field1", "b"));
HashSet<Filter> cachedFilters = new HashSet<Filter>();
cachedFilters.Add(a);
TermsFilter b = new TermsFilter();
b.AddTerm(new Term("field1", "a"));
b.AddTerm(new Term("field1", "b"));
Assert.True(cachedFilters.Contains(b), "Must be cached");
b.AddTerm(new Term("field1", "a")); //duplicate term
Assert.True(cachedFilters.Contains(b), "Must be cached");
b.AddTerm(new Term("field1", "c"));
Assert.False(cachedFilters.Contains(b), "Must not be cached");
}
[Test]
public void testMissingTerms()
{
String fieldName = "field1";
Directory rd = new RAMDirectory();
var w = new IndexWriter(rd, new KeywordAnalyzer(), IndexWriter.MaxFieldLength.UNLIMITED);
for (int i = 0; i < 100; i++)
{
var doc = new Document();
int term = i*10; //terms are multiples of 10
doc.Add(new Field(fieldName, "" + term, Field.Store.YES, Field.Index.ANALYZED));
w.AddDocument(doc);
}
IndexReader reader = w.GetReader();
w.Close();
TermsFilter tf = new TermsFilter();
tf.AddTerm(new Term(fieldName, "19"));
FixedBitSet bits = (FixedBitSet) tf.GetDocIdSet(reader);
Assert.AreEqual(0, bits.Cardinality(), "Must match nothing");
tf.AddTerm(new Term(fieldName, "20"));
bits = (FixedBitSet) tf.GetDocIdSet(reader);
Assert.AreEqual(1, bits.Cardinality(), "Must match 1");
tf.AddTerm(new Term(fieldName, "10"));
bits = (FixedBitSet) tf.GetDocIdSet(reader);
Assert.AreEqual(2, bits.Cardinality(), "Must match 2");
tf.AddTerm(new Term(fieldName, "00"));
bits = (FixedBitSet) tf.GetDocIdSet(reader);
Assert.AreEqual(2, bits.Cardinality(), "Must match 2");
reader.Close();
rd.Close();
}
// [Test]
// public void testMissingField()
// {
// String fieldName = "field1";
// Directory rd1 = new RAMDirectory();
// var w1 = new IndexWriter(rd1, new KeywordAnalyzer(), IndexWriter.MaxFieldLength.UNLIMITED);
// var doc = new Document();
// doc.Add(new Field(fieldName, "content1", Field.Store.YES, Field.Index.ANALYZED));
// w1.AddDocument(doc);
// IndexReader reader1 = w1.GetReader();
// w1.Close();
// fieldName = "field2";
// Directory rd2 = new RAMDirectory();
// var w2 = new IndexWriter(rd2, new KeywordAnalyzer(), IndexWriter.MaxFieldLength.UNLIMITED);
// doc = new Document();
// doc.Add(new Field(fieldName, "content2", Field.Store.YES, Field.Index.ANALYZED));
// w2.AddDocument(doc);
// IndexReader reader2 = w2.GetReader();
// w2.Close();
// TermsFilter tf = new TermsFilter();
// tf.AddTerm(new Term(fieldName, "content1"));
// MultiReader multi = new MultiReader(reader1, reader2);
// foreach (var reader in multi.Leaves())
// {
// FixedBitSet bits = (FixedBitSet) tf.GetDocIdSet(reader);
// Assert.True(bits.Cardinality() >= 0, "Must be >= 0");
// }
// multi.Close();
// reader1.Close();
// reader2.Close();
// rd1.Close();
// rd2.Close();
// }
}
}


@@ -0,0 +1,360 @@
/**
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
using System;
using System.Collections;
using Lucene.Net.Search;
using Lucene.Net.Spatial.Util;
using Lucene.Net.Util;
using NUnit.Framework;
namespace Lucene.Net.Contrib.Spatial.Test.Compatibility
{
public static class BitArrayExtensions
{
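/// <summary>
/// Mirrors java.util.BitSet.nextSetBit: returns the index of the first set bit at or after
/// <paramref name="fromIndex"/>, or -1 if no such bit exists. Added so BitArray can be
/// compared bit-for-bit against FixedBitSet in the tests below.
/// </summary>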
public static int NextSetBit(this BitArray arr, int fromIndex)
{
if (fromIndex >= arr.Length)
throw new ArgumentException("Invalid fromIndex", "fromIndex");
for (var i = fromIndex; i < arr.Length; i++)
{
if (arr[i]) return i;
}
return -1;
}
}
public class TestFixedBitSet : LuceneTestCase
{
private static readonly Random rnd = new Random((int)DateTimeOffset.Now.Ticks);
void doGet(BitArray a, FixedBitSet b)
{
int max = b.Length();
for (int i = 0; i < max; i++)
{
if (a.Get(i) != b.Get(i))
{
Assert.Fail("mismatch: BitSet=[" + i + "]=" + a.Get(i));
}
}
}
void doNextSetBit(BitArray a, FixedBitSet b)
{
int aa = -1, bb = -1;
do
{
aa = a.NextSetBit(aa + 1);
bb = bb < b.Length() - 1 ? b.NextSetBit(bb + 1) : -1;
Assert.AreEqual(aa, bb);
} while (aa >= 0);
}
void doPrevSetBit(BitArray a, FixedBitSet b)
{
int aa = a.Length + rnd.Next(100);
int bb = aa;
do
{
// aa = a.prevSetBit(aa-1);
aa--;
while ((aa >= 0) && (!a.Get(aa)))
{
aa--;
}
if (b.Length() == 0)
{
bb = -1;
}
else if (bb > b.Length() - 1)
{
bb = b.PrevSetBit(b.Length() - 1);
}
else if (bb < 1)
{
bb = -1;
}
else
{
bb = bb >= 1 ? b.PrevSetBit(bb - 1) : -1;
}
Assert.AreEqual(aa, bb);
} while (aa >= 0);
}
// test interleaving different FixedBitSetIterator.next()/skipTo()
//void doIterate(BitArray a, FixedBitSet b, int mode)
//{
// if (mode == 1) doIterate1(a, b);
// if (mode == 2) doIterate2(a, b);
//}
//void doIterate1(BitArray a, FixedBitSet b)
//{
// int aa = -1, bb = -1;
// DocIdSetIterator iterator = b.iterator();
// do
// {
// aa = a.NextSetBit(aa + 1);
// bb = (bb < b.Length() && random().nextBoolean()) ? iterator.NextDoc() : iterator.Advance(bb + 1);
// Assert.AreEqual(aa == -1 ? DocIdSetIterator.NO_MORE_DOCS : aa, bb);
// } while (aa >= 0);
//}
//void doIterate2(BitArray a, FixedBitSet b)
//{
// int aa = -1, bb = -1;
// DocIdSetIterator iterator = b.iterator();
// do
// {
// aa = a.NextSetBit(aa + 1);
// bb = random().nextBoolean() ? iterator.NextDoc() : iterator.Advance(bb + 1);
// Assert.AreEqual(aa == -1 ? DocIdSetIterator.NO_MORE_DOCS : aa, bb);
// } while (aa >= 0);
//}
//void doRandomSets(int maxSize, int iter, int mode)
//{
// BitArray a0 = null;
// FixedBitSet b0 = null;
// for (int i = 0; i < iter; i++)
// {
// int sz = _TestUtil.nextInt(random(), 2, maxSize);
// BitSet a = new BitSet(sz);
// FixedBitSet b = new FixedBitSet(sz);
// // test the various ways of setting bits
// if (sz > 0)
// {
// int nOper = random().nextInt(sz);
// for (int j = 0; j < nOper; j++)
// {
// int idx;
// idx = random().nextInt(sz);
// a.set(idx);
// b.set(idx);
// idx = random().nextInt(sz);
// a.clear(idx);
// b.clear(idx);
// idx = random().nextInt(sz);
// a.flip(idx);
// b.flip(idx, idx + 1);
// idx = random().nextInt(sz);
// a.flip(idx);
// b.flip(idx, idx + 1);
// boolean val2 = b.get(idx);
// boolean val = b.getAndSet(idx);
// assertTrue(val2 == val);
// assertTrue(b.get(idx));
// if (!val) b.clear(idx);
// assertTrue(b.get(idx) == val);
// }
// }
// // test that the various ways of accessing the bits are equivalent
// doGet(a, b);
// // test ranges, including possible extension
// int fromIndex, toIndex;
// fromIndex = random().nextInt(sz / 2);
// toIndex = fromIndex + random().nextInt(sz - fromIndex);
// BitSet aa = (BitSet)a.clone(); aa.flip(fromIndex, toIndex);
// FixedBitSet bb = b.clone(); bb.flip(fromIndex, toIndex);
// doIterate(aa, bb, mode); // a problem here is from flip or doIterate
// fromIndex = random().nextInt(sz / 2);
// toIndex = fromIndex + random().nextInt(sz - fromIndex);
// aa = (BitSet)a.clone(); aa.clear(fromIndex, toIndex);
// bb = b.clone(); bb.clear(fromIndex, toIndex);
// doNextSetBit(aa, bb); // a problem here is from clear() or nextSetBit
// doPrevSetBit(aa, bb);
// fromIndex = random().nextInt(sz / 2);
// toIndex = fromIndex + random().nextInt(sz - fromIndex);
// aa = (BitSet)a.clone(); aa.set(fromIndex, toIndex);
// bb = b.clone(); bb.set(fromIndex, toIndex);
// doNextSetBit(aa, bb); // a problem here is from set() or nextSetBit
// doPrevSetBit(aa, bb);
// if (b0 != null && b0.length() <= b.length())
// {
// assertEquals(a.cardinality(), b.cardinality());
// BitSet a_and = (BitSet)a.clone(); a_and.and(a0);
// BitSet a_or = (BitSet)a.clone(); a_or.or(a0);
// BitSet a_andn = (BitSet)a.clone(); a_andn.andNot(a0);
// FixedBitSet b_and = b.clone(); assertEquals(b, b_and); b_and.and(b0);
// FixedBitSet b_or = b.clone(); b_or.or(b0);
// FixedBitSet b_andn = b.clone(); b_andn.andNot(b0);
// assertEquals(a0.cardinality(), b0.cardinality());
// assertEquals(a_or.cardinality(), b_or.cardinality());
// doIterate(a_and, b_and, mode);
// doIterate(a_or, b_or, mode);
// doIterate(a_andn, b_andn, mode);
// assertEquals(a_and.cardinality(), b_and.cardinality());
// assertEquals(a_or.cardinality(), b_or.cardinality());
// assertEquals(a_andn.cardinality(), b_andn.cardinality());
// }
// a0 = a;
// b0 = b;
// }
//}
// large enough to flush obvious bugs, small enough to run in <.5 sec as part of a
// larger testsuite.
//public void testSmall()
//{
// doRandomSets(atLeast(1200), atLeast(1000), 1);
// doRandomSets(atLeast(1200), atLeast(1000), 2);
//}
// uncomment to run a bigger test (~2 minutes).
/*
public void testBig() {
doRandomSets(2000,200000, 1);
doRandomSets(2000,200000, 2);
}
*/
[Test]
public void testEquals()
{
// This test can't handle numBits==0:
int numBits = rnd.Next(2000) + 1;
FixedBitSet b1 = new FixedBitSet(numBits);
FixedBitSet b2 = new FixedBitSet(numBits);
Assert.IsTrue(b1.Equals(b2));
Assert.IsTrue(b2.Equals(b1));
for (int iter = 0; iter < 10 * rnd.Next(500); iter++)
{
int idx = rnd.Next(numBits);
if (!b1.Get(idx))
{
b1.Set(idx);
Assert.IsFalse(b1.Equals(b2));
Assert.IsFalse(b2.Equals(b1));
b2.Set(idx);
Assert.IsTrue(b1.Equals(b2));
Assert.IsTrue(b2.Equals(b1));
}
}
// try different type of object
Assert.IsFalse(b1.Equals(new Object()));
}
[Test]
public void testHashCodeEquals()
{
// This test can't handle numBits==0:
int numBits = rnd.Next(2000) + 1;
FixedBitSet b1 = new FixedBitSet(numBits);
FixedBitSet b2 = new FixedBitSet(numBits);
Assert.IsTrue(b1.Equals(b2));
Assert.IsTrue(b2.Equals(b1));
for (int iter = 0; iter < 10 * rnd.Next(500); iter++)
{
int idx = rnd.Next(numBits);
if (!b1.Get(idx))
{
b1.Set(idx);
Assert.IsFalse(b1.Equals(b2));
Assert.AreNotEqual(b1.GetHashCode(), b2.GetHashCode());
b2.Set(idx);
Assert.AreEqual(b1, b2);
Assert.AreEqual(b1.GetHashCode(), b2.GetHashCode());
}
}
}
[Test]
public void testSmallBitSets()
{
// Make sure size 0-10 bit sets are OK:
for (int numBits = 0; numBits < 10; numBits++)
{
FixedBitSet b1 = new FixedBitSet(numBits);
FixedBitSet b2 = new FixedBitSet(numBits);
Assert.IsTrue(b1.Equals(b2));
Assert.AreEqual(b1.GetHashCode(), b2.GetHashCode());
Assert.AreEqual(0, b1.Cardinality());
if (numBits > 0)
{
b1.Set(0, numBits);
Assert.AreEqual(numBits, b1.Cardinality());
//b1.Flip(0, numBits);
//Assert.AreEqual(0, b1.Cardinality());
}
}
}
private FixedBitSet makeFixedBitSet(int[] a, int numBits)
{
FixedBitSet bs = new FixedBitSet(numBits);
foreach (int e in a)
{
bs.Set(e);
}
return bs;
}
private BitArray makeBitSet(int[] a)
{
var bs = new BitArray(a.Length);
foreach (int e in a)
{
bs.Set(e, true);
}
return bs;
}
private void checkPrevSetBitArray(int[] a, int numBits)
{
FixedBitSet obs = makeFixedBitSet(a, numBits);
BitArray bs = makeBitSet(a);
doPrevSetBit(bs, obs);
}
[Test]
public void testPrevSetBit()
{
checkPrevSetBitArray(new int[] { }, 0);
checkPrevSetBitArray(new int[] { 0 }, 1);
checkPrevSetBitArray(new int[] { 0, 2 }, 3);
}
}
}


@@ -0,0 +1,179 @@
<?xml version="1.0" encoding="utf-8"?>
<!--
Licensed to the Apache Software Foundation (ASF) under one
or more contributor license agreements. See the NOTICE file
distributed with this work for additional information
regarding copyright ownership. The ASF licenses this file
to you under the Apache License, Version 2.0 (the
"License"); you may not use this file except in compliance
with the License. You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing,
software distributed under the License is distributed on an
"AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
KIND, either express or implied. See the License for the
specific language governing permissions and limitations
under the License.
-->
<Project ToolsVersion="4.0" DefaultTargets="Build" xmlns="http://schemas.microsoft.com/developer/msbuild/2003">
<PropertyGroup>
<Configuration Condition=" '$(Configuration)' == '' ">Debug</Configuration>
<Platform Condition=" '$(Platform)' == '' ">AnyCPU</Platform>
<ProductVersion>9.0.21022</ProductVersion>
<SchemaVersion>2.0</SchemaVersion>
<ProjectGuid>{19FC2A6B-4DE9-403F-8CEF-10850F57B96E}</ProjectGuid>
<AppDesignerFolder>Properties</AppDesignerFolder>
<RootNamespace>Lucene.Net.Contrib.Spatial.Test</RootNamespace>
<AssemblyName>Lucene.Net.Contrib.Spatial.Test</AssemblyName>
<FileAlignment>512</FileAlignment>
<FileUpgradeFlags>
</FileUpgradeFlags>
<OldToolsVersion>3.5</OldToolsVersion>
<UpgradeBackupLocation />
<PublishUrl>publish\</PublishUrl>
<Install>true</Install>
<InstallFrom>Disk</InstallFrom>
<UpdateEnabled>false</UpdateEnabled>
<UpdateMode>Foreground</UpdateMode>
<UpdateInterval>7</UpdateInterval>
<UpdateIntervalUnits>Days</UpdateIntervalUnits>
<UpdatePeriodically>false</UpdatePeriodically>
<UpdateRequired>false</UpdateRequired>
<MapFileExtensions>true</MapFileExtensions>
<ApplicationRevision>0</ApplicationRevision>
<ApplicationVersion>1.0.0.%2a</ApplicationVersion>
<IsWebBootstrapper>false</IsWebBootstrapper>
<UseApplicationTrust>false</UseApplicationTrust>
<BootstrapperEnabled>true</BootstrapperEnabled>
</PropertyGroup>
<PropertyGroup Condition=" '$(Configuration)|$(Platform)' == 'Debug|AnyCPU' ">
<TargetFrameworkVersion>v4.0</TargetFrameworkVersion>
<Framework>$(TargetFrameworkVersion.Replace("v", "NET").Replace(".", ""))</Framework>
<DebugSymbols>true</DebugSymbols>
<DebugType>full</DebugType>
<Optimize>false</Optimize>
<OutputPath>..\..\..\build\bin\contrib\Spatial\$(Configuration.Replace("35", ""))\$(Framework)\</OutputPath>
<DefineConstants>DEBUG;TRACE;$(Framework)</DefineConstants>
<ErrorReport>prompt</ErrorReport>
<WarningLevel>4</WarningLevel>
<NoWarn>618</NoWarn>
<OutputType>Library</OutputType>
</PropertyGroup>
<PropertyGroup Condition=" '$(Configuration)|$(Platform)' == 'Debug35|AnyCPU' ">
<TargetFrameworkVersion>v3.5</TargetFrameworkVersion>
<Framework>$(TargetFrameworkVersion.Replace("v", "NET").Replace(".", ""))</Framework>
<DebugSymbols>true</DebugSymbols>
<DebugType>full</DebugType>
<Optimize>false</Optimize>
<OutputPath>..\..\..\build\bin\contrib\Spatial\$(Configuration.Replace("35", ""))\$(Framework)\</OutputPath>
<DefineConstants>DEBUG;TRACE;$(Framework)</DefineConstants>
<ErrorReport>prompt</ErrorReport>
<WarningLevel>4</WarningLevel>
<NoWarn>618</NoWarn>
<OutputType>Library</OutputType>
</PropertyGroup>
<PropertyGroup Condition=" '$(Configuration)|$(Platform)' == 'Release|AnyCPU' ">
<TargetFrameworkVersion>v4.0</TargetFrameworkVersion>
<Framework>$(TargetFrameworkVersion.Replace("v", "NET").Replace(".", ""))</Framework>
<DebugType>pdbonly</DebugType>
<Optimize>true</Optimize>
<OutputPath>..\..\..\build\bin\contrib\Spatial\$(Configuration.Replace("35", ""))\$(Framework)\</OutputPath>
<DefineConstants>TRACE;$(Framework)</DefineConstants>
<ErrorReport>prompt</ErrorReport>
<WarningLevel>4</WarningLevel>
<DebugSymbols>true</DebugSymbols>
<OutputType>Library</OutputType>
</PropertyGroup>
<PropertyGroup Condition=" '$(Configuration)|$(Platform)' == 'Release35|AnyCPU' ">
<TargetFrameworkVersion>v3.5</TargetFrameworkVersion>
<Framework>$(TargetFrameworkVersion.Replace("v", "NET").Replace(".", ""))</Framework>
<DebugType>pdbonly</DebugType>
<Optimize>true</Optimize>
<OutputPath>..\..\..\build\bin\contrib\Spatial\$(Configuration.Replace("35", ""))\$(Framework)\</OutputPath>
<DefineConstants>TRACE;$(Framework)</DefineConstants>
<ErrorReport>prompt</ErrorReport>
<WarningLevel>4</WarningLevel>
<DebugSymbols>true</DebugSymbols>
<OutputType>Library</OutputType>
</PropertyGroup>
<ItemGroup>
<Reference Include="nunit.framework">
<HintPath>..\..\..\lib\NUnit.org\NUnit\2.5.9\bin\net-2.0\framework\nunit.framework.dll</HintPath>
</Reference>
<Reference Include="Spatial4n.Core.NTS, Version=0.3.0.0, Culture=neutral, PublicKeyToken=9f9456e1ca16d45e, processorArchitecture=MSIL">
<SpecificVersion>False</SpecificVersion>
<HintPath>..\..\..\lib\Spatial4n\$(Framework)\Spatial4n.Core.NTS.dll</HintPath>
</Reference>
<Reference Include="System" />
<Reference Condition="'$(Framework)' == 'NET35'" Include="System.Core" />
</ItemGroup>
<ItemGroup>
<Compile Include="..\..\core\Util\LuceneTestCase.cs">
<Link>LuceneTestCase.cs</Link>
</Compile>
<Compile Include="..\..\core\Util\Paths.cs">
<Link>Paths.cs</Link>
</Compile>
<Compile Include="BBox\TestBBoxStrategy.cs" />
<Compile Include="CheckHits.cs" />
<Compile Include="Compatibility\TermsFilterTest.cs" />
<Compile Include="DistanceStrategyTest.cs" />
<Compile Include="PortedSolr3Test.cs" />
<Compile Include="Prefix\TestRecursivePrefixTreeStrategy.cs" />
<Compile Include="Prefix\TestTermQueryPrefixGridStrategy.cs" />
<Compile Include="Prefix\Tree\SpatialPrefixTreeTest.cs" />
<Compile Include="Properties\AssemblyInfo.cs" />
<Compile Include="Queries\SpatialArgsParserTest.cs" />
<Compile Include="SpatialMatchConcern.cs" />
<Compile Include="SpatialTestCase.cs" />
<Compile Include="SpatialTestQuery.cs" />
<Compile Include="StrategyTestCase.cs" />
<Compile Include="TestTestFramework.cs" />
<Compile Include="Vector\TestTwoDoublesStrategy.cs" />
</ItemGroup>
<ItemGroup>
<BootstrapperPackage Include=".NETFramework,Version=v4.0">
<Visible>False</Visible>
<ProductName>Microsoft .NET Framework 4 %28x86 and x64%29</ProductName>
<Install>true</Install>
</BootstrapperPackage>
<BootstrapperPackage Include="Microsoft.Net.Client.3.5">
<Visible>False</Visible>
<ProductName>.NET Framework 3.5 SP1 Client Profile</ProductName>
<Install>false</Install>
</BootstrapperPackage>
<BootstrapperPackage Include="Microsoft.Net.Framework.3.5.SP1">
<Visible>False</Visible>
<ProductName>.NET Framework 3.5 SP1</ProductName>
<Install>false</Install>
</BootstrapperPackage>
<BootstrapperPackage Include="Microsoft.Windows.Installer.3.1">
<Visible>False</Visible>
<ProductName>Windows Installer 3.1</ProductName>
<Install>true</Install>
</BootstrapperPackage>
</ItemGroup>
<ItemGroup>
<ProjectReference Include="..\..\..\src\contrib\Spatial\Contrib.Spatial.NTS.csproj">
<Project>{02d030d0-c7b5-4561-8bdd-41408b2e2f41}</Project>
<Name>Contrib.Spatial.NTS</Name>
</ProjectReference>
<ProjectReference Include="..\..\..\src\core\Lucene.Net.csproj">
<Project>{5D4AD9BE-1FFB-41AB-9943-25737971BF57}</Project>
<Name>Lucene.Net</Name>
</ProjectReference>
</ItemGroup>
<ItemGroup />
<Import Project="$(MSBuildToolsPath)\Microsoft.CSharp.targets" />
<!-- To modify your build process, add your task inside one of the targets below and uncomment it.
Other similar extension points exist, see Microsoft.Common.targets.
<Target Name="BeforeBuild">
</Target>
<Target Name="AfterBuild">
</Target>
-->
</Project>


@@ -0,0 +1,131 @@
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
using System;
using System.Collections.Generic;
using System.Linq;
using System.Text;
using Lucene.Net.Documents;
using Lucene.Net.Spatial;
using Lucene.Net.Spatial.BBox;
using Lucene.Net.Spatial.Prefix;
using Lucene.Net.Spatial.Prefix.Tree;
using Lucene.Net.Spatial.Vector;
using NUnit.Framework;
using Spatial4n.Core.Context;
using Spatial4n.Core.Shapes;
namespace Lucene.Net.Contrib.Spatial.Test
{
public class DistanceStrategyTest : StrategyTestCase
{
public class TestValuesProvider
{
public IEnumerable<Param> ParamsProvider()
{
var ctorArgs = new List<Param>();
SpatialContext ctx = SpatialContext.GEO;
SpatialPrefixTree grid;
SpatialStrategy strategy;
grid = new QuadPrefixTree(ctx, 25);
strategy = new RecursivePrefixTreeStrategy(grid, "recursive_quad");
ctorArgs.Add(new Param(strategy));
grid = new GeohashPrefixTree(ctx, 12);
strategy = new TermQueryPrefixTreeStrategy(grid, "termquery_geohash");
ctorArgs.Add(new Param(strategy));
strategy = new PointVectorStrategy(ctx, "pointvector");
ctorArgs.Add(new Param(strategy));
strategy = new BBoxStrategy(ctx, "bbox");
ctorArgs.Add(new Param(strategy));
return ctorArgs;
}
}
public class Param
{
public readonly SpatialStrategy strategy;
public Param(SpatialStrategy strategy) { this.strategy = strategy; }
public override String ToString()
{
return strategy.GetFieldName();
}
}
// private String fieldName;
public void Init(Param param)
{
SpatialStrategy strategy = param.strategy;
this.ctx = strategy.GetSpatialContext();
this.strategy = strategy;
}
[Test]
public void testDistanceOrder([ValueSource(typeof(TestValuesProvider), "ParamsProvider")] Param p)
{
Init(p);
adoc("100", ctx.MakePoint(2, 1));
adoc("101", ctx.MakePoint(-1, 4));
adoc("103", (Shape)null);//test score for nothing
commit();
//FYI distances are in docid order
checkDistValueSource("3,4", 2.8274937f, 5.0898066f, 180f);
checkDistValueSource("4,0", 3.6043684f, 0.9975641f, 180f);
}
[Test]
public void testRecipScore([ValueSource(typeof(TestValuesProvider), "ParamsProvider")] Param p)
{
Init(p);
Point p100 = ctx.MakePoint(2, 1);
adoc("100", p100);
Point p101 = ctx.MakePoint(-1, 4);
adoc("101", p101);
adoc("103", (Shape)null); //test score for nothing
commit();
double dist = ctx.GetDistCalc().Distance(p100, p101);
Shape queryShape = ctx.MakeCircle(2.01, 0.99, dist);
checkValueSource(strategy.MakeRecipDistanceValueSource(queryShape),
new float[] { 1.00f, 0.10f, 0f }, 0.09f);
}
protected override Document newDoc(String id, Shape shape)
{
//called by adoc(). Make compatible with BBoxStrategy.
if (shape != null && strategy is BBoxStrategy)
shape = ctx.MakeRectangle(shape.GetCenter(), shape.GetCenter());
return base.newDoc(id, shape);
}
void checkDistValueSource(String ptStr, params float[] distances)
{
Point pt = (Point)ctx.ReadShape(ptStr);
checkValueSource(strategy.MakeDistanceValueSource(pt), distances, 1.0e-4f);
}
}
}


@@ -0,0 +1,194 @@
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
using System;
using System.Collections.Generic;
using Lucene.Net.Search;
using Lucene.Net.Spatial;
using Lucene.Net.Spatial.Prefix;
using Lucene.Net.Spatial.Prefix.Tree;
using Lucene.Net.Spatial.Queries;
using Lucene.Net.Spatial.Vector;
using NUnit.Framework;
using Spatial4n.Core.Context;
using Spatial4n.Core.Distance;
using Spatial4n.Core.Io;
using Spatial4n.Core.Shapes;
namespace Lucene.Net.Contrib.Spatial.Test
{
/*
* Based off of Solr 3's SpatialFilterTest.
*/
public class PortedSolr3Test : StrategyTestCase
{
public class TestValuesProvider
{
public List<Param> dataList = new List<Param>();
public IEnumerable<Param> ParamsProvider()
{
var ctorArgs = new List<Param>();
SpatialContext ctx = SpatialContext.GEO;
SpatialPrefixTree grid = new GeohashPrefixTree(ctx, 12);
SpatialStrategy strategy = new RecursivePrefixTreeStrategy(grid, "recursive_geohash");
ctorArgs.Add(new Param(strategy));
grid = new QuadPrefixTree(ctx, 25);
strategy = new RecursivePrefixTreeStrategy(grid, "recursive_quad");
ctorArgs.Add(new Param(strategy));
grid = new GeohashPrefixTree(ctx, 12);
strategy = new TermQueryPrefixTreeStrategy(grid, "termquery_geohash");
ctorArgs.Add(new Param(strategy));
strategy = new PointVectorStrategy(ctx, "pointvector");
ctorArgs.Add(new Param(strategy));
return ctorArgs;
}
}
public class Param
{
public readonly SpatialStrategy strategy;
public Param(SpatialStrategy strategy) { this.strategy = strategy; }
public override String ToString()
{
return strategy.GetFieldName();
}
}
private Random random;
private void setupDocs()
{
random = NewRandom();
deleteAll();
adoc("1", "32.7693246, -79.9289094");
adoc("2", "33.7693246, -80.9289094");
adoc("3", "-32.7693246, 50.9289094");
adoc("4", "-50.7693246, 60.9289094");
adoc("5", "0,0");
adoc("6", "0.1,0.1");
adoc("7", "-0.1,-0.1");
adoc("8", "0,179.9");
adoc("9", "0,-179.9");
adoc("10", "89.9,50");
adoc("11", "89.9,-130");
adoc("12", "-89.9,50");
adoc("13", "-89.9,-130");
commit();
}
[Test, Sequential]
public void testIntersections([ValueSourceAttribute(typeof (TestValuesProvider), "ParamsProvider")] Param p)
{
this.ctx = p.strategy.GetSpatialContext();
this.strategy = p.strategy;
setupDocs();
//Try some edge cases
//NOTE: 2nd arg is distance in kilometers
checkHitsCircle("1,1", 175, 3, 5, 6, 7);
checkHitsCircle("0,179.8", 200, 2, 8, 9);
checkHitsCircle("89.8, 50", 200, 2, 10, 11); //this goes over the north pole
checkHitsCircle("-89.8, 50", 200, 2, 12, 13); //this goes over the south pole
//try some normal cases
checkHitsCircle("33.0,-80.0", 300, 2);
//large distance
checkHitsCircle("1,1", 5000, 3, 5, 6, 7);
//Because we are generating a box based on the west/east longitudes and the south/north latitudes, which then
//translates to a range query, which is slightly more inclusive. Thus, even though (0,0) is 15.725 km away,
//it will be included because of the box calculation.
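//Illustrative numbers (assuming ~111.2 km per degree near the equator): a 15 km "radius" becomes a
//~0.135 degree half-width, so the box around (0.1, 0.1) spans roughly [-0.035, 0.235] in both lat and
//lon and therefore still covers (0,0) even though the true distance is ~15.7 km.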
checkHitsBBox("0.1,0.1", 15, 2, 5, 6);
//try some more
deleteAll();
adoc("14", "0,5");
adoc("15", "0,15");
//3000KM from 0,0, see http://www.movable-type.co.uk/scripts/latlong.html
adoc("16", "18.71111,19.79750");
adoc("17", "44.043900,-95.436643");
commit();
checkHitsCircle("0,0", 1000, 1, 14);
checkHitsCircle("0,0", 2000, 2, 14, 15);
checkHitsBBox("0,0", 3000, 3, 14, 15, 16);
checkHitsCircle("0,0", 3001, 3, 14, 15, 16);
checkHitsCircle("0,0", 3000.1, 3, 14, 15, 16);
//really fine grained distance and reflects some of the vagaries of how we are calculating the box
checkHitsCircle("43.517030,-96.789603", 109, 0);
// falls outside of the real distance, but inside the bounding box
checkHitsCircle("43.517030,-96.789603", 110, 0);
checkHitsBBox("43.517030,-96.789603", 110, 1, 17);
}
//---- these are similar to Solr test methods
private void checkHitsCircle(String ptStr, double distKM, int assertNumFound, params int[] assertIds)
{
_checkHits(false, ptStr, distKM, assertNumFound, assertIds);
}
private void checkHitsBBox(String ptStr, double distKM, int assertNumFound, params int[] assertIds)
{
_checkHits(true, ptStr, distKM, assertNumFound, assertIds);
}
private void _checkHits(bool bbox, String ptStr, double distKM, int assertNumFound, params int[] assertIds)
{
SpatialOperation op = SpatialOperation.Intersects;
Point pt = (Point) new ShapeReadWriter(ctx).ReadShape(ptStr);
double distDEG = DistanceUtils.Dist2Degrees(distKM, DistanceUtils.EARTH_MEAN_RADIUS_KM);
Shape shape = ctx.MakeCircle(pt, distDEG);
if (bbox)
shape = shape.GetBoundingBox();
SpatialArgs args = new SpatialArgs(op, shape);
//args.setDistPrecision(0.025);
Query query;
if (random.NextDouble() > 0.5)
{
query = strategy.MakeQuery(args);
}
else
{
query = new FilteredQuery(new MatchAllDocsQuery(), strategy.MakeFilter(args));
}
SearchResults results = executeQuery(query, 100);
assertEquals("" + shape, assertNumFound, results.numFound);
if (assertIds != null)
{
var resultIds = new HashSet<int>();
foreach (var result in results.results)
{
resultIds.Add(int.Parse(result.document.Get("id")));
}
foreach (int assertId in assertIds)
{
assertTrue("has " + assertId, resultIds.Contains(assertId));
}
}
}
}
}


@@ -0,0 +1,211 @@
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
using System;
using System.Collections.Generic;
using Lucene.Net.Documents;
using Lucene.Net.Spatial.Prefix;
using Lucene.Net.Spatial.Prefix.Tree;
using Lucene.Net.Spatial.Queries;
using NUnit.Framework;
using Spatial4n.Core.Context;
using Spatial4n.Core.Distance;
using Spatial4n.Core.Shapes;
using Spatial4n.Core.Shapes.Impl;
using Spatial4n.Core.Util;
namespace Lucene.Net.Contrib.Spatial.Test.Prefix
{
public class TestRecursivePrefixTreeStrategy : StrategyTestCase
{
private int maxLength;
//Tests should call this first.
private void init(int maxLength)
{
this.maxLength = maxLength;
this.ctx = SpatialContext.GEO;
var grid = new GeohashPrefixTree(ctx, maxLength);
this.strategy = new RecursivePrefixTreeStrategy(grid, GetType().Name);
}
[Test]
public void testFilterWithVariableScanLevel()
{
init(GeohashPrefixTree.GetMaxLevelsPossible());
getAddAndVerifyIndexedDocuments(DATA_WORLD_CITIES_POINTS);
//execute queries for each prefix grid scan level
for (int i = 0; i <= maxLength; i++)
{
((RecursivePrefixTreeStrategy) strategy).SetPrefixGridScanLevel(i);
executeQueries(SpatialMatchConcern.FILTER, QTEST_Cities_Intersects_BBox);
}
}
[Test]
public void testOneMeterPrecision()
{
init(GeohashPrefixTree.GetMaxLevelsPossible());
GeohashPrefixTree grid = (GeohashPrefixTree) ((RecursivePrefixTreeStrategy) strategy).GetGrid();
//DWS: I know this to be true. 11 is needed for one meter
double degrees = DistanceUtils.Dist2Degrees(0.001, DistanceUtils.EARTH_MEAN_RADIUS_KM);
assertEquals(11, grid.GetLevelForDistance(degrees));
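//Rough geohash cell sizes (well-known approximations, assumed here rather than asserted): a length-10
//cell is about 1.2 m x 0.6 m while a length-11 cell is about 0.15 m x 0.15 m, so 11 is the first level
//finer than one meter, consistent with the assertion above.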
}
[Test]
public void testPrecision()
{
init(GeohashPrefixTree.GetMaxLevelsPossible());
Point iPt = ctx.MakePoint(2.8028712999999925, 48.3708044); //lon, lat
addDocument(newDoc("iPt", iPt));
commit();
Point qPt = ctx.MakePoint(2.4632387000000335, 48.6003516);
double KM2DEG = DistanceUtils.Dist2Degrees(1, DistanceUtils.EARTH_MEAN_RADIUS_KM);
double DEG2KM = 1/KM2DEG;
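//With Earth's mean radius of ~6371 km, KM2DEG works out to roughly 0.008993 degrees per km
//(about 111.2 km per degree), so DEG2KM is ~111.2; the distances below rely on that conversion.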
const double DIST = 35.75; //35.7499...
assertEquals(DIST, ctx.GetDistCalc().Distance(iPt, qPt)*DEG2KM, 0.001);
//distErrPct will affect the query shape precision. The indexed precision
// was set to nearly zilch via init(GeohashPrefixTree.getMaxLevelsPossible());
const double distErrPct = 0.025; //the suggested default, by the way
const double distMult = 1 + distErrPct;
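//Sanity arithmetic for the assertions below: distMult = 1.025, so 35.74 * 1.025 ~ 36.63 >= 35.75,
//while 34 * 1.025 ~ 34.85 < 35.75; the query radii are chosen to straddle the true distance.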
assertTrue(35.74*distMult >= DIST);
checkHits(q(qPt, 35.74*KM2DEG, distErrPct), 1, null);
assertTrue(30*distMult < DIST);
checkHits(q(qPt, 30*KM2DEG, distErrPct), 0, null);
assertTrue(33*distMult < DIST);
checkHits(q(qPt, 33*KM2DEG, distErrPct), 0, null);
assertTrue(34*distMult < DIST);
checkHits(q(qPt, 34*KM2DEG, distErrPct), 0, null);
}
[Test]
public void geohashRecursiveRandom()
{
init(12);
var random = NewRandom();
//1. Iterate test with the cluster at some worldly point of interest
var clusterCenters = new Point[] {ctx.MakePoint(-180, 0), ctx.MakePoint(0, 90), ctx.MakePoint(0, -90)};
foreach (var clusterCenter in clusterCenters)
{
//2. Iterate on size of cluster (a really small one and a large one)
String hashCenter = GeohashUtils.EncodeLatLon(clusterCenter.GetY(), clusterCenter.GetX(), maxLength);
//calculate the number of degrees in the smallest grid box size (use for both lat & lon)
String smallBox = hashCenter.Substring(0, hashCenter.Length - 1); //chop off leaf precision
Rectangle clusterDims = GeohashUtils.DecodeBoundary(smallBox, ctx);
double smallRadius = Math.Max(clusterDims.GetMaxX() - clusterDims.GetMinX(),
clusterDims.GetMaxY() - clusterDims.GetMinY());
Assert.IsTrue(smallRadius < 1);
const double largeRadius = 20d; //good large size; don't use >=45 for this test code to work
double[] radiusDegs = {largeRadius, smallRadius};
foreach (double radiusDeg in radiusDegs)
{
//3. Index random points in this cluster box
deleteAll();
var points = new List<Point>();
for (int i = 0; i < 20; i++)
{
//Note that this will not result in randomly distributed points in the
// circle, they will be concentrated towards the center a little. But
// it's good enough.
Point pt = ctx.GetDistCalc().PointOnBearing(clusterCenter,
random.NextDouble()*radiusDeg, random.Next()*360,
ctx, null);
pt = alignGeohash(pt);
points.Add(pt);
addDocument(newDoc("" + i, pt));
}
commit();
//4. Use some query centers. Each is twice the cluster's radius away.
for (int ri = 0; ri < 4; ri++)
{
Point queryCenter = ctx.GetDistCalc().PointOnBearing(clusterCenter,
radiusDeg*2, random.Next(360), ctx, null);
queryCenter = alignGeohash(queryCenter);
//4.1 Query a small box getting nothing
checkHits(q(queryCenter, radiusDeg - smallRadius/2), 0, null);
//4.2 Query a large box enclosing the cluster, getting everything
checkHits(q(queryCenter, radiusDeg*3 + smallRadius/2), points.Count, null);
//4.3 Query a medium box getting some (calculate the correct solution and verify)
double queryDist = radiusDeg*2;
//Find matching points. Put into int[] of doc ids which is the same thing as the index into points list.
int[] ids = new int[points.Count];
int ids_sz = 0;
for (int i = 0; i < points.Count; i++)
{
Point point = points[i];
if (ctx.GetDistCalc().Distance(queryCenter, point) <= queryDist)
ids[ids_sz++] = i;
}
var ids_new = new int[ids_sz]; // will pad with 0's if larger
Array.Copy(ids, ids_new, ids_sz);
ids = ids_new;
//assert ids_sz > 0 (can't because randomness keeps us from being able to)
checkHits(q(queryCenter, queryDist), ids.Length, ids);
}
} //for radiusDeg
} //for clusterCenter
}
private SpatialArgs q(Point pt, double dist, double distErrPct = 0.0)
{
Shape shape = ctx.MakeCircle(pt, dist);
var args = new SpatialArgs(SpatialOperation.Intersects, shape);
args.DistErrPct = distErrPct;
return args;
}
private void checkHits(SpatialArgs args, int assertNumFound, int[] assertIds)
{
SearchResults got = executeQuery(strategy.MakeQuery(args), 100);
assertEquals("" + args, assertNumFound, got.numFound);
if (assertIds != null)
{
var gotIds = new HashSet<int>();
foreach (SearchResult result in got.results)
{
gotIds.Add(int.Parse(result.document.Get("id")));
}
foreach (int assertId in assertIds)
{
Assert.True(gotIds.Contains(assertId), "has " + assertId);
}
}
}
/* NGeohash round-trip for given precision. */
private Point alignGeohash(Point p)
{
return GeohashUtils.Decode(GeohashUtils.EncodeLatLon(p.GetY(), p.GetX(), maxLength), ctx);
}
}
}


@@ -0,0 +1,63 @@
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
using System.Collections.Generic;
using Lucene.Net.Documents;
using Lucene.Net.Spatial.Prefix;
using Lucene.Net.Spatial.Prefix.Tree;
using Lucene.Net.Spatial.Queries;
using NUnit.Framework;
using Spatial4n.Core.Context;
using Spatial4n.Core.Shapes;
using Spatial4n.Core.Shapes.Impl;
namespace Lucene.Net.Contrib.Spatial.Test.Prefix
{
public class TestTermQueryPrefixGridStrategy : SpatialTestCase
{
[Test]
public void testNGramPrefixGridLosAngeles()
{
SpatialContext ctx = SpatialContext.GEO;
TermQueryPrefixTreeStrategy prefixGridStrategy = new TermQueryPrefixTreeStrategy(new QuadPrefixTree(ctx), "geo");
Shape point = ctx.MakePoint(-118.243680, 34.052230);
Document losAngeles = new Document();
losAngeles.Add(new Field("name", "Los Angeles", Field.Store.YES, Field.Index.NOT_ANALYZED_NO_NORMS));
foreach (var indexableField in prefixGridStrategy.CreateIndexableFields(point))
{
losAngeles.Add(indexableField);
}
losAngeles.Add(new Field(prefixGridStrategy.GetFieldName(), ctx.ToString(point), Field.Store.YES, Field.Index.NO));
addDocumentsAndCommit(new List<Document> { losAngeles });
// This won't work with simple spatial context...
SpatialArgsParser spatialArgsParser = new SpatialArgsParser();
// TODO... use a non polygon query
// SpatialArgs spatialArgs = spatialArgsParser.parse(
// "Intersects(POLYGON((-127.00390625 39.8125,-112.765625 39.98828125,-111.53515625 31.375,-125.94921875 30.14453125,-127.00390625 39.8125)))",
// new SimpleSpatialContext());
// Query query = prefixGridStrategy.makeQuery(spatialArgs, fieldInfo);
// SearchResults searchResults = executeQuery(query, 1);
// assertEquals(1, searchResults.numFound);
}
}
}


@@ -0,0 +1,63 @@
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
using Lucene.Net.Spatial.Prefix.Tree;
using Lucene.Net.Util;
using NUnit.Framework;
using Spatial4n.Core.Context;
using Spatial4n.Core.Shapes;
namespace Lucene.Net.Contrib.Spatial.Test.Prefix.Tree
{
public class SpatialPrefixTreeTest : LuceneTestCase
{
//TODO plug in others and test them
private SpatialContext ctx;
private SpatialPrefixTree trie;
[SetUp]
public override void SetUp()
{
base.SetUp();
ctx = SpatialContext.GEO;
trie = new GeohashPrefixTree(ctx, 4);
}
[Test]
public void testNodeTraverse()
{
Node prevN = null;
Node n = trie.GetWorldNode();
Assert.AreEqual(0, n.GetLevel());
Assert.AreEqual(ctx.GetWorldBounds(), n.GetShape());
while (n.GetLevel() < trie.GetMaxLevels())
{
prevN = n;
var it = n.GetSubCells().GetEnumerator();
it.MoveNext();
n = it.Current; //TODO random which one?
Assert.AreEqual(prevN.GetLevel() + 1, n.GetLevel());
Rectangle prevNShape = (Rectangle) prevN.GetShape();
Shape s = n.GetShape();
Rectangle sbox = s.GetBoundingBox();
Assert.IsTrue(prevNShape.GetWidth() > sbox.GetWidth());
Assert.IsTrue(prevNShape.GetHeight() > sbox.GetHeight());
}
}
}
}


@@ -0,0 +1,59 @@
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
using System.Reflection;
using System.Runtime.InteropServices;
// General Information about an assembly is controlled through the following
// set of attributes. Change these attribute values to modify the information
// associated with an assembly.
[assembly: AssemblyTitle("Lucene.Net.Contrib.Spatial.Test")]
[assembly: AssemblyDescription("The Apache Software Foundation Lucene.Net, a full-text search engine library")]
[assembly: AssemblyConfiguration("")]
[assembly: AssemblyCompany("The Apache Software Foundation")]
[assembly: AssemblyProduct("Lucene.Net.Contrib.Spatial.Test")]
[assembly: AssemblyCopyright("Copyright 2009 - 2011 The Apache Software Foundation")]
[assembly: AssemblyTrademark("Copyright 2009 - 2011 The Apache Software Foundation")]
[assembly: AssemblyCulture("")]
// Setting ComVisible to false makes the types in this assembly not visible
// to COM components. If you need to access a type in this assembly from
// COM, set the ComVisible attribute to true on that type.
[assembly: ComVisible(false)]
// The following GUID is for the ID of the typelib if this project is exposed to COM
[assembly: Guid("5d64af8f-cf79-484d-9fc4-57da1b6c49fc")]
// Version information for an assembly consists of the following four values:
//
// Major Version
// Minor Version
// Build Number
// Revision
//
// You can specify all the values or you can default the Build and Revision Numbers
// by using the '*' as shown below:
// [assembly: AssemblyVersion("1.0.*")]
[assembly: AssemblyInformationalVersionAttribute("3.0.3")]
[assembly: AssemblyVersion("3.0.3")]
[assembly: AssemblyFileVersion("3.0.3")]
[assembly: AssemblyDelaySign(false)]
[assembly: AssemblyKeyFile("")]
[assembly: AssemblyKeyName("")]


@@ -0,0 +1,71 @@
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
using System;
using Lucene.Net.Spatial.Queries;
using NUnit.Framework;
using Spatial4n.Core.Context;
using Spatial4n.Core.Shapes;
namespace Lucene.Net.Contrib.Spatial.Test.Queries
{
public class SpatialArgsParserTest
{
private readonly SpatialContext ctx = SpatialContext.GEO;
//The args parser is only dependent on the ctx for IO so I don't care to test
// with other implementations.
[Test]
public void TestArgParser()
{
SpatialArgsParser parser = new SpatialArgsParser();
String arg = SpatialOperation.IsWithin + "(-10 -20 10 20)";
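// Argument format inferred from the assertions below: "<operation>(<minX> <minY> <maxX> <maxY>)",
// i.e. a rectangle spanning x in [-10, 10] and y in [-20, 20].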
SpatialArgs outValue = parser.Parse(arg, ctx);
Assert.AreEqual(SpatialOperation.IsWithin, outValue.Operation);
Rectangle bounds = (Rectangle)outValue.Shape;
Assert.AreEqual(-10.0, bounds.GetMinX(), 0D);
Assert.AreEqual(10.0, bounds.GetMaxX(), 0D);
// Disjoint should not be scored
arg = SpatialOperation.IsDisjointTo + " (-10 10 -20 20)";
outValue = parser.Parse(arg, ctx);
Assert.AreEqual(SpatialOperation.IsDisjointTo, outValue.Operation);
try
{
parser.Parse(SpatialOperation.IsDisjointTo + "[ ]", ctx);
Assert.True(false, "spatial operations need args");
}
catch (Exception)
{
//expected
}
try
{
parser.Parse("XXXX(-10 10 -20 20)", ctx);
Assert.True(false, "unknown operation!");
}
catch (Exception)
{
//expected
}
}
}
}


@@ -0,0 +1,35 @@
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
namespace Lucene.Net.Contrib.Spatial.Test
{
public class SpatialMatchConcern
{
public readonly bool orderIsImportant;
public readonly bool resultsAreSuperset; // if the strategy can not give exact answers, but used to limit results
private SpatialMatchConcern(bool order, bool superset)
{
this.orderIsImportant = order;
this.resultsAreSuperset = superset;
}
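// Reading the constructor arguments for the instances below: EXACT requires the exact result set
// in order; FILTER requires the exact set but ignores order; SUPERSET allows the strategy to
// return extra documents that a later filtering step would remove.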
public static SpatialMatchConcern EXACT = new SpatialMatchConcern(true, false);
public static SpatialMatchConcern FILTER = new SpatialMatchConcern(false, false);
public static SpatialMatchConcern SUPERSET = new SpatialMatchConcern(false, true);
}
}


@@ -0,0 +1,197 @@
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
using System;
using System.Collections.Generic;
using System.IO;
using System.Text;
using Lucene.Net.Analysis;
using Lucene.Net.Documents;
using Lucene.Net.Index;
using Lucene.Net.Search;
using Lucene.Net.Spatial.Util;
using Lucene.Net.Store;
using Lucene.Net.Util;
using NUnit.Framework;
using Directory = Lucene.Net.Store.Directory;
namespace Lucene.Net.Contrib.Spatial.Test
{
public class SpatialTestCase : LuceneTestCase
{
private DirectoryReader indexReader;
private IndexWriter indexWriter;
private Directory directory;
protected IndexSearcher indexSearcher;
[SetUp]
public override void SetUp()
{
base.SetUp();
directory = NewDirectory();
indexWriter = new IndexWriter(directory, new WhitespaceAnalyzer(), true, IndexWriter.MaxFieldLength.UNLIMITED);
}
[TearDown]
public override void TearDown()
{
if (indexWriter != null)
{
indexWriter.Dispose();
indexWriter = null;
}
if (indexReader != null)
{
indexReader.Dispose();
indexReader = null;
}
if (directory != null)
{
directory.Dispose();
directory = null;
}
CompatibilityExtensions.PurgeSpatialCaches(null);
base.TearDown();
}
// ================================================= Helper Methods ================================================
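		// All tests index into an in-memory RAMDirectory.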
public static Directory NewDirectory()
{
return new RAMDirectory();
}
/// <summary>
/// create a new searcher over the reader.
/// </summary>
/// <param name="r"></param>
/// <returns></returns>
public static IndexSearcher newSearcher(IndexReader r)
{
return new IndexSearcher(r);
}
protected void addDocument(Document doc)
{
indexWriter.AddDocument(doc);
}
protected void addDocumentsAndCommit(List<Document> documents)
{
foreach (var document in documents)
{
indexWriter.AddDocument(document);
}
commit();
}
protected void deleteAll()
{
indexWriter.DeleteAll();
}
protected void commit()
{
indexWriter.Commit();
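			// open the reader on the first commit, then reopen it so the searcher sees the newly committed segments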
if (indexReader == null)
{
indexReader = (DirectoryReader)IndexReader.Open(directory, true);
}
else
{
indexReader = (DirectoryReader)indexReader.Reopen();
}
indexSearcher = newSearcher(indexReader);
}
protected void verifyDocumentsIndexed(int numDocs)
{
Assert.AreEqual(numDocs, indexReader.NumDocs());
}
protected SearchResults executeQuery(Query query, int numDocs)
{
try
{
TopDocs topDocs = indexSearcher.Search(query, numDocs);
var results = new List<SearchResult>();
foreach (ScoreDoc scoreDoc in topDocs.ScoreDocs)
{
results.Add(new SearchResult(scoreDoc.Score, indexSearcher.Doc(scoreDoc.Doc)));
}
return new SearchResults(topDocs.TotalHits, results);
}
catch (IOException ioe)
{
throw new Exception("IOException thrown while executing query", ioe);
}
}
// ================================================= Inner Classes =================================================
protected class SearchResults
{
public int numFound;
public List<SearchResult> results;
public SearchResults(int numFound, List<SearchResult> results)
{
this.numFound = numFound;
this.results = results;
}
public StringBuilder toDebugString()
{
StringBuilder str = new StringBuilder();
str.Append("found: ").Append(numFound).Append('[');
foreach (SearchResult r in results)
{
String id = r.document.Get("id");
str.Append(id).Append(", ");
}
str.Append(']');
return str;
}
public override String ToString()
{
return "[found:" + numFound + " " + results + "]";
}
}
protected class SearchResult
{
public float score;
public Document document;
public SearchResult(float score, Document document)
{
this.score = score;
this.document = document;
}
public override String ToString()
{
return "[" + score + "=" + document + "]";
}
}
}
}

View File

@@ -0,0 +1,103 @@
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
using System;
using System.Collections.Generic;
using System.IO;
using System.Text;
using Lucene.Net.Spatial.Queries;
using Spatial4n.Core.Context;
using Spatial4n.Core.Io;
namespace Lucene.Net.Contrib.Spatial.Test
{
/// <summary>
/// Helper class to execute queries
/// </summary>
public class SpatialTestQuery
{
public String testname;
public String line;
public int lineNumber = -1;
public SpatialArgs args;
public List<String> ids = new List<String>();
public class SpatialTestQueryLineReader : LineReader<SpatialTestQuery>
{
private readonly SpatialArgsParser parser;
private readonly SpatialContext ctx;
public SpatialTestQueryLineReader(Stream @in, SpatialArgsParser parser, SpatialContext ctx)
: base(@in)
{
this.parser = parser;
this.ctx = ctx;
}
public SpatialTestQueryLineReader(StreamReader r, SpatialArgsParser parser, SpatialContext ctx)
: base(r)
{
this.parser = parser;
this.ctx = ctx;
}
public override SpatialTestQuery ParseLine(string line)
{
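				// Each line has the form "[optional name] id1 id2 ... @ operation(shape)": the bracketed prefix is skipped,
				// the ids before '@' are collected, and everything after '@' is parsed as SpatialArgs.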
var test = new SpatialTestQuery {line = line, lineNumber = GetLineNumber()};
// skip a comment
if (line.StartsWith("["))
{
int idx0 = line.IndexOf(']');
if (idx0 > 0)
{
line = line.Substring(idx0 + 1);
}
}
int idx = line.IndexOf('@');
var pos = 0;
var st = line.Substring(0, idx).Split(new[] {' ', '\t', '\n', '\r', '\f'}, StringSplitOptions.RemoveEmptyEntries);
while (pos < st.Length)
{
test.ids.Add(st[pos++].Trim());
}
test.args = parser.Parse(line.Substring(idx + 1).Trim(), ctx);
return test;
}
}
/// <summary>
/// Get Test Queries
/// </summary>
/// <param name="parser"></param>
/// <param name="ctx"></param>
/// <param name="name"></param>
/// <param name="in"></param>
/// <returns></returns>
public static IEnumerator<SpatialTestQuery> getTestQueries(
SpatialArgsParser parser,
SpatialContext ctx,
String name,
Stream @in)
{
return new SpatialTestQueryLineReader(new StreamReader(@in, Encoding.UTF8), parser, ctx);
}
}
}

View File

@@ -0,0 +1,259 @@
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
using System;
using System.Collections.Generic;
using System.IO;
using Lucene.Net.Documents;
using Lucene.Net.Index;
using Lucene.Net.Search;
using Lucene.Net.Search.Function;
using Lucene.Net.Spatial;
using Lucene.Net.Spatial.Queries;
using Lucene.Net.Spatial.Util;
using Lucene.Net.Util;
using NUnit.Framework;
using Spatial4n.Core.Context;
using Spatial4n.Core.Io;
using Spatial4n.Core.Io.Samples;
using Spatial4n.Core.Shapes;
namespace Lucene.Net.Contrib.Spatial.Test
{
public abstract class StrategyTestCase : SpatialTestCase
{
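		// The data and query file names below are resolved under test-files\spatial (and test-files\spatial\data) by getTestQueries/getSampleData.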
public static readonly String DATA_SIMPLE_BBOX = "simple-bbox.txt";
public static readonly String DATA_STATES_POLY = "states-poly.txt";
public static readonly String DATA_STATES_BBOX = "states-bbox.txt";
public static readonly String DATA_COUNTRIES_POLY = "countries-poly.txt";
public static readonly String DATA_COUNTRIES_BBOX = "countries-bbox.txt";
public static readonly String DATA_WORLD_CITIES_POINTS = "world-cities-points.txt";
public static readonly String QTEST_States_IsWithin_BBox = "states-IsWithin-BBox.txt";
public static readonly String QTEST_States_Intersects_BBox = "states-Intersects-BBox.txt";
public static readonly String QTEST_Cities_Intersects_BBox = "cities-Intersects-BBox.txt";
public static readonly String QTEST_Simple_Queries_BBox = "simple-Queries-BBox.txt";
//private Logger log = Logger.getLogger(getClass().getName());
protected readonly SpatialArgsParser argsParser = new SpatialArgsParser();
protected SpatialStrategy strategy;
protected SpatialContext ctx;
protected bool storeShape = true;
[TearDown]
public override void TearDown()
{
base.TearDown();
ctx = null;
strategy = null;
storeShape = true;
}
protected void executeQueries(SpatialMatchConcern concern, params String[] testQueryFile)
{
Console.WriteLine("testing queries for strategy " + strategy + ". Executer: " + GetType().Name);
foreach (String path in testQueryFile)
{
IEnumerator<SpatialTestQuery> testQueryIterator = getTestQueries(path, ctx);
runTestQueries(testQueryIterator, concern);
}
}
protected void getAddAndVerifyIndexedDocuments(String testDataFile)
{
List<Document> testDocuments = getDocuments(testDataFile);
addDocumentsAndCommit(testDocuments);
verifyDocumentsIndexed(testDocuments.Count);
}
protected List<Document> getDocuments(String testDataFile)
{
IEnumerator<SampleData> sampleData = getSampleData(testDataFile);
var documents = new List<Document>();
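			// build one Lucene document per sample: stored "id"/"name" fields plus the strategy's indexable spatial fields (and optionally the stored shape string)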
while (sampleData.MoveNext())
{
SampleData data = sampleData.Current;
var document = new Document();
document.Add(new Field("id", data.id, Field.Store.YES, Field.Index.ANALYZED));
document.Add(new Field("name", data.name, Field.Store.YES, Field.Index.ANALYZED));
Shape shape = new ShapeReadWriter(ctx).ReadShape(data.shape);
shape = convertShapeFromGetDocuments(shape);
if (shape != null)
{
foreach (var f in strategy.CreateIndexableFields(shape))
{
document.Add(f);
}
if (storeShape)
document.Add(new Field(strategy.GetFieldName(), ctx.ToString(shape), Field.Store.YES,
Field.Index.NOT_ANALYZED_NO_NORMS));
}
documents.Add(document);
}
return documents;
}
/* Subclasses may override to transform or remove a shape for indexing */
protected virtual Shape convertShapeFromGetDocuments(Shape shape)
{
return shape;
}
protected IEnumerator<SampleData> getSampleData(String testDataFile)
{
var stream =
File.OpenRead(Path.Combine(Paths.ProjectRootDirectory,
Path.Combine(@"test-files\spatial\data", testDataFile)));
return new SampleDataReader(stream);
}
protected IEnumerator<SpatialTestQuery> getTestQueries(String testQueryFile, SpatialContext ctx)
{
var @in =
File.OpenRead(Path.Combine(Paths.ProjectRootDirectory,
Path.Combine(@"test-files\spatial", testQueryFile)));
return SpatialTestQuery.getTestQueries(argsParser, ctx, testQueryFile, @in);
}
public void runTestQueries(
IEnumerator<SpatialTestQuery> queries,
SpatialMatchConcern concern)
{
while (queries.MoveNext())
{
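				// run the strategy's query for this test line, then compare the returned ids against the expected ids according to the match concern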
SpatialTestQuery q = queries.Current;
String msg = q.line; //"Query: " + q.args.toString(ctx);
SearchResults got = executeQuery(strategy.MakeQuery(q.args), 100);
if (storeShape && got.numFound > 0)
{
//check stored value is there & parses
assertNotNull(
new ShapeReadWriter(ctx).ReadShape(got.results[0].document.Get(strategy.GetFieldName())));
}
if (concern.orderIsImportant)
{
var ids = q.ids.GetEnumerator();
foreach (var r in got.results)
{
String id = r.document.Get("id");
if (!ids.MoveNext())
Assert.Fail(msg + " :: Did not get enough results. Expected " + q.ids + ", got: " +
got.toDebugString());
Assert.AreEqual(ids.Current, id, "out of order: " + msg);
}
if (ids.MoveNext())
{
					Assert.Fail(msg + " :: expected more results than we got: " + ids.Current);
}
}
else
{
// We are looking at how the results overlap
if (concern.resultsAreSuperset)
{
var found = new HashSet<String>();
foreach (var r in got.results)
{
found.Add(r.document.Get("id"));
}
foreach (String s in q.ids)
{
if (!found.Contains(s))
{
Assert.Fail("Results are mising id: " + s + " :: " + found);
}
}
}
else
{
var found = new List<String>();
foreach (SearchResult r in got.results)
{
found.Add(r.document.Get("id"));
}
// sort both so that the order is not important
q.ids.Sort();
found.Sort();
Assert.AreEqual(q.ids.Count, found.Count);
for (var i = 0; i < found.Count; i++)
{
Assert.AreEqual(q.ids[i], found[i], msg);
}
}
}
}
}
protected void adoc(String id, String shapeStr)
{
Shape shape = shapeStr == null ? null : new ShapeReadWriter(ctx).ReadShape(shapeStr);
addDocument(newDoc(id, shape));
}
protected void adoc(String id, Shape shape)
{
addDocument(newDoc(id, shape));
}
protected virtual Document newDoc(String id, Shape shape)
{
Document doc = new Document();
doc.Add(new Field("id", id, Field.Store.YES, Field.Index.ANALYZED));
if (shape != null)
{
foreach (var f in strategy.CreateIndexableFields(shape))
{
doc.Add(f);
}
if (storeShape)
doc.Add(new Field(strategy.GetFieldName(), ctx.ToString(shape), Field.Store.YES,
Field.Index.NOT_ANALYZED_NO_NORMS));
}
return doc;
}
/* scores[] are in docId order */
protected void checkValueSource(ValueSource vs, float[] scores, float delta)
{
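			// Wrap the ValueSource in a FunctionQuery so each document's score is the value produced by the source, then compare against the expected per-doc scores.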
FunctionQuery q = new FunctionQuery(vs);
// //TODO is there any point to this check?
// int expectedDocs[] = new int[scores.length];//fill with ascending 0....length-1
// for (int i = 0; i < expectedDocs.length; i++) {
// expectedDocs[i] = i;
// }
// CheckHits.checkHits(random(), q, "", indexSearcher, expectedDocs);
TopDocs docs = indexSearcher.Search(q, 1000); //calculates the score
for (int i = 0; i < docs.ScoreDocs.Length; i++)
{
ScoreDoc gotSD = docs.ScoreDocs[i];
float expectedScore = scores[gotSD.Doc];
assertEquals("Not equal for doc " + gotSD.Doc, expectedScore, gotSD.Score, delta);
}
CheckHits.checkExplanations(q, "", indexSearcher);
}
}
}

View File

@@ -0,0 +1,317 @@
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
using System;
using System.Collections.Generic;
using Lucene.Net.Analysis;
using Lucene.Net.Documents;
using Lucene.Net.Index;
using Lucene.Net.Search;
using Lucene.Net.Spatial.Tier;
using Lucene.Net.Spatial.Tier.Projectors;
using Lucene.Net.Store;
using Lucene.Net.Util;
using NUnit.Framework;
namespace Lucene.Net.Contrib.Spatial.Test
{
[TestFixture]
public class TestCartesian
{
private Directory _directory;
private IndexSearcher _searcher;
// reston va
private double _lat = 38.969398;
private double _lng = -77.386398;
private const string LatField = "lat";
private const string LngField = "lng";
private readonly List<CartesianTierPlotter> _ctps = new List<CartesianTierPlotter>();
private readonly IProjector _projector = new SinusoidalProjector();
[SetUp]
protected void SetUp()
{
_directory = new RAMDirectory();
var writer = new IndexWriter(_directory, new WhitespaceAnalyzer(), true, IndexWriter.MaxFieldLength.UNLIMITED);
SetUpPlotter(2, 15);
AddData(writer);
}
private void SetUpPlotter(int @base, int top)
{
for (; @base <= top; @base++)
{
_ctps.Add(new CartesianTierPlotter(@base, _projector, CartesianTierPlotter.DefaltFieldPrefix));
}
}
private void AddData(IndexWriter writer)
{
AddPoint(writer, "McCormick &amp; Schmick's Seafood Restaurant", 38.9579000, -77.3572000);
AddPoint(writer, "Jimmy's Old Town Tavern", 38.9690000, -77.3862000);
AddPoint(writer, "Ned Devine's", 38.9510000, -77.4107000);
AddPoint(writer, "Old Brogue Irish Pub", 38.9955000, -77.2884000);
AddPoint(writer, "Alf Laylah Wa Laylah", 38.8956000, -77.4258000);
AddPoint(writer, "Sully's Restaurant &amp; Supper", 38.9003000, -77.4467000);
AddPoint(writer, "TGI Friday", 38.8725000, -77.3829000);
AddPoint(writer, "Potomac Swing Dance Club", 38.9027000, -77.2639000);
AddPoint(writer, "White Tiger Restaurant", 38.9027000, -77.2638000);
AddPoint(writer, "Jammin' Java", 38.9039000, -77.2622000);
AddPoint(writer, "Potomac Swing Dance Club", 38.9027000, -77.2639000);
AddPoint(writer, "WiseAcres Comedy Club", 38.9248000, -77.2344000);
AddPoint(writer, "Glen Echo Spanish Ballroom", 38.9691000, -77.1400000);
AddPoint(writer, "Whitlow's on Wilson", 38.8889000, -77.0926000);
AddPoint(writer, "Iota Club and Cafe", 38.8890000, -77.0923000);
AddPoint(writer, "Hilton Washington Embassy Row", 38.9103000, -77.0451000);
AddPoint(writer, "HorseFeathers, Bar & Grill", 39.01220000000001, -77.3942);
AddPoint(writer, "Marshall Island Airfield", 7.06, 171.2);
AddPoint(writer, "Midway Island", 25.7, -171.7);
AddPoint(writer, "North Pole Way", 55.0, 4.0);
writer.Commit();
writer.Close();
}
private void AddPoint(IndexWriter writer, String name, double lat, double lng)
{
Document doc = new Document();
doc.Add(new Field("name", name, Field.Store.YES, Field.Index.ANALYZED));
// convert the lat / long to lucene fields
doc.Add(new Field(LatField, NumericUtils.DoubleToPrefixCoded(lat), Field.Store.YES, Field.Index.NOT_ANALYZED));
doc.Add(new Field(LngField, NumericUtils.DoubleToPrefixCoded(lng), Field.Store.YES, Field.Index.NOT_ANALYZED));
// add a default meta field to make searching all documents easy
doc.Add(new Field("metafile", "doc", Field.Store.YES, Field.Index.ANALYZED));
int ctpsize = _ctps.Count;
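			// index the grid box id of this point for every configured tier, so the cartesian tier filter can match at whichever granularity it selects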
for (int i = 0; i < ctpsize; i++)
{
CartesianTierPlotter ctp = _ctps[i];
var boxId = ctp.GetTierBoxId(lat, lng);
doc.Add(new Field(ctp.GetTierFieldName(),
NumericUtils.DoubleToPrefixCoded(boxId),
Field.Store.YES,
Field.Index.NOT_ANALYZED_NO_NORMS));
}
writer.AddDocument(doc);
}
[Test]
public void TestAntiM()
{
_searcher = new IndexSearcher(_directory, true);
const double miles = 6.0;
Console.WriteLine("testAntiM");
// create a distance query
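			// the builder combines a cartesian tier grid filter with an exact distance check of 'miles' miles around (_lat, _lng)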
var dq = new DistanceQueryBuilder(_lat, _lng, miles, LatField, LngField, CartesianTierPlotter.DefaltFieldPrefix, true);
Console.WriteLine(dq);
//create a term query to search against all documents
Query tq = new TermQuery(new Term("metafile", "doc"));
var dsort = new DistanceFieldComparatorSource(dq.DistanceFilter);
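			// sort by the distances computed by the distance filter; the "foo" SortField name is effectively a placeholder here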
Sort sort = new Sort(new SortField("foo", dsort, false));
// Perform the search, using the term query, the distance filter, and the
// distance sort
TopDocs hits = _searcher.Search(tq, dq.Filter, 1000, sort);
int results = hits.TotalHits;
ScoreDoc[] scoreDocs = hits.ScoreDocs;
// Get a list of distances
Dictionary<int, Double> distances = dq.DistanceFilter.Distances;
Console.WriteLine("Distance Filter filtered: " + distances.Count);
Console.WriteLine("Results: " + results);
Console.WriteLine("=============================");
Console.WriteLine("Distances should be 7 " + distances.Count);
Console.WriteLine("Results should be 7 " + results);
Assert.AreEqual(7, distances.Count); // fixed a store of only needed distances
Assert.AreEqual(7, results);
double lastDistance = 0;
for (int i = 0; i < results; i++)
{
Document d = _searcher.Doc(scoreDocs[i].Doc);
String name = d.Get("name");
double rsLat = NumericUtils.PrefixCodedToDouble(d.Get(LatField));
double rsLng = NumericUtils.PrefixCodedToDouble(d.Get(LngField));
Double geo_distance = distances[scoreDocs[i].Doc];
double distance = DistanceUtils.GetInstance().GetDistanceMi(_lat, _lng, rsLat, rsLng);
double llm = DistanceUtils.GetInstance().GetLLMDistance(_lat, _lng, rsLat, rsLng);
Console.WriteLine("Name: " + name + ", Distance " + distance);
Assert.IsTrue(Math.Abs((distance - llm)) < 1);
Assert.IsTrue((distance < miles));
Assert.IsTrue(geo_distance >= lastDistance);
lastDistance = geo_distance;
}
}
}
[TestFixture]
public class TestCartesian2
{
private Directory _directory;
private IndexSearcher _searcher;
		// southern Sweden (near Lund)
private double _lat = 55.6880508001;
private double _lng = 13.5871808352; // This passes: 13.6271808352
private const string LatField = "lat";
private const string LngField = "lng";
private readonly List<CartesianTierPlotter> _ctps = new List<CartesianTierPlotter>();
private readonly IProjector _projector = new SinusoidalProjector();
[SetUp]
protected void SetUp()
{
_directory = new RAMDirectory();
var writer = new IndexWriter(_directory, new WhitespaceAnalyzer(), true, IndexWriter.MaxFieldLength.UNLIMITED);
SetUpPlotter(2, 15);
AddData(writer);
}
private void SetUpPlotter(int @base, int top)
{
for (; @base <= top; @base++)
{
_ctps.Add(new CartesianTierPlotter(@base, _projector, CartesianTierPlotter.DefaltFieldPrefix));
}
}
private void AddData(IndexWriter writer)
{
AddPoint(writer, "Within radius", 55.6880508001, 13.5717346673);
AddPoint(writer, "Within radius", 55.6821978456, 13.6076183965);
AddPoint(writer, "Within radius", 55.673251569, 13.5946697607);
AddPoint(writer, "Close but not in radius", 55.8634157297, 13.5497731987);
AddPoint(writer, "Faar away", 40.7137578228, -74.0126901936);
writer.Commit();
writer.Close();
}
private void AddPoint(IndexWriter writer, String name, double lat, double lng)
{
Document doc = new Document();
doc.Add(new Field("name", name, Field.Store.YES, Field.Index.ANALYZED));
// convert the lat / long to lucene fields
doc.Add(new Field(LatField, NumericUtils.DoubleToPrefixCoded(lat), Field.Store.YES, Field.Index.NOT_ANALYZED));
doc.Add(new Field(LngField, NumericUtils.DoubleToPrefixCoded(lng), Field.Store.YES, Field.Index.NOT_ANALYZED));
// add a default meta field to make searching all documents easy
doc.Add(new Field("metafile", "doc", Field.Store.YES, Field.Index.ANALYZED));
int ctpsize = _ctps.Count;
for (int i = 0; i < ctpsize; i++)
{
CartesianTierPlotter ctp = _ctps[i];
var boxId = ctp.GetTierBoxId(lat, lng);
doc.Add(new Field(ctp.GetTierFieldName(),
NumericUtils.DoubleToPrefixCoded(boxId),
Field.Store.YES,
Field.Index.NOT_ANALYZED_NO_NORMS));
}
writer.AddDocument(doc);
}
[Test]
public void TestAntiM()
{
_searcher = new IndexSearcher(_directory, true);
const double miles = 5.0;
Console.WriteLine("testAntiM");
// create a distance query
var dq = new DistanceQueryBuilder(_lat, _lng, miles, LatField, LngField, CartesianTierPlotter.DefaltFieldPrefix, true);
Console.WriteLine(dq);
//create a term query to search against all documents
Query tq = new TermQuery(new Term("metafile", "doc"));
var dsort = new DistanceFieldComparatorSource(dq.DistanceFilter);
Sort sort = new Sort(new SortField("foo", dsort, false));
// Perform the search, using the term query, the distance filter, and the
// distance sort
TopDocs hits = _searcher.Search(tq, dq.Filter, 1000, sort);
int results = hits.TotalHits;
ScoreDoc[] scoreDocs = hits.ScoreDocs;
// Get a list of distances
Dictionary<int, Double> distances = dq.DistanceFilter.Distances;
Console.WriteLine("Distance Filter filtered: " + distances.Count);
Console.WriteLine("Results: " + results);
Console.WriteLine("=============================");
Console.WriteLine("Distances should be 3 " + distances.Count);
Console.WriteLine("Results should be 3 " + results);
Assert.AreEqual(3, distances.Count); // fixed a store of only needed distances
Assert.AreEqual(3, results);
double lastDistance = 0;
for (int i = 0; i < results; i++)
{
Document d = _searcher.Doc(scoreDocs[i].Doc);
String name = d.Get("name");
double rsLat = NumericUtils.PrefixCodedToDouble(d.Get(LatField));
double rsLng = NumericUtils.PrefixCodedToDouble(d.Get(LngField));
Double geo_distance = distances[scoreDocs[i].Doc];
double distance = DistanceUtils.GetInstance().GetDistanceMi(_lat, _lng, rsLat, rsLng);
double llm = DistanceUtils.GetInstance().GetLLMDistance(_lat, _lng, rsLat, rsLng);
Console.WriteLine("Name: " + name + ", Distance " + distance);
Assert.IsTrue(Math.Abs((distance - llm)) < 1);
Assert.IsTrue((distance < miles));
Assert.IsTrue(geo_distance >= lastDistance);
lastDistance = geo_distance;
}
}
}
}

View File

@@ -0,0 +1,52 @@
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
using System;
using System.Collections.Generic;
using Lucene.Net.Util;
using NUnit.Framework;
namespace Lucene.Net.Contrib.Spatial.Test
{
/// <summary>
/// Make sure we are reading the tests as expected
/// </summary>
public class TestTestFramework : LuceneTestCase
{
// public void testQueries()
// {
// String name = StrategyTestCase.QTEST_Cities_IsWithin_BBox;
// InputStream @in = getClass().getClassLoader().getResourceAsStream(name);
// SpatialContext ctx = SimpleSpatialContext.GEO_KM;
// Iterator<SpatialTestQuery> iter = SpatialTestQuery.getTestQueries(
// new SpatialArgsParser(), ctx, name, in );
// List<SpatialTestQuery> tests = new ArrayList<SpatialTestQuery>();
// while( iter.hasNext() ) {
// tests.add( iter.next() );
// }
// Assert.assertEquals( 3, tests.size() );
// SpatialTestQuery sf = tests.get(0);
// // assert
// Assert.assertEquals( 1, sf.ids.size() );
// Assert.assertTrue( sf.ids.get(0).equals( "G5391959" ) );
// Assert.assertTrue( sf.args.getShape() instanceof Rectangle);
// Assert.assertEquals( SpatialOperation.IsWithin, sf.args.getOperation() );
//}
}
}

View File

@@ -0,0 +1,64 @@
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
using System;
using Lucene.Net.Search;
using Lucene.Net.Spatial.Queries;
using Lucene.Net.Spatial.Vector;
using NUnit.Framework;
using Spatial4n.Core.Context;
using Spatial4n.Core.Shapes;
using Spatial4n.Core.Shapes.Impl;
using Spatial4n.Core.Exceptions;
namespace Lucene.Net.Contrib.Spatial.Test.Vector
{
public class TestTwoDoublesStrategy : StrategyTestCase
{
		[SetUp]
		public override void SetUp()
{
base.SetUp();
this.ctx = SpatialContext.GEO;
this.strategy = new PointVectorStrategy(ctx, GetType().Name);
}
[Test]
public void testCircleShapeSupport()
{
Circle circle = ctx.MakeCircle(ctx.MakePoint(0, 0), 10);
SpatialArgs args = new SpatialArgs(SpatialOperation.Intersects, circle);
Query query = this.strategy.MakeQuery(args);
Assert.NotNull(query);
}
[Test]
public void testInvalidQueryShape()
{
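			// a bare point is not a supported query shape for this strategy, so MakeQuery is expected to throw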
Point point = ctx.MakePoint(0, 0);
var args = new SpatialArgs(SpatialOperation.Intersects, point);
Assert.Throws<InvalidOperationException>(() => this.strategy.MakeQuery(args));
}
[Test]
public void testCitiesIntersectsBBox()
{
getAddAndVerifyIndexedDocuments(DATA_WORLD_CITIES_POINTS);
executeQueries(SpatialMatchConcern.FILTER, QTEST_Cities_Intersects_BBox);
}
}
}