ignite-commits mailing list archives

Site index · List index
Message view « Date » · « Thread »
Top « Date » · « Thread »
From sboi...@apache.org
Subject [11/22] ignite git commit: IGNITE-5010: DDL: additional tests for spatial dynamic indexes.
Date Thu, 20 Apr 2017 09:39:25 GMT
IGNITE-5010: DDL: additional tests for spatial dynamic indexes.


Project: http://git-wip-us.apache.org/repos/asf/ignite/repo
Commit: http://git-wip-us.apache.org/repos/asf/ignite/commit/f923bc91
Tree: http://git-wip-us.apache.org/repos/asf/ignite/tree/f923bc91
Diff: http://git-wip-us.apache.org/repos/asf/ignite/diff/f923bc91

Branch: refs/heads/ignite-1794
Commit: f923bc91fec38ad4e024ae1b395c5bf9a0a50e12
Parents: b1822da
Author: devozerov <vozerov@gridgain.com>
Authored: Wed Apr 19 17:54:53 2017 +0300
Committer: devozerov <vozerov@gridgain.com>
Committed: Wed Apr 19 17:54:53 2017 +0300

----------------------------------------------------------------------
 .../h2/GridBinaryH2IndexingGeoSelfTest.java     |  35 -
 .../query/h2/GridH2IndexingGeoSelfTest.java     | 470 -------------
 .../h2/GridH2IndexingSegmentedGeoSelfTest.java  |  37 -
 .../query/h2/H2IndexingAbstractGeoSelfTest.java | 673 +++++++++++++++++++
 .../query/h2/H2IndexingBinaryGeoSelfTest.java   |  30 +
 .../H2IndexingBinarySegmentedGeoSelfTest.java   |  30 +
 .../query/h2/H2IndexingGeoSelfTest.java         |  30 +
 .../h2/H2IndexingSegmentedGeoSelfTest.java      |  30 +
 .../testsuites/GeoSpatialIndexingTestSuite.java |  16 +-
 9 files changed, 802 insertions(+), 549 deletions(-)
----------------------------------------------------------------------


http://git-wip-us.apache.org/repos/asf/ignite/blob/f923bc91/modules/geospatial/src/test/java/org/apache/ignite/internal/processors/query/h2/GridBinaryH2IndexingGeoSelfTest.java
----------------------------------------------------------------------
diff --git a/modules/geospatial/src/test/java/org/apache/ignite/internal/processors/query/h2/GridBinaryH2IndexingGeoSelfTest.java b/modules/geospatial/src/test/java/org/apache/ignite/internal/processors/query/h2/GridBinaryH2IndexingGeoSelfTest.java
deleted file mode 100644
index e271712..0000000
--- a/modules/geospatial/src/test/java/org/apache/ignite/internal/processors/query/h2/GridBinaryH2IndexingGeoSelfTest.java
+++ /dev/null
@@ -1,35 +0,0 @@
-/*
- * Licensed to the Apache Software Foundation (ASF) under one or more
- * contributor license agreements.  See the NOTICE file distributed with
- * this work for additional information regarding copyright ownership.
- * The ASF licenses this file to You under the Apache License, Version 2.0
- * (the "License"); you may not use this file except in compliance with
- * the License.  You may obtain a copy of the License at
- *
- *      http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-package org.apache.ignite.internal.processors.query.h2;
-
-import org.apache.ignite.configuration.IgniteConfiguration;
-import org.apache.ignite.internal.binary.BinaryMarshaller;
-
-/**
- * Geo-indexing test for binary mode.
- */
-public class GridBinaryH2IndexingGeoSelfTest extends GridH2IndexingGeoSelfTest {
-    /** {@inheritDoc} */
-    @Override protected IgniteConfiguration getConfiguration(String igniteInstanceName) throws Exception {
-        IgniteConfiguration cfg = super.getConfiguration(igniteInstanceName);
-
-        cfg.setMarshaller(new BinaryMarshaller());
-
-        return cfg;
-    }
-}
\ No newline at end of file

http://git-wip-us.apache.org/repos/asf/ignite/blob/f923bc91/modules/geospatial/src/test/java/org/apache/ignite/internal/processors/query/h2/GridH2IndexingGeoSelfTest.java
----------------------------------------------------------------------
diff --git a/modules/geospatial/src/test/java/org/apache/ignite/internal/processors/query/h2/GridH2IndexingGeoSelfTest.java b/modules/geospatial/src/test/java/org/apache/ignite/internal/processors/query/h2/GridH2IndexingGeoSelfTest.java
deleted file mode 100644
index 4404e9c..0000000
--- a/modules/geospatial/src/test/java/org/apache/ignite/internal/processors/query/h2/GridH2IndexingGeoSelfTest.java
+++ /dev/null
@@ -1,470 +0,0 @@
-/*
- * Licensed to the Apache Software Foundation (ASF) under one or more
- * contributor license agreements.  See the NOTICE file distributed with
- * this work for additional information regarding copyright ownership.
- * The ASF licenses this file to You under the Apache License, Version 2.0
- * (the "License"); you may not use this file except in compliance with
- * the License.  You may obtain a copy of the License at
- *
- *      http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-package org.apache.ignite.internal.processors.query.h2;
-
-import com.vividsolutions.jts.geom.Geometry;
-import com.vividsolutions.jts.io.ParseException;
-import com.vividsolutions.jts.io.WKTReader;
-import java.io.Serializable;
-import java.util.Arrays;
-import java.util.Collection;
-import java.util.HashSet;
-import java.util.List;
-import java.util.Set;
-import java.util.concurrent.Callable;
-import java.util.concurrent.ThreadLocalRandom;
-import java.util.concurrent.atomic.AtomicBoolean;
-import java.util.concurrent.atomic.AtomicReference;
-import javax.cache.Cache;
-import org.apache.ignite.IgniteCache;
-import org.apache.ignite.cache.CacheAtomicityMode;
-import org.apache.ignite.cache.CacheMode;
-import org.apache.ignite.cache.query.SqlFieldsQuery;
-import org.apache.ignite.cache.query.SqlQuery;
-import org.apache.ignite.cache.query.annotations.QuerySqlField;
-import org.apache.ignite.configuration.CacheConfiguration;
-import org.apache.ignite.internal.IgniteInternalFuture;
-import org.apache.ignite.internal.processors.cache.GridCacheAbstractSelfTest;
-import org.apache.ignite.internal.util.typedef.F;
-import org.apache.ignite.internal.util.typedef.internal.U;
-import org.apache.ignite.testframework.GridTestUtils;
-
-/**
- * Geo-indexing test.
- */
-public class GridH2IndexingGeoSelfTest extends GridCacheAbstractSelfTest {
-    /** */
-    private static final int CNT = 100;
-
-    /** */
-    private static final long DUR = 60000L;
-
-    /** Number of generated samples. */
-    public static final int ENEMYCAMP_SAMPLES_COUNT = 500;
-
-    /** Number of generated samples. */
-    public static final int ENEMY_SAMPLES_COUNT = 1000;
-
-    /** {@inheritDoc} */
-    @Override protected int gridCount() {
-        return 3;
-    }
-
-    /** {@inheritDoc} */
-    @Override protected long getTestTimeout() {
-        return DUR * 3;
-    }
-
-    /**
-     * @param name Cache name.
-     * @param partitioned Partition or replicated cache.
-     * @param idxTypes Indexed types.
-     * @return Cache configuration.
-     */
-    protected <K, V> CacheConfiguration<K, V> cacheConfig(String name, boolean partitioned,
-        Class<?>... idxTypes) throws Exception {
-        return new CacheConfiguration<K, V>(name)
-            .setName(name)
-            .setCacheMode(partitioned ? CacheMode.PARTITIONED : CacheMode.REPLICATED)
-            .setAtomicityMode(CacheAtomicityMode.ATOMIC)
-            .setIndexedTypes(idxTypes);
-    }
-
-    /**
-     * @throws Exception If failed.
-     */
-    public void testPrimitiveGeometry() throws Exception {
-        IgniteCache<Long, Geometry> cache = grid(0).getOrCreateCache(
-            this.<Long, Geometry>cacheConfig("geom", true, Long.class, Geometry.class));
-
-        try {
-            WKTReader r = new WKTReader();
-
-            for (long i = 0; i < 100; i++)
-                cache.put(i, r.read("POINT(" + i + " " + i + ")"));
-
-            List<List<?>> res = cache.query(new SqlFieldsQuery("explain select _key from Geometry where _val && ?")
-                .setArgs(r.read("POLYGON((5 70, 5 80, 30 80, 30 70, 5 70))")).setLocal(true)).getAll();
-
-            assertTrue("__ explain: " + res, res.get(0).get(0).toString().toLowerCase().contains("_val_idx"));
-        }
-        finally {
-            cache.destroy();
-        }
-    }
-
-    /**
-     * @throws Exception If failed.
-     */
-    @SuppressWarnings("unchecked")
-    public void testGeo() throws Exception {
-        IgniteCache<Integer, EnemyCamp> cache = grid(0).getOrCreateCache(
-            this.<Integer, EnemyCamp>cacheConfig("camp", true, Integer.class, EnemyCamp.class));
-
-        try {
-            WKTReader r = new WKTReader();
-
-            cache.getAndPut(0, new EnemyCamp(r.read("POINT(25 75)"), "A"));
-            cache.getAndPut(1, new EnemyCamp(r.read("POINT(70 70)"), "B"));
-            cache.getAndPut(2, new EnemyCamp(r.read("POINT(70 30)"), "C"));
-            cache.getAndPut(3, new EnemyCamp(r.read("POINT(75 25)"), "D"));
-
-            SqlQuery<Integer, EnemyCamp> qry = new SqlQuery(EnemyCamp.class, "coords && ?");
-
-            Collection<Cache.Entry<Integer, EnemyCamp>> res = cache.query(
-                qry.setArgs(r.read("POLYGON((5 70, 5 80, 30 80, 30 70, 5 70))"))).getAll();
-
-            checkPoints(res, "A");
-
-            res = cache.query(
-                qry.setArgs(r.read("POLYGON((10 5, 10 35, 70 30, 75 25, 10 5))"))).getAll();
-
-            checkPoints(res, "C", "D");
-
-            // Move B to the first polygon.
-            cache.getAndPut(1, new EnemyCamp(r.read("POINT(20 75)"), "B"));
-
-            res = cache.query(
-                qry.setArgs(r.read("POLYGON((5 70, 5 80, 30 80, 30 70, 5 70))"))).getAll();
-
-            checkPoints(res, "A", "B");
-
-            // Move B to the second polygon.
-            cache.getAndPut(1, new EnemyCamp(r.read("POINT(30 30)"), "B"));
-
-            res = cache.query(
-                qry.setArgs(r.read("POLYGON((10 5, 10 35, 70 30, 75 25, 10 5))"))).getAll();
-
-            checkPoints(res, "B", "C", "D");
-
-            // Remove B.
-            cache.getAndRemove(1);
-
-            res = cache.query(
-                qry.setArgs(r.read("POLYGON((5 70, 5 80, 30 80, 30 70, 5 70))"))).getAll();
-
-            checkPoints(res, "A");
-
-            res = cache.query(
-                qry.setArgs(r.read("POLYGON((10 5, 10 35, 70 30, 75 25, 10 5))"))).getAll();
-
-            checkPoints(res, "C", "D");
-
-            // Check explaint request.
-            assertTrue(F.first(cache.query(new SqlFieldsQuery("explain select * from EnemyCamp " +
-                "where coords && 'POINT(25 75)'")).getAll()).get(0).toString().toLowerCase().contains("coords_idx"));
-        }
-        finally {
-            cache.destroy();
-        }
-    }
-
-    /**
-     * @throws Exception If failed.
-     */
-    @SuppressWarnings("unchecked")
-    public void testGeoMultithreaded() throws Exception {
-        final CacheConfiguration<Integer, EnemyCamp> ccfg = cacheConfig("camp", true, Integer.class, EnemyCamp.class);
-
-        final IgniteCache<Integer, EnemyCamp> cache1 = grid(0).getOrCreateCache(ccfg);
-        final IgniteCache<Integer, EnemyCamp> cache2 = grid(1).cache("camp");
-        final IgniteCache<Integer, EnemyCamp> cache3 = grid(2).cache("camp");
-
-        try {
-            final String[] points = new String[CNT];
-
-            WKTReader r = new WKTReader();
-
-            ThreadLocalRandom rnd = ThreadLocalRandom.current();
-
-            for (int idx = 0; idx < CNT; idx++) {
-                int x = rnd.nextInt(1, 100);
-                int y = rnd.nextInt(1, 100);
-
-                cache1.getAndPut(idx, new EnemyCamp(r.read("POINT(" + x + " " + y + ")"), Integer.toString(idx)));
-
-                points[idx] = Integer.toString(idx);
-            }
-
-            Thread.sleep(200);
-
-            final AtomicBoolean stop = new AtomicBoolean();
-            final AtomicReference<Exception> err = new AtomicReference<>();
-
-            IgniteInternalFuture<?> putFut = GridTestUtils.runMultiThreadedAsync(new Callable<Void>() {
-                @Override public Void call() throws Exception {
-                    WKTReader r = new WKTReader();
-
-                    ThreadLocalRandom rnd = ThreadLocalRandom.current();
-
-                    while (!stop.get()) {
-                        int cacheIdx = rnd.nextInt(0, 3);
-
-                        IgniteCache<Integer, EnemyCamp> cache = cacheIdx == 0 ? cache1 : cacheIdx == 1 ? cache2 : cache3;
-
-                        int idx = rnd.nextInt(CNT);
-                        int x = rnd.nextInt(1, 100);
-                        int y = rnd.nextInt(1, 100);
-
-                        cache.getAndPut(idx, new EnemyCamp(r.read("POINT(" + x + " " + y + ")"), Integer.toString(idx)));
-
-                        U.sleep(50);
-                    }
-
-                    return null;
-                }
-            }, Runtime.getRuntime().availableProcessors(), "put-thread");
-
-            IgniteInternalFuture<?> qryFut = GridTestUtils.runMultiThreadedAsync(new Callable<Void>() {
-                @Override public Void call() throws Exception {
-                    WKTReader r = new WKTReader();
-
-                    ThreadLocalRandom rnd = ThreadLocalRandom.current();
-
-                    while (!stop.get()) {
-                        try {
-                            int cacheIdx = rnd.nextInt(0, 3);
-
-                            IgniteCache<Integer, EnemyCamp> cache = cacheIdx == 0 ? cache1 : cacheIdx == 1 ? cache2 : cache3;
-
-                            SqlQuery<Integer, EnemyCamp> qry = new SqlQuery<>(
-                                EnemyCamp.class, "coords && ?");
-
-                            Collection<Cache.Entry<Integer, EnemyCamp>> res = cache.query(qry.setArgs(
-                                r.read("POLYGON((0 0, 0 100, 100 100, 100 0, 0 0))"))).getAll();
-
-                            checkPoints(res, points);
-
-                            U.sleep(5);
-                        }
-                        catch (Exception e) {
-                            err.set(e);
-
-                            stop.set(true);
-
-                            break;
-                        }
-                    }
-
-                    return null;
-                }
-            }, 4, "qry-thread");
-
-            U.sleep(6000L);
-
-            stop.set(true);
-
-            putFut.get();
-            qryFut.get();
-
-            Exception err0 = err.get();
-
-            if (err0 != null)
-                throw err0;
-        }
-        finally {
-            cache1.destroy();
-        }
-    }
-
-    /**
-     * Check whether result contains all required points.
-     *
-     * @param res Result.
-     * @param points Expected points.
-     */
-    private void checkPoints(Collection<Cache.Entry<Integer, EnemyCamp>> res, String... points) {
-        Set<String> set = new HashSet<>(Arrays.asList(points));
-
-        assertEquals(set.size(), res.size());
-
-        for (Cache.Entry<Integer, EnemyCamp> e : res)
-            assertTrue(set.remove(e.getValue().name));
-    }
-
-    /**
-     * @throws Exception if fails.
-     */
-    public void testSegmentedGeoIndexJoin() throws Exception {
-        IgniteCache<Integer, Enemy> c1 = ignite(0).getOrCreateCache(
-            this.<Integer, Enemy>cacheConfig("enemy", true, Integer.class, Enemy.class));
-        IgniteCache<Integer, EnemyCamp> c2 = ignite(0).getOrCreateCache(
-            this.<Integer, EnemyCamp>cacheConfig("camp", true, Integer.class, EnemyCamp.class));
-
-        try {
-            fillCache();
-
-            checkDistributedQuery();
-
-            checkLocalQuery();
-        }
-        finally {
-            c1.destroy();
-            c2.destroy();
-        }
-    }
-
-    /**
-     * @throws Exception if fails.
-     */
-    public void testSegmentedGeoIndexJoin2() throws Exception {
-        IgniteCache<Integer, Enemy> c1 = ignite(0).getOrCreateCache(
-            this.<Integer, Enemy>cacheConfig("enemy", true, Integer.class, Enemy.class));
-        IgniteCache<Integer, EnemyCamp> c2 = ignite(0).getOrCreateCache(
-            this.<Integer, EnemyCamp>cacheConfig("camp", false, Integer.class, EnemyCamp.class));
-
-        try {
-            fillCache();
-
-            checkDistributedQuery();
-
-            checkLocalQuery();
-        }
-        finally {
-            c1.destroy();
-            c2.destroy();
-        }
-    }
-
-    /** */
-    private void checkDistributedQuery() throws ParseException {
-        IgniteCache<Integer, Enemy> c1 = ignite(0).cache("enemy");
-        IgniteCache<Integer, EnemyCamp> c2 = ignite(0).cache("camp");
-
-        final Geometry lethalArea = new WKTReader().read("POLYGON((30 30, 30 70, 70 70, 70 30, 30 30))");
-
-        int expectedEnemies = 0;
-
-        for (Cache.Entry<Integer, Enemy> e : c1) {
-            final Integer campID = e.getValue().campId;
-
-            if (30 <= campID && campID < ENEMYCAMP_SAMPLES_COUNT) {
-                final EnemyCamp camp = c2.get(campID);
-
-                if (lethalArea.covers(camp.coords))
-                    expectedEnemies++;
-            }
-        }
-
-        final SqlFieldsQuery query = new SqlFieldsQuery("select e._val, c._val from \"enemy\".Enemy e, \"camp\".EnemyCamp c " +
-            "where e.campId = c._key and c.coords && ?").setArgs(lethalArea);
-
-        List<List<?>> result = c1.query(query.setDistributedJoins(true)).getAll();
-
-        assertEquals(expectedEnemies, result.size());
-    }
-
-    /** */
-    private void checkLocalQuery() throws ParseException {
-        IgniteCache<Integer, Enemy> c1 = ignite(0).cache("enemy");
-        IgniteCache<Integer, EnemyCamp> c2 = ignite(0).cache("camp");
-
-        final Geometry lethalArea = new WKTReader().read("POLYGON((30 30, 30 70, 70 70, 70 30, 30 30))");
-
-        Set<Integer> localCampsIDs = new HashSet<>();
-
-        for(Cache.Entry<Integer, EnemyCamp> e : c2.localEntries())
-            localCampsIDs.add(e.getKey());
-
-        int expectedEnemies = 0;
-
-        for (Cache.Entry<Integer, Enemy> e : c1.localEntries()) {
-            final Integer campID = e.getValue().campId;
-
-            if (localCampsIDs.contains(campID)) {
-                final EnemyCamp camp = c2.get(campID);
-
-                if (lethalArea.covers(camp.coords))
-                    expectedEnemies++;
-            }
-        }
-
-        final SqlFieldsQuery query = new SqlFieldsQuery("select e._val, c._val from \"enemy\".Enemy e, \"camp\".EnemyCamp c " +
-            "where e.campId = c._key and c.coords && ?").setArgs(lethalArea);
-
-        List<List<?>> result = c1.query(query.setLocal(true)).getAll();
-
-        assertEquals(expectedEnemies, result.size());
-    }
-
-    /** */
-    private void fillCache() throws ParseException {
-        IgniteCache<Integer, Enemy> c1 = ignite(0).cache("enemy");
-        IgniteCache<Integer, EnemyCamp> c2 = ignite(0).cache("camp");
-
-        final ThreadLocalRandom rnd = ThreadLocalRandom.current();
-
-        WKTReader r = new WKTReader();
-
-        for (int i = 0; i < ENEMYCAMP_SAMPLES_COUNT; i++) {
-            final String point = String.format("POINT(%d %d)", rnd.nextInt(100), rnd.nextInt(100));
-
-            c2.put(i, new EnemyCamp(r.read(point), "camp-" + i));
-        }
-
-        for (int i = 0; i < ENEMY_SAMPLES_COUNT; i++) {
-            int campID = 30 + rnd.nextInt(ENEMYCAMP_SAMPLES_COUNT + 10);
-
-            c1.put(i, new Enemy(campID, "enemy-" + i));
-        }
-    }
-
-    /**
-     *
-     */
-    private static class Enemy {
-        /** */
-        @QuerySqlField
-        int campId;
-
-        /** */
-        @QuerySqlField
-        String name;
-
-        /**
-         * @param campId Camp ID.
-         * @param name Name.
-         */
-        public Enemy(int campId, String name) {
-            this.campId = campId;
-            this.name = name;
-        }
-    }
-
-    /**
-     *
-     */
-    protected static class EnemyCamp implements Serializable {
-        /** */
-        @QuerySqlField(index = true)
-        Geometry coords;
-
-        /** */
-        @QuerySqlField
-        private String name;
-
-        /**
-         * @param coords Coordinates.
-         * @param name Name.
-         */
-        EnemyCamp(Geometry coords, String name) {
-            this.coords = coords;
-            this.name = name;
-        }
-    }
-}
\ No newline at end of file

http://git-wip-us.apache.org/repos/asf/ignite/blob/f923bc91/modules/geospatial/src/test/java/org/apache/ignite/internal/processors/query/h2/GridH2IndexingSegmentedGeoSelfTest.java
----------------------------------------------------------------------
diff --git a/modules/geospatial/src/test/java/org/apache/ignite/internal/processors/query/h2/GridH2IndexingSegmentedGeoSelfTest.java b/modules/geospatial/src/test/java/org/apache/ignite/internal/processors/query/h2/GridH2IndexingSegmentedGeoSelfTest.java
deleted file mode 100644
index eb0fd0f..0000000
--- a/modules/geospatial/src/test/java/org/apache/ignite/internal/processors/query/h2/GridH2IndexingSegmentedGeoSelfTest.java
+++ /dev/null
@@ -1,37 +0,0 @@
-/*
- * Licensed to the Apache Software Foundation (ASF) under one or more
- * contributor license agreements.  See the NOTICE file distributed with
- * this work for additional information regarding copyright ownership.
- * The ASF licenses this file to You under the Apache License, Version 2.0
- * (the "License"); you may not use this file except in compliance with
- * the License.  You may obtain a copy of the License at
- *
- *      http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-package org.apache.ignite.internal.processors.query.h2;
-
-import org.apache.ignite.configuration.CacheConfiguration;
-
-/**
- * Test for segmented geo index.
- */
-public class GridH2IndexingSegmentedGeoSelfTest extends GridH2IndexingGeoSelfTest {
-    /** */
-    private static int QRY_PARALLELISM_LVL = 7;
-
-    /** {@inheritDoc} */
-    @Override
-    protected <K, V> CacheConfiguration<K, V> cacheConfig(String name, boolean partitioned,
-        Class<?>... idxTypes) throws Exception {
-        final CacheConfiguration<K, V> ccfg = super.cacheConfig(name, partitioned, idxTypes);
-
-        return ccfg.setQueryParallelism(partitioned ? QRY_PARALLELISM_LVL : 1);
-    }
-}
\ No newline at end of file

http://git-wip-us.apache.org/repos/asf/ignite/blob/f923bc91/modules/geospatial/src/test/java/org/apache/ignite/internal/processors/query/h2/H2IndexingAbstractGeoSelfTest.java
----------------------------------------------------------------------
diff --git a/modules/geospatial/src/test/java/org/apache/ignite/internal/processors/query/h2/H2IndexingAbstractGeoSelfTest.java b/modules/geospatial/src/test/java/org/apache/ignite/internal/processors/query/h2/H2IndexingAbstractGeoSelfTest.java
new file mode 100644
index 0000000..914bb62
--- /dev/null
+++ b/modules/geospatial/src/test/java/org/apache/ignite/internal/processors/query/h2/H2IndexingAbstractGeoSelfTest.java
@@ -0,0 +1,673 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one or more
+ * contributor license agreements.  See the NOTICE file distributed with
+ * this work for additional information regarding copyright ownership.
+ * The ASF licenses this file to You under the Apache License, Version 2.0
+ * (the "License"); you may not use this file except in compliance with
+ * the License.  You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.ignite.internal.processors.query.h2;
+
+import com.vividsolutions.jts.geom.Geometry;
+import com.vividsolutions.jts.io.ParseException;
+import com.vividsolutions.jts.io.WKTReader;
+import org.apache.ignite.IgniteCache;
+import org.apache.ignite.cache.CacheAtomicityMode;
+import org.apache.ignite.cache.CacheMode;
+import org.apache.ignite.cache.QueryEntity;
+import org.apache.ignite.cache.QueryIndex;
+import org.apache.ignite.cache.QueryIndexType;
+import org.apache.ignite.cache.query.SqlFieldsQuery;
+import org.apache.ignite.cache.query.SqlQuery;
+import org.apache.ignite.cache.query.annotations.QuerySqlField;
+import org.apache.ignite.configuration.CacheConfiguration;
+import org.apache.ignite.configuration.IgniteConfiguration;
+import org.apache.ignite.internal.IgniteInternalFuture;
+import org.apache.ignite.internal.binary.BinaryMarshaller;
+import org.apache.ignite.internal.processors.cache.GridCacheAbstractSelfTest;
+import org.apache.ignite.internal.processors.query.QueryUtils;
+import org.apache.ignite.internal.util.GridStringBuilder;
+import org.apache.ignite.internal.util.typedef.internal.SB;
+import org.apache.ignite.internal.util.typedef.internal.U;
+import org.apache.ignite.testframework.GridTestUtils;
+
+import javax.cache.Cache;
+import java.io.Serializable;
+import java.util.ArrayList;
+import java.util.Arrays;
+import java.util.Collection;
+import java.util.HashSet;
+import java.util.List;
+import java.util.Map;
+import java.util.Set;
+import java.util.concurrent.Callable;
+import java.util.concurrent.ThreadLocalRandom;
+import java.util.concurrent.atomic.AtomicBoolean;
+import java.util.concurrent.atomic.AtomicReference;
+
+/**
+ * Geo-indexing test.
+ */
+public abstract class H2IndexingAbstractGeoSelfTest extends GridCacheAbstractSelfTest {
+    /** */
+    private static final int CNT = 100;
+
+    /** */
+    private static final long DUR = 60000L;
+
+    /** Number of generated samples. */
+    public static final int ENEMYCAMP_SAMPLES_COUNT = 500;
+
+    /** Number of generated samples. */
+    public static final int ENEMY_SAMPLES_COUNT = 1000;
+
+    /** */
+    private static final int QRY_PARALLELISM_LVL = 7;
+
+    /** Binary marshaller flag. */
+    private final boolean binary;
+
+    /** Segmented index flag. */
+    private final boolean segmented;
+
+    /**
+     * Constructor.
+     *
+     * @param binary Binary marshaller flag.
+     * @param segmented Segmented index flag.
+     */
+    protected H2IndexingAbstractGeoSelfTest(boolean binary, boolean segmented) {
+        this.binary = binary;
+        this.segmented = segmented;
+    }
+
+    /** {@inheritDoc} */
+    @Override protected int gridCount() {
+        return 3;
+    }
+
+    /** {@inheritDoc} */
+    @Override protected long getTestTimeout() {
+        return DUR * 3;
+    }
+
+    /** {@inheritDoc} */
+    @Override protected IgniteConfiguration getConfiguration(String igniteInstanceName) throws Exception {
+        IgniteConfiguration cfg = super.getConfiguration(igniteInstanceName);
+
+        if (binary)
+            cfg.setMarshaller(new BinaryMarshaller());
+
+        return cfg;
+    }
+
+    /**
+     * Create cache.
+     *
+     * @param name Name.
+     * @param partitioned Partitioned flag.
+     * @param keyCls Key class.
+     * @param valCls Value class.
+     * @return Cache.
+     * @throws Exception If failed.
+     */
+    @SuppressWarnings("unchecked")
+    protected <K, V> IgniteCache<K, V> createCache(String name, boolean partitioned, Class<?> keyCls, Class<?> valCls)
+        throws Exception {
+        return createCache(name, partitioned, keyCls, valCls, false);
+    }
+
+    /**
+     * Create cache.
+     *
+     * @param name Name.
+     * @param partitioned Partitioned flag.
+     * @param keyCls Key class.
+     * @param valCls Value class.
+     * @param dynamicIdx Whether index should be created dynamically.
+     * @return Cache.
+     * @throws Exception If failed.
+     */
+    @SuppressWarnings("unchecked")
+    protected <K, V> IgniteCache<K, V> createCache(String name, boolean partitioned, Class<?> keyCls, Class<?> valCls,
+        boolean dynamicIdx) throws Exception {
+        CacheConfiguration<K, V> ccfg = cacheConfig(name, partitioned, keyCls, valCls);
+
+        if (dynamicIdx) {
+            Collection<QueryEntity> entities = ccfg.getQueryEntities();
+
+            assertEquals(1, entities.size());
+
+            QueryEntity entity = entities.iterator().next();
+
+            Collection<QueryIndex> idxs = new ArrayList<>(entity.getIndexes());
+
+            entity.clearIndexes();
+
+            IgniteCache<K, V> cache = grid(0).getOrCreateCache(ccfg);
+
+            // Process indexes dynamically.
+            for (QueryIndex idx : idxs)
+                createDynamicIndex(cache, entity, idx);
+
+            return cache;
+        }
+        else
+            return grid(0).getOrCreateCache(ccfg);
+    }
+
+    /**
+     * Create dynamic index.
+     *
+     * @param cache Cache.
+     * @param entity Entity.
+     * @param idx Index.
+     * @throws Exception If failed.
+     */
+    private void createDynamicIndex(IgniteCache cache, QueryEntity entity, QueryIndex idx) throws Exception {
+        boolean spatial = idx.getIndexType() == QueryIndexType.GEOSPATIAL;
+
+        GridStringBuilder sb = new SB("CREATE ")
+            .a(spatial ? "SPATIAL " : "")
+            .a("INDEX ")
+            .a(idx.getName())
+            .a(" ON ")
+            .a(QueryUtils.tableName(entity))
+            .a(" (");
+
+        boolean first = true;
+
+        for (Map.Entry<String, Boolean> fieldEntry : idx.getFields().entrySet()) {
+            if (first)
+                first = false;
+            else
+                sb.a(", ");
+
+            String name = fieldEntry.getKey();
+            boolean asc = fieldEntry.getValue();
+
+            sb.a("\"" + name + "\"").a(" ").a(asc ? "ASC" : "DESC");
+        }
+
+        sb.a(')');
+
+        String sql = sb.toString();
+
+        cache.query(new SqlFieldsQuery(sql)).getAll();
+    }
+
+    /**
+     * @param name Cache name.
+     * @param partitioned Partition or replicated cache.
+     * @param keyCls Key class.
+     * @param valCls Value class.
+     * @return Cache configuration.
+     */
+    private <K, V> CacheConfiguration<K, V> cacheConfig(String name, boolean partitioned, Class<?> keyCls,
+        Class<?> valCls) throws Exception {
+        CacheConfiguration<K, V> ccfg = new CacheConfiguration<K, V>(name)
+            .setName(name)
+            .setCacheMode(partitioned ? CacheMode.PARTITIONED : CacheMode.REPLICATED)
+            .setAtomicityMode(CacheAtomicityMode.ATOMIC)
+            .setIndexedTypes(keyCls, valCls);
+
+        if (segmented)
+            ccfg.setQueryParallelism(partitioned ? QRY_PARALLELISM_LVL : 1);
+
+        return ccfg;
+    }
+
+    /**
+     * Verify that a query over a primitive {@link Geometry} value column is resolved
+     * through the spatial index on {@code _val}.
+     *
+     * @throws Exception If failed.
+     */
+    public void testPrimitiveGeometry() throws Exception {
+        IgniteCache<Long, Geometry> cache = createCache("geom", true, Long.class, Geometry.class);
+
+        try {
+            WKTReader reader = new WKTReader();
+
+            // Populate the cache with points along the diagonal.
+            for (long key = 0; key < 100; key++)
+                cache.put(key, reader.read("POINT(" + key + " " + key + ")"));
+
+            SqlFieldsQuery explainQry = new SqlFieldsQuery("explain select _key from Geometry where _val && ?")
+                .setArgs(reader.read("POLYGON((5 70, 5 80, 30 80, 30 70, 5 70))"))
+                .setLocal(true);
+
+            Object planRow = cache.query(explainQry).getAll().get(0).get(0);
+
+            String plan = planRow.toString().toLowerCase();
+
+            // The execution plan must reference the spatial index on the value column.
+            assertTrue("__ explain: " + plan, plan.contains("_val_idx"));
+        }
+        finally {
+            cache.destroy();
+        }
+    }
+
+    /**
+     * Test geo-index declared statically (via {@code @QuerySqlField(index = true)} annotation).
+     *
+     * @throws Exception If failed.
+     */
+    public void testGeo() throws Exception {
+        checkGeo(false);
+    }
+
+    /**
+     * Test geo-index created dynamically (via {@code CREATE SPATIAL INDEX} DDL).
+     *
+     * @throws Exception If failed.
+     */
+    public void testGeoDynamic() throws Exception {
+        checkGeo(true);
+    }
+
+    /**
+     * Check geo-index behavior: initial query results, entry updates, removal and the explain plan.
+     *
+     * @param dynamic Whether index should be created dynamically.
+     * @throws Exception If failed.
+     */
+    @SuppressWarnings({"unchecked", "ConstantConditions"})
+    private void checkGeo(boolean dynamic) throws Exception {
+        IgniteCache<Integer, EnemyCamp> cache = createCache("camp", true, Integer.class, EnemyCamp.class, dynamic);
+
+        try {
+            WKTReader r = new WKTReader();
+
+            // Initial data: A falls into the first (upper-left) polygon, C and D into the second.
+            cache.getAndPut(0, new EnemyCamp(r.read("POINT(25 75)"), "A"));
+            cache.getAndPut(1, new EnemyCamp(r.read("POINT(70 70)"), "B"));
+            cache.getAndPut(2, new EnemyCamp(r.read("POINT(70 30)"), "C"));
+            cache.getAndPut(3, new EnemyCamp(r.read("POINT(75 25)"), "D"));
+
+            // NOTE(review): raw SqlQuery construction — new SqlQuery<>(...) would avoid the
+            // unchecked warning suppressed above.
+            SqlQuery<Integer, EnemyCamp> qry = new SqlQuery(EnemyCamp.class, "coords && ?");
+
+            // First polygon intersects only A.
+            Collection<Cache.Entry<Integer, EnemyCamp>> res = cache.query(
+                qry.setArgs(r.read("POLYGON((5 70, 5 80, 30 80, 30 70, 5 70))"))).getAll();
+
+            checkPoints(res, "A");
+
+            // Second polygon intersects C and D.
+            res = cache.query(
+                qry.setArgs(r.read("POLYGON((10 5, 10 35, 70 30, 75 25, 10 5))"))).getAll();
+
+            checkPoints(res, "C", "D");
+
+            // Move B to the first polygon.
+            cache.getAndPut(1, new EnemyCamp(r.read("POINT(20 75)"), "B"));
+
+            res = cache.query(
+                qry.setArgs(r.read("POLYGON((5 70, 5 80, 30 80, 30 70, 5 70))"))).getAll();
+
+            checkPoints(res, "A", "B");
+
+            // Move B to the second polygon.
+            cache.getAndPut(1, new EnemyCamp(r.read("POINT(30 30)"), "B"));
+
+            res = cache.query(
+                qry.setArgs(r.read("POLYGON((10 5, 10 35, 70 30, 75 25, 10 5))"))).getAll();
+
+            checkPoints(res, "B", "C", "D");
+
+            // Remove B: both polygons must return to their pre-B contents.
+            cache.getAndRemove(1);
+
+            res = cache.query(
+                qry.setArgs(r.read("POLYGON((5 70, 5 80, 30 80, 30 70, 5 70))"))).getAll();
+
+            checkPoints(res, "A");
+
+            res = cache.query(
+                qry.setArgs(r.read("POLYGON((10 5, 10 35, 70 30, 75 25, 10 5))"))).getAll();
+
+            checkPoints(res, "C", "D");
+
+            // Check explain request: the spatial index must be picked for the '&&' predicate.
+            String plan = cache.query(new SqlFieldsQuery("explain select * from EnemyCamp " +
+                "where coords && 'POINT(25 75)'")).getAll().get(0).get(0).toString().toLowerCase();
+
+            assertTrue("__ explain: " + plan, plan.contains("coords_idx"));
+
+            // A dynamically created index must also be droppable via DDL.
+            if (dynamic)
+                cache.query(new SqlFieldsQuery("DROP INDEX coords_idx")).getAll();
+        }
+        finally {
+            cache.destroy();
+        }
+    }
+
+    /**
+     * Test geo indexing multithreaded (static index).
+     *
+     * @throws Exception If failed.
+     */
+    public void testGeoMultithreaded() throws Exception {
+        checkGeoMultithreaded(false);
+    }
+
+    /**
+     * Test geo indexing multithreaded with dynamic index creation.
+     *
+     * @throws Exception If failed.
+     */
+    public void testGeoMultithreadedDynamic() throws Exception {
+        checkGeoMultithreaded(true);
+    }
+
+    /**
+     * Check geo indexing under concurrent updates and queries: writer threads keep replacing
+     * entries with random points while query threads verify that a polygon covering the whole
+     * coordinate space always returns every key.
+     *
+     * @param dynamic Whether index should be created dynamically.
+     * @throws Exception If failed.
+     */
+    @SuppressWarnings("unchecked")
+    private void checkGeoMultithreaded(boolean dynamic) throws Exception {
+        final IgniteCache<Integer, EnemyCamp> cache1 =
+            createCache("camp", true, Integer.class, EnemyCamp.class, dynamic);
+
+        // Same cache accessed from two other grid nodes, so operations hit different nodes.
+        final IgniteCache<Integer, EnemyCamp> cache2 = grid(1).cache("camp");
+        final IgniteCache<Integer, EnemyCamp> cache3 = grid(2).cache("camp");
+
+        try {
+            final String[] points = new String[CNT];
+
+            WKTReader r = new WKTReader();
+
+            ThreadLocalRandom rnd = ThreadLocalRandom.current();
+
+            // Seed CNT entries with random points; value name equals the key as a string.
+            for (int idx = 0; idx < CNT; idx++) {
+                int x = rnd.nextInt(1, 100);
+                int y = rnd.nextInt(1, 100);
+
+                cache1.getAndPut(idx, new EnemyCamp(r.read("POINT(" + x + " " + y + ")"), Integer.toString(idx)));
+
+                points[idx] = Integer.toString(idx);
+            }
+
+            // NOTE(review): fixed 200 ms pause — presumably to let initial updates settle
+            // before concurrent querying starts; TODO confirm why this delay is needed.
+            Thread.sleep(200);
+
+            final AtomicBoolean stop = new AtomicBoolean();
+            final AtomicReference<Exception> err = new AtomicReference<>();
+
+            // Writer threads: keep replacing existing keys with new random points via a
+            // randomly chosen node.
+            IgniteInternalFuture<?> putFut = GridTestUtils.runMultiThreadedAsync(new Callable<Void>() {
+                @Override public Void call() throws Exception {
+                    WKTReader r = new WKTReader();
+
+                    ThreadLocalRandom rnd = ThreadLocalRandom.current();
+
+                    while (!stop.get()) {
+                        int cacheIdx = rnd.nextInt(0, 3);
+
+                        IgniteCache<Integer, EnemyCamp> cache = cacheIdx == 0 ? cache1 : cacheIdx == 1 ? cache2 : cache3;
+
+                        int idx = rnd.nextInt(CNT);
+                        int x = rnd.nextInt(1, 100);
+                        int y = rnd.nextInt(1, 100);
+
+                        cache.getAndPut(idx, new EnemyCamp(r.read("POINT(" + x + " " + y + ")"), Integer.toString(idx)));
+
+                        U.sleep(50);
+                    }
+
+                    return null;
+                }
+            }, Runtime.getRuntime().availableProcessors(), "put-thread");
+
+            // Query threads: a polygon covering (0..100, 0..100) must always match all CNT
+            // points regardless of concurrent updates. Failures are captured into 'err'
+            // (not thrown here) and stop all threads.
+            IgniteInternalFuture<?> qryFut = GridTestUtils.runMultiThreadedAsync(new Callable<Void>() {
+                @Override public Void call() throws Exception {
+                    WKTReader r = new WKTReader();
+
+                    ThreadLocalRandom rnd = ThreadLocalRandom.current();
+
+                    while (!stop.get()) {
+                        try {
+                            int cacheIdx = rnd.nextInt(0, 3);
+
+                            IgniteCache<Integer, EnemyCamp> cache = cacheIdx == 0 ? cache1 : cacheIdx == 1 ? cache2 : cache3;
+
+                            SqlQuery<Integer, EnemyCamp> qry = new SqlQuery<>(
+                                EnemyCamp.class, "coords && ?");
+
+                            Collection<Cache.Entry<Integer, EnemyCamp>> res = cache.query(qry.setArgs(
+                                r.read("POLYGON((0 0, 0 100, 100 100, 100 0, 0 0))"))).getAll();
+
+                            checkPoints(res, points);
+
+                            U.sleep(5);
+                        }
+                        catch (Exception e) {
+                            err.set(e);
+
+                            stop.set(true);
+
+                            break;
+                        }
+                    }
+
+                    return null;
+                }
+            }, 4, "qry-thread");
+
+            // Let the load run for a fixed interval, then stop and join all threads.
+            U.sleep(6000L);
+
+            stop.set(true);
+
+            putFut.get();
+            qryFut.get();
+
+            // Re-throw any failure captured by a query thread.
+            Exception err0 = err.get();
+
+            if (err0 != null)
+                throw err0;
+        }
+        finally {
+            cache1.destroy();
+        }
+    }
+
+    /**
+     * Assert that the query result contains exactly the expected points (by camp name),
+     * with no duplicates, extras or omissions.
+     *
+     * @param res Result.
+     * @param points Expected points.
+     */
+    private void checkPoints(Collection<Cache.Entry<Integer, EnemyCamp>> res, String... points) {
+        Set<String> expected = new HashSet<>(Arrays.asList(points));
+
+        assertEquals(expected.size(), res.size());
+
+        // Remove each returned name; a duplicate or unexpected name fails the assertion.
+        for (Cache.Entry<Integer, EnemyCamp> entry : res)
+            assertTrue(expected.remove(entry.getValue().name));
+    }
+
+    /**
+     * Test segmented geo-index join on PARTITIONED cache.
+     *
+     * @throws Exception If failed.
+     */
+    public void testSegmentedGeoIndexJoinPartitioned() throws Exception {
+        checkSegmentedGeoIndexJoin(true, false);
+    }
+
+    /**
+     * Test segmented geo-index join on PARTITIONED cache with dynamically created index.
+     *
+     * @throws Exception If failed.
+     */
+    public void testSegmentedGeoIndexJoinPartitionedDynamic() throws Exception {
+        checkSegmentedGeoIndexJoin(true, true);
+    }
+
+    /**
+     * Test segmented geo-index join on REPLICATED cache.
+     *
+     * @throws Exception If failed.
+     */
+    public void testSegmentedGeoIndexJoinReplicated() throws Exception {
+        checkSegmentedGeoIndexJoin(false, false);
+    }
+
+    /**
+     * Test segmented geo-index join on REPLICATED cache with dynamically created index.
+     *
+     * @throws Exception If failed.
+     */
+    public void testSegmentedGeoIndexJoinReplicatedDynamic() throws Exception {
+        checkSegmentedGeoIndexJoin(false, true);
+    }
+
+    /**
+     * Check segmented geo-index join: populates an "enemy" cache referencing a "camp" cache,
+     * then validates both distributed and local join queries.
+     *
+     * @param partitioned Partitioned flag for the "camp" cache.
+     * @param dynamic Whether index should be created dynamically.
+     * @throws Exception If failed.
+     */
+    private void checkSegmentedGeoIndexJoin(boolean partitioned, boolean dynamic) throws Exception {
+        IgniteCache<Integer, Enemy> c1 = createCache("enemy", true, Integer.class, Enemy.class);
+        IgniteCache<Integer, EnemyCamp> c2 = createCache("camp", partitioned, Integer.class, EnemyCamp.class, dynamic);
+
+        try {
+            final ThreadLocalRandom rnd = ThreadLocalRandom.current();
+
+            WKTReader r = new WKTReader();
+
+            // Camps occupy keys [0, ENEMYCAMP_SAMPLES_COUNT) with random points.
+            for (int i = 0; i < ENEMYCAMP_SAMPLES_COUNT; i++) {
+                final String point = String.format("POINT(%d %d)", rnd.nextInt(100), rnd.nextInt(100));
+
+                c2.put(i, new EnemyCamp(r.read(point), "camp-" + i));
+            }
+
+            // NOTE(review): camp IDs are drawn from [30, 30 + ENEMYCAMP_SAMPLES_COUNT + 10),
+            // so some enemies reference camps that do not exist — presumably intentional to
+            // exercise non-matching join rows; TODO confirm.
+            for (int i = 0; i < ENEMY_SAMPLES_COUNT; i++) {
+                int campID = 30 + rnd.nextInt(ENEMYCAMP_SAMPLES_COUNT + 10);
+
+                c1.put(i, new Enemy(campID, "enemy-" + i));
+            }
+
+            checkDistributedQuery();
+
+            checkLocalQuery();
+        }
+        finally {
+            c1.destroy();
+            c2.destroy();
+        }
+    }
+
+    /**
+     * Check distributed join query: the SQL join result size must match a count computed
+     * by iterating the caches directly.
+     *
+     * @throws ParseException If failed.
+     */
+    private void checkDistributedQuery() throws ParseException {
+        IgniteCache<Integer, Enemy> c1 = grid(0).cache("enemy");
+        IgniteCache<Integer, EnemyCamp> c2 = grid(0).cache("camp");
+
+        final Geometry lethalArea = new WKTReader().read("POLYGON((30 30, 30 70, 70 70, 70 30, 30 30))");
+
+        int expectedEnemies = 0;
+
+        // Count enemies whose camp exists and falls inside the lethal area.
+        // NOTE(review): the lower bound 30 mirrors the generation logic in
+        // checkSegmentedGeoIndexJoin; if ENEMYCAMP_SAMPLES_COUNT <= 30 this filter never
+        // matches and the assertion below is vacuously 0 == 0 — verify the constant.
+        for (Cache.Entry<Integer, Enemy> e : c1) {
+            final Integer campID = e.getValue().campId;
+
+            if (30 <= campID && campID < ENEMYCAMP_SAMPLES_COUNT) {
+                final EnemyCamp camp = c2.get(campID);
+
+                if (lethalArea.covers(camp.coords))
+                    expectedEnemies++;
+            }
+        }
+
+        final SqlFieldsQuery query = new SqlFieldsQuery("select e._val, c._val from \"enemy\".Enemy e, \"camp\".EnemyCamp c " +
+            "where e.campId = c._key and c.coords && ?").setArgs(lethalArea);
+
+        List<List<?>> result = c1.query(query.setDistributedJoins(true)).getAll();
+
+        assertEquals(expectedEnemies, result.size());
+    }
+
+    /**
+     * Check local join query: the expected count is computed from entries local to grid(0)
+     * only, so the SQL query with {@code setLocal(true)} must match it.
+     *
+     * @throws ParseException If failed.
+     */
+    private void checkLocalQuery() throws ParseException {
+        IgniteCache<Integer, Enemy> c1 = grid(0).cache("enemy");
+        IgniteCache<Integer, EnemyCamp> c2 = grid(0).cache("camp");
+
+        final Geometry lethalArea = new WKTReader().read("POLYGON((30 30, 30 70, 70 70, 70 30, 30 30))");
+
+        // IDs of camps whose primary/backup copies reside on this node.
+        Set<Integer> localCampsIDs = new HashSet<>();
+
+        for(Cache.Entry<Integer, EnemyCamp> e : c2.localEntries())
+            localCampsIDs.add(e.getKey());
+
+        int expectedEnemies = 0;
+
+        // Count local enemies whose camp is also local and inside the lethal area —
+        // a local join can only see co-located pairs.
+        for (Cache.Entry<Integer, Enemy> e : c1.localEntries()) {
+            final Integer campID = e.getValue().campId;
+
+            if (localCampsIDs.contains(campID)) {
+                final EnemyCamp camp = c2.get(campID);
+
+                if (lethalArea.covers(camp.coords))
+                    expectedEnemies++;
+            }
+        }
+
+        final SqlFieldsQuery query = new SqlFieldsQuery("select e._val, c._val from \"enemy\".Enemy e, " +
+            "\"camp\".EnemyCamp c where e.campId = c._key and c.coords && ?").setArgs(lethalArea);
+
+        List<List<?>> result = c1.query(query.setLocal(true)).getAll();
+
+        assertEquals(expectedEnemies, result.size());
+    }
+
+    /**
+     * Enemy entity; references a camp by ID (joined with {@code EnemyCamp}'s key in SQL).
+     */
+    private static class Enemy {
+        /** ID of the camp this enemy belongs to. */
+        @QuerySqlField
+        int campId;
+
+        /** Enemy name. */
+        @QuerySqlField
+        String name;
+
+        /**
+         * Constructor.
+         *
+         * @param campId Camp ID.
+         * @param name Name.
+         */
+        public Enemy(int campId, String name) {
+            this.campId = campId;
+            this.name = name;
+        }
+    }
+
+    /**
+     * Enemy camp entity with spatially-indexed coordinates.
+     */
+    protected static class EnemyCamp implements Serializable {
+        /** Camp coordinates; statically indexed via the annotation below. */
+        @QuerySqlField(index = true)
+        Geometry coords;
+
+        /** Camp name. */
+        @QuerySqlField
+        private String name;
+
+        /**
+         * Constructor.
+         *
+         * @param coords Coordinates.
+         * @param name Name.
+         */
+        EnemyCamp(Geometry coords, String name) {
+            this.coords = coords;
+            this.name = name;
+        }
+    }
+}
\ No newline at end of file

http://git-wip-us.apache.org/repos/asf/ignite/blob/f923bc91/modules/geospatial/src/test/java/org/apache/ignite/internal/processors/query/h2/H2IndexingBinaryGeoSelfTest.java
----------------------------------------------------------------------
diff --git a/modules/geospatial/src/test/java/org/apache/ignite/internal/processors/query/h2/H2IndexingBinaryGeoSelfTest.java b/modules/geospatial/src/test/java/org/apache/ignite/internal/processors/query/h2/H2IndexingBinaryGeoSelfTest.java
new file mode 100644
index 0000000..eb3ec6e
--- /dev/null
+++ b/modules/geospatial/src/test/java/org/apache/ignite/internal/processors/query/h2/H2IndexingBinaryGeoSelfTest.java
@@ -0,0 +1,30 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one or more
+ * contributor license agreements.  See the NOTICE file distributed with
+ * this work for additional information regarding copyright ownership.
+ * The ASF licenses this file to You under the Apache License, Version 2.0
+ * (the "License"); you may not use this file except in compliance with
+ * the License.  You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.ignite.internal.processors.query.h2;
+
+/**
+ * Geo-indexing test for binary mode.
+ */
+public class H2IndexingBinaryGeoSelfTest extends H2IndexingAbstractGeoSelfTest {
+    /**
+     * Constructor.
+     */
+    public H2IndexingBinaryGeoSelfTest() {
+        super(true, false); // Binary mode enabled, segmentation disabled.
+    }
+}
\ No newline at end of file

http://git-wip-us.apache.org/repos/asf/ignite/blob/f923bc91/modules/geospatial/src/test/java/org/apache/ignite/internal/processors/query/h2/H2IndexingBinarySegmentedGeoSelfTest.java
----------------------------------------------------------------------
diff --git a/modules/geospatial/src/test/java/org/apache/ignite/internal/processors/query/h2/H2IndexingBinarySegmentedGeoSelfTest.java b/modules/geospatial/src/test/java/org/apache/ignite/internal/processors/query/h2/H2IndexingBinarySegmentedGeoSelfTest.java
new file mode 100644
index 0000000..fe94a3d
--- /dev/null
+++ b/modules/geospatial/src/test/java/org/apache/ignite/internal/processors/query/h2/H2IndexingBinarySegmentedGeoSelfTest.java
@@ -0,0 +1,30 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one or more
+ * contributor license agreements.  See the NOTICE file distributed with
+ * this work for additional information regarding copyright ownership.
+ * The ASF licenses this file to You under the Apache License, Version 2.0
+ * (the "License"); you may not use this file except in compliance with
+ * the License.  You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.ignite.internal.processors.query.h2;
+
+/**
+ * Geo-indexing test for binary mode and segmented cache.
+ */
+public class H2IndexingBinarySegmentedGeoSelfTest extends H2IndexingAbstractGeoSelfTest {
+    /**
+     * Constructor.
+     */
+    public H2IndexingBinarySegmentedGeoSelfTest() {
+        super(true, true); // Binary mode enabled, segmentation enabled.
+    }
+}
\ No newline at end of file

http://git-wip-us.apache.org/repos/asf/ignite/blob/f923bc91/modules/geospatial/src/test/java/org/apache/ignite/internal/processors/query/h2/H2IndexingGeoSelfTest.java
----------------------------------------------------------------------
diff --git a/modules/geospatial/src/test/java/org/apache/ignite/internal/processors/query/h2/H2IndexingGeoSelfTest.java b/modules/geospatial/src/test/java/org/apache/ignite/internal/processors/query/h2/H2IndexingGeoSelfTest.java
new file mode 100644
index 0000000..280c946
--- /dev/null
+++ b/modules/geospatial/src/test/java/org/apache/ignite/internal/processors/query/h2/H2IndexingGeoSelfTest.java
@@ -0,0 +1,30 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one or more
+ * contributor license agreements.  See the NOTICE file distributed with
+ * this work for additional information regarding copyright ownership.
+ * The ASF licenses this file to You under the Apache License, Version 2.0
+ * (the "License"); you may not use this file except in compliance with
+ * the License.  You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.ignite.internal.processors.query.h2;
+
+/**
+ * Geo-indexing test (non-binary, non-segmented baseline).
+ */
+public class H2IndexingGeoSelfTest extends H2IndexingAbstractGeoSelfTest {
+    /**
+     * Constructor.
+     */
+    public H2IndexingGeoSelfTest() {
+        super(false, false); // Binary mode disabled, segmentation disabled.
+    }
+}
\ No newline at end of file

http://git-wip-us.apache.org/repos/asf/ignite/blob/f923bc91/modules/geospatial/src/test/java/org/apache/ignite/internal/processors/query/h2/H2IndexingSegmentedGeoSelfTest.java
----------------------------------------------------------------------
diff --git a/modules/geospatial/src/test/java/org/apache/ignite/internal/processors/query/h2/H2IndexingSegmentedGeoSelfTest.java b/modules/geospatial/src/test/java/org/apache/ignite/internal/processors/query/h2/H2IndexingSegmentedGeoSelfTest.java
new file mode 100644
index 0000000..b2cf781
--- /dev/null
+++ b/modules/geospatial/src/test/java/org/apache/ignite/internal/processors/query/h2/H2IndexingSegmentedGeoSelfTest.java
@@ -0,0 +1,30 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one or more
+ * contributor license agreements.  See the NOTICE file distributed with
+ * this work for additional information regarding copyright ownership.
+ * The ASF licenses this file to You under the Apache License, Version 2.0
+ * (the "License"); you may not use this file except in compliance with
+ * the License.  You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.ignite.internal.processors.query.h2;
+
+/**
+ * Test for segmented geo index.
+ */
+public class H2IndexingSegmentedGeoSelfTest extends H2IndexingAbstractGeoSelfTest {
+    /**
+     * Constructor.
+     */
+    public H2IndexingSegmentedGeoSelfTest() {
+        super(false, true); // Binary mode disabled, segmentation enabled.
+    }
+}
\ No newline at end of file

http://git-wip-us.apache.org/repos/asf/ignite/blob/f923bc91/modules/geospatial/src/test/java/org/apache/ignite/testsuites/GeoSpatialIndexingTestSuite.java
----------------------------------------------------------------------
diff --git a/modules/geospatial/src/test/java/org/apache/ignite/testsuites/GeoSpatialIndexingTestSuite.java b/modules/geospatial/src/test/java/org/apache/ignite/testsuites/GeoSpatialIndexingTestSuite.java
index 3907b9e..2395b93 100644
--- a/modules/geospatial/src/test/java/org/apache/ignite/testsuites/GeoSpatialIndexingTestSuite.java
+++ b/modules/geospatial/src/test/java/org/apache/ignite/testsuites/GeoSpatialIndexingTestSuite.java
@@ -18,9 +18,10 @@
 package org.apache.ignite.testsuites;
 
 import junit.framework.TestSuite;
-import org.apache.ignite.internal.processors.query.h2.GridBinaryH2IndexingGeoSelfTest;
-import org.apache.ignite.internal.processors.query.h2.GridH2IndexingGeoSelfTest;
-import org.apache.ignite.internal.processors.query.h2.GridH2IndexingSegmentedGeoSelfTest;
+import org.apache.ignite.internal.processors.query.h2.H2IndexingBinaryGeoSelfTest;
+import org.apache.ignite.internal.processors.query.h2.H2IndexingBinarySegmentedGeoSelfTest;
+import org.apache.ignite.internal.processors.query.h2.H2IndexingGeoSelfTest;
+import org.apache.ignite.internal.processors.query.h2.H2IndexingSegmentedGeoSelfTest;
 
 /**
  * Geospatial indexing tests.
@@ -33,10 +34,11 @@ public class GeoSpatialIndexingTestSuite extends TestSuite {
     public static TestSuite suite() throws Exception {
         TestSuite suite = new TestSuite("H2 Geospatial Indexing Test Suite");
 
-        // Geo.
-        suite.addTestSuite(GridH2IndexingGeoSelfTest.class);
-        suite.addTestSuite(GridBinaryH2IndexingGeoSelfTest.class);
-        suite.addTestSuite(GridH2IndexingSegmentedGeoSelfTest.class);
+        suite.addTestSuite(H2IndexingGeoSelfTest.class);
+        suite.addTestSuite(H2IndexingSegmentedGeoSelfTest.class);
+
+        suite.addTestSuite(H2IndexingBinaryGeoSelfTest.class);
+        suite.addTestSuite(H2IndexingBinarySegmentedGeoSelfTest.class);
 
         return suite;
     }


Mime
View raw message