diff --git a/CHANGELOG.md b/CHANGELOG.md
index bc9e9afa5..c0619bfb5 100644
--- a/CHANGELOG.md
+++ b/CHANGELOG.md
@@ -2,10 +2,17 @@
## 3.8.2 (Upcoming)
-* Show error message when unsupported PER PARTITION LIMIT option is used
+* Optimize columns mapping (improves indexing performance)
+* Add generic support for index partitioning
+* Add token-based index partitioner
+* Upgrade to Scala 2.12.0
+* Avoid unnecessary string interpolations in logging
+* Avoid unnecessary string interpolations in tracing
* Add support for geospatial shapes in bounding box search
* Add support for geospatial shapes in distance search
-* Improve performance of needs read before write calculation
+* Improve performance of needs-before-write calculation
+* Show error message when unsupported PER PARTITION LIMIT option is used
+* Upgrade all JSON serializers to FasterXML Jackson 2.8.6
## 3.8.1 (October 17, 2016)
@@ -97,6 +104,7 @@
## 3.0.3.1 (March 04, 2016)
* Fix performance issues with ClusteringIndexNamesFilter
+
* Add indexing of WKT geographical shapes (point, linestring, polygon and their multipart)
* Add search by WKT geographical shapes (point, linestring, polygon and their multipart)
* Add API for search-time transformation of WKT geographical shapes
diff --git a/Jenkinsfile b/Jenkinsfile
new file mode 100644
index 000000000..b4c92f9b2
--- /dev/null
+++ b/Jenkinsfile
@@ -0,0 +1,56 @@
+@Library('libpipelines@feature/multibranch') _
+
+hose {
+ EMAIL = 'cassandra'
+ MODULE = 'cassandra-lucene-index'
+ DEVTIMEOUT = 50
+ RELEASETIMEOUT = 30
+ FOSS = true
+ REPOSITORY = 'cassandra-lucene-index'
+ LANG = 'java'
+ PKGMODULES = ['plugin']
+ PKGMODULESNAMES = ['stratio-cassandra-lucene-index']
+ DEBARCH = 'all'
+ RPMARCH = 'noarch'
+ EXPOSED_PORTS = [9042, 7199, 8000]
+
+ PARALLELIZE_AT = true
+
+ ATSERVICES = [
+ ['CASSANDRA': [
+ 'image': 'stratio/cassandra-lucene-index:%%VERSION',
+ 'volumes':[
+ 'jts:1.14.0'],
+ 'env': [
+ 'MAX_HEAP=256M',
+ 'START_JOLOKIA=true',
+ 'JOLOKIA_OPTS="port=8000,host=$(hostname --ip)"'],
+ 'sleep': 10]],
+ ]
+
+ ATPARAMETERS = """
+ | -Dit.host=%%CASSANDRA
+ | -Dit.monitor_service=jolokia
+ | -Dit.monitor_services_url=%%CASSANDRA:8000
+ | -DJACOCO_SERVER=%%CASSANDRA
+ | -Dit-embedded=false"""
+
+ DEV = { config ->
+
+ doCompile(config)
+ doUT(config)
+ doPackage(config)
+
+ parallel(DOC: {
+ doDoc(config)
+ }, QC: {
+ doStaticAnalysis(config)
+ }, DEPLOY: {
+ doDeploy(config)
+ }, DOCKER : {
+ doDocker(config)
+ }, failFast: config.FAILFAST)
+
+ doAT(config)
+ }
+}
diff --git a/builder/pom.xml b/builder/pom.xml
index 490af8095..799b1dea3 100644
--- a/builder/pom.xml
+++ b/builder/pom.xml
@@ -38,12 +38,12 @@
com.fasterxml.jackson.core
jackson-core
- 2.8.0
+ 2.8.4
com.fasterxml.jackson.core
jackson-databind
- 2.8.0
+ 2.8.4
junit
diff --git a/builder/src/main/java/com/stratio/cassandra/lucene/builder/Builder.java b/builder/src/main/java/com/stratio/cassandra/lucene/builder/Builder.java
index 899708271..b5b7a8471 100644
--- a/builder/src/main/java/com/stratio/cassandra/lucene/builder/Builder.java
+++ b/builder/src/main/java/com/stratio/cassandra/lucene/builder/Builder.java
@@ -18,6 +18,7 @@
import com.stratio.cassandra.lucene.builder.common.GeoShape;
import com.stratio.cassandra.lucene.builder.common.GeoTransformation;
import com.stratio.cassandra.lucene.builder.index.Index;
+import com.stratio.cassandra.lucene.builder.index.Partitioner;
import com.stratio.cassandra.lucene.builder.index.schema.Schema;
import com.stratio.cassandra.lucene.builder.index.schema.analysis.ClasspathAnalyzer;
import com.stratio.cassandra.lucene.builder.index.schema.analysis.SnowballAnalyzer;
@@ -755,4 +756,32 @@ public static GeoShape.Union union(List shapes) {
public static GeoShape.Union union(String... shapes) {
return union(Stream.of(shapes).map(Builder::wkt).collect(Collectors.toList()));
}
+
+ /**
+ * Returns a new {@link Partitioner.None} to not partition the index.
+ *
+ * Index partitioning is useful to speed up some queries to the detriment of others, depending on the implementation.
+ * It is also useful to overcome the Lucene's hard limit of 2147483519 documents per index.
+ *
+ * @return a new no-action partitioner, equivalent to just not partitioning the index
+ */
+ public static Partitioner nonePartitioner() {
+ return new Partitioner.None();
+ }
+
+ /**
+ * Returns a new {@link Partitioner.OnToken} to split the index in {@code numPartitions} based on the row token.
+ *
+ * Index partitioning is useful to speed up some queries to the detriment of others, depending on the implementation.
+ * It is also useful to overcome the Lucene's hard limit of 2147483519 documents per index.
+ *
+ * Partitioning on token guarantees a good load balancing between partitions while speeding up partition-directed
+ * searches to the detriment of token range searches.
+ *
+ * @param numPartitions the number of partitions
+ * @return a new partitioner based on Cassandra's partitioning token
+ */
+ public static Partitioner partitionerOnToken(int numPartitions) {
+ return new Partitioner.OnToken(numPartitions);
+ }
}
diff --git a/builder/src/main/java/com/stratio/cassandra/lucene/builder/index/Index.java b/builder/src/main/java/com/stratio/cassandra/lucene/builder/index/Index.java
index 85bd58fb7..0e2f6097a 100644
--- a/builder/src/main/java/com/stratio/cassandra/lucene/builder/index/Index.java
+++ b/builder/src/main/java/com/stratio/cassandra/lucene/builder/index/Index.java
@@ -40,6 +40,7 @@ public class Index extends JSONBuilder {
private Integer indexingThreads;
private Integer indexingQueuesSize;
private String excludedDataCenters;
+ private Partitioner partitioner;
/**
* Builds a new {@link Index} creation statement for the specified table and column.
@@ -209,6 +210,20 @@ public Index schema(Schema schema) {
return this;
}
+ /**
+ * Sets the {@link Partitioner}.
+ *
+ * Index partitioning is useful to speed up some queries to the detriment of others, depending on the implementation.
+ * It is also useful to overcome the Lucene's hard limit of 2147483519 documents per index.
+ *
+ * @param partitioner the {@link Partitioner}
+ * @return this with the specified partitioner
+ */
+ public Index partitioner(Partitioner partitioner) {
+ this.partitioner = partitioner;
+ return this;
+ }
+
/** {@inheritDoc} */
@Override
public String build() {
@@ -226,6 +241,7 @@ public String build() {
option(sb, "indexing_threads", indexingThreads);
option(sb, "indexing_queues_size", indexingQueuesSize);
option(sb, "excluded_data_centers", excludedDataCenters);
+ option(sb, "partitioner", partitioner);
sb.append(String.format("'schema':'%s'}", schema));
return sb.toString();
}
diff --git a/builder/src/main/java/com/stratio/cassandra/lucene/builder/index/Partitioner.java b/builder/src/main/java/com/stratio/cassandra/lucene/builder/index/Partitioner.java
new file mode 100644
index 000000000..2778c9b88
--- /dev/null
+++ b/builder/src/main/java/com/stratio/cassandra/lucene/builder/index/Partitioner.java
@@ -0,0 +1,58 @@
+/*
+ * Copyright (C) 2014 Stratio (http://stratio.com)
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package com.stratio.cassandra.lucene.builder.index;
+
+import com.fasterxml.jackson.annotation.JsonProperty;
+import com.fasterxml.jackson.annotation.JsonSubTypes;
+import com.fasterxml.jackson.annotation.JsonTypeInfo;
+import com.stratio.cassandra.lucene.builder.JSONBuilder;
+import com.stratio.cassandra.lucene.builder.index.Partitioner.*;
+
+/**
+ * An index partitioner to split the index in multiple partitions.
+ *
+ * Index partitioning is useful to speed up some searches to the detriment of others, depending on the implementation.
+ * It is also useful to overcome the Lucene's hard limit of 2147483519 documents per index.
+ *
+ * @author Andres de la Pena {@literal <adelapena@stratio.com>}
+ */
+@JsonTypeInfo(use = JsonTypeInfo.Id.NAME, include = JsonTypeInfo.As.PROPERTY, property = "type", defaultImpl = None.class)
+@JsonSubTypes({@JsonSubTypes.Type(value = None.class, name = "none"),
+ @JsonSubTypes.Type(value = OnToken.class, name = "token")})
+public abstract class Partitioner extends JSONBuilder {
+
+ /**
+ * {@link Partitioner} with no action, equivalent to not defining a partitioner.
+ */
+ public static class None extends Partitioner {
+ }
+
+ /**
+ * {@link Partitioner} based on the Cassandra's partitioning token.
+ *
+ * Partitioning on token guarantees a good load balancing between partitions while speeding up partition-directed
+ * searches to the detriment of token range searches.
+ */
+ public static class OnToken extends Partitioner {
+
+ @JsonProperty("partitions")
+ public final int partitions;
+
+ public OnToken(int partitions) {
+ this.partitions = partitions;
+ }
+ }
+}
diff --git a/builder/src/test/java/com/stratio/cassandra/lucene/builder/BuilderTest.java b/builder/src/test/java/com/stratio/cassandra/lucene/builder/BuilderTest.java
index f063f3606..ca3f1c587 100644
--- a/builder/src/test/java/com/stratio/cassandra/lucene/builder/BuilderTest.java
+++ b/builder/src/test/java/com/stratio/cassandra/lucene/builder/BuilderTest.java
@@ -52,6 +52,7 @@ public void testIndexFull() {
.indexingThreads(4)
.indexingQueuesSize(100)
.excludedDataCenters("DC1,DC2")
+ .partitioner(partitionerOnToken(8))
.defaultAnalyzer("my_analyzer")
.analyzer("my_analyzer", classpathAnalyzer("my_class"))
.analyzer("snow", snowballAnalyzer("tartar").stopwords("a,b,c"))
@@ -60,9 +61,17 @@ public void testIndexFull() {
.build();
String expected = "CREATE CUSTOM INDEX idx ON keyspace.table(lucene) " +
"USING 'com.stratio.cassandra.lucene.Index' " +
- "WITH OPTIONS = {'refresh_seconds':'10.0','directory_path':'path','ram_buffer_mb':'64'," +
- "'max_merge_mb':'16','max_cached_mb':'32','indexing_threads':'4'," +
- "'indexing_queues_size':'100','excluded_data_centers':'DC1,DC2','schema':'{" +
+ "WITH OPTIONS = {" +
+ "'refresh_seconds':'10.0'," +
+ "'directory_path':'path'," +
+ "'ram_buffer_mb':'64'," +
+ "'max_merge_mb':'16'," +
+ "'max_cached_mb':'32'," +
+ "'indexing_threads':'4'," +
+ "'indexing_queues_size':'100'," +
+ "'excluded_data_centers':'DC1,DC2'," +
+ "'partitioner':'{\"type\":\"token\",\"partitions\":8}'," +
+ "'schema':'{" +
"\"default_analyzer\":\"my_analyzer\",\"analyzers\":{" +
"\"my_analyzer\":{\"type\":\"classpath\",\"class\":\"my_class\"}," +
"\"snow\":{\"type\":\"snowball\",\"language\":\"tartar\",\"stopwords\":\"a,b,c\"}}," +
@@ -71,6 +80,20 @@ public void testIndexFull() {
assertEquals("index serialization is wrong", expected, actual);
}
+ @Test
+ public void testNonePartitioner() {
+ String actual = nonePartitioner().build();
+ String expected = "{\"type\":\"none\"}";
+ assertEquals("none partitioner serialization is wrong", expected, actual);
+ }
+
+ @Test
+ public void testTokenPartitioner() {
+ String actual = partitionerOnToken(6).build();
+ String expected = "{\"type\":\"token\",\"partitions\":6}";
+ assertEquals("token partitioner serialization is wrong", expected, actual);
+ }
+
@Test
public void testBigDecimalMapperDefaults() {
String actual = bigDecimalMapper().build();
diff --git a/doc/documentation.rst b/doc/documentation.rst
index aa9bac416..0dab56321 100644
--- a/doc/documentation.rst
+++ b/doc/documentation.rst
@@ -11,6 +11,9 @@ Stratio's Cassandra Lucene Index
- `Example <#example>`__
- `Alternative syntaxes <#alternative-syntaxes>`__
- `Indexing <#indexing>`__
+ - `Partitioners <#partitioners>`__
+ - `None partitioner <#none-partitioner>`__
+ - `Token partitioner <#token-partitioner>`__
- `Analyzers <#analyzers>`__
- `Classpath analyzer <#classpath-analyzer>`__
- `Snowball analyzer <#snowball-analyzer>`__
@@ -244,7 +247,6 @@ and create them again with running newer version.
If you have huge amount of data in your cluster this could be an expensive task. We have tested it and here you have a
compatibility matrix that states between which versions it is not needed to delete the index:
-
+-----------+---------+---------+---------+---------+---------+---------+---------+---------+---------+---------+---------+---------+---------+---------+---------+---------+
| From\\ To | 3.0.3.0 | 3.0.3.1 | 3.0.4.0 | 3.0.4.1 | 3.0.5.0 | 3.5.0 | 3.5.1 | 3.5.2 | 3.6.0 | 3.7.0 | 3.7.1 | 3.7.2 | 3.7.3 | 3.8.0 | 3.8.1 | 3.8.2 |
+===========+=========+=========+=========+=========+=========+=========+=========+=========+=========+=========+=========+=========+=========+=========+=========+=========+
@@ -544,6 +546,7 @@ where is a JSON object:
('indexing_queues_size': '',)?
('directory_path': '',)?
('excluded_data_centers': '',)?
+ ('partitioner': '',)?
'schema': ''
};
@@ -565,6 +568,9 @@ All options take a value enclosed in single quotes:
- **excluded\_data\_centers**: The comma-separated list of the data centers
to be excluded. The index will be created on this data centers but all the
write operations will be silently ignored.
+- **partitioner**: The optional index `partitioner <#partitioners>`__. Index partitioning is useful
+ to speed up some searches to the detriment of others, depending on the implementation. It is also
+ useful to overcome the Lucene's hard limit of 2147483519 documents per index.
- **schema**: see below
.. code-block:: sql
@@ -589,6 +595,52 @@ Where default\_analyzer defaults to ‘org.apache.lucene.analysis.standard.Stand
type: "" (,
+
+ com.typesafe.scala-logging
+ scala-logging_${scala.binary.version}
+ ${scala.logging.version}
+
org.scalatest
scalatest_${scala.binary.version}
@@ -92,6 +99,16 @@
${jts.version}
provided
+
+ com.fasterxml.jackson.core
+ jackson-core
+ ${jackson.version}
+
+
+ com.fasterxml.jackson.core
+ jackson-databind
+ ${jackson.version}
+
org.mockito
mockito-all
@@ -148,9 +165,11 @@
org.scala-lang:*
+ com.typesafe.scala-logging:*
org.apache.lucene:*
com.spatial4j:*
com.github.rholder:*
+ com.fasterxml.jackson.core:*
false
diff --git a/plugin/src/main/java/com/stratio/cassandra/lucene/util/Builder.java b/plugin/src/main/java/com/stratio/cassandra/lucene/common/Builder.java
similarity index 95%
rename from plugin/src/main/java/com/stratio/cassandra/lucene/util/Builder.java
rename to plugin/src/main/java/com/stratio/cassandra/lucene/common/Builder.java
index a743c2a65..74221e94c 100644
--- a/plugin/src/main/java/com/stratio/cassandra/lucene/util/Builder.java
+++ b/plugin/src/main/java/com/stratio/cassandra/lucene/common/Builder.java
@@ -13,7 +13,7 @@
* See the License for the specific language governing permissions and
* limitations under the License.
*/
-package com.stratio.cassandra.lucene.util;
+package com.stratio.cassandra.lucene.common;
/**
* Class for building complex objects.
diff --git a/plugin/src/main/java/com/stratio/cassandra/lucene/util/DateParser.java b/plugin/src/main/java/com/stratio/cassandra/lucene/common/DateParser.java
similarity index 94%
rename from plugin/src/main/java/com/stratio/cassandra/lucene/util/DateParser.java
rename to plugin/src/main/java/com/stratio/cassandra/lucene/common/DateParser.java
index ec222e65e..4fa5e7b32 100644
--- a/plugin/src/main/java/com/stratio/cassandra/lucene/util/DateParser.java
+++ b/plugin/src/main/java/com/stratio/cassandra/lucene/common/DateParser.java
@@ -13,12 +13,9 @@
* See the License for the specific language governing permissions and
* limitations under the License.
*/
-package com.stratio.cassandra.lucene.util;
+package com.stratio.cassandra.lucene.common;
import com.stratio.cassandra.lucene.IndexException;
-import com.stratio.cassandra.lucene.column.Column;
-import org.apache.cassandra.db.marshal.SimpleDateType;
-import org.apache.cassandra.serializers.SimpleDateSerializer;
import org.apache.cassandra.utils.UUIDGen;
import java.text.DateFormat;
diff --git a/plugin/src/main/java/com/stratio/cassandra/lucene/common/GeoDistance.java b/plugin/src/main/java/com/stratio/cassandra/lucene/common/GeoDistance.java
index d541bb321..bca1e2e6c 100644
--- a/plugin/src/main/java/com/stratio/cassandra/lucene/common/GeoDistance.java
+++ b/plugin/src/main/java/com/stratio/cassandra/lucene/common/GeoDistance.java
@@ -15,10 +15,10 @@
*/
package com.stratio.cassandra.lucene.common;
+import com.fasterxml.jackson.annotation.JsonCreator;
import com.google.common.base.MoreObjects;
import com.spatial4j.core.distance.DistanceUtils;
import com.stratio.cassandra.lucene.IndexException;
-import org.codehaus.jackson.annotate.JsonCreator;
/**
* Class representing a geographical distance.
diff --git a/plugin/src/main/java/com/stratio/cassandra/lucene/common/GeoDistanceUnit.java b/plugin/src/main/java/com/stratio/cassandra/lucene/common/GeoDistanceUnit.java
index 694285969..286a0b7b8 100644
--- a/plugin/src/main/java/com/stratio/cassandra/lucene/common/GeoDistanceUnit.java
+++ b/plugin/src/main/java/com/stratio/cassandra/lucene/common/GeoDistanceUnit.java
@@ -15,7 +15,7 @@
*/
package com.stratio.cassandra.lucene.common;
-import org.codehaus.jackson.annotate.JsonCreator;
+import com.fasterxml.jackson.annotation.JsonCreator;
/**
* Enum representing a spatial distance unit.
diff --git a/plugin/src/main/java/com/stratio/cassandra/lucene/common/GeoOperation.java b/plugin/src/main/java/com/stratio/cassandra/lucene/common/GeoOperation.java
index a8a5dd086..e23fd9459 100644
--- a/plugin/src/main/java/com/stratio/cassandra/lucene/common/GeoOperation.java
+++ b/plugin/src/main/java/com/stratio/cassandra/lucene/common/GeoOperation.java
@@ -15,9 +15,9 @@
*/
package com.stratio.cassandra.lucene.common;
+import com.fasterxml.jackson.annotation.JsonCreator;
import com.stratio.cassandra.lucene.IndexException;
import org.apache.lucene.spatial.query.SpatialOperation;
-import org.codehaus.jackson.annotate.JsonCreator;
/**
* Enum representing a spatial operation.
diff --git a/plugin/src/main/java/com/stratio/cassandra/lucene/common/GeoShape.java b/plugin/src/main/java/com/stratio/cassandra/lucene/common/GeoShape.java
index 4648a68b9..d825dd2f7 100644
--- a/plugin/src/main/java/com/stratio/cassandra/lucene/common/GeoShape.java
+++ b/plugin/src/main/java/com/stratio/cassandra/lucene/common/GeoShape.java
@@ -15,20 +15,20 @@
*/
package com.stratio.cassandra.lucene.common;
+import com.fasterxml.jackson.annotation.JsonCreator;
+import com.fasterxml.jackson.annotation.JsonProperty;
+import com.fasterxml.jackson.annotation.JsonSubTypes;
+import com.fasterxml.jackson.annotation.JsonTypeInfo;
import com.google.common.base.MoreObjects;
import com.spatial4j.core.shape.Rectangle;
import com.spatial4j.core.shape.jts.JtsGeometry;
import com.vividsolutions.jts.geom.Geometry;
-import org.codehaus.jackson.annotate.JsonCreator;
-import org.codehaus.jackson.annotate.JsonProperty;
-import org.codehaus.jackson.annotate.JsonSubTypes;
-import org.codehaus.jackson.annotate.JsonTypeInfo;
import java.util.Arrays;
import java.util.List;
-import static com.stratio.cassandra.lucene.util.GeospatialUtilsJTS.CONTEXT;
-import static com.stratio.cassandra.lucene.util.GeospatialUtilsJTS.geometry;
+import static com.stratio.cassandra.lucene.common.GeospatialUtilsJTS.CONTEXT;
+import static com.stratio.cassandra.lucene.common.GeospatialUtilsJTS.geometry;
/**
* Class representing the transformation of a JTS geographical shape into a new shape.
diff --git a/plugin/src/main/java/com/stratio/cassandra/lucene/common/GeoTransformation.java b/plugin/src/main/java/com/stratio/cassandra/lucene/common/GeoTransformation.java
index 8c38e3c37..592a2ae1b 100644
--- a/plugin/src/main/java/com/stratio/cassandra/lucene/common/GeoTransformation.java
+++ b/plugin/src/main/java/com/stratio/cassandra/lucene/common/GeoTransformation.java
@@ -15,16 +15,16 @@
*/
package com.stratio.cassandra.lucene.common;
+import com.fasterxml.jackson.annotation.JsonCreator;
+import com.fasterxml.jackson.annotation.JsonProperty;
+import com.fasterxml.jackson.annotation.JsonSubTypes;
+import com.fasterxml.jackson.annotation.JsonTypeInfo;
import com.google.common.base.MoreObjects;
import com.spatial4j.core.shape.Rectangle;
import com.spatial4j.core.shape.jts.JtsGeometry;
import com.vividsolutions.jts.geom.Geometry;
-import org.codehaus.jackson.annotate.JsonCreator;
-import org.codehaus.jackson.annotate.JsonProperty;
-import org.codehaus.jackson.annotate.JsonSubTypes;
-import org.codehaus.jackson.annotate.JsonTypeInfo;
-import static com.stratio.cassandra.lucene.util.GeospatialUtilsJTS.CONTEXT;
+import static com.stratio.cassandra.lucene.common.GeospatialUtilsJTS.CONTEXT;
/**
* Class representing the transformation of a JTS geographical shape into a new shape.
diff --git a/plugin/src/main/java/com/stratio/cassandra/lucene/util/GeospatialUtils.java b/plugin/src/main/java/com/stratio/cassandra/lucene/common/GeospatialUtils.java
similarity index 98%
rename from plugin/src/main/java/com/stratio/cassandra/lucene/util/GeospatialUtils.java
rename to plugin/src/main/java/com/stratio/cassandra/lucene/common/GeospatialUtils.java
index 093ab1c42..5467d57fe 100644
--- a/plugin/src/main/java/com/stratio/cassandra/lucene/util/GeospatialUtils.java
+++ b/plugin/src/main/java/com/stratio/cassandra/lucene/common/GeospatialUtils.java
@@ -13,7 +13,7 @@
* See the License for the specific language governing permissions and
* limitations under the License.
*/
-package com.stratio.cassandra.lucene.util;
+package com.stratio.cassandra.lucene.common;
import com.spatial4j.core.context.SpatialContext;
import com.stratio.cassandra.lucene.IndexException;
diff --git a/plugin/src/main/java/com/stratio/cassandra/lucene/util/GeospatialUtilsJTS.java b/plugin/src/main/java/com/stratio/cassandra/lucene/common/GeospatialUtilsJTS.java
similarity index 98%
rename from plugin/src/main/java/com/stratio/cassandra/lucene/util/GeospatialUtilsJTS.java
rename to plugin/src/main/java/com/stratio/cassandra/lucene/common/GeospatialUtilsJTS.java
index 3eb9b356a..31ab621c0 100644
--- a/plugin/src/main/java/com/stratio/cassandra/lucene/util/GeospatialUtilsJTS.java
+++ b/plugin/src/main/java/com/stratio/cassandra/lucene/common/GeospatialUtilsJTS.java
@@ -13,7 +13,7 @@
* See the License for the specific language governing permissions and
* limitations under the License.
*/
-package com.stratio.cassandra.lucene.util;
+package com.stratio.cassandra.lucene.common;
import com.spatial4j.core.context.jts.JtsSpatialContext;
import com.spatial4j.core.shape.jts.JtsGeometry;
diff --git a/plugin/src/main/java/com/stratio/cassandra/lucene/util/JsonSerializer.java b/plugin/src/main/java/com/stratio/cassandra/lucene/common/JsonSerializer.java
similarity index 76%
rename from plugin/src/main/java/com/stratio/cassandra/lucene/util/JsonSerializer.java
rename to plugin/src/main/java/com/stratio/cassandra/lucene/common/JsonSerializer.java
index 70124baa8..50e2a8bc7 100644
--- a/plugin/src/main/java/com/stratio/cassandra/lucene/util/JsonSerializer.java
+++ b/plugin/src/main/java/com/stratio/cassandra/lucene/common/JsonSerializer.java
@@ -13,14 +13,14 @@
* See the License for the specific language governing permissions and
* limitations under the License.
*/
-package com.stratio.cassandra.lucene.util;
+package com.stratio.cassandra.lucene.common;
-import org.codehaus.jackson.JsonGenerator;
-import org.codehaus.jackson.JsonParser;
-import org.codehaus.jackson.map.DeserializationConfig;
-import org.codehaus.jackson.map.ObjectMapper;
-import org.codehaus.jackson.map.SerializationConfig;
-import org.codehaus.jackson.map.annotate.JsonSerialize;
+import com.fasterxml.jackson.annotation.JsonInclude;
+import com.fasterxml.jackson.core.JsonGenerator;
+import com.fasterxml.jackson.core.JsonParser;
+import com.fasterxml.jackson.databind.DeserializationFeature;
+import com.fasterxml.jackson.databind.MapperFeature;
+import com.fasterxml.jackson.databind.ObjectMapper;
import java.io.IOException;
@@ -40,10 +40,10 @@ public final class JsonSerializer {
private JsonSerializer() {
mapper.configure(JsonGenerator.Feature.QUOTE_FIELD_NAMES, false);
mapper.configure(JsonParser.Feature.ALLOW_UNQUOTED_FIELD_NAMES, true);
- mapper.configure(SerializationConfig.Feature.AUTO_DETECT_IS_GETTERS, false);
- mapper.configure(DeserializationConfig.Feature.FAIL_ON_UNKNOWN_PROPERTIES, true);
- mapper.configure(DeserializationConfig.Feature.ACCEPT_SINGLE_VALUE_AS_ARRAY, true);
- mapper.setSerializationInclusion(JsonSerialize.Inclusion.NON_NULL);
+ mapper.configure(MapperFeature.AUTO_DETECT_IS_GETTERS, false);
+ mapper.configure(DeserializationFeature.FAIL_ON_UNKNOWN_PROPERTIES, true);
+ mapper.configure(DeserializationFeature.ACCEPT_SINGLE_VALUE_AS_ARRAY, true);
+ mapper.setSerializationInclusion(JsonInclude.Include.NON_NULL);
}
/**
diff --git a/plugin/src/main/java/com/stratio/cassandra/lucene/schema/Schema.java b/plugin/src/main/java/com/stratio/cassandra/lucene/schema/Schema.java
index 7d8326ba6..c36048122 100644
--- a/plugin/src/main/java/com/stratio/cassandra/lucene/schema/Schema.java
+++ b/plugin/src/main/java/com/stratio/cassandra/lucene/schema/Schema.java
@@ -46,7 +46,10 @@ public class Schema implements Closeable {
public final Map mappers;
/** The wrapping all-in-one {@link Analyzer}. */
- private final SchemaAnalyzer analyzer;
+ public final SchemaAnalyzer analyzer;
+
+ /** The default {@link Analyzer}. */
+ public final Analyzer defaultAnalyzer;
/** The names of the mapped cells. */
private final Set mappedCells;
@@ -60,32 +63,15 @@ public class Schema implements Closeable {
*/
public Schema(Analyzer defaultAnalyzer, Map mappers, Map analyzers) {
this.mappers = mappers;
+ this.defaultAnalyzer = defaultAnalyzer;
this.analyzer = new SchemaAnalyzer(defaultAnalyzer, analyzers, mappers);
mappedCells = mappers.values()
.stream()
.flatMap(x -> x.mappedColumns.stream())
- .map(x -> Column.parse(x).cellName())
+ .map(Column::parseCellName)
.collect(Collectors.toSet());
}
- /**
- * Returns the used {@link Analyzer}.
- *
- * @return the used {@link Analyzer}
- */
- public Analyzer analyzer() {
- return analyzer;
- }
-
- /**
- * Returns the default {@link Analyzer}.
- *
- * @return the default {@link Analyzer}
- */
- public Analyzer defaultAnalyzer() {
- return analyzer.getDefaultAnalyzer().analyzer();
- }
-
/**
* Returns the {@link Analyzer} identified by the specified field name.
*
@@ -103,7 +89,7 @@ public Analyzer analyzer(String fieldName) {
* @return the mapper, or {@code null} if not found.
*/
public Mapper mapper(String field) {
- String mapperName = Column.parse(field).mapperName();
+ String mapperName = Column.parseMapperName(field);
return mappers.get(mapperName);
}
diff --git a/plugin/src/main/java/com/stratio/cassandra/lucene/schema/SchemaAnalyzer.java b/plugin/src/main/java/com/stratio/cassandra/lucene/schema/SchemaAnalyzer.java
index c97a0c6e7..a15c3e7cc 100644
--- a/plugin/src/main/java/com/stratio/cassandra/lucene/schema/SchemaAnalyzer.java
+++ b/plugin/src/main/java/com/stratio/cassandra/lucene/schema/SchemaAnalyzer.java
@@ -35,6 +35,7 @@
* @author Andres de la Pena {@literal <adelapena@stratio.com>}
*/
public class SchemaAnalyzer extends DelegatingAnalyzerWrapper {
+
private final TokenLengthAnalyzer defaultAnalyzer;
private final Map fieldAnalyzers;
@@ -107,7 +108,7 @@ public TokenLengthAnalyzer getAnalyzer(String fieldName) {
if (StringUtils.isBlank(fieldName)) {
throw new IllegalArgumentException("Not empty analyzer name required");
}
- String name = Column.parse(fieldName).mapperName();
+ String name = Column.parseMapperName(fieldName);
TokenLengthAnalyzer analyzer = fieldAnalyzers.get(name);
if (analyzer != null) {
return analyzer;
diff --git a/plugin/src/main/java/com/stratio/cassandra/lucene/schema/SchemaBuilder.java b/plugin/src/main/java/com/stratio/cassandra/lucene/schema/SchemaBuilder.java
index 808fd4dcf..5f5e02b98 100644
--- a/plugin/src/main/java/com/stratio/cassandra/lucene/schema/SchemaBuilder.java
+++ b/plugin/src/main/java/com/stratio/cassandra/lucene/schema/SchemaBuilder.java
@@ -15,16 +15,16 @@
*/
package com.stratio.cassandra.lucene.schema;
+import com.fasterxml.jackson.annotation.JsonCreator;
+import com.fasterxml.jackson.annotation.JsonProperty;
import com.stratio.cassandra.lucene.IndexException;
+import com.stratio.cassandra.lucene.common.JsonSerializer;
import com.stratio.cassandra.lucene.schema.analysis.AnalyzerBuilder;
import com.stratio.cassandra.lucene.schema.analysis.ClasspathAnalyzerBuilder;
import com.stratio.cassandra.lucene.schema.analysis.StandardAnalyzers;
import com.stratio.cassandra.lucene.schema.mapping.Mapper;
import com.stratio.cassandra.lucene.schema.mapping.builder.MapperBuilder;
-import com.stratio.cassandra.lucene.util.JsonSerializer;
import org.apache.lucene.analysis.Analyzer;
-import org.codehaus.jackson.annotate.JsonCreator;
-import org.codehaus.jackson.annotate.JsonProperty;
import java.io.IOException;
import java.util.LinkedHashMap;
diff --git a/plugin/src/main/java/com/stratio/cassandra/lucene/schema/analysis/AnalyzerBuilder.java b/plugin/src/main/java/com/stratio/cassandra/lucene/schema/analysis/AnalyzerBuilder.java
index cbe7dc302..87e89c582 100644
--- a/plugin/src/main/java/com/stratio/cassandra/lucene/schema/analysis/AnalyzerBuilder.java
+++ b/plugin/src/main/java/com/stratio/cassandra/lucene/schema/analysis/AnalyzerBuilder.java
@@ -15,9 +15,9 @@
*/
package com.stratio.cassandra.lucene.schema.analysis;
+import com.fasterxml.jackson.annotation.JsonSubTypes;
+import com.fasterxml.jackson.annotation.JsonTypeInfo;
import org.apache.lucene.analysis.Analyzer;
-import org.codehaus.jackson.annotate.JsonSubTypes;
-import org.codehaus.jackson.annotate.JsonTypeInfo;
/**
* An Lucene {@link Analyzer} builder.
diff --git a/plugin/src/main/java/com/stratio/cassandra/lucene/schema/analysis/ClasspathAnalyzerBuilder.java b/plugin/src/main/java/com/stratio/cassandra/lucene/schema/analysis/ClasspathAnalyzerBuilder.java
index 4bb1d801f..21ef8c6a2 100644
--- a/plugin/src/main/java/com/stratio/cassandra/lucene/schema/analysis/ClasspathAnalyzerBuilder.java
+++ b/plugin/src/main/java/com/stratio/cassandra/lucene/schema/analysis/ClasspathAnalyzerBuilder.java
@@ -15,10 +15,10 @@
*/
package com.stratio.cassandra.lucene.schema.analysis;
+import com.fasterxml.jackson.annotation.JsonCreator;
+import com.fasterxml.jackson.annotation.JsonProperty;
import com.stratio.cassandra.lucene.IndexException;
import org.apache.lucene.analysis.Analyzer;
-import org.codehaus.jackson.annotate.JsonCreator;
-import org.codehaus.jackson.annotate.JsonProperty;
import java.lang.reflect.Constructor;
diff --git a/plugin/src/main/java/com/stratio/cassandra/lucene/schema/analysis/SnowballAnalyzerBuilder.java b/plugin/src/main/java/com/stratio/cassandra/lucene/schema/analysis/SnowballAnalyzerBuilder.java
index 4bfc94d6f..c27a811cc 100644
--- a/plugin/src/main/java/com/stratio/cassandra/lucene/schema/analysis/SnowballAnalyzerBuilder.java
+++ b/plugin/src/main/java/com/stratio/cassandra/lucene/schema/analysis/SnowballAnalyzerBuilder.java
@@ -15,6 +15,8 @@
*/
package com.stratio.cassandra.lucene.schema.analysis;
+import com.fasterxml.jackson.annotation.JsonCreator;
+import com.fasterxml.jackson.annotation.JsonProperty;
import com.stratio.cassandra.lucene.IndexException;
import org.apache.commons.lang3.StringUtils;
import org.apache.lucene.analysis.Analyzer;
@@ -25,8 +27,6 @@
import org.apache.lucene.analysis.standard.StandardFilter;
import org.apache.lucene.analysis.standard.StandardTokenizer;
import org.apache.lucene.analysis.util.CharArraySet;
-import org.codehaus.jackson.annotate.JsonCreator;
-import org.codehaus.jackson.annotate.JsonProperty;
import java.util.ArrayList;
import java.util.List;
@@ -51,8 +51,8 @@ public class SnowballAnalyzerBuilder extends AnalyzerBuilder {
* Builds a new {@link SnowballAnalyzerBuilder} for the specified language and stopwords.
*
* @param language The language. The supported languages are English, French, Spanish, Portuguese, Italian,
- * Romanian, German, Dutch, Swedish, Norwegian, Danish, Russian, Finnish, Hungarian and Turkish.
- * Basque and Catalan.
+ * Romanian, German, Dutch, Swedish, Norwegian, Danish, Russian, Finnish, Hungarian, Turkish, Basque and
+ * Catalan.
* @param stopwords the comma separated stopwords list.
*/
@JsonCreator
@@ -138,7 +138,7 @@ public SnowballAnalyzer(String language, CharArraySet stopwords) {
/** {@inheritDoc} */
@Override
- protected TokenStreamComponents createComponents(String fieldName) {
+ protected Analyzer.TokenStreamComponents createComponents(String fieldName) {
final Tokenizer source = new StandardTokenizer();
TokenStream result = new StandardFilter(source);
result = new LowerCaseFilter(result);
diff --git a/plugin/src/main/java/com/stratio/cassandra/lucene/schema/analysis/SnowballFilter.java b/plugin/src/main/java/com/stratio/cassandra/lucene/schema/analysis/SnowballFilter.java
index ef514fc94..2eeaca3fb 100644
--- a/plugin/src/main/java/com/stratio/cassandra/lucene/schema/analysis/SnowballFilter.java
+++ b/plugin/src/main/java/com/stratio/cassandra/lucene/schema/analysis/SnowballFilter.java
@@ -15,16 +15,16 @@
*/
package com.stratio.cassandra.lucene.schema.analysis;
-import java.io.IOException;
-import java.util.Arrays;
-
import com.stratio.cassandra.lucene.IndexException;
import org.apache.lucene.analysis.TokenFilter;
import org.apache.lucene.analysis.TokenStream;
-import org.apache.lucene.analysis.tokenattributes.KeywordAttribute;
import org.apache.lucene.analysis.tokenattributes.CharTermAttribute;
+import org.apache.lucene.analysis.tokenattributes.KeywordAttribute;
import org.tartarus.snowball.SnowballStemmer;
+import java.io.IOException;
+import java.util.Arrays;
+
/**
* Version of {@link org.apache.lucene.analysis.snowball.SnowballFilter} modified to be compatible with
* {@link org.tartarus.snowball.SnowballStemmer} 1.3.0.581.1, imposed by SASI indexes.
@@ -46,8 +46,8 @@ public final class SnowballFilter extends TokenFilter {
*
* @param stream the input tokens stream to be stemmed
* @param language The language. The supported languages are English, French, Spanish, Portuguese, Italian,
- * Romanian, German, Dutch, Swedish, Norwegian, Danish, Russian, Finnish, Hungarian and Turkish.
- * Basque and Catalan.
+ * Romanian, German, Dutch, Swedish, Norwegian, Danish, Russian, Finnish, Hungarian, Turkish, Basque and
+ * Catalan.
*/
public SnowballFilter(TokenStream stream, String language) {
super(stream);
diff --git a/plugin/src/main/java/com/stratio/cassandra/lucene/schema/mapping/BitemporalMapper.java b/plugin/src/main/java/com/stratio/cassandra/lucene/schema/mapping/BitemporalMapper.java
index 202ba4141..b902c2553 100644
--- a/plugin/src/main/java/com/stratio/cassandra/lucene/schema/mapping/BitemporalMapper.java
+++ b/plugin/src/main/java/com/stratio/cassandra/lucene/schema/mapping/BitemporalMapper.java
@@ -17,9 +17,8 @@
import com.google.common.base.MoreObjects;
import com.stratio.cassandra.lucene.IndexException;
-import com.stratio.cassandra.lucene.column.Column;
import com.stratio.cassandra.lucene.column.Columns;
-import com.stratio.cassandra.lucene.util.DateParser;
+import com.stratio.cassandra.lucene.common.DateParser;
import org.apache.commons.lang3.StringUtils;
import org.apache.lucene.document.LongField;
import org.apache.lucene.index.IndexableField;
@@ -168,25 +167,11 @@ private void validate(BitemporalDateTime vtFrom,
* Returns a {@link BitemporalDateTime} read from the specified {@link Columns}.
*
* @param columns the column where the data is
- * @param fieldName the name of the field to be read from {@code columns}
+ * @param field the name of the field to be read from {@code columns}
* @return a bitemporal date time
*/
- BitemporalDateTime readBitemporalDate(Columns columns, String fieldName) {
- Column<?> column = columns.withFieldName(fieldName).head();
- if (column == null) {
- return null;
- }
- return parseBitemporalDate(column.value().getOrElse(null));
- }
-
- private BitemporalDateTime checkIfNow(Long in) {
- if (in > nowValue) {
- throw new IndexException("BitemporalDateTime value '{}' exceeds Max Value: '{}'", in, nowValue);
- } else if (in < nowValue) {
- return new BitemporalDateTime(in);
- } else {
- return new BitemporalDateTime(Long.MAX_VALUE);
- }
+ BitemporalDateTime readBitemporalDate(Columns columns, String field) {
+ return parseBitemporalDate(columns.valueForField(field));
}
/**
@@ -201,6 +186,16 @@ public BitemporalDateTime parseBitemporalDate(Object value) {
return date == null ? null : checkIfNow(date.getTime());
}
+ private BitemporalDateTime checkIfNow(Long in) {
+ if (in > nowValue) {
+ throw new IndexException("BitemporalDateTime value '{}' exceeds Max Value: '{}'", in, nowValue);
+ } else if (in < nowValue) {
+ return new BitemporalDateTime(in);
+ } else {
+ return new BitemporalDateTime(Long.MAX_VALUE);
+ }
+ }
+
/** {@inheritDoc} */
@Override
public SortField sortField(String name, boolean reverse) {
diff --git a/plugin/src/main/java/com/stratio/cassandra/lucene/schema/mapping/DateMapper.java b/plugin/src/main/java/com/stratio/cassandra/lucene/schema/mapping/DateMapper.java
index 0201c30ff..13c68a888 100644
--- a/plugin/src/main/java/com/stratio/cassandra/lucene/schema/mapping/DateMapper.java
+++ b/plugin/src/main/java/com/stratio/cassandra/lucene/schema/mapping/DateMapper.java
@@ -15,8 +15,7 @@
*/
package com.stratio.cassandra.lucene.schema.mapping;
-import com.stratio.cassandra.lucene.column.Column;
-import com.stratio.cassandra.lucene.util.DateParser;
+import com.stratio.cassandra.lucene.common.DateParser;
import org.apache.lucene.document.Field;
import org.apache.lucene.document.LongField;
import org.apache.lucene.document.SortedNumericDocValuesField;
diff --git a/plugin/src/main/java/com/stratio/cassandra/lucene/schema/mapping/DateRangeMapper.java b/plugin/src/main/java/com/stratio/cassandra/lucene/schema/mapping/DateRangeMapper.java
index 13f2e9985..1143311eb 100644
--- a/plugin/src/main/java/com/stratio/cassandra/lucene/schema/mapping/DateRangeMapper.java
+++ b/plugin/src/main/java/com/stratio/cassandra/lucene/schema/mapping/DateRangeMapper.java
@@ -17,9 +17,8 @@
import com.google.common.base.MoreObjects;
import com.stratio.cassandra.lucene.IndexException;
-import com.stratio.cassandra.lucene.column.Column;
import com.stratio.cassandra.lucene.column.Columns;
-import com.stratio.cassandra.lucene.util.DateParser;
+import com.stratio.cassandra.lucene.common.DateParser;
import org.apache.commons.lang3.StringUtils;
import org.apache.lucene.index.IndexableField;
import org.apache.lucene.search.SortField;
@@ -28,7 +27,6 @@
import org.apache.lucene.spatial.prefix.tree.NumberRangePrefixTree.NRShape;
import org.apache.lucene.spatial.prefix.tree.NumberRangePrefixTree.UnitNRShape;
-import java.nio.ByteBuffer;
import java.util.Arrays;
import java.util.Collections;
import java.util.Date;
@@ -137,15 +135,7 @@ public NRShape makeShape(Date from, Date to) {
* @return the start date
*/
Date readFrom(Columns columns) {
- Column<?> column = columns.withFieldName(from).head();
- if (column == null) {
- return null;
- }
- Date fromDate = parser.parse(column.value().getOrElse(null));
- if (fromDate == null) {
- throw new IndexException("From date required");
- }
- return fromDate;
+ return parser.parse(columns.valueForField(from));
}
/**
@@ -155,15 +145,7 @@ Date readFrom(Columns columns) {
* @return the end date
*/
Date readTo(Columns columns) {
- Column<?> column = columns.withFieldName(to).head();
- if (column == null) {
- return null;
- }
- Date toDate = parser.parse(column.value().getOrElse(null));
- if (toDate == null) {
- throw new IndexException("To date required");
- }
- return toDate;
+ return parser.parse(columns.valueForField(to));
}
/**
diff --git a/plugin/src/main/java/com/stratio/cassandra/lucene/schema/mapping/FloatMapper.java b/plugin/src/main/java/com/stratio/cassandra/lucene/schema/mapping/FloatMapper.java
index 578e9a72b..5301d8d36 100644
--- a/plugin/src/main/java/com/stratio/cassandra/lucene/schema/mapping/FloatMapper.java
+++ b/plugin/src/main/java/com/stratio/cassandra/lucene/schema/mapping/FloatMapper.java
@@ -15,6 +15,7 @@
*/
package com.stratio.cassandra.lucene.schema.mapping;
+import com.fasterxml.jackson.annotation.JsonCreator;
import com.stratio.cassandra.lucene.IndexException;
import org.apache.lucene.document.Field;
import org.apache.lucene.document.FloatField;
@@ -23,7 +24,6 @@
import org.apache.lucene.search.SortField.Type;
import org.apache.lucene.search.SortedNumericSortField;
import org.apache.lucene.util.NumericUtils;
-import org.codehaus.jackson.annotate.JsonCreator;
import java.util.Optional;
diff --git a/plugin/src/main/java/com/stratio/cassandra/lucene/schema/mapping/GeoPointMapper.java b/plugin/src/main/java/com/stratio/cassandra/lucene/schema/mapping/GeoPointMapper.java
index 16870812a..85ee5c1ef 100644
--- a/plugin/src/main/java/com/stratio/cassandra/lucene/schema/mapping/GeoPointMapper.java
+++ b/plugin/src/main/java/com/stratio/cassandra/lucene/schema/mapping/GeoPointMapper.java
@@ -18,9 +18,8 @@
import com.google.common.base.MoreObjects;
import com.spatial4j.core.shape.Point;
import com.stratio.cassandra.lucene.IndexException;
-import com.stratio.cassandra.lucene.column.Column;
import com.stratio.cassandra.lucene.column.Columns;
-import com.stratio.cassandra.lucene.util.GeospatialUtils;
+import com.stratio.cassandra.lucene.common.GeospatialUtils;
import org.apache.commons.lang3.StringUtils;
import org.apache.lucene.index.IndexableField;
import org.apache.lucene.search.SortField;
@@ -29,13 +28,12 @@
import org.apache.lucene.spatial.prefix.tree.GeohashPrefixTree;
import org.apache.lucene.spatial.prefix.tree.SpatialPrefixTree;
import org.apache.lucene.spatial.serialized.SerializedDVStrategy;
-import scala.Option;
import java.util.Arrays;
import java.util.Collections;
import java.util.List;
-import static com.stratio.cassandra.lucene.util.GeospatialUtils.CONTEXT;
+import static com.stratio.cassandra.lucene.common.GeospatialUtils.CONTEXT;
/**
* A {@link Mapper} to map geographical points.
@@ -124,8 +122,8 @@ public SortField sortField(String name, boolean reverse) {
* @return the validated latitude
*/
Double readLatitude(Columns columns) {
- Column<?> column = columns.withFieldName(latitude).head();
- return column == null ? null : readLatitude(column.value());
+ Object value = columns.valueForField(latitude);
+ return value == null ? null : readLatitude(value);
}
/**
@@ -136,8 +134,8 @@ Double readLatitude(Columns columns) {
* @return the validated longitude
*/
Double readLongitude(Columns columns) {
- Column<?> column = columns.withFieldName(longitude).head();
- return column == null ? null : readLongitude(column.value());
+ Object value = columns.valueForField(longitude);
+ return value == null ? null : readLongitude(value);
}
/**
@@ -145,15 +143,12 @@ Double readLongitude(Columns columns) {
*
* A valid latitude must be in the range [-90, 90].
*
- * @param option the {@link Object} containing the latitude
+ * @param o the {@link Object} containing the latitude
* @return the latitude
*/
- private static Double readLatitude(Option<?> option) {
+ private static Double readLatitude(Object o) {
+ if (o == null) return null;
Double value;
- if (option == null || option.isEmpty()) {
- return null;
- }
- Object o = option.get();
if (o instanceof Number) {
value = ((Number) o).doubleValue();
} else {
@@ -171,15 +166,12 @@ private static Double readLatitude(Option option) {
*
* A valid longitude must be in the range [-180, 180].
*
- * @param option the {@link Object} containing the latitude
+ * @param o the {@link Object} containing the longitude
* @return the longitude
*/
- private static Double readLongitude(Option<?> option) {
+ private static Double readLongitude(Object o) {
+ if (o == null) return null;
Double value;
- if (option == null || option.isEmpty()) {
- return null;
- }
- Object o = option.get();
if (o instanceof Number) {
value = ((Number) o).doubleValue();
} else {
diff --git a/plugin/src/main/java/com/stratio/cassandra/lucene/schema/mapping/GeoShapeMapper.java b/plugin/src/main/java/com/stratio/cassandra/lucene/schema/mapping/GeoShapeMapper.java
index d5a155b3c..07e8d12b1 100644
--- a/plugin/src/main/java/com/stratio/cassandra/lucene/schema/mapping/GeoShapeMapper.java
+++ b/plugin/src/main/java/com/stratio/cassandra/lucene/schema/mapping/GeoShapeMapper.java
@@ -19,7 +19,7 @@
import com.spatial4j.core.shape.jts.JtsGeometry;
import com.stratio.cassandra.lucene.IndexException;
import com.stratio.cassandra.lucene.common.GeoTransformation;
-import com.stratio.cassandra.lucene.util.GeospatialUtils;
+import com.stratio.cassandra.lucene.common.GeospatialUtils;
import org.apache.commons.lang3.StringUtils;
import org.apache.lucene.index.IndexableField;
import org.apache.lucene.search.SortField;
@@ -33,8 +33,8 @@
import java.util.Collections;
import java.util.List;
-import static com.stratio.cassandra.lucene.util.GeospatialUtilsJTS.CONTEXT;
-import static com.stratio.cassandra.lucene.util.GeospatialUtilsJTS.geometry;
+import static com.stratio.cassandra.lucene.common.GeospatialUtilsJTS.CONTEXT;
+import static com.stratio.cassandra.lucene.common.GeospatialUtilsJTS.geometry;
/**
* A {@link Mapper} to map geographical shapes represented according to the
diff --git a/plugin/src/main/java/com/stratio/cassandra/lucene/schema/mapping/Mapper.java b/plugin/src/main/java/com/stratio/cassandra/lucene/schema/mapping/Mapper.java
index 9b2b096ca..3bed1eb50 100644
--- a/plugin/src/main/java/com/stratio/cassandra/lucene/schema/mapping/Mapper.java
+++ b/plugin/src/main/java/com/stratio/cassandra/lucene/schema/mapping/Mapper.java
@@ -109,7 +109,7 @@ protected Mapper(String field,
this.validated = validated == null ? DEFAULT_VALIDATED : validated;
this.analyzer = analyzer;
this.mappedColumns = mappedColumns.stream().filter(x -> x != null).collect(toList()); // Remove nulls
- this.mappedCells = this.mappedColumns.stream().map(Column::parse).map(Column::cellName).collect(toList());
+ this.mappedCells = this.mappedColumns.stream().map(Column::parseCellName).collect(toList());
this.supportedTypes = supportedTypes;
}
diff --git a/plugin/src/main/java/com/stratio/cassandra/lucene/schema/mapping/SingleColumnMapper.java b/plugin/src/main/java/com/stratio/cassandra/lucene/schema/mapping/SingleColumnMapper.java
index 5affc8d0a..9d7ca4604 100644
--- a/plugin/src/main/java/com/stratio/cassandra/lucene/schema/mapping/SingleColumnMapper.java
+++ b/plugin/src/main/java/com/stratio/cassandra/lucene/schema/mapping/SingleColumnMapper.java
@@ -81,15 +81,13 @@ public String getColumn() {
@Override
public List<IndexableField> indexableFields(Columns columns) {
List<IndexableField> fields = new LinkedList<>();
- for (Column c : columns.withMapperName(column)) {
- fields.addAll(indexableFields(c));
- }
+ columns.foreachWithMapper(column, c -> fields.addAll(indexableFields(c)));
return fields;
}
- private List<IndexableField> indexableFields(Column<K> c) {
- String name = column.equals(field) ? c.fieldName() : c.fieldName(field);
- K value = c.value().getOrElse(null);
+ private List<IndexableField> indexableFields(Column c) {
+ String name = column.equals(field) ? c.field() : c.fieldName(field);
+ Object value = c.valueOrNull();
if (value != null) {
T base = base(c);
return indexableFields(name, base);
@@ -121,17 +119,16 @@ public final T base(String field, Object value) {
* Returns the {@link Column} query value resulting from the mapping of the specified object.
*
* @param column the column
- * @param <K> the base type of the column
* @return the {@link Column} index value resulting from the mapping of the specified object
*/
- public final <K> T base(Column<K> column) {
- return column == null ? null : column.value().getOrElse(null) == null ? null : doBase(column);
+ public final T base(Column column) {
+ return column == null ? null : column.valueOrNull() == null ? null : doBase(column);
}
protected abstract T doBase(@NotNull String field, @NotNull Object value);
- protected final T doBase(Column column) {
- return doBase(column.fieldName(field), column.value().getOrElse(null));
+ protected final T doBase(Column column) {
+ return doBase(column.fieldName(field), column.valueOrNull());
}
/** {@inheritDoc} */
diff --git a/plugin/src/main/java/com/stratio/cassandra/lucene/schema/mapping/builder/BigDecimalMapperBuilder.java b/plugin/src/main/java/com/stratio/cassandra/lucene/schema/mapping/builder/BigDecimalMapperBuilder.java
index 777875408..e200c1138 100644
--- a/plugin/src/main/java/com/stratio/cassandra/lucene/schema/mapping/builder/BigDecimalMapperBuilder.java
+++ b/plugin/src/main/java/com/stratio/cassandra/lucene/schema/mapping/builder/BigDecimalMapperBuilder.java
@@ -15,8 +15,8 @@
*/
package com.stratio.cassandra.lucene.schema.mapping.builder;
+import com.fasterxml.jackson.annotation.JsonProperty;
import com.stratio.cassandra.lucene.schema.mapping.BigDecimalMapper;
-import org.codehaus.jackson.annotate.JsonProperty;
/**
* {@link SingleColumnMapperBuilder} to build a new {@link BigDecimalMapper}.
diff --git a/plugin/src/main/java/com/stratio/cassandra/lucene/schema/mapping/builder/BigIntegerMapperBuilder.java b/plugin/src/main/java/com/stratio/cassandra/lucene/schema/mapping/builder/BigIntegerMapperBuilder.java
index f41eebdf1..36d0008be 100644
--- a/plugin/src/main/java/com/stratio/cassandra/lucene/schema/mapping/builder/BigIntegerMapperBuilder.java
+++ b/plugin/src/main/java/com/stratio/cassandra/lucene/schema/mapping/builder/BigIntegerMapperBuilder.java
@@ -15,8 +15,8 @@
*/
package com.stratio.cassandra.lucene.schema.mapping.builder;
+import com.fasterxml.jackson.annotation.JsonProperty;
import com.stratio.cassandra.lucene.schema.mapping.BigIntegerMapper;
-import org.codehaus.jackson.annotate.JsonProperty;
/**
* {@link SingleColumnMapperBuilder} to build a new {@link BigIntegerMapper}.
diff --git a/plugin/src/main/java/com/stratio/cassandra/lucene/schema/mapping/builder/BitemporalMapperBuilder.java b/plugin/src/main/java/com/stratio/cassandra/lucene/schema/mapping/builder/BitemporalMapperBuilder.java
index 9550f83e0..2977fdf89 100644
--- a/plugin/src/main/java/com/stratio/cassandra/lucene/schema/mapping/builder/BitemporalMapperBuilder.java
+++ b/plugin/src/main/java/com/stratio/cassandra/lucene/schema/mapping/builder/BitemporalMapperBuilder.java
@@ -15,9 +15,9 @@
*/
package com.stratio.cassandra.lucene.schema.mapping.builder;
+import com.fasterxml.jackson.annotation.JsonCreator;
+import com.fasterxml.jackson.annotation.JsonProperty;
import com.stratio.cassandra.lucene.schema.mapping.BitemporalMapper;
-import org.codehaus.jackson.annotate.JsonCreator;
-import org.codehaus.jackson.annotate.JsonProperty;
/**
* {@link MapperBuilder} to build a new {@link BitemporalMapper}.
diff --git a/plugin/src/main/java/com/stratio/cassandra/lucene/schema/mapping/builder/DateMapperBuilder.java b/plugin/src/main/java/com/stratio/cassandra/lucene/schema/mapping/builder/DateMapperBuilder.java
index ed1686067..ef7a4ce29 100644
--- a/plugin/src/main/java/com/stratio/cassandra/lucene/schema/mapping/builder/DateMapperBuilder.java
+++ b/plugin/src/main/java/com/stratio/cassandra/lucene/schema/mapping/builder/DateMapperBuilder.java
@@ -15,8 +15,8 @@
*/
package com.stratio.cassandra.lucene.schema.mapping.builder;
+import com.fasterxml.jackson.annotation.JsonProperty;
import com.stratio.cassandra.lucene.schema.mapping.DateMapper;
-import org.codehaus.jackson.annotate.JsonProperty;
/**
* {@link SingleColumnMapperBuilder} to build a new {@link DateMapper}.
diff --git a/plugin/src/main/java/com/stratio/cassandra/lucene/schema/mapping/builder/DateRangeMapperBuilder.java b/plugin/src/main/java/com/stratio/cassandra/lucene/schema/mapping/builder/DateRangeMapperBuilder.java
index f68d5194b..7504be3c6 100644
--- a/plugin/src/main/java/com/stratio/cassandra/lucene/schema/mapping/builder/DateRangeMapperBuilder.java
+++ b/plugin/src/main/java/com/stratio/cassandra/lucene/schema/mapping/builder/DateRangeMapperBuilder.java
@@ -15,8 +15,8 @@
*/
package com.stratio.cassandra.lucene.schema.mapping.builder;
+import com.fasterxml.jackson.annotation.JsonProperty;
import com.stratio.cassandra.lucene.schema.mapping.DateRangeMapper;
-import org.codehaus.jackson.annotate.JsonProperty;
/**
* {@link MapperBuilder} to build a new {@link DateRangeMapper}.
diff --git a/plugin/src/main/java/com/stratio/cassandra/lucene/schema/mapping/builder/DoubleMapperBuilder.java b/plugin/src/main/java/com/stratio/cassandra/lucene/schema/mapping/builder/DoubleMapperBuilder.java
index 49eb1e201..b10bc4aa0 100644
--- a/plugin/src/main/java/com/stratio/cassandra/lucene/schema/mapping/builder/DoubleMapperBuilder.java
+++ b/plugin/src/main/java/com/stratio/cassandra/lucene/schema/mapping/builder/DoubleMapperBuilder.java
@@ -15,8 +15,8 @@
*/
package com.stratio.cassandra.lucene.schema.mapping.builder;
+import com.fasterxml.jackson.annotation.JsonProperty;
import com.stratio.cassandra.lucene.schema.mapping.DoubleMapper;
-import org.codehaus.jackson.annotate.JsonProperty;
/**
* {@link SingleColumnMapperBuilder} to build a new {@link DoubleMapper}.
diff --git a/plugin/src/main/java/com/stratio/cassandra/lucene/schema/mapping/builder/FloatMapperBuilder.java b/plugin/src/main/java/com/stratio/cassandra/lucene/schema/mapping/builder/FloatMapperBuilder.java
index ee514231e..430120105 100644
--- a/plugin/src/main/java/com/stratio/cassandra/lucene/schema/mapping/builder/FloatMapperBuilder.java
+++ b/plugin/src/main/java/com/stratio/cassandra/lucene/schema/mapping/builder/FloatMapperBuilder.java
@@ -15,8 +15,8 @@
*/
package com.stratio.cassandra.lucene.schema.mapping.builder;
+import com.fasterxml.jackson.annotation.JsonProperty;
import com.stratio.cassandra.lucene.schema.mapping.FloatMapper;
-import org.codehaus.jackson.annotate.JsonProperty;
/**
* {@link SingleColumnMapperBuilder} to build a new {@link FloatMapper}.
diff --git a/plugin/src/main/java/com/stratio/cassandra/lucene/schema/mapping/builder/GeoPointMapperBuilder.java b/plugin/src/main/java/com/stratio/cassandra/lucene/schema/mapping/builder/GeoPointMapperBuilder.java
index 7e036b565..c1c522eb6 100644
--- a/plugin/src/main/java/com/stratio/cassandra/lucene/schema/mapping/builder/GeoPointMapperBuilder.java
+++ b/plugin/src/main/java/com/stratio/cassandra/lucene/schema/mapping/builder/GeoPointMapperBuilder.java
@@ -15,9 +15,9 @@
*/
package com.stratio.cassandra.lucene.schema.mapping.builder;
+import com.fasterxml.jackson.annotation.JsonCreator;
+import com.fasterxml.jackson.annotation.JsonProperty;
import com.stratio.cassandra.lucene.schema.mapping.GeoPointMapper;
-import org.codehaus.jackson.annotate.JsonCreator;
-import org.codehaus.jackson.annotate.JsonProperty;
/**
* {@link MapperBuilder} to build a new {@link GeoPointMapper}.
diff --git a/plugin/src/main/java/com/stratio/cassandra/lucene/schema/mapping/builder/GeoShapeMapperBuilder.java b/plugin/src/main/java/com/stratio/cassandra/lucene/schema/mapping/builder/GeoShapeMapperBuilder.java
index cee3f4d09..a7261167b 100644
--- a/plugin/src/main/java/com/stratio/cassandra/lucene/schema/mapping/builder/GeoShapeMapperBuilder.java
+++ b/plugin/src/main/java/com/stratio/cassandra/lucene/schema/mapping/builder/GeoShapeMapperBuilder.java
@@ -15,11 +15,11 @@
*/
package com.stratio.cassandra.lucene.schema.mapping.builder;
+import com.fasterxml.jackson.annotation.JsonProperty;
import com.stratio.cassandra.lucene.common.GeoTransformation;
import com.stratio.cassandra.lucene.common.JTSNotFoundException;
import com.stratio.cassandra.lucene.schema.mapping.GeoPointMapper;
import com.stratio.cassandra.lucene.schema.mapping.GeoShapeMapper;
-import org.codehaus.jackson.annotate.JsonProperty;
import java.util.Arrays;
import java.util.List;
diff --git a/plugin/src/main/java/com/stratio/cassandra/lucene/schema/mapping/builder/IntegerMapperBuilder.java b/plugin/src/main/java/com/stratio/cassandra/lucene/schema/mapping/builder/IntegerMapperBuilder.java
index d88725d98..14c1ad3a2 100644
--- a/plugin/src/main/java/com/stratio/cassandra/lucene/schema/mapping/builder/IntegerMapperBuilder.java
+++ b/plugin/src/main/java/com/stratio/cassandra/lucene/schema/mapping/builder/IntegerMapperBuilder.java
@@ -15,8 +15,8 @@
*/
package com.stratio.cassandra.lucene.schema.mapping.builder;
+import com.fasterxml.jackson.annotation.JsonProperty;
import com.stratio.cassandra.lucene.schema.mapping.IntegerMapper;
-import org.codehaus.jackson.annotate.JsonProperty;
/**
* {@link SingleColumnMapperBuilder} to build a new {@link IntegerMapper}.
diff --git a/plugin/src/main/java/com/stratio/cassandra/lucene/schema/mapping/builder/LongMapperBuilder.java b/plugin/src/main/java/com/stratio/cassandra/lucene/schema/mapping/builder/LongMapperBuilder.java
index 47af84a39..d1ac581ef 100644
--- a/plugin/src/main/java/com/stratio/cassandra/lucene/schema/mapping/builder/LongMapperBuilder.java
+++ b/plugin/src/main/java/com/stratio/cassandra/lucene/schema/mapping/builder/LongMapperBuilder.java
@@ -15,8 +15,8 @@
*/
package com.stratio.cassandra.lucene.schema.mapping.builder;
+import com.fasterxml.jackson.annotation.JsonProperty;
import com.stratio.cassandra.lucene.schema.mapping.LongMapper;
-import org.codehaus.jackson.annotate.JsonProperty;
/**
* {@link SingleColumnMapperBuilder} to build a new {@link LongMapper}.
diff --git a/plugin/src/main/java/com/stratio/cassandra/lucene/schema/mapping/builder/MapperBuilder.java b/plugin/src/main/java/com/stratio/cassandra/lucene/schema/mapping/builder/MapperBuilder.java
index 4830dfd77..873dc3aeb 100644
--- a/plugin/src/main/java/com/stratio/cassandra/lucene/schema/mapping/builder/MapperBuilder.java
+++ b/plugin/src/main/java/com/stratio/cassandra/lucene/schema/mapping/builder/MapperBuilder.java
@@ -15,10 +15,10 @@
*/
package com.stratio.cassandra.lucene.schema.mapping.builder;
+import com.fasterxml.jackson.annotation.JsonProperty;
+import com.fasterxml.jackson.annotation.JsonSubTypes;
+import com.fasterxml.jackson.annotation.JsonTypeInfo;
import com.stratio.cassandra.lucene.schema.mapping.Mapper;
-import org.codehaus.jackson.annotate.JsonProperty;
-import org.codehaus.jackson.annotate.JsonSubTypes;
-import org.codehaus.jackson.annotate.JsonTypeInfo;
/**
* Abstract builder for creating new {@link Mapper}s.
diff --git a/plugin/src/main/java/com/stratio/cassandra/lucene/schema/mapping/builder/SingleColumnMapperBuilder.java b/plugin/src/main/java/com/stratio/cassandra/lucene/schema/mapping/builder/SingleColumnMapperBuilder.java
index 6e642e4f5..dc86b7e43 100644
--- a/plugin/src/main/java/com/stratio/cassandra/lucene/schema/mapping/builder/SingleColumnMapperBuilder.java
+++ b/plugin/src/main/java/com/stratio/cassandra/lucene/schema/mapping/builder/SingleColumnMapperBuilder.java
@@ -15,8 +15,8 @@
*/
package com.stratio.cassandra.lucene.schema.mapping.builder;
+import com.fasterxml.jackson.annotation.JsonProperty;
import com.stratio.cassandra.lucene.schema.mapping.SingleColumnMapper;
-import org.codehaus.jackson.annotate.JsonProperty;
/**
* Abstract {@link MapperBuilder} for creating new {@link SingleColumnMapper}s.
diff --git a/plugin/src/main/java/com/stratio/cassandra/lucene/schema/mapping/builder/StringMapperBuilder.java b/plugin/src/main/java/com/stratio/cassandra/lucene/schema/mapping/builder/StringMapperBuilder.java
index 478b6ffa3..a207b5e48 100644
--- a/plugin/src/main/java/com/stratio/cassandra/lucene/schema/mapping/builder/StringMapperBuilder.java
+++ b/plugin/src/main/java/com/stratio/cassandra/lucene/schema/mapping/builder/StringMapperBuilder.java
@@ -15,8 +15,8 @@
*/
package com.stratio.cassandra.lucene.schema.mapping.builder;
+import com.fasterxml.jackson.annotation.JsonProperty;
import com.stratio.cassandra.lucene.schema.mapping.StringMapper;
-import org.codehaus.jackson.annotate.JsonProperty;
/**
* {@link SingleColumnMapperBuilder} to build a new {@link StringMapper}.
diff --git a/plugin/src/main/java/com/stratio/cassandra/lucene/schema/mapping/builder/TextMapperBuilder.java b/plugin/src/main/java/com/stratio/cassandra/lucene/schema/mapping/builder/TextMapperBuilder.java
index e57bf16a1..d5bca5602 100644
--- a/plugin/src/main/java/com/stratio/cassandra/lucene/schema/mapping/builder/TextMapperBuilder.java
+++ b/plugin/src/main/java/com/stratio/cassandra/lucene/schema/mapping/builder/TextMapperBuilder.java
@@ -15,8 +15,8 @@
*/
package com.stratio.cassandra.lucene.schema.mapping.builder;
+import com.fasterxml.jackson.annotation.JsonProperty;
import com.stratio.cassandra.lucene.schema.mapping.TextMapper;
-import org.codehaus.jackson.annotate.JsonProperty;
/**
* {@link SingleColumnMapperBuilder} to build a new {@link TextMapper}.
diff --git a/plugin/src/main/java/com/stratio/cassandra/lucene/search/SearchBuilder.java b/plugin/src/main/java/com/stratio/cassandra/lucene/search/SearchBuilder.java
index c5c1e8b3d..ffb21f9d4 100644
--- a/plugin/src/main/java/com/stratio/cassandra/lucene/search/SearchBuilder.java
+++ b/plugin/src/main/java/com/stratio/cassandra/lucene/search/SearchBuilder.java
@@ -15,14 +15,14 @@
*/
package com.stratio.cassandra.lucene.search;
+import com.fasterxml.jackson.annotation.JsonProperty;
import com.stratio.cassandra.lucene.IndexException;
import com.stratio.cassandra.lucene.IndexPagingState;
+import com.stratio.cassandra.lucene.common.Builder;
+import com.stratio.cassandra.lucene.common.JsonSerializer;
import com.stratio.cassandra.lucene.search.condition.builder.ConditionBuilder;
import com.stratio.cassandra.lucene.search.sort.builder.SortFieldBuilder;
-import com.stratio.cassandra.lucene.util.Builder;
import com.stratio.cassandra.lucene.util.ByteBufferUtils;
-import com.stratio.cassandra.lucene.util.JsonSerializer;
-import org.codehaus.jackson.annotate.JsonProperty;
import java.io.IOException;
import java.util.Arrays;
diff --git a/plugin/src/main/java/com/stratio/cassandra/lucene/search/SearchBuilderLegacy.java b/plugin/src/main/java/com/stratio/cassandra/lucene/search/SearchBuilderLegacy.java
index 2134cba42..5c46e19cf 100644
--- a/plugin/src/main/java/com/stratio/cassandra/lucene/search/SearchBuilderLegacy.java
+++ b/plugin/src/main/java/com/stratio/cassandra/lucene/search/SearchBuilderLegacy.java
@@ -15,13 +15,13 @@
*/
package com.stratio.cassandra.lucene.search;
+import com.fasterxml.jackson.annotation.JsonProperty;
import com.stratio.cassandra.lucene.IndexException;
import com.stratio.cassandra.lucene.IndexPagingState;
+import com.stratio.cassandra.lucene.common.JsonSerializer;
import com.stratio.cassandra.lucene.search.condition.builder.ConditionBuilder;
import com.stratio.cassandra.lucene.search.sort.builder.SortFieldBuilder;
import com.stratio.cassandra.lucene.util.ByteBufferUtils;
-import com.stratio.cassandra.lucene.util.JsonSerializer;
-import org.codehaus.jackson.annotate.JsonProperty;
import java.io.IOException;
diff --git a/plugin/src/main/java/com/stratio/cassandra/lucene/search/condition/GeoBBoxCondition.java b/plugin/src/main/java/com/stratio/cassandra/lucene/search/condition/GeoBBoxCondition.java
index 6e8005411..aefddc057 100644
--- a/plugin/src/main/java/com/stratio/cassandra/lucene/search/condition/GeoBBoxCondition.java
+++ b/plugin/src/main/java/com/stratio/cassandra/lucene/search/condition/GeoBBoxCondition.java
@@ -18,14 +18,14 @@
import com.google.common.base.MoreObjects;
import com.spatial4j.core.shape.Rectangle;
import com.stratio.cassandra.lucene.IndexException;
+import com.stratio.cassandra.lucene.common.GeospatialUtils;
import com.stratio.cassandra.lucene.schema.mapping.GeoPointMapper;
-import com.stratio.cassandra.lucene.util.GeospatialUtils;
import org.apache.lucene.analysis.Analyzer;
import org.apache.lucene.search.Query;
import org.apache.lucene.spatial.query.SpatialArgs;
import org.apache.lucene.spatial.query.SpatialOperation;
-import static com.stratio.cassandra.lucene.util.GeospatialUtils.CONTEXT;
+import static com.stratio.cassandra.lucene.common.GeospatialUtils.CONTEXT;
/**
* A {@link Condition} that matches documents containing a shape contained in a certain bounding box.
diff --git a/plugin/src/main/java/com/stratio/cassandra/lucene/search/condition/GeoDistanceCondition.java b/plugin/src/main/java/com/stratio/cassandra/lucene/search/condition/GeoDistanceCondition.java
index d82965d2c..f41e3905e 100644
--- a/plugin/src/main/java/com/stratio/cassandra/lucene/search/condition/GeoDistanceCondition.java
+++ b/plugin/src/main/java/com/stratio/cassandra/lucene/search/condition/GeoDistanceCondition.java
@@ -21,8 +21,8 @@
import com.stratio.cassandra.lucene.IndexException;
import com.stratio.cassandra.lucene.common.GeoDistance;
import com.stratio.cassandra.lucene.common.GeoDistanceUnit;
+import com.stratio.cassandra.lucene.common.GeospatialUtils;
import com.stratio.cassandra.lucene.schema.mapping.GeoPointMapper;
-import com.stratio.cassandra.lucene.util.GeospatialUtils;
import org.apache.lucene.analysis.Analyzer;
import org.apache.lucene.search.BooleanQuery;
import org.apache.lucene.search.Query;
@@ -30,7 +30,7 @@
import org.apache.lucene.spatial.query.SpatialArgs;
import org.apache.lucene.spatial.query.SpatialOperation;
-import static com.stratio.cassandra.lucene.util.GeospatialUtils.CONTEXT;
+import static com.stratio.cassandra.lucene.common.GeospatialUtils.CONTEXT;
import static org.apache.lucene.search.BooleanClause.Occur.FILTER;
import static org.apache.lucene.search.BooleanClause.Occur.MUST_NOT;
diff --git a/plugin/src/main/java/com/stratio/cassandra/lucene/search/condition/LuceneCondition.java b/plugin/src/main/java/com/stratio/cassandra/lucene/search/condition/LuceneCondition.java
index 934b23978..c67e1c128 100644
--- a/plugin/src/main/java/com/stratio/cassandra/lucene/search/condition/LuceneCondition.java
+++ b/plugin/src/main/java/com/stratio/cassandra/lucene/search/condition/LuceneCondition.java
@@ -78,7 +78,7 @@ public Set postProcessingFields() {
@Override
public Query doQuery(Schema schema) {
try {
- Analyzer analyzer = schema.analyzer();
+ Analyzer analyzer = schema.analyzer;
QueryParser queryParser = new QueryParser(defaultField, analyzer);
queryParser.setAllowLeadingWildcard(true);
queryParser.setLowercaseExpandedTerms(false);
diff --git a/plugin/src/main/java/com/stratio/cassandra/lucene/search/condition/SingleColumnCondition.java b/plugin/src/main/java/com/stratio/cassandra/lucene/search/condition/SingleColumnCondition.java
index ee09cb27c..7ec49d360 100644
--- a/plugin/src/main/java/com/stratio/cassandra/lucene/search/condition/SingleColumnCondition.java
+++ b/plugin/src/main/java/com/stratio/cassandra/lucene/search/condition/SingleColumnCondition.java
@@ -56,7 +56,7 @@ public final Query doQuery(Schema schema) {
SingleColumnMapper.class.getSimpleName(),
mapper);
}
- return doQuery((SingleColumnMapper>) mapper, schema.analyzer());
+ return doQuery((SingleColumnMapper>) mapper, schema.analyzer);
}
/**
diff --git a/plugin/src/main/java/com/stratio/cassandra/lucene/search/condition/SingleMapperCondition.java b/plugin/src/main/java/com/stratio/cassandra/lucene/search/condition/SingleMapperCondition.java
index 3997a0b5c..626b1e896 100644
--- a/plugin/src/main/java/com/stratio/cassandra/lucene/search/condition/SingleMapperCondition.java
+++ b/plugin/src/main/java/com/stratio/cassandra/lucene/search/condition/SingleMapperCondition.java
@@ -58,7 +58,7 @@ public final Query doQuery(Schema schema) {
} else if (!type.isAssignableFrom(mapper.getClass())) {
throw new IndexException("Field '{}' requires a mapper of type '{}' but found '{}'", field, type, mapper);
}
- return doQuery((T) mapper, schema.analyzer());
+ return doQuery((T) mapper, schema.analyzer);
}
/**
diff --git a/plugin/src/main/java/com/stratio/cassandra/lucene/search/condition/builder/AllConditionBuilder.java b/plugin/src/main/java/com/stratio/cassandra/lucene/search/condition/builder/AllConditionBuilder.java
index 2dfdc9d57..b41f73561 100644
--- a/plugin/src/main/java/com/stratio/cassandra/lucene/search/condition/builder/AllConditionBuilder.java
+++ b/plugin/src/main/java/com/stratio/cassandra/lucene/search/condition/builder/AllConditionBuilder.java
@@ -15,8 +15,8 @@
*/
package com.stratio.cassandra.lucene.search.condition.builder;
+import com.fasterxml.jackson.annotation.JsonCreator;
import com.stratio.cassandra.lucene.search.condition.AllCondition;
-import org.codehaus.jackson.annotate.JsonCreator;
/**
* {@link ConditionBuilder} for building a new {@link AllCondition}.
diff --git a/plugin/src/main/java/com/stratio/cassandra/lucene/search/condition/builder/BitemporalConditionBuilder.java b/plugin/src/main/java/com/stratio/cassandra/lucene/search/condition/builder/BitemporalConditionBuilder.java
index 69346592e..47555ebb9 100644
--- a/plugin/src/main/java/com/stratio/cassandra/lucene/search/condition/builder/BitemporalConditionBuilder.java
+++ b/plugin/src/main/java/com/stratio/cassandra/lucene/search/condition/builder/BitemporalConditionBuilder.java
@@ -15,9 +15,9 @@
*/
package com.stratio.cassandra.lucene.search.condition.builder;
+import com.fasterxml.jackson.annotation.JsonCreator;
+import com.fasterxml.jackson.annotation.JsonProperty;
import com.stratio.cassandra.lucene.search.condition.BitemporalCondition;
-import org.codehaus.jackson.annotate.JsonCreator;
-import org.codehaus.jackson.annotate.JsonProperty;
/**
* {@link ConditionBuilder} for building a new {@link BitemporalCondition}.
diff --git a/plugin/src/main/java/com/stratio/cassandra/lucene/search/condition/builder/BooleanConditionBuilder.java b/plugin/src/main/java/com/stratio/cassandra/lucene/search/condition/builder/BooleanConditionBuilder.java
index 0ec99664b..740732d37 100644
--- a/plugin/src/main/java/com/stratio/cassandra/lucene/search/condition/builder/BooleanConditionBuilder.java
+++ b/plugin/src/main/java/com/stratio/cassandra/lucene/search/condition/builder/BooleanConditionBuilder.java
@@ -15,8 +15,8 @@
*/
package com.stratio.cassandra.lucene.search.condition.builder;
+import com.fasterxml.jackson.annotation.JsonProperty;
import com.stratio.cassandra.lucene.search.condition.BooleanCondition;
-import org.codehaus.jackson.annotate.JsonProperty;
import java.util.Arrays;
import java.util.LinkedList;
diff --git a/plugin/src/main/java/com/stratio/cassandra/lucene/search/condition/builder/ConditionBuilder.java b/plugin/src/main/java/com/stratio/cassandra/lucene/search/condition/builder/ConditionBuilder.java
index 3c101b965..8ae7b9cc6 100644
--- a/plugin/src/main/java/com/stratio/cassandra/lucene/search/condition/builder/ConditionBuilder.java
+++ b/plugin/src/main/java/com/stratio/cassandra/lucene/search/condition/builder/ConditionBuilder.java
@@ -15,11 +15,11 @@
*/
package com.stratio.cassandra.lucene.search.condition.builder;
+import com.fasterxml.jackson.annotation.JsonProperty;
+import com.fasterxml.jackson.annotation.JsonSubTypes;
+import com.fasterxml.jackson.annotation.JsonTypeInfo;
+import com.stratio.cassandra.lucene.common.Builder;
import com.stratio.cassandra.lucene.search.condition.Condition;
-import com.stratio.cassandra.lucene.util.Builder;
-import org.codehaus.jackson.annotate.JsonProperty;
-import org.codehaus.jackson.annotate.JsonSubTypes;
-import org.codehaus.jackson.annotate.JsonTypeInfo;
/**
* {@link Builder} for creating new {@link Condition}s.
diff --git a/plugin/src/main/java/com/stratio/cassandra/lucene/search/condition/builder/ContainsConditionBuilder.java b/plugin/src/main/java/com/stratio/cassandra/lucene/search/condition/builder/ContainsConditionBuilder.java
index d7682979a..9f90ab192 100644
--- a/plugin/src/main/java/com/stratio/cassandra/lucene/search/condition/builder/ContainsConditionBuilder.java
+++ b/plugin/src/main/java/com/stratio/cassandra/lucene/search/condition/builder/ContainsConditionBuilder.java
@@ -15,9 +15,9 @@
*/
package com.stratio.cassandra.lucene.search.condition.builder;
+import com.fasterxml.jackson.annotation.JsonCreator;
+import com.fasterxml.jackson.annotation.JsonProperty;
import com.stratio.cassandra.lucene.search.condition.ContainsCondition;
-import org.codehaus.jackson.annotate.JsonCreator;
-import org.codehaus.jackson.annotate.JsonProperty;
/**
* {@link ConditionBuilder} for building a new {@link ContainsCondition}.
diff --git a/plugin/src/main/java/com/stratio/cassandra/lucene/search/condition/builder/DateRangeConditionBuilder.java b/plugin/src/main/java/com/stratio/cassandra/lucene/search/condition/builder/DateRangeConditionBuilder.java
index 4efe572c8..75abddccd 100644
--- a/plugin/src/main/java/com/stratio/cassandra/lucene/search/condition/builder/DateRangeConditionBuilder.java
+++ b/plugin/src/main/java/com/stratio/cassandra/lucene/search/condition/builder/DateRangeConditionBuilder.java
@@ -15,9 +15,9 @@
*/
package com.stratio.cassandra.lucene.search.condition.builder;
+import com.fasterxml.jackson.annotation.JsonCreator;
+import com.fasterxml.jackson.annotation.JsonProperty;
import com.stratio.cassandra.lucene.search.condition.DateRangeCondition;
-import org.codehaus.jackson.annotate.JsonCreator;
-import org.codehaus.jackson.annotate.JsonProperty;
/**
* {@link ConditionBuilder} for building a new {@link DateRangeCondition}.
diff --git a/plugin/src/main/java/com/stratio/cassandra/lucene/search/condition/builder/FuzzyConditionBuilder.java b/plugin/src/main/java/com/stratio/cassandra/lucene/search/condition/builder/FuzzyConditionBuilder.java
index 2a9c3534e..c1ba3ebf9 100644
--- a/plugin/src/main/java/com/stratio/cassandra/lucene/search/condition/builder/FuzzyConditionBuilder.java
+++ b/plugin/src/main/java/com/stratio/cassandra/lucene/search/condition/builder/FuzzyConditionBuilder.java
@@ -15,9 +15,9 @@
*/
package com.stratio.cassandra.lucene.search.condition.builder;
+import com.fasterxml.jackson.annotation.JsonCreator;
+import com.fasterxml.jackson.annotation.JsonProperty;
import com.stratio.cassandra.lucene.search.condition.FuzzyCondition;
-import org.codehaus.jackson.annotate.JsonCreator;
-import org.codehaus.jackson.annotate.JsonProperty;
/**
* {@link ConditionBuilder} for building a new {@link FuzzyCondition}.
diff --git a/plugin/src/main/java/com/stratio/cassandra/lucene/search/condition/builder/GeoBBoxConditionBuilder.java b/plugin/src/main/java/com/stratio/cassandra/lucene/search/condition/builder/GeoBBoxConditionBuilder.java
index 298d755d9..d4ccd85da 100644
--- a/plugin/src/main/java/com/stratio/cassandra/lucene/search/condition/builder/GeoBBoxConditionBuilder.java
+++ b/plugin/src/main/java/com/stratio/cassandra/lucene/search/condition/builder/GeoBBoxConditionBuilder.java
@@ -15,9 +15,9 @@
*/
package com.stratio.cassandra.lucene.search.condition.builder;
+import com.fasterxml.jackson.annotation.JsonCreator;
+import com.fasterxml.jackson.annotation.JsonProperty;
import com.stratio.cassandra.lucene.search.condition.GeoBBoxCondition;
-import org.codehaus.jackson.annotate.JsonCreator;
-import org.codehaus.jackson.annotate.JsonProperty;
/**
* {@link ConditionBuilder} for building a new {@link GeoBBoxCondition}.
diff --git a/plugin/src/main/java/com/stratio/cassandra/lucene/search/condition/builder/GeoDistanceConditionBuilder.java b/plugin/src/main/java/com/stratio/cassandra/lucene/search/condition/builder/GeoDistanceConditionBuilder.java
index 149f51289..37213a4aa 100644
--- a/plugin/src/main/java/com/stratio/cassandra/lucene/search/condition/builder/GeoDistanceConditionBuilder.java
+++ b/plugin/src/main/java/com/stratio/cassandra/lucene/search/condition/builder/GeoDistanceConditionBuilder.java
@@ -15,10 +15,10 @@
*/
package com.stratio.cassandra.lucene.search.condition.builder;
+import com.fasterxml.jackson.annotation.JsonCreator;
+import com.fasterxml.jackson.annotation.JsonProperty;
import com.stratio.cassandra.lucene.common.GeoDistance;
import com.stratio.cassandra.lucene.search.condition.GeoDistanceCondition;
-import org.codehaus.jackson.annotate.JsonCreator;
-import org.codehaus.jackson.annotate.JsonProperty;
/**
* {@link ConditionBuilder} for building a new {@link GeoDistanceCondition}.
diff --git a/plugin/src/main/java/com/stratio/cassandra/lucene/search/condition/builder/GeoShapeConditionBuilder.java b/plugin/src/main/java/com/stratio/cassandra/lucene/search/condition/builder/GeoShapeConditionBuilder.java
index fa1419d50..2c1f33dc9 100644
--- a/plugin/src/main/java/com/stratio/cassandra/lucene/search/condition/builder/GeoShapeConditionBuilder.java
+++ b/plugin/src/main/java/com/stratio/cassandra/lucene/search/condition/builder/GeoShapeConditionBuilder.java
@@ -15,13 +15,13 @@
*/
package com.stratio.cassandra.lucene.search.condition.builder;
+import com.fasterxml.jackson.annotation.JsonCreator;
+import com.fasterxml.jackson.annotation.JsonProperty;
import com.stratio.cassandra.lucene.common.GeoOperation;
import com.stratio.cassandra.lucene.common.GeoShape;
import com.stratio.cassandra.lucene.common.JTSNotFoundException;
import com.stratio.cassandra.lucene.search.condition.GeoBBoxCondition;
import com.stratio.cassandra.lucene.search.condition.GeoShapeCondition;
-import org.codehaus.jackson.annotate.JsonCreator;
-import org.codehaus.jackson.annotate.JsonProperty;
/**
* {@link ConditionBuilder} for building a new {@link GeoShapeCondition}.
diff --git a/plugin/src/main/java/com/stratio/cassandra/lucene/search/condition/builder/LuceneConditionBuilder.java b/plugin/src/main/java/com/stratio/cassandra/lucene/search/condition/builder/LuceneConditionBuilder.java
index fa41b91fb..4daf92ae0 100644
--- a/plugin/src/main/java/com/stratio/cassandra/lucene/search/condition/builder/LuceneConditionBuilder.java
+++ b/plugin/src/main/java/com/stratio/cassandra/lucene/search/condition/builder/LuceneConditionBuilder.java
@@ -15,9 +15,9 @@
*/
package com.stratio.cassandra.lucene.search.condition.builder;
+import com.fasterxml.jackson.annotation.JsonCreator;
+import com.fasterxml.jackson.annotation.JsonProperty;
import com.stratio.cassandra.lucene.search.condition.LuceneCondition;
-import org.codehaus.jackson.annotate.JsonCreator;
-import org.codehaus.jackson.annotate.JsonProperty;
/**
* {@link ConditionBuilder} for building a new {@link LuceneCondition}.
diff --git a/plugin/src/main/java/com/stratio/cassandra/lucene/search/condition/builder/MatchConditionBuilder.java b/plugin/src/main/java/com/stratio/cassandra/lucene/search/condition/builder/MatchConditionBuilder.java
index 115920c44..434ac492e 100644
--- a/plugin/src/main/java/com/stratio/cassandra/lucene/search/condition/builder/MatchConditionBuilder.java
+++ b/plugin/src/main/java/com/stratio/cassandra/lucene/search/condition/builder/MatchConditionBuilder.java
@@ -15,9 +15,9 @@
*/
package com.stratio.cassandra.lucene.search.condition.builder;
+import com.fasterxml.jackson.annotation.JsonCreator;
+import com.fasterxml.jackson.annotation.JsonProperty;
import com.stratio.cassandra.lucene.search.condition.MatchCondition;
-import org.codehaus.jackson.annotate.JsonCreator;
-import org.codehaus.jackson.annotate.JsonProperty;
/**
* {@link ConditionBuilder} for building a new {@link MatchCondition}.
diff --git a/plugin/src/main/java/com/stratio/cassandra/lucene/search/condition/builder/NoneConditionBuilder.java b/plugin/src/main/java/com/stratio/cassandra/lucene/search/condition/builder/NoneConditionBuilder.java
index fa75ab85e..105fbe7e5 100644
--- a/plugin/src/main/java/com/stratio/cassandra/lucene/search/condition/builder/NoneConditionBuilder.java
+++ b/plugin/src/main/java/com/stratio/cassandra/lucene/search/condition/builder/NoneConditionBuilder.java
@@ -15,8 +15,8 @@
*/
package com.stratio.cassandra.lucene.search.condition.builder;
+import com.fasterxml.jackson.annotation.JsonCreator;
import com.stratio.cassandra.lucene.search.condition.NoneCondition;
-import org.codehaus.jackson.annotate.JsonCreator;
/**
* {@link ConditionBuilder} for building a new {@link NoneCondition}.
diff --git a/plugin/src/main/java/com/stratio/cassandra/lucene/search/condition/builder/PhraseConditionBuilder.java b/plugin/src/main/java/com/stratio/cassandra/lucene/search/condition/builder/PhraseConditionBuilder.java
index 0559f4d5b..875c8646a 100644
--- a/plugin/src/main/java/com/stratio/cassandra/lucene/search/condition/builder/PhraseConditionBuilder.java
+++ b/plugin/src/main/java/com/stratio/cassandra/lucene/search/condition/builder/PhraseConditionBuilder.java
@@ -15,9 +15,9 @@
*/
package com.stratio.cassandra.lucene.search.condition.builder;
+import com.fasterxml.jackson.annotation.JsonCreator;
+import com.fasterxml.jackson.annotation.JsonProperty;
import com.stratio.cassandra.lucene.search.condition.PhraseCondition;
-import org.codehaus.jackson.annotate.JsonCreator;
-import org.codehaus.jackson.annotate.JsonProperty;
/**
* {@link ConditionBuilder} for building a new {@link PhraseCondition}.
diff --git a/plugin/src/main/java/com/stratio/cassandra/lucene/search/condition/builder/PrefixConditionBuilder.java b/plugin/src/main/java/com/stratio/cassandra/lucene/search/condition/builder/PrefixConditionBuilder.java
index 527703391..738f623de 100644
--- a/plugin/src/main/java/com/stratio/cassandra/lucene/search/condition/builder/PrefixConditionBuilder.java
+++ b/plugin/src/main/java/com/stratio/cassandra/lucene/search/condition/builder/PrefixConditionBuilder.java
@@ -15,9 +15,9 @@
*/
package com.stratio.cassandra.lucene.search.condition.builder;
+import com.fasterxml.jackson.annotation.JsonCreator;
+import com.fasterxml.jackson.annotation.JsonProperty;
import com.stratio.cassandra.lucene.search.condition.PrefixCondition;
-import org.codehaus.jackson.annotate.JsonCreator;
-import org.codehaus.jackson.annotate.JsonProperty;
/**
* {@link ConditionBuilder} for building a new {@link PrefixCondition}.
diff --git a/plugin/src/main/java/com/stratio/cassandra/lucene/search/condition/builder/RangeConditionBuilder.java b/plugin/src/main/java/com/stratio/cassandra/lucene/search/condition/builder/RangeConditionBuilder.java
index 5967cd2a3..ee21523a8 100644
--- a/plugin/src/main/java/com/stratio/cassandra/lucene/search/condition/builder/RangeConditionBuilder.java
+++ b/plugin/src/main/java/com/stratio/cassandra/lucene/search/condition/builder/RangeConditionBuilder.java
@@ -15,9 +15,9 @@
*/
package com.stratio.cassandra.lucene.search.condition.builder;
+import com.fasterxml.jackson.annotation.JsonCreator;
+import com.fasterxml.jackson.annotation.JsonProperty;
import com.stratio.cassandra.lucene.search.condition.RangeCondition;
-import org.codehaus.jackson.annotate.JsonCreator;
-import org.codehaus.jackson.annotate.JsonProperty;
/**
* {@link ConditionBuilder} for building a new {@link RangeCondition}.
diff --git a/plugin/src/main/java/com/stratio/cassandra/lucene/search/condition/builder/RegexpConditionBuilder.java b/plugin/src/main/java/com/stratio/cassandra/lucene/search/condition/builder/RegexpConditionBuilder.java
index f87e4b47b..ed2b8292d 100644
--- a/plugin/src/main/java/com/stratio/cassandra/lucene/search/condition/builder/RegexpConditionBuilder.java
+++ b/plugin/src/main/java/com/stratio/cassandra/lucene/search/condition/builder/RegexpConditionBuilder.java
@@ -15,9 +15,9 @@
*/
package com.stratio.cassandra.lucene.search.condition.builder;
+import com.fasterxml.jackson.annotation.JsonCreator;
+import com.fasterxml.jackson.annotation.JsonProperty;
import com.stratio.cassandra.lucene.search.condition.RegexpCondition;
-import org.codehaus.jackson.annotate.JsonCreator;
-import org.codehaus.jackson.annotate.JsonProperty;
/**
* {@link ConditionBuilder} for building a new {@link RegexpCondition}.
diff --git a/plugin/src/main/java/com/stratio/cassandra/lucene/search/condition/builder/WildcardConditionBuilder.java b/plugin/src/main/java/com/stratio/cassandra/lucene/search/condition/builder/WildcardConditionBuilder.java
index 6bcc51c85..8a0b5a74f 100644
--- a/plugin/src/main/java/com/stratio/cassandra/lucene/search/condition/builder/WildcardConditionBuilder.java
+++ b/plugin/src/main/java/com/stratio/cassandra/lucene/search/condition/builder/WildcardConditionBuilder.java
@@ -15,9 +15,9 @@
*/
package com.stratio.cassandra.lucene.search.condition.builder;
+import com.fasterxml.jackson.annotation.JsonCreator;
+import com.fasterxml.jackson.annotation.JsonProperty;
import com.stratio.cassandra.lucene.search.condition.WildcardCondition;
-import org.codehaus.jackson.annotate.JsonCreator;
-import org.codehaus.jackson.annotate.JsonProperty;
/**
* {@link ConditionBuilder} for building a new {@link WildcardCondition}.
diff --git a/plugin/src/main/java/com/stratio/cassandra/lucene/search/sort/GeoDistanceSortField.java b/plugin/src/main/java/com/stratio/cassandra/lucene/search/sort/GeoDistanceSortField.java
index 762c000f9..94b9aaf32 100644
--- a/plugin/src/main/java/com/stratio/cassandra/lucene/search/sort/GeoDistanceSortField.java
+++ b/plugin/src/main/java/com/stratio/cassandra/lucene/search/sort/GeoDistanceSortField.java
@@ -19,10 +19,10 @@
import com.spatial4j.core.distance.DistanceUtils;
import com.spatial4j.core.shape.Point;
import com.stratio.cassandra.lucene.IndexException;
+import com.stratio.cassandra.lucene.common.GeospatialUtils;
import com.stratio.cassandra.lucene.schema.Schema;
import com.stratio.cassandra.lucene.schema.mapping.GeoPointMapper;
import com.stratio.cassandra.lucene.schema.mapping.Mapper;
-import com.stratio.cassandra.lucene.util.GeospatialUtils;
import org.apache.commons.lang3.StringUtils;
import org.apache.lucene.queries.function.ValueSource;
import org.apache.lucene.spatial.SpatialStrategy;
@@ -30,7 +30,7 @@
import java.util.Collections;
import java.util.Set;
-import static com.stratio.cassandra.lucene.util.GeospatialUtils.CONTEXT;
+import static com.stratio.cassandra.lucene.common.GeospatialUtils.CONTEXT;
/**
* {@link SortField} to sort geo points by their distance to a fixed reference point.
diff --git a/plugin/src/main/java/com/stratio/cassandra/lucene/search/sort/builder/GeoDistanceSortFieldBuilder.java b/plugin/src/main/java/com/stratio/cassandra/lucene/search/sort/builder/GeoDistanceSortFieldBuilder.java
index 6f3f98ae6..f3967ff5e 100644
--- a/plugin/src/main/java/com/stratio/cassandra/lucene/search/sort/builder/GeoDistanceSortFieldBuilder.java
+++ b/plugin/src/main/java/com/stratio/cassandra/lucene/search/sort/builder/GeoDistanceSortFieldBuilder.java
@@ -15,9 +15,9 @@
*/
package com.stratio.cassandra.lucene.search.sort.builder;
+import com.fasterxml.jackson.annotation.JsonCreator;
+import com.fasterxml.jackson.annotation.JsonProperty;
import com.stratio.cassandra.lucene.search.sort.GeoDistanceSortField;
-import org.codehaus.jackson.annotate.JsonCreator;
-import org.codehaus.jackson.annotate.JsonProperty;
/**
* @author Eduardo Alonso {@literal }
diff --git a/plugin/src/main/java/com/stratio/cassandra/lucene/search/sort/builder/SimpleSortFieldBuilder.java b/plugin/src/main/java/com/stratio/cassandra/lucene/search/sort/builder/SimpleSortFieldBuilder.java
index d2bfbb803..f8a516a85 100644
--- a/plugin/src/main/java/com/stratio/cassandra/lucene/search/sort/builder/SimpleSortFieldBuilder.java
+++ b/plugin/src/main/java/com/stratio/cassandra/lucene/search/sort/builder/SimpleSortFieldBuilder.java
@@ -15,9 +15,9 @@
*/
package com.stratio.cassandra.lucene.search.sort.builder;
+import com.fasterxml.jackson.annotation.JsonCreator;
+import com.fasterxml.jackson.annotation.JsonProperty;
import com.stratio.cassandra.lucene.search.sort.SimpleSortField;
-import org.codehaus.jackson.annotate.JsonCreator;
-import org.codehaus.jackson.annotate.JsonProperty;
/**
* @author Eduardo Alonso {@literal }
diff --git a/plugin/src/main/java/com/stratio/cassandra/lucene/search/sort/builder/SortFieldBuilder.java b/plugin/src/main/java/com/stratio/cassandra/lucene/search/sort/builder/SortFieldBuilder.java
index 11ffb037f..2219c9271 100644
--- a/plugin/src/main/java/com/stratio/cassandra/lucene/search/sort/builder/SortFieldBuilder.java
+++ b/plugin/src/main/java/com/stratio/cassandra/lucene/search/sort/builder/SortFieldBuilder.java
@@ -15,11 +15,11 @@
*/
package com.stratio.cassandra.lucene.search.sort.builder;
+import com.fasterxml.jackson.annotation.JsonProperty;
+import com.fasterxml.jackson.annotation.JsonSubTypes;
+import com.fasterxml.jackson.annotation.JsonTypeInfo;
+import com.stratio.cassandra.lucene.common.Builder;
import com.stratio.cassandra.lucene.search.sort.SortField;
-import com.stratio.cassandra.lucene.util.Builder;
-import org.codehaus.jackson.annotate.JsonProperty;
-import org.codehaus.jackson.annotate.JsonSubTypes;
-import org.codehaus.jackson.annotate.JsonTypeInfo;
/**
* {@link Builder} for building a new {@link SortField}.
diff --git a/plugin/src/main/java/com/stratio/cassandra/lucene/util/ByteBufferUtils.java b/plugin/src/main/java/com/stratio/cassandra/lucene/util/ByteBufferUtils.java
deleted file mode 100755
index 98275036a..000000000
--- a/plugin/src/main/java/com/stratio/cassandra/lucene/util/ByteBufferUtils.java
+++ /dev/null
@@ -1,215 +0,0 @@
-/*
- * Copyright (C) 2014 Stratio (http://stratio.com)
- *
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-package com.stratio.cassandra.lucene.util;
-
-import org.apache.cassandra.db.marshal.AbstractType;
-import org.apache.cassandra.db.marshal.CompositeType;
-import org.apache.cassandra.utils.ByteBufferUtil;
-import org.apache.cassandra.utils.Hex;
-import org.apache.lucene.util.BytesRef;
-
-import java.nio.ByteBuffer;
-import java.util.List;
-
-/**
- * Utility class with some {@link ByteBuffer}/{@link AbstractType} utilities.
- *
- * @author Andres de la Pena {@literal }
- */
-public final class ByteBufferUtils {
-
- /** Private constructor to hide the implicit public one. */
- private ByteBufferUtils() {
- }
-
- /**
- * Returns the specified {@link ByteBuffer} as a byte array.
- *
- * @param byteBuffer a {@link ByteBuffer} to be converted to a byte array
- * @return the byte array representation of the {@code byteBuffer}
- */
- public static byte[] asArray(ByteBuffer byteBuffer) {
- ByteBuffer bb = ByteBufferUtil.clone(byteBuffer);
- byte[] bytes = new byte[bb.remaining()];
- bb.get(bytes);
- return bytes;
- }
-
- /**
- * Returns {@code true} if the specified {@link ByteBuffer} is empty, {@code false} otherwise.
- *
- * @param byteBuffer the byte buffer
- * @return {@code true} if the specified {@link ByteBuffer} is empty, {@code false} otherwise.
- */
- public static boolean isEmpty(ByteBuffer byteBuffer) {
- return byteBuffer.remaining() == 0;
- }
-
- /**
- * Returns the {@link ByteBuffer}s contained in {@code byteBuffer} according to {@code type}.
- *
- * @param byteBuffer the byte buffer to be split
- * @param type the {@link AbstractType} of {@code byteBuffer}
- * @return the {@link ByteBuffer}s contained in {@code byteBuffer} according to {@code type}
- */
- public static ByteBuffer[] split(ByteBuffer byteBuffer, AbstractType> type) {
- if (type instanceof CompositeType) {
- return ((CompositeType) type).split(byteBuffer);
- } else {
- return new ByteBuffer[]{byteBuffer};
- }
- }
-
- /**
- * Returns a {@code String} representation of {@code byteBuffer} validated by {@code type}.
- *
- * @param byteBuffer the {@link ByteBuffer} to be converted to {@code String}
- * @param type {@link AbstractType} of {@code byteBuffer}
- * @return a {@code String} representation of {@code byteBuffer} validated by {@code type}
- */
- public static String toString(ByteBuffer byteBuffer, AbstractType> type) {
- if (type instanceof CompositeType) {
- CompositeType composite = (CompositeType) type;
- List> types = composite.types;
- ByteBuffer[] components = composite.split(byteBuffer);
- StringBuilder sb = new StringBuilder();
- for (int i = 0; i < components.length; i++) {
- AbstractType> componentType = types.get(i);
- ByteBuffer component = components[i];
- sb.append(componentType.compose(component));
- if (i < types.size() - 1) {
- sb.append(':');
- }
- }
- return sb.toString();
- } else {
- return type.compose(byteBuffer).toString();
- }
- }
-
- /**
- * Returns the hexadecimal {@code String} representation of the specified {@link ByteBuffer}.
- *
- * @param byteBuffer a {@link ByteBuffer}
- * @return the hexadecimal {@code String} representation of {@code byteBuffer}
- */
- public static String toHex(ByteBuffer byteBuffer) {
- return byteBuffer == null ? null : ByteBufferUtil.bytesToHex(byteBuffer);
- }
-
- /**
- * Returns the hexadecimal {@code String} representation of the specified {@link BytesRef}.
- *
- * @param bytesRef a {@link BytesRef}
- * @return the hexadecimal {@code String} representation of {@code bytesRef}
- */
- public static String toHex(BytesRef bytesRef) {
- return ByteBufferUtil.bytesToHex(byteBuffer(bytesRef));
- }
-
- /**
- * Returns the hexadecimal {@code String} representation of the specified {@code byte} array.
- *
- * @param bytes the {@code byte} array
- * @return The hexadecimal {@code String} representation of {@code bytes}
- */
- public static String toHex(byte[] bytes) {
- return Hex.bytesToHex(bytes);
- }
-
- /**
- * Returns the hexadecimal {@code String} representation of the specified {@code byte}.
- *
- * @param b the {@code byte}
- * @return the hexadecimal {@code String} representation of {@code b}
- */
- public static String toHex(byte b) {
- return Hex.bytesToHex(b);
- }
-
- /**
- * Returns the {@link BytesRef} representation of the specified {@link ByteBuffer}.
- *
- * @param bb the byte buffer
- * @return the {@link BytesRef} representation of the byte buffer
- */
- public static BytesRef bytesRef(ByteBuffer bb) {
- byte[] bytes = asArray(bb);
- return new BytesRef(bytes);
- }
-
- /**
- * Returns the {@link ByteBuffer} representation of the specified {@link BytesRef}.
- *
- * @param bytesRef the {@link BytesRef}
- * @return the {@link ByteBuffer} representation of {@code bytesRef}
- */
- public static ByteBuffer byteBuffer(BytesRef bytesRef) {
- byte[] bytes = bytesRef.bytes;
- return ByteBuffer.wrap(bytes, bytesRef.offset, bytesRef.offset + bytesRef.length);
- }
-
- /**
- * Returns the {@link ByteBuffer} representation of the specified hex {@link String}.
- *
- * @param hex an hexadecimal representation of a byte array
- * @return the {@link ByteBuffer} representation of {@code hex}
- */
- public static ByteBuffer byteBuffer(String hex) {
- return hex == null ? null : ByteBufferUtil.hexToBytes(hex);
- }
-
- /**
- * Returns a {@link ByteBuffer} representing the specified array of {@link ByteBuffer}s.
- *
- * @param bbs an array of byte buffers
- * @return a {@link ByteBuffer} representing {@code bbs}
- */
- public static ByteBuffer compose(ByteBuffer... bbs) {
- int totalLength = 2;
- for (ByteBuffer bb : bbs) {
- totalLength += 2 + bb.remaining();
- }
- ByteBuffer out = ByteBuffer.allocate(totalLength);
-
- ByteBufferUtil.writeShortLength(out, bbs.length);
- for (ByteBuffer bb : bbs) {
- ByteBufferUtil.writeShortLength(out, bb.remaining());
- out.put(bb.duplicate());
- }
- out.flip();
- return out;
- }
-
- /**
- * Returns the components of the specified {@link ByteBuffer} created with {@link #compose(ByteBuffer...)}.
- *
- * @param bb a byte buffer created with {@link #compose(ByteBuffer...)}
- * @return the components of {@code bb}
- */
- public static ByteBuffer[] decompose(ByteBuffer bb) {
-
- int countComponents = ByteBufferUtil.readShortLength(bb);
- ByteBuffer[] components = new ByteBuffer[countComponents];
-
- for (int i = 0; i < countComponents; i++) {
- int length = ByteBufferUtil.readShortLength(bb);
- components[i] = ByteBufferUtil.readBytes(bb, length);
- }
-
- return components;
- }
-}
\ No newline at end of file
diff --git a/plugin/src/main/java/com/stratio/cassandra/lucene/util/TimeCounter.java b/plugin/src/main/java/com/stratio/cassandra/lucene/util/TimeCounter.java
deleted file mode 100644
index f03892660..000000000
--- a/plugin/src/main/java/com/stratio/cassandra/lucene/util/TimeCounter.java
+++ /dev/null
@@ -1,106 +0,0 @@
-/*
- * Copyright (C) 2014 Stratio (http://stratio.com)
- *
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-package com.stratio.cassandra.lucene.util;
-
-/**
- * Class for measuring time durations.
- *
- * @author Andres de la Pena {@literal }
- */
-public final class TimeCounter {
-
- private enum State {
- UNSTARTED, RUNNING, STOPPED
- }
-
- private long startTimeMillis, stopTimeMillis;
- private State state;
-
- /**
- * Returns a new stopped {@link TimeCounter}.
- *
- * @return a new stopped counter
- */
- public static TimeCounter create() {
- return new TimeCounter();
- }
-
- /**
- * Builds a new stopped {@link TimeCounter}.
- */
- private TimeCounter() {
- this.state = State.UNSTARTED;
- }
-
- /**
- * Starts or resumes the time count.
- *
- * @return this
- */
- public TimeCounter start() {
- switch (state) {
- case UNSTARTED:
- startTimeMillis = System.currentTimeMillis();
- break;
- case RUNNING:
- throw new IllegalStateException("Already started");
- case STOPPED:
- startTimeMillis += System.currentTimeMillis() - stopTimeMillis;
- break;
- default:
- throw new IllegalStateException("Unrecognized state " + state);
- }
- state = State.RUNNING;
- return this;
- }
-
- /**
- * Stops or suspends the time count.
- *
- * @return this
- */
- public TimeCounter stop() {
- switch (state) {
- case UNSTARTED:
- throw new IllegalStateException("Not started. ");
- case STOPPED:
- throw new IllegalStateException("Already stopped. ");
- case RUNNING:
- this.stopTimeMillis = System.currentTimeMillis();
- default:
- state = State.STOPPED;
- return this;
- }
- }
-
- /**
- * Returns a summary of the time that the stopwatch has recorded as a string.
- *
- * @return a summary of the time that the stopwatch has recorded
- */
- public String toString() {
- return String.valueOf(stopTimeMillis - startTimeMillis) + " ms";
- }
-
- /**
- * Returns the counted time in milliseconds.
- *
- * @return the counted time in milliseconds
- */
- public long getTime() {
- return stopTimeMillis - startTimeMillis;
- }
-}
diff --git a/plugin/src/main/java/com/stratio/cassandra/lucene/util/Tracer.java b/plugin/src/main/java/com/stratio/cassandra/lucene/util/Tracer.java
deleted file mode 100644
index f84e6c20c..000000000
--- a/plugin/src/main/java/com/stratio/cassandra/lucene/util/Tracer.java
+++ /dev/null
@@ -1,79 +0,0 @@
-/*
- * Copyright (C) 2014 Stratio (http://stratio.com)
- *
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-package com.stratio.cassandra.lucene.util;
-
-import org.apache.cassandra.tracing.Tracing;
-import org.slf4j.Logger;
-import org.slf4j.LoggerFactory;
-
-/**
- * Wrapper for {@link Tracing} avoid testing environment failures.
- *
- * @author Andres de la Pena {@literal }
- */
-@SuppressWarnings("unused")
-public class Tracer {
-
- private static final Logger logger = LoggerFactory.getLogger(Tracer.class);
-
- /**
- * Traces the specified {@code String} message.
- *
- * @param message the message to be traced
- */
- public static void trace(String message) {
- trace(() -> Tracing.trace(message));
- }
-
- /**
- * Traces the message composed by the specified format and single argument.
- *
- * @param format the message {@code String} format
- * @param arg the argument
- */
- public static void trace(String format, Object arg) {
- trace(() -> Tracing.trace(format, arg));
- }
-
- /**
- * Traces the message composed by the specified format and arguments pair.
- *
- * @param format the message {@code String} format
- * @param arg1 the first argument
- * @param arg2 the second argument
- */
- public static void trace(String format, Object arg1, Object arg2) {
- trace(() -> Tracing.trace(format, arg1, arg2));
- }
-
- /**
- * Traces the message composed by the specified format and arguments array.
- *
- * @param format the message {@code String} format
- * @param args the arguments vararg
- */
- public static void trace(String format, Object... args) {
- trace(() -> Tracing.trace(format, args));
- }
-
- private static void trace(Runnable runnable) {
- try {
- runnable.run();
- } catch (ExceptionInInitializerError | NoClassDefFoundError e) {
- logger.warn("Unable to trace: " + e.getMessage());
- }
- }
-}
diff --git a/plugin/src/main/scala/com/stratio/cassandra/lucene/Index.scala b/plugin/src/main/scala/com/stratio/cassandra/lucene/Index.scala
index a2fa8992b..bad9bbdfb 100644
--- a/plugin/src/main/scala/com/stratio/cassandra/lucene/Index.scala
+++ b/plugin/src/main/scala/com/stratio/cassandra/lucene/Index.scala
@@ -21,9 +21,8 @@ import java.util.function.BiFunction
import java.util.{Collections, Optional}
import java.{util => java}
-import com.stratio.cassandra.lucene.Index.logger
import com.stratio.cassandra.lucene.search.Search
-import com.stratio.cassandra.lucene.util.JavaConversions._
+import com.stratio.cassandra.lucene.util.Logging
import org.apache.cassandra.config.{CFMetaData, ColumnDefinition}
import org.apache.cassandra.cql3.Operator
import org.apache.cassandra.db.SinglePartitionReadCommand.Group
@@ -33,25 +32,22 @@ import org.apache.cassandra.db.marshal.{AbstractType, UTF8Type}
import org.apache.cassandra.db.partitions._
import org.apache.cassandra.exceptions.{ConfigurationException, InvalidRequestException}
import org.apache.cassandra.index.Index.{Indexer, Searcher}
-import org.apache.cassandra.index.IndexRegistry
import org.apache.cassandra.index.transactions.IndexTransaction
+import org.apache.cassandra.index.{IndexRegistry, Index => CassandraIndex}
import org.apache.cassandra.schema.IndexMetadata
import org.apache.cassandra.service.ClientState
import org.apache.cassandra.utils.concurrent.OpOrder
-import org.slf4j.LoggerFactory
-/** [[org.apache.cassandra.index.Index]] that uses Apache Lucene as backend. It allows, among
+/** [[CassandraIndex]] that uses Apache Lucene as backend. It allows, among
* others, multi-column and full-text search.
*
* @param table the indexed table
* @param indexMetadata the index's metadata
* @author Andres de la Pena `adelapena@stratio.com`
*/
-class Index(
- table: ColumnFamilyStore,
- indexMetadata: IndexMetadata)
- extends org.apache.cassandra.index.Index {
+class Index(table: ColumnFamilyStore, indexMetadata: IndexMetadata)
+ extends CassandraIndex with Logging {
logger.debug(s"Building Lucene index ${table.metadata} $indexMetadata")
@@ -120,7 +116,7 @@ class Index(
*
* @return the Index's backing storage table
*/
- override def getBackingTable: Optional[ColumnFamilyStore] = None
+ override def getBackingTable: Optional[ColumnFamilyStore] = Optional.empty()
/** Return a task which performs a blocking flush of the index's data to persistent storage.
*
@@ -193,7 +189,7 @@ class Index(
*/
override def supportsExpression(column: ColumnDefinition, operator: Operator): Boolean = {
logger.trace(s"Asking if the index supports the expression $column $operator")
- service.supportsExpression(column, operator)
+ service.expressionMapper.supports(column, operator)
}
/** If the index supports custom search expressions using the {{{SELECT * FROM table WHERE
@@ -220,7 +216,7 @@ class Index(
*/
override def getPostIndexQueryFilter(filter: RowFilter): RowFilter = {
logger.trace(s"Getting the post index query filter for $filter")
- service.getPostIndexQueryFilter(filter)
+ service.expressionMapper.postIndexQueryFilter(filter)
}
/** Return an estimate of the number of results this index is expected to return for any given
@@ -294,13 +290,11 @@ class Index(
*/
override def postProcessorFor(command: ReadCommand)
: BiFunction[PartitionIterator, ReadCommand, PartitionIterator] = {
- (partitions: PartitionIterator, command: ReadCommand) => service.postProcess(
- partitions,
- command)
+ new ReadCommandPostProcessor(service)
}
def postProcessorFor(group: Group): BiFunction[PartitionIterator, Group, PartitionIterator] = {
- (partitions: PartitionIterator, group: Group) => service.postProcess(partitions, group)
+ new GroupPostProcessor(service)
}
/** Factory method for query time search helper. Custom index implementations should perform any
@@ -315,10 +309,7 @@ class Index(
override def searcherFor(command: ReadCommand): Searcher = {
logger.trace(s"Getting searcher for $command")
try {
- new Searcher {
- override def search(controller: ReadExecutionController): UnfilteredPartitionIterator =
- service.search(command, controller)
- }
+ controller => service.search(command, controller)
} catch {
case e: Exception =>
logger.error(s"Error getting searcher for command: $command", e)
@@ -344,9 +335,8 @@ class Index(
}
-object Index {
-
- private val logger = LoggerFactory.getLogger(classOf[Index])
+/** Companion object for [[Index]]. */
+object Index extends Logging {
// Setup CQL query handler
try {
@@ -355,7 +345,7 @@ object Index {
val modifiersField = classOf[Field].getDeclaredField("modifiers")
modifiersField.setAccessible(true)
modifiersField.setInt(field, field.getModifiers & ~Modifier.FINAL)
- field.set(null, new IndexQueryHandler());
+ field.set(null, new IndexQueryHandler);
} catch {
case e: Exception => logger.error("Unable to set Lucene CQL query handler", e)
}
@@ -382,4 +372,4 @@ object Index {
Collections.emptyMap[String, String]
}
-}
+}
\ No newline at end of file
diff --git a/plugin/src/main/scala/com/stratio/cassandra/lucene/IndexException.scala b/plugin/src/main/scala/com/stratio/cassandra/lucene/IndexException.scala
index 72d15d865..f6146b431 100644
--- a/plugin/src/main/scala/com/stratio/cassandra/lucene/IndexException.scala
+++ b/plugin/src/main/scala/com/stratio/cassandra/lucene/IndexException.scala
@@ -16,7 +16,6 @@
package com.stratio.cassandra.lucene
import com.stratio.cassandra.lucene.IndexException._
-import org.slf4j.Logger
import org.slf4j.helpers.MessageFormatter
/** [[RuntimeException]] to be thrown when there are Lucene index-related errors.
@@ -82,8 +81,7 @@ case class IndexException(
def this(message: String, a1: AnyRef, a2: AnyRef, a3: AnyRef, a4: AnyRef) =
this(formatN(message, a1, a2, a3, a4), null)
- /**
- * Constructs a new index exception with the specified formatted detail message.
+ /** Constructs a new index exception with the specified formatted detail message.
*
* @param cause the cause
* @param message the detail message
@@ -133,87 +131,9 @@ case class IndexException(
def this(cause: Throwable, message: String, a1: AnyRef, a2: AnyRef, a3: AnyRef, a4: AnyRef) =
this(formatN(message, a1, a2, a3, a4), cause)
-
- /** Constructs a new index exception with the specified formatted detail message.
- *
- * @param logger a logger to log the message with ERROR level
- * @param cause the cause
- * @param message the detail message
- */
- def this(logger: Logger, cause: Throwable, message: String) = {
- this(message, cause)
- logger.error(getMessage, cause)
- }
-
- /** Constructs a new index exception with the specified formatted detail message.
- *
- * @param logger a logger to log the message with ERROR level
- * @param cause the cause
- * @param message the detail message
- * @param a1 first argument
- */
- def this(logger: Logger, cause: Throwable, message: String, a1: AnyRef) = {
- this(format1(message, a1), cause)
- logger.error(getMessage, cause)
- }
-
- /** Constructs a new index exception with the specified formatted detail message.
- *
- * @param logger a logger to log the message with ERROR level
- * @param cause the cause
- * @param message the detail message
- * @param a1 first argument
- * @param a2 second argument
- */
- def this(logger: Logger, cause: Throwable, message: String, a1: AnyRef, a2: AnyRef) = {
- this(format2(message, a1, a2), cause)
- logger.error(getMessage, cause)
- }
-
- /** Constructs a new index exception with the specified formatted detail message.
- *
- * @param logger a logger to log the message with ERROR level
- * @param cause the cause
- * @param message the detail message
- * @param a1 first argument
- * @param a2 second argument
- * @param a3 third argument
- */
- def this(
- logger: Logger,
- cause: Throwable,
- message: String,
- a1: AnyRef,
- a2: AnyRef,
- a3: AnyRef) = {
- this(formatN(message, a1, a2, a3), cause)
- logger.error(getMessage, cause)
- }
-
- /** Constructs a new index exception with the specified formatted detail message.
- *
- * @param logger a logger to log the message with ERROR level
- * @param cause the cause
- * @param message the detail message
- * @param a1 first argument
- * @param a2 second argument
- * @param a3 third argument
- * @param a4 fourth argument
- */
- def this(
- logger: Logger,
- cause: Throwable,
- message: String,
- a1: AnyRef,
- a2: AnyRef,
- a3: AnyRef,
- a4: AnyRef) = {
- this(formatN(message, a1, a2, a3, a4), cause)
- logger.error(getMessage, cause)
- }
-
}
+/** Companion object for [[IndexException]]. */
object IndexException {
private def format1(message: String, arg: AnyRef): String = {
diff --git a/plugin/src/main/scala/com/stratio/cassandra/lucene/IndexOptions.scala b/plugin/src/main/scala/com/stratio/cassandra/lucene/IndexOptions.scala
index 23ce4032d..6e3dcee8c 100644
--- a/plugin/src/main/scala/com/stratio/cassandra/lucene/IndexOptions.scala
+++ b/plugin/src/main/scala/com/stratio/cassandra/lucene/IndexOptions.scala
@@ -19,13 +19,14 @@ import java.io.File
import java.nio.file.{Path, Paths}
import com.stratio.cassandra.lucene.IndexOptions._
-import com.stratio.cassandra.lucene.column.ColumnsMapper
+import com.stratio.cassandra.lucene.partitioning.{PartitionerOnNone, Partitioner}
import com.stratio.cassandra.lucene.schema.{Schema, SchemaBuilder}
+import com.stratio.cassandra.lucene.util.SchemaValidator
import org.apache.cassandra.config.CFMetaData
import org.apache.cassandra.db.Directories
import org.apache.cassandra.schema.IndexMetadata
-import scala.collection.JavaConversions._
+import scala.collection.JavaConverters._
/** Index user-specified configuration options parser.
*
@@ -35,7 +36,7 @@ import scala.collection.JavaConversions._
*/
class IndexOptions(tableMetadata: CFMetaData, indexMetadata: IndexMetadata) {
- val options: Map[String, String] = indexMetadata.options.toMap
+ val options = indexMetadata.options.asScala.toMap
/** The Lucene index searcher refresh frequency, in seconds */
val refreshSeconds = parseRefresh(options)
@@ -61,10 +62,14 @@ class IndexOptions(tableMetadata: CFMetaData, indexMetadata: IndexMetadata) {
/** The mapping schema */
val schema = parseSchema(options, tableMetadata)
+ /** The index partitioner */
+ val partitioner = parsePartitioner(options, tableMetadata)
+
/** The path of the directory where the index files will be stored */
val path = parsePath(options, tableMetadata, Some(indexMetadata))
}
+/** Companion object for [[IndexOptions]]. */
object IndexOptions {
val REFRESH_SECONDS_OPTION = "refresh_seconds"
@@ -93,14 +98,16 @@ object IndexOptions {
val SCHEMA_OPTION = "schema"
- /**
- * Validates the specified index options.
+ val PARTITIONER_OPTION = "partitioner"
+ val DEFAULT_PARTITIONER = PartitionerOnNone()
+
+ /** Validates the specified index options.
*
* @param options the options to be validated
* @param metadata the indexed table metadata
*/
def validate(options: java.util.Map[String, String], metadata: CFMetaData) {
- val o = options.toMap
+ val o = options.asScala.toMap
parseRefresh(o)
parseRamBufferMB(o)
parseMaxMergeMB(o)
@@ -110,6 +117,7 @@ object IndexOptions {
parseExcludedDataCenters(o)
parseSchema(o, metadata)
parsePath(o, metadata, None)
+ parsePartitioner(o, metadata)
}
def parseRefresh(options: Map[String, String]): Double = {
@@ -160,17 +168,24 @@ object IndexOptions {
options.get(SCHEMA_OPTION).map(
value => try {
val schema = SchemaBuilder.fromJson(value).build
- for (mapper <- schema.mappers.values; column <- mapper.mappedColumns) {
- ColumnsMapper.validate(table, column, mapper.field, mapper.supportedTypes)
- }
+ SchemaValidator.validate(schema, table)
schema
} catch {
- case e: Exception => throw new IndexException(
- e,
+ case e: Exception => throw new IndexException(e,
s"'$SCHEMA_OPTION' is invalid : ${e.getMessage}")
}).getOrElse(throw new IndexException(s"'$SCHEMA_OPTION' is required"))
}
+ def parsePartitioner(options: Map[String, String], table: CFMetaData): Partitioner = {
+ options.get(PARTITIONER_OPTION).map(
+ value => try {
+ Partitioner.fromJson(value)
+ } catch {
+ case e: Exception => throw new IndexException(e,
+ s"'$PARTITIONER_OPTION' is invalid : ${e.getMessage}")
+ }).getOrElse(DEFAULT_PARTITIONER)
+ }
+
private def parseInt(options: Map[String, String], name: String, default: Int): Int = {
options.get(name).map(
string => try string.toInt catch {
@@ -208,4 +223,6 @@ object IndexOptions {
throw new IndexException(s"'$name' must be strictly positive, found: $double")
}).getOrElse(default)
}
+
+
}
diff --git a/plugin/src/main/scala/com/stratio/cassandra/lucene/IndexPagingState.scala b/plugin/src/main/scala/com/stratio/cassandra/lucene/IndexPagingState.scala
index 83f75f9bd..dbd7d8b69 100644
--- a/plugin/src/main/scala/com/stratio/cassandra/lucene/IndexPagingState.scala
+++ b/plugin/src/main/scala/com/stratio/cassandra/lucene/IndexPagingState.scala
@@ -16,12 +16,11 @@
package com.stratio.cassandra.lucene
import java.nio.ByteBuffer
-import java.{util => java}
import com.google.common.base.MoreObjects
import com.stratio.cassandra.lucene.IndexPagingState._
import com.stratio.cassandra.lucene.search.SearchBuilder
-import com.stratio.cassandra.lucene.util.{ByteBufferUtils, SimplePartitionIterator, SimpleRowIterator}
+import com.stratio.cassandra.lucene.util.{ByteBufferUtils, SimplePartitionIterator, SingleRowIterator}
import org.apache.cassandra.config.DatabaseDescriptor
import org.apache.cassandra.db._
import org.apache.cassandra.db.filter.RowFilter
@@ -30,7 +29,8 @@ import org.apache.cassandra.db.partitions.PartitionIterator
import org.apache.cassandra.service.LuceneStorageProxy
import org.apache.cassandra.service.pager.PagingState
-import scala.collection.JavaConversions._
+import scala.collection.JavaConverters._
+import scala.collection.mutable
/** The paging state of a CQL query using Lucene. It tracks the primary keys of the last seen rows
* for each internal read command of a CQL query. It also keeps the count of the remaining rows.
@@ -45,7 +45,7 @@ class IndexPagingState(var remaining: Int) {
private var hasMorePages: Boolean = true
/** The last row positions */
- private val entries = new java.HashMap[DecoratedKey, Clustering]
+ private val entries = mutable.LinkedHashMap.empty[DecoratedKey, Clustering]
/** Returns the primary key of the last seen row for the specified read command.
*
@@ -64,15 +64,14 @@ class IndexPagingState(var remaining: Int) {
private def indexExpression(command: ReadCommand): RowFilter.Expression = {
// Try with custom expressions
- command.rowFilter.getExpressions.find(_.isCustom).foreach(return _)
+ command.rowFilter.getExpressions.asScala.find(_.isCustom).foreach(return _)
// Try with dummy column
val cfs = Keyspace.open(command.metadata.ksName).getColumnFamilyStore(command.metadata.cfName)
- for (expr <- command.rowFilter.getExpressions) {
- for (index <- cfs.indexManager.listIndexes) {
- if (index.isInstanceOf[Index] && index.supportsExpression(
- expr.column,
- expr.operator)) return expr
+ for (expr <- command.rowFilter.getExpressions.asScala) {
+ for (index <- cfs.indexManager.listIndexes.asScala) {
+ if (index.isInstanceOf[Index] && index.supportsExpression(expr.column, expr.operator))
+ return expr
}
}
throw new IndexException("Not found expression")
@@ -86,7 +85,7 @@ class IndexPagingState(var remaining: Int) {
@throws[ReflectiveOperationException]
def rewrite(query: ReadQuery): Unit = query match {
case group: SinglePartitionReadCommand.Group =>
- group.commands.foreach(rewrite)
+ group.commands.forEach(rewrite)
case read: ReadCommand =>
val expression = indexExpression(read)
val oldValue = expressionValueField.get(expression).asInstanceOf[ByteBuffer]
@@ -116,14 +115,14 @@ class IndexPagingState(var remaining: Int) {
private def update(
group: SinglePartitionReadCommand.Group,
partitions: PartitionIterator): PartitionIterator = {
- val rowIterators = new java.LinkedList[SimpleRowIterator]
+ val rowIterators = mutable.ListBuffer.empty[SingleRowIterator]
var count = 0
- for (partition <- partitions) {
+ for (partition <- partitions.asScala) {
val key = partition.partitionKey
while (partition.hasNext) {
- val newRowIterator = new SimpleRowIterator(partition)
- rowIterators.add(newRowIterator)
- entries.put(key, newRowIterator.row.clustering)
+ val newRowIterator = new SingleRowIterator(partition)
+ rowIterators += newRowIterator
+ entries.put(key, newRowIterator.row.clustering())
if (remaining > 0) remaining -= 1
count += 1
}
@@ -141,19 +140,19 @@ class IndexPagingState(var remaining: Int) {
// Collect query bounds
val rangeMerger = LuceneStorageProxy.rangeMerger(command, consistency)
- val bounds = rangeMerger.map(_.range).toList
+ val bounds = rangeMerger.asScala.map(_.range).toList
- val rowIterators = new java.LinkedList[SimpleRowIterator]
+ val rowIterators = mutable.ListBuffer.empty[SingleRowIterator]
var count = 0
- for (partition <- partitions) {
+ for (partition <- partitions.asScala) {
val key = partition.partitionKey
val bound = bounds.find(_ contains key)
while (partition.hasNext) {
bound.foreach(bound => entries.keys.filter(bound.contains).foreach(entries.remove))
- val newRowIterator = new SimpleRowIterator(partition)
- rowIterators.add(newRowIterator)
+ val newRowIterator = new SingleRowIterator(partition)
+ rowIterators += newRowIterator
val clustering = newRowIterator.row.clustering
entries.put(key, clustering)
if (remaining > 0) remaining -= 1
@@ -175,7 +174,7 @@ class IndexPagingState(var remaining: Int) {
if (hasMorePages) new PagingState(toByteBuffer, null, remaining, remaining) else null
}
- /** @inheritdoc */
+ /** @inheritdoc */
override def toString: String = {
MoreObjects.toStringHelper(this).add("remaining", remaining).add("entries", entries).toString
}
@@ -201,6 +200,7 @@ class IndexPagingState(var remaining: Int) {
}
+/** Companion object for [[IndexPagingState]]. */
object IndexPagingState {
private lazy val expressionValueField = classOf[RowFilter.Expression].getDeclaredField("value")
diff --git a/plugin/src/main/scala/com/stratio/cassandra/lucene/IndexPostProcessor.scala b/plugin/src/main/scala/com/stratio/cassandra/lucene/IndexPostProcessor.scala
new file mode 100644
index 000000000..4ceac9587
--- /dev/null
+++ b/plugin/src/main/scala/com/stratio/cassandra/lucene/IndexPostProcessor.scala
@@ -0,0 +1,187 @@
+/*
+ * Copyright (C) 2014 Stratio (http://stratio.com)
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package com.stratio.cassandra.lucene
+
+import java.util.Collections
+import java.util.function.BiFunction
+
+import com.stratio.cassandra.lucene.IndexPostProcessor._
+import com.stratio.cassandra.lucene.index.RAMIndex
+import com.stratio.cassandra.lucene.search.Search
+import com.stratio.cassandra.lucene.util._
+import org.apache.cassandra.db.SinglePartitionReadCommand.Group
+import org.apache.cassandra.db.partitions.PartitionIterator
+import org.apache.cassandra.db.rows.Row
+import org.apache.cassandra.db.{DecoratedKey, ReadCommand, ReadQuery, SinglePartitionReadCommand}
+import org.apache.lucene.document.{Document, StoredField}
+
+import scala.collection.JavaConverters._
+import scala.collection.mutable
+
+/** Post processes in the coordinator node the results of a distributed search. In other words,
+ * gets the k globally best results from all the k best node-local results.
+ *
+ * @param service the index service
+ * @author Andres de la Pena `adelapena@stratio.com`
+ */
+sealed abstract class IndexPostProcessor[A <: ReadQuery](service: IndexService)
+ extends BiFunction[PartitionIterator, A, PartitionIterator] with Logging with Tracing {
+
+ /** Returns a partition iterator containing the top-k rows of the specified partition iterator
+ * according to the specified search.
+ *
+ * @param partitions a partition iterator
+ * @param search a search defining the ordering
+ * @param limit the number of results to be returned
+ * @param now the operation time in seconds
+ * @return a partition iterator containing only the top-k rows, ordered by the search
+ */
+ protected def process(partitions: PartitionIterator, search: Search, limit: Int, now: Int)
+ : PartitionIterator = {
+ if (search.requiresFullScan) {
+ val rows = collect(partitions)
+ if (search.requiresPostProcessing && rows.nonEmpty) {
+ return top(rows, search, limit, now)
+ }
+ }
+ partitions
+ }
+
+ /** Collects the rows of the specified partition iterator. The iterator gets traversed after this
+ * operation so it can't be reused.
+ *
+ * @param partitions a partition iterator
+ * @return the rows contained in the partition iterator
+ */
+ private def collect(partitions: PartitionIterator): List[(DecoratedKey, SingleRowIterator)] = {
+ val time = TimeCounter.start
+ val rows = mutable.ListBuffer[(DecoratedKey, SingleRowIterator)]()
+ for (partition <- partitions.asScala) {
+ try {
+ val key = partition.partitionKey
+ while (partition.hasNext) {
+ rows += ((key, new SingleRowIterator(partition)))
+ }
+ } finally partition.close()
+ }
+ logger.debug(s"Collected ${rows.size} rows in $time")
+ rows.toList
+ }
+
+ /** Takes the k best rows of the specified rows according to the specified search.
+ *
+ * @param rows the rows to be sorted
+ * @param search a search defining the ordering
+ * @param limit the number of results to be returned
+ * @param now the operation time in seconds
+ * @return a partition iterator with the k best rows according to the search ordering
+ */
+ private def top(
+ rows: List[(DecoratedKey, SingleRowIterator)],
+ search: Search,
+ limit: Int,
+ now: Int): PartitionIterator = {
+
+ val time = TimeCounter.start
+ val index = new RAMIndex(service.schema.analyzer)
+ try {
+
+ // Index collected rows in memory
+ for (id <- rows.indices) {
+ val (key, rowIterator) = rows(id)
+ val row = rowIterator.row
+ val doc = document(key, row, search, now)
+ doc.add(new StoredField(ID_FIELD, id)) // Mark document
+ index.add(doc)
+ }
+
+ // Repeat search to sort partial results
+ val query = search.postProcessingQuery(service.schema)
+ val sort = service.sort(search)
+ val docs = index.search(query, sort, limit, FIELDS_TO_LOAD)
+
+ // Collect and decorate
+ val merged = for ((doc, score) <- docs) yield {
+ val id = doc.get(ID_FIELD).toInt
+ val rowIterator = rows(id)._2
+ rowIterator.decorated(row => service.expressionMapper.decorate(row, score, now))
+ }
+
+ tracer.trace(s"Lucene post-process ${rows.size} collected rows to ${merged.size} rows")
+ logger.debug(s"Post-processed ${rows.size} rows to ${merged.size} rows in $time")
+ new SimplePartitionIterator(merged)
+
+ } finally index.close()
+ }
+
+ /** Returns a [[Document]] representing the specified row with only the fields required to satisfy
+ * the specified [[Search]].
+ *
+ * @param key a partition key
+ * @param row a row
+ * @param search a search
+ * @return a document with just the fields required to satisfy the search
+ */
+ private def document(key: DecoratedKey, row: Row, search: Search, now: Int): Document = {
+ val document = new Document
+ val clustering = row.clustering()
+ val columns = service.columnsMapper.columns(key, row, now)
+ service.keyIndexableFields(key, clustering).foreach(document.add)
+ service.schema.postProcessingIndexableFields(columns, search).forEach(document add _)
+ document
+ }
+}
+
+/** Companion object for [[IndexPostProcessor]]. */
+object IndexPostProcessor {
+
+ val ID_FIELD = "_id"
+ val FIELDS_TO_LOAD: java.util.Set[String] = Collections.singleton(ID_FIELD)
+
+}
+
+/** An [[IndexPostProcessor]] for [[ReadCommand]]s.
+ *
+ * @param service the index service
+ * @author Andres de la Pena `adelapena@stratio.com`
+ */
+class ReadCommandPostProcessor(service: IndexService)
+ extends IndexPostProcessor[ReadCommand](service) {
+
+ /** @inheritdoc */
+ override def apply(partitions: PartitionIterator, command: ReadCommand): PartitionIterator = {
+ if (!partitions.hasNext || command.isInstanceOf[SinglePartitionReadCommand]) return partitions
+ val search = service.expressionMapper.search(command)
+ process(partitions, search, command.limits.count, command.nowInSec)
+ }
+
+}
+
+/** An [[IndexPostProcessor]] for [[Group]] commands.
+ *
+ * @param service the index service
+ * @author Andres de la Pena `adelapena@stratio.com`
+ */
+class GroupPostProcessor(service: IndexService) extends IndexPostProcessor[Group](service) {
+
+ /** @inheritdoc */
+ override def apply(partitions: PartitionIterator, group: Group): PartitionIterator = {
+ if (!partitions.hasNext || group.commands.size <= 1) return partitions
+ val search = service.expressionMapper.search(group.commands.get(0))
+ process(partitions, search, group.limits.count, group.nowInSec)
+ }
+
+}
\ No newline at end of file
diff --git a/plugin/src/main/scala/com/stratio/cassandra/lucene/IndexQueryHandler.scala b/plugin/src/main/scala/com/stratio/cassandra/lucene/IndexQueryHandler.scala
index d3931219b..1ad0b55d5 100644
--- a/plugin/src/main/scala/com/stratio/cassandra/lucene/IndexQueryHandler.scala
+++ b/plugin/src/main/scala/com/stratio/cassandra/lucene/IndexQueryHandler.scala
@@ -19,7 +19,7 @@ import java.nio.ByteBuffer
import java.{util => java}
import com.stratio.cassandra.lucene.IndexQueryHandler._
-import com.stratio.cassandra.lucene.util.TimeCounter
+import com.stratio.cassandra.lucene.util.{Logging, TimeCounter}
import org.apache.cassandra.cql3._
import org.apache.cassandra.cql3.statements.RequestValidations.checkNotNull
import org.apache.cassandra.cql3.statements.{BatchStatement, IndexTarget, ParsedStatement, SelectStatement}
@@ -32,16 +32,16 @@ import org.apache.cassandra.service.{LuceneStorageProxy, QueryState}
import org.apache.cassandra.transport.messages.ResultMessage
import org.apache.cassandra.transport.messages.ResultMessage.{Prepared, Rows}
import org.apache.cassandra.utils.{FBUtilities, MD5Digest}
-import org.slf4j.LoggerFactory
-import scala.collection.JavaConversions._
+import scala.collection.JavaConverters._
+import scala.collection.mutable
/** [[QueryHandler]] to be used with Lucene searches.
*
* @author Andres de la Pena `adelapena@stratio.com`
*/
-class IndexQueryHandler extends QueryHandler {
+class IndexQueryHandler extends QueryHandler with Logging {
type Payload = java.Map[String, ByteBuffer]
@@ -111,14 +111,14 @@ class IndexQueryHandler extends QueryHandler {
statement match {
case select: SelectStatement =>
val expressions = luceneExpressions(select, options)
- if (!expressions.isEmpty) {
- val time = TimeCounter.create.start
+ if (expressions.nonEmpty) {
+ val time = TimeCounter.start
try {
return executeLuceneQuery(select, state, options, expressions)
} catch {
case e: ReflectiveOperationException => throw new IndexException(e)
} finally {
- logger.debug(s"Lucene search total time: ${time.stop}\n")
+ logger.debug(s"Lucene search total time: $time\n")
}
}
case _ =>
@@ -128,22 +128,22 @@ class IndexQueryHandler extends QueryHandler {
def luceneExpressions(
select: SelectStatement,
- options: QueryOptions): java.Map[Expression, Index] = {
- val map = new java.LinkedHashMap[Expression, Index]
+ options: QueryOptions): Map[Expression, Index] = {
+ val map = mutable.LinkedHashMap.empty[Expression, Index]
val expressions = select.getRowFilter(options).getExpressions
val cfs = Keyspace.open(select.keyspace).getColumnFamilyStore(select.columnFamily)
- val indexes = cfs.indexManager.listIndexes.collect { case index: Index => index }
- expressions.foreach {
+ val indexes = cfs.indexManager.listIndexes.asScala.collect { case index: Index => index }
+ expressions.forEach {
case expression: CustomExpression =>
val clazz = expression.getTargetIndex.options.get(IndexTarget.CUSTOM_INDEX_OPTION_NAME)
if (clazz == classOf[Index].getCanonicalName) {
val index = cfs.indexManager.getIndex(expression.getTargetIndex).asInstanceOf[Index]
- map.put(expression, index)
+ map += expression -> index
}
- case expr =>
+ case expr: Expression =>
indexes.filter(_.supportsExpression(expr.column, expr.operator)).foreach(map.put(expr, _))
}
- map
+ map.toMap
}
def execute(statement: CQLStatement, state: QueryState, options: QueryOptions): ResultMessage = {
@@ -156,7 +156,7 @@ class IndexQueryHandler extends QueryHandler {
select: SelectStatement,
state: QueryState,
options: QueryOptions,
- expressions: java.Map[Expression, Index]): ResultMessage = {
+ expressions: Map[Expression, Index]): ResultMessage = {
if (expressions.size > 1) {
throw new InvalidRequestException(
@@ -168,8 +168,7 @@ class IndexQueryHandler extends QueryHandler {
}
// Validate expression
- val expression = expressions.keys.head
- val index = expressions.get(expression)
+ val (expression, index) = expressions.head
val search = index.validate(expression)
// Get paging info
@@ -229,10 +228,9 @@ class IndexQueryHandler extends QueryHandler {
}
}
+/** Companion object for [[IndexQueryHandler]]. */
object IndexQueryHandler {
- val logger = LoggerFactory.getLogger(classOf[IndexQueryHandler])
-
val getPageSize = classOf[SelectStatement].getDeclaredMethod("getPageSize", classOf[QueryOptions])
getPageSize.setAccessible(true)
diff --git a/plugin/src/main/scala/com/stratio/cassandra/lucene/IndexService.scala b/plugin/src/main/scala/com/stratio/cassandra/lucene/IndexService.scala
index 4104b7c19..48d9a17ad 100644
--- a/plugin/src/main/scala/com/stratio/cassandra/lucene/IndexService.scala
+++ b/plugin/src/main/scala/com/stratio/cassandra/lucene/IndexService.scala
@@ -16,34 +16,26 @@
package com.stratio.cassandra.lucene
import java.lang.management.ManagementFactory
-import java.{util => java}
import javax.management.{JMException, ObjectName}
-import com.stratio.cassandra.lucene.IndexService._
-import com.stratio.cassandra.lucene.column.Columns
-import com.stratio.cassandra.lucene.index.{DocumentIterator, FSIndex, RAMIndex}
-import com.stratio.cassandra.lucene.key.{PartitionMapper, TokenMapper}
-import com.stratio.cassandra.lucene.search.{Search, SearchBuilder}
+import com.stratio.cassandra.lucene.index.{DocumentIterator, PartitionedIndex}
+import com.stratio.cassandra.lucene.mapping._
+import com.stratio.cassandra.lucene.search.Search
import com.stratio.cassandra.lucene.util._
-import org.apache.cassandra.config.{CFMetaData, ColumnDefinition}
-import org.apache.cassandra.cql3.Operator
-import org.apache.cassandra.cql3.statements.IndexTarget
+import org.apache.cassandra.config.ColumnDefinition
import org.apache.cassandra.db._
-import org.apache.cassandra.db.filter.RowFilter.CustomExpression
import org.apache.cassandra.db.filter._
-import org.apache.cassandra.db.marshal.UTF8Type
import org.apache.cassandra.db.partitions._
import org.apache.cassandra.db.rows._
import org.apache.cassandra.index.transactions.IndexTransaction
import org.apache.cassandra.schema.IndexMetadata
+import org.apache.cassandra.utils.FBUtilities
import org.apache.cassandra.utils.concurrent.OpOrder
-import org.apache.commons.lang3.StringUtils
-import org.apache.lucene.document.{Document, StoredField}
+import org.apache.lucene.document.Document
import org.apache.lucene.index.{IndexableField, Term}
-import org.apache.lucene.search.{Query, ScoreDoc, Sort, SortField}
-import org.slf4j.LoggerFactory
+import org.apache.lucene.search.{Query, Sort, SortField}
-import scala.collection.JavaConversions._
+import scala.collection.JavaConverters._
import scala.collection.mutable
/** Lucene index service provider.
@@ -53,32 +45,35 @@ import scala.collection.mutable
* @author Andres de la Pena `adelapena@stratio.com`
*/
abstract class IndexService(val table: ColumnFamilyStore, val indexMetadata: IndexMetadata)
- extends IndexServiceMBean {
+ extends IndexServiceMBean with Logging with Tracing {
val metadata = table.metadata
val ksName = metadata.ksName
val cfName = metadata.cfName
- val name = indexMetadata.name
- val column = indexedColumn(indexMetadata)
- val columnDefinition = getColumnDefinition(metadata, column)
- val qualifiedName = s"$ksName.$cfName.$name"
+ val idxName = indexMetadata.name
+ val qualifiedName = s"$ksName.$cfName.$idxName"
// Parse options
val options = new IndexOptions(metadata, indexMetadata)
- // Setup mapping
+ // Setup schema
val schema = options.schema
+ val regulars = metadata.partitionColumns.regulars.asScala.toSet
+ val mappedRegulars = regulars.map(_.name.toString).filter(schema.mappedCells.contains)
+ val mapsMultiCell = regulars.exists(x => x.`type`.isMultiCell && schema.mapsCell(x.name.toString))
+
+ // Setup mapping
val tokenMapper = new TokenMapper
val partitionMapper = new PartitionMapper(metadata)
- val regularCells = metadata.partitionColumns.regulars
- val mappedRegularCells = regularCells.map(_.name.toString).filter(schema.mappedCells.contains)
- val mapsMultiCells = regularCells
- .exists(x => x.`type`.isMultiCell && schema.mapsCell(x.name.toString))
+ val columnsMapper = new ColumnsMapper(schema, metadata)
+ val expressionMapper = ExpressionMapper(metadata, indexMetadata)
// Setup FS index and write queue
val queue = TaskQueue.build(options.indexingThreads, options.indexingQueuesSize)
- val lucene = new FSIndex(
- name,
+ val partitioner = options.partitioner
+ val lucene = new PartitionedIndex(
+ partitioner.numPartitions,
+ idxName,
options.path,
options.schema.analyzer,
options.refreshSeconds,
@@ -96,14 +91,14 @@ abstract class IndexService(val table: ColumnFamilyStore, val indexMetadata: Ind
val sort = new Sort(keySortFields.toArray: _*)
lucene.init(sort, fieldsToLoad)
} catch {
- case e: Exception => logger
- .error(s"Initialization of Lucene FS directory for index '$name' has failed:", e)
+ case e: Exception =>
+ logger.error(s"Initialization of Lucene FS directory for index '$idxName' has failed", e)
}
// Register JMX MBean
try {
val mBeanName = "com.stratio.cassandra.lucene:type=Lucene," +
- s"keyspace=$ksName,table=$cfName,index=$name"
+ s"keyspace=$ksName,table=$cfName,index=$idxName"
mBean = new ObjectName(mBeanName)
ManagementFactory.getPlatformMBeanServer.registerMBean(this, this.mBean)
} catch {
@@ -121,17 +116,9 @@ abstract class IndexService(val table: ColumnFamilyStore, val indexMetadata: Ind
*
* @return the names of the fields to be loaded
*/
- def fieldsToLoad: Set[String]
+ def fieldsToLoad: java.util.Set[String]
- /** Returns a [[Columns]] representing the specified row.
- *
- * @param key the partition key
- * @param row the row
- * @return the columns representing the specified row
- */
- def columns(key: DecoratedKey, row: Row): Columns
-
- def keyIndexableFields(key: DecoratedKey, row: Row): List[IndexableField]
+ def keyIndexableFields(key: DecoratedKey, clustering: Clustering): List[IndexableField]
/** Returns if the specified column definition is mapped by this index.
*
@@ -142,74 +129,22 @@ abstract class IndexService(val table: ColumnFamilyStore, val indexMetadata: Ind
schema.mapsCell(columnDef.name.toString)
}
- /** Returns if the specified expression is targeted to this index
- *
- * @param expression a CQL query expression
- * @return `true` if `expression` is targeted to this index, `false` otherwise
- */
- def supportsExpression(expression: RowFilter.Expression): Boolean = {
- supportsExpression(expression.column, expression.operator)
- }
-
- /** Returns if a CQL expression with the specified column definition and operator is targeted to
- * this index.
- *
- * @param columnDef the expression column definition
- * @param operator the expression operator
- * @return `true` if the expression is targeted to this index, `false` otherwise
- */
- def supportsExpression(columnDef: ColumnDefinition, operator: Operator): Boolean = {
- operator == Operator.EQ && column.contains(columnDef.name.toString)
- }
-
- /** Returns a copy of the specified [[RowFilter]] without any Lucene expressions.
- *
- * @param filter a row filter
- * @return a copy of `filter` without Lucene expressions
- */
- def getPostIndexQueryFilter(filter: RowFilter): RowFilter = {
- if (column.isEmpty) return filter
- filter.foldLeft(filter)((f, e) => if (supportsExpression(e)) f.without(e) else f)
- }
-
/** Returns the validated search contained in the specified expression.
*
* @param expression a custom CQL expression
* @return the validated expression
*/
def validate(expression: RowFilter.Expression): Search = {
- val value = expression match {
- case c: CustomExpression => c.getValue
- case _ => expression.getIndexValue
- }
- val json = UTF8Type.instance.compose(value)
- val search = SearchBuilder.fromJson(json).build
- search.validate(schema)
- }
-
- /** Returns the Lucene document representing the specified row. Only the fields required by the
- * post processing phase of the specified search will be added.
- *
- * @param key the partition key
- * @param row the row
- * @param search a search
- * @return a document
- */
- def document(key: DecoratedKey, row: Row, search: Search): Document = {
- val doc = new Document
- val cols = columns(key, row)
- keyIndexableFields(key, row).foreach(doc.add)
- schema.postProcessingIndexableFields(cols, search).foreach(doc.add)
- doc
+ expressionMapper.search(expression).validate(schema)
}
/** Returns a Lucene term uniquely identifying the specified row.
*
- * @param key the partition key
- * @param row the row
+ * @param key the partition key
+ * @param clustering the clustering key
* @return a Lucene identifying term
*/
- def term(key: DecoratedKey, row: Row): Term
+ def term(key: DecoratedKey, clustering: Clustering): Term
/** Returns a Lucene term identifying documents representing all the row's which are in the
* partition the specified [[DecoratedKey]].
@@ -229,7 +164,7 @@ abstract class IndexService(val table: ColumnFamilyStore, val indexMetadata: Ind
* @return `true` if read-before-write is required, `false` otherwise
*/
def needsReadBeforeWrite(key: DecoratedKey, row: Row): Boolean = {
- mapsMultiCells || !row.columns.map(_.name.toString).containsAll(mappedRegularCells)
+ mapsMultiCell || !mappedRegulars.subsetOf(row.columns.asScala.map(_.name.toString).toSet)
}
/** Returns the [[DecoratedKey]] contained in the specified Lucene document.
@@ -275,33 +210,39 @@ abstract class IndexService(val table: ColumnFamilyStore, val indexMetadata: Ind
/** Upserts the specified row.
*
* @param key the partition key
- * @param row the row to be upserted
+ * @param row the row
* @param nowInSec now in seconds
*/
def upsert(key: DecoratedKey, row: Row, nowInSec: Int) {
queue.submitAsynchronous(
key, () => {
- val t = term(key, row)
- val cols = columns(key, row).withoutDeleted(nowInSec)
- val fields = schema.indexableFields(cols)
+ val partition = partitioner.partition(key)
+ val clustering = row.clustering()
+ val term = this.term(key, clustering)
+ val columns = columnsMapper.columns(key, row, nowInSec)
+ val fields = schema.indexableFields(columns)
if (fields.isEmpty) {
- lucene.delete(t)
+ lucene.delete(partition, term)
} else {
- val doc = new Document()
- keyIndexableFields(key, row).foreach(doc.add)
- fields.foreach(doc.add)
- lucene.upsert(t, doc)
+ val doc = new Document
+ keyIndexableFields(key, clustering).foreach(doc.add)
+ fields.forEach(doc add _)
+ lucene.upsert(partition, term, doc)
}
})
}
/** Deletes the partition identified by the specified key.
*
- * @param key the partition key
- * @param row the row to be deleted
+ * @param key the partition key
+ * @param clustering the clustering key
*/
- def delete(key: DecoratedKey, row: Row) {
- queue.submitAsynchronous(key, () => lucene.delete(term(key, row)))
+ def delete(key: DecoratedKey, clustering: Clustering) {
+ queue.submitAsynchronous(key, () => {
+ val partition = partitioner.partition(key)
+ val term = this.term(key, clustering)
+ lucene.delete(partition, term)
+ })
}
/** Deletes the partition identified by the specified key.
@@ -309,7 +250,11 @@ abstract class IndexService(val table: ColumnFamilyStore, val indexMetadata: Ind
* @param key the partition key
*/
def delete(key: DecoratedKey) {
- queue.submitAsynchronous(key, () => lucene.delete(term(key)))
+ queue.submitAsynchronous(key, () => {
+ val partition = partitioner.partition(key)
+ val term = this.term(key)
+ lucene.delete(partition, term)
+ })
}
/** Returns a new index searcher for the specified read command.
@@ -323,45 +268,26 @@ abstract class IndexService(val table: ColumnFamilyStore, val indexMetadata: Ind
controller: ReadExecutionController): UnfilteredPartitionIterator = {
// Parse search
- Tracer.trace("Building Lucene search")
- val expr = expression(command)
- val search = SearchBuilder.fromJson(expr).build
- val q = search.query(schema, query(command).orNull)
- val a = after(search.paging, command)
- val s = sort(search)
- val n = command.limits.count
+ tracer.trace("Building Lucene search")
+ val search = expressionMapper.search(command)
+ val query = search.query(schema, this.query(command).orNull)
+ val after = this.after(search.paging, command)
+ val sort = this.sort(search)
+ val count = command.limits.count
// Refresh if required
if (search.refresh) {
- Tracer.trace("Refreshing Lucene index searcher")
+ tracer.trace("Refreshing Lucene index searcher")
refresh()
}
// Search
- Tracer.trace(s"Lucene index searching for $n rows")
- val documents = lucene.search(a, q, s, n)
+ tracer.trace(s"Lucene index searching for $count rows")
+ val partitions = partitioner.partitions(command)
+ val documents = lucene.search(partitions, after, query, sort, count)
reader(documents, command, controller)
}
- def search(command: ReadCommand): Search = {
- SearchBuilder.fromJson(expression(command)).build
- }
-
- def search(group: SinglePartitionReadCommand.Group): Search = {
- SearchBuilder.fromJson(expression(group)).build
- }
-
- def expression(command: ReadCommand): String = {
- command.rowFilter.getExpressions.collect {
- case e: CustomExpression if name == e.getTargetIndex.name => e.getValue
- case e if supportsExpression(e) => e.getIndexValue
- }.map(UTF8Type.instance.compose).head
- }
-
- def expression(group: SinglePartitionReadCommand.Group): String = {
- expression(group.commands.head)
- }
-
/** Returns the key range query represented by the specified read command.
*
* @param command the read command
@@ -413,45 +339,15 @@ abstract class IndexService(val table: ColumnFamilyStore, val indexMetadata: Ind
def sort(search: Search): Sort = {
val sortFields = mutable.ListBuffer[SortField]()
if (search.usesSorting) {
- sortFields.addAll(search.sortFields(schema))
+ sortFields ++= search.sortFields(schema).asScala
}
if (search.usesRelevance) {
- sortFields.add(SortField.FIELD_SCORE)
+ sortFields += SortField.FIELD_SCORE
}
- sortFields.addAll(keySortFields)
+ sortFields ++= keySortFields
new Sort(sortFields.toArray: _*)
}
- /** Retrieves from the local storage the rows in the specified partition slice.
- *
- * @param key the partition key
- * @param clusterings the clustering keys
- * @param nowInSec max allowed time in seconds
- * @return a row iterator
- */
- def read(
- key: DecoratedKey,
- clusterings: java.NavigableSet[Clustering],
- nowInSec: Int): UnfilteredRowIterator = {
- val filter = new ClusteringIndexNamesFilter(clusterings, false)
- val columnFilter = ColumnFilter.all(metadata)
- val command = SinglePartitionReadCommand.create(metadata, nowInSec, key, columnFilter, filter)
- val controller = command.executionController()
- try command.queryMemtableAndDisk(table, controller) finally controller.close()
- }
-
- /** Retrieves from the local storage all the rows in the specified partition.
- *
- * @param key the partition key
- * @param nowInSec max allowed time in seconds
- * @return a row iterator
- */
- def read(key: DecoratedKey, nowInSec: Int): UnfilteredRowIterator = {
- val clusterings = new java.TreeSet[Clustering](metadata.comparator)
- clusterings.add(Clustering.EMPTY)
- read(key, clusterings, nowInSec)
- }
-
/** Reads from the local SSTables the rows identified by the specified search.
*
* @param documents the Lucene documents
@@ -464,165 +360,51 @@ abstract class IndexService(val table: ColumnFamilyStore, val indexMetadata: Ind
command: ReadCommand,
controller: ReadExecutionController): IndexReader
- /** Post processes in the coordinator node the results of a distributed search. In other words,
- * gets the k globally best results from all the k best node-local results.
- *
- * @param partitions the node results iterator
- * @param group the read command group
- * @return the k globally best results
- */
- def postProcess(
- partitions: PartitionIterator,
- group: SinglePartitionReadCommand.Group): PartitionIterator = {
- if (group.commands.size <= 1) return partitions //Only one partition is involved
- postProcess(partitions, search(group), group.limits.count, group.nowInSec)
- }
-
- /** Post processes in the coordinator node the results of a distributed search. In other words,
- * gets the k globally best results from all the k best node-local results.
- *
- * @param partitions the node results iterator
- * @param command the read command
- * @return the k globally best results
- */
- def postProcess(
- partitions: PartitionIterator,
- command: ReadCommand): PartitionIterator = command match {
- case c: SinglePartitionReadCommand => partitions
- case _ => postProcess(partitions, search(command), command.limits.count, command.nowInSec)
- }
-
- def postProcess(
- partitions: PartitionIterator,
- search: Search,
- limit: Int,
- nowInSec: Int): PartitionIterator = {
- if (search.requiresFullScan) {
- val rows = collect(partitions)
- if (search.requiresPostProcessing && rows.nonEmpty) {
- return merge(search, limit, nowInSec, rows)
- }
- }
- partitions
- }
-
- def collect(partitions: PartitionIterator): Seq[(DecoratedKey, SimpleRowIterator)] = {
- val rows = new java.LinkedList[(DecoratedKey, SimpleRowIterator)]
- val time = TimeCounter.create.start
- for (partition <- partitions) {
- try {
- val key = partition.partitionKey
- while (partition.hasNext) {
- rows.add((key, new SimpleRowIterator(partition)))
- }
- } finally partition.close()
- }
- logger.debug(s"Collected ${rows.size} rows in ${time.stop}")
- rows
- }
-
- def merge(
- search: Search,
- limit: Int,
- nowInSec: Int,
- rows: Seq[(DecoratedKey, SimpleRowIterator)]): PartitionIterator = {
-
- val time = TimeCounter.create.start
- val field = "_id"
- val index = new RAMIndex(schema.analyzer)
- try {
-
- // Index collected rows in memory
- for (id <- rows.indices) {
- val (key, rowIterator) = rows(id)
- val row = rowIterator.row
- val doc = document(key, row, search)
- doc.add(new StoredField(field, id)) // Mark document
- index.add(doc)
- }
-
- // Repeat search to sort partial results
- val docs = index.search(search.postProcessingQuery(schema), sort(search), limit, Set(field))
-
- // Collect and decorate
- val merged = for ((doc, score) <- docs) yield {
- val id = doc.get(field).toInt
- val rowIterator = rows.get(id)._2
- rowIterator.decorated(row => decorate(row, score, nowInSec))
- }
-
- Tracer.trace(s"Lucene post-process ${rows.size} collected rows to ${merged.size} rows")
- logger.debug(s"Post-processed ${rows.size} rows to ${merged.size} rows in ${time.stop}")
- new SimplePartitionIterator(merged)
-
- } finally index.close()
- }
-
- def decorate(row: Row, score: ScoreDoc, nowInSec: Int): Row = {
-
- // Skip if there is no base column or score
- if (columnDefinition.isEmpty) return row
-
- // Copy row
- val builder = BTreeRow.unsortedBuilder(nowInSec)
- builder.newRow(row.clustering)
- builder.addRowDeletion(row.deletion)
- builder.addPrimaryKeyLivenessInfo(row.primaryKeyLivenessInfo)
- row.cells.foreach(builder.addCell)
-
- // Add score cell
- val timestamp = row.primaryKeyLivenessInfo.timestamp
- val scoreCellValue = UTF8Type.instance.decompose(score.score.toString)
- builder.addCell(BufferCell.live(columnDefinition.get, timestamp, scoreCellValue))
-
- builder.build
- }
-
/** Ensures that values present in a partition update are valid according to the schema.
*
* @param update the partition update containing the values to be validated
*/
def validate(update: PartitionUpdate) {
val key = update.partitionKey
- update.foreach(row => schema.validate(columns(key, row)))
+ val now = FBUtilities.nowInSeconds
+ update.forEach(row => schema.validate(columnsMapper.columns(key, row, now)))
}
- /** @inheritdoc*/
+ /** @inheritdoc */
override def commit() {
queue.submitSynchronous(lucene.commit)
}
- /** @inheritdoc*/
- override def getNumDocs: Int = {
+ /** @inheritdoc */
+ override def getNumDocs: Long = {
lucene.getNumDocs
}
- /** @inheritdoc*/
- override def getNumDeletedDocs: Int = {
+ /** @inheritdoc */
+ override def getNumDeletedDocs: Long = {
lucene.getNumDeletedDocs
}
- /** @inheritdoc*/
+ /** @inheritdoc */
override def forceMerge(maxNumSegments: Int, doWait: Boolean) {
queue.submitSynchronous(() => lucene.forceMerge(maxNumSegments, doWait))
}
- /** @inheritdoc*/
+ /** @inheritdoc */
override def forceMergeDeletes(doWait: Boolean) {
queue.submitSynchronous(() => lucene.forceMergeDeletes(doWait))
}
- /** @inheritdoc*/
+ /** @inheritdoc */
override def refresh() {
queue.submitSynchronous(lucene.refresh)
}
}
+/** Companion object for [[IndexService]]. */
object IndexService {
- val logger = LoggerFactory.getLogger(classOf[IndexService])
-
/** Returns a new index service for the specified indexed table and index metadata.
*
* @param table the indexed table
@@ -637,12 +419,4 @@ object IndexService {
}
}
- def indexedColumn(indexMetadata: IndexMetadata): Option[String] = {
- Option(indexMetadata.options.get(IndexTarget.TARGET_OPTION_NAME)).filterNot(StringUtils.isBlank)
- }
-
- def getColumnDefinition(metadata: CFMetaData, name: Option[String]): Option[ColumnDefinition] = {
- name.flatMap(name => metadata.allColumns.find(_.name.toString == name))
- }
-
}
\ No newline at end of file
diff --git a/plugin/src/main/scala/com/stratio/cassandra/lucene/IndexServiceMBean.scala b/plugin/src/main/scala/com/stratio/cassandra/lucene/IndexServiceMBean.scala
index 8a7e4f918..d006ea330 100644
--- a/plugin/src/main/scala/com/stratio/cassandra/lucene/IndexServiceMBean.scala
+++ b/plugin/src/main/scala/com/stratio/cassandra/lucene/IndexServiceMBean.scala
@@ -28,13 +28,13 @@ trait IndexServiceMBean {
*
* @return the number of documents
*/
- def getNumDocs: Int
+ def getNumDocs: Long
/** Returns the total number of deleted documents in this index.
*
* @return the number of deleted documents
*/
- def getNumDeletedDocs: Int
+ def getNumDeletedDocs: Long
/** Optimizes the index forcing merge segments leaving the specified number of segments. This
* operation may block until all merging completes.
diff --git a/plugin/src/main/scala/com/stratio/cassandra/lucene/IndexServiceSkinny.scala b/plugin/src/main/scala/com/stratio/cassandra/lucene/IndexServiceSkinny.scala
index fbe9287f6..c9dba97e8 100644
--- a/plugin/src/main/scala/com/stratio/cassandra/lucene/IndexServiceSkinny.scala
+++ b/plugin/src/main/scala/com/stratio/cassandra/lucene/IndexServiceSkinny.scala
@@ -15,13 +15,12 @@
*/
package com.stratio.cassandra.lucene
-import com.stratio.cassandra.lucene.column.{Columns, ColumnsMapper}
+import com.google.common.collect.Sets
import com.stratio.cassandra.lucene.index.DocumentIterator
-import com.stratio.cassandra.lucene.key.PartitionMapper
+import com.stratio.cassandra.lucene.mapping.PartitionMapper
import org.apache.cassandra.db.PartitionPosition.Kind._
import org.apache.cassandra.db._
import org.apache.cassandra.db.filter.ClusteringIndexFilter
-import org.apache.cassandra.db.rows.Row
import org.apache.cassandra.index.transactions.IndexTransaction
import org.apache.cassandra.schema.IndexMetadata
import org.apache.cassandra.utils.concurrent.OpOrder
@@ -35,13 +34,13 @@ import org.apache.lucene.search.{Query, SortField, TermQuery}
* @author Andres de la Pena `adelapena@stratio.com`
*/
class IndexServiceSkinny(table: ColumnFamilyStore, index: IndexMetadata)
- extends IndexService(table, index) {
+ extends IndexService(table, index) {
init()
/** @inheritdoc */
- override def fieldsToLoad: Set[String] = {
- Set(PartitionMapper.FIELD_NAME)
+ override def fieldsToLoad: java.util.Set[String] = {
+ Sets.newHashSet(PartitionMapper.FIELD_NAME)
}
/** @inheritdoc */
@@ -59,17 +58,13 @@ class IndexServiceSkinny(table: ColumnFamilyStore, index: IndexMetadata)
}
/** @inheritdoc */
- override def columns(key: DecoratedKey, row: Row): Columns = {
- Columns() + partitionMapper.columns(key) + ColumnsMapper.columns(row)
- }
-
- /** @inheritdoc */
- override def keyIndexableFields(key: DecoratedKey, row: Row): List[IndexableField] = {
+ override def keyIndexableFields(key: DecoratedKey, clustering: Clustering)
+ : List[IndexableField] = {
List(tokenMapper.indexableField(key), partitionMapper.indexableField(key))
}
/** @inheritdoc */
- override def term(key: DecoratedKey, row: Row): Term = {
+ override def term(key: DecoratedKey, clustering: Clustering): Term = {
partitionMapper.term(key)
}
diff --git a/plugin/src/main/scala/com/stratio/cassandra/lucene/IndexServiceWide.scala b/plugin/src/main/scala/com/stratio/cassandra/lucene/IndexServiceWide.scala
index 96f6c7c27..cc3c01f2d 100644
--- a/plugin/src/main/scala/com/stratio/cassandra/lucene/IndexServiceWide.scala
+++ b/plugin/src/main/scala/com/stratio/cassandra/lucene/IndexServiceWide.scala
@@ -15,16 +15,13 @@
*/
package com.stratio.cassandra.lucene
-import java.{util => java}
-
-import com.stratio.cassandra.lucene.column.{Columns, ColumnsMapper}
+import com.google.common.collect.Sets
import com.stratio.cassandra.lucene.index.DocumentIterator
-import com.stratio.cassandra.lucene.key.ClusteringMapper._
-import com.stratio.cassandra.lucene.key.{ClusteringMapper, KeyMapper, PartitionMapper}
+import com.stratio.cassandra.lucene.mapping.ClusteringMapper._
+import com.stratio.cassandra.lucene.mapping.{ClusteringMapper, KeyMapper, PartitionMapper}
import org.apache.cassandra.db.PartitionPosition.Kind._
import org.apache.cassandra.db._
import org.apache.cassandra.db.filter._
-import org.apache.cassandra.db.rows.Row
import org.apache.cassandra.index.transactions.IndexTransaction
import org.apache.cassandra.schema.IndexMetadata
import org.apache.cassandra.utils.concurrent.OpOrder
@@ -33,7 +30,7 @@ import org.apache.lucene.index.{IndexableField, Term}
import org.apache.lucene.search.BooleanClause.Occur._
import org.apache.lucene.search.{BooleanQuery, Query, SortField}
-import scala.collection.JavaConversions._
+import scala.collection.mutable
/** [[IndexService]] for wide rows.
*
@@ -50,8 +47,8 @@ class IndexServiceWide(table: ColumnFamilyStore, index: IndexMetadata)
init()
/** @inheritdoc */
- override def fieldsToLoad: Set[String] = {
- Set(PartitionMapper.FIELD_NAME, ClusteringMapper.FIELD_NAME)
+ override def fieldsToLoad: java.util.Set[String] = {
+ Sets.newHashSet(PartitionMapper.FIELD_NAME, ClusteringMapper.FIELD_NAME)
}
/** @inheritdoc */
@@ -78,41 +75,24 @@ class IndexServiceWide(table: ColumnFamilyStore, index: IndexMetadata)
}
/** @inheritdoc */
- override def columns(key: DecoratedKey, row: Row): Columns = {
- Columns()
- .add(partitionMapper.columns(key))
- .add(clusteringMapper.columns(row.clustering))
- .add(ColumnsMapper.columns(row))
- }
-
- /** @inheritdoc */
- override def keyIndexableFields(key: DecoratedKey, row: Row): List[IndexableField] = {
- val clustering = row.clustering
- val fields = new java.LinkedList[IndexableField]()
- fields.add(tokenMapper.indexableField(key))
- fields.add(partitionMapper.indexableField(key))
- fields.add(keyMapper.indexableField(key, clustering))
- fields.addAll(clusteringMapper.indexableFields(key, clustering))
+ override def keyIndexableFields(key: DecoratedKey, clustering: Clustering)
+ : List[IndexableField] = {
+ val fields = mutable.ListBuffer.empty[IndexableField]
+ fields += tokenMapper.indexableField(key)
+ fields += partitionMapper.indexableField(key)
+ fields += keyMapper.indexableField(key, clustering)
+ fields ++= clusteringMapper.indexableFields(key, clustering)
fields.toList
}
/** @inheritdoc */
- override def term(key: DecoratedKey, row: Row): Term = term(key, row.clustering)
-
- /** Returns a Lucene term identifying the document representing the row identified by the
- * specified partition and clustering keys.
- *
- * @param key the partition key
- * @param clustering the clustering key
- * @return the term identifying the document
- */
def term(key: DecoratedKey, clustering: Clustering): Term = {
keyMapper.term(key, clustering)
}
/** @inheritdoc */
override def query(key: DecoratedKey, filter: ClusteringIndexFilter): Query = filter match {
- case f if f.selectsAllPartition() => partitionMapper.query(key)
+ case f if f.selectsAllPartition => partitionMapper.query(key)
case f: ClusteringIndexNamesFilter => keyMapper.query(key, f)
case f: ClusteringIndexSliceFilter => clusteringMapper.query(key, f)
case _ => throw new IndexException(s"Unknown filter type $filter")
@@ -170,7 +150,7 @@ class IndexServiceWide(table: ColumnFamilyStore, index: IndexMetadata)
// Return query, or empty if there are no restrictions
val booleanQuery = builder.build
- if (booleanQuery.clauses.nonEmpty) Some(booleanQuery) else None
+ if (booleanQuery.clauses.isEmpty) None else Some(booleanQuery)
}
/** @inheritdoc */
diff --git a/plugin/src/main/scala/com/stratio/cassandra/lucene/IndexWriter.scala b/plugin/src/main/scala/com/stratio/cassandra/lucene/IndexWriter.scala
index d0b862de0..67b04d50c 100644
--- a/plugin/src/main/scala/com/stratio/cassandra/lucene/IndexWriter.scala
+++ b/plugin/src/main/scala/com/stratio/cassandra/lucene/IndexWriter.scala
@@ -15,12 +15,13 @@
*/
package com.stratio.cassandra.lucene
-import org.apache.cassandra.db.rows.Row
-import org.apache.cassandra.db.{DecoratedKey, DeletionTime, RangeTombstone}
+import com.stratio.cassandra.lucene.util.{Logging, Tracing}
+import org.apache.cassandra.db._
+import org.apache.cassandra.db.filter.{ClusteringIndexNamesFilter, ColumnFilter}
+import org.apache.cassandra.db.rows.{Row, UnfilteredRowIterator}
+import org.apache.cassandra.index.Index.Indexer
import org.apache.cassandra.index.transactions.IndexTransaction
import org.apache.cassandra.utils.concurrent.OpOrder
-import org.slf4j.LoggerFactory
-import org.apache.cassandra.index.Index.Indexer
/** [[Indexer]] for Lucene-based index.
*
@@ -31,42 +32,44 @@ import org.apache.cassandra.index.Index.Indexer
* @param transactionType what kind of update is being performed on the base data
* @author Andres de la Pena `adelapena@stratio.com`
*/
-abstract class IndexWriter(service: IndexService,
- key: DecoratedKey,
- nowInSec: Int,
- opGroup: OpOrder.Group,
- transactionType: IndexTransaction.Type) extends Indexer {
+abstract class IndexWriter(
+ service: IndexService,
+ key: DecoratedKey,
+ nowInSec: Int,
+ opGroup: OpOrder.Group,
+ transactionType: IndexTransaction.Type) extends Indexer with Logging with Tracing {
- protected val logger = LoggerFactory.getLogger(classOf[IndexWriter])
+ val metadata = service.metadata
+ val table = service.table
- /** @inheritdoc */
+ /** @inheritdoc*/
override def begin() {
}
- /** @inheritdoc */
+ /** @inheritdoc*/
override def partitionDelete(deletionTime: DeletionTime) {
logger.trace(s"Delete partition during $transactionType: $deletionTime")
delete()
}
- /** @inheritdoc */
+ /** @inheritdoc*/
override def rangeTombstone(tombstone: RangeTombstone) {
logger.trace(s"Range tombstone during $transactionType: $tombstone")
}
- /** @inheritdoc */
+ /** @inheritdoc*/
override def insertRow(row: Row): Unit = {
logger.trace(s"Insert rows during $transactionType: $row")
index(row)
}
- /** @inheritdoc */
+ /** @inheritdoc*/
override def updateRow(oldRowData: Row, newRowData: Row): Unit = {
logger.trace(s"Update row during $transactionType: $oldRowData TO $newRowData")
index(newRowData)
}
- /** @inheritdoc */
+ /** @inheritdoc*/
override def removeRow(row: Row): Unit = {
logger.trace(s"Remove row during $transactionType: $row")
index(row)
@@ -80,4 +83,37 @@ abstract class IndexWriter(service: IndexService,
* @param row the row to be indexed.
*/
protected def index(row: Row)
-}
+
+ /** Retrieves from the local storage all the rows in the specified partition.
+ *
+ * @param key the partition key
+ * @return a row iterator
+ */
+ protected def read(key: DecoratedKey): UnfilteredRowIterator = {
+ read(SinglePartitionReadCommand.fullPartitionRead(metadata, nowInSec, key))
+ }
+
+ /** Retrieves from the local storage the rows in the specified partition slice.
+ *
+ * @param key the partition key
+ * @param clusterings the clustering keys
+ * @return a row iterator
+ */
+ protected def read(key: DecoratedKey, clusterings: java.util.NavigableSet[Clustering])
+ : UnfilteredRowIterator = {
+ val filter = new ClusteringIndexNamesFilter(clusterings, false)
+ val columnFilter = ColumnFilter.all(metadata)
+ read(SinglePartitionReadCommand.create(metadata, nowInSec, key, columnFilter, filter))
+ }
+
+ /** Retrieves from the local storage the rows satisfying the specified read command.
+ *
+ * @param command a single partition read command
+ * @return a row iterator
+ */
+ protected def read(command: SinglePartitionReadCommand): UnfilteredRowIterator = {
+ val controller = command.executionController
+ try command.queryMemtableAndDisk(table, controller) finally controller.close()
+ }
+
+}
\ No newline at end of file
diff --git a/plugin/src/main/scala/com/stratio/cassandra/lucene/IndexWriterSkinny.scala b/plugin/src/main/scala/com/stratio/cassandra/lucene/IndexWriterSkinny.scala
index 487ae3cb2..50f0f3040 100644
--- a/plugin/src/main/scala/com/stratio/cassandra/lucene/IndexWriterSkinny.scala
+++ b/plugin/src/main/scala/com/stratio/cassandra/lucene/IndexWriterSkinny.scala
@@ -15,7 +15,6 @@
*/
package com.stratio.cassandra.lucene
-import com.stratio.cassandra.lucene.util.Tracer
import org.apache.cassandra.db.DecoratedKey
import org.apache.cassandra.db.rows.Row
import org.apache.cassandra.index.transactions.IndexTransaction
@@ -31,11 +30,12 @@ import org.apache.cassandra.utils.concurrent.OpOrder
* @param transactionType what kind of update is being performed on the base data
* @author Andres de la Pena `adelapena@stratio.com`
*/
-class IndexWriterSkinny(service: IndexServiceSkinny,
- key: DecoratedKey,
- nowInSec: Int,
- opGroup: OpOrder.Group,
- transactionType: IndexTransaction.Type)
+class IndexWriterSkinny(
+ service: IndexServiceSkinny,
+ key: DecoratedKey,
+ nowInSec: Int,
+ opGroup: OpOrder.Group,
+ transactionType: IndexTransaction.Type)
extends IndexWriter(service, key, nowInSec, opGroup, transactionType) {
private var row: Option[Row] = None
@@ -53,22 +53,26 @@ class IndexWriterSkinny(service: IndexServiceSkinny,
/** @inheritdoc */
override def finish() {
- if (transactionType != CLEANUP) {
- row.map(row => {
+
+ // Skip on cleanups
+ if (transactionType == CLEANUP) return
+
+ row.map(
+ row => {
if (transactionType == COMPACTION || service.needsReadBeforeWrite(key, row)) {
- Tracer.trace("Lucene index reading before write")
- val iterator = service.read(key, nowInSec)
+ tracer.trace("Lucene index reading before write")
+ val iterator = read(key)
if (iterator.hasNext) iterator.next.asInstanceOf[Row] else row
} else row
- }).foreach(row => {
+ }).foreach(
+ row => {
if (row.hasLiveData(nowInSec)) {
- Tracer.trace("Lucene index writing document")
+ tracer.trace("Lucene index writing document")
service.upsert(key, row, nowInSec)
} else {
- Tracer.trace("Lucene index deleting document")
+ tracer.trace("Lucene index deleting document")
service.delete(key)
}
})
- }
}
}
diff --git a/plugin/src/main/scala/com/stratio/cassandra/lucene/IndexWriterWide.scala b/plugin/src/main/scala/com/stratio/cassandra/lucene/IndexWriterWide.scala
index 9bd8e6a4c..ca948ec30 100644
--- a/plugin/src/main/scala/com/stratio/cassandra/lucene/IndexWriterWide.scala
+++ b/plugin/src/main/scala/com/stratio/cassandra/lucene/IndexWriterWide.scala
@@ -15,14 +15,14 @@
*/
package com.stratio.cassandra.lucene
-import com.stratio.cassandra.lucene.util.Tracer
import org.apache.cassandra.db.rows.Row
import org.apache.cassandra.db.{Clustering, DecoratedKey}
import org.apache.cassandra.index.transactions.IndexTransaction
+import org.apache.cassandra.index.transactions.IndexTransaction.Type._
import org.apache.cassandra.utils.concurrent.OpOrder
-import scala.collection.JavaConversions._
-import java.{util => java}
+import scala.collection.JavaConverters._
+
/** [[IndexWriter]] for wide rows.
*
@@ -33,20 +33,24 @@ import java.{util => java}
* @param transactionType what kind of update is being performed on the base data
* @author Andres de la Pena `adelapena@stratio.com`
*/
-class IndexWriterWide(service: IndexServiceWide,
- key: DecoratedKey,
- nowInSec: Int,
- opGroup: OpOrder.Group,
- transactionType: IndexTransaction.Type)
+class IndexWriterWide(
+ service: IndexServiceWide,
+ key: DecoratedKey,
+ nowInSec: Int,
+ opGroup: OpOrder.Group,
+ transactionType: IndexTransaction.Type)
extends IndexWriter(service, key, nowInSec, opGroup, transactionType) {
- private val rowsToRead = new java.TreeSet[Clustering](service.metadata.comparator)
- private val rows = new java.LinkedHashMap[Clustering, Option[Row]]
+ /** The clustering keys of the rows needing read before write. */
+ private val clusterings = new java.util.TreeSet[Clustering](metadata.comparator)
+
+ /** The rows ready to be written. */
+ private val rows = new java.util.TreeMap[Clustering, Row](metadata.comparator)
/** @inheritdoc */
override def delete() {
service.delete(key)
- rowsToRead.clear()
+ clusterings.clear()
rows.clear()
}
@@ -55,12 +59,11 @@ class IndexWriterWide(service: IndexServiceWide,
if (!row.isStatic) {
val clustering = row.clustering
if (service.needsReadBeforeWrite(key, row)) {
- Tracer.trace("Lucene index doing read before write")
- rowsToRead.add(clustering)
- rows.put(clustering, None)
+ tracer.trace("Lucene index doing read before write")
+ clusterings.add(clustering)
} else {
- Tracer.trace("Lucene index skipping read before write")
- rows.put(clustering, Some(row))
+ tracer.trace("Lucene index skipping read before write")
+ rows.put(clustering, row)
}
}
}
@@ -69,26 +72,24 @@ class IndexWriterWide(service: IndexServiceWide,
override def finish() {
// Skip on cleanups
- if (transactionType == IndexTransaction.Type.CLEANUP) return
+ if (transactionType == CLEANUP) return
// Read required rows from storage engine
- service.read(key, rowsToRead, nowInSec).foreach(unfiltered => {
- val row = unfiltered.asInstanceOf[Row]
- rows.put(row.clustering(), Some(row))
- })
+ read(key, clusterings)
+ .asScala
+ .map(_.asInstanceOf[Row])
+ .foreach(row => rows.put(row.clustering(), row))
// Write rows
- for ((clustering, maybeRow) <- rows) {
- maybeRow.foreach(row => {
- if (row.hasLiveData(nowInSec)) {
- Tracer.trace("Lucene index writing document")
- service.upsert(key, row, nowInSec)
- } else {
- Tracer.trace("Lucene index deleting document")
- service.delete(key, row)
- }
- })
- }
+ rows.forEach((clustering, row) => {
+ if (row.hasLiveData(nowInSec)) {
+ tracer.trace("Lucene index writing document")
+ service.upsert(key, row, nowInSec)
+ } else {
+ tracer.trace("Lucene index deleting document")
+ service.delete(key, clustering)
+ }
+ })
}
-}
+}
\ No newline at end of file
diff --git a/plugin/src/main/scala/com/stratio/cassandra/lucene/column/Column.scala b/plugin/src/main/scala/com/stratio/cassandra/lucene/column/Column.scala
index 66bed5b63..78ca8cfe6 100644
--- a/plugin/src/main/scala/com/stratio/cassandra/lucene/column/Column.scala
+++ b/plugin/src/main/scala/com/stratio/cassandra/lucene/column/Column.scala
@@ -15,102 +15,112 @@
*/
package com.stratio.cassandra.lucene.column
+import java.nio.ByteBuffer
+import java.util.Date
import java.util.regex.Pattern
import com.google.common.base.MoreObjects
import com.stratio.cassandra.lucene.IndexException
+import com.stratio.cassandra.lucene.column.Column._
+import org.apache.cassandra.db.marshal.{AbstractType, SimpleDateType}
import org.apache.commons.lang3.StringUtils
+import org.apache.commons.lang3.StringUtils.EMPTY
/** A cell of a CQL3 logic column, which in most cases is different from a storage engine column.
*
- * @param cellName the name of the base cell
- * @param udtNames the UDT fields
- * @param mapNames the map keys
- * @param deletionTime the deletion time in seconds
- * @param value the optional value
- * @tparam A the value type
+ * @param cell the name of the base cell
+ * @param udt the UDT suffix
+ * @param map the map suffix
+ * @param value the optional value
* @author Andres de la Pena `adelapena@stratio.com`
*/
-case class Column[A](cellName: String,
- udtNames: List[String] = Nil,
- mapNames: List[String] = Nil,
- deletionTime: Int = Column.NO_DELETION_TIME,
- value: Option[A] = None) {
+case class Column(cell: String,
+ udt: String = EMPTY,
+ map: String = EMPTY,
+ value: Option[_] = None) {
- if (StringUtils.isBlank(cellName)) throw new IndexException("Cell name shouldn't be blank")
+ if (StringUtils.isBlank(cell)) throw new IndexException("Cell name shouldn't be blank")
- private[this] lazy val udtSuffix = udtNames.foldLeft("")((a, n) => a + Column.UDT_SEPARATOR + n)
- private[this] lazy val mapSuffix = mapNames.foldLeft("")((a, n) => a + Column.MAP_SEPARATOR + n)
+ /** The columns mapper name, composed by cell name and UDT names, without map names. */
+ lazy val mapper: String = cell.concat(udt)
/** The columns field name, composed by cell name, UDT names and map names. */
- lazy val fieldName = cellName + udtSuffix + mapSuffix
+ lazy val field: String = mapper.concat(map)
- /** The columns mapper name, composed by cell name and UDT names, without map names. */
- lazy val mapperName = cellName + udtSuffix
+ /** Returns `true` if the value is not defined, `false` otherwise. */
+ def isEmpty: Boolean = value.isEmpty
- lazy val mapperNames:List[String] = cellName :: udtNames
+ /** Returns the value, or null if it is not defined. */
+ def valueOrNull: Any = value.orNull
/** Returns a copy of this with the specified name appended to the list of UDT names. */
- def withUDTName(name: String): Column[_] =
- copy(udtNames = udtNames :+ name)
+ def withUDTName(name: String): Column = copy(udt = udt + UDT_SEPARATOR + name)
/** Returns a copy of this with the specified name appended to the list of map names. */
- def withMapName(name: String): Column[_] =
- copy(mapNames = mapNames :+ name)
-
- /** Returns a copy of this with the specified deletion UNIX time in seconds. */
- def withDeletionTime(deletionTime: Int): Column[_] =
- copy(deletionTime = deletionTime)
+ def withMapName(name: String): Column = copy(map = map + MAP_SEPARATOR + name)
/** Returns a copy of this with the specified value. */
- def withValue[B](value: B): Column[B] =
- copy(value = Option(value))
+ def withValue[B](value: B): Column = copy(value = Option(value))
- /** Returns the name for fields. */
- def fieldName(field: String): String =
- field + mapSuffix
+ /** Returns a copy of this with the specified decomposed value. */
+ def withValue(bb: ByteBuffer, t: AbstractType[_]): Column = withValue(compose(bb, t))
- /** Returns if this is a deletion at the specified UNIX timestamp in seconds. */
- def isDeleted(timeInSec: Int): Boolean =
- value.isEmpty || deletionTime <= timeInSec
+ /** Returns the specified field name with this column's map suffix appended. */
+ def fieldName(field: String): String = field.concat(map)
/** Returns a [[Columns]] composed by this and the specified column. */
- def +(column: Column[_]): Columns =
- Columns(this, column)
+ def +(column: Column): Columns = Columns(this, column)
/** Returns a [[Columns]] composed by this and the specified columns. */
- def +(columns: Columns): Columns =
- Columns(this) + columns
+ def +(columns: Columns): Columns = this :: columns
/** @inheritdoc */
override def toString: String =
MoreObjects.toStringHelper(this)
- .add("cell", cellName)
- .add("name", fieldName)
+ .add("cell", cell)
+ .add("field", field)
.add("value", value)
- .add("deletionTime", deletionTime)
.toString
}
+/** Companion object for [[Column]]. */
object Column {
- val NO_DELETION_TIME = Int.MaxValue
-
private val UDT_SEPARATOR = "."
private val MAP_SEPARATOR = "$"
private[this] val UDT_PATTERN = Pattern.quote(UDT_SEPARATOR)
- private[this] val MAP_PATTERN = Pattern.quote(MAP_SEPARATOR)
-
- def apply(cellName: String): Column[_] =
- new Column(cellName = cellName)
-
- def parse(name: String): Column[_] = {
- val x = name.split(MAP_PATTERN)
- val mapNames = x.drop(1).toList
- val y = x.head.split(UDT_PATTERN)
- val cellName = y.head
- val udtNames = y.drop(1).toList
- new Column(cellName, udtNames, mapNames)
+
+ def apply(cell: String): Column = new Column(cell = cell)
+
+ def parseCellName(name: String): String = {
+ val udtSuffixStart = name.indexOf(UDT_SEPARATOR)
+ if (udtSuffixStart < 0) {
+ val mapSuffixStart = name.indexOf(MAP_SEPARATOR)
+ if (mapSuffixStart < 0) name else name.substring(0, mapSuffixStart)
+ } else name.substring(0, udtSuffixStart)
+ }
+
+ def parseMapperName(name: String): String = {
+ val mapSuffixStart = name.indexOf(MAP_SEPARATOR)
+ if (mapSuffixStart < 0) name else name.substring(0, mapSuffixStart)
+ }
+
+ def parseUdtNames(name: String): List[String] = {
+ val udtSuffixStart = name.indexOf(UDT_SEPARATOR)
+ if (udtSuffixStart < 0) Nil else {
+ val mapSuffixStart = name.indexOf(MAP_SEPARATOR)
+ val udtSuffix = if (mapSuffixStart < 0) {
+ name.substring(udtSuffixStart + 1)
+ } else {
+ name.substring(udtSuffixStart + 1, mapSuffixStart)
+ }
+ udtSuffix.split(UDT_PATTERN).toList
+ }
+ }
+
+ def compose(bb: ByteBuffer, t: AbstractType[_]): Any = t match {
+ case sdt: SimpleDateType => new Date(sdt.toTimeInMillis(bb))
+ case _ => t.compose(bb)
}
}
diff --git a/plugin/src/main/scala/com/stratio/cassandra/lucene/column/Columns.scala b/plugin/src/main/scala/com/stratio/cassandra/lucene/column/Columns.scala
index e5b168744..10edeb49c 100644
--- a/plugin/src/main/scala/com/stratio/cassandra/lucene/column/Columns.scala
+++ b/plugin/src/main/scala/com/stratio/cassandra/lucene/column/Columns.scala
@@ -15,79 +15,68 @@
*/
package com.stratio.cassandra.lucene.column
-import com.google.common.base.MoreObjects
+import com.google.common.base.MoreObjects.toStringHelper
-/** A sorted list of CQL3 logic [[Column]]s.
+/** An immutable sorted list of CQL3 logic [[Column]]s.
*
* @param columns the [[Column]]s composing this
* @author Andres de la Pena `adelapena@stratio.com`
*/
@scala.annotation.varargs
-case class Columns(columns: Column[_]*)
- extends Traversable[Column[_]]
- with java.lang.Iterable[Column[_]] {
-
- /** @constructor create a new columns with a list of columns. */
- def this(columns: Traversable[Column[_]]) = this(columns.toArray: _*)
+case class Columns(private val columns: List[Column]) extends Traversable[Column] {
/** @constructor create a new empty columns. */
- def this() = this(Array[Column[_]]())
+ def this() = this(Nil)
- override def foreach[U](f: Column[_] => U) = columns.foreach(f)
+ /** @inheritdoc */
+ override def isEmpty: Boolean = columns.isEmpty
- override def iterator: java.util.Iterator[Column[_]] = {
- import collection.JavaConversions._
- columns.iterator
- }
+ /** @inheritdoc */
+ override def foreach[A](f: Column => A): Unit = columns.foreach(f)
- /** Returns a copy of this with the specified column appended. */
- def +(column: Column[_]): Columns =
- new Columns(columns :+ column)
+ /** Returns a copy of this with the specified column prepended in O(1) time. */
+ def ::(column: Column): Columns = new Columns(column :: columns)
- /** Returns a copy of this with the specified columns appended. */
- def +(columns: Columns): Columns =
- new Columns(this.columns ++ columns)
+ /** Returns a copy of this with the specified column appended in O(n) time. */
+ def +(column: Column): Columns = new Columns(columns :+ column)
- override def head: Column[_] =
- if (columns.isEmpty) null else columns.head
+ /** Returns a copy of this with the specified columns appended. */
+ def ++(columns: Columns): Columns = new Columns(this.columns ++ columns)
- /** Returns copy of this with only the columns with the specified full name. */
- def withFieldName(name: String): Columns =
- new Columns(filter(_.fieldName == name))
+ /** Returns the value of the first column with the specified field name. */
+ def valueForField(field: String): Any = columns.find(_.field == field).flatMap(_.value).orNull
- /** Returns copy of this with only the columns with the specified cell name. */
- def withCellName(name: String): Columns = {
- lazy val cellName = Column.parse(name).cellName
- new Columns(filter(_.cellName == cellName))
+ /** Runs the specified function over each column whose mapper name matches that parsed from the specified field name. */
+ def foreachWithMapper[A](field: String)(f: Column => A): Unit = {
+ val mapper = Column.parseMapperName(field)
+ columns.foreach(column => if (column.mapper == mapper) f(column))
}
- /** Returns copy of this with only the columns with the specified mapper name. */
- def withMapperName(name: String): Columns = {
- lazy val mapperName = Column.parse(name).mapperName
- new Columns(filter(_.mapperName == mapperName))
- }
+ /** Returns a copy of this with the specified column appended. */
+ def add(cell: String): Columns = this + Column(cell)
+
+ /** Returns a copy of this with the specified column appended. */
+ def add(cell: String, value: Any): Columns = this + Column(cell, value = Option(value))
+
+ /** @inheritdoc */
+ override def toString: String = (toStringHelper(this) /: columns) ((helper, column) =>
+ helper.add(column.field, column.value)).toString
- /** Returns copy of this without the columns deleted at the specified time.
- *
- * @param timeInSec the max allowed UNIX time in seconds
- */
- def withoutDeleted(timeInSec: Int): Columns =
- new Columns(filterNot(_.isDeleted(timeInSec)))
+}
- def add[A](column: Column[A]) =
- this + column
+/** Companion object for [[Columns]]. */
+object Columns {
- def add[A](columns: Columns) =
- this + columns
+ /** An empty columns. */
+ val empty: Columns = new Columns
- def add[A](cellName: String) =
- this + Column(cellName)
+ /** Returns a new empty columns. */
+ def apply: Columns = empty
- def add[A](cellName: String, value: A) =
- this + Column(cellName, value = Option(value))
+ /** Returns a new [[Columns]] composed by the specified [[Column]]s. */
+ def apply(columns: Traversable[Column]): Columns = new Columns(columns.toList)
- override def toString: String =
- columns.foldLeft(MoreObjects.toStringHelper(this))(
- (helper, column) => helper.add(column.fieldName, column.value)).toString
+ /** Returns a new [[Columns]] composed by the specified [[Column]]s. */
+ def apply(columns: Column*): Columns = new Columns(columns.toList)
-}
\ No newline at end of file
+}
diff --git a/plugin/src/main/scala/com/stratio/cassandra/lucene/column/ColumnsMapper.scala b/plugin/src/main/scala/com/stratio/cassandra/lucene/column/ColumnsMapper.scala
deleted file mode 100644
index 56bb14364..000000000
--- a/plugin/src/main/scala/com/stratio/cassandra/lucene/column/ColumnsMapper.scala
+++ /dev/null
@@ -1,304 +0,0 @@
-/*
- * Copyright (C) 2014 Stratio (http://stratio.com)
- *
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-package com.stratio.cassandra.lucene.column
-
-import java.math.{BigDecimal, BigInteger}
-import java.net.InetAddress
-import java.nio.ByteBuffer
-import java.util.{Date, UUID}
-
-import com.stratio.cassandra.lucene.IndexException
-import org.apache.cassandra.config.CFMetaData
-import org.apache.cassandra.db.marshal._
-import org.apache.cassandra.db.rows.{Cell, ComplexColumnData, Row}
-import org.apache.cassandra.serializers.CollectionSerializer
-import org.apache.cassandra.transport.Server._
-import org.apache.cassandra.utils.ByteBufferUtil
-
-import scala.annotation.tailrec
-import scala.collection.JavaConversions._
-
-/** Maps Cassandra rows to [[Columns]].
- *
- * @author Andres de la Pena `adelapena@stratio.com`
- */
-object ColumnsMapper {
-
- /** Returns a [[Columns]] representing the specified row.
- *
- * @param row the Cassandra row to be mapped
- */
- def columns(row: Row): Columns = {
- row.columns().foldLeft(Columns())(
- (cs, columnDefinition) =>
- if (columnDefinition.isComplex)
- cs + columns(row.getComplexColumnData(columnDefinition))
- else
- cs + columns(row.getCell(columnDefinition))
- )
- }
-
- private[column] def columns(complexColumnData: ComplexColumnData): Columns = {
- complexColumnData.foldLeft(Columns())((cs, cell) => cs + columns(cell))
- }
-
- private[column] def columns(cell: Cell): Columns = {
- if (cell == null) return Columns()
- val isTombstone = cell.isTombstone
- val name = cell.column.name.toString
- val comparator = cell.column.`type`
- val value = cell.value
- val column = new Column(cellName = name, deletionTime = cell.localDeletionTime)
- comparator match {
- case setType: SetType[_] if !setType.isFrozenCollection =>
- val itemComparator = setType.nameComparator
- val itemValue = cell.path.get(0)
- columns(isTombstone, column, itemComparator, itemValue)
- case listType: ListType[_] if !listType.isFrozenCollection =>
- val itemComparator = listType.valueComparator
- columns(isTombstone, column, itemComparator, value)
- case mapType: MapType[_, _] if !mapType.isFrozenCollection =>
- val itemComparator = mapType.valueComparator
- val keyValue = cell.path.get(0)
- val keyComparator = mapType.nameComparator
- val nameSuffix = keyComparator.compose(keyValue).toString
- columns(isTombstone, column.withMapName(nameSuffix), itemComparator, value)
- case userType: UserType =>
- val cellPath = cell.path()
- if (cellPath == null) {
- columns(isTombstone, column, comparator, value)
- } else {
- val position = ByteBufferUtil.toShort(cellPath.get(0))
- val name = userType.fieldNameAsString(position)
- val typo = userType.`type`(position)
- columns(isTombstone, column.withUDTName(name), typo, value)
- }
- case _ =>
- columns(isTombstone, column, comparator, value)
- }
- }
-
- private[column] def columns(
- isTombstone: Boolean,
- column: Column[_],
- abstractType: AbstractType[_],
- value: ByteBuffer): Columns = abstractType match {
- case setType: SetType[_] =>
- columns(isTombstone, column, setType, value)
- case listType: ListType[_] =>
- columns(isTombstone, column, listType, value)
- case mapType: MapType[_, _] =>
- columns(isTombstone, column, mapType, value)
- case userType: UserType =>
- columns(isTombstone, column, userType, value)
- case tupleType: TupleType =>
- columns(isTombstone, column, tupleType, value)
- case _ =>
- Columns(column.withValue(compose(value, abstractType)))
- }
-
- private[this] def columns(
- isTombstone: Boolean,
- column: Column[_],
- set: SetType[_],
- value: ByteBuffer): Columns = {
- if (isTombstone) return Columns(column)
- val nameType = set.nameComparator
- val bb = ByteBufferUtil.clone(value) // CollectionSerializer read functions are impure
- (0 until frozenCollectionSize(bb)).foldLeft(Columns())(
- (cs, n) => {
- val itemValue = frozenCollectionValue(bb)
- cs + columns(isTombstone, column, nameType, itemValue)
- })
- }
-
- private[this] def columns(
- isTombstone: Boolean,
- column: Column[_],
- list: ListType[_],
- value: ByteBuffer): Columns = {
- if (isTombstone) return Columns(column)
- val valueType = list.valueComparator
- val bb = ByteBufferUtil.clone(value) // CollectionSerializer read functions are impure
- (0 until frozenCollectionSize(bb)).foldLeft(Columns())(
- (cs, n) => {
- val itemValue = frozenCollectionValue(bb)
- cs + columns(isTombstone, column, valueType, itemValue)
- })
- }
-
- private[this] def columns(
- isTombstone: Boolean,
- column: Column[_],
- map: MapType[_, _],
- value: ByteBuffer): Columns = {
- if (isTombstone) return Columns(column)
- val itemKeysType = map.nameComparator
- val itemValuesType = map.valueComparator
- val bb = ByteBufferUtil.clone(value) // CollectionSerializer read functions are impure
- (0 until frozenCollectionSize(bb)).foldLeft(Columns())(
- (cs, n) => {
- val itemKey = frozenCollectionValue(bb)
- val itemValue = frozenCollectionValue(bb)
- val itemName = itemKeysType.compose(itemKey).toString
- cs + columns(isTombstone, column.withMapName(itemName), itemValuesType, itemValue)
- })
- }
-
- private[this] def columns(
- isTombstone: Boolean,
- column: Column[_],
- udt: UserType,
- value: ByteBuffer): Columns = {
- if (isTombstone) return Columns(column)
- val itemValues = udt.split(value)
- (0 until udt.fieldNames.size).foldLeft(Columns())(
- (cs, i) => {
- val itemName = udt.fieldNameAsString(i)
- val itemType = udt.fieldType(i)
- val itemValue = itemValues(i)
- if (isTombstone || itemValue == null)
- cs + column.withUDTName(itemName)
- else
- cs + columns(isTombstone, column.withUDTName(itemName), itemType, itemValue)
- })
- }
-
- private[this] def columns(
- isTombstone: Boolean,
- column: Column[_],
- tuple: TupleType,
- value: ByteBuffer): Columns = {
- if (isTombstone) return Columns(column)
- val itemValues = tuple.split(value)
- (0 until tuple.size).foldLeft(Columns())(
- (cs, i) => {
- val itemName = i.toString
- val itemType = tuple.`type`(i)
- val itemValue = itemValues(i)
- if (isTombstone || itemValue == null)
- cs + column.withUDTName(itemName)
- else
- cs + columns(isTombstone, column.withUDTName(itemName), itemType, itemValue)
- })
- }
-
- private[this] def frozenCollectionSize(bb: ByteBuffer): Int =
- CollectionSerializer.readCollectionSize(bb, CURRENT_VERSION)
-
- private[this] def frozenCollectionValue(bb: ByteBuffer): ByteBuffer =
- CollectionSerializer.readValue(bb, CURRENT_VERSION)
-
- def compose(bb: ByteBuffer, t: AbstractType[_]): Any = t match {
- case sdt: SimpleDateType => new Date(sdt.toTimeInMillis(bb))
- case _ => t.compose(bb)
- }
-
- ///////////////////////////////////////////////////////////////////////////
- // Validation
- ///////////////////////////////////////////////////////////////////////////
-
- def validate(
- metadata: CFMetaData,
- column: String,
- field: String,
- supportedTypes: java.util.List[Class[_]]) {
-
- val cellName = Column.parse(column).cellName
- val cellDefinition = metadata.getColumnDefinition(UTF8Type.instance.decompose(cellName))
-
- if (cellDefinition == null) {
- throw new IndexException("No column definition '{}' for mapper '{}'", cellName, field)
- }
- if (cellDefinition.isStatic) {
- throw new IndexException("Lucene indexes are not allowed on static columns as '{}'", column)
- }
-
- def checkSupported(t: AbstractType[_], mapper: String) {
- if (!supports(t, supportedTypes)) {
- throw new IndexException(
- "Type '{}' in column '{}' is not supported by mapper '{}'",
- t,
- mapper,
- field)
- }
- }
-
- val cellType = cellDefinition.`type`
- val udtNames = Column.parse(column).udtNames
- if (udtNames.isEmpty) {
- checkSupported(cellType, cellName)
- } else {
- var col = Column.apply(cellName)
- var currentType = cellType
- for (i <- udtNames.indices) {
- col = col.withUDTName(udtNames(i))
- ColumnsMapper.childType(currentType, udtNames(i)) match {
- case None => throw new IndexException(
- "No column definition '{}' for mapper '{}'",
- col.mapperName,
- field)
- case Some(n) if i == udtNames.indices.last => checkSupported(n, col.mapperName)
- case Some(n) => currentType = n
- }
- }
- }
- }
-
- @tailrec
- def childType(parent: AbstractType[_], child: String): Option[AbstractType[_]] = parent match {
- case t: ReversedType[_] => childType(t.baseType, child)
- case t: SetType[_] => childType(t.nameComparator, child)
- case t: ListType[_] => childType(t.valueComparator, child)
- case t: MapType[_, _] => childType(t.valueComparator, child)
- case t: UserType =>
- (0 until t.fieldNames.size).find(t.fieldNameAsString(_) == child).map(t.fieldType)
- case t: TupleType => (0 until t.size).find(_.toString == child).map(t.`type`)
- case _ => None
- }
-
- @tailrec
- def supports(
- candidateType: AbstractType[_],
- supportedTypes: Seq[Class[_]]): Boolean = candidateType match {
- case t: ReversedType[_] => supports(t.baseType, supportedTypes)
- case t: SetType[_] => supports(t.getElementsType, supportedTypes)
- case t: ListType[_] => supports(t.getElementsType, supportedTypes)
- case t: MapType[_, _] => supports(t.getValuesType, supportedTypes)
- case _ =>
- val native = nativeType(candidateType)
- supportedTypes.exists(_ isAssignableFrom native)
- }
-
- def nativeType(validator: AbstractType[_]): Class[_] = validator match {
- case _: UTF8Type | _: AsciiType => classOf[String]
- case _: SimpleDateType | _: TimestampType => classOf[Date]
- case _: UUIDType | _: LexicalUUIDType | _: TimeUUIDType => classOf[UUID]
- case _: ShortType => classOf[java.lang.Short]
- case _: ByteType => classOf[java.lang.Byte]
- case _: Int32Type => classOf[Integer]
- case _: LongType => classOf[java.lang.Long]
- case _: IntegerType => classOf[BigInteger]
- case _: FloatType => classOf[java.lang.Float]
- case _: DoubleType => classOf[java.lang.Double]
- case _: DecimalType => classOf[BigDecimal]
- case _: BooleanType => classOf[java.lang.Boolean]
- case _: BytesType => classOf[ByteBuffer]
- case _: InetAddressType => classOf[InetAddress]
- case _ => throw new IndexException(s"Unsupported Cassandra data type: ${validator.getClass}")
- }
-
-}
\ No newline at end of file
diff --git a/plugin/src/main/scala/com/stratio/cassandra/lucene/index/DocumentIterator.scala b/plugin/src/main/scala/com/stratio/cassandra/lucene/index/DocumentIterator.scala
index e9ef82008..d9286bc42 100644
--- a/plugin/src/main/scala/com/stratio/cassandra/lucene/index/DocumentIterator.scala
+++ b/plugin/src/main/scala/com/stratio/cassandra/lucene/index/DocumentIterator.scala
@@ -17,39 +17,38 @@ package com.stratio.cassandra.lucene.index
import com.stratio.cassandra.lucene.IndexException
import com.stratio.cassandra.lucene.index.DocumentIterator._
-import com.stratio.cassandra.lucene.util.{TimeCounter, Tracer}
+import com.stratio.cassandra.lucene.util.{Logging, TimeCounter, Tracing}
import org.apache.cassandra.utils.CloseableIterator
import org.apache.lucene.document.Document
import org.apache.lucene.index.Term
import org.apache.lucene.search.BooleanClause.Occur._
import org.apache.lucene.search.EarlyTerminatingSortingCollector._
import org.apache.lucene.search._
-import org.slf4j.LoggerFactory
-
-import scala.collection.JavaConversions._
/** [[CloseableIterator]] for retrieving Lucene documents satisfying a query.
*
- * @param manager the Lucene index searcher manager
- * @param indexSort the sort of the index
- * @param querySort the sort in which the documents are going to be retrieved
- * @param query the query to be satisfied by the documents
- * @param limit the iteration page size
- * @param fields the names of the document fields to be loaded
+ * @param searcher the Lucene index searcher
+ * @param releaseSearcher a function for releasing the searcher
+ * @param indexSort the sort of the index
+ * @param querySort the sort in which the documents are going to be retrieved
+ * @param query the query to be satisfied by the documents
+ * @param limit the iteration page size
+ * @param fields the names of the document fields to be loaded
* @author Andres de la Pena `adelapena@stratio.com`
*/
-class DocumentIterator(manager: SearcherManager,
- afterTerm: Option[Term],
- indexSort: Sort,
- querySort: Sort,
- query: Query,
- limit: Int,
- fields: Set[String])
- extends Iterator[(Document, ScoreDoc)] with AutoCloseable {
+class DocumentIterator(
+ searcher: IndexSearcher,
+ releaseSearcher: () => Unit,
+ afterTerm: Option[Term],
+ indexSort: Sort,
+ querySort: Sort,
+ query: Query,
+ limit: Int,
+ fields: java.util.Set[String])
+ extends Iterator[(Document, ScoreDoc)] with AutoCloseable with Logging with Tracing {
private[this] val pageSize = Math.min(limit, MAX_PAGE_SIZE) + 1
private[this] val documents = new java.util.LinkedList[(Document, ScoreDoc)]
- private[this] val searcher = manager.acquire
private[this] var afterOffset = 0
private[this] var finished = false
@@ -60,33 +59,34 @@ class DocumentIterator(manager: SearcherManager,
querySort.rewrite(searcher)
} catch {
case e: Exception =>
- manager.release(searcher)
+ releaseSearcher()
throw new IndexException(e, s"Error rewriting sort $indexSort")
}
/** The start after position. */
private[this] var after = try {
- afterTerm.map(term => {
- val time = TimeCounter.create.start
- val builder = new BooleanQuery.Builder
- builder.add(new TermQuery(term), FILTER)
- builder.add(query, MUST)
- val scores = searcher.search(builder.build, 1, sort).scoreDocs
- if (scores.nonEmpty) {
- Tracer.trace("Lucene index seeks last index position")
- logger.debug(s"Start position found in ${time.stop}")
- scores.head
- } else throw new IndexException("Last page position not found")
- })
+ afterTerm.map(
+ term => {
+ val time = TimeCounter.start
+ val builder = new BooleanQuery.Builder
+ builder.add(new TermQuery(term), FILTER)
+ builder.add(query, MUST)
+ val scores = searcher.search(builder.build, 1, sort).scoreDocs
+ if (scores.nonEmpty) {
+ tracer.trace("Lucene index seeks last index position")
+ logger.debug(s"Start position found in $time")
+ scores.head
+ } else throw new IndexException("Last page position not found")
+ })
} catch {
case e: Exception =>
- manager.release(searcher)
+ releaseSearcher()
throw new IndexException(e, "Error while searching for the last page position")
}
private[this] def fetch() = {
try {
- val time = TimeCounter.create.start
+ val time = TimeCounter.start
val topDocs = if (afterTerm.isEmpty && canEarlyTerminate(sort, indexSort)) {
val fieldDoc = after.map(_.asInstanceOf[FieldDoc]).orNull
@@ -107,13 +107,13 @@ class DocumentIterator(manager: SearcherManager,
documents.add((document, scoreDoc))
}
- Tracer.trace(s"Lucene index fetches $numFetched documents")
- logger.debug(s"Page fetched with $numFetched documents in ${time.stop}")
+ tracer.trace(s"Lucene index fetches $numFetched documents")
+ logger.debug(s"Page fetched with $numFetched documents in $time")
} catch {
case e: Exception =>
close()
- throw new IndexException(logger, e, s"Error searching with $query and $sort")
+ throw new IndexException(e, s"Error searching with $query and $sort")
}
if (finished) close()
}
@@ -144,15 +144,14 @@ class DocumentIterator(manager: SearcherManager,
/** Closes the [[IndexSearcher]] and any other resources */
override def close() = {
- if (!closed) try manager.release(searcher) finally closed = true
+ if (!closed) try releaseSearcher() finally closed = true
}
}
+/** Companion object for [[DocumentIterator]]. */
object DocumentIterator {
- private val logger = LoggerFactory.getLogger(classOf[DocumentIterator])
-
/** The max number of rows to be read per iteration. */
- private val MAX_PAGE_SIZE = 10000
+ val MAX_PAGE_SIZE = 10000
}
diff --git a/plugin/src/main/scala/com/stratio/cassandra/lucene/index/FSIndex.scala b/plugin/src/main/scala/com/stratio/cassandra/lucene/index/FSIndex.scala
index 95d782a2c..9e7ca2ae1 100644
--- a/plugin/src/main/scala/com/stratio/cassandra/lucene/index/FSIndex.scala
+++ b/plugin/src/main/scala/com/stratio/cassandra/lucene/index/FSIndex.scala
@@ -17,14 +17,12 @@ package com.stratio.cassandra.lucene.index
import java.nio.file.Path
-import com.stratio.cassandra.lucene.index.FSIndex._
import org.apache.cassandra.io.util.FileUtils
import org.apache.lucene.analysis.Analyzer
import org.apache.lucene.document.Document
import org.apache.lucene.index._
import org.apache.lucene.search._
import org.apache.lucene.store.{Directory, FSDirectory, NRTCachingDirectory}
-import org.slf4j.LoggerFactory
/** Class wrapping a Lucene file system-based directory and its readers, writers and searchers.
*
@@ -37,16 +35,17 @@ import org.slf4j.LoggerFactory
* @param maxCachedMB the directory max cache size in MB
* @author Andres de la Pena `adelapena@stratio.com`
*/
-class FSIndex(name: String,
- path: Path,
- analyzer: Analyzer,
- refreshSeconds: Double,
- ramBufferMB: Int,
- maxMergeMB: Int,
- maxCachedMB: Int) {
+class FSIndex(
+ name: String,
+ path: Path,
+ analyzer: Analyzer,
+ refreshSeconds: Double,
+ ramBufferMB: Int,
+ maxMergeMB: Int,
+ maxCachedMB: Int) {
private[this] var mergeSort: Sort = _
- private[this] var fields: Set[String] = _
+ private[this] var fields: java.util.Set[String] = _
private[this] var directory: Directory = _
private[this] var writer: IndexWriter = _
private[this] var manager: SearcherManager = _
@@ -57,7 +56,7 @@ class FSIndex(name: String,
* @param mergeSort the sort to be applied to the index during merges
* @param fields the names of the document fields to be loaded
*/
- def init(mergeSort: Sort, fields: Set[String]) {
+ def init(mergeSort: Sort, fields: java.util.Set[String]) {
this.mergeSort = mergeSort
this.fields = fields
@@ -73,7 +72,7 @@ class FSIndex(name: String,
writer = new IndexWriter(directory, indexWriterConfig)
// Setup NRT search
- val searcherFactory: SearcherFactory = new SearcherFactory() {
+ val searcherFactory: SearcherFactory = new SearcherFactory {
override def newSearcher(reader: IndexReader, previousReader: IndexReader): IndexSearcher = {
val searcher = new IndexSearcher(reader)
searcher.setSimilarity(new NoIDFSimilarity)
@@ -99,7 +98,6 @@ class FSIndex(name: String,
* @param document the document to be added
*/
def upsert(term: Term, document: Document) {
- logger.debug(s"Indexing $document with term $term in $name")
writer.updateDocument(term, document)
}
@@ -108,7 +106,6 @@ class FSIndex(name: String,
* @param term the term identifying the documents to be deleted
*/
def delete(term: Term) {
- logger.debug(s"Deleting $term from $name")
writer.deleteDocuments(term)
}
@@ -117,7 +114,6 @@ class FSIndex(name: String,
* @param query the query identifying the documents to be deleted
*/
def delete(query: Query) {
- logger.debug(s"Deleting $query from $name")
writer.deleteDocuments(query)
}
@@ -125,30 +121,26 @@ class FSIndex(name: String,
def truncate() {
writer.deleteAll()
writer.commit()
- logger.info(s"Truncated $name")
}
/** Commits the pending changes. */
def commit() {
writer.commit()
- logger.debug(s"Committed $name")
}
/** Commits all changes to the index, waits for pending merges to complete, and closes all
* associated resources.
*/
def close() {
- reopener.interrupt()
+ reopener.close()
manager.close()
writer.close()
directory.close()
- logger.info(s"Closed $name")
}
/** Closes the index and removes all its files. */
def delete() {
try close() finally FileUtils.deleteRecursive(path.toFile)
- logger.info(s"Deleted $name")
}
/** Finds the top hits for a query and sort, starting from an optional position.
@@ -160,14 +152,15 @@ class FSIndex(name: String,
* @return the found documents, sorted according to the supplied [[Sort]] instance
*/
def search(after: Option[Term], query: Query, sort: Sort, count: Int): DocumentIterator = {
- logger.debug(
- s"""Searching in $name
- | after: $after
- | query: $query
- | count: $count
- | sort : $sort
- """.stripMargin)
- new DocumentIterator(manager, after, mergeSort, sort, query, count, fields)
+ val searcher = manager.acquire()
+ val releaseSearcher = () => manager.release(searcher)
+ new DocumentIterator(searcher, releaseSearcher, after, mergeSort, sort, query, count, fields)
+ }
+
+ def searcher: (IndexSearcher, () => Unit) = {
+ val searcher = manager.acquire()
+ val releaseSearcher = () => manager.release(searcher)
+ (searcher, releaseSearcher)
}
/** Returns the total number of documents in this index.
@@ -175,7 +168,6 @@ class FSIndex(name: String,
* @return the number of documents
*/
def getNumDocs: Int = {
- logger.debug(s"Getting $name num docs")
doWithSearcher(searcher => searcher.getIndexReader.numDocs)
}
@@ -184,7 +176,6 @@ class FSIndex(name: String,
* @return the number of deleted documents
*/
def getNumDeletedDocs: Int = {
- logger.debug(s"Getting $name num deleted docs")
doWithSearcher(searcher => searcher.getIndexReader.numDeletedDocs)
}
@@ -195,10 +186,8 @@ class FSIndex(name: String,
* @param doWait `true` if the call should block until the operation completes
*/
def forceMerge(maxNumSegments: Int, doWait: Boolean) {
- logger.info(s"Merging $name segments to $maxNumSegments")
writer.forceMerge(maxNumSegments, doWait)
writer.commit()
- logger.info(s"Merged $name segments to $maxNumSegments")
}
/** Optimizes the index forcing merge of all segments that have deleted documents.
@@ -207,23 +196,19 @@ class FSIndex(name: String,
* @param doWait `true` if the call should block until the operation completes
*/
def forceMergeDeletes(doWait: Boolean) {
- logger.info(s"Merging $name segments with deletions")
writer.forceMergeDeletes(doWait)
writer.commit()
- logger.info(s"Merged $name segments with deletions")
}
/** Refreshes the index readers. */
def refresh() {
manager.maybeRefreshBlocking()
- logger.debug(s"Refreshed $name readers")
}
}
+/** Companion object for [[FSIndex]]. */
object FSIndex {
- private val logger = LoggerFactory.getLogger(classOf[FSIndex])
-
// Disable max boolean query clauses limit
BooleanQuery.setMaxClauseCount(Integer.MAX_VALUE)
}
diff --git a/plugin/src/main/scala/com/stratio/cassandra/lucene/index/PartitionedIndex.scala b/plugin/src/main/scala/com/stratio/cassandra/lucene/index/PartitionedIndex.scala
new file mode 100644
index 000000000..a2850881c
--- /dev/null
+++ b/plugin/src/main/scala/com/stratio/cassandra/lucene/index/PartitionedIndex.scala
@@ -0,0 +1,215 @@
+/*
+ * Copyright (C) 2014 Stratio (http://stratio.com)
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package com.stratio.cassandra.lucene.index
+
+import java.io.File
+import java.nio.file.{Path, Paths}
+
+import com.stratio.cassandra.lucene.IndexException
+import com.stratio.cassandra.lucene.util.Logging
+import org.apache.cassandra.io.util.FileUtils.deleteRecursive
+import org.apache.lucene.analysis.Analyzer
+import org.apache.lucene.document.Document
+import org.apache.lucene.index.{MultiReader, Term}
+import org.apache.lucene.search.{IndexSearcher, Query, Sort}
+
+/** An [[FSIndex]] partitioned by some not specified criterion.
+ *
+ * @param partitions the number of index partitions
+ * @param name the index name
+ * @param path the directory path
+ * @param analyzer the index writer analyzer
+ * @param refreshSeconds the index reader refresh frequency in seconds
+ * @param ramBufferMB the index writer RAM buffer size in MB
+ * @param maxMergeMB the directory max merge size in MB
+ * @param maxCachedMB the directory max cache size in MB
+ * @author Andres de la Pena `adelapena@stratio.com`
+ */
+class PartitionedIndex(
+ partitions: Int,
+ name: String,
+ path: Path,
+ analyzer: Analyzer,
+ refreshSeconds: Double,
+ ramBufferMB: Int,
+ maxMergeMB: Int,
+ maxCachedMB: Int) extends Logging {
+
+ private[this] val indexes: List[FSIndex] = partitions match {
+ case 1 =>
+ List(new FSIndex(name, path, analyzer, refreshSeconds, ramBufferMB, maxMergeMB, maxCachedMB))
+ case n if n > 1 =>
+ val root = path.toFile.getAbsolutePath + File.separator
+ (0 until n)
+ .map(root + _)
+ .map(Paths.get(_))
+ .map(new FSIndex(name, _, analyzer, refreshSeconds, ramBufferMB, maxMergeMB, maxCachedMB))
+ .toList
+ case _ => throw new IndexException(
+ s"The number of partitions should be strictly positive but found $partitions")
+ }
+
+ private[this] var mergeSort: Sort = _
+ private[this] var fields: java.util.Set[String] = _
+
+ /** Initializes this index with the specified merge sort and fields to be loaded.
+ *
+ * @param mergeSort the sort to be applied to the index during merges
+ * @param fields the names of the document fields to be loaded
+ */
+ def init(mergeSort: Sort, fields: java.util.Set[String]) {
+ this.mergeSort = mergeSort
+ this.fields = fields
+ indexes.foreach(_.init(mergeSort, fields))
+ }
+
+ /** Deletes all the documents. */
+ def truncate() {
+ indexes.foreach(_.truncate())
+ logger.info(s"Truncated $name")
+ }
+
+ /** Commits the pending changes. */
+ def commit() {
+ indexes.foreach(_.commit())
+ logger.debug(s"Committed $name")
+ }
+
+ /** Commits all changes to the index, waits for pending merges to complete, and closes all
+ * associated resources.
+ */
+ def close() {
+ indexes.foreach(_.close())
+ logger.info(s"Closed $name")
+ }
+
+ /** Closes the index and removes all its files. */
+ def delete() {
+ try indexes.foreach(_.delete()) finally if (partitions > 1) deleteRecursive(path.toFile)
+ logger.info(s"Deleted $name")
+ }
+
+ /** Optimizes the index forcing merge segments leaving the specified number of segments.
+ * This operation may block until all merging completes.
+ *
+ * @param maxNumSegments the maximum number of segments left in the index after merging finishes
+ * @param doWait `true` if the call should block until the operation completes
+ */
+ def forceMerge(maxNumSegments: Int, doWait: Boolean) {
+ logger.info(s"Merging $name segments to $maxNumSegments")
+ indexes.foreach(_.forceMerge(maxNumSegments, doWait))
+ logger.info(s"Merged $name segments to $maxNumSegments")
+ }
+
+ /** Optimizes the index forcing merge of all segments that have deleted documents.
+ * This operation may block until all merging completes.
+ *
+ * @param doWait `true` if the call should block until the operation completes
+ */
+ def forceMergeDeletes(doWait: Boolean) {
+ logger.info(s"Merging $name segments with deletions")
+ indexes.foreach(_.forceMergeDeletes(doWait))
+ logger.info(s"Merged $name segments with deletions")
+ }
+
+ /** Refreshes the index readers. */
+ def refresh(): Unit = {
+ indexes.foreach(_.refresh())
+ logger.debug(s"Refreshed $name readers")
+ }
+
+ /** Returns the total number of documents in this index.
+ *
+ * @return the number of documents
+ */
+ def getNumDocs: Long = {
+ logger.debug(s"Getting $name num docs")
+ (0L /: indexes) (_ + _.getNumDocs)
+ }
+
+ /** Returns the total number of deleted documents in this index.
+ *
+ * @return the number of deleted documents
+ */
+ def getNumDeletedDocs: Long = {
+ logger.debug(s"Getting $name num deleted docs")
+ (0L /: indexes) (_ + _.getNumDeletedDocs)
+ }
+
+ /** Upserts the specified document by first deleting the documents containing the specified term
+ * and then adding the new document. The delete and then add are atomic as seen by a reader on
+ * the same index (flush may happen only after the addition).
+ *
+ * @param partition the index partition where the operation will be done
+ * @param term the term to identify the document(s) to be deleted
+ * @param document the document to be added
+ */
+ def upsert(partition: Int, term: Term, document: Document) {
+ logger.debug(s"Indexing $document with term $term in $name")
+ indexes(partition).upsert(term, document)
+ }
+
+ /** Deletes all the documents containing the specified term.
+ *
+ * @param partition the index partition where the operation will be done
+ * @param term the term identifying the documents to be deleted
+ */
+ def delete(partition: Int, term: Term) {
+ logger.debug(s"Deleting $term from $name")
+ indexes(partition).delete(term)
+ }
+
+ /** Deletes all the documents satisfying the specified query.
+ *
+ * @param partition the index partition where the operation will be done
+ * @param query the query identifying the documents to be deleted
+ */
+ def delete(partition: Int, query: Query) {
+ logger.debug(s"Deleting $query from $name")
+ indexes(partition).delete(query)
+ }
+
+ /** Finds the top hits for a query and sort, starting from an optional position.
+ *
+ * @param partitions the index partitions where the operation will be done
+ * @param after the starting term
+ * @param query the query to search for
+ * @param sort the sort to be applied
+ * @param count the max number of results to be collected
+ * @return the found documents, sorted according to the supplied [[Sort]] instance
+ */
+ def search(partitions: List[Int], after: Option[Term], query: Query, sort: Sort, count: Int)
+ : DocumentIterator = {
+ logger.debug(
+ s"""Searching in $name (${partitions.mkString(", ")})
+ | after: $after
+ | query: $query
+ | count: $count
+ | sort : $sort
+ """.stripMargin)
+ partitions match {
+ case partition :: Nil => indexes(partition).search(after, query, sort, count)
+ case _ =>
+ val searchers = partitions.map(indexes(_).searcher)
+ val readers = searchers.map(_._1.getIndexReader)
+ val reader = new MultiReader(readers.toArray: _*)
+ val searcher = new IndexSearcher(reader)
+ val release = () => searchers.foreach(_._2.apply())
+ new DocumentIterator(searcher, release, after, mergeSort, sort, query, count, fields)
+ }
+ }
+
+}
\ No newline at end of file
diff --git a/plugin/src/main/scala/com/stratio/cassandra/lucene/index/RAMIndex.scala b/plugin/src/main/scala/com/stratio/cassandra/lucene/index/RAMIndex.scala
index 648d2062d..330caf954 100644
--- a/plugin/src/main/scala/com/stratio/cassandra/lucene/index/RAMIndex.scala
+++ b/plugin/src/main/scala/com/stratio/cassandra/lucene/index/RAMIndex.scala
@@ -21,8 +21,6 @@ import org.apache.lucene.index.{DirectoryReader, IndexWriter, IndexWriterConfig}
import org.apache.lucene.search.{IndexSearcher, Query, ScoreDoc, Sort}
import org.apache.lucene.store.RAMDirectory
-import scala.collection.JavaConversions._
-
/** Class wrapping a Lucene RAM directory and its readers, writers and searchers for NRT.
*
* @param analyzer the index writer analyzer
@@ -41,8 +39,7 @@ class RAMIndex(analyzer: Analyzer) {
indexWriter.addDocument(document)
}
- /**
- * Commits all changes to the index, waits for pending merges to complete, and closes all
+ /** Commits all pending changes to the index, waits for pending merges to complete, and closes all
* associated resources.
*/
def close() {
@@ -52,9 +49,9 @@ class RAMIndex(analyzer: Analyzer) {
/** Finds the top count hits for a query and a sort.
*
- * @param query the query to search for
- * @param sort the sort to be applied
- * @param count the max number of results to be collected
+ * @param query the query to search for
+ * @param sort the sort to be applied
+ * @param count the max number of results to be collected
* @param fields the names of the fields to be loaded
* @return the found documents
*/
@@ -62,7 +59,7 @@ class RAMIndex(analyzer: Analyzer) {
query: Query,
sort: Sort,
count: Integer,
- fields: Set[String]): Seq[(Document, ScoreDoc)] = {
+ fields: java.util.Set[String]): Seq[(Document, ScoreDoc)] = {
indexWriter.commit()
val reader = DirectoryReader.open(directory)
val searcher = new IndexSearcher(reader)
diff --git a/plugin/src/main/scala/com/stratio/cassandra/lucene/index/TokenLengthAnalyzer.scala b/plugin/src/main/scala/com/stratio/cassandra/lucene/index/TokenLengthAnalyzer.scala
index caca5aea3..f469147a3 100644
--- a/plugin/src/main/scala/com/stratio/cassandra/lucene/index/TokenLengthAnalyzer.scala
+++ b/plugin/src/main/scala/com/stratio/cassandra/lucene/index/TokenLengthAnalyzer.scala
@@ -16,13 +16,12 @@
package com.stratio.cassandra.lucene.index
import com.google.common.base.MoreObjects
-import com.stratio.cassandra.lucene.index.TokenLengthAnalyzer._
+import com.stratio.cassandra.lucene.util.Logging
import org.apache.lucene.analysis.Analyzer.TokenStreamComponents
import org.apache.lucene.analysis.tokenattributes.CharTermAttribute
import org.apache.lucene.analysis.util.FilteringTokenFilter
import org.apache.lucene.analysis.{Analyzer, AnalyzerWrapper}
import org.apache.lucene.index.IndexWriter
-import org.slf4j.LoggerFactory
/** [[AnalyzerWrapper]] that discards too large tokens.
*
@@ -30,7 +29,7 @@ import org.slf4j.LoggerFactory
* @author Andres de la Pena `adelapena@stratio.com`
*/
class TokenLengthAnalyzer(val analyzer: Analyzer)
- extends AnalyzerWrapper(analyzer.getReuseStrategy) {
+ extends AnalyzerWrapper(analyzer.getReuseStrategy) with Logging {
/** inheritdoc */
override protected def getWrappedAnalyzer(fieldName: String): Analyzer = analyzer
@@ -65,7 +64,3 @@ class TokenLengthAnalyzer(val analyzer: Analyzer)
}
}
-
-object TokenLengthAnalyzer {
- val logger = LoggerFactory.getLogger(classOf[TokenLengthAnalyzer])
-}
diff --git a/plugin/src/main/scala/com/stratio/cassandra/lucene/key/ClusteringMapper.scala b/plugin/src/main/scala/com/stratio/cassandra/lucene/mapping/ClusteringMapper.scala
similarity index 84%
rename from plugin/src/main/scala/com/stratio/cassandra/lucene/key/ClusteringMapper.scala
rename to plugin/src/main/scala/com/stratio/cassandra/lucene/mapping/ClusteringMapper.scala
index f1409ec52..cedf6fe92 100644
--- a/plugin/src/main/scala/com/stratio/cassandra/lucene/key/ClusteringMapper.scala
+++ b/plugin/src/main/scala/com/stratio/cassandra/lucene/mapping/ClusteringMapper.scala
@@ -13,14 +13,13 @@
* See the License for the specific language governing permissions and
* limitations under the License.
*/
-package com.stratio.cassandra.lucene.key
+package com.stratio.cassandra.lucene.mapping
import java.nio.ByteBuffer
import com.google.common.base.MoreObjects
import com.google.common.primitives.Longs
-import com.stratio.cassandra.lucene.column.{Column, Columns, ColumnsMapper}
-import com.stratio.cassandra.lucene.key.ClusteringMapper._
+import com.stratio.cassandra.lucene.mapping.ClusteringMapper._
import com.stratio.cassandra.lucene.util.ByteBufferUtils
import com.stratio.cassandra.lucene.util.ByteBufferUtils._
import org.apache.cassandra.config.CFMetaData
@@ -38,7 +37,7 @@ import org.apache.lucene.search.FieldComparator.TermValComparator
import org.apache.lucene.search._
import org.apache.lucene.util.{AttributeSource, BytesRef}
-import scala.collection.JavaConversions._
+import scala.collection.JavaConverters._
/** Class for several clustering key mappings between Cassandra and Lucene.
*
@@ -53,21 +52,7 @@ class ClusteringMapper(metadata: CFMetaData) {
/** A composite type composed by the types of the clustering key */
val clusteringType = CompositeType.getInstance(comparator.subtypes)
- /** Returns the columns contained in the specified [[Clustering]].
- *
- * @param clustering the clustering key
- * @return the columns
- */
- def columns(clustering: Clustering): Columns = {
- metadata.clusteringColumns.foldLeft(new Columns)(
- (columns, columnDefinition) => {
- val name = columnDefinition.name.toString
- val position = columnDefinition.position
- val value = clustering.get(position)
- val valueType = columnDefinition.cellValueType
- columns.add(Column.apply(name).withValue(ColumnsMapper.compose(value, valueType)))
- })
- }
+ val clusteringColumns = metadata.clusteringColumns.asScala
/** Returns a list of Lucene [[IndexableField]]s representing the specified primary key.
*
@@ -75,8 +60,7 @@ class ClusteringMapper(metadata: CFMetaData) {
* @param clustering the clustering key
* @return a indexable field
*/
- def indexableFields(key: DecoratedKey, clustering: Clustering): java.util.List[IndexableField] = {
- // TODO: return Seq
+ def indexableFields(key: DecoratedKey, clustering: Clustering): List[IndexableField] = {
// Build stored field for clustering key retrieval
val plainClustering = bytesRef(byteBuffer(clustering))
@@ -96,7 +80,7 @@ class ClusteringMapper(metadata: CFMetaData) {
* @return a byte buffer representing `clustering`
*/
def byteBuffer(clustering: Clustering): ByteBuffer = {
- clustering.getRawValues.foldLeft(clusteringType.builder)(_ add _).build()
+ (clusteringType.builder /: clustering.getRawValues) (_ add _) build()
}
/** Returns the [[String]] human-readable representation of the specified [[ClusteringPrefix]].
@@ -166,14 +150,13 @@ class ClusteringMapper(metadata: CFMetaData) {
* @return the Lucene query
*/
def query(key: DecoratedKey, filter: ClusteringIndexSliceFilter): Query = {
- filter.requestedSlices.foldLeft(new BooleanQuery.Builder)(
- (builder, slice) => {
- builder.add(query(key, slice), SHOULD)
- }).build
+ (new BooleanQuery.Builder /: filter.requestedSlices.asScala) (
+ (builder, slice) => builder.add(query(key, slice), SHOULD)).build()
}
}
+/** Companion object for [[ClusteringMapper]]. */
object ClusteringMapper {
/** The Lucene field name. */
@@ -247,23 +230,15 @@ object ClusteringMapper {
* @param mapper the primary key mapper to be used
*/
class ClusteringSort(mapper: ClusteringMapper) extends SortField(
- FIELD_NAME, new FieldComparatorSource {
- override def newComparator(
- field: String,
- hits: Int,
- sortPos: Int,
- reversed: Boolean): FieldComparator[_] = {
- new TermValComparator(hits, field, false) {
- override def compareValues(t1: BytesRef, t2: BytesRef): Int = {
- val comp = compareUnsigned(t1.bytes, 0, PREFIX_SIZE, t2.bytes, 0, PREFIX_SIZE)
- if (comp != 0) return comp
- val bb1 = ByteBuffer.wrap(t1.bytes, PREFIX_SIZE, t1.length - PREFIX_SIZE)
- val bb2 = ByteBuffer.wrap(t2.bytes, PREFIX_SIZE, t2.length - PREFIX_SIZE)
- val clustering1 = mapper.clustering(bb1)
- val clustering2 = mapper.clustering(bb2)
- mapper.comparator.compare(clustering1, clustering2)
- }
- }
+ FIELD_NAME, (field, hits, sortPos, reversed) => new TermValComparator(hits, field, false) {
+ override def compareValues(t1: BytesRef, t2: BytesRef): Int = {
+ val comp = compareUnsigned(t1.bytes, 0, PREFIX_SIZE, t2.bytes, 0, PREFIX_SIZE)
+ if (comp != 0) return comp
+ val bb1 = ByteBuffer.wrap(t1.bytes, PREFIX_SIZE, t1.length - PREFIX_SIZE)
+ val bb2 = ByteBuffer.wrap(t2.bytes, PREFIX_SIZE, t2.length - PREFIX_SIZE)
+ val clustering1 = mapper.clustering(bb1)
+ val clustering2 = mapper.clustering(bb2)
+ mapper.comparator.compare(clustering1, clustering2)
}
}) {
diff --git a/plugin/src/main/scala/com/stratio/cassandra/lucene/mapping/ColumnsMapper.scala b/plugin/src/main/scala/com/stratio/cassandra/lucene/mapping/ColumnsMapper.scala
new file mode 100644
index 000000000..bbf0aaffd
--- /dev/null
+++ b/plugin/src/main/scala/com/stratio/cassandra/lucene/mapping/ColumnsMapper.scala
@@ -0,0 +1,246 @@
+/*
+ * Copyright (C) 2014 Stratio (http://stratio.com)
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package com.stratio.cassandra.lucene.mapping
+
+import java.nio.ByteBuffer
+
+import com.stratio.cassandra.lucene.column.{Column, Columns}
+import com.stratio.cassandra.lucene.schema.Schema
+import org.apache.cassandra.config.{CFMetaData, ColumnDefinition}
+import org.apache.cassandra.db.marshal._
+import org.apache.cassandra.db.rows.{Cell, ComplexColumnData, Row}
+import org.apache.cassandra.db.{Clustering, DecoratedKey}
+import org.apache.cassandra.serializers.CollectionSerializer
+import org.apache.cassandra.transport.Server._
+import org.apache.cassandra.utils.ByteBufferUtil
+
+import scala.collection.JavaConverters._
+
+/** Maps Cassandra rows to [[Columns]].
+ *
+ * @param schema a schema
+ * @param metadata a table metadata
+ * @author Andres de la Pena `adelapena@stratio.com`
+ */
+class ColumnsMapper(schema: Schema, metadata: CFMetaData) {
+
+ val mappedCells: Set[String] = schema.mappedCells().asScala.toSet
+
+ val keyColumns: List[ColumnDefinition] = metadata.partitionKeyColumns.asScala
+ .filter(definition => mappedCells.contains(definition.name.toString)).toList
+
+ val clusteringColumns: List[ColumnDefinition] = metadata.clusteringColumns.asScala
+ .filter(definition => mappedCells.contains(definition.name.toString)).toList
+
+ /** Returns the mapped, not deleted at the specified time in seconds and not null [[Columns]]
+ * contained in the specified row.
+ *
+ * @param key the partition key
+ * @param row the row
+ * @param now now in seconds
+ */
+ def columns(key: DecoratedKey, row: Row, now: Int): Columns = {
+ columns(key) ++ columns(row.clustering()) ++ columns(row, now)
+ }
+
+ /** Returns the mapped [[Columns]] contained in the specified partition key. */
+ private[mapping] def columns(key: DecoratedKey): Columns = {
+ val components = metadata.getKeyValidator match {
+ case c: CompositeType => c.split(key.getKey)
+ case _ => Array[ByteBuffer](key.getKey)
+ }
+ (keyColumns :\ Columns()) ((definition, columns) => {
+ val name = definition.name.toString
+ val value = components(definition.position)
+ val valueType = definition.cellValueType
+ Column(name).withValue(value, valueType) :: columns
+ })
+ }
+
+ /** Returns the mapped [[Columns]] contained in the specified clustering key. */
+ private[mapping] def columns(clustering: Clustering): Columns = {
+ (clusteringColumns :\ Columns()) ((definition, columns) => {
+ val name = definition.name.toString
+ val position = definition.position
+ val value = clustering.get(position)
+ val valueType = definition.cellValueType
+ Column(name).withValue(value, valueType) :: columns
+ })
+ }
+
+ /** Returns the mapped, not deleted at the specified time in seconds and not null [[Columns]]
+ * contained in the regular columns of the specified row.
+ *
+ * @param row a row
+ * @param now now in seconds
+ */
+ private[mapping] def columns(row: Row, now: Int): Columns = {
+ (row.columns.asScala :\ Columns()) ((definition, columns) =>
+ if (definition.isComplex) {
+ this.columns(row.getComplexColumnData(definition), now) ++ columns
+ } else {
+ this.columns(row.getCell(definition), now) ++ columns
+ }
+ )
+ }
+
+ /** Returns the mapped, not deleted at the specified time in seconds and not null [[Columns]]
+ * contained in the specified complex column data.
+ *
+ * @param complexColumnData a complex column data
+ * @param now now in seconds
+ */
+ private[mapping] def columns(complexColumnData: ComplexColumnData, now: Int): Columns = {
+ (complexColumnData.asScala :\ Columns()) ((cell, columns) => {
+ this.columns(cell, now) ++ columns
+ })
+ }
+
+ /** Returns the mapped, not deleted at the specified time in seconds and not null [[Columns]]
+ * contained in the specified cell.
+ *
+ * @param cell a cell
+ * @param now now in seconds
+ */
+ private[mapping] def columns(cell: Cell, now: Int): Columns =
+ if (cell.isTombstone
+ || cell.localDeletionTime <= now
+ || !mappedCells.contains(cell.column.name.toString))
+ Columns.empty
+ else ColumnsMapper.columns(cell)
+
+}
+
+/** Companion object for [[ColumnsMapper]]. */
+object ColumnsMapper {
+
+ /** Returns [[Columns]] contained in the specified cell.
+ *
+ * @param cell a cell
+ */
+ private[mapping] def columns(cell: Cell): Columns = {
+ if (cell == null) return Columns()
+ val name = cell.column.name.toString
+ val comparator = cell.column.`type`
+ val value = cell.value
+ val column = Column(name)
+ comparator match {
+ case setType: SetType[_] if !setType.isFrozenCollection =>
+ val itemComparator = setType.nameComparator
+ val itemValue = cell.path.get(0)
+ columns(column, itemComparator, itemValue)
+ case listType: ListType[_] if !listType.isFrozenCollection =>
+ val itemComparator = listType.valueComparator
+ columns(column, itemComparator, value)
+ case mapType: MapType[_, _] if !mapType.isFrozenCollection =>
+ val itemComparator = mapType.valueComparator
+ val keyValue = cell.path.get(0)
+ val keyComparator = mapType.nameComparator
+ val nameSuffix = keyComparator.compose(keyValue).toString
+ columns(column.withMapName(nameSuffix), itemComparator, value)
+ case userType: UserType =>
+ val cellPath = cell.path
+ if (cellPath == null) {
+ columns(column, comparator, value)
+ } else {
+ val position = ByteBufferUtil.toShort(cellPath.get(0))
+ val name = userType.fieldNameAsString(position)
+ val typo = userType.`type`(position)
+ columns(column.withUDTName(name), typo, value)
+ }
+ case _ =>
+ columns(column, comparator, value)
+ }
+ }
+
+ private[mapping] def columns(column: Column, serializer: AbstractType[_], value: ByteBuffer)
+ : Columns = serializer match {
+ case t: SetType[_] => columns(column, t, value)
+ case t: ListType[_] => columns(column, t, value)
+ case t: MapType[_, _] => columns(column, t, value)
+ case t: UserType => columns(column, t, value)
+ case t: TupleType => columns(column, t, value)
+ case _ => Columns(column.withValue(value, serializer))
+ }
+
+ private[mapping] def columns(column: Column, set: SetType[_], value: ByteBuffer): Columns = {
+ val nameType = set.nameComparator
+ val bb = ByteBufferUtil.clone(value) // CollectionSerializer read functions are impure
+ ((0 until frozenCollectionSize(bb)) :\ Columns()) (
+ (_, columns) => {
+ val itemValue = frozenCollectionValue(bb)
+ this.columns(column, nameType, itemValue) ++ columns
+ })
+ }
+
+ private[mapping] def columns(column: Column, list: ListType[_], value: ByteBuffer): Columns = {
+ val valueType = list.valueComparator
+ val bb = ByteBufferUtil.clone(value) // CollectionSerializer read functions are impure
+ ((0 until frozenCollectionSize(bb)) :\ Columns()) ((_, columns) => {
+ val itemValue = frozenCollectionValue(bb)
+ this.columns(column, valueType, itemValue) ++ columns
+ })
+ }
+
+ private[mapping] def columns(column: Column, map: MapType[_, _], value: ByteBuffer): Columns = {
+ val itemKeysType = map.nameComparator
+ val itemValuesType = map.valueComparator
+ val bb = ByteBufferUtil.clone(value) // CollectionSerializer read functions are impure
+ ((0 until frozenCollectionSize(bb)) :\ Columns()) ((_, columns) => {
+ val itemKey = frozenCollectionValue(bb)
+ val itemValue = frozenCollectionValue(bb)
+ val itemName = itemKeysType.compose(itemKey).toString
+ this.columns(column.withMapName(itemName), itemValuesType, itemValue) ++ columns
+ })
+ }
+
+ private[mapping] def columns(column: Column, udt: UserType, value: ByteBuffer): Columns = {
+ val itemValues = udt.split(value)
+ ((0 until udt.fieldNames.size) :\ Columns()) ((i, columns) => {
+ val itemValue = itemValues(i)
+ if (itemValue == null) {
+ columns
+ } else {
+ val itemName = udt.fieldNameAsString(i)
+ val itemType = udt.fieldType(i)
+ val itemColumn = column.withUDTName(itemName)
+ this.columns(itemColumn, itemType, itemValue) ++ columns
+ }
+ })
+ }
+
+ private[mapping] def columns(column: Column, tuple: TupleType, value: ByteBuffer): Columns = {
+ val itemValues = tuple.split(value)
+ ((0 until tuple.size) :\ Columns()) ((i, columns) => {
+ val itemValue = itemValues(i)
+ if (itemValue == null) {
+ columns
+ } else {
+ val itemName = i.toString
+ val itemType = tuple.`type`(i)
+ val itemColumn = column.withUDTName(itemName)
+ this.columns(itemColumn, itemType, itemValue) ++ columns
+ }
+ })
+ }
+
+ private[this] def frozenCollectionSize(bb: ByteBuffer): Int =
+ CollectionSerializer.readCollectionSize(bb, CURRENT_VERSION)
+
+ private[this] def frozenCollectionValue(bb: ByteBuffer): ByteBuffer =
+ CollectionSerializer.readValue(bb, CURRENT_VERSION)
+
+}
\ No newline at end of file
diff --git a/plugin/src/main/scala/com/stratio/cassandra/lucene/mapping/ExpressionMapper.scala b/plugin/src/main/scala/com/stratio/cassandra/lucene/mapping/ExpressionMapper.scala
new file mode 100644
index 000000000..1d1104253
--- /dev/null
+++ b/plugin/src/main/scala/com/stratio/cassandra/lucene/mapping/ExpressionMapper.scala
@@ -0,0 +1,155 @@
+/*
+ * Copyright (C) 2014 Stratio (http://stratio.com)
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package com.stratio.cassandra.lucene.mapping
+
+import com.stratio.cassandra.lucene.IndexException
+import com.stratio.cassandra.lucene.mapping.ExpressionMapper.parse
+import com.stratio.cassandra.lucene.search.{Search, SearchBuilder}
+import org.apache.cassandra.config.{CFMetaData, ColumnDefinition}
+import org.apache.cassandra.cql3.Operator
+import org.apache.cassandra.cql3.statements.IndexTarget._
+import org.apache.cassandra.db.ReadCommand
+import org.apache.cassandra.db.filter.RowFilter
+import org.apache.cassandra.db.filter.RowFilter.{CustomExpression, Expression}
+import org.apache.cassandra.db.marshal.UTF8Type
+import org.apache.cassandra.db.rows.{BTreeRow, BufferCell, Row}
+import org.apache.cassandra.schema.IndexMetadata
+import org.apache.commons.lang3.StringUtils
+import org.apache.lucene.search.ScoreDoc
+
+import scala.collection.JavaConverters._
+
+/** Class for several [[Expression]] mappings between Cassandra and Lucene.
+ *
+ * @param tableMetadata the indexed table metadata
+ * @param indexMetadata the index metadata
+ * @author Andres de la Pena `adelapena@stratio.com`
+ */
+case class ExpressionMapper(tableMetadata: CFMetaData, indexMetadata: IndexMetadata) {
+
+ val name = indexMetadata.name
+ val column = Option(indexMetadata.options.get(TARGET_OPTION_NAME)).filterNot(StringUtils.isBlank)
+ val columns = tableMetadata.allColumns.asScala.toSet
+ val columnDefinition = column.flatMap(name => columns.find(_.name.toString == name))
+
+ /** Returns the first [[Search]] contained in the specified read command.
+ *
+ * @param command a command
+ * @return the `string` JSON search represented by `command`
+ * @throws IndexException if there is no such search
+ */
+ def search(command: ReadCommand): Search = parse(json(command))
+
+ /** Returns the [[Search]] represented by the specified CQL expression.
+ *
+ * @param expression an expression
+ * @return the `string` JSON search represented by `expression`
+ * @throws IndexException if there is no such search
+ */
+ def search(expression: Expression): Search = parse(json(expression))
+
+ /** Returns the first `string` JSON search contained in the specified read command.
+ *
+ * @param command a command
+ * @return the `string` JSON search represented by `command`
+ * @throws IndexException if there is no such expression
+ */
+ def json(command: ReadCommand): String = {
+ command.rowFilter.getExpressions.asScala.collect {
+ case e: CustomExpression if name == e.getTargetIndex.name => e.getValue
+ case e if supports(e) => e.getIndexValue
+ }.map(UTF8Type.instance.compose).head
+ }
+
+ /** Returns the `string` JSON search represented by the specified CQL expression.
+ *
+ * @param expression an expression
+ * @return the `string` JSON search represented by `expression`
+ * @throws IndexException if there is no such expression
+ */
+ def json(expression: Expression): String = {
+ UTF8Type.instance.compose(
+ expression match {
+ case e: CustomExpression if name == e.getTargetIndex.name => e.getValue
+ case e if supports(e) => e.getIndexValue
+ case _ => throw new IndexException(s"Unsupported expression $expression")
+ })
+ }
+
+ /** Returns whether the specified expression is targeted to this index.
+ *
+ * @param expression a CQL query expression
+ * @return `true` if `expression` is targeted to this index, `false` otherwise
+ */
+ def supports(expression: RowFilter.Expression): Boolean = {
+ supports(expression.column, expression.operator)
+ }
+
+ /** Returns whether a CQL expression with the specified column definition and operator is targeted
+ * to this index.
+ *
+ * @param definition the expression column definition
+ * @param operator the expression operator
+ * @return `true` if the expression is targeted to this index, `false` otherwise
+ */
+ def supports(definition: ColumnDefinition, operator: Operator): Boolean = {
+ operator == Operator.EQ && column.contains(definition.name.toString)
+ }
+
+ /** Returns a copy of the specified [[RowFilter]] without any Lucene expressions.
+ *
+ * @param filter a row filter
+ * @return a copy of `filter` without Lucene expressions
+ */
+ def postIndexQueryFilter(filter: RowFilter): RowFilter = {
+ if (column.isEmpty) return filter
+ (filter /: filter.asScala) ((f, e) => if (supports(e)) f.without(e) else f)
+ }
+
+ /** Returns a new row decorating the specified row with the specified Lucene score.
+ *
+ * @param row the row to be decorated
+ * @param score a Lucene search score
+ * @param nowInSec the operation time in seconds
+ * @return a new decorated row
+ */
+ def decorate(row: Row, score: ScoreDoc, nowInSec: Int): Row = {
+
+ // Skip if there is no base column or score
+ if (columnDefinition.isEmpty) return row
+
+ // Copy row
+ val builder = BTreeRow.unsortedBuilder(nowInSec)
+ builder.newRow(row.clustering())
+ builder.addRowDeletion(row.deletion)
+ builder.addPrimaryKeyLivenessInfo(row.primaryKeyLivenessInfo)
+ row.cells.forEach(builder addCell _)
+
+ // Add score cell
+ val timestamp = row.primaryKeyLivenessInfo.timestamp
+ val scoreCellValue = UTF8Type.instance.decompose(score.score.toString)
+ builder.addCell(BufferCell.live(columnDefinition.get, timestamp, scoreCellValue))
+
+ builder.build
+ }
+}
+
+/** Companion object for [[ExpressionMapper]]. */
+object ExpressionMapper {
+
+ def parse(json: String): Search = SearchBuilder.fromJson(json).build
+
+}
\ No newline at end of file
diff --git a/plugin/src/main/scala/com/stratio/cassandra/lucene/key/KeyMapper.scala b/plugin/src/main/scala/com/stratio/cassandra/lucene/mapping/KeyMapper.scala
similarity index 88%
rename from plugin/src/main/scala/com/stratio/cassandra/lucene/key/KeyMapper.scala
rename to plugin/src/main/scala/com/stratio/cassandra/lucene/mapping/KeyMapper.scala
index af671f710..c7451f604 100644
--- a/plugin/src/main/scala/com/stratio/cassandra/lucene/key/KeyMapper.scala
+++ b/plugin/src/main/scala/com/stratio/cassandra/lucene/mapping/KeyMapper.scala
@@ -13,11 +13,11 @@
* See the License for the specific language governing permissions and
* limitations under the License.
*/
-package com.stratio.cassandra.lucene.key
+package com.stratio.cassandra.lucene.mapping
import java.nio.ByteBuffer
-import com.stratio.cassandra.lucene.key.KeyMapper.FIELD_NAME
+import com.stratio.cassandra.lucene.mapping.KeyMapper.FIELD_NAME
import com.stratio.cassandra.lucene.util.ByteBufferUtils
import org.apache.cassandra.config.CFMetaData
import org.apache.cassandra.db.filter.ClusteringIndexNamesFilter
@@ -29,7 +29,7 @@ import org.apache.lucene.search.BooleanClause.Occur.SHOULD
import org.apache.lucene.search.{BooleanQuery, Query, TermQuery}
import org.apache.lucene.util.BytesRef
-import scala.collection.JavaConversions._
+import scala.collection.JavaConverters._
/** Class for several primary key mappings between Cassandra and Lucene.
*
@@ -47,14 +47,13 @@ class KeyMapper(metadata: CFMetaData) {
/** The type of the primary key, which is composed by token and clustering key types. */
val keyType = CompositeType.getInstance(metadata.getKeyValidator, clusteringType)
- /**
- * Returns a [[ByteBuffer]] representing the specified clustering key
+ /** Returns a [[ByteBuffer]] representing the specified clustering key
*
* @param clustering the clustering key
* @return the byte buffer representing `clustering`
*/
private def byteBuffer(clustering: Clustering): ByteBuffer = {
- clustering.getRawValues.foldLeft(clusteringType.builder)(_ add _).build
+ (clusteringType.builder /: clustering.getRawValues) (_ add _) build()
}
/** Returns the Lucene [[IndexableField]] representing the specified primary key.
@@ -98,12 +97,13 @@ class KeyMapper(metadata: CFMetaData) {
* @return the Lucene query
*/
def query(key: DecoratedKey, filter: ClusteringIndexNamesFilter): Query = {
- filter.requestedRows.foldLeft(new BooleanQuery.Builder)(
- (b, c) => b.add(query(key, c), SHOULD)).build
+ (new BooleanQuery.Builder /: filter.requestedRows.asScala) (
+ (builder, clustering) => builder.add(query(key, clustering), SHOULD)) build()
}
}
+/** Companion object for [[KeyMapper]]. */
object KeyMapper {
/** The Lucene field name. */
diff --git a/plugin/src/main/scala/com/stratio/cassandra/lucene/key/PartitionMapper.scala b/plugin/src/main/scala/com/stratio/cassandra/lucene/mapping/PartitionMapper.scala
similarity index 76%
rename from plugin/src/main/scala/com/stratio/cassandra/lucene/key/PartitionMapper.scala
rename to plugin/src/main/scala/com/stratio/cassandra/lucene/mapping/PartitionMapper.scala
index 2c85a7deb..e0697da19 100644
--- a/plugin/src/main/scala/com/stratio/cassandra/lucene/key/PartitionMapper.scala
+++ b/plugin/src/main/scala/com/stratio/cassandra/lucene/mapping/PartitionMapper.scala
@@ -13,23 +13,21 @@
* See the License for the specific language governing permissions and
* limitations under the License.
*/
-package com.stratio.cassandra.lucene.key
+package com.stratio.cassandra.lucene.mapping
import java.nio.ByteBuffer
-import com.stratio.cassandra.lucene.column.{Column, Columns, ColumnsMapper}
-import com.stratio.cassandra.lucene.key.PartitionMapper._
+import com.stratio.cassandra.lucene.mapping.PartitionMapper._
import com.stratio.cassandra.lucene.util.ByteBufferUtils
import org.apache.cassandra.config.{CFMetaData, DatabaseDescriptor}
import org.apache.cassandra.db.DecoratedKey
-import org.apache.cassandra.db.marshal.CompositeType
import org.apache.lucene.document.{Document, Field, FieldType}
import org.apache.lucene.index.{DocValuesType, IndexOptions, IndexableField, Term}
import org.apache.lucene.search.FieldComparator.TermValComparator
import org.apache.lucene.search._
import org.apache.lucene.util.BytesRef
-import scala.collection.JavaConversions._
+import scala.collection.JavaConverters._
/** Class for several partition key mappings between Cassandra and Lucene.
*
@@ -40,27 +38,7 @@ class PartitionMapper(metadata: CFMetaData) {
val partitioner = DatabaseDescriptor.getPartitioner
val validator = metadata.getKeyValidator
-
- /** Returns the columns contained in the partition key of the specified row.
- *
- * @param key the partition key
- * @return the columns
- */
- def columns(key: DecoratedKey): Columns = {
-
- val components = validator match {
- case c: CompositeType => c.split(key.getKey)
- case _ => Array[ByteBuffer](key.getKey)
- }
-
- metadata.partitionKeyColumns.foldLeft(new Columns)(
- (columns, cd) => {
- val name = cd.name.toString
- val value = components(cd.position)
- val valueType = cd.cellValueType
- columns.add(Column(name).withValue(ColumnsMapper.compose(value, valueType)))
- })
- }
+ val partitionKeyColumns = metadata.partitionKeyColumns.asScala
/** Returns the Lucene indexable field representing to the specified partition key.
*
@@ -131,6 +109,7 @@ class PartitionMapper(metadata: CFMetaData) {
}
+/** Companion object for [[PartitionMapper]]. */
object PartitionMapper {
/** The Lucene field name. */
@@ -152,19 +131,11 @@ object PartitionMapper {
* @author Andres de la Pena `adelapena@stratio.com`
*/
class PartitionSort(mapper: PartitionMapper) extends SortField(
- FIELD_NAME, new FieldComparatorSource {
- override def newComparator(
- field: String,
- hits: Int,
- sortPos: Int,
- reversed: Boolean): FieldComparator[_] = {
- new TermValComparator(hits, field, false) {
- override def compareValues(t1: BytesRef, t2: BytesRef): Int = {
- val bb1 = ByteBufferUtils.byteBuffer(t1)
- val bb2 = ByteBufferUtils.byteBuffer(t2)
- mapper.validator.compare(bb1, bb2)
- }
- }
+ FIELD_NAME, (field, hits, sortPos, reversed) => new TermValComparator(hits, field, false) {
+ override def compareValues(t1: BytesRef, t2: BytesRef): Int = {
+ val bb1 = ByteBufferUtils.byteBuffer(t1)
+ val bb2 = ByteBufferUtils.byteBuffer(t2)
+ mapper.validator.compare(bb1, bb2)
}
}) {
diff --git a/plugin/src/main/scala/com/stratio/cassandra/lucene/key/TokenMapper.scala b/plugin/src/main/scala/com/stratio/cassandra/lucene/mapping/TokenMapper.scala
similarity index 95%
rename from plugin/src/main/scala/com/stratio/cassandra/lucene/key/TokenMapper.scala
rename to plugin/src/main/scala/com/stratio/cassandra/lucene/mapping/TokenMapper.scala
index a57913391..b1de58494 100644
--- a/plugin/src/main/scala/com/stratio/cassandra/lucene/key/TokenMapper.scala
+++ b/plugin/src/main/scala/com/stratio/cassandra/lucene/mapping/TokenMapper.scala
@@ -13,10 +13,10 @@
* See the License for the specific language governing permissions and
* limitations under the License.
*/
-package com.stratio.cassandra.lucene.key
+package com.stratio.cassandra.lucene.mapping
import com.stratio.cassandra.lucene.IndexException
-import com.stratio.cassandra.lucene.key.TokenMapper._
+import com.stratio.cassandra.lucene.mapping.TokenMapper._
import org.apache.cassandra.config.DatabaseDescriptor
import org.apache.cassandra.db.DecoratedKey
import org.apache.cassandra.dht.{Murmur3Partitioner, Token}
@@ -56,8 +56,8 @@ class TokenMapper {
/** Returns a query to find the documents containing a token inside the specified token range.
*
- * @param lower the lower token
- * @param upper the upper token
+ * @param lower the lower token
+ * @param upper the upper token
* @param includeLower if the lower token should be included
* @param includeUpper if the upper token should be included
* @return the query to find the documents containing a token inside the range
@@ -94,6 +94,7 @@ class TokenMapper {
}
+/** Companion object for [[TokenMapper]]. */
object TokenMapper {
/** The Lucene field name */
diff --git a/plugin/src/main/scala/com/stratio/cassandra/lucene/partitioning/Partitioner.scala b/plugin/src/main/scala/com/stratio/cassandra/lucene/partitioning/Partitioner.scala
new file mode 100644
index 000000000..cc060b30f
--- /dev/null
+++ b/plugin/src/main/scala/com/stratio/cassandra/lucene/partitioning/Partitioner.scala
@@ -0,0 +1,74 @@
+/*
+ * Copyright (C) 2014 Stratio (http://stratio.com)
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package com.stratio.cassandra.lucene.partitioning
+
+import com.fasterxml.jackson.annotation.{JsonSubTypes, JsonTypeInfo}
+import com.stratio.cassandra.lucene.common.JsonSerializer
+import org.apache.cassandra.db.{DecoratedKey, ReadCommand}
+
+/** Class defining an index partitioning strategy.
+ *
+ * Index partitioning is useful to speed up some searches to the detriment of others, depending on
+ * the implementation.
+ *
+ * It is also useful to overcome the Lucene's hard limit of 2147483519 documents per index.
+ *
+ * @author Andres de la Pena `adelapena@stratio.com`
+ */
+@JsonTypeInfo(
+ use = JsonTypeInfo.Id.NAME,
+ include = JsonTypeInfo.As.PROPERTY,
+ property = "type",
+ defaultImpl = classOf[PartitionerOnNone])
+@JsonSubTypes(Array(
+ new JsonSubTypes.Type(value = classOf[PartitionerOnNone], name = "none"),
+ new JsonSubTypes.Type(value = classOf[PartitionerOnToken], name = "token")))
+trait Partitioner {
+
+ /** Returns the number of partitions. */
+ def numPartitions: Int
+
+ /** Returns all the partitions. */
+ def allPartitions: List[Int] = (0 until numPartitions).toList
+
+ /** Returns the partition for the specified key.
+ *
+ * @param key a partition key to be routed to a partition
+ * @return the partition owning `key`
+ */
+ def partition(key: DecoratedKey): Int
+
+ /** Returns the involved partitions for the specified read command.
+ *
+ * @param command a read command to be routed to some partitions
+ * @return the partitions containing all the data required to satisfy `command`
+ */
+ def partitions(command: ReadCommand): List[Int]
+
+}
+
+/** Companion object for [[Partitioner]]. */
+object Partitioner {
+
+ /** The [[Partitioner]] represented by the specified JSON string.
+ *
+ * @param json a JSON string representing a [[Partitioner]]
+ * @return the partitioner represented by `json`
+ */
+ def fromJson(json: String): Partitioner =
+ JsonSerializer.fromString(json, classOf[Partitioner])
+
+}
diff --git a/plugin/src/main/scala/com/stratio/cassandra/lucene/partitioning/PartitionerOnNone.scala b/plugin/src/main/scala/com/stratio/cassandra/lucene/partitioning/PartitionerOnNone.scala
new file mode 100644
index 000000000..1a54d063d
--- /dev/null
+++ b/plugin/src/main/scala/com/stratio/cassandra/lucene/partitioning/PartitionerOnNone.scala
@@ -0,0 +1,35 @@
+/*
+ * Copyright (C) 2014 Stratio (http://stratio.com)
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package com.stratio.cassandra.lucene.partitioning
+
+import org.apache.cassandra.db.{DecoratedKey, ReadCommand}
+
+/** [[Partitioner]] with no action, equivalent to not partitioning the index at all.
+ *
+ * @author Andres de la Pena `adelapena@stratio.com`
+ */
+case class PartitionerOnNone() extends Partitioner {
+
+ /** @inheritdoc */
+ override def numPartitions: Int = 1
+
+ /** @inheritdoc */
+ override def partition(key: DecoratedKey): Int = 0
+
+ /** @inheritdoc */
+ override def partitions(command: ReadCommand): List[Int] = allPartitions
+
+}
diff --git a/plugin/src/main/scala/com/stratio/cassandra/lucene/partitioning/PartitionerOnToken.scala b/plugin/src/main/scala/com/stratio/cassandra/lucene/partitioning/PartitionerOnToken.scala
new file mode 100644
index 000000000..755fe4b88
--- /dev/null
+++ b/plugin/src/main/scala/com/stratio/cassandra/lucene/partitioning/PartitionerOnToken.scala
@@ -0,0 +1,57 @@
+/*
+ * Copyright (C) 2014 Stratio (http://stratio.com)
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package com.stratio.cassandra.lucene.partitioning
+
+import com.fasterxml.jackson.annotation.JsonProperty
+import com.stratio.cassandra.lucene.IndexException
+import org.apache.cassandra.db._
+import org.apache.cassandra.dht.Token
+
+/** [[Partitioner]] based on the partition key token.
+ *
+ * Partitioning on token guarantees a good load balancing between partitions while speeding up
+ * partition-directed searches to the detriment of token range searches.
+ *
+ * @param partitions the number of partitions
+ * @author Andres de la Pena `adelapena@stratio.com`
+ */
+case class PartitionerOnToken(@JsonProperty("partitions") partitions: Int) extends Partitioner {
+
+ if (partitions <= 0) throw new IndexException(
+ s"The number of partitions should be strictly positive but found $partitions")
+
+ /** Returns the partition owning the specified token. */
+ private[this] def partition(token: Token): Int =
+ (Math.abs(token.getTokenValue.asInstanceOf[Long]) % partitions).toInt
+
+ /** @inheritdoc */
+ override def numPartitions: Int = partitions
+
+ /** @inheritdoc */
+ override def partition(key: DecoratedKey): Int = partition(key.getToken)
+
+ /** @inheritdoc */
+ override def partitions(command: ReadCommand): List[Int] = command match {
+ case c: SinglePartitionReadCommand => List(partition(c.partitionKey))
+ case c: PartitionRangeReadCommand =>
+ val range = c.dataRange()
+ val start = range.startKey().getToken
+ val stop = range.stopKey().getToken
+ if (start.equals(stop) && !start.isMinimum) List(partition(start)) else allPartitions
+ case _ => throw new IndexException(s"Unsupported read command type: ${command.getClass}")
+ }
+
+}
diff --git a/plugin/src/main/scala/com/stratio/cassandra/lucene/util/ByteBufferUtils.scala b/plugin/src/main/scala/com/stratio/cassandra/lucene/util/ByteBufferUtils.scala
new file mode 100644
index 000000000..14f14f1bd
--- /dev/null
+++ b/plugin/src/main/scala/com/stratio/cassandra/lucene/util/ByteBufferUtils.scala
@@ -0,0 +1,159 @@
+/*
+ * Copyright (C) 2014 Stratio (http://stratio.com)
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package com.stratio.cassandra.lucene.util
+
+import java.nio.ByteBuffer
+
+import org.apache.cassandra.db.marshal.{AbstractType, CompositeType}
+import org.apache.cassandra.utils.ByteBufferUtil.{readShortLength, writeShortLength}
+import org.apache.cassandra.utils.{ByteBufferUtil, Hex}
+import org.apache.lucene.util.BytesRef
+
+import scala.annotation.varargs
+
+
+/** Utility object with some [[ByteBuffer]] transformation utilities.
+  *
+  * @author Andres de la Pena `adelapena@stratio.com`
+  */
+object ByteBufferUtils {
+
+  /** Returns the specified [[ByteBuffer]] as a byte array.
+    *
+    * @param bb a [[ByteBuffer]] to be converted to a byte array
+    * @return the byte array representation of `bb`
+    */
+  def asArray(bb: ByteBuffer): Array[Byte] = {
+    // Work on a duplicate so the caller's position/limit are not disturbed
+    val duplicate = bb.duplicate
+    val bytes = new Array[Byte](duplicate.remaining)
+    duplicate.get(bytes)
+    bytes
+  }
+
+  /** Returns `true` if the specified [[ByteBuffer]] is empty, `false` otherwise.
+    *
+    * @param byteBuffer the byte buffer
+    * @return `true` if the specified [[ByteBuffer]] is empty, `false` otherwise.
+    */
+  def isEmpty(byteBuffer: ByteBuffer): Boolean = byteBuffer.remaining == 0
+
+  /** Returns the [[ByteBuffer]]s contained in the specified byte buffer according to the specified
+    * type.
+    *
+    * @param byteBuffer the byte buffer to be split
+    * @param type the type of the byte buffer
+    * @return the byte buffers contained in `byteBuffer` according to `type`
+    */
+  def split(byteBuffer: ByteBuffer, `type`: AbstractType[_]): Array[ByteBuffer] = `type` match {
+    case c: CompositeType => c.split(byteBuffer)
+    case _ => Array[ByteBuffer](byteBuffer)
+  }
+
+  /** Returns the hexadecimal [[String]] representation of the specified [[ByteBuffer]].
+    *
+    * @param byteBuffer a [[ByteBuffer]]
+    * @return the hexadecimal `String` representation of `byteBuffer`, or `null` if it is `null`
+    */
+  def toHex(byteBuffer: ByteBuffer): String = {
+    if (byteBuffer == null) null else ByteBufferUtil.bytesToHex(byteBuffer)
+  }
+
+  /** Returns the hexadecimal [[String]] representation of the specified [[BytesRef]].
+    *
+    * @param bytesRef a [[BytesRef]]
+    * @return the hexadecimal `String` representation of `bytesRef`
+    */
+  def toHex(bytesRef: BytesRef): String = ByteBufferUtil.bytesToHex(byteBuffer(bytesRef))
+
+  /** Returns the hexadecimal [[String]] representation of the specified [[Byte]]s.
+    *
+    * @param bytes the bytes
+    * @return the hexadecimal `String` representation of `bytes`
+    */
+  def toHex(bytes: Byte*): String = toHex(bytes.toArray)
+
+  /** Returns the hexadecimal [[String]] representation of the specified [[Byte]] array.
+    *
+    * @param bytes the byte array
+    * @return the hexadecimal `String` representation of `bytes`
+    */
+  def toHex(bytes: Array[Byte]): String = Hex.bytesToHex(bytes, 0, bytes.length)
+
+  /** Returns the hexadecimal [[String]] representation of the specified [[Byte]].
+    *
+    * @param b the byte
+    * @return the hexadecimal `String` representation of `b`
+    */
+  def toHex(b: Byte): String = Hex.bytesToHex(b)
+
+  /** Returns the [[BytesRef]] representation of the specified [[ByteBuffer]].
+    *
+    * @param bb the byte buffer
+    * @return the [[BytesRef]] representation of the byte buffer
+    */
+  def bytesRef(bb: ByteBuffer): BytesRef = new BytesRef(asArray(bb))
+
+  /** Returns the [[ByteBuffer]] representation of the specified [[BytesRef]].
+    *
+    * @param bytesRef the [[BytesRef]]
+    * @return the [[ByteBuffer]] representation of `bytesRef`
+    */
+  def byteBuffer(bytesRef: BytesRef): ByteBuffer = {
+    // ByteBuffer.wrap takes (array, offset, length). Passing `offset + length` as the
+    // length would set the buffer's limit past the referenced slice, exposing trailing
+    // garbage (or throwing IndexOutOfBoundsException) whenever `bytesRef.offset > 0`.
+    ByteBuffer.wrap(bytesRef.bytes, bytesRef.offset, bytesRef.length)
+  }
+
+  /** Returns the [[ByteBuffer]] representation of the specified hex [[String]].
+    *
+    * @param hex a hexadecimal representation of a byte array
+    * @return the [[ByteBuffer]] representation of `hex`, or `null` if it is `null`
+    */
+  def byteBuffer(hex: String): ByteBuffer = {
+    if (hex == null) null else ByteBufferUtil.hexToBytes(hex)
+  }
+
+  /** Returns a [[ByteBuffer]] representing the specified array of [[ByteBuffer]]s.
+    *
+    * The layout is a 2-byte component count, followed by each component as a 2-byte
+    * length plus its raw bytes, so it can be reversed by [[decompose()]].
+    *
+    * @param bbs an array of byte buffers
+    * @return a [[ByteBuffer]] representing `bbs`
+    */
+  @varargs
+  def compose(bbs: ByteBuffer*): ByteBuffer = {
+    // 2 bytes for the component count plus, per component, 2 length bytes and its payload
+    val totalLength = (2 /: bbs.map(_ remaining)) (_ + _ + 2)
+    val out = ByteBuffer.allocate(totalLength)
+    writeShortLength(out, bbs.length)
+    for (bb <- bbs) {
+      writeShortLength(out, bb.remaining)
+      out.put(bb.duplicate)
+    }
+    out.flip
+    out
+  }
+
+  /** Returns the components of the specified [[ByteBuffer]] created with [[compose()]].
+    *
+    * @param bb a byte buffer created with [[compose()]]
+    * @return the components of `bb`
+    */
+  def decompose(bb: ByteBuffer): Array[ByteBuffer] = {
+    val duplicate = bb.duplicate
+    val numComponents = readShortLength(duplicate)
+    (1 to numComponents).map(
+      i => {
+        val componentLength = readShortLength(duplicate)
+        ByteBufferUtil.readBytes(duplicate, componentLength)
+      }).toArray
+  }
+}
diff --git a/plugin/src/main/scala/com/stratio/cassandra/lucene/util/Logging.scala b/plugin/src/main/scala/com/stratio/cassandra/lucene/util/Logging.scala
new file mode 100644
index 000000000..1edef770b
--- /dev/null
+++ b/plugin/src/main/scala/com/stratio/cassandra/lucene/util/Logging.scala
@@ -0,0 +1,25 @@
+/*
+ * Copyright (C) 2014 Stratio (http://stratio.com)
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package com.stratio.cassandra.lucene.util
+
+import com.typesafe.scalalogging.{Logger, StrictLogging}
+
+/** Mixin exposing a scala-logging [[Logger]] under the name `logger`.
+  *
+  * @author Andres de la Pena `adelapena@stratio.com`
+  */
+trait Logging extends StrictLogging
diff --git a/plugin/src/main/scala/com/stratio/cassandra/lucene/util/SchemaValidator.scala b/plugin/src/main/scala/com/stratio/cassandra/lucene/util/SchemaValidator.scala
new file mode 100644
index 000000000..21d30f41a
--- /dev/null
+++ b/plugin/src/main/scala/com/stratio/cassandra/lucene/util/SchemaValidator.scala
@@ -0,0 +1,137 @@
+/*
+ * Copyright (C) 2014 Stratio (http://stratio.com)
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package com.stratio.cassandra.lucene.util
+
+import java.math.{BigDecimal, BigInteger}
+import java.net.InetAddress
+import java.nio.ByteBuffer
+import java.util.{Date, UUID}
+
+import com.stratio.cassandra.lucene.IndexException
+import com.stratio.cassandra.lucene.column.Column
+import com.stratio.cassandra.lucene.schema.Schema
+import org.apache.cassandra.config.CFMetaData
+import org.apache.cassandra.db.marshal._
+
+import scala.annotation.tailrec
+import scala.collection.JavaConverters._
+
+/** Object for validating a [[Schema]] against a [[CFMetaData]].
+  *
+  * @author Andres de la Pena `adelapena@stratio.com`
+  */
+object SchemaValidator {
+
+  /** Validates the specified [[Schema]] against the specified [[CFMetaData]].
+    *
+    * @param schema a schema
+    * @param metadata a table metadata
+    */
+  def validate(schema: Schema, metadata: CFMetaData): Unit = {
+    schema.mappers.values.asScala.foreach { mapper =>
+      mapper.mappedColumns.asScala.foreach { column =>
+        validate(metadata, column, mapper.field, mapper.supportedTypes.asScala.toList)
+      }
+    }
+  }
+
+  /** Validates the mapping of a single column against the specified [[CFMetaData]].
+    *
+    * @param metadata a table metadata
+    * @param column the mapped column name, possibly including UDT member names
+    * @param field the name of the indexed field
+    * @param supportedTypes the Cassandra types supported by the mapper
+    */
+  def validate(
+      metadata: CFMetaData,
+      column: String,
+      field: String,
+      supportedTypes: List[Class[_]]): Unit = {
+
+    val cellName = Column.parseCellName(column)
+    val cellDefinition = metadata.getColumnDefinition(UTF8Type.instance.decompose(cellName))
+
+    if (cellDefinition == null) {
+      throw new IndexException("No column definition '{}' for mapper '{}'", cellName, field)
+    }
+    if (cellDefinition.isStatic) {
+      throw new IndexException("Lucene indexes are not allowed on static columns as '{}'", column)
+    }
+
+    // Fails if the candidate type is not among the mapper's supported types
+    def checkSupported(candidate: AbstractType[_], mapper: String): Unit = {
+      if (!supports(candidate, supportedTypes)) {
+        throw new IndexException(
+          "Type '{}' in column '{}' is not supported by mapper '{}'",
+          candidate,
+          mapper,
+          field)
+      }
+    }
+
+    val cellType = cellDefinition.`type`
+    val udtNames = Column.parseUdtNames(column)
+    if (udtNames.isEmpty) {
+      checkSupported(cellType, cellName)
+    } else {
+      // Walk down the UDT member chain; only the innermost type is checked for support
+      var mappedColumn = Column.apply(cellName)
+      var parentType = cellType
+      for (i <- udtNames.indices) {
+        mappedColumn = mappedColumn.withUDTName(udtNames(i))
+        childType(parentType, udtNames(i)) match {
+          case None => throw new IndexException(
+            s"No column definition '${mappedColumn.mapper}' for field '$field'")
+          case Some(child) if i == udtNames.indices.last =>
+            checkSupported(child, mappedColumn.mapper)
+          case Some(child) => parentType = child
+        }
+      }
+    }
+  }
+
+  /** Returns the child type of the specified parent type for the specified child name.
+    *
+    * Collections and reversed types are traversed transparently; for tuples the child
+    * name is the stringified position.
+    *
+    * @param parent the parent type
+    * @param child the child name
+    * @return the child type, or [[None]] if there is no such child
+    */
+  @tailrec
+  def childType(parent: AbstractType[_], child: String): Option[AbstractType[_]] = parent match {
+    case reversed: ReversedType[_] => childType(reversed.baseType, child)
+    case set: SetType[_] => childType(set.nameComparator, child)
+    case list: ListType[_] => childType(list.valueComparator, child)
+    case map: MapType[_, _] => childType(map.valueComparator, child)
+    case udt: UserType =>
+      (0 until udt.fieldNames.size).find(udt.fieldNameAsString(_) == child).map(udt.fieldType)
+    case tuple: TupleType => (0 until tuple.size).find(_.toString == child).map(tuple.`type`)
+    case _ => None
+  }
+
+  /** Returns `true` if the specified Cassandra type is assignable to one of the supported
+    * classes, unwrapping reversed types and collections down to their element/value types.
+    *
+    * @param candidateType the Cassandra type to be checked
+    * @param supportedTypes the supported classes
+    * @return `true` if `candidateType` is supported, `false` otherwise
+    */
+  @tailrec
+  def supports(
+      candidateType: AbstractType[_],
+      supportedTypes: Seq[Class[_]]): Boolean = candidateType match {
+    case reversed: ReversedType[_] => supports(reversed.baseType, supportedTypes)
+    case set: SetType[_] => supports(set.getElementsType, supportedTypes)
+    case list: ListType[_] => supports(list.getElementsType, supportedTypes)
+    case map: MapType[_, _] => supports(map.getValuesType, supportedTypes)
+    case _ =>
+      val native = nativeType(candidateType)
+      supportedTypes.exists(_ isAssignableFrom native)
+  }
+
+  /** Returns the Java class natively associated to the specified Cassandra type.
+    *
+    * @param validator the Cassandra type
+    * @return the native Java class
+    */
+  def nativeType(validator: AbstractType[_]): Class[_] = validator match {
+    case _: UTF8Type | _: AsciiType => classOf[String]
+    case _: SimpleDateType | _: TimestampType => classOf[Date]
+    case _: UUIDType | _: LexicalUUIDType | _: TimeUUIDType => classOf[UUID]
+    case _: ShortType => classOf[java.lang.Short]
+    case _: ByteType => classOf[java.lang.Byte]
+    case _: Int32Type => classOf[Integer]
+    case _: LongType => classOf[java.lang.Long]
+    case _: IntegerType => classOf[BigInteger]
+    case _: FloatType => classOf[java.lang.Float]
+    case _: DoubleType => classOf[java.lang.Double]
+    case _: DecimalType => classOf[BigDecimal]
+    case _: BooleanType => classOf[java.lang.Boolean]
+    case _: BytesType => classOf[ByteBuffer]
+    case _: InetAddressType => classOf[InetAddress]
+    case _ => throw new IndexException(s"Unsupported Cassandra data type: ${validator.getClass}")
+  }
+
+}
diff --git a/plugin/src/main/scala/com/stratio/cassandra/lucene/util/SimplePartitionIterator.scala b/plugin/src/main/scala/com/stratio/cassandra/lucene/util/SimplePartitionIterator.scala
index 15270d6d9..11c0b3dd6 100644
--- a/plugin/src/main/scala/com/stratio/cassandra/lucene/util/SimplePartitionIterator.scala
+++ b/plugin/src/main/scala/com/stratio/cassandra/lucene/util/SimplePartitionIterator.scala
@@ -18,12 +18,12 @@ package com.stratio.cassandra.lucene.util
import org.apache.cassandra.db.partitions.PartitionIterator
import org.apache.cassandra.db.rows.RowIterator
-/** [[PartitionIterator]] composed by a list of [[SimpleRowIterator]]s.
+/** [[PartitionIterator]] composed by a list of [[SingleRowIterator]]s.
*
* @param rows the rows to be iterated
* @author Andres de la Pena `adelapena@stratio.com`
*/
-class SimplePartitionIterator(rows: Seq[SimpleRowIterator]) extends PartitionIterator {
+class SimplePartitionIterator(rows: Seq[SingleRowIterator]) extends PartitionIterator {
private[this] val iterator = rows.iterator
diff --git a/plugin/src/main/scala/com/stratio/cassandra/lucene/util/SingleRowIterator.scala b/plugin/src/main/scala/com/stratio/cassandra/lucene/util/SingleRowIterator.scala
new file mode 100644
index 000000000..ff81e5db9
--- /dev/null
+++ b/plugin/src/main/scala/com/stratio/cassandra/lucene/util/SingleRowIterator.scala
@@ -0,0 +1,81 @@
+/*
+ * Copyright (C) 2014 Stratio (http://stratio.com)
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package com.stratio.cassandra.lucene.util
+
+import java.util.Collections
+
+import org.apache.cassandra.config.CFMetaData
+import org.apache.cassandra.db.rows.{Row, RowIterator}
+import org.apache.cassandra.db.{DecoratedKey, PartitionColumns}
+
+/** [[RowIterator]] representing a single CQL [[Row]], gotten from the head position of the
+  * specified [[RowIterator]]. Any other rows in the specified iterator won't be read.
+  *
+  * @param iterator the [[Row]] iterator
+  * @param headRow a row to override the first row in the iterator
+  * @param decorator a function to decorate the row
+  * @author Andres de la Pena `adelapena@stratio.com`
+  */
+class SingleRowIterator(
+    iterator: RowIterator,
+    headRow: Option[Row] = None,
+    decorator: Option[Row => Row] = None)
+  extends RowIterator {
+
+  /** The single row to be served: the overriding head row, or the iterator's first row. */
+  val row = headRow.getOrElse(iterator.next)
+
+  // Capture the source iterator's properties eagerly so they remain available
+  private[this] val sourceMetadata = iterator.metadata
+  private[this] val sourceKey = iterator.partitionKey
+  private[this] val sourceColumns = iterator.columns
+  private[this] val sourceStaticRow = iterator.staticRow
+  private[this] val singleton = Collections.singletonList(row).iterator
+
+  /** Return a copy of this iterator with the specified row decorator.
+    *
+    * @param decorator a function to decorate the returned row
+    * @return a new iterator with the decorator
+    */
+  def decorated(decorator: Row => Row): SingleRowIterator = {
+    new SingleRowIterator(iterator, Some(row), Option(decorator))
+  }
+
+  /** @inheritdoc */
+  override def metadata: CFMetaData = sourceMetadata
+
+  /** @inheritdoc */
+  override def isReverseOrder: Boolean = false
+
+  /** @inheritdoc */
+  override def columns: PartitionColumns = sourceColumns
+
+  /** @inheritdoc */
+  override def partitionKey: DecoratedKey = sourceKey
+
+  /** @inheritdoc */
+  override def staticRow: Row = sourceStaticRow
+
+  /** @inheritdoc */
+  override def close() {}
+
+  /** @inheritdoc */
+  override def hasNext: Boolean = singleton.hasNext
+
+  /** Returns the single row, applying the decorator when one is present. */
+  override def next: Row = {
+    val undecorated = singleton.next
+    decorator.fold(undecorated)(_ apply undecorated)
+  }
+}
diff --git a/plugin/src/main/scala/com/stratio/cassandra/lucene/util/TaskQueue.scala b/plugin/src/main/scala/com/stratio/cassandra/lucene/util/TaskQueue.scala
index 86626ac8f..ddda5de94 100644
--- a/plugin/src/main/scala/com/stratio/cassandra/lucene/util/TaskQueue.scala
+++ b/plugin/src/main/scala/com/stratio/cassandra/lucene/util/TaskQueue.scala
@@ -17,12 +17,11 @@ package com.stratio.cassandra.lucene.util
import java.io.Closeable
import java.util.concurrent.TimeUnit.DAYS
+import java.util.concurrent._
import java.util.concurrent.locks.ReentrantReadWriteLock
import com.stratio.cassandra.lucene.IndexException
-import com.stratio.cassandra.lucene.util.JavaConversions.asJavaCallable
-import com.stratio.cassandra.lucene.util.TaskQueue._
-import org.slf4j.LoggerFactory
+import org.apache.commons.lang3.concurrent.BasicThreadFactory
import scala.concurrent.ExecutionException
@@ -32,7 +31,7 @@ import scala.concurrent.ExecutionException
*
* @author Andres de la Pena `adelapena@stratio.com`
*/
-sealed trait TaskQueue extends Closeable {
+sealed trait TaskQueue extends Closeable with Logging {
/** Submits a non value-returning task for asynchronous execution.
*
@@ -45,8 +44,7 @@ sealed trait TaskQueue extends Closeable {
*/
def submitAsynchronous[A](id: AnyRef, task: () => A): Unit
- /**
- * Submits a non value-returning task for synchronous execution. It waits for all synchronous
+ /** Submits a non value-returning task for synchronous execution. It waits for all synchronous
* tasks to be completed.
*
* @param task a task to be executed synchronously
@@ -65,7 +63,7 @@ private class TaskQueueSync extends TaskQueue {
override def submitSynchronous[A](task: () => A): A = task.apply
/** @inheritdoc */
- override def close() = {}
+ override def close(): Unit = {}
}
@@ -76,15 +74,18 @@ private class TaskQueueSync extends TaskQueue {
*/
private class TaskQueueAsync(numThreads: Int, queuesSize: Int) extends TaskQueue {
- private val pools = (1 to numThreads).map(i => new BlockingExecutor(1, queuesSize, 1, DAYS))
private val lock = new ReentrantReadWriteLock(true)
+ private val pools = (1 to numThreads)
+ .map(index => new ArrayBlockingQueue[Runnable](queuesSize, true))
+ .map(queue => new ThreadPoolExecutor(1, 1, 1, DAYS, queue,
+ new BasicThreadFactory.Builder().namingPattern("lucene-indexer-%d").build(),
+ (task, executor) => if (!executor.isShutdown) executor.getQueue.put(task)))
/** @inheritdoc */
override def submitAsynchronous[A](id: AnyRef, task: () => A): Unit = {
lock.readLock.lock()
try {
- val pool = pools(Math.abs(id.hashCode % numThreads)) // Choose pool
- pool.submit(task)
+ pools(Math.abs(id.hashCode % numThreads)).submit(() => task.apply())
} catch {
case e: Exception =>
logger.error("Task queue asynchronous submission failed", e)
@@ -94,9 +95,9 @@ private class TaskQueueAsync(numThreads: Int, queuesSize: Int) extends TaskQueue
/** @inheritdoc */
override def submitSynchronous[A](task: () => A): A = {
- lock.writeLock().lock()
+ lock.writeLock.lock()
try {
- pools.map(_.submit(() => {})).foreach(_.get()) // Wait for queued tasks completion
+ pools.map(_.submit(() => None)).map(_.get()) // Wait for queued tasks completion
task.apply // Run synchronous task
} catch {
case e: InterruptedException =>
@@ -112,7 +113,7 @@ private class TaskQueueAsync(numThreads: Int, queuesSize: Int) extends TaskQueue
}
/** @inheritdoc */
- override def close() = {
+ override def close(): Unit = {
lock.writeLock.lock()
try pools.foreach(_.shutdown())
finally lock.writeLock.unlock()
@@ -120,10 +121,9 @@ private class TaskQueueAsync(numThreads: Int, queuesSize: Int) extends TaskQueue
}
+/** Companion object for [[TaskQueue]]. */
object TaskQueue {
- val logger = LoggerFactory.getLogger(classOf[TaskQueue])
-
/** Returns a new [[TaskQueue]].
*
* @param numThreads the number of executor threads
@@ -131,7 +131,7 @@ object TaskQueue {
* @return a new task queue
*/
def build(numThreads: Int, queuesSize: Int): TaskQueue = {
- if (numThreads > 0) new TaskQueueAsync(numThreads, queuesSize) else new TaskQueueSync()
+ if (numThreads > 0) new TaskQueueAsync(numThreads, queuesSize) else new TaskQueueSync
}
}
diff --git a/plugin/src/main/scala/com/stratio/cassandra/lucene/util/TimeCounter.scala b/plugin/src/main/scala/com/stratio/cassandra/lucene/util/TimeCounter.scala
new file mode 100644
index 000000000..a4cce35e7
--- /dev/null
+++ b/plugin/src/main/scala/com/stratio/cassandra/lucene/util/TimeCounter.scala
@@ -0,0 +1,99 @@
+/*
+ * Copyright (C) 2014 Stratio (http://stratio.com)
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package com.stratio.cassandra.lucene.util
+
+/** Immutable class for measuring time durations in milliseconds.
+  *
+  * Sealed base of [[StartedTimeCounter]] and [[StoppedTimeCounter]].
+  *
+  * @author Andres de la Pena `adelapena@stratio.com`
+  */
+sealed abstract class TimeCounter {
+
+  /** Returns the measured time in milliseconds.
+    *
+    * @return the measured time in milliseconds
+    */
+  def time: Long
+
+  /** Formats the measured time, e.g. `123 ms`. */
+  override def toString: String = s"$time ms"
+
+}
+
+/** A started [[TimeCounter]].
+  *
+  * @param startTime the start time in milliseconds
+  * @param runTime the already run time in milliseconds
+  */
+class StartedTimeCounter(startTime: Long, runTime: Long) extends TimeCounter {
+
+  /** Returns the already run time plus the time elapsed since this counter was started. */
+  override def time: Long = runTime + System.currentTimeMillis - startTime
+
+  /** Returns a new stopped time counter.
+    *
+    * @return a new stopped time counter
+    */
+  def stop: StoppedTimeCounter = new StoppedTimeCounter(time)
+
+}
+
+/** A stopped [[TimeCounter]].
+  *
+  * @param runTime the total run time in milliseconds
+  */
+class StoppedTimeCounter(runTime: Long) extends TimeCounter {
+
+  /** Returns the fixed, already measured run time in milliseconds. */
+  override def time: Long = runTime
+
+  /** Returns a new started time counter resuming from this counter's run time.
+    *
+    * @return a new started time counter
+    */
+  def start: StartedTimeCounter = new StartedTimeCounter(System.currentTimeMillis, time)
+
+}
+
+/** Companion object for [[TimeCounter]]. */
+object TimeCounter {
+
+  /** Returns a new [[StoppedTimeCounter]] with zero accumulated run time.
+    *
+    * @return a new stopped time counter
+    */
+  def create: StoppedTimeCounter = new StoppedTimeCounter(0)
+
+  /** Returns a new, already started [[StartedTimeCounter]].
+    *
+    * @return a new started time counter
+    */
+  def start: StartedTimeCounter = create.start
+
+  /** Runs the specified closure and returns a stopped time counter measuring its execution time.
+    *
+    * @param f the closure to be run and measured
+    * @return a new stopped time counter
+    */
+  def apply(f: () => Unit): StoppedTimeCounter = {
+    val started = start
+    f.apply()
+    started.stop
+  }
+
+}
diff --git a/plugin/src/main/scala/com/stratio/cassandra/lucene/util/Tracer.scala b/plugin/src/main/scala/com/stratio/cassandra/lucene/util/Tracer.scala
new file mode 100644
index 000000000..fa23fd3ea
--- /dev/null
+++ b/plugin/src/main/scala/com/stratio/cassandra/lucene/util/Tracer.scala
@@ -0,0 +1,41 @@
+/*
+ * Copyright (C) 2014 Stratio (http://stratio.com)
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package com.stratio.cassandra.lucene.util
+
+import org.apache.cassandra.tracing.{Tracing => Tracer}
+
+/** Wrapper for [[Tracer]] avoiding test environment failures.
+  *
+  * @author Andres de la Pena `adelapena@stratio.com`
+  */
+final class Tracer extends Logging {
+
+  /** If Cassandra tracing is enabled.
+    *
+    * Lazily probes the Cassandra tracing service once; outside a real Cassandra node
+    * (e.g. in unit tests) the probe fails with an [[Error]], which is caught so tracing
+    * is simply disabled instead of crashing the caller.
+    */
+  lazy val canTrace: Boolean = try {
+    Tracer.isTracing
+    true
+  } catch {
+    case e: Error =>
+      logger.warn(s"Unable to trace: ${e.getMessage}", e)
+      false
+  }
+
+  /** Traces the specified string message.
+    *
+    * The message is passed by name, so it is only built when a tracing session is active.
+    *
+    * @param message the message to be traced
+    */
+  def trace(message: => String): Unit = if (canTrace && Tracer.isTracing) Tracer.trace(message)
+}
diff --git a/plugin/src/main/scala/com/stratio/cassandra/lucene/util/Tracing.scala b/plugin/src/main/scala/com/stratio/cassandra/lucene/util/Tracing.scala
new file mode 100644
index 000000000..6cafbe6ff
--- /dev/null
+++ b/plugin/src/main/scala/com/stratio/cassandra/lucene/util/Tracing.scala
@@ -0,0 +1,27 @@
+/*
+ * Copyright (C) 2014 Stratio (http://stratio.com)
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package com.stratio.cassandra.lucene.util
+
+/** Trait including a [[Tracer]] instance.
+  *
+  * @author Andres de la Pena `adelapena@stratio.com`
+  */
+trait Tracing {
+
+  // Tracer used by implementors to emit Cassandra trace messages
+  protected val tracer: Tracer = new Tracer
+
+}
diff --git a/plugin/src/test/java/com/stratio/cassandra/lucene/common/GeoShapeTest.java b/plugin/src/test/java/com/stratio/cassandra/lucene/common/GeoShapeTest.java
index db02569b0..090545b6a 100644
--- a/plugin/src/test/java/com/stratio/cassandra/lucene/common/GeoShapeTest.java
+++ b/plugin/src/test/java/com/stratio/cassandra/lucene/common/GeoShapeTest.java
@@ -15,7 +15,6 @@
*/
package com.stratio.cassandra.lucene.common;
-import com.stratio.cassandra.lucene.util.JsonSerializer;
import org.junit.Test;
import java.io.IOException;
diff --git a/plugin/src/test/java/com/stratio/cassandra/lucene/common/GeoTransformationBBoxTest.java b/plugin/src/test/java/com/stratio/cassandra/lucene/common/GeoTransformationBBoxTest.java
index 1fa44a63a..6d16ba0d2 100644
--- a/plugin/src/test/java/com/stratio/cassandra/lucene/common/GeoTransformationBBoxTest.java
+++ b/plugin/src/test/java/com/stratio/cassandra/lucene/common/GeoTransformationBBoxTest.java
@@ -17,13 +17,12 @@
import com.spatial4j.core.shape.jts.JtsGeometry;
import com.stratio.cassandra.lucene.search.condition.AbstractConditionTest;
-import com.stratio.cassandra.lucene.util.JsonSerializer;
import org.junit.Test;
import java.io.IOException;
import static com.stratio.cassandra.lucene.common.GeoTransformation.BBox;
-import static com.stratio.cassandra.lucene.util.GeospatialUtilsJTS.geometry;
+import static com.stratio.cassandra.lucene.common.GeospatialUtilsJTS.geometry;
import static org.junit.Assert.assertEquals;
import static org.junit.Assert.assertNotNull;
diff --git a/plugin/src/test/java/com/stratio/cassandra/lucene/common/GeoTransformationBufferTest.java b/plugin/src/test/java/com/stratio/cassandra/lucene/common/GeoTransformationBufferTest.java
index e742f9051..915ede8ea 100644
--- a/plugin/src/test/java/com/stratio/cassandra/lucene/common/GeoTransformationBufferTest.java
+++ b/plugin/src/test/java/com/stratio/cassandra/lucene/common/GeoTransformationBufferTest.java
@@ -16,7 +16,6 @@
package com.stratio.cassandra.lucene.common;
import com.spatial4j.core.shape.jts.JtsGeometry;
-import com.stratio.cassandra.lucene.util.JsonSerializer;
import com.vividsolutions.jts.geom.Geometry;
import org.junit.Test;
@@ -25,8 +24,8 @@
import static com.stratio.cassandra.lucene.common.GeoDistanceUnit.KILOMETRES;
import static com.stratio.cassandra.lucene.common.GeoDistanceUnit.METRES;
import static com.stratio.cassandra.lucene.common.GeoTransformation.Buffer;
-import static com.stratio.cassandra.lucene.util.GeospatialUtilsJTS.CONTEXT;
-import static com.stratio.cassandra.lucene.util.GeospatialUtilsJTS.geometry;
+import static com.stratio.cassandra.lucene.common.GeospatialUtilsJTS.CONTEXT;
+import static com.stratio.cassandra.lucene.common.GeospatialUtilsJTS.geometry;
import static org.junit.Assert.assertEquals;
import static org.junit.Assert.assertNotNull;
diff --git a/plugin/src/test/java/com/stratio/cassandra/lucene/common/GeoTransformationCentroidTest.java b/plugin/src/test/java/com/stratio/cassandra/lucene/common/GeoTransformationCentroidTest.java
index e9a0065e5..90618c5af 100644
--- a/plugin/src/test/java/com/stratio/cassandra/lucene/common/GeoTransformationCentroidTest.java
+++ b/plugin/src/test/java/com/stratio/cassandra/lucene/common/GeoTransformationCentroidTest.java
@@ -16,13 +16,12 @@
package com.stratio.cassandra.lucene.common;
import com.spatial4j.core.shape.jts.JtsGeometry;
-import com.stratio.cassandra.lucene.util.JsonSerializer;
import org.junit.Test;
import java.io.IOException;
import static com.stratio.cassandra.lucene.common.GeoTransformation.Centroid;
-import static com.stratio.cassandra.lucene.util.GeospatialUtilsJTS.geometry;
+import static com.stratio.cassandra.lucene.common.GeospatialUtilsJTS.geometry;
import static org.junit.Assert.assertEquals;
import static org.junit.Assert.assertNotNull;
diff --git a/plugin/src/test/java/com/stratio/cassandra/lucene/common/GeoTransformationConvexHullTest.java b/plugin/src/test/java/com/stratio/cassandra/lucene/common/GeoTransformationConvexHullTest.java
index 1b166bc15..736f09349 100644
--- a/plugin/src/test/java/com/stratio/cassandra/lucene/common/GeoTransformationConvexHullTest.java
+++ b/plugin/src/test/java/com/stratio/cassandra/lucene/common/GeoTransformationConvexHullTest.java
@@ -16,13 +16,12 @@
package com.stratio.cassandra.lucene.common;
import com.spatial4j.core.shape.jts.JtsGeometry;
-import com.stratio.cassandra.lucene.util.JsonSerializer;
import org.junit.Test;
import java.io.IOException;
import static com.stratio.cassandra.lucene.common.GeoTransformation.ConvexHull;
-import static com.stratio.cassandra.lucene.util.GeospatialUtilsJTS.geometry;
+import static com.stratio.cassandra.lucene.common.GeospatialUtilsJTS.geometry;
import static org.junit.Assert.assertEquals;
import static org.junit.Assert.assertNotNull;
diff --git a/plugin/src/test/java/com/stratio/cassandra/lucene/schema/SchemaBuilderTest.java b/plugin/src/test/java/com/stratio/cassandra/lucene/schema/SchemaBuilderTest.java
index e912fbd13..e1b972783 100644
--- a/plugin/src/test/java/com/stratio/cassandra/lucene/schema/SchemaBuilderTest.java
+++ b/plugin/src/test/java/com/stratio/cassandra/lucene/schema/SchemaBuilderTest.java
@@ -50,7 +50,7 @@ public void testBuild() throws Exception {
.mapper("text", textMapper().analyzer("snowball"))
.mapper("uuid", uuidMapper())
.build();
- assertEquals("Failed schema building", EnglishAnalyzer.class, schema.defaultAnalyzer().getClass());
+ assertEquals("Failed schema building", EnglishAnalyzer.class, schema.defaultAnalyzer.getClass());
assertEquals("Failed schema building", BlobMapper.class, schema.mapper("blob").getClass());
assertEquals("Failed schema building", BooleanMapper.class, schema.mapper("bool").getClass());
assertEquals("Failed schema building", DateMapper.class, schema.mapper("date").getClass());
@@ -73,7 +73,7 @@ public void testBuildNumeric() throws Exception {
.mapper("int", integerMapper().boost(0.3f))
.mapper("long", longMapper())
.build();
- assertEquals("Failed schema building", EnglishAnalyzer.class, schema.defaultAnalyzer().getClass());
+ assertEquals("Failed schema building", EnglishAnalyzer.class, schema.defaultAnalyzer.getClass());
assertEquals("Failed schema building", BigIntegerMapper.class, schema.mapper("big_int").getClass());
assertEquals("Failed schema building", BigDecimalMapper.class, schema.mapper("big_dec").getClass());
assertEquals("Failed schema building", DoubleMapper.class, schema.mapper("double").getClass());
@@ -96,7 +96,7 @@ public void testBuildComplex() throws Exception {
.mapper("date_range", dateRangeMapper("from", "to"))
.mapper("geo", geoPointMapper("lat", "lon"))
.build();
- assertEquals("Failed schema building", EnglishAnalyzer.class, schema.defaultAnalyzer().getClass());
+ assertEquals("Failed schema building", EnglishAnalyzer.class, schema.defaultAnalyzer.getClass());
assertEquals("Failed schema building", BitemporalMapper.class, schema.mapper("bitemporal").getClass());
assertEquals("Failed schema building", DateRangeMapper.class, schema.mapper("date_range").getClass());
assertEquals("Failed schema building", GeoPointMapper.class, schema.mapper("geo").getClass());
@@ -166,7 +166,7 @@ public void testFromJsonRegular() throws IOException {
"uuid:{type:\"uuid\"}" +
"}}";
Schema schema = SchemaBuilder.fromJson(json).build();
- assertEquals("Failed schema JSON parsing", EnglishAnalyzer.class, schema.defaultAnalyzer().getClass());
+ assertEquals("Failed schema JSON parsing", EnglishAnalyzer.class, schema.defaultAnalyzer.getClass());
assertEquals("Failed schema JSON parsing", BlobMapper.class, schema.mapper("blob").getClass());
assertEquals("Failed schema JSON parsing", BooleanMapper.class, schema.mapper("bool").getClass());
assertEquals("Failed schema JSON parsing", DateMapper.class, schema.mapper("date").getClass());
@@ -192,7 +192,7 @@ public void testFromJsonNumeric() throws IOException {
"int:{type:\"integer\"}," +
"long:{type:\"long\"}}}";
Schema schema = SchemaBuilder.fromJson(json).build();
- assertEquals("Failed schema JSON parsing", EnglishAnalyzer.class, schema.defaultAnalyzer().getClass());
+ assertEquals("Failed schema JSON parsing", EnglishAnalyzer.class, schema.defaultAnalyzer.getClass());
assertEquals("Failed schema JSON parsing", BigIntegerMapper.class, schema.mapper("big_int").getClass());
assertEquals("Failed schema JSON parsing", BigDecimalMapper.class, schema.mapper("big_dec").getClass());
assertEquals("Failed schema JSON parsing", DoubleMapper.class, schema.mapper("double").getClass());
@@ -214,7 +214,7 @@ public void testFromJsonComplex() throws IOException {
"geo:{type:\"geo_point\",latitude:\"lat\",longitude:\"lon\"}" +
"}}";
Schema schema = SchemaBuilder.fromJson(json).build();
- assertEquals("Failed schema JSON parsing", EnglishAnalyzer.class, schema.defaultAnalyzer().getClass());
+ assertEquals("Failed schema JSON parsing", EnglishAnalyzer.class, schema.defaultAnalyzer.getClass());
assertEquals("Failed schema JSON parsing", BitemporalMapper.class, schema.mapper("bitemporal").getClass());
assertEquals("Failed schema JSON parsing", DateRangeMapper.class, schema.mapper("date_range").getClass());
assertEquals("Failed schema JSON parsing", GeoPointMapper.class, schema.mapper("geo").getClass());
@@ -239,7 +239,7 @@ public void testFromJSONWithNullAnalyzers() throws IOException {
Schema schema = SchemaBuilder.fromJson(json).build();
- Analyzer defaultAnalyzer = schema.defaultAnalyzer();
+ Analyzer defaultAnalyzer = schema.defaultAnalyzer;
assertTrue("Expected english analyzer", defaultAnalyzer instanceof EnglishAnalyzer);
Mapper idMapper = schema.mapper("id");
@@ -276,7 +276,7 @@ public void testFromJSONWithEmptyAnalyzers() throws IOException {
" }'";
Schema schema = SchemaBuilder.fromJson(json).build();
- Analyzer defaultAnalyzer = schema.defaultAnalyzer();
+ Analyzer defaultAnalyzer = schema.defaultAnalyzer;
assertTrue("Expected EnglishAnalyzer", defaultAnalyzer instanceof EnglishAnalyzer);
Mapper idMapper = schema.mapper("id");
@@ -310,7 +310,7 @@ public void testParseJSONWithNullDefaultAnalyzer() throws IOException {
" }'";
Schema schema = SchemaBuilder.fromJson(json).build();
- Analyzer defaultAnalyzer = schema.defaultAnalyzer();
+ Analyzer defaultAnalyzer = schema.defaultAnalyzer;
assertEquals("Expected default analyzer",
StandardAnalyzers.DEFAULT.get().getClass(),
defaultAnalyzer.getClass());
diff --git a/plugin/src/test/java/com/stratio/cassandra/lucene/schema/SchemaTest.java b/plugin/src/test/java/com/stratio/cassandra/lucene/schema/SchemaTest.java
index 866dd6e2b..3f4a2a6c4 100644
--- a/plugin/src/test/java/com/stratio/cassandra/lucene/schema/SchemaTest.java
+++ b/plugin/src/test/java/com/stratio/cassandra/lucene/schema/SchemaTest.java
@@ -40,7 +40,7 @@ public void testGetDefaultAnalyzer() {
Map mappers = new HashMap<>();
Map analyzers = new HashMap<>();
Schema schema = new Schema(new EnglishAnalyzer(), mappers, analyzers);
- Analyzer analyzer = schema.defaultAnalyzer();
+ Analyzer analyzer = schema.defaultAnalyzer;
assertEquals("Expected english analyzer", EnglishAnalyzer.class, analyzer.getClass());
schema.close();
}
@@ -50,7 +50,7 @@ public void testGetDefaultAnalyzerNotSpecified() {
Map mappers = new HashMap<>();
Map analyzers = new HashMap<>();
Schema schema = new Schema(new EnglishAnalyzer(), mappers, analyzers);
- Analyzer analyzer = schema.defaultAnalyzer();
+ Analyzer analyzer = schema.defaultAnalyzer;
assertEquals("Expected default analyzer", EnglishAnalyzer.class, analyzer.getClass());
schema.close();
}
@@ -86,7 +86,7 @@ public void testGetAnalyzerEmpty() {
@Test
public void testValidateColumns() {
Schema schema = SchemaBuilders.schema().mapper("field1", stringMapper()).build();
- Columns columns = new Columns().add("field1", "value");
+ Columns columns = Columns.empty().add("field1", "value");
schema.validate(columns);
schema.close();
}
@@ -94,7 +94,7 @@ public void testValidateColumns() {
@Test(expected = IndexException.class)
public void testValidateColumnsFailing() {
Schema schema = SchemaBuilders.schema().mapper("field1", integerMapper().validated(true)).build();
- Columns columns = new Columns().add("field1", "value");
+ Columns columns = Columns.empty().add("field1", "value");
schema.validate(columns);
schema.close();
}
@@ -102,7 +102,7 @@ public void testValidateColumnsFailing() {
@Test
public void testAddFields() {
Schema schema = SchemaBuilders.schema().mapper("field1", stringMapper()).build();
- Columns columns = new Columns().add("field1", "value");
+ Columns columns = Columns.empty().add("field1", "value");
List fields = schema.indexableFields(columns);
assertFalse("Expected true", fields.isEmpty());
schema.close();
@@ -111,7 +111,7 @@ public void testAddFields() {
@Test
public void testAddFieldsFailing() {
Schema schema = SchemaBuilders.schema().mapper("field1", integerMapper()).build();
- Columns columns = new Columns().add("field1", "value");
+ Columns columns = Columns.empty().add("field1", "value");
List fields = schema.indexableFields(columns);
assertTrue("Expected true", fields.isEmpty());
schema.close();
diff --git a/plugin/src/test/java/com/stratio/cassandra/lucene/schema/analysis/ClasspathAnalyzerBuilderTest.java b/plugin/src/test/java/com/stratio/cassandra/lucene/schema/analysis/ClasspathAnalyzerBuilderTest.java
index dc39ec477..2825f8c5c 100644
--- a/plugin/src/test/java/com/stratio/cassandra/lucene/schema/analysis/ClasspathAnalyzerBuilderTest.java
+++ b/plugin/src/test/java/com/stratio/cassandra/lucene/schema/analysis/ClasspathAnalyzerBuilderTest.java
@@ -16,7 +16,7 @@
package com.stratio.cassandra.lucene.schema.analysis;
import com.stratio.cassandra.lucene.IndexException;
-import com.stratio.cassandra.lucene.util.JsonSerializer;
+import com.stratio.cassandra.lucene.common.JsonSerializer;
import org.apache.lucene.analysis.Analyzer;
import org.apache.lucene.analysis.en.EnglishAnalyzer;
import org.junit.Test;
diff --git a/plugin/src/test/java/com/stratio/cassandra/lucene/schema/analysis/SnowballAnalyzerBuilderTest.java b/plugin/src/test/java/com/stratio/cassandra/lucene/schema/analysis/SnowballAnalyzerBuilderTest.java
index 0440ddf8d..0e2960de1 100644
--- a/plugin/src/test/java/com/stratio/cassandra/lucene/schema/analysis/SnowballAnalyzerBuilderTest.java
+++ b/plugin/src/test/java/com/stratio/cassandra/lucene/schema/analysis/SnowballAnalyzerBuilderTest.java
@@ -16,7 +16,7 @@
package com.stratio.cassandra.lucene.schema.analysis;
import com.stratio.cassandra.lucene.IndexException;
-import com.stratio.cassandra.lucene.util.JsonSerializer;
+import com.stratio.cassandra.lucene.common.JsonSerializer;
import org.apache.lucene.analysis.Analyzer;
import org.apache.lucene.analysis.TokenStream;
import org.apache.lucene.analysis.tokenattributes.CharTermAttribute;
diff --git a/plugin/src/test/java/com/stratio/cassandra/lucene/schema/mapping/AbstractMapperTest.java b/plugin/src/test/java/com/stratio/cassandra/lucene/schema/mapping/AbstractMapperTest.java
index d384e396c..df8cc3e5b 100644
--- a/plugin/src/test/java/com/stratio/cassandra/lucene/schema/mapping/AbstractMapperTest.java
+++ b/plugin/src/test/java/com/stratio/cassandra/lucene/schema/mapping/AbstractMapperTest.java
@@ -16,7 +16,7 @@
package com.stratio.cassandra.lucene.schema.mapping;
import com.stratio.cassandra.lucene.schema.mapping.builder.MapperBuilder;
-import com.stratio.cassandra.lucene.util.JsonSerializer;
+import com.stratio.cassandra.lucene.common.JsonSerializer;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
diff --git a/plugin/src/test/java/com/stratio/cassandra/lucene/schema/mapping/BitemporalMapperTest.java b/plugin/src/test/java/com/stratio/cassandra/lucene/schema/mapping/BitemporalMapperTest.java
index 54b24944a..cbdb8d188 100644
--- a/plugin/src/test/java/com/stratio/cassandra/lucene/schema/mapping/BitemporalMapperTest.java
+++ b/plugin/src/test/java/com/stratio/cassandra/lucene/schema/mapping/BitemporalMapperTest.java
@@ -28,7 +28,6 @@
import java.io.IOException;
import java.math.BigDecimal;
import java.math.BigInteger;
-import java.nio.ByteBuffer;
import java.text.ParseException;
import java.text.SimpleDateFormat;
import java.util.Date;
@@ -36,7 +35,7 @@
import java.util.UUID;
import static com.stratio.cassandra.lucene.schema.SchemaBuilders.bitemporalMapper;
-import static com.stratio.cassandra.lucene.util.DateParser.DEFAULT_PATTERN;
+import static com.stratio.cassandra.lucene.common.DateParser.DEFAULT_PATTERN;
import static org.junit.Assert.*;
/**
@@ -185,13 +184,13 @@ private static void testReadField(String pattern, String expected, Object value)
BitemporalDateTime dateTime = new BitemporalDateTime(date);
BitemporalMapper mapper = bitemporalMapper("vtFrom", "vtTo", "ttFrom", "ttTo").pattern(pattern).build("f");
Columns columns;
- columns = new Columns().add("vtFrom", value);
+ columns = Columns.empty().add("vtFrom", value);
assertEquals("Wrong VT from date parsing", dateTime, mapper.readBitemporalDate(columns, "vtFrom"));
- columns = new Columns().add("vtTo", value);
+ columns = Columns.empty().add("vtTo", value);
assertEquals("Wrong VT to date parsing", dateTime, mapper.readBitemporalDate(columns, "vtTo"));
- columns = new Columns().add("ttFrom", value);
+ columns = Columns.empty().add("ttFrom", value);
assertEquals("Wrong TT from date parsing", dateTime, mapper.readBitemporalDate(columns, "ttFrom"));
- columns = new Columns().add("ttTo", value);
+ columns = Columns.empty().add("ttTo", value);
assertEquals("Wrong TT to date parsing", dateTime, mapper.readBitemporalDate(columns, "ttTo"));
}
@@ -246,7 +245,7 @@ public void testReadFieldFromDateColumn() throws ParseException {
@Test(expected = IndexException.class)
public void testAddFieldsWithVtToSmallerThanVtFromFromLongColumn() {
BitemporalMapper mapper = bitemporalMapper("vtFrom", "vtTo", "ttFrom", "ttTo").pattern("y").build("f");
- Columns columns = new Columns().add("vtFrom", 5L)
+ Columns columns = Columns.empty().add("vtFrom", 5L)
.add("vtTo", 0L)
.add("ttFrom", 0L)
.add("ttTo", 0L);
@@ -256,7 +255,7 @@ public void testAddFieldsWithVtToSmallerThanVtFromFromLongColumn() {
@Test(expected = IndexException.class)
public void testAddFieldsWithTtToSmallerThanTtFromFromLongColumn() {
BitemporalMapper mapper = bitemporalMapper("vtFrom", "vtTo", "ttFrom", "ttTo").pattern("y").build("f");
- Columns columns = new Columns().add("vtFrom", 0L)
+ Columns columns = Columns.empty().add("vtFrom", 0L)
.add("vtTo", 0L)
.add("ttFrom", 5L)
.add("ttTo", 0L);
@@ -290,10 +289,11 @@ private void testAddFieldsOnlyThese(List fields,
public void testAddFieldsT1() {
String nowValue = "2100/01/01 00:00:00.001 GMT";
BitemporalMapper mapper = bitemporalMapper("vtFrom", "vtTo", "ttFrom", "ttTo").nowValue(nowValue).build("f");
- Columns columns = new Columns().add("vtFrom", "2015/02/28 01:02:03.004 GMT")
- .add("vtTo", nowValue)
- .add("ttFrom", "2015/02/28 01:02:03.004 GMT")
- .add("ttTo", nowValue);
+ Columns columns = Columns.empty()
+ .add("vtFrom", "2015/02/28 01:02:03.004 GMT")
+ .add("vtTo", nowValue)
+ .add("ttFrom", "2015/02/28 01:02:03.004 GMT")
+ .add("ttTo", nowValue);
List fields = mapper.indexableFields(columns);
testAddFieldsOnlyThese(fields, new String[]{"f.ttFrom", "f.ttTo", "f.vtFrom", "f.vtTo"}, new String[0]);
}
@@ -302,10 +302,11 @@ public void testAddFieldsT1() {
public void testAddFieldsT2() {
String nowValue = "2100/01/01 00:00:00.000 GMT";
BitemporalMapper mapper = bitemporalMapper("vtFrom", "vtTo", "ttFrom", "ttTo").nowValue(nowValue).build("f");
- Columns columns = new Columns().add("vtFrom", "2015/02/28 01:02:03.004 GMT")
- .add("vtTo", "2015/02/28 01:02:03.004 GMT")
- .add("ttFrom", "2015/02/28 01:02:03.004 GMT")
- .add("ttTo", nowValue);
+ Columns columns = Columns.empty()
+ .add("vtFrom", "2015/02/28 01:02:03.004 GMT")
+ .add("vtTo", "2015/02/28 01:02:03.004 GMT")
+ .add("ttFrom", "2015/02/28 01:02:03.004 GMT")
+ .add("ttTo", nowValue);
List fields = mapper.indexableFields(columns);
testAddFieldsOnlyThese(fields, new String[]{"f.ttFrom", "f.ttTo", "f.vtFrom", "f.vtTo"}, new String[0]);
}
@@ -314,10 +315,11 @@ public void testAddFieldsT2() {
public void testAddFieldsT3() {
String nowValue = "2100/01/01 00:00:00.000 GMT";
BitemporalMapper mapper = bitemporalMapper("vtFrom", "vtTo", "ttFrom", "ttTo").nowValue(nowValue).build("f");
- Columns columns = new Columns().add("vtFrom", "2015/02/28 01:02:03.004 GMT")
- .add("vtTo", nowValue)
- .add("ttFrom", "2015/02/28 01:02:03.004 GMT")
- .add("ttTo", "2015/02/28 01:02:03.004 GMT");
+ Columns columns = Columns.empty()
+ .add("vtFrom", "2015/02/28 01:02:03.004 GMT")
+ .add("vtTo", nowValue)
+ .add("ttFrom", "2015/02/28 01:02:03.004 GMT")
+ .add("ttTo", "2015/02/28 01:02:03.004 GMT");
List fields = mapper.indexableFields(columns);
testAddFieldsOnlyThese(fields, new String[]{"f.ttFrom", "f.ttTo", "f.vtFrom", "f.vtTo"}, new String[0]);
}
@@ -325,10 +327,11 @@ public void testAddFieldsT3() {
@Test
public void testAddFieldsT4() {
BitemporalMapper mapper = bitemporalMapper("vtFrom", "vtTo", "ttFrom", "ttTo").build("f");
- Columns columns = new Columns().add("vtFrom", "2015/02/28 01:02:03.004 GMT")
- .add("vtTo", "2015/02/28 01:02:03.004 GMT")
- .add("ttFrom", "2015/02/28 01:02:03.004 GMT")
- .add("ttTo", "2015/02/28 01:02:03.004 GMT");
+ Columns columns = Columns.empty()
+ .add("vtFrom", "2015/02/28 01:02:03.004 GMT")
+ .add("vtTo", "2015/02/28 01:02:03.004 GMT")
+ .add("ttFrom", "2015/02/28 01:02:03.004 GMT")
+ .add("ttTo", "2015/02/28 01:02:03.004 GMT");
List fields = mapper.indexableFields(columns);
testAddFieldsOnlyThese(fields, new String[]{"f.ttFrom", "f.ttTo", "f.vtFrom", "f.vtTo"}, new String[0]);
}
@@ -336,7 +339,7 @@ public void testAddFieldsT4() {
@Test
public void testAddFieldsAllNull() {
BitemporalMapper mapper = bitemporalMapper("vtFrom", "vtTo", "ttFrom", "ttTo").build("f");
- Columns columns = new Columns();
+ Columns columns = Columns.empty();
List fields = mapper.indexableFields(columns);
assertEquals("Null columns should produce no fields", 0, fields.size());
}
@@ -344,56 +347,62 @@ public void testAddFieldsAllNull() {
@Test(expected = IndexException.class)
public void testAddFieldsVtFromNull() {
BitemporalMapper mapper = bitemporalMapper("vtFrom", "vtTo", "ttFrom", "ttTo").build("f");
- Columns columns = new Columns().add("vtTo", "2015/02/28 01:02:03.004 GMT")
- .add("ttFrom", "2015/02/28 01:02:03.004 GMT")
- .add("ttTo", "2015/02/28 01:02:03.004 GMT");
+ Columns columns = Columns.empty()
+ .add("vtTo", "2015/02/28 01:02:03.004 GMT")
+ .add("ttFrom", "2015/02/28 01:02:03.004 GMT")
+ .add("ttTo", "2015/02/28 01:02:03.004 GMT");
mapper.indexableFields(columns);
}
@Test(expected = IndexException.class)
public void testAddFieldsVtToNull() {
BitemporalMapper mapper = bitemporalMapper("vtFrom", "vtTo", "ttFrom", "ttTo").build("f");
- Columns columns = new Columns().add("vtFrom", "2015/02/28 01:02:03.004 GMT")
- .add("ttFrom", "2015/02/28 01:02:03.004 GMT")
- .add("ttTo", "2015/02/28 01:02:03.004 GMT");
+ Columns columns = Columns.empty()
+ .add("vtFrom", "2015/02/28 01:02:03.004 GMT")
+ .add("ttFrom", "2015/02/28 01:02:03.004 GMT")
+ .add("ttTo", "2015/02/28 01:02:03.004 GMT");
mapper.indexableFields(columns);
}
@Test(expected = IndexException.class)
public void testAddFieldsTtFromNull() {
BitemporalMapper mapper = bitemporalMapper("vtFrom", "vtTo", "ttFrom", "ttTo").build("f");
- Columns columns = new Columns().add("vtFrom", "2015/02/28 01:02:03.004 GMT")
- .add("vtTo", "2015/02/28 01:02:03.004 GMT")
- .add("ttTo", "2015/02/28 01:02:03.004 GMT");
+ Columns columns = Columns.empty()
+ .add("vtFrom", "2015/02/28 01:02:03.004 GMT")
+ .add("vtTo", "2015/02/28 01:02:03.004 GMT")
+ .add("ttTo", "2015/02/28 01:02:03.004 GMT");
mapper.indexableFields(columns);
}
@Test(expected = IndexException.class)
public void testAddFieldsTtToNull() {
BitemporalMapper mapper = bitemporalMapper("vtFrom", "vtTo", "ttFrom", "ttTo").build("f");
- Columns columns = new Columns().add("vtFrom", "2015/02/28 01:02:03.004 GMT")
- .add("vtTo", "2015/02/28 01:02:03.004 GMT")
- .add("ttFrom", "2015/02/28 01:02:03.004 GMT");
+ Columns columns = Columns.empty()
+ .add("vtFrom", "2015/02/28 01:02:03.004 GMT")
+ .add("vtTo", "2015/02/28 01:02:03.004 GMT")
+ .add("ttFrom", "2015/02/28 01:02:03.004 GMT");
mapper.indexableFields(columns);
}
@Test(expected = IndexException.class)
public void testAddFieldsVtFromAfterVtToNull() {
BitemporalMapper mapper = bitemporalMapper("vtFrom", "vtTo", "ttFrom", "ttTo").build("f");
- Columns columns = new Columns().add("vtFrom", "2015/02/28 01:02:03.005 GMT")
- .add("vtTo", "2015/02/28 01:02:03.004 GMT")
- .add("ttFrom", "2015/02/28 01:02:03.004 GMT")
- .add("ttTo", "2015/02/28 01:02:03.004 GMT");
+ Columns columns = Columns.empty()
+ .add("vtFrom", "2015/02/28 01:02:03.005 GMT")
+ .add("vtTo", "2015/02/28 01:02:03.004 GMT")
+ .add("ttFrom", "2015/02/28 01:02:03.004 GMT")
+ .add("ttTo", "2015/02/28 01:02:03.004 GMT");
mapper.indexableFields(columns);
}
@Test(expected = IndexException.class)
public void testAddFieldsTtFromAfterTtToNull() {
BitemporalMapper mapper = bitemporalMapper("vtFrom", "vtTo", "ttFrom", "ttTo").build("f");
- Columns columns = new Columns().add("vtFrom", "2015/02/28 01:02:03.004 GMT")
- .add("vtTo", "2015/02/28 01:02:03.004 GMT")
- .add("ttFrom", "2015/02/28 01:02:03.005 GMT")
- .add("ttTo", "2015/02/28 01:02:03.004 GMT");
+ Columns columns = Columns.empty()
+ .add("vtFrom", "2015/02/28 01:02:03.004 GMT")
+ .add("vtTo", "2015/02/28 01:02:03.004 GMT")
+ .add("ttFrom", "2015/02/28 01:02:03.005 GMT")
+ .add("ttTo", "2015/02/28 01:02:03.004 GMT");
mapper.indexableFields(columns);
}
diff --git a/plugin/src/test/java/com/stratio/cassandra/lucene/schema/mapping/DateMapperTest.java b/plugin/src/test/java/com/stratio/cassandra/lucene/schema/mapping/DateMapperTest.java
index c78a8ee0d..e21ffda40 100644
--- a/plugin/src/test/java/com/stratio/cassandra/lucene/schema/mapping/DateMapperTest.java
+++ b/plugin/src/test/java/com/stratio/cassandra/lucene/schema/mapping/DateMapperTest.java
@@ -29,7 +29,7 @@
import java.util.UUID;
import static com.stratio.cassandra.lucene.schema.SchemaBuilders.dateMapper;
-import static com.stratio.cassandra.lucene.util.DateParser.DEFAULT_PATTERN;
+import static com.stratio.cassandra.lucene.common.DateParser.DEFAULT_PATTERN;
import static org.junit.Assert.*;
public class DateMapperTest extends AbstractMapperTest {
diff --git a/plugin/src/test/java/com/stratio/cassandra/lucene/schema/mapping/DateRangeMapperTest.java b/plugin/src/test/java/com/stratio/cassandra/lucene/schema/mapping/DateRangeMapperTest.java
index 1c16fcafb..b90d21b74 100644
--- a/plugin/src/test/java/com/stratio/cassandra/lucene/schema/mapping/DateRangeMapperTest.java
+++ b/plugin/src/test/java/com/stratio/cassandra/lucene/schema/mapping/DateRangeMapperTest.java
@@ -30,7 +30,7 @@
import java.util.UUID;
import static com.stratio.cassandra.lucene.schema.SchemaBuilders.dateRangeMapper;
-import static com.stratio.cassandra.lucene.util.DateParser.DEFAULT_PATTERN;
+import static com.stratio.cassandra.lucene.common.DateParser.DEFAULT_PATTERN;
import static org.junit.Assert.*;
public class DateRangeMapperTest extends AbstractMapperTest {
@@ -106,9 +106,9 @@ private static void testReadField(String pattern, String expected, Object value)
Date date = new SimpleDateFormat(pattern).parse(expected);
DateRangeMapper mapper = dateRangeMapper("from", "to").pattern(pattern).build("name");
Columns columns;
- columns = new Columns().add("from", value);
+ columns = Columns.empty().add("from", value);
assertEquals("From is not properly parsed", date, mapper.readFrom(columns));
- columns = new Columns().add("to", value);
+ columns = Columns.empty().add("to", value);
assertEquals("To is not properly parsed", date, mapper.readTo(columns));
}
@@ -156,7 +156,7 @@ public void testReadFieldFromUnparseableStringColumn() throws ParseException {
@Test
public void testReadFieldWithNullColumn() {
DateRangeMapper mapper = dateRangeMapper("from", "to").build("name");
- assertNull("From is not properly parsed", mapper.readFrom(new Columns()));
+ assertNull("From is not properly parsed", mapper.readFrom(Columns.empty()));
}
@Test(expected = IndexException.class)
@@ -168,7 +168,7 @@ public void testSortField() {
@Test
public void testAddFields() {
DateRangeMapper mapper = dateRangeMapper("from", "to").pattern("yyyy-MM-dd").build("name");
- Columns columns = new Columns().add("from", "1982-11-27").add("to", "2016-11-27");
+ Columns columns = Columns.empty().add("from", "1982-11-27").add("to", "2016-11-27");
List indexableFields = mapper.indexableFields(columns);
assertEquals("Indexed field is not created", 1, indexableFields.size());
@@ -179,7 +179,7 @@ public void testAddFields() {
@Test
public void testAddFieldsWithNullColumns() {
DateRangeMapper mapper = dateRangeMapper("from", "to").build("name");
- Columns columns = new Columns();
+ Columns columns = Columns.empty();
List indexableFields = mapper.indexableFields(columns);
assertEquals("Null columns must not produce fields", 0, indexableFields.size());
}
@@ -187,14 +187,14 @@ public void testAddFieldsWithNullColumns() {
@Test(expected = IndexException.class)
public void testAddFieldsWithBadSortColumns() {
DateRangeMapper mapper = dateRangeMapper("from", "to").pattern("yyyy").build("name");
- Columns columns = new Columns().add("from", "1982").add("to", "1980");
+ Columns columns = Columns.empty().add("from", "1982").add("to", "1980");
mapper.indexableFields(columns);
}
@Test
public void testAddFieldsWithSameColumns() {
DateRangeMapper mapper = dateRangeMapper("from", "to").pattern("yyyy").build("name");
- Columns columns = new Columns().add("from", 2000).add("to", 2000);
+ Columns columns = Columns.empty().add("from", 2000).add("to", 2000);
List indexableFields = mapper.indexableFields(columns);
assertEquals("Indexed field is not created", 1, indexableFields.size());
}
diff --git a/plugin/src/test/java/com/stratio/cassandra/lucene/schema/mapping/GeoPointMapperTest.java b/plugin/src/test/java/com/stratio/cassandra/lucene/schema/mapping/GeoPointMapperTest.java
index 690bc5eb7..bef6ca6a6 100644
--- a/plugin/src/test/java/com/stratio/cassandra/lucene/schema/mapping/GeoPointMapperTest.java
+++ b/plugin/src/test/java/com/stratio/cassandra/lucene/schema/mapping/GeoPointMapperTest.java
@@ -120,187 +120,187 @@ public void testConstructorWithBlankLongitude() {
@Test
public void testGetLatitudeFromNullColumn() {
GeoPointMapper mapper = geoPointMapper("lat", "lon").build("field");
- Columns columns = new Columns().add("lat").add("lon", 0);
+ Columns columns = Columns.empty().add("lat").add("lon", 0);
assertNull("Latitude is not properly parsed", mapper.readLatitude(columns));
}
@Test
public void testGetLatitudeFromIntColumn() {
GeoPointMapper mapper = geoPointMapper("lat", "lon").build("field");
- Columns columns = new Columns().add("lat", 5).add("lon", 0);
+ Columns columns = Columns.empty().add("lat", 5).add("lon", 0);
assertEquals("Latitude is not properly parsed", 5d, mapper.readLatitude(columns), 0);
}
@Test
public void testGetLatitudeFromLongColumn() {
GeoPointMapper mapper = geoPointMapper("lat", "lon").build("field");
- Columns columns = new Columns().add("lat", 5L).add("lon", 0);
+ Columns columns = Columns.empty().add("lat", 5L).add("lon", 0);
assertEquals("Latitude is not properly parsed", 5d, mapper.readLatitude(columns), 0);
}
@Test
public void testGetLatitudeFromFloatColumn() {
GeoPointMapper mapper = geoPointMapper("lat", "lon").build("field");
- Columns columns = new Columns().add("lat", 5.3f).add("lon", 0);
+ Columns columns = Columns.empty().add("lat", 5.3f).add("lon", 0);
assertEquals("Latitude is not properly parsed", 5.3f, mapper.readLatitude(columns), 0);
}
@Test
public void testGetLatitudeFromDoubleColumn() {
GeoPointMapper mapper = geoPointMapper("lat", "lon").build("field");
- Columns columns = new Columns().add("lat", 5.3D).add("lon", 0);
+ Columns columns = Columns.empty().add("lat", 5.3D).add("lon", 0);
assertEquals("Latitude is not properly parsed", 5.3d, mapper.readLatitude(columns), 0);
}
@Test
public void testGetLatitudeFromStringColumn() {
GeoPointMapper mapper = geoPointMapper("lat", "lon").build("field");
- Columns columns = new Columns().add("lat", "5.3").add("lon", 0);
+ Columns columns = Columns.empty().add("lat", "5.3").add("lon", 0);
assertEquals("Latitude is not properly parsed", 5.3d, mapper.readLatitude(columns), 0);
}
@Test
public void testGetLatitudeFromShortColumn() {
GeoPointMapper mapper = geoPointMapper("lat", "lon").build("field");
- Columns columns = new Columns().add("lat", new Short("5")).add("lon", 0);
+ Columns columns = Columns.empty().add("lat", new Short("5")).add("lon", 0);
assertEquals("Latitude is not properly parsed", 5d, mapper.readLatitude(columns), 0);
}
@Test(expected = IndexException.class)
public void testGetLatitudeFromUnparseableStringColumn() {
GeoPointMapper mapper = geoPointMapper("lat", "lon").build("field");
- Columns columns = new Columns().add("lat", "abc").add("lon", 0);
+ Columns columns = Columns.empty().add("lat", "abc").add("lon", 0);
mapper.readLatitude(columns);
}
@Test
public void testGetLatitudeWithNullColumn() {
GeoPointMapper mapper = geoPointMapper("lat", "lon").build("field");
- assertNull("Latitude is not properly parsed", mapper.readLatitude(new Columns()));
+ assertNull("Latitude is not properly parsed", mapper.readLatitude(Columns.empty()));
}
@Test(expected = IndexException.class)
public void testGetLatitudeWithTooSmallColumnValue() {
GeoPointMapper mapper = geoPointMapper("lat", "lon").build("field");
- Columns columns = new Columns().add("lat", "-91").add("lon", 0);
+ Columns columns = Columns.empty().add("lat", "-91").add("lon", 0);
mapper.readLatitude(columns);
}
@Test(expected = IndexException.class)
public void testGetLatitudeWithTooBigColumnValue() {
GeoPointMapper mapper = geoPointMapper("lat", "lon").build("field");
- Columns columns = new Columns().add("lat", "91").add("lon", 0);
+ Columns columns = Columns.empty().add("lat", "91").add("lon", 0);
mapper.readLatitude(columns);
}
@Test(expected = IndexException.class)
public void testGetLatitudeWithTooSmallShortColumnValue() {
GeoPointMapper mapper = geoPointMapper("lat", "lon").build("field");
- Columns columns = new Columns().add("lat", new Short("-91")).add("lon", 0);
+ Columns columns = Columns.empty().add("lat", new Short("-91")).add("lon", 0);
mapper.readLatitude(columns);
}
@Test(expected = IndexException.class)
public void testGetLatitudeWithTooBigShortColumnValue() {
GeoPointMapper mapper = geoPointMapper("lat", "lon").build("field");
- Columns columns = new Columns().add("lat", new Short("91")).add("lon", 0);
+ Columns columns = Columns.empty().add("lat", new Short("91")).add("lon", 0);
mapper.readLatitude(columns);
}
@Test
public void testGetLongitudeFromNullColumn() {
GeoPointMapper mapper = geoPointMapper("lat", "lon").build("field");
- Columns columns = new Columns().add("lat", 5).add("lon");
+ Columns columns = Columns.empty().add("lat", 5).add("lon");
assertNull("Longitude is not properly parsed", mapper.readLongitude(columns));
}
@Test
public void testGetLongitudeFromIntColumn() {
GeoPointMapper mapper = geoPointMapper("lat", "lon").build("field");
- Columns columns = new Columns().add("lat", 0).add("lon", 5);
+ Columns columns = Columns.empty().add("lat", 0).add("lon", 5);
assertEquals("Longitude is not properly parsed", 5d, mapper.readLongitude(columns), 0);
}
@Test
public void testGetLongitudeFromLongColumn() {
GeoPointMapper mapper = geoPointMapper("lat", "lon").build("field");
- Columns columns = new Columns().add("lat", 0).add("lon", 5L);
+ Columns columns = Columns.empty().add("lat", 0).add("lon", 5L);
assertEquals("Longitude is not properly parsed", 5d, mapper.readLongitude(columns), 0);
}
@Test
public void testGetLongitudeFromFloatColumn() {
GeoPointMapper mapper = geoPointMapper("lat", "lon").build("field");
- Columns columns = new Columns().add("lat", 0).add("lon", 5.3f);
+ Columns columns = Columns.empty().add("lat", 0).add("lon", 5.3f);
assertEquals("Longitude is not properly parsed", 5.3f, mapper.readLongitude(columns), 0);
}
@Test
public void testGetLongitudeFromDoubleColumn() {
GeoPointMapper mapper = geoPointMapper("lat", "lon").build("field");
- Columns columns = new Columns().add("lat", 0).add("lon", 5.3D);
+ Columns columns = Columns.empty().add("lat", 0).add("lon", 5.3D);
assertEquals("Longitude is not properly parsed", 5.3d, mapper.readLongitude(columns), 0);
}
@Test
public void testGetLongitudeFromStringColumn() {
GeoPointMapper mapper = geoPointMapper("lat", "lon").build("field");
- Columns columns = new Columns().add("lat", 0).add("lon", "5.3");
+ Columns columns = Columns.empty().add("lat", 0).add("lon", "5.3");
assertEquals("Longitude is not properly parsed", 5.3d, mapper.readLongitude(columns), 0);
}
@Test
public void testGetLongitudeFromShortColumn() {
GeoPointMapper mapper = geoPointMapper("lat", "lon").build("field");
- Columns columns = new Columns().add("lat", 0).add("lon", new Short("5"));
+ Columns columns = Columns.empty().add("lat", 0).add("lon", new Short("5"));
assertEquals("Longitude is not properly parsed", 5d, mapper.readLongitude(columns), 0);
}
@Test(expected = IndexException.class)
public void testGetLongitudeFromUnparseableStringColumn() {
GeoPointMapper mapper = geoPointMapper("lat", "lon").build("field");
- Columns columns = new Columns().add("lat", 0).add("lon", "abc");
+ Columns columns = Columns.empty().add("lat", 0).add("lon", "abc");
mapper.readLongitude(columns);
}
@Test
public void testGetLongitudeWithNullColumn() {
GeoPointMapper mapper = geoPointMapper("lat", "lon").build("field");
- assertNull("Longitude is not properly parsed", mapper.readLongitude(new Columns()));
+ assertNull("Longitude is not properly parsed", mapper.readLongitude(Columns.empty()));
}
@Test(expected = IndexException.class)
public void testGetLongitudeWithWrongColumnType() {
GeoPointMapper mapper = geoPointMapper("lat", "lon").build("field");
- Columns columns = new Columns().add("lat", 0).add("lon", UUID.randomUUID());
+ Columns columns = Columns.empty().add("lat", 0).add("lon", UUID.randomUUID());
assertEquals("Longitude is not properly parsed", 5.3d, mapper.readLongitude(columns), 0);
}
@Test(expected = IndexException.class)
public void testGetLongitudeWithTooSmallColumnValue() {
GeoPointMapper mapper = geoPointMapper("lat", "lon").build("field");
- Columns columns = new Columns().add("lat", 0).add("lon", "-181");
+ Columns columns = Columns.empty().add("lat", 0).add("lon", "-181");
mapper.readLongitude(columns);
}
@Test(expected = IndexException.class)
public void testGetLongitudeWithTooBigColumnValue() {
GeoPointMapper mapper = geoPointMapper("lat", "lon").build("field");
- Columns columns = new Columns().add("lat", 0).add("lon", "181");
+ Columns columns = Columns.empty().add("lat", 0).add("lon", "181");
mapper.readLongitude(columns);
}
@Test(expected = IndexException.class)
public void testGetLongitudeWithTooSmallShortColumnValue() {
GeoPointMapper mapper = geoPointMapper("lat", "lon").build("field");
- Columns columns = new Columns().add("lat", 0).add("lon", new Short("-181"));
+ Columns columns = Columns.empty().add("lat", 0).add("lon", new Short("-181"));
mapper.readLongitude(columns);
}
@Test(expected = IndexException.class)
public void testGetLongitudeWithTooBigShortColumnValue() {
GeoPointMapper mapper = geoPointMapper("lat", "lon").build("field");
- Columns columns = new Columns().add("lat", 0).add("lon", new Short("181"));
+ Columns columns = Columns.empty().add("lat", 0).add("lon", new Short("181"));
mapper.readLongitude(columns);
}
@@ -313,7 +313,7 @@ public void testSortField() {
@Test
public void testAddFields() {
GeoPointMapper mapper = geoPointMapper("lat", "lon").maxLevels(10).build("field");
- Columns columns = new Columns().add("lat", 20).add("lon", "30");
+ Columns columns = Columns.empty().add("lat", 20).add("lon", "30");
List fields = mapper.indexableFields(columns);
assertEquals("Fields are not properly created", 2, fields.size());
}
@@ -321,7 +321,7 @@ public void testAddFields() {
@Test
public void testAddFieldsWithNullColumns() {
GeoPointMapper mapper = geoPointMapper("lat", "lon").maxLevels(10).build("field");
- Columns columns = new Columns();
+ Columns columns = Columns.empty();
List fields = mapper.indexableFields(columns);
assertEquals("Fields are not properly created", 0, fields.size());
}
@@ -329,14 +329,14 @@ public void testAddFieldsWithNullColumns() {
@Test(expected = IndexException.class)
public void testAddFieldsWithNullLatitude() {
GeoPointMapper mapper = geoPointMapper("lat", "lon").maxLevels(10).build("field");
- Columns columns = new Columns().add("lon", "30");
+ Columns columns = Columns.empty().add("lon", "30");
mapper.indexableFields(columns);
}
@Test(expected = IndexException.class)
public void testAddFieldsWithNullLongitude() {
GeoPointMapper mapper = geoPointMapper("lat", "lon").maxLevels(10).build("field");
- Columns columns = new Columns().add("lat", 20);
+ Columns columns = Columns.empty().add("lat", 20);
mapper.indexableFields(columns);
}
diff --git a/plugin/src/test/java/com/stratio/cassandra/lucene/schema/mapping/GeoShapeMapperTest.java b/plugin/src/test/java/com/stratio/cassandra/lucene/schema/mapping/GeoShapeMapperTest.java
index c77701f4d..7059428c9 100644
--- a/plugin/src/test/java/com/stratio/cassandra/lucene/schema/mapping/GeoShapeMapperTest.java
+++ b/plugin/src/test/java/com/stratio/cassandra/lucene/schema/mapping/GeoShapeMapperTest.java
@@ -64,7 +64,7 @@ public void testConstructorWithAllArgs() {
@Test
public void testJsonSerialization() {
GeoShapeMapperBuilder builder = geoShapeMapper().column("column").maxLevels(10).transformations(new Centroid());
- testJson(builder, "{type:\"geo_shape\",column:\"column\",transformations:[{type:\"centroid\"}],max_levels:10}");
+ testJson(builder, "{type:\"geo_shape\",column:\"column\",max_levels:10,transformations:[{type:\"centroid\"}]}");
}
@Test
@@ -139,7 +139,7 @@ public void testSortField() {
@Test
public void testAddFieldsWithValidPoint() {
GeoShapeMapper mapper = geoShapeMapper().column("column").maxLevels(10).build("field");
- Columns columns = new Columns().add("column", "POINT(30.5 10.0)");
+ Columns columns = Columns.empty().add("column", "POINT(30.5 10.0)");
List fields = mapper.indexableFields(columns);
assertEquals("Fields are not properly created", 2, fields.size());
}
@@ -147,7 +147,7 @@ public void testAddFieldsWithValidPoint() {
@Test
public void testAddFieldsWithValidLineString() {
GeoShapeMapper mapper = geoShapeMapper().column("column").maxLevels(10).build("field");
- Columns columns = new Columns().add("column", "LINESTRING (30 10, 10 30, 40 40)");
+ Columns columns = Columns.empty().add("column", "LINESTRING (30 10, 10 30, 40 40)");
List fields = mapper.indexableFields(columns);
assertEquals("Fields are not properly created", 2, fields.size());
}
@@ -155,7 +155,7 @@ public void testAddFieldsWithValidLineString() {
@Test
public void testAddFieldsWithValidLinearRing() {
GeoShapeMapper mapper = geoShapeMapper().column("column").maxLevels(10).build("field");
- Columns columns = new Columns().add("column", "LINEARRING(30 10, 10 30, 40 40,30 10)");
+ Columns columns = Columns.empty().add("column", "LINEARRING(30 10, 10 30, 40 40,30 10)");
List fields = mapper.indexableFields(columns);
assertEquals("Fields are not properly created", 2, fields.size());
}
@@ -163,7 +163,7 @@ public void testAddFieldsWithValidLinearRing() {
@Test
public void testAddFieldsWithValidPolygon() {
GeoShapeMapper mapper = geoShapeMapper().column("column").maxLevels(10).build("field");
- Columns columns = new Columns().add("column", "POLYGON((0.0 0.0,0.0 10.0,10.0 0.0,0.0 0.0))");
+ Columns columns = Columns.empty().add("column", "POLYGON((0.0 0.0,0.0 10.0,10.0 0.0,0.0 0.0))");
List fields = mapper.indexableFields(columns);
assertEquals("Fields are not properly created", 2, fields.size());
}
@@ -171,7 +171,7 @@ public void testAddFieldsWithValidPolygon() {
@Test
public void testAddFieldsWithValidPolygon2() {
GeoShapeMapper mapper = geoShapeMapper().column("column").maxLevels(10).build("field");
- Columns columns = new Columns()
+ Columns columns = Columns.empty()
.add("column",
"POLYGON ((35 10, 45 45, 15 40, 10 20, 35 10), (20 30, 35 35, 30 20, 20 30))");
List fields = mapper.indexableFields(columns);
@@ -181,7 +181,7 @@ public void testAddFieldsWithValidPolygon2() {
@Test
public void testAddFieldsWithValidMultiPoint() {
GeoShapeMapper mapper = geoShapeMapper().column("column").maxLevels(10).build("field");
- Columns columns = new Columns().add("column", "MULTIPOINT ((10 40), (40 30), (20 20), (30 10))");
+ Columns columns = Columns.empty().add("column", "MULTIPOINT ((10 40), (40 30), (20 20), (30 10))");
List fields = mapper.indexableFields(columns);
assertEquals("Fields are not properly created", 2, fields.size());
}
@@ -189,7 +189,7 @@ public void testAddFieldsWithValidMultiPoint() {
@Test
public void testAddFieldsWithValidMultiPoint2() {
GeoShapeMapper mapper = geoShapeMapper().column("column").maxLevels(10).build("field");
- Columns columns = new Columns().add("column", "MULTIPOINT (10 40, 40 30, 20 20, 30 10)");
+ Columns columns = Columns.empty().add("column", "MULTIPOINT (10 40, 40 30, 20 20, 30 10)");
List fields = mapper.indexableFields(columns);
assertEquals("Fields are not properly created", 2, fields.size());
}
@@ -197,7 +197,7 @@ public void testAddFieldsWithValidMultiPoint2() {
@Test
public void testAddFieldsWithValidMultiline() {
GeoShapeMapper mapper = geoShapeMapper().column("column").maxLevels(10).build("field");
- Columns columns = new Columns()
+ Columns columns = Columns.empty()
.add("column", "MULTILINESTRING ((10 10, 20 20, 10 40),(40 40, 30 30, 40 20, 30 10))");
List fields = mapper.indexableFields(columns);
assertEquals("Fields are not properly created", 2, fields.size());
@@ -206,7 +206,7 @@ public void testAddFieldsWithValidMultiline() {
@Test
public void testAddFieldsWithValidUnionMultiPolygon() {
GeoShapeMapper mapper = geoShapeMapper().column("column").maxLevels(10).build("field");
- Columns columns = new Columns()
+ Columns columns = Columns.empty()
.add("column",
"MULTIPOLYGON (((30 20, 45 40, 10 40, 30 20)),((15 5, 40 10, 10 20, 5 10, 15 5)))");
List fields = mapper.indexableFields(columns);
@@ -216,7 +216,7 @@ public void testAddFieldsWithValidUnionMultiPolygon() {
@Test
public void testAddFieldsWithValidUnionMultiPolygon2() {
GeoShapeMapper mapper = geoShapeMapper().column("column").maxLevels(10).build("field");
- Columns columns = new Columns()
+ Columns columns = Columns.empty()
.add("column",
"MULTIPOLYGON (((40 40, 20 45, 45 30, 40 40)),((20 35, 10 30, 10 10, 30 5, 45 20, 20 35),(30 20, 20 15, 20 25, 30 20)))");
List fields = mapper.indexableFields(columns);
@@ -226,7 +226,7 @@ public void testAddFieldsWithValidUnionMultiPolygon2() {
@Test
public void testAddFieldsWithNullColumns() {
GeoShapeMapper mapper = geoShapeMapper().column("column").maxLevels(10).build("field");
- Columns columns = new Columns();
+ Columns columns = Columns.empty();
List fields = mapper.indexableFields(columns);
assertEquals("Fields are not properly created", 0, fields.size());
}
@@ -234,7 +234,7 @@ public void testAddFieldsWithNullColumns() {
@Test(expected = IndexException.class)
public void testAddFieldsWithInvalidShape() {
GeoShapeMapper mapper = geoShapeMapper().column("column").maxLevels(10).build("field");
- Columns columns = new Columns().add("column", "POLYON((0.0 0.0,0.0 10.0,10.0 0.0,0.0 0.0))");
+ Columns columns = Columns.empty().add("column", "POLYON((0.0 0.0,0.0 10.0,10.0 0.0,0.0 0.0))");
mapper.indexableFields(columns);
}
diff --git a/plugin/src/test/java/com/stratio/cassandra/lucene/schema/mapping/StringMapperTest.java b/plugin/src/test/java/com/stratio/cassandra/lucene/schema/mapping/StringMapperTest.java
index 561d0bec6..ca482b1b1 100644
--- a/plugin/src/test/java/com/stratio/cassandra/lucene/schema/mapping/StringMapperTest.java
+++ b/plugin/src/test/java/com/stratio/cassandra/lucene/schema/mapping/StringMapperTest.java
@@ -221,7 +221,7 @@ public void testBaseCaseSensitiveFalse() {
@Test
public void testAddFields() {
StringMapper mapper = stringMapper().caseSensitive(true).build("field");
- Columns columns = new Columns().add("field", "value");
+ Columns columns = Columns.empty().add("field", "value");
List fields = mapper.indexableFields(columns);
assertEquals("Number of created fields is wrong", 2, fields.size());
assertTrue("Indexed field is not properly created", fields.get(0) instanceof Field);
diff --git a/plugin/src/test/java/com/stratio/cassandra/lucene/search/SearchBuilderTest.java b/plugin/src/test/java/com/stratio/cassandra/lucene/search/SearchBuilderTest.java
index 6725f8dba..42a3ae79e 100644
--- a/plugin/src/test/java/com/stratio/cassandra/lucene/search/SearchBuilderTest.java
+++ b/plugin/src/test/java/com/stratio/cassandra/lucene/search/SearchBuilderTest.java
@@ -18,7 +18,7 @@
import com.stratio.cassandra.lucene.IndexException;
import com.stratio.cassandra.lucene.search.condition.builder.ConditionBuilder;
import com.stratio.cassandra.lucene.search.sort.builder.SimpleSortFieldBuilder;
-import com.stratio.cassandra.lucene.util.JsonSerializer;
+import com.stratio.cassandra.lucene.common.JsonSerializer;
import org.junit.Test;
import java.io.IOException;
diff --git a/plugin/src/test/java/com/stratio/cassandra/lucene/search/condition/AbstractConditionTest.java b/plugin/src/test/java/com/stratio/cassandra/lucene/search/condition/AbstractConditionTest.java
index 5b31b42ce..38decc246 100644
--- a/plugin/src/test/java/com/stratio/cassandra/lucene/search/condition/AbstractConditionTest.java
+++ b/plugin/src/test/java/com/stratio/cassandra/lucene/search/condition/AbstractConditionTest.java
@@ -16,7 +16,7 @@
package com.stratio.cassandra.lucene.search.condition;
import com.stratio.cassandra.lucene.search.condition.builder.ConditionBuilder;
-import com.stratio.cassandra.lucene.util.JsonSerializer;
+import com.stratio.cassandra.lucene.common.JsonSerializer;
import java.io.IOException;
diff --git a/plugin/src/test/java/com/stratio/cassandra/lucene/search/condition/FuzzyConditionTest.java b/plugin/src/test/java/com/stratio/cassandra/lucene/search/condition/FuzzyConditionTest.java
index 4c24d29fa..da2ec4060 100644
--- a/plugin/src/test/java/com/stratio/cassandra/lucene/search/condition/FuzzyConditionTest.java
+++ b/plugin/src/test/java/com/stratio/cassandra/lucene/search/condition/FuzzyConditionTest.java
@@ -101,8 +101,8 @@ public void testJsonSerialization() {
.maxExpansions(49)
.transpositions(true);
testJsonSerialization(builder,
- "{type:\"fuzzy\",field:\"field\",value:\"value\",boost:0.7," +
- "transpositions:true,max_edits:2,prefix_length:2,max_expansions:49}");
+ "{type:\"fuzzy\",field:\"field\",value:\"value\",boost:0.7,max_edits:2," +
+ "prefix_length:2,max_expansions:49,transpositions:true}");
}
@Test
diff --git a/plugin/src/test/java/com/stratio/cassandra/lucene/search/condition/GeoShapeConditionTest.java b/plugin/src/test/java/com/stratio/cassandra/lucene/search/condition/GeoShapeConditionTest.java
index aea12306f..66d8f6fe0 100644
--- a/plugin/src/test/java/com/stratio/cassandra/lucene/search/condition/GeoShapeConditionTest.java
+++ b/plugin/src/test/java/com/stratio/cassandra/lucene/search/condition/GeoShapeConditionTest.java
@@ -28,7 +28,7 @@
import static com.stratio.cassandra.lucene.schema.SchemaBuilders.*;
import static com.stratio.cassandra.lucene.search.SearchBuilders.geoShape;
-import static com.stratio.cassandra.lucene.util.GeospatialUtilsJTS.geometry;
+import static com.stratio.cassandra.lucene.common.GeospatialUtilsJTS.geometry;
import static org.junit.Assert.*;
/**
diff --git a/plugin/src/test/java/com/stratio/cassandra/lucene/search/sort/builder/GeoDistanceSortFieldBuilderTest.java b/plugin/src/test/java/com/stratio/cassandra/lucene/search/sort/builder/GeoDistanceSortFieldBuilderTest.java
index 70bbdd8f8..be0a302c5 100644
--- a/plugin/src/test/java/com/stratio/cassandra/lucene/search/sort/builder/GeoDistanceSortFieldBuilderTest.java
+++ b/plugin/src/test/java/com/stratio/cassandra/lucene/search/sort/builder/GeoDistanceSortFieldBuilderTest.java
@@ -18,7 +18,7 @@
import com.stratio.cassandra.lucene.IndexException;
import com.stratio.cassandra.lucene.search.sort.GeoDistanceSortField;
import com.stratio.cassandra.lucene.search.sort.SortField;
-import com.stratio.cassandra.lucene.util.JsonSerializer;
+import com.stratio.cassandra.lucene.common.JsonSerializer;
import org.junit.Test;
import java.io.IOException;
@@ -103,14 +103,14 @@ public void testBuildReverse() {
@Test
public void testJson() throws IOException {
String json1 = "{type:\"geo_distance\",field:\"geo_place\",latitude:0.0,longitude:0.0,reverse:false}";
- String json2 = JsonSerializer.toString(JsonSerializer.fromString(json1, GeoDistanceSortFieldBuilder.class));
+ String json2 = JsonSerializer.toString(JsonSerializer.fromString(json1, SortFieldBuilder.class));
assertEquals("JSON serialization is wrong", json1, json2);
}
@Test
public void testJsonDefault() throws IOException {
String json1 = "{type:\"geo_distance\",field:\"geo_place\",latitude:0.0,longitude:0.0,reverse:false}";
- GeoDistanceSortFieldBuilder builder = JsonSerializer.fromString(json1, GeoDistanceSortFieldBuilder.class);
+ SortFieldBuilder builder = JsonSerializer.fromString(json1, SortFieldBuilder.class);
String json2 = JsonSerializer.toString(builder);
assertEquals("JSON serialization is wrong", json1, json2);
}
@@ -118,7 +118,7 @@ public void testJsonDefault() throws IOException {
@Test
public void testJsonReverse() throws IOException {
String json1 = "{type:\"geo_distance\",field:\"geo_place\",latitude:0.0,longitude:0.0,reverse:false}";
- String json2 = JsonSerializer.toString(JsonSerializer.fromString(json1, GeoDistanceSortFieldBuilder.class));
+ String json2 = JsonSerializer.toString(JsonSerializer.fromString(json1, SortFieldBuilder.class));
assertEquals("JSON serialization is wrong", json1, json2);
}
}
\ No newline at end of file
diff --git a/plugin/src/test/java/com/stratio/cassandra/lucene/search/sort/builder/SimpleSortFieldBuilderTest.java b/plugin/src/test/java/com/stratio/cassandra/lucene/search/sort/builder/SimpleSortFieldBuilderTest.java
index 960348f4b..a1b532d1e 100644
--- a/plugin/src/test/java/com/stratio/cassandra/lucene/search/sort/builder/SimpleSortFieldBuilderTest.java
+++ b/plugin/src/test/java/com/stratio/cassandra/lucene/search/sort/builder/SimpleSortFieldBuilderTest.java
@@ -17,7 +17,7 @@
import com.stratio.cassandra.lucene.search.sort.SimpleSortField;
import com.stratio.cassandra.lucene.search.sort.SortField;
-import com.stratio.cassandra.lucene.util.JsonSerializer;
+import com.stratio.cassandra.lucene.common.JsonSerializer;
import org.junit.Test;
import java.io.IOException;
diff --git a/plugin/src/test/java/com/stratio/cassandra/lucene/util/ByteBufferUtilsTest.java b/plugin/src/test/java/com/stratio/cassandra/lucene/util/ByteBufferUtilsTest.java
deleted file mode 100644
index 580d5c299..000000000
--- a/plugin/src/test/java/com/stratio/cassandra/lucene/util/ByteBufferUtilsTest.java
+++ /dev/null
@@ -1,105 +0,0 @@
-/*
- * Copyright (C) 2014 Stratio (http://stratio.com)
- *
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-package com.stratio.cassandra.lucene.util;
-
-import org.apache.cassandra.db.marshal.BooleanType;
-import org.apache.cassandra.db.marshal.CompositeType;
-import org.apache.cassandra.db.marshal.Int32Type;
-import org.apache.cassandra.db.marshal.UTF8Type;
-import org.apache.cassandra.utils.ByteBufferUtil;
-import org.apache.lucene.util.BytesRef;
-import org.junit.Test;
-
-import java.nio.ByteBuffer;
-
-import static org.junit.Assert.*;
-
-/**
- * Class for testing {@link ByteBufferUtils}.
- *
- * @author Andres de la Pena {@literal }
- */
-public class ByteBufferUtilsTest {
-
- @Test
- public void testBytesRef() throws Exception {
- CompositeType type = CompositeType.getInstance(UTF8Type.instance, Int32Type.instance);
- ByteBuffer in = type.decompose("monkey", 1);
- BytesRef bytesRef = ByteBufferUtils.bytesRef(in);
- ByteBuffer out = ByteBufferUtils.byteBuffer(bytesRef);
- assertEquals("Failing conversion between ByteBuffer and BytesRef", 0, ByteBufferUtil.compareUnsigned(in, out));
- }
-
- @Test
- public void testIsEmptyTrue() {
- ByteBuffer bb = ByteBuffer.allocate(0);
- assertTrue(ByteBufferUtils.isEmpty(bb));
- }
-
- @Test
- public void testIsEmptyFalse() {
- ByteBuffer bb = ByteBuffer.allocate(10);
- assertFalse(ByteBufferUtils.isEmpty(bb));
- }
-
- @Test
- public void testSplit() {
- ByteBuffer bb = UTF8Type.instance.decompose("test");
- assertEquals("Must be split to one element", 1, ByteBufferUtils.split(bb, UTF8Type.instance).length);
- }
-
- @Test
- public void testSplitComposite() {
- CompositeType type = CompositeType.getInstance(UTF8Type.instance, Int32Type.instance);
- ByteBuffer bb = type.builder()
- .add(UTF8Type.instance.decompose("1"))
- .add(Int32Type.instance.decompose(1))
- .build();
- assertEquals("Must be split to two elements", 2, ByteBufferUtils.split(bb, type).length);
- }
-
- @Test
- public void testToString() {
- ByteBuffer bb = UTF8Type.instance.decompose("test");
- String string = ByteBufferUtils.toString(bb, UTF8Type.instance);
- assertEquals("Abstract type string conversion is failing", "test", string);
- }
-
- @Test
- public void testToStringComposite() throws Exception {
- CompositeType type = CompositeType.getInstance(UTF8Type.instance, Int32Type.instance);
- ByteBuffer bb = type.decompose("monkey", 1);
- String string = ByteBufferUtils.toString(bb, type);
- assertEquals("Composite type string conversion is failing", "monkey:1", string);
- }
-
- @Test
- public void testComposeDecompose() {
- ByteBuffer[] bbs = ByteBufferUtils.decompose(ByteBufferUtils.compose(UTF8Type.instance.decompose("test"),
- Int32Type.instance.decompose(999),
- BooleanType.instance.decompose(true)));
- assertEquals("Compose-decompose is wrong", 3, bbs.length);
- assertEquals("Compose-decompose is wrong", "test", UTF8Type.instance.compose(bbs[0]));
- assertEquals("Compose-decompose is wrong", 999, Int32Type.instance.compose(bbs[1]), 0);
- assertEquals("Compose-decompose is wrong", true, BooleanType.instance.compose(bbs[2]));
- }
-
- @Test
- public void testComposeDecomposeEmpty() {
- ByteBuffer[] bbs = ByteBufferUtils.decompose(ByteBufferUtils.compose());
- assertEquals("Compose-decompose with empty components is wrong", 0, bbs.length);
- }
-}
diff --git a/plugin/src/test/java/com/stratio/cassandra/lucene/util/DateParserTest.java b/plugin/src/test/java/com/stratio/cassandra/lucene/util/DateParserTest.java
index 30f66d5d9..8b5b9b6df 100644
--- a/plugin/src/test/java/com/stratio/cassandra/lucene/util/DateParserTest.java
+++ b/plugin/src/test/java/com/stratio/cassandra/lucene/util/DateParserTest.java
@@ -16,6 +16,7 @@
package com.stratio.cassandra.lucene.util;
import com.stratio.cassandra.lucene.IndexException;
+import com.stratio.cassandra.lucene.common.DateParser;
import org.apache.cassandra.utils.UUIDGen;
import org.joda.time.format.DateTimeFormat;
import org.junit.Assert;
diff --git a/plugin/src/test/java/com/stratio/cassandra/lucene/util/GeospatialUtilsJTSTest.java b/plugin/src/test/java/com/stratio/cassandra/lucene/util/GeospatialUtilsJTSTest.java
index d5aa851f9..54a3e034f 100644
--- a/plugin/src/test/java/com/stratio/cassandra/lucene/util/GeospatialUtilsJTSTest.java
+++ b/plugin/src/test/java/com/stratio/cassandra/lucene/util/GeospatialUtilsJTSTest.java
@@ -16,9 +16,10 @@
package com.stratio.cassandra.lucene.util;
import com.spatial4j.core.shape.jts.JtsGeometry;
+import com.stratio.cassandra.lucene.common.GeospatialUtilsJTS;
import org.junit.Test;
-import static com.stratio.cassandra.lucene.util.GeospatialUtilsJTS.geometry;
+import static com.stratio.cassandra.lucene.common.GeospatialUtilsJTS.geometry;
import static org.junit.Assert.assertEquals;
/**
diff --git a/plugin/src/test/java/com/stratio/cassandra/lucene/util/TimeCounterTest.java b/plugin/src/test/java/com/stratio/cassandra/lucene/util/TimeCounterTest.java
deleted file mode 100644
index 774b9062f..000000000
--- a/plugin/src/test/java/com/stratio/cassandra/lucene/util/TimeCounterTest.java
+++ /dev/null
@@ -1,74 +0,0 @@
-/*
- * Copyright (C) 2014 Stratio (http://stratio.com)
- *
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-package com.stratio.cassandra.lucene.util;
-
-import org.junit.Test;
-
-import static org.junit.Assert.assertNotNull;
-import static org.junit.Assert.assertTrue;
-
-/**
- * Tests for {@link TimeCounter}.
- *
- * @author Andres de la Pena {@literal }
- */
-public class TimeCounterTest {
-
- @Test
- public void testFlow() throws InterruptedException {
- TimeCounter tc = TimeCounter.create();
- tc.start();
- Thread.sleep(10);
- tc.stop();
- assertTrue("TimeCounter milliseconds should be greater or equal that 10", tc.getTime() >= 10);
- assertNotNull(tc.toString());
- }
-
- @Test(expected = IllegalStateException.class)
- public void testStartStarted() {
- TimeCounter tc = TimeCounter.create();
- tc.start();
- tc.start();
- }
-
- @Test
- public void testStartStopped() throws InterruptedException {
- TimeCounter tc = TimeCounter.create();
- tc.start();
- Thread.sleep(5);
- tc.stop();
- long t1 = tc.getTime();
- tc.start();
- Thread.sleep(5);
- tc.stop();
- long t2 = tc.getTime();
- assertTrue("TimeCounter milliseconds should be incremented", t2 > t1);
- }
-
- @Test(expected = IllegalStateException.class)
- public void testStopStopped() {
- TimeCounter tc = TimeCounter.create();
- tc.start();
- tc.stop();
- tc.stop();
- }
-
- @Test(expected = IllegalStateException.class)
- public void testStopNotStarted() {
- TimeCounter tc = TimeCounter.create();
- tc.stop();
- }
-}
diff --git a/plugin/src/test/scala/com/stratio/cassandra/lucene/BaseScalaTest.scala b/plugin/src/test/scala/com/stratio/cassandra/lucene/BaseScalaTest.scala
index 527b0b957..682e78f95 100644
--- a/plugin/src/test/scala/com/stratio/cassandra/lucene/BaseScalaTest.scala
+++ b/plugin/src/test/scala/com/stratio/cassandra/lucene/BaseScalaTest.scala
@@ -15,11 +15,55 @@
*/
package com.stratio.cassandra.lucene
+import com.google.common.collect.Lists
+import org.apache.cassandra.cql3.FieldIdentifier
+import org.apache.cassandra.db.marshal._
import org.scalatest.{FunSuite, Matchers}
+import scala.collection.JavaConverters._
/** Base test.
*
* @author Andres de la Pena `adelapena@stratio.com`
*/
class BaseScalaTest extends FunSuite with Matchers {
+
+
+}
+
+object BaseScalaTest {
+
+ val utf8 = UTF8Type.instance
+ val ascii = AsciiType.instance
+ val int32 = Int32Type.instance
+ val byte = ByteType.instance
+ val short = ShortType.instance
+ val long = LongType.instance
+ val float = FloatType.instance
+ val double = DoubleType.instance
+ val date = SimpleDateType.instance
+ val integer = IntegerType.instance
+ val uuid = UUIDType.instance
+ val lexicalUuid = LexicalUUIDType.instance
+ val timeUuid = TimeUUIDType.instance
+ val decimal = DecimalType.instance
+ val timestamp = TimestampType.instance
+ val boolean = BooleanType.instance
+
+ def set[A](elements: AbstractType[A], multiCell: Boolean): SetType[A] =
+ SetType.getInstance(elements, multiCell)
+
+ def list[A](elements: AbstractType[A], multiCell: Boolean): ListType[A] =
+ ListType.getInstance(elements, multiCell)
+
+ def map[A, B](keys: AbstractType[A], values: AbstractType[B], multiCell: Boolean): MapType[A, B] =
+ MapType.getInstance(keys, values, multiCell)
+
+ def udt(names: List[String], types: List[AbstractType[_]]): UserType =
+ new UserType(
+ "ks",
+ utf8.decompose("cell"),
+ Lists.newArrayList(names.map(x => new FieldIdentifier(utf8.decompose(x))).asJava),
+ Lists.newArrayList(types.asJava),false)
+
+ def reversed[A](base: AbstractType[A]): ReversedType[A] = ReversedType.getInstance(base)
}
diff --git a/plugin/src/test/scala/com/stratio/cassandra/lucene/IndexOptionsTest.scala b/plugin/src/test/scala/com/stratio/cassandra/lucene/IndexOptionsTest.scala
index 03f2cc052..09a407349 100644
--- a/plugin/src/test/scala/com/stratio/cassandra/lucene/IndexOptionsTest.scala
+++ b/plugin/src/test/scala/com/stratio/cassandra/lucene/IndexOptionsTest.scala
@@ -16,6 +16,7 @@
package com.stratio.cassandra.lucene
import com.stratio.cassandra.lucene.IndexOptions._
+import com.stratio.cassandra.lucene.partitioning.{PartitionerOnNone, PartitionerOnToken}
import org.junit.runner.RunWith
import org.scalatest.junit.JUnitRunner
@@ -249,4 +250,21 @@ class IndexOptionsTest extends BaseScalaTest {
val options = Map(EXCLUDED_DATA_CENTERS_OPTION -> " dc1 , dc2 ")
parseExcludedDataCenters(options) shouldBe List("dc1", "dc2")
}
+
+ // Partitioner option tests
+
+ test("parse partitioner option with default") {
+ parsePartitioner(Map(), null) shouldBe DEFAULT_PARTITIONER
+ }
+
+ test("parse partitioner with none partitioner") {
+ val json = "{type:\"none\"}"
+ parsePartitioner(Map(PARTITIONER_OPTION -> json), null) shouldBe PartitionerOnNone()
+ }
+
+ test("parse partitioner with token partitioner") {
+ val json = "{type:\"token\", partitions: 10}"
+ parsePartitioner(Map(PARTITIONER_OPTION -> json), null) shouldBe PartitionerOnToken(10)
+ }
+
}
diff --git a/plugin/src/test/scala/com/stratio/cassandra/lucene/column/ColumnTest.scala b/plugin/src/test/scala/com/stratio/cassandra/lucene/column/ColumnTest.scala
index f64a445bd..7ea8a0dff 100644
--- a/plugin/src/test/scala/com/stratio/cassandra/lucene/column/ColumnTest.scala
+++ b/plugin/src/test/scala/com/stratio/cassandra/lucene/column/ColumnTest.scala
@@ -15,7 +15,12 @@
*/
package com.stratio.cassandra.lucene.column
+import java.text.SimpleDateFormat
+import java.util.Date
+
+import com.stratio.cassandra.lucene.column.Column._
import com.stratio.cassandra.lucene.BaseScalaTest
+import com.stratio.cassandra.lucene.BaseScalaTest._
import org.junit.runner.RunWith
import org.scalatest.junit.JUnitRunner
@@ -28,12 +33,10 @@ class ColumnTest extends BaseScalaTest {
test("set default attributes") {
val column = Column("cell")
- column.cellName shouldBe "cell"
- column.mapperName shouldBe "cell"
- column.mapperNames shouldBe List("cell")
- column.fieldName shouldBe "cell"
+ column.cell shouldBe "cell"
+ column.mapper shouldBe "cell"
+ column.field shouldBe "cell"
column.value shouldBe None
- column.deletionTime shouldBe Column.NO_DELETION_TIME
}
test("set all attributes") {
@@ -42,34 +45,11 @@ class ColumnTest extends BaseScalaTest {
.withUDTName("u2")
.withMapName("m1")
.withMapName("m2")
- .withDeletionTime(10)
.withValue(5)
- column.cellName shouldBe "cell"
- column.mapperName shouldBe "cell.u1.u2"
- column.mapperNames shouldBe List("cell", "u1", "u2")
- column.fieldName shouldBe "cell.u1.u2$m1$m2"
+ column.cell shouldBe "cell"
+ column.mapper shouldBe "cell.u1.u2"
+ column.field shouldBe "cell.u1.u2$m1$m2"
column.value shouldBe Some(5)
- column.deletionTime shouldBe 10
- }
-
- test("isDeleted because of value") {
- val column = Column("cell")
- column.isDeleted(0) shouldBe true
- column.isDeleted(Int.MinValue) shouldBe true
- column.isDeleted(Int.MaxValue) shouldBe true
- column.withValue(7).isDeleted(0) shouldBe false
- column.withValue(7).isDeleted(Int.MinValue) shouldBe false
- column.withValue(7).isDeleted(Int.MaxValue) shouldBe true
- }
-
- test("isDeleted because of deletion time") {
- val column = Column("cell").withDeletionTime(10)
- column.isDeleted(9) shouldBe true
- column.isDeleted(10) shouldBe true
- column.isDeleted(11) shouldBe true
- column.withValue(7).isDeleted(9) shouldBe false
- column.withValue(7).isDeleted(10) shouldBe true
- column.withValue(7).isDeleted(11) shouldBe true
}
test("fieldName") {
@@ -79,16 +59,28 @@ class ColumnTest extends BaseScalaTest {
Column("c").withUDTName("u").withMapName("m").fieldName("f") shouldBe "f$m"
}
- test("parse") {
- Column.parse("c") shouldBe Column("c")
- Column.parse("c.u") shouldBe Column("c").withUDTName("u")
- Column.parse("c$m") shouldBe Column("c").withMapName("m")
- Column.parse("c.u$m") shouldBe Column("c").withUDTName("u").withMapName("m")
- Column.parse("c.u1.u2$m1$m2") shouldBe Column("c")
- .withUDTName("u1")
- .withUDTName("u2")
- .withMapName("m1")
- .withMapName("m2")
+ test("parse cell name") {
+ Column.parseCellName("c") shouldBe "c"
+ Column.parseCellName("c.u") shouldBe "c"
+ Column.parseCellName("c$m") shouldBe "c"
+ Column.parseCellName("c.u$m") shouldBe "c"
+ Column.parseCellName("c.u1.u2$m1$m2") shouldBe "c"
+ }
+
+ test("parse mapper name") {
+ Column.parseMapperName("c") shouldBe "c"
+ Column.parseMapperName("c.u") shouldBe "c.u"
+ Column.parseMapperName("c$m") shouldBe "c"
+ Column.parseMapperName("c.u$m") shouldBe "c.u"
+ Column.parseMapperName("c.u1.u2$m1$m2") shouldBe "c.u1.u2"
+ }
+
+ test("parse udt names") {
+ Column.parseUdtNames("c") shouldBe Nil
+ Column.parseUdtNames("c.u") shouldBe List("u")
+ Column.parseUdtNames("c$m") shouldBe Nil
+ Column.parseUdtNames("c.u$m") shouldBe List("u")
+ Column.parseUdtNames("c.u1.u2$m1$m2") shouldBe List("u1", "u2")
}
test("add column") {
@@ -103,7 +95,7 @@ class ColumnTest extends BaseScalaTest {
test("toString with default attributes") {
Column("cell").toString shouldBe
- s"Column{cell=cell, name=cell, value=None, deletionTime=${Column.NO_DELETION_TIME}}"
+ s"Column{cell=cell, field=cell, value=None}"
}
test("toString with all attributes") {
@@ -112,9 +104,46 @@ class ColumnTest extends BaseScalaTest {
.withUDTName("u2")
.withMapName("m1")
.withMapName("m2")
- .withDeletionTime(10)
.withValue(5)
.toString shouldBe
- "Column{cell=cell, name=cell.u1.u2$m1$m2, value=Some(5), deletionTime=10}"
+ "Column{cell=cell, field=cell.u1.u2$m1$m2, value=Some(5)}"
+ }
+
+ test("compose with basic types") {
+ compose(ascii.decompose("aB"), ascii) shouldBe "aB"
+ compose(utf8.decompose("aB"), utf8) shouldBe "aB"
+ compose(byte.decompose(2.toByte), byte) shouldBe 2.toByte
+ compose(short.decompose(2.toShort), short) shouldBe 2.toShort
+ compose(int32.decompose(2), int32) shouldBe 2
+ compose(long.decompose(2l), long) shouldBe 2l
+ compose(float.decompose(2.1f), float) shouldBe 2.1f
+ compose(double.decompose(2.1d), double) shouldBe 2.1d
+ }
+
+ test("compose with SimpleDateType") {
+ val expected: Date = new SimpleDateFormat("yyyy-MM-ddZ").parse("1982-11-27+0000")
+ val bb = date.fromTimeInMillis(expected.getTime)
+ val actual = compose(bb, date)
+ actual shouldBe a[Date]
+ actual shouldBe expected
+ }
+
+ test("with composed value") {
+ Column("c").withValue(ascii.decompose("aB"), ascii) shouldBe Column("c").withValue("aB")
+ Column("c").withValue(utf8.decompose("aB"), utf8) shouldBe Column("c").withValue("aB")
+ Column("c").withValue(byte.decompose(2.toByte), byte) shouldBe Column("c").withValue(2.toByte)
+ Column("c").withValue(short.decompose(2.toShort), short) shouldBe Column("c").withValue(2.toShort)
+ Column("c").withValue(int32.decompose(2), int32) shouldBe Column("c").withValue(2)
+ Column("c").withValue(long.decompose(2l), long) shouldBe Column("c").withValue(2l)
+ Column("c").withValue(float.decompose(2.1f), float) shouldBe Column("c").withValue(2.1f)
+ Column("c").withValue(double.decompose(2.1d), double) shouldBe Column("c").withValue(2.1d)
+ }
+
+ test("with value") {
+ Column("c").withValue(null) shouldBe Column("c")
+ Column("c").withValue(3).withValue(null) shouldBe Column("c")
+ Column("c").withValue(3).withValue(4) shouldBe Column("c").withValue(4)
+ Column("c").withValue(null).withValue(3) shouldBe Column("c").withValue(3)
+ Column("c").withValue(3).withValue(3) shouldBe Column("c").withValue(3)
}
}
diff --git a/plugin/src/test/scala/com/stratio/cassandra/lucene/column/ColumnsMapperTest.scala b/plugin/src/test/scala/com/stratio/cassandra/lucene/column/ColumnsMapperTest.scala
deleted file mode 100644
index 27751d59d..000000000
--- a/plugin/src/test/scala/com/stratio/cassandra/lucene/column/ColumnsMapperTest.scala
+++ /dev/null
@@ -1,287 +0,0 @@
-/*
- * Copyright (C) 2014 Stratio (http://stratio.com)
- *
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-package com.stratio.cassandra.lucene.column
-
-import java.math.{BigDecimal, BigInteger}
-import java.text.SimpleDateFormat
-import java.util.{Date, UUID}
-
-import com.google.common.collect.Lists
-import com.stratio.cassandra.lucene.BaseScalaTest
-import com.stratio.cassandra.lucene.column.ColumnsMapper._
-import com.stratio.cassandra.lucene.column.ColumnsMapperTest._
-import org.apache.cassandra.config.ColumnDefinition
-import org.apache.cassandra.cql3.FieldIdentifier
-import org.apache.cassandra.db.marshal.{DecimalType, _}
-import org.apache.cassandra.db.rows.Cell.NO_DELETION_TIME
-import org.apache.cassandra.db.rows.{BufferCell, Cell}
-import org.apache.cassandra.utils.UUIDGen
-import org.junit.runner.RunWith
-import org.scalatest.junit.JUnitRunner
-
-import scala.collection.JavaConverters._
-
-/** Tests for [[ColumnsMapper]].
- *
- * @author Andres de la Pena `adelapena@stratio.com`
- */
-@RunWith(classOf[JUnitRunner])
-class ColumnsMapperTest extends BaseScalaTest {
-
- test("compose with basic types") {
- compose(ascii.decompose("aB"), ascii) shouldBe "aB"
- compose(utf8.decompose("aB"), utf8) shouldBe "aB"
- compose(byte.decompose(2.toByte), byte) shouldBe 2.toByte
- compose(short.decompose(2.toShort), short) shouldBe 2.toShort
- compose(int32.decompose(2), int32) shouldBe 2
- compose(long.decompose(2l), long) shouldBe 2l
- compose(float.decompose(2.1f), float) shouldBe 2.1f
- compose(double.decompose(2.1d), double) shouldBe 2.1d
- }
-
- test("compose with SimpleDateType") {
- val expected: Date = new SimpleDateFormat("yyyy-MM-ddZ").parse("1982-11-27+0000")
- val bb = date.fromTimeInMillis(expected.getTime)
- val actual = ColumnsMapper.compose(bb, date)
- actual shouldBe a[Date]
- actual shouldBe expected
- }
-
- test("columns from plain cells") {
- def test[A](abstractType: AbstractType[A], value: A) = {
- val column = Column("cell")
- columns(isTombstone = false, column, abstractType, abstractType.decompose(value)) shouldBe
- Columns(column.withValue(value))
- }
- test(ascii, "Ab")
- test(utf8, "Ab")
- test(int32, 7.asInstanceOf[Integer])
- test(float, 7.3f.asInstanceOf[java.lang.Float])
- test(long, 7l.asInstanceOf[java.lang.Long])
- test(double, 7.3d.asInstanceOf[java.lang.Double])
- test(integer, new BigInteger("7"))
- test(decimal, new BigDecimal("7.3"))
- test(uuid, UUID.randomUUID)
- test(lexicalUuid, UUID.randomUUID)
- test(timeUuid, UUIDGen.getTimeUUID)
- test(timestamp, new Date)
- test(boolean, true.asInstanceOf[java.lang.Boolean])
- }
-
- test("columns from frozen set") {
- val column = Column("cell")
- val `type` = set(utf8, false)
- val bb = `type`.decompose(Set("a", "b").asJava)
- columns(isTombstone = false, column, `type`, bb) shouldBe
- Columns(column.withValue("a"), column.withValue("b"))
- columns(isTombstone = true, column, `type`, bb) shouldBe Columns(column)
- }
-
- test("columns from frozen list") {
- val column = Column("cell")
- val `type` = list(utf8, false)
- val bb = `type`.decompose(List("a", "b").asJava)
- columns(isTombstone = false, column, `type`, bb) shouldBe
- Columns(column.withValue("a"), column.withValue("b"))
- columns(isTombstone = true, column, `type`, bb) shouldBe Columns(column)
- }
-
- test("columns from frozen map") {
- val column = Column("cell")
- val `type` = map(utf8, utf8, true)
- val bb = `type`.decompose(Map("k1" -> "v1", "k2" -> "v2").asJava)
- columns(isTombstone = false, column, `type`, bb) shouldBe
- Columns(column.withMapName("k1").withValue("v1"), column.withMapName("k2").withValue("v2"))
- columns(isTombstone = true, column, `type`, bb) shouldBe Columns(column)
- }
-
- test("columns from tuple") {
- val column = Column("cell")
- val `type` = new TupleType(Lists.newArrayList(utf8, utf8))
- val bb = TupleType.buildValue(Array(utf8.decompose("a"), utf8.decompose("b")))
- columns(isTombstone = false, column, `type`, bb) shouldBe
- Columns(column.withUDTName("0").withValue("a"), column.withUDTName("1").withValue("b"))
- columns(isTombstone = true, column, `type`, bb) shouldBe Columns(column)
- }
-
- test("columns from UDT") {
- val column = Column("cell")
- val `type` = udt(List("a", "b"), List(utf8, utf8))
- val bb = TupleType.buildValue(Array(utf8.decompose("1"), utf8.decompose("2")))
- columns(isTombstone = false, column, `type`, bb) shouldBe
- Columns(column.withUDTName("a").withValue("1"), column.withUDTName("b").withValue("2"))
- columns(isTombstone = true, column, `type`, bb) shouldBe Columns(column)
- }
-
- test("columns from regular cell") {
- val columnDefinition = ColumnDefinition.regularDef("ks", "cf", "cell", utf8)
- val cell = new BufferCell(
- columnDefinition,
- System.currentTimeMillis(),
- Cell.NO_TTL,
- Cell.NO_DELETION_TIME,
- utf8.decompose("a"),
- null)
- columns(cell) shouldBe Columns(Column("cell").withValue("a").withDeletionTime(NO_DELETION_TIME))
- }
-
- test("supports regular") {
- supports(utf8, List(classOf[String])) shouldBe true
- supports(utf8, List(classOf[Number])) shouldBe false
- supports(utf8, List(classOf[String], classOf[Number])) shouldBe true
- supports(utf8, List(classOf[Number], classOf[String])) shouldBe true
- }
-
- test("supports list") {
- supports(list(utf8, false), List(classOf[String])) shouldBe true
- supports(list(utf8, true), List(classOf[String])) shouldBe true
- supports(list(int32, false), List(classOf[String])) shouldBe false
- supports(list(int32, true), List(classOf[String])) shouldBe false
- }
-
- test("supports set") {
- supports(set(utf8, false), List(classOf[String])) shouldBe true
- supports(set(utf8, true), List(classOf[String])) shouldBe true
- supports(set(int32, false), List(classOf[String])) shouldBe false
- supports(set(int32, true), List(classOf[String])) shouldBe false
- }
-
- test("supports map") {
- supports(map(int32, utf8, false), List(classOf[String])) shouldBe true
- supports(map(int32, utf8, true), List(classOf[String])) shouldBe true
- supports(map(utf8, int32, false), List(classOf[String])) shouldBe false
- supports(map(utf8, int32, true), List(classOf[String])) shouldBe false
- }
-
- test("supports reversed") {
- supports(reversed(utf8), List(classOf[String])) shouldBe true
- supports(reversed(int32), List(classOf[String])) shouldBe false
- supports(reversed(utf8), List(classOf[String], classOf[Number])) shouldBe true
- supports(reversed(utf8), List(classOf[Number], classOf[String])) shouldBe true
- }
-
- test("child regular") {
- childType(utf8, "") shouldBe None
- }
-
- test("child UDT") {
- val userType = udt(List("a", "b"), List(utf8, int32))
- childType(userType, "a") shouldBe Some(utf8)
- childType(userType, "b") shouldBe Some(int32)
- childType(userType, "c") shouldBe None
- }
-
- test("child regular set") {
- val setType = set(utf8, true)
- childType(setType, "a") shouldBe None
- }
-
- test("child UDT set") {
- val userType = udt(List("a", "b"), List(utf8, int32))
- val setType = set(userType, true)
- childType(setType, "a") shouldBe Some(utf8)
- childType(setType, "b") shouldBe Some(int32)
- childType(setType, "c") shouldBe None
- }
-
- test("child frozen UDT set") {
- val userType = udt(List("a", "b"), List(utf8, int32))
- val setType = set(userType, false)
- childType(setType, "a") shouldBe Some(utf8)
- childType(setType, "b") shouldBe Some(int32)
- childType(setType, "c") shouldBe None
- }
-
- test("child regular list") {
- val listType = list(utf8, true)
- childType(listType, "a") shouldBe None
- }
-
- test("child UDT list") {
- val userType = udt(List("a", "b"), List(utf8, int32))
- val listType = list(userType, true)
- childType(listType, "a") shouldBe Some(utf8)
- childType(listType, "b") shouldBe Some(int32)
- childType(listType, "c") shouldBe None
- }
-
- test("child frozen UDT list") {
- val userType = udt(List("a", "b"), List(utf8, int32))
- val listType = list(userType, false)
- childType(listType, "a") shouldBe Some(utf8)
- childType(listType, "b") shouldBe Some(int32)
- childType(listType, "c") shouldBe None
- }
-
- test("child regular map") {
- val mapType = map(utf8, utf8, true)
- childType(mapType, "a") shouldBe None
- }
-
- test("child UDT map") {
- val userType = udt(List("a", "b"), List(utf8, int32))
- val mapType = map(utf8, userType, true)
- childType(mapType, "a") shouldBe Some(utf8)
- childType(mapType, "b") shouldBe Some(int32)
- childType(mapType, "c") shouldBe None
- }
-
- test("child frozen UDT map") {
- val userType = udt(List("a", "b"), List(utf8, int32))
- val mapType = map(utf8, userType, false)
- childType(mapType, "a") shouldBe Some(utf8)
- childType(mapType, "b") shouldBe Some(int32)
- childType(mapType, "c") shouldBe None
- }
-}
-
-object ColumnsMapperTest {
-
- val utf8 = UTF8Type.instance
- val ascii = AsciiType.instance
- val int32 = Int32Type.instance
- val byte = ByteType.instance
- val short = ShortType.instance
- val long = LongType.instance
- val float = FloatType.instance
- val double = DoubleType.instance
- val date = SimpleDateType.instance
- val integer = IntegerType.instance
- val uuid = UUIDType.instance
- val lexicalUuid = LexicalUUIDType.instance
- val timeUuid = TimeUUIDType.instance
- val decimal = DecimalType.instance
- val timestamp = TimestampType.instance
- val boolean = BooleanType.instance
-
- def set[A](elements: AbstractType[A], multiCell: Boolean): SetType[A] =
- SetType.getInstance(elements, multiCell)
-
- def list[A](elements: AbstractType[A], multiCell: Boolean): ListType[A] =
- ListType.getInstance(elements, multiCell)
-
- def map[A, B](keys: AbstractType[A], values: AbstractType[B], multiCell: Boolean): MapType[A, B] =
- MapType.getInstance(keys, values, multiCell)
-
- def udt(names: List[String], types: List[AbstractType[_]]): UserType =
- new UserType(
- "ks",
- utf8.decompose("cell"),
- Lists.newArrayList(names.map(x => new FieldIdentifier(utf8.decompose(x))).asJava),
- Lists.newArrayList(types.asJava),false)
-
- def reversed[A](base: AbstractType[A]): ReversedType[A] = ReversedType.getInstance(base)
-}
diff --git a/plugin/src/test/scala/com/stratio/cassandra/lucene/column/ColumnsTest.scala b/plugin/src/test/scala/com/stratio/cassandra/lucene/column/ColumnsTest.scala
index cd4531f2c..8e53de631 100644
--- a/plugin/src/test/scala/com/stratio/cassandra/lucene/column/ColumnsTest.scala
+++ b/plugin/src/test/scala/com/stratio/cassandra/lucene/column/ColumnsTest.scala
@@ -38,30 +38,7 @@ class ColumnsTest extends BaseScalaTest {
columns.isEmpty shouldBe false
}
- test("with cell name") {
- val columns = Columns(
- Column("c1"),
- Column("c1").withUDTName("u1"),
- Column("c1").withMapName("m1"),
- Column("c1").withUDTName("u2").withMapName("m2"),
- Column("c2"),
- Column("c2").withUDTName("u1"),
- Column("c2").withMapName("m1"),
- Column("c2").withUDTName("u2").withMapName("m2"))
- columns.withCellName("c1") shouldBe Columns(
- Column("c1"),
- Column("c1").withUDTName("u1"),
- Column("c1").withMapName("m1"),
- Column("c1").withUDTName("u2").withMapName("m2"))
- columns.withCellName("c2") shouldBe Columns(
- Column("c2"),
- Column("c2").withUDTName("u1"),
- Column("c2").withMapName("m1"),
- Column("c2").withUDTName("u2").withMapName("m2"))
- columns.withCellName("c3") shouldBe Columns()
- }
-
- test("with mapper name") {
+ test("foreach with mapper") {
val columns = Columns(
Column("c1"),
Column("c1").withUDTName("u1"),
@@ -71,30 +48,40 @@ class ColumnsTest extends BaseScalaTest {
Column("c2").withUDTName("u1"),
Column("c2").withMapName("m1"),
Column("c2").withUDTName("u1").withMapName("m12"))
- columns.withMapperName("c1") shouldBe Columns(
- Column("c1"),
- Column("c1").withMapName("m1"))
- columns.withMapperName("c1.u1") shouldBe Columns(
+
+ var cols1 = Columns()
+ columns.foreachWithMapper("c1")(c => cols1 += c)
+ cols1 shouldBe Columns(Column("c1"), Column("c1").withMapName("m1"))
+
+ var cols2 = Columns()
+ columns.foreachWithMapper("c1.u1")(c => cols2 += c)
+ cols2 shouldBe Columns(
Column("c1").withUDTName("u1"),
Column("c1").withUDTName("u1").withMapName("m1"))
}
- test("get by full name") {
+ test("value for field") {
val columns = Columns() +
- Column("c1") +
- Column("c1").withUDTName("u1") +
- Column("c1").withMapName("m1") +
- Column("c1").withUDTName("u1").withMapName("m1") +
- Column("c2") +
- Column("c2").withUDTName("u1") +
- Column("c2").withMapName("m1") +
- Column("c2").withUDTName("u1").withMapName("m12")
- columns.withFieldName("c1") shouldBe Columns(
- Column("c1"))
- columns.withFieldName("c1.u1") shouldBe Columns(
- Column("c1").withUDTName("u1"))
- columns.withFieldName("c1.u1$m1") shouldBe Columns(
- Column("c1").withUDTName("u1").withMapName("m1"))
+ Column("c1").withValue(1) +
+ Column("c1").withUDTName("u1").withValue(2) +
+ Column("c1").withMapName("m1").withValue(3) +
+ Column("c1").withUDTName("u1").withMapName("m1").withValue(4) +
+ Column("c2").withValue(5) +
+ Column("c2").withUDTName("u1").withValue(6) +
+ Column("c2").withMapName("m1").withValue(7) +
+ Column("c2").withUDTName("u1").withMapName("m1").withValue(8)
+ columns.valueForField("c1") shouldBe 1
+ columns.valueForField("c1.u1") shouldBe 2
+ columns.valueForField("c1$m1") shouldBe 3
+ columns.valueForField("c1.u1$m1") shouldBe 4
+ columns.valueForField("c2") shouldBe 5
+ columns.valueForField("c2.u1") shouldBe 6
+ columns.valueForField("c2$m1") shouldBe 7
+ columns.valueForField("c2.u1$m1") shouldBe 8
+ }
+
+ test("prepend column") {
+ Column("c1") :: Columns(Column("c2")) shouldBe Columns(Column("c1"), Column("c2"))
}
test("sum column") {
@@ -102,7 +89,7 @@ class ColumnsTest extends BaseScalaTest {
}
test("sum columns") {
- Columns(Column("c1")) + Columns(Column("c2")) shouldBe Columns(Column("c1"), Column("c2"))
+ Columns(Column("c1")) ++ Columns(Column("c2")) shouldBe Columns(Column("c1"), Column("c2"))
}
test("add column without value") {
@@ -113,25 +100,13 @@ class ColumnsTest extends BaseScalaTest {
Columns(Column("c1")).add("c2", 1) shouldBe Columns(Column("c1"), Column("c2").withValue(1))
}
- test("clean deleted") {
- val columns = Columns(
- Column("c1"),
- Column("c2").withValue(1),
- Column("c3").withValue(1).withDeletionTime(1),
- Column("c4").withValue(1).withDeletionTime(2),
- Column("c5").withValue(1).withDeletionTime(3))
- columns.withoutDeleted(2) shouldBe Columns(
- Column("c2").withValue(1),
- Column("c5").withValue(1).withDeletionTime(3))
- }
-
test("toString empty") {
Columns().toString shouldBe "Columns{}"
}
test("toString with columns") {
val columns = Columns(
- Column("c1").withDeletionTime(10),
+ Column("c1"),
Column("c2").withUDTName("u1").withMapName("m1").withValue(7))
columns.toString shouldBe "Columns{c1=None, c2.u1$m1=Some(7)}"
}
diff --git a/plugin/src/test/scala/com/stratio/cassandra/lucene/index/FSIndexTest.scala b/plugin/src/test/scala/com/stratio/cassandra/lucene/index/FSIndexTest.scala
index 4d5d969cd..6ebf2dad1 100644
--- a/plugin/src/test/scala/com/stratio/cassandra/lucene/index/FSIndexTest.scala
+++ b/plugin/src/test/scala/com/stratio/cassandra/lucene/index/FSIndexTest.scala
@@ -16,11 +16,10 @@
package com.stratio.cassandra.lucene.index
import java.nio.file.Paths
-import java.util.UUID
+import java.util.{Collections, UUID}
import com.stratio.cassandra.lucene.BaseScalaTest
import com.stratio.cassandra.lucene.IndexOptions._
-import com.stratio.cassandra.lucene.index.FSIndexTest._
import org.apache.lucene.analysis.standard.StandardAnalyzer
import org.apache.lucene.document._
import org.apache.lucene.index.Term
@@ -38,11 +37,37 @@ import org.scalatest.junit.JUnitRunner
@RunWith(classOf[JUnitRunner])
class FSIndexTest extends BaseScalaTest {
+ val REFRESH_SECONDS: Double = 0.1D
+ val REFRESH_MILLISECONDS: Int = (REFRESH_SECONDS * 1000).toInt
+ val WAIT_MILLISECONDS: Int = REFRESH_MILLISECONDS * 2
+
+ def assertCount(docs: DocumentIterator, expected: Int) {
+ var count = 0
+ docs.foreach(_ => count += 1)
+ assertEquals("Expected " + expected + " documents", expected, count)
+ }
+
+ def doWithIndex(f: FSIndex => Unit): Unit = {
+ val temporaryFolder = new TemporaryFolder
+ temporaryFolder.create()
+ try {
+ val index = new FSIndex(
+ "test_index",
+ Paths.get(temporaryFolder.newFolder("directory" + UUID.randomUUID).getPath),
+ new StandardAnalyzer,
+ REFRESH_SECONDS,
+ DEFAULT_RAM_BUFFER_MB,
+ DEFAULT_MAX_MERGE_MB,
+ DEFAULT_MAX_CACHED_MB)
+ f.apply(index)
+ } finally temporaryFolder.delete()
+ }
+
test("CRUD operations") {
doWithIndex(
index => {
val sort = new Sort(new SortedSetSortField("field", false))
- val fields = Set("field")
+ val fields = Collections.singleton("field")
index.init(sort, fields)
assertEquals("Index must be empty", 0, index.getNumDocs)
@@ -105,7 +130,7 @@ class FSIndexTest extends BaseScalaTest {
doWithIndex(
index => {
val sort = new Sort(new SortedNumericSortField("field", SortField.Type.INT, false))
- val fields = Set("field")
+ val fields = Collections.singleton("field")
index.init(sort, fields)
assertEquals("Index must be empty", 0, index.getNumDocs)
@@ -127,34 +152,5 @@ class FSIndexTest extends BaseScalaTest {
assertCount(index.search(Some(new Term("field_s", "49")), query, sort, 1000), 50)
})
}
-}
-
-object FSIndexTest {
-
- val REFRESH_SECONDS: Double = 0.1D
- val REFRESH_MILLISECONDS: Int = (REFRESH_SECONDS * 1000).toInt
- val WAIT_MILLISECONDS: Int = REFRESH_MILLISECONDS * 2
-
- def assertCount(docs: DocumentIterator, expected: Int) {
- var count = 0
- docs.foreach(_ => count += 1)
- assertEquals("Expected " + expected + " documents", expected, count)
- }
-
- def doWithIndex(f: FSIndex => Unit): Unit = {
- val temporaryFolder = new TemporaryFolder
- temporaryFolder.create()
- try {
- val index = new FSIndex(
- "test_index",
- Paths.get(temporaryFolder.newFolder("directory" + UUID.randomUUID).getPath),
- new StandardAnalyzer,
- REFRESH_SECONDS,
- DEFAULT_RAM_BUFFER_MB,
- DEFAULT_MAX_MERGE_MB,
- DEFAULT_MAX_CACHED_MB)
- f.apply(index)
- } finally temporaryFolder.delete()
- }
-}
+}
\ No newline at end of file
diff --git a/plugin/src/test/scala/com/stratio/cassandra/lucene/index/PartitionedIndexTest.scala b/plugin/src/test/scala/com/stratio/cassandra/lucene/index/PartitionedIndexTest.scala
new file mode 100644
index 000000000..2626ad38d
--- /dev/null
+++ b/plugin/src/test/scala/com/stratio/cassandra/lucene/index/PartitionedIndexTest.scala
@@ -0,0 +1,245 @@
+/*
+ * Copyright (C) 2014 Stratio (http://stratio.com)
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package com.stratio.cassandra.lucene.index
+
+import java.nio.file.Paths
+import java.util.{Collections, UUID}
+
+import com.stratio.cassandra.lucene.BaseScalaTest
+import com.stratio.cassandra.lucene.IndexOptions._
+import org.apache.lucene.analysis.standard.StandardAnalyzer
+import org.apache.lucene.document._
+import org.apache.lucene.index.Term
+import org.apache.lucene.search._
+import org.apache.lucene.util.BytesRef
+import org.junit.Assert.assertEquals
+import org.junit.rules.TemporaryFolder
+import org.junit.runner.RunWith
+import org.scalatest.junit.JUnitRunner
+
+/** Tests for [[PartitionedIndex]].
+ *
+ * @author Andres de la Pena `adelapena@stratio.com`
+ */
+@RunWith(classOf[JUnitRunner])
+class PartitionedIndexTest extends BaseScalaTest {
+
+ val REFRESH_SECONDS: Double = 0.1D
+ val REFRESH_MILLISECONDS: Int = (REFRESH_SECONDS * 1000).toInt
+ val WAIT_MILLISECONDS: Int = REFRESH_MILLISECONDS * 2
+
+ def assertCount(docs: DocumentIterator, expected: Int) {
+ var count = 0
+ docs.foreach(_ => count += 1)
+ assertEquals("Expected " + expected + " documents", expected, count)
+ }
+
+ def doWithIndex(numPartitions: Int, f: PartitionedIndex => Unit): Unit = {
+ val temporaryFolder = new TemporaryFolder
+ temporaryFolder.create()
+ try {
+ val index = new PartitionedIndex(
+ numPartitions,
+ "test_index",
+ Paths.get(temporaryFolder.newFolder("directory" + UUID.randomUUID).getPath),
+ new StandardAnalyzer,
+ REFRESH_SECONDS,
+ DEFAULT_RAM_BUFFER_MB,
+ DEFAULT_MAX_MERGE_MB,
+ DEFAULT_MAX_CACHED_MB)
+ f.apply(index)
+ } finally temporaryFolder.delete()
+ }
+
+ test("CRUD without partitioning") {
+ doWithIndex(1, index => {
+ val sort = new Sort(new SortedSetSortField("field", false))
+ val fields = Collections.singleton("field")
+ index.init(sort, fields)
+
+ assertEquals("Index must be empty", 0, index.getNumDocs)
+
+ val term1 = new Term("field", "value1")
+ val document1 = new Document
+ document1.add(new StringField("field", "value1", Field.Store.NO))
+ document1.add(new SortedSetDocValuesField("field", new BytesRef("value1")))
+ index.upsert(0, term1, document1)
+
+ val term2 = new Term("field", "value2")
+ val document2 = new Document
+ document2.add(new StringField("field", "value2", Field.Store.NO))
+ document2.add(new SortedSetDocValuesField("field", new BytesRef("value2")))
+ document2.add(new SortedSetDocValuesField("field", new BytesRef("value3")))
+ index.upsert(0, term2, document2)
+
+ index.commit()
+ Thread.sleep(REFRESH_MILLISECONDS)
+ assertEquals("Expected 2 documents", 2, index.getNumDocs)
+
+ val query = new WildcardQuery(new Term("field", "value*"))
+ assertCount(index.search(List(0), None, query, sort, 1), 2)
+
+ // Delete by term
+ index.delete(0, term1)
+ index.commit()
+ Thread.sleep(WAIT_MILLISECONDS)
+ assertEquals("Expected 1 document", 1, index.getNumDocs)
+
+ // Delete by query
+ index.upsert(0, term1, document1)
+ index.commit()
+ Thread.sleep(WAIT_MILLISECONDS)
+ assertEquals("Expected 2 documents", 2, index.getNumDocs)
+ index.delete(0, new TermQuery(term1))
+ Thread.sleep(WAIT_MILLISECONDS)
+ assertEquals("Expected 1 document", 1, index.getNumDocs)
+
+ // Upsert
+ index.upsert(0, term1, document1)
+ index.upsert(0, term2, document2)
+ index.upsert(0, term2, document2)
+ index.commit()
+ Thread.sleep(WAIT_MILLISECONDS)
+ assertEquals("Expected 2 documents", 2, index.getNumDocs)
+
+ // Truncate
+ index.truncate()
+ index.commit()
+ Thread.sleep(WAIT_MILLISECONDS)
+ assertEquals("Expected 0 documents", 0, index.getNumDocs)
+
+ // Delete
+ index.delete()
+ })
+ }
+
+ test("CRUD with partitioning") {
+ doWithIndex(2, index => {
+ val sort = new Sort(new SortedSetSortField("field", false))
+ val fields = Collections.singleton("field")
+ index.init(sort, fields)
+
+ assertEquals("Index must be empty", 0, index.getNumDocs)
+
+ val term1 = new Term("field", "value1")
+ val document1 = new Document
+ document1.add(new StringField("field", "value1", Field.Store.NO))
+ document1.add(new SortedSetDocValuesField("field", new BytesRef("value1")))
+ index.upsert(0, term1, document1)
+
+ val term2 = new Term("field", "value2")
+ val document2 = new Document
+ document2.add(new StringField("field", "value2", Field.Store.NO))
+ document2.add(new SortedSetDocValuesField("field", new BytesRef("value2")))
+ document2.add(new SortedSetDocValuesField("field", new BytesRef("value3")))
+ index.upsert(1, term2, document2)
+
+ index.commit()
+ Thread.sleep(REFRESH_MILLISECONDS)
+ assertEquals("Expected 2 documents", 2, index.getNumDocs)
+
+ val query = new WildcardQuery(new Term("field", "value*"))
+ assertCount(index.search(List(0), None, query, sort, 1), 1)
+ assertCount(index.search(List(1), None, query, sort, 1), 1)
+ assertCount(index.search(List(0, 1), None, query, sort, 1), 2)
+
+ // Delete by term
+ index.delete(0, term1)
+ index.commit()
+ Thread.sleep(WAIT_MILLISECONDS)
+ assertEquals("Expected 1 document", 1, index.getNumDocs)
+
+ // Delete by query
+ index.upsert(0, term1, document1)
+ index.commit()
+ Thread.sleep(WAIT_MILLISECONDS)
+ assertEquals("Expected 2 documents", 2, index.getNumDocs)
+ index.delete(0, new TermQuery(term1))
+ Thread.sleep(WAIT_MILLISECONDS)
+ assertEquals("Expected 1 document", 1, index.getNumDocs)
+
+ // Upsert
+ index.upsert(0, term1, document1)
+ index.upsert(1, term2, document2)
+ index.upsert(1, term2, document2)
+ index.commit()
+ Thread.sleep(WAIT_MILLISECONDS)
+ assertEquals("Expected 2 documents", 2, index.getNumDocs)
+
+ // Truncate
+ index.truncate()
+ index.commit()
+ Thread.sleep(WAIT_MILLISECONDS)
+ assertEquals("Expected 0 documents", 0, index.getNumDocs)
+
+ // Delete
+ index.delete()
+ })
+ }
+
+ test("pagination without partitioning") {
+ doWithIndex(1, index => {
+ val sort = new Sort(new SortedNumericSortField("field", SortField.Type.INT, false))
+ val fields = Collections.singleton("field")
+ index.init(sort, fields)
+
+ assertEquals("Index must be empty", 0, index.getNumDocs)
+
+ for (i <- 0 until 100) {
+ val value = i.toString
+ val term = new Term("field_s", value)
+ val document = new Document
+ document.add(new StringField("field_s", value, Field.Store.NO))
+ document.add(new SortedNumericDocValuesField("field", i))
+ index.upsert(0, term, document)
+ }
+
+ index.commit()
+ Thread.sleep(REFRESH_MILLISECONDS)
+ assertEquals("Expected 2 documents", 100, index.getNumDocs)
+ val query = new MatchAllDocsQuery
+ assertCount(index.search(List(0), None, query, sort, 1000), 100)
+ assertCount(index.search(List(0), Some(new Term("field_s", "49")), query, sort, 1000), 50)
+ })
+ }
+
+ test("pagination with partitioning") {
+ doWithIndex(2, index => {
+ val sort = new Sort(new SortedNumericSortField("field", SortField.Type.INT, false))
+ val fields = Collections.singleton("field")
+ index.init(sort, fields)
+
+ assertEquals("Index must be empty", 0, index.getNumDocs)
+
+ for (i <- 0 until 100) {
+ val value = i.toString
+ val term = new Term("field_s", value)
+ val document = new Document
+ document.add(new StringField("field_s", value, Field.Store.NO))
+ document.add(new SortedNumericDocValuesField("field", i))
+ index.upsert(i % 2, term, document)
+ }
+
+ index.commit()
+ Thread.sleep(REFRESH_MILLISECONDS)
+ assertEquals("Expected 2 documents", 100, index.getNumDocs)
+ val query = new MatchAllDocsQuery
+ assertCount(index.search(List(0, 1), None, query, sort, 1000), 100)
+ assertCount(index.search(List(0, 1), Some(new Term("field_s", "49")), query, sort, 1000), 50)
+ })
+ }
+
+}
\ No newline at end of file
diff --git a/plugin/src/test/scala/com/stratio/cassandra/lucene/key/ClusteringMapperTest.scala b/plugin/src/test/scala/com/stratio/cassandra/lucene/mapping/ClusteringMapperTest.scala
similarity index 96%
rename from plugin/src/test/scala/com/stratio/cassandra/lucene/key/ClusteringMapperTest.scala
rename to plugin/src/test/scala/com/stratio/cassandra/lucene/mapping/ClusteringMapperTest.scala
index 2e44f6f8e..1df016e75 100644
--- a/plugin/src/test/scala/com/stratio/cassandra/lucene/key/ClusteringMapperTest.scala
+++ b/plugin/src/test/scala/com/stratio/cassandra/lucene/mapping/ClusteringMapperTest.scala
@@ -13,7 +13,7 @@
* See the License for the specific language governing permissions and
* limitations under the License.
*/
-package com.stratio.cassandra.lucene.key
+package com.stratio.cassandra.lucene.mapping
import com.stratio.cassandra.lucene.BaseScalaTest
import org.apache.cassandra.dht.Murmur3Partitioner
diff --git a/plugin/src/test/scala/com/stratio/cassandra/lucene/mapping/ColumnsMapperTest.scala b/plugin/src/test/scala/com/stratio/cassandra/lucene/mapping/ColumnsMapperTest.scala
new file mode 100644
index 000000000..87a2a0c76
--- /dev/null
+++ b/plugin/src/test/scala/com/stratio/cassandra/lucene/mapping/ColumnsMapperTest.scala
@@ -0,0 +1,112 @@
+/*
+ * Copyright (C) 2014 Stratio (http://stratio.com)
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package com.stratio.cassandra.lucene.mapping
+
+import java.math.{BigDecimal, BigInteger}
+import java.util.{Date, UUID}
+
+import com.google.common.collect.Lists
+import com.stratio.cassandra.lucene.BaseScalaTest
+import com.stratio.cassandra.lucene.BaseScalaTest._
+import com.stratio.cassandra.lucene.column.{Column, Columns}
+import com.stratio.cassandra.lucene.mapping.ColumnsMapper._
+import org.apache.cassandra.config.ColumnDefinition
+import org.apache.cassandra.db.marshal._
+import org.apache.cassandra.db.rows.{BufferCell, Cell}
+import org.apache.cassandra.utils.UUIDGen
+import org.junit.runner.RunWith
+import org.scalatest.junit.JUnitRunner
+
+import scala.collection.JavaConverters._
+
+/** Tests for [[ColumnsMapper]].
+ *
+ * @author Andres de la Pena `adelapena@stratio.com`
+ */
+@RunWith(classOf[JUnitRunner])
+class ColumnsMapperTest extends BaseScalaTest {
+
+ test("columns from plain cells") {
+ def test[A](abstractType: AbstractType[A], value: A) = {
+ val column = Column("cell")
+ columns(column, abstractType, abstractType.decompose(value)) shouldBe
+ Columns(column.withValue(value))
+ }
+ test(ascii, "Ab")
+ test(utf8, "Ab")
+ test(int32, 7.asInstanceOf[Integer])
+ test(float, 7.3f.asInstanceOf[java.lang.Float])
+ test(long, 7l.asInstanceOf[java.lang.Long])
+ test(double, 7.3d.asInstanceOf[java.lang.Double])
+ test(integer, new BigInteger("7"))
+ test(decimal, new BigDecimal("7.3"))
+ test(uuid, UUID.randomUUID)
+ test(lexicalUuid, UUID.randomUUID)
+ test(timeUuid, UUIDGen.getTimeUUID)
+ test(timestamp, new Date)
+ test(boolean, true.asInstanceOf[java.lang.Boolean])
+ }
+
+ test("columns from frozen set") {
+ val column = Column("cell")
+ val `type` = set(utf8, false)
+ val bb = `type`.decompose(Set("a", "b").asJava)
+ columns(column, `type`, bb) shouldBe Columns(column.withValue("b"), column.withValue("a"))
+ }
+
+ test("columns from frozen list") {
+ val column = Column("cell")
+ val `type` = list(utf8, false)
+ val bb = `type`.decompose(List("a", "b").asJava)
+ columns(column, `type`, bb) shouldBe Columns(column.withValue("b"), column.withValue("a"))
+ }
+
+ test("columns from frozen map") {
+ val column = Column("cell")
+ val `type` = map(utf8, utf8, true)
+ val bb = `type`.decompose(Map("k1" -> "v1", "k2" -> "v2").asJava)
+ columns(column, `type`, bb) shouldBe
+ Columns(column.withMapName("k2").withValue("v2"), column.withMapName("k1").withValue("v1"))
+ }
+
+ test("columns from tuple") {
+ val column = Column("cell")
+ val `type` = new TupleType(Lists.newArrayList(utf8, utf8))
+ val bb = TupleType.buildValue(Array(utf8.decompose("a"), utf8.decompose("b")))
+ columns(column, `type`, bb) shouldBe
+ Columns(column.withUDTName("0").withValue("a"), column.withUDTName("1").withValue("b"))
+ }
+
+ test("columns from UDT") {
+ val column = Column("cell")
+ val `type` = udt(List("a", "b"), List(utf8, utf8))
+ val bb = TupleType.buildValue(Array(utf8.decompose("1"), utf8.decompose("2")))
+ columns(column, `type`, bb) shouldBe
+ Columns(column.withUDTName("a").withValue("1"), column.withUDTName("b").withValue("2"))
+ }
+
+ test("columns from regular cell") {
+ val columnDefinition = ColumnDefinition.regularDef("ks", "cf", "cell", utf8)
+ val cell = new BufferCell(
+ columnDefinition,
+ System.currentTimeMillis(),
+ Cell.NO_TTL,
+ Cell.NO_DELETION_TIME,
+ utf8.decompose("a"),
+ null)
+ columns(cell) shouldBe Columns(Column("cell").withValue("a"))
+ }
+}
\ No newline at end of file
diff --git a/plugin/src/test/scala/com/stratio/cassandra/lucene/key/TokenMapperTest.scala b/plugin/src/test/scala/com/stratio/cassandra/lucene/mapping/TokenMapperTest.scala
similarity index 97%
rename from plugin/src/test/scala/com/stratio/cassandra/lucene/key/TokenMapperTest.scala
rename to plugin/src/test/scala/com/stratio/cassandra/lucene/mapping/TokenMapperTest.scala
index c6e8c7b88..c9f24ec16 100644
--- a/plugin/src/test/scala/com/stratio/cassandra/lucene/key/TokenMapperTest.scala
+++ b/plugin/src/test/scala/com/stratio/cassandra/lucene/mapping/TokenMapperTest.scala
@@ -13,7 +13,7 @@
* See the License for the specific language governing permissions and
* limitations under the License.
*/
-package com.stratio.cassandra.lucene.key
+package com.stratio.cassandra.lucene.mapping
import com.stratio.cassandra.lucene.BaseScalaTest
import com.stratio.cassandra.lucene.util.ByteBufferUtils.toHex
diff --git a/plugin/src/test/scala/com/stratio/cassandra/lucene/partitioning/PartitionerOnNoneTest.scala b/plugin/src/test/scala/com/stratio/cassandra/lucene/partitioning/PartitionerOnNoneTest.scala
new file mode 100644
index 000000000..ce0469af6
--- /dev/null
+++ b/plugin/src/test/scala/com/stratio/cassandra/lucene/partitioning/PartitionerOnNoneTest.scala
@@ -0,0 +1,40 @@
+/*
+ * Copyright (C) 2014 Stratio (http://stratio.com)
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package com.stratio.cassandra.lucene.partitioning
+
+import org.junit.runner.RunWith
+import org.scalatest.junit.JUnitRunner
+
+/** Tests for [[PartitionerOnNone]].
+ *
+ * @author Andres de la Pena `adelapena@stratio.com`
+ */
+@RunWith(classOf[JUnitRunner])
+class PartitionerOnNoneTest extends PartitionerTest {
+
+ test("parse") {
+ Partitioner.fromJson("{type:\"none\"}") shouldBe PartitionerOnNone()
+ }
+
+ test("num partitions") {
+ PartitionerOnNone().numPartitions shouldBe 1
+ }
+
+ test("key partition") {
+ for (i <- 1 to 10) PartitionerOnNone().partition(key(i)) shouldBe 0
+ }
+
+}
diff --git a/plugin/src/test/scala/com/stratio/cassandra/lucene/partitioning/PartitionerOnTokenTest.scala b/plugin/src/test/scala/com/stratio/cassandra/lucene/partitioning/PartitionerOnTokenTest.scala
new file mode 100644
index 000000000..91040dbf7
--- /dev/null
+++ b/plugin/src/test/scala/com/stratio/cassandra/lucene/partitioning/PartitionerOnTokenTest.scala
@@ -0,0 +1,62 @@
+/*
+ * Copyright (C) 2014 Stratio (http://stratio.com)
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package com.stratio.cassandra.lucene.partitioning
+
+import com.stratio.cassandra.lucene.IndexException
+import org.junit.runner.RunWith
+import org.scalatest.junit.JUnitRunner
+
+/** Tests for [[PartitionerOnToken]].
+ *
+ * @author Andres de la Pena `adelapena@stratio.com`
+ */
+@RunWith(classOf[JUnitRunner])
+class PartitionerOnTokenTest extends PartitionerTest {
+
+ test("build with zero partitions") {
+ assertThrows[IndexException] {PartitionerOnToken(0)}
+ }
+
+ test("build with negative partitions") {
+ assertThrows[IndexException] {PartitionerOnToken(-1)}
+ }
+
+ test("parse") {
+ Partitioner.fromJson("{type:\"token\", partitions: 10}") shouldBe PartitionerOnToken(10)
+ }
+
+ test("num partitions") {
+ PartitionerOnToken(4).numPartitions shouldBe 4
+ }
+
+ test("key partition with 1 partition") {
+ for (i <- 1 to 10) {
+ PartitionerOnToken(1).partition(key(i)) shouldBe 0
+ }
+ }
+
+ test("key partition with n partitions") {
+ PartitionerOnToken(10).partition(key(0)) shouldBe 8
+ PartitionerOnToken(10).partition(key(1)) shouldBe 9
+ PartitionerOnToken(10).partition(key(2)) shouldBe 2
+ PartitionerOnToken(10).partition(key(3)) shouldBe 5
+ PartitionerOnToken(10).partition(key(4)) shouldBe 5
+ PartitionerOnToken(10).partition(key(5)) shouldBe 4
+ PartitionerOnToken(10).partition(key(6)) shouldBe 8
+ PartitionerOnToken(10).partition(key(7)) shouldBe 6
+ }
+
+}
diff --git a/plugin/src/test/scala/com/stratio/cassandra/lucene/partitioning/PartitionerTest.scala b/plugin/src/test/scala/com/stratio/cassandra/lucene/partitioning/PartitionerTest.scala
new file mode 100644
index 000000000..0754ab7a6
--- /dev/null
+++ b/plugin/src/test/scala/com/stratio/cassandra/lucene/partitioning/PartitionerTest.scala
@@ -0,0 +1,43 @@
+/*
+ * Copyright (C) 2014 Stratio (http://stratio.com)
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package com.stratio.cassandra.lucene.partitioning
+
+import com.stratio.cassandra.lucene.BaseScalaTest
+import com.stratio.cassandra.lucene.BaseScalaTest.int32
+import org.apache.cassandra.db.DecoratedKey
+import org.apache.cassandra.dht.Murmur3Partitioner
+
+/** Tests for [[Partitioner]].
+ *
+ * @author Andres de la Pena `adelapena@stratio.com`
+ */
+class PartitionerTest extends BaseScalaTest {
+
+ test("parse default") {
+ Partitioner.fromJson("{}") shouldBe PartitionerOnNone()
+ }
+
+ test("num partitions with none partitioner") {
+ PartitionerOnNone().allPartitions shouldBe List(0)
+ }
+
+ test("num partitions with token partitioner") {
+ PartitionerOnToken(4).allPartitions shouldBe List(0, 1, 2, 3)
+ }
+
+ def key(n: Int): DecoratedKey = Murmur3Partitioner.instance.decorateKey(int32.decompose(n))
+
+}
diff --git a/plugin/src/test/scala/com/stratio/cassandra/lucene/util/BytBufferUtilsTest.scala b/plugin/src/test/scala/com/stratio/cassandra/lucene/util/BytBufferUtilsTest.scala
new file mode 100644
index 000000000..63a499fe7
--- /dev/null
+++ b/plugin/src/test/scala/com/stratio/cassandra/lucene/util/BytBufferUtilsTest.scala
@@ -0,0 +1,77 @@
+/*
+ * Copyright (C) 2014 Stratio (http://stratio.com)
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package com.stratio.cassandra.lucene.util
+
+import java.nio.ByteBuffer
+
+import com.stratio.cassandra.lucene.BaseScalaTest
+import com.stratio.cassandra.lucene.BaseScalaTest._
+import org.apache.cassandra.db.marshal.CompositeType
+import org.apache.cassandra.utils.ByteBufferUtil
+
+/** Class for testing [[ByteBufferUtils]].
+ *
+ * @author Andres de la Pena `adelapena@stratio.com`
+ */
+class BytBufferUtilsTest extends BaseScalaTest {
+
+ test("test BytesRef") {
+ val t = CompositeType.getInstance(utf8, int32)
+ val in = t.builder().add(utf8.decompose("monkey")).add(int32.decompose(1)).build()
+ val bytesRef = ByteBufferUtils.bytesRef(in)
+ val out1 = ByteBufferUtils.byteBuffer(bytesRef)
+ ByteBufferUtil.compareUnsigned(in, out1) shouldBe 0
+ val out2 = ByteBufferUtils.byteBuffer(bytesRef)
+ ByteBufferUtil.compareUnsigned(in, out2) shouldBe 0
+ }
+
+ test("isEmpty true") {
+ val bb = ByteBuffer.allocate(0)
+ ByteBufferUtils.isEmpty(bb) shouldBe true
+ }
+
+ test("isEmpty false") {
+ val bb = ByteBuffer.allocate(10)
+ ByteBufferUtils.isEmpty(bb) shouldBe false
+ }
+
+ test("split simple") {
+ val bb = utf8.decompose("test")
+ ByteBufferUtils.split(bb, utf8).length shouldBe 1
+ }
+
+ test("split composite") {
+ val t = CompositeType.getInstance(utf8, int32)
+ val bb = t.builder.add(utf8.decompose("1")).add(int32.decompose(1)).build
+ ByteBufferUtils.split(bb, t).length shouldBe 2
+ }
+
+ test("compose-decompose") {
+ val bbs = ByteBufferUtils.decompose(
+ ByteBufferUtils.compose(
+ utf8.decompose("test"),
+ int32.decompose(999),
+ boolean.decompose(true)))
+ bbs.length shouldBe 3
+ utf8.compose(bbs(0)) shouldBe "test"
+ int32.compose(bbs(1)) shouldBe 999
+ boolean.compose(bbs(2)) shouldBe true
+ }
+
+ test("compose-decompose empty") {
+ ByteBufferUtils.decompose(ByteBufferUtils.compose()).length shouldBe 0
+ }
+}
diff --git a/plugin/src/test/scala/com/stratio/cassandra/lucene/util/SchemaValidatorTest.scala b/plugin/src/test/scala/com/stratio/cassandra/lucene/util/SchemaValidatorTest.scala
new file mode 100644
index 000000000..2de1ac987
--- /dev/null
+++ b/plugin/src/test/scala/com/stratio/cassandra/lucene/util/SchemaValidatorTest.scala
@@ -0,0 +1,140 @@
+/*
+ * Copyright (C) 2014 Stratio (http://stratio.com)
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package com.stratio.cassandra.lucene.util
+
+import com.stratio.cassandra.lucene.BaseScalaTest
+import com.stratio.cassandra.lucene.BaseScalaTest._
+import org.junit.runner.RunWith
+import org.scalatest.junit.JUnitRunner
+import com.stratio.cassandra.lucene.util.SchemaValidator._
+
+/** Tests for [[SchemaValidator]].
+ *
+ * @author Andres de la Pena `adelapena@stratio.com`
+ */
+@RunWith(classOf[JUnitRunner])
+class SchemaValidatorTest extends BaseScalaTest {
+
+ test("supports regular") {
+ supports(utf8, List(classOf[String])) shouldBe true
+ supports(utf8, List(classOf[Number])) shouldBe false
+ supports(utf8, List(classOf[String], classOf[Number])) shouldBe true
+ supports(utf8, List(classOf[Number], classOf[String])) shouldBe true
+ }
+
+ test("supports list") {
+ supports(list(utf8, false), List(classOf[String])) shouldBe true
+ supports(list(utf8, true), List(classOf[String])) shouldBe true
+ supports(list(int32, false), List(classOf[String])) shouldBe false
+ supports(list(int32, true), List(classOf[String])) shouldBe false
+ }
+
+ test("supports set") {
+ supports(set(utf8, false), List(classOf[String])) shouldBe true
+ supports(set(utf8, true), List(classOf[String])) shouldBe true
+ supports(set(int32, false), List(classOf[String])) shouldBe false
+ supports(set(int32, true), List(classOf[String])) shouldBe false
+ }
+
+ test("supports map") {
+ supports(map(int32, utf8, false), List(classOf[String])) shouldBe true
+ supports(map(int32, utf8, true), List(classOf[String])) shouldBe true
+ supports(map(utf8, int32, false), List(classOf[String])) shouldBe false
+ supports(map(utf8, int32, true), List(classOf[String])) shouldBe false
+ }
+
+ test("supports reversed") {
+ supports(reversed(utf8), List(classOf[String])) shouldBe true
+ supports(reversed(int32), List(classOf[String])) shouldBe false
+ supports(reversed(utf8), List(classOf[String], classOf[Number])) shouldBe true
+ supports(reversed(utf8), List(classOf[Number], classOf[String])) shouldBe true
+ }
+
+ test("child regular") {
+ childType(utf8, "") shouldBe None
+ }
+
+ test("child UDT") {
+ val userType = udt(List("a", "b"), List(utf8, int32))
+ childType(userType, "a") shouldBe Some(utf8)
+ childType(userType, "b") shouldBe Some(int32)
+ childType(userType, "c") shouldBe None
+ }
+
+ test("child regular set") {
+ val setType = set(utf8, true)
+ childType(setType, "a") shouldBe None
+ }
+
+ test("child UDT set") {
+ val userType = udt(List("a", "b"), List(utf8, int32))
+ val setType = set(userType, true)
+ childType(setType, "a") shouldBe Some(utf8)
+ childType(setType, "b") shouldBe Some(int32)
+ childType(setType, "c") shouldBe None
+ }
+
+ test("child frozen UDT set") {
+ val userType = udt(List("a", "b"), List(utf8, int32))
+ val setType = set(userType, false)
+ childType(setType, "a") shouldBe Some(utf8)
+ childType(setType, "b") shouldBe Some(int32)
+ childType(setType, "c") shouldBe None
+ }
+
+ test("child regular list") {
+ val listType = list(utf8, true)
+ childType(listType, "a") shouldBe None
+ }
+
+ test("child UDT list") {
+ val userType = udt(List("a", "b"), List(utf8, int32))
+ val listType = list(userType, true)
+ childType(listType, "a") shouldBe Some(utf8)
+ childType(listType, "b") shouldBe Some(int32)
+ childType(listType, "c") shouldBe None
+ }
+
+ test("child frozen UDT list") {
+ val userType = udt(List("a", "b"), List(utf8, int32))
+ val listType = list(userType, false)
+ childType(listType, "a") shouldBe Some(utf8)
+ childType(listType, "b") shouldBe Some(int32)
+ childType(listType, "c") shouldBe None
+ }
+
+ test("child regular map") {
+ val mapType = map(utf8, utf8, true)
+ childType(mapType, "a") shouldBe None
+ }
+
+ test("child UDT map") {
+ val userType = udt(List("a", "b"), List(utf8, int32))
+ val mapType = map(utf8, userType, true)
+ childType(mapType, "a") shouldBe Some(utf8)
+ childType(mapType, "b") shouldBe Some(int32)
+ childType(mapType, "c") shouldBe None
+ }
+
+ test("child frozen UDT map") {
+ val userType = udt(List("a", "b"), List(utf8, int32))
+ val mapType = map(utf8, userType, false)
+ childType(mapType, "a") shouldBe Some(utf8)
+ childType(mapType, "b") shouldBe Some(int32)
+ childType(mapType, "c") shouldBe None
+ }
+
+}
diff --git a/plugin/src/test/scala/com/stratio/cassandra/lucene/util/TimeCounterTest.scala b/plugin/src/test/scala/com/stratio/cassandra/lucene/util/TimeCounterTest.scala
new file mode 100644
index 000000000..b15b990fb
--- /dev/null
+++ b/plugin/src/test/scala/com/stratio/cassandra/lucene/util/TimeCounterTest.scala
@@ -0,0 +1,51 @@
+/*
+ * Copyright (C) 2014 Stratio (http://stratio.com)
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package com.stratio.cassandra.lucene.util
+
+import com.stratio.cassandra.lucene.BaseScalaTest
+
+/** Class for testing [[TimeCounter]].
+ *
+ * @author Andres de la Pena `adelapena@stratio.com`
+ */
+class TimeCounterTest extends BaseScalaTest {
+
+ test("reusable") {
+ val started = TimeCounter.create.start
+ Thread.sleep(10)
+ assert(Range(10, 1000).contains(started.time))
+ val stopped = started.stop
+ assert(Range(10, 1000).contains(stopped.time))
+ stopped.toString shouldBe s"${stopped.time} ms"
+
+ Thread.sleep(1000)
+
+ val newStarted = stopped.start
+ Thread.sleep(10)
+ assert(Range(20, 1000).contains(newStarted.time))
+ val newStopped = newStarted.stop
+ assert(Range(20, 1000).contains(newStopped.time))
+ newStopped.toString shouldBe s"${newStopped.time} ms"
+ }
+
+ test("immutable") {
+ val tc = TimeCounter.create
+ tc.start.stop.start
+ Thread.sleep(10)
+ tc.time shouldBe 0
+ }
+
+}
diff --git a/testsAT/pom.xml b/testsAT/pom.xml
index f6bceef3c..fc45c2317 100644
--- a/testsAT/pom.xml
+++ b/testsAT/pom.xml
@@ -26,7 +26,6 @@
com.stratio.cassandra
cassandra-lucene-index-tests
3.8.2-RC1-SNAPSHOT
-
jar
Cassandra Lucene Index acceptance tests
Cassandra Lucene Index acceptance tests
diff --git a/testsAT/src/test/java/com/stratio/cassandra/lucene/testsAT/udt/CheckNonFrozenUDTAT.java b/testsAT/src/test/java/com/stratio/cassandra/lucene/testsAT/udt/CheckNonFrozenUDTIT.java
similarity index 98%
rename from testsAT/src/test/java/com/stratio/cassandra/lucene/testsAT/udt/CheckNonFrozenUDTAT.java
rename to testsAT/src/test/java/com/stratio/cassandra/lucene/testsAT/udt/CheckNonFrozenUDTIT.java
index 40b003e8b..a412c39e2 100644
--- a/testsAT/src/test/java/com/stratio/cassandra/lucene/testsAT/udt/CheckNonFrozenUDTAT.java
+++ b/testsAT/src/test/java/com/stratio/cassandra/lucene/testsAT/udt/CheckNonFrozenUDTIT.java
@@ -30,7 +30,7 @@
* @author Eduardo Alonso {@literal }
*/
@RunWith(JUnit4.class)
-public class CheckNonFrozenUDTAT extends BaseIT {
+public class CheckNonFrozenUDTIT extends BaseIT {
@Test
public void testNotFrozenUDTList() {
diff --git a/testsAT/src/test/java/com/stratio/cassandra/lucene/testsAT/udt/UDTPartialUpdateIT.java b/testsAT/src/test/java/com/stratio/cassandra/lucene/testsAT/udt/UDTPartialUpdateIT.java
index cbca75c82..209e19cdf 100644
--- a/testsAT/src/test/java/com/stratio/cassandra/lucene/testsAT/udt/UDTPartialUpdateIT.java
+++ b/testsAT/src/test/java/com/stratio/cassandra/lucene/testsAT/udt/UDTPartialUpdateIT.java
@@ -90,6 +90,7 @@ public void testNonFrozenPartialUpdate() {
} catch (InvalidQueryException e) {
if (e.getMessage().equals("Non-frozen User-Defined types are not supported, please use frozen<>")) {
logger.info("Ignoring UDT partial update test because it isn't supported by current Cassandra version");
+ utils.dropKeyspace();
return;
}
}
diff --git a/testsAT/src/test/java/com/stratio/cassandra/lucene/testsAT/udt/UDTValidationIT.java b/testsAT/src/test/java/com/stratio/cassandra/lucene/testsAT/udt/UDTValidationIT.java
index 5eab50509..0a5350b87 100644
--- a/testsAT/src/test/java/com/stratio/cassandra/lucene/testsAT/udt/UDTValidationIT.java
+++ b/testsAT/src/test/java/com/stratio/cassandra/lucene/testsAT/udt/UDTValidationIT.java
@@ -87,7 +87,7 @@ public void testInvalidCreateIndex() {
.createTable()
.createIndex(InvalidConfigurationInQueryException.class,
"'schema' is invalid : No column definition 'address.non-existent' " +
- "for mapper 'address.non-existent.latitude'")
+ "for field 'address.non-existent.latitude'")
.dropTable()
.dropKeyspace();
}
@@ -117,7 +117,7 @@ public void testInvalidCreateIndex2() {
.createTable()
.createIndex(InvalidConfigurationInQueryException.class,
"'schema' is invalid : No column definition 'address.non-existent' " +
- "for mapper 'address.non-existent'")
+ "for field 'address.non-existent'")
.dropTable()
.dropKeyspace();
}
@@ -189,7 +189,7 @@ public void testInvalidCreateIndex4() {
.createTable()
.createIndex(InvalidConfigurationInQueryException.class,
"'schema' is invalid : No column definition " +
- "'address.point.longitude.non-existent' for mapper 'address.point.longitude.non-existent'")
+ "'address.point.longitude.non-existent' for field 'address.point.longitude.non-existent'")
.dropTable()
.dropKeyspace();
}
diff --git a/testsAT/src/test/java/com/stratio/cassandra/lucene/testsAT/util/CassandraConfig.java b/testsAT/src/test/java/com/stratio/cassandra/lucene/testsAT/util/CassandraConfig.java
index 9f3b42b3a..2fc0c0e17 100644
--- a/testsAT/src/test/java/com/stratio/cassandra/lucene/testsAT/util/CassandraConfig.java
+++ b/testsAT/src/test/java/com/stratio/cassandra/lucene/testsAT/util/CassandraConfig.java
@@ -16,6 +16,7 @@
package com.stratio.cassandra.lucene.testsAT.util;
import com.datastax.driver.core.ConsistencyLevel;
+import com.stratio.cassandra.lucene.builder.index.Partitioner;
import java.net.InetAddress;
import java.net.UnknownHostException;
@@ -44,6 +45,7 @@ class CassandraConfig {
static final String COLUMN = getString("column", "lucene");
static final boolean USE_NEW_QUERY_SYNTAX = getBool("use_new_query_syntax", true);
static final int LIMIT = getInt("limit", 10000);
+ static final Partitioner PARTITIONER = new Partitioner.None();
static {
assert COLUMN != null || USE_NEW_QUERY_SYNTAX;
diff --git a/testsAT/src/test/java/com/stratio/cassandra/lucene/testsAT/util/CassandraUtils.java b/testsAT/src/test/java/com/stratio/cassandra/lucene/testsAT/util/CassandraUtils.java
index 2e22bb7a2..4244e98bd 100644
--- a/testsAT/src/test/java/com/stratio/cassandra/lucene/testsAT/util/CassandraUtils.java
+++ b/testsAT/src/test/java/com/stratio/cassandra/lucene/testsAT/util/CassandraUtils.java
@@ -256,7 +256,8 @@ public CassandraUtils truncateTable() {
public CassandraUtils createIndex() {
Index index = index(keyspace, table, indexName).column(indexColumn)
.refreshSeconds(REFRESH)
- .indexingThreads(THREADS);
+ .indexingThreads(THREADS)
+ .partitioner(PARTITIONER);
mappers.forEach(index::mapper);
analyzers.forEach(index::analyzer);
execute(index.build());