diff --git a/.ci/bwcVersions b/.ci/bwcVersions
index 347b88ecc3e4d..1ba3ee562317a 100644
--- a/.ci/bwcVersions
+++ b/.ci/bwcVersions
@@ -48,3 +48,5 @@ BWC_VERSION:
- "2.1.0"
- "2.1.1"
- "2.2.0"
+ - "2.2.1"
+ - "2.3.0"
diff --git a/.github/workflows/auto-release.yml b/.github/workflows/auto-release.yml
index 73f3b6c2487d3..b8d3912c5864a 100644
--- a/.github/workflows/auto-release.yml
+++ b/.github/workflows/auto-release.yml
@@ -3,7 +3,7 @@ name: Releases
on:
push:
tags:
- - '*.*.*'
+ - '*'
jobs:
@@ -12,11 +12,18 @@ jobs:
permissions:
contents: write
steps:
+ - name: GitHub App token
+ id: github_app_token
+ uses: tibdex/github-app-token@v1.5.0
+ with:
+ app_id: ${{ secrets.APP_ID }}
+ private_key: ${{ secrets.APP_PRIVATE_KEY }}
+ installation_id: 22958780
- name: Get tag
id: tag
uses: dawidd6/action-get-tag@v1
- uses: actions/checkout@v2
- uses: ncipollo/release-action@v1
with:
- token: ${{ secrets.GITHUB_TOKEN }}
+ github_token: ${{ steps.github_app_token.outputs.token }}
bodyFile: release-notes/opensearch.release-notes-${{steps.tag.outputs.tag}}.md
diff --git a/.github/workflows/gradle-check.yml b/.github/workflows/gradle-check.yml
index dec5ee15d0bea..cbaa7fa10fbb6 100644
--- a/.github/workflows/gradle-check.yml
+++ b/.github/workflows/gradle-check.yml
@@ -2,9 +2,9 @@ name: Gradle Check (Jenkins)
on:
push:
branches-ignore:
- - 'backport/*'
- - 'create-pull-request/*'
- - 'dependabot/*'
+ - 'backport/**'
+ - 'create-pull-request/**'
+ - 'dependabot/**'
pull_request_target:
types: [opened, synchronize, reopened]
diff --git a/DEVELOPER_GUIDE.md b/DEVELOPER_GUIDE.md
index ce84d9658a808..8c2a6b4889122 100644
--- a/DEVELOPER_GUIDE.md
+++ b/DEVELOPER_GUIDE.md
@@ -4,7 +4,8 @@
- [Install Prerequisites](#install-prerequisites)
- [JDK 11](#jdk-11)
- [JDK 14](#jdk-14)
- - [Runtime JDK](#runtime-jdk)
+ - [JDK 17](#jdk-17)
+ - [Custom Runtime JDK](#custom-runtime-jdk)
- [Windows](#windows)
- [Docker](#docker)
- [Build](#build)
@@ -12,6 +13,7 @@
- [Run OpenSearch](#run-opensearch)
- [Use an Editor](#use-an-editor)
- [IntelliJ IDEA](#intellij-idea)
+ - [Remote development using JetBrains Gateway](#remote-development-using-jetbrains-gateway)
- [Visual Studio Code](#visual-studio-code)
- [Eclipse](#eclipse)
- [Project Layout](#project-layout)
@@ -35,6 +37,7 @@
- [testImplementation](#testimplementation)
- [Gradle Plugins](#gradle-plugins)
- [Distribution Download Plugin](#distribution-download-plugin)
+ - [Creating fat-JAR of a Module](#creating-fat-jar-of-a-module)
- [Misc](#misc)
- [git-secrets](#git-secrets)
- [Installation](#installation)
@@ -49,7 +52,7 @@
- [Submitting Changes](#submitting-changes)
- [Backports](#backports)
- [LineLint](#linelint)
- - [Lucene Snapshots](#lucene-snapshots)
+- [Lucene Snapshots](#lucene-snapshots)
# Developer Guide
@@ -374,6 +377,42 @@ The Distribution Download plugin downloads the latest version of OpenSearch by d
./gradlew integTest -PcustomDistributionUrl="https://ci.opensearch.org/ci/dbc/bundle-build/1.2.0/1127/linux/x64/dist/opensearch-1.2.0-linux-x64.tar.gz"
```
+### Creating fat-JAR of a Module
+
+A fat-JAR (or an uber-JAR) is a JAR that contains the classes from all the libraries on which your project depends, as well as the classes of the current project.
+
+There might be cases where a developer would like to add some custom logic to the code of a module (or multiple modules) and generate a fat-JAR that can be directly used by the dependency management tool. For example, in [#3665](https://github.com/opensearch-project/OpenSearch/pull/3665) a developer wanted to provide a tentative patch as a fat-JAR to a consumer for changes made in the high level REST client.
+
+Use the [Gradle Shadow plugin](https://imperceptiblethoughts.com/shadow/).
+Add the following to the `build.gradle` file of the module for which you want to create the fat-JAR, e.g. `client/rest-high-level/build.gradle`:
+
+```
+apply plugin: 'com.github.johnrengelman.shadow'
+```
+
+Run the `shadowJar` command using:
+```
+./gradlew :client:rest-high-level:shadowJar
+```
+
+This will generate a fat-JAR in the `build/distributions` folder of the module, e.g. `./client/rest-high-level/build/distributions/opensearch-rest-high-level-client-1.4.0-SNAPSHOT.jar`.
+
+You can further customize your fat-JAR by configuring the plugin. More information about the Shadow plugin can be found [here](https://imperceptiblethoughts.com/shadow/).
+
+To use the generated JAR, install the JAR locally, e.g.
+```
+mvn install:install-file -Dfile=src/main/resources/opensearch-rest-high-level-client-1.4.0-SNAPSHOT.jar -DgroupId=org.opensearch.client -DartifactId=opensearch-rest-high-level-client -Dversion=1.4.0-SNAPSHOT -Dpackaging=jar -DgeneratePom=true
+```
+
+Refer to the installed JAR as you would any other Maven artifact, e.g.
+
+```
+<dependency>
+    <groupId>org.opensearch.client</groupId>
+    <artifactId>opensearch-rest-high-level-client</artifactId>
+    <version>1.4.0-SNAPSHOT</version>
+</dependency>
+```
## Misc
diff --git a/build.gradle b/build.gradle
index e0bb961ce14c2..ce5ea6cdd7e11 100644
--- a/build.gradle
+++ b/build.gradle
@@ -55,7 +55,7 @@ plugins {
id 'lifecycle-base'
id 'opensearch.docker-support'
id 'opensearch.global-build-info'
- id "com.diffplug.spotless" version "6.9.0" apply false
+ id "com.diffplug.spotless" version "6.9.1" apply false
id "org.gradle.test-retry" version "1.4.0" apply false
id "test-report-aggregation"
id 'jacoco-report-aggregation'
diff --git a/buildSrc/src/main/java/org/opensearch/gradle/test/DistroTestPlugin.java b/buildSrc/src/main/java/org/opensearch/gradle/test/DistroTestPlugin.java
index c947a457d33ec..b14e93ecfd22d 100644
--- a/buildSrc/src/main/java/org/opensearch/gradle/test/DistroTestPlugin.java
+++ b/buildSrc/src/main/java/org/opensearch/gradle/test/DistroTestPlugin.java
@@ -75,9 +75,9 @@
import java.util.stream.Stream;
public class DistroTestPlugin implements Plugin<Project> {
- private static final String SYSTEM_JDK_VERSION = "11.0.15+10";
+ private static final String SYSTEM_JDK_VERSION = "11.0.16+8";
private static final String SYSTEM_JDK_VENDOR = "adoptium";
- private static final String GRADLE_JDK_VERSION = "17.0.3+7";
+ private static final String GRADLE_JDK_VERSION = "17.0.4+8";
private static final String GRADLE_JDK_VENDOR = "adoptium";
// all distributions used by distro tests. this is temporary until tests are per distribution
diff --git a/buildSrc/src/main/java/org/opensearch/gradle/testclusters/OpenSearchCluster.java b/buildSrc/src/main/java/org/opensearch/gradle/testclusters/OpenSearchCluster.java
index ef52adab6377a..0f5348d5a8dcf 100644
--- a/buildSrc/src/main/java/org/opensearch/gradle/testclusters/OpenSearchCluster.java
+++ b/buildSrc/src/main/java/org/opensearch/gradle/testclusters/OpenSearchCluster.java
@@ -84,6 +84,8 @@ public class OpenSearchCluster implements TestClusterConfiguration, Named {
private final ArchiveOperations archiveOperations;
private int nodeIndex = 0;
+ private int zoneCount = 1;
+
public OpenSearchCluster(
String clusterName,
Project project,
@@ -104,13 +106,21 @@ public OpenSearchCluster(
this.bwcJdk = bwcJdk;
// Always add the first node
- addNode(clusterName + "-0");
+ String zone = hasZoneProperty() ? "zone-1" : "";
+ addNode(clusterName + "-0", zone);
// configure the cluster name eagerly so all nodes know about it
this.nodes.all((node) -> node.defaultConfig.put("cluster.name", safeName(clusterName)));
addWaitForClusterHealth();
}
+ public void setNumberOfZones(int zoneCount) {
+ if (zoneCount < 1) {
+ throw new IllegalArgumentException("Number of zones should be >= 1 but was " + zoneCount + " for " + this);
+ }
+ this.zoneCount = zoneCount;
+ }
+
public void setNumberOfNodes(int numberOfNodes) {
checkFrozen();
@@ -124,12 +134,31 @@ public void setNumberOfNodes(int numberOfNodes) {
);
}
- for (int i = nodes.size(); i < numberOfNodes; i++) {
- addNode(clusterName + "-" + i);
+ if (numberOfNodes < zoneCount) {
+ throw new IllegalArgumentException(
+ "Number of nodes should be >= zoneCount but was " + numberOfNodes + " for " + this.zoneCount
+ );
}
+
+ if (hasZoneProperty()) {
+ int currentZone;
+ for (int i = nodes.size(); i < numberOfNodes; i++) {
+ currentZone = i % zoneCount + 1;
+ String zoneName = "zone-" + currentZone;
+ addNode(clusterName + "-" + i, zoneName);
+ }
+ } else {
+ for (int i = nodes.size(); i < numberOfNodes; i++) {
+ addNode(clusterName + "-" + i, "");
+ }
+ }
+ }
+
+ private boolean hasZoneProperty() {
+ return this.project.findProperty("numZones") != null;
}
- private void addNode(String nodeName) {
+ private void addNode(String nodeName, String zoneName) {
OpenSearchNode newNode = new OpenSearchNode(
path,
nodeName,
@@ -138,7 +167,8 @@ private void addNode(String nodeName) {
fileSystemOperations,
archiveOperations,
workingDirBase,
- bwcJdk
+ bwcJdk,
+ zoneName
);
// configure the cluster name eagerly
newNode.defaultConfig.put("cluster.name", safeName(clusterName));
diff --git a/buildSrc/src/main/java/org/opensearch/gradle/testclusters/OpenSearchNode.java b/buildSrc/src/main/java/org/opensearch/gradle/testclusters/OpenSearchNode.java
index b051c15e81d6d..ab765efde7885 100644
--- a/buildSrc/src/main/java/org/opensearch/gradle/testclusters/OpenSearchNode.java
+++ b/buildSrc/src/main/java/org/opensearch/gradle/testclusters/OpenSearchNode.java
@@ -32,6 +32,7 @@
package org.opensearch.gradle.testclusters;
import org.apache.commons.io.FileUtils;
+import org.apache.commons.lang.StringUtils;
import org.opensearch.gradle.Architecture;
import org.opensearch.gradle.DistributionDownloadPlugin;
import org.opensearch.gradle.OpenSearchDistribution;
@@ -175,6 +176,8 @@ public class OpenSearchNode implements TestClusterConfiguration {
private final Config legacyESConfig;
private Config currentConfig;
+ private String zone;
+
OpenSearchNode(
String path,
String name,
@@ -183,7 +186,8 @@ public class OpenSearchNode implements TestClusterConfiguration {
FileSystemOperations fileSystemOperations,
ArchiveOperations archiveOperations,
File workingDirBase,
- Jdk bwcJdk
+ Jdk bwcJdk,
+ String zone
) {
this.path = path;
this.name = name;
@@ -205,6 +209,7 @@ public class OpenSearchNode implements TestClusterConfiguration {
opensearchConfig = Config.getOpenSearchConfig(workingDir);
legacyESConfig = Config.getLegacyESConfig(workingDir);
currentConfig = opensearchConfig;
+ this.zone = zone;
}
/*
@@ -1239,6 +1244,10 @@ private void createConfiguration() {
baseConfig.put("path.logs", confPathLogs.toAbsolutePath().toString());
baseConfig.put("path.shared_data", workingDir.resolve("sharedData").toString());
baseConfig.put("node.attr.testattr", "test");
+ if (StringUtils.isNotBlank(zone)) {
+ baseConfig.put("cluster.routing.allocation.awareness.attributes", "zone");
+ baseConfig.put("node.attr.zone", zone);
+ }
baseConfig.put("node.portsfile", "true");
baseConfig.put("http.port", httpPort);
if (getVersion().onOrAfter(Version.fromString("6.7.0"))) {
diff --git a/buildSrc/version.properties b/buildSrc/version.properties
index fc751d8461e92..4af1acfed0ab2 100644
--- a/buildSrc/version.properties
+++ b/buildSrc/version.properties
@@ -1,8 +1,8 @@
opensearch = 3.0.0
-lucene = 9.3.0
+lucene = 9.4.0-snapshot-ddf0d0a
bundled_jdk_vendor = adoptium
-bundled_jdk = 17.0.3+7
+bundled_jdk = 17.0.4+8
diff --git a/client/rest-high-level/src/main/java/org/opensearch/client/RestHighLevelClient.java b/client/rest-high-level/src/main/java/org/opensearch/client/RestHighLevelClient.java
index d293b979debb5..7ae8f8826c5a4 100644
--- a/client/rest-high-level/src/main/java/org/opensearch/client/RestHighLevelClient.java
+++ b/client/rest-high-level/src/main/java/org/opensearch/client/RestHighLevelClient.java
@@ -157,7 +157,6 @@
import org.opensearch.search.aggregations.metrics.AvgAggregationBuilder;
import org.opensearch.search.aggregations.metrics.CardinalityAggregationBuilder;
import org.opensearch.search.aggregations.metrics.ExtendedStatsAggregationBuilder;
-import org.opensearch.search.aggregations.metrics.GeoBoundsAggregationBuilder;
import org.opensearch.search.aggregations.metrics.GeoCentroidAggregationBuilder;
import org.opensearch.search.aggregations.metrics.InternalHDRPercentileRanks;
import org.opensearch.search.aggregations.metrics.InternalHDRPercentiles;
@@ -169,7 +168,6 @@
import org.opensearch.search.aggregations.metrics.ParsedAvg;
import org.opensearch.search.aggregations.metrics.ParsedCardinality;
import org.opensearch.search.aggregations.metrics.ParsedExtendedStats;
-import org.opensearch.search.aggregations.metrics.ParsedGeoBounds;
import org.opensearch.search.aggregations.metrics.ParsedGeoCentroid;
import org.opensearch.search.aggregations.metrics.ParsedHDRPercentileRanks;
import org.opensearch.search.aggregations.metrics.ParsedHDRPercentiles;
@@ -2116,7 +2114,6 @@ static List<NamedXContentRegistry.Entry> getDefaultNamedXContents() {
map.put(StatsBucketPipelineAggregationBuilder.NAME, (p, c) -> ParsedStatsBucket.fromXContent(p, (String) c));
map.put(ExtendedStatsAggregationBuilder.NAME, (p, c) -> ParsedExtendedStats.fromXContent(p, (String) c));
map.put(ExtendedStatsBucketPipelineAggregationBuilder.NAME, (p, c) -> ParsedExtendedStatsBucket.fromXContent(p, (String) c));
- map.put(GeoBoundsAggregationBuilder.NAME, (p, c) -> ParsedGeoBounds.fromXContent(p, (String) c));
map.put(GeoCentroidAggregationBuilder.NAME, (p, c) -> ParsedGeoCentroid.fromXContent(p, (String) c));
map.put(HistogramAggregationBuilder.NAME, (p, c) -> ParsedHistogram.fromXContent(p, (String) c));
map.put(DateHistogramAggregationBuilder.NAME, (p, c) -> ParsedDateHistogram.fromXContent(p, (String) c));
diff --git a/client/rest-high-level/src/test/java/org/opensearch/client/RestHighLevelClientTests.java b/client/rest-high-level/src/test/java/org/opensearch/client/RestHighLevelClientTests.java
index efcc13921c398..3da0f81023f72 100644
--- a/client/rest-high-level/src/test/java/org/opensearch/client/RestHighLevelClientTests.java
+++ b/client/rest-high-level/src/test/java/org/opensearch/client/RestHighLevelClientTests.java
@@ -885,7 +885,8 @@ public void testApiNamingConventions() throws Exception {
"nodes.hot_threads",
"nodes.usage",
"nodes.reload_secure_settings",
- "search_shards", };
+ "search_shards",
+ "remote_store.restore", };
List<String> booleanReturnMethods = Arrays.asList("security.enable_user", "security.disable_user", "security.change_password");
Set<String> deprecatedMethods = new HashSet<>();
deprecatedMethods.add("indices.force_merge");
diff --git a/distribution/docker/src/docker/config/log4j2.properties b/distribution/docker/src/docker/config/log4j2.properties
index a8c54137c7fd2..761478a9fdc6e 100644
--- a/distribution/docker/src/docker/config/log4j2.properties
+++ b/distribution/docker/src/docker/config/log4j2.properties
@@ -53,3 +53,13 @@ logger.index_indexing_slowlog.name = index.indexing.slowlog.index
logger.index_indexing_slowlog.level = trace
logger.index_indexing_slowlog.appenderRef.index_indexing_slowlog_rolling.ref = index_indexing_slowlog_rolling
logger.index_indexing_slowlog.additivity = false
+
+appender.task_detailslog_rolling.type = Console
+appender.task_detailslog_rolling.name = task_detailslog_rolling
+appender.task_detailslog_rolling.layout.type = OpenSearchJsonLayout
+appender.task_detailslog_rolling.layout.type_name = task_detailslog
+
+logger.task_detailslog_rolling.name = task.detailslog
+logger.task_detailslog_rolling.level = trace
+logger.task_detailslog_rolling.appenderRef.task_detailslog_rolling.ref = task_detailslog_rolling
+logger.task_detailslog_rolling.additivity = false
diff --git a/distribution/src/config/log4j2.properties b/distribution/src/config/log4j2.properties
index 4820396c79eb7..bb27aaf2e22e6 100644
--- a/distribution/src/config/log4j2.properties
+++ b/distribution/src/config/log4j2.properties
@@ -195,3 +195,40 @@ logger.index_indexing_slowlog.level = trace
logger.index_indexing_slowlog.appenderRef.index_indexing_slowlog_rolling.ref = index_indexing_slowlog_rolling
logger.index_indexing_slowlog.appenderRef.index_indexing_slowlog_rolling_old.ref = index_indexing_slowlog_rolling_old
logger.index_indexing_slowlog.additivity = false
+
+######## Task details log JSON ####################
+appender.task_detailslog_rolling.type = RollingFile
+appender.task_detailslog_rolling.name = task_detailslog_rolling
+appender.task_detailslog_rolling.fileName = ${sys:opensearch.logs.base_path}${sys:file.separator}${sys:opensearch.logs.cluster_name}_task_detailslog.json
+appender.task_detailslog_rolling.filePermissions = rw-r-----
+appender.task_detailslog_rolling.layout.type = OpenSearchJsonLayout
+appender.task_detailslog_rolling.layout.type_name = task_detailslog
+appender.task_detailslog_rolling.layout.opensearchmessagefields=taskId,type,action,description,start_time_millis,resource_stats,metadata
+
+appender.task_detailslog_rolling.filePattern = ${sys:opensearch.logs.base_path}${sys:file.separator}${sys:opensearch.logs.cluster_name}_task_detailslog-%i.json.gz
+appender.task_detailslog_rolling.policies.type = Policies
+appender.task_detailslog_rolling.policies.size.type = SizeBasedTriggeringPolicy
+appender.task_detailslog_rolling.policies.size.size = 1GB
+appender.task_detailslog_rolling.strategy.type = DefaultRolloverStrategy
+appender.task_detailslog_rolling.strategy.max = 4
+#################################################
+######## Task details log - old style pattern ####
+appender.task_detailslog_rolling_old.type = RollingFile
+appender.task_detailslog_rolling_old.name = task_detailslog_rolling_old
+appender.task_detailslog_rolling_old.fileName = ${sys:opensearch.logs.base_path}${sys:file.separator}${sys:opensearch.logs.cluster_name}_task_detailslog.log
+appender.task_detailslog_rolling_old.filePermissions = rw-r-----
+appender.task_detailslog_rolling_old.layout.type = PatternLayout
+appender.task_detailslog_rolling_old.layout.pattern = [%d{ISO8601}][%-5p][%-25c{1.}] [%node_name]%marker %m%n
+
+appender.task_detailslog_rolling_old.filePattern = ${sys:opensearch.logs.base_path}${sys:file.separator}${sys:opensearch.logs.cluster_name}_task_detailslog-%i.log.gz
+appender.task_detailslog_rolling_old.policies.type = Policies
+appender.task_detailslog_rolling_old.policies.size.type = SizeBasedTriggeringPolicy
+appender.task_detailslog_rolling_old.policies.size.size = 1GB
+appender.task_detailslog_rolling_old.strategy.type = DefaultRolloverStrategy
+appender.task_detailslog_rolling_old.strategy.max = 4
+#################################################
+logger.task_detailslog_rolling.name = task.detailslog
+logger.task_detailslog_rolling.level = trace
+logger.task_detailslog_rolling.appenderRef.task_detailslog_rolling.ref = task_detailslog_rolling
+logger.task_detailslog_rolling.appenderRef.task_detailslog_rolling_old.ref = task_detailslog_rolling_old
+logger.task_detailslog_rolling.additivity = false
diff --git a/distribution/tools/upgrade-cli/src/test/java/org/opensearch/upgrade/ImportLog4jPropertiesTaskTests.java b/distribution/tools/upgrade-cli/src/test/java/org/opensearch/upgrade/ImportLog4jPropertiesTaskTests.java
index 7f67e08c66b9e..96544d3297ad4 100644
--- a/distribution/tools/upgrade-cli/src/test/java/org/opensearch/upgrade/ImportLog4jPropertiesTaskTests.java
+++ b/distribution/tools/upgrade-cli/src/test/java/org/opensearch/upgrade/ImportLog4jPropertiesTaskTests.java
@@ -67,7 +67,7 @@ public void testImportLog4jPropertiesTask() throws IOException {
Properties properties = new Properties();
properties.load(Files.newInputStream(taskInput.getOpenSearchConfig().resolve(ImportLog4jPropertiesTask.LOG4J_PROPERTIES)));
assertThat(properties, is(notNullValue()));
- assertThat(properties.entrySet(), hasSize(137));
+ assertThat(properties.entrySet(), hasSize(165));
assertThat(properties.get("appender.rolling.layout.type"), equalTo("OpenSearchJsonLayout"));
assertThat(
properties.get("appender.deprecation_rolling.fileName"),
diff --git a/distribution/tools/upgrade-cli/src/test/resources/config/log4j2.properties b/distribution/tools/upgrade-cli/src/test/resources/config/log4j2.properties
index b9ad71121165a..4b92d3fc62376 100644
--- a/distribution/tools/upgrade-cli/src/test/resources/config/log4j2.properties
+++ b/distribution/tools/upgrade-cli/src/test/resources/config/log4j2.properties
@@ -176,3 +176,38 @@ logger.index_indexing_slowlog.level = trace
logger.index_indexing_slowlog.appenderRef.index_indexing_slowlog_rolling.ref = index_indexing_slowlog_rolling
logger.index_indexing_slowlog.appenderRef.index_indexing_slowlog_rolling_old.ref = index_indexing_slowlog_rolling_old
logger.index_indexing_slowlog.additivity = false
+
+######## Task details log JSON ####################
+appender.task_detailslog_rolling.type = RollingFile
+appender.task_detailslog_rolling.name = task_detailslog_rolling
+appender.task_detailslog_rolling.fileName = ${sys:es.logs.base_path}${sys:file.separator}${sys:es.logs.cluster_name}_task_detailslog.json
+appender.task_detailslog_rolling.layout.type = ESJsonLayout
+appender.task_detailslog_rolling.layout.type_name = task_detailslog
+appender.task_detailslog_rolling.layout.esmessagefields=taskId,type,action,description,start_time_millis,resource_stats,metadata
+
+appender.task_detailslog_rolling.filePattern = ${sys:es.logs.base_path}${sys:file.separator}${sys:es.logs.cluster_name}_task_detailslog-%i.json.gz
+appender.task_detailslog_rolling.policies.type = Policies
+appender.task_detailslog_rolling.policies.size.type = SizeBasedTriggeringPolicy
+appender.task_detailslog_rolling.policies.size.size = 1GB
+appender.task_detailslog_rolling.strategy.type = DefaultRolloverStrategy
+appender.task_detailslog_rolling.strategy.max = 4
+#################################################
+######## Task details log - old style pattern ####
+appender.task_detailslog_rolling_old.type = RollingFile
+appender.task_detailslog_rolling_old.name = task_detailslog_rolling_old
+appender.task_detailslog_rolling_old.fileName = ${sys:es.logs.base_path}${sys:file.separator}${sys:es.logs.cluster_name}_task_detailslog.log
+appender.task_detailslog_rolling_old.layout.type = PatternLayout
+appender.task_detailslog_rolling_old.layout.pattern = [%d{ISO8601}][%-5p][%-25c{1.}] [%node_name]%marker %m%n
+
+appender.task_detailslog_rolling_old.filePattern = ${sys:es.logs.base_path}${sys:file.separator}${sys:es.logs.cluster_name}_task_detailslog-%i.log.gz
+appender.task_detailslog_rolling_old.policies.type = Policies
+appender.task_detailslog_rolling_old.policies.size.type = SizeBasedTriggeringPolicy
+appender.task_detailslog_rolling_old.policies.size.size = 1GB
+appender.task_detailslog_rolling_old.strategy.type = DefaultRolloverStrategy
+appender.task_detailslog_rolling_old.strategy.max = 4
+#################################################
+logger.task_detailslog_rolling.name = task.detailslog
+logger.task_detailslog_rolling.level = trace
+logger.task_detailslog_rolling.appenderRef.task_detailslog_rolling.ref = task_detailslog_rolling
+logger.task_detailslog_rolling.appenderRef.task_detailslog_rolling_old.ref = task_detailslog_rolling_old
+logger.task_detailslog_rolling.additivity = false
diff --git a/gradle.properties b/gradle.properties
index 86af9ad62b1a4..73df0940ce181 100644
--- a/gradle.properties
+++ b/gradle.properties
@@ -19,7 +19,7 @@ org.gradle.jvmargs=-Xmx3g -XX:+HeapDumpOnOutOfMemoryError -Xss2m \
--add-exports jdk.compiler/com.sun.tools.javac.parser=ALL-UNNAMED \
--add-exports jdk.compiler/com.sun.tools.javac.tree=ALL-UNNAMED \
--add-exports jdk.compiler/com.sun.tools.javac.util=ALL-UNNAMED
-options.forkOptions.memoryMaximumSize=2g
+options.forkOptions.memoryMaximumSize=3g
# Disable duplicate project id detection
# See https://docs.gradle.org/current/userguide/upgrading_version_6.html#duplicate_project_names_may_cause_publication_to_fail
diff --git a/gradle/run.gradle b/gradle/run.gradle
index 5a1fed06c0ef7..639479e97d28f 100644
--- a/gradle/run.gradle
+++ b/gradle/run.gradle
@@ -31,9 +31,14 @@ import org.opensearch.gradle.testclusters.RunTask
apply plugin: 'opensearch.testclusters'
+def numNodes = findProperty('numNodes') as Integer ?: 1
+def numZones = findProperty('numZones') as Integer ?: 1
+
testClusters {
runTask {
testDistribution = 'archive'
+ if (numZones > 1) numberOfZones = numZones
+ if (numNodes > 1) numberOfNodes = numNodes
}
}
diff --git a/gradle/wrapper/gradle-wrapper.properties b/gradle/wrapper/gradle-wrapper.properties
index 24c164f0f1e12..58e9a16f424db 100644
--- a/gradle/wrapper/gradle-wrapper.properties
+++ b/gradle/wrapper/gradle-wrapper.properties
@@ -11,7 +11,7 @@
distributionBase=GRADLE_USER_HOME
distributionPath=wrapper/dists
-distributionUrl=https\://services.gradle.org/distributions/gradle-7.5-all.zip
+distributionUrl=https\://services.gradle.org/distributions/gradle-7.5.1-all.zip
zipStoreBase=GRADLE_USER_HOME
zipStorePath=wrapper/dists
-distributionSha256Sum=97a52d145762adc241bad7fd18289bf7f6801e08ece6badf80402fe2b9f250b1
+distributionSha256Sum=db9c8211ed63f61f60292c69e80d89196f9eb36665e369e7f00ac4cc841c2219
diff --git a/modules/aggs-matrix-stats/build.gradle b/modules/aggs-matrix-stats/build.gradle
index dd3aee61f7664..705fa17456a79 100644
--- a/modules/aggs-matrix-stats/build.gradle
+++ b/modules/aggs-matrix-stats/build.gradle
@@ -31,7 +31,7 @@ apply plugin: 'opensearch.yaml-rest-test'
opensearchplugin {
description 'Adds aggregations whose input are a list of numeric fields and output includes a matrix.'
- classname 'org.opensearch.search.aggregations.matrix.MatrixAggregationPlugin'
+ classname 'org.opensearch.search.aggregations.matrix.MatrixAggregationModulePlugin'
hasClientJar = true
}
diff --git a/modules/aggs-matrix-stats/src/main/java/org/opensearch/search/aggregations/matrix/MatrixAggregationPlugin.java b/modules/aggs-matrix-stats/src/main/java/org/opensearch/search/aggregations/matrix/MatrixAggregationModulePlugin.java
similarity index 95%
rename from modules/aggs-matrix-stats/src/main/java/org/opensearch/search/aggregations/matrix/MatrixAggregationPlugin.java
rename to modules/aggs-matrix-stats/src/main/java/org/opensearch/search/aggregations/matrix/MatrixAggregationModulePlugin.java
index debeacffe321e..df1926282d500 100644
--- a/modules/aggs-matrix-stats/src/main/java/org/opensearch/search/aggregations/matrix/MatrixAggregationPlugin.java
+++ b/modules/aggs-matrix-stats/src/main/java/org/opensearch/search/aggregations/matrix/MatrixAggregationModulePlugin.java
@@ -42,7 +42,7 @@
import static java.util.Collections.singletonList;
-public class MatrixAggregationPlugin extends Plugin implements SearchPlugin {
+public class MatrixAggregationModulePlugin extends Plugin implements SearchPlugin {
@Override
public List<AggregationSpec> getAggregations() {
return singletonList(
diff --git a/modules/aggs-matrix-stats/src/test/java/org/opensearch/search/aggregations/matrix/stats/InternalMatrixStatsTests.java b/modules/aggs-matrix-stats/src/test/java/org/opensearch/search/aggregations/matrix/stats/InternalMatrixStatsTests.java
index e523c77e0786f..cc16b9b23b5d5 100644
--- a/modules/aggs-matrix-stats/src/test/java/org/opensearch/search/aggregations/matrix/stats/InternalMatrixStatsTests.java
+++ b/modules/aggs-matrix-stats/src/test/java/org/opensearch/search/aggregations/matrix/stats/InternalMatrixStatsTests.java
@@ -43,7 +43,7 @@
import org.opensearch.search.aggregations.Aggregation;
import org.opensearch.search.aggregations.InternalAggregation;
import org.opensearch.search.aggregations.ParsedAggregation;
-import org.opensearch.search.aggregations.matrix.MatrixAggregationPlugin;
+import org.opensearch.search.aggregations.matrix.MatrixAggregationModulePlugin;
import org.opensearch.search.aggregations.matrix.stats.InternalMatrixStats.Fields;
import org.opensearch.search.aggregations.pipeline.PipelineAggregator.PipelineTree;
import org.opensearch.test.InternalAggregationTestCase;
@@ -64,7 +64,7 @@ public class InternalMatrixStatsTests extends InternalAggregationTestCase<InternalMatrixStats> {
    protected List<SearchPlugin> getSearchPlugins() {
- return Collections.singletonList(new MatrixAggregationPlugin());
+ return Collections.singletonList(new MatrixAggregationModulePlugin());
}
}
diff --git a/modules/analysis-common/build.gradle b/modules/analysis-common/build.gradle
index be0acf7218c1e..58ecf79cda0d7 100644
--- a/modules/analysis-common/build.gradle
+++ b/modules/analysis-common/build.gradle
@@ -32,7 +32,7 @@ apply plugin: 'opensearch.internal-cluster-test'
opensearchplugin {
description 'Adds "built in" analyzers to OpenSearch.'
- classname 'org.opensearch.analysis.common.CommonAnalysisPlugin'
+ classname 'org.opensearch.analysis.common.CommonAnalysisModulePlugin'
extendedPlugins = ['lang-painless']
}
diff --git a/modules/analysis-common/src/internalClusterTest/java/org/opensearch/analysis/common/QueryStringWithAnalyzersIT.java b/modules/analysis-common/src/internalClusterTest/java/org/opensearch/analysis/common/QueryStringWithAnalyzersIT.java
index 8c2f83bf83d85..785e597857825 100644
--- a/modules/analysis-common/src/internalClusterTest/java/org/opensearch/analysis/common/QueryStringWithAnalyzersIT.java
+++ b/modules/analysis-common/src/internalClusterTest/java/org/opensearch/analysis/common/QueryStringWithAnalyzersIT.java
@@ -48,7 +48,7 @@
public class QueryStringWithAnalyzersIT extends OpenSearchIntegTestCase {
@Override
protected Collection<Class<? extends Plugin>> nodePlugins() {
- return Arrays.asList(CommonAnalysisPlugin.class);
+ return Arrays.asList(CommonAnalysisModulePlugin.class);
}
/**
diff --git a/modules/analysis-common/src/main/java/org/opensearch/analysis/common/CommonAnalysisPlugin.java b/modules/analysis-common/src/main/java/org/opensearch/analysis/common/CommonAnalysisModulePlugin.java
similarity index 99%
rename from modules/analysis-common/src/main/java/org/opensearch/analysis/common/CommonAnalysisPlugin.java
rename to modules/analysis-common/src/main/java/org/opensearch/analysis/common/CommonAnalysisModulePlugin.java
index c69917ed52be8..57865e15d523a 100644
--- a/modules/analysis-common/src/main/java/org/opensearch/analysis/common/CommonAnalysisPlugin.java
+++ b/modules/analysis-common/src/main/java/org/opensearch/analysis/common/CommonAnalysisModulePlugin.java
@@ -167,9 +167,9 @@
import static org.opensearch.plugins.AnalysisPlugin.requiresAnalysisSettings;
-public class CommonAnalysisPlugin extends Plugin implements AnalysisPlugin, ScriptPlugin {
+public class CommonAnalysisModulePlugin extends Plugin implements AnalysisPlugin, ScriptPlugin {
- private static final DeprecationLogger deprecationLogger = DeprecationLogger.getLogger(CommonAnalysisPlugin.class);
+ private static final DeprecationLogger deprecationLogger = DeprecationLogger.getLogger(CommonAnalysisModulePlugin.class);
private final SetOnce<ScriptService> scriptService = new SetOnce<>();
diff --git a/modules/analysis-common/src/test/java/org/opensearch/analysis/common/ASCIIFoldingTokenFilterFactoryTests.java b/modules/analysis-common/src/test/java/org/opensearch/analysis/common/ASCIIFoldingTokenFilterFactoryTests.java
index 1a4651dc23fff..d107977237b9e 100644
--- a/modules/analysis-common/src/test/java/org/opensearch/analysis/common/ASCIIFoldingTokenFilterFactoryTests.java
+++ b/modules/analysis-common/src/test/java/org/opensearch/analysis/common/ASCIIFoldingTokenFilterFactoryTests.java
@@ -51,7 +51,7 @@ public void testDefault() throws IOException {
.put(Environment.PATH_HOME_SETTING.getKey(), createTempDir().toString())
.put("index.analysis.filter.my_ascii_folding.type", "asciifolding")
.build(),
- new CommonAnalysisPlugin()
+ new CommonAnalysisModulePlugin()
);
TokenFilterFactory tokenFilter = analysis.tokenFilter.get("my_ascii_folding");
String source = "Ansprüche";
@@ -68,7 +68,7 @@ public void testPreserveOriginal() throws IOException {
.put("index.analysis.filter.my_ascii_folding.type", "asciifolding")
.put("index.analysis.filter.my_ascii_folding.preserve_original", true)
.build(),
- new CommonAnalysisPlugin()
+ new CommonAnalysisModulePlugin()
);
TokenFilterFactory tokenFilter = analysis.tokenFilter.get("my_ascii_folding");
String source = "Ansprüche";
diff --git a/modules/analysis-common/src/test/java/org/opensearch/analysis/common/BaseWordDelimiterTokenFilterFactoryTestCase.java b/modules/analysis-common/src/test/java/org/opensearch/analysis/common/BaseWordDelimiterTokenFilterFactoryTestCase.java
index 9d54776755766..829ace512b5c8 100644
--- a/modules/analysis-common/src/test/java/org/opensearch/analysis/common/BaseWordDelimiterTokenFilterFactoryTestCase.java
+++ b/modules/analysis-common/src/test/java/org/opensearch/analysis/common/BaseWordDelimiterTokenFilterFactoryTestCase.java
@@ -60,7 +60,7 @@ public void testDefault() throws IOException {
.put(Environment.PATH_HOME_SETTING.getKey(), createTempDir().toString())
.put("index.analysis.filter.my_word_delimiter.type", type)
.build(),
- new CommonAnalysisPlugin()
+ new CommonAnalysisModulePlugin()
);
TokenFilterFactory tokenFilter = analysis.tokenFilter.get("my_word_delimiter");
String source = "PowerShot 500-42 wi-fi wi-fi-4000 j2se O'Neil's";
@@ -78,7 +78,7 @@ public void testCatenateWords() throws IOException {
.put("index.analysis.filter.my_word_delimiter.catenate_words", "true")
.put("index.analysis.filter.my_word_delimiter.generate_word_parts", "false")
.build(),
- new CommonAnalysisPlugin()
+ new CommonAnalysisModulePlugin()
);
TokenFilterFactory tokenFilter = analysis.tokenFilter.get("my_word_delimiter");
String source = "PowerShot 500-42 wi-fi wi-fi-4000 j2se O'Neil's";
@@ -96,7 +96,7 @@ public void testCatenateNumbers() throws IOException {
.put("index.analysis.filter.my_word_delimiter.generate_number_parts", "false")
.put("index.analysis.filter.my_word_delimiter.catenate_numbers", "true")
.build(),
- new CommonAnalysisPlugin()
+ new CommonAnalysisModulePlugin()
);
TokenFilterFactory tokenFilter = analysis.tokenFilter.get("my_word_delimiter");
String source = "PowerShot 500-42 wi-fi wi-fi-4000 j2se O'Neil's";
@@ -115,7 +115,7 @@ public void testCatenateAll() throws IOException {
.put("index.analysis.filter.my_word_delimiter.generate_number_parts", "false")
.put("index.analysis.filter.my_word_delimiter.catenate_all", "true")
.build(),
- new CommonAnalysisPlugin()
+ new CommonAnalysisModulePlugin()
);
TokenFilterFactory tokenFilter = analysis.tokenFilter.get("my_word_delimiter");
String source = "PowerShot 500-42 wi-fi wi-fi-4000 j2se O'Neil's";
@@ -132,7 +132,7 @@ public void testSplitOnCaseChange() throws IOException {
.put("index.analysis.filter.my_word_delimiter.type", type)
.put("index.analysis.filter.my_word_delimiter.split_on_case_change", "false")
.build(),
- new CommonAnalysisPlugin()
+ new CommonAnalysisModulePlugin()
);
TokenFilterFactory tokenFilter = analysis.tokenFilter.get("my_word_delimiter");
String source = "PowerShot";
@@ -149,7 +149,7 @@ public void testPreserveOriginal() throws IOException {
.put("index.analysis.filter.my_word_delimiter.type", type)
.put("index.analysis.filter.my_word_delimiter.preserve_original", "true")
.build(),
- new CommonAnalysisPlugin()
+ new CommonAnalysisModulePlugin()
);
TokenFilterFactory tokenFilter = analysis.tokenFilter.get("my_word_delimiter");
String source = "PowerShot 500-42 wi-fi wi-fi-4000 j2se O'Neil's";
@@ -186,7 +186,7 @@ public void testStemEnglishPossessive() throws IOException {
.put("index.analysis.filter.my_word_delimiter.type", type)
.put("index.analysis.filter.my_word_delimiter.stem_english_possessive", "false")
.build(),
- new CommonAnalysisPlugin()
+ new CommonAnalysisModulePlugin()
);
TokenFilterFactory tokenFilter = analysis.tokenFilter.get("my_word_delimiter");
String source = "PowerShot 500-42 wi-fi wi-fi-4000 j2se O'Neil's";
diff --git a/modules/analysis-common/src/test/java/org/opensearch/analysis/common/CJKFilterFactoryTests.java b/modules/analysis-common/src/test/java/org/opensearch/analysis/common/CJKFilterFactoryTests.java
index f2c0d9859cbe4..2f33194125652 100644
--- a/modules/analysis-common/src/test/java/org/opensearch/analysis/common/CJKFilterFactoryTests.java
+++ b/modules/analysis-common/src/test/java/org/opensearch/analysis/common/CJKFilterFactoryTests.java
@@ -52,7 +52,7 @@ public class CJKFilterFactoryTests extends OpenSearchTokenStreamTestCase {
@Before
public void setup() throws IOException {
- analysis = AnalysisTestsHelper.createTestAnalysisFromClassPath(createTempDir(), RESOURCE, new CommonAnalysisPlugin());
+ analysis = AnalysisTestsHelper.createTestAnalysisFromClassPath(createTempDir(), RESOURCE, new CommonAnalysisModulePlugin());
}
public void testDefault() throws IOException {
diff --git a/modules/analysis-common/src/test/java/org/opensearch/analysis/common/CommonAnalysisFactoryTests.java b/modules/analysis-common/src/test/java/org/opensearch/analysis/common/CommonAnalysisFactoryTests.java
index 4cf0d1de28717..1c4db089565ff 100644
--- a/modules/analysis-common/src/test/java/org/opensearch/analysis/common/CommonAnalysisFactoryTests.java
+++ b/modules/analysis-common/src/test/java/org/opensearch/analysis/common/CommonAnalysisFactoryTests.java
@@ -50,7 +50,7 @@
public class CommonAnalysisFactoryTests extends AnalysisFactoryTestCase {
public CommonAnalysisFactoryTests() {
- super(new CommonAnalysisPlugin());
+ super(new CommonAnalysisModulePlugin());
}
@Override
diff --git a/modules/analysis-common/src/test/java/org/opensearch/analysis/common/CommonGramsTokenFilterFactoryTests.java b/modules/analysis-common/src/test/java/org/opensearch/analysis/common/CommonGramsTokenFilterFactoryTests.java
index 04570be7a6f9e..713d61f294630 100644
--- a/modules/analysis-common/src/test/java/org/opensearch/analysis/common/CommonGramsTokenFilterFactoryTests.java
+++ b/modules/analysis-common/src/test/java/org/opensearch/analysis/common/CommonGramsTokenFilterFactoryTests.java
@@ -58,7 +58,7 @@ public void testDefault() throws IOException {
.build();
try {
- AnalysisTestsHelper.createTestAnalysisFromSettings(settings, new CommonAnalysisPlugin());
+ AnalysisTestsHelper.createTestAnalysisFromSettings(settings, new CommonAnalysisModulePlugin());
Assert.fail("[common_words] or [common_words_path] is set");
} catch (IllegalArgumentException e) {} catch (IOException e) {
fail("expected IAE");
@@ -333,7 +333,7 @@ private Path createHome() throws IOException {
}
private static OpenSearchTestCase.TestAnalysis createTestAnalysisFromSettings(Settings settings) throws IOException {
- return AnalysisTestsHelper.createTestAnalysisFromSettings(settings, new CommonAnalysisPlugin());
+ return AnalysisTestsHelper.createTestAnalysisFromSettings(settings, new CommonAnalysisModulePlugin());
}
}
diff --git a/modules/analysis-common/src/test/java/org/opensearch/analysis/common/CompoundAnalysisTests.java b/modules/analysis-common/src/test/java/org/opensearch/analysis/common/CompoundAnalysisTests.java
index e5ce7c818f72b..32556db3939b8 100644
--- a/modules/analysis-common/src/test/java/org/opensearch/analysis/common/CompoundAnalysisTests.java
+++ b/modules/analysis-common/src/test/java/org/opensearch/analysis/common/CompoundAnalysisTests.java
@@ -102,8 +102,8 @@ private List analyze(Settings settings, String analyzerName, String text
}
private AnalysisModule createAnalysisModule(Settings settings) throws IOException {
- CommonAnalysisPlugin commonAnalysisPlugin = new CommonAnalysisPlugin();
- return new AnalysisModule(TestEnvironment.newEnvironment(settings), Arrays.asList(commonAnalysisPlugin, new AnalysisPlugin() {
+ CommonAnalysisModulePlugin commonAnalysisModulePlugin = new CommonAnalysisModulePlugin();
+ return new AnalysisModule(TestEnvironment.newEnvironment(settings), Arrays.asList(commonAnalysisModulePlugin, new AnalysisPlugin() {
@Override
public Map> getTokenFilters() {
return singletonMap("myfilter", MyFilterTokenFilterFactory::new);
diff --git a/modules/analysis-common/src/test/java/org/opensearch/analysis/common/ConcatenateGraphTokenFilterFactoryTests.java b/modules/analysis-common/src/test/java/org/opensearch/analysis/common/ConcatenateGraphTokenFilterFactoryTests.java
index eaf571e7469d6..1a78690dffcf7 100644
--- a/modules/analysis-common/src/test/java/org/opensearch/analysis/common/ConcatenateGraphTokenFilterFactoryTests.java
+++ b/modules/analysis-common/src/test/java/org/opensearch/analysis/common/ConcatenateGraphTokenFilterFactoryTests.java
@@ -29,7 +29,7 @@ public class ConcatenateGraphTokenFilterFactoryTests extends OpenSearchTokenStre
public void testSimpleTokenizerAndConcatenate() throws IOException {
OpenSearchTestCase.TestAnalysis analysis = AnalysisTestsHelper.createTestAnalysisFromSettings(
Settings.builder().put(Environment.PATH_HOME_SETTING.getKey(), createTempDir().toString()).build(),
- new CommonAnalysisPlugin()
+ new CommonAnalysisModulePlugin()
);
TokenFilterFactory tokenFilter = analysis.tokenFilter.get("concatenate_graph");
@@ -47,7 +47,7 @@ public void testTokenizerCustomizedSeparator() throws IOException {
.put("index.analysis.filter.my_concatenate_graph.type", "concatenate_graph")
.put("index.analysis.filter.my_concatenate_graph.token_separator", "+")
.build(),
- new CommonAnalysisPlugin()
+ new CommonAnalysisModulePlugin()
);
TokenFilterFactory tokenFilter = analysis.tokenFilter.get("my_concatenate_graph");
@@ -65,7 +65,7 @@ public void testTokenizerEmptySeparator() throws IOException {
.put("index.analysis.filter.my_concatenate_graph.type", "concatenate_graph")
.put("index.analysis.filter.my_concatenate_graph.token_separator", "")
.build(),
- new CommonAnalysisPlugin()
+ new CommonAnalysisModulePlugin()
);
TokenFilterFactory tokenFilter = analysis.tokenFilter.get("my_concatenate_graph");
@@ -83,7 +83,7 @@ public void testPreservePositionIncrementsDefault() throws IOException {
.put("index.analysis.filter.my_concatenate_graph.type", "concatenate_graph")
.put("index.analysis.filter.my_concatenate_graph.token_separator", "+")
.build(),
- new CommonAnalysisPlugin()
+ new CommonAnalysisModulePlugin()
);
TokenFilterFactory tokenFilter = analysis.tokenFilter.get("my_concatenate_graph");
@@ -106,7 +106,7 @@ public void testPreservePositionIncrementsTrue() throws IOException {
.put("index.analysis.filter.my_concatenate_graph.token_separator", "+")
.put("index.analysis.filter.my_concatenate_graph.preserve_position_increments", "true")
.build(),
- new CommonAnalysisPlugin()
+ new CommonAnalysisModulePlugin()
);
TokenFilterFactory tokenFilter = analysis.tokenFilter.get("my_concatenate_graph");
@@ -132,7 +132,7 @@ public void testGraph() throws IOException {
.put("index.analysis.analyzer.my_analyzer.tokenizer", "whitespace")
.put("index.analysis.analyzer.my_analyzer.filter", "my_word_delimiter, my_concatenate_graph")
.build(),
- new CommonAnalysisPlugin()
+ new CommonAnalysisModulePlugin()
);
String source = "PowerShot Is AweSome";
@@ -166,7 +166,7 @@ public void testInvalidSeparator() {
.put("index.analysis.filter.my_concatenate_graph.type", "concatenate_graph")
.put("index.analysis.filter.my_concatenate_graph.token_separator", "11")
.build(),
- new CommonAnalysisPlugin()
+ new CommonAnalysisModulePlugin()
)
);
}
@@ -187,7 +187,7 @@ public void testMaxGraphExpansion() throws IOException {
.put("index.analysis.analyzer.my_analyzer.tokenizer", "whitespace")
.put("index.analysis.analyzer.my_analyzer.filter", "my_word_delimiter, my_concatenate_graph")
.build(),
- new CommonAnalysisPlugin()
+ new CommonAnalysisModulePlugin()
);
String source = "PowerShot Is AweSome";
diff --git a/modules/analysis-common/src/test/java/org/opensearch/analysis/common/DisableGraphQueryTests.java b/modules/analysis-common/src/test/java/org/opensearch/analysis/common/DisableGraphQueryTests.java
index 35915af8f263d..9bfc3a77e8c44 100644
--- a/modules/analysis-common/src/test/java/org/opensearch/analysis/common/DisableGraphQueryTests.java
+++ b/modules/analysis-common/src/test/java/org/opensearch/analysis/common/DisableGraphQueryTests.java
@@ -74,7 +74,7 @@ public class DisableGraphQueryTests extends OpenSearchSingleNodeTestCase {
@Override
protected Collection> getPlugins() {
- return Collections.singleton(CommonAnalysisPlugin.class);
+ return Collections.singleton(CommonAnalysisModulePlugin.class);
}
@Before
diff --git a/modules/analysis-common/src/test/java/org/opensearch/analysis/common/EdgeNGramTokenFilterFactoryTests.java b/modules/analysis-common/src/test/java/org/opensearch/analysis/common/EdgeNGramTokenFilterFactoryTests.java
index b3724d99f10ed..e62a9c52edc5c 100644
--- a/modules/analysis-common/src/test/java/org/opensearch/analysis/common/EdgeNGramTokenFilterFactoryTests.java
+++ b/modules/analysis-common/src/test/java/org/opensearch/analysis/common/EdgeNGramTokenFilterFactoryTests.java
@@ -52,7 +52,7 @@ public void testDefault() throws IOException {
.put(Environment.PATH_HOME_SETTING.getKey(), createTempDir().toString())
.put("index.analysis.filter.my_edge_ngram.type", "edge_ngram")
.build(),
- new CommonAnalysisPlugin()
+ new CommonAnalysisModulePlugin()
);
TokenFilterFactory tokenFilter = analysis.tokenFilter.get("my_edge_ngram");
String source = "foo";
@@ -69,7 +69,7 @@ public void testPreserveOriginal() throws IOException {
.put("index.analysis.filter.my_edge_ngram.type", "edge_ngram")
.put("index.analysis.filter.my_edge_ngram.preserve_original", true)
.build(),
- new CommonAnalysisPlugin()
+ new CommonAnalysisModulePlugin()
);
TokenFilterFactory tokenFilter = analysis.tokenFilter.get("my_edge_ngram");
String source = "foo";
diff --git a/modules/analysis-common/src/test/java/org/opensearch/analysis/common/EdgeNGramTokenizerTests.java b/modules/analysis-common/src/test/java/org/opensearch/analysis/common/EdgeNGramTokenizerTests.java
index e77f895d05661..34fdec4135bfe 100644
--- a/modules/analysis-common/src/test/java/org/opensearch/analysis/common/EdgeNGramTokenizerTests.java
+++ b/modules/analysis-common/src/test/java/org/opensearch/analysis/common/EdgeNGramTokenizerTests.java
@@ -60,7 +60,7 @@ private IndexAnalyzers buildAnalyzers(Version version, String tokenizer) throws
.put("index.analysis.analyzer.my_analyzer.tokenizer", tokenizer)
.build();
IndexSettings idxSettings = IndexSettingsModule.newIndexSettings("index", indexSettings);
- return new AnalysisModule(TestEnvironment.newEnvironment(settings), Collections.singletonList(new CommonAnalysisPlugin()))
+ return new AnalysisModule(TestEnvironment.newEnvironment(settings), Collections.singletonList(new CommonAnalysisModulePlugin()))
.getAnalysisRegistry()
.build(idxSettings);
}
diff --git a/modules/analysis-common/src/test/java/org/opensearch/analysis/common/ElisionFilterFactoryTests.java b/modules/analysis-common/src/test/java/org/opensearch/analysis/common/ElisionFilterFactoryTests.java
index 164068eab5e1f..fc5c9ce49bbc9 100644
--- a/modules/analysis-common/src/test/java/org/opensearch/analysis/common/ElisionFilterFactoryTests.java
+++ b/modules/analysis-common/src/test/java/org/opensearch/analysis/common/ElisionFilterFactoryTests.java
@@ -49,7 +49,7 @@ public void testElisionFilterWithNoArticles() throws IOException {
IllegalArgumentException e = expectThrows(
IllegalArgumentException.class,
- () -> AnalysisTestsHelper.createTestAnalysisFromSettings(settings, new CommonAnalysisPlugin())
+ () -> AnalysisTestsHelper.createTestAnalysisFromSettings(settings, new CommonAnalysisModulePlugin())
);
assertEquals("elision filter requires [articles] or [articles_path] setting", e.getMessage());
diff --git a/modules/analysis-common/src/test/java/org/opensearch/analysis/common/HighlighterWithAnalyzersTests.java b/modules/analysis-common/src/test/java/org/opensearch/analysis/common/HighlighterWithAnalyzersTests.java
index 57c959a4f0b65..74ed3cd79e753 100644
--- a/modules/analysis-common/src/test/java/org/opensearch/analysis/common/HighlighterWithAnalyzersTests.java
+++ b/modules/analysis-common/src/test/java/org/opensearch/analysis/common/HighlighterWithAnalyzersTests.java
@@ -65,7 +65,7 @@
public class HighlighterWithAnalyzersTests extends OpenSearchIntegTestCase {
@Override
protected Collection> nodePlugins() {
- return Arrays.asList(CommonAnalysisPlugin.class);
+ return Arrays.asList(CommonAnalysisModulePlugin.class);
}
public void testNgramHighlightingWithBrokenPositions() throws IOException {
diff --git a/modules/analysis-common/src/test/java/org/opensearch/analysis/common/KeepFilterFactoryTests.java b/modules/analysis-common/src/test/java/org/opensearch/analysis/common/KeepFilterFactoryTests.java
index 0b094e52df8a1..41f27cd8b9136 100644
--- a/modules/analysis-common/src/test/java/org/opensearch/analysis/common/KeepFilterFactoryTests.java
+++ b/modules/analysis-common/src/test/java/org/opensearch/analysis/common/KeepFilterFactoryTests.java
@@ -54,7 +54,7 @@ public void testLoadWithoutSettings() throws IOException {
OpenSearchTestCase.TestAnalysis analysis = AnalysisTestsHelper.createTestAnalysisFromClassPath(
createTempDir(),
RESOURCE,
- new CommonAnalysisPlugin()
+ new CommonAnalysisModulePlugin()
);
TokenFilterFactory tokenFilter = analysis.tokenFilter.get("keep");
Assert.assertNull(tokenFilter);
@@ -68,7 +68,7 @@ public void testLoadOverConfiguredSettings() {
.put("index.analysis.filter.broken_keep_filter.keep_words", "[\"Hello\", \"worlD\"]")
.build();
try {
- AnalysisTestsHelper.createTestAnalysisFromSettings(settings, new CommonAnalysisPlugin());
+ AnalysisTestsHelper.createTestAnalysisFromSettings(settings, new CommonAnalysisModulePlugin());
Assert.fail("path and array are configured");
} catch (IllegalArgumentException e) {} catch (IOException e) {
fail("expected IAE");
@@ -83,7 +83,7 @@ public void testKeepWordsPathSettings() {
.build();
try {
// test our none existing setup is picked up
- AnalysisTestsHelper.createTestAnalysisFromSettings(settings, new CommonAnalysisPlugin());
+ AnalysisTestsHelper.createTestAnalysisFromSettings(settings, new CommonAnalysisModulePlugin());
fail("expected an exception due to non existent keep_words_path");
} catch (IllegalArgumentException e) {} catch (IOException e) {
fail("expected IAE");
@@ -92,7 +92,7 @@ public void testKeepWordsPathSettings() {
settings = Settings.builder().put(settings).putList("index.analysis.filter.non_broken_keep_filter.keep_words", "test").build();
try {
// test our none existing setup is picked up
- AnalysisTestsHelper.createTestAnalysisFromSettings(settings, new CommonAnalysisPlugin());
+ AnalysisTestsHelper.createTestAnalysisFromSettings(settings, new CommonAnalysisModulePlugin());
fail("expected an exception indicating that you can't use [keep_words_path] with [keep_words] ");
} catch (IllegalArgumentException e) {} catch (IOException e) {
fail("expected IAE");
@@ -104,7 +104,7 @@ public void testCaseInsensitiveMapping() throws IOException {
OpenSearchTestCase.TestAnalysis analysis = AnalysisTestsHelper.createTestAnalysisFromClassPath(
createTempDir(),
RESOURCE,
- new CommonAnalysisPlugin()
+ new CommonAnalysisModulePlugin()
);
TokenFilterFactory tokenFilter = analysis.tokenFilter.get("my_keep_filter");
assertThat(tokenFilter, instanceOf(KeepWordFilterFactory.class));
@@ -119,7 +119,7 @@ public void testCaseSensitiveMapping() throws IOException {
OpenSearchTestCase.TestAnalysis analysis = AnalysisTestsHelper.createTestAnalysisFromClassPath(
createTempDir(),
RESOURCE,
- new CommonAnalysisPlugin()
+ new CommonAnalysisModulePlugin()
);
TokenFilterFactory tokenFilter = analysis.tokenFilter.get("my_case_sensitive_keep_filter");
assertThat(tokenFilter, instanceOf(KeepWordFilterFactory.class));
diff --git a/modules/analysis-common/src/test/java/org/opensearch/analysis/common/KeepTypesFilterFactoryTests.java b/modules/analysis-common/src/test/java/org/opensearch/analysis/common/KeepTypesFilterFactoryTests.java
index 1f1021b4bfe66..eaab746be26dc 100644
--- a/modules/analysis-common/src/test/java/org/opensearch/analysis/common/KeepTypesFilterFactoryTests.java
+++ b/modules/analysis-common/src/test/java/org/opensearch/analysis/common/KeepTypesFilterFactoryTests.java
@@ -63,7 +63,10 @@ public void testKeepTypesInclude() throws IOException {
);
}
Settings settings = settingsBuilder.build();
- OpenSearchTestCase.TestAnalysis analysis = AnalysisTestsHelper.createTestAnalysisFromSettings(settings, new CommonAnalysisPlugin());
+ OpenSearchTestCase.TestAnalysis analysis = AnalysisTestsHelper.createTestAnalysisFromSettings(
+ settings,
+ new CommonAnalysisModulePlugin()
+ );
TokenFilterFactory tokenFilter = analysis.tokenFilter.get("keep_numbers");
assertThat(tokenFilter, instanceOf(KeepTypesFilterFactory.class));
String source = "Hello 123 world";
@@ -80,7 +83,10 @@ public void testKeepTypesExclude() throws IOException {
.putList(BASE_SETTING + "." + KeepTypesFilterFactory.KEEP_TYPES_KEY, new String[] { "", "" })
.put(BASE_SETTING + "." + KeepTypesFilterFactory.KEEP_TYPES_MODE_KEY, KeepTypesFilterFactory.KeepTypesMode.EXCLUDE)
.build();
- OpenSearchTestCase.TestAnalysis analysis = AnalysisTestsHelper.createTestAnalysisFromSettings(settings, new CommonAnalysisPlugin());
+ OpenSearchTestCase.TestAnalysis analysis = AnalysisTestsHelper.createTestAnalysisFromSettings(
+ settings,
+ new CommonAnalysisModulePlugin()
+ );
TokenFilterFactory tokenFilter = analysis.tokenFilter.get("keep_numbers");
assertThat(tokenFilter, instanceOf(KeepTypesFilterFactory.class));
String source = "Hello 123 world";
@@ -99,7 +105,7 @@ public void testKeepTypesException() throws IOException {
.build();
IllegalArgumentException ex = expectThrows(
IllegalArgumentException.class,
- () -> AnalysisTestsHelper.createTestAnalysisFromSettings(settings, new CommonAnalysisPlugin())
+ () -> AnalysisTestsHelper.createTestAnalysisFromSettings(settings, new CommonAnalysisModulePlugin())
);
assertEquals("`keep_types` tokenfilter mode can only be [include] or [exclude] but was [bad_parameter].", ex.getMessage());
}
diff --git a/modules/analysis-common/src/test/java/org/opensearch/analysis/common/KeywordMarkerFilterFactoryTests.java b/modules/analysis-common/src/test/java/org/opensearch/analysis/common/KeywordMarkerFilterFactoryTests.java
index 40e354785ddbe..f9c5a25444ed0 100644
--- a/modules/analysis-common/src/test/java/org/opensearch/analysis/common/KeywordMarkerFilterFactoryTests.java
+++ b/modules/analysis-common/src/test/java/org/opensearch/analysis/common/KeywordMarkerFilterFactoryTests.java
@@ -65,7 +65,7 @@ public void testKeywordSet() throws IOException {
.put("index.analysis.analyzer.my_keyword.filter", "my_keyword, porter_stem")
.put(Environment.PATH_HOME_SETTING.getKey(), createTempDir().toString())
.build();
- TestAnalysis analysis = AnalysisTestsHelper.createTestAnalysisFromSettings(settings, new CommonAnalysisPlugin());
+ TestAnalysis analysis = AnalysisTestsHelper.createTestAnalysisFromSettings(settings, new CommonAnalysisModulePlugin());
TokenFilterFactory tokenFilter = analysis.tokenFilter.get("my_keyword");
assertThat(tokenFilter, instanceOf(KeywordMarkerTokenFilterFactory.class));
TokenStream filter = tokenFilter.create(new WhitespaceTokenizer());
@@ -87,7 +87,7 @@ public void testKeywordPattern() throws IOException {
.put("index.analysis.analyzer.my_keyword.filter", "my_keyword, porter_stem")
.put(Environment.PATH_HOME_SETTING.getKey(), createTempDir().toString())
.build();
- TestAnalysis analysis = AnalysisTestsHelper.createTestAnalysisFromSettings(settings, new CommonAnalysisPlugin());
+ TestAnalysis analysis = AnalysisTestsHelper.createTestAnalysisFromSettings(settings, new CommonAnalysisModulePlugin());
TokenFilterFactory tokenFilter = analysis.tokenFilter.get("my_keyword");
assertThat(tokenFilter, instanceOf(KeywordMarkerTokenFilterFactory.class));
TokenStream filter = tokenFilter.create(new WhitespaceTokenizer());
@@ -112,7 +112,7 @@ public void testCannotSpecifyBothKeywordsAndPattern() throws IOException {
.build();
IllegalArgumentException e = expectThrows(
IllegalArgumentException.class,
- () -> AnalysisTestsHelper.createTestAnalysisFromSettings(settings, new CommonAnalysisPlugin())
+ () -> AnalysisTestsHelper.createTestAnalysisFromSettings(settings, new CommonAnalysisModulePlugin())
);
assertEquals("cannot specify both `keywords_pattern` and `keywords` or `keywords_path`", e.getMessage());
}
diff --git a/modules/analysis-common/src/test/java/org/opensearch/analysis/common/LimitTokenCountFilterFactoryTests.java b/modules/analysis-common/src/test/java/org/opensearch/analysis/common/LimitTokenCountFilterFactoryTests.java
index 76471fd98e5fe..99708045b0be2 100644
--- a/modules/analysis-common/src/test/java/org/opensearch/analysis/common/LimitTokenCountFilterFactoryTests.java
+++ b/modules/analysis-common/src/test/java/org/opensearch/analysis/common/LimitTokenCountFilterFactoryTests.java
@@ -119,7 +119,7 @@ public void testSettings() throws IOException {
}
private static OpenSearchTestCase.TestAnalysis createTestAnalysisFromSettings(Settings settings) throws IOException {
- return AnalysisTestsHelper.createTestAnalysisFromSettings(settings, new CommonAnalysisPlugin());
+ return AnalysisTestsHelper.createTestAnalysisFromSettings(settings, new CommonAnalysisModulePlugin());
}
}
diff --git a/modules/analysis-common/src/test/java/org/opensearch/analysis/common/MassiveWordListTests.java b/modules/analysis-common/src/test/java/org/opensearch/analysis/common/MassiveWordListTests.java
index 390e36c4ca0a0..41f60e1264b5c 100644
--- a/modules/analysis-common/src/test/java/org/opensearch/analysis/common/MassiveWordListTests.java
+++ b/modules/analysis-common/src/test/java/org/opensearch/analysis/common/MassiveWordListTests.java
@@ -43,7 +43,7 @@ public class MassiveWordListTests extends OpenSearchSingleNodeTestCase {
@Override
protected Collection> getPlugins() {
- return Collections.singleton(CommonAnalysisPlugin.class);
+ return Collections.singleton(CommonAnalysisModulePlugin.class);
}
public void testCreateIndexWithMassiveWordList() {
diff --git a/modules/analysis-common/src/test/java/org/opensearch/analysis/common/MinHashFilterFactoryTests.java b/modules/analysis-common/src/test/java/org/opensearch/analysis/common/MinHashFilterFactoryTests.java
index 514c53f17456c..e86a939dc857b 100644
--- a/modules/analysis-common/src/test/java/org/opensearch/analysis/common/MinHashFilterFactoryTests.java
+++ b/modules/analysis-common/src/test/java/org/opensearch/analysis/common/MinHashFilterFactoryTests.java
@@ -50,7 +50,10 @@ public void testDefault() throws IOException {
int default_bucket_size = 512;
int default_hash_set_size = 1;
Settings settings = Settings.builder().put(Environment.PATH_HOME_SETTING.getKey(), createTempDir().toString()).build();
- OpenSearchTestCase.TestAnalysis analysis = AnalysisTestsHelper.createTestAnalysisFromSettings(settings, new CommonAnalysisPlugin());
+ OpenSearchTestCase.TestAnalysis analysis = AnalysisTestsHelper.createTestAnalysisFromSettings(
+ settings,
+ new CommonAnalysisModulePlugin()
+ );
TokenFilterFactory tokenFilter = analysis.tokenFilter.get("min_hash");
String source = "the quick brown fox";
Tokenizer tokenizer = new WhitespaceTokenizer();
@@ -70,7 +73,10 @@ public void testSettings() throws IOException {
.put("index.analysis.filter.test_min_hash.with_rotation", false)
.put(Environment.PATH_HOME_SETTING.getKey(), createTempDir().toString())
.build();
- OpenSearchTestCase.TestAnalysis analysis = AnalysisTestsHelper.createTestAnalysisFromSettings(settings, new CommonAnalysisPlugin());
+ OpenSearchTestCase.TestAnalysis analysis = AnalysisTestsHelper.createTestAnalysisFromSettings(
+ settings,
+ new CommonAnalysisModulePlugin()
+ );
TokenFilterFactory tokenFilter = analysis.tokenFilter.get("test_min_hash");
String source = "sushi";
Tokenizer tokenizer = new WhitespaceTokenizer();
diff --git a/modules/analysis-common/src/test/java/org/opensearch/analysis/common/MultiplexerTokenFilterTests.java b/modules/analysis-common/src/test/java/org/opensearch/analysis/common/MultiplexerTokenFilterTests.java
index 167f61464da1b..e9dfa299871e5 100644
--- a/modules/analysis-common/src/test/java/org/opensearch/analysis/common/MultiplexerTokenFilterTests.java
+++ b/modules/analysis-common/src/test/java/org/opensearch/analysis/common/MultiplexerTokenFilterTests.java
@@ -65,7 +65,7 @@ public void testMultiplexingFilter() throws IOException {
IndexAnalyzers indexAnalyzers = new AnalysisModule(
TestEnvironment.newEnvironment(settings),
- Collections.singletonList(new CommonAnalysisPlugin())
+ Collections.singletonList(new CommonAnalysisModulePlugin())
).getAnalysisRegistry().build(idxSettings);
try (NamedAnalyzer analyzer = indexAnalyzers.get("myAnalyzer")) {
@@ -99,7 +99,7 @@ public void testMultiplexingNoOriginal() throws IOException {
IndexAnalyzers indexAnalyzers = new AnalysisModule(
TestEnvironment.newEnvironment(settings),
- Collections.singletonList(new CommonAnalysisPlugin())
+ Collections.singletonList(new CommonAnalysisModulePlugin())
).getAnalysisRegistry().build(idxSettings);
try (NamedAnalyzer analyzer = indexAnalyzers.get("myAnalyzer")) {
diff --git a/modules/analysis-common/src/test/java/org/opensearch/analysis/common/NGramTokenFilterFactoryTests.java b/modules/analysis-common/src/test/java/org/opensearch/analysis/common/NGramTokenFilterFactoryTests.java
index 85090648096d1..e5f558b1c2fdd 100644
--- a/modules/analysis-common/src/test/java/org/opensearch/analysis/common/NGramTokenFilterFactoryTests.java
+++ b/modules/analysis-common/src/test/java/org/opensearch/analysis/common/NGramTokenFilterFactoryTests.java
@@ -52,7 +52,7 @@ public void testDefault() throws IOException {
.put(Environment.PATH_HOME_SETTING.getKey(), createTempDir().toString())
.put("index.analysis.filter.my_ngram.type", "ngram")
.build(),
- new CommonAnalysisPlugin()
+ new CommonAnalysisModulePlugin()
);
TokenFilterFactory tokenFilter = analysis.tokenFilter.get("my_ngram");
String source = "foo";
@@ -69,7 +69,7 @@ public void testPreserveOriginal() throws IOException {
.put("index.analysis.filter.my_ngram.type", "ngram")
.put("index.analysis.filter.my_ngram.preserve_original", true)
.build(),
- new CommonAnalysisPlugin()
+ new CommonAnalysisModulePlugin()
);
TokenFilterFactory tokenFilter = analysis.tokenFilter.get("my_ngram");
String source = "foo";
diff --git a/modules/analysis-common/src/test/java/org/opensearch/analysis/common/PatternCaptureTokenFilterTests.java b/modules/analysis-common/src/test/java/org/opensearch/analysis/common/PatternCaptureTokenFilterTests.java
index 5cd18a5b01f18..a3dc75fd37671 100644
--- a/modules/analysis-common/src/test/java/org/opensearch/analysis/common/PatternCaptureTokenFilterTests.java
+++ b/modules/analysis-common/src/test/java/org/opensearch/analysis/common/PatternCaptureTokenFilterTests.java
@@ -55,7 +55,7 @@ public void testPatternCaptureTokenFilter() throws Exception {
.build();
IndexSettings idxSettings = IndexSettingsModule.newIndexSettings("index", settings);
- IndexAnalyzers indexAnalyzers = createTestAnalysis(idxSettings, settings, new CommonAnalysisPlugin()).indexAnalyzers;
+ IndexAnalyzers indexAnalyzers = createTestAnalysis(idxSettings, settings, new CommonAnalysisModulePlugin()).indexAnalyzers;
NamedAnalyzer analyzer1 = indexAnalyzers.get("single");
assertTokenStreamContents(analyzer1.tokenStream("test", "foobarbaz"), new String[] { "foobarbaz", "foobar", "foo" });
diff --git a/modules/analysis-common/src/test/java/org/opensearch/analysis/common/PredicateTokenScriptFilterTests.java b/modules/analysis-common/src/test/java/org/opensearch/analysis/common/PredicateTokenScriptFilterTests.java
index c16f4f37846ec..b31f4020ef627 100644
--- a/modules/analysis-common/src/test/java/org/opensearch/analysis/common/PredicateTokenScriptFilterTests.java
+++ b/modules/analysis-common/src/test/java/org/opensearch/analysis/common/PredicateTokenScriptFilterTests.java
@@ -81,7 +81,7 @@ public FactoryType compile(Script script, ScriptContext FactoryType compile(Script script, ScriptContext { indexAnalyzers = createTestAnalysis(idxSettings, settings, new CommonAnalysisPlugin()).indexAnalyzers; }
+ () -> { indexAnalyzers = createTestAnalysis(idxSettings, settings, new CommonAnalysisModulePlugin()).indexAnalyzers; }
);
}
@@ -259,7 +259,7 @@ public void testTokenFiltersBypassSynonymAnalysis() throws IOException {
String[] bypassingFactories = new String[] { "dictionary_decompounder" };
- CommonAnalysisPlugin plugin = new CommonAnalysisPlugin();
+ CommonAnalysisModulePlugin plugin = new CommonAnalysisModulePlugin();
for (String factory : bypassingFactories) {
TokenFilterFactory tff = plugin.getTokenFilters().get(factory).get(idxSettings, null, factory, settings);
TokenizerFactory tok = new KeywordTokenizerFactory(idxSettings, null, "keyword", settings);
@@ -294,7 +294,7 @@ public void testPreconfiguredTokenFilters() throws IOException {
.build();
IndexSettings idxSettings = IndexSettingsModule.newIndexSettings("index", settings);
- CommonAnalysisPlugin plugin = new CommonAnalysisPlugin();
+ CommonAnalysisModulePlugin plugin = new CommonAnalysisModulePlugin();
for (PreConfiguredTokenFilter tf : plugin.getPreConfiguredTokenFilters()) {
if (disallowedFilters.contains(tf.getName())) {
@@ -319,7 +319,7 @@ public void testDisallowedTokenFilters() throws IOException {
.put("output_unigrams", "true")
.build();
IndexSettings idxSettings = IndexSettingsModule.newIndexSettings("index", settings);
- CommonAnalysisPlugin plugin = new CommonAnalysisPlugin();
+ CommonAnalysisModulePlugin plugin = new CommonAnalysisModulePlugin();
String[] disallowedFactories = new String[] {
"multiplexer",
diff --git a/modules/analysis-common/src/test/java/org/opensearch/analysis/common/TrimTokenFilterTests.java b/modules/analysis-common/src/test/java/org/opensearch/analysis/common/TrimTokenFilterTests.java
index 3ea9c526052f2..a5419df92db07 100644
--- a/modules/analysis-common/src/test/java/org/opensearch/analysis/common/TrimTokenFilterTests.java
+++ b/modules/analysis-common/src/test/java/org/opensearch/analysis/common/TrimTokenFilterTests.java
@@ -49,7 +49,10 @@ public void testNormalizer() throws IOException {
.putList("index.analysis.normalizer.my_normalizer.filter", "trim")
.put(Environment.PATH_HOME_SETTING.getKey(), createTempDir().toString())
.build();
- OpenSearchTestCase.TestAnalysis analysis = AnalysisTestsHelper.createTestAnalysisFromSettings(settings, new CommonAnalysisPlugin());
+ OpenSearchTestCase.TestAnalysis analysis = AnalysisTestsHelper.createTestAnalysisFromSettings(
+ settings,
+ new CommonAnalysisModulePlugin()
+ );
assertNull(analysis.indexAnalyzers.get("my_normalizer"));
NamedAnalyzer normalizer = analysis.indexAnalyzers.getNormalizer("my_normalizer");
assertNotNull(normalizer);
diff --git a/modules/analysis-common/src/test/java/org/opensearch/analysis/common/WordDelimiterGraphTokenFilterFactoryTests.java b/modules/analysis-common/src/test/java/org/opensearch/analysis/common/WordDelimiterGraphTokenFilterFactoryTests.java
index 102182f381128..7a717fe7fe22e 100644
--- a/modules/analysis-common/src/test/java/org/opensearch/analysis/common/WordDelimiterGraphTokenFilterFactoryTests.java
+++ b/modules/analysis-common/src/test/java/org/opensearch/analysis/common/WordDelimiterGraphTokenFilterFactoryTests.java
@@ -64,7 +64,7 @@ public void testMultiTerms() throws IOException {
.put("index.analysis.filter.my_word_delimiter.catenate_all", "true")
.put("index.analysis.filter.my_word_delimiter.preserve_original", "true")
.build(),
- new CommonAnalysisPlugin()
+ new CommonAnalysisModulePlugin()
);
TokenFilterFactory tokenFilter = analysis.tokenFilter.get("my_word_delimiter");
@@ -114,7 +114,7 @@ public void testPartsAndCatenate() throws IOException {
.put("index.analysis.filter.my_word_delimiter.catenate_words", "true")
.put("index.analysis.filter.my_word_delimiter.generate_word_parts", "true")
.build(),
- new CommonAnalysisPlugin()
+ new CommonAnalysisModulePlugin()
);
TokenFilterFactory tokenFilter = analysis.tokenFilter.get("my_word_delimiter");
String source = "PowerShot";
@@ -146,7 +146,7 @@ public void testAdjustingOffsets() throws IOException {
.put("index.analysis.filter.my_word_delimiter.generate_word_parts", "true")
.put("index.analysis.filter.my_word_delimiter.adjust_offsets", "false")
.build(),
- new CommonAnalysisPlugin()
+ new CommonAnalysisModulePlugin()
);
TokenFilterFactory tokenFilter = analysis.tokenFilter.get("my_word_delimiter");
String source = "PowerShot";
@@ -181,7 +181,10 @@ public void testIgnoreKeywords() throws IOException {
.put("index.analysis.analyzer.my_analyzer.filter", "my_keyword, my_word_delimiter")
.put(Environment.PATH_HOME_SETTING.getKey(), createTempDir().toString())
.build();
- OpenSearchTestCase.TestAnalysis analysis = AnalysisTestsHelper.createTestAnalysisFromSettings(settings, new CommonAnalysisPlugin());
+ OpenSearchTestCase.TestAnalysis analysis = AnalysisTestsHelper.createTestAnalysisFromSettings(
+ settings,
+ new CommonAnalysisModulePlugin()
+ );
String source = "PowerShot PowerHungry";
int[] expectedStartOffsets = new int[] { 0, 5, 10, 15 };
int[] expectedEndOffsets = new int[] { 5, 9, 15, 21 };
@@ -191,7 +194,7 @@ public void testIgnoreKeywords() throws IOException {
// test with keywords but ignore_keywords is set as true
settings = Settings.builder().put(settings).put("index.analysis.filter.my_word_delimiter.ignore_keywords", "true").build();
- analysis = AnalysisTestsHelper.createTestAnalysisFromSettings(settings, new CommonAnalysisPlugin());
+ analysis = AnalysisTestsHelper.createTestAnalysisFromSettings(settings, new CommonAnalysisModulePlugin());
analyzer = analysis.indexAnalyzers.get("my_analyzer");
expectedStartOffsets = new int[] { 0, 5, 10 };
expectedEndOffsets = new int[] { 5, 9, 21 };
@@ -213,7 +216,7 @@ public void testPreconfiguredFilter() throws IOException {
try (
IndexAnalyzers indexAnalyzers = new AnalysisModule(
TestEnvironment.newEnvironment(settings),
- Collections.singletonList(new CommonAnalysisPlugin())
+ Collections.singletonList(new CommonAnalysisModulePlugin())
).getAnalysisRegistry().build(idxSettings)
) {
diff --git a/modules/analysis-common/src/test/java/org/opensearch/analysis/common/WordDelimiterTokenFilterFactoryTests.java b/modules/analysis-common/src/test/java/org/opensearch/analysis/common/WordDelimiterTokenFilterFactoryTests.java
index ea37fd5ce9546..b6e064f72630a 100644
--- a/modules/analysis-common/src/test/java/org/opensearch/analysis/common/WordDelimiterTokenFilterFactoryTests.java
+++ b/modules/analysis-common/src/test/java/org/opensearch/analysis/common/WordDelimiterTokenFilterFactoryTests.java
@@ -58,7 +58,7 @@ public void testPartsAndCatenate() throws IOException {
.put("index.analysis.filter.my_word_delimiter.catenate_words", "true")
.put("index.analysis.filter.my_word_delimiter.generate_word_parts", "true")
.build(),
- new CommonAnalysisPlugin()
+ new CommonAnalysisModulePlugin()
);
TokenFilterFactory tokenFilter = analysis.tokenFilter.get("my_word_delimiter");
String source = "PowerShot";
diff --git a/modules/geo/build.gradle b/modules/geo/build.gradle
index d78e83ec7c4c6..0b8e623c24ac6 100644
--- a/modules/geo/build.gradle
+++ b/modules/geo/build.gradle
@@ -28,10 +28,11 @@
* under the License.
*/
apply plugin: 'opensearch.yaml-rest-test'
+apply plugin: 'opensearch.internal-cluster-test'
opensearchplugin {
- description 'Placeholder plugin for geospatial features in OpenSearch. only registers geo_shape field mapper for now'
- classname 'org.opensearch.geo.GeoPlugin'
+ description 'Plugin for geospatial features in OpenSearch. Registering the geo_shape and aggregations GeoBounds on Geo_Shape and Geo_Point'
+ classname 'org.opensearch.geo.GeoModulePlugin'
}
restResources {
@@ -42,4 +43,3 @@ restResources {
artifacts {
restTests(project.file('src/yamlRestTest/resources/rest-api-spec/test'))
}
-test.enabled = false
diff --git a/modules/geo/src/internalClusterTest/java/org/opensearch/geo/GeoModulePluginIntegTestCase.java b/modules/geo/src/internalClusterTest/java/org/opensearch/geo/GeoModulePluginIntegTestCase.java
new file mode 100644
index 0000000000000..7dc6f2c1b89b7
--- /dev/null
+++ b/modules/geo/src/internalClusterTest/java/org/opensearch/geo/GeoModulePluginIntegTestCase.java
@@ -0,0 +1,47 @@
+/*
+ * SPDX-License-Identifier: Apache-2.0
+ *
+ * The OpenSearch Contributors require contributions made to
+ * this file be licensed under the Apache-2.0 license or a
+ * compatible open source license.
+ */
+
+package org.opensearch.geo;
+
+import org.opensearch.index.mapper.GeoShapeFieldMapper;
+import org.opensearch.plugins.Plugin;
+import org.opensearch.test.OpenSearchIntegTestCase;
+import org.opensearch.test.TestGeoShapeFieldMapperPlugin;
+
+import java.util.Collection;
+import java.util.Collections;
+
+/**
+ * This is the base class for all the Geo related integration tests. Use this class to add the features and settings
+ * for the test cluster on which integration tests are running.
+ */
+public abstract class GeoModulePluginIntegTestCase extends OpenSearchIntegTestCase {
+ /**
+ * Returns a collection of plugins that should be loaded on each node for doing the integration tests. As this
+ * geo plugin is not getting packaged in a zip, we need to load it before the tests run.
+ *
+ * @return List of {@link Plugin}
+ */
+ @Override
+ protected Collection<Class<? extends Plugin>> nodePlugins() {
+ return Collections.singletonList(GeoModulePlugin.class);
+ }
+
+ /**
+ * This was added as a backdoor to Mock the implementation of {@link GeoShapeFieldMapper} which was coming from
+ * {@link GeoModulePlugin}. Mock implementation is {@link TestGeoShapeFieldMapperPlugin}. Now we are using the
+ * {@link GeoModulePlugin} in our integration tests we need to override this functionality to avoid multiple mapper
+ * error.
+ *
+ * @return boolean
+ */
+ @Override
+ protected boolean addMockGeoShapeFieldMapper() {
+ return false;
+ }
+}
diff --git a/modules/geo/src/internalClusterTest/java/org/opensearch/geo/search/MissingValueIT.java b/modules/geo/src/internalClusterTest/java/org/opensearch/geo/search/MissingValueIT.java
new file mode 100644
index 0000000000000..2ac73728b2dab
--- /dev/null
+++ b/modules/geo/src/internalClusterTest/java/org/opensearch/geo/search/MissingValueIT.java
@@ -0,0 +1,59 @@
+/*
+ * SPDX-License-Identifier: Apache-2.0
+ *
+ * The OpenSearch Contributors require contributions made to
+ * this file be licensed under the Apache-2.0 license or a
+ * compatible open source license.
+ */
+
+package org.opensearch.geo.search;
+
+import org.opensearch.action.search.SearchResponse;
+import org.opensearch.geo.GeoModulePluginIntegTestCase;
+import org.opensearch.geo.search.aggregations.metrics.GeoBounds;
+import org.opensearch.geo.tests.common.AggregationBuilders;
+import org.opensearch.test.OpenSearchIntegTestCase;
+
+import static org.opensearch.test.hamcrest.OpenSearchAssertions.assertAcked;
+import static org.opensearch.test.hamcrest.OpenSearchAssertions.assertSearchResponse;
+import static org.hamcrest.Matchers.closeTo;
+
+@OpenSearchIntegTestCase.SuiteScopeTestCase
+public class MissingValueIT extends GeoModulePluginIntegTestCase {
+
+ @Override
+ protected void setupSuiteScopeCluster() throws Exception {
+ assertAcked(prepareCreate("idx").setMapping("date", "type=date", "location", "type=geo_point", "str", "type=keyword").get());
+ indexRandom(
+ true,
+ client().prepareIndex("idx").setId("1").setSource(),
+ client().prepareIndex("idx")
+ .setId("2")
+ .setSource("str", "foo", "long", 3L, "double", 5.5, "date", "2015-05-07", "location", "1,2")
+ );
+ }
+
+ public void testUnmappedGeoBounds() {
+ SearchResponse response = client().prepareSearch("idx")
+ .addAggregation(AggregationBuilders.geoBounds("bounds").field("non_existing_field").missing("2,1"))
+ .get();
+ assertSearchResponse(response);
+ GeoBounds bounds = response.getAggregations().get("bounds");
+ assertThat(bounds.bottomRight().lat(), closeTo(2.0, 1E-5));
+ assertThat(bounds.bottomRight().lon(), closeTo(1.0, 1E-5));
+ assertThat(bounds.topLeft().lat(), closeTo(2.0, 1E-5));
+ assertThat(bounds.topLeft().lon(), closeTo(1.0, 1E-5));
+ }
+
+ public void testGeoBounds() {
+ SearchResponse response = client().prepareSearch("idx")
+ .addAggregation(AggregationBuilders.geoBounds("bounds").field("location").missing("2,1"))
+ .get();
+ assertSearchResponse(response);
+ GeoBounds bounds = response.getAggregations().get("bounds");
+ assertThat(bounds.bottomRight().lat(), closeTo(1.0, 1E-5));
+ assertThat(bounds.bottomRight().lon(), closeTo(2.0, 1E-5));
+ assertThat(bounds.topLeft().lat(), closeTo(2.0, 1E-5));
+ assertThat(bounds.topLeft().lon(), closeTo(1.0, 1E-5));
+ }
+}
diff --git a/modules/geo/src/internalClusterTest/java/org/opensearch/geo/search/aggregations/metrics/AbstractGeoAggregatorTestCaseModulePlugin.java b/modules/geo/src/internalClusterTest/java/org/opensearch/geo/search/aggregations/metrics/AbstractGeoAggregatorTestCaseModulePlugin.java
new file mode 100644
index 0000000000000..0065cca7d6101
--- /dev/null
+++ b/modules/geo/src/internalClusterTest/java/org/opensearch/geo/search/aggregations/metrics/AbstractGeoAggregatorTestCaseModulePlugin.java
@@ -0,0 +1,295 @@
+/*
+ * SPDX-License-Identifier: Apache-2.0
+ *
+ * The OpenSearch Contributors require contributions made to
+ * this file be licensed under the Apache-2.0 license or a
+ * compatible open source license.
+ */
+
+package org.opensearch.geo.search.aggregations.metrics;
+
+import com.carrotsearch.hppc.ObjectIntHashMap;
+import com.carrotsearch.hppc.ObjectIntMap;
+import com.carrotsearch.hppc.ObjectObjectHashMap;
+import com.carrotsearch.hppc.ObjectObjectMap;
+import org.opensearch.action.index.IndexRequestBuilder;
+import org.opensearch.action.search.SearchResponse;
+import org.opensearch.common.Strings;
+import org.opensearch.common.document.DocumentField;
+import org.opensearch.common.geo.GeoPoint;
+import org.opensearch.common.settings.Settings;
+import org.opensearch.common.xcontent.ToXContent;
+import org.opensearch.common.xcontent.XContentBuilder;
+import org.opensearch.common.xcontent.XContentFactory;
+import org.opensearch.geo.GeoModulePluginIntegTestCase;
+import org.opensearch.geo.tests.common.RandomGeoGenerator;
+import org.opensearch.geometry.utils.Geohash;
+import org.opensearch.search.SearchHit;
+import org.opensearch.search.sort.SortBuilders;
+import org.opensearch.search.sort.SortOrder;
+
+import java.util.ArrayList;
+import java.util.List;
+
+import static org.hamcrest.Matchers.equalTo;
+import static org.opensearch.common.xcontent.XContentFactory.jsonBuilder;
+import static org.opensearch.test.hamcrest.OpenSearchAssertions.assertAcked;
+import static org.opensearch.test.hamcrest.OpenSearchAssertions.assertSearchResponse;
+
+/**
+ * This is base class for all Geo Aggregations Integration Tests. This class is similar to what we have in the server
+ * folder of the OpenSearch repo. As part of moving the Geo based aggregation into separate module and plugin we need
+ * to copy the code as we cannot depend on this class.
+ * GitHub issue
+ */
+public abstract class AbstractGeoAggregatorTestCaseModulePlugin extends GeoModulePluginIntegTestCase {
+
+ protected static final String SINGLE_VALUED_FIELD_NAME = "geo_value";
+ protected static final String MULTI_VALUED_FIELD_NAME = "geo_values";
+ protected static final String NUMBER_FIELD_NAME = "l_values";
+ protected static final String UNMAPPED_IDX_NAME = "idx_unmapped";
+ protected static final String IDX_NAME = "idx";
+ protected static final String EMPTY_IDX_NAME = "empty_idx";
+ protected static final String DATELINE_IDX_NAME = "dateline_idx";
+ protected static final String HIGH_CARD_IDX_NAME = "high_card_idx";
+ protected static final String IDX_ZERO_NAME = "idx_zero";
+
+ protected static int numDocs;
+ protected static int numUniqueGeoPoints;
+ protected static GeoPoint[] singleValues, multiValues;
+ protected static GeoPoint singleTopLeft, singleBottomRight, multiTopLeft, multiBottomRight, singleCentroid, multiCentroid,
+ unmappedCentroid;
+ protected static ObjectIntMap<String> expectedDocCountsForGeoHash = null;
+ protected static ObjectObjectMap<String, GeoPoint> expectedCentroidsForGeoHash = null;
+ protected static final double GEOHASH_TOLERANCE = 1E-5D;
+
+ @Override
+ public void setupSuiteScopeCluster() throws Exception {
+ createIndex(UNMAPPED_IDX_NAME);
+ assertAcked(
+ prepareCreate(IDX_NAME).setMapping(
+ SINGLE_VALUED_FIELD_NAME,
+ "type=geo_point",
+ MULTI_VALUED_FIELD_NAME,
+ "type=geo_point",
+ NUMBER_FIELD_NAME,
+ "type=long",
+ "tag",
+ "type=keyword"
+ )
+ );
+
+ singleTopLeft = new GeoPoint(Double.NEGATIVE_INFINITY, Double.POSITIVE_INFINITY);
+ singleBottomRight = new GeoPoint(Double.POSITIVE_INFINITY, Double.NEGATIVE_INFINITY);
+ multiTopLeft = new GeoPoint(Double.NEGATIVE_INFINITY, Double.POSITIVE_INFINITY);
+ multiBottomRight = new GeoPoint(Double.POSITIVE_INFINITY, Double.NEGATIVE_INFINITY);
+ singleCentroid = new GeoPoint(0, 0);
+ multiCentroid = new GeoPoint(0, 0);
+ unmappedCentroid = new GeoPoint(0, 0);
+
+ numDocs = randomIntBetween(6, 20);
+ numUniqueGeoPoints = randomIntBetween(1, numDocs);
+ expectedDocCountsForGeoHash = new ObjectIntHashMap<>(numDocs * 2);
+ expectedCentroidsForGeoHash = new ObjectObjectHashMap<>(numDocs * 2);
+
+ singleValues = new GeoPoint[numUniqueGeoPoints];
+ for (int i = 0; i < singleValues.length; i++) {
+ singleValues[i] = RandomGeoGenerator.randomPoint(random());
+ updateBoundsTopLeft(singleValues[i], singleTopLeft);
+ updateBoundsBottomRight(singleValues[i], singleBottomRight);
+ }
+
+ multiValues = new GeoPoint[numUniqueGeoPoints];
+ for (int i = 0; i < multiValues.length; i++) {
+ multiValues[i] = RandomGeoGenerator.randomPoint(random());
+ updateBoundsTopLeft(multiValues[i], multiTopLeft);
+ updateBoundsBottomRight(multiValues[i], multiBottomRight);
+ }
+
+ List<IndexRequestBuilder> builders = new ArrayList<>();
+
+ GeoPoint singleVal;
+ final GeoPoint[] multiVal = new GeoPoint[2];
+ double newMVLat, newMVLon;
+ for (int i = 0; i < numDocs; i++) {
+ singleVal = singleValues[i % numUniqueGeoPoints];
+ multiVal[0] = multiValues[i % numUniqueGeoPoints];
+ multiVal[1] = multiValues[(i + 1) % numUniqueGeoPoints];
+ builders.add(
+ client().prepareIndex(IDX_NAME)
+ .setSource(
+ jsonBuilder().startObject()
+ .array(SINGLE_VALUED_FIELD_NAME, singleVal.lon(), singleVal.lat())
+ .startArray(MULTI_VALUED_FIELD_NAME)
+ .startArray()
+ .value(multiVal[0].lon())
+ .value(multiVal[0].lat())
+ .endArray()
+ .startArray()
+ .value(multiVal[1].lon())
+ .value(multiVal[1].lat())
+ .endArray()
+ .endArray()
+ .field(NUMBER_FIELD_NAME, i)
+ .field("tag", "tag" + i)
+ .endObject()
+ )
+ );
+ singleCentroid = singleCentroid.reset(
+ singleCentroid.lat() + (singleVal.lat() - singleCentroid.lat()) / (i + 1),
+ singleCentroid.lon() + (singleVal.lon() - singleCentroid.lon()) / (i + 1)
+ );
+ newMVLat = (multiVal[0].lat() + multiVal[1].lat()) / 2d;
+ newMVLon = (multiVal[0].lon() + multiVal[1].lon()) / 2d;
+ multiCentroid = multiCentroid.reset(
+ multiCentroid.lat() + (newMVLat - multiCentroid.lat()) / (i + 1),
+ multiCentroid.lon() + (newMVLon - multiCentroid.lon()) / (i + 1)
+ );
+ }
+
+ assertAcked(prepareCreate(EMPTY_IDX_NAME).setMapping(SINGLE_VALUED_FIELD_NAME, "type=geo_point"));
+
+ assertAcked(
+ prepareCreate(DATELINE_IDX_NAME).setMapping(
+ SINGLE_VALUED_FIELD_NAME,
+ "type=geo_point",
+ MULTI_VALUED_FIELD_NAME,
+ "type=geo_point",
+ NUMBER_FIELD_NAME,
+ "type=long",
+ "tag",
+ "type=keyword"
+ )
+ );
+
+ GeoPoint[] geoValues = new GeoPoint[5];
+ geoValues[0] = new GeoPoint(38, 178);
+ geoValues[1] = new GeoPoint(12, -179);
+ geoValues[2] = new GeoPoint(-24, 170);
+ geoValues[3] = new GeoPoint(32, -175);
+ geoValues[4] = new GeoPoint(-11, 178);
+
+ for (int i = 0; i < 5; i++) {
+ builders.add(
+ client().prepareIndex(DATELINE_IDX_NAME)
+ .setSource(
+ jsonBuilder().startObject()
+ .array(SINGLE_VALUED_FIELD_NAME, geoValues[i].lon(), geoValues[i].lat())
+ .field(NUMBER_FIELD_NAME, i)
+ .field("tag", "tag" + i)
+ .endObject()
+ )
+ );
+ }
+ assertAcked(
+ prepareCreate(HIGH_CARD_IDX_NAME).setSettings(Settings.builder().put("number_of_shards", 2))
+ .setMapping(
+ SINGLE_VALUED_FIELD_NAME,
+ "type=geo_point",
+ MULTI_VALUED_FIELD_NAME,
+ "type=geo_point",
+ NUMBER_FIELD_NAME,
+ "type=long,store=true",
+ "tag",
+ "type=keyword"
+ )
+ );
+
+ for (int i = 0; i < 2000; i++) {
+ singleVal = singleValues[i % numUniqueGeoPoints];
+ builders.add(
+ client().prepareIndex(HIGH_CARD_IDX_NAME)
+ .setSource(
+ jsonBuilder().startObject()
+ .array(SINGLE_VALUED_FIELD_NAME, singleVal.lon(), singleVal.lat())
+ .startArray(MULTI_VALUED_FIELD_NAME)
+ .startArray()
+ .value(multiValues[i % numUniqueGeoPoints].lon())
+ .value(multiValues[i % numUniqueGeoPoints].lat())
+ .endArray()
+ .startArray()
+ .value(multiValues[(i + 1) % numUniqueGeoPoints].lon())
+ .value(multiValues[(i + 1) % numUniqueGeoPoints].lat())
+ .endArray()
+ .endArray()
+ .field(NUMBER_FIELD_NAME, i)
+ .field("tag", "tag" + i)
+ .endObject()
+ )
+ );
+ updateGeohashBucketsCentroid(singleVal);
+ }
+
+ builders.add(
+ client().prepareIndex(IDX_ZERO_NAME)
+ .setSource(jsonBuilder().startObject().array(SINGLE_VALUED_FIELD_NAME, 0.0, 1.0).endObject())
+ );
+ assertAcked(prepareCreate(IDX_ZERO_NAME).setMapping(SINGLE_VALUED_FIELD_NAME, "type=geo_point"));
+
+ indexRandom(true, builders);
+ ensureSearchable();
+
+ // Added to debug a test failure where the terms aggregation seems to be reporting two documents with the same
+ // value for NUMBER_FIELD_NAME. This will check that after random indexing each document only has 1 value for
+ // NUMBER_FIELD_NAME and it is the correct value. Following this initial change its seems that this call was getting
+ // more that 2000 hits (actual value was 2059) so now it will also check to ensure all hits have the correct index and type.
+ SearchResponse response = client().prepareSearch(HIGH_CARD_IDX_NAME)
+ .addStoredField(NUMBER_FIELD_NAME)
+ .addSort(SortBuilders.fieldSort(NUMBER_FIELD_NAME).order(SortOrder.ASC))
+ .setSize(5000)
+ .get();
+ assertSearchResponse(response);
+ long totalHits = response.getHits().getTotalHits().value;
+ XContentBuilder builder = XContentFactory.jsonBuilder();
+ response.toXContent(builder, ToXContent.EMPTY_PARAMS);
+ logger.info("Full high_card_idx Response Content:\n{ {} }", Strings.toString(builder));
+ for (int i = 0; i < totalHits; i++) {
+ SearchHit searchHit = response.getHits().getAt(i);
+ assertThat("Hit " + i + " with id: " + searchHit.getId(), searchHit.getIndex(), equalTo("high_card_idx"));
+ DocumentField hitField = searchHit.field(NUMBER_FIELD_NAME);
+
+ assertThat("Hit " + i + " has wrong number of values", hitField.getValues().size(), equalTo(1));
+ Long value = hitField.getValue();
+ assertThat("Hit " + i + " has wrong value", value.intValue(), equalTo(i));
+ }
+ assertThat(totalHits, equalTo(2000L));
+ }
+
+ private void updateGeohashBucketsCentroid(final GeoPoint location) {
+ String hash = Geohash.stringEncode(location.lon(), location.lat(), Geohash.PRECISION);
+ for (int precision = Geohash.PRECISION; precision > 0; --precision) {
+ final String h = hash.substring(0, precision);
+ expectedDocCountsForGeoHash.put(h, expectedDocCountsForGeoHash.getOrDefault(h, 0) + 1);
+ expectedCentroidsForGeoHash.put(h, updateHashCentroid(h, location));
+ }
+ }
+
+ private GeoPoint updateHashCentroid(String hash, final GeoPoint location) {
+ GeoPoint centroid = expectedCentroidsForGeoHash.getOrDefault(hash, null);
+ if (centroid == null) {
+ return new GeoPoint(location.lat(), location.lon());
+ }
+ final int docCount = expectedDocCountsForGeoHash.get(hash);
+ final double newLon = centroid.lon() + (location.lon() - centroid.lon()) / docCount;
+ final double newLat = centroid.lat() + (location.lat() - centroid.lat()) / docCount;
+ return centroid.reset(newLat, newLon);
+ }
+
+ private void updateBoundsBottomRight(GeoPoint geoPoint, GeoPoint currentBound) {
+ if (geoPoint.lat() < currentBound.lat()) {
+ currentBound.resetLat(geoPoint.lat());
+ }
+ if (geoPoint.lon() > currentBound.lon()) {
+ currentBound.resetLon(geoPoint.lon());
+ }
+ }
+
+ private void updateBoundsTopLeft(GeoPoint geoPoint, GeoPoint currentBound) {
+ if (geoPoint.lat() > currentBound.lat()) {
+ currentBound.resetLat(geoPoint.lat());
+ }
+ if (geoPoint.lon() < currentBound.lon()) {
+ currentBound.resetLon(geoPoint.lon());
+ }
+ }
+}
diff --git a/server/src/internalClusterTest/java/org/opensearch/search/aggregations/metrics/GeoBoundsIT.java b/modules/geo/src/internalClusterTest/java/org/opensearch/geo/search/aggregations/metrics/GeoBoundsIT.java
similarity index 97%
rename from server/src/internalClusterTest/java/org/opensearch/search/aggregations/metrics/GeoBoundsIT.java
rename to modules/geo/src/internalClusterTest/java/org/opensearch/geo/search/aggregations/metrics/GeoBoundsIT.java
index 3af3b9e5212f8..5cbd98a4936e4 100644
--- a/server/src/internalClusterTest/java/org/opensearch/search/aggregations/metrics/GeoBoundsIT.java
+++ b/modules/geo/src/internalClusterTest/java/org/opensearch/geo/search/aggregations/metrics/GeoBoundsIT.java
@@ -30,7 +30,7 @@
* GitHub history for details.
*/
-package org.opensearch.search.aggregations.metrics;
+package org.opensearch.geo.search.aggregations.metrics;
import org.opensearch.action.search.SearchResponse;
import org.opensearch.common.geo.GeoPoint;
@@ -43,21 +43,21 @@
import java.util.List;
-import static org.opensearch.index.query.QueryBuilders.matchAllQuery;
-import static org.opensearch.search.aggregations.AggregationBuilders.geoBounds;
-import static org.opensearch.search.aggregations.AggregationBuilders.global;
-import static org.opensearch.search.aggregations.AggregationBuilders.terms;
-import static org.opensearch.test.hamcrest.OpenSearchAssertions.assertSearchResponse;
+import static org.hamcrest.Matchers.notNullValue;
import static org.hamcrest.Matchers.allOf;
-import static org.hamcrest.Matchers.closeTo;
-import static org.hamcrest.Matchers.equalTo;
import static org.hamcrest.Matchers.greaterThanOrEqualTo;
import static org.hamcrest.Matchers.lessThanOrEqualTo;
-import static org.hamcrest.Matchers.notNullValue;
import static org.hamcrest.Matchers.sameInstance;
+import static org.hamcrest.Matchers.equalTo;
+import static org.hamcrest.Matchers.closeTo;
+import static org.opensearch.index.query.QueryBuilders.matchAllQuery;
+import static org.opensearch.search.aggregations.AggregationBuilders.global;
+import static org.opensearch.search.aggregations.AggregationBuilders.terms;
+import static org.opensearch.test.hamcrest.OpenSearchAssertions.assertSearchResponse;
+import static org.opensearch.geo.tests.common.AggregationBuilders.geoBounds;
@OpenSearchIntegTestCase.SuiteScopeTestCase
-public class GeoBoundsIT extends AbstractGeoTestCase {
+public class GeoBoundsIT extends AbstractGeoAggregatorTestCaseModulePlugin {
private static final String aggName = "geoBounds";
public void testSingleValuedField() throws Exception {
@@ -226,7 +226,8 @@ public void testSingleValuedFieldNearDateLineWrapLongitude() throws Exception {
}
/**
- * This test forces the {@link GeoBoundsAggregator} to resize the {@link BigArray}s it uses to ensure they are resized correctly
+ * This test forces the {@link GeoBoundsAggregator} to resize the {@link BigArray}s it uses to ensure they are
+ * resized correctly
*/
public void testSingleValuedFieldAsSubAggToHighCardTermsAgg() {
SearchResponse response = client().prepareSearch(HIGH_CARD_IDX_NAME)
diff --git a/modules/geo/src/main/java/org/opensearch/geo/GeoPlugin.java b/modules/geo/src/main/java/org/opensearch/geo/GeoModulePlugin.java
similarity index 63%
rename from modules/geo/src/main/java/org/opensearch/geo/GeoPlugin.java
rename to modules/geo/src/main/java/org/opensearch/geo/GeoModulePlugin.java
index 9b898da33bb12..64aac66b7eef3 100644
--- a/modules/geo/src/main/java/org/opensearch/geo/GeoPlugin.java
+++ b/modules/geo/src/main/java/org/opensearch/geo/GeoModulePlugin.java
@@ -32,18 +32,36 @@
package org.opensearch.geo;
+import org.opensearch.geo.search.aggregations.metrics.GeoBounds;
+import org.opensearch.geo.search.aggregations.metrics.GeoBoundsAggregationBuilder;
+import org.opensearch.geo.search.aggregations.metrics.InternalGeoBounds;
import org.opensearch.index.mapper.GeoShapeFieldMapper;
import org.opensearch.index.mapper.Mapper;
import org.opensearch.plugins.MapperPlugin;
import org.opensearch.plugins.Plugin;
+import org.opensearch.plugins.SearchPlugin;
import java.util.Collections;
+import java.util.List;
import java.util.Map;
-public class GeoPlugin extends Plugin implements MapperPlugin {
+public class GeoModulePlugin extends Plugin implements MapperPlugin, SearchPlugin {
@Override
public Map<String, Mapper.TypeParser> getMappers() {
return Collections.singletonMap(GeoShapeFieldMapper.CONTENT_TYPE, new GeoShapeFieldMapper.TypeParser());
}
+
+ /**
+ * Registering {@link GeoBounds} aggregation on GeoPoint field.
+ */
+ @Override
+ public List<AggregationSpec> getAggregations() {
+ final AggregationSpec spec = new AggregationSpec(
+ GeoBoundsAggregationBuilder.NAME,
+ GeoBoundsAggregationBuilder::new,
+ GeoBoundsAggregationBuilder.PARSER
+ ).addResultReader(InternalGeoBounds::new).setAggregatorRegistrar(GeoBoundsAggregationBuilder::registerAggregators);
+ return Collections.singletonList(spec);
+ }
}
diff --git a/modules/geo/src/main/java/org/opensearch/geo/search/aggregations/metrics/AbstractGeoBoundsAggregator.java b/modules/geo/src/main/java/org/opensearch/geo/search/aggregations/metrics/AbstractGeoBoundsAggregator.java
new file mode 100644
index 0000000000000..4a39fa1da04eb
--- /dev/null
+++ b/modules/geo/src/main/java/org/opensearch/geo/search/aggregations/metrics/AbstractGeoBoundsAggregator.java
@@ -0,0 +1,128 @@
+/*
+ * SPDX-License-Identifier: Apache-2.0
+ *
+ * The OpenSearch Contributors require contributions made to
+ * this file be licensed under the Apache-2.0 license or a
+ * compatible open source license.
+ */
+
+package org.opensearch.geo.search.aggregations.metrics;
+
+import org.opensearch.common.lease.Releasables;
+import org.opensearch.common.util.BigArrays;
+import org.opensearch.common.util.DoubleArray;
+import org.opensearch.search.aggregations.Aggregator;
+import org.opensearch.search.aggregations.InternalAggregation;
+import org.opensearch.search.aggregations.metrics.MetricsAggregator;
+import org.opensearch.search.aggregations.support.ValuesSource;
+import org.opensearch.search.aggregations.support.ValuesSourceConfig;
+import org.opensearch.search.internal.SearchContext;
+
+import java.io.IOException;
+import java.util.Map;
+
+/**
+ * Abstract class for doing the {@link GeoBounds} Aggregation over fields of type geo_shape and geo_point.
+ *
+ * @param <T> Class extending the {@link ValuesSource} which will provide the data on which aggregation will happen.
+ * @opensearch.internal
+ */
+public abstract class AbstractGeoBoundsAggregator<T extends ValuesSource> extends MetricsAggregator {
+
+ protected final T valuesSource;
+ protected final boolean wrapLongitude;
+ protected DoubleArray tops;
+ protected DoubleArray bottoms;
+ protected DoubleArray posLefts;
+ protected DoubleArray posRights;
+ protected DoubleArray negLefts;
+ protected DoubleArray negRights;
+
+ @SuppressWarnings("unchecked")
+ protected AbstractGeoBoundsAggregator(
+ String name,
+ SearchContext searchContext,
+ Aggregator aggregator,
+ ValuesSourceConfig valuesSourceConfig,
+ boolean wrapLongitude,
+ Map<String, Object> metaData
+ ) throws IOException {
+ super(name, searchContext, aggregator, metaData);
+ this.wrapLongitude = wrapLongitude;
+ valuesSource = valuesSourceConfig.hasValues() ? (T) valuesSourceConfig.getValuesSource() : null;
+
+ if (valuesSource != null) {
+ final BigArrays bigArrays = context.bigArrays();
+ tops = bigArrays.newDoubleArray(1, false);
+ tops.fill(0, tops.size(), Double.NEGATIVE_INFINITY);
+ bottoms = bigArrays.newDoubleArray(1, false);
+ bottoms.fill(0, bottoms.size(), Double.POSITIVE_INFINITY);
+ posLefts = bigArrays.newDoubleArray(1, false);
+ posLefts.fill(0, posLefts.size(), Double.POSITIVE_INFINITY);
+ posRights = bigArrays.newDoubleArray(1, false);
+ posRights.fill(0, posRights.size(), Double.NEGATIVE_INFINITY);
+ negLefts = bigArrays.newDoubleArray(1, false);
+ negLefts.fill(0, negLefts.size(), Double.POSITIVE_INFINITY);
+ negRights = bigArrays.newDoubleArray(1, false);
+ negRights.fill(0, negRights.size(), Double.NEGATIVE_INFINITY);
+ }
+ }
+
+ /**
+ * Build an empty aggregation.
+ */
+ @Override
+ public InternalAggregation buildEmptyAggregation() {
+ return new InternalGeoBounds(
+ name,
+ Double.NEGATIVE_INFINITY,
+ Double.POSITIVE_INFINITY,
+ Double.POSITIVE_INFINITY,
+ Double.NEGATIVE_INFINITY,
+ Double.POSITIVE_INFINITY,
+ Double.NEGATIVE_INFINITY,
+ wrapLongitude,
+ metadata()
+ );
+ }
+
+ /**
+ * Build an aggregation for data that has been collected into owningBucketOrd.
+ */
+ @Override
+ public InternalAggregation buildAggregation(long owningBucketOrdinal) throws IOException {
+ if (valuesSource == null) {
+ return buildEmptyAggregation();
+ }
+ double top = tops.get(owningBucketOrdinal);
+ double bottom = bottoms.get(owningBucketOrdinal);
+ double posLeft = posLefts.get(owningBucketOrdinal);
+ double posRight = posRights.get(owningBucketOrdinal);
+ double negLeft = negLefts.get(owningBucketOrdinal);
+ double negRight = negRights.get(owningBucketOrdinal);
+ return new InternalGeoBounds(name, top, bottom, posLeft, posRight, negLeft, negRight, wrapLongitude, metadata());
+ }
+
+ @Override
+ public void doClose() {
+ Releasables.close(tops, bottoms, posLefts, posRights, negLefts, negRights);
+ }
+
+ protected void setBucketSize(final long bucket, final BigArrays bigArrays) {
+ if (bucket >= tops.size()) {
+ long from = tops.size();
+ tops = bigArrays.grow(tops, bucket + 1);
+ tops.fill(from, tops.size(), Double.NEGATIVE_INFINITY);
+ bottoms = bigArrays.resize(bottoms, tops.size());
+ bottoms.fill(from, bottoms.size(), Double.POSITIVE_INFINITY);
+ posLefts = bigArrays.resize(posLefts, tops.size());
+ posLefts.fill(from, posLefts.size(), Double.POSITIVE_INFINITY);
+ posRights = bigArrays.resize(posRights, tops.size());
+ posRights.fill(from, posRights.size(), Double.NEGATIVE_INFINITY);
+ negLefts = bigArrays.resize(negLefts, tops.size());
+ negLefts.fill(from, negLefts.size(), Double.POSITIVE_INFINITY);
+ negRights = bigArrays.resize(negRights, tops.size());
+ negRights.fill(from, negRights.size(), Double.NEGATIVE_INFINITY);
+ }
+ }
+}
diff --git a/server/src/main/java/org/opensearch/search/aggregations/metrics/GeoBounds.java b/modules/geo/src/main/java/org/opensearch/geo/search/aggregations/metrics/GeoBounds.java
similarity index 96%
rename from server/src/main/java/org/opensearch/search/aggregations/metrics/GeoBounds.java
rename to modules/geo/src/main/java/org/opensearch/geo/search/aggregations/metrics/GeoBounds.java
index 380fbce85ada7..81ef502dda130 100644
--- a/server/src/main/java/org/opensearch/search/aggregations/metrics/GeoBounds.java
+++ b/modules/geo/src/main/java/org/opensearch/geo/search/aggregations/metrics/GeoBounds.java
@@ -30,7 +30,7 @@
* GitHub history for details.
*/
-package org.opensearch.search.aggregations.metrics;
+package org.opensearch.geo.search.aggregations.metrics;
import org.opensearch.common.geo.GeoPoint;
import org.opensearch.search.aggregations.Aggregation;
diff --git a/server/src/main/java/org/opensearch/search/aggregations/metrics/GeoBoundsAggregationBuilder.java b/modules/geo/src/main/java/org/opensearch/geo/search/aggregations/metrics/GeoBoundsAggregationBuilder.java
similarity index 93%
rename from server/src/main/java/org/opensearch/search/aggregations/metrics/GeoBoundsAggregationBuilder.java
rename to modules/geo/src/main/java/org/opensearch/geo/search/aggregations/metrics/GeoBoundsAggregationBuilder.java
index 64e27fa7e13d1..b2c441f9a951c 100644
--- a/server/src/main/java/org/opensearch/search/aggregations/metrics/GeoBoundsAggregationBuilder.java
+++ b/modules/geo/src/main/java/org/opensearch/geo/search/aggregations/metrics/GeoBoundsAggregationBuilder.java
@@ -30,8 +30,9 @@
* GitHub history for details.
*/
-package org.opensearch.search.aggregations.metrics;
+package org.opensearch.geo.search.aggregations.metrics;
+import org.opensearch.common.ParseField;
import org.opensearch.common.io.stream.StreamInput;
import org.opensearch.common.io.stream.StreamOutput;
import org.opensearch.common.xcontent.ObjectParser;
@@ -40,6 +41,7 @@
import org.opensearch.search.aggregations.AggregationBuilder;
import org.opensearch.search.aggregations.AggregatorFactories;
import org.opensearch.search.aggregations.AggregatorFactory;
+import org.opensearch.search.aggregations.metrics.GeoBoundsAggregatorSupplier;
import org.opensearch.search.aggregations.support.CoreValuesSourceType;
import org.opensearch.search.aggregations.support.ValuesSourceAggregationBuilder;
import org.opensearch.search.aggregations.support.ValuesSourceConfig;
@@ -57,6 +59,7 @@
*/
public class GeoBoundsAggregationBuilder extends ValuesSourceAggregationBuilder<GeoBoundsAggregationBuilder> {
public static final String NAME = "geo_bounds";
+ private static final ParseField WRAP_LONGITUDE_FIELD = new ParseField("wrap_longitude");
public static final ValuesSourceRegistry.RegistryKey<GeoBoundsAggregatorSupplier> REGISTRY_KEY = new ValuesSourceRegistry.RegistryKey<>(
NAME,
GeoBoundsAggregatorSupplier.class
@@ -68,7 +71,7 @@ public class GeoBoundsAggregationBuilder extends ValuesSourceAggregationBuilder<
);
static {
ValuesSourceAggregationBuilder.declareFields(PARSER, false, false, false);
- PARSER.declareBoolean(GeoBoundsAggregationBuilder::wrapLongitude, GeoBoundsAggregator.WRAP_LONGITUDE_FIELD);
+ PARSER.declareBoolean(GeoBoundsAggregationBuilder::wrapLongitude, WRAP_LONGITUDE_FIELD);
}
public static void registerAggregators(ValuesSourceRegistry.Builder builder) {
@@ -121,13 +124,6 @@ public GeoBoundsAggregationBuilder wrapLongitude(boolean wrapLongitude) {
return this;
}
- /**
- * Get whether to wrap longitudes.
- */
- public boolean wrapLongitude() {
- return wrapLongitude;
- }
-
@Override
public BucketCardinality bucketCardinality() {
return BucketCardinality.NONE;
@@ -145,7 +141,7 @@ protected GeoBoundsAggregatorFactory innerBuild(
@Override
public XContentBuilder doXContentBody(XContentBuilder builder, Params params) throws IOException {
- builder.field(GeoBoundsAggregator.WRAP_LONGITUDE_FIELD.getPreferredName(), wrapLongitude);
+ builder.field(WRAP_LONGITUDE_FIELD.getPreferredName(), wrapLongitude);
return builder;
}
diff --git a/server/src/main/java/org/opensearch/search/aggregations/metrics/GeoBoundsAggregator.java b/modules/geo/src/main/java/org/opensearch/geo/search/aggregations/metrics/GeoBoundsAggregator.java
similarity index 51%
rename from server/src/main/java/org/opensearch/search/aggregations/metrics/GeoBoundsAggregator.java
rename to modules/geo/src/main/java/org/opensearch/geo/search/aggregations/metrics/GeoBoundsAggregator.java
index 054e8d4cb1c6c..a6518ea702be6 100644
--- a/server/src/main/java/org/opensearch/search/aggregations/metrics/GeoBoundsAggregator.java
+++ b/modules/geo/src/main/java/org/opensearch/geo/search/aggregations/metrics/GeoBoundsAggregator.java
@@ -30,17 +30,13 @@
* GitHub history for details.
*/
-package org.opensearch.search.aggregations.metrics;
+package org.opensearch.geo.search.aggregations.metrics;
import org.apache.lucene.index.LeafReaderContext;
-import org.opensearch.common.ParseField;
import org.opensearch.common.geo.GeoPoint;
-import org.opensearch.common.lease.Releasables;
import org.opensearch.common.util.BigArrays;
-import org.opensearch.common.util.DoubleArray;
import org.opensearch.index.fielddata.MultiGeoPointValues;
import org.opensearch.search.aggregations.Aggregator;
-import org.opensearch.search.aggregations.InternalAggregation;
import org.opensearch.search.aggregations.LeafBucketCollector;
import org.opensearch.search.aggregations.LeafBucketCollectorBase;
import org.opensearch.search.aggregations.support.ValuesSource;
@@ -51,22 +47,11 @@
import java.util.Map;
/**
- * Aggregate all docs into a geographic bounds
+ * Aggregate all docs into a geographic bounds for field GeoPoint.
*
* @opensearch.internal
*/
-final class GeoBoundsAggregator extends MetricsAggregator {
-
- static final ParseField WRAP_LONGITUDE_FIELD = new ParseField("wrap_longitude");
-
- private final ValuesSource.GeoPoint valuesSource;
- private final boolean wrapLongitude;
- DoubleArray tops;
- DoubleArray bottoms;
- DoubleArray posLefts;
- DoubleArray posRights;
- DoubleArray negLefts;
- DoubleArray negRights;
+final class GeoBoundsAggregator extends AbstractGeoBoundsAggregator {
GeoBoundsAggregator(
String name,
@@ -76,25 +61,7 @@ final class GeoBoundsAggregator extends MetricsAggregator {
boolean wrapLongitude,
Map<String, Object> metadata
) throws IOException {
- super(name, aggregationContext, parent, metadata);
- // TODO: stop expecting nulls here
- this.valuesSource = valuesSourceConfig.hasValues() ? (ValuesSource.GeoPoint) valuesSourceConfig.getValuesSource() : null;
- this.wrapLongitude = wrapLongitude;
- if (valuesSource != null) {
- final BigArrays bigArrays = context.bigArrays();
- tops = bigArrays.newDoubleArray(1, false);
- tops.fill(0, tops.size(), Double.NEGATIVE_INFINITY);
- bottoms = bigArrays.newDoubleArray(1, false);
- bottoms.fill(0, bottoms.size(), Double.POSITIVE_INFINITY);
- posLefts = bigArrays.newDoubleArray(1, false);
- posLefts.fill(0, posLefts.size(), Double.POSITIVE_INFINITY);
- posRights = bigArrays.newDoubleArray(1, false);
- posRights.fill(0, posRights.size(), Double.NEGATIVE_INFINITY);
- negLefts = bigArrays.newDoubleArray(1, false);
- negLefts.fill(0, negLefts.size(), Double.POSITIVE_INFINITY);
- negRights = bigArrays.newDoubleArray(1, false);
- negRights.fill(0, negRights.size(), Double.NEGATIVE_INFINITY);
- }
+ super(name, aggregationContext, parent, valuesSourceConfig, wrapLongitude, metadata);
}
@Override
@@ -107,25 +74,10 @@ public LeafBucketCollector getLeafCollector(LeafReaderContext ctx, LeafBucketCol
return new LeafBucketCollectorBase(sub, values) {
@Override
public void collect(int doc, long bucket) throws IOException {
- if (bucket >= tops.size()) {
- long from = tops.size();
- tops = bigArrays.grow(tops, bucket + 1);
- tops.fill(from, tops.size(), Double.NEGATIVE_INFINITY);
- bottoms = bigArrays.resize(bottoms, tops.size());
- bottoms.fill(from, bottoms.size(), Double.POSITIVE_INFINITY);
- posLefts = bigArrays.resize(posLefts, tops.size());
- posLefts.fill(from, posLefts.size(), Double.POSITIVE_INFINITY);
- posRights = bigArrays.resize(posRights, tops.size());
- posRights.fill(from, posRights.size(), Double.NEGATIVE_INFINITY);
- negLefts = bigArrays.resize(negLefts, tops.size());
- negLefts.fill(from, negLefts.size(), Double.POSITIVE_INFINITY);
- negRights = bigArrays.resize(negRights, tops.size());
- negRights.fill(from, negRights.size(), Double.NEGATIVE_INFINITY);
- }
+ setBucketSize(bucket, bigArrays);
if (values.advanceExact(doc)) {
final int valuesCount = values.docValueCount();
-
for (int i = 0; i < valuesCount; ++i) {
GeoPoint value = values.nextValue();
double top = tops.get(bucket);
@@ -163,38 +115,4 @@ public void collect(int doc, long bucket) throws IOException {
}
};
}
-
- @Override
- public InternalAggregation buildAggregation(long owningBucketOrdinal) {
- if (valuesSource == null) {
- return buildEmptyAggregation();
- }
- double top = tops.get(owningBucketOrdinal);
- double bottom = bottoms.get(owningBucketOrdinal);
- double posLeft = posLefts.get(owningBucketOrdinal);
- double posRight = posRights.get(owningBucketOrdinal);
- double negLeft = negLefts.get(owningBucketOrdinal);
- double negRight = negRights.get(owningBucketOrdinal);
- return new InternalGeoBounds(name, top, bottom, posLeft, posRight, negLeft, negRight, wrapLongitude, metadata());
- }
-
- @Override
- public InternalAggregation buildEmptyAggregation() {
- return new InternalGeoBounds(
- name,
- Double.NEGATIVE_INFINITY,
- Double.POSITIVE_INFINITY,
- Double.POSITIVE_INFINITY,
- Double.NEGATIVE_INFINITY,
- Double.POSITIVE_INFINITY,
- Double.NEGATIVE_INFINITY,
- wrapLongitude,
- metadata()
- );
- }
-
- @Override
- public void doClose() {
- Releasables.close(tops, bottoms, posLefts, posRights, negLefts, negRights);
- }
}
diff --git a/server/src/main/java/org/opensearch/search/aggregations/metrics/GeoBoundsAggregatorFactory.java b/modules/geo/src/main/java/org/opensearch/geo/search/aggregations/metrics/GeoBoundsAggregatorFactory.java
similarity index 98%
rename from server/src/main/java/org/opensearch/search/aggregations/metrics/GeoBoundsAggregatorFactory.java
rename to modules/geo/src/main/java/org/opensearch/geo/search/aggregations/metrics/GeoBoundsAggregatorFactory.java
index 2c6b75842b6f5..149e052b4db7d 100644
--- a/server/src/main/java/org/opensearch/search/aggregations/metrics/GeoBoundsAggregatorFactory.java
+++ b/modules/geo/src/main/java/org/opensearch/geo/search/aggregations/metrics/GeoBoundsAggregatorFactory.java
@@ -30,7 +30,7 @@
* GitHub history for details.
*/
-package org.opensearch.search.aggregations.metrics;
+package org.opensearch.geo.search.aggregations.metrics;
import org.opensearch.index.query.QueryShardContext;
import org.opensearch.search.aggregations.Aggregator;
diff --git a/server/src/main/java/org/opensearch/search/aggregations/metrics/InternalGeoBounds.java b/modules/geo/src/main/java/org/opensearch/geo/search/aggregations/metrics/InternalGeoBounds.java
similarity index 99%
rename from server/src/main/java/org/opensearch/search/aggregations/metrics/InternalGeoBounds.java
rename to modules/geo/src/main/java/org/opensearch/geo/search/aggregations/metrics/InternalGeoBounds.java
index 87018242ee8df..7c708de88a49c 100644
--- a/server/src/main/java/org/opensearch/search/aggregations/metrics/InternalGeoBounds.java
+++ b/modules/geo/src/main/java/org/opensearch/geo/search/aggregations/metrics/InternalGeoBounds.java
@@ -30,7 +30,7 @@
* GitHub history for details.
*/
-package org.opensearch.search.aggregations.metrics;
+package org.opensearch.geo.search.aggregations.metrics;
import org.opensearch.common.geo.GeoBoundingBox;
import org.opensearch.common.geo.GeoPoint;
diff --git a/server/src/main/java/org/opensearch/search/aggregations/metrics/ParsedGeoBounds.java b/modules/geo/src/main/java/org/opensearch/geo/search/aggregations/metrics/ParsedGeoBounds.java
similarity index 98%
rename from server/src/main/java/org/opensearch/search/aggregations/metrics/ParsedGeoBounds.java
rename to modules/geo/src/main/java/org/opensearch/geo/search/aggregations/metrics/ParsedGeoBounds.java
index a482fcfdf08dd..7643ac9d9a010 100644
--- a/server/src/main/java/org/opensearch/search/aggregations/metrics/ParsedGeoBounds.java
+++ b/modules/geo/src/main/java/org/opensearch/geo/search/aggregations/metrics/ParsedGeoBounds.java
@@ -30,7 +30,7 @@
* GitHub history for details.
*/
-package org.opensearch.search.aggregations.metrics;
+package org.opensearch.geo.search.aggregations.metrics;
import org.opensearch.common.Nullable;
import org.opensearch.common.collect.Tuple;
diff --git a/modules/geo/src/test/java/org/opensearch/geo/search/aggregations/metrics/GeoBoundsAggregationBuilderTests.java b/modules/geo/src/test/java/org/opensearch/geo/search/aggregations/metrics/GeoBoundsAggregationBuilderTests.java
new file mode 100644
index 0000000000000..49b455bbf389e
--- /dev/null
+++ b/modules/geo/src/test/java/org/opensearch/geo/search/aggregations/metrics/GeoBoundsAggregationBuilderTests.java
@@ -0,0 +1,43 @@
+/*
+ * SPDX-License-Identifier: Apache-2.0
+ *
+ * The OpenSearch Contributors require contributions made to
+ * this file be licensed under the Apache-2.0 license or a
+ * compatible open source license.
+ */
+
+package org.opensearch.geo.search.aggregations.metrics;
+
+import org.opensearch.geo.GeoModulePlugin;
+import org.opensearch.plugins.Plugin;
+import org.opensearch.search.aggregations.BaseAggregationTestCase;
+
+import java.util.Collection;
+import java.util.Collections;
+
+public class GeoBoundsAggregationBuilderTests extends BaseAggregationTestCase<GeoBoundsAggregationBuilder> {
+
+ /**
+ * This registers the {@link GeoModulePlugin} with the tests so that its aggregations can be used for testing the aggregation builders
+ *
+ * @return A Collection containing {@link GeoModulePlugin}
+ */
+ protected Collection<Class<? extends Plugin>> getPlugins() {
+ return Collections.singletonList(GeoModulePlugin.class);
+ }
+
+ @Override
+ protected GeoBoundsAggregationBuilder createTestAggregatorBuilder() {
+ GeoBoundsAggregationBuilder factory = new GeoBoundsAggregationBuilder(randomAlphaOfLengthBetween(1, 20));
+ String field = randomAlphaOfLengthBetween(3, 20);
+ factory.field(field);
+ if (randomBoolean()) {
+ factory.wrapLongitude(randomBoolean());
+ }
+ if (randomBoolean()) {
+ factory.missing("0,0");
+ }
+ return factory;
+ }
+
+}
diff --git a/server/src/test/java/org/opensearch/search/aggregations/metrics/GeoBoundsAggregatorTests.java b/modules/geo/src/test/java/org/opensearch/geo/search/aggregations/metrics/GeoBoundsAggregatorTests.java
similarity index 88%
rename from server/src/test/java/org/opensearch/search/aggregations/metrics/GeoBoundsAggregatorTests.java
rename to modules/geo/src/test/java/org/opensearch/geo/search/aggregations/metrics/GeoBoundsAggregatorTests.java
index 6440c62e58e18..ee7a3c7e3faa2 100644
--- a/server/src/test/java/org/opensearch/search/aggregations/metrics/GeoBoundsAggregatorTests.java
+++ b/modules/geo/src/test/java/org/opensearch/geo/search/aggregations/metrics/GeoBoundsAggregatorTests.java
@@ -30,26 +30,42 @@
* GitHub history for details.
*/
-package org.opensearch.search.aggregations.metrics;
+package org.opensearch.geo.search.aggregations.metrics;
import org.apache.lucene.document.Document;
import org.apache.lucene.document.LatLonDocValuesField;
import org.apache.lucene.index.IndexReader;
-import org.apache.lucene.tests.index.RandomIndexWriter;
import org.apache.lucene.search.IndexSearcher;
import org.apache.lucene.search.MatchAllDocsQuery;
import org.apache.lucene.store.Directory;
+import org.apache.lucene.tests.index.RandomIndexWriter;
import org.opensearch.common.geo.GeoPoint;
+import org.opensearch.geo.GeoModulePlugin;
+import org.opensearch.geo.tests.common.AggregationInspectionHelper;
+import org.opensearch.geo.tests.common.RandomGeoGenerator;
import org.opensearch.index.mapper.GeoPointFieldMapper;
import org.opensearch.index.mapper.MappedFieldType;
+import org.opensearch.plugins.SearchPlugin;
import org.opensearch.search.aggregations.AggregatorTestCase;
-import org.opensearch.search.aggregations.support.AggregationInspectionHelper;
-import org.opensearch.test.geo.RandomGeoGenerator;
-import static org.opensearch.search.aggregations.metrics.InternalGeoBoundsTests.GEOHASH_TOLERANCE;
+import java.util.Collections;
+import java.util.List;
+
import static org.hamcrest.Matchers.closeTo;
public class GeoBoundsAggregatorTests extends AggregatorTestCase {
+ public static final double GEOHASH_TOLERANCE = 1E-5D;
+
+ /**
+ * Overriding the Search Plugins list with {@link GeoModulePlugin} so that the testcase will know that this plugin is
+ * to be loaded during the tests.
+ * @return List of {@link SearchPlugin}
+ */
+ @Override
+ protected List<SearchPlugin> getSearchPlugins() {
+ return Collections.singletonList(new GeoModulePlugin());
+ }
+
public void testEmpty() throws Exception {
try (Directory dir = newDirectory(); RandomIndexWriter w = new RandomIndexWriter(random(), dir)) {
GeoBoundsAggregationBuilder aggBuilder = new GeoBoundsAggregationBuilder("my_agg").field("field").wrapLongitude(false);
diff --git a/server/src/test/java/org/opensearch/search/aggregations/metrics/InternalGeoBoundsTests.java b/modules/geo/src/test/java/org/opensearch/geo/search/aggregations/metrics/InternalGeoBoundsTests.java
similarity index 81%
rename from server/src/test/java/org/opensearch/search/aggregations/metrics/InternalGeoBoundsTests.java
rename to modules/geo/src/test/java/org/opensearch/geo/search/aggregations/metrics/InternalGeoBoundsTests.java
index e3857efff5d4d..22915212ff415 100644
--- a/server/src/test/java/org/opensearch/search/aggregations/metrics/InternalGeoBoundsTests.java
+++ b/modules/geo/src/test/java/org/opensearch/geo/search/aggregations/metrics/InternalGeoBoundsTests.java
@@ -30,11 +30,18 @@
* GitHub history for details.
*/
-package org.opensearch.search.aggregations.metrics;
+package org.opensearch.geo.search.aggregations.metrics;
+import org.opensearch.common.ParseField;
+import org.opensearch.common.xcontent.ContextParser;
+import org.opensearch.common.xcontent.NamedXContentRegistry;
+import org.opensearch.geo.GeoModulePlugin;
+import org.opensearch.plugins.SearchPlugin;
+import org.opensearch.search.aggregations.Aggregation;
import org.opensearch.search.aggregations.ParsedAggregation;
import org.opensearch.test.InternalAggregationTestCase;
+import java.util.ArrayList;
import java.util.HashMap;
import java.util.List;
import java.util.Map;
@@ -44,6 +51,30 @@
public class InternalGeoBoundsTests extends InternalAggregationTestCase<InternalGeoBounds> {
static final double GEOHASH_TOLERANCE = 1E-5D;
+ /**
+ * Overriding the method so that tests can get the aggregation specs for namedWriteable.
+ *
+ * @return GeoPlugin
+ */
+ @Override
+ protected SearchPlugin registerPlugin() {
+ return new GeoModulePlugin();
+ }
+
+ /**
+ * Overriding with the {@link ParsedGeoBounds} so that it can be parsed. We need to do this as {@link GeoModulePlugin}
+ * is registering this Aggregation.
+ *
+ * @return a List of {@link NamedXContentRegistry.Entry}
+ */
+ @Override
+ protected List<NamedXContentRegistry.Entry> getNamedXContents() {
+ final List<NamedXContentRegistry.Entry> namedXContents = new ArrayList<>(getDefaultNamedXContents());
+ final ContextParser