DerivedField: PR-2 Implementation for all supported types and DerivedField… #83

Closed
wants to merge 9 commits
3 changes: 2 additions & 1 deletion CHANGELOG.md
@@ -19,7 +19,8 @@ The format is based on [Keep a Changelog](https://keepachangelog.com/en/1.0.0/),
- Remote reindex: Add support for configurable retry mechanism ([#12561](https://github.com/opensearch-project/OpenSearch/pull/12561))
- [Admission Control] Integrate IO Usage Tracker to the Resource Usage Collector Service and Emit IO Usage Stats ([#11880](https://github.com/opensearch-project/OpenSearch/pull/11880))
- Tracing for deep search path ([#12103](https://github.com/opensearch-project/OpenSearch/pull/12103))
- Add explicit dependency to validatePom and generatePom tasks ([#12103](https://github.com/opensearch-project/OpenSearch/pull/12807))
- Add explicit dependency to validatePom and generatePom tasks ([#12807](https://github.com/opensearch-project/OpenSearch/pull/12807))
- Replace configureEach with all for publication iteration ([#12876](https://github.com/opensearch-project/OpenSearch/pull/12876))

### Dependencies
- Bump `log4j-core` from 2.18.0 to 2.19.0
4 changes: 2 additions & 2 deletions README.md
@@ -7,8 +7,8 @@
[![Security Vulnerabilities](https://img.shields.io/github/issues/opensearch-project/OpenSearch/security%20vulnerability?labelColor=red)](https://github.com/opensearch-project/OpenSearch/issues?q=is%3Aissue+is%3Aopen+label%3A"security%20vulnerability")
[![Open Issues](https://img.shields.io/github/issues/opensearch-project/OpenSearch)](https://github.com/opensearch-project/OpenSearch/issues)
[![Open Pull Requests](https://img.shields.io/github/issues-pr/opensearch-project/OpenSearch)](https://github.com/opensearch-project/OpenSearch/pulls)
[![2.10 Open Issues](https://img.shields.io/github/issues/opensearch-project/OpenSearch/v2.10.0)](https://github.com/opensearch-project/OpenSearch/issues?q=is%3Aissue+is%3Aopen+label%3A"v2.10.0")
[![3.0 Open Issues](https://img.shields.io/github/issues/opensearch-project/OpenSearch/v3.0.0)](https://github.com/opensearch-project/OpenSearch/issues?q=is%3Aissue+is%3Aopen+label%3A"v3.0.0")
[![2.14.0 Open Issues](https://img.shields.io/github/issues/opensearch-project/OpenSearch/v2.14.0)](https://github.com/opensearch-project/OpenSearch/issues?q=is%3Aissue+is%3Aopen+label%3A"v2.14.0")
[![3.0.0 Open Issues](https://img.shields.io/github/issues/opensearch-project/OpenSearch/v3.0.0)](https://github.com/opensearch-project/OpenSearch/issues?q=is%3Aissue+is%3Aopen+label%3A"v3.0.0")
[![GHA gradle check](https://github.com/opensearch-project/OpenSearch/actions/workflows/gradle-check.yml/badge.svg)](https://github.com/opensearch-project/OpenSearch/actions/workflows/gradle-check.yml)
[![GHA validate pull request](https://github.com/opensearch-project/OpenSearch/actions/workflows/wrapper.yml/badge.svg)](https://github.com/opensearch-project/OpenSearch/actions/workflows/wrapper.yml)
[![GHA precommit](https://github.com/opensearch-project/OpenSearch/actions/workflows/precommit.yml/badge.svg)](https://github.com/opensearch-project/OpenSearch/actions/workflows/precommit.yml)
@@ -48,7 +48,7 @@ public class PomValidationPrecommitPlugin extends PrecommitPlugin {
public TaskProvider<? extends Task> createTask(Project project) {
TaskProvider<Task> validatePom = project.getTasks().register("validatePom");
PublishingExtension publishing = project.getExtensions().getByType(PublishingExtension.class);
publishing.getPublications().configureEach(publication -> {
publishing.getPublications().all(publication -> {
String publicationName = Util.capitalize(publication.getName());
TaskProvider<PomValidationTask> validateTask = project.getTasks()
.register("validate" + publicationName + "Pom", PomValidationTask.class);
@@ -59,7 +59,7 @@ public TaskProvider<? extends Task> createTask(Project project) {
validateTask.configure(task -> {
task.dependsOn(generateMavenPom);
task.getPomFile().fileProvider(generateMavenPom.map(GenerateMavenPom::getDestination));
publishing.getPublications().configureEach(publicationForPomGen -> {
publishing.getPublications().all(publicationForPomGen -> {
task.mustRunAfter(
project.getTasks()
.withType(GenerateMavenPom.class)
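
Side note on the hunk above (not part of this PR's diff): Gradle's configureEach defers the action until a publication is actually realized, whereas all runs it eagerly for every current and future element, so the per-publication validate and generate tasks are registered as soon as a publication is added. A minimal sketch of the eager form, using a hypothetical plugin name:

import org.gradle.api.Plugin;
import org.gradle.api.Project;
import org.gradle.api.publish.PublishingExtension;

// Hypothetical example, not from the PR: logs every publication as soon as it is added,
// because all(...) applies the action eagerly to existing and later-added elements.
public class PublicationLoggingPlugin implements Plugin<Project> {
    @Override
    public void apply(Project project) {
        PublishingExtension publishing = project.getExtensions().getByType(PublishingExtension.class);
        publishing.getPublications().all(publication ->
            project.getLogger().lifecycle("Configuring publication eagerly: " + publication.getName())
        );
    }
}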
@@ -45,6 +45,8 @@ public class GradleThreadsFilter implements ThreadFilter {
public boolean reject(Thread t) {
return t.getName().startsWith("Exec process")
|| t.getName().startsWith("Memory manager")
|| t.getName().startsWith("File watcher consumer");
|| t.getName().startsWith("File watcher consumer")
|| t.getName().startsWith("sshd-SshClient") /* Started by SshClient (sshd-core), part of SftpFileSystemProvider */
|| t.getName().startsWith("Thread-"); /* Started by AbstractFactoryManager (sshd-core), part of SftpFileSystemProvider */
}
}
4 changes: 2 additions & 2 deletions gradle/wrapper/gradle-wrapper.properties
@@ -11,7 +11,7 @@

distributionBase=GRADLE_USER_HOME
distributionPath=wrapper/dists
distributionUrl=https\://services.gradle.org/distributions/gradle-8.6-all.zip
distributionUrl=https\://services.gradle.org/distributions/gradle-8.7-all.zip
zipStoreBase=GRADLE_USER_HOME
zipStorePath=wrapper/dists
distributionSha256Sum=85719317abd2112f021d4f41f09ec370534ba288432065f4b477b6a3b652910d
distributionSha256Sum=194717442575a6f96e1c1befa2c30e9a4fc90f701d7aee33eb879b79e7ff05c0
@@ -2,7 +2,7 @@
- do:
scripts_painless_context: {}
- match: { contexts.0: aggregation_selector}
- match: { contexts.23: update}
- match: { contexts.24: update}
---

"Action to get all API values for score context":
@@ -6,12 +6,10 @@
* compatible open source license.
*/

package org.opensearch.cluster.state;
package org.opensearch.action.support.clustermanager.term;

import org.opensearch.action.admin.cluster.state.ClusterStateRequest;
import org.opensearch.action.admin.cluster.state.ClusterStateResponse;
import org.opensearch.action.admin.cluster.state.term.GetTermVersionAction;
import org.opensearch.action.admin.cluster.state.term.GetTermVersionResponse;
import org.opensearch.action.admin.indices.mapping.put.PutMappingRequest;
import org.opensearch.cluster.ClusterName;
import org.opensearch.cluster.coordination.ClusterStateTermVersion;
4 changes: 2 additions & 2 deletions server/src/main/java/org/opensearch/action/ActionModule.java
@@ -107,8 +107,6 @@
import org.opensearch.action.admin.cluster.snapshots.status.TransportSnapshotsStatusAction;
import org.opensearch.action.admin.cluster.state.ClusterStateAction;
import org.opensearch.action.admin.cluster.state.TransportClusterStateAction;
import org.opensearch.action.admin.cluster.state.term.GetTermVersionAction;
import org.opensearch.action.admin.cluster.state.term.TransportGetTermVersionAction;
import org.opensearch.action.admin.cluster.stats.ClusterStatsAction;
import org.opensearch.action.admin.cluster.stats.TransportClusterStatsAction;
import org.opensearch.action.admin.cluster.storedscripts.DeleteStoredScriptAction;
@@ -283,6 +281,8 @@
import org.opensearch.action.support.AutoCreateIndex;
import org.opensearch.action.support.DestructiveOperations;
import org.opensearch.action.support.TransportAction;
import org.opensearch.action.support.clustermanager.term.GetTermVersionAction;
import org.opensearch.action.support.clustermanager.term.TransportGetTermVersionAction;
import org.opensearch.action.termvectors.MultiTermVectorsAction;
import org.opensearch.action.termvectors.TermVectorsAction;
import org.opensearch.action.termvectors.TransportMultiTermVectorsAction;
@@ -37,13 +37,13 @@
import org.apache.logging.log4j.message.ParameterizedMessage;
import org.opensearch.action.ActionListenerResponseHandler;
import org.opensearch.action.ActionRunnable;
import org.opensearch.action.admin.cluster.state.term.GetTermVersionAction;
import org.opensearch.action.admin.cluster.state.term.GetTermVersionRequest;
import org.opensearch.action.admin.cluster.state.term.GetTermVersionResponse;
import org.opensearch.action.bulk.BackoffPolicy;
import org.opensearch.action.support.ActionFilters;
import org.opensearch.action.support.HandledTransportAction;
import org.opensearch.action.support.RetryableAction;
import org.opensearch.action.support.clustermanager.term.GetTermVersionAction;
import org.opensearch.action.support.clustermanager.term.GetTermVersionRequest;
import org.opensearch.action.support.clustermanager.term.GetTermVersionResponse;
import org.opensearch.cluster.ClusterManagerNodeChangePredicate;
import org.opensearch.cluster.ClusterState;
import org.opensearch.cluster.ClusterStateObserver;
@@ -6,7 +6,7 @@
* compatible open source license.
*/

package org.opensearch.action.admin.cluster.state.term;
package org.opensearch.action.support.clustermanager.term;

import org.opensearch.action.ActionType;

@@ -18,7 +18,7 @@
public class GetTermVersionAction extends ActionType<GetTermVersionResponse> {

public static final GetTermVersionAction INSTANCE = new GetTermVersionAction();
public static final String NAME = "cluster:monitor/term";
public static final String NAME = "internal:monitor/term";

private GetTermVersionAction() {
super(NAME, GetTermVersionResponse::new);
@@ -6,7 +6,7 @@
* compatible open source license.
*/

package org.opensearch.action.admin.cluster.state.term;
package org.opensearch.action.support.clustermanager.term;

import org.opensearch.action.ActionRequestValidationException;
import org.opensearch.action.support.clustermanager.ClusterManagerNodeReadRequest;
@@ -6,7 +6,7 @@
* compatible open source license.
*/

package org.opensearch.action.admin.cluster.state.term;
package org.opensearch.action.support.clustermanager.term;

import org.opensearch.cluster.ClusterState;
import org.opensearch.cluster.coordination.ClusterStateTermVersion;
@@ -6,7 +6,7 @@
* compatible open source license.
*/

package org.opensearch.action.admin.cluster.state.term;
package org.opensearch.action.support.clustermanager.term;

import org.apache.logging.log4j.LogManager;
import org.apache.logging.log4j.Logger;
@@ -7,4 +7,4 @@
*/

/** Cluster Term transport handler. */
package org.opensearch.action.admin.cluster.state.term;
package org.opensearch.action.support.clustermanager.term;
@@ -0,0 +1,156 @@
/*
* SPDX-License-Identifier: Apache-2.0
*
* The OpenSearch Contributors require contributions made to
* this file be licensed under the Apache-2.0 license or a
* compatible open source license.
*/

package org.opensearch.index.mapper;

import org.apache.lucene.document.DoubleField;
import org.apache.lucene.document.Field;
import org.apache.lucene.document.FieldType;
import org.apache.lucene.document.InetAddressPoint;
import org.apache.lucene.document.KeywordField;
import org.apache.lucene.document.LatLonPoint;
import org.apache.lucene.document.LongField;
import org.apache.lucene.document.LongPoint;
import org.apache.lucene.index.IndexOptions;
import org.apache.lucene.index.IndexableField;
import org.opensearch.Version;
import org.opensearch.common.Booleans;
import org.opensearch.common.lucene.Lucene;
import org.opensearch.common.network.InetAddresses;

import java.net.InetAddress;
import java.util.Arrays;
import java.util.List;
import java.util.Map;
import java.util.function.BiFunction;
import java.util.function.Function;
import java.util.stream.Collectors;

/**
* Contains logic to get the FieldMapper for a given type of derived field. It is also used to create an
* IndexableField from the provided type and object, which is useful when indexing into a Lucene
* MemoryIndex in {@link org.opensearch.index.query.DerivedFieldQuery}.
*/
enum DerivedFieldSupportedTypes {

BOOLEAN("boolean", (name, context) -> {
BooleanFieldMapper.Builder builder = new BooleanFieldMapper.Builder(name);
return builder.build(context);
}, name -> o -> {
// Mimic the source value parsing logic used by the BooleanFieldMapper valueFetcher
Boolean value;
if (o instanceof Boolean) {
value = (Boolean) o;
} else {
String textValue = o.toString();
value = Booleans.parseBooleanStrict(textValue, false);
}
return new Field(name, value ? "T" : "F", BooleanFieldMapper.Defaults.FIELD_TYPE);
}),
DATE("date", (name, context) -> {
// TODO: should we support mapping settings exposed by a given field type from derived fields too?
// for example, support `format` for date type?
DateFieldMapper.Builder builder = new DateFieldMapper.Builder(
name,
DateFieldMapper.Resolution.MILLISECONDS,
DateFieldMapper.getDefaultDateTimeFormatter(),
false,
Version.CURRENT
);
return builder.build(context);
}, name -> o -> new LongPoint(name, (long) o)),
GEO_POINT("geo_point", (name, context) -> {
GeoPointFieldMapper.Builder builder = new GeoPointFieldMapper.Builder(name);
return builder.build(context);
}, name -> o -> {
// o is expected to be a list of two doubles: [lat, lon]
if (!(o instanceof List) || ((List<?>) o).size() != 2 || !(((List<?>) o).get(0) instanceof Double)) {
throw new ClassCastException("geo_point should be in format emit(double lat, double lon) for derived fields");
}
return new LatLonPoint(name, (Double) ((List<?>) o).get(0), (Double) ((List<?>) o).get(1));
}),
IP("ip", (name, context) -> {
IpFieldMapper.Builder builder = new IpFieldMapper.Builder(name, false, Version.CURRENT);
return builder.build(context);
}, name -> o -> {
InetAddress address;
if (o instanceof InetAddress) {
address = (InetAddress) o;
} else {
address = InetAddresses.forString(o.toString());
}
return new InetAddressPoint(name, address);
}),
KEYWORD("keyword", (name, context) -> {
FieldType dummyFieldType = new FieldType();
dummyFieldType.setIndexOptions(IndexOptions.DOCS_AND_FREQS);
KeywordFieldMapper.Builder keywordBuilder = new KeywordFieldMapper.Builder(name);
KeywordFieldMapper.KeywordFieldType keywordFieldType = keywordBuilder.buildFieldType(context, dummyFieldType);
keywordFieldType.setIndexAnalyzer(Lucene.KEYWORD_ANALYZER);
return new KeywordFieldMapper(
name,
dummyFieldType,
keywordFieldType,
keywordBuilder.multiFieldsBuilder.build(keywordBuilder, context),
keywordBuilder.copyTo.build(),
keywordBuilder
);
}, name -> o -> new KeywordField(name, (String) o, Field.Store.NO)),
LONG("long", (name, context) -> {
NumberFieldMapper.Builder longBuilder = new NumberFieldMapper.Builder(name, NumberFieldMapper.NumberType.LONG, false, false);
return longBuilder.build(context);
}, name -> o -> new LongField(name, Long.parseLong(o.toString()), Field.Store.NO)),
DOUBLE("double", (name, context) -> {
NumberFieldMapper.Builder doubleBuilder = new NumberFieldMapper.Builder(name, NumberFieldMapper.NumberType.DOUBLE, false, false);
return doubleBuilder.build(context);
}, name -> o -> new DoubleField(name, Double.parseDouble(o.toString()), Field.Store.NO));

final String name;
private final BiFunction<String, Mapper.BuilderContext, FieldMapper> builder;

private final Function<String, Function<Object, IndexableField>> indexableFieldBuilder;

DerivedFieldSupportedTypes(
String name,
BiFunction<String, Mapper.BuilderContext, FieldMapper> builder,
Function<String, Function<Object, IndexableField>> indexableFieldBuilder
) {
this.name = name;
this.builder = builder;
this.indexableFieldBuilder = indexableFieldBuilder;
}

public String getName() {
return name;
}

private FieldMapper getFieldMapper(String name, Mapper.BuilderContext context) {
return builder.apply(name, context);
}

private Function<Object, IndexableField> getIndexableFieldGenerator(String name) {
return indexableFieldBuilder.apply(name);
}

private static final Map<String, DerivedFieldSupportedTypes> enumMap = Arrays.stream(DerivedFieldSupportedTypes.values())
.collect(Collectors.toMap(DerivedFieldSupportedTypes::getName, enumValue -> enumValue));

public static FieldMapper getFieldMapperFromType(String type, String name, Mapper.BuilderContext context) {
if (!enumMap.containsKey(type)) {
throw new IllegalArgumentException("Type [" + type + "] isn't supported in Derived field context.");
}
return enumMap.get(type).getFieldMapper(name, context);
}

public static Function<Object, IndexableField> getIndexableFieldGeneratorType(String type, String name) {
if (!enumMap.containsKey(type)) {
throw new IllegalArgumentException("Type [" + type + "] isn't supported in Derived field context.");
}
return enumMap.get(type).getIndexableFieldGenerator(name);
}
}
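
A minimal usage sketch of the two static helpers added above, for reviewers: it shows how a value emitted for a derived field could be turned into an IndexableField and indexed into a transient MemoryIndex, as the class Javadoc describes. The class name, analyzer choice, and MemoryIndex wiring are illustrative assumptions, not taken from this PR.

package org.opensearch.index.mapper;

import org.apache.lucene.index.IndexableField;
import org.apache.lucene.index.memory.MemoryIndex;
import org.opensearch.common.lucene.Lucene;

import java.util.function.Function;

// Hypothetical helper: indexes a single emitted value for a derived field into a MemoryIndex.
class DerivedFieldUsageSketch {
    static MemoryIndex indexEmittedValue(String type, String fieldName, Object emittedValue) {
        // e.g. type "keyword" yields a KeywordField, type "long" yields a LongField
        Function<Object, IndexableField> generator =
            DerivedFieldSupportedTypes.getIndexableFieldGeneratorType(type, fieldName);
        MemoryIndex memoryIndex = new MemoryIndex();
        memoryIndex.addField(generator.apply(emittedValue), Lucene.STANDARD_ANALYZER);
        return memoryIndex;
    }
}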