collection) {
return collection.toArray(new String[0]);
}
- /**
- * Take a String which is a delimited list and convert it to a String array.
- * A single delimiter can consists of more than one character: It will still
- * be considered as single delimiter string, rather than as bunch of potential
- * delimiter characters - in contrast to tokenizeToStringArray
.
- *
- * @param str the input String
- * @param delimiter the delimiter between elements (this is a single delimiter,
- * rather than a bunch individual delimiter characters)
- * @param charsToDelete a set of characters to delete. Useful for deleting unwanted
- * line breaks: e.g. "\r\n\f" will delete all new lines and line feeds in a String.
- * @return an array of the tokens in the list
- * @see #tokenizeToStringArray
- */
- public static String[] delimitedListToStringArray(String str, String delimiter, String charsToDelete) {
- if (str == null) {
- return Strings.EMPTY_ARRAY;
- }
- if (delimiter == null) {
- return new String[] { str };
- }
- List result = new ArrayList<>();
- if ("".equals(delimiter)) {
- for (int i = 0; i < str.length(); i++) {
- result.add(deleteAny(str.substring(i, i + 1), charsToDelete));
- }
- } else {
- int pos = 0;
- int delPos;
- while ((delPos = str.indexOf(delimiter, pos)) != -1) {
- result.add(deleteAny(str.substring(pos, delPos), charsToDelete));
- pos = delPos + delimiter.length();
- }
- if (str.length() > 0 && pos <= str.length()) {
- // Add rest of String, but not in case of empty input.
- result.add(deleteAny(str.substring(pos), charsToDelete));
- }
- }
- return toStringArray(result);
- }
-
/**
* Tokenize the specified string by commas to a set, trimming whitespace and ignoring empty tokens.
*
@@ -393,6 +357,41 @@ public static Set tokenizeByCommaToSet(final String s) {
return tokenizeToCollection(s, ",", HashSet::new);
}
+ /**
+ * Split the specified string by commas to an array.
+ *
+ * @param s the string to split
+ * @return the array of split values
+ * @see String#split(String)
+ */
+ public static String[] splitStringByCommaToArray(final String s) {
+ if (s == null || s.isEmpty()) return Strings.EMPTY_ARRAY;
+ else return s.split(",");
+ }
+
+ /**
+ * Split a String at the first occurrence of the delimiter.
+ * Does not include the delimiter in the result.
+ *
+ * @param toSplit the string to split
+ * @param delimiter to split the string up with
+ * @return a two element array with index 0 being before the delimiter, and
+ * index 1 being after the delimiter (neither element includes the delimiter);
+ * or {@code null} if the delimiter wasn't found in the given input String
+ */
+ public static String[] split(String toSplit, String delimiter) {
+ if (hasLength(toSplit) == false || hasLength(delimiter) == false) {
+ return null;
+ }
+ int offset = toSplit.indexOf(delimiter);
+ if (offset < 0) {
+ return null;
+ }
+ String beforeDelimiter = toSplit.substring(0, offset);
+ String afterDelimiter = toSplit.substring(offset + delimiter.length());
+ return new String[] { beforeDelimiter, afterDelimiter };
+ }
+
/**
* Tokenize the given String into a String array via a StringTokenizer.
* Trims tokens and omits empty tokens.
@@ -446,6 +445,47 @@ private static > T tokenizeToCollection(
return tokens;
}
+ /**
+ * Take a String which is a delimited list and convert it to a String array.
+ * A single delimiter can consist of more than one character: It will still
+ * be considered as a single delimiter string, rather than as a bunch of potential
+ * delimiter characters - in contrast to tokenizeToStringArray.
+ *
+ * @param str the input String
+ * @param delimiter the delimiter between elements (this is a single delimiter,
+ * rather than a bunch of individual delimiter characters)
+ * @param charsToDelete a set of characters to delete. Useful for deleting unwanted
+ * line breaks: e.g. "\r\n\f" will delete all new lines and line feeds in a String.
+ * @return an array of the tokens in the list
+ * @see #tokenizeToStringArray
+ */
+ public static String[] delimitedListToStringArray(String str, String delimiter, String charsToDelete) {
+ if (str == null) {
+ return Strings.EMPTY_ARRAY;
+ }
+ if (delimiter == null) {
+ return new String[] { str };
+ }
+ List result = new ArrayList<>();
+ if ("".equals(delimiter)) {
+ for (int i = 0; i < str.length(); i++) {
+ result.add(deleteAny(str.substring(i, i + 1), charsToDelete));
+ }
+ } else {
+ int pos = 0;
+ int delPos;
+ while ((delPos = str.indexOf(delimiter, pos)) != -1) {
+ result.add(deleteAny(str.substring(pos, delPos), charsToDelete));
+ pos = delPos + delimiter.length();
+ }
+ if (str.length() > 0 && pos <= str.length()) {
+ // Add rest of String, but not in case of empty input.
+ result.add(deleteAny(str.substring(pos), charsToDelete));
+ }
+ }
+ return toStringArray(result);
+ }
+
/**
* Take a String which is a delimited list and convert it to a String array.
* A single delimiter can consists of more than one character: It will still
@@ -486,33 +526,293 @@ public static Set commaDelimitedListToSet(String str) {
return set;
}
- public static boolean isNullOrEmpty(@Nullable String s) {
- return s == null || s.isEmpty();
+ /**
+ * Convenience method to return a Collection as a delimited (e.g. CSV)
+ * String. E.g. useful for {@code toString()} implementations.
+ *
+ * @param coll the Collection to display
+ * @param delim the delimiter to use (probably a ",")
+ * @param prefix the String to start each element with
+ * @param suffix the String to end each element with
+ * @return the delimited String
+ */
+ public static String collectionToDelimitedString(Iterable> coll, String delim, String prefix, String suffix) {
+ StringBuilder sb = new StringBuilder();
+ collectionToDelimitedString(coll, delim, prefix, suffix, sb);
+ return sb.toString();
+ }
+
+ public static void collectionToDelimitedString(Iterable> coll, String delim, String prefix, String suffix, StringBuilder sb) {
+ Iterator> it = coll.iterator();
+ while (it.hasNext()) {
+ sb.append(prefix).append(it.next()).append(suffix);
+ if (it.hasNext()) {
+ sb.append(delim);
+ }
+ }
}
/**
- * Capitalize a String
, changing the first letter to
- * upper case as per {@link Character#toUpperCase(char)}.
- * No other letters are changed.
+ * Convenience method to return a Collection as a delimited (e.g. CSV)
+ * String. E.g. useful for {@code toString()} implementations.
*
- * @param str the String to capitalize, may be null
- * @return the capitalized String, null
if null
+ * @param coll the Collection to display
+ * @param delim the delimiter to use (probably a ",")
+ * @return the delimited String
*/
- public static String capitalize(String str) {
- return changeFirstCharacterCase(str, true);
+ public static String collectionToDelimitedString(Iterable> coll, String delim) {
+ return collectionToDelimitedString(coll, delim, "", "");
}
- private static String changeFirstCharacterCase(String str, boolean capitalize) {
- if (str == null || str.length() == 0) {
- return str;
+ /**
+ * Convenience method to return a Collection as a CSV String.
+ * E.g. useful for {@code toString()} implementations.
+ *
+ * @param coll the Collection to display
+ * @return the delimited String
+ */
+ public static String collectionToCommaDelimitedString(Iterable> coll) {
+ return collectionToDelimitedString(coll, ",");
+ }
+
+ /**
+ * Convenience method to return a String array as a delimited (e.g. CSV)
+ * String. E.g. useful for {@code toString()} implementations.
+ *
+ * @param arr the array to display
+ * @param delim the delimiter to use (probably a ",")
+ * @return the delimited String
+ */
+ public static String arrayToDelimitedString(Object[] arr, String delim) {
+ StringBuilder sb = new StringBuilder();
+ arrayToDelimitedString(arr, delim, sb);
+ return sb.toString();
+ }
+
+ public static void arrayToDelimitedString(Object[] arr, String delim, StringBuilder sb) {
+ if (isEmpty(arr)) {
+ return;
}
- StringBuilder sb = new StringBuilder(str.length());
- if (capitalize) {
- sb.append(Character.toUpperCase(str.charAt(0)));
+ for (int i = 0; i < arr.length; i++) {
+ if (i > 0) {
+ sb.append(delim);
+ }
+ sb.append(arr[i]);
+ }
+ }
+
+ /**
+ * Convenience method to return a String array as a CSV String.
+ * E.g. useful for {@code toString()} implementations.
+ *
+ * @param arr the array to display
+ * @return the delimited String
+ */
+ public static String arrayToCommaDelimitedString(Object[] arr) {
+ return arrayToDelimitedString(arr, ",");
+ }
+
+ /**
+ * Format the double value with a single decimal point, trimming trailing '.0'.
+ */
+ public static String format1Decimals(double value, String suffix) {
+ String p = String.valueOf(value);
+ int ix = p.indexOf('.') + 1;
+ int ex = p.indexOf('E');
+ char fraction = p.charAt(ix);
+ if (fraction == '0') {
+ if (ex != -1) {
+ return p.substring(0, ix - 1) + p.substring(ex) + suffix;
+ } else {
+ return p.substring(0, ix - 1) + suffix;
+ }
} else {
- sb.append(Character.toLowerCase(str.charAt(0)));
+ if (ex != -1) {
+ return p.substring(0, ix) + fraction + p.substring(ex) + suffix;
+ } else {
+ return p.substring(0, ix) + fraction + suffix;
+ }
}
- sb.append(str.substring(1));
- return sb.toString();
+ }
+
+ /**
+ * Determine whether the given array is empty:
+ * i.e. {@code null} or of zero length.
+ *
+ * @param array the array to check
+ */
+ private static boolean isEmpty(final Object[] array) {
+ return (array == null || array.length == 0);
+ }
+
+ public static byte[] toUTF8Bytes(CharSequence charSequence) {
+ return toUTF8Bytes(charSequence, new BytesRefBuilder());
+ }
+
+ public static byte[] toUTF8Bytes(CharSequence charSequence, BytesRefBuilder spare) {
+ spare.copyChars(charSequence);
+ return Arrays.copyOf(spare.bytes(), spare.length());
+ }
+
+ /**
+ * Return substring(beginIndex, endIndex) that is impervious to string length.
+ */
+ public static String substring(String s, int beginIndex, int endIndex) {
+ if (s == null) {
+ return s;
+ }
+
+ int realEndIndex = s.length() > 0 ? s.length() - 1 : 0;
+
+ if (endIndex > realEndIndex) {
+ return s.substring(beginIndex);
+ } else {
+ return s.substring(beginIndex, endIndex);
+ }
+ }
+
+ /**
+ * Returns {@code true} if the array is null/empty, or consists of a single
+ * element that is "*" or "_all" (usually used to mean everything)
+ */
+ public static boolean isAllOrWildcard(String[] data) {
+ return CollectionUtils.isEmpty(data) || data.length == 1 && isAllOrWildcard(data[0]);
+ }
+
+ /**
+ * Returns `true` if the string is `_all` or `*`.
+ */
+ public static boolean isAllOrWildcard(String data) {
+ return "_all".equals(data) || "*".equals(data);
+ }
+
+ /**
+ * Return a {@link String} that is the json representation of the provided {@link ToXContent}.
+ * Wraps the output into an anonymous object if needed. The content is not pretty-printed
+ * nor human readable.
+ */
+ public static String toString(MediaType mediaType, ToXContent toXContent) {
+ return toString(mediaType, toXContent, false, false);
+ }
+
+ /**
+ * Return a {@link String} that is the json representation of the provided {@link ToXContent}.
+ * Wraps the output into an anonymous object if needed.
+ * Allows to configure the params.
+ * The content is not pretty-printed nor human readable.
+ */
+ public static String toString(MediaType mediaType, ToXContent toXContent, ToXContent.Params params) {
+ return toString(mediaType, toXContent, params, false, false);
+ }
+
+ /**
+ * Return a {@link String} that is the json representation of the provided {@link ToXContent}.
+ * Wraps the output into an anonymous object if needed. Allows to control whether the outputted
+ * json needs to be pretty printed and human readable.
+ *
+ */
+ public static String toString(MediaType mediaType, ToXContent toXContent, boolean pretty, boolean human) {
+ return toString(mediaType, toXContent, ToXContent.EMPTY_PARAMS, pretty, human);
+ }
+
+ /**
+ * Return a {@link String} that is the json representation of the provided {@link ToXContent}.
+ * Wraps the output into an anonymous object if needed.
+ * Allows to configure the params.
+ * Allows to control whether the outputted json needs to be pretty printed and human readable.
+ */
+ private static String toString(MediaType mediaType, ToXContent toXContent, ToXContent.Params params, boolean pretty, boolean human) {
+ try {
+ XContentBuilder builder = createBuilder(mediaType, pretty, human);
+ if (toXContent.isFragment()) {
+ builder.startObject();
+ }
+ toXContent.toXContent(builder, params);
+ if (toXContent.isFragment()) {
+ builder.endObject();
+ }
+ return builder.toString();
+ } catch (IOException e) {
+ try {
+ XContentBuilder builder = createBuilder(mediaType, pretty, human);
+ builder.startObject();
+ builder.field("error", "error building toString out of XContent: " + e.getMessage());
+ builder.field("stack_trace", ExceptionsHelper.stackTrace(e));
+ builder.endObject();
+ return builder.toString();
+ } catch (IOException e2) {
+ throw new OpenSearchException("cannot generate error message for deserialization", e);
+ }
+ }
+ }
+
+ private static XContentBuilder createBuilder(MediaType mediaType, boolean pretty, boolean human) throws IOException {
+ XContentBuilder builder = XContentBuilder.builder(mediaType.xContent());
+ if (pretty) {
+ builder.prettyPrint();
+ }
+ if (human) {
+ builder.humanReadable(true);
+ }
+ return builder;
+ }
+
+ /**
+ * Truncates string to a length less than length. Backtracks to throw out
+ * high surrogates.
+ */
+ public static String cleanTruncate(String s, int length) {
+ if (s == null) {
+ return s;
+ }
+ /*
+ * Its pretty silly for you to truncate to 0 length but just in case
+ * someone does this shouldn't break.
+ */
+ if (length == 0) {
+ return "";
+ }
+ if (length >= s.length()) {
+ return s;
+ }
+ if (Character.isHighSurrogate(s.charAt(length - 1))) {
+ length--;
+ }
+ return s.substring(0, length);
+ }
+
+ public static boolean isNullOrEmpty(@Nullable String s) {
+ return s == null || s.isEmpty();
+ }
+
+ public static String padStart(String s, int minimumLength, char c) {
+ if (s == null) {
+ throw new NullPointerException("s");
+ }
+ if (s.length() >= minimumLength) {
+ return s;
+ } else {
+ StringBuilder sb = new StringBuilder(minimumLength);
+ for (int i = s.length(); i < minimumLength; i++) {
+ sb.append(c);
+ }
+
+ sb.append(s);
+ return sb.toString();
+ }
+ }
+
+ public static String toLowercaseAscii(String in) {
+ StringBuilder out = new StringBuilder();
+ Iterator iter = in.codePoints().iterator();
+ while (iter.hasNext()) {
+ int codepoint = iter.next();
+ if (codepoint > 128) {
+ out.appendCodePoint(codepoint);
+ } else {
+ out.appendCodePoint(Character.toLowerCase(codepoint));
+ }
+ }
+ return out.toString();
}
}
diff --git a/server/src/main/java/org/opensearch/common/breaker/CircuitBreaker.java b/libs/core/src/main/java/org/opensearch/core/common/breaker/CircuitBreaker.java
similarity index 99%
rename from server/src/main/java/org/opensearch/common/breaker/CircuitBreaker.java
rename to libs/core/src/main/java/org/opensearch/core/common/breaker/CircuitBreaker.java
index 4cbd375e8c1ff..0f75f763d21c1 100644
--- a/server/src/main/java/org/opensearch/common/breaker/CircuitBreaker.java
+++ b/libs/core/src/main/java/org/opensearch/core/common/breaker/CircuitBreaker.java
@@ -30,7 +30,7 @@
* GitHub history for details.
*/
-package org.opensearch.common.breaker;
+package org.opensearch.core.common.breaker;
import java.util.Locale;
diff --git a/server/src/main/java/org/opensearch/common/breaker/CircuitBreakingException.java b/libs/core/src/main/java/org/opensearch/core/common/breaker/CircuitBreakingException.java
similarity index 98%
rename from server/src/main/java/org/opensearch/common/breaker/CircuitBreakingException.java
rename to libs/core/src/main/java/org/opensearch/core/common/breaker/CircuitBreakingException.java
index 4cab014912970..4869127ee473d 100644
--- a/server/src/main/java/org/opensearch/common/breaker/CircuitBreakingException.java
+++ b/libs/core/src/main/java/org/opensearch/core/common/breaker/CircuitBreakingException.java
@@ -29,7 +29,7 @@
* GitHub history for details.
*/
-package org.opensearch.common.breaker;
+package org.opensearch.core.common.breaker;
import org.opensearch.OpenSearchException;
import org.opensearch.core.common.io.stream.StreamInput;
diff --git a/server/src/main/java/org/opensearch/common/breaker/NoopCircuitBreaker.java b/libs/core/src/main/java/org/opensearch/core/common/breaker/NoopCircuitBreaker.java
similarity index 98%
rename from server/src/main/java/org/opensearch/common/breaker/NoopCircuitBreaker.java
rename to libs/core/src/main/java/org/opensearch/core/common/breaker/NoopCircuitBreaker.java
index ddd72280faa4f..86a0a7ccb96fd 100644
--- a/server/src/main/java/org/opensearch/common/breaker/NoopCircuitBreaker.java
+++ b/libs/core/src/main/java/org/opensearch/core/common/breaker/NoopCircuitBreaker.java
@@ -30,7 +30,7 @@
* GitHub history for details.
*/
-package org.opensearch.common.breaker;
+package org.opensearch.core.common.breaker;
/**
* A CircuitBreaker that doesn't increment or adjust, and all operations are
diff --git a/libs/core/src/main/java/org/opensearch/core/common/breaker/package-info.java b/libs/core/src/main/java/org/opensearch/core/common/breaker/package-info.java
new file mode 100644
index 0000000000000..f9fb83d2207e1
--- /dev/null
+++ b/libs/core/src/main/java/org/opensearch/core/common/breaker/package-info.java
@@ -0,0 +1,12 @@
+/*
+ * SPDX-License-Identifier: Apache-2.0
+ *
+ * The OpenSearch Contributors require contributions made to
+ * this file be licensed under the Apache-2.0 license or a
+ * compatible open source license.
+ */
+
+/**
+ * Foundation classes for the Circuit Breaker
+ */
+package org.opensearch.core.common.breaker;
diff --git a/libs/core/src/main/java/org/opensearch/core/common/io/stream/StreamInput.java b/libs/core/src/main/java/org/opensearch/core/common/io/stream/StreamInput.java
index 1d7321bf2c6de..d9040da569345 100644
--- a/libs/core/src/main/java/org/opensearch/core/common/io/stream/StreamInput.java
+++ b/libs/core/src/main/java/org/opensearch/core/common/io/stream/StreamInput.java
@@ -54,7 +54,7 @@
import org.opensearch.core.common.Strings;
import org.opensearch.core.concurrency.OpenSearchRejectedExecutionException;
import org.opensearch.core.xcontent.MediaType;
-import org.opensearch.core.xcontent.MediaTypeParserRegistry;
+import org.opensearch.core.xcontent.MediaTypeRegistry;
import java.io.ByteArrayInputStream;
import java.io.EOFException;
@@ -347,7 +347,7 @@ public BigInteger readBigInteger() throws IOException {
}
public MediaType readMediaType() throws IOException {
- return MediaTypeParserRegistry.fromMediaType(readString());
+ return MediaTypeRegistry.fromMediaType(readString());
}
@Nullable
diff --git a/server/src/main/java/org/opensearch/common/transport/BoundTransportAddress.java b/libs/core/src/main/java/org/opensearch/core/common/transport/BoundTransportAddress.java
similarity index 98%
rename from server/src/main/java/org/opensearch/common/transport/BoundTransportAddress.java
rename to libs/core/src/main/java/org/opensearch/core/common/transport/BoundTransportAddress.java
index 3a9c337f2d950..8908a172395f2 100644
--- a/server/src/main/java/org/opensearch/common/transport/BoundTransportAddress.java
+++ b/libs/core/src/main/java/org/opensearch/core/common/transport/BoundTransportAddress.java
@@ -30,12 +30,12 @@
* GitHub history for details.
*/
-package org.opensearch.common.transport;
+package org.opensearch.core.common.transport;
+import org.opensearch.common.network.InetAddresses;
import org.opensearch.core.common.io.stream.StreamInput;
import org.opensearch.core.common.io.stream.StreamOutput;
import org.opensearch.core.common.io.stream.Writeable;
-import org.opensearch.common.network.InetAddresses;
import java.io.IOException;
diff --git a/server/src/main/java/org/opensearch/common/transport/TransportAddress.java b/libs/core/src/main/java/org/opensearch/core/common/transport/TransportAddress.java
similarity index 99%
rename from server/src/main/java/org/opensearch/common/transport/TransportAddress.java
rename to libs/core/src/main/java/org/opensearch/core/common/transport/TransportAddress.java
index 737e8f3496143..1a853877ed0b9 100644
--- a/server/src/main/java/org/opensearch/common/transport/TransportAddress.java
+++ b/libs/core/src/main/java/org/opensearch/core/common/transport/TransportAddress.java
@@ -30,12 +30,12 @@
* GitHub history for details.
*/
-package org.opensearch.common.transport;
+package org.opensearch.core.common.transport;
+import org.opensearch.common.network.NetworkAddress;
import org.opensearch.core.common.io.stream.StreamInput;
import org.opensearch.core.common.io.stream.StreamOutput;
import org.opensearch.core.common.io.stream.Writeable;
-import org.opensearch.common.network.NetworkAddress;
import org.opensearch.core.xcontent.ToXContentFragment;
import org.opensearch.core.xcontent.XContentBuilder;
diff --git a/libs/core/src/main/java/org/opensearch/core/common/transport/package-info.java b/libs/core/src/main/java/org/opensearch/core/common/transport/package-info.java
new file mode 100644
index 0000000000000..21d2abfce958a
--- /dev/null
+++ b/libs/core/src/main/java/org/opensearch/core/common/transport/package-info.java
@@ -0,0 +1,10 @@
+/*
+ * SPDX-License-Identifier: Apache-2.0
+ *
+ * The OpenSearch Contributors require contributions made to
+ * this file be licensed under the Apache-2.0 license or a
+ * compatible open source license.
+ */
+
+/** Common / Base Transport classes used to implement the OpenSearch transport layer */
+package org.opensearch.core.common.transport;
diff --git a/server/src/main/java/org/opensearch/common/unit/ByteSizeUnit.java b/libs/core/src/main/java/org/opensearch/core/common/unit/ByteSizeUnit.java
similarity index 99%
rename from server/src/main/java/org/opensearch/common/unit/ByteSizeUnit.java
rename to libs/core/src/main/java/org/opensearch/core/common/unit/ByteSizeUnit.java
index b95e39feb8fac..68486dd7c975f 100644
--- a/server/src/main/java/org/opensearch/common/unit/ByteSizeUnit.java
+++ b/libs/core/src/main/java/org/opensearch/core/common/unit/ByteSizeUnit.java
@@ -30,7 +30,7 @@
* GitHub history for details.
*/
-package org.opensearch.common.unit;
+package org.opensearch.core.common.unit;
import org.opensearch.core.common.io.stream.StreamInput;
import org.opensearch.core.common.io.stream.StreamOutput;
diff --git a/server/src/main/java/org/opensearch/common/unit/ByteSizeValue.java b/libs/core/src/main/java/org/opensearch/core/common/unit/ByteSizeValue.java
similarity index 88%
rename from server/src/main/java/org/opensearch/common/unit/ByteSizeValue.java
rename to libs/core/src/main/java/org/opensearch/core/common/unit/ByteSizeValue.java
index a123c79464727..529501226f5e3 100644
--- a/server/src/main/java/org/opensearch/common/unit/ByteSizeValue.java
+++ b/libs/core/src/main/java/org/opensearch/core/common/unit/ByteSizeValue.java
@@ -30,16 +30,13 @@
* GitHub history for details.
*/
-package org.opensearch.common.unit;
+package org.opensearch.core.common.unit;
import org.opensearch.OpenSearchParseException;
-import org.opensearch.common.Strings;
+import org.opensearch.core.common.Strings;
import org.opensearch.core.common.io.stream.StreamInput;
import org.opensearch.core.common.io.stream.StreamOutput;
import org.opensearch.core.common.io.stream.Writeable;
-import org.opensearch.common.logging.DeprecationLogger;
-import org.opensearch.common.logging.LogConfigurator;
-import org.opensearch.common.network.NetworkService;
import org.opensearch.core.xcontent.ToXContentFragment;
import org.opensearch.core.xcontent.XContentBuilder;
@@ -54,17 +51,6 @@
*/
public class ByteSizeValue implements Writeable, Comparable, ToXContentFragment {
- /**
- * We have to lazy initialize the deprecation logger as otherwise a static logger here would be constructed before logging is configured
- * leading to a runtime failure (see {@link LogConfigurator#checkErrorListener()} ). The premature construction would come from any
- * {@link ByteSizeValue} object constructed in, for example, settings in {@link NetworkService}.
- *
- * @opensearch.internal
- */
- static class DeprecationLoggerHolder {
- static DeprecationLogger deprecationLogger = DeprecationLogger.getLogger(ByteSizeValue.class);
- }
-
public static final ByteSizeValue ZERO = new ByteSizeValue(0, ByteSizeUnit.BYTES);
private final long size;
@@ -262,14 +248,14 @@ private static ByteSizeValue parse(
return new ByteSizeValue(Long.parseLong(s), unit);
} catch (final NumberFormatException e) {
try {
- final double doubleValue = Double.parseDouble(s);
- DeprecationLoggerHolder.deprecationLogger.deprecate(
- "fractional_byte_values",
- "Fractional bytes values are deprecated. Use non-fractional bytes values instead: [{}] found for setting [{}]",
+ Double.parseDouble(s);
+ throw new OpenSearchParseException(
+ "Failed to parse bytes value [{}]. Fractional bytes values have been "
+ + "deprecated since Legacy 6.2. Use non-fractional bytes values instead: found for setting [{}]",
+ e,
initialInput,
settingName
);
- return new ByteSizeValue((long) (doubleValue * unit.toBytes(1)));
} catch (final NumberFormatException ignored) {
throw new OpenSearchParseException("failed to parse [{}]", e, initialInput);
}
diff --git a/libs/core/src/main/java/org/opensearch/core/common/unit/package-info.java b/libs/core/src/main/java/org/opensearch/core/common/unit/package-info.java
new file mode 100644
index 0000000000000..79b5dcdcba3b6
--- /dev/null
+++ b/libs/core/src/main/java/org/opensearch/core/common/unit/package-info.java
@@ -0,0 +1,16 @@
+/*
+ * SPDX-License-Identifier: Apache-2.0
+ *
+ * The OpenSearch Contributors require contributions made to
+ * this file be licensed under the Apache-2.0 license or a
+ * compatible open source license.
+ */
+
+/**
+ * Common units of measurement used by the core library. These units of measurement classes exist
+ * in the core because they depend on core functionality beyond the common library (e.g., serializable).
+ *
+ * @opensearch.api
+ * @opensearch.experimental
+ */
+package org.opensearch.core.common.unit;
diff --git a/server/src/main/java/org/opensearch/common/util/CollectionUtils.java b/libs/core/src/main/java/org/opensearch/core/common/util/CollectionUtils.java
similarity index 97%
rename from server/src/main/java/org/opensearch/common/util/CollectionUtils.java
rename to libs/core/src/main/java/org/opensearch/core/common/util/CollectionUtils.java
index 2037b2e46488f..e8dd31fcf1869 100644
--- a/server/src/main/java/org/opensearch/common/util/CollectionUtils.java
+++ b/libs/core/src/main/java/org/opensearch/core/common/util/CollectionUtils.java
@@ -30,7 +30,7 @@
* GitHub history for details.
*/
-package org.opensearch.common.util;
+package org.opensearch.core.common.util;
import org.opensearch.common.collect.Iterators;
import org.opensearch.core.common.Strings;
@@ -134,6 +134,7 @@ public static void ensureNoSelfReferences(Object value, String messageHint) {
}
}
+ @SuppressWarnings("unchecked")
private static Iterable> convert(Object value) {
if (value == null) {
return null;
@@ -217,6 +218,7 @@ public int size() {
}
}
+ @SuppressWarnings("unchecked")
public static ArrayList iterableAsArrayList(Iterable extends E> elements) {
if (elements == null) {
throw new NullPointerException("elements");
@@ -232,6 +234,7 @@ public static ArrayList iterableAsArrayList(Iterable extends E> element
}
}
+ @SuppressWarnings("unchecked")
public static ArrayList arrayAsArrayList(E... elements) {
if (elements == null) {
throw new NullPointerException("elements");
@@ -239,6 +242,7 @@ public static ArrayList arrayAsArrayList(E... elements) {
return new ArrayList<>(Arrays.asList(elements));
}
+ @SuppressWarnings("unchecked")
public static ArrayList asArrayList(E first, E... other) {
if (other == null) {
throw new NullPointerException("other");
@@ -249,6 +253,7 @@ public static ArrayList asArrayList(E first, E... other) {
return list;
}
+ @SuppressWarnings("unchecked")
public static ArrayList asArrayList(E first, E second, E... other) {
if (other == null) {
throw new NullPointerException("other");
diff --git a/server/src/main/java/org/opensearch/indices/breaker/AllCircuitBreakerStats.java b/libs/core/src/main/java/org/opensearch/core/indices/breaker/AllCircuitBreakerStats.java
similarity index 98%
rename from server/src/main/java/org/opensearch/indices/breaker/AllCircuitBreakerStats.java
rename to libs/core/src/main/java/org/opensearch/core/indices/breaker/AllCircuitBreakerStats.java
index 83f3f9532948f..ab887acb85a87 100644
--- a/server/src/main/java/org/opensearch/indices/breaker/AllCircuitBreakerStats.java
+++ b/libs/core/src/main/java/org/opensearch/core/indices/breaker/AllCircuitBreakerStats.java
@@ -30,7 +30,7 @@
* GitHub history for details.
*/
-package org.opensearch.indices.breaker;
+package org.opensearch.core.indices.breaker;
import org.opensearch.core.common.io.stream.StreamInput;
import org.opensearch.core.common.io.stream.StreamOutput;
diff --git a/server/src/main/java/org/opensearch/indices/breaker/CircuitBreakerService.java b/libs/core/src/main/java/org/opensearch/core/indices/breaker/CircuitBreakerService.java
similarity index 92%
rename from server/src/main/java/org/opensearch/indices/breaker/CircuitBreakerService.java
rename to libs/core/src/main/java/org/opensearch/core/indices/breaker/CircuitBreakerService.java
index b5cc1a6b1c6c5..ee9c94f432a36 100644
--- a/server/src/main/java/org/opensearch/indices/breaker/CircuitBreakerService.java
+++ b/libs/core/src/main/java/org/opensearch/core/indices/breaker/CircuitBreakerService.java
@@ -30,12 +30,12 @@
* GitHub history for details.
*/
-package org.opensearch.indices.breaker;
+package org.opensearch.core.indices.breaker;
import org.apache.logging.log4j.LogManager;
import org.apache.logging.log4j.Logger;
-import org.opensearch.common.breaker.CircuitBreaker;
-import org.opensearch.common.component.AbstractLifecycleComponent;
+import org.opensearch.common.lifecycle.AbstractLifecycleComponent;
+import org.opensearch.core.common.breaker.CircuitBreaker;
/**
* Interface for Circuit Breaker services, which provide breakers to classes
diff --git a/server/src/main/java/org/opensearch/indices/breaker/CircuitBreakerStats.java b/libs/core/src/main/java/org/opensearch/core/indices/breaker/CircuitBreakerStats.java
similarity index 97%
rename from server/src/main/java/org/opensearch/indices/breaker/CircuitBreakerStats.java
rename to libs/core/src/main/java/org/opensearch/core/indices/breaker/CircuitBreakerStats.java
index 94e63acd10648..0e53a38908a96 100644
--- a/server/src/main/java/org/opensearch/indices/breaker/CircuitBreakerStats.java
+++ b/libs/core/src/main/java/org/opensearch/core/indices/breaker/CircuitBreakerStats.java
@@ -30,12 +30,12 @@
* GitHub history for details.
*/
-package org.opensearch.indices.breaker;
+package org.opensearch.core.indices.breaker;
import org.opensearch.core.common.io.stream.StreamInput;
import org.opensearch.core.common.io.stream.StreamOutput;
import org.opensearch.core.common.io.stream.Writeable;
-import org.opensearch.common.unit.ByteSizeValue;
+import org.opensearch.core.common.unit.ByteSizeValue;
import org.opensearch.core.xcontent.ToXContentObject;
import org.opensearch.core.xcontent.XContentBuilder;
diff --git a/server/src/main/java/org/opensearch/indices/breaker/NoneCircuitBreakerService.java b/libs/core/src/main/java/org/opensearch/core/indices/breaker/NoneCircuitBreakerService.java
similarity index 91%
rename from server/src/main/java/org/opensearch/indices/breaker/NoneCircuitBreakerService.java
rename to libs/core/src/main/java/org/opensearch/core/indices/breaker/NoneCircuitBreakerService.java
index bcb47b48a5f14..4095fd32b6d3c 100644
--- a/server/src/main/java/org/opensearch/indices/breaker/NoneCircuitBreakerService.java
+++ b/libs/core/src/main/java/org/opensearch/core/indices/breaker/NoneCircuitBreakerService.java
@@ -30,10 +30,10 @@
* GitHub history for details.
*/
-package org.opensearch.indices.breaker;
+package org.opensearch.core.indices.breaker;
-import org.opensearch.common.breaker.CircuitBreaker;
-import org.opensearch.common.breaker.NoopCircuitBreaker;
+import org.opensearch.core.common.breaker.CircuitBreaker;
+import org.opensearch.core.common.breaker.NoopCircuitBreaker;
/**
* Class that returns a breaker that never breaks
diff --git a/libs/core/src/main/java/org/opensearch/core/indices/breaker/package-info.java b/libs/core/src/main/java/org/opensearch/core/indices/breaker/package-info.java
new file mode 100644
index 0000000000000..a98f9ab1d9f1e
--- /dev/null
+++ b/libs/core/src/main/java/org/opensearch/core/indices/breaker/package-info.java
@@ -0,0 +1,15 @@
+/*
+ * SPDX-License-Identifier: Apache-2.0
+ *
+ * The OpenSearch Contributors require contributions made to
+ * this file be licensed under the Apache-2.0 license or a
+ * compatible open source license.
+ */
+
+/**
+ * Top Level core circuit breaker implementation
+ *
+ * @opensearch.internal
+ * @opensearch.experimental
+ */
+package org.opensearch.core.indices.breaker;
diff --git a/libs/core/src/main/java/org/opensearch/core/indices/package-info.java b/libs/core/src/main/java/org/opensearch/core/indices/package-info.java
new file mode 100644
index 0000000000000..c80edf3d2f01a
--- /dev/null
+++ b/libs/core/src/main/java/org/opensearch/core/indices/package-info.java
@@ -0,0 +1,12 @@
+/*
+ * SPDX-License-Identifier: Apache-2.0
+ *
+ * The OpenSearch Contributors require contributions made to
+ * this file be licensed under the Apache-2.0 license or a
+ * compatible open source license.
+ */
+
+/**
+ * Top Level Package for implementations used across indexes
+ */
+package org.opensearch.core.indices;
diff --git a/server/src/main/java/org/opensearch/transport/TransportMessage.java b/libs/core/src/main/java/org/opensearch/core/transport/TransportMessage.java
similarity index 94%
rename from server/src/main/java/org/opensearch/transport/TransportMessage.java
rename to libs/core/src/main/java/org/opensearch/core/transport/TransportMessage.java
index 78216047d530e..941babda40aa3 100644
--- a/server/src/main/java/org/opensearch/transport/TransportMessage.java
+++ b/libs/core/src/main/java/org/opensearch/core/transport/TransportMessage.java
@@ -30,11 +30,11 @@
* GitHub history for details.
*/
-package org.opensearch.transport;
+package org.opensearch.core.transport;
import org.opensearch.core.common.io.stream.StreamInput;
import org.opensearch.core.common.io.stream.Writeable;
-import org.opensearch.common.transport.TransportAddress;
+import org.opensearch.core.common.transport.TransportAddress;
/**
* Message over the transport interface
diff --git a/server/src/main/java/org/opensearch/transport/TransportResponse.java b/libs/core/src/main/java/org/opensearch/core/transport/TransportResponse.java
similarity index 98%
rename from server/src/main/java/org/opensearch/transport/TransportResponse.java
rename to libs/core/src/main/java/org/opensearch/core/transport/TransportResponse.java
index 73713fa1447a8..038069e93a51b 100644
--- a/server/src/main/java/org/opensearch/transport/TransportResponse.java
+++ b/libs/core/src/main/java/org/opensearch/core/transport/TransportResponse.java
@@ -30,7 +30,7 @@
* GitHub history for details.
*/
-package org.opensearch.transport;
+package org.opensearch.core.transport;
import org.opensearch.core.common.io.stream.StreamInput;
import org.opensearch.core.common.io.stream.StreamOutput;
diff --git a/libs/core/src/main/java/org/opensearch/core/transport/package-info.java b/libs/core/src/main/java/org/opensearch/core/transport/package-info.java
new file mode 100644
index 0000000000000..91db839f40305
--- /dev/null
+++ b/libs/core/src/main/java/org/opensearch/core/transport/package-info.java
@@ -0,0 +1,10 @@
+/*
+ * SPDX-License-Identifier: Apache-2.0
+ *
+ * The OpenSearch Contributors require contributions made to
+ * this file be licensed under the Apache-2.0 license or a
+ * compatible open source license.
+ */
+
+/** Core Transport Layer classes used across the OpenSearch core */
+package org.opensearch.core.transport;
diff --git a/libs/core/src/main/java/org/opensearch/core/xcontent/MediaType.java b/libs/core/src/main/java/org/opensearch/core/xcontent/MediaType.java
index c1409e551e47d..8e3c115c7ba58 100644
--- a/libs/core/src/main/java/org/opensearch/core/xcontent/MediaType.java
+++ b/libs/core/src/main/java/org/opensearch/core/xcontent/MediaType.java
@@ -34,6 +34,8 @@
import org.opensearch.core.common.io.stream.Writeable;
+import java.io.IOException;
+import java.io.OutputStream;
import java.util.Locale;
/**
@@ -69,12 +71,20 @@ default String typeWithSubtype() {
XContent xContent();
+ boolean detectedXContent(final byte[] bytes, int offset, int length);
+
+ boolean detectedXContent(final CharSequence content, final int length);
+
default String mediaType() {
return mediaTypeWithoutParameters();
}
String mediaTypeWithoutParameters();
+ XContentBuilder contentBuilder() throws IOException;
+
+ XContentBuilder contentBuilder(final OutputStream os) throws IOException;
+
/**
* Accepts a format string, which is most of the time is equivalent to {@link MediaType#subtype()}
* and attempts to match the value to an {@link MediaType}.
@@ -82,7 +92,7 @@ default String mediaType() {
* This method will return {@code null} if no match is found
*/
static MediaType fromFormat(String mediaType) {
- return MediaTypeParserRegistry.fromFormat(mediaType);
+ return MediaTypeRegistry.fromFormat(mediaType);
}
/**
@@ -93,7 +103,7 @@ static MediaType fromFormat(String mediaType) {
*/
static MediaType fromMediaType(String mediaTypeHeaderValue) {
mediaTypeHeaderValue = removeVersionInMediaType(mediaTypeHeaderValue);
- return MediaTypeParserRegistry.fromMediaType(mediaTypeHeaderValue);
+ return MediaTypeRegistry.fromMediaType(mediaTypeHeaderValue);
}
/**
diff --git a/libs/core/src/main/java/org/opensearch/core/xcontent/MediaTypeParserRegistry.java b/libs/core/src/main/java/org/opensearch/core/xcontent/MediaTypeParserRegistry.java
deleted file mode 100644
index 62a26b4458b09..0000000000000
--- a/libs/core/src/main/java/org/opensearch/core/xcontent/MediaTypeParserRegistry.java
+++ /dev/null
@@ -1,166 +0,0 @@
-/*
- * SPDX-License-Identifier: Apache-2.0
- *
- * The OpenSearch Contributors require contributions made to
- * this file be licensed under the Apache-2.0 license or a
- * compatible open source license.
- */
-
-/*
- * Licensed to Elasticsearch under one or more contributor
- * license agreements. See the NOTICE file distributed with
- * this work for additional information regarding copyright
- * ownership. Elasticsearch licenses this file to you under
- * the Apache License, Version 2.0 (the "License"); you may
- * not use this file except in compliance with the License.
- * You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing,
- * software distributed under the License is distributed on an
- * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
- * KIND, either express or implied. See the License for the
- * specific language governing permissions and limitations
- * under the License.
- */
-
-/*
- * Modifications Copyright OpenSearch Contributors. See
- * GitHub history for details.
- */
-
-package org.opensearch.core.xcontent;
-
-import java.util.HashMap;
-import java.util.Locale;
-import java.util.Map;
-
-/**
- * Parses supported internet media types
- *
- * @opensearch.internal
- */
-public final class MediaTypeParserRegistry {
- private static Map formatToMediaType = Map.of();
- private static Map typeWithSubtypeToMediaType = Map.of();
-
- // Default mediaType singleton
- private static MediaType DEFAULT_MEDIA_TYPE;
-
- public static void register(MediaType[] acceptedMediaTypes, Map additionalMediaTypes) {
- // ensures the map is not overwritten:
- Map typeMap = new HashMap<>(typeWithSubtypeToMediaType);
- Map formatMap = new HashMap<>(formatToMediaType);
- for (MediaType mediaType : acceptedMediaTypes) {
- if (formatMap.containsKey(mediaType.format())) {
- throw new IllegalArgumentException("unable to register mediaType: [" + mediaType.format() + "]. Type already exists.");
- }
- typeMap.put(mediaType.typeWithSubtype(), mediaType);
- formatMap.put(mediaType.format(), mediaType);
- }
- for (Map.Entry entry : additionalMediaTypes.entrySet()) {
- String typeWithSubtype = entry.getKey().toLowerCase(Locale.ROOT);
- if (typeMap.containsKey(typeWithSubtype)) {
- throw new IllegalArgumentException(
- "unable to register mediaType: ["
- + entry.getKey()
- + "]. "
- + "Type already exists and is mapped to: [."
- + entry.getValue().format()
- + "]"
- );
- }
-
- MediaType mediaType = entry.getValue();
- typeMap.put(typeWithSubtype, mediaType);
- formatMap.putIfAbsent(mediaType.format(), mediaType); // ignore if the additional type mapping already exists
- }
-
- formatToMediaType = Map.copyOf(formatMap);
- typeWithSubtypeToMediaType = Map.copyOf(typeMap);
- }
-
- public static MediaType fromMediaType(String mediaType) {
- ParsedMediaType parsedMediaType = parseMediaType(mediaType);
- return parsedMediaType != null ? parsedMediaType.getMediaType() : null;
- }
-
- public static MediaType fromFormat(String format) {
- if (format == null) {
- return null;
- }
- return formatToMediaType.get(format.toLowerCase(Locale.ROOT));
- }
-
- /**
- * parsing media type that follows https://tools.ietf.org/html/rfc7231#section-3.1.1.1
- * @param headerValue a header value from Accept or Content-Type
- * @return a parsed media-type
- */
- public static ParsedMediaType parseMediaType(String headerValue) {
- if (headerValue != null) {
- String[] split = headerValue.toLowerCase(Locale.ROOT).split(";");
-
- String[] typeSubtype = split[0].trim().split("/");
- if (typeSubtype.length == 2) {
- String type = typeSubtype[0];
- String subtype = typeSubtype[1];
- MediaType mediaType = typeWithSubtypeToMediaType.get(type + "/" + subtype);
- if (mediaType != null) {
- Map parameters = new HashMap<>();
- for (int i = 1; i < split.length; i++) {
- // spaces are allowed between parameters, but not between '=' sign
- String[] keyValueParam = split[i].trim().split("=");
- if (keyValueParam.length != 2 || hasSpaces(keyValueParam[0]) || hasSpaces(keyValueParam[1])) {
- return null;
- }
- parameters.put(keyValueParam[0], keyValueParam[1]);
- }
- return new ParsedMediaType(mediaType, parameters);
- }
- }
-
- }
- return null;
- }
-
- private static boolean hasSpaces(String s) {
- return s.trim().equals(s) == false;
- }
-
- /**
- * A media type object that contains all the information provided on a Content-Type or Accept header
- */
- public static class ParsedMediaType {
- private final Map parameters;
- private final MediaType mediaType;
-
- public ParsedMediaType(MediaType mediaType, Map parameters) {
- this.parameters = parameters;
- this.mediaType = mediaType;
- }
-
- public MediaType getMediaType() {
- return mediaType;
- }
-
- public Map getParameters() {
- return parameters;
- }
- }
-
- public static void setDefaultMediaType(final MediaType mediaType) {
- if (DEFAULT_MEDIA_TYPE != null) {
- throw new RuntimeException(
- "unable to reset the default media type from current default [" + DEFAULT_MEDIA_TYPE + "] to [" + mediaType + "]"
- );
- } else {
- DEFAULT_MEDIA_TYPE = mediaType;
- }
- }
-
- public static MediaType getDefaultMediaType() {
- return DEFAULT_MEDIA_TYPE;
- }
-}
diff --git a/libs/core/src/main/java/org/opensearch/core/xcontent/MediaTypeRegistry.java b/libs/core/src/main/java/org/opensearch/core/xcontent/MediaTypeRegistry.java
new file mode 100644
index 0000000000000..064f154602caf
--- /dev/null
+++ b/libs/core/src/main/java/org/opensearch/core/xcontent/MediaTypeRegistry.java
@@ -0,0 +1,406 @@
+/*
+ * SPDX-License-Identifier: Apache-2.0
+ *
+ * The OpenSearch Contributors require contributions made to
+ * this file be licensed under the Apache-2.0 license or a
+ * compatible open source license.
+ */
+
+/*
+ * Licensed to Elasticsearch under one or more contributor
+ * license agreements. See the NOTICE file distributed with
+ * this work for additional information regarding copyright
+ * ownership. Elasticsearch licenses this file to you under
+ * the Apache License, Version 2.0 (the "License"); you may
+ * not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied. See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+
+/*
+ * Modifications Copyright OpenSearch Contributors. See
+ * GitHub history for details.
+ */
+
+package org.opensearch.core.xcontent;
+
+import org.opensearch.core.common.bytes.BytesArray;
+import org.opensearch.core.common.bytes.BytesReference;
+import org.opensearch.core.xcontent.spi.MediaTypeProvider;
+
+import java.io.IOException;
+import java.io.InputStream;
+import java.io.OutputStream;
+import java.io.UncheckedIOException;
+
+import java.util.ArrayList;
+import java.util.HashMap;
+import java.util.List;
+import java.util.Locale;
+import java.util.Map;
+import java.util.ServiceLoader;
+import java.util.stream.Collectors;
+import java.util.stream.Stream;
+
+/**
+ * Parses supported internet media types
+ *
+ * @opensearch.internal
+ */
+public final class MediaTypeRegistry {
+ private static Map formatToMediaType = Map.of();
+ private static Map typeWithSubtypeToMediaType = Map.of();
+
+ // Default mediaType singleton
+ private static MediaType DEFAULT_MEDIA_TYPE;
+ public static final int GUESS_HEADER_LENGTH = 20;
+
+ // JSON is a core type, so we create a static instance for implementations that require JSON format (e.g., tests)
+ // todo we should explore moving the concrete JSON implementation from the xcontent library to core
+ public static final MediaType JSON;
+
+ static {
+ List mediaTypes = new ArrayList<>();
+ Map amt = new HashMap<>();
+ for (MediaTypeProvider provider : ServiceLoader.load(MediaTypeProvider.class, MediaTypeProvider.class.getClassLoader())) {
+ mediaTypes.addAll(provider.getMediaTypes());
+ amt = Stream.of(amt, provider.getAdditionalMediaTypes())
+ .flatMap(map -> map.entrySet().stream())
+ .collect(Collectors.toUnmodifiableMap(Map.Entry::getKey, Map.Entry::getValue));
+ }
+ register(mediaTypes.toArray(new MediaType[0]), amt);
+ JSON = fromMediaType("application/json");
+ setDefaultMediaType(JSON);
+ }
+
+ private static void register(MediaType[] acceptedMediaTypes, Map additionalMediaTypes) {
+ // ensures the map is not overwritten:
+ Map typeMap = new HashMap<>(typeWithSubtypeToMediaType);
+ Map formatMap = new HashMap<>(formatToMediaType);
+ for (MediaType mediaType : acceptedMediaTypes) {
+ if (formatMap.containsKey(mediaType.format())) {
+ throw new IllegalArgumentException("unable to register mediaType: [" + mediaType.format() + "]. Type already exists.");
+ }
+ typeMap.put(mediaType.typeWithSubtype(), mediaType);
+ formatMap.put(mediaType.format(), mediaType);
+ }
+ for (Map.Entry entry : additionalMediaTypes.entrySet()) {
+ String typeWithSubtype = entry.getKey().toLowerCase(Locale.ROOT);
+ if (typeMap.containsKey(typeWithSubtype)) {
+ throw new IllegalArgumentException(
+ "unable to register mediaType: ["
+ + entry.getKey()
+ + "]. "
+ + "Type already exists and is mapped to: [."
+ + entry.getValue().format()
+ + "]"
+ );
+ }
+
+ MediaType mediaType = entry.getValue();
+ typeMap.put(typeWithSubtype, mediaType);
+ formatMap.putIfAbsent(mediaType.format(), mediaType); // ignore if the additional type mapping already exists
+ }
+
+ formatToMediaType = Map.copyOf(formatMap);
+ typeWithSubtypeToMediaType = Map.copyOf(typeMap);
+ }
+
+ public static MediaType fromMediaType(String mediaType) {
+ ParsedMediaType parsedMediaType = parseMediaType(mediaType);
+ return parsedMediaType != null ? parsedMediaType.getMediaType() : null;
+ }
+
+ public static MediaType fromFormat(String format) {
+ if (format == null) {
+ return null;
+ }
+ return formatToMediaType.get(format.toLowerCase(Locale.ROOT));
+ }
+
+ /**
+ * Returns a binary content builder for the provided content type.
+ */
+ public static XContentBuilder contentBuilder(MediaType type) throws IOException {
+ for (var mediaType : formatToMediaType.values()) {
+ if (type == mediaType) {
+ return type.contentBuilder();
+ }
+ }
+ throw new IllegalArgumentException("No matching content type for " + type);
+ }
+
+ public static XContentBuilder contentBuilder(MediaType type, OutputStream outputStream) throws IOException {
+ for (var mediaType : formatToMediaType.values()) {
+ if (type == mediaType) {
+ return type.contentBuilder(outputStream);
+ }
+ }
+ throw new IllegalArgumentException("No matching content type for " + type);
+ }
+
+ /**
+ * Guesses the content type based on the provided bytes and returns the corresponding {@link MediaType}
+ *
+ * @deprecated the content type should not be guessed except for few cases where we effectively don't know the content type.
+ * The REST layer should move to reading the Content-Type header instead. There are other places where auto-detection may be needed.
+ * This method is deprecated to prevent usages of it from spreading further without specific reasons.
+ */
+ @Deprecated
+ public static MediaType xContent(final byte[] data, int offset, int length) {
+ MediaType type = mediaTypeFromBytes(data, offset, length);
+ if (type == null) {
+ throw new XContentParseException("Failed to derive xcontent");
+ }
+ return type;
+ }
+
+ /**
+ * Guesses the content type based on the provided bytes and returns the corresponding {@link MediaType}
+ *
+ * @deprecated the content type should not be guessed except for few cases where we effectively don't know the content type.
+ * The REST layer should move to reading the Content-Type header instead. There are other places where auto-detection may be needed.
+ * This method is deprecated to prevent usages of it from spreading further without specific reasons.
+ */
+ @Deprecated
+ public static MediaType xContent(byte[] data) {
+ return xContent(data, 0, data.length);
+ }
+
+ /**
+ * Guesses the content type based on the provided char sequence and returns the corresponding {@link MediaType}
+ *
+ * @deprecated the content type should not be guessed except for few cases where we effectively don't know the content type.
+ * The REST layer should move to reading the Content-Type header instead. There are other places where auto-detection may be needed.
+ * This method is deprecated to prevent usages of it from spreading further without specific reasons.
+ */
+ @Deprecated
+ public static MediaType xContent(CharSequence content) {
+ MediaType type = xContentType(content);
+ if (type == null) {
+ throw new XContentParseException("Failed to derive xcontent");
+ }
+ return type;
+ }
+
+ /**
+ * Guesses the content type based on the provided char sequence.
+ *
+ * @deprecated the content type should not be guessed except for few cases where we effectively don't know the content type.
+ * The REST layer should move to reading the Content-Type header instead. There are other places where auto-detection may be needed.
+ * This method is deprecated to prevent usages of it from spreading further without specific reasons.
+ */
+ @Deprecated
+ public static MediaType xContentType(CharSequence content) {
+ int length = content.length() < GUESS_HEADER_LENGTH ? content.length() : GUESS_HEADER_LENGTH;
+ if (length == 0) {
+ return null;
+ }
+ for (var mediaType : formatToMediaType.values()) {
+ if (mediaType.detectedXContent(content, length)) {
+ return mediaType;
+ }
+ }
+
+ // fallback for json
+ for (int i = 0; i < length; i++) {
+ char c = content.charAt(i);
+ if (c == '{') {
+ return MediaType.fromMediaType("application/json");
+ }
+ if (Character.isWhitespace(c) == false) {
+ break;
+ }
+ }
+ return null;
+ }
+
+ /**
+ * Guesses the content type based on the provided input stream without consuming it.
+ *
+ * @deprecated the content type should not be guessed except for few cases where we effectively don't know the content type.
+ * The REST layer should move to reading the Content-Type header instead. There are other places where auto-detection may be needed.
+ * This method is deprecated to prevent usages of it from spreading further without specific reasons.
+ */
+ @Deprecated
+ public static MediaType xContentType(InputStream si) throws IOException {
+ /*
+ * We need to guess the content type. To do this, we look for the first non-whitespace character and then try to guess the content
+ * type on the GUESS_HEADER_LENGTH bytes that follow. We do this in a way that does not modify the initial read position in the
+ * underlying input stream. This is why the input stream must support mark/reset and why we repeatedly mark the read position and
+ * reset.
+ */
+ if (si.markSupported() == false) {
+ throw new IllegalArgumentException("Cannot guess the xcontent type without mark/reset support on " + si.getClass());
+ }
+ si.mark(Integer.MAX_VALUE);
+ try {
+ // scan until we find the first non-whitespace character or the end of the stream
+ int current;
+ do {
+ current = si.read();
+ if (current == -1) {
+ return null;
+ }
+ } while (Character.isWhitespace((char) current));
+ // now guess the content type off the next GUESS_HEADER_LENGTH bytes including the current byte
+ final byte[] firstBytes = new byte[GUESS_HEADER_LENGTH];
+ firstBytes[0] = (byte) current;
+ int read = 1;
+ while (read < GUESS_HEADER_LENGTH) {
+ final int r = si.read(firstBytes, read, GUESS_HEADER_LENGTH - read);
+ if (r == -1) {
+ break;
+ }
+ read += r;
+ }
+ return mediaTypeFromBytes(firstBytes, 0, read);
+ } finally {
+ si.reset();
+ }
+
+ }
+
+ /**
+ * Guesses the content type based on the provided bytes.
+ *
+ * @deprecated the content type should not be guessed except for few cases where we effectively don't know the content type.
+ * The REST layer should move to reading the Content-Type header instead. There are other places where auto-detection may be needed.
+ * This method is deprecated to prevent usages of it from spreading further without specific reasons.
+ */
+ @Deprecated
+ public static MediaType xContentType(BytesReference bytes) {
+ if (bytes instanceof BytesArray) {
+ final BytesArray array = (BytesArray) bytes;
+ return mediaTypeFromBytes(array.array(), array.offset(), array.length());
+ }
+ try {
+ final InputStream inputStream = bytes.streamInput();
+ assert inputStream.markSupported();
+ return xContentType(inputStream);
+ } catch (IOException e) {
+ assert false : "Should not happen, we're just reading bytes from memory";
+ throw new UncheckedIOException(e);
+ }
+ }
+
+ /**
+ * Guesses the content type based on the provided bytes.
+ *
+ * @deprecated the content type should not be guessed except for few cases where we effectively don't know the content type.
+ * The REST layer should move to reading the Content-Type header instead. There are other places where auto-detection may be needed.
+ * This method is deprecated to prevent usages of it from spreading further without specific reasons.
+ */
+ @Deprecated
+ public static MediaType mediaTypeFromBytes(final byte[] data, int offset, int length) {
+ int totalLength = data.length;
+ if (totalLength == 0 || length == 0) {
+ return null;
+ } else if ((offset + length) > totalLength) {
+ return null;
+ }
+ for (var mediaType : formatToMediaType.values()) {
+ if (mediaType.detectedXContent(data, offset, length)) {
+ return mediaType;
+ }
+ }
+
+ // a last chance for JSON
+ int jsonStart = 0;
+ // JSON may be preceded by UTF-8 BOM
+ if (length > 3 && data[offset] == (byte) 0xEF && data[offset + 1] == (byte) 0xBB && data[offset + 2] == (byte) 0xBF) {
+ jsonStart = 3;
+ }
+
+ for (int i = jsonStart; i < length; i++) {
+ byte b = data[offset + i];
+ if (b == '{') {
+ return fromMediaType("application/json");
+ }
+ if (Character.isWhitespace(b) == false) {
+ break;
+ }
+ }
+
+ return null;
+ }
+
+ /**
+ * Parses a media type that follows https://tools.ietf.org/html/rfc7231#section-3.1.1.1
+ * @param headerValue a header value from Accept or Content-Type
+ * @return a parsed media-type
+ */
+ public static ParsedMediaType parseMediaType(String headerValue) {
+ if (headerValue != null) {
+ String[] split = headerValue.toLowerCase(Locale.ROOT).split(";");
+
+ String[] typeSubtype = split[0].trim().split("/");
+ if (typeSubtype.length == 2) {
+ String type = typeSubtype[0];
+ String subtype = typeSubtype[1];
+ MediaType mediaType = typeWithSubtypeToMediaType.get(type + "/" + subtype);
+ if (mediaType != null) {
+ Map parameters = new HashMap<>();
+ for (int i = 1; i < split.length; i++) {
+ // spaces are allowed between parameters, but not between '=' sign
+ String[] keyValueParam = split[i].trim().split("=");
+ if (keyValueParam.length != 2 || hasSpaces(keyValueParam[0]) || hasSpaces(keyValueParam[1])) {
+ return null;
+ }
+ parameters.put(keyValueParam[0], keyValueParam[1]);
+ }
+ return new ParsedMediaType(mediaType, parameters);
+ }
+ }
+
+ }
+ return null;
+ }
+
+ private static boolean hasSpaces(String s) {
+ return s.trim().equals(s) == false;
+ }
+
+ /**
+ * A media type object that contains all the information provided on a Content-Type or Accept header
+ */
+ public static class ParsedMediaType {
+ private final Map parameters;
+ private final MediaType mediaType;
+
+ public ParsedMediaType(MediaType mediaType, Map parameters) {
+ this.parameters = parameters;
+ this.mediaType = mediaType;
+ }
+
+ public MediaType getMediaType() {
+ return mediaType;
+ }
+
+ public Map getParameters() {
+ return parameters;
+ }
+ }
+
+ private static void setDefaultMediaType(final MediaType mediaType) {
+ if (DEFAULT_MEDIA_TYPE != null) {
+ throw new RuntimeException(
+ "unable to reset the default media type from current default [" + DEFAULT_MEDIA_TYPE + "] to [" + mediaType + "]"
+ );
+ } else {
+ DEFAULT_MEDIA_TYPE = mediaType;
+ }
+ }
+
+ public static MediaType getDefaultMediaType() {
+ return DEFAULT_MEDIA_TYPE;
+ }
+}
diff --git a/libs/core/src/main/java/org/opensearch/core/xcontent/XContentBuilder.java b/libs/core/src/main/java/org/opensearch/core/xcontent/XContentBuilder.java
index 061837d27ed0a..dfd1449ef0e0b 100644
--- a/libs/core/src/main/java/org/opensearch/core/xcontent/XContentBuilder.java
+++ b/libs/core/src/main/java/org/opensearch/core/xcontent/XContentBuilder.java
@@ -32,6 +32,8 @@
package org.opensearch.core.xcontent;
+import org.opensearch.core.common.bytes.BytesReference;
+
import java.io.ByteArrayOutputStream;
import java.io.Closeable;
import java.io.Flushable;
@@ -151,6 +153,14 @@ public static XContentBuilder builder(XContent xContent, Set includes, S
DATE_TRANSFORMERS = Collections.unmodifiableMap(dateTransformers);
}
+ /**
+ * Returns a string representation of the builder (only applicable for text based xcontent).
+ */
+ @Override
+ public String toString() {
+ return BytesReference.bytes(this).utf8ToString();
+ }
+
/**
* The writer interface for the serializable content builder
*
diff --git a/libs/core/src/main/java/org/opensearch/core/xcontent/spi/MediaTypeProvider.java b/libs/core/src/main/java/org/opensearch/core/xcontent/spi/MediaTypeProvider.java
new file mode 100644
index 0000000000000..eeaadc1698df6
--- /dev/null
+++ b/libs/core/src/main/java/org/opensearch/core/xcontent/spi/MediaTypeProvider.java
@@ -0,0 +1,29 @@
+/*
+ * SPDX-License-Identifier: Apache-2.0
+ *
+ * The OpenSearch Contributors require contributions made to
+ * this file be licensed under the Apache-2.0 license or a
+ * compatible open source license.
+ */
+
+package org.opensearch.core.xcontent.spi;
+
+import org.opensearch.core.xcontent.MediaType;
+
+import java.util.List;
+import java.util.Map;
+
+/**
+ * Service Provider Interface for plugins, modules, extensions providing
+ * their own Media Types
+ *
+ * @opensearch.experimental
+ * @opensearch.api
+ */
+public interface MediaTypeProvider {
+ /** Extensions that implement their own concrete {@link MediaType}s provide them through this interface method */
+ List getMediaTypes();
+
+ /** Extensions that implement additional {@link MediaType} aliases provide them through this interface method */
+ Map getAdditionalMediaTypes();
+}
diff --git a/libs/core/src/main/java/org/opensearch/core/xcontent/spi/package-info.java b/libs/core/src/main/java/org/opensearch/core/xcontent/spi/package-info.java
new file mode 100644
index 0000000000000..67ccd981dafa8
--- /dev/null
+++ b/libs/core/src/main/java/org/opensearch/core/xcontent/spi/package-info.java
@@ -0,0 +1,10 @@
+/*
+ * SPDX-License-Identifier: Apache-2.0
+ *
+ * The OpenSearch Contributors require contributions made to
+ * this file be licensed under the Apache-2.0 license or a
+ * compatible open source license.
+ */
+
+/** Service Provider Interface for extensible media types */
+package org.opensearch.core.xcontent.spi;
diff --git a/libs/core/src/test/java/org/opensearch/core/action/support/DefaultShardOperationFailedExceptionTests.java b/libs/core/src/test/java/org/opensearch/core/action/support/DefaultShardOperationFailedExceptionTests.java
index d037b062e95d0..9801e9cbcdb44 100644
--- a/libs/core/src/test/java/org/opensearch/core/action/support/DefaultShardOperationFailedExceptionTests.java
+++ b/libs/core/src/test/java/org/opensearch/core/action/support/DefaultShardOperationFailedExceptionTests.java
@@ -39,7 +39,7 @@
import org.apache.lucene.store.LockObtainFailedException;
import org.opensearch.OpenSearchException;
import org.opensearch.action.support.broadcast.BroadcastShardOperationFailedException;
-import org.opensearch.common.Strings;
+import org.opensearch.core.common.Strings;
import org.opensearch.core.common.bytes.BytesReference;
import org.opensearch.common.io.stream.BytesStreamOutput;
import org.opensearch.core.common.io.stream.StreamInput;
diff --git a/libs/core/src/test/java/org/opensearch/core/common/StringsTests.java b/libs/core/src/test/java/org/opensearch/core/common/StringsTests.java
index 532251e02e685..b79bb6fc89f9e 100644
--- a/libs/core/src/test/java/org/opensearch/core/common/StringsTests.java
+++ b/libs/core/src/test/java/org/opensearch/core/common/StringsTests.java
@@ -9,10 +9,54 @@
package org.opensearch.core.common;
import org.opensearch.common.util.set.Sets;
+import org.opensearch.core.xcontent.MediaTypeRegistry;
+import org.opensearch.core.xcontent.ToXContent;
+import org.opensearch.core.xcontent.ToXContentObject;
import org.opensearch.test.OpenSearchTestCase;
+import java.util.Collections;
+
+import static org.hamcrest.Matchers.containsString;
+import static org.hamcrest.Matchers.is;
+
/** tests for Strings utility class */
public class StringsTests extends OpenSearchTestCase {
+ public void testIsAllOrWildCardString() {
+ assertThat(Strings.isAllOrWildcard("_all"), is(true));
+ assertThat(Strings.isAllOrWildcard("*"), is(true));
+ assertThat(Strings.isAllOrWildcard("foo"), is(false));
+ assertThat(Strings.isAllOrWildcard(""), is(false));
+ assertThat(Strings.isAllOrWildcard((String) null), is(false));
+ }
+
+ public void testSubstring() {
+ assertEquals(null, Strings.substring(null, 0, 1000));
+ assertEquals("foo", Strings.substring("foo", 0, 1000));
+ assertEquals("foo", Strings.substring("foo", 0, 3));
+ assertEquals("oo", Strings.substring("foo", 1, 3));
+ assertEquals("oo", Strings.substring("foo", 1, 100));
+ assertEquals("f", Strings.substring("foo", 0, 1));
+ }
+
+ public void testCleanTruncate() {
+ assertEquals(null, Strings.cleanTruncate(null, 10));
+ assertEquals("foo", Strings.cleanTruncate("foo", 10));
+ assertEquals("foo", Strings.cleanTruncate("foo", 3));
+ // Throws out high surrogates
+ assertEquals("foo", Strings.cleanTruncate("foo\uD83D\uDEAB", 4));
+ // But will keep the whole character
+ assertEquals("foo\uD83D\uDEAB", Strings.cleanTruncate("foo\uD83D\uDEAB", 5));
+ /*
+ * Doesn't take care around combining marks. This example has its
+ * meaning changed because that last codepoint is supposed to combine
+ * backwards into the find "o" and be represented as the "o" with a
+ * circle around it with a slash through it. As in "no 'o's allowed
+ * here.
+ */
+ assertEquals("o", org.opensearch.core.common.Strings.cleanTruncate("o\uD83D\uDEAB", 1));
+ assertEquals("", org.opensearch.core.common.Strings.cleanTruncate("foo", 0));
+ }
+
public void testSplitStringToSet() {
assertEquals(Strings.tokenizeByCommaToSet(null), Sets.newHashSet());
assertEquals(Strings.tokenizeByCommaToSet(""), Sets.newHashSet());
@@ -25,4 +69,49 @@ public void testSplitStringToSet() {
assertEquals(Strings.tokenizeByCommaToSet(" aa "), Sets.newHashSet("aa"));
assertEquals(Strings.tokenizeByCommaToSet(" "), Sets.newHashSet());
}
+
+ public void testToStringToXContent() {
+ final ToXContent toXContent;
+ final boolean error;
+ if (randomBoolean()) {
+ if (randomBoolean()) {
+ error = false;
+ toXContent = (builder, params) -> builder.field("ok", "here").field("catastrophe", "");
+ } else {
+ error = true;
+ toXContent = (builder, params) -> builder.startObject().field("ok", "here").field("catastrophe", "").endObject();
+ }
+ } else {
+ if (randomBoolean()) {
+ error = false;
+ toXContent = (ToXContentObject) (builder, params) -> builder.startObject()
+ .field("ok", "here")
+ .field("catastrophe", "")
+ .endObject();
+ } else {
+ error = true;
+ toXContent = (ToXContentObject) (builder, params) -> builder.field("ok", "here").field("catastrophe", "");
+ }
+ }
+
+ String toString = Strings.toString(MediaTypeRegistry.JSON, toXContent);
+ if (error) {
+ assertThat(toString, containsString("\"error\":\"error building toString out of XContent:"));
+ assertThat(toString, containsString("\"stack_trace\":"));
+ } else {
+ assertThat(toString, containsString("\"ok\":\"here\""));
+ assertThat(toString, containsString("\"catastrophe\":\"\""));
+ }
+ }
+
+ public void testToStringToXContentWithOrWithoutParams() {
+ ToXContent toXContent = (builder, params) -> builder.field("color_from_param", params.param("color", "red"));
+ // Rely on the default value of "color" param when params are not passed
+ assertThat(Strings.toString(MediaTypeRegistry.JSON, toXContent), containsString("\"color_from_param\":\"red\""));
+ // Pass "color" param explicitly
+ assertThat(
+ Strings.toString(MediaTypeRegistry.JSON, toXContent, new ToXContent.MapParams(Collections.singletonMap("color", "blue"))),
+ containsString("\"color_from_param\":\"blue\"")
+ );
+ }
}
diff --git a/server/src/test/java/org/opensearch/common/unit/ByteSizeUnitTests.java b/libs/core/src/test/java/org/opensearch/core/common/unit/ByteSizeUnitTests.java
similarity index 91%
rename from server/src/test/java/org/opensearch/common/unit/ByteSizeUnitTests.java
rename to libs/core/src/test/java/org/opensearch/core/common/unit/ByteSizeUnitTests.java
index c6bcceec99fbd..07b9131602ac3 100644
--- a/server/src/test/java/org/opensearch/common/unit/ByteSizeUnitTests.java
+++ b/libs/core/src/test/java/org/opensearch/core/common/unit/ByteSizeUnitTests.java
@@ -30,7 +30,7 @@
* GitHub history for details.
*/
-package org.opensearch.common.unit;
+package org.opensearch.core.common.unit;
import org.opensearch.common.io.stream.BytesStreamOutput;
import org.opensearch.core.common.io.stream.StreamInput;
@@ -38,12 +38,12 @@
import java.io.IOException;
-import static org.opensearch.common.unit.ByteSizeUnit.BYTES;
-import static org.opensearch.common.unit.ByteSizeUnit.GB;
-import static org.opensearch.common.unit.ByteSizeUnit.KB;
-import static org.opensearch.common.unit.ByteSizeUnit.MB;
-import static org.opensearch.common.unit.ByteSizeUnit.PB;
-import static org.opensearch.common.unit.ByteSizeUnit.TB;
+import static org.opensearch.core.common.unit.ByteSizeUnit.BYTES;
+import static org.opensearch.core.common.unit.ByteSizeUnit.GB;
+import static org.opensearch.core.common.unit.ByteSizeUnit.KB;
+import static org.opensearch.core.common.unit.ByteSizeUnit.MB;
+import static org.opensearch.core.common.unit.ByteSizeUnit.PB;
+import static org.opensearch.core.common.unit.ByteSizeUnit.TB;
import static org.hamcrest.Matchers.containsString;
import static org.hamcrest.Matchers.equalTo;
diff --git a/server/src/test/java/org/opensearch/common/unit/ByteSizeValueTests.java b/libs/core/src/test/java/org/opensearch/core/common/unit/ByteSizeValueTests.java
similarity index 97%
rename from server/src/test/java/org/opensearch/common/unit/ByteSizeValueTests.java
rename to libs/core/src/test/java/org/opensearch/core/common/unit/ByteSizeValueTests.java
index 99c1feb78527f..def1694a72ba4 100644
--- a/server/src/test/java/org/opensearch/common/unit/ByteSizeValueTests.java
+++ b/libs/core/src/test/java/org/opensearch/core/common/unit/ByteSizeValueTests.java
@@ -30,7 +30,7 @@
* GitHub history for details.
*/
-package org.opensearch.common.unit;
+package org.opensearch.core.common.unit;
import org.opensearch.OpenSearchParseException;
import org.opensearch.core.common.io.stream.Writeable.Reader;
@@ -336,12 +336,10 @@ public void testParseInvalidNumber() throws IOException {
public void testParseFractionalNumber() throws IOException {
ByteSizeUnit unit = randomValueOtherThan(ByteSizeUnit.BYTES, () -> randomFrom(ByteSizeUnit.values()));
String fractionalValue = "23.5" + unit.getSuffix();
- ByteSizeValue instance = ByteSizeValue.parseBytesSizeValue(fractionalValue, "test");
- assertEquals(fractionalValue, instance.toString());
- assertWarnings(
- "Fractional bytes values are deprecated. Use non-fractional bytes values instead: ["
- + fractionalValue
- + "] found for setting [test]"
+ // test exception is thrown: fractional byte size values has been deprecated since Legacy 6.2
+ OpenSearchParseException e = expectThrows(
+ OpenSearchParseException.class,
+ () -> ByteSizeValue.parseBytesSizeValue(fractionalValue, "test")
);
}
diff --git a/libs/x-content/src/main/java/org/opensearch/common/xcontent/XContentFactory.java b/libs/x-content/src/main/java/org/opensearch/common/xcontent/XContentFactory.java
index 76a2046dd768a..9f423bc9abad3 100644
--- a/libs/x-content/src/main/java/org/opensearch/common/xcontent/XContentFactory.java
+++ b/libs/x-content/src/main/java/org/opensearch/common/xcontent/XContentFactory.java
@@ -32,19 +32,14 @@
package org.opensearch.common.xcontent;
-import com.fasterxml.jackson.dataformat.cbor.CBORConstants;
-import com.fasterxml.jackson.dataformat.smile.SmileConstants;
-import org.opensearch.common.xcontent.cbor.CborXContent;
import org.opensearch.common.xcontent.json.JsonXContent;
import org.opensearch.common.xcontent.smile.SmileXContent;
import org.opensearch.common.xcontent.yaml.YamlXContent;
-import org.opensearch.core.xcontent.MediaType;
+import org.opensearch.core.xcontent.MediaTypeRegistry;
import org.opensearch.core.xcontent.XContent;
import org.opensearch.core.xcontent.XContentBuilder;
-import org.opensearch.core.xcontent.XContentParseException;
import java.io.IOException;
-import java.io.InputStream;
import java.io.OutputStream;
/**
@@ -52,13 +47,11 @@
*/
public class XContentFactory {
- static final int GUESS_HEADER_LENGTH = 20;
-
/**
* Returns a content builder using JSON format ({@link org.opensearch.common.xcontent.XContentType#JSON}.
*/
public static XContentBuilder jsonBuilder() throws IOException {
- return contentBuilder(XContentType.JSON);
+ return MediaTypeRegistry.contentBuilder(XContentType.JSON);
}
/**
@@ -72,7 +65,7 @@ public static XContentBuilder jsonBuilder(OutputStream os) throws IOException {
* Returns a content builder using SMILE format ({@link org.opensearch.common.xcontent.XContentType#SMILE}.
*/
public static XContentBuilder smileBuilder() throws IOException {
- return contentBuilder(XContentType.SMILE);
+ return MediaTypeRegistry.contentBuilder(XContentType.SMILE);
}
/**
@@ -86,7 +79,7 @@ public static XContentBuilder smileBuilder(OutputStream os) throws IOException {
* Returns a content builder using YAML format ({@link org.opensearch.common.xcontent.XContentType#YAML}.
*/
public static XContentBuilder yamlBuilder() throws IOException {
- return contentBuilder(XContentType.YAML);
+ return MediaTypeRegistry.contentBuilder(XContentType.YAML);
}
/**
@@ -100,271 +93,6 @@ public static XContentBuilder yamlBuilder(OutputStream os) throws IOException {
* Returns a content builder using CBOR format ({@link org.opensearch.common.xcontent.XContentType#CBOR}.
*/
public static XContentBuilder cborBuilder() throws IOException {
- return contentBuilder(XContentType.CBOR);
- }
-
- /**
- * Constructs a new cbor builder that will output the result into the provided output stream.
- */
- public static XContentBuilder cborBuilder(OutputStream os) throws IOException {
- return new XContentBuilder(CborXContent.cborXContent, os);
- }
-
- /**
- * Constructs a xcontent builder that will output the result into the provided output stream.
- */
- public static XContentBuilder contentBuilder(MediaType type, OutputStream outputStream) throws IOException {
- if (type == XContentType.JSON) {
- return jsonBuilder(outputStream);
- } else if (type == XContentType.SMILE) {
- return smileBuilder(outputStream);
- } else if (type == XContentType.YAML) {
- return yamlBuilder(outputStream);
- } else if (type == XContentType.CBOR) {
- return cborBuilder(outputStream);
- }
- throw new IllegalArgumentException("No matching content type for " + type);
- }
-
- /**
- * Returns a binary content builder for the provided media type.
- */
- public static XContentBuilder contentBuilder(MediaType type) throws IOException {
- if (type instanceof XContentType) {
- return contentBuilder((XContentType) (type));
- }
- throw new IllegalArgumentException("Content type [" + type.getClass().getName() + "] not supported");
- }
-
- /**
- * Returns a binary content builder for the provided content type.
- */
- public static XContentBuilder contentBuilder(XContentType type) throws IOException {
- if (type == XContentType.JSON) {
- return JsonXContent.contentBuilder();
- } else if (type == XContentType.SMILE) {
- return SmileXContent.contentBuilder();
- } else if (type == XContentType.YAML) {
- return YamlXContent.contentBuilder();
- } else if (type == XContentType.CBOR) {
- return CborXContent.contentBuilder();
- }
- throw new IllegalArgumentException("No matching content type for " + type);
- }
-
- /**
- * Returns the {@link XContent} for the provided content type.
- */
- public static XContent xContent(MediaType type) {
- if (type == null) {
- throw new IllegalArgumentException("Cannot get xcontent for unknown type");
- }
- return type.xContent();
- }
-
- /**
- * Guesses the content type based on the provided char sequence.
- *
- * @deprecated the content type should not be guessed except for few cases where we effectively don't know the content type.
- * The REST layer should move to reading the Content-Type header instead. There are other places where auto-detection may be needed.
- * This method is deprecated to prevent usages of it from spreading further without specific reasons.
- */
- @Deprecated
- public static XContentType xContentType(CharSequence content) {
- int length = content.length() < GUESS_HEADER_LENGTH ? content.length() : GUESS_HEADER_LENGTH;
- if (length == 0) {
- return null;
- }
- char first = content.charAt(0);
- if (first == '{') {
- return XContentType.JSON;
- }
- // Should we throw a failure here? Smile idea is to use it in bytes....
- if (length > 2
- && first == SmileConstants.HEADER_BYTE_1
- && content.charAt(1) == SmileConstants.HEADER_BYTE_2
- && content.charAt(2) == SmileConstants.HEADER_BYTE_3) {
- return XContentType.SMILE;
- }
- if (length > 2 && first == '-' && content.charAt(1) == '-' && content.charAt(2) == '-') {
- return XContentType.YAML;
- }
-
- // CBOR is not supported
-
- for (int i = 0; i < length; i++) {
- char c = content.charAt(i);
- if (c == '{') {
- return XContentType.JSON;
- }
- if (Character.isWhitespace(c) == false) {
- break;
- }
- }
- return null;
- }
-
- /**
- * Guesses the content (type) based on the provided char sequence and returns the corresponding {@link XContent}
- *
- * @deprecated the content type should not be guessed except for few cases where we effectively don't know the content type.
- * The REST layer should move to reading the Content-Type header instead. There are other places where auto-detection may be needed.
- * This method is deprecated to prevent usages of it from spreading further without specific reasons.
- */
- @Deprecated
- public static XContent xContent(CharSequence content) {
- XContentType type = xContentType(content);
- if (type == null) {
- throw new XContentParseException("Failed to derive xcontent");
- }
- return xContent(type);
- }
-
- /**
- * Guesses the content type based on the provided bytes and returns the corresponding {@link XContent}
- *
- * @deprecated the content type should not be guessed except for few cases where we effectively don't know the content type.
- * The REST layer should move to reading the Content-Type header instead. There are other places where auto-detection may be needed.
- * This method is deprecated to prevent usages of it from spreading further without specific reasons.
- */
- @Deprecated
- public static XContent xContent(byte[] data) {
- return xContent(data, 0, data.length);
- }
-
- /**
- * Guesses the content type based on the provided bytes and returns the corresponding {@link XContent}
- *
- * @deprecated guessing the content type should not be needed ideally. We should rather know the content type upfront or read it
- * from headers. Till we fixed the REST layer to read the Content-Type header, that should be the only place where guessing is needed.
- */
- @Deprecated
- public static XContent xContent(byte[] data, int offset, int length) {
- XContentType type = xContentType(data, offset, length);
- if (type == null) {
- throw new XContentParseException("Failed to derive xcontent");
- }
- return xContent(type);
- }
-
- /**
- * Guesses the content type based on the provided input stream without consuming it.
- *
- * @deprecated the content type should not be guessed except for few cases where we effectively don't know the content type.
- * The REST layer should move to reading the Content-Type header instead. There are other places where auto-detection may be needed.
- * This method is deprecated to prevent usages of it from spreading further without specific reasons.
- */
- @Deprecated
- public static XContentType xContentType(InputStream si) throws IOException {
- /*
- * We need to guess the content type. To do this, we look for the first non-whitespace character and then try to guess the content
- * type on the GUESS_HEADER_LENGTH bytes that follow. We do this in a way that does not modify the initial read position in the
- * underlying input stream. This is why the input stream must support mark/reset and why we repeatedly mark the read position and
- * reset.
- */
- if (si.markSupported() == false) {
- throw new IllegalArgumentException("Cannot guess the xcontent type without mark/reset support on " + si.getClass());
- }
- si.mark(Integer.MAX_VALUE);
- try {
- // scan until we find the first non-whitespace character or the end of the stream
- int current;
- do {
- current = si.read();
- if (current == -1) {
- return null;
- }
- } while (Character.isWhitespace((char) current));
- // now guess the content type off the next GUESS_HEADER_LENGTH bytes including the current byte
- final byte[] firstBytes = new byte[GUESS_HEADER_LENGTH];
- firstBytes[0] = (byte) current;
- int read = 1;
- while (read < GUESS_HEADER_LENGTH) {
- final int r = si.read(firstBytes, read, GUESS_HEADER_LENGTH - read);
- if (r == -1) {
- break;
- }
- read += r;
- }
- return xContentType(firstBytes, 0, read);
- } finally {
- si.reset();
- }
-
- }
-
- /**
- * Guesses the content type based on the provided bytes.
- *
- * @deprecated the content type should not be guessed except for few cases where we effectively don't know the content type.
- * The REST layer should move to reading the Content-Type header instead. There are other places where auto-detection may be needed.
- * This method is deprecated to prevent usages of it from spreading further without specific reasons.
- */
- @Deprecated
- public static XContentType xContentType(byte[] bytes) {
- return xContentType(bytes, 0, bytes.length);
- }
-
- /**
- * Guesses the content type based on the provided bytes.
- *
- * @deprecated the content type should not be guessed except for few cases where we effectively don't know the content type.
- * The REST layer should move to reading the Content-Type header instead. There are other places where auto-detection may be needed.
- * This method is deprecated to prevent usages of it from spreading further without specific reasons.
- */
- @Deprecated
- public static XContentType xContentType(byte[] bytes, int offset, int length) {
- int totalLength = bytes.length;
- if (totalLength == 0 || length == 0) {
- return null;
- } else if ((offset + length) > totalLength) {
- return null;
- }
- byte first = bytes[offset];
- if (first == '{') {
- return XContentType.JSON;
- }
- if (length > 2
- && first == SmileConstants.HEADER_BYTE_1
- && bytes[offset + 1] == SmileConstants.HEADER_BYTE_2
- && bytes[offset + 2] == SmileConstants.HEADER_BYTE_3) {
- return XContentType.SMILE;
- }
- if (length > 2 && first == '-' && bytes[offset + 1] == '-' && bytes[offset + 2] == '-') {
- return XContentType.YAML;
- }
- // CBOR logic similar to CBORFactory#hasCBORFormat
- if (first == CBORConstants.BYTE_OBJECT_INDEFINITE && length > 1) {
- return XContentType.CBOR;
- }
- if (CBORConstants.hasMajorType(CBORConstants.MAJOR_TYPE_TAG, first) && length > 2) {
- // Actually, specific "self-describe tag" is a very good indicator
- if (first == (byte) 0xD9 && bytes[offset + 1] == (byte) 0xD9 && bytes[offset + 2] == (byte) 0xF7) {
- return XContentType.CBOR;
- }
- }
- // for small objects, some encoders just encode as major type object, we can safely
- // say its CBOR since it doesn't contradict SMILE or JSON, and its a last resort
- if (CBORConstants.hasMajorType(CBORConstants.MAJOR_TYPE_OBJECT, first)) {
- return XContentType.CBOR;
- }
-
- int jsonStart = 0;
- // JSON may be preceded by UTF-8 BOM
- if (length > 3 && first == (byte) 0xEF && bytes[offset + 1] == (byte) 0xBB && bytes[offset + 2] == (byte) 0xBF) {
- jsonStart = 3;
- }
-
- // a last chance for JSON
- for (int i = jsonStart; i < length; i++) {
- byte b = bytes[offset + i];
- if (b == '{') {
- return XContentType.JSON;
- }
- if (Character.isWhitespace(b) == false) {
- break;
- }
- }
- return null;
+ return MediaTypeRegistry.contentBuilder(XContentType.CBOR);
}
}
diff --git a/libs/x-content/src/main/java/org/opensearch/common/xcontent/XContentType.java b/libs/x-content/src/main/java/org/opensearch/common/xcontent/XContentType.java
index 023caa49e1f39..7026dbd5e78c3 100644
--- a/libs/x-content/src/main/java/org/opensearch/common/xcontent/XContentType.java
+++ b/libs/x-content/src/main/java/org/opensearch/common/xcontent/XContentType.java
@@ -32,17 +32,19 @@
package org.opensearch.common.xcontent;
+import com.fasterxml.jackson.dataformat.cbor.CBORConstants;
+import com.fasterxml.jackson.dataformat.smile.SmileConstants;
import org.opensearch.common.xcontent.cbor.CborXContent;
import org.opensearch.common.xcontent.json.JsonXContent;
import org.opensearch.common.xcontent.smile.SmileXContent;
import org.opensearch.common.xcontent.yaml.YamlXContent;
import org.opensearch.core.common.io.stream.StreamOutput;
import org.opensearch.core.xcontent.MediaType;
-import org.opensearch.core.xcontent.MediaTypeParserRegistry;
import org.opensearch.core.xcontent.XContent;
+import org.opensearch.core.xcontent.XContentBuilder;
import java.io.IOException;
-import java.util.Map;
+import java.io.OutputStream;
/**
* The content type of {@link XContent}.
@@ -72,6 +74,26 @@ public String subtype() {
public XContent xContent() {
return JsonXContent.jsonXContent;
}
+
+ @Override
+ public boolean detectedXContent(final byte[] bytes, int offset, int length) {
+ return bytes[offset] == '{';
+ }
+
+ @Override
+ public boolean detectedXContent(final CharSequence content, final int length) {
+ return content.charAt(0) == '{';
+ }
+
+ @Override
+ public XContentBuilder contentBuilder() throws IOException {
+ return JsonXContent.contentBuilder();
+ }
+
+ @Override
+ public XContentBuilder contentBuilder(final OutputStream os) throws IOException {
+ return new XContentBuilder(JsonXContent.jsonXContent, os);
+ }
},
/**
* The jackson based smile binary format. Fast and compact binary format.
@@ -91,6 +113,32 @@ public String subtype() {
public XContent xContent() {
return SmileXContent.smileXContent;
}
+
+ @Override
+ public boolean detectedXContent(final byte[] bytes, int offset, int length) {
+ return length > 2
+ && bytes[offset] == SmileConstants.HEADER_BYTE_1
+ && bytes[offset + 1] == SmileConstants.HEADER_BYTE_2
+ && bytes[offset + 2] == SmileConstants.HEADER_BYTE_3;
+ }
+
+ @Override
+ public boolean detectedXContent(final CharSequence content, final int length) {
+ return length > 2
+ && content.charAt(0) == SmileConstants.HEADER_BYTE_1
+ && content.charAt(1) == SmileConstants.HEADER_BYTE_2
+ && content.charAt(2) == SmileConstants.HEADER_BYTE_3;
+ }
+
+ @Override
+ public XContentBuilder contentBuilder() throws IOException {
+ return SmileXContent.contentBuilder();
+ }
+
+ @Override
+ public XContentBuilder contentBuilder(final OutputStream os) throws IOException {
+ return new XContentBuilder(SmileXContent.smileXContent, os);
+ }
},
/**
* A YAML based content type.
@@ -110,6 +158,26 @@ public String subtype() {
public XContent xContent() {
return YamlXContent.yamlXContent;
}
+
+ @Override
+ public boolean detectedXContent(final byte[] bytes, int offset, int length) {
+ return length > 2 && bytes[offset] == '-' && bytes[offset + 1] == '-' && bytes[offset + 2] == '-';
+ }
+
+ @Override
+ public boolean detectedXContent(final CharSequence content, final int length) {
+ return length > 2 && content.charAt(0) == '-' && content.charAt(1) == '-' && content.charAt(2) == '-';
+ }
+
+ @Override
+ public XContentBuilder contentBuilder() throws IOException {
+ return YamlXContent.contentBuilder();
+ }
+
+ @Override
+ public XContentBuilder contentBuilder(final OutputStream os) throws IOException {
+ return new XContentBuilder(YamlXContent.yamlXContent, os);
+ }
},
/**
* A CBOR based content type.
@@ -129,12 +197,42 @@ public String subtype() {
public XContent xContent() {
return CborXContent.cborXContent;
}
- };
- static {
- /** a parser of media types */
- MediaTypeParserRegistry.register(XContentType.values(), Map.of("application/*", JSON, "application/x-ndjson", JSON));
- }
+ @Override
+ public boolean detectedXContent(final byte[] bytes, int offset, int length) {
+ // CBOR logic similar to CBORFactory#hasCBORFormat
+ if (bytes[offset] == CBORConstants.BYTE_OBJECT_INDEFINITE && length > 1) {
+ return true;
+ }
+ if (CBORConstants.hasMajorType(CBORConstants.MAJOR_TYPE_TAG, bytes[offset]) && length > 2) {
+ // Actually, specific "self-describe tag" is a very good indicator
+ if (bytes[offset] == (byte) 0xD9 && bytes[offset + 1] == (byte) 0xD9 && bytes[offset + 2] == (byte) 0xF7) {
+ return true;
+ }
+ }
+ // for small objects, some encoders just encode as major type object, we can safely
+ // say its CBOR since it doesn't contradict SMILE or JSON, and its a last resort
+ if (CBORConstants.hasMajorType(CBORConstants.MAJOR_TYPE_OBJECT, bytes[offset])) {
+ return true;
+ }
+ return false;
+ }
+
+ @Override
+ public boolean detectedXContent(final CharSequence content, final int length) {
+ return false;
+ }
+
+ @Override
+ public XContentBuilder contentBuilder() throws IOException {
+ return CborXContent.contentBuilder();
+ }
+
+ @Override
+ public XContentBuilder contentBuilder(final OutputStream os) throws IOException {
+ return new XContentBuilder(CborXContent.cborXContent, os);
+ }
+ };
private int index;
diff --git a/libs/x-content/src/main/java/org/opensearch/common/xcontent/json/JsonXContentGenerator.java b/libs/x-content/src/main/java/org/opensearch/common/xcontent/json/JsonXContentGenerator.java
index 9164b5216f9a5..3d4bbba9cc50c 100644
--- a/libs/x-content/src/main/java/org/opensearch/common/xcontent/json/JsonXContentGenerator.java
+++ b/libs/x-content/src/main/java/org/opensearch/common/xcontent/json/JsonXContentGenerator.java
@@ -43,9 +43,9 @@
import com.fasterxml.jackson.core.util.JsonGeneratorDelegate;
import org.opensearch.core.xcontent.DeprecationHandler;
import org.opensearch.core.xcontent.MediaType;
+import org.opensearch.core.xcontent.MediaTypeRegistry;
import org.opensearch.core.xcontent.NamedXContentRegistry;
import org.opensearch.core.xcontent.XContent;
-import org.opensearch.common.xcontent.XContentFactory;
import org.opensearch.core.xcontent.XContentGenerator;
import org.opensearch.core.xcontent.XContentParser;
import org.opensearch.common.xcontent.XContentType;
@@ -339,7 +339,7 @@ public void writeRawField(String name, InputStream content) throws IOException {
// needed for the XContentFactory.xContentType call
content = new BufferedInputStream(content);
}
- XContentType contentType = XContentFactory.xContentType(content);
+ MediaType contentType = MediaTypeRegistry.xContentType(content);
if (contentType == null) {
throw new IllegalArgumentException("Can't write raw bytes whose xcontent-type can't be guessed");
}
@@ -354,7 +354,7 @@ public void writeRawField(String name, InputStream content, MediaType mediaType)
if (mayWriteRawData(mediaType) == false) {
// EMPTY is safe here because we never call namedObject when writing raw data
try (
- XContentParser parser = XContentFactory.xContent(mediaType)
+ XContentParser parser = mediaType.xContent()
// It's okay to pass the throwing deprecation handler
// because we should not be writing raw fields when
// generating JSON
diff --git a/libs/x-content/src/main/java/org/opensearch/common/xcontent/spi/XContentProvider.java b/libs/x-content/src/main/java/org/opensearch/common/xcontent/spi/XContentProvider.java
new file mode 100644
index 0000000000000..af5ab67507b81
--- /dev/null
+++ b/libs/x-content/src/main/java/org/opensearch/common/xcontent/spi/XContentProvider.java
@@ -0,0 +1,35 @@
+/*
+ * SPDX-License-Identifier: Apache-2.0
+ *
+ * The OpenSearch Contributors require contributions made to
+ * this file be licensed under the Apache-2.0 license or a
+ * compatible open source license.
+ */
+
+package org.opensearch.common.xcontent.spi;
+
+import org.opensearch.common.xcontent.XContentType;
+import org.opensearch.core.xcontent.MediaType;
+import org.opensearch.core.xcontent.spi.MediaTypeProvider;
+
+import java.util.List;
+import java.util.Map;
+
+/**
+ * Media Type implementations provided by xcontent library
+ *
+ * @opensearch.internal
+ */
+public class XContentProvider implements MediaTypeProvider {
+ /** Returns the concrete {@link MediaType} provided by the xcontent library */
+ @Override
+ public List getMediaTypes() {
+ return List.of(XContentType.values());
+ }
+
+ /** Returns the additional {@link MediaType} aliases provided by the xcontent library */
+ @Override
+ public Map getAdditionalMediaTypes() {
+ return Map.of("application/*", XContentType.JSON, "application/x-ndjson", XContentType.JSON);
+ }
+}
diff --git a/libs/x-content/src/main/java/org/opensearch/common/xcontent/spi/package-info.java b/libs/x-content/src/main/java/org/opensearch/common/xcontent/spi/package-info.java
new file mode 100644
index 0000000000000..c265021f12763
--- /dev/null
+++ b/libs/x-content/src/main/java/org/opensearch/common/xcontent/spi/package-info.java
@@ -0,0 +1,10 @@
+/*
+ * SPDX-License-Identifier: Apache-2.0
+ *
+ * The OpenSearch Contributors require contributions made to
+ * this file be licensed under the Apache-2.0 license or a
+ * compatible open source license.
+ */
+
+/** SPI implementation for the xcontent library */
+package org.opensearch.common.xcontent.spi;
diff --git a/libs/x-content/src/main/resources/META-INF/services/org.opensearch.core.xcontent.spi.MediaTypeProvider b/libs/x-content/src/main/resources/META-INF/services/org.opensearch.core.xcontent.spi.MediaTypeProvider
new file mode 100644
index 0000000000000..ce3fab93087dd
--- /dev/null
+++ b/libs/x-content/src/main/resources/META-INF/services/org.opensearch.core.xcontent.spi.MediaTypeProvider
@@ -0,0 +1,9 @@
+#
+# SPDX-License-Identifier: Apache-2.0
+#
+# The OpenSearch Contributors require contributions made to
+# this file be licensed under the Apache-2.0 license or a
+# compatible open source license.
+#
+
+org.opensearch.common.xcontent.spi.XContentProvider
diff --git a/libs/x-content/src/test/java/org/opensearch/common/xcontent/MediaTypeParserTests.java b/libs/x-content/src/test/java/org/opensearch/common/xcontent/MediaTypeParserTests.java
index 15492b7351984..64d36f0a8b78f 100644
--- a/libs/x-content/src/test/java/org/opensearch/common/xcontent/MediaTypeParserTests.java
+++ b/libs/x-content/src/test/java/org/opensearch/common/xcontent/MediaTypeParserTests.java
@@ -32,7 +32,7 @@
package org.opensearch.common.xcontent;
-import org.opensearch.core.xcontent.MediaTypeParserRegistry;
+import org.opensearch.core.xcontent.MediaTypeRegistry;
import org.opensearch.test.OpenSearchTestCase;
import java.util.Collections;
@@ -46,40 +46,37 @@ public class MediaTypeParserTests extends OpenSearchTestCase {
public void testJsonWithParameters() throws Exception {
String mediaType = "application/json";
- assertThat(MediaTypeParserRegistry.parseMediaType(mediaType).getParameters(), equalTo(Collections.emptyMap()));
- assertThat(MediaTypeParserRegistry.parseMediaType(mediaType + ";").getParameters(), equalTo(Collections.emptyMap()));
+ assertThat(MediaTypeRegistry.parseMediaType(mediaType).getParameters(), equalTo(Collections.emptyMap()));
+ assertThat(MediaTypeRegistry.parseMediaType(mediaType + ";").getParameters(), equalTo(Collections.emptyMap()));
+ assertThat(MediaTypeRegistry.parseMediaType(mediaType + "; charset=UTF-8").getParameters(), equalTo(Map.of("charset", "utf-8")));
assertThat(
- MediaTypeParserRegistry.parseMediaType(mediaType + "; charset=UTF-8").getParameters(),
- equalTo(Map.of("charset", "utf-8"))
- );
- assertThat(
- MediaTypeParserRegistry.parseMediaType(mediaType + "; custom=123;charset=UTF-8").getParameters(),
+ MediaTypeRegistry.parseMediaType(mediaType + "; custom=123;charset=UTF-8").getParameters(),
equalTo(Map.of("charset", "utf-8", "custom", "123"))
);
}
public void testWhiteSpaceInTypeSubtype() {
String mediaType = " application/json ";
- assertThat(MediaTypeParserRegistry.parseMediaType(mediaType).getMediaType(), equalTo(XContentType.JSON));
+ assertThat(MediaTypeRegistry.parseMediaType(mediaType).getMediaType(), equalTo(XContentType.JSON));
assertThat(
- MediaTypeParserRegistry.parseMediaType(mediaType + "; custom=123; charset=UTF-8").getParameters(),
+ MediaTypeRegistry.parseMediaType(mediaType + "; custom=123; charset=UTF-8").getParameters(),
equalTo(Map.of("charset", "utf-8", "custom", "123"))
);
assertThat(
- MediaTypeParserRegistry.parseMediaType(mediaType + "; custom=123;\n charset=UTF-8").getParameters(),
+ MediaTypeRegistry.parseMediaType(mediaType + "; custom=123;\n charset=UTF-8").getParameters(),
equalTo(Map.of("charset", "utf-8", "custom", "123"))
);
mediaType = " application / json ";
- assertThat(MediaTypeParserRegistry.parseMediaType(mediaType), is(nullValue()));
+ assertThat(MediaTypeRegistry.parseMediaType(mediaType), is(nullValue()));
}
public void testInvalidParameters() {
String mediaType = "application/json";
- assertThat(MediaTypeParserRegistry.parseMediaType(mediaType + "; keyvalueNoEqualsSign"), is(nullValue()));
+ assertThat(MediaTypeRegistry.parseMediaType(mediaType + "; keyvalueNoEqualsSign"), is(nullValue()));
- assertThat(MediaTypeParserRegistry.parseMediaType(mediaType + "; key = value"), is(nullValue()));
- assertThat(MediaTypeParserRegistry.parseMediaType(mediaType + "; key="), is(nullValue()));
+ assertThat(MediaTypeRegistry.parseMediaType(mediaType + "; key = value"), is(nullValue()));
+ assertThat(MediaTypeRegistry.parseMediaType(mediaType + "; key="), is(nullValue()));
}
}
diff --git a/libs/x-content/src/test/java/org/opensearch/common/xcontent/ObjectParserTests.java b/libs/x-content/src/test/java/org/opensearch/common/xcontent/ObjectParserTests.java
index cd59bf59fe15d..6e7de4aa6bfe5 100644
--- a/libs/x-content/src/test/java/org/opensearch/common/xcontent/ObjectParserTests.java
+++ b/libs/x-content/src/test/java/org/opensearch/common/xcontent/ObjectParserTests.java
@@ -33,7 +33,6 @@
import org.opensearch.common.CheckedFunction;
import org.opensearch.core.ParseField;
-import org.opensearch.common.Strings;
import org.opensearch.core.xcontent.XContentParserUtils;
import org.opensearch.core.xcontent.ObjectParser;
import org.opensearch.core.xcontent.ObjectParser.NamedObjectParser;
@@ -449,7 +448,7 @@ public void testAllVariants() throws IOException {
}
builder.field("string_or_null", nullValue ? null : "5");
builder.endObject();
- XContentParser parser = createParser(JsonXContent.jsonXContent, Strings.toString(builder));
+ XContentParser parser = createParser(JsonXContent.jsonXContent, builder.toString());
class TestStruct {
int int_field;
int nullableIntField;
diff --git a/libs/x-content/src/test/java/org/opensearch/common/xcontent/SimpleStruct.java b/libs/x-content/src/test/java/org/opensearch/common/xcontent/SimpleStruct.java
index a4aca80918284..1d2a66ea1f78f 100644
--- a/libs/x-content/src/test/java/org/opensearch/common/xcontent/SimpleStruct.java
+++ b/libs/x-content/src/test/java/org/opensearch/common/xcontent/SimpleStruct.java
@@ -33,7 +33,7 @@
package org.opensearch.common.xcontent;
import org.opensearch.core.ParseField;
-import org.opensearch.common.Strings;
+import org.opensearch.core.common.Strings;
import org.opensearch.core.xcontent.ConstructingObjectParser;
import org.opensearch.core.xcontent.ToXContentObject;
import org.opensearch.core.xcontent.XContentBuilder;
diff --git a/libs/x-content/src/test/java/org/opensearch/common/xcontent/XContentParserTests.java b/libs/x-content/src/test/java/org/opensearch/common/xcontent/XContentParserTests.java
index 3552514af0aa8..fab61753eb739 100644
--- a/libs/x-content/src/test/java/org/opensearch/common/xcontent/XContentParserTests.java
+++ b/libs/x-content/src/test/java/org/opensearch/common/xcontent/XContentParserTests.java
@@ -37,7 +37,6 @@
import com.fasterxml.jackson.dataformat.yaml.JacksonYAMLParseException;
import org.opensearch.common.CheckedSupplier;
-import org.opensearch.common.Strings;
import org.opensearch.core.common.bytes.BytesReference;
import org.opensearch.common.xcontent.cbor.CborXContent;
import org.opensearch.common.xcontent.json.JsonXContent;
@@ -366,7 +365,7 @@ public void testReadBooleans() throws IOException {
public void testEmptyList() throws IOException {
XContentBuilder builder = XContentFactory.jsonBuilder().startObject().startArray("some_array").endArray().endObject();
- try (XContentParser parser = createParser(JsonXContent.jsonXContent, Strings.toString(builder))) {
+ try (XContentParser parser = createParser(JsonXContent.jsonXContent, builder.toString())) {
assertEquals(XContentParser.Token.START_OBJECT, parser.nextToken());
assertEquals(XContentParser.Token.FIELD_NAME, parser.nextToken());
assertEquals("some_array", parser.currentName());
@@ -388,7 +387,7 @@ public void testSimpleList() throws IOException {
.endArray()
.endObject();
- try (XContentParser parser = createParser(JsonXContent.jsonXContent, Strings.toString(builder))) {
+ try (XContentParser parser = createParser(JsonXContent.jsonXContent, builder.toString())) {
assertEquals(XContentParser.Token.START_OBJECT, parser.nextToken());
assertEquals(XContentParser.Token.FIELD_NAME, parser.nextToken());
assertEquals("some_array", parser.currentName());
@@ -416,7 +415,7 @@ public void testNestedList() throws IOException {
.endArray()
.endObject();
- try (XContentParser parser = createParser(JsonXContent.jsonXContent, Strings.toString(builder))) {
+ try (XContentParser parser = createParser(JsonXContent.jsonXContent, builder.toString())) {
assertEquals(XContentParser.Token.START_OBJECT, parser.nextToken());
assertEquals(XContentParser.Token.FIELD_NAME, parser.nextToken());
assertEquals("some_array", parser.currentName());
@@ -440,7 +439,7 @@ public void testNestedMapInList() throws IOException {
.endArray()
.endObject();
- try (XContentParser parser = createParser(JsonXContent.jsonXContent, Strings.toString(builder))) {
+ try (XContentParser parser = createParser(JsonXContent.jsonXContent, builder.toString())) {
assertEquals(XContentParser.Token.START_OBJECT, parser.nextToken());
assertEquals(XContentParser.Token.FIELD_NAME, parser.nextToken());
assertEquals("some_array", parser.currentName());
@@ -516,7 +515,7 @@ public void testSubParserObject() throws IOException {
XContentBuilder builder = XContentFactory.jsonBuilder();
int numberOfTokens;
numberOfTokens = generateRandomObjectForMarking(builder);
- String content = Strings.toString(builder);
+ String content = builder.toString();
try (XContentParser parser = createParser(JsonXContent.jsonXContent, content)) {
assertEquals(XContentParser.Token.START_OBJECT, parser.nextToken());
@@ -564,7 +563,7 @@ public void testSubParserArray() throws IOException {
builder.endArray();
builder.endObject();
- String content = Strings.toString(builder);
+ String content = builder.toString();
try (XContentParser parser = createParser(JsonXContent.jsonXContent, content)) {
assertEquals(XContentParser.Token.START_OBJECT, parser.nextToken());
@@ -597,7 +596,7 @@ public void testSubParserArray() throws IOException {
public void testCreateSubParserAtAWrongPlace() throws IOException {
XContentBuilder builder = XContentFactory.jsonBuilder();
generateRandomObjectForMarking(builder);
- String content = Strings.toString(builder);
+ String content = builder.toString();
try (XContentParser parser = createParser(JsonXContent.jsonXContent, content)) {
assertEquals(XContentParser.Token.START_OBJECT, parser.nextToken());
@@ -611,7 +610,7 @@ public void testCreateSubParserAtAWrongPlace() throws IOException {
public void testCreateRootSubParser() throws IOException {
XContentBuilder builder = XContentFactory.jsonBuilder();
int numberOfTokens = generateRandomObjectForMarking(builder);
- String content = Strings.toString(builder);
+ String content = builder.toString();
try (XContentParser parser = createParser(JsonXContent.jsonXContent, content)) {
assertEquals(XContentParser.Token.START_OBJECT, parser.nextToken());
diff --git a/modules/aggs-matrix-stats/src/test/java/org/opensearch/search/aggregations/matrix/stats/InternalMatrixStatsTests.java b/modules/aggs-matrix-stats/src/test/java/org/opensearch/search/aggregations/matrix/stats/InternalMatrixStatsTests.java
index c5dc68ff4c800..0384615c2f2f5 100644
--- a/modules/aggs-matrix-stats/src/test/java/org/opensearch/search/aggregations/matrix/stats/InternalMatrixStatsTests.java
+++ b/modules/aggs-matrix-stats/src/test/java/org/opensearch/search/aggregations/matrix/stats/InternalMatrixStatsTests.java
@@ -37,7 +37,7 @@
import org.opensearch.common.util.MockPageCacheRecycler;
import org.opensearch.core.xcontent.ContextParser;
import org.opensearch.core.xcontent.NamedXContentRegistry;
-import org.opensearch.indices.breaker.NoneCircuitBreakerService;
+import org.opensearch.core.indices.breaker.NoneCircuitBreakerService;
import org.opensearch.plugins.SearchPlugin;
import org.opensearch.script.ScriptService;
import org.opensearch.search.aggregations.Aggregation;
diff --git a/modules/geo/src/internalClusterTest/java/org/opensearch/geo/search/aggregations/metrics/AbstractGeoAggregatorModulePluginTestCase.java b/modules/geo/src/internalClusterTest/java/org/opensearch/geo/search/aggregations/metrics/AbstractGeoAggregatorModulePluginTestCase.java
index f7c9747e1a163..5df5912daf461 100644
--- a/modules/geo/src/internalClusterTest/java/org/opensearch/geo/search/aggregations/metrics/AbstractGeoAggregatorModulePluginTestCase.java
+++ b/modules/geo/src/internalClusterTest/java/org/opensearch/geo/search/aggregations/metrics/AbstractGeoAggregatorModulePluginTestCase.java
@@ -10,7 +10,6 @@
import org.opensearch.action.index.IndexRequestBuilder;
import org.opensearch.action.search.SearchResponse;
-import org.opensearch.common.Strings;
import org.opensearch.common.document.DocumentField;
import org.opensearch.common.geo.GeoPoint;
import org.opensearch.common.settings.Settings;
@@ -256,7 +255,7 @@ public void setupSuiteScopeCluster() throws Exception {
long totalHits = response.getHits().getTotalHits().value;
XContentBuilder builder = XContentFactory.jsonBuilder();
response.toXContent(builder, ToXContent.EMPTY_PARAMS);
- logger.info("Full high_card_idx Response Content:\n{ {} }", Strings.toString(builder));
+ logger.info("Full high_card_idx Response Content:\n{ {} }", builder.toString());
for (int i = 0; i < totalHits; i++) {
SearchHit searchHit = response.getHits().getAt(i);
assertThat("Hit " + i + " with id: " + searchHit.getId(), searchHit.getIndex(), equalTo("high_card_idx"));
diff --git a/modules/geo/src/main/java/org/opensearch/geo/algorithm/PolygonGenerator.java b/modules/geo/src/main/java/org/opensearch/geo/algorithm/PolygonGenerator.java
index 246ece4342cff..da1d97260ec96 100644
--- a/modules/geo/src/main/java/org/opensearch/geo/algorithm/PolygonGenerator.java
+++ b/modules/geo/src/main/java/org/opensearch/geo/algorithm/PolygonGenerator.java
@@ -10,7 +10,7 @@
import org.apache.logging.log4j.LogManager;
import org.apache.logging.log4j.Logger;
-import org.opensearch.common.util.CollectionUtils;
+import org.opensearch.core.common.util.CollectionUtils;
import java.awt.geom.Point2D;
import java.util.ArrayList;
diff --git a/modules/ingest-common/src/main/java/org/opensearch/ingest/common/BytesProcessor.java b/modules/ingest-common/src/main/java/org/opensearch/ingest/common/BytesProcessor.java
index 3bd1137975800..b76fe41c8e67d 100644
--- a/modules/ingest-common/src/main/java/org/opensearch/ingest/common/BytesProcessor.java
+++ b/modules/ingest-common/src/main/java/org/opensearch/ingest/common/BytesProcessor.java
@@ -32,7 +32,7 @@
package org.opensearch.ingest.common;
-import org.opensearch.common.unit.ByteSizeValue;
+import org.opensearch.core.common.unit.ByteSizeValue;
import java.util.Map;
diff --git a/modules/ingest-common/src/main/java/org/opensearch/ingest/common/ScriptProcessor.java b/modules/ingest-common/src/main/java/org/opensearch/ingest/common/ScriptProcessor.java
index 5f61091495cd5..b66d0b709a824 100644
--- a/modules/ingest-common/src/main/java/org/opensearch/ingest/common/ScriptProcessor.java
+++ b/modules/ingest-common/src/main/java/org/opensearch/ingest/common/ScriptProcessor.java
@@ -34,7 +34,7 @@
import org.opensearch.common.Nullable;
import org.opensearch.core.common.bytes.BytesReference;
-import org.opensearch.common.util.CollectionUtils;
+import org.opensearch.core.common.util.CollectionUtils;
import org.opensearch.common.xcontent.LoggingDeprecationHandler;
import org.opensearch.core.xcontent.NamedXContentRegistry;
import org.opensearch.core.xcontent.XContentBuilder;
diff --git a/modules/ingest-common/src/test/java/org/opensearch/ingest/common/BytesProcessorTests.java b/modules/ingest-common/src/test/java/org/opensearch/ingest/common/BytesProcessorTests.java
index bbd9ff4c8b912..ce8c182b60a61 100644
--- a/modules/ingest-common/src/test/java/org/opensearch/ingest/common/BytesProcessorTests.java
+++ b/modules/ingest-common/src/test/java/org/opensearch/ingest/common/BytesProcessorTests.java
@@ -33,15 +33,14 @@
package org.opensearch.ingest.common;
import org.opensearch.OpenSearchException;
-import org.opensearch.common.unit.ByteSizeUnit;
-import org.opensearch.common.unit.ByteSizeValue;
+import org.opensearch.OpenSearchParseException;
+import org.opensearch.core.common.unit.ByteSizeUnit;
+import org.opensearch.core.common.unit.ByteSizeValue;
import org.opensearch.ingest.IngestDocument;
import org.opensearch.ingest.Processor;
import org.opensearch.ingest.RandomDocumentPicks;
import org.hamcrest.CoreMatchers;
-import static org.hamcrest.Matchers.equalTo;
-
public class BytesProcessorTests extends AbstractStringProcessorTestCase {
private String modifiedInput;
@@ -101,14 +100,16 @@ public void testMissingUnits() {
assertThat(exception.getMessage(), CoreMatchers.containsString("unit is missing or unrecognized"));
}
- public void testFractional() throws Exception {
+ public void testFractional() {
IngestDocument ingestDocument = RandomDocumentPicks.randomIngestDocument(random());
String fieldName = RandomDocumentPicks.addRandomField(random(), ingestDocument, "1.1kb");
Processor processor = newProcessor(fieldName, randomBoolean(), fieldName);
- processor.execute(ingestDocument);
- assertThat(ingestDocument.getFieldValue(fieldName, expectedResultType()), equalTo(1126L));
- assertWarnings(
- "Fractional bytes values are deprecated. Use non-fractional bytes values instead: [1.1kb] found for setting " + "[Ingest Field]"
+ OpenSearchParseException e = expectThrows(OpenSearchParseException.class, () -> processor.execute(ingestDocument));
+ assertThat(
+ e.getMessage(),
+ CoreMatchers.containsString(
+ "Fractional bytes values have been deprecated since Legacy 6.2. " + "Use non-fractional bytes values instead:"
+ )
);
}
}
diff --git a/modules/ingest-geoip/build.gradle b/modules/ingest-geoip/build.gradle
index 35b7de8f83164..e126cf37e33a2 100644
--- a/modules/ingest-geoip/build.gradle
+++ b/modules/ingest-geoip/build.gradle
@@ -39,7 +39,7 @@ opensearchplugin {
}
dependencies {
- api('com.maxmind.geoip2:geoip2:4.0.1')
+ api('com.maxmind.geoip2:geoip2:4.1.0')
// geoip2 dependencies:
api('com.maxmind.db:maxmind-db:3.0.0')
api("com.fasterxml.jackson.core:jackson-annotations:${versions.jackson}")
diff --git a/modules/ingest-geoip/licenses/geoip2-4.0.1.jar.sha1 b/modules/ingest-geoip/licenses/geoip2-4.0.1.jar.sha1
deleted file mode 100644
index 0722ebf08e137..0000000000000
--- a/modules/ingest-geoip/licenses/geoip2-4.0.1.jar.sha1
+++ /dev/null
@@ -1 +0,0 @@
-f2a9b0ebd91b73a409a526b4d939f5ab8f4a1a87
\ No newline at end of file
diff --git a/modules/ingest-geoip/licenses/geoip2-4.1.0.jar.sha1 b/modules/ingest-geoip/licenses/geoip2-4.1.0.jar.sha1
new file mode 100644
index 0000000000000..0d124299e4cfb
--- /dev/null
+++ b/modules/ingest-geoip/licenses/geoip2-4.1.0.jar.sha1
@@ -0,0 +1 @@
+b6b356cc91863409ba3475a148ee11a3a6d6aa4b
\ No newline at end of file
diff --git a/modules/ingest-user-agent/src/main/java/org/opensearch/ingest/useragent/UserAgentParser.java b/modules/ingest-user-agent/src/main/java/org/opensearch/ingest/useragent/UserAgentParser.java
index b189c8ed8905e..87598aa28ce8f 100644
--- a/modules/ingest-user-agent/src/main/java/org/opensearch/ingest/useragent/UserAgentParser.java
+++ b/modules/ingest-user-agent/src/main/java/org/opensearch/ingest/useragent/UserAgentParser.java
@@ -35,7 +35,6 @@
import org.opensearch.OpenSearchParseException;
import org.opensearch.common.xcontent.LoggingDeprecationHandler;
import org.opensearch.core.xcontent.NamedXContentRegistry;
-import org.opensearch.common.xcontent.XContentFactory;
import org.opensearch.core.xcontent.XContentParser;
import org.opensearch.common.xcontent.XContentType;
import java.io.IOException;
@@ -68,7 +67,7 @@ final class UserAgentParser {
private void init(InputStream regexStream) throws IOException {
// EMPTY is safe here because we don't use namedObject
- XContentParser yamlParser = XContentFactory.xContent(XContentType.YAML)
+ XContentParser yamlParser = XContentType.YAML.xContent()
.createParser(NamedXContentRegistry.EMPTY, LoggingDeprecationHandler.INSTANCE, regexStream);
XContentParser.Token token = yamlParser.nextToken();
diff --git a/modules/lang-mustache/src/internalClusterTest/java/org/opensearch/script/mustache/MultiSearchTemplateIT.java b/modules/lang-mustache/src/internalClusterTest/java/org/opensearch/script/mustache/MultiSearchTemplateIT.java
index fbb8ebdec384c..bb11e493ba3d1 100644
--- a/modules/lang-mustache/src/internalClusterTest/java/org/opensearch/script/mustache/MultiSearchTemplateIT.java
+++ b/modules/lang-mustache/src/internalClusterTest/java/org/opensearch/script/mustache/MultiSearchTemplateIT.java
@@ -34,7 +34,6 @@
import org.opensearch.action.index.IndexRequestBuilder;
import org.opensearch.action.search.SearchRequest;
-import org.opensearch.common.Strings;
import org.opensearch.index.IndexNotFoundException;
import org.opensearch.plugins.Plugin;
import org.opensearch.script.ScriptType;
@@ -72,15 +71,14 @@ public void testBasic() throws Exception {
}
indexRandom(true, indexRequestBuilders);
- final String template = Strings.toString(
- jsonBuilder().startObject()
- .startObject("query")
- .startObject("{{query_type}}")
- .field("{{field_name}}", "{{field_value}}")
- .endObject()
- .endObject()
- .endObject()
- );
+ final String template = jsonBuilder().startObject()
+ .startObject("query")
+ .startObject("{{query_type}}")
+ .field("{{field_name}}", "{{field_value}}")
+ .endObject()
+ .endObject()
+ .endObject()
+ .toString();
MultiSearchTemplateRequest multiRequest = new MultiSearchTemplateRequest();
diff --git a/modules/lang-mustache/src/main/java/org/opensearch/script/mustache/CustomMustacheFactory.java b/modules/lang-mustache/src/main/java/org/opensearch/script/mustache/CustomMustacheFactory.java
index a4be60d2b6900..0cf1ed525fbfe 100644
--- a/modules/lang-mustache/src/main/java/org/opensearch/script/mustache/CustomMustacheFactory.java
+++ b/modules/lang-mustache/src/main/java/org/opensearch/script/mustache/CustomMustacheFactory.java
@@ -44,7 +44,6 @@
import com.github.mustachejava.codes.IterableCode;
import com.github.mustachejava.codes.WriteCode;
-import org.opensearch.common.Strings;
import org.opensearch.core.xcontent.XContentBuilder;
import org.opensearch.common.xcontent.XContentType;
@@ -228,7 +227,7 @@ protected Function createFunction(Object resolved) {
// Do not handle as JSON
return oh.stringify(resolved);
}
- return Strings.toString(builder);
+ return builder.toString();
} catch (IOException e) {
throw new MustacheException("Failed to convert object to JSON", e);
}
diff --git a/modules/lang-mustache/src/main/java/org/opensearch/script/mustache/CustomReflectionObjectHandler.java b/modules/lang-mustache/src/main/java/org/opensearch/script/mustache/CustomReflectionObjectHandler.java
index 57451a027c5d7..0936dee0c3016 100644
--- a/modules/lang-mustache/src/main/java/org/opensearch/script/mustache/CustomReflectionObjectHandler.java
+++ b/modules/lang-mustache/src/main/java/org/opensearch/script/mustache/CustomReflectionObjectHandler.java
@@ -33,7 +33,7 @@
package org.opensearch.script.mustache;
import com.github.mustachejava.reflect.ReflectionObjectHandler;
-import org.opensearch.common.util.CollectionUtils;
+import org.opensearch.core.common.util.CollectionUtils;
import org.opensearch.common.util.iterable.Iterables;
import java.lang.reflect.Array;
diff --git a/modules/lang-mustache/src/main/java/org/opensearch/script/mustache/MultiSearchTemplateResponse.java b/modules/lang-mustache/src/main/java/org/opensearch/script/mustache/MultiSearchTemplateResponse.java
index f31e5be078a28..49f5d4194d446 100644
--- a/modules/lang-mustache/src/main/java/org/opensearch/script/mustache/MultiSearchTemplateResponse.java
+++ b/modules/lang-mustache/src/main/java/org/opensearch/script/mustache/MultiSearchTemplateResponse.java
@@ -36,7 +36,7 @@
import org.opensearch.action.ActionResponse;
import org.opensearch.action.search.MultiSearchResponse;
import org.opensearch.common.Nullable;
-import org.opensearch.common.Strings;
+import org.opensearch.core.common.Strings;
import org.opensearch.core.common.io.stream.StreamInput;
import org.opensearch.core.common.io.stream.StreamOutput;
import org.opensearch.core.common.io.stream.Writeable;
diff --git a/modules/lang-mustache/src/main/java/org/opensearch/script/mustache/SearchTemplateRequest.java b/modules/lang-mustache/src/main/java/org/opensearch/script/mustache/SearchTemplateRequest.java
index c963ea7ba7da9..166778bd02cee 100644
--- a/modules/lang-mustache/src/main/java/org/opensearch/script/mustache/SearchTemplateRequest.java
+++ b/modules/lang-mustache/src/main/java/org/opensearch/script/mustache/SearchTemplateRequest.java
@@ -38,13 +38,12 @@
import org.opensearch.action.search.SearchRequest;
import org.opensearch.core.ParseField;
import org.opensearch.core.common.ParsingException;
-import org.opensearch.common.Strings;
import org.opensearch.core.common.io.stream.StreamInput;
import org.opensearch.core.common.io.stream.StreamOutput;
+import org.opensearch.core.xcontent.MediaTypeRegistry;
import org.opensearch.core.xcontent.ObjectParser;
import org.opensearch.core.xcontent.ToXContentObject;
import org.opensearch.core.xcontent.XContentBuilder;
-import org.opensearch.common.xcontent.XContentFactory;
import org.opensearch.core.xcontent.XContentParser;
import org.opensearch.script.ScriptType;
@@ -207,8 +206,8 @@ public ActionRequestValidationException validate() {
request.setScriptType(ScriptType.INLINE);
if (parser.currentToken() == XContentParser.Token.START_OBJECT) {
// convert the template to json which is the only supported XContentType (see CustomMustacheFactory#createEncoder)
- try (XContentBuilder builder = XContentFactory.jsonBuilder()) {
- request.setScript(Strings.toString(builder.copyCurrentStructure(parser)));
+ try (XContentBuilder builder = MediaTypeRegistry.JSON.contentBuilder()) {
+ request.setScript(builder.copyCurrentStructure(parser).toString());
} catch (IOException e) {
throw new ParsingException(parser.getTokenLocation(), "Could not parse inline template", e);
}
diff --git a/modules/lang-mustache/src/main/java/org/opensearch/script/mustache/SearchTemplateResponse.java b/modules/lang-mustache/src/main/java/org/opensearch/script/mustache/SearchTemplateResponse.java
index 6e56ecf3950bb..da67a0d2dd13a 100644
--- a/modules/lang-mustache/src/main/java/org/opensearch/script/mustache/SearchTemplateResponse.java
+++ b/modules/lang-mustache/src/main/java/org/opensearch/script/mustache/SearchTemplateResponse.java
@@ -40,8 +40,8 @@
import org.opensearch.core.common.io.stream.StreamOutput;
import org.opensearch.common.xcontent.StatusToXContentObject;
import org.opensearch.core.xcontent.MediaType;
+import org.opensearch.core.xcontent.MediaTypeRegistry;
import org.opensearch.core.xcontent.XContentBuilder;
-import org.opensearch.common.xcontent.XContentFactory;
import org.opensearch.core.xcontent.XContentParser;
import org.opensearch.common.xcontent.XContentType;
import org.opensearch.core.rest.RestStatus;
@@ -104,11 +104,11 @@ public static SearchTemplateResponse fromXContent(XContentParser parser) throws
if (contentAsMap.containsKey(TEMPLATE_OUTPUT_FIELD.getPreferredName())) {
Object source = contentAsMap.get(TEMPLATE_OUTPUT_FIELD.getPreferredName());
- XContentBuilder builder = XContentFactory.contentBuilder(XContentType.JSON).value(source);
+ XContentBuilder builder = MediaTypeRegistry.contentBuilder(XContentType.JSON).value(source);
searchTemplateResponse.setSource(BytesReference.bytes(builder));
} else {
MediaType contentType = parser.contentType();
- XContentBuilder builder = XContentFactory.contentBuilder(contentType).map(contentAsMap);
+ XContentBuilder builder = MediaTypeRegistry.contentBuilder(contentType).map(contentAsMap);
XContentParser searchResponseParser = contentType.xContent()
.createParser(parser.getXContentRegistry(), parser.getDeprecationHandler(), BytesReference.bytes(builder).streamInput());
diff --git a/modules/lang-mustache/src/main/java/org/opensearch/script/mustache/TransportSearchTemplateAction.java b/modules/lang-mustache/src/main/java/org/opensearch/script/mustache/TransportSearchTemplateAction.java
index bbda8d15d9d41..f6b0cc4eecf9a 100644
--- a/modules/lang-mustache/src/main/java/org/opensearch/script/mustache/TransportSearchTemplateAction.java
+++ b/modules/lang-mustache/src/main/java/org/opensearch/script/mustache/TransportSearchTemplateAction.java
@@ -42,7 +42,6 @@
import org.opensearch.common.inject.Inject;
import org.opensearch.common.xcontent.LoggingDeprecationHandler;
import org.opensearch.core.xcontent.NamedXContentRegistry;
-import org.opensearch.common.xcontent.XContentFactory;
import org.opensearch.core.xcontent.XContentParser;
import org.opensearch.common.xcontent.XContentType;
import org.opensearch.rest.action.search.RestSearchAction;
@@ -132,8 +131,7 @@ static SearchRequest convert(
}
try (
- XContentParser parser = XContentFactory.xContent(XContentType.JSON)
- .createParser(xContentRegistry, LoggingDeprecationHandler.INSTANCE, source)
+ XContentParser parser = XContentType.JSON.xContent().createParser(xContentRegistry, LoggingDeprecationHandler.INSTANCE, source)
) {
SearchSourceBuilder builder = SearchSourceBuilder.searchSource();
builder.parseXContent(parser, false);
diff --git a/modules/lang-mustache/src/test/java/org/opensearch/script/mustache/SearchTemplateRequestXContentTests.java b/modules/lang-mustache/src/test/java/org/opensearch/script/mustache/SearchTemplateRequestXContentTests.java
index 664e87da0a7d8..e3669ef67ff36 100644
--- a/modules/lang-mustache/src/test/java/org/opensearch/script/mustache/SearchTemplateRequestXContentTests.java
+++ b/modules/lang-mustache/src/test/java/org/opensearch/script/mustache/SearchTemplateRequestXContentTests.java
@@ -33,9 +33,9 @@
package org.opensearch.script.mustache;
import org.opensearch.core.common.bytes.BytesReference;
+import org.opensearch.core.xcontent.MediaTypeRegistry;
import org.opensearch.core.xcontent.ToXContent;
import org.opensearch.core.xcontent.XContentBuilder;
-import org.opensearch.common.xcontent.XContentFactory;
import org.opensearch.core.xcontent.XContentParseException;
import org.opensearch.core.xcontent.XContentParser;
import org.opensearch.common.xcontent.XContentType;
@@ -101,7 +101,7 @@ public void testToXContentWithInlineTemplate() throws IOException {
request.setScriptParams(scriptParams);
XContentType contentType = randomFrom(XContentType.values());
- XContentBuilder expectedRequest = XContentFactory.contentBuilder(contentType)
+ XContentBuilder expectedRequest = MediaTypeRegistry.contentBuilder(contentType)
.startObject()
.field("source", "{\"query\": { \"match\" : { \"{{my_field}}\" : \"{{my_value}}\" } } }")
.startObject("params")
@@ -112,7 +112,7 @@ public void testToXContentWithInlineTemplate() throws IOException {
.field("profile", true)
.endObject();
- XContentBuilder actualRequest = XContentFactory.contentBuilder(contentType);
+ XContentBuilder actualRequest = MediaTypeRegistry.contentBuilder(contentType);
request.toXContent(actualRequest, ToXContent.EMPTY_PARAMS);
assertToXContentEquivalent(BytesReference.bytes(expectedRequest), BytesReference.bytes(actualRequest), contentType);
@@ -131,7 +131,7 @@ public void testToXContentWithStoredTemplate() throws IOException {
request.setScriptParams(params);
XContentType contentType = randomFrom(XContentType.values());
- XContentBuilder expectedRequest = XContentFactory.contentBuilder(contentType)
+ XContentBuilder expectedRequest = MediaTypeRegistry.contentBuilder(contentType)
.startObject()
.field("id", "match_template")
.startObject("params")
@@ -142,7 +142,7 @@ public void testToXContentWithStoredTemplate() throws IOException {
.field("profile", false)
.endObject();
- XContentBuilder actualRequest = XContentFactory.contentBuilder(contentType);
+ XContentBuilder actualRequest = MediaTypeRegistry.contentBuilder(contentType);
request.toXContent(actualRequest, ToXContent.EMPTY_PARAMS);
assertToXContentEquivalent(BytesReference.bytes(expectedRequest), BytesReference.bytes(actualRequest), contentType);
diff --git a/modules/lang-mustache/src/test/java/org/opensearch/script/mustache/SearchTemplateResponseTests.java b/modules/lang-mustache/src/test/java/org/opensearch/script/mustache/SearchTemplateResponseTests.java
index 804a72561c10e..fd0a4e9612a8f 100644
--- a/modules/lang-mustache/src/test/java/org/opensearch/script/mustache/SearchTemplateResponseTests.java
+++ b/modules/lang-mustache/src/test/java/org/opensearch/script/mustache/SearchTemplateResponseTests.java
@@ -36,6 +36,7 @@
import org.opensearch.action.search.SearchResponse;
import org.opensearch.action.search.ShardSearchFailure;
import org.opensearch.core.common.bytes.BytesReference;
+import org.opensearch.core.xcontent.MediaTypeRegistry;
import org.opensearch.core.xcontent.ToXContent;
import org.opensearch.core.xcontent.XContentBuilder;
import org.opensearch.common.xcontent.XContentFactory;
@@ -164,7 +165,7 @@ public void testSourceToXContent() throws IOException {
response.setSource(BytesReference.bytes(source));
XContentType contentType = randomFrom(XContentType.values());
- XContentBuilder expectedResponse = XContentFactory.contentBuilder(contentType)
+ XContentBuilder expectedResponse = MediaTypeRegistry.contentBuilder(contentType)
.startObject()
.startObject("template_output")
.startObject("query")
@@ -175,7 +176,7 @@ public void testSourceToXContent() throws IOException {
.endObject()
.endObject();
- XContentBuilder actualResponse = XContentFactory.contentBuilder(contentType);
+ XContentBuilder actualResponse = MediaTypeRegistry.contentBuilder(contentType);
response.toXContent(actualResponse, ToXContent.EMPTY_PARAMS);
assertToXContentEquivalent(BytesReference.bytes(expectedResponse), BytesReference.bytes(actualResponse), contentType);
@@ -210,7 +211,7 @@ public void testSearchResponseToXContent() throws IOException {
response.setResponse(searchResponse);
XContentType contentType = randomFrom(XContentType.values());
- XContentBuilder expectedResponse = XContentFactory.contentBuilder(contentType)
+ XContentBuilder expectedResponse = MediaTypeRegistry.contentBuilder(contentType)
.startObject()
.field("took", 0)
.field("timed_out", false)
@@ -235,7 +236,7 @@ public void testSearchResponseToXContent() throws IOException {
.endObject()
.endObject();
- XContentBuilder actualResponse = XContentFactory.contentBuilder(contentType);
+ XContentBuilder actualResponse = MediaTypeRegistry.contentBuilder(contentType);
response.toXContent(actualResponse, ToXContent.EMPTY_PARAMS);
assertToXContentEquivalent(BytesReference.bytes(expectedResponse), BytesReference.bytes(actualResponse), contentType);
diff --git a/modules/lang-painless/src/main/java/org/opensearch/painless/api/LimitedCharSequence.java b/modules/lang-painless/src/main/java/org/opensearch/painless/api/LimitedCharSequence.java
index 8084420295280..c8a28158ad8db 100644
--- a/modules/lang-painless/src/main/java/org/opensearch/painless/api/LimitedCharSequence.java
+++ b/modules/lang-painless/src/main/java/org/opensearch/painless/api/LimitedCharSequence.java
@@ -32,8 +32,8 @@
package org.opensearch.painless.api;
-import org.opensearch.common.breaker.CircuitBreaker;
-import org.opensearch.common.breaker.CircuitBreakingException;
+import org.opensearch.core.common.breaker.CircuitBreaker;
+import org.opensearch.core.common.breaker.CircuitBreakingException;
import org.opensearch.painless.CompilerSettings;
import java.util.regex.Pattern;
diff --git a/modules/lang-painless/src/main/java/org/opensearch/painless/lookup/PainlessClass.java b/modules/lang-painless/src/main/java/org/opensearch/painless/lookup/PainlessClass.java
index efa2d51524557..fdf7df94252b6 100644
--- a/modules/lang-painless/src/main/java/org/opensearch/painless/lookup/PainlessClass.java
+++ b/modules/lang-painless/src/main/java/org/opensearch/painless/lookup/PainlessClass.java
@@ -32,7 +32,7 @@
package org.opensearch.painless.lookup;
-import org.opensearch.common.util.CollectionUtils;
+import org.opensearch.core.common.util.CollectionUtils;
import java.lang.invoke.MethodHandle;
import java.util.Map;
diff --git a/modules/lang-painless/src/main/java/org/opensearch/painless/lookup/PainlessLookup.java b/modules/lang-painless/src/main/java/org/opensearch/painless/lookup/PainlessLookup.java
index 1249a9cffecb2..9a3b8bf9e2eee 100644
--- a/modules/lang-painless/src/main/java/org/opensearch/painless/lookup/PainlessLookup.java
+++ b/modules/lang-painless/src/main/java/org/opensearch/painless/lookup/PainlessLookup.java
@@ -32,7 +32,7 @@
package org.opensearch.painless.lookup;
-import org.opensearch.common.util.CollectionUtils;
+import org.opensearch.core.common.util.CollectionUtils;
import java.lang.invoke.MethodHandle;
import java.util.Map;
diff --git a/modules/lang-painless/src/test/java/org/opensearch/painless/RegexLimitTests.java b/modules/lang-painless/src/test/java/org/opensearch/painless/RegexLimitTests.java
index c3233bc0d924a..26bebfdee2fd0 100644
--- a/modules/lang-painless/src/test/java/org/opensearch/painless/RegexLimitTests.java
+++ b/modules/lang-painless/src/test/java/org/opensearch/painless/RegexLimitTests.java
@@ -34,7 +34,7 @@
import org.junit.AfterClass;
import org.junit.BeforeClass;
-import org.opensearch.common.breaker.CircuitBreakingException;
+import org.opensearch.core.common.breaker.CircuitBreakingException;
import org.opensearch.common.settings.Settings;
import java.util.Collections;
diff --git a/modules/lang-painless/src/test/java/org/opensearch/painless/api/LimitedCharSequenceTests.java b/modules/lang-painless/src/test/java/org/opensearch/painless/api/LimitedCharSequenceTests.java
index a7787f4bc3c29..4117eb331197f 100644
--- a/modules/lang-painless/src/test/java/org/opensearch/painless/api/LimitedCharSequenceTests.java
+++ b/modules/lang-painless/src/test/java/org/opensearch/painless/api/LimitedCharSequenceTests.java
@@ -32,7 +32,7 @@
package org.opensearch.painless.api;
-import org.opensearch.common.breaker.CircuitBreakingException;
+import org.opensearch.core.common.breaker.CircuitBreakingException;
import org.opensearch.test.OpenSearchTestCase;
import java.util.regex.Pattern;
diff --git a/modules/mapper-extras/src/test/java/org/opensearch/index/mapper/RankFeatureFieldMapperTests.java b/modules/mapper-extras/src/test/java/org/opensearch/index/mapper/RankFeatureFieldMapperTests.java
index 6412059075e5c..fee9471444c19 100644
--- a/modules/mapper-extras/src/test/java/org/opensearch/index/mapper/RankFeatureFieldMapperTests.java
+++ b/modules/mapper-extras/src/test/java/org/opensearch/index/mapper/RankFeatureFieldMapperTests.java
@@ -38,7 +38,6 @@
import org.apache.lucene.index.IndexableField;
import org.apache.lucene.search.Query;
import org.apache.lucene.search.TermQuery;
-import org.opensearch.common.Strings;
import org.opensearch.core.xcontent.XContentBuilder;
import org.opensearch.plugins.Plugin;
@@ -91,7 +90,7 @@ protected void minimalMapping(XContentBuilder b) throws IOException {
public void testDefaults() throws Exception {
DocumentMapper mapper = createDocumentMapper(fieldMapping(this::minimalMapping));
- assertEquals(Strings.toString(fieldMapping(this::minimalMapping)), mapper.mappingSource().toString());
+ assertEquals(fieldMapping(this::minimalMapping).toString(), mapper.mappingSource().toString());
ParsedDocument doc1 = mapper.parse(source(b -> b.field("field", 10)));
IndexableField[] fields = doc1.rootDoc().getFields("_feature");
diff --git a/modules/mapper-extras/src/test/java/org/opensearch/index/mapper/RankFeatureMetaFieldMapperTests.java b/modules/mapper-extras/src/test/java/org/opensearch/index/mapper/RankFeatureMetaFieldMapperTests.java
index 63b1b4760b6fe..d35368350592c 100644
--- a/modules/mapper-extras/src/test/java/org/opensearch/index/mapper/RankFeatureMetaFieldMapperTests.java
+++ b/modules/mapper-extras/src/test/java/org/opensearch/index/mapper/RankFeatureMetaFieldMapperTests.java
@@ -32,11 +32,10 @@
package org.opensearch.index.mapper;
-import org.opensearch.common.Strings;
import org.opensearch.core.common.bytes.BytesReference;
import org.opensearch.common.compress.CompressedXContent;
-import org.opensearch.common.xcontent.XContentFactory;
import org.opensearch.common.xcontent.XContentType;
+import org.opensearch.core.xcontent.MediaTypeRegistry;
import org.opensearch.index.IndexService;
import org.opensearch.plugins.Plugin;
import org.opensearch.test.OpenSearchSingleNodeTestCase;
@@ -61,18 +60,17 @@ protected Collection> getPlugins() {
}
public void testBasics() throws Exception {
- String mapping = Strings.toString(
- XContentFactory.jsonBuilder()
- .startObject()
- .startObject("type")
- .startObject("properties")
- .startObject("field")
- .field("type", "rank_feature")
- .endObject()
- .endObject()
- .endObject()
- .endObject()
- );
+ String mapping = MediaTypeRegistry.JSON.contentBuilder()
+ .startObject()
+ .startObject("type")
+ .startObject("properties")
+ .startObject("field")
+ .field("type", "rank_feature")
+ .endObject()
+ .endObject()
+ .endObject()
+ .endObject()
+ .toString();
DocumentMapper mapper = parser.parse("type", new CompressedXContent(mapping));
@@ -85,10 +83,12 @@ public void testBasics() throws Exception {
* and parsing of a document fails if the document contains these meta-fields.
*/
public void testDocumentParsingFailsOnMetaField() throws Exception {
- String mapping = Strings.toString(XContentFactory.jsonBuilder().startObject().startObject("_doc").endObject().endObject());
+ String mapping = MediaTypeRegistry.JSON.contentBuilder().startObject().startObject("_doc").endObject().endObject().toString();
DocumentMapper mapper = parser.parse("_doc", new CompressedXContent(mapping));
String rfMetaField = RankFeatureMetaFieldMapper.CONTENT_TYPE;
- BytesReference bytes = BytesReference.bytes(XContentFactory.jsonBuilder().startObject().field(rfMetaField, 0).endObject());
+ BytesReference bytes = BytesReference.bytes(
+ MediaTypeRegistry.JSON.contentBuilder().startObject().field(rfMetaField, 0).endObject()
+ );
MapperParsingException e = expectThrows(
MapperParsingException.class,
() -> mapper.parse(new SourceToParse("test", "1", bytes, XContentType.JSON))
diff --git a/modules/mapper-extras/src/test/java/org/opensearch/index/mapper/RankFeaturesFieldMapperTests.java b/modules/mapper-extras/src/test/java/org/opensearch/index/mapper/RankFeaturesFieldMapperTests.java
index 6c844bae73da4..b95572835e612 100644
--- a/modules/mapper-extras/src/test/java/org/opensearch/index/mapper/RankFeaturesFieldMapperTests.java
+++ b/modules/mapper-extras/src/test/java/org/opensearch/index/mapper/RankFeaturesFieldMapperTests.java
@@ -34,7 +34,6 @@
import org.apache.lucene.document.FeatureField;
import org.apache.lucene.index.IndexableField;
-import org.opensearch.common.Strings;
import org.opensearch.core.xcontent.XContentBuilder;
import org.opensearch.plugins.Plugin;
import org.hamcrest.Matchers;
@@ -79,7 +78,7 @@ protected boolean supportsMeta() {
public void testDefaults() throws Exception {
DocumentMapper mapper = createDocumentMapper(fieldMapping(this::minimalMapping));
- assertEquals(Strings.toString(fieldMapping(this::minimalMapping)), mapper.mappingSource().toString());
+ assertEquals(fieldMapping(this::minimalMapping).toString(), mapper.mappingSource().toString());
ParsedDocument doc1 = mapper.parse(source(this::writeField));
diff --git a/modules/mapper-extras/src/test/java/org/opensearch/index/mapper/ScaledFloatFieldMapperTests.java b/modules/mapper-extras/src/test/java/org/opensearch/index/mapper/ScaledFloatFieldMapperTests.java
index 390f10c0684bd..4de11d7f64e8e 100644
--- a/modules/mapper-extras/src/test/java/org/opensearch/index/mapper/ScaledFloatFieldMapperTests.java
+++ b/modules/mapper-extras/src/test/java/org/opensearch/index/mapper/ScaledFloatFieldMapperTests.java
@@ -34,7 +34,6 @@
import org.apache.lucene.index.DocValuesType;
import org.apache.lucene.index.IndexableField;
-import org.opensearch.common.Strings;
import org.opensearch.core.common.bytes.BytesReference;
import org.opensearch.core.xcontent.XContentBuilder;
import org.opensearch.common.xcontent.XContentFactory;
@@ -95,7 +94,7 @@ public void testExistsQueryDocValuesDisabled() throws IOException {
public void testDefaults() throws Exception {
XContentBuilder mapping = fieldMapping(b -> b.field("type", "scaled_float").field("scaling_factor", 10.0));
DocumentMapper mapper = createDocumentMapper(mapping);
- assertEquals(Strings.toString(mapping), mapper.mappingSource().toString());
+ assertEquals(mapping.toString(), mapper.mappingSource().toString());
ParsedDocument doc = mapper.parse(source(b -> b.field("field", 123)));
IndexableField[] fields = doc.rootDoc().getFields("field");
diff --git a/modules/mapper-extras/src/test/java/org/opensearch/index/mapper/SearchAsYouTypeFieldMapperTests.java b/modules/mapper-extras/src/test/java/org/opensearch/index/mapper/SearchAsYouTypeFieldMapperTests.java
index 5e67aaa2ed246..551bd38b65f59 100644
--- a/modules/mapper-extras/src/test/java/org/opensearch/index/mapper/SearchAsYouTypeFieldMapperTests.java
+++ b/modules/mapper-extras/src/test/java/org/opensearch/index/mapper/SearchAsYouTypeFieldMapperTests.java
@@ -50,9 +50,9 @@
import org.apache.lucene.search.Query;
import org.apache.lucene.search.SynonymQuery;
import org.apache.lucene.search.TermQuery;
-import org.opensearch.common.Strings;
import org.opensearch.common.lucene.search.MultiPhrasePrefixQuery;
import org.opensearch.common.xcontent.XContentType;
+import org.opensearch.core.common.Strings;
import org.opensearch.core.xcontent.XContentBuilder;
import org.opensearch.index.IndexSettings;
import org.opensearch.index.analysis.AnalyzerScope;
diff --git a/modules/mapper-extras/src/test/java/org/opensearch/index/query/RankFeatureQueryBuilderTests.java b/modules/mapper-extras/src/test/java/org/opensearch/index/query/RankFeatureQueryBuilderTests.java
index f57aac8a244b7..a8d672c025af0 100644
--- a/modules/mapper-extras/src/test/java/org/opensearch/index/query/RankFeatureQueryBuilderTests.java
+++ b/modules/mapper-extras/src/test/java/org/opensearch/index/query/RankFeatureQueryBuilderTests.java
@@ -36,7 +36,6 @@
import org.apache.lucene.search.MatchNoDocsQuery;
import org.apache.lucene.search.Query;
import org.opensearch.action.admin.indices.mapping.put.PutMappingRequest;
-import org.opensearch.common.Strings;
import org.opensearch.common.compress.CompressedXContent;
import org.opensearch.index.mapper.MapperExtrasModulePlugin;
import org.opensearch.index.mapper.MapperService;
@@ -61,16 +60,14 @@ protected void initializeAdditionalMappings(MapperService mapperService) throws
mapperService.merge(
"_doc",
new CompressedXContent(
- Strings.toString(
- PutMappingRequest.simpleMapping(
- "my_feature_field",
- "type=rank_feature",
- "my_negative_feature_field",
- "type=rank_feature,positive_score_impact=false",
- "my_feature_vector_field",
- "type=rank_features"
- )
- )
+ PutMappingRequest.simpleMapping(
+ "my_feature_field",
+ "type=rank_feature",
+ "my_negative_feature_field",
+ "type=rank_feature,positive_score_impact=false",
+ "my_feature_vector_field",
+ "type=rank_features"
+ ).toString()
),
MapperService.MergeReason.MAPPING_UPDATE
);
diff --git a/modules/parent-join/src/internalClusterTest/java/org/opensearch/join/query/ParentChildTestCase.java b/modules/parent-join/src/internalClusterTest/java/org/opensearch/join/query/ParentChildTestCase.java
index 34a6af6ee3639..9b7ad4425bd5e 100644
--- a/modules/parent-join/src/internalClusterTest/java/org/opensearch/join/query/ParentChildTestCase.java
+++ b/modules/parent-join/src/internalClusterTest/java/org/opensearch/join/query/ParentChildTestCase.java
@@ -32,7 +32,6 @@
package org.opensearch.join.query;
import org.opensearch.action.index.IndexRequestBuilder;
-import org.opensearch.common.Strings;
import org.opensearch.common.settings.Settings;
import org.opensearch.core.xcontent.XContentBuilder;
import org.opensearch.common.xcontent.XContentHelper;
@@ -85,7 +84,7 @@ protected IndexRequestBuilder createIndexRequest(String index, String type, Stri
protected IndexRequestBuilder createIndexRequest(String index, String type, String id, String parentId, XContentBuilder builder)
throws IOException {
- Map source = XContentHelper.convertToMap(JsonXContent.jsonXContent, Strings.toString(builder), false);
+ Map source = XContentHelper.convertToMap(JsonXContent.jsonXContent, builder.toString(), false);
return createIndexRequest(index, type, id, parentId, source);
}
diff --git a/modules/parent-join/src/main/java/org/opensearch/join/query/HasChildQueryBuilder.java b/modules/parent-join/src/main/java/org/opensearch/join/query/HasChildQueryBuilder.java
index 4f26a15031dd4..b38bc5d7b45fc 100644
--- a/modules/parent-join/src/main/java/org/opensearch/join/query/HasChildQueryBuilder.java
+++ b/modules/parent-join/src/main/java/org/opensearch/join/query/HasChildQueryBuilder.java
@@ -416,11 +416,12 @@ public void visit(QueryVisitor visitor) {
}
@Override
- public Query rewrite(IndexReader reader) throws IOException {
- Query rewritten = super.rewrite(reader);
+ public Query rewrite(IndexSearcher searcher) throws IOException {
+ Query rewritten = super.rewrite(searcher);
if (rewritten != this) {
return rewritten;
}
+ IndexReader reader = searcher.getIndexReader();
if (reader instanceof DirectoryReader) {
IndexSearcher indexSearcher = new IndexSearcher(reader);
indexSearcher.setQueryCache(null);
diff --git a/modules/parent-join/src/test/java/org/opensearch/join/mapper/ParentJoinFieldMapperTests.java b/modules/parent-join/src/test/java/org/opensearch/join/mapper/ParentJoinFieldMapperTests.java
index cd8f18b679750..213ba43ee34cd 100644
--- a/modules/parent-join/src/test/java/org/opensearch/join/mapper/ParentJoinFieldMapperTests.java
+++ b/modules/parent-join/src/test/java/org/opensearch/join/mapper/ParentJoinFieldMapperTests.java
@@ -32,11 +32,10 @@
package org.opensearch.join.mapper;
-import org.opensearch.common.Strings;
-import org.opensearch.core.common.bytes.BytesReference;
import org.opensearch.common.compress.CompressedXContent;
import org.opensearch.common.xcontent.XContentFactory;
import org.opensearch.common.xcontent.XContentType;
+import org.opensearch.core.common.bytes.BytesReference;
import org.opensearch.index.IndexService;
import org.opensearch.index.mapper.DocumentMapper;
import org.opensearch.index.mapper.MapperException;
@@ -60,19 +59,18 @@ protected Collection> getPlugins() {
}
public void testSingleLevel() throws Exception {
- String mapping = Strings.toString(
- XContentFactory.jsonBuilder()
- .startObject()
- .startObject("properties")
- .startObject("join_field")
- .field("type", "join")
- .startObject("relations")
- .field("parent", "child")
- .endObject()
- .endObject()
- .endObject()
- .endObject()
- );
+ String mapping = XContentFactory.jsonBuilder()
+ .startObject()
+ .startObject("properties")
+ .startObject("join_field")
+ .field("type", "join")
+ .startObject("relations")
+ .field("parent", "child")
+ .endObject()
+ .endObject()
+ .endObject()
+ .endObject()
+ .toString();
IndexService service = createIndex("test");
DocumentMapper docMapper = service.mapperService()
.merge("type", new CompressedXContent(mapping), MapperService.MergeReason.MAPPING_UPDATE);
@@ -133,19 +131,18 @@ public void testSingleLevel() throws Exception {
}
public void testParentIdSpecifiedAsNumber() throws Exception {
- String mapping = Strings.toString(
- XContentFactory.jsonBuilder()
- .startObject()
- .startObject("properties")
- .startObject("join_field")
- .field("type", "join")
- .startObject("relations")
- .field("parent", "child")
- .endObject()
- .endObject()
- .endObject()
- .endObject()
- );
+ String mapping = XContentFactory.jsonBuilder()
+ .startObject()
+ .startObject("properties")
+ .startObject("join_field")
+ .field("type", "join")
+ .startObject("relations")
+ .field("parent", "child")
+ .endObject()
+ .endObject()
+ .endObject()
+ .endObject()
+ .toString();
IndexService service = createIndex("test");
DocumentMapper docMapper = service.mapperService()
.merge("type", new CompressedXContent(mapping), MapperService.MergeReason.MAPPING_UPDATE);
@@ -190,20 +187,19 @@ public void testParentIdSpecifiedAsNumber() throws Exception {
}
public void testMultipleLevels() throws Exception {
- String mapping = Strings.toString(
- XContentFactory.jsonBuilder()
- .startObject()
- .startObject("properties")
- .startObject("join_field")
- .field("type", "join")
- .startObject("relations")
- .field("parent", "child")
- .field("child", "grand_child")
- .endObject()
- .endObject()
- .endObject()
- .endObject()
- );
+ String mapping = XContentFactory.jsonBuilder()
+ .startObject()
+ .startObject("properties")
+ .startObject("join_field")
+ .field("type", "join")
+ .startObject("relations")
+ .field("parent", "child")
+ .field("child", "grand_child")
+ .endObject()
+ .endObject()
+ .endObject()
+ .endObject()
+ .toString();
IndexService service = createIndex("test");
DocumentMapper docMapper = service.mapperService()
.merge("type", new CompressedXContent(mapping), MapperService.MergeReason.MAPPING_UPDATE);
@@ -323,39 +319,37 @@ public void testMultipleLevels() throws Exception {
}
public void testUpdateRelations() throws Exception {
- String mapping = Strings.toString(
- XContentFactory.jsonBuilder()
+ String mapping = XContentFactory.jsonBuilder()
+ .startObject()
+ .startObject("properties")
+ .startObject("join_field")
+ .field("type", "join")
+ .startObject("relations")
+ .field("parent", "child")
+ .array("child", "grand_child1", "grand_child2")
+ .endObject()
+ .endObject()
+ .endObject()
+ .endObject()
+ .toString();
+ IndexService indexService = createIndex("test");
+ DocumentMapper docMapper = indexService.mapperService()
+ .merge("type", new CompressedXContent(mapping), MapperService.MergeReason.MAPPING_UPDATE);
+ assertTrue(docMapper.mappers().getMapper("join_field") == ParentJoinFieldMapper.getMapper(indexService.mapperService()));
+
+ {
+ final String updateMapping = XContentFactory.jsonBuilder()
.startObject()
.startObject("properties")
.startObject("join_field")
.field("type", "join")
.startObject("relations")
- .field("parent", "child")
.array("child", "grand_child1", "grand_child2")
.endObject()
.endObject()
.endObject()
.endObject()
- );
- IndexService indexService = createIndex("test");
- DocumentMapper docMapper = indexService.mapperService()
- .merge("type", new CompressedXContent(mapping), MapperService.MergeReason.MAPPING_UPDATE);
- assertTrue(docMapper.mappers().getMapper("join_field") == ParentJoinFieldMapper.getMapper(indexService.mapperService()));
-
- {
- final String updateMapping = Strings.toString(
- XContentFactory.jsonBuilder()
- .startObject()
- .startObject("properties")
- .startObject("join_field")
- .field("type", "join")
- .startObject("relations")
- .array("child", "grand_child1", "grand_child2")
- .endObject()
- .endObject()
- .endObject()
- .endObject()
- );
+ .toString();
IllegalArgumentException exc = expectThrows(
IllegalArgumentException.class,
() -> indexService.mapperService()
@@ -365,20 +359,19 @@ public void testUpdateRelations() throws Exception {
}
{
- final String updateMapping = Strings.toString(
- XContentFactory.jsonBuilder()
- .startObject()
- .startObject("properties")
- .startObject("join_field")
- .field("type", "join")
- .startObject("relations")
- .field("parent", "child")
- .field("child", "grand_child1")
- .endObject()
- .endObject()
- .endObject()
- .endObject()
- );
+ final String updateMapping = XContentFactory.jsonBuilder()
+ .startObject()
+ .startObject("properties")
+ .startObject("join_field")
+ .field("type", "join")
+ .startObject("relations")
+ .field("parent", "child")
+ .field("child", "grand_child1")
+ .endObject()
+ .endObject()
+ .endObject()
+ .endObject()
+ .toString();
IllegalArgumentException exc = expectThrows(
IllegalArgumentException.class,
() -> indexService.mapperService()
@@ -388,21 +381,20 @@ public void testUpdateRelations() throws Exception {
}
{
- final String updateMapping = Strings.toString(
- XContentFactory.jsonBuilder()
- .startObject()
- .startObject("properties")
- .startObject("join_field")
- .field("type", "join")
- .startObject("relations")
- .field("uber_parent", "parent")
- .field("parent", "child")
- .array("child", "grand_child1", "grand_child2")
- .endObject()
- .endObject()
- .endObject()
- .endObject()
- );
+ final String updateMapping = XContentFactory.jsonBuilder()
+ .startObject()
+ .startObject("properties")
+ .startObject("join_field")
+ .field("type", "join")
+ .startObject("relations")
+ .field("uber_parent", "parent")
+ .field("parent", "child")
+ .array("child", "grand_child1", "grand_child2")
+ .endObject()
+ .endObject()
+ .endObject()
+ .endObject()
+ .toString();
IllegalArgumentException exc = expectThrows(
IllegalArgumentException.class,
() -> indexService.mapperService()
@@ -412,21 +404,20 @@ public void testUpdateRelations() throws Exception {
}
{
- final String updateMapping = Strings.toString(
- XContentFactory.jsonBuilder()
- .startObject()
- .startObject("properties")
- .startObject("join_field")
- .field("type", "join")
- .startObject("relations")
- .field("parent", "child")
- .array("child", "grand_child1", "grand_child2")
- .field("grand_child2", "grand_grand_child")
- .endObject()
- .endObject()
- .endObject()
- .endObject()
- );
+ final String updateMapping = XContentFactory.jsonBuilder()
+ .startObject()
+ .startObject("properties")
+ .startObject("join_field")
+ .field("type", "join")
+ .startObject("relations")
+ .field("parent", "child")
+ .array("child", "grand_child1", "grand_child2")
+ .field("grand_child2", "grand_grand_child")
+ .endObject()
+ .endObject()
+ .endObject()
+ .endObject()
+ .toString();
IllegalArgumentException exc = expectThrows(
IllegalArgumentException.class,
() -> indexService.mapperService()
@@ -436,20 +427,19 @@ public void testUpdateRelations() throws Exception {
}
{
- final String updateMapping = Strings.toString(
- XContentFactory.jsonBuilder()
- .startObject()
- .startObject("properties")
- .startObject("join_field")
- .field("type", "join")
- .startObject("relations")
- .array("parent", "child", "child2")
- .array("child", "grand_child1", "grand_child2")
- .endObject()
- .endObject()
- .endObject()
- .endObject()
- );
+ final String updateMapping = XContentFactory.jsonBuilder()
+ .startObject()
+ .startObject("properties")
+ .startObject("join_field")
+ .field("type", "join")
+ .startObject("relations")
+ .array("parent", "child", "child2")
+ .array("child", "grand_child1", "grand_child2")
+ .endObject()
+ .endObject()
+ .endObject()
+ .endObject()
+ .toString();
docMapper = indexService.mapperService()
.merge("type", new CompressedXContent(updateMapping), MapperService.MergeReason.MAPPING_UPDATE);
ParentJoinFieldMapper mapper = ParentJoinFieldMapper.getMapper(indexService.mapperService());
@@ -462,21 +452,20 @@ public void testUpdateRelations() throws Exception {
}
{
- final String updateMapping = Strings.toString(
- XContentFactory.jsonBuilder()
- .startObject()
- .startObject("properties")
- .startObject("join_field")
- .field("type", "join")
- .startObject("relations")
- .array("parent", "child", "child2")
- .array("child", "grand_child1", "grand_child2")
- .array("other", "child_other1", "child_other2")
- .endObject()
- .endObject()
- .endObject()
- .endObject()
- );
+ final String updateMapping = XContentFactory.jsonBuilder()
+ .startObject()
+ .startObject("properties")
+ .startObject("join_field")
+ .field("type", "join")
+ .startObject("relations")
+ .array("parent", "child", "child2")
+ .array("child", "grand_child1", "grand_child2")
+ .array("other", "child_other1", "child_other2")
+ .endObject()
+ .endObject()
+ .endObject()
+ .endObject()
+ .toString();
docMapper = indexService.mapperService()
.merge("type", new CompressedXContent(updateMapping), MapperService.MergeReason.MAPPING_UPDATE);
ParentJoinFieldMapper mapper = ParentJoinFieldMapper.getMapper(indexService.mapperService());
@@ -492,23 +481,22 @@ public void testUpdateRelations() throws Exception {
}
public void testInvalidJoinFieldInsideObject() throws Exception {
- String mapping = Strings.toString(
- XContentFactory.jsonBuilder()
- .startObject()
- .startObject("properties")
- .startObject("object")
- .startObject("properties")
- .startObject("join_field")
- .field("type", "join")
- .startObject("relations")
- .field("parent", "child")
- .endObject()
- .endObject()
- .endObject()
- .endObject()
- .endObject()
- .endObject()
- );
+ String mapping = XContentFactory.jsonBuilder()
+ .startObject()
+ .startObject("properties")
+ .startObject("object")
+ .startObject("properties")
+ .startObject("join_field")
+ .field("type", "join")
+ .startObject("relations")
+ .field("parent", "child")
+ .endObject()
+ .endObject()
+ .endObject()
+ .endObject()
+ .endObject()
+ .endObject()
+ .toString();
IndexService indexService = createIndex("test");
MapperParsingException exc = expectThrows(
MapperParsingException.class,
@@ -521,24 +509,23 @@ public void testInvalidJoinFieldInsideObject() throws Exception {
}
public void testInvalidJoinFieldInsideMultiFields() throws Exception {
- String mapping = Strings.toString(
- XContentFactory.jsonBuilder()
- .startObject()
- .startObject("properties")
- .startObject("number")
- .field("type", "integer")
- .startObject("fields")
- .startObject("join_field")
- .field("type", "join")
- .startObject("relations")
- .field("parent", "child")
- .endObject()
- .endObject()
- .endObject()
- .endObject()
- .endObject()
- .endObject()
- );
+ String mapping = XContentFactory.jsonBuilder()
+ .startObject()
+ .startObject("properties")
+ .startObject("number")
+ .field("type", "integer")
+ .startObject("fields")
+ .startObject("join_field")
+ .field("type", "join")
+ .startObject("relations")
+ .field("parent", "child")
+ .endObject()
+ .endObject()
+ .endObject()
+ .endObject()
+ .endObject()
+ .endObject()
+ .toString();
IndexService indexService = createIndex("test");
MapperParsingException exc = expectThrows(
MapperParsingException.class,
@@ -553,26 +540,25 @@ public void testInvalidJoinFieldInsideMultiFields() throws Exception {
public void testMultipleJoinFields() throws Exception {
IndexService indexService = createIndex("test");
{
- String mapping = Strings.toString(
- XContentFactory.jsonBuilder()
- .startObject()
- .startObject("properties")
- .startObject("join_field")
- .field("type", "join")
- .startObject("relations")
- .field("parent", "child")
- .field("child", "grand_child")
- .endObject()
- .endObject()
- .startObject("another_join_field")
- .field("type", "join")
- .startObject("relations")
- .field("product", "item")
- .endObject()
- .endObject()
- .endObject()
- .endObject()
- );
+ String mapping = XContentFactory.jsonBuilder()
+ .startObject()
+ .startObject("properties")
+ .startObject("join_field")
+ .field("type", "join")
+ .startObject("relations")
+ .field("parent", "child")
+ .field("child", "grand_child")
+ .endObject()
+ .endObject()
+ .startObject("another_join_field")
+ .field("type", "join")
+ .startObject("relations")
+ .field("product", "item")
+ .endObject()
+ .endObject()
+ .endObject()
+ .endObject()
+ .toString();
MapperParsingException exc = expectThrows(
MapperParsingException.class,
() -> indexService.mapperService().merge("type", new CompressedXContent(mapping), MapperService.MergeReason.MAPPING_UPDATE)
@@ -581,43 +567,7 @@ public void testMultipleJoinFields() throws Exception {
}
{
- String mapping = Strings.toString(
- XContentFactory.jsonBuilder()
- .startObject()
- .startObject("properties")
- .startObject("join_field")
- .field("type", "join")
- .startObject("relations")
- .field("parent", "child")
- .field("child", "grand_child")
- .endObject()
- .endObject()
- .endObject()
- .endObject()
- );
- indexService.mapperService().merge("type", new CompressedXContent(mapping), MapperService.MergeReason.MAPPING_UPDATE);
- String updateMapping = Strings.toString(
- XContentFactory.jsonBuilder()
- .startObject()
- .startObject("properties")
- .startObject("another_join_field")
- .field("type", "join")
- .endObject()
- .endObject()
- .endObject()
- );
- MapperParsingException exc = expectThrows(
- MapperParsingException.class,
- () -> indexService.mapperService()
- .merge("type", new CompressedXContent(updateMapping), MapperService.MergeReason.MAPPING_UPDATE)
- );
- assertThat(exc.getMessage(), containsString("Field [_parent_join] is defined more than once"));
- }
- }
-
- public void testEagerGlobalOrdinals() throws Exception {
- String mapping = Strings.toString(
- XContentFactory.jsonBuilder()
+ String mapping = XContentFactory.jsonBuilder()
.startObject()
.startObject("properties")
.startObject("join_field")
@@ -629,7 +579,40 @@ public void testEagerGlobalOrdinals() throws Exception {
.endObject()
.endObject()
.endObject()
- );
+ .toString();
+ indexService.mapperService().merge("type", new CompressedXContent(mapping), MapperService.MergeReason.MAPPING_UPDATE);
+ String updateMapping = XContentFactory.jsonBuilder()
+ .startObject()
+ .startObject("properties")
+ .startObject("another_join_field")
+ .field("type", "join")
+ .endObject()
+ .endObject()
+ .endObject()
+ .toString();
+ MapperParsingException exc = expectThrows(
+ MapperParsingException.class,
+ () -> indexService.mapperService()
+ .merge("type", new CompressedXContent(updateMapping), MapperService.MergeReason.MAPPING_UPDATE)
+ );
+ assertThat(exc.getMessage(), containsString("Field [_parent_join] is defined more than once"));
+ }
+ }
+
+ public void testEagerGlobalOrdinals() throws Exception {
+ String mapping = XContentFactory.jsonBuilder()
+ .startObject()
+ .startObject("properties")
+ .startObject("join_field")
+ .field("type", "join")
+ .startObject("relations")
+ .field("parent", "child")
+ .field("child", "grand_child")
+ .endObject()
+ .endObject()
+ .endObject()
+ .endObject()
+ .toString();
IndexService service = createIndex("test");
DocumentMapper docMapper = service.mapperService()
.merge("type", new CompressedXContent(mapping), MapperService.MergeReason.MAPPING_UPDATE);
@@ -640,21 +623,20 @@ public void testEagerGlobalOrdinals() throws Exception {
assertNotNull(service.mapperService().fieldType("join_field#child"));
assertTrue(service.mapperService().fieldType("join_field#child").eagerGlobalOrdinals());
- mapping = Strings.toString(
- XContentFactory.jsonBuilder()
- .startObject()
- .startObject("properties")
- .startObject("join_field")
- .field("type", "join")
- .field("eager_global_ordinals", false)
- .startObject("relations")
- .field("parent", "child")
- .field("child", "grand_child")
- .endObject()
- .endObject()
- .endObject()
- .endObject()
- );
+ mapping = XContentFactory.jsonBuilder()
+ .startObject()
+ .startObject("properties")
+ .startObject("join_field")
+ .field("type", "join")
+ .field("eager_global_ordinals", false)
+ .startObject("relations")
+ .field("parent", "child")
+ .field("child", "grand_child")
+ .endObject()
+ .endObject()
+ .endObject()
+ .endObject()
+ .toString();
service.mapperService().merge("type", new CompressedXContent(mapping), MapperService.MergeReason.MAPPING_UPDATE);
assertFalse(service.mapperService().fieldType("join_field").eagerGlobalOrdinals());
assertNotNull(service.mapperService().fieldType("join_field#parent"));
diff --git a/modules/parent-join/src/test/java/org/opensearch/join/query/HasChildQueryBuilderTests.java b/modules/parent-join/src/test/java/org/opensearch/join/query/HasChildQueryBuilderTests.java
index 6610b103509b0..96220c247d909 100644
--- a/modules/parent-join/src/test/java/org/opensearch/join/query/HasChildQueryBuilderTests.java
+++ b/modules/parent-join/src/test/java/org/opensearch/join/query/HasChildQueryBuilderTests.java
@@ -46,7 +46,6 @@
import org.apache.lucene.search.similarities.Similarity;
import org.opensearch.OpenSearchException;
import org.opensearch.Version;
-import org.opensearch.common.Strings;
import org.opensearch.common.compress.CompressedXContent;
import org.opensearch.common.settings.Settings;
import org.opensearch.core.xcontent.XContentBuilder;
@@ -141,7 +140,7 @@ protected void initializeAdditionalMappings(MapperService mapperService) throws
.endObject()
.endObject();
- mapperService.merge(TYPE, new CompressedXContent(Strings.toString(mapping)), MapperService.MergeReason.MAPPING_UPDATE);
+ mapperService.merge(TYPE, new CompressedXContent(mapping.toString()), MapperService.MergeReason.MAPPING_UPDATE);
}
/**
diff --git a/modules/parent-join/src/test/java/org/opensearch/join/query/HasParentQueryBuilderTests.java b/modules/parent-join/src/test/java/org/opensearch/join/query/HasParentQueryBuilderTests.java
index 9921b6b040901..63af6873e14af 100644
--- a/modules/parent-join/src/test/java/org/opensearch/join/query/HasParentQueryBuilderTests.java
+++ b/modules/parent-join/src/test/java/org/opensearch/join/query/HasParentQueryBuilderTests.java
@@ -37,7 +37,6 @@
import org.apache.lucene.search.join.ScoreMode;
import org.opensearch.OpenSearchException;
import org.opensearch.Version;
-import org.opensearch.common.Strings;
import org.opensearch.common.compress.CompressedXContent;
import org.opensearch.core.xcontent.XContentBuilder;
import org.opensearch.index.mapper.MapperService;
@@ -122,7 +121,7 @@ protected void initializeAdditionalMappings(MapperService mapperService) throws
.endObject()
.endObject();
- mapperService.merge(TYPE, new CompressedXContent(Strings.toString(mapping)), MapperService.MergeReason.MAPPING_UPDATE);
+ mapperService.merge(TYPE, new CompressedXContent(mapping.toString()), MapperService.MergeReason.MAPPING_UPDATE);
}
/**
diff --git a/modules/parent-join/src/test/java/org/opensearch/join/query/ParentIdQueryBuilderTests.java b/modules/parent-join/src/test/java/org/opensearch/join/query/ParentIdQueryBuilderTests.java
index 88da6a6953d1e..ec555448fd218 100644
--- a/modules/parent-join/src/test/java/org/opensearch/join/query/ParentIdQueryBuilderTests.java
+++ b/modules/parent-join/src/test/java/org/opensearch/join/query/ParentIdQueryBuilderTests.java
@@ -39,7 +39,6 @@
import org.apache.lucene.search.Query;
import org.apache.lucene.search.TermQuery;
import org.opensearch.OpenSearchException;
-import org.opensearch.common.Strings;
import org.opensearch.common.compress.CompressedXContent;
import org.opensearch.core.xcontent.XContentBuilder;
import org.opensearch.index.mapper.MapperService;
@@ -111,7 +110,7 @@ protected void initializeAdditionalMappings(MapperService mapperService) throws
.endObject()
.endObject();
- mapperService.merge(TYPE, new CompressedXContent(Strings.toString(mapping)), MapperService.MergeReason.MAPPING_UPDATE);
+ mapperService.merge(TYPE, new CompressedXContent(mapping.toString()), MapperService.MergeReason.MAPPING_UPDATE);
}
@Override
diff --git a/modules/percolator/src/main/java/org/opensearch/percolator/PercolateQuery.java b/modules/percolator/src/main/java/org/opensearch/percolator/PercolateQuery.java
index 86d30f009e709..2702a02a7a951 100644
--- a/modules/percolator/src/main/java/org/opensearch/percolator/PercolateQuery.java
+++ b/modules/percolator/src/main/java/org/opensearch/percolator/PercolateQuery.java
@@ -32,7 +32,6 @@
package org.opensearch.percolator;
-import org.apache.lucene.index.IndexReader;
import org.apache.lucene.index.LeafReaderContext;
import org.apache.lucene.search.DocIdSetIterator;
import org.apache.lucene.search.Explanation;
@@ -89,8 +88,8 @@ final class PercolateQuery extends Query implements Accountable {
}
@Override
- public Query rewrite(IndexReader reader) throws IOException {
- Query rewritten = candidateMatchesQuery.rewrite(reader);
+ public Query rewrite(IndexSearcher searcher) throws IOException {
+ Query rewritten = candidateMatchesQuery.rewrite(searcher);
if (rewritten != candidateMatchesQuery) {
return new PercolateQuery(
name,
diff --git a/modules/percolator/src/main/java/org/opensearch/percolator/PercolateQueryBuilder.java b/modules/percolator/src/main/java/org/opensearch/percolator/PercolateQueryBuilder.java
index d5b61d5c5a517..9f49843c37ea5 100644
--- a/modules/percolator/src/main/java/org/opensearch/percolator/PercolateQueryBuilder.java
+++ b/modules/percolator/src/main/java/org/opensearch/percolator/PercolateQueryBuilder.java
@@ -71,6 +71,8 @@
import org.opensearch.common.xcontent.XContentHelper;
import org.opensearch.core.ParseField;
import org.opensearch.core.xcontent.ConstructingObjectParser;
+import org.opensearch.core.xcontent.MediaType;
+import org.opensearch.core.xcontent.MediaTypeRegistry;
import org.opensearch.core.xcontent.NamedXContentRegistry;
import org.opensearch.core.xcontent.XContentBuilder;
import org.opensearch.core.xcontent.XContentParser;
@@ -90,8 +92,8 @@
import org.opensearch.index.query.QueryShardContext;
import org.opensearch.index.query.QueryShardException;
import org.opensearch.index.query.Rewriteable;
-import org.opensearch.indices.breaker.CircuitBreakerService;
-import org.opensearch.indices.breaker.NoneCircuitBreakerService;
+import org.opensearch.core.indices.breaker.CircuitBreakerService;
+import org.opensearch.core.indices.breaker.NoneCircuitBreakerService;
import java.io.ByteArrayInputStream;
import java.io.IOException;
@@ -123,7 +125,7 @@ public class PercolateQueryBuilder extends AbstractQueryBuilder documents;
- private final XContentType documentXContentType;
+ private final MediaType documentXContentType;
private final String indexedDocumentIndex;
private final String indexedDocumentId;
@@ -150,7 +152,7 @@ public PercolateQueryBuilder(String field, BytesReference document, XContentType
* @param documents The binary blob containing document to percolate
* @param documentXContentType The content type of the binary blob containing the document to percolate
*/
- public PercolateQueryBuilder(String field, List documents, XContentType documentXContentType) {
+ public PercolateQueryBuilder(String field, List documents, MediaType documentXContentType) {
if (field == null) {
throw new IllegalArgumentException("[field] is a required argument");
}
@@ -252,7 +254,11 @@ protected PercolateQueryBuilder(String field, Supplier documentS
}
documents = in.readList(StreamInput::readBytesReference);
if (documents.isEmpty() == false) {
- documentXContentType = in.readEnum(XContentType.class);
+ if (in.getVersion().onOrAfter(Version.V_2_10_0)) {
+ documentXContentType = in.readMediaType();
+ } else {
+ documentXContentType = in.readEnum(XContentType.class);
+ }
} else {
documentXContentType = null;
}
@@ -298,7 +304,11 @@ protected void doWriteTo(StreamOutput out) throws IOException {
out.writeBytesReference(document);
}
if (documents.isEmpty() == false) {
- out.writeEnum(documentXContentType);
+ if (out.getVersion().onOrAfter(Version.V_2_10_0)) {
+ documentXContentType.writeTo(out);
+ } else {
+ out.writeEnum((XContentType) documentXContentType);
+ }
}
}
@@ -432,7 +442,7 @@ protected QueryBuilder doRewrite(QueryRewriteContext queryShardContext) {
PercolateQueryBuilder rewritten = new PercolateQueryBuilder(
field,
Collections.singletonList(source),
- XContentHelper.xContentType(source)
+ MediaTypeRegistry.xContentType(source)
);
if (name != null) {
rewritten.setName(name);
@@ -560,7 +570,7 @@ public List getDocuments() {
}
// pkg-private for testing
- XContentType getXContentType() {
+ MediaType getXContentType() {
return documentXContentType;
}
diff --git a/modules/percolator/src/test/java/org/opensearch/percolator/CandidateQueryTests.java b/modules/percolator/src/test/java/org/opensearch/percolator/CandidateQueryTests.java
index 3b953fcfe65e1..9f80069b99c10 100644
--- a/modules/percolator/src/test/java/org/opensearch/percolator/CandidateQueryTests.java
+++ b/modules/percolator/src/test/java/org/opensearch/percolator/CandidateQueryTests.java
@@ -94,7 +94,6 @@
import org.opensearch.Version;
import org.opensearch.cluster.metadata.IndexMetadata;
import org.opensearch.common.CheckedFunction;
-import org.opensearch.common.Strings;
import org.opensearch.core.common.bytes.BytesArray;
import org.opensearch.core.common.bytes.BytesReference;
import org.opensearch.common.compress.CompressedXContent;
@@ -163,51 +162,49 @@ public void init() throws Exception {
indexService = createIndex(indexName, Settings.EMPTY);
mapperService = indexService.mapperService();
- String mapper = Strings.toString(
- XContentFactory.jsonBuilder()
- .startObject()
- .startObject("type")
- .startObject("properties")
- .startObject("int_field")
- .field("type", "integer")
- .endObject()
- .startObject("long_field")
- .field("type", "long")
- .endObject()
- .startObject("half_float_field")
- .field("type", "half_float")
- .endObject()
- .startObject("float_field")
- .field("type", "float")
- .endObject()
- .startObject("double_field")
- .field("type", "double")
- .endObject()
- .startObject("ip_field")
- .field("type", "ip")
- .endObject()
- .startObject("field")
- .field("type", "keyword")
- .endObject()
- .endObject()
- .endObject()
- .endObject()
- );
+ String mapper = XContentFactory.jsonBuilder()
+ .startObject()
+ .startObject("type")
+ .startObject("properties")
+ .startObject("int_field")
+ .field("type", "integer")
+ .endObject()
+ .startObject("long_field")
+ .field("type", "long")
+ .endObject()
+ .startObject("half_float_field")
+ .field("type", "half_float")
+ .endObject()
+ .startObject("float_field")
+ .field("type", "float")
+ .endObject()
+ .startObject("double_field")
+ .field("type", "double")
+ .endObject()
+ .startObject("ip_field")
+ .field("type", "ip")
+ .endObject()
+ .startObject("field")
+ .field("type", "keyword")
+ .endObject()
+ .endObject()
+ .endObject()
+ .endObject()
+ .toString();
documentMapper = mapperService.merge("type", new CompressedXContent(mapper), MapperService.MergeReason.MAPPING_UPDATE);
String queryField = "query_field";
- String percolatorMapper = Strings.toString(
- XContentFactory.jsonBuilder()
- .startObject()
- .startObject("type")
- .startObject("properties")
- .startObject(queryField)
- .field("type", "percolator")
- .endObject()
- .endObject()
- .endObject()
- .endObject()
- );
+ String percolatorMapper = XContentFactory.jsonBuilder()
+ .startObject()
+ .startObject("type")
+ .startObject("properties")
+ .startObject(queryField)
+ .field("type", "percolator")
+ .endObject()
+ .endObject()
+ .endObject()
+ .endObject()
+ .toString();
mapperService.merge("type", new CompressedXContent(percolatorMapper), MapperService.MergeReason.MAPPING_UPDATE);
fieldMapper = (PercolatorFieldMapper) mapperService.documentMapper().mappers().getMapper(queryField);
fieldType = (PercolatorFieldMapper.PercolatorFieldType) fieldMapper.fieldType();
@@ -1275,7 +1272,7 @@ private CustomQuery(Term term) {
}
@Override
- public Query rewrite(IndexReader reader) throws IOException {
+ public Query rewrite(IndexSearcher searcher) throws IOException {
return new TermQuery(term);
}
diff --git a/modules/percolator/src/test/java/org/opensearch/percolator/PercolateQueryBuilderTests.java b/modules/percolator/src/test/java/org/opensearch/percolator/PercolateQueryBuilderTests.java
index 34ed195cd0f23..93b78d67bf6f1 100644
--- a/modules/percolator/src/test/java/org/opensearch/percolator/PercolateQueryBuilderTests.java
+++ b/modules/percolator/src/test/java/org/opensearch/percolator/PercolateQueryBuilderTests.java
@@ -38,7 +38,6 @@
import org.opensearch.action.admin.indices.mapping.put.PutMappingRequest;
import org.opensearch.action.get.GetRequest;
import org.opensearch.action.get.GetResponse;
-import org.opensearch.common.Strings;
import org.opensearch.core.common.bytes.BytesArray;
import org.opensearch.core.common.bytes.BytesReference;
import org.opensearch.common.compress.CompressedXContent;
@@ -109,15 +108,13 @@ protected void initializeAdditionalMappings(MapperService mapperService) throws
mapperService.merge(
docType,
new CompressedXContent(
- Strings.toString(
- PutMappingRequest.simpleMapping(queryField, "type=percolator", aliasField, "type=alias,path=" + queryField)
- )
+ PutMappingRequest.simpleMapping(queryField, "type=percolator", aliasField, "type=alias,path=" + queryField).toString()
),
MapperService.MergeReason.MAPPING_UPDATE
);
mapperService.merge(
docType,
- new CompressedXContent(Strings.toString(PutMappingRequest.simpleMapping(TEXT_FIELD_NAME, "type=text"))),
+ new CompressedXContent(PutMappingRequest.simpleMapping(TEXT_FIELD_NAME, "type=text").toString()),
MapperService.MergeReason.MAPPING_UPDATE
);
}
diff --git a/modules/percolator/src/test/java/org/opensearch/percolator/PercolateWithNestedQueryBuilderTests.java b/modules/percolator/src/test/java/org/opensearch/percolator/PercolateWithNestedQueryBuilderTests.java
index 748b79d70af07..677d169162c74 100644
--- a/modules/percolator/src/test/java/org/opensearch/percolator/PercolateWithNestedQueryBuilderTests.java
+++ b/modules/percolator/src/test/java/org/opensearch/percolator/PercolateWithNestedQueryBuilderTests.java
@@ -33,7 +33,6 @@
package org.opensearch.percolator;
import org.opensearch.action.admin.indices.mapping.put.PutMappingRequest;
-import org.opensearch.common.Strings;
import org.opensearch.core.common.bytes.BytesArray;
import org.opensearch.common.compress.CompressedXContent;
import org.opensearch.common.xcontent.XContentType;
@@ -50,7 +49,7 @@ protected void initializeAdditionalMappings(MapperService mapperService) throws
super.initializeAdditionalMappings(mapperService);
mapperService.merge(
"_doc",
- new CompressedXContent(Strings.toString(PutMappingRequest.simpleMapping("some_nested_object", "type=nested"))),
+ new CompressedXContent(PutMappingRequest.simpleMapping("some_nested_object", "type=nested").toString()),
MapperService.MergeReason.MAPPING_UPDATE
);
}
diff --git a/modules/percolator/src/test/java/org/opensearch/percolator/PercolatorFieldMapperTests.java b/modules/percolator/src/test/java/org/opensearch/percolator/PercolatorFieldMapperTests.java
index 77a4718b1d755..c5e2a1f68de9c 100644
--- a/modules/percolator/src/test/java/org/opensearch/percolator/PercolatorFieldMapperTests.java
+++ b/modules/percolator/src/test/java/org/opensearch/percolator/PercolatorFieldMapperTests.java
@@ -56,7 +56,6 @@
import org.opensearch.Version;
import org.opensearch.action.support.PlainActionFuture;
import org.opensearch.cluster.metadata.IndexMetadata;
-import org.opensearch.common.Strings;
import org.opensearch.core.common.bytes.BytesArray;
import org.opensearch.core.common.bytes.BytesReference;
import org.opensearch.common.collect.Tuple;
@@ -163,67 +162,65 @@ public void init() throws Exception {
indexService = createIndex("test");
mapperService = indexService.mapperService();
- String mapper = Strings.toString(
- XContentFactory.jsonBuilder()
- .startObject()
- .startObject("properties")
- .startObject("field")
- .field("type", "text")
- .endObject()
- .startObject("field1")
- .field("type", "text")
- .endObject()
- .startObject("field2")
- .field("type", "text")
- .endObject()
- .startObject("_field3")
- .field("type", "text")
- .endObject()
- .startObject("field4")
- .field("type", "text")
- .endObject()
- .startObject("number_field1")
- .field("type", "integer")
- .endObject()
- .startObject("number_field2")
- .field("type", "long")
- .endObject()
- .startObject("number_field3")
- .field("type", "long")
- .endObject()
- .startObject("number_field4")
- .field("type", "half_float")
- .endObject()
- .startObject("number_field5")
- .field("type", "float")
- .endObject()
- .startObject("number_field6")
- .field("type", "double")
- .endObject()
- .startObject("number_field7")
- .field("type", "ip")
- .endObject()
- .startObject("date_field")
- .field("type", "date")
- .endObject()
- .endObject()
- .endObject()
- );
+ String mapper = XContentFactory.jsonBuilder()
+ .startObject()
+ .startObject("properties")
+ .startObject("field")
+ .field("type", "text")
+ .endObject()
+ .startObject("field1")
+ .field("type", "text")
+ .endObject()
+ .startObject("field2")
+ .field("type", "text")
+ .endObject()
+ .startObject("_field3")
+ .field("type", "text")
+ .endObject()
+ .startObject("field4")
+ .field("type", "text")
+ .endObject()
+ .startObject("number_field1")
+ .field("type", "integer")
+ .endObject()
+ .startObject("number_field2")
+ .field("type", "long")
+ .endObject()
+ .startObject("number_field3")
+ .field("type", "long")
+ .endObject()
+ .startObject("number_field4")
+ .field("type", "half_float")
+ .endObject()
+ .startObject("number_field5")
+ .field("type", "float")
+ .endObject()
+ .startObject("number_field6")
+ .field("type", "double")
+ .endObject()
+ .startObject("number_field7")
+ .field("type", "ip")
+ .endObject()
+ .startObject("date_field")
+ .field("type", "date")
+ .endObject()
+ .endObject()
+ .endObject()
+ .toString();
mapperService.merge(MapperService.SINGLE_MAPPING_NAME, new CompressedXContent(mapper), MapperService.MergeReason.MAPPING_UPDATE);
}
private void addQueryFieldMappings() throws Exception {
fieldName = randomAlphaOfLength(4);
- String percolatorMapper = Strings.toString(
- XContentFactory.jsonBuilder()
- .startObject()
- .startObject("properties")
- .startObject(fieldName)
- .field("type", "percolator")
- .endObject()
- .endObject()
- .endObject()
- );
+ String percolatorMapper = XContentFactory.jsonBuilder()
+ .startObject()
+ .startObject("properties")
+ .startObject(fieldName)
+ .field("type", "percolator")
+ .endObject()
+ .endObject()
+ .endObject()
+ .toString();
mapperService.merge(
MapperService.SINGLE_MAPPING_NAME,
new CompressedXContent(percolatorMapper),
@@ -710,17 +707,16 @@ public void testAllowNoAdditionalSettings() throws Exception {
IndexService indexService = createIndex("test1", Settings.EMPTY);
MapperService mapperService = indexService.mapperService();
- String percolatorMapper = Strings.toString(
- XContentFactory.jsonBuilder()
- .startObject()
- .startObject("properties")
- .startObject(fieldName)
- .field("type", "percolator")
- .field("index", "no")
- .endObject()
- .endObject()
- .endObject()
- );
+ String percolatorMapper = XContentFactory.jsonBuilder()
+ .startObject()
+ .startObject("properties")
+ .startObject(fieldName)
+ .field("type", "percolator")
+ .field("index", "no")
+ .endObject()
+ .endObject()
+ .endObject()
+ .toString();
MapperParsingException e = expectThrows(
MapperParsingException.class,
() -> mapperService.merge(
@@ -735,21 +731,20 @@ public void testAllowNoAdditionalSettings() throws Exception {
// multiple percolator fields are allowed in the mapping, but only one field can be used at index time.
public void testMultiplePercolatorFields() throws Exception {
String typeName = MapperService.SINGLE_MAPPING_NAME;
- String percolatorMapper = Strings.toString(
- XContentFactory.jsonBuilder()
- .startObject()
- .startObject(typeName)
- .startObject("properties")
- .startObject("query_field1")
- .field("type", "percolator")
- .endObject()
- .startObject("query_field2")
- .field("type", "percolator")
- .endObject()
- .endObject()
- .endObject()
- .endObject()
- );
+ String percolatorMapper = XContentFactory.jsonBuilder()
+ .startObject()
+ .startObject(typeName)
+ .startObject("properties")
+ .startObject("query_field1")
+ .field("type", "percolator")
+ .endObject()
+ .startObject("query_field2")
+ .field("type", "percolator")
+ .endObject()
+ .endObject()
+ .endObject()
+ .endObject()
+ .toString();
mapperService.merge(typeName, new CompressedXContent(percolatorMapper), MapperService.MergeReason.MAPPING_UPDATE);
QueryBuilder queryBuilder = matchQuery("field", "value");
@@ -775,23 +770,22 @@ public void testMultiplePercolatorFields() throws Exception {
// percolator field can be nested under an object field, but only one query can be specified per document
public void testNestedPercolatorField() throws Exception {
String typeName = MapperService.SINGLE_MAPPING_NAME;
- String percolatorMapper = Strings.toString(
- XContentFactory.jsonBuilder()
- .startObject()
- .startObject(typeName)
- .startObject("properties")
- .startObject("object_field")
- .field("type", "object")
- .startObject("properties")
- .startObject("query_field")
- .field("type", "percolator")
- .endObject()
- .endObject()
- .endObject()
- .endObject()
- .endObject()
- .endObject()
- );
+ String percolatorMapper = XContentFactory.jsonBuilder()
+ .startObject()
+ .startObject(typeName)
+ .startObject("properties")
+ .startObject("object_field")
+ .field("type", "object")
+ .startObject("properties")
+ .startObject("query_field")
+ .field("type", "percolator")
+ .endObject()
+ .endObject()
+ .endObject()
+ .endObject()
+ .endObject()
+ .endObject()
+ .toString();
mapperService.merge(typeName, new CompressedXContent(percolatorMapper), MapperService.MergeReason.MAPPING_UPDATE);
QueryBuilder queryBuilder = matchQuery("field", "value");
@@ -907,18 +901,17 @@ private void assertQueryBuilder(BytesRef actual, QueryBuilder expected) throws I
}
public void testEmptyName() throws Exception {
- String mapping = Strings.toString(
- XContentFactory.jsonBuilder()
- .startObject()
- .startObject("type1")
- .startObject("properties")
- .startObject("")
- .field("type", "percolator")
- .endObject()
- .endObject()
- .endObject()
- .endObject()
- );
+ String mapping = XContentFactory.jsonBuilder()
+ .startObject()
+ .startObject("type1")
+ .startObject("properties")
+ .startObject("")
+ .field("type", "percolator")
+ .endObject()
+ .endObject()
+ .endObject()
+ .endObject()
+ .toString();
DocumentMapperParser parser = mapperService.documentMapperParser();
IllegalArgumentException e = expectThrows(
@@ -951,7 +944,7 @@ public void testImplicitlySetDefaultScriptLang() throws Exception {
BytesReference.bytes(
XContentFactory.jsonBuilder()
.startObject()
- .rawField(fieldName, new BytesArray(Strings.toString(query)).streamInput(), query.contentType())
+ .rawField(fieldName, new BytesArray(query.toString()).streamInput(), query.contentType())
.endObject()
),
XContentType.JSON
@@ -998,7 +991,7 @@ public void testImplicitlySetDefaultScriptLang() throws Exception {
BytesReference.bytes(
XContentFactory.jsonBuilder()
.startObject()
- .rawField(fieldName, new BytesArray(Strings.toString(query)).streamInput(), query.contentType())
+ .rawField(fieldName, new BytesArray(query.toString()).streamInput(), query.contentType())
.endObject()
),
XContentType.JSON
diff --git a/modules/rank-eval/src/main/java/org/opensearch/index/rankeval/RankEvalResponse.java b/modules/rank-eval/src/main/java/org/opensearch/index/rankeval/RankEvalResponse.java
index 4dbc348fe458e..68c61183a4486 100644
--- a/modules/rank-eval/src/main/java/org/opensearch/index/rankeval/RankEvalResponse.java
+++ b/modules/rank-eval/src/main/java/org/opensearch/index/rankeval/RankEvalResponse.java
@@ -35,8 +35,8 @@
import org.opensearch.OpenSearchException;
import org.opensearch.action.ActionResponse;
import org.opensearch.core.ParseField;
-import org.opensearch.common.Strings;
import org.opensearch.common.collect.Tuple;
+import org.opensearch.core.common.Strings;
import org.opensearch.core.common.io.stream.StreamInput;
import org.opensearch.core.common.io.stream.StreamOutput;
import org.opensearch.core.xcontent.ConstructingObjectParser;
diff --git a/modules/rank-eval/src/main/java/org/opensearch/index/rankeval/RankEvalSpec.java b/modules/rank-eval/src/main/java/org/opensearch/index/rankeval/RankEvalSpec.java
index 9585e79e69cf1..44eeceb117794 100644
--- a/modules/rank-eval/src/main/java/org/opensearch/index/rankeval/RankEvalSpec.java
+++ b/modules/rank-eval/src/main/java/org/opensearch/index/rankeval/RankEvalSpec.java
@@ -34,7 +34,7 @@
import org.opensearch.core.ParseField;
import org.opensearch.core.common.ParsingException;
-import org.opensearch.common.Strings;
+import org.opensearch.core.common.Strings;
import org.opensearch.core.common.io.stream.StreamInput;
import org.opensearch.core.common.io.stream.StreamOutput;
import org.opensearch.core.common.io.stream.Writeable;
diff --git a/modules/rank-eval/src/main/java/org/opensearch/index/rankeval/RatedDocument.java b/modules/rank-eval/src/main/java/org/opensearch/index/rankeval/RatedDocument.java
index 02ac9182c4f35..e91b8671d0804 100644
--- a/modules/rank-eval/src/main/java/org/opensearch/index/rankeval/RatedDocument.java
+++ b/modules/rank-eval/src/main/java/org/opensearch/index/rankeval/RatedDocument.java
@@ -129,7 +129,7 @@ public XContentBuilder toXContent(XContentBuilder builder, Params params) throws
@Override
public String toString() {
- return org.opensearch.common.Strings.toString(XContentType.JSON, this);
+ return Strings.toString(XContentType.JSON, this);
}
@Override
diff --git a/modules/rank-eval/src/main/java/org/opensearch/index/rankeval/RatedRequest.java b/modules/rank-eval/src/main/java/org/opensearch/index/rankeval/RatedRequest.java
index c5f899cbefdf6..78c2dbd33182f 100644
--- a/modules/rank-eval/src/main/java/org/opensearch/index/rankeval/RatedRequest.java
+++ b/modules/rank-eval/src/main/java/org/opensearch/index/rankeval/RatedRequest.java
@@ -33,7 +33,7 @@
package org.opensearch.index.rankeval;
import org.opensearch.common.Nullable;
-import org.opensearch.common.Strings;
+import org.opensearch.core.common.Strings;
import org.opensearch.core.common.io.stream.StreamInput;
import org.opensearch.core.common.io.stream.StreamOutput;
import org.opensearch.core.common.io.stream.Writeable;
diff --git a/modules/rank-eval/src/test/java/org/opensearch/index/rankeval/DiscountedCumulativeGainTests.java b/modules/rank-eval/src/test/java/org/opensearch/index/rankeval/DiscountedCumulativeGainTests.java
index ea001de0ee7c6..edda52c57f280 100644
--- a/modules/rank-eval/src/test/java/org/opensearch/index/rankeval/DiscountedCumulativeGainTests.java
+++ b/modules/rank-eval/src/test/java/org/opensearch/index/rankeval/DiscountedCumulativeGainTests.java
@@ -33,12 +33,12 @@
package org.opensearch.index.rankeval;
import org.opensearch.action.OriginalIndices;
-import org.opensearch.common.Strings;
+import org.opensearch.core.common.Strings;
import org.opensearch.core.common.bytes.BytesReference;
import org.opensearch.core.common.io.stream.NamedWriteableRegistry;
+import org.opensearch.core.xcontent.MediaTypeRegistry;
import org.opensearch.core.xcontent.ToXContent;
import org.opensearch.core.xcontent.XContentBuilder;
-import org.opensearch.common.xcontent.XContentFactory;
import org.opensearch.core.xcontent.XContentParseException;
import org.opensearch.core.xcontent.XContentParser;
import org.opensearch.common.xcontent.XContentType;
@@ -274,7 +274,7 @@ public static DiscountedCumulativeGain createTestItem() {
public void testXContentRoundtrip() throws IOException {
DiscountedCumulativeGain testItem = createTestItem();
- XContentBuilder builder = XContentFactory.contentBuilder(randomFrom(XContentType.values()));
+ XContentBuilder builder = MediaTypeRegistry.contentBuilder(randomFrom(XContentType.values()));
XContentBuilder shuffled = shuffleXContent(testItem.toXContent(builder, ToXContent.EMPTY_PARAMS));
try (XContentParser itemParser = createParser(shuffled)) {
itemParser.nextToken();
diff --git a/modules/rank-eval/src/test/java/org/opensearch/index/rankeval/ExpectedReciprocalRankTests.java b/modules/rank-eval/src/test/java/org/opensearch/index/rankeval/ExpectedReciprocalRankTests.java
index 014f52faa9d57..32c7edc845e32 100644
--- a/modules/rank-eval/src/test/java/org/opensearch/index/rankeval/ExpectedReciprocalRankTests.java
+++ b/modules/rank-eval/src/test/java/org/opensearch/index/rankeval/ExpectedReciprocalRankTests.java
@@ -35,9 +35,9 @@
import org.opensearch.action.OriginalIndices;
import org.opensearch.core.common.bytes.BytesReference;
import org.opensearch.core.common.io.stream.NamedWriteableRegistry;
+import org.opensearch.core.xcontent.MediaTypeRegistry;
import org.opensearch.core.xcontent.ToXContent;
import org.opensearch.core.xcontent.XContentBuilder;
-import org.opensearch.common.xcontent.XContentFactory;
import org.opensearch.core.xcontent.XContentParseException;
import org.opensearch.core.xcontent.XContentParser;
import org.opensearch.common.xcontent.XContentType;
@@ -166,7 +166,7 @@ public static ExpectedReciprocalRank createTestItem() {
public void testXContentRoundtrip() throws IOException {
ExpectedReciprocalRank testItem = createTestItem();
- XContentBuilder builder = XContentFactory.contentBuilder(randomFrom(XContentType.values()));
+ XContentBuilder builder = MediaTypeRegistry.contentBuilder(randomFrom(XContentType.values()));
XContentBuilder shuffled = shuffleXContent(testItem.toXContent(builder, ToXContent.EMPTY_PARAMS));
try (XContentParser itemParser = createParser(shuffled)) {
itemParser.nextToken();
diff --git a/modules/rank-eval/src/test/java/org/opensearch/index/rankeval/MeanReciprocalRankTests.java b/modules/rank-eval/src/test/java/org/opensearch/index/rankeval/MeanReciprocalRankTests.java
index 3df79acfa6ce1..d34e6ba8bd72e 100644
--- a/modules/rank-eval/src/test/java/org/opensearch/index/rankeval/MeanReciprocalRankTests.java
+++ b/modules/rank-eval/src/test/java/org/opensearch/index/rankeval/MeanReciprocalRankTests.java
@@ -35,9 +35,9 @@
import org.opensearch.action.OriginalIndices;
import org.opensearch.core.common.bytes.BytesReference;
import org.opensearch.core.common.io.stream.NamedWriteableRegistry;
+import org.opensearch.core.xcontent.MediaTypeRegistry;
import org.opensearch.core.xcontent.ToXContent;
import org.opensearch.core.xcontent.XContentBuilder;
-import org.opensearch.common.xcontent.XContentFactory;
import org.opensearch.core.xcontent.XContentParseException;
import org.opensearch.core.xcontent.XContentParser;
import org.opensearch.common.xcontent.XContentType;
@@ -183,7 +183,7 @@ public void testNoResults() throws Exception {
public void testXContentRoundtrip() throws IOException {
MeanReciprocalRank testItem = createTestItem();
- XContentBuilder builder = XContentFactory.contentBuilder(randomFrom(XContentType.values()));
+ XContentBuilder builder = MediaTypeRegistry.contentBuilder(randomFrom(XContentType.values()));
XContentBuilder shuffled = shuffleXContent(testItem.toXContent(builder, ToXContent.EMPTY_PARAMS));
try (XContentParser itemParser = createParser(shuffled)) {
itemParser.nextToken();
diff --git a/modules/rank-eval/src/test/java/org/opensearch/index/rankeval/PrecisionAtKTests.java b/modules/rank-eval/src/test/java/org/opensearch/index/rankeval/PrecisionAtKTests.java
index 3317a2d2f00f1..41340cfd1003d 100644
--- a/modules/rank-eval/src/test/java/org/opensearch/index/rankeval/PrecisionAtKTests.java
+++ b/modules/rank-eval/src/test/java/org/opensearch/index/rankeval/PrecisionAtKTests.java
@@ -35,9 +35,9 @@
import org.opensearch.action.OriginalIndices;
import org.opensearch.core.common.bytes.BytesReference;
import org.opensearch.core.common.io.stream.NamedWriteableRegistry;
+import org.opensearch.core.xcontent.MediaTypeRegistry;
import org.opensearch.core.xcontent.ToXContent;
import org.opensearch.core.xcontent.XContentBuilder;
-import org.opensearch.common.xcontent.XContentFactory;
import org.opensearch.core.xcontent.XContentParseException;
import org.opensearch.core.xcontent.XContentParser;
import org.opensearch.common.xcontent.XContentType;
@@ -199,7 +199,7 @@ public static PrecisionAtK createTestItem() {
public void testXContentRoundtrip() throws IOException {
PrecisionAtK testItem = createTestItem();
- XContentBuilder builder = XContentFactory.contentBuilder(randomFrom(XContentType.values()));
+ XContentBuilder builder = MediaTypeRegistry.contentBuilder(randomFrom(XContentType.values()));
XContentBuilder shuffled = shuffleXContent(testItem.toXContent(builder, ToXContent.EMPTY_PARAMS));
try (XContentParser itemParser = createParser(shuffled)) {
itemParser.nextToken();
diff --git a/modules/rank-eval/src/test/java/org/opensearch/index/rankeval/RankEvalResponseTests.java b/modules/rank-eval/src/test/java/org/opensearch/index/rankeval/RankEvalResponseTests.java
index 7c0590566bba9..b021d739cc6a6 100644
--- a/modules/rank-eval/src/test/java/org/opensearch/index/rankeval/RankEvalResponseTests.java
+++ b/modules/rank-eval/src/test/java/org/opensearch/index/rankeval/RankEvalResponseTests.java
@@ -39,14 +39,14 @@
import org.opensearch.cluster.block.ClusterBlockException;
import org.opensearch.cluster.coordination.NoClusterManagerBlockService;
import org.opensearch.core.common.ParsingException;
-import org.opensearch.common.breaker.CircuitBreaker;
-import org.opensearch.common.breaker.CircuitBreakingException;
+import org.opensearch.core.common.breaker.CircuitBreaker;
+import org.opensearch.core.common.breaker.CircuitBreakingException;
import org.opensearch.core.common.bytes.BytesReference;
import org.opensearch.common.io.stream.BytesStreamOutput;
import org.opensearch.core.common.io.stream.StreamInput;
+import org.opensearch.core.xcontent.MediaTypeRegistry;
import org.opensearch.core.xcontent.ToXContent;
import org.opensearch.core.xcontent.XContentBuilder;
-import org.opensearch.common.xcontent.XContentFactory;
import org.opensearch.core.xcontent.XContentLocation;
import org.opensearch.core.xcontent.XContentParser;
import org.opensearch.common.xcontent.XContentType;
@@ -177,7 +177,7 @@ public void testToXContent() throws IOException {
Collections.singletonMap("coffee_query", coffeeQueryQuality),
Collections.singletonMap("beer_query", new ParsingException(new XContentLocation(0, 0), "someMsg"))
);
- XContentBuilder builder = XContentFactory.contentBuilder(XContentType.JSON);
+ XContentBuilder builder = MediaTypeRegistry.contentBuilder(XContentType.JSON);
String xContent = BytesReference.bytes(response.toXContent(builder, ToXContent.EMPTY_PARAMS)).utf8ToString();
assertEquals(
("{"
diff --git a/modules/rank-eval/src/test/java/org/opensearch/index/rankeval/RankEvalSpecTests.java b/modules/rank-eval/src/test/java/org/opensearch/index/rankeval/RankEvalSpecTests.java
index c42c0722e0fae..5601c972375f1 100644
--- a/modules/rank-eval/src/test/java/org/opensearch/index/rankeval/RankEvalSpecTests.java
+++ b/modules/rank-eval/src/test/java/org/opensearch/index/rankeval/RankEvalSpecTests.java
@@ -32,7 +32,6 @@
package org.opensearch.index.rankeval;
-import org.opensearch.common.Strings;
import org.opensearch.core.common.bytes.BytesReference;
import org.opensearch.core.common.io.stream.NamedWriteableRegistry;
import org.opensearch.core.xcontent.NamedXContentRegistry;
@@ -103,7 +102,7 @@ static RankEvalSpec createTestItem() {
builder.startObject();
builder.field("field", randomAlphaOfLengthBetween(1, 5));
builder.endObject();
- script = Strings.toString(builder);
+ script = builder.toString();
} catch (IOException e) {
// this shouldn't happen in tests, re-throw just not to swallow it
throw new RuntimeException(e);
diff --git a/modules/rank-eval/src/test/java/org/opensearch/index/rankeval/RatedDocumentTests.java b/modules/rank-eval/src/test/java/org/opensearch/index/rankeval/RatedDocumentTests.java
index 988784b6e57a3..01f5a3a12da01 100644
--- a/modules/rank-eval/src/test/java/org/opensearch/index/rankeval/RatedDocumentTests.java
+++ b/modules/rank-eval/src/test/java/org/opensearch/index/rankeval/RatedDocumentTests.java
@@ -34,9 +34,9 @@
import org.opensearch.core.common.bytes.BytesReference;
import org.opensearch.core.common.io.stream.NamedWriteableRegistry;
+import org.opensearch.core.xcontent.MediaTypeRegistry;
import org.opensearch.core.xcontent.ToXContent;
import org.opensearch.core.xcontent.XContentBuilder;
-import org.opensearch.common.xcontent.XContentFactory;
import org.opensearch.core.xcontent.XContentParseException;
import org.opensearch.core.xcontent.XContentParser;
import org.opensearch.common.xcontent.XContentType;
@@ -57,7 +57,7 @@ public static RatedDocument createRatedDocument() {
public void testXContentParsing() throws IOException {
RatedDocument testItem = createRatedDocument();
- XContentBuilder builder = XContentFactory.contentBuilder(randomFrom(XContentType.values()));
+ XContentBuilder builder = MediaTypeRegistry.contentBuilder(randomFrom(XContentType.values()));
XContentBuilder shuffled = shuffleXContent(testItem.toXContent(builder, ToXContent.EMPTY_PARAMS));
try (XContentParser itemParser = createParser(shuffled)) {
RatedDocument parsedItem = RatedDocument.fromXContent(itemParser);
diff --git a/modules/rank-eval/src/test/java/org/opensearch/index/rankeval/RatedRequestsTests.java b/modules/rank-eval/src/test/java/org/opensearch/index/rankeval/RatedRequestsTests.java
index ac2401f30e6f0..6e99e31a2b819 100644
--- a/modules/rank-eval/src/test/java/org/opensearch/index/rankeval/RatedRequestsTests.java
+++ b/modules/rank-eval/src/test/java/org/opensearch/index/rankeval/RatedRequestsTests.java
@@ -35,10 +35,10 @@
import org.opensearch.core.common.bytes.BytesReference;
import org.opensearch.core.common.io.stream.NamedWriteableRegistry;
import org.opensearch.common.settings.Settings;
+import org.opensearch.core.xcontent.MediaTypeRegistry;
import org.opensearch.core.xcontent.NamedXContentRegistry;
import org.opensearch.core.xcontent.ToXContent;
import org.opensearch.core.xcontent.XContentBuilder;
-import org.opensearch.common.xcontent.XContentFactory;
import org.opensearch.core.xcontent.XContentParseException;
import org.opensearch.core.xcontent.XContentParser;
import org.opensearch.common.xcontent.XContentType;
@@ -134,7 +134,7 @@ public static RatedRequest createTestItem(boolean forceRequest) {
public void testXContentRoundtrip() throws IOException {
RatedRequest testItem = createTestItem(randomBoolean());
- XContentBuilder builder = XContentFactory.contentBuilder(randomFrom(XContentType.values()));
+ XContentBuilder builder = MediaTypeRegistry.contentBuilder(randomFrom(XContentType.values()));
XContentBuilder shuffled = shuffleXContent(testItem.toXContent(builder, ToXContent.EMPTY_PARAMS));
try (XContentParser itemParser = createParser(shuffled)) {
itemParser.nextToken();
diff --git a/modules/rank-eval/src/test/java/org/opensearch/index/rankeval/RecallAtKTests.java b/modules/rank-eval/src/test/java/org/opensearch/index/rankeval/RecallAtKTests.java
index 85e024f6bb1e9..37f778fbc5059 100644
--- a/modules/rank-eval/src/test/java/org/opensearch/index/rankeval/RecallAtKTests.java
+++ b/modules/rank-eval/src/test/java/org/opensearch/index/rankeval/RecallAtKTests.java
@@ -35,9 +35,9 @@
import org.opensearch.action.OriginalIndices;
import org.opensearch.core.common.bytes.BytesReference;
import org.opensearch.core.common.io.stream.NamedWriteableRegistry;
+import org.opensearch.core.xcontent.MediaTypeRegistry;
import org.opensearch.core.xcontent.ToXContent;
import org.opensearch.core.xcontent.XContentBuilder;
-import org.opensearch.common.xcontent.XContentFactory;
import org.opensearch.core.xcontent.XContentParseException;
import org.opensearch.core.xcontent.XContentParser;
import org.opensearch.common.xcontent.XContentType;
@@ -185,7 +185,7 @@ public static RecallAtK createTestItem() {
public void testXContentRoundtrip() throws IOException {
RecallAtK testItem = createTestItem();
- XContentBuilder builder = XContentFactory.contentBuilder(randomFrom(XContentType.values()));
+ XContentBuilder builder = MediaTypeRegistry.contentBuilder(randomFrom(XContentType.values()));
XContentBuilder shuffled = shuffleXContent(testItem.toXContent(builder, ToXContent.EMPTY_PARAMS));
try (XContentParser itemParser = createParser(shuffled)) {
itemParser.nextToken();
diff --git a/modules/reindex/src/internalClusterTest/java/org/opensearch/index/codec/MultiCodecReindexIT.java b/modules/reindex/src/internalClusterTest/java/org/opensearch/index/codec/MultiCodecReindexIT.java
index 87f3c68d8af76..7c2fe8d99c330 100644
--- a/modules/reindex/src/internalClusterTest/java/org/opensearch/index/codec/MultiCodecReindexIT.java
+++ b/modules/reindex/src/internalClusterTest/java/org/opensearch/index/codec/MultiCodecReindexIT.java
@@ -45,11 +45,15 @@ public void testReindexingMultipleCodecs() throws InterruptedException, Executio
Map codecMap = Map.of(
"best_compression",
"BEST_COMPRESSION",
+ "zlib",
+ "BEST_COMPRESSION",
"zstd_no_dict",
"ZSTD_NO_DICT",
"zstd",
"ZSTD",
"default",
+ "BEST_SPEED",
+ "lz4",
"BEST_SPEED"
);
diff --git a/modules/reindex/src/main/java/org/opensearch/index/reindex/AbstractAsyncBulkByScrollAction.java b/modules/reindex/src/main/java/org/opensearch/index/reindex/AbstractAsyncBulkByScrollAction.java
index 6170c1adabbea..a0ad02899ea27 100644
--- a/modules/reindex/src/main/java/org/opensearch/index/reindex/AbstractAsyncBulkByScrollAction.java
+++ b/modules/reindex/src/main/java/org/opensearch/index/reindex/AbstractAsyncBulkByScrollAction.java
@@ -53,7 +53,7 @@
import org.opensearch.action.support.TransportAction;
import org.opensearch.client.ParentTaskAssigningClient;
import org.opensearch.common.Nullable;
-import org.opensearch.common.unit.ByteSizeValue;
+import org.opensearch.core.common.unit.ByteSizeValue;
import org.opensearch.common.unit.TimeValue;
import org.opensearch.common.util.concurrent.AbstractRunnable;
import org.opensearch.index.VersionType;
diff --git a/modules/reindex/src/main/java/org/opensearch/index/reindex/AbstractBulkByQueryRestHandler.java b/modules/reindex/src/main/java/org/opensearch/index/reindex/AbstractBulkByQueryRestHandler.java
index 6f36fafd852ef..4b1e416ac63ea 100644
--- a/modules/reindex/src/main/java/org/opensearch/index/reindex/AbstractBulkByQueryRestHandler.java
+++ b/modules/reindex/src/main/java/org/opensearch/index/reindex/AbstractBulkByQueryRestHandler.java
@@ -37,8 +37,8 @@
import org.opensearch.core.common.bytes.BytesReference;
import org.opensearch.core.common.io.stream.NamedWriteableRegistry;
import org.opensearch.common.xcontent.LoggingDeprecationHandler;
+import org.opensearch.core.xcontent.MediaTypeRegistry;
import org.opensearch.core.xcontent.XContentBuilder;
-import org.opensearch.common.xcontent.XContentFactory;
import org.opensearch.core.xcontent.XContentParser;
import org.opensearch.rest.RestRequest;
import org.opensearch.rest.action.search.RestSearchAction;
@@ -106,7 +106,7 @@ private XContentParser extractRequestSpecificFields(RestRequest restRequest, Map
}
try (
XContentParser parser = restRequest.contentOrSourceParamParser();
- XContentBuilder builder = XContentFactory.contentBuilder(parser.contentType())
+ XContentBuilder builder = MediaTypeRegistry.contentBuilder(parser.contentType())
) {
Map body = parser.map();
diff --git a/modules/reindex/src/main/java/org/opensearch/index/reindex/remote/RemoteRequestBuilders.java b/modules/reindex/src/main/java/org/opensearch/index/reindex/remote/RemoteRequestBuilders.java
index 511c44ae3c2b6..68c30b616adfd 100644
--- a/modules/reindex/src/main/java/org/opensearch/index/reindex/remote/RemoteRequestBuilders.java
+++ b/modules/reindex/src/main/java/org/opensearch/index/reindex/remote/RemoteRequestBuilders.java
@@ -38,7 +38,6 @@
import org.opensearch.Version;
import org.opensearch.action.search.SearchRequest;
import org.opensearch.client.Request;
-import org.opensearch.common.Strings;
import org.opensearch.core.common.bytes.BytesReference;
import org.opensearch.common.logging.DeprecationLogger;
import org.opensearch.common.unit.TimeValue;
@@ -181,7 +180,7 @@ static Request initialSearch(SearchRequest searchRequest, BytesReference query,
}
entity.endObject();
- request.setJsonEntity(Strings.toString(entity));
+ request.setJsonEntity(entity.toString());
} catch (IOException e) {
throw new OpenSearchException("unexpected error building entity", e);
}
@@ -246,7 +245,7 @@ static Request scroll(String scroll, TimeValue keepAlive, Version remoteVersion)
try (XContentBuilder entity = JsonXContent.contentBuilder()) {
entity.startObject().field("scroll_id", scroll).endObject();
- request.setJsonEntity(Strings.toString(entity));
+ request.setJsonEntity(entity.toString());
} catch (IOException e) {
throw new OpenSearchException("failed to build scroll entity", e);
}
@@ -263,7 +262,7 @@ static Request clearScroll(String scroll, Version remoteVersion) {
}
try (XContentBuilder entity = JsonXContent.contentBuilder()) {
entity.startObject().array("scroll_id", scroll).endObject();
- request.setJsonEntity(Strings.toString(entity));
+ request.setJsonEntity(entity.toString());
} catch (IOException e) {
throw new OpenSearchException("failed to build clear scroll entity", e);
}
diff --git a/modules/reindex/src/test/java/org/opensearch/index/reindex/ReindexFromRemoteWithAuthTests.java b/modules/reindex/src/test/java/org/opensearch/index/reindex/ReindexFromRemoteWithAuthTests.java
index 97f43b9439408..03d6fafccfea3 100644
--- a/modules/reindex/src/test/java/org/opensearch/index/reindex/ReindexFromRemoteWithAuthTests.java
+++ b/modules/reindex/src/test/java/org/opensearch/index/reindex/ReindexFromRemoteWithAuthTests.java
@@ -50,7 +50,7 @@
import org.opensearch.core.common.io.stream.NamedWriteableRegistry;
import org.opensearch.common.network.NetworkModule;
import org.opensearch.common.settings.Settings;
-import org.opensearch.common.transport.TransportAddress;
+import org.opensearch.core.common.transport.TransportAddress;
import org.opensearch.common.util.concurrent.ThreadContext;
import org.opensearch.core.xcontent.NamedXContentRegistry;
import org.opensearch.env.Environment;
diff --git a/modules/reindex/src/test/java/org/opensearch/index/reindex/RetryTests.java b/modules/reindex/src/test/java/org/opensearch/index/reindex/RetryTests.java
index e239018e0ce31..89eb8fc7e15a3 100644
--- a/modules/reindex/src/test/java/org/opensearch/index/reindex/RetryTests.java
+++ b/modules/reindex/src/test/java/org/opensearch/index/reindex/RetryTests.java
@@ -42,7 +42,7 @@
import org.opensearch.client.Client;
import org.opensearch.core.common.bytes.BytesArray;
import org.opensearch.common.settings.Settings;
-import org.opensearch.common.transport.TransportAddress;
+import org.opensearch.core.common.transport.TransportAddress;
import org.opensearch.core.concurrency.OpenSearchRejectedExecutionException;
import org.opensearch.http.HttpInfo;
import org.opensearch.index.query.QueryBuilders;
diff --git a/modules/reindex/src/test/java/org/opensearch/index/reindex/remote/RemoteScrollableHitSourceTests.java b/modules/reindex/src/test/java/org/opensearch/index/reindex/remote/RemoteScrollableHitSourceTests.java
index ebbd2da776ace..e671fec1fedee 100644
--- a/modules/reindex/src/test/java/org/opensearch/index/reindex/remote/RemoteScrollableHitSourceTests.java
+++ b/modules/reindex/src/test/java/org/opensearch/index/reindex/remote/RemoteScrollableHitSourceTests.java
@@ -44,8 +44,8 @@
import org.opensearch.core.common.bytes.BytesArray;
import org.opensearch.core.util.FileSystemUtils;
import org.opensearch.common.io.Streams;
-import org.opensearch.common.unit.ByteSizeUnit;
-import org.opensearch.common.unit.ByteSizeValue;
+import org.opensearch.core.common.unit.ByteSizeUnit;
+import org.opensearch.core.common.unit.ByteSizeValue;
import org.opensearch.common.unit.TimeValue;
import org.opensearch.common.util.concurrent.OpenSearchExecutors;
import org.opensearch.core.concurrency.OpenSearchRejectedExecutionException;
diff --git a/modules/repository-url/src/internalClusterTest/java/org/opensearch/repositories/url/URLSnapshotRestoreIT.java b/modules/repository-url/src/internalClusterTest/java/org/opensearch/repositories/url/URLSnapshotRestoreIT.java
index 1bf461d67862b..9c61bca316a56 100644
--- a/modules/repository-url/src/internalClusterTest/java/org/opensearch/repositories/url/URLSnapshotRestoreIT.java
+++ b/modules/repository-url/src/internalClusterTest/java/org/opensearch/repositories/url/URLSnapshotRestoreIT.java
@@ -38,7 +38,7 @@
import org.opensearch.action.support.master.AcknowledgedResponse;
import org.opensearch.client.Client;
import org.opensearch.common.settings.Settings;
-import org.opensearch.common.unit.ByteSizeUnit;
+import org.opensearch.core.common.unit.ByteSizeUnit;
import org.opensearch.plugin.repository.url.URLRepositoryModulePlugin;
import org.opensearch.plugins.Plugin;
import org.opensearch.repositories.fs.FsRepository;
diff --git a/modules/repository-url/src/main/java/org/opensearch/common/blobstore/url/URLBlobStore.java b/modules/repository-url/src/main/java/org/opensearch/common/blobstore/url/URLBlobStore.java
index fbfbf5e006fee..0fad0cbe21033 100644
--- a/modules/repository-url/src/main/java/org/opensearch/common/blobstore/url/URLBlobStore.java
+++ b/modules/repository-url/src/main/java/org/opensearch/common/blobstore/url/URLBlobStore.java
@@ -37,8 +37,8 @@
import org.opensearch.common.blobstore.BlobStore;
import org.opensearch.common.blobstore.BlobStoreException;
import org.opensearch.common.settings.Settings;
-import org.opensearch.common.unit.ByteSizeUnit;
-import org.opensearch.common.unit.ByteSizeValue;
+import org.opensearch.core.common.unit.ByteSizeUnit;
+import org.opensearch.core.common.unit.ByteSizeValue;
import java.net.MalformedURLException;
import java.net.URL;
diff --git a/modules/repository-url/src/yamlRestTest/java/org/opensearch/repositories/url/RepositoryURLClientYamlTestSuiteIT.java b/modules/repository-url/src/yamlRestTest/java/org/opensearch/repositories/url/RepositoryURLClientYamlTestSuiteIT.java
index 705cbafd1bd3a..16c4ddf6aaabf 100644
--- a/modules/repository-url/src/yamlRestTest/java/org/opensearch/repositories/url/RepositoryURLClientYamlTestSuiteIT.java
+++ b/modules/repository-url/src/yamlRestTest/java/org/opensearch/repositories/url/RepositoryURLClientYamlTestSuiteIT.java
@@ -36,7 +36,6 @@
import com.carrotsearch.randomizedtesting.annotations.ParametersFactory;
import org.opensearch.client.Request;
import org.opensearch.client.Response;
-import org.opensearch.common.Strings;
import org.opensearch.common.io.PathUtils;
import org.opensearch.common.settings.Settings;
import org.opensearch.core.xcontent.ToXContent;
@@ -144,7 +143,7 @@ private static HttpEntity buildRepositorySettings(final String type, final Setti
builder.endObject();
}
builder.endObject();
- return new StringEntity(Strings.toString(builder), ContentType.APPLICATION_JSON);
+ return new StringEntity(builder.toString(), ContentType.APPLICATION_JSON);
}
}
}
diff --git a/modules/transport-netty4/licenses/netty-buffer-4.1.94.Final.jar.sha1 b/modules/transport-netty4/licenses/netty-buffer-4.1.94.Final.jar.sha1
deleted file mode 100644
index 05b1c2a4d614e..0000000000000
--- a/modules/transport-netty4/licenses/netty-buffer-4.1.94.Final.jar.sha1
+++ /dev/null
@@ -1 +0,0 @@
-eec248b26f16e888688e5bb37b7eeda76b78d2f7
\ No newline at end of file
diff --git a/modules/transport-netty4/licenses/netty-buffer-4.1.96.Final.jar.sha1 b/modules/transport-netty4/licenses/netty-buffer-4.1.96.Final.jar.sha1
new file mode 100644
index 0000000000000..7abdb33dc79a2
--- /dev/null
+++ b/modules/transport-netty4/licenses/netty-buffer-4.1.96.Final.jar.sha1
@@ -0,0 +1 @@
+4b80fffbe77485b457bf844289bf1801f61b9e91
\ No newline at end of file
diff --git a/modules/transport-netty4/licenses/netty-codec-4.1.94.Final.jar.sha1 b/modules/transport-netty4/licenses/netty-codec-4.1.94.Final.jar.sha1
deleted file mode 100644
index baa7e25f1ac49..0000000000000
--- a/modules/transport-netty4/licenses/netty-codec-4.1.94.Final.jar.sha1
+++ /dev/null
@@ -1 +0,0 @@
-c70ef20ca338558147887df60f46341bc47f6900
\ No newline at end of file
diff --git a/modules/transport-netty4/licenses/netty-codec-4.1.96.Final.jar.sha1 b/modules/transport-netty4/licenses/netty-codec-4.1.96.Final.jar.sha1
new file mode 100644
index 0000000000000..8fdb32be1de0b
--- /dev/null
+++ b/modules/transport-netty4/licenses/netty-codec-4.1.96.Final.jar.sha1
@@ -0,0 +1 @@
+9cfe430f8b14e7ba86969d8e1126aa0aae4d18f0
\ No newline at end of file
diff --git a/modules/transport-netty4/licenses/netty-codec-http-4.1.94.Final.jar.sha1 b/modules/transport-netty4/licenses/netty-codec-http-4.1.94.Final.jar.sha1
deleted file mode 100644
index 8c018be2565e5..0000000000000
--- a/modules/transport-netty4/licenses/netty-codec-http-4.1.94.Final.jar.sha1
+++ /dev/null
@@ -1 +0,0 @@
-9e5404764092c1f6305ad5719078f46ab228d587
\ No newline at end of file
diff --git a/modules/transport-netty4/licenses/netty-codec-http-4.1.96.Final.jar.sha1 b/modules/transport-netty4/licenses/netty-codec-http-4.1.96.Final.jar.sha1
new file mode 100644
index 0000000000000..dfb0cf39463e2
--- /dev/null
+++ b/modules/transport-netty4/licenses/netty-codec-http-4.1.96.Final.jar.sha1
@@ -0,0 +1 @@
+a4d0d95df5026965c454902ef3d6d84b81f89626
\ No newline at end of file
diff --git a/modules/transport-netty4/licenses/netty-codec-http2-4.1.94.Final.jar.sha1 b/modules/transport-netty4/licenses/netty-codec-http2-4.1.94.Final.jar.sha1
deleted file mode 100644
index e73026b412972..0000000000000
--- a/modules/transport-netty4/licenses/netty-codec-http2-4.1.94.Final.jar.sha1
+++ /dev/null
@@ -1 +0,0 @@
-f651595784d6cca4cbca6a8ad74c48fceed6cea8
\ No newline at end of file
diff --git a/modules/transport-netty4/licenses/netty-codec-http2-4.1.96.Final.jar.sha1 b/modules/transport-netty4/licenses/netty-codec-http2-4.1.96.Final.jar.sha1
new file mode 100644
index 0000000000000..2fc787ee65197
--- /dev/null
+++ b/modules/transport-netty4/licenses/netty-codec-http2-4.1.96.Final.jar.sha1
@@ -0,0 +1 @@
+cc8baf4ff67c1bcc0cde60bc5c2bb9447d92d9e6
\ No newline at end of file
diff --git a/modules/transport-netty4/licenses/netty-common-4.1.94.Final.jar.sha1 b/modules/transport-netty4/licenses/netty-common-4.1.94.Final.jar.sha1
deleted file mode 100644
index b787338551ede..0000000000000
--- a/modules/transport-netty4/licenses/netty-common-4.1.94.Final.jar.sha1
+++ /dev/null
@@ -1 +0,0 @@
-ad4ecf779ebc794cd351f57792f56ea01387b868
\ No newline at end of file
diff --git a/modules/transport-netty4/licenses/netty-common-4.1.96.Final.jar.sha1 b/modules/transport-netty4/licenses/netty-common-4.1.96.Final.jar.sha1
new file mode 100644
index 0000000000000..85b5f52749671
--- /dev/null
+++ b/modules/transport-netty4/licenses/netty-common-4.1.96.Final.jar.sha1
@@ -0,0 +1 @@
+d10c167623cbc471753f950846df241d1021655c
\ No newline at end of file
diff --git a/modules/transport-netty4/licenses/netty-handler-4.1.94.Final.jar.sha1 b/modules/transport-netty4/licenses/netty-handler-4.1.94.Final.jar.sha1
deleted file mode 100644
index b08e85ba7adf8..0000000000000
--- a/modules/transport-netty4/licenses/netty-handler-4.1.94.Final.jar.sha1
+++ /dev/null
@@ -1 +0,0 @@
-cd9121ce24d6d3f2898946d04b0ef3ec548b00b4
\ No newline at end of file
diff --git a/modules/transport-netty4/licenses/netty-handler-4.1.96.Final.jar.sha1 b/modules/transport-netty4/licenses/netty-handler-4.1.96.Final.jar.sha1
new file mode 100644
index 0000000000000..fe4f48c68e78b
--- /dev/null
+++ b/modules/transport-netty4/licenses/netty-handler-4.1.96.Final.jar.sha1
@@ -0,0 +1 @@
+7840d7523d709e02961b647546f9d9dde1699306
\ No newline at end of file
diff --git a/modules/transport-netty4/licenses/netty-resolver-4.1.94.Final.jar.sha1 b/modules/transport-netty4/licenses/netty-resolver-4.1.94.Final.jar.sha1
deleted file mode 100644
index 4c9e4dda2b852..0000000000000
--- a/modules/transport-netty4/licenses/netty-resolver-4.1.94.Final.jar.sha1
+++ /dev/null
@@ -1 +0,0 @@
-e96f649e8e9dcb29a1f8e95328b99c9eb6cf76c2
\ No newline at end of file
diff --git a/modules/transport-netty4/licenses/netty-resolver-4.1.96.Final.jar.sha1 b/modules/transport-netty4/licenses/netty-resolver-4.1.96.Final.jar.sha1
new file mode 100644
index 0000000000000..9e93f013226cd
--- /dev/null
+++ b/modules/transport-netty4/licenses/netty-resolver-4.1.96.Final.jar.sha1
@@ -0,0 +1 @@
+0e51db5568a881e0f9b013b35617c597dc32f130
\ No newline at end of file
diff --git a/modules/transport-netty4/licenses/netty-transport-4.1.94.Final.jar.sha1 b/modules/transport-netty4/licenses/netty-transport-4.1.94.Final.jar.sha1
deleted file mode 100644
index ed7760b8e15d1..0000000000000
--- a/modules/transport-netty4/licenses/netty-transport-4.1.94.Final.jar.sha1
+++ /dev/null
@@ -1 +0,0 @@
-ec783a737f96991a87b1d5794e2f9eb2024d708a
\ No newline at end of file
diff --git a/modules/transport-netty4/licenses/netty-transport-4.1.96.Final.jar.sha1 b/modules/transport-netty4/licenses/netty-transport-4.1.96.Final.jar.sha1
new file mode 100644
index 0000000000000..707285d3d29c3
--- /dev/null
+++ b/modules/transport-netty4/licenses/netty-transport-4.1.96.Final.jar.sha1
@@ -0,0 +1 @@
+dbd15ca244be28e1a98ed29b9d755edbfa737e02
\ No newline at end of file
diff --git a/modules/transport-netty4/licenses/netty-transport-native-unix-common-4.1.94.Final.jar.sha1 b/modules/transport-netty4/licenses/netty-transport-native-unix-common-4.1.94.Final.jar.sha1
deleted file mode 100644
index 43bc960a347a1..0000000000000
--- a/modules/transport-netty4/licenses/netty-transport-native-unix-common-4.1.94.Final.jar.sha1
+++ /dev/null
@@ -1 +0,0 @@
-3fa5f9d04b6b782d869d6e0657d896eeadca5866
\ No newline at end of file
diff --git a/modules/transport-netty4/licenses/netty-transport-native-unix-common-4.1.96.Final.jar.sha1 b/modules/transport-netty4/licenses/netty-transport-native-unix-common-4.1.96.Final.jar.sha1
new file mode 100644
index 0000000000000..e911c47d5ab1a
--- /dev/null
+++ b/modules/transport-netty4/licenses/netty-transport-native-unix-common-4.1.96.Final.jar.sha1
@@ -0,0 +1 @@
+daf8578cade63a01525ee9d70371fa78e6e91094
\ No newline at end of file
diff --git a/modules/transport-netty4/src/internalClusterTest/java/org/opensearch/http/netty4/Netty4Http2IT.java b/modules/transport-netty4/src/internalClusterTest/java/org/opensearch/http/netty4/Netty4Http2IT.java
index c066f3edf6900..baa306aa0624b 100644
--- a/modules/transport-netty4/src/internalClusterTest/java/org/opensearch/http/netty4/Netty4Http2IT.java
+++ b/modules/transport-netty4/src/internalClusterTest/java/org/opensearch/http/netty4/Netty4Http2IT.java
@@ -13,7 +13,7 @@
import io.netty.util.ReferenceCounted;
import org.opensearch.OpenSearchNetty4IntegTestCase;
import org.opensearch.common.collect.Tuple;
-import org.opensearch.common.transport.TransportAddress;
+import org.opensearch.core.common.transport.TransportAddress;
import org.opensearch.http.HttpServerTransport;
import org.opensearch.test.OpenSearchIntegTestCase.ClusterScope;
import org.opensearch.test.OpenSearchIntegTestCase.Scope;
diff --git a/modules/transport-netty4/src/internalClusterTest/java/org/opensearch/http/netty4/Netty4HttpRequestSizeLimitIT.java b/modules/transport-netty4/src/internalClusterTest/java/org/opensearch/http/netty4/Netty4HttpRequestSizeLimitIT.java
index db76c0b145840..d01f72ac88c9d 100644
--- a/modules/transport-netty4/src/internalClusterTest/java/org/opensearch/http/netty4/Netty4HttpRequestSizeLimitIT.java
+++ b/modules/transport-netty4/src/internalClusterTest/java/org/opensearch/http/netty4/Netty4HttpRequestSizeLimitIT.java
@@ -38,9 +38,9 @@
import org.opensearch.OpenSearchNetty4IntegTestCase;
import org.opensearch.common.collect.Tuple;
import org.opensearch.common.settings.Settings;
-import org.opensearch.common.transport.TransportAddress;
-import org.opensearch.common.unit.ByteSizeUnit;
-import org.opensearch.common.unit.ByteSizeValue;
+import org.opensearch.core.common.transport.TransportAddress;
+import org.opensearch.core.common.unit.ByteSizeUnit;
+import org.opensearch.core.common.unit.ByteSizeValue;
import org.opensearch.http.HttpServerTransport;
import org.opensearch.indices.breaker.HierarchyCircuitBreakerService;
import org.opensearch.test.OpenSearchIntegTestCase.ClusterScope;
diff --git a/modules/transport-netty4/src/internalClusterTest/java/org/opensearch/http/netty4/Netty4PipeliningIT.java b/modules/transport-netty4/src/internalClusterTest/java/org/opensearch/http/netty4/Netty4PipeliningIT.java
index 96193b0ecb954..d891284f53205 100644
--- a/modules/transport-netty4/src/internalClusterTest/java/org/opensearch/http/netty4/Netty4PipeliningIT.java
+++ b/modules/transport-netty4/src/internalClusterTest/java/org/opensearch/http/netty4/Netty4PipeliningIT.java
@@ -35,7 +35,7 @@
import io.netty.handler.codec.http.FullHttpResponse;
import io.netty.util.ReferenceCounted;
import org.opensearch.OpenSearchNetty4IntegTestCase;
-import org.opensearch.common.transport.TransportAddress;
+import org.opensearch.core.common.transport.TransportAddress;
import org.opensearch.http.HttpServerTransport;
import org.opensearch.test.OpenSearchIntegTestCase.ClusterScope;
import org.opensearch.test.OpenSearchIntegTestCase.Scope;
diff --git a/modules/transport-netty4/src/internalClusterTest/java/org/opensearch/transport/netty4/Netty4TransportMultiPortIntegrationIT.java b/modules/transport-netty4/src/internalClusterTest/java/org/opensearch/transport/netty4/Netty4TransportMultiPortIntegrationIT.java
index 3ff3938d23f65..4004d3d1a029d 100644
--- a/modules/transport-netty4/src/internalClusterTest/java/org/opensearch/transport/netty4/Netty4TransportMultiPortIntegrationIT.java
+++ b/modules/transport-netty4/src/internalClusterTest/java/org/opensearch/transport/netty4/Netty4TransportMultiPortIntegrationIT.java
@@ -36,8 +36,8 @@
import org.opensearch.action.admin.cluster.node.info.NodesInfoResponse;
import org.opensearch.common.network.NetworkAddress;
import org.opensearch.common.settings.Settings;
-import org.opensearch.common.transport.BoundTransportAddress;
-import org.opensearch.common.transport.TransportAddress;
+import org.opensearch.core.common.transport.BoundTransportAddress;
+import org.opensearch.core.common.transport.TransportAddress;
import org.opensearch.test.OpenSearchIntegTestCase.ClusterScope;
import org.opensearch.test.OpenSearchIntegTestCase.Scope;
import org.opensearch.test.junit.annotations.Network;
diff --git a/modules/transport-netty4/src/internalClusterTest/java/org/opensearch/transport/netty4/Netty4TransportPublishAddressIT.java b/modules/transport-netty4/src/internalClusterTest/java/org/opensearch/transport/netty4/Netty4TransportPublishAddressIT.java
index e6604abf126da..4722cdb66be18 100644
--- a/modules/transport-netty4/src/internalClusterTest/java/org/opensearch/transport/netty4/Netty4TransportPublishAddressIT.java
+++ b/modules/transport-netty4/src/internalClusterTest/java/org/opensearch/transport/netty4/Netty4TransportPublishAddressIT.java
@@ -38,8 +38,8 @@
import org.opensearch.common.network.NetworkModule;
import org.opensearch.common.network.NetworkUtils;
import org.opensearch.common.settings.Settings;
-import org.opensearch.common.transport.BoundTransportAddress;
-import org.opensearch.common.transport.TransportAddress;
+import org.opensearch.core.common.transport.BoundTransportAddress;
+import org.opensearch.core.common.transport.TransportAddress;
import org.opensearch.test.OpenSearchIntegTestCase;
import org.opensearch.transport.Netty4ModulePlugin;
import org.opensearch.transport.TransportInfo;
diff --git a/modules/transport-netty4/src/javaRestTest/java/org/opensearch/rest/Netty4BadRequestIT.java b/modules/transport-netty4/src/javaRestTest/java/org/opensearch/rest/Netty4BadRequestIT.java
index 2584b768707cd..f7e1c6106cf5a 100644
--- a/modules/transport-netty4/src/javaRestTest/java/org/opensearch/rest/Netty4BadRequestIT.java
+++ b/modules/transport-netty4/src/javaRestTest/java/org/opensearch/rest/Netty4BadRequestIT.java
@@ -38,7 +38,7 @@
import org.opensearch.client.ResponseException;
import org.opensearch.common.settings.Setting;
import org.opensearch.common.settings.Settings;
-import org.opensearch.common.unit.ByteSizeValue;
+import org.opensearch.core.common.unit.ByteSizeValue;
import org.opensearch.http.HttpTransportSettings;
import org.opensearch.test.rest.OpenSearchRestTestCase;
import org.opensearch.test.rest.yaml.ObjectPath;
diff --git a/modules/transport-netty4/src/javaRestTest/java/org/opensearch/rest/Netty4HeadBodyIsEmptyIT.java b/modules/transport-netty4/src/javaRestTest/java/org/opensearch/rest/Netty4HeadBodyIsEmptyIT.java
index 4b962401387b7..b4b15c22258de 100644
--- a/modules/transport-netty4/src/javaRestTest/java/org/opensearch/rest/Netty4HeadBodyIsEmptyIT.java
+++ b/modules/transport-netty4/src/javaRestTest/java/org/opensearch/rest/Netty4HeadBodyIsEmptyIT.java
@@ -34,7 +34,6 @@
import org.opensearch.client.Request;
import org.opensearch.client.Response;
-import org.opensearch.common.Strings;
import org.opensearch.core.xcontent.XContentBuilder;
import org.opensearch.test.rest.OpenSearchRestTestCase;
import org.hamcrest.Matcher;
@@ -68,7 +67,7 @@ private void createTestDoc(final String indexName) throws IOException {
}
builder.endObject();
Request request = new Request("PUT", "/" + indexName + "/_doc/" + "1");
- request.setJsonEntity(Strings.toString(builder));
+ request.setJsonEntity(builder.toString());
client().performRequest(request);
}
}
@@ -109,7 +108,7 @@ public void testAliasExists() throws IOException {
builder.endObject();
Request request = new Request("POST", "/_aliases");
- request.setJsonEntity(Strings.toString(builder));
+ request.setJsonEntity(builder.toString());
client().performRequest(request);
headTestCase("/_alias/test_alias", emptyMap(), greaterThan(0));
headTestCase("/test/_alias/test_alias", emptyMap(), greaterThan(0));
@@ -136,7 +135,7 @@ public void testTemplateExists() throws IOException {
builder.endObject();
Request request = new Request("PUT", "/_template/template");
- request.setJsonEntity(Strings.toString(builder));
+ request.setJsonEntity(builder.toString());
client().performRequest(request);
headTestCase("/_template/template", emptyMap(), greaterThan(0));
}
@@ -163,7 +162,7 @@ public void testGetSourceAction() throws IOException {
builder.endObject();
Request request = new Request("PUT", "/test-no-source");
- request.setJsonEntity(Strings.toString(builder));
+ request.setJsonEntity(builder.toString());
client().performRequest(request);
createTestDoc("test-no-source");
headTestCase("/test-no-source/_source/1", emptyMap(), NOT_FOUND.getStatus(), greaterThan(0));
diff --git a/modules/transport-netty4/src/main/java/org/opensearch/http/netty4/Netty4HttpServerTransport.java b/modules/transport-netty4/src/main/java/org/opensearch/http/netty4/Netty4HttpServerTransport.java
index 124bc02527bd1..998c89590c53c 100644
--- a/modules/transport-netty4/src/main/java/org/opensearch/http/netty4/Netty4HttpServerTransport.java
+++ b/modules/transport-netty4/src/main/java/org/opensearch/http/netty4/Netty4HttpServerTransport.java
@@ -78,8 +78,8 @@
import org.opensearch.common.settings.Setting;
import org.opensearch.common.settings.Setting.Property;
import org.opensearch.common.settings.Settings;
-import org.opensearch.common.unit.ByteSizeUnit;
-import org.opensearch.common.unit.ByteSizeValue;
+import org.opensearch.core.common.unit.ByteSizeUnit;
+import org.opensearch.core.common.unit.ByteSizeValue;
import org.opensearch.common.util.BigArrays;
import org.opensearch.common.util.concurrent.OpenSearchExecutors;
import org.opensearch.core.xcontent.NamedXContentRegistry;
diff --git a/modules/transport-netty4/src/main/java/org/opensearch/transport/CopyBytesSocketChannel.java b/modules/transport-netty4/src/main/java/org/opensearch/transport/CopyBytesSocketChannel.java
index 9a5459a5ab572..a30cf00ce4047 100644
--- a/modules/transport-netty4/src/main/java/org/opensearch/transport/CopyBytesSocketChannel.java
+++ b/modules/transport-netty4/src/main/java/org/opensearch/transport/CopyBytesSocketChannel.java
@@ -52,7 +52,7 @@
import io.netty.channel.RecvByteBufAllocator;
import io.netty.channel.socket.nio.NioSocketChannel;
import org.opensearch.common.SuppressForbidden;
-import org.opensearch.common.unit.ByteSizeValue;
+import org.opensearch.core.common.unit.ByteSizeValue;
import java.io.IOException;
import java.nio.ByteBuffer;
diff --git a/modules/transport-netty4/src/main/java/org/opensearch/transport/Netty4ModulePlugin.java b/modules/transport-netty4/src/main/java/org/opensearch/transport/Netty4ModulePlugin.java
index d7f2f6eb6acbb..ef60797bca067 100644
--- a/modules/transport-netty4/src/main/java/org/opensearch/transport/Netty4ModulePlugin.java
+++ b/modules/transport-netty4/src/main/java/org/opensearch/transport/Netty4ModulePlugin.java
@@ -45,7 +45,7 @@
import org.opensearch.core.xcontent.NamedXContentRegistry;
import org.opensearch.http.HttpServerTransport;
import org.opensearch.http.netty4.Netty4HttpServerTransport;
-import org.opensearch.indices.breaker.CircuitBreakerService;
+import org.opensearch.core.indices.breaker.CircuitBreakerService;
import org.opensearch.plugins.NetworkPlugin;
import org.opensearch.plugins.Plugin;
import org.opensearch.threadpool.ThreadPool;
diff --git a/modules/transport-netty4/src/main/java/org/opensearch/transport/NettyAllocator.java b/modules/transport-netty4/src/main/java/org/opensearch/transport/NettyAllocator.java
index f2f6538d305d9..d32a43e3eb9eb 100644
--- a/modules/transport-netty4/src/main/java/org/opensearch/transport/NettyAllocator.java
+++ b/modules/transport-netty4/src/main/java/org/opensearch/transport/NettyAllocator.java
@@ -42,7 +42,7 @@
import org.apache.logging.log4j.LogManager;
import org.apache.logging.log4j.Logger;
import org.opensearch.common.Booleans;
-import org.opensearch.common.unit.ByteSizeValue;
+import org.opensearch.core.common.unit.ByteSizeValue;
import org.opensearch.monitor.jvm.JvmInfo;
import java.util.concurrent.atomic.AtomicBoolean;
diff --git a/modules/transport-netty4/src/main/java/org/opensearch/transport/netty4/Netty4Transport.java b/modules/transport-netty4/src/main/java/org/opensearch/transport/netty4/Netty4Transport.java
index 561cac2facbff..637cb10c383bd 100644
--- a/modules/transport-netty4/src/main/java/org/opensearch/transport/netty4/Netty4Transport.java
+++ b/modules/transport-netty4/src/main/java/org/opensearch/transport/netty4/Netty4Transport.java
@@ -57,13 +57,13 @@
import org.opensearch.common.settings.Setting;
import org.opensearch.common.settings.Setting.Property;
import org.opensearch.common.settings.Settings;
-import org.opensearch.common.unit.ByteSizeUnit;
-import org.opensearch.common.unit.ByteSizeValue;
+import org.opensearch.core.common.unit.ByteSizeUnit;
+import org.opensearch.core.common.unit.ByteSizeValue;
import org.opensearch.common.util.PageCacheRecycler;
import org.opensearch.common.util.concurrent.OpenSearchExecutors;
import org.opensearch.common.util.net.NetUtils;
import org.opensearch.common.lease.Releasables;
-import org.opensearch.indices.breaker.CircuitBreakerService;
+import org.opensearch.core.indices.breaker.CircuitBreakerService;
import org.opensearch.threadpool.ThreadPool;
import org.opensearch.transport.Netty4NioSocketChannel;
import org.opensearch.transport.NettyAllocator;
diff --git a/modules/transport-netty4/src/test/java/org/opensearch/http/netty4/Netty4BadRequestTests.java b/modules/transport-netty4/src/test/java/org/opensearch/http/netty4/Netty4BadRequestTests.java
index ef014aa39367b..1b175d7991e32 100644
--- a/modules/transport-netty4/src/test/java/org/opensearch/http/netty4/Netty4BadRequestTests.java
+++ b/modules/transport-netty4/src/test/java/org/opensearch/http/netty4/Netty4BadRequestTests.java
@@ -38,13 +38,13 @@
import org.opensearch.common.network.NetworkService;
import org.opensearch.common.settings.ClusterSettings;
import org.opensearch.common.settings.Settings;
-import org.opensearch.common.transport.TransportAddress;
+import org.opensearch.core.common.transport.TransportAddress;
import org.opensearch.common.util.MockBigArrays;
import org.opensearch.common.util.MockPageCacheRecycler;
import org.opensearch.common.util.concurrent.ThreadContext;
import org.opensearch.http.HttpServerTransport;
import org.opensearch.http.HttpTransportSettings;
-import org.opensearch.indices.breaker.NoneCircuitBreakerService;
+import org.opensearch.core.indices.breaker.NoneCircuitBreakerService;
import org.opensearch.rest.BytesRestResponse;
import org.opensearch.rest.RestChannel;
import org.opensearch.rest.RestRequest;
diff --git a/modules/transport-netty4/src/test/java/org/opensearch/http/netty4/Netty4HttpClient.java b/modules/transport-netty4/src/test/java/org/opensearch/http/netty4/Netty4HttpClient.java
index cad2e50327023..9f359002e441a 100644
--- a/modules/transport-netty4/src/test/java/org/opensearch/http/netty4/Netty4HttpClient.java
+++ b/modules/transport-netty4/src/test/java/org/opensearch/http/netty4/Netty4HttpClient.java
@@ -72,8 +72,8 @@
import io.netty.util.AttributeKey;
import org.opensearch.common.collect.Tuple;
-import org.opensearch.common.unit.ByteSizeUnit;
-import org.opensearch.common.unit.ByteSizeValue;
+import org.opensearch.core.common.unit.ByteSizeUnit;
+import org.opensearch.core.common.unit.ByteSizeValue;
import org.opensearch.tasks.Task;
import org.opensearch.transport.NettyAllocator;
diff --git a/modules/transport-netty4/src/test/java/org/opensearch/http/netty4/Netty4HttpServerPipeliningTests.java b/modules/transport-netty4/src/test/java/org/opensearch/http/netty4/Netty4HttpServerPipeliningTests.java
index adf4d59a0c139..ca8dfb616e313 100644
--- a/modules/transport-netty4/src/test/java/org/opensearch/http/netty4/Netty4HttpServerPipeliningTests.java
+++ b/modules/transport-netty4/src/test/java/org/opensearch/http/netty4/Netty4HttpServerPipeliningTests.java
@@ -45,14 +45,14 @@
import org.opensearch.common.network.NetworkService;
import org.opensearch.common.settings.ClusterSettings;
import org.opensearch.common.settings.Settings;
-import org.opensearch.common.transport.TransportAddress;
+import org.opensearch.core.common.transport.TransportAddress;
import org.opensearch.common.util.MockBigArrays;
import org.opensearch.common.util.MockPageCacheRecycler;
import org.opensearch.http.HttpPipelinedRequest;
import org.opensearch.http.HttpResponse;
import org.opensearch.http.HttpServerTransport;
import org.opensearch.http.NullDispatcher;
-import org.opensearch.indices.breaker.NoneCircuitBreakerService;
+import org.opensearch.core.indices.breaker.NoneCircuitBreakerService;
import org.opensearch.core.rest.RestStatus;
import org.opensearch.test.OpenSearchTestCase;
import org.opensearch.threadpool.TestThreadPool;
diff --git a/modules/transport-netty4/src/test/java/org/opensearch/http/netty4/Netty4HttpServerTransportTests.java b/modules/transport-netty4/src/test/java/org/opensearch/http/netty4/Netty4HttpServerTransportTests.java
index 5a43057b1b7d1..af4ded2255c9c 100644
--- a/modules/transport-netty4/src/test/java/org/opensearch/http/netty4/Netty4HttpServerTransportTests.java
+++ b/modules/transport-netty4/src/test/java/org/opensearch/http/netty4/Netty4HttpServerTransportTests.java
@@ -64,8 +64,8 @@
import org.opensearch.common.settings.ClusterSettings;
import org.opensearch.common.settings.Setting;
import org.opensearch.common.settings.Settings;
-import org.opensearch.common.transport.TransportAddress;
-import org.opensearch.common.unit.ByteSizeValue;
+import org.opensearch.core.common.transport.TransportAddress;
+import org.opensearch.core.common.unit.ByteSizeValue;
import org.opensearch.common.unit.TimeValue;
import org.opensearch.common.util.MockBigArrays;
import org.opensearch.common.util.MockPageCacheRecycler;
@@ -75,7 +75,7 @@
import org.opensearch.http.HttpServerTransport;
import org.opensearch.http.HttpTransportSettings;
import org.opensearch.http.NullDispatcher;
-import org.opensearch.indices.breaker.NoneCircuitBreakerService;
+import org.opensearch.core.indices.breaker.NoneCircuitBreakerService;
import org.opensearch.rest.BytesRestResponse;
import org.opensearch.rest.RestChannel;
import org.opensearch.rest.RestRequest;
diff --git a/modules/transport-netty4/src/test/java/org/opensearch/transport/netty4/Netty4SizeHeaderFrameDecoderTests.java b/modules/transport-netty4/src/test/java/org/opensearch/transport/netty4/Netty4SizeHeaderFrameDecoderTests.java
index f80d7f41b5f55..db7347bf99345 100644
--- a/modules/transport-netty4/src/test/java/org/opensearch/transport/netty4/Netty4SizeHeaderFrameDecoderTests.java
+++ b/modules/transport-netty4/src/test/java/org/opensearch/transport/netty4/Netty4SizeHeaderFrameDecoderTests.java
@@ -36,10 +36,10 @@
import org.opensearch.core.common.io.stream.NamedWriteableRegistry;
import org.opensearch.common.network.NetworkService;
import org.opensearch.common.settings.Settings;
-import org.opensearch.common.transport.TransportAddress;
+import org.opensearch.core.common.transport.TransportAddress;
import org.opensearch.common.util.MockPageCacheRecycler;
import org.opensearch.common.util.PageCacheRecycler;
-import org.opensearch.indices.breaker.NoneCircuitBreakerService;
+import org.opensearch.core.indices.breaker.NoneCircuitBreakerService;
import org.opensearch.test.OpenSearchTestCase;
import org.opensearch.threadpool.ThreadPool;
import org.opensearch.transport.SharedGroupFactory;
diff --git a/modules/transport-netty4/src/test/java/org/opensearch/transport/netty4/Netty4UtilsTests.java b/modules/transport-netty4/src/test/java/org/opensearch/transport/netty4/Netty4UtilsTests.java
index d3fa8ea56ffe7..5ee194b7bb513 100644
--- a/modules/transport-netty4/src/test/java/org/opensearch/transport/netty4/Netty4UtilsTests.java
+++ b/modules/transport-netty4/src/test/java/org/opensearch/transport/netty4/Netty4UtilsTests.java
@@ -36,14 +36,14 @@
import io.netty.buffer.CompositeByteBuf;
import io.netty.buffer.Unpooled;
import org.apache.lucene.util.BytesRef;
-import org.opensearch.common.breaker.CircuitBreaker;
+import org.opensearch.core.common.breaker.CircuitBreaker;
import org.opensearch.common.bytes.AbstractBytesReferenceTestCase;
import org.opensearch.core.common.bytes.BytesArray;
import org.opensearch.core.common.bytes.BytesReference;
import org.opensearch.common.io.stream.ReleasableBytesStreamOutput;
import org.opensearch.common.util.BigArrays;
import org.opensearch.common.util.PageCacheRecycler;
-import org.opensearch.indices.breaker.NoneCircuitBreakerService;
+import org.opensearch.core.indices.breaker.NoneCircuitBreakerService;
import org.opensearch.test.OpenSearchTestCase;
import java.io.IOException;
diff --git a/modules/transport-netty4/src/test/java/org/opensearch/transport/netty4/NettyTransportMultiPortTests.java b/modules/transport-netty4/src/test/java/org/opensearch/transport/netty4/NettyTransportMultiPortTests.java
index 5d7841df4bf33..c10a92ba1900b 100644
--- a/modules/transport-netty4/src/test/java/org/opensearch/transport/netty4/NettyTransportMultiPortTests.java
+++ b/modules/transport-netty4/src/test/java/org/opensearch/transport/netty4/NettyTransportMultiPortTests.java
@@ -32,14 +32,14 @@
package org.opensearch.transport.netty4;
import org.opensearch.Version;
-import org.opensearch.common.component.Lifecycle;
+import org.opensearch.common.lifecycle.Lifecycle;
import org.opensearch.core.common.io.stream.NamedWriteableRegistry;
import org.opensearch.common.network.NetworkService;
import org.opensearch.common.network.NetworkUtils;
import org.opensearch.common.settings.Settings;
import org.opensearch.common.util.MockPageCacheRecycler;
import org.opensearch.common.util.PageCacheRecycler;
-import org.opensearch.indices.breaker.NoneCircuitBreakerService;
+import org.opensearch.core.indices.breaker.NoneCircuitBreakerService;
import org.opensearch.test.OpenSearchTestCase;
import org.opensearch.threadpool.TestThreadPool;
import org.opensearch.threadpool.ThreadPool;
diff --git a/modules/transport-netty4/src/test/java/org/opensearch/transport/netty4/SimpleNetty4TransportTests.java b/modules/transport-netty4/src/test/java/org/opensearch/transport/netty4/SimpleNetty4TransportTests.java
index 619f473b8bef2..27bfaef15ca86 100644
--- a/modules/transport-netty4/src/test/java/org/opensearch/transport/netty4/SimpleNetty4TransportTests.java
+++ b/modules/transport-netty4/src/test/java/org/opensearch/transport/netty4/SimpleNetty4TransportTests.java
@@ -39,11 +39,11 @@
import org.opensearch.common.network.NetworkService;
import org.opensearch.common.settings.ClusterSettings;
import org.opensearch.common.settings.Settings;
-import org.opensearch.common.transport.TransportAddress;
+import org.opensearch.core.common.transport.TransportAddress;
import org.opensearch.common.util.PageCacheRecycler;
import org.opensearch.common.util.io.IOUtils;
import org.opensearch.common.util.net.NetUtils;
-import org.opensearch.indices.breaker.NoneCircuitBreakerService;
+import org.opensearch.core.indices.breaker.NoneCircuitBreakerService;
import org.opensearch.test.transport.MockTransportService;
import org.opensearch.test.transport.StubbableTransport;
import org.opensearch.transport.AbstractSimpleTransportTestCase;
diff --git a/plugins/analysis-icu/src/test/java/org/opensearch/index/mapper/ICUCollationKeywordFieldMapperTests.java b/plugins/analysis-icu/src/test/java/org/opensearch/index/mapper/ICUCollationKeywordFieldMapperTests.java
index 37cb73e21b5d4..0a2f48f4215cb 100644
--- a/plugins/analysis-icu/src/test/java/org/opensearch/index/mapper/ICUCollationKeywordFieldMapperTests.java
+++ b/plugins/analysis-icu/src/test/java/org/opensearch/index/mapper/ICUCollationKeywordFieldMapperTests.java
@@ -39,7 +39,6 @@
import org.apache.lucene.index.IndexableField;
import org.apache.lucene.index.IndexableFieldType;
import org.apache.lucene.util.BytesRef;
-import org.opensearch.common.Strings;
import org.opensearch.core.xcontent.XContentBuilder;
import org.opensearch.plugin.analysis.icu.AnalysisICUPlugin;
import org.opensearch.plugins.Plugin;
@@ -96,7 +95,7 @@ protected void writeFieldValue(XContentBuilder builder) throws IOException {
public void testDefaults() throws Exception {
DocumentMapper mapper = createDocumentMapper(fieldMapping(this::minimalMapping));
- assertEquals(Strings.toString(fieldMapping(this::minimalMapping)), mapper.mappingSource().toString());
+ assertEquals(fieldMapping(this::minimalMapping).toString(), mapper.mappingSource().toString());
ParsedDocument doc = mapper.parse(source(b -> b.field("field", "1234")));
IndexableField[] fields = doc.rootDoc().getFields("field");
diff --git a/plugins/discovery-azure-classic/src/internalClusterTest/java/org/opensearch/cloud/azure/classic/AbstractAzureComputeServiceTestCase.java b/plugins/discovery-azure-classic/src/internalClusterTest/java/org/opensearch/cloud/azure/classic/AbstractAzureComputeServiceTestCase.java
index f95f358532bac..9a6327a5c88ba 100644
--- a/plugins/discovery-azure-classic/src/internalClusterTest/java/org/opensearch/cloud/azure/classic/AbstractAzureComputeServiceTestCase.java
+++ b/plugins/discovery-azure-classic/src/internalClusterTest/java/org/opensearch/cloud/azure/classic/AbstractAzureComputeServiceTestCase.java
@@ -47,7 +47,7 @@
import org.opensearch.common.network.NetworkAddress;
import org.opensearch.common.network.NetworkService;
import org.opensearch.common.settings.Settings;
-import org.opensearch.common.transport.TransportAddress;
+import org.opensearch.core.common.transport.TransportAddress;
import org.opensearch.discovery.azure.classic.AzureSeedHostsProvider;
import org.opensearch.plugin.discovery.azure.classic.AzureDiscoveryPlugin;
import org.opensearch.plugins.Plugin;
@@ -63,7 +63,7 @@
import java.util.Map;
import java.util.concurrent.ConcurrentHashMap;
-import static org.opensearch.common.util.CollectionUtils.newSingletonArrayList;
+import static org.opensearch.core.common.util.CollectionUtils.newSingletonArrayList;
import static org.opensearch.discovery.DiscoveryModule.DISCOVERY_SEED_PROVIDERS_SETTING;
public abstract class AbstractAzureComputeServiceTestCase extends OpenSearchIntegTestCase {
diff --git a/plugins/discovery-azure-classic/src/main/java/org/opensearch/cloud/azure/classic/management/AzureComputeServiceImpl.java b/plugins/discovery-azure-classic/src/main/java/org/opensearch/cloud/azure/classic/management/AzureComputeServiceImpl.java
index 9dbf08a3e1a01..23db0a74dc3fc 100644
--- a/plugins/discovery-azure-classic/src/main/java/org/opensearch/cloud/azure/classic/management/AzureComputeServiceImpl.java
+++ b/plugins/discovery-azure-classic/src/main/java/org/opensearch/cloud/azure/classic/management/AzureComputeServiceImpl.java
@@ -51,7 +51,7 @@
import org.opensearch.OpenSearchException;
import org.opensearch.SpecialPermission;
import org.opensearch.cloud.azure.classic.AzureServiceRemoteException;
-import org.opensearch.common.component.AbstractLifecycleComponent;
+import org.opensearch.common.lifecycle.AbstractLifecycleComponent;
import org.opensearch.common.settings.Setting;
import org.opensearch.common.settings.Settings;
import org.opensearch.core.common.Strings;
diff --git a/plugins/discovery-azure-classic/src/main/java/org/opensearch/discovery/azure/classic/AzureSeedHostsProvider.java b/plugins/discovery-azure-classic/src/main/java/org/opensearch/discovery/azure/classic/AzureSeedHostsProvider.java
index e2bc180876a17..89970fdfc322e 100644
--- a/plugins/discovery-azure-classic/src/main/java/org/opensearch/discovery/azure/classic/AzureSeedHostsProvider.java
+++ b/plugins/discovery-azure-classic/src/main/java/org/opensearch/discovery/azure/classic/AzureSeedHostsProvider.java
@@ -48,7 +48,7 @@
import org.opensearch.common.network.NetworkAddress;
import org.opensearch.common.network.NetworkService;
import org.opensearch.common.settings.Settings;
-import org.opensearch.common.transport.TransportAddress;
+import org.opensearch.core.common.transport.TransportAddress;
import org.opensearch.common.unit.TimeValue;
import org.opensearch.core.common.Strings;
import org.opensearch.discovery.SeedHostsProvider;
diff --git a/plugins/discovery-ec2/src/main/java/org/opensearch/discovery/ec2/AwsEc2SeedHostsProvider.java b/plugins/discovery-ec2/src/main/java/org/opensearch/discovery/ec2/AwsEc2SeedHostsProvider.java
index 4afdff7d2c272..0724dbe4543b8 100644
--- a/plugins/discovery-ec2/src/main/java/org/opensearch/discovery/ec2/AwsEc2SeedHostsProvider.java
+++ b/plugins/discovery-ec2/src/main/java/org/opensearch/discovery/ec2/AwsEc2SeedHostsProvider.java
@@ -47,7 +47,7 @@
import org.apache.logging.log4j.message.ParameterizedMessage;
import org.apache.logging.log4j.util.Supplier;
import org.opensearch.common.settings.Settings;
-import org.opensearch.common.transport.TransportAddress;
+import org.opensearch.core.common.transport.TransportAddress;
import org.opensearch.common.unit.TimeValue;
import org.opensearch.common.util.SingleObjectCache;
import org.opensearch.discovery.SeedHostsProvider;
diff --git a/plugins/discovery-ec2/src/test/java/org/opensearch/discovery/ec2/Ec2DiscoveryTests.java b/plugins/discovery-ec2/src/test/java/org/opensearch/discovery/ec2/Ec2DiscoveryTests.java
index 861926a9e67c9..07400f2126fe5 100644
--- a/plugins/discovery-ec2/src/test/java/org/opensearch/discovery/ec2/Ec2DiscoveryTests.java
+++ b/plugins/discovery-ec2/src/test/java/org/opensearch/discovery/ec2/Ec2DiscoveryTests.java
@@ -44,9 +44,9 @@
import org.opensearch.common.io.Streams;
import org.opensearch.common.network.NetworkService;
import org.opensearch.common.settings.Settings;
-import org.opensearch.common.transport.TransportAddress;
+import org.opensearch.core.common.transport.TransportAddress;
import org.opensearch.common.util.PageCacheRecycler;
-import org.opensearch.indices.breaker.NoneCircuitBreakerService;
+import org.opensearch.core.indices.breaker.NoneCircuitBreakerService;
import org.opensearch.test.transport.MockTransportService;
import org.opensearch.transport.Transport;
import org.opensearch.transport.TransportService;
diff --git a/plugins/discovery-ec2/src/test/java/org/opensearch/discovery/ec2/Ec2RetriesTests.java b/plugins/discovery-ec2/src/test/java/org/opensearch/discovery/ec2/Ec2RetriesTests.java
index cc65e92b1485a..0f7a86bf76622 100644
--- a/plugins/discovery-ec2/src/test/java/org/opensearch/discovery/ec2/Ec2RetriesTests.java
+++ b/plugins/discovery-ec2/src/test/java/org/opensearch/discovery/ec2/Ec2RetriesTests.java
@@ -41,11 +41,11 @@
import org.opensearch.common.io.Streams;
import org.opensearch.core.common.io.stream.NamedWriteableRegistry;
import org.opensearch.common.settings.Settings;
-import org.opensearch.common.transport.TransportAddress;
+import org.opensearch.core.common.transport.TransportAddress;
import org.opensearch.common.util.PageCacheRecycler;
import org.opensearch.discovery.SeedHostsProvider;
import org.opensearch.discovery.SeedHostsResolver;
-import org.opensearch.indices.breaker.NoneCircuitBreakerService;
+import org.opensearch.core.indices.breaker.NoneCircuitBreakerService;
import org.opensearch.test.transport.MockTransportService;
import org.opensearch.transport.TransportService;
import org.opensearch.transport.nio.MockNioTransport;
diff --git a/plugins/discovery-gce/qa/gce/src/yamlRestTest/java/org/opensearch/cloud/gce/GCEFixture.java b/plugins/discovery-gce/qa/gce/src/yamlRestTest/java/org/opensearch/cloud/gce/GCEFixture.java
index f588316fdc32f..db23944b25369 100644
--- a/plugins/discovery-gce/qa/gce/src/yamlRestTest/java/org/opensearch/cloud/gce/GCEFixture.java
+++ b/plugins/discovery-gce/qa/gce/src/yamlRestTest/java/org/opensearch/cloud/gce/GCEFixture.java
@@ -33,7 +33,6 @@
import org.apache.http.client.methods.HttpGet;
-import org.opensearch.common.Strings;
import org.opensearch.common.SuppressForbidden;
import org.opensearch.common.collect.MapBuilder;
import org.opensearch.common.path.PathTrie;
@@ -139,13 +138,12 @@ private PathTrie defaultHandlers() {
handlers.insert(
nonAuthPath(HttpGet.METHOD_NAME, "/computeMetadata/v1/instance/service-accounts/default/token"),
request -> jsonValue.apply(
- Strings.toString(
- jsonBuilder().startObject()
- .field("access_token", TOKEN)
- .field("expires_in", TimeUnit.HOURS.toSeconds(1))
- .field("token_type", TOKEN_TYPE)
- .endObject()
- )
+ jsonBuilder().startObject()
+ .field("access_token", TOKEN)
+ .field("expires_in", TimeUnit.HOURS.toSeconds(1))
+ .field("token_type", TOKEN_TYPE)
+ .endObject()
+ .toString()
)
);
@@ -179,9 +177,7 @@ private PathTrie defaultHandlers() {
);
}
- final String json = Strings.toString(
- jsonBuilder().startObject().field("id", "test-instances").field("items", items).endObject()
- );
+ final String json = jsonBuilder().startObject().field("id", "test-instances").field("items", items).endObject().toString();
final byte[] responseAsBytes = json.getBytes(StandardCharsets.UTF_8);
final Map headers = new HashMap<>(JSON_CONTENT_TYPE);
@@ -213,29 +209,28 @@ protected Response handle(final Request request) throws IOException {
}
private static Response newError(final RestStatus status, final String code, final String message) throws IOException {
- final String response = Strings.toString(
- jsonBuilder().startObject()
- .field(
- "error",
- MapBuilder.newMapBuilder()
- .put(
- "errors",
- Collections.singletonList(
- MapBuilder.newMapBuilder()
- .put("domain", "global")
- .put("reason", "required")
- .put("message", message)
- .put("locationType", "header")
- .put("location", code)
- .immutableMap()
- )
+ final String response = jsonBuilder().startObject()
+ .field(
+ "error",
+ MapBuilder.newMapBuilder()
+ .put(
+ "errors",
+ Collections.singletonList(
+ MapBuilder.newMapBuilder()
+ .put("domain", "global")
+ .put("reason", "required")
+ .put("message", message)
+ .put("locationType", "header")
+ .put("location", code)
+ .immutableMap()
)
- .put("code", status.getStatus())
- .put("message", message)
- .immutableMap()
- )
- .endObject()
- );
+ )
+ .put("code", status.getStatus())
+ .put("message", message)
+ .immutableMap()
+ )
+ .endObject()
+ .toString();
return new Response(status.getStatus(), JSON_CONTENT_TYPE, response.getBytes(UTF_8));
}
diff --git a/plugins/discovery-gce/src/main/java/org/opensearch/cloud/gce/GceMetadataService.java b/plugins/discovery-gce/src/main/java/org/opensearch/cloud/gce/GceMetadataService.java
index 4873cb6dcbf7a..b7ae7f8b404be 100644
--- a/plugins/discovery-gce/src/main/java/org/opensearch/cloud/gce/GceMetadataService.java
+++ b/plugins/discovery-gce/src/main/java/org/opensearch/cloud/gce/GceMetadataService.java
@@ -46,7 +46,7 @@
import org.apache.logging.log4j.LogManager;
import org.apache.logging.log4j.Logger;
import org.opensearch.cloud.gce.util.Access;
-import org.opensearch.common.component.AbstractLifecycleComponent;
+import org.opensearch.common.lifecycle.AbstractLifecycleComponent;
import org.opensearch.common.settings.Setting;
import org.opensearch.common.settings.Settings;
diff --git a/plugins/discovery-gce/src/main/java/org/opensearch/discovery/gce/GceSeedHostsProvider.java b/plugins/discovery-gce/src/main/java/org/opensearch/discovery/gce/GceSeedHostsProvider.java
index dfd60f52730a6..3295273c83598 100644
--- a/plugins/discovery-gce/src/main/java/org/opensearch/discovery/gce/GceSeedHostsProvider.java
+++ b/plugins/discovery-gce/src/main/java/org/opensearch/discovery/gce/GceSeedHostsProvider.java
@@ -46,7 +46,7 @@
import org.opensearch.common.settings.Setting;
import org.opensearch.common.settings.Setting.Property;
import org.opensearch.common.settings.Settings;
-import org.opensearch.common.transport.TransportAddress;
+import org.opensearch.core.common.transport.TransportAddress;
import org.opensearch.common.unit.TimeValue;
import org.opensearch.core.common.Strings;
import org.opensearch.discovery.SeedHostsProvider;
diff --git a/plugins/discovery-gce/src/test/java/org/opensearch/discovery/gce/GceDiscoveryTests.java b/plugins/discovery-gce/src/test/java/org/opensearch/discovery/gce/GceDiscoveryTests.java
index 2ca1234bb8a04..c63085deb466f 100644
--- a/plugins/discovery-gce/src/test/java/org/opensearch/discovery/gce/GceDiscoveryTests.java
+++ b/plugins/discovery-gce/src/test/java/org/opensearch/discovery/gce/GceDiscoveryTests.java
@@ -37,7 +37,7 @@
import org.opensearch.cloud.gce.GceMetadataService;
import org.opensearch.common.network.NetworkService;
import org.opensearch.common.settings.Settings;
-import org.opensearch.common.transport.TransportAddress;
+import org.opensearch.core.common.transport.TransportAddress;
import org.opensearch.test.OpenSearchTestCase;
import org.opensearch.test.transport.MockTransportService;
import org.opensearch.threadpool.TestThreadPool;
diff --git a/plugins/events-correlation-engine/src/javaRestTest/java/org/opensearch/plugin/correlation/CorrelationVectorsEngineIT.java b/plugins/events-correlation-engine/src/javaRestTest/java/org/opensearch/plugin/correlation/CorrelationVectorsEngineIT.java
index dd83dfa84dbdb..000e667f39eab 100644
--- a/plugins/events-correlation-engine/src/javaRestTest/java/org/opensearch/plugin/correlation/CorrelationVectorsEngineIT.java
+++ b/plugins/events-correlation-engine/src/javaRestTest/java/org/opensearch/plugin/correlation/CorrelationVectorsEngineIT.java
@@ -18,11 +18,11 @@
import org.opensearch.client.ResponseException;
import org.opensearch.client.RestClient;
import org.opensearch.client.WarningsHandler;
-import org.opensearch.common.Strings;
import org.opensearch.common.settings.Settings;
import org.opensearch.common.xcontent.XContentFactory;
-import org.opensearch.common.xcontent.XContentType;
+import org.opensearch.core.common.Strings;
import org.opensearch.core.rest.RestStatus;
+import org.opensearch.core.xcontent.MediaTypeRegistry;
import org.opensearch.core.xcontent.XContentBuilder;
import org.opensearch.index.IndexSettings;
import org.opensearch.test.rest.OpenSearchRestTestCase;
@@ -94,7 +94,7 @@ public void testQuery() throws IOException {
.endObject()
.endObject();
- String mapping = Strings.toString(builder);
+ String mapping = builder.toString();
createTestIndexWithMappingJson(client(), INDEX_NAME, mapping, getCorrelationDefaultIndexSettings());
for (int idx = 0; idx < TEST_VECTORS.length; ++idx) {
@@ -209,7 +209,7 @@ public void testQueryWithWrongMapping() throws IOException {
.endObject()
.endObject();
- String mapping = Strings.toString(builder);
+ String mapping = builder.toString();
Exception ex = assertThrows(ResponseException.class, () -> {
createTestIndexWithMappingJson(client(), INDEX_NAME, mapping, getCorrelationDefaultIndexSettings());
});
@@ -225,7 +225,7 @@ public void testQueryWithWrongMapping() throws IOException {
private String createTestIndexWithMappingJson(RestClient client, String index, String mapping, Settings settings) throws IOException {
Request request = new Request("PUT", "/" + index);
- String entity = "{\"settings\": " + Strings.toString(XContentType.JSON, settings);
+ String entity = "{\"settings\": " + Strings.toString(MediaTypeRegistry.JSON, settings);
if (mapping != null) {
entity = entity + ",\"mappings\" : " + mapping;
}
@@ -253,7 +253,7 @@ private void addCorrelationDoc(String index, String docId, List fieldNam
}
builder.endObject();
- request.setJsonEntity(Strings.toString(builder));
+ request.setJsonEntity(builder.toString());
Response response = client().performRequest(request);
assertEquals(request.getEndpoint() + ": failed", RestStatus.CREATED, RestStatus.fromCode(response.getStatusLine().getStatusCode()));
}
diff --git a/plugins/events-correlation-engine/src/test/java/org/opensearch/plugin/correlation/core/index/query/CorrelationQueryBuilderTests.java b/plugins/events-correlation-engine/src/test/java/org/opensearch/plugin/correlation/core/index/query/CorrelationQueryBuilderTests.java
index f23a4f25302b1..3489dfdcc4530 100644
--- a/plugins/events-correlation-engine/src/test/java/org/opensearch/plugin/correlation/core/index/query/CorrelationQueryBuilderTests.java
+++ b/plugins/events-correlation-engine/src/test/java/org/opensearch/plugin/correlation/core/index/query/CorrelationQueryBuilderTests.java
@@ -12,11 +12,11 @@
import org.junit.Assert;
import org.opensearch.Version;
import org.opensearch.cluster.ClusterModule;
-import org.opensearch.common.Strings;
import org.opensearch.common.io.stream.BytesStreamOutput;
import org.opensearch.common.xcontent.XContentFactory;
import org.opensearch.common.xcontent.XContentType;
import org.opensearch.common.xcontent.json.JsonXContent;
+import org.opensearch.core.common.Strings;
import org.opensearch.core.common.io.stream.NamedWriteableAwareStreamInput;
import org.opensearch.core.common.io.stream.NamedWriteableRegistry;
import org.opensearch.core.common.io.stream.StreamInput;
diff --git a/plugins/ingest-attachment/build.gradle b/plugins/ingest-attachment/build.gradle
index 62651216c8144..17213b96a25f3 100644
--- a/plugins/ingest-attachment/build.gradle
+++ b/plugins/ingest-attachment/build.gradle
@@ -89,7 +89,7 @@ dependencies {
api "org.apache.james:apache-mime4j-core:${versions.mime4j}"
api "org.apache.james:apache-mime4j-dom:${versions.mime4j}"
// EPUB books
- api 'org.apache.commons:commons-lang3:3.12.0'
+ api 'org.apache.commons:commons-lang3:3.13.0'
// Microsoft Word files with visio diagrams
api 'org.apache.commons:commons-math3:3.6.1'
// POIs dependency
diff --git a/plugins/ingest-attachment/licenses/commons-lang3-3.12.0.jar.sha1 b/plugins/ingest-attachment/licenses/commons-lang3-3.12.0.jar.sha1
deleted file mode 100644
index 9273d8c01aaba..0000000000000
--- a/plugins/ingest-attachment/licenses/commons-lang3-3.12.0.jar.sha1
+++ /dev/null
@@ -1 +0,0 @@
-c6842c86792ff03b9f1d1fe2aab8dc23aa6c6f0e
\ No newline at end of file
diff --git a/plugins/ingest-attachment/licenses/commons-lang3-3.13.0.jar.sha1 b/plugins/ingest-attachment/licenses/commons-lang3-3.13.0.jar.sha1
new file mode 100644
index 0000000000000..d0c2f2486ee1f
--- /dev/null
+++ b/plugins/ingest-attachment/licenses/commons-lang3-3.13.0.jar.sha1
@@ -0,0 +1 @@
+b7263237aa89c1f99b327197c41d0669707a462e
\ No newline at end of file
diff --git a/plugins/mapper-annotated-text/src/internalClusterTest/java/org/opensearch/index/mapper/annotatedtext/AnnotatedTextFieldMapperTests.java b/plugins/mapper-annotated-text/src/internalClusterTest/java/org/opensearch/index/mapper/annotatedtext/AnnotatedTextFieldMapperTests.java
index d03fcd47fe991..1a8bd1ae1d2a8 100644
--- a/plugins/mapper-annotated-text/src/internalClusterTest/java/org/opensearch/index/mapper/annotatedtext/AnnotatedTextFieldMapperTests.java
+++ b/plugins/mapper-annotated-text/src/internalClusterTest/java/org/opensearch/index/mapper/annotatedtext/AnnotatedTextFieldMapperTests.java
@@ -47,7 +47,6 @@
import org.apache.lucene.index.Terms;
import org.apache.lucene.index.TermsEnum;
import org.apache.lucene.util.BytesRef;
-import org.opensearch.common.Strings;
import org.opensearch.core.xcontent.ToXContent;
import org.opensearch.core.xcontent.XContentBuilder;
import org.opensearch.common.xcontent.XContentFactory;
@@ -396,92 +395,87 @@ public void testPositionIncrementGap() throws IOException {
}
public void testSearchAnalyzerSerialization() throws IOException {
- String mapping = Strings.toString(
- XContentFactory.jsonBuilder()
- .startObject()
- .startObject("_doc")
- .startObject("properties")
- .startObject("field")
- .field("type", "annotated_text")
- .field("analyzer", "standard")
- .field("search_analyzer", "keyword")
- .endObject()
- .endObject()
- .endObject()
- .endObject()
- );
+ String mapping = XContentFactory.jsonBuilder()
+ .startObject()
+ .startObject("_doc")
+ .startObject("properties")
+ .startObject("field")
+ .field("type", "annotated_text")
+ .field("analyzer", "standard")
+ .field("search_analyzer", "keyword")
+ .endObject()
+ .endObject()
+ .endObject()
+ .endObject()
+ .toString();
DocumentMapper mapper = createDocumentMapper("_doc", mapping);
assertEquals(mapping, mapper.mappingSource().toString());
// special case: default index analyzer
- mapping = Strings.toString(
- XContentFactory.jsonBuilder()
- .startObject()
- .startObject("_doc")
- .startObject("properties")
- .startObject("field")
- .field("type", "annotated_text")
- .field("analyzer", "default")
- .field("search_analyzer", "keyword")
- .endObject()
- .endObject()
- .endObject()
- .endObject()
- );
+ mapping = XContentFactory.jsonBuilder()
+ .startObject()
+ .startObject("_doc")
+ .startObject("properties")
+ .startObject("field")
+ .field("type", "annotated_text")
+ .field("analyzer", "default")
+ .field("search_analyzer", "keyword")
+ .endObject()
+ .endObject()
+ .endObject()
+ .endObject()
+ .toString();
mapper = createDocumentMapper("_doc", mapping);
assertEquals(mapping, mapper.mappingSource().toString());
- mapping = Strings.toString(
- XContentFactory.jsonBuilder()
- .startObject()
- .startObject("_doc")
- .startObject("properties")
- .startObject("field")
- .field("type", "annotated_text")
- .field("analyzer", "keyword")
- .endObject()
- .endObject()
- .endObject()
- .endObject()
- );
+ mapping = XContentFactory.jsonBuilder()
+ .startObject()
+ .startObject("_doc")
+ .startObject("properties")
+ .startObject("field")
+ .field("type", "annotated_text")
+ .field("analyzer", "keyword")
+ .endObject()
+ .endObject()
+ .endObject()
+ .endObject()
+ .toString();
mapper = createDocumentMapper("_doc", mapping);
assertEquals(mapping, mapper.mappingSource().toString());
// special case: default search analyzer
- mapping = Strings.toString(
- XContentFactory.jsonBuilder()
- .startObject()
- .startObject("_doc")
- .startObject("properties")
- .startObject("field")
- .field("type", "annotated_text")
- .field("analyzer", "keyword")
- .field("search_analyzer", "default")
- .endObject()
- .endObject()
- .endObject()
- .endObject()
- );
+ mapping = XContentFactory.jsonBuilder()
+ .startObject()
+ .startObject("_doc")
+ .startObject("properties")
+ .startObject("field")
+ .field("type", "annotated_text")
+ .field("analyzer", "keyword")
+ .field("search_analyzer", "default")
+ .endObject()
+ .endObject()
+ .endObject()
+ .endObject()
+ .toString();
mapper = createDocumentMapper("_doc", mapping);
assertEquals(mapping, mapper.mappingSource().toString());
- mapping = Strings.toString(
- XContentFactory.jsonBuilder()
- .startObject()
- .startObject("_doc")
- .startObject("properties")
- .startObject("field")
- .field("type", "annotated_text")
- .field("analyzer", "keyword")
- .endObject()
- .endObject()
- .endObject()
- .endObject()
- );
+ mapping = XContentFactory.jsonBuilder()
+ .startObject()
+ .startObject("_doc")
+ .startObject("properties")
+ .startObject("field")
+ .field("type", "annotated_text")
+ .field("analyzer", "keyword")
+ .endObject()
+ .endObject()
+ .endObject()
+ .endObject()
+ .toString();
mapper = createDocumentMapper("_doc", mapping);
XContentBuilder builder = XContentFactory.jsonBuilder();
@@ -489,48 +483,46 @@ public void testSearchAnalyzerSerialization() throws IOException {
mapper.toXContent(builder, new ToXContent.MapParams(Collections.singletonMap("include_defaults", "true")));
builder.endObject();
- String mappingString = Strings.toString(builder);
+ String mappingString = builder.toString();
assertTrue(mappingString.contains("analyzer"));
assertTrue(mappingString.contains("search_analyzer"));
assertTrue(mappingString.contains("search_quote_analyzer"));
}
public void testSearchQuoteAnalyzerSerialization() throws IOException {
- String mapping = Strings.toString(
- XContentFactory.jsonBuilder()
- .startObject()
- .startObject("_doc")
- .startObject("properties")
- .startObject("field")
- .field("type", "annotated_text")
- .field("analyzer", "standard")
- .field("search_analyzer", "standard")
- .field("search_quote_analyzer", "keyword")
- .endObject()
- .endObject()
- .endObject()
- .endObject()
- );
+ String mapping = XContentFactory.jsonBuilder()
+ .startObject()
+ .startObject("_doc")
+ .startObject("properties")
+ .startObject("field")
+ .field("type", "annotated_text")
+ .field("analyzer", "standard")
+ .field("search_analyzer", "standard")
+ .field("search_quote_analyzer", "keyword")
+ .endObject()
+ .endObject()
+ .endObject()
+ .endObject()
+ .toString();
DocumentMapper mapper = createDocumentMapper("_doc", mapping);
assertEquals(mapping, mapper.mappingSource().toString());
// special case: default index/search analyzer
- mapping = Strings.toString(
- XContentFactory.jsonBuilder()
- .startObject()
- .startObject("_doc")
- .startObject("properties")
- .startObject("field")
- .field("type", "annotated_text")
- .field("analyzer", "default")
- .field("search_analyzer", "default")
- .field("search_quote_analyzer", "keyword")
- .endObject()
- .endObject()
- .endObject()
- .endObject()
- );
+ mapping = XContentFactory.jsonBuilder()
+ .startObject()
+ .startObject("_doc")
+ .startObject("properties")
+ .startObject("field")
+ .field("type", "annotated_text")
+ .field("analyzer", "default")
+ .field("search_analyzer", "default")
+ .field("search_quote_analyzer", "keyword")
+ .endObject()
+ .endObject()
+ .endObject()
+ .endObject()
+ .toString();
mapper = createDocumentMapper("_doc", mapping);
assertEquals(mapping, mapper.mappingSource().toString());
diff --git a/plugins/mapper-size/src/internalClusterTest/java/org/opensearch/index/mapper/size/SizeMappingTests.java b/plugins/mapper-size/src/internalClusterTest/java/org/opensearch/index/mapper/size/SizeMappingTests.java
index 834c8a448d3d5..87b1624cbcd64 100644
--- a/plugins/mapper-size/src/internalClusterTest/java/org/opensearch/index/mapper/size/SizeMappingTests.java
+++ b/plugins/mapper-size/src/internalClusterTest/java/org/opensearch/index/mapper/size/SizeMappingTests.java
@@ -34,7 +34,6 @@
import java.util.Collection;
-import org.opensearch.common.Strings;
import org.opensearch.core.common.bytes.BytesReference;
import org.opensearch.common.compress.CompressedXContent;
import org.opensearch.common.settings.Settings;
@@ -103,16 +102,15 @@ public void testThatDisablingWorksWhenMerging() throws Exception {
DocumentMapper docMapper = service.mapperService().documentMapper();
assertThat(docMapper.metadataMapper(SizeFieldMapper.class).enabled(), is(true));
- String disabledMapping = Strings.toString(
- XContentFactory.jsonBuilder()
- .startObject()
- .startObject("type")
- .startObject("_size")
- .field("enabled", false)
- .endObject()
- .endObject()
- .endObject()
- );
+ String disabledMapping = XContentFactory.jsonBuilder()
+ .startObject()
+ .startObject("type")
+ .startObject("_size")
+ .field("enabled", false)
+ .endObject()
+ .endObject()
+ .endObject()
+ .toString();
docMapper = service.mapperService()
.merge("type", new CompressedXContent(disabledMapping), MapperService.MergeReason.MAPPING_UPDATE);
diff --git a/plugins/repository-azure/build.gradle b/plugins/repository-azure/build.gradle
index 9ec1b4ee50569..1ffb6ae4b9c95 100644
--- a/plugins/repository-azure/build.gradle
+++ b/plugins/repository-azure/build.gradle
@@ -60,7 +60,7 @@ dependencies {
api 'io.projectreactor:reactor-core:3.5.6'
api 'io.projectreactor.netty:reactor-netty:1.1.8'
api 'io.projectreactor.netty:reactor-netty-core:1.1.8'
- api 'io.projectreactor.netty:reactor-netty-http:1.1.8'
+ api 'io.projectreactor.netty:reactor-netty-http:1.1.9'
api "org.slf4j:slf4j-api:${versions.slf4j}"
api "com.fasterxml.jackson.core:jackson-annotations:${versions.jackson}"
api "com.fasterxml.jackson.core:jackson-databind:${versions.jackson_databind}"
diff --git a/plugins/repository-azure/licenses/netty-codec-dns-4.1.94.Final.jar.sha1 b/plugins/repository-azure/licenses/netty-codec-dns-4.1.94.Final.jar.sha1
deleted file mode 100644
index 670bd4c98a044..0000000000000
--- a/plugins/repository-azure/licenses/netty-codec-dns-4.1.94.Final.jar.sha1
+++ /dev/null
@@ -1 +0,0 @@
-9180660dc8479e1594b60b02fc27404af0ea43a6
\ No newline at end of file
diff --git a/plugins/repository-azure/licenses/netty-codec-dns-4.1.96.Final.jar.sha1 b/plugins/repository-azure/licenses/netty-codec-dns-4.1.96.Final.jar.sha1
new file mode 100644
index 0000000000000..42d5e60ce9d45
--- /dev/null
+++ b/plugins/repository-azure/licenses/netty-codec-dns-4.1.96.Final.jar.sha1
@@ -0,0 +1 @@
+afd90dc0e164be74b4a3e1a899890557fce98567
\ No newline at end of file
diff --git a/plugins/repository-azure/licenses/netty-codec-http2-4.1.94.Final.jar.sha1 b/plugins/repository-azure/licenses/netty-codec-http2-4.1.94.Final.jar.sha1
deleted file mode 100644
index e73026b412972..0000000000000
--- a/plugins/repository-azure/licenses/netty-codec-http2-4.1.94.Final.jar.sha1
+++ /dev/null
@@ -1 +0,0 @@
-f651595784d6cca4cbca6a8ad74c48fceed6cea8
\ No newline at end of file
diff --git a/plugins/repository-azure/licenses/netty-codec-http2-4.1.96.Final.jar.sha1 b/plugins/repository-azure/licenses/netty-codec-http2-4.1.96.Final.jar.sha1
new file mode 100644
index 0000000000000..2fc787ee65197
--- /dev/null
+++ b/plugins/repository-azure/licenses/netty-codec-http2-4.1.96.Final.jar.sha1
@@ -0,0 +1 @@
+cc8baf4ff67c1bcc0cde60bc5c2bb9447d92d9e6
\ No newline at end of file
diff --git a/plugins/repository-azure/licenses/netty-codec-socks-4.1.94.Final.jar.sha1 b/plugins/repository-azure/licenses/netty-codec-socks-4.1.94.Final.jar.sha1
deleted file mode 100644
index de2c4d00aef09..0000000000000
--- a/plugins/repository-azure/licenses/netty-codec-socks-4.1.94.Final.jar.sha1
+++ /dev/null
@@ -1 +0,0 @@
-b9192c7cda295d75f236a13a0b1f5a008f05d516
\ No newline at end of file
diff --git a/plugins/repository-azure/licenses/netty-codec-socks-4.1.96.Final.jar.sha1 b/plugins/repository-azure/licenses/netty-codec-socks-4.1.96.Final.jar.sha1
new file mode 100644
index 0000000000000..8e959bdac5079
--- /dev/null
+++ b/plugins/repository-azure/licenses/netty-codec-socks-4.1.96.Final.jar.sha1
@@ -0,0 +1 @@
+f53c52dbddaa4a02a51430405792d3f30a89b147
\ No newline at end of file
diff --git a/plugins/repository-azure/licenses/netty-handler-proxy-4.1.94.Final.jar.sha1 b/plugins/repository-azure/licenses/netty-handler-proxy-4.1.94.Final.jar.sha1
deleted file mode 100644
index a2db8bece8f6f..0000000000000
--- a/plugins/repository-azure/licenses/netty-handler-proxy-4.1.94.Final.jar.sha1
+++ /dev/null
@@ -1 +0,0 @@
-26ba9d30b8f7b095155b9ac63378d6d9386d85c3
\ No newline at end of file
diff --git a/plugins/repository-azure/licenses/netty-handler-proxy-4.1.96.Final.jar.sha1 b/plugins/repository-azure/licenses/netty-handler-proxy-4.1.96.Final.jar.sha1
new file mode 100644
index 0000000000000..d410208dada90
--- /dev/null
+++ b/plugins/repository-azure/licenses/netty-handler-proxy-4.1.96.Final.jar.sha1
@@ -0,0 +1 @@
+dcabd63f4aaec2b4cad7588bfdd4cd2c82287e38
\ No newline at end of file
diff --git a/plugins/repository-azure/licenses/netty-resolver-dns-4.1.94.Final.jar.sha1 b/plugins/repository-azure/licenses/netty-resolver-dns-4.1.94.Final.jar.sha1
deleted file mode 100644
index 2fa927b3b77ba..0000000000000
--- a/plugins/repository-azure/licenses/netty-resolver-dns-4.1.94.Final.jar.sha1
+++ /dev/null
@@ -1 +0,0 @@
-25bbe90e10685ce63c32bd0db56574cffffa28de
\ No newline at end of file
diff --git a/plugins/repository-azure/licenses/netty-resolver-dns-4.1.96.Final.jar.sha1 b/plugins/repository-azure/licenses/netty-resolver-dns-4.1.96.Final.jar.sha1
new file mode 100644
index 0000000000000..5041cf5473505
--- /dev/null
+++ b/plugins/repository-azure/licenses/netty-resolver-dns-4.1.96.Final.jar.sha1
@@ -0,0 +1 @@
+0095023cc667af76578c9be326a6d54e3e1de52c
\ No newline at end of file
diff --git a/plugins/repository-azure/licenses/netty-transport-native-unix-common-4.1.94.Final.jar.sha1 b/plugins/repository-azure/licenses/netty-transport-native-unix-common-4.1.94.Final.jar.sha1
deleted file mode 100644
index 43bc960a347a1..0000000000000
--- a/plugins/repository-azure/licenses/netty-transport-native-unix-common-4.1.94.Final.jar.sha1
+++ /dev/null
@@ -1 +0,0 @@
-3fa5f9d04b6b782d869d6e0657d896eeadca5866
\ No newline at end of file
diff --git a/plugins/repository-azure/licenses/netty-transport-native-unix-common-4.1.96.Final.jar.sha1 b/plugins/repository-azure/licenses/netty-transport-native-unix-common-4.1.96.Final.jar.sha1
new file mode 100644
index 0000000000000..e911c47d5ab1a
--- /dev/null
+++ b/plugins/repository-azure/licenses/netty-transport-native-unix-common-4.1.96.Final.jar.sha1
@@ -0,0 +1 @@
+daf8578cade63a01525ee9d70371fa78e6e91094
\ No newline at end of file
diff --git a/plugins/repository-azure/licenses/reactor-netty-http-1.1.8.jar.sha1 b/plugins/repository-azure/licenses/reactor-netty-http-1.1.8.jar.sha1
deleted file mode 100644
index 5092608c90eba..0000000000000
--- a/plugins/repository-azure/licenses/reactor-netty-http-1.1.8.jar.sha1
+++ /dev/null
@@ -1 +0,0 @@
-696ea25658295e49906c6aad13fa70acbdeb2359
\ No newline at end of file
diff --git a/plugins/repository-azure/licenses/reactor-netty-http-1.1.9.jar.sha1 b/plugins/repository-azure/licenses/reactor-netty-http-1.1.9.jar.sha1
new file mode 100644
index 0000000000000..96deead2c75d1
--- /dev/null
+++ b/plugins/repository-azure/licenses/reactor-netty-http-1.1.9.jar.sha1
@@ -0,0 +1 @@
+408b3037133f2e8ab0f195ccd3f807026be9b860
\ No newline at end of file
diff --git a/plugins/repository-azure/src/internalClusterTest/java/org/opensearch/repositories/azure/AzureBlobStoreRepositoryTests.java b/plugins/repository-azure/src/internalClusterTest/java/org/opensearch/repositories/azure/AzureBlobStoreRepositoryTests.java
index 67e79addfedc5..48285f80150be 100644
--- a/plugins/repository-azure/src/internalClusterTest/java/org/opensearch/repositories/azure/AzureBlobStoreRepositoryTests.java
+++ b/plugins/repository-azure/src/internalClusterTest/java/org/opensearch/repositories/azure/AzureBlobStoreRepositoryTests.java
@@ -46,7 +46,7 @@
import org.opensearch.common.regex.Regex;
import org.opensearch.common.settings.MockSecureSettings;
import org.opensearch.common.settings.Settings;
-import org.opensearch.common.unit.ByteSizeUnit;
+import org.opensearch.core.common.unit.ByteSizeUnit;
import org.opensearch.plugins.Plugin;
import org.opensearch.repositories.blobstore.OpenSearchMockAPIBasedRepositoryIntegTestCase;
import org.opensearch.core.rest.RestStatus;
diff --git a/plugins/repository-azure/src/main/java/org/opensearch/repositories/azure/AzureRepository.java b/plugins/repository-azure/src/main/java/org/opensearch/repositories/azure/AzureRepository.java
index 2677604ecb622..3846dd14559b5 100644
--- a/plugins/repository-azure/src/main/java/org/opensearch/repositories/azure/AzureRepository.java
+++ b/plugins/repository-azure/src/main/java/org/opensearch/repositories/azure/AzureRepository.java
@@ -41,7 +41,7 @@
import org.opensearch.common.blobstore.BlobStore;
import org.opensearch.common.settings.Setting;
import org.opensearch.common.settings.Setting.Property;
-import org.opensearch.common.unit.ByteSizeValue;
+import org.opensearch.core.common.unit.ByteSizeValue;
import org.opensearch.core.common.Strings;
import org.opensearch.core.xcontent.NamedXContentRegistry;
import org.opensearch.indices.recovery.RecoverySettings;
diff --git a/plugins/repository-azure/src/main/java/org/opensearch/repositories/azure/AzureStorageService.java b/plugins/repository-azure/src/main/java/org/opensearch/repositories/azure/AzureStorageService.java
index c518cc2716db6..88e9a63384f7a 100644
--- a/plugins/repository-azure/src/main/java/org/opensearch/repositories/azure/AzureStorageService.java
+++ b/plugins/repository-azure/src/main/java/org/opensearch/repositories/azure/AzureStorageService.java
@@ -61,8 +61,8 @@
import org.opensearch.common.collect.Tuple;
import org.opensearch.common.settings.Settings;
import org.opensearch.common.settings.SettingsException;
-import org.opensearch.common.unit.ByteSizeUnit;
-import org.opensearch.common.unit.ByteSizeValue;
+import org.opensearch.core.common.unit.ByteSizeUnit;
+import org.opensearch.core.common.unit.ByteSizeValue;
import org.opensearch.common.unit.TimeValue;
import java.net.Authenticator;
diff --git a/plugins/repository-azure/src/test/java/org/opensearch/repositories/azure/AzureBlobContainerRetriesTests.java b/plugins/repository-azure/src/test/java/org/opensearch/repositories/azure/AzureBlobContainerRetriesTests.java
index 8b68ccebf8c53..ab16edc7b1a2d 100644
--- a/plugins/repository-azure/src/test/java/org/opensearch/repositories/azure/AzureBlobContainerRetriesTests.java
+++ b/plugins/repository-azure/src/test/java/org/opensearch/repositories/azure/AzureBlobContainerRetriesTests.java
@@ -53,7 +53,7 @@
import org.opensearch.common.network.InetAddresses;
import org.opensearch.common.settings.MockSecureSettings;
import org.opensearch.common.settings.Settings;
-import org.opensearch.common.unit.ByteSizeUnit;
+import org.opensearch.core.common.unit.ByteSizeUnit;
import org.opensearch.common.unit.TimeValue;
import org.opensearch.common.util.concurrent.CountDown;
import org.opensearch.core.rest.RestStatus;
diff --git a/plugins/repository-azure/src/test/java/org/opensearch/repositories/azure/AzureRepositorySettingsTests.java b/plugins/repository-azure/src/test/java/org/opensearch/repositories/azure/AzureRepositorySettingsTests.java
index 24a226290985a..bfae0a3c4438c 100644
--- a/plugins/repository-azure/src/test/java/org/opensearch/repositories/azure/AzureRepositorySettingsTests.java
+++ b/plugins/repository-azure/src/test/java/org/opensearch/repositories/azure/AzureRepositorySettingsTests.java
@@ -38,8 +38,8 @@
import org.opensearch.cluster.metadata.RepositoryMetadata;
import org.opensearch.common.settings.ClusterSettings;
import org.opensearch.common.settings.Settings;
-import org.opensearch.common.unit.ByteSizeUnit;
-import org.opensearch.common.unit.ByteSizeValue;
+import org.opensearch.core.common.unit.ByteSizeUnit;
+import org.opensearch.core.common.unit.ByteSizeValue;
import org.opensearch.core.xcontent.NamedXContentRegistry;
import org.opensearch.env.Environment;
import org.opensearch.indices.recovery.RecoverySettings;
diff --git a/plugins/repository-gcs/build.gradle b/plugins/repository-gcs/build.gradle
index fd2f713dd33cd..0fb98c8b9abff 100644
--- a/plugins/repository-gcs/build.gradle
+++ b/plugins/repository-gcs/build.gradle
@@ -67,7 +67,7 @@ dependencies {
api "com.google.auth:google-auth-library-oauth2-http:${versions.google_auth}"
api 'com.google.cloud:google-cloud-core:2.5.10'
- api 'com.google.cloud:google-cloud-core-http:2.21.0'
+ api 'com.google.cloud:google-cloud-core-http:2.21.1'
api 'com.google.cloud:google-cloud-storage:1.113.1'
api 'com.google.code.gson:gson:2.9.0'
diff --git a/plugins/repository-gcs/licenses/google-cloud-core-http-2.21.0.jar.sha1 b/plugins/repository-gcs/licenses/google-cloud-core-http-2.21.0.jar.sha1
deleted file mode 100644
index 2ef0a9bf9b33e..0000000000000
--- a/plugins/repository-gcs/licenses/google-cloud-core-http-2.21.0.jar.sha1
+++ /dev/null
@@ -1 +0,0 @@
-07da4710ccdbcfee253672c0b9e00e7370626c26
\ No newline at end of file
diff --git a/plugins/repository-gcs/licenses/google-cloud-core-http-2.21.1.jar.sha1 b/plugins/repository-gcs/licenses/google-cloud-core-http-2.21.1.jar.sha1
new file mode 100644
index 0000000000000..cc5e7a53098ac
--- /dev/null
+++ b/plugins/repository-gcs/licenses/google-cloud-core-http-2.21.1.jar.sha1
@@ -0,0 +1 @@
+88dd2b413dd06826c611e39e6e3259e069f02f66
\ No newline at end of file
diff --git a/plugins/repository-gcs/src/internalClusterTest/java/org/opensearch/repositories/gcs/GoogleCloudStorageBlobStoreRepositoryTests.java b/plugins/repository-gcs/src/internalClusterTest/java/org/opensearch/repositories/gcs/GoogleCloudStorageBlobStoreRepositoryTests.java
index f0f0fb7681c1a..92a2f99f4a441 100644
--- a/plugins/repository-gcs/src/internalClusterTest/java/org/opensearch/repositories/gcs/GoogleCloudStorageBlobStoreRepositoryTests.java
+++ b/plugins/repository-gcs/src/internalClusterTest/java/org/opensearch/repositories/gcs/GoogleCloudStorageBlobStoreRepositoryTests.java
@@ -55,8 +55,8 @@
import org.opensearch.common.regex.Regex;
import org.opensearch.common.settings.MockSecureSettings;
import org.opensearch.common.settings.Settings;
-import org.opensearch.common.unit.ByteSizeUnit;
-import org.opensearch.common.unit.ByteSizeValue;
+import org.opensearch.core.common.unit.ByteSizeUnit;
+import org.opensearch.core.common.unit.ByteSizeValue;
import org.opensearch.core.xcontent.NamedXContentRegistry;
import org.opensearch.env.Environment;
import org.opensearch.indices.recovery.RecoverySettings;
diff --git a/plugins/repository-gcs/src/main/java/org/opensearch/repositories/gcs/GoogleCloudStorageBlobStore.java b/plugins/repository-gcs/src/main/java/org/opensearch/repositories/gcs/GoogleCloudStorageBlobStore.java
index 226aec437fc0f..f5c20003ea7b6 100644
--- a/plugins/repository-gcs/src/main/java/org/opensearch/repositories/gcs/GoogleCloudStorageBlobStore.java
+++ b/plugins/repository-gcs/src/main/java/org/opensearch/repositories/gcs/GoogleCloudStorageBlobStore.java
@@ -55,8 +55,8 @@
import org.opensearch.common.blobstore.support.PlainBlobMetadata;
import org.opensearch.common.collect.MapBuilder;
import org.opensearch.common.io.Streams;
-import org.opensearch.common.unit.ByteSizeUnit;
-import org.opensearch.common.unit.ByteSizeValue;
+import org.opensearch.core.common.unit.ByteSizeUnit;
+import org.opensearch.core.common.unit.ByteSizeValue;
import java.io.ByteArrayInputStream;
import java.io.IOException;
diff --git a/plugins/repository-gcs/src/main/java/org/opensearch/repositories/gcs/GoogleCloudStorageRepository.java b/plugins/repository-gcs/src/main/java/org/opensearch/repositories/gcs/GoogleCloudStorageRepository.java
index a743ac72bdb8b..05d9739c00d50 100644
--- a/plugins/repository-gcs/src/main/java/org/opensearch/repositories/gcs/GoogleCloudStorageRepository.java
+++ b/plugins/repository-gcs/src/main/java/org/opensearch/repositories/gcs/GoogleCloudStorageRepository.java
@@ -38,8 +38,8 @@
import org.opensearch.cluster.service.ClusterService;
import org.opensearch.common.blobstore.BlobPath;
import org.opensearch.common.settings.Setting;
-import org.opensearch.common.unit.ByteSizeUnit;
-import org.opensearch.common.unit.ByteSizeValue;
+import org.opensearch.core.common.unit.ByteSizeUnit;
+import org.opensearch.core.common.unit.ByteSizeValue;
import org.opensearch.core.common.Strings;
import org.opensearch.core.xcontent.NamedXContentRegistry;
import org.opensearch.indices.recovery.RecoverySettings;
diff --git a/plugins/repository-gcs/src/test/java/org/opensearch/repositories/gcs/GoogleCloudStorageBlobContainerRetriesTests.java b/plugins/repository-gcs/src/test/java/org/opensearch/repositories/gcs/GoogleCloudStorageBlobContainerRetriesTests.java
index 488376d36cdc4..466344668d966 100644
--- a/plugins/repository-gcs/src/test/java/org/opensearch/repositories/gcs/GoogleCloudStorageBlobContainerRetriesTests.java
+++ b/plugins/repository-gcs/src/test/java/org/opensearch/repositories/gcs/GoogleCloudStorageBlobContainerRetriesTests.java
@@ -53,7 +53,7 @@
import org.opensearch.common.network.InetAddresses;
import org.opensearch.common.settings.MockSecureSettings;
import org.opensearch.common.settings.Settings;
-import org.opensearch.common.unit.ByteSizeValue;
+import org.opensearch.core.common.unit.ByteSizeValue;
import org.opensearch.common.unit.TimeValue;
import org.opensearch.common.util.concurrent.CountDown;
import org.opensearch.core.common.Strings;
diff --git a/plugins/repository-hdfs/build.gradle b/plugins/repository-hdfs/build.gradle
index 6626bfccc6662..1fdb3d2fb41e2 100644
--- a/plugins/repository-hdfs/build.gradle
+++ b/plugins/repository-hdfs/build.gradle
@@ -76,7 +76,7 @@ dependencies {
api 'org.apache.commons:commons-compress:1.23.0'
api 'org.apache.commons:commons-configuration2:2.9.0'
api 'commons-io:commons-io:2.13.0'
- api 'org.apache.commons:commons-lang3:3.12.0'
+ api 'org.apache.commons:commons-lang3:3.13.0'
implementation 'com.google.re2j:re2j:1.7'
api 'javax.servlet:servlet-api:2.5'
api "org.slf4j:slf4j-api:${versions.slf4j}"
diff --git a/plugins/repository-hdfs/licenses/commons-lang3-3.12.0.jar.sha1 b/plugins/repository-hdfs/licenses/commons-lang3-3.12.0.jar.sha1
deleted file mode 100644
index 9273d8c01aaba..0000000000000
--- a/plugins/repository-hdfs/licenses/commons-lang3-3.12.0.jar.sha1
+++ /dev/null
@@ -1 +0,0 @@
-c6842c86792ff03b9f1d1fe2aab8dc23aa6c6f0e
\ No newline at end of file
diff --git a/plugins/repository-hdfs/licenses/commons-lang3-3.13.0.jar.sha1 b/plugins/repository-hdfs/licenses/commons-lang3-3.13.0.jar.sha1
new file mode 100644
index 0000000000000..d0c2f2486ee1f
--- /dev/null
+++ b/plugins/repository-hdfs/licenses/commons-lang3-3.13.0.jar.sha1
@@ -0,0 +1 @@
+b7263237aa89c1f99b327197c41d0669707a462e
\ No newline at end of file
diff --git a/plugins/repository-hdfs/licenses/netty-all-4.1.94.Final.jar.sha1 b/plugins/repository-hdfs/licenses/netty-all-4.1.94.Final.jar.sha1
deleted file mode 100644
index 6766770f61e78..0000000000000
--- a/plugins/repository-hdfs/licenses/netty-all-4.1.94.Final.jar.sha1
+++ /dev/null
@@ -1 +0,0 @@
-2a7df0424eed81818157f22613f36b72487ceb34
\ No newline at end of file
diff --git a/plugins/repository-hdfs/licenses/netty-all-4.1.96.Final.jar.sha1 b/plugins/repository-hdfs/licenses/netty-all-4.1.96.Final.jar.sha1
new file mode 100644
index 0000000000000..32ced5451cfb6
--- /dev/null
+++ b/plugins/repository-hdfs/licenses/netty-all-4.1.96.Final.jar.sha1
@@ -0,0 +1 @@
+2145ec747511965e4a57099767654cf9083ce8a7
\ No newline at end of file
diff --git a/plugins/repository-hdfs/src/main/java/org/opensearch/repositories/hdfs/HdfsRepository.java b/plugins/repository-hdfs/src/main/java/org/opensearch/repositories/hdfs/HdfsRepository.java
index 88c58942e9bbf..10c3bc2d0364b 100644
--- a/plugins/repository-hdfs/src/main/java/org/opensearch/repositories/hdfs/HdfsRepository.java
+++ b/plugins/repository-hdfs/src/main/java/org/opensearch/repositories/hdfs/HdfsRepository.java
@@ -48,7 +48,7 @@
import org.opensearch.common.SuppressForbidden;
import org.opensearch.common.blobstore.BlobPath;
import org.opensearch.common.settings.Settings;
-import org.opensearch.common.unit.ByteSizeValue;
+import org.opensearch.core.common.unit.ByteSizeValue;
import org.opensearch.core.common.Strings;
import org.opensearch.core.xcontent.NamedXContentRegistry;
import org.opensearch.env.Environment;
diff --git a/plugins/repository-s3/licenses/netty-buffer-4.1.94.Final.jar.sha1 b/plugins/repository-s3/licenses/netty-buffer-4.1.94.Final.jar.sha1
deleted file mode 100644
index 05b1c2a4d614e..0000000000000
--- a/plugins/repository-s3/licenses/netty-buffer-4.1.94.Final.jar.sha1
+++ /dev/null
@@ -1 +0,0 @@
-eec248b26f16e888688e5bb37b7eeda76b78d2f7
\ No newline at end of file
diff --git a/plugins/repository-s3/licenses/netty-buffer-4.1.96.Final.jar.sha1 b/plugins/repository-s3/licenses/netty-buffer-4.1.96.Final.jar.sha1
new file mode 100644
index 0000000000000..7abdb33dc79a2
--- /dev/null
+++ b/plugins/repository-s3/licenses/netty-buffer-4.1.96.Final.jar.sha1
@@ -0,0 +1 @@
+4b80fffbe77485b457bf844289bf1801f61b9e91
\ No newline at end of file
diff --git a/plugins/repository-s3/licenses/netty-codec-4.1.94.Final.jar.sha1 b/plugins/repository-s3/licenses/netty-codec-4.1.94.Final.jar.sha1
deleted file mode 100644
index baa7e25f1ac49..0000000000000
--- a/plugins/repository-s3/licenses/netty-codec-4.1.94.Final.jar.sha1
+++ /dev/null
@@ -1 +0,0 @@
-c70ef20ca338558147887df60f46341bc47f6900
\ No newline at end of file
diff --git a/plugins/repository-s3/licenses/netty-codec-4.1.96.Final.jar.sha1 b/plugins/repository-s3/licenses/netty-codec-4.1.96.Final.jar.sha1
new file mode 100644
index 0000000000000..8fdb32be1de0b
--- /dev/null
+++ b/plugins/repository-s3/licenses/netty-codec-4.1.96.Final.jar.sha1
@@ -0,0 +1 @@
+9cfe430f8b14e7ba86969d8e1126aa0aae4d18f0
\ No newline at end of file
diff --git a/plugins/repository-s3/licenses/netty-codec-http-4.1.94.Final.jar.sha1 b/plugins/repository-s3/licenses/netty-codec-http-4.1.94.Final.jar.sha1
deleted file mode 100644
index 8c018be2565e5..0000000000000
--- a/plugins/repository-s3/licenses/netty-codec-http-4.1.94.Final.jar.sha1
+++ /dev/null
@@ -1 +0,0 @@
-9e5404764092c1f6305ad5719078f46ab228d587
\ No newline at end of file
diff --git a/plugins/repository-s3/licenses/netty-codec-http-4.1.96.Final.jar.sha1 b/plugins/repository-s3/licenses/netty-codec-http-4.1.96.Final.jar.sha1
new file mode 100644
index 0000000000000..dfb0cf39463e2
--- /dev/null
+++ b/plugins/repository-s3/licenses/netty-codec-http-4.1.96.Final.jar.sha1
@@ -0,0 +1 @@
+a4d0d95df5026965c454902ef3d6d84b81f89626
\ No newline at end of file
diff --git a/plugins/repository-s3/licenses/netty-codec-http2-4.1.94.Final.jar.sha1 b/plugins/repository-s3/licenses/netty-codec-http2-4.1.94.Final.jar.sha1
deleted file mode 100644
index e73026b412972..0000000000000
--- a/plugins/repository-s3/licenses/netty-codec-http2-4.1.94.Final.jar.sha1
+++ /dev/null
@@ -1 +0,0 @@
-f651595784d6cca4cbca6a8ad74c48fceed6cea8
\ No newline at end of file
diff --git a/plugins/repository-s3/licenses/netty-codec-http2-4.1.96.Final.jar.sha1 b/plugins/repository-s3/licenses/netty-codec-http2-4.1.96.Final.jar.sha1
new file mode 100644
index 0000000000000..2fc787ee65197
--- /dev/null
+++ b/plugins/repository-s3/licenses/netty-codec-http2-4.1.96.Final.jar.sha1
@@ -0,0 +1 @@
+cc8baf4ff67c1bcc0cde60bc5c2bb9447d92d9e6
\ No newline at end of file
diff --git a/plugins/repository-s3/licenses/netty-common-4.1.94.Final.jar.sha1 b/plugins/repository-s3/licenses/netty-common-4.1.94.Final.jar.sha1
deleted file mode 100644
index b787338551ede..0000000000000
--- a/plugins/repository-s3/licenses/netty-common-4.1.94.Final.jar.sha1
+++ /dev/null
@@ -1 +0,0 @@
-ad4ecf779ebc794cd351f57792f56ea01387b868
\ No newline at end of file
diff --git a/plugins/repository-s3/licenses/netty-common-4.1.96.Final.jar.sha1 b/plugins/repository-s3/licenses/netty-common-4.1.96.Final.jar.sha1
new file mode 100644
index 0000000000000..85b5f52749671
--- /dev/null
+++ b/plugins/repository-s3/licenses/netty-common-4.1.96.Final.jar.sha1
@@ -0,0 +1 @@
+d10c167623cbc471753f950846df241d1021655c
\ No newline at end of file
diff --git a/plugins/repository-s3/licenses/netty-handler-4.1.94.Final.jar.sha1 b/plugins/repository-s3/licenses/netty-handler-4.1.94.Final.jar.sha1
deleted file mode 100644
index b08e85ba7adf8..0000000000000
--- a/plugins/repository-s3/licenses/netty-handler-4.1.94.Final.jar.sha1
+++ /dev/null
@@ -1 +0,0 @@
-cd9121ce24d6d3f2898946d04b0ef3ec548b00b4
\ No newline at end of file
diff --git a/plugins/repository-s3/licenses/netty-handler-4.1.96.Final.jar.sha1 b/plugins/repository-s3/licenses/netty-handler-4.1.96.Final.jar.sha1
new file mode 100644
index 0000000000000..fe4f48c68e78b
--- /dev/null
+++ b/plugins/repository-s3/licenses/netty-handler-4.1.96.Final.jar.sha1
@@ -0,0 +1 @@
+7840d7523d709e02961b647546f9d9dde1699306
\ No newline at end of file
diff --git a/plugins/repository-s3/licenses/netty-resolver-4.1.94.Final.jar.sha1 b/plugins/repository-s3/licenses/netty-resolver-4.1.94.Final.jar.sha1
deleted file mode 100644
index 4c9e4dda2b852..0000000000000
--- a/plugins/repository-s3/licenses/netty-resolver-4.1.94.Final.jar.sha1
+++ /dev/null
@@ -1 +0,0 @@
-e96f649e8e9dcb29a1f8e95328b99c9eb6cf76c2
\ No newline at end of file
diff --git a/plugins/repository-s3/licenses/netty-resolver-4.1.96.Final.jar.sha1 b/plugins/repository-s3/licenses/netty-resolver-4.1.96.Final.jar.sha1
new file mode 100644
index 0000000000000..9e93f013226cd
--- /dev/null
+++ b/plugins/repository-s3/licenses/netty-resolver-4.1.96.Final.jar.sha1
@@ -0,0 +1 @@
+0e51db5568a881e0f9b013b35617c597dc32f130
\ No newline at end of file
diff --git a/plugins/repository-s3/licenses/netty-transport-4.1.94.Final.jar.sha1 b/plugins/repository-s3/licenses/netty-transport-4.1.94.Final.jar.sha1
deleted file mode 100644
index ed7760b8e15d1..0000000000000
--- a/plugins/repository-s3/licenses/netty-transport-4.1.94.Final.jar.sha1
+++ /dev/null
@@ -1 +0,0 @@
-ec783a737f96991a87b1d5794e2f9eb2024d708a
\ No newline at end of file
diff --git a/plugins/repository-s3/licenses/netty-transport-4.1.96.Final.jar.sha1 b/plugins/repository-s3/licenses/netty-transport-4.1.96.Final.jar.sha1
new file mode 100644
index 0000000000000..707285d3d29c3
--- /dev/null
+++ b/plugins/repository-s3/licenses/netty-transport-4.1.96.Final.jar.sha1
@@ -0,0 +1 @@
+dbd15ca244be28e1a98ed29b9d755edbfa737e02
\ No newline at end of file
diff --git a/plugins/repository-s3/licenses/netty-transport-classes-epoll-4.1.94.Final.jar.sha1 b/plugins/repository-s3/licenses/netty-transport-classes-epoll-4.1.94.Final.jar.sha1
deleted file mode 100644
index 72a392ea2917d..0000000000000
--- a/plugins/repository-s3/licenses/netty-transport-classes-epoll-4.1.94.Final.jar.sha1
+++ /dev/null
@@ -1 +0,0 @@
-240e36cd5c2ffaf655913f8857f2d58b26394679
\ No newline at end of file
diff --git a/plugins/repository-s3/licenses/netty-transport-classes-epoll-4.1.96.Final.jar.sha1 b/plugins/repository-s3/licenses/netty-transport-classes-epoll-4.1.96.Final.jar.sha1
new file mode 100644
index 0000000000000..58564d9da4b27
--- /dev/null
+++ b/plugins/repository-s3/licenses/netty-transport-classes-epoll-4.1.96.Final.jar.sha1
@@ -0,0 +1 @@
+b0369501645f6e71f89ff7f77b5c5f52510a2e31
\ No newline at end of file
diff --git a/plugins/repository-s3/licenses/netty-transport-native-unix-common-4.1.94.Final.jar.sha1 b/plugins/repository-s3/licenses/netty-transport-native-unix-common-4.1.94.Final.jar.sha1
deleted file mode 100644
index 43bc960a347a1..0000000000000
--- a/plugins/repository-s3/licenses/netty-transport-native-unix-common-4.1.94.Final.jar.sha1
+++ /dev/null
@@ -1 +0,0 @@
-3fa5f9d04b6b782d869d6e0657d896eeadca5866
\ No newline at end of file
diff --git a/plugins/repository-s3/licenses/netty-transport-native-unix-common-4.1.96.Final.jar.sha1 b/plugins/repository-s3/licenses/netty-transport-native-unix-common-4.1.96.Final.jar.sha1
new file mode 100644
index 0000000000000..e911c47d5ab1a
--- /dev/null
+++ b/plugins/repository-s3/licenses/netty-transport-native-unix-common-4.1.96.Final.jar.sha1
@@ -0,0 +1 @@
+daf8578cade63a01525ee9d70371fa78e6e91094
\ No newline at end of file
diff --git a/plugins/repository-s3/src/internalClusterTest/java/org/opensearch/repositories/s3/S3BlobStoreRepositoryTests.java b/plugins/repository-s3/src/internalClusterTest/java/org/opensearch/repositories/s3/S3BlobStoreRepositoryTests.java
index 3070c654a96ee..805af9874b552 100644
--- a/plugins/repository-s3/src/internalClusterTest/java/org/opensearch/repositories/s3/S3BlobStoreRepositoryTests.java
+++ b/plugins/repository-s3/src/internalClusterTest/java/org/opensearch/repositories/s3/S3BlobStoreRepositoryTests.java
@@ -45,7 +45,7 @@
import org.opensearch.common.settings.MockSecureSettings;
import org.opensearch.common.settings.Setting;
import org.opensearch.common.settings.Settings;
-import org.opensearch.common.unit.ByteSizeUnit;
+import org.opensearch.core.common.unit.ByteSizeUnit;
import org.opensearch.core.xcontent.NamedXContentRegistry;
import org.opensearch.indices.recovery.RecoverySettings;
import org.opensearch.plugins.Plugin;
diff --git a/plugins/repository-s3/src/main/java/org/opensearch/repositories/s3/S3BlobContainer.java b/plugins/repository-s3/src/main/java/org/opensearch/repositories/s3/S3BlobContainer.java
index 81a902a6992d8..1a644934245cb 100644
--- a/plugins/repository-s3/src/main/java/org/opensearch/repositories/s3/S3BlobContainer.java
+++ b/plugins/repository-s3/src/main/java/org/opensearch/repositories/s3/S3BlobContainer.java
@@ -51,8 +51,8 @@
import org.opensearch.common.blobstore.support.AbstractBlobContainer;
import org.opensearch.common.blobstore.support.PlainBlobMetadata;
import org.opensearch.common.collect.Tuple;
-import org.opensearch.common.unit.ByteSizeUnit;
-import org.opensearch.common.unit.ByteSizeValue;
+import org.opensearch.core.common.unit.ByteSizeUnit;
+import org.opensearch.core.common.unit.ByteSizeValue;
import software.amazon.awssdk.core.exception.SdkException;
import software.amazon.awssdk.core.sync.RequestBody;
import software.amazon.awssdk.services.s3.model.AbortMultipartUploadRequest;
diff --git a/plugins/repository-s3/src/main/java/org/opensearch/repositories/s3/S3BlobStore.java b/plugins/repository-s3/src/main/java/org/opensearch/repositories/s3/S3BlobStore.java
index 30040e182cbc9..ed1ebf1f531f8 100644
--- a/plugins/repository-s3/src/main/java/org/opensearch/repositories/s3/S3BlobStore.java
+++ b/plugins/repository-s3/src/main/java/org/opensearch/repositories/s3/S3BlobStore.java
@@ -39,7 +39,7 @@
import org.opensearch.common.blobstore.BlobPath;
import org.opensearch.common.blobstore.BlobStore;
import org.opensearch.common.blobstore.BlobStoreException;
-import org.opensearch.common.unit.ByteSizeValue;
+import org.opensearch.core.common.unit.ByteSizeValue;
import software.amazon.awssdk.services.s3.model.ObjectCannedACL;
import software.amazon.awssdk.services.s3.model.StorageClass;
import org.opensearch.repositories.s3.async.AsyncExecutorContainer;
diff --git a/plugins/repository-s3/src/main/java/org/opensearch/repositories/s3/S3Repository.java b/plugins/repository-s3/src/main/java/org/opensearch/repositories/s3/S3Repository.java
index d42bfc0be7e4f..f98b775d9ce4b 100644
--- a/plugins/repository-s3/src/main/java/org/opensearch/repositories/s3/S3Repository.java
+++ b/plugins/repository-s3/src/main/java/org/opensearch/repositories/s3/S3Repository.java
@@ -46,8 +46,8 @@
import org.opensearch.common.settings.SecureSetting;
import org.opensearch.core.common.settings.SecureString;
import org.opensearch.common.settings.Setting;
-import org.opensearch.common.unit.ByteSizeUnit;
-import org.opensearch.common.unit.ByteSizeValue;
+import org.opensearch.core.common.unit.ByteSizeUnit;
+import org.opensearch.core.common.unit.ByteSizeValue;
import org.opensearch.core.common.Strings;
import org.opensearch.core.xcontent.NamedXContentRegistry;
import org.opensearch.indices.recovery.RecoverySettings;
diff --git a/plugins/repository-s3/src/main/java/org/opensearch/repositories/s3/async/AsyncTransferManager.java b/plugins/repository-s3/src/main/java/org/opensearch/repositories/s3/async/AsyncTransferManager.java
index 5b43ae84c51dc..cb6851652f208 100644
--- a/plugins/repository-s3/src/main/java/org/opensearch/repositories/s3/async/AsyncTransferManager.java
+++ b/plugins/repository-s3/src/main/java/org/opensearch/repositories/s3/async/AsyncTransferManager.java
@@ -17,7 +17,7 @@
import org.opensearch.common.blobstore.exception.CorruptFileException;
import org.opensearch.common.blobstore.stream.write.WritePriority;
import org.opensearch.common.io.InputStreamContainer;
-import org.opensearch.common.unit.ByteSizeUnit;
+import org.opensearch.core.common.unit.ByteSizeUnit;
import org.opensearch.common.util.ByteUtils;
import org.opensearch.repositories.s3.io.CheckedContainer;
import org.opensearch.repositories.s3.SocketAccess;
diff --git a/plugins/repository-s3/src/test/java/org/opensearch/repositories/s3/S3BlobContainerMockClientTests.java b/plugins/repository-s3/src/test/java/org/opensearch/repositories/s3/S3BlobContainerMockClientTests.java
index 10137f0475177..8bb446fff0b61 100644
--- a/plugins/repository-s3/src/test/java/org/opensearch/repositories/s3/S3BlobContainerMockClientTests.java
+++ b/plugins/repository-s3/src/test/java/org/opensearch/repositories/s3/S3BlobContainerMockClientTests.java
@@ -24,7 +24,7 @@
import org.opensearch.common.io.InputStreamContainer;
import org.opensearch.common.lucene.store.ByteArrayIndexInput;
import org.opensearch.common.settings.Settings;
-import org.opensearch.common.unit.ByteSizeValue;
+import org.opensearch.core.common.unit.ByteSizeValue;
import org.opensearch.common.util.io.IOUtils;
import org.opensearch.repositories.s3.async.AsyncExecutorContainer;
import org.opensearch.repositories.s3.async.AsyncTransferManager;
diff --git a/plugins/repository-s3/src/test/java/org/opensearch/repositories/s3/S3BlobContainerRetriesTests.java b/plugins/repository-s3/src/test/java/org/opensearch/repositories/s3/S3BlobContainerRetriesTests.java
index 1a1fb123aa5ea..016be07e5d533 100644
--- a/plugins/repository-s3/src/test/java/org/opensearch/repositories/s3/S3BlobContainerRetriesTests.java
+++ b/plugins/repository-s3/src/test/java/org/opensearch/repositories/s3/S3BlobContainerRetriesTests.java
@@ -57,8 +57,8 @@
import org.opensearch.common.network.InetAddresses;
import org.opensearch.common.settings.MockSecureSettings;
import org.opensearch.common.settings.Settings;
-import org.opensearch.common.unit.ByteSizeUnit;
-import org.opensearch.common.unit.ByteSizeValue;
+import org.opensearch.core.common.unit.ByteSizeUnit;
+import org.opensearch.core.common.unit.ByteSizeValue;
import org.opensearch.common.unit.TimeValue;
import org.opensearch.common.util.concurrent.CountDown;
import org.opensearch.common.util.io.IOUtils;
diff --git a/plugins/repository-s3/src/test/java/org/opensearch/repositories/s3/S3BlobStoreContainerTests.java b/plugins/repository-s3/src/test/java/org/opensearch/repositories/s3/S3BlobStoreContainerTests.java
index a2a7ca8d8bdd5..f88b3616d2f0a 100644
--- a/plugins/repository-s3/src/test/java/org/opensearch/repositories/s3/S3BlobStoreContainerTests.java
+++ b/plugins/repository-s3/src/test/java/org/opensearch/repositories/s3/S3BlobStoreContainerTests.java
@@ -40,7 +40,7 @@
import org.opensearch.common.blobstore.BlobStoreException;
import org.opensearch.common.blobstore.DeleteResult;
import org.opensearch.common.collect.Tuple;
-import org.opensearch.common.unit.ByteSizeUnit;
+import org.opensearch.core.common.unit.ByteSizeUnit;
import org.opensearch.test.OpenSearchTestCase;
import software.amazon.awssdk.core.exception.SdkException;
import software.amazon.awssdk.core.sync.RequestBody;
diff --git a/plugins/repository-s3/src/test/java/org/opensearch/repositories/s3/S3RepositoryTests.java b/plugins/repository-s3/src/test/java/org/opensearch/repositories/s3/S3RepositoryTests.java
index 84d56c7ae2854..e5fd9e5caab9c 100644
--- a/plugins/repository-s3/src/test/java/org/opensearch/repositories/s3/S3RepositoryTests.java
+++ b/plugins/repository-s3/src/test/java/org/opensearch/repositories/s3/S3RepositoryTests.java
@@ -36,8 +36,8 @@
import org.opensearch.cluster.metadata.RepositoryMetadata;
import org.opensearch.common.settings.ClusterSettings;
import org.opensearch.common.settings.Settings;
-import org.opensearch.common.unit.ByteSizeUnit;
-import org.opensearch.common.unit.ByteSizeValue;
+import org.opensearch.core.common.unit.ByteSizeUnit;
+import org.opensearch.core.common.unit.ByteSizeValue;
import org.opensearch.core.xcontent.NamedXContentRegistry;
import org.opensearch.indices.recovery.RecoverySettings;
import org.opensearch.repositories.RepositoryException;
diff --git a/plugins/repository-s3/src/test/java/org/opensearch/repositories/s3/async/AsyncTransferManagerTests.java b/plugins/repository-s3/src/test/java/org/opensearch/repositories/s3/async/AsyncTransferManagerTests.java
index 596291a1d94fb..2fc10c65eaa65 100644
--- a/plugins/repository-s3/src/test/java/org/opensearch/repositories/s3/async/AsyncTransferManagerTests.java
+++ b/plugins/repository-s3/src/test/java/org/opensearch/repositories/s3/async/AsyncTransferManagerTests.java
@@ -14,7 +14,7 @@
import org.opensearch.common.blobstore.exception.CorruptFileException;
import org.opensearch.common.blobstore.stream.write.WritePriority;
import org.opensearch.common.io.InputStreamContainer;
-import org.opensearch.common.unit.ByteSizeUnit;
+import org.opensearch.core.common.unit.ByteSizeUnit;
import org.opensearch.repositories.blobstore.ZeroInputStream;
import org.opensearch.test.OpenSearchTestCase;
import software.amazon.awssdk.awscore.exception.AwsErrorDetails;
diff --git a/plugins/telemetry-otel/build.gradle b/plugins/telemetry-otel/build.gradle
index 2c275388cce38..04fa9df9a47d0 100644
--- a/plugins/telemetry-otel/build.gradle
+++ b/plugins/telemetry-otel/build.gradle
@@ -11,6 +11,8 @@ import org.opensearch.gradle.Architecture
import org.opensearch.gradle.OS
import org.opensearch.gradle.info.BuildParams
+apply plugin: 'opensearch.internal-cluster-test'
+
opensearchplugin {
description 'Opentelemetry based telemetry implementation.'
classname 'org.opensearch.telemetry.OTelTelemetryPlugin'
@@ -29,6 +31,7 @@ dependencies {
api "io.opentelemetry:opentelemetry-semconv:${versions.opentelemetry}-alpha"
api "io.opentelemetry:opentelemetry-sdk-logs:${versions.opentelemetry}-alpha"
api "io.opentelemetry:opentelemetry-api-logs:${versions.opentelemetry}-alpha"
+ testImplementation "io.opentelemetry:opentelemetry-sdk-testing:${versions.opentelemetry}"
}
diff --git a/plugins/telemetry-otel/src/internalClusterTest/java/org/opensearch/telemetry/tracing/InMemorySingletonSpanExporter.java b/plugins/telemetry-otel/src/internalClusterTest/java/org/opensearch/telemetry/tracing/InMemorySingletonSpanExporter.java
new file mode 100644
index 0000000000000..72d742a804044
--- /dev/null
+++ b/plugins/telemetry-otel/src/internalClusterTest/java/org/opensearch/telemetry/tracing/InMemorySingletonSpanExporter.java
@@ -0,0 +1,70 @@
+/*
+ * SPDX-License-Identifier: Apache-2.0
+ *
+ * The OpenSearch Contributors require contributions made to
+ * this file be licensed under the Apache-2.0 license or a
+ * compatible open source license.
+ */
+
+package org.opensearch.telemetry.tracing;
+
+import io.opentelemetry.sdk.common.CompletableResultCode;
+import io.opentelemetry.sdk.testing.exporter.InMemorySpanExporter;
+import io.opentelemetry.sdk.trace.data.SpanData;
+import io.opentelemetry.sdk.trace.export.SpanExporter;
+import org.opensearch.test.telemetry.tracing.MockSpanData;
+
+import java.util.Collection;
+import java.util.List;
+import java.util.stream.Collectors;
+
+public class InMemorySingletonSpanExporter implements SpanExporter {
+
+ private static final InMemorySingletonSpanExporter INSTANCE = new InMemorySingletonSpanExporter(InMemorySpanExporter.create());
+
+ private static InMemorySpanExporter delegate;
+
+ public static InMemorySingletonSpanExporter create() {
+ return INSTANCE;
+ }
+
+ private InMemorySingletonSpanExporter(InMemorySpanExporter delegate) {
+ InMemorySingletonSpanExporter.delegate = delegate;
+ }
+
+ @Override
+ public CompletableResultCode export(Collection<SpanData> spans) {
+ return delegate.export(spans);
+ }
+
+ @Override
+ public CompletableResultCode flush() {
+ return delegate.flush();
+ }
+
+ @Override
+ public CompletableResultCode shutdown() {
+ return delegate.shutdown();
+ }
+
+ public List<MockSpanData> getFinishedSpanItems() {
+ return convertSpanDataListToMockSpanDataList(delegate.getFinishedSpanItems());
+ }
+
+ private List<MockSpanData> convertSpanDataListToMockSpanDataList(List<SpanData> spanDataList) {
+ List<MockSpanData> mockSpanDataList = spanDataList.stream()
+ .map(
+ spanData -> new MockSpanData(
+ spanData.getSpanId(),
+ spanData.getParentSpanId(),
+ spanData.getTraceId(),
+ spanData.getStartEpochNanos(),
+ spanData.getEndEpochNanos(),
+ spanData.hasEnded(),
+ spanData.getName()
+ )
+ )
+ .collect(Collectors.toList());
+ return mockSpanDataList;
+ }
+}
diff --git a/plugins/telemetry-otel/src/internalClusterTest/java/org/opensearch/telemetry/tracing/IntegrationTestOTelTelemetryPlugin.java b/plugins/telemetry-otel/src/internalClusterTest/java/org/opensearch/telemetry/tracing/IntegrationTestOTelTelemetryPlugin.java
new file mode 100644
index 0000000000000..ca3cfeecfd613
--- /dev/null
+++ b/plugins/telemetry-otel/src/internalClusterTest/java/org/opensearch/telemetry/tracing/IntegrationTestOTelTelemetryPlugin.java
@@ -0,0 +1,40 @@
+/*
+ * SPDX-License-Identifier: Apache-2.0
+ *
+ * The OpenSearch Contributors require contributions made to
+ * this file be licensed under the Apache-2.0 license or a
+ * compatible open source license.
+ */
+
+package org.opensearch.telemetry.tracing;
+
+import io.opentelemetry.api.GlobalOpenTelemetry;
+import org.opensearch.common.settings.Settings;
+import org.opensearch.telemetry.OTelTelemetryPlugin;
+import org.opensearch.telemetry.Telemetry;
+import org.opensearch.telemetry.TelemetrySettings;
+
+import java.util.Optional;
+
+/**
+ * Telemetry plugin used for Integration tests.
+*/
+public class IntegrationTestOTelTelemetryPlugin extends OTelTelemetryPlugin {
+ /**
+ * Creates IntegrationTestOTelTelemetryPlugin
+ * @param settings cluster settings
+ */
+ public IntegrationTestOTelTelemetryPlugin(Settings settings) {
+ super(settings);
+ }
+
+ /**
+ * This method overrides getTelemetry() method in OTel plugin class, so we create only one instance of global OpenTelemetry
+ * resetForTest() will set OpenTelemetry to null again.
+ * @param settings cluster settings
+ */
+ public Optional<Telemetry> getTelemetry(TelemetrySettings settings) {
+ GlobalOpenTelemetry.resetForTest();
+ return super.getTelemetry(settings);
+ }
+}
diff --git a/plugins/telemetry-otel/src/internalClusterTest/java/org/opensearch/telemetry/tracing/TelemetryTracerDisabledSanityIT.java b/plugins/telemetry-otel/src/internalClusterTest/java/org/opensearch/telemetry/tracing/TelemetryTracerDisabledSanityIT.java
new file mode 100644
index 0000000000000..476a5a9cabdc7
--- /dev/null
+++ b/plugins/telemetry-otel/src/internalClusterTest/java/org/opensearch/telemetry/tracing/TelemetryTracerDisabledSanityIT.java
@@ -0,0 +1,77 @@
+/*
+ * SPDX-License-Identifier: Apache-2.0
+ *
+ * The OpenSearch Contributors require contributions made to
+ * this file be licensed under the Apache-2.0 license or a
+ * compatible open source license.
+ */
+
+package org.opensearch.telemetry.tracing;
+
+import org.opensearch.client.Client;
+import org.opensearch.common.settings.Settings;
+import org.opensearch.common.unit.TimeValue;
+import org.opensearch.plugins.Plugin;
+import org.opensearch.telemetry.OTelTelemetrySettings;
+import org.opensearch.telemetry.TelemetrySettings;
+import org.opensearch.test.OpenSearchIntegTestCase;
+
+import java.util.Arrays;
+import java.util.Collection;
+
+import static org.opensearch.index.query.QueryBuilders.queryStringQuery;
+
+@OpenSearchIntegTestCase.ClusterScope(scope = OpenSearchIntegTestCase.Scope.TEST, supportsDedicatedMasters = false, minNumDataNodes = 2)
+public class TelemetryTracerDisabledSanityIT extends OpenSearchIntegTestCase {
+
+ @Override
+ protected Settings nodeSettings(int nodeOrdinal) {
+ return Settings.builder()
+ .put(super.nodeSettings(nodeOrdinal))
+ .put(
+ OTelTelemetrySettings.OTEL_TRACER_SPAN_EXPORTER_CLASS_SETTING.getKey(),
+ "org.opensearch.telemetry.tracing.InMemorySingletonSpanExporter"
+ )
+ .put(OTelTelemetrySettings.TRACER_EXPORTER_DELAY_SETTING.getKey(), TimeValue.timeValueSeconds(1))
+ .build();
+ }
+
+ @Override
+ protected Collection<Class<? extends Plugin>> nodePlugins() {
+ return Arrays.asList(IntegrationTestOTelTelemetryPlugin.class);
+ }
+
+ @Override
+ protected boolean addMockTelemetryPlugin() {
+ return false;
+ }
+
+ public void testSanityCheckWhenTracingDisabled() throws Exception {
+ Client client = client();
+ // DISABLE TRACING
+ client.admin()
+ .cluster()
+ .prepareUpdateSettings()
+ .setTransientSettings(Settings.builder().put(TelemetrySettings.TRACER_ENABLED_SETTING.getKey(), false))
+ .get();
+
+ // Create Index and ingest data
+ String indexName = "test-index-11";
+ Settings basicSettings = Settings.builder().put("number_of_shards", 3).put("number_of_replicas", 1).build();
+ createIndex(indexName, basicSettings);
+ indexRandom(true, client.prepareIndex(indexName).setId("1").setSource("field1", "the fox jumps in the well"));
+
+ ensureGreen();
+ refresh();
+
+ // Make the search call;
+ client.prepareSearch().setQuery(queryStringQuery("fox")).get();
+
+ // Sleep for about 3s to wait until the traces are published (the exporter delay is 1s)
+ Thread.sleep(3000);
+
+ InMemorySingletonSpanExporter exporter = InMemorySingletonSpanExporter.create();
+ assertTrue(exporter.getFinishedSpanItems().isEmpty());
+ }
+
+}
diff --git a/plugins/telemetry-otel/src/internalClusterTest/java/org/opensearch/telemetry/tracing/TelemetryTracerEnabledSanityIT.java b/plugins/telemetry-otel/src/internalClusterTest/java/org/opensearch/telemetry/tracing/TelemetryTracerEnabledSanityIT.java
new file mode 100644
index 0000000000000..d95f289f7f631
--- /dev/null
+++ b/plugins/telemetry-otel/src/internalClusterTest/java/org/opensearch/telemetry/tracing/TelemetryTracerEnabledSanityIT.java
@@ -0,0 +1,95 @@
+/*
+ * SPDX-License-Identifier: Apache-2.0
+ *
+ * The OpenSearch Contributors require contributions made to
+ * this file be licensed under the Apache-2.0 license or a
+ * compatible open source license.
+ */
+
+package org.opensearch.telemetry.tracing;
+
+import org.opensearch.common.settings.Settings;
+import org.opensearch.common.unit.TimeValue;
+import org.opensearch.telemetry.TelemetrySettings;
+import org.opensearch.test.OpenSearchIntegTestCase;
+import org.opensearch.telemetry.OTelTelemetrySettings;
+import org.opensearch.plugins.Plugin;
+import org.opensearch.client.Client;
+import org.opensearch.test.telemetry.tracing.TelemetryValidators;
+import org.opensearch.test.telemetry.tracing.validators.AllSpansAreEndedProperly;
+import org.opensearch.test.telemetry.tracing.validators.AllSpansHaveUniqueId;
+import org.opensearch.test.telemetry.tracing.validators.NumberOfTraceIDsEqualToRequests;
+import org.opensearch.test.telemetry.tracing.validators.TotalRootSpansEqualToRequests;
+
+import java.util.Arrays;
+import java.util.Collection;
+
+import static org.opensearch.index.query.QueryBuilders.queryStringQuery;
+
+@OpenSearchIntegTestCase.ClusterScope(scope = OpenSearchIntegTestCase.Scope.TEST, minNumDataNodes = 2)
+public class TelemetryTracerEnabledSanityIT extends OpenSearchIntegTestCase {
+
+ @Override
+ protected Settings nodeSettings(int nodeOrdinal) {
+ return Settings.builder()
+ .put(super.nodeSettings(nodeOrdinal))
+ .put(
+ OTelTelemetrySettings.OTEL_TRACER_SPAN_EXPORTER_CLASS_SETTING.getKey(),
+ "org.opensearch.telemetry.tracing.InMemorySingletonSpanExporter"
+ )
+ .put(OTelTelemetrySettings.TRACER_EXPORTER_DELAY_SETTING.getKey(), TimeValue.timeValueSeconds(1))
+ .build();
+ }
+
+ @Override
+ protected Collection<Class<? extends Plugin>> nodePlugins() {
+ return Arrays.asList(IntegrationTestOTelTelemetryPlugin.class);
+ }
+
+ @Override
+ protected boolean addMockTelemetryPlugin() {
+ return false;
+ }
+
+ public void testSanityChecksWhenTracingEnabled() throws Exception {
+ Client client = client();
+ // ENABLE TRACING
+ client.admin()
+ .cluster()
+ .prepareUpdateSettings()
+ .setTransientSettings(Settings.builder().put(TelemetrySettings.TRACER_ENABLED_SETTING.getKey(), true))
+ .get();
+
+ // Create Index and ingest data
+ String indexName = "test-index-11";
+ Settings basicSettings = Settings.builder().put("number_of_shards", 3).put("number_of_replicas", 0).build();
+ createIndex(indexName, basicSettings);
+ indexRandom(true, client.prepareIndex(indexName).setId("1").setSource("field1", "the fox jumps in the well"));
+ indexRandom(true, client.prepareIndex(indexName).setId("1").setSource("field2", "another fox did the same."));
+
+ ensureGreen();
+ refresh();
+
+ // Make the search calls;
+ client.prepareSearch().setQuery(queryStringQuery("fox")).get();
+ client.prepareSearch().setQuery(queryStringQuery("jumps")).get();
+
+ // Sleep for about 3s to wait until the traces are published (the exporter delay is 1s).
+ Thread.sleep(3000);
+
+ TelemetryValidators validators = new TelemetryValidators(
+ Arrays.asList(
+ new AllSpansAreEndedProperly(),
+ new AllSpansHaveUniqueId(),
+ new NumberOfTraceIDsEqualToRequests(),
+ new TotalRootSpansEqualToRequests()
+ )
+ );
+
+ InMemorySingletonSpanExporter exporter = InMemorySingletonSpanExporter.create();
+ if (!exporter.getFinishedSpanItems().isEmpty()) {
+ validators.validate(exporter.getFinishedSpanItems(), 2);
+ }
+ }
+
+}
diff --git a/plugins/transport-nio/licenses/netty-buffer-4.1.94.Final.jar.sha1 b/plugins/transport-nio/licenses/netty-buffer-4.1.94.Final.jar.sha1
deleted file mode 100644
index 05b1c2a4d614e..0000000000000
--- a/plugins/transport-nio/licenses/netty-buffer-4.1.94.Final.jar.sha1
+++ /dev/null
@@ -1 +0,0 @@
-eec248b26f16e888688e5bb37b7eeda76b78d2f7
\ No newline at end of file
diff --git a/plugins/transport-nio/licenses/netty-buffer-4.1.96.Final.jar.sha1 b/plugins/transport-nio/licenses/netty-buffer-4.1.96.Final.jar.sha1
new file mode 100644
index 0000000000000..7abdb33dc79a2
--- /dev/null
+++ b/plugins/transport-nio/licenses/netty-buffer-4.1.96.Final.jar.sha1
@@ -0,0 +1 @@
+4b80fffbe77485b457bf844289bf1801f61b9e91
\ No newline at end of file
diff --git a/plugins/transport-nio/licenses/netty-codec-4.1.94.Final.jar.sha1 b/plugins/transport-nio/licenses/netty-codec-4.1.94.Final.jar.sha1
deleted file mode 100644
index baa7e25f1ac49..0000000000000
--- a/plugins/transport-nio/licenses/netty-codec-4.1.94.Final.jar.sha1
+++ /dev/null
@@ -1 +0,0 @@
-c70ef20ca338558147887df60f46341bc47f6900
\ No newline at end of file
diff --git a/plugins/transport-nio/licenses/netty-codec-4.1.96.Final.jar.sha1 b/plugins/transport-nio/licenses/netty-codec-4.1.96.Final.jar.sha1
new file mode 100644
index 0000000000000..8fdb32be1de0b
--- /dev/null
+++ b/plugins/transport-nio/licenses/netty-codec-4.1.96.Final.jar.sha1
@@ -0,0 +1 @@
+9cfe430f8b14e7ba86969d8e1126aa0aae4d18f0
\ No newline at end of file
diff --git a/plugins/transport-nio/licenses/netty-codec-http-4.1.94.Final.jar.sha1 b/plugins/transport-nio/licenses/netty-codec-http-4.1.94.Final.jar.sha1
deleted file mode 100644
index 8c018be2565e5..0000000000000
--- a/plugins/transport-nio/licenses/netty-codec-http-4.1.94.Final.jar.sha1
+++ /dev/null
@@ -1 +0,0 @@
-9e5404764092c1f6305ad5719078f46ab228d587
\ No newline at end of file
diff --git a/plugins/transport-nio/licenses/netty-codec-http-4.1.96.Final.jar.sha1 b/plugins/transport-nio/licenses/netty-codec-http-4.1.96.Final.jar.sha1
new file mode 100644
index 0000000000000..dfb0cf39463e2
--- /dev/null
+++ b/plugins/transport-nio/licenses/netty-codec-http-4.1.96.Final.jar.sha1
@@ -0,0 +1 @@
+a4d0d95df5026965c454902ef3d6d84b81f89626
\ No newline at end of file
diff --git a/plugins/transport-nio/licenses/netty-common-4.1.94.Final.jar.sha1 b/plugins/transport-nio/licenses/netty-common-4.1.94.Final.jar.sha1
deleted file mode 100644
index b787338551ede..0000000000000
--- a/plugins/transport-nio/licenses/netty-common-4.1.94.Final.jar.sha1
+++ /dev/null
@@ -1 +0,0 @@
-ad4ecf779ebc794cd351f57792f56ea01387b868
\ No newline at end of file
diff --git a/plugins/transport-nio/licenses/netty-common-4.1.96.Final.jar.sha1 b/plugins/transport-nio/licenses/netty-common-4.1.96.Final.jar.sha1
new file mode 100644
index 0000000000000..85b5f52749671
--- /dev/null
+++ b/plugins/transport-nio/licenses/netty-common-4.1.96.Final.jar.sha1
@@ -0,0 +1 @@
+d10c167623cbc471753f950846df241d1021655c
\ No newline at end of file
diff --git a/plugins/transport-nio/licenses/netty-handler-4.1.94.Final.jar.sha1 b/plugins/transport-nio/licenses/netty-handler-4.1.94.Final.jar.sha1
deleted file mode 100644
index b08e85ba7adf8..0000000000000
--- a/plugins/transport-nio/licenses/netty-handler-4.1.94.Final.jar.sha1
+++ /dev/null
@@ -1 +0,0 @@
-cd9121ce24d6d3f2898946d04b0ef3ec548b00b4
\ No newline at end of file
diff --git a/plugins/transport-nio/licenses/netty-handler-4.1.96.Final.jar.sha1 b/plugins/transport-nio/licenses/netty-handler-4.1.96.Final.jar.sha1
new file mode 100644
index 0000000000000..fe4f48c68e78b
--- /dev/null
+++ b/plugins/transport-nio/licenses/netty-handler-4.1.96.Final.jar.sha1
@@ -0,0 +1 @@
+7840d7523d709e02961b647546f9d9dde1699306
\ No newline at end of file
diff --git a/plugins/transport-nio/licenses/netty-resolver-4.1.94.Final.jar.sha1 b/plugins/transport-nio/licenses/netty-resolver-4.1.94.Final.jar.sha1
deleted file mode 100644
index 4c9e4dda2b852..0000000000000
--- a/plugins/transport-nio/licenses/netty-resolver-4.1.94.Final.jar.sha1
+++ /dev/null
@@ -1 +0,0 @@
-e96f649e8e9dcb29a1f8e95328b99c9eb6cf76c2
\ No newline at end of file
diff --git a/plugins/transport-nio/licenses/netty-resolver-4.1.96.Final.jar.sha1 b/plugins/transport-nio/licenses/netty-resolver-4.1.96.Final.jar.sha1
new file mode 100644
index 0000000000000..9e93f013226cd
--- /dev/null
+++ b/plugins/transport-nio/licenses/netty-resolver-4.1.96.Final.jar.sha1
@@ -0,0 +1 @@
+0e51db5568a881e0f9b013b35617c597dc32f130
\ No newline at end of file
diff --git a/plugins/transport-nio/licenses/netty-transport-4.1.94.Final.jar.sha1 b/plugins/transport-nio/licenses/netty-transport-4.1.94.Final.jar.sha1
deleted file mode 100644
index ed7760b8e15d1..0000000000000
--- a/plugins/transport-nio/licenses/netty-transport-4.1.94.Final.jar.sha1
+++ /dev/null
@@ -1 +0,0 @@
-ec783a737f96991a87b1d5794e2f9eb2024d708a
\ No newline at end of file
diff --git a/plugins/transport-nio/licenses/netty-transport-4.1.96.Final.jar.sha1 b/plugins/transport-nio/licenses/netty-transport-4.1.96.Final.jar.sha1
new file mode 100644
index 0000000000000..707285d3d29c3
--- /dev/null
+++ b/plugins/transport-nio/licenses/netty-transport-4.1.96.Final.jar.sha1
@@ -0,0 +1 @@
+dbd15ca244be28e1a98ed29b9d755edbfa737e02
\ No newline at end of file
diff --git a/plugins/transport-nio/src/internalClusterTest/java/org/opensearch/http/nio/NioPipeliningIT.java b/plugins/transport-nio/src/internalClusterTest/java/org/opensearch/http/nio/NioPipeliningIT.java
index ac06bf03ed8cd..9afb8e37cd9a9 100644
--- a/plugins/transport-nio/src/internalClusterTest/java/org/opensearch/http/nio/NioPipeliningIT.java
+++ b/plugins/transport-nio/src/internalClusterTest/java/org/opensearch/http/nio/NioPipeliningIT.java
@@ -34,7 +34,7 @@
import io.netty.handler.codec.http.FullHttpResponse;
import org.opensearch.NioIntegTestCase;
-import org.opensearch.common.transport.TransportAddress;
+import org.opensearch.core.common.transport.TransportAddress;
import org.opensearch.http.HttpServerTransport;
import org.opensearch.test.OpenSearchIntegTestCase.ClusterScope;
import org.opensearch.test.OpenSearchIntegTestCase.Scope;
diff --git a/plugins/transport-nio/src/main/java/org/opensearch/http/nio/NioHttpServerTransport.java b/plugins/transport-nio/src/main/java/org/opensearch/http/nio/NioHttpServerTransport.java
index 1befc110eb6a5..9dd13144bc454 100644
--- a/plugins/transport-nio/src/main/java/org/opensearch/http/nio/NioHttpServerTransport.java
+++ b/plugins/transport-nio/src/main/java/org/opensearch/http/nio/NioHttpServerTransport.java
@@ -41,7 +41,7 @@
import org.opensearch.common.network.NetworkService;
import org.opensearch.common.settings.ClusterSettings;
import org.opensearch.common.settings.Settings;
-import org.opensearch.common.unit.ByteSizeValue;
+import org.opensearch.core.common.unit.ByteSizeValue;
import org.opensearch.common.util.BigArrays;
import org.opensearch.common.util.PageCacheRecycler;
import org.opensearch.core.xcontent.NamedXContentRegistry;
diff --git a/plugins/transport-nio/src/main/java/org/opensearch/transport/nio/NioTransport.java b/plugins/transport-nio/src/main/java/org/opensearch/transport/nio/NioTransport.java
index 1509e0b179bfe..0990f97124e53 100644
--- a/plugins/transport-nio/src/main/java/org/opensearch/transport/nio/NioTransport.java
+++ b/plugins/transport-nio/src/main/java/org/opensearch/transport/nio/NioTransport.java
@@ -44,7 +44,7 @@
import org.opensearch.common.network.NetworkService;
import org.opensearch.common.settings.Settings;
import org.opensearch.common.util.PageCacheRecycler;
-import org.opensearch.indices.breaker.CircuitBreakerService;
+import org.opensearch.core.indices.breaker.CircuitBreakerService;
import org.opensearch.nio.BytesChannelContext;
import org.opensearch.nio.ChannelFactory;
import org.opensearch.nio.Config;
diff --git a/plugins/transport-nio/src/main/java/org/opensearch/transport/nio/NioTransportPlugin.java b/plugins/transport-nio/src/main/java/org/opensearch/transport/nio/NioTransportPlugin.java
index 67598aec154fa..bd8aec534f608 100644
--- a/plugins/transport-nio/src/main/java/org/opensearch/transport/nio/NioTransportPlugin.java
+++ b/plugins/transport-nio/src/main/java/org/opensearch/transport/nio/NioTransportPlugin.java
@@ -47,7 +47,7 @@
import org.opensearch.core.xcontent.NamedXContentRegistry;
import org.opensearch.http.HttpServerTransport;
import org.opensearch.http.nio.NioHttpServerTransport;
-import org.opensearch.indices.breaker.CircuitBreakerService;
+import org.opensearch.core.indices.breaker.CircuitBreakerService;
import org.opensearch.plugins.NetworkPlugin;
import org.opensearch.plugins.Plugin;
import org.opensearch.threadpool.ThreadPool;
diff --git a/plugins/transport-nio/src/main/java/org/opensearch/transport/nio/TcpReadWriteHandler.java b/plugins/transport-nio/src/main/java/org/opensearch/transport/nio/TcpReadWriteHandler.java
index 29ef19a2aec87..ee96e9d85c03b 100644
--- a/plugins/transport-nio/src/main/java/org/opensearch/transport/nio/TcpReadWriteHandler.java
+++ b/plugins/transport-nio/src/main/java/org/opensearch/transport/nio/TcpReadWriteHandler.java
@@ -32,7 +32,7 @@
package org.opensearch.transport.nio;
-import org.opensearch.common.breaker.CircuitBreaker;
+import org.opensearch.core.common.breaker.CircuitBreaker;
import org.opensearch.core.common.bytes.BytesReference;
import org.opensearch.core.common.bytes.CompositeBytesReference;
import org.opensearch.common.bytes.ReleasableBytesReference;
diff --git a/plugins/transport-nio/src/test/java/org/opensearch/http/nio/HttpReadWriteHandlerTests.java b/plugins/transport-nio/src/test/java/org/opensearch/http/nio/HttpReadWriteHandlerTests.java
index c606a4818a324..ca62c0dedd452 100644
--- a/plugins/transport-nio/src/test/java/org/opensearch/http/nio/HttpReadWriteHandlerTests.java
+++ b/plugins/transport-nio/src/test/java/org/opensearch/http/nio/HttpReadWriteHandlerTests.java
@@ -49,7 +49,7 @@
import org.opensearch.core.common.bytes.BytesArray;
import org.opensearch.common.settings.Settings;
-import org.opensearch.common.unit.ByteSizeValue;
+import org.opensearch.core.common.unit.ByteSizeValue;
import org.opensearch.common.unit.TimeValue;
import org.opensearch.http.CorsHandler;
import org.opensearch.http.HttpChannel;
diff --git a/plugins/transport-nio/src/test/java/org/opensearch/http/nio/NioHttpClient.java b/plugins/transport-nio/src/test/java/org/opensearch/http/nio/NioHttpClient.java
index edaee15507df9..2922f28e3be18 100644
--- a/plugins/transport-nio/src/test/java/org/opensearch/http/nio/NioHttpClient.java
+++ b/plugins/transport-nio/src/test/java/org/opensearch/http/nio/NioHttpClient.java
@@ -51,8 +51,8 @@
import org.opensearch.action.support.PlainActionFuture;
import org.opensearch.common.network.NetworkService;
import org.opensearch.common.settings.Settings;
-import org.opensearch.common.unit.ByteSizeUnit;
-import org.opensearch.common.unit.ByteSizeValue;
+import org.opensearch.core.common.unit.ByteSizeUnit;
+import org.opensearch.core.common.unit.ByteSizeValue;
import org.opensearch.common.util.io.IOUtils;
import org.opensearch.nio.BytesChannelContext;
import org.opensearch.nio.ChannelFactory;
diff --git a/plugins/transport-nio/src/test/java/org/opensearch/http/nio/NioHttpServerTransportTests.java b/plugins/transport-nio/src/test/java/org/opensearch/http/nio/NioHttpServerTransportTests.java
index c69fe23002dfe..22bda4881c322 100644
--- a/plugins/transport-nio/src/test/java/org/opensearch/http/nio/NioHttpServerTransportTests.java
+++ b/plugins/transport-nio/src/test/java/org/opensearch/http/nio/NioHttpServerTransportTests.java
@@ -53,8 +53,8 @@
import org.opensearch.common.settings.ClusterSettings;
import org.opensearch.common.settings.Setting;
import org.opensearch.common.settings.Settings;
-import org.opensearch.common.transport.TransportAddress;
-import org.opensearch.common.unit.ByteSizeValue;
+import org.opensearch.core.common.transport.TransportAddress;
+import org.opensearch.core.common.unit.ByteSizeValue;
import org.opensearch.common.unit.TimeValue;
import org.opensearch.common.util.MockBigArrays;
import org.opensearch.common.util.MockPageCacheRecycler;
@@ -64,7 +64,7 @@
import org.opensearch.http.HttpServerTransport;
import org.opensearch.http.HttpTransportSettings;
import org.opensearch.http.NullDispatcher;
-import org.opensearch.indices.breaker.NoneCircuitBreakerService;
+import org.opensearch.core.indices.breaker.NoneCircuitBreakerService;
import org.opensearch.nio.NioSocketChannel;
import org.opensearch.rest.BytesRestResponse;
import org.opensearch.rest.RestChannel;
diff --git a/plugins/transport-nio/src/test/java/org/opensearch/transport/nio/SimpleNioTransportTests.java b/plugins/transport-nio/src/test/java/org/opensearch/transport/nio/SimpleNioTransportTests.java
index 4b06c4e15bce7..d7f603031ac17 100644
--- a/plugins/transport-nio/src/test/java/org/opensearch/transport/nio/SimpleNioTransportTests.java
+++ b/plugins/transport-nio/src/test/java/org/opensearch/transport/nio/SimpleNioTransportTests.java
@@ -39,11 +39,11 @@
import org.opensearch.common.network.NetworkService;
import org.opensearch.common.settings.ClusterSettings;
import org.opensearch.common.settings.Settings;
-import org.opensearch.common.transport.TransportAddress;
+import org.opensearch.core.common.transport.TransportAddress;
import org.opensearch.common.util.MockPageCacheRecycler;
import org.opensearch.common.util.io.IOUtils;
import org.opensearch.common.util.net.NetUtils;
-import org.opensearch.indices.breaker.NoneCircuitBreakerService;
+import org.opensearch.core.indices.breaker.NoneCircuitBreakerService;
import org.opensearch.test.transport.MockTransportService;
import org.opensearch.test.transport.StubbableTransport;
import org.opensearch.transport.AbstractSimpleTransportTestCase;
diff --git a/qa/ccs-unavailable-clusters/src/test/java/org/opensearch/search/CrossClusterSearchUnavailableClusterIT.java b/qa/ccs-unavailable-clusters/src/test/java/org/opensearch/search/CrossClusterSearchUnavailableClusterIT.java
index 07cd901449a18..6b09d5477e8d2 100644
--- a/qa/ccs-unavailable-clusters/src/test/java/org/opensearch/search/CrossClusterSearchUnavailableClusterIT.java
+++ b/qa/ccs-unavailable-clusters/src/test/java/org/opensearch/search/CrossClusterSearchUnavailableClusterIT.java
@@ -61,7 +61,6 @@
import org.opensearch.cluster.ClusterState;
import org.opensearch.cluster.node.DiscoveryNode;
import org.opensearch.cluster.node.DiscoveryNodes;
-import org.opensearch.common.Strings;
import org.opensearch.common.settings.Settings;
import org.opensearch.core.xcontent.XContentBuilder;
import org.opensearch.common.xcontent.json.JsonXContent;
@@ -341,7 +340,7 @@ private static HttpEntity buildUpdateSettingsRequestBody(Map set
builder.endObject();
}
builder.endObject();
- requestBody = Strings.toString(builder);
+ requestBody = builder.toString();
}
return new StringEntity(requestBody, ContentType.APPLICATION_JSON);
}
diff --git a/qa/full-cluster-restart/src/test/java/org/opensearch/upgrades/FullClusterRestartIT.java b/qa/full-cluster-restart/src/test/java/org/opensearch/upgrades/FullClusterRestartIT.java
index 5fef24e75d8b7..95bc7f8dc404e 100644
--- a/qa/full-cluster-restart/src/test/java/org/opensearch/upgrades/FullClusterRestartIT.java
+++ b/qa/full-cluster-restart/src/test/java/org/opensearch/upgrades/FullClusterRestartIT.java
@@ -41,11 +41,11 @@
import org.opensearch.cluster.metadata.MetadataIndexStateService;
import org.opensearch.common.Booleans;
import org.opensearch.common.CheckedFunction;
-import org.opensearch.common.Strings;
import org.opensearch.common.settings.Settings;
import org.opensearch.common.xcontent.XContentType;
import org.opensearch.common.xcontent.json.JsonXContent;
import org.opensearch.common.xcontent.support.XContentMapValues;
+import org.opensearch.core.common.Strings;
import org.opensearch.core.xcontent.XContentBuilder;
import org.opensearch.index.IndexSettings;
import org.opensearch.test.NotEqualMessageBuilder;
@@ -149,7 +149,7 @@ public void testSearch() throws Exception {
mappingsAndSettings.endObject();
Request createIndex = new Request("PUT", "/" + index);
- createIndex.setJsonEntity(Strings.toString(mappingsAndSettings));
+ createIndex.setJsonEntity(mappingsAndSettings.toString());
createIndex.setOptions(allowTypesRemovalWarnings());
client().performRequest(createIndex);
@@ -208,7 +208,7 @@ public void testNewReplicasWork() throws Exception {
mappingsAndSettings.endObject();
Request createIndex = new Request("PUT", "/" + index);
- createIndex.setJsonEntity(Strings.toString(mappingsAndSettings));
+ createIndex.setJsonEntity(mappingsAndSettings.toString());
client().performRequest(createIndex);
int numDocs = randomIntBetween(2000, 3000);
@@ -257,7 +257,7 @@ public void testClusterState() throws Exception {
}
mappingsAndSettings.endObject();
Request createTemplate = new Request("PUT", "/_template/template_1");
- createTemplate.setJsonEntity(Strings.toString(mappingsAndSettings));
+ createTemplate.setJsonEntity(mappingsAndSettings.toString());
client().performRequest(createTemplate);
client().performRequest(new Request("PUT", "/" + index));
}
@@ -315,7 +315,7 @@ public void testShrink() throws IOException, NumberFormatException, ParseExcepti
mappingsAndSettings.endObject();
Request createIndex = new Request("PUT", "/" + index);
- createIndex.setJsonEntity(Strings.toString(mappingsAndSettings));
+ createIndex.setJsonEntity(mappingsAndSettings.toString());
client().performRequest(createIndex);
numDocs = randomIntBetween(512, 1024);
@@ -384,7 +384,7 @@ public void testShrinkAfterUpgrade() throws IOException, ParseException {
mappingsAndSettings.endObject();
Request createIndex = new Request("PUT", "/" + index);
- createIndex.setJsonEntity(Strings.toString(mappingsAndSettings));
+ createIndex.setJsonEntity(mappingsAndSettings.toString());
client().performRequest(createIndex);
numDocs = randomIntBetween(512, 1024);
@@ -854,7 +854,7 @@ public void testSnapshotRestore() throws IOException, ParseException {
}
templateBuilder.endObject().endObject();
Request createTemplateRequest = new Request("PUT", "/_template/test_template");
- createTemplateRequest.setJsonEntity(Strings.toString(templateBuilder));
+ createTemplateRequest.setJsonEntity(templateBuilder.toString());
client().performRequest(createTemplateRequest);
@@ -870,7 +870,7 @@ public void testSnapshotRestore() throws IOException, ParseException {
}
repoConfig.endObject();
Request createRepoRequest = new Request("PUT", "/_snapshot/repo");
- createRepoRequest.setJsonEntity(Strings.toString(repoConfig));
+ createRepoRequest.setJsonEntity(repoConfig.toString());
client().performRequest(createRepoRequest);
}
@@ -897,7 +897,7 @@ public void testHistoryUUIDIsAdded() throws Exception {
}
mappingsAndSettings.endObject();
Request createIndex = new Request("PUT", "/" + index);
- createIndex.setJsonEntity(Strings.toString(mappingsAndSettings));
+ createIndex.setJsonEntity(mappingsAndSettings.toString());
client().performRequest(createIndex);
} else {
ensureGreenLongWait(index);
@@ -940,11 +940,11 @@ public void testSoftDeletes() throws Exception {
}
mappingsAndSettings.endObject();
Request createIndex = new Request("PUT", "/" + index);
- createIndex.setJsonEntity(Strings.toString(mappingsAndSettings));
+ createIndex.setJsonEntity(mappingsAndSettings.toString());
client().performRequest(createIndex);
int numDocs = between(10, 100);
for (int i = 0; i < numDocs; i++) {
- String doc = Strings.toString(JsonXContent.contentBuilder().startObject().field("field", "v1").endObject());
+ String doc = JsonXContent.contentBuilder().startObject().field("field", "v1").endObject().toString();
Request request = new Request("POST", "/" + index + "/_doc/" + i);
request.setJsonEntity(doc);
client().performRequest(request);
@@ -955,7 +955,7 @@ public void testSoftDeletes() throws Exception {
assertTotalHits(liveDocs, entityAsMap(client().performRequest(new Request("GET", "/" + index + "/_search"))));
for (int i = 0; i < numDocs; i++) {
if (randomBoolean()) {
- String doc = Strings.toString(JsonXContent.contentBuilder().startObject().field("field", "v2").endObject());
+ String doc = JsonXContent.contentBuilder().startObject().field("field", "v2").endObject().toString();
Request request = new Request("POST", "/" + index + "/_doc/" + i);
request.setJsonEntity(doc);
client().performRequest(request);
@@ -989,7 +989,7 @@ public void testClosedIndices() throws Exception {
numDocs = between(1, 100);
for (int i = 0; i < numDocs; i++) {
final Request request = new Request("POST", "/" + index + "/" + type + "/" + i);
- request.setJsonEntity(Strings.toString(JsonXContent.contentBuilder().startObject().field("field", "v1").endObject()));
+ request.setJsonEntity(JsonXContent.contentBuilder().startObject().field("field", "v1").endObject().toString());
assertOK(client().performRequest(request));
if (rarely()) {
refresh();
@@ -1083,7 +1083,7 @@ private void checkSnapshot(final String snapshotName, final int count, final Ver
restoreCommand.endObject();
Request restoreRequest = new Request("POST", "/_snapshot/repo/" + snapshotName + "/_restore");
restoreRequest.addParameter("wait_for_completion", "true");
- restoreRequest.setJsonEntity(Strings.toString(restoreCommand));
+ restoreRequest.setJsonEntity(restoreCommand.toString());
client().performRequest(restoreRequest);
// Make sure search finds all documents
@@ -1158,7 +1158,7 @@ private void indexRandomDocuments(
for (int i = 0; i < count; i++) {
logger.debug("Indexing document [{}]", i);
Request createDocument = new Request("POST", "/" + index + "/_doc/" + (specifyId ? i : ""));
- createDocument.setJsonEntity(Strings.toString(docSupplier.apply(i)));
+ createDocument.setJsonEntity(docSupplier.apply(i).toString());
client().performRequest(createDocument);
if (rarely()) {
refreshAllIndices();
@@ -1175,7 +1175,7 @@ private void indexRandomDocuments(
private void indexDocument(String id) throws IOException {
final Request indexRequest = new Request("POST", "/" + index + "/" + type + "/" + id);
- indexRequest.setJsonEntity(Strings.toString(JsonXContent.contentBuilder().startObject().field("f", "v").endObject()));
+ indexRequest.setJsonEntity(JsonXContent.contentBuilder().startObject().field("f", "v").endObject().toString());
assertOK(client().performRequest(indexRequest));
}
@@ -1190,7 +1190,7 @@ private void saveInfoDocument(String id, String value) throws IOException {
// Only create the first version so we know how many documents are created when the index is first created
Request request = new Request("PUT", "/info/" + type + "/" + id);
request.addParameter("op_type", "create");
- request.setJsonEntity(Strings.toString(infoDoc));
+ request.setJsonEntity(infoDoc.toString());
client().performRequest(request);
}
@@ -1255,7 +1255,7 @@ public void testPeerRecoveryRetentionLeases() throws Exception {
settings.endObject();
Request createIndex = new Request("PUT", "/" + index);
- createIndex.setJsonEntity(Strings.toString(settings));
+ createIndex.setJsonEntity(settings.toString());
client().performRequest(createIndex);
}
ensureGreen(index);
@@ -1485,7 +1485,7 @@ public void testEnableSoftDeletesOnRestore() throws Exception {
}
repoConfig.endObject();
Request createRepoRequest = new Request("PUT", "/_snapshot/repo");
- createRepoRequest.setJsonEntity(Strings.toString(repoConfig));
+ createRepoRequest.setJsonEntity(repoConfig.toString());
client().performRequest(createRepoRequest);
// create snapshot
Request createSnapshot = new Request("PUT", "/_snapshot/repo/" + snapshot);
@@ -1507,7 +1507,7 @@ public void testEnableSoftDeletesOnRestore() throws Exception {
restoreCommand.endObject();
Request restoreRequest = new Request("POST", "/_snapshot/repo/" + snapshot + "/_restore");
restoreRequest.addParameter("wait_for_completion", "true");
- restoreRequest.setJsonEntity(Strings.toString(restoreCommand));
+ restoreRequest.setJsonEntity(restoreCommand.toString());
client().performRequest(restoreRequest);
ensureGreen(restoredIndex);
int numDocs = countOfIndexedRandomDocuments();
@@ -1539,7 +1539,7 @@ public void testForbidDisableSoftDeletesOnRestore() throws Exception {
}
repoConfig.endObject();
Request createRepoRequest = new Request("PUT", "/_snapshot/repo");
- createRepoRequest.setJsonEntity(Strings.toString(repoConfig));
+ createRepoRequest.setJsonEntity(repoConfig.toString());
client().performRequest(createRepoRequest);
// create snapshot
Request createSnapshot = new Request("PUT", "/_snapshot/repo/" + snapshot);
@@ -1560,7 +1560,7 @@ public void testForbidDisableSoftDeletesOnRestore() throws Exception {
restoreCommand.endObject();
Request restoreRequest = new Request("POST", "/_snapshot/repo/" + snapshot + "/_restore");
restoreRequest.addParameter("wait_for_completion", "true");
- restoreRequest.setJsonEntity(Strings.toString(restoreCommand));
+ restoreRequest.setJsonEntity(restoreCommand.toString());
final ResponseException error = expectThrows(ResponseException.class, () -> client().performRequest(restoreRequest));
assertThat(error.getMessage(), containsString("cannot disable setting [index.soft_deletes.enabled] on restore"));
}
diff --git a/qa/full-cluster-restart/src/test/java/org/opensearch/upgrades/QueryBuilderBWCIT.java b/qa/full-cluster-restart/src/test/java/org/opensearch/upgrades/QueryBuilderBWCIT.java
index 724ac9883efaa..aabc3aee8887f 100644
--- a/qa/full-cluster-restart/src/test/java/org/opensearch/upgrades/QueryBuilderBWCIT.java
+++ b/qa/full-cluster-restart/src/test/java/org/opensearch/upgrades/QueryBuilderBWCIT.java
@@ -36,7 +36,6 @@
import org.apache.hc.core5.http.io.entity.EntityUtils;
import org.opensearch.client.Request;
import org.opensearch.client.Response;
-import org.opensearch.common.Strings;
import org.opensearch.core.common.io.stream.InputStreamStreamInput;
import org.opensearch.core.common.io.stream.NamedWriteableAwareStreamInput;
import org.opensearch.core.common.io.stream.NamedWriteableRegistry;
@@ -199,7 +198,7 @@ public void testQueryBuilderBWC() throws Exception {
mappingsAndSettings.endObject();
Request request = new Request("PUT", "/" + index);
request.setOptions(allowTypesRemovalWarnings());
- request.setJsonEntity(Strings.toString(mappingsAndSettings));
+ request.setJsonEntity(mappingsAndSettings.toString());
Response rsp = client().performRequest(request);
assertEquals(200, rsp.getStatusLine().getStatusCode());
diff --git a/qa/mixed-cluster/src/test/java/org/opensearch/backwards/IndexingIT.java b/qa/mixed-cluster/src/test/java/org/opensearch/backwards/IndexingIT.java
index f9810e027bb1e..75f7e00f499c6 100644
--- a/qa/mixed-cluster/src/test/java/org/opensearch/backwards/IndexingIT.java
+++ b/qa/mixed-cluster/src/test/java/org/opensearch/backwards/IndexingIT.java
@@ -42,7 +42,6 @@
import org.opensearch.client.ResponseException;
import org.opensearch.client.RestClient;
import org.opensearch.cluster.metadata.IndexMetadata;
-import org.opensearch.common.Strings;
import org.opensearch.common.settings.Settings;
import org.opensearch.common.xcontent.json.JsonXContent;
import org.opensearch.common.xcontent.support.XContentMapValues;
@@ -365,15 +364,14 @@ public void testUpdateSnapshotStatus() throws Exception {
// Create the repository before taking the snapshot.
Request request = new Request("PUT", "/_snapshot/repo");
- request.setJsonEntity(Strings
- .toString(JsonXContent.contentBuilder()
+ request.setJsonEntity(JsonXContent.contentBuilder()
.startObject()
.field("type", "fs")
.startObject("settings")
.field("compress", randomBoolean())
.field("location", System.getProperty("tests.path.repo"))
.endObject()
- .endObject()));
+ .endObject().toString());
assertOK(client().performRequest(request));
diff --git a/qa/multi-cluster-search/src/test/java/org/opensearch/search/CCSDuelIT.java b/qa/multi-cluster-search/src/test/java/org/opensearch/search/CCSDuelIT.java
index 5f52250c7fb0c..1f2409741a878 100644
--- a/qa/multi-cluster-search/src/test/java/org/opensearch/search/CCSDuelIT.java
+++ b/qa/multi-cluster-search/src/test/java/org/opensearch/search/CCSDuelIT.java
@@ -53,12 +53,12 @@
import org.opensearch.client.RestHighLevelClient;
import org.opensearch.client.indices.CreateIndexRequest;
import org.opensearch.client.indices.CreateIndexResponse;
-import org.opensearch.common.Strings;
-import org.opensearch.core.common.bytes.BytesReference;
import org.opensearch.common.settings.Settings;
import org.opensearch.common.xcontent.XContentHelper;
import org.opensearch.common.xcontent.XContentType;
import org.opensearch.common.util.io.IOUtils;
+import org.opensearch.core.common.Strings;
+import org.opensearch.core.common.bytes.BytesReference;
import org.opensearch.index.query.InnerHitBuilder;
import org.opensearch.index.query.MatchQueryBuilder;
import org.opensearch.index.query.QueryBuilders;
diff --git a/qa/rolling-upgrade/src/test/java/org/opensearch/upgrades/IndexingIT.java b/qa/rolling-upgrade/src/test/java/org/opensearch/upgrades/IndexingIT.java
index b60ee09d39048..a03d299b32274 100644
--- a/qa/rolling-upgrade/src/test/java/org/opensearch/upgrades/IndexingIT.java
+++ b/qa/rolling-upgrade/src/test/java/org/opensearch/upgrades/IndexingIT.java
@@ -43,17 +43,20 @@
import org.opensearch.index.codec.CodecService;
import org.opensearch.index.engine.EngineConfig;
import org.opensearch.indices.replication.common.ReplicationType;
+import org.opensearch.test.OpenSearchIntegTestCase;
import org.opensearch.test.rest.yaml.ObjectPath;
import java.io.IOException;
import java.net.URISyntaxException;
import java.nio.charset.StandardCharsets;
+import java.util.ArrayList;
import java.util.List;
import java.util.Map;
import java.util.concurrent.TimeUnit;
import static org.opensearch.cluster.routing.UnassignedInfo.INDEX_DELAYED_NODE_LEFT_TIMEOUT_SETTING;
import static org.opensearch.rest.action.search.RestSearchAction.TOTAL_HITS_AS_INT_PARAM;
+import static org.opensearch.test.OpenSearchIntegTestCase.CODECS;
/**
* Basic test that indexed documents survive the rolling restart. See
@@ -267,7 +270,11 @@ public void testIndexingWithSegRep() throws Exception {
.put(IndexMetadata.SETTING_REPLICATION_TYPE, ReplicationType.SEGMENT)
.put(
EngineConfig.INDEX_CODEC_SETTING.getKey(),
- randomFrom(CodecService.DEFAULT_CODEC, CodecService.BEST_COMPRESSION_CODEC, CodecService.LUCENE_DEFAULT_CODEC)
+ randomFrom(new ArrayList<>(CODECS) {
+ {
+ add(CodecService.LUCENE_DEFAULT_CODEC);
+ }
+ })
)
.put(INDEX_DELAYED_NODE_LEFT_TIMEOUT_SETTING.getKey(), "100ms");
createIndex(indexName, settings.build());
diff --git a/qa/rolling-upgrade/src/test/java/org/opensearch/upgrades/RecoveryIT.java b/qa/rolling-upgrade/src/test/java/org/opensearch/upgrades/RecoveryIT.java
index 3dd9f371f06fd..6d143d08452e9 100644
--- a/qa/rolling-upgrade/src/test/java/org/opensearch/upgrades/RecoveryIT.java
+++ b/qa/rolling-upgrade/src/test/java/org/opensearch/upgrades/RecoveryIT.java
@@ -40,11 +40,11 @@
import org.opensearch.cluster.metadata.MetadataIndexStateService;
import org.opensearch.cluster.routing.allocation.decider.EnableAllocationDecider;
import org.opensearch.common.Booleans;
-import org.opensearch.common.Strings;
import org.opensearch.common.settings.Settings;
import org.opensearch.common.util.concurrent.AbstractRunnable;
import org.opensearch.common.xcontent.XContentType;
import org.opensearch.common.xcontent.support.XContentMapValues;
+import org.opensearch.core.common.Strings;
import org.opensearch.index.IndexSettings;
import org.opensearch.index.mapper.MapperService;
import org.opensearch.core.rest.RestStatus;
diff --git a/qa/smoke-test-http/src/test/java/org/opensearch/http/SearchRestCancellationIT.java b/qa/smoke-test-http/src/test/java/org/opensearch/http/SearchRestCancellationIT.java
index 4a898d816bbf4..b1143ad647327 100644
--- a/qa/smoke-test-http/src/test/java/org/opensearch/http/SearchRestCancellationIT.java
+++ b/qa/smoke-test-http/src/test/java/org/opensearch/http/SearchRestCancellationIT.java
@@ -48,8 +48,8 @@
import org.opensearch.client.Response;
import org.opensearch.client.ResponseListener;
import org.opensearch.common.SetOnce;
-import org.opensearch.common.Strings;
import org.opensearch.common.xcontent.XContentType;
+import org.opensearch.core.common.Strings;
import org.opensearch.plugins.Plugin;
import org.opensearch.plugins.PluginsService;
import org.opensearch.script.MockScriptPlugin;
@@ -83,7 +83,6 @@
import static org.opensearch.test.hamcrest.OpenSearchAssertions.assertNoFailures;
import static org.hamcrest.Matchers.greaterThan;
import static org.hamcrest.Matchers.instanceOf;
-import static org.opensearch.test.hamcrest.OpenSearchAssertions.awaitLatch;
public class SearchRestCancellationIT extends HttpSmokeTestCase {
diff --git a/qa/translog-policy/src/test/java/org/opensearch/upgrades/TranslogPolicyIT.java b/qa/translog-policy/src/test/java/org/opensearch/upgrades/TranslogPolicyIT.java
index 0dc62b160ff3f..5f0f468898c47 100644
--- a/qa/translog-policy/src/test/java/org/opensearch/upgrades/TranslogPolicyIT.java
+++ b/qa/translog-policy/src/test/java/org/opensearch/upgrades/TranslogPolicyIT.java
@@ -35,7 +35,7 @@
import org.opensearch.LegacyESVersion;
import org.opensearch.client.Request;
import org.opensearch.cluster.metadata.IndexMetadata;
-import org.opensearch.common.Strings;
+import org.opensearch.core.common.Strings;
import org.opensearch.common.settings.Settings;
import org.opensearch.common.xcontent.json.JsonXContent;
import org.opensearch.index.IndexSettings;
diff --git a/server/src/internalClusterTest/java/org/opensearch/action/admin/cluster/allocation/ClusterAllocationExplainIT.java b/server/src/internalClusterTest/java/org/opensearch/action/admin/cluster/allocation/ClusterAllocationExplainIT.java
index 4cbcc5b9bb507..966c040d7877c 100644
--- a/server/src/internalClusterTest/java/org/opensearch/action/admin/cluster/allocation/ClusterAllocationExplainIT.java
+++ b/server/src/internalClusterTest/java/org/opensearch/action/admin/cluster/allocation/ClusterAllocationExplainIT.java
@@ -49,7 +49,6 @@
import org.opensearch.cluster.routing.allocation.NodeAllocationResult;
import org.opensearch.cluster.routing.allocation.decider.Decision;
import org.opensearch.common.Priority;
-import org.opensearch.common.Strings;
import org.opensearch.common.settings.Settings;
import org.opensearch.common.unit.TimeValue;
import org.opensearch.common.util.set.Sets;
@@ -1275,7 +1274,7 @@ private ClusterAllocationExplanation runExplain(boolean primary, String nodeId,
XContentBuilder builder = JsonXContent.contentBuilder();
builder.prettyPrint();
builder.humanReadable(true);
- logger.debug("--> explain json output: \n{}", Strings.toString(explanation.toXContent(builder, ToXContent.EMPTY_PARAMS)));
+ logger.debug("--> explain json output: \n{}", explanation.toXContent(builder, ToXContent.EMPTY_PARAMS).toString());
}
return explanation;
}
diff --git a/server/src/internalClusterTest/java/org/opensearch/action/admin/indices/create/ShrinkIndexIT.java b/server/src/internalClusterTest/java/org/opensearch/action/admin/indices/create/ShrinkIndexIT.java
index 51598d7775623..a75448dadf427 100644
--- a/server/src/internalClusterTest/java/org/opensearch/action/admin/indices/create/ShrinkIndexIT.java
+++ b/server/src/internalClusterTest/java/org/opensearch/action/admin/indices/create/ShrinkIndexIT.java
@@ -65,7 +65,7 @@
import org.opensearch.cluster.routing.allocation.decider.EnableAllocationDecider;
import org.opensearch.common.Priority;
import org.opensearch.common.settings.Settings;
-import org.opensearch.common.unit.ByteSizeValue;
+import org.opensearch.core.common.unit.ByteSizeValue;
import org.opensearch.common.unit.TimeValue;
import org.opensearch.common.xcontent.XContentType;
import org.opensearch.core.index.Index;
diff --git a/server/src/internalClusterTest/java/org/opensearch/action/admin/indices/rollover/RolloverIT.java b/server/src/internalClusterTest/java/org/opensearch/action/admin/indices/rollover/RolloverIT.java
index 7f175289f3a88..d6a7dcf4b6152 100644
--- a/server/src/internalClusterTest/java/org/opensearch/action/admin/indices/rollover/RolloverIT.java
+++ b/server/src/internalClusterTest/java/org/opensearch/action/admin/indices/rollover/RolloverIT.java
@@ -47,8 +47,8 @@
import org.opensearch.cluster.routing.allocation.AllocationService;
import org.opensearch.common.settings.Settings;
import org.opensearch.common.time.DateFormatter;
-import org.opensearch.common.unit.ByteSizeUnit;
-import org.opensearch.common.unit.ByteSizeValue;
+import org.opensearch.core.common.unit.ByteSizeUnit;
+import org.opensearch.core.common.unit.ByteSizeValue;
import org.opensearch.common.unit.TimeValue;
import org.opensearch.plugins.Plugin;
import org.opensearch.test.OpenSearchIntegTestCase;
diff --git a/server/src/internalClusterTest/java/org/opensearch/action/bulk/BulkProcessorIT.java b/server/src/internalClusterTest/java/org/opensearch/action/bulk/BulkProcessorIT.java
index 850034bc631b1..c77178a5165bc 100644
--- a/server/src/internalClusterTest/java/org/opensearch/action/bulk/BulkProcessorIT.java
+++ b/server/src/internalClusterTest/java/org/opensearch/action/bulk/BulkProcessorIT.java
@@ -41,8 +41,8 @@
import org.opensearch.client.Requests;
import org.opensearch.cluster.metadata.IndexMetadata;
import org.opensearch.common.settings.Settings;
-import org.opensearch.common.unit.ByteSizeUnit;
-import org.opensearch.common.unit.ByteSizeValue;
+import org.opensearch.core.common.unit.ByteSizeUnit;
+import org.opensearch.core.common.unit.ByteSizeValue;
import org.opensearch.common.unit.TimeValue;
import org.opensearch.test.OpenSearchIntegTestCase;
diff --git a/server/src/internalClusterTest/java/org/opensearch/action/bulk/BulkWithUpdatesIT.java b/server/src/internalClusterTest/java/org/opensearch/action/bulk/BulkWithUpdatesIT.java
index 53afa53de92f3..139b2cb896ded 100644
--- a/server/src/internalClusterTest/java/org/opensearch/action/bulk/BulkWithUpdatesIT.java
+++ b/server/src/internalClusterTest/java/org/opensearch/action/bulk/BulkWithUpdatesIT.java
@@ -46,9 +46,9 @@
import org.opensearch.action.update.UpdateResponse;
import org.opensearch.client.Requests;
import org.opensearch.cluster.metadata.IndexMetadata;
-import org.opensearch.common.Strings;
import org.opensearch.common.settings.Settings;
import org.opensearch.common.xcontent.XContentType;
+import org.opensearch.core.common.Strings;
import org.opensearch.index.VersionType;
import org.opensearch.indices.IndexClosedException;
import org.opensearch.plugins.Plugin;
diff --git a/server/src/internalClusterTest/java/org/opensearch/action/search/TransportSearchIT.java b/server/src/internalClusterTest/java/org/opensearch/action/search/TransportSearchIT.java
index afa5ac908c137..24a66083d85ff 100644
--- a/server/src/internalClusterTest/java/org/opensearch/action/search/TransportSearchIT.java
+++ b/server/src/internalClusterTest/java/org/opensearch/action/search/TransportSearchIT.java
@@ -47,7 +47,7 @@
import org.opensearch.action.support.WriteRequest;
import org.opensearch.client.Client;
import org.opensearch.cluster.metadata.IndexMetadata;
-import org.opensearch.common.breaker.CircuitBreaker;
+import org.opensearch.core.common.breaker.CircuitBreaker;
import org.opensearch.core.common.io.stream.StreamInput;
import org.opensearch.core.common.io.stream.StreamOutput;
import org.opensearch.common.settings.Settings;
diff --git a/server/src/internalClusterTest/java/org/opensearch/action/support/replication/TransportReplicationActionRetryOnClosedNodeIT.java b/server/src/internalClusterTest/java/org/opensearch/action/support/replication/TransportReplicationActionRetryOnClosedNodeIT.java
index 9f60e65eca297..7d663dd70edd6 100644
--- a/server/src/internalClusterTest/java/org/opensearch/action/support/replication/TransportReplicationActionRetryOnClosedNodeIT.java
+++ b/server/src/internalClusterTest/java/org/opensearch/action/support/replication/TransportReplicationActionRetryOnClosedNodeIT.java
@@ -60,7 +60,7 @@
import org.opensearch.transport.TransportInterceptor;
import org.opensearch.transport.TransportRequest;
import org.opensearch.transport.TransportRequestOptions;
-import org.opensearch.transport.TransportResponse;
+import org.opensearch.core.transport.TransportResponse;
import org.opensearch.transport.TransportResponseHandler;
import org.opensearch.transport.TransportService;
diff --git a/server/src/internalClusterTest/java/org/opensearch/cluster/SimpleClusterStateIT.java b/server/src/internalClusterTest/java/org/opensearch/cluster/SimpleClusterStateIT.java
index 61171b0a817b0..4683535a3a095 100644
--- a/server/src/internalClusterTest/java/org/opensearch/cluster/SimpleClusterStateIT.java
+++ b/server/src/internalClusterTest/java/org/opensearch/cluster/SimpleClusterStateIT.java
@@ -50,7 +50,7 @@
import org.opensearch.core.common.io.stream.StreamInput;
import org.opensearch.core.common.io.stream.StreamOutput;
import org.opensearch.common.settings.Settings;
-import org.opensearch.common.unit.ByteSizeValue;
+import org.opensearch.core.common.unit.ByteSizeValue;
import org.opensearch.core.common.Strings;
import org.opensearch.core.xcontent.NamedXContentRegistry;
import org.opensearch.core.xcontent.XContentBuilder;
diff --git a/server/src/internalClusterTest/java/org/opensearch/cluster/routing/PrimaryAllocationIT.java b/server/src/internalClusterTest/java/org/opensearch/cluster/routing/PrimaryAllocationIT.java
index 637cc96bdfc44..14b6ffcd50825 100644
--- a/server/src/internalClusterTest/java/org/opensearch/cluster/routing/PrimaryAllocationIT.java
+++ b/server/src/internalClusterTest/java/org/opensearch/cluster/routing/PrimaryAllocationIT.java
@@ -46,10 +46,10 @@
import org.opensearch.cluster.routing.allocation.command.AllocateEmptyPrimaryAllocationCommand;
import org.opensearch.cluster.routing.allocation.command.AllocateStalePrimaryAllocationCommand;
import org.opensearch.cluster.service.ClusterService;
-import org.opensearch.common.Strings;
import org.opensearch.common.settings.Settings;
import org.opensearch.common.util.set.Sets;
import org.opensearch.common.xcontent.XContentType;
+import org.opensearch.core.common.Strings;
import org.opensearch.gateway.GatewayAllocator;
import org.opensearch.index.IndexNotFoundException;
import org.opensearch.index.engine.Engine;
diff --git a/server/src/internalClusterTest/java/org/opensearch/cluster/routing/allocation/decider/DiskThresholdDeciderIT.java b/server/src/internalClusterTest/java/org/opensearch/cluster/routing/allocation/decider/DiskThresholdDeciderIT.java
index cafce1a194caa..2d2714723802f 100644
--- a/server/src/internalClusterTest/java/org/opensearch/cluster/routing/allocation/decider/DiskThresholdDeciderIT.java
+++ b/server/src/internalClusterTest/java/org/opensearch/cluster/routing/allocation/decider/DiskThresholdDeciderIT.java
@@ -59,8 +59,8 @@
import org.opensearch.common.io.PathUtils;
import org.opensearch.common.io.PathUtilsForTesting;
import org.opensearch.common.settings.Settings;
-import org.opensearch.common.unit.ByteSizeUnit;
-import org.opensearch.common.unit.ByteSizeValue;
+import org.opensearch.core.common.unit.ByteSizeUnit;
+import org.opensearch.core.common.unit.ByteSizeValue;
import org.opensearch.env.Environment;
import org.opensearch.env.NodeEnvironment;
import org.opensearch.index.IndexSettings;
diff --git a/server/src/internalClusterTest/java/org/opensearch/cluster/settings/ClusterSettingsIT.java b/server/src/internalClusterTest/java/org/opensearch/cluster/settings/ClusterSettingsIT.java
index 79b674b23fd48..f1c34aa4f4141 100644
--- a/server/src/internalClusterTest/java/org/opensearch/cluster/settings/ClusterSettingsIT.java
+++ b/server/src/internalClusterTest/java/org/opensearch/cluster/settings/ClusterSettingsIT.java
@@ -42,7 +42,7 @@
import org.opensearch.common.settings.Setting;
import org.opensearch.common.settings.Settings;
import org.opensearch.common.settings.SettingsException;
-import org.opensearch.common.unit.ByteSizeUnit;
+import org.opensearch.core.common.unit.ByteSizeUnit;
import org.opensearch.common.unit.TimeValue;
import org.opensearch.indices.recovery.RecoverySettings;
import org.opensearch.test.OpenSearchIntegTestCase;
diff --git a/server/src/internalClusterTest/java/org/opensearch/cluster/shards/ClusterShardLimitIT.java b/server/src/internalClusterTest/java/org/opensearch/cluster/shards/ClusterShardLimitIT.java
index 23335f6e82ef1..9433bebc24c20 100644
--- a/server/src/internalClusterTest/java/org/opensearch/cluster/shards/ClusterShardLimitIT.java
+++ b/server/src/internalClusterTest/java/org/opensearch/cluster/shards/ClusterShardLimitIT.java
@@ -46,7 +46,7 @@
import org.opensearch.common.Priority;
import org.opensearch.common.network.NetworkModule;
import org.opensearch.common.settings.Settings;
-import org.opensearch.common.unit.ByteSizeUnit;
+import org.opensearch.core.common.unit.ByteSizeUnit;
import org.opensearch.common.util.io.IOUtils;
import org.opensearch.indices.ShardLimitValidator;
import org.opensearch.snapshots.SnapshotInfo;
diff --git a/server/src/internalClusterTest/java/org/opensearch/document/DocumentActionsIT.java b/server/src/internalClusterTest/java/org/opensearch/document/DocumentActionsIT.java
index 581b352e917f0..10e6aa906ecc9 100644
--- a/server/src/internalClusterTest/java/org/opensearch/document/DocumentActionsIT.java
+++ b/server/src/internalClusterTest/java/org/opensearch/document/DocumentActionsIT.java
@@ -43,7 +43,6 @@
import org.opensearch.action.search.SearchResponse;
import org.opensearch.action.support.WriteRequest.RefreshPolicy;
import org.opensearch.cluster.health.ClusterHealthStatus;
-import org.opensearch.common.Strings;
import org.opensearch.core.xcontent.XContentBuilder;
import org.opensearch.common.xcontent.XContentFactory;
import org.opensearch.common.xcontent.XContentType;
@@ -117,10 +116,10 @@ public void testIndexActions() throws Exception {
for (int i = 0; i < 5; i++) {
getResult = client().prepareGet("test", "1").execute().actionGet();
assertThat(getResult.getIndex(), equalTo(getConcreteIndexName()));
- assertThat("cycle #" + i, getResult.getSourceAsString(), equalTo(Strings.toString(source("1", "test"))));
+ assertThat("cycle #" + i, getResult.getSourceAsString(), equalTo(source("1", "test").toString()));
assertThat("cycle(map) #" + i, (String) getResult.getSourceAsMap().get("name"), equalTo("test"));
getResult = client().get(getRequest("test").id("1")).actionGet();
- assertThat("cycle #" + i, getResult.getSourceAsString(), equalTo(Strings.toString(source("1", "test"))));
+ assertThat("cycle #" + i, getResult.getSourceAsString(), equalTo(source("1", "test").toString()));
assertThat(getResult.getIndex(), equalTo(getConcreteIndexName()));
}
@@ -168,10 +167,10 @@ public void testIndexActions() throws Exception {
for (int i = 0; i < 5; i++) {
getResult = client().get(getRequest("test").id("1")).actionGet();
assertThat(getResult.getIndex(), equalTo(getConcreteIndexName()));
- assertThat("cycle #" + i, getResult.getSourceAsString(), equalTo(Strings.toString(source("1", "test"))));
+ assertThat("cycle #" + i, getResult.getSourceAsString(), equalTo(source("1", "test").toString()));
getResult = client().get(getRequest("test").id("2")).actionGet();
String ste1 = getResult.getSourceAsString();
- String ste2 = Strings.toString(source("2", "test2"));
+ String ste2 = source("2", "test2").toString();
assertThat("cycle #" + i, ste1, equalTo(ste2));
assertThat(getResult.getIndex(), equalTo(getConcreteIndexName()));
}
@@ -258,15 +257,15 @@ public void testBulk() throws Exception {
assertThat("cycle #" + i, getResult.isExists(), equalTo(false));
getResult = client().get(getRequest("test").id("2")).actionGet();
- assertThat("cycle #" + i, getResult.getSourceAsString(), equalTo(Strings.toString(source("2", "test"))));
+ assertThat("cycle #" + i, getResult.getSourceAsString(), equalTo(source("2", "test").toString()));
assertThat(getResult.getIndex(), equalTo(getConcreteIndexName()));
getResult = client().get(getRequest("test").id(generatedId3)).actionGet();
- assertThat("cycle #" + i, getResult.getSourceAsString(), equalTo(Strings.toString(source("3", "test"))));
+ assertThat("cycle #" + i, getResult.getSourceAsString(), equalTo(source("3", "test").toString()));
assertThat(getResult.getIndex(), equalTo(getConcreteIndexName()));
getResult = client().get(getRequest("test").id(generatedId4)).actionGet();
- assertThat("cycle #" + i, getResult.getSourceAsString(), equalTo(Strings.toString(source("4", "test"))));
+ assertThat("cycle #" + i, getResult.getSourceAsString(), equalTo(source("4", "test").toString()));
assertThat(getResult.getIndex(), equalTo(getConcreteIndexName()));
}
}
diff --git a/server/src/internalClusterTest/java/org/opensearch/gateway/RecoveryFromGatewayIT.java b/server/src/internalClusterTest/java/org/opensearch/gateway/RecoveryFromGatewayIT.java
index 0b3a689e81b94..c88bf942fa8d0 100644
--- a/server/src/internalClusterTest/java/org/opensearch/gateway/RecoveryFromGatewayIT.java
+++ b/server/src/internalClusterTest/java/org/opensearch/gateway/RecoveryFromGatewayIT.java
@@ -46,7 +46,6 @@
import org.opensearch.cluster.node.DiscoveryNode;
import org.opensearch.cluster.routing.UnassignedInfo;
import org.opensearch.cluster.service.ClusterService;
-import org.opensearch.common.Strings;
import org.opensearch.common.settings.Settings;
import org.opensearch.common.xcontent.XContentFactory;
import org.opensearch.env.NodeEnvironment;
@@ -110,16 +109,15 @@ public void testOneNodeRecoverFromGateway() throws Exception {
internalCluster().startNode();
- String mapping = Strings.toString(
- XContentFactory.jsonBuilder()
- .startObject()
- .startObject("properties")
- .startObject("appAccountIds")
- .field("type", "text")
- .endObject()
- .endObject()
- .endObject()
- );
+ String mapping = XContentFactory.jsonBuilder()
+ .startObject()
+ .startObject("properties")
+ .startObject("appAccountIds")
+ .field("type", "text")
+ .endObject()
+ .endObject()
+ .endObject()
+ .toString();
assertAcked(prepareCreate("test").setMapping(mapping));
client().prepareIndex("test")
@@ -204,19 +202,18 @@ private Map assertAndCapturePrimaryTerms(Map pre
public void testSingleNodeNoFlush() throws Exception {
internalCluster().startNode();
- String mapping = Strings.toString(
- XContentFactory.jsonBuilder()
- .startObject()
- .startObject("properties")
- .startObject("field")
- .field("type", "text")
- .endObject()
- .startObject("num")
- .field("type", "integer")
- .endObject()
- .endObject()
- .endObject()
- );
+ String mapping = XContentFactory.jsonBuilder()
+ .startObject()
+ .startObject("properties")
+ .startObject("field")
+ .field("type", "text")
+ .endObject()
+ .startObject("num")
+ .field("type", "integer")
+ .endObject()
+ .endObject()
+ .endObject()
+ .toString();
// note: default replica settings are tied to #data nodes-1 which is 0 here. We can do with 1 in this test.
int numberOfShards = numberOfShards();
assertAcked(
diff --git a/server/src/internalClusterTest/java/org/opensearch/gateway/ReplicaShardAllocatorIT.java b/server/src/internalClusterTest/java/org/opensearch/gateway/ReplicaShardAllocatorIT.java
index 9a465c2f9121c..e48e13b471ba5 100644
--- a/server/src/internalClusterTest/java/org/opensearch/gateway/ReplicaShardAllocatorIT.java
+++ b/server/src/internalClusterTest/java/org/opensearch/gateway/ReplicaShardAllocatorIT.java
@@ -38,8 +38,8 @@
import org.opensearch.cluster.routing.UnassignedInfo;
import org.opensearch.cluster.service.ClusterService;
import org.opensearch.common.Priority;
-import org.opensearch.common.breaker.CircuitBreaker;
-import org.opensearch.common.breaker.CircuitBreakingException;
+import org.opensearch.core.common.breaker.CircuitBreaker;
+import org.opensearch.core.common.breaker.CircuitBreakingException;
import org.opensearch.common.settings.Settings;
import org.opensearch.core.index.Index;
import org.opensearch.index.IndexService;
diff --git a/server/src/internalClusterTest/java/org/opensearch/get/GetActionIT.java b/server/src/internalClusterTest/java/org/opensearch/get/GetActionIT.java
index 997e8e9d5258b..2375c62342533 100644
--- a/server/src/internalClusterTest/java/org/opensearch/get/GetActionIT.java
+++ b/server/src/internalClusterTest/java/org/opensearch/get/GetActionIT.java
@@ -45,12 +45,12 @@
import org.opensearch.action.index.IndexResponse;
import org.opensearch.core.action.support.DefaultShardOperationFailedException;
import org.opensearch.common.Nullable;
-import org.opensearch.common.Strings;
-import org.opensearch.core.common.bytes.BytesReference;
import org.opensearch.common.lucene.uid.Versions;
import org.opensearch.common.settings.Settings;
import org.opensearch.common.xcontent.XContentFactory;
import org.opensearch.common.xcontent.XContentType;
+import org.opensearch.core.common.Strings;
+import org.opensearch.core.common.bytes.BytesReference;
import org.opensearch.index.engine.VersionConflictEngineException;
import org.opensearch.plugins.Plugin;
import org.opensearch.core.rest.RestStatus;
@@ -288,17 +288,16 @@ public void testSimpleMultiGet() throws Exception {
}
public void testGetDocWithMultivaluedFields() throws Exception {
- String mapping1 = Strings.toString(
- XContentFactory.jsonBuilder()
- .startObject()
- .startObject("properties")
- .startObject("field")
- .field("type", "text")
- .field("store", true)
- .endObject()
- .endObject()
- .endObject()
- );
+ String mapping1 = XContentFactory.jsonBuilder()
+ .startObject()
+ .startObject("properties")
+ .startObject("field")
+ .field("type", "text")
+ .field("store", true)
+ .endObject()
+ .endObject()
+ .endObject()
+ .toString();
assertAcked(prepareCreate("test").setMapping(mapping1));
ensureGreen();
diff --git a/server/src/internalClusterTest/java/org/opensearch/index/SegmentReplicationPressureIT.java b/server/src/internalClusterTest/java/org/opensearch/index/SegmentReplicationPressureIT.java
index 60ff82e617dbd..cf73c370cce8f 100644
--- a/server/src/internalClusterTest/java/org/opensearch/index/SegmentReplicationPressureIT.java
+++ b/server/src/internalClusterTest/java/org/opensearch/index/SegmentReplicationPressureIT.java
@@ -15,6 +15,7 @@
import org.opensearch.common.settings.Settings;
import org.opensearch.common.unit.TimeValue;
import org.opensearch.common.lease.Releasable;
+import org.opensearch.common.util.FeatureFlags;
import org.opensearch.core.concurrency.OpenSearchRejectedExecutionException;
import org.opensearch.index.shard.IndexShard;
import org.opensearch.index.shard.IndexShardState;
@@ -30,6 +31,7 @@
import java.util.Collection;
import java.util.Collections;
import java.util.List;
+import java.util.Objects;
import java.util.concurrent.CountDownLatch;
import java.util.concurrent.TimeUnit;
import java.util.concurrent.atomic.AtomicInteger;
@@ -260,7 +262,7 @@ public void testFailStaleReplica() throws Exception {
public void testWithDocumentReplicationEnabledIndex() throws Exception {
assumeTrue(
"Can't create DocRep index with remote store enabled. Skipping.",
- indexSettings().getAsBoolean(IndexMetadata.SETTING_REMOTE_STORE_ENABLED, false) == false
+ Objects.equals(featureFlagSettings().get(FeatureFlags.REMOTE_STORE, "false"), "false")
);
Settings settings = Settings.builder().put(MAX_REPLICATION_TIME_SETTING.getKey(), TimeValue.timeValueMillis(500)).build();
// Starts a primary and replica node.
diff --git a/server/src/internalClusterTest/java/org/opensearch/index/codec/CodecCompressionLevelIT.java b/server/src/internalClusterTest/java/org/opensearch/index/codec/CodecCompressionLevelIT.java
new file mode 100644
index 0000000000000..5f3e53f1454fc
--- /dev/null
+++ b/server/src/internalClusterTest/java/org/opensearch/index/codec/CodecCompressionLevelIT.java
@@ -0,0 +1,178 @@
+/*
+ * SPDX-License-Identifier: Apache-2.0
+ *
+ * The OpenSearch Contributors require contributions made to
+ * this file be licensed under the Apache-2.0 license or a
+ * compatible open source license.
+ */
+
+package org.opensearch.index.codec;
+
+import org.apache.logging.log4j.core.util.Throwables;
+import org.opensearch.action.admin.indices.settings.put.UpdateSettingsRequest;
+import org.opensearch.cluster.metadata.IndexMetadata;
+import org.opensearch.common.settings.Settings;
+import org.opensearch.test.OpenSearchIntegTestCase;
+
+import java.util.concurrent.ExecutionException;
+
+import static org.opensearch.test.hamcrest.OpenSearchAssertions.assertAcked;
+
+@OpenSearchIntegTestCase.ClusterScope(scope = OpenSearchIntegTestCase.Scope.TEST)
+public class CodecCompressionLevelIT extends OpenSearchIntegTestCase {
+
+ public void testLuceneCodecsCreateIndexWithCompressionLevel() {
+
+ internalCluster().ensureAtLeastNumDataNodes(1);
+ final String index = "test-index";
+
+ // creating index
+ assertThrows(
+ IllegalArgumentException.class,
+ () -> createIndex(
+ index,
+ Settings.builder()
+ .put(IndexMetadata.SETTING_NUMBER_OF_SHARDS, 1)
+ .put(IndexMetadata.SETTING_NUMBER_OF_REPLICAS, 0)
+ .put("index.codec", randomFrom(CodecService.DEFAULT_CODEC, CodecService.BEST_COMPRESSION_CODEC))
+ .put("index.codec.compression_level", randomIntBetween(1, 6))
+ .build()
+ )
+ );
+
+ createIndex(
+ index,
+ Settings.builder()
+ .put(IndexMetadata.SETTING_NUMBER_OF_SHARDS, 1)
+ .put(IndexMetadata.SETTING_NUMBER_OF_REPLICAS, 0)
+ .put("index.codec", randomFrom(CodecService.DEFAULT_CODEC, CodecService.BEST_COMPRESSION_CODEC))
+ .build()
+ );
+ ensureGreen(index);
+ }
+
+ public void testZStandardCodecsCreateIndexWithCompressionLevel() {
+
+ internalCluster().ensureAtLeastNumDataNodes(1);
+ final String index = "test-index";
+
+ // creating index
+ createIndex(
+ index,
+ Settings.builder()
+ .put(IndexMetadata.SETTING_NUMBER_OF_SHARDS, 1)
+ .put(IndexMetadata.SETTING_NUMBER_OF_REPLICAS, 0)
+ .put("index.codec", randomFrom(CodecService.ZSTD_CODEC, CodecService.ZSTD_NO_DICT_CODEC))
+ .put("index.codec.compression_level", randomIntBetween(1, 6))
+ .build()
+ );
+
+ ensureGreen(index);
+ }
+
+ public void testZStandardToLuceneCodecsWithCompressionLevel() throws ExecutionException, InterruptedException {
+
+ internalCluster().ensureAtLeastNumDataNodes(1);
+ final String index = "test-index";
+
+ // creating index
+ createIndex(
+ index,
+ Settings.builder()
+ .put(IndexMetadata.SETTING_NUMBER_OF_SHARDS, 1)
+ .put(IndexMetadata.SETTING_NUMBER_OF_REPLICAS, 0)
+ .put("index.codec", randomFrom(CodecService.ZSTD_CODEC, CodecService.ZSTD_NO_DICT_CODEC))
+ .put("index.codec.compression_level", randomIntBetween(1, 6))
+ .build()
+ );
+ ensureGreen(index);
+
+ assertAcked(client().admin().indices().prepareClose(index));
+
+ Throwable executionException = expectThrows(
+ ExecutionException.class,
+ () -> client().admin()
+ .indices()
+ .updateSettings(
+ new UpdateSettingsRequest(index).settings(
+ Settings.builder().put("index.codec", randomFrom(CodecService.DEFAULT_CODEC, CodecService.BEST_COMPRESSION_CODEC))
+ )
+ )
+ .get()
+ );
+
+ Throwable rootCause = Throwables.getRootCause(executionException);
+ assertEquals(IllegalArgumentException.class, rootCause.getClass());
+ assertTrue(rootCause.getMessage().startsWith("Compression level cannot be set"));
+
+ assertAcked(
+ client().admin()
+ .indices()
+ .updateSettings(
+ new UpdateSettingsRequest(index).settings(
+ Settings.builder()
+ .put("index.codec", randomFrom(CodecService.DEFAULT_CODEC, CodecService.BEST_COMPRESSION_CODEC))
+ .put("index.codec.compression_level", (String) null)
+ )
+ )
+ .get()
+ );
+
+ assertAcked(client().admin().indices().prepareOpen(index));
+ ensureGreen(index);
+ }
+
+ public void testLuceneToZStandardCodecsWithCompressionLevel() throws ExecutionException, InterruptedException {
+
+ internalCluster().ensureAtLeastNumDataNodes(1);
+ final String index = "test-index";
+
+ // creating index
+ createIndex(
+ index,
+ Settings.builder()
+ .put(IndexMetadata.SETTING_NUMBER_OF_SHARDS, 1)
+ .put(IndexMetadata.SETTING_NUMBER_OF_REPLICAS, 0)
+ .put("index.codec", randomFrom(CodecService.DEFAULT_CODEC, CodecService.BEST_COMPRESSION_CODEC))
+ .build()
+ );
+ ensureGreen(index);
+
+ assertAcked(client().admin().indices().prepareClose(index));
+
+ Throwable executionException = expectThrows(
+ ExecutionException.class,
+ () -> client().admin()
+ .indices()
+ .updateSettings(
+ new UpdateSettingsRequest(index).settings(
+ Settings.builder()
+ .put("index.codec", randomFrom(CodecService.DEFAULT_CODEC, CodecService.BEST_COMPRESSION_CODEC))
+ .put("index.codec.compression_level", randomIntBetween(1, 6))
+ )
+ )
+ .get()
+ );
+
+ Throwable rootCause = Throwables.getRootCause(executionException);
+ assertEquals(IllegalArgumentException.class, rootCause.getClass());
+ assertTrue(rootCause.getMessage().startsWith("Compression level cannot be set"));
+
+ assertAcked(
+ client().admin()
+ .indices()
+ .updateSettings(
+ new UpdateSettingsRequest(index).settings(
+ Settings.builder()
+ .put("index.codec", randomFrom(CodecService.ZSTD_CODEC, CodecService.ZSTD_NO_DICT_CODEC))
+ .put("index.codec.compression_level", randomIntBetween(1, 6))
+ )
+ )
+ .get()
+ );
+
+ assertAcked(client().admin().indices().prepareOpen(index));
+ ensureGreen(index);
+ }
+
+}
diff --git a/server/src/internalClusterTest/java/org/opensearch/index/codec/MultiCodecMergeIT.java b/server/src/internalClusterTest/java/org/opensearch/index/codec/MultiCodecMergeIT.java
index 2866292e5e2e0..0bc78d82d9dfd 100644
--- a/server/src/internalClusterTest/java/org/opensearch/index/codec/MultiCodecMergeIT.java
+++ b/server/src/internalClusterTest/java/org/opensearch/index/codec/MultiCodecMergeIT.java
@@ -45,11 +45,15 @@ public void testForceMergeMultipleCodecs() throws ExecutionException, Interrupte
Map codecMap = Map.of(
"best_compression",
"BEST_COMPRESSION",
+ "zlib",
+ "BEST_COMPRESSION",
"zstd_no_dict",
"ZSTD_NO_DICT",
"zstd",
"ZSTD",
"default",
+ "BEST_SPEED",
+ "lz4",
"BEST_SPEED"
);
diff --git a/server/src/internalClusterTest/java/org/opensearch/index/mapper/CopyToMapperIntegrationIT.java b/server/src/internalClusterTest/java/org/opensearch/index/mapper/CopyToMapperIntegrationIT.java
index f4ccea40e6e3f..a25b6049e92a0 100644
--- a/server/src/internalClusterTest/java/org/opensearch/index/mapper/CopyToMapperIntegrationIT.java
+++ b/server/src/internalClusterTest/java/org/opensearch/index/mapper/CopyToMapperIntegrationIT.java
@@ -33,7 +33,6 @@
package org.opensearch.index.mapper;
import org.opensearch.action.search.SearchResponse;
-import org.opensearch.common.Strings;
import org.opensearch.core.xcontent.XContentBuilder;
import org.opensearch.common.xcontent.XContentFactory;
import org.opensearch.index.query.QueryBuilders;
@@ -78,16 +77,15 @@ public void testDynamicTemplateCopyTo() throws Exception {
}
public void testDynamicObjectCopyTo() throws Exception {
- String mapping = Strings.toString(
- jsonBuilder().startObject()
- .startObject("properties")
- .startObject("foo")
- .field("type", "text")
- .field("copy_to", "root.top.child")
- .endObject()
- .endObject()
- .endObject()
- );
+ String mapping = jsonBuilder().startObject()
+ .startObject("properties")
+ .startObject("foo")
+ .field("type", "text")
+ .field("copy_to", "root.top.child")
+ .endObject()
+ .endObject()
+ .endObject()
+ .toString();
assertAcked(client().admin().indices().prepareCreate("test-idx").setMapping(mapping));
client().prepareIndex("test-idx").setId("1").setSource("foo", "bar").get();
client().admin().indices().prepareRefresh("test-idx").execute().actionGet();
diff --git a/server/src/internalClusterTest/java/org/opensearch/index/shard/IndexShardIT.java b/server/src/internalClusterTest/java/org/opensearch/index/shard/IndexShardIT.java
index ba9f335cd24d4..d9eeb3f7f8f42 100644
--- a/server/src/internalClusterTest/java/org/opensearch/index/shard/IndexShardIT.java
+++ b/server/src/internalClusterTest/java/org/opensearch/index/shard/IndexShardIT.java
@@ -53,13 +53,13 @@
import org.opensearch.cluster.service.ClusterService;
import org.opensearch.common.CheckedFunction;
import org.opensearch.common.CheckedRunnable;
-import org.opensearch.common.Strings;
import org.opensearch.common.UUIDs;
+import org.opensearch.core.common.Strings;
import org.opensearch.core.common.bytes.BytesArray;
import org.opensearch.common.lucene.uid.Versions;
import org.opensearch.common.settings.Settings;
-import org.opensearch.common.unit.ByteSizeUnit;
-import org.opensearch.common.unit.ByteSizeValue;
+import org.opensearch.core.common.unit.ByteSizeUnit;
+import org.opensearch.core.common.unit.ByteSizeValue;
import org.opensearch.common.unit.TimeValue;
import org.opensearch.common.xcontent.XContentType;
import org.opensearch.common.util.io.IOUtils;
@@ -84,7 +84,7 @@
import org.opensearch.index.translog.Translog;
import org.opensearch.index.translog.TranslogStats;
import org.opensearch.indices.IndicesService;
-import org.opensearch.indices.breaker.CircuitBreakerService;
+import org.opensearch.core.indices.breaker.CircuitBreakerService;
import org.opensearch.indices.recovery.RecoveryState;
import org.opensearch.indices.replication.checkpoint.SegmentReplicationCheckpointPublisher;
import org.opensearch.plugins.Plugin;
diff --git a/server/src/internalClusterTest/java/org/opensearch/index/shard/RemoveCorruptedShardDataCommandIT.java b/server/src/internalClusterTest/java/org/opensearch/index/shard/RemoveCorruptedShardDataCommandIT.java
index b6124ff09d992..986155b99217e 100644
--- a/server/src/internalClusterTest/java/org/opensearch/index/shard/RemoveCorruptedShardDataCommandIT.java
+++ b/server/src/internalClusterTest/java/org/opensearch/index/shard/RemoveCorruptedShardDataCommandIT.java
@@ -65,8 +65,8 @@
import org.opensearch.cluster.routing.allocation.command.AllocateStalePrimaryAllocationCommand;
import org.opensearch.common.io.PathUtils;
import org.opensearch.common.settings.Settings;
-import org.opensearch.common.unit.ByteSizeUnit;
-import org.opensearch.common.unit.ByteSizeValue;
+import org.opensearch.core.common.unit.ByteSizeUnit;
+import org.opensearch.core.common.unit.ByteSizeValue;
import org.opensearch.core.index.shard.ShardId;
import org.opensearch.env.Environment;
import org.opensearch.env.NodeEnvironment;
@@ -105,7 +105,7 @@
import java.util.stream.StreamSupport;
import static org.opensearch.action.admin.cluster.node.stats.NodesStatsRequest.Metric.FS;
-import static org.opensearch.common.util.CollectionUtils.iterableAsArrayList;
+import static org.opensearch.core.common.util.CollectionUtils.iterableAsArrayList;
import static org.opensearch.index.query.QueryBuilders.matchAllQuery;
import static org.opensearch.test.hamcrest.OpenSearchAssertions.assertAcked;
import static org.opensearch.test.hamcrest.OpenSearchAssertions.assertHitCount;
diff --git a/server/src/internalClusterTest/java/org/opensearch/index/store/CorruptedFileIT.java b/server/src/internalClusterTest/java/org/opensearch/index/store/CorruptedFileIT.java
index d51e4bbff11b5..52ee6fd10ced7 100644
--- a/server/src/internalClusterTest/java/org/opensearch/index/store/CorruptedFileIT.java
+++ b/server/src/internalClusterTest/java/org/opensearch/index/store/CorruptedFileIT.java
@@ -66,8 +66,8 @@
import org.opensearch.common.io.stream.BytesStreamOutput;
import org.opensearch.common.lucene.Lucene;
import org.opensearch.common.settings.Settings;
-import org.opensearch.common.unit.ByteSizeUnit;
-import org.opensearch.common.unit.ByteSizeValue;
+import org.opensearch.core.common.unit.ByteSizeUnit;
+import org.opensearch.core.common.unit.ByteSizeValue;
import org.opensearch.common.unit.TimeValue;
import org.opensearch.env.NodeEnvironment;
import org.opensearch.core.index.Index;
@@ -112,7 +112,7 @@
import java.util.stream.Collectors;
import static org.opensearch.action.admin.cluster.node.stats.NodesStatsRequest.Metric.FS;
-import static org.opensearch.common.util.CollectionUtils.iterableAsArrayList;
+import static org.opensearch.core.common.util.CollectionUtils.iterableAsArrayList;
import static org.opensearch.test.hamcrest.OpenSearchAssertions.assertAcked;
import static org.opensearch.test.hamcrest.OpenSearchAssertions.assertAllSuccessful;
import static org.opensearch.test.hamcrest.OpenSearchAssertions.assertHitCount;
diff --git a/server/src/internalClusterTest/java/org/opensearch/index/store/CorruptedTranslogIT.java b/server/src/internalClusterTest/java/org/opensearch/index/store/CorruptedTranslogIT.java
index 1dd0f6a3d664e..6cc1c51ed65d9 100644
--- a/server/src/internalClusterTest/java/org/opensearch/index/store/CorruptedTranslogIT.java
+++ b/server/src/internalClusterTest/java/org/opensearch/index/store/CorruptedTranslogIT.java
@@ -38,8 +38,8 @@
import org.opensearch.action.search.SearchPhaseExecutionException;
import org.opensearch.cluster.routing.UnassignedInfo;
import org.opensearch.common.settings.Settings;
-import org.opensearch.common.unit.ByteSizeUnit;
-import org.opensearch.common.unit.ByteSizeValue;
+import org.opensearch.core.common.unit.ByteSizeUnit;
+import org.opensearch.core.common.unit.ByteSizeValue;
import org.opensearch.index.IndexSettings;
import org.opensearch.index.MockEngineFactoryPlugin;
import org.opensearch.index.translog.TestTranslog;
diff --git a/server/src/internalClusterTest/java/org/opensearch/indices/mapping/SimpleGetFieldMappingsIT.java b/server/src/internalClusterTest/java/org/opensearch/indices/mapping/SimpleGetFieldMappingsIT.java
index 30c6a0dc068e5..1525c7bada9ac 100644
--- a/server/src/internalClusterTest/java/org/opensearch/indices/mapping/SimpleGetFieldMappingsIT.java
+++ b/server/src/internalClusterTest/java/org/opensearch/indices/mapping/SimpleGetFieldMappingsIT.java
@@ -34,7 +34,6 @@
import org.opensearch.action.admin.indices.mapping.get.GetFieldMappingsResponse;
import org.opensearch.action.admin.indices.mapping.get.GetFieldMappingsResponse.FieldMappingMetadata;
-import org.opensearch.common.Strings;
import org.opensearch.core.xcontent.ToXContent;
import org.opensearch.core.xcontent.XContentBuilder;
import org.opensearch.common.xcontent.XContentFactory;
@@ -187,22 +186,22 @@ public void testSimpleGetFieldMappingsWithPretty() throws Exception {
.get();
XContentBuilder responseBuilder = XContentFactory.jsonBuilder().prettyPrint();
response.toXContent(responseBuilder, new ToXContent.MapParams(params));
- String responseStrings = Strings.toString(responseBuilder);
+ String responseStrings = responseBuilder.toString();
XContentBuilder prettyJsonBuilder = XContentFactory.jsonBuilder().prettyPrint();
prettyJsonBuilder.copyCurrentStructure(createParser(JsonXContent.jsonXContent, responseStrings));
- assertThat(responseStrings, equalTo(Strings.toString(prettyJsonBuilder)));
+ assertThat(responseStrings, equalTo(prettyJsonBuilder.toString()));
params.put("pretty", "false");
response = client().admin().indices().prepareGetFieldMappings("index").setFields("field1", "obj.subfield").get();
responseBuilder = XContentFactory.jsonBuilder().prettyPrint().lfAtEnd();
response.toXContent(responseBuilder, new ToXContent.MapParams(params));
- responseStrings = Strings.toString(responseBuilder);
+ responseStrings = responseBuilder.toString();
prettyJsonBuilder = XContentFactory.jsonBuilder().prettyPrint();
prettyJsonBuilder.copyCurrentStructure(createParser(JsonXContent.jsonXContent, responseStrings));
-        assertThat(responseStrings, not(equalTo(Strings.toString(prettyJsonBuilder))));
+        assertThat(responseStrings, not(equalTo(prettyJsonBuilder.toString())));
}
diff --git a/server/src/internalClusterTest/java/org/opensearch/indices/memory/breaker/CircuitBreakerServiceIT.java b/server/src/internalClusterTest/java/org/opensearch/indices/memory/breaker/CircuitBreakerServiceIT.java
index 2ab44f8318617..10bd179ddc5fd 100644
--- a/server/src/internalClusterTest/java/org/opensearch/indices/memory/breaker/CircuitBreakerServiceIT.java
+++ b/server/src/internalClusterTest/java/org/opensearch/indices/memory/breaker/CircuitBreakerServiceIT.java
@@ -46,17 +46,17 @@
import org.opensearch.client.Requests;
import org.opensearch.cluster.metadata.IndexMetadata;
import org.opensearch.cluster.routing.allocation.decider.EnableAllocationDecider;
-import org.opensearch.common.breaker.CircuitBreaker;
-import org.opensearch.common.breaker.CircuitBreakingException;
-import org.opensearch.common.breaker.NoopCircuitBreaker;
+import org.opensearch.core.common.breaker.NoopCircuitBreaker;
import org.opensearch.common.settings.Settings;
-import org.opensearch.common.unit.ByteSizeUnit;
-import org.opensearch.common.unit.ByteSizeValue;
import org.opensearch.common.unit.TimeValue;
-import org.opensearch.common.xcontent.XContentType;
-import org.opensearch.indices.breaker.CircuitBreakerStats;
-import org.opensearch.indices.breaker.HierarchyCircuitBreakerService;
+import org.opensearch.core.common.breaker.CircuitBreaker;
+import org.opensearch.core.common.breaker.CircuitBreakingException;
+import org.opensearch.core.common.unit.ByteSizeUnit;
+import org.opensearch.core.common.unit.ByteSizeValue;
+import org.opensearch.core.indices.breaker.CircuitBreakerStats;
+import org.opensearch.core.xcontent.MediaTypeRegistry;
import org.opensearch.core.rest.RestStatus;
+import org.opensearch.indices.breaker.HierarchyCircuitBreakerService;
import org.opensearch.search.sort.SortOrder;
import org.opensearch.test.OpenSearchIntegTestCase;
import org.opensearch.test.OpenSearchIntegTestCase.ClusterScope;
@@ -197,7 +197,7 @@ public void testRamAccountingTermsEnum() throws Exception {
prepareCreate("ramtest").setSource(
"{\"mappings\": {\"type\": {\"properties\": {\"test\": "
+ "{\"type\": \"text\",\"fielddata\": true,\"fielddata_frequency_filter\": {\"max\": 10000}}}}}}",
- XContentType.JSON
+ MediaTypeRegistry.JSON
)
);
diff --git a/server/src/internalClusterTest/java/org/opensearch/indices/memory/breaker/RandomExceptionCircuitBreakerIT.java b/server/src/internalClusterTest/java/org/opensearch/indices/memory/breaker/RandomExceptionCircuitBreakerIT.java
index 341c0a965f94e..62efdc6a722ee 100644
--- a/server/src/internalClusterTest/java/org/opensearch/indices/memory/breaker/RandomExceptionCircuitBreakerIT.java
+++ b/server/src/internalClusterTest/java/org/opensearch/indices/memory/breaker/RandomExceptionCircuitBreakerIT.java
@@ -43,8 +43,7 @@
import org.opensearch.action.admin.indices.refresh.RefreshResponse;
import org.opensearch.action.search.SearchPhaseExecutionException;
import org.opensearch.action.search.SearchRequestBuilder;
-import org.opensearch.common.Strings;
-import org.opensearch.common.breaker.CircuitBreaker;
+import org.opensearch.core.common.breaker.CircuitBreaker;
import org.opensearch.common.settings.Setting;
import org.opensearch.common.settings.Setting.Property;
import org.opensearch.common.settings.Settings;
@@ -99,22 +98,20 @@ public void testBreakerWithRandomExceptions() throws IOException, InterruptedExc
assertThat("Breaker is not set to 0", node.getBreaker().getStats(CircuitBreaker.FIELDDATA).getEstimated(), equalTo(0L));
}
- String mapping = Strings // {}
- .toString(
- XContentFactory.jsonBuilder()
- .startObject()
- .startObject("properties")
- .startObject("test-str")
- .field("type", "keyword")
- .field("doc_values", randomBoolean())
- .endObject() // test-str
- .startObject("test-num")
- // I don't use randomNumericType() here because I don't want "byte", and I want "float" and "double"
- .field("type", randomFrom(Arrays.asList("float", "long", "double", "short", "integer")))
- .endObject() // test-num
- .endObject() // properties
- .endObject()
- );
+ String mapping = XContentFactory.jsonBuilder()
+ .startObject()
+ .startObject("properties")
+ .startObject("test-str")
+ .field("type", "keyword")
+ .field("doc_values", randomBoolean())
+ .endObject() // test-str
+ .startObject("test-num")
+ // I don't use randomNumericType() here because I don't want "byte", and I want "float" and "double"
+ .field("type", randomFrom(Arrays.asList("float", "long", "double", "short", "integer")))
+ .endObject() // test-num
+ .endObject() // properties
+ .endObject()
+ .toString();
final double topLevelRate;
final double lowLevelRate;
if (frequently()) {
diff --git a/server/src/internalClusterTest/java/org/opensearch/indices/recovery/IndexPrimaryRelocationIT.java b/server/src/internalClusterTest/java/org/opensearch/indices/recovery/IndexPrimaryRelocationIT.java
index 32a10451a0dd3..e9962706bcd39 100644
--- a/server/src/internalClusterTest/java/org/opensearch/indices/recovery/IndexPrimaryRelocationIT.java
+++ b/server/src/internalClusterTest/java/org/opensearch/indices/recovery/IndexPrimaryRelocationIT.java
@@ -56,14 +56,16 @@ public class IndexPrimaryRelocationIT extends OpenSearchIntegTestCase {
private static final int RELOCATION_COUNT = 15;
+ public void setup() {}
+
+ public Settings indexSettings() {
+ return Settings.builder().put("index.number_of_shards", 1).put("index.number_of_replicas", 0).build();
+ }
+
public void testPrimaryRelocationWhileIndexing() throws Exception {
internalCluster().ensureAtLeastNumDataNodes(randomIntBetween(2, 3));
- client().admin()
- .indices()
- .prepareCreate("test")
- .setSettings(Settings.builder().put("index.number_of_shards", 1).put("index.number_of_replicas", 0))
- .setMapping("field", "type=text")
- .get();
+ setup();
+ client().admin().indices().prepareCreate("test").setSettings(indexSettings()).setMapping("field", "type=text").get();
ensureGreen("test");
AtomicInteger numAutoGenDocs = new AtomicInteger();
final AtomicBoolean finished = new AtomicBoolean(false);
diff --git a/server/src/internalClusterTest/java/org/opensearch/indices/recovery/IndexRecoveryIT.java b/server/src/internalClusterTest/java/org/opensearch/indices/recovery/IndexRecoveryIT.java
index c31b5e1f3bc5b..efd43ec5ad82d 100644
--- a/server/src/internalClusterTest/java/org/opensearch/indices/recovery/IndexRecoveryIT.java
+++ b/server/src/internalClusterTest/java/org/opensearch/indices/recovery/IndexRecoveryIT.java
@@ -73,13 +73,13 @@
import org.opensearch.cluster.service.ClusterService;
import org.opensearch.common.Priority;
import org.opensearch.common.SetOnce;
-import org.opensearch.common.Strings;
-import org.opensearch.common.breaker.CircuitBreaker;
-import org.opensearch.common.breaker.CircuitBreakingException;
+import org.opensearch.core.common.Strings;
+import org.opensearch.core.common.breaker.CircuitBreaker;
+import org.opensearch.core.common.breaker.CircuitBreakingException;
import org.opensearch.common.concurrent.GatedCloseable;
import org.opensearch.common.settings.Settings;
-import org.opensearch.common.unit.ByteSizeUnit;
-import org.opensearch.common.unit.ByteSizeValue;
+import org.opensearch.core.common.unit.ByteSizeUnit;
+import org.opensearch.core.common.unit.ByteSizeValue;
import org.opensearch.common.unit.TimeValue;
import org.opensearch.core.concurrency.OpenSearchRejectedExecutionException;
import org.opensearch.common.xcontent.XContentType;
diff --git a/server/src/internalClusterTest/java/org/opensearch/indices/replication/SegmentReplicationBaseIT.java b/server/src/internalClusterTest/java/org/opensearch/indices/replication/SegmentReplicationBaseIT.java
index 64c6ebbb33482..cfb2e11c8c429 100644
--- a/server/src/internalClusterTest/java/org/opensearch/indices/replication/SegmentReplicationBaseIT.java
+++ b/server/src/internalClusterTest/java/org/opensearch/indices/replication/SegmentReplicationBaseIT.java
@@ -8,7 +8,6 @@
package org.opensearch.indices.replication;
-import org.opensearch.action.admin.indices.replication.SegmentReplicationStatsResponse;
import org.opensearch.action.search.SearchResponse;
import org.opensearch.cluster.ClusterState;
import org.opensearch.cluster.metadata.IndexMetadata;
@@ -24,7 +23,6 @@
import org.opensearch.core.index.shard.ShardId;
import org.opensearch.index.IndexModule;
import org.opensearch.index.IndexService;
-import org.opensearch.index.SegmentReplicationPerGroupStats;
import org.opensearch.index.SegmentReplicationShardStats;
import org.opensearch.index.engine.Engine;
import org.opensearch.index.shard.IndexShard;
@@ -134,24 +132,6 @@ protected void waitForSearchableDocs(long docCount, String... nodes) throws Exce
waitForSearchableDocs(docCount, Arrays.stream(nodes).collect(Collectors.toList()));
}
- protected void waitForSegmentReplication(String node) throws Exception {
- assertBusy(() -> {
- SegmentReplicationStatsResponse segmentReplicationStatsResponse = client(node).admin()
- .indices()
- .prepareSegmentReplicationStats(INDEX_NAME)
- .setDetailed(true)
- .execute()
- .actionGet();
- final SegmentReplicationPerGroupStats perGroupStats = segmentReplicationStatsResponse.getReplicationStats()
- .get(INDEX_NAME)
- .get(0);
- assertEquals(
- perGroupStats.getReplicaStats().stream().findFirst().get().getCurrentReplicationState().getStage(),
- SegmentReplicationState.Stage.DONE
- );
- }, 1, TimeUnit.MINUTES);
- }
-
protected void verifyStoreContent() throws Exception {
assertBusy(() -> {
final ClusterState clusterState = getClusterState();
diff --git a/server/src/internalClusterTest/java/org/opensearch/indices/replication/SegmentReplicationIT.java b/server/src/internalClusterTest/java/org/opensearch/indices/replication/SegmentReplicationIT.java
index 2a7e8e58b2d03..4b314ef1ae27b 100644
--- a/server/src/internalClusterTest/java/org/opensearch/indices/replication/SegmentReplicationIT.java
+++ b/server/src/internalClusterTest/java/org/opensearch/indices/replication/SegmentReplicationIT.java
@@ -44,6 +44,8 @@
import org.opensearch.cluster.routing.ShardRouting;
import org.opensearch.cluster.routing.ShardRoutingState;
import org.opensearch.cluster.routing.allocation.command.CancelAllocationCommand;
+import org.opensearch.common.collect.Tuple;
+import org.opensearch.common.concurrent.GatedCloseable;
import org.opensearch.core.common.io.stream.NamedWriteableRegistry;
import org.opensearch.common.lucene.index.OpenSearchDirectoryReader;
import org.opensearch.common.settings.Settings;
@@ -60,6 +62,7 @@
import org.opensearch.index.shard.IndexShard;
import org.opensearch.core.index.shard.ShardId;
import org.opensearch.indices.recovery.FileChunkRequest;
+import org.opensearch.indices.replication.checkpoint.ReplicationCheckpoint;
import org.opensearch.indices.replication.common.ReplicationType;
import org.opensearch.search.SearchService;
import org.opensearch.search.builder.PointInTimeBuilder;
@@ -88,10 +91,14 @@
import static org.opensearch.common.xcontent.XContentFactory.jsonBuilder;
import static org.opensearch.index.query.QueryBuilders.matchAllQuery;
import static org.opensearch.index.query.QueryBuilders.matchQuery;
+import static org.opensearch.index.query.QueryBuilders.termQuery;
+import static org.opensearch.index.query.QueryBuilders.boolQuery;
+import static org.opensearch.index.query.QueryBuilders.rangeQuery;
import static org.opensearch.indices.replication.SegmentReplicationTarget.REPLICATION_PREFIX;
import static org.opensearch.test.hamcrest.OpenSearchAssertions.assertAcked;
import static org.opensearch.test.hamcrest.OpenSearchAssertions.assertAllSuccessful;
import static org.opensearch.test.hamcrest.OpenSearchAssertions.assertHitCount;
+import static org.opensearch.test.hamcrest.OpenSearchAssertions.assertNoFailures;
import static org.opensearch.test.hamcrest.OpenSearchAssertions.assertSearchHits;
@OpenSearchIntegTestCase.ClusterScope(scope = OpenSearchIntegTestCase.Scope.TEST, numDataNodes = 0)
@@ -201,10 +208,11 @@ public void testReplicationAfterPrimaryRefreshAndFlush() throws Exception {
final String nodeB = internalCluster().startDataOnlyNode();
final Settings settings = Settings.builder()
.put(indexSettings())
- .put(
- EngineConfig.INDEX_CODEC_SETTING.getKey(),
- randomFrom(CodecService.DEFAULT_CODEC, CodecService.BEST_COMPRESSION_CODEC, CodecService.LUCENE_DEFAULT_CODEC)
- )
+ .put(EngineConfig.INDEX_CODEC_SETTING.getKey(), randomFrom(new ArrayList<>(CODECS) {
+ {
+ add(CodecService.LUCENE_DEFAULT_CODEC);
+ }
+ }))
.build();
createIndex(INDEX_NAME, settings);
ensureGreen(INDEX_NAME);
@@ -982,8 +990,11 @@ public void testScrollCreatedOnReplica() throws Exception {
)
);
final IndexShard replicaShard = getIndexShard(replica, INDEX_NAME);
-        final SegmentInfos segmentInfos = replicaShard.getLatestSegmentInfosAndCheckpoint().v1().get();
-        final Collection<String> snapshottedSegments = segmentInfos.files(false);
+        final Tuple<GatedCloseable<SegmentInfos>, ReplicationCheckpoint> tuple = replicaShard.getLatestSegmentInfosAndCheckpoint();
+        final Collection<String> snapshottedSegments;
+        try (final GatedCloseable<SegmentInfos> closeable = tuple.v1()) {
+ snapshottedSegments = closeable.get().files(false);
+ }
// opens a scrolled query before a flush is called.
// this is for testing scroll segment consistency between refresh and flush
SearchResponse searchResponse = client(replica).prepareSearch()
@@ -1177,6 +1188,10 @@ public void testScrollWithOngoingSegmentReplication() throws Exception {
}
public void testPitCreatedOnReplica() throws Exception {
+ assumeFalse(
+ "Skipping the test as it is flaky with remote store. Tracking issue https://github.com/opensearch-project/OpenSearch/issues/8850",
+ segmentReplicationWithRemoteEnabled()
+ );
final String primary = internalCluster().startDataOnlyNode();
createIndex(INDEX_NAME);
ensureYellowAndNoInitializingShards(INDEX_NAME);
@@ -1338,4 +1353,76 @@ public void testPrimaryReceivesDocsDuringReplicaRecovery() throws Exception {
ensureGreen(INDEX_NAME);
waitForSearchableDocs(2, nodes);
}
+
+ public void testIndexWhileRecoveringReplica() throws Exception {
+ final String primaryNode = internalCluster().startDataOnlyNode();
+ assertAcked(
+ prepareCreate(INDEX_NAME).setMapping(
+ jsonBuilder().startObject()
+ .startObject("_routing")
+ .field("required", true)
+ .endObject()
+ .startObject("properties")
+ .startObject("online")
+ .field("type", "boolean")
+ .endObject()
+ .startObject("ts")
+ .field("type", "date")
+ .field("ignore_malformed", false)
+ .field("format", "epoch_millis")
+ .endObject()
+ .startObject("bs")
+ .field("type", "keyword")
+ .endObject()
+ .endObject()
+ .endObject()
+ )
+ );
+ ensureYellow(INDEX_NAME);
+ final String replicaNode = internalCluster().startDataOnlyNode();
+
+ client().prepareIndex(INDEX_NAME)
+ .setId("1")
+ .setRouting("Y")
+ .setSource("online", false, "bs", "Y", "ts", System.currentTimeMillis() - 100, "type", "s")
+ .get();
+ client().prepareIndex(INDEX_NAME)
+ .setId("2")
+ .setRouting("X")
+ .setSource("online", true, "bs", "X", "ts", System.currentTimeMillis() - 10000000, "type", "s")
+ .get();
+ client().prepareIndex(INDEX_NAME)
+ .setId("3")
+ .setRouting(randomAlphaOfLength(2))
+ .setSource("online", false, "ts", System.currentTimeMillis() - 100, "type", "bs")
+ .get();
+ client().prepareIndex(INDEX_NAME)
+ .setId("4")
+ .setRouting(randomAlphaOfLength(2))
+ .setSource("online", true, "ts", System.currentTimeMillis() - 123123, "type", "bs")
+ .get();
+ refresh();
+ ensureGreen(INDEX_NAME);
+ waitForSearchableDocs(4, primaryNode, replicaNode);
+
+ SearchResponse response = client().prepareSearch(INDEX_NAME)
+ .setSearchType(SearchType.DFS_QUERY_THEN_FETCH)
+ .setQuery(
+ boolQuery().must(termQuery("online", true))
+ .must(
+ boolQuery().should(
+ boolQuery().must(rangeQuery("ts").lt(System.currentTimeMillis() - (15 * 1000))).must(termQuery("type", "bs"))
+ )
+ .should(
+ boolQuery().must(rangeQuery("ts").lt(System.currentTimeMillis() - (15 * 1000))).must(termQuery("type", "s"))
+ )
+ )
+ )
+ .setVersion(true)
+ .setFrom(0)
+ .setSize(100)
+ .setExplain(true)
+ .get();
+ assertNoFailures(response);
+ }
}
diff --git a/server/src/internalClusterTest/java/org/opensearch/indices/replication/SegmentReplicationRelocationIT.java b/server/src/internalClusterTest/java/org/opensearch/indices/replication/SegmentReplicationRelocationIT.java
index 7cf7e5148dd4a..3024eeb798b48 100644
--- a/server/src/internalClusterTest/java/org/opensearch/indices/replication/SegmentReplicationRelocationIT.java
+++ b/server/src/internalClusterTest/java/org/opensearch/indices/replication/SegmentReplicationRelocationIT.java
@@ -60,7 +60,7 @@ public void testPrimaryRelocation() throws Exception {
createIndex(1);
final String replica = internalCluster().startNode();
ensureGreen(INDEX_NAME);
- final int initialDocCount = scaledRandomIntBetween(100, 1000);
+ final int initialDocCount = scaledRandomIntBetween(10, 100);
final WriteRequest.RefreshPolicy refreshPolicy = randomFrom(WriteRequest.RefreshPolicy.values());
         final List<ActionFuture<IndexResponse>> pendingIndexResponses = new ArrayList<>();
for (int i = 0; i < initialDocCount; i++) {
@@ -137,7 +137,7 @@ public void testPrimaryRelocationWithSegRepFailure() throws Exception {
createIndex(1);
final String replica = internalCluster().startNode();
ensureGreen(INDEX_NAME);
- final int initialDocCount = scaledRandomIntBetween(100, 1000);
+ final int initialDocCount = scaledRandomIntBetween(10, 100);
final WriteRequest.RefreshPolicy refreshPolicy = randomFrom(WriteRequest.RefreshPolicy.values());
         final List<ActionFuture<IndexResponse>> pendingIndexResponses = new ArrayList<>();
for (int i = 0; i < initialDocCount; i++) {
diff --git a/server/src/internalClusterTest/java/org/opensearch/indices/state/CloseIndexIT.java b/server/src/internalClusterTest/java/org/opensearch/indices/state/CloseIndexIT.java
index 28bd5a6ae252d..e95f10bd7abdb 100644
--- a/server/src/internalClusterTest/java/org/opensearch/indices/state/CloseIndexIT.java
+++ b/server/src/internalClusterTest/java/org/opensearch/indices/state/CloseIndexIT.java
@@ -46,8 +46,8 @@
import org.opensearch.cluster.node.DiscoveryNode;
import org.opensearch.cluster.routing.ShardRouting;
import org.opensearch.common.settings.Settings;
-import org.opensearch.common.unit.ByteSizeUnit;
-import org.opensearch.common.unit.ByteSizeValue;
+import org.opensearch.core.common.unit.ByteSizeUnit;
+import org.opensearch.core.common.unit.ByteSizeValue;
import org.opensearch.common.util.set.Sets;
import org.opensearch.index.IndexNotFoundException;
import org.opensearch.index.IndexSettings;
diff --git a/server/src/internalClusterTest/java/org/opensearch/indices/state/OpenCloseIndexIT.java b/server/src/internalClusterTest/java/org/opensearch/indices/state/OpenCloseIndexIT.java
index fde30f35d1b6d..3bbd4f83d2b3c 100644
--- a/server/src/internalClusterTest/java/org/opensearch/indices/state/OpenCloseIndexIT.java
+++ b/server/src/internalClusterTest/java/org/opensearch/indices/state/OpenCloseIndexIT.java
@@ -44,7 +44,6 @@
import org.opensearch.action.support.master.AcknowledgedResponse;
import org.opensearch.client.Client;
import org.opensearch.cluster.metadata.IndexMetadata;
-import org.opensearch.common.Strings;
import org.opensearch.common.settings.Settings;
import org.opensearch.common.xcontent.XContentFactory;
import org.opensearch.index.IndexNotFoundException;
@@ -301,16 +300,15 @@ public void testOpenWaitingForActiveShardsFailed() throws Exception {
}
public void testOpenCloseWithDocs() throws IOException, ExecutionException, InterruptedException {
- String mapping = Strings.toString(
- XContentFactory.jsonBuilder()
- .startObject()
- .startObject("properties")
- .startObject("test")
- .field("type", "keyword")
- .endObject()
- .endObject()
- .endObject()
- );
+ String mapping = XContentFactory.jsonBuilder()
+ .startObject()
+ .startObject("properties")
+ .startObject("test")
+ .field("type", "keyword")
+ .endObject()
+ .endObject()
+ .endObject()
+ .toString();
assertAcked(client().admin().indices().prepareCreate("test").setMapping(mapping));
ensureGreen();
diff --git a/server/src/internalClusterTest/java/org/opensearch/recovery/RecoveryWhileUnderLoadIT.java b/server/src/internalClusterTest/java/org/opensearch/recovery/RecoveryWhileUnderLoadIT.java
index a675eb7c77344..5b72eaca0b4e5 100644
--- a/server/src/internalClusterTest/java/org/opensearch/recovery/RecoveryWhileUnderLoadIT.java
+++ b/server/src/internalClusterTest/java/org/opensearch/recovery/RecoveryWhileUnderLoadIT.java
@@ -46,7 +46,7 @@
import org.opensearch.common.settings.Setting;
import org.opensearch.common.settings.Settings;
import org.opensearch.common.unit.TimeValue;
-import org.opensearch.common.util.CollectionUtils;
+import org.opensearch.core.common.util.CollectionUtils;
import org.opensearch.index.IndexService;
import org.opensearch.index.IndexSettings;
import org.opensearch.index.shard.DocsStats;
diff --git a/server/src/internalClusterTest/java/org/opensearch/recovery/TruncatedRecoveryIT.java b/server/src/internalClusterTest/java/org/opensearch/recovery/TruncatedRecoveryIT.java
index b5d7bd476059d..2454f6553951e 100644
--- a/server/src/internalClusterTest/java/org/opensearch/recovery/TruncatedRecoveryIT.java
+++ b/server/src/internalClusterTest/java/org/opensearch/recovery/TruncatedRecoveryIT.java
@@ -39,8 +39,8 @@
import org.opensearch.action.index.IndexRequestBuilder;
import org.opensearch.cluster.metadata.IndexMetadata;
import org.opensearch.common.settings.Settings;
-import org.opensearch.common.unit.ByteSizeUnit;
-import org.opensearch.common.unit.ByteSizeValue;
+import org.opensearch.core.common.unit.ByteSizeUnit;
+import org.opensearch.core.common.unit.ByteSizeValue;
import org.opensearch.index.query.QueryBuilders;
import org.opensearch.indices.recovery.PeerRecoveryTargetService;
import org.opensearch.indices.recovery.FileChunkRequest;
diff --git a/server/src/internalClusterTest/java/org/opensearch/remotestore/AbstractRemoteStoreMockRepositoryIntegTestCase.java b/server/src/internalClusterTest/java/org/opensearch/remotestore/AbstractRemoteStoreMockRepositoryIntegTestCase.java
index 709c027c3f347..5bfbbc11da77d 100644
--- a/server/src/internalClusterTest/java/org/opensearch/remotestore/AbstractRemoteStoreMockRepositoryIntegTestCase.java
+++ b/server/src/internalClusterTest/java/org/opensearch/remotestore/AbstractRemoteStoreMockRepositoryIntegTestCase.java
@@ -29,6 +29,7 @@
import java.util.Set;
import java.util.stream.Collectors;
+import static org.opensearch.remotestore.RemoteStoreBaseIntegTestCase.remoteStoreClusterSettings;
import static org.opensearch.test.hamcrest.OpenSearchAssertions.assertAcked;
public abstract class AbstractRemoteStoreMockRepositoryIntegTestCase extends AbstractSnapshotIntegTestCase {
@@ -46,7 +47,7 @@ protected Settings featureFlagSettings() {
public void setup() {
FeatureFlagSetter.set(FeatureFlags.REMOTE_STORE);
FeatureFlagSetter.set(FeatureFlags.SEGMENT_REPLICATION_EXPERIMENTAL);
- internalCluster().startClusterManagerOnlyNode();
+ internalCluster().startClusterManagerOnlyNode(remoteStoreClusterSettings(REPOSITORY_NAME, TRANSLOG_REPOSITORY_NAME));
}
@Override
@@ -62,9 +63,6 @@ protected Settings remoteStoreIndexSettings(int numberOfReplicas) {
.put(IndexMetadata.SETTING_NUMBER_OF_REPLICAS, numberOfReplicas)
.put(IndexModule.INDEX_QUERY_CACHE_ENABLED_SETTING.getKey(), false)
.put(IndexMetadata.SETTING_REPLICATION_TYPE, ReplicationType.SEGMENT)
- .put(IndexMetadata.SETTING_REMOTE_STORE_ENABLED, true)
- .put(IndexMetadata.SETTING_REMOTE_SEGMENT_STORE_REPOSITORY, REPOSITORY_NAME)
- .put(IndexMetadata.SETTING_REMOTE_TRANSLOG_STORE_REPOSITORY, TRANSLOG_REPOSITORY_NAME)
.build();
}
diff --git a/server/src/internalClusterTest/java/org/opensearch/remotestore/CreateRemoteIndexClusterDefaultDocRep.java b/server/src/internalClusterTest/java/org/opensearch/remotestore/CreateRemoteIndexClusterDefaultDocRepIT.java
similarity index 82%
rename from server/src/internalClusterTest/java/org/opensearch/remotestore/CreateRemoteIndexClusterDefaultDocRep.java
rename to server/src/internalClusterTest/java/org/opensearch/remotestore/CreateRemoteIndexClusterDefaultDocRepIT.java
index 2abf4fc50ec69..32c02332e05b2 100644
--- a/server/src/internalClusterTest/java/org/opensearch/remotestore/CreateRemoteIndexClusterDefaultDocRep.java
+++ b/server/src/internalClusterTest/java/org/opensearch/remotestore/CreateRemoteIndexClusterDefaultDocRepIT.java
@@ -16,12 +16,16 @@
import org.opensearch.indices.replication.common.ReplicationType;
import org.opensearch.test.OpenSearchIntegTestCase;
+import java.util.Locale;
+
import static org.hamcrest.Matchers.containsString;
+import static org.opensearch.cluster.metadata.IndexMetadata.SETTING_REMOTE_STORE_ENABLED;
+import static org.opensearch.cluster.metadata.IndexMetadata.SETTING_REPLICATION_TYPE;
import static org.opensearch.indices.IndicesService.CLUSTER_REPLICATION_TYPE_SETTING;
import static org.opensearch.test.hamcrest.OpenSearchAssertions.assertAcked;
@OpenSearchIntegTestCase.ClusterScope(scope = OpenSearchIntegTestCase.Scope.TEST)
-public class CreateRemoteIndexClusterDefaultDocRep extends CreateRemoteIndexIT {
+public class CreateRemoteIndexClusterDefaultDocRepIT extends CreateRemoteIndexIT {
@Override
protected Settings nodeSettings(int nodeOriginal) {
@@ -44,7 +48,15 @@ public void testDefaultRemoteStoreNoUserOverride() throws Exception {
);
assertThat(
exc.getMessage(),
- containsString("Cannot enable [index.remote_store.enabled] when [index.replication.type] is DOCUMENT")
+ containsString(
+ String.format(
+ Locale.ROOT,
+ "To enable %s, %s should be set to %s",
+ SETTING_REMOTE_STORE_ENABLED,
+ SETTING_REPLICATION_TYPE,
+ ReplicationType.SEGMENT
+ )
+ )
);
}
diff --git a/server/src/internalClusterTest/java/org/opensearch/remotestore/CreateRemoteIndexIT.java b/server/src/internalClusterTest/java/org/opensearch/remotestore/CreateRemoteIndexIT.java
index e52a12f66cff4..7683651e902b2 100644
--- a/server/src/internalClusterTest/java/org/opensearch/remotestore/CreateRemoteIndexIT.java
+++ b/server/src/internalClusterTest/java/org/opensearch/remotestore/CreateRemoteIndexIT.java
@@ -26,14 +26,10 @@
import static org.hamcrest.Matchers.containsString;
import static org.opensearch.cluster.metadata.IndexMetadata.SETTING_REMOTE_STORE_ENABLED;
import static org.opensearch.cluster.metadata.IndexMetadata.SETTING_REMOTE_SEGMENT_STORE_REPOSITORY;
-import static org.opensearch.index.IndexSettings.INDEX_REMOTE_TRANSLOG_BUFFER_INTERVAL_SETTING;
import static org.opensearch.cluster.metadata.IndexMetadata.SETTING_REMOTE_TRANSLOG_STORE_REPOSITORY;
import static org.opensearch.cluster.metadata.IndexMetadata.SETTING_REPLICATION_TYPE;
-import static org.opensearch.indices.IndicesService.CLUSTER_REMOTE_STORE_REPOSITORY_SETTING;
-import static org.opensearch.indices.IndicesService.CLUSTER_REMOTE_TRANSLOG_REPOSITORY_SETTING;
-import static org.opensearch.indices.IndicesService.CLUSTER_REMOTE_STORE_ENABLED_SETTING;
-import static org.opensearch.indices.IndicesService.CLUSTER_REPLICATION_TYPE_SETTING;
-import static org.opensearch.indices.IndicesService.CLUSTER_SETTING_REPLICATION_TYPE;
+import static org.opensearch.index.IndexSettings.INDEX_REMOTE_TRANSLOG_BUFFER_INTERVAL_SETTING;
+import static org.opensearch.remotestore.RemoteStoreBaseIntegTestCase.remoteStoreClusterSettings;
import static org.opensearch.test.hamcrest.OpenSearchAssertions.assertAcked;
@OpenSearchIntegTestCase.ClusterScope(scope = OpenSearchIntegTestCase.Scope.TEST)
@@ -50,10 +46,7 @@ public void teardown() {
protected Settings nodeSettings(int nodeOriginal) {
Settings settings = super.nodeSettings(nodeOriginal);
Settings.Builder builder = Settings.builder()
- .put(CLUSTER_REPLICATION_TYPE_SETTING.getKey(), ReplicationType.SEGMENT)
- .put(CLUSTER_REMOTE_STORE_ENABLED_SETTING.getKey(), true)
- .put(CLUSTER_REMOTE_STORE_REPOSITORY_SETTING.getKey(), "my-segment-repo-1")
- .put(CLUSTER_REMOTE_TRANSLOG_REPOSITORY_SETTING.getKey(), "my-translog-repo-1")
+ .put(remoteStoreClusterSettings("my-segment-repo-1", "my-translog-repo-1"))
.put(settings);
return builder.build();
}
@@ -111,19 +104,20 @@ public void testRemoteStoreDisabledByUser() throws Exception {
.put(IndexMetadata.SETTING_NUMBER_OF_REPLICAS, 0)
.put(SETTING_REMOTE_STORE_ENABLED, false)
.build();
- assertAcked(client().admin().indices().prepareCreate("test-idx-1").setSettings(settings).get());
- GetIndexResponse getIndexResponse = client().admin()
- .indices()
- .getIndex(new GetIndexRequest().indices("test-idx-1").includeDefaults(true))
- .get();
- Settings indexSettings = getIndexResponse.settings().get("test-idx-1");
- verifyRemoteStoreIndexSettings(
- indexSettings,
- "false",
- null,
- null,
- client().settings().get(CLUSTER_SETTING_REPLICATION_TYPE),
- IndexSettings.DEFAULT_REMOTE_TRANSLOG_BUFFER_INTERVAL
+
+ IllegalArgumentException exc = expectThrows(
+ IllegalArgumentException.class,
+ () -> client().admin().indices().prepareCreate("test-idx-1").setSettings(settings).get()
+ );
+ assertThat(
+ exc.getMessage(),
+ containsString(
+ String.format(
+ Locale.ROOT,
+ "Validation Failed: 1: private index setting [%s] can not be set explicitly;",
+ SETTING_REMOTE_STORE_ENABLED
+ )
+ )
);
}
@@ -161,8 +155,8 @@ public void testRemoteStoreEnabledByUserWithoutRemoteRepoIllegalArgumentExceptio
containsString(
String.format(
Locale.ROOT,
- "Setting %s should be provided with non-empty repository ID",
- SETTING_REMOTE_SEGMENT_STORE_REPOSITORY
+ "Validation Failed: 1: private index setting [%s] can not be set explicitly;",
+ SETTING_REMOTE_STORE_ENABLED
)
)
);
@@ -174,19 +168,21 @@ public void testReplicationTypeDocumentByUser() throws Exception {
.put(IndexMetadata.SETTING_NUMBER_OF_REPLICAS, 0)
.put(SETTING_REPLICATION_TYPE, ReplicationType.DOCUMENT)
.build();
- assertAcked(client().admin().indices().prepareCreate("test-idx-1").setSettings(settings).get());
- GetIndexResponse getIndexResponse = client().admin()
- .indices()
- .getIndex(new GetIndexRequest().indices("test-idx-1").includeDefaults(true))
- .get();
- Settings indexSettings = getIndexResponse.settings().get("test-idx-1");
- verifyRemoteStoreIndexSettings(
- indexSettings,
- null,
- null,
- null,
- ReplicationType.DOCUMENT.toString(),
- IndexSettings.DEFAULT_REMOTE_TRANSLOG_BUFFER_INTERVAL
+ IllegalArgumentException exc = expectThrows(
+ IllegalArgumentException.class,
+ () -> client().admin().indices().prepareCreate("test-idx-1").setSettings(settings).get()
+ );
+ assertThat(
+ exc.getMessage(),
+ containsString(
+ String.format(
+ Locale.ROOT,
+ "To enable %s, %s should be set to %s",
+ SETTING_REMOTE_STORE_ENABLED,
+ SETTING_REPLICATION_TYPE,
+ ReplicationType.SEGMENT
+ )
+ )
);
}
@@ -213,7 +209,7 @@ public void testRemoteStoreSegmentRepoWithoutRemoteEnabledAndSegmentReplicationI
);
}
- public void testRemoteStoreEnabledByUserWithRemoteRepo() throws Exception {
+ public void testRemoteStoreEnabledByUserWithRemoteRepoIllegalArgumentException() throws Exception {
Settings settings = Settings.builder()
.put(IndexMetadata.SETTING_NUMBER_OF_SHARDS, 1)
.put(IndexMetadata.SETTING_NUMBER_OF_REPLICAS, 0)
@@ -222,19 +218,20 @@ public void testRemoteStoreEnabledByUserWithRemoteRepo() throws Exception {
.put(SETTING_REMOTE_SEGMENT_STORE_REPOSITORY, "my-custom-repo")
.build();
- assertAcked(client().admin().indices().prepareCreate("test-idx-1").setSettings(settings).get());
- GetIndexResponse getIndexResponse = client().admin()
- .indices()
- .getIndex(new GetIndexRequest().indices("test-idx-1").includeDefaults(true))
- .get();
- Settings indexSettings = getIndexResponse.settings().get("test-idx-1");
- verifyRemoteStoreIndexSettings(
- indexSettings,
- "true",
- "my-custom-repo",
- "my-translog-repo-1",
- ReplicationType.SEGMENT.toString(),
- IndexSettings.DEFAULT_REMOTE_TRANSLOG_BUFFER_INTERVAL
+ IllegalArgumentException exc = expectThrows(
+ IllegalArgumentException.class,
+ () -> client().admin().indices().prepareCreate("test-idx-1").setSettings(settings).get()
+ );
+ assertThat(
+ exc.getMessage(),
+ containsString(
+ String.format(
+ Locale.ROOT,
+ "Validation Failed: 1: private index setting [%s] can not be set explicitly;2: private index setting [%s] can not be set explicitly;",
+ SETTING_REMOTE_STORE_ENABLED,
+ SETTING_REMOTE_SEGMENT_STORE_REPOSITORY
+ )
+ )
);
}
@@ -270,41 +267,21 @@ public void testRemoteStoreOverrideTranslogRepoCorrectly() throws Exception {
.put(SETTING_REMOTE_SEGMENT_STORE_REPOSITORY, "my-custom-repo")
.put(SETTING_REMOTE_TRANSLOG_STORE_REPOSITORY, "my-custom-repo")
.build();
- assertAcked(client().admin().indices().prepareCreate("test-idx-1").setSettings(settings).get());
- GetIndexResponse getIndexResponse = client().admin()
- .indices()
- .getIndex(new GetIndexRequest().indices("test-idx-1").includeDefaults(true))
- .get();
- Settings indexSettings = getIndexResponse.settings().get("test-idx-1");
- verifyRemoteStoreIndexSettings(
- indexSettings,
- "true",
- "my-custom-repo",
- "my-custom-repo",
- ReplicationType.SEGMENT.toString(),
- IndexSettings.DEFAULT_REMOTE_TRANSLOG_BUFFER_INTERVAL
+ IllegalArgumentException exc = expectThrows(
+ IllegalArgumentException.class,
+ () -> client().admin().indices().prepareCreate("test-idx-1").setSettings(settings).get()
);
- }
-
- public void testRemoteStoreOverrideReplicationTypeIndexSettings() throws Exception {
- Settings settings = Settings.builder()
- .put(IndexMetadata.SETTING_NUMBER_OF_SHARDS, 1)
- .put(IndexMetadata.SETTING_NUMBER_OF_REPLICAS, 0)
- .put(SETTING_REPLICATION_TYPE, ReplicationType.DOCUMENT)
- .build();
- assertAcked(client().admin().indices().prepareCreate("test-idx-1").setSettings(settings).get());
- GetIndexResponse getIndexResponse = client().admin()
- .indices()
- .getIndex(new GetIndexRequest().indices("test-idx-1").includeDefaults(true))
- .get();
- Settings indexSettings = getIndexResponse.settings().get("test-idx-1");
- verifyRemoteStoreIndexSettings(
- indexSettings,
- null,
- null,
- null,
- ReplicationType.DOCUMENT.toString(),
- IndexSettings.DEFAULT_REMOTE_TRANSLOG_BUFFER_INTERVAL
+ assertThat(
+ exc.getMessage(),
+ containsString(
+ String.format(
+ Locale.ROOT,
+ "Validation Failed: 1: private index setting [%s] can not be set explicitly;2: private index setting [%s] can not be set explicitly;3: private index setting [%s] can not be set explicitly;",
+ SETTING_REMOTE_STORE_ENABLED,
+ SETTING_REMOTE_SEGMENT_STORE_REPOSITORY,
+ SETTING_REMOTE_TRANSLOG_STORE_REPOSITORY
+ )
+ )
);
}
diff --git a/server/src/internalClusterTest/java/org/opensearch/remotestore/PrimaryTermValidationIT.java b/server/src/internalClusterTest/java/org/opensearch/remotestore/PrimaryTermValidationIT.java
index 9d63c9b528314..ee32c880257d1 100644
--- a/server/src/internalClusterTest/java/org/opensearch/remotestore/PrimaryTermValidationIT.java
+++ b/server/src/internalClusterTest/java/org/opensearch/remotestore/PrimaryTermValidationIT.java
@@ -61,6 +61,7 @@ public void testPrimaryTermValidation() throws Exception {
.put(FollowersChecker.FOLLOWER_CHECK_TIMEOUT_SETTING.getKey(), "1s")
.put(FollowersChecker.FOLLOWER_CHECK_INTERVAL_SETTING.getKey(), "1s")
.put(FollowersChecker.FOLLOWER_CHECK_RETRY_COUNT_SETTING.getKey(), 1)
+ .put(remoteStoreClusterSettings(REPOSITORY_NAME, REPOSITORY_2_NAME, true))
.build();
internalCluster().startClusterManagerOnlyNode(clusterSettings);
diff --git a/server/src/internalClusterTest/java/org/opensearch/remotestore/RemoteIndexPrimaryRelocationIT.java b/server/src/internalClusterTest/java/org/opensearch/remotestore/RemoteIndexPrimaryRelocationIT.java
new file mode 100644
index 0000000000000..a9482c8c19187
--- /dev/null
+++ b/server/src/internalClusterTest/java/org/opensearch/remotestore/RemoteIndexPrimaryRelocationIT.java
@@ -0,0 +1,66 @@
+/*
+ * SPDX-License-Identifier: Apache-2.0
+ *
+ * The OpenSearch Contributors require contributions made to
+ * this file be licensed under the Apache-2.0 license or a
+ * compatible open source license.
+ */
+
+package org.opensearch.remotestore;
+
+import org.opensearch.cluster.metadata.IndexMetadata;
+import org.opensearch.common.settings.Settings;
+import org.opensearch.common.util.FeatureFlags;
+import org.opensearch.indices.recovery.IndexPrimaryRelocationIT;
+import org.opensearch.indices.replication.common.ReplicationType;
+import org.opensearch.test.OpenSearchIntegTestCase;
+
+import java.nio.file.Path;
+
+import static org.opensearch.remotestore.RemoteStoreBaseIntegTestCase.remoteStoreClusterSettings;
+import static org.opensearch.test.hamcrest.OpenSearchAssertions.assertAcked;
+
+@OpenSearchIntegTestCase.ClusterScope(scope = OpenSearchIntegTestCase.Scope.TEST)
+public class RemoteIndexPrimaryRelocationIT extends IndexPrimaryRelocationIT {
+
+ protected static final String REPOSITORY_NAME = "test-remote-store-repo";
+
+ protected Path absolutePath;
+
+ public void setup() {
+ absolutePath = randomRepoPath().toAbsolutePath();
+ assertAcked(
+ clusterAdmin().preparePutRepository(REPOSITORY_NAME).setType("fs").setSettings(Settings.builder().put("location", absolutePath))
+ );
+ }
+
+ protected Settings nodeSettings(int nodeOrdinal) {
+ return Settings.builder()
+ .put(super.nodeSettings(nodeOrdinal))
+ .put(remoteStoreClusterSettings(REPOSITORY_NAME, REPOSITORY_NAME, false))
+ .build();
+ }
+
+ @Override
+ protected boolean addMockInternalEngine() {
+ return false;
+ }
+
+ public Settings indexSettings() {
+ return Settings.builder()
+ .put(super.indexSettings())
+ .put(IndexMetadata.SETTING_REPLICATION_TYPE, ReplicationType.SEGMENT)
+ .put(IndexMetadata.SETTING_NUMBER_OF_REPLICAS, 0)
+ .put(IndexMetadata.SETTING_NUMBER_OF_SHARDS, 1)
+ .build();
+ }
+
+ @Override
+ protected Settings featureFlagSettings() {
+ return Settings.builder()
+ .put(super.featureFlagSettings())
+ .put(FeatureFlags.REMOTE_STORE, "true")
+ .put(FeatureFlags.SEGMENT_REPLICATION_EXPERIMENTAL, "true")
+ .build();
+ }
+}
diff --git a/server/src/internalClusterTest/java/org/opensearch/remotestore/RemoteIndexRecoveryIT.java b/server/src/internalClusterTest/java/org/opensearch/remotestore/RemoteIndexRecoveryIT.java
index 4f7961cec22d7..d92ac83544a25 100644
--- a/server/src/internalClusterTest/java/org/opensearch/remotestore/RemoteIndexRecoveryIT.java
+++ b/server/src/internalClusterTest/java/org/opensearch/remotestore/RemoteIndexRecoveryIT.java
@@ -23,15 +23,21 @@
import java.nio.file.Path;
+import static org.opensearch.remotestore.RemoteStoreBaseIntegTestCase.remoteStoreClusterSettings;
import static org.opensearch.test.hamcrest.OpenSearchAssertions.assertAcked;
@OpenSearchIntegTestCase.ClusterScope(scope = OpenSearchIntegTestCase.Scope.TEST, numDataNodes = 0)
public class RemoteIndexRecoveryIT extends IndexRecoveryIT {
- protected static final String REPOSITORY_NAME = "test-remore-store-repo";
+ protected static final String REPOSITORY_NAME = "test-remote-store-repo";
protected Path absolutePath;
+ @Override
+ protected Settings nodeSettings(int nodeOrdinal) {
+ return Settings.builder().put(super.nodeSettings(nodeOrdinal)).put(remoteStoreClusterSettings(REPOSITORY_NAME)).build();
+ }
+
@Override
protected Settings featureFlagSettings() {
return Settings.builder()
@@ -57,9 +63,6 @@ public Settings indexSettings() {
return Settings.builder()
.put(super.indexSettings())
.put(IndexModule.INDEX_QUERY_CACHE_ENABLED_SETTING.getKey(), false)
- .put(IndexMetadata.SETTING_REMOTE_STORE_ENABLED, true)
- .put(IndexMetadata.SETTING_REMOTE_SEGMENT_STORE_REPOSITORY, REPOSITORY_NAME)
- .put(IndexMetadata.SETTING_REMOTE_TRANSLOG_STORE_REPOSITORY, REPOSITORY_NAME)
.put(IndexSettings.INDEX_REFRESH_INTERVAL_SETTING.getKey(), "300s")
.put(IndexMetadata.SETTING_REPLICATION_TYPE, ReplicationType.SEGMENT)
.build();
@@ -81,7 +84,91 @@ protected int numDocs() {
}
@Override
- protected boolean shouldAssertOngoingRecoveryInRerouteRecovery() {
- return false;
+ public void testUsesFileBasedRecoveryIfRetentionLeaseMissing() {
+ // Retention lease based tests are not applicable for remote store.
+ }
+
+ @Override
+ public void testPeerRecoveryTrimsLocalTranslog() {
+ // Peer recovery use case is not valid for remote-enabled indices.
+ }
+
+ @Override
+ public void testHistoryRetention() {
+ // History retention not applicable for remote store
+ }
+
+ @Override
+ public void testUsesFileBasedRecoveryIfOperationsBasedRecoveryWouldBeUnreasonable() {
+ // History retention not applicable for remote store
+ }
+
+ @Override
+ public void testUsesFileBasedRecoveryIfRetentionLeaseAheadOfGlobalCheckpoint() {
+ // History retention not applicable for remote store
+ }
+
+ @Override
+ public void testRecoverLocallyUpToGlobalCheckpoint() {
+ // History retention not applicable for remote store
+ }
+
+ @Override
+ public void testCancelNewShardRecoveryAndUsesExistingShardCopy() {
+ // History retention not applicable for remote store
+ }
+
+ @AwaitsFix(bugUrl = "https://github.com/opensearch-project/OpenSearch/issues/8919")
+ @Override
+ public void testReservesBytesDuringPeerRecoveryPhaseOne() {
+
+ }
+
+ @AwaitsFix(bugUrl = "https://github.com/opensearch-project/OpenSearch/issues/8919")
+ @Override
+ public void testAllocateEmptyPrimaryResetsGlobalCheckpoint() {
+
+ }
+
+ @AwaitsFix(bugUrl = "https://github.com/opensearch-project/OpenSearch/issues/8919")
+ @Override
+ public void testDoesNotCopyOperationsInSafeCommit() {
+
+ }
+
+ @AwaitsFix(bugUrl = "https://github.com/opensearch-project/OpenSearch/issues/8919")
+ @Override
+ public void testRepeatedRecovery() {
+
+ }
+
+ @AwaitsFix(bugUrl = "https://github.com/opensearch-project/OpenSearch/issues/8919")
+ @Override
+ public void testDisconnectsWhileRecovering() {
+
+ }
+
+ @AwaitsFix(bugUrl = "https://github.com/opensearch-project/OpenSearch/issues/8919")
+ @Override
+ public void testTransientErrorsDuringRecoveryAreRetried() {
+
+ }
+
+ @AwaitsFix(bugUrl = "https://github.com/opensearch-project/OpenSearch/issues/8919")
+ @Override
+ public void testDoNotInfinitelyWaitForMapping() {
+
+ }
+
+ @AwaitsFix(bugUrl = "https://github.com/opensearch-project/OpenSearch/issues/8919")
+ @Override
+ public void testDisconnectsDuringRecovery() {
+
+ }
+
+ @AwaitsFix(bugUrl = "https://github.com/opensearch-project/OpenSearch/issues/8919")
+ @Override
+ public void testReplicaRecovery() {
+
}
}
diff --git a/server/src/internalClusterTest/java/org/opensearch/remotestore/RemoteRestoreSnapshotIT.java b/server/src/internalClusterTest/java/org/opensearch/remotestore/RemoteRestoreSnapshotIT.java
new file mode 100644
index 0000000000000..8c33bf36ad45d
--- /dev/null
+++ b/server/src/internalClusterTest/java/org/opensearch/remotestore/RemoteRestoreSnapshotIT.java
@@ -0,0 +1,530 @@
+/*
+ * SPDX-License-Identifier: Apache-2.0
+ *
+ * The OpenSearch Contributors require contributions made to
+ * this file be licensed under the Apache-2.0 license or a
+ * compatible open source license.
+ */
+
+package org.opensearch.remotestore;
+
+import org.junit.After;
+import org.junit.Before;
+import org.opensearch.action.DocWriteResponse;
+import org.opensearch.action.admin.cluster.remotestore.restore.RestoreRemoteStoreRequest;
+import org.opensearch.action.admin.cluster.snapshots.create.CreateSnapshotResponse;
+import org.opensearch.action.admin.cluster.snapshots.restore.RestoreSnapshotResponse;
+import org.opensearch.action.admin.indices.get.GetIndexRequest;
+import org.opensearch.action.admin.indices.get.GetIndexResponse;
+import org.opensearch.action.delete.DeleteResponse;
+import org.opensearch.action.support.PlainActionFuture;
+import org.opensearch.client.Client;
+import org.opensearch.client.Requests;
+import org.opensearch.cluster.metadata.IndexMetadata;
+import org.opensearch.common.io.PathUtils;
+import org.opensearch.common.settings.Settings;
+import org.opensearch.common.util.FeatureFlags;
+import org.opensearch.core.rest.RestStatus;
+import org.opensearch.index.IndexSettings;
+import org.opensearch.indices.replication.common.ReplicationType;
+import org.opensearch.snapshots.AbstractSnapshotIntegTestCase;
+import org.opensearch.snapshots.SnapshotState;
+import org.opensearch.test.InternalTestCluster;
+
+import java.io.IOException;
+import java.nio.file.Path;
+import java.util.Arrays;
+import java.util.concurrent.ExecutionException;
+
+import static org.hamcrest.Matchers.equalTo;
+import static org.hamcrest.Matchers.greaterThan;
+import static org.opensearch.cluster.metadata.IndexMetadata.SETTING_REMOTE_SEGMENT_STORE_REPOSITORY;
+import static org.opensearch.cluster.metadata.IndexMetadata.SETTING_REMOTE_STORE_ENABLED;
+import static org.opensearch.remotestore.RemoteStoreBaseIntegTestCase.remoteStoreClusterSettings;
+import static org.opensearch.test.hamcrest.OpenSearchAssertions.assertAcked;
+
+public class RemoteRestoreSnapshotIT extends AbstractSnapshotIntegTestCase {
+ private static final String BASE_REMOTE_REPO = "test-rs-repo" + TEST_REMOTE_STORE_REPO_SUFFIX;
+ private Path remoteRepoPath;
+
+ @Before
+ public void setup() {
+ remoteRepoPath = randomRepoPath().toAbsolutePath();
+ createRepository(BASE_REMOTE_REPO, "fs", remoteRepoPath);
+ }
+
+ @After
+ public void teardown() {
+ assertAcked(clusterAdmin().prepareDeleteRepository(BASE_REMOTE_REPO));
+ }
+
+ @Override
+ protected Settings nodeSettings(int nodeOrdinal) {
+ return Settings.builder()
+ .put(super.nodeSettings(nodeOrdinal))
+ .put(FeatureFlags.REMOTE_STORE, "true")
+ .put(remoteStoreClusterSettings(BASE_REMOTE_REPO))
+ .build();
+ }
+
+ private Settings.Builder getIndexSettings(int numOfShards, int numOfReplicas) {
+ Settings.Builder settingsBuilder = Settings.builder()
+ .put(super.indexSettings())
+ .put(IndexMetadata.SETTING_NUMBER_OF_SHARDS, numOfShards)
+ .put(IndexMetadata.SETTING_NUMBER_OF_REPLICAS, numOfReplicas)
+ .put(IndexSettings.INDEX_REFRESH_INTERVAL_SETTING.getKey(), "300s");
+ return settingsBuilder;
+ }
+
+ private void indexDocuments(Client client, String indexName, int numOfDocs) {
+ indexDocuments(client, indexName, 0, numOfDocs);
+ }
+
+ private void indexDocuments(Client client, String indexName, int fromId, int toId) {
+ for (int i = fromId; i < toId; i++) {
+ String id = Integer.toString(i);
+ client.prepareIndex(indexName).setId(id).setSource("text", "sometext").get();
+ }
+ client.admin().indices().prepareFlush(indexName).get();
+ }
+
+ private void assertDocsPresentInIndex(Client client, String indexName, int numOfDocs) {
+ for (int i = 0; i < numOfDocs; i++) {
+ String id = Integer.toString(i);
+ logger.info("checking for index " + indexName + " with docId" + id);
+ assertTrue("doc with id" + id + " is not present for index " + indexName, client.prepareGet(indexName, id).get().isExists());
+ }
+ }
+
+ public void testRestoreOperationsShallowCopyEnabled() throws IOException, ExecutionException, InterruptedException {
+ String clusterManagerNode = internalCluster().startClusterManagerOnlyNode();
+ String primary = internalCluster().startDataOnlyNode();
+ String indexName1 = "testindex1";
+ String indexName2 = "testindex2";
+ String snapshotRepoName = "test-restore-snapshot-repo";
+ String snapshotName1 = "test-restore-snapshot1";
+ String snapshotName2 = "test-restore-snapshot2";
+ Path absolutePath1 = randomRepoPath().toAbsolutePath();
+ logger.info("Snapshot Path [{}]", absolutePath1);
+ String restoredIndexName1 = indexName1 + "-restored";
+ String restoredIndexName1Seg = indexName1 + "-restored-seg";
+ String restoredIndexName1Doc = indexName1 + "-restored-doc";
+ String restoredIndexName2 = indexName2 + "-restored";
+
+ createRepository(snapshotRepoName, "fs", getRepositorySettings(absolutePath1, true));
+
+ Client client = client();
+ Settings indexSettings = getIndexSettings(1, 0).build();
+ createIndex(indexName1, indexSettings);
+
+ Settings indexSettings2 = getIndexSettings(1, 0).build();
+ createIndex(indexName2, indexSettings2);
+
+ final int numDocsInIndex1 = 5;
+ final int numDocsInIndex2 = 6;
+ indexDocuments(client, indexName1, numDocsInIndex1);
+ indexDocuments(client, indexName2, numDocsInIndex2);
+ ensureGreen(indexName1, indexName2);
+
+ internalCluster().startDataOnlyNode();
+ logger.info("--> snapshot");
+ CreateSnapshotResponse createSnapshotResponse = client.admin()
+ .cluster()
+ .prepareCreateSnapshot(snapshotRepoName, snapshotName1)
+ .setWaitForCompletion(true)
+ .setIndices(indexName1, indexName2)
+ .get();
+ assertThat(createSnapshotResponse.getSnapshotInfo().successfulShards(), greaterThan(0));
+ assertThat(
+ createSnapshotResponse.getSnapshotInfo().successfulShards(),
+ equalTo(createSnapshotResponse.getSnapshotInfo().totalShards())
+ );
+ assertThat(createSnapshotResponse.getSnapshotInfo().state(), equalTo(SnapshotState.SUCCESS));
+
+ updateRepository(snapshotRepoName, "fs", getRepositorySettings(absolutePath1, false));
+ CreateSnapshotResponse createSnapshotResponse2 = client.admin()
+ .cluster()
+ .prepareCreateSnapshot(snapshotRepoName, snapshotName2)
+ .setWaitForCompletion(true)
+ .setIndices(indexName1, indexName2)
+ .get();
+ assertThat(createSnapshotResponse2.getSnapshotInfo().successfulShards(), greaterThan(0));
+ assertThat(
+ createSnapshotResponse2.getSnapshotInfo().successfulShards(),
+ equalTo(createSnapshotResponse2.getSnapshotInfo().totalShards())
+ );
+ assertThat(createSnapshotResponse2.getSnapshotInfo().state(), equalTo(SnapshotState.SUCCESS));
+
+ DeleteResponse deleteResponse = client().prepareDelete(indexName1, "0").execute().actionGet();
+ assertEquals(deleteResponse.getResult(), DocWriteResponse.Result.DELETED);
+ indexDocuments(client, indexName1, numDocsInIndex1, numDocsInIndex1 + randomIntBetween(2, 5));
+ ensureGreen(indexName1);
+
+ RestoreSnapshotResponse restoreSnapshotResponse1 = client.admin()
+ .cluster()
+ .prepareRestoreSnapshot(snapshotRepoName, snapshotName1)
+ .setWaitForCompletion(false)
+ .setIndices(indexName1)
+ .setRenamePattern(indexName1)
+ .setRenameReplacement(restoredIndexName1)
+ .get();
+ RestoreSnapshotResponse restoreSnapshotResponse2 = client.admin()
+ .cluster()
+ .prepareRestoreSnapshot(snapshotRepoName, snapshotName2)
+ .setWaitForCompletion(false)
+ .setIndices(indexName2)
+ .setRenamePattern(indexName2)
+ .setRenameReplacement(restoredIndexName2)
+ .get();
+ assertEquals(restoreSnapshotResponse1.status(), RestStatus.ACCEPTED);
+ assertEquals(restoreSnapshotResponse2.status(), RestStatus.ACCEPTED);
+ ensureGreen(restoredIndexName1, restoredIndexName2);
+ assertDocsPresentInIndex(client, restoredIndexName1, numDocsInIndex1);
+ assertDocsPresentInIndex(client, restoredIndexName2, numDocsInIndex2);
+
+ // deleting data for restoredIndexName1 and restoring from remote store.
+ internalCluster().stopRandomNode(InternalTestCluster.nameFilter(primary));
+ ensureRed(restoredIndexName1);
+ // Re-initialize client to make sure we are not using client from stopped node.
+ client = client(clusterManagerNode);
+ assertAcked(client.admin().indices().prepareClose(restoredIndexName1));
+ client.admin()
+ .cluster()
+ .restoreRemoteStore(
+ new RestoreRemoteStoreRequest().indices(restoredIndexName1).restoreAllShards(true),
+ PlainActionFuture.newFuture()
+ );
+ ensureYellowAndNoInitializingShards(restoredIndexName1);
+ ensureGreen(restoredIndexName1);
+ assertDocsPresentInIndex(client(), restoredIndexName1, numDocsInIndex1);
+ // indexing some new docs and validating
+ indexDocuments(client, restoredIndexName1, numDocsInIndex1, numDocsInIndex1 + 2);
+ ensureGreen(restoredIndexName1);
+ assertDocsPresentInIndex(client, restoredIndexName1, numDocsInIndex1 + 2);
+
+ // restore index as seg rep enabled with remote store and remote translog disabled
+ RestoreSnapshotResponse restoreSnapshotResponse3 = client.admin()
+ .cluster()
+ .prepareRestoreSnapshot(snapshotRepoName, snapshotName1)
+ .setWaitForCompletion(false)
+ .setIgnoreIndexSettings(IndexMetadata.SETTING_REMOTE_STORE_ENABLED)
+ .setIndices(indexName1)
+ .setRenamePattern(indexName1)
+ .setRenameReplacement(restoredIndexName1Seg)
+ .get();
+ assertEquals(restoreSnapshotResponse3.status(), RestStatus.ACCEPTED);
+ ensureGreen(restoredIndexName1Seg);
+
+ GetIndexResponse getIndexResponse = client.admin()
+ .indices()
+ .getIndex(new GetIndexRequest().indices(restoredIndexName1Seg).includeDefaults(true))
+ .get();
+ indexSettings = getIndexResponse.settings().get(restoredIndexName1Seg);
+ assertNull(indexSettings.get(SETTING_REMOTE_STORE_ENABLED));
+ assertNull(indexSettings.get(SETTING_REMOTE_SEGMENT_STORE_REPOSITORY, null));
+ assertEquals(ReplicationType.SEGMENT.toString(), indexSettings.get(IndexMetadata.SETTING_REPLICATION_TYPE));
+ assertDocsPresentInIndex(client, restoredIndexName1Seg, numDocsInIndex1);
+ // indexing some new docs and validating
+ indexDocuments(client, restoredIndexName1Seg, numDocsInIndex1, numDocsInIndex1 + 2);
+ ensureGreen(restoredIndexName1Seg);
+ assertDocsPresentInIndex(client, restoredIndexName1Seg, numDocsInIndex1 + 2);
+
+ // restore index as doc rep based from shallow copy snapshot
+ RestoreSnapshotResponse restoreSnapshotResponse4 = client.admin()
+ .cluster()
+ .prepareRestoreSnapshot(snapshotRepoName, snapshotName1)
+ .setWaitForCompletion(false)
+ .setIgnoreIndexSettings(IndexMetadata.SETTING_REMOTE_STORE_ENABLED, IndexMetadata.SETTING_REPLICATION_TYPE)
+ .setIndices(indexName1)
+ .setRenamePattern(indexName1)
+ .setRenameReplacement(restoredIndexName1Doc)
+ .get();
+ assertEquals(restoreSnapshotResponse4.status(), RestStatus.ACCEPTED);
+ ensureGreen(restoredIndexName1Doc);
+
+ getIndexResponse = client.admin()
+ .indices()
+ .getIndex(new GetIndexRequest().indices(restoredIndexName1Doc).includeDefaults(true))
+ .get();
+ indexSettings = getIndexResponse.settings().get(restoredIndexName1Doc);
+ assertNull(indexSettings.get(SETTING_REMOTE_STORE_ENABLED));
+ assertNull(indexSettings.get(SETTING_REMOTE_SEGMENT_STORE_REPOSITORY, null));
+ assertNull(indexSettings.get(IndexMetadata.SETTING_REPLICATION_TYPE));
+ assertDocsPresentInIndex(client, restoredIndexName1Doc, numDocsInIndex1);
+ // indexing some new docs and validating
+ indexDocuments(client, restoredIndexName1Doc, numDocsInIndex1, numDocsInIndex1 + 2);
+ ensureGreen(restoredIndexName1Doc);
+ assertDocsPresentInIndex(client, restoredIndexName1Doc, numDocsInIndex1 + 2);
+ }
+
+ public void testRestoreInSameRemoteStoreEnabledIndex() throws IOException {
+ String clusterManagerNode = internalCluster().startClusterManagerOnlyNode();
+ String primary = internalCluster().startDataOnlyNode();
+ String indexName1 = "testindex1";
+ String indexName2 = "testindex2";
+ String snapshotRepoName = "test-restore-snapshot-repo";
+ String snapshotName1 = "test-restore-snapshot1";
+ String snapshotName2 = "test-restore-snapshot2";
+ Path absolutePath1 = randomRepoPath().toAbsolutePath();
+ logger.info("Snapshot Path [{}]", absolutePath1);
+ String restoredIndexName2 = indexName2 + "-restored";
+
+ boolean enableShallowCopy = randomBoolean();
+ createRepository(snapshotRepoName, "fs", getRepositorySettings(absolutePath1, enableShallowCopy));
+
+ Client client = client();
+ Settings indexSettings = getIndexSettings(1, 0).build();
+ createIndex(indexName1, indexSettings);
+
+ Settings indexSettings2 = getIndexSettings(1, 0).build();
+ createIndex(indexName2, indexSettings2);
+
+ final int numDocsInIndex1 = 5;
+ final int numDocsInIndex2 = 6;
+ indexDocuments(client, indexName1, numDocsInIndex1);
+ indexDocuments(client, indexName2, numDocsInIndex2);
+ ensureGreen(indexName1, indexName2);
+
+ internalCluster().startDataOnlyNode();
+ logger.info("--> snapshot");
+ CreateSnapshotResponse createSnapshotResponse = client.admin()
+ .cluster()
+ .prepareCreateSnapshot(snapshotRepoName, snapshotName1)
+ .setWaitForCompletion(true)
+ .setIndices(indexName1, indexName2)
+ .get();
+ assertThat(createSnapshotResponse.getSnapshotInfo().successfulShards(), greaterThan(0));
+ assertThat(
+ createSnapshotResponse.getSnapshotInfo().successfulShards(),
+ equalTo(createSnapshotResponse.getSnapshotInfo().totalShards())
+ );
+ assertThat(createSnapshotResponse.getSnapshotInfo().state(), equalTo(SnapshotState.SUCCESS));
+
+ updateRepository(snapshotRepoName, "fs", getRepositorySettings(absolutePath1, false));
+ CreateSnapshotResponse createSnapshotResponse2 = client.admin()
+ .cluster()
+ .prepareCreateSnapshot(snapshotRepoName, snapshotName2)
+ .setWaitForCompletion(true)
+ .setIndices(indexName1, indexName2)
+ .get();
+ assertThat(createSnapshotResponse2.getSnapshotInfo().successfulShards(), greaterThan(0));
+ assertThat(
+ createSnapshotResponse2.getSnapshotInfo().successfulShards(),
+ equalTo(createSnapshotResponse2.getSnapshotInfo().totalShards())
+ );
+ assertThat(createSnapshotResponse2.getSnapshotInfo().state(), equalTo(SnapshotState.SUCCESS));
+
+ DeleteResponse deleteResponse = client().prepareDelete(indexName1, "0").execute().actionGet();
+ assertEquals(deleteResponse.getResult(), DocWriteResponse.Result.DELETED);
+ indexDocuments(client, indexName1, numDocsInIndex1, numDocsInIndex1 + randomIntBetween(2, 5));
+ ensureGreen(indexName1);
+
+ assertAcked(client().admin().indices().prepareClose(indexName1));
+
+ RestoreSnapshotResponse restoreSnapshotResponse1 = client.admin()
+ .cluster()
+ .prepareRestoreSnapshot(snapshotRepoName, snapshotName1)
+ .setWaitForCompletion(false)
+ .setIndices(indexName1)
+ .get();
+ RestoreSnapshotResponse restoreSnapshotResponse2 = client.admin()
+ .cluster()
+ .prepareRestoreSnapshot(snapshotRepoName, snapshotName2)
+ .setWaitForCompletion(false)
+ .setIndices(indexName2)
+ .setRenamePattern(indexName2)
+ .setRenameReplacement(restoredIndexName2)
+ .get();
+ assertEquals(restoreSnapshotResponse1.status(), RestStatus.ACCEPTED);
+ assertEquals(restoreSnapshotResponse2.status(), RestStatus.ACCEPTED);
+ ensureGreen(indexName1, restoredIndexName2);
+ assertDocsPresentInIndex(client, indexName1, numDocsInIndex1);
+ assertDocsPresentInIndex(client, restoredIndexName2, numDocsInIndex2);
+
+ // deleting data for indexName1 and restoring from remote store.
+ internalCluster().stopRandomNode(InternalTestCluster.nameFilter(primary));
+ ensureRed(indexName1);
+ // Re-initialize client to make sure we are not using client from stopped node.
+ client = client(clusterManagerNode);
+ assertAcked(client.admin().indices().prepareClose(indexName1));
+ client.admin()
+ .cluster()
+ .restoreRemoteStore(new RestoreRemoteStoreRequest().indices(indexName1).restoreAllShards(true), PlainActionFuture.newFuture());
+ ensureYellowAndNoInitializingShards(indexName1);
+ ensureGreen(indexName1);
+ assertDocsPresentInIndex(client(), indexName1, numDocsInIndex1);
+ // indexing some new docs and validating
+ indexDocuments(client, indexName1, numDocsInIndex1, numDocsInIndex1 + 2);
+ ensureGreen(indexName1);
+ assertDocsPresentInIndex(client, indexName1, numDocsInIndex1 + 2);
+ }
+
+ public void testRestoreShallowCopySnapshotWithDifferentRepo() throws IOException {
+ String clusterManagerNode = internalCluster().startClusterManagerOnlyNode();
+ String primary = internalCluster().startDataOnlyNode();
+ String indexName1 = "testindex1";
+ String indexName2 = "testindex2";
+ String snapshotRepoName = "test-restore-snapshot-repo";
+ String remoteStoreRepo2Name = "test-rs-repo-2" + TEST_REMOTE_STORE_REPO_SUFFIX;
+ String snapshotName1 = "test-restore-snapshot1";
+ Path absolutePath1 = randomRepoPath().toAbsolutePath();
+ Path absolutePath3 = randomRepoPath().toAbsolutePath();
+ String restoredIndexName1 = indexName1 + "-restored";
+
+ createRepository(snapshotRepoName, "fs", getRepositorySettings(absolutePath1, false));
+ createRepository(remoteStoreRepo2Name, "fs", absolutePath3);
+
+ Client client = client();
+ Settings indexSettings = getIndexSettings(1, 0).build();
+ createIndex(indexName1, indexSettings);
+
+ Settings indexSettings2 = getIndexSettings(1, 0).build();
+ createIndex(indexName2, indexSettings2);
+
+ final int numDocsInIndex1 = 5;
+ final int numDocsInIndex2 = 6;
+ indexDocuments(client, indexName1, numDocsInIndex1);
+ indexDocuments(client, indexName2, numDocsInIndex2);
+ ensureGreen(indexName1, indexName2);
+
+ internalCluster().startDataOnlyNode();
+
+ logger.info("--> snapshot");
+ CreateSnapshotResponse createSnapshotResponse = client.admin()
+ .cluster()
+ .prepareCreateSnapshot(snapshotRepoName, snapshotName1)
+ .setWaitForCompletion(true)
+ .setIndices(indexName1, indexName2)
+ .get();
+ assertThat(createSnapshotResponse.getSnapshotInfo().successfulShards(), greaterThan(0));
+ assertThat(
+ createSnapshotResponse.getSnapshotInfo().successfulShards(),
+ equalTo(createSnapshotResponse.getSnapshotInfo().totalShards())
+ );
+ assertThat(createSnapshotResponse.getSnapshotInfo().state(), equalTo(SnapshotState.SUCCESS));
+
+ Settings remoteStoreIndexSettings = Settings.builder()
+ .put(IndexMetadata.SETTING_REMOTE_SEGMENT_STORE_REPOSITORY, remoteStoreRepo2Name)
+ .build();
+ // restore index as a remote store index with different remote store repo
+ RestoreSnapshotResponse restoreSnapshotResponse = client.admin()
+ .cluster()
+ .prepareRestoreSnapshot(snapshotRepoName, snapshotName1)
+ .setWaitForCompletion(false)
+ .setIndexSettings(remoteStoreIndexSettings)
+ .setIndices(indexName1)
+ .setRenamePattern(indexName1)
+ .setRenameReplacement(restoredIndexName1)
+ .get();
+ assertEquals(restoreSnapshotResponse.status(), RestStatus.ACCEPTED);
+ ensureGreen(restoredIndexName1);
+ assertDocsPresentInIndex(client(), restoredIndexName1, numDocsInIndex1);
+
+ // deleting data for restoredIndexName1 and restoring from remote store.
+ internalCluster().stopRandomNode(InternalTestCluster.nameFilter(primary));
+ // Re-initialize client to make sure we are not using client from stopped node.
+ client = client(clusterManagerNode);
+ assertAcked(client.admin().indices().prepareClose(restoredIndexName1));
+ client.admin()
+ .cluster()
+ .restoreRemoteStore(
+ new RestoreRemoteStoreRequest().indices(restoredIndexName1).restoreAllShards(true),
+ PlainActionFuture.newFuture()
+ );
+ ensureYellowAndNoInitializingShards(restoredIndexName1);
+ ensureGreen(restoredIndexName1);
+ // indexing some new docs and validating
+ assertDocsPresentInIndex(client, restoredIndexName1, numDocsInIndex1);
+ indexDocuments(client, restoredIndexName1, numDocsInIndex1, numDocsInIndex1 + 2);
+ ensureGreen(restoredIndexName1);
+ assertDocsPresentInIndex(client, restoredIndexName1, numDocsInIndex1 + 2);
+ }
+
+    public void testRestoreShallowSnapshotRepositoryOverriden() throws ExecutionException, InterruptedException {
+        String indexName1 = "testindex1";
+        String snapshotRepoName = "test-restore-snapshot-repo";
+        String remoteStoreRepoNameUpdated = "test-rs-repo-updated" + TEST_REMOTE_STORE_REPO_SUFFIX;
+        String snapshotName1 = "test-restore-snapshot1";
+        Path absolutePath1 = randomRepoPath().toAbsolutePath();
+        Path absolutePath2 = randomRepoPath().toAbsolutePath();
+        String[] pathTokens = absolutePath1.toString().split("/");
+        String basePath = pathTokens[pathTokens.length - 1];
+        pathTokens = Arrays.copyOf(pathTokens, pathTokens.length - 1);
+        Path location = PathUtils.get(String.join("/", pathTokens));
+        pathTokens = absolutePath2.toString().split("/");
+        String basePath2 = pathTokens[pathTokens.length - 1];
+        pathTokens = Arrays.copyOf(pathTokens, pathTokens.length - 1);
+        Path location2 = PathUtils.get(String.join("/", pathTokens));
+        logger.info("Path 1 [{}]", absolutePath1);
+        logger.info("Path 2 [{}]", absolutePath2);
+        String restoredIndexName1 = indexName1 + "-restored";
+
+        createRepository(snapshotRepoName, "fs", getRepositorySettings(location, basePath, true));
+
+        Client client = client();
+        Settings indexSettings = Settings.builder()
+            .put(super.indexSettings())
+            .put(IndexSettings.INDEX_REFRESH_INTERVAL_SETTING.getKey(), "300s")
+            .put(IndexMetadata.SETTING_REPLICATION_TYPE, ReplicationType.SEGMENT)
+            .put(IndexMetadata.SETTING_NUMBER_OF_SHARDS, 1)
+            .put(IndexMetadata.SETTING_NUMBER_OF_REPLICAS, 0)
+            .build();
+        createIndex(indexName1, indexSettings);
+
+        int numDocsInIndex1 = randomIntBetween(2, 5);
+        indexDocuments(client, indexName1, numDocsInIndex1);
+
+        ensureGreen(indexName1);
+
+        logger.info("--> snapshot");
+        CreateSnapshotResponse createSnapshotResponse = client.admin()
+            .cluster()
+            .prepareCreateSnapshot(snapshotRepoName, snapshotName1)
+            .setWaitForCompletion(true)
+            .setIndices(indexName1)
+            .get();
+        assertThat(createSnapshotResponse.getSnapshotInfo().successfulShards(), greaterThan(0));
+        assertThat(
+            createSnapshotResponse.getSnapshotInfo().successfulShards(),
+            equalTo(createSnapshotResponse.getSnapshotInfo().totalShards())
+        );
+        assertThat(createSnapshotResponse.getSnapshotInfo().state(), equalTo(SnapshotState.SUCCESS));
+
+        createRepository(BASE_REMOTE_REPO, "fs", absolutePath2);
+
+        RestoreSnapshotResponse restoreSnapshotResponse = client.admin()
+            .cluster()
+            .prepareRestoreSnapshot(snapshotRepoName, snapshotName1)
+            .setWaitForCompletion(true)
+            .setIndices(indexName1)
+            .setRenamePattern(indexName1)
+            .setRenameReplacement(restoredIndexName1)
+            .get();
+
+        assertTrue(restoreSnapshotResponse.getRestoreInfo().failedShards() > 0);
+
+        ensureRed(restoredIndexName1);
+
+        client().admin().indices().close(Requests.closeIndexRequest(restoredIndexName1)).get();
+        createRepository(remoteStoreRepoNameUpdated, "fs", remoteRepoPath);
+        RestoreSnapshotResponse restoreSnapshotResponse2 = client.admin()
+            .cluster()
+            .prepareRestoreSnapshot(snapshotRepoName, snapshotName1)
+            .setWaitForCompletion(true)
+            .setIndices(indexName1)
+            .setRenamePattern(indexName1)
+            .setRenameReplacement(restoredIndexName1)
+            .setSourceRemoteStoreRepository(remoteStoreRepoNameUpdated)
+            .get();
+
+        assertEquals(0, restoreSnapshotResponse2.getRestoreInfo().failedShards());
+        ensureGreen(restoredIndexName1);
+        assertDocsPresentInIndex(client, restoredIndexName1, numDocsInIndex1);
+
+        // indexing some new docs and validating
+        indexDocuments(client, restoredIndexName1, numDocsInIndex1, numDocsInIndex1 + 2);
+        ensureGreen(restoredIndexName1);
+        assertDocsPresentInIndex(client, restoredIndexName1, numDocsInIndex1 + 2);
+    }
+
+}
diff --git a/server/src/internalClusterTest/java/org/opensearch/remotestore/RemoteStoreBackpressureIT.java b/server/src/internalClusterTest/java/org/opensearch/remotestore/RemoteStoreBackpressureIT.java
index 3fe7f3d553a1b..9641c013bf226 100644
--- a/server/src/internalClusterTest/java/org/opensearch/remotestore/RemoteStoreBackpressureIT.java
+++ b/server/src/internalClusterTest/java/org/opensearch/remotestore/RemoteStoreBackpressureIT.java
@@ -12,12 +12,12 @@
import org.opensearch.action.admin.cluster.remotestore.stats.RemoteStoreStatsResponse;
import org.opensearch.action.admin.cluster.settings.ClusterUpdateSettingsResponse;
import org.opensearch.common.settings.Settings;
-import org.opensearch.common.unit.ByteSizeUnit;
+import org.opensearch.core.common.unit.ByteSizeUnit;
import org.opensearch.common.xcontent.XContentType;
import org.opensearch.core.common.bytes.BytesArray;
import org.opensearch.core.common.bytes.BytesReference;
import org.opensearch.core.concurrency.OpenSearchRejectedExecutionException;
-import org.opensearch.index.remote.RemoteRefreshSegmentTracker;
+import org.opensearch.index.remote.RemoteSegmentTransferTracker;
import org.opensearch.repositories.RepositoriesService;
import org.opensearch.snapshots.mockstore.MockRepository;
import org.opensearch.test.OpenSearchIntegTestCase;
@@ -92,7 +92,7 @@ private void validateBackpressure(
assertTrue(ex.getMessage().contains("rejected execution on primary shard"));
assertTrue(ex.getMessage().contains(breachMode));
- RemoteRefreshSegmentTracker.Stats stats = stats();
+ RemoteSegmentTransferTracker.Stats stats = stats();
assertTrue(stats.bytesLag > 0);
assertTrue(stats.refreshTimeLagMs > 0);
assertTrue(stats.localRefreshNumber - stats.remoteRefreshNumber > 0);
@@ -102,7 +102,7 @@ private void validateBackpressure(
.setRandomControlIOExceptionRate(0d);
assertBusy(() -> {
- RemoteRefreshSegmentTracker.Stats finalStats = stats();
+ RemoteSegmentTransferTracker.Stats finalStats = stats();
assertEquals(0, finalStats.bytesLag);
assertEquals(0, finalStats.refreshTimeLagMs);
assertEquals(0, finalStats.localRefreshNumber - finalStats.remoteRefreshNumber);
@@ -115,11 +115,11 @@ private void validateBackpressure(
deleteRepo();
}
- private RemoteRefreshSegmentTracker.Stats stats() {
+ private RemoteSegmentTransferTracker.Stats stats() {
String shardId = "0";
RemoteStoreStatsResponse response = client().admin().cluster().prepareRemoteStoreStats(INDEX_NAME, shardId).get();
final String indexShardId = String.format(Locale.ROOT, "[%s][%s]", INDEX_NAME, shardId);
-        List<RemoteStoreStats> matches = Arrays.stream(response.getShards())
+        List<RemoteStoreStats> matches = Arrays.stream(response.getRemoteStoreStats())
.filter(stat -> indexShardId.equals(stat.getStats().shardId.toString()))
.collect(Collectors.toList());
assertEquals(1, matches.size());
diff --git a/server/src/internalClusterTest/java/org/opensearch/remotestore/RemoteStoreBaseIntegTestCase.java b/server/src/internalClusterTest/java/org/opensearch/remotestore/RemoteStoreBaseIntegTestCase.java
index c5d023bdd7a64..a890bdb8f4963 100644
--- a/server/src/internalClusterTest/java/org/opensearch/remotestore/RemoteStoreBaseIntegTestCase.java
+++ b/server/src/internalClusterTest/java/org/opensearch/remotestore/RemoteStoreBaseIntegTestCase.java
@@ -26,23 +26,83 @@
import java.nio.file.Path;
import java.nio.file.SimpleFileVisitor;
import java.nio.file.attribute.BasicFileAttributes;
+import java.util.HashMap;
+import java.util.List;
+import java.util.Map;
import java.util.concurrent.atomic.AtomicInteger;
+import static org.opensearch.indices.IndicesService.CLUSTER_REPLICATION_TYPE_SETTING;
+import static org.opensearch.indices.IndicesService.CLUSTER_REMOTE_STORE_ENABLED_SETTING;
+import static org.opensearch.indices.IndicesService.CLUSTER_REMOTE_SEGMENT_STORE_REPOSITORY_SETTING;
+import static org.opensearch.indices.IndicesService.CLUSTER_REMOTE_TRANSLOG_REPOSITORY_SETTING;
import static org.opensearch.test.hamcrest.OpenSearchAssertions.assertAcked;
public class RemoteStoreBaseIntegTestCase extends OpenSearchIntegTestCase {
- protected static final String REPOSITORY_NAME = "test-remore-store-repo";
- protected static final String REPOSITORY_2_NAME = "test-remore-store-repo-2";
+ protected static final String REPOSITORY_NAME = "test-remote-store-repo";
+ protected static final String REPOSITORY_2_NAME = "test-remote-store-repo-2";
protected static final int SHARD_COUNT = 1;
protected static final int REPLICA_COUNT = 1;
+ protected static final String TOTAL_OPERATIONS = "total-operations";
+ protected static final String REFRESHED_OR_FLUSHED_OPERATIONS = "refreshed-or-flushed-operations";
+ protected static final String MAX_SEQ_NO_TOTAL = "max-seq-no-total";
+ protected static final String MAX_SEQ_NO_REFRESHED_OR_FLUSHED = "max-seq-no-refreshed-or-flushed";
+
protected Path absolutePath;
protected Path absolutePath2;
+    private final List<String> documentKeys = List.of(
+ randomAlphaOfLength(5),
+ randomAlphaOfLength(5),
+ randomAlphaOfLength(5),
+ randomAlphaOfLength(5),
+ randomAlphaOfLength(5)
+ );
+
+    protected Map<String, Long> indexData(int numberOfIterations, boolean invokeFlush, String index) {
+        long totalOperations = 0;
+        long refreshedOrFlushedOperations = 0;
+        long maxSeqNo = -1;
+        long maxSeqNoRefreshedOrFlushed = -1;
+        int shardId = 0;
+        Map<String, Long> indexingStats = new HashMap<>();
+        for (int i = 0; i < numberOfIterations; i++) {
+            if (invokeFlush) {
+                flush(index);
+            } else {
+                refresh(index);
+            }
+            maxSeqNoRefreshedOrFlushed = maxSeqNo;
+            indexingStats.put(MAX_SEQ_NO_REFRESHED_OR_FLUSHED + "-shard-" + shardId, maxSeqNoRefreshedOrFlushed);
+            refreshedOrFlushedOperations = totalOperations;
+            int numberOfOperations = randomIntBetween(20, 50);
+            for (int j = 0; j < numberOfOperations; j++) {
+                IndexResponse response = indexSingleDoc(index);
+                maxSeqNo = response.getSeqNo();
+                shardId = response.getShardId().id();
+                indexingStats.put(MAX_SEQ_NO_TOTAL + "-shard-" + shardId, maxSeqNo);
+            }
+            totalOperations += numberOfOperations;
+        }
+
+        indexingStats.put(TOTAL_OPERATIONS, totalOperations);
+        indexingStats.put(REFRESHED_OR_FLUSHED_OPERATIONS, refreshedOrFlushedOperations);
+        indexingStats.put(MAX_SEQ_NO_TOTAL, maxSeqNo);
+        indexingStats.put(MAX_SEQ_NO_REFRESHED_OR_FLUSHED, maxSeqNoRefreshedOrFlushed);
+        return indexingStats;
+    }
@Override
protected boolean addMockInternalEngine() {
return false;
}
+ @Override
+ protected Settings nodeSettings(int nodeOrdinal) {
+ return Settings.builder()
+ .put(super.nodeSettings(nodeOrdinal))
+ .put(remoteStoreClusterSettings(REPOSITORY_NAME, REPOSITORY_2_NAME, true))
+ .build();
+ }
+
@Override
protected Settings featureFlagSettings() {
return Settings.builder()
@@ -56,21 +116,41 @@ public Settings indexSettings() {
return defaultIndexSettings();
}
- IndexResponse indexSingleDoc(String indexName) {
+ protected IndexResponse indexSingleDoc(String indexName) {
return client().prepareIndex(indexName)
.setId(UUIDs.randomBase64UUID())
- .setSource(randomAlphaOfLength(5), randomAlphaOfLength(5))
+ .setSource(documentKeys.get(randomIntBetween(0, documentKeys.size() - 1)), randomAlphaOfLength(5))
.get();
}
+ public static Settings remoteStoreClusterSettings(String segmentRepoName) {
+ return remoteStoreClusterSettings(segmentRepoName, segmentRepoName);
+ }
+
+ public static Settings remoteStoreClusterSettings(
+ String segmentRepoName,
+ String translogRepoName,
+ boolean randomizeSameRepoForRSSAndRTS
+ ) {
+ return remoteStoreClusterSettings(
+ segmentRepoName,
+ randomizeSameRepoForRSSAndRTS ? (randomBoolean() ? translogRepoName : segmentRepoName) : translogRepoName
+ );
+ }
+
+ public static Settings remoteStoreClusterSettings(String segmentRepoName, String translogRepoName) {
+ return Settings.builder()
+ .put(CLUSTER_REPLICATION_TYPE_SETTING.getKey(), ReplicationType.SEGMENT)
+ .put(CLUSTER_REMOTE_STORE_ENABLED_SETTING.getKey(), true)
+ .put(CLUSTER_REMOTE_SEGMENT_STORE_REPOSITORY_SETTING.getKey(), segmentRepoName)
+ .put(CLUSTER_REMOTE_TRANSLOG_REPOSITORY_SETTING.getKey(), translogRepoName)
+ .build();
+ }
+
private Settings defaultIndexSettings() {
- boolean sameRepoForRSSAndRTS = randomBoolean();
return Settings.builder()
.put(super.indexSettings())
.put(IndexModule.INDEX_QUERY_CACHE_ENABLED_SETTING.getKey(), false)
- .put(IndexMetadata.SETTING_REMOTE_STORE_ENABLED, true)
- .put(IndexMetadata.SETTING_REMOTE_SEGMENT_STORE_REPOSITORY, REPOSITORY_NAME)
- .put(IndexMetadata.SETTING_REMOTE_TRANSLOG_STORE_REPOSITORY, sameRepoForRSSAndRTS ? REPOSITORY_NAME : REPOSITORY_2_NAME)
.put(IndexMetadata.SETTING_NUMBER_OF_SHARDS, SHARD_COUNT)
.put(IndexMetadata.SETTING_NUMBER_OF_REPLICAS, REPLICA_COUNT)
.put(IndexSettings.INDEX_REFRESH_INTERVAL_SETTING.getKey(), "300s")
@@ -119,7 +199,7 @@ public void teardown() {
assertAcked(clusterAdmin().prepareDeleteRepository(REPOSITORY_2_NAME));
}
- public int getFileCount(Path path) throws Exception {
+ public static int getFileCount(Path path) throws Exception {
final AtomicInteger filesExisting = new AtomicInteger(0);
Files.walkFileTree(path, new SimpleFileVisitor<>() {
@Override
diff --git a/server/src/internalClusterTest/java/org/opensearch/remotestore/RemoteStoreForceMergeIT.java b/server/src/internalClusterTest/java/org/opensearch/remotestore/RemoteStoreForceMergeIT.java
index b4456f887cbaa..4d5648c74ba5c 100644
--- a/server/src/internalClusterTest/java/org/opensearch/remotestore/RemoteStoreForceMergeIT.java
+++ b/server/src/internalClusterTest/java/org/opensearch/remotestore/RemoteStoreForceMergeIT.java
@@ -104,9 +104,17 @@ private void testRestoreWithMergeFlow(int numberOfIterations, boolean invokeFlus
Map indexStats = indexData(numberOfIterations, invokeFlush, flushAfterMerge, deletedDocs);
internalCluster().stopRandomNode(InternalTestCluster.nameFilter(primaryNodeName(INDEX_NAME)));
- assertAcked(client().admin().indices().prepareClose(INDEX_NAME));
- client().admin().cluster().restoreRemoteStore(new RestoreRemoteStoreRequest().indices(INDEX_NAME), PlainActionFuture.newFuture());
+ boolean restoreAllShards = randomBoolean();
+ if (restoreAllShards) {
+ assertAcked(client().admin().indices().prepareClose(INDEX_NAME));
+ }
+ client().admin()
+ .cluster()
+ .restoreRemoteStore(
+ new RestoreRemoteStoreRequest().indices(INDEX_NAME).restoreAllShards(restoreAllShards),
+ PlainActionFuture.newFuture()
+ );
ensureGreen(INDEX_NAME);
if (deletedDocs == -1) {
diff --git a/server/src/internalClusterTest/java/org/opensearch/remotestore/RemoteStoreIT.java b/server/src/internalClusterTest/java/org/opensearch/remotestore/RemoteStoreIT.java
index 42bd4b5173fa3..1bd0915a45048 100644
--- a/server/src/internalClusterTest/java/org/opensearch/remotestore/RemoteStoreIT.java
+++ b/server/src/internalClusterTest/java/org/opensearch/remotestore/RemoteStoreIT.java
@@ -10,48 +10,38 @@
import org.hamcrest.MatcherAssert;
import org.junit.Before;
-import org.opensearch.action.admin.cluster.remotestore.restore.RestoreRemoteStoreRequest;
import org.opensearch.action.admin.indices.delete.DeleteIndexRequest;
import org.opensearch.action.admin.indices.recovery.RecoveryResponse;
import org.opensearch.action.index.IndexResponse;
-import org.opensearch.action.support.PlainActionFuture;
-import org.opensearch.cluster.health.ClusterHealthStatus;
import org.opensearch.cluster.metadata.IndexMetadata;
import org.opensearch.cluster.routing.RecoverySource;
-import org.opensearch.common.UUIDs;
import org.opensearch.common.settings.Settings;
import org.opensearch.index.shard.RemoteStoreRefreshListener;
import org.opensearch.indices.recovery.RecoveryState;
import org.opensearch.plugins.Plugin;
-import org.opensearch.test.InternalTestCluster;
import org.opensearch.test.OpenSearchIntegTestCase;
import org.opensearch.test.transport.MockTransportService;
-import java.io.IOException;
import java.nio.file.Path;
import java.util.Arrays;
import java.util.Collection;
-import java.util.HashMap;
import java.util.Map;
import java.util.Optional;
import java.util.concurrent.TimeUnit;
-import static org.hamcrest.Matchers.comparesEqualTo;
+import static org.hamcrest.Matchers.is;
import static org.hamcrest.Matchers.oneOf;
+import static org.hamcrest.Matchers.comparesEqualTo;
+import static org.hamcrest.Matchers.comparesEqualTo;
import static org.hamcrest.Matchers.is;
+import static org.hamcrest.Matchers.oneOf;
import static org.opensearch.test.hamcrest.OpenSearchAssertions.assertAcked;
import static org.opensearch.test.hamcrest.OpenSearchAssertions.assertHitCount;
-@OpenSearchIntegTestCase.ClusterScope(scope = OpenSearchIntegTestCase.Scope.TEST, numDataNodes = 0)
+@OpenSearchIntegTestCase.ClusterScope(scope = OpenSearchIntegTestCase.Scope.SUITE, numDataNodes = 0)
public class RemoteStoreIT extends RemoteStoreBaseIntegTestCase {
private static final String INDEX_NAME = "remote-store-test-idx-1";
- private static final String INDEX_NAMES = "test-remote-store-1,test-remote-store-2,remote-store-test-index-1,remote-store-test-index-2";
- private static final String INDEX_NAMES_WILDCARD = "test-remote-store-*,remote-store-test-index-*";
- private static final String TOTAL_OPERATIONS = "total-operations";
- private static final String REFRESHED_OR_FLUSHED_OPERATIONS = "refreshed-or-flushed-operations";
- private static final String MAX_SEQ_NO_TOTAL = "max-seq-no-total";
- private static final String MAX_SEQ_NO_REFRESHED_OR_FLUSHED = "max-seq-no-refreshed-or-flushed";
@Override
protected Collection> nodePlugins() {
@@ -68,389 +58,6 @@ public Settings indexSettings() {
return remoteStoreIndexSettings(0);
}
- private IndexResponse indexSingleDoc() {
- return client().prepareIndex(INDEX_NAME)
- .setId(UUIDs.randomBase64UUID())
- .setSource(randomAlphaOfLength(5), randomAlphaOfLength(5))
- .get();
- }
-
-    private Map<String, Long> indexData(int numberOfIterations, boolean invokeFlush, String index) {
- long totalOperations = 0;
- long refreshedOrFlushedOperations = 0;
- long maxSeqNo = -1;
- long maxSeqNoRefreshedOrFlushed = -1;
- int shardId = 0;
-        Map<String, Long> indexingStats = new HashMap<>();
- for (int i = 0; i < numberOfIterations; i++) {
- if (invokeFlush) {
- flush(index);
- } else {
- refresh(index);
- }
- maxSeqNoRefreshedOrFlushed = maxSeqNo;
- indexingStats.put(MAX_SEQ_NO_REFRESHED_OR_FLUSHED + "-shard-" + shardId, maxSeqNoRefreshedOrFlushed);
- refreshedOrFlushedOperations = totalOperations;
- int numberOfOperations = randomIntBetween(20, 50);
- for (int j = 0; j < numberOfOperations; j++) {
- IndexResponse response = INDEX_NAME.equals(index) ? indexSingleDoc() : indexSingleDoc(index);
- maxSeqNo = response.getSeqNo();
- shardId = response.getShardId().id();
- indexingStats.put(MAX_SEQ_NO_TOTAL + "-shard-" + shardId, maxSeqNo);
- }
- totalOperations += numberOfOperations;
- }
-
- indexingStats.put(TOTAL_OPERATIONS, totalOperations);
- indexingStats.put(REFRESHED_OR_FLUSHED_OPERATIONS, refreshedOrFlushedOperations);
- indexingStats.put(MAX_SEQ_NO_TOTAL, maxSeqNo);
- indexingStats.put(MAX_SEQ_NO_REFRESHED_OR_FLUSHED, maxSeqNoRefreshedOrFlushed);
- return indexingStats;
- }
-
-    private void verifyRestoredData(Map<String, Long> indexStats, boolean checkTotal, String indexName) {
- String statsGranularity = checkTotal ? TOTAL_OPERATIONS : REFRESHED_OR_FLUSHED_OPERATIONS;
- String maxSeqNoGranularity = checkTotal ? MAX_SEQ_NO_TOTAL : MAX_SEQ_NO_REFRESHED_OR_FLUSHED;
- ensureYellowAndNoInitializingShards(indexName);
- ensureGreen(indexName);
- assertHitCount(client().prepareSearch(indexName).setSize(0).get(), indexStats.get(statsGranularity));
- IndexResponse response = INDEX_NAME.equals(indexName) ? indexSingleDoc() : indexSingleDoc(indexName);
- assertEquals(indexStats.get(maxSeqNoGranularity + "-shard-" + response.getShardId().id()) + 1, response.getSeqNo());
- refresh(indexName);
- assertHitCount(client().prepareSearch(indexName).setSize(0).get(), indexStats.get(statsGranularity) + 1);
- }
-
- private void prepareCluster(int numClusterManagerNodes, int numDataOnlyNodes, String indices, int replicaCount, int shardCount) {
- internalCluster().startClusterManagerOnlyNodes(numClusterManagerNodes);
- internalCluster().startDataOnlyNodes(numDataOnlyNodes);
- for (String index : indices.split(",")) {
- createIndex(index, remoteStoreIndexSettings(replicaCount, shardCount));
- ensureYellowAndNoInitializingShards(index);
- ensureGreen(index);
- }
- }
-
- /**
- * Helper function to test restoring an index with no replication from remote store. Only primary node is dropped.
- * @param numberOfIterations Number of times a refresh/flush should be invoked, followed by indexing some data.
- * @param invokeFlush If true, a flush is invoked. Otherwise, a refresh is invoked.
- * @throws IOException IO Exception.
- */
- private void testRestoreFlow(int numberOfIterations, boolean invokeFlush, int shardCount) throws IOException {
- prepareCluster(0, 3, INDEX_NAME, 0, shardCount);
- Map indexStats = indexData(numberOfIterations, invokeFlush, INDEX_NAME);
- assertEquals(shardCount, getNumShards(INDEX_NAME).totalNumShards);
-
- internalCluster().stopRandomNode(InternalTestCluster.nameFilter(primaryNodeName(INDEX_NAME)));
- ensureRed(INDEX_NAME);
-
- assertAcked(client().admin().indices().prepareClose(INDEX_NAME));
- client().admin().cluster().restoreRemoteStore(new RestoreRemoteStoreRequest().indices(INDEX_NAME), PlainActionFuture.newFuture());
-
- ensureGreen(INDEX_NAME);
- assertEquals(shardCount, getNumShards(INDEX_NAME).totalNumShards);
- verifyRestoredData(indexStats, true, INDEX_NAME);
- }
-
- /**
- * Helper function to test restoring an index having replicas from remote store when all the nodes housing the primary/replica drop.
- * @param remoteTranslog If true, Remote Translog Store is also enabled in addition to Remote Segment Store.
- * @param numberOfIterations Number of times a refresh/flush should be invoked, followed by indexing some data.
- * @param invokeFlush If true, a flush is invoked. Otherwise, a refresh is invoked.
- * @throws IOException IO Exception.
- */
- private void testRestoreFlowBothPrimaryReplicasDown(boolean remoteTranslog, int numberOfIterations, boolean invokeFlush, int shardCount)
- throws IOException {
- prepareCluster(1, 2, INDEX_NAME, 1, shardCount);
- Map indexStats = indexData(numberOfIterations, invokeFlush, INDEX_NAME);
- assertEquals(shardCount, getNumShards(INDEX_NAME).totalNumShards);
-
- internalCluster().stopRandomNode(InternalTestCluster.nameFilter(replicaNodeName(INDEX_NAME)));
- internalCluster().stopRandomNode(InternalTestCluster.nameFilter(primaryNodeName(INDEX_NAME)));
- ensureRed(INDEX_NAME);
- internalCluster().startDataOnlyNodes(2);
-
- assertAcked(client().admin().indices().prepareClose(INDEX_NAME));
- client().admin().cluster().restoreRemoteStore(new RestoreRemoteStoreRequest().indices(INDEX_NAME), PlainActionFuture.newFuture());
-
- ensureGreen(INDEX_NAME);
-
- assertEquals(shardCount, getNumShards(INDEX_NAME).totalNumShards);
- assertEquals(0, getNumShards(INDEX_NAME).numReplicas);
- verifyRestoredData(indexStats, true, INDEX_NAME);
- }
-
- /**
- * Helper function to test restoring multiple indices from remote store when all the nodes housing the primary/replica drop.
- * @param numberOfIterations Number of times a refresh/flush should be invoked, followed by indexing some data.
- * @param invokeFlush If true, a flush is invoked. Otherwise, a refresh is invoked.
- * @throws IOException IO Exception.
- */
- private void testRestoreFlowMultipleIndices(int numberOfIterations, boolean invokeFlush, int shardCount) throws IOException {
- prepareCluster(1, 3, INDEX_NAMES, 1, shardCount);
- String[] indices = INDEX_NAMES.split(",");
-        Map<String, Map<String, Long>> indicesStats = new HashMap<>();
- for (String index : indices) {
- Map indexStats = indexData(numberOfIterations, invokeFlush, index);
- indicesStats.put(index, indexStats);
- assertEquals(shardCount, getNumShards(index).totalNumShards);
- }
-
- for (String index : indices) {
- ClusterHealthStatus indexHealth = ensureRed(index);
- if (ClusterHealthStatus.RED.equals(indexHealth)) {
- continue;
- }
-
- if (ClusterHealthStatus.GREEN.equals(indexHealth)) {
- internalCluster().stopRandomNode(InternalTestCluster.nameFilter(replicaNodeName(index)));
- }
-
- internalCluster().stopRandomNode(InternalTestCluster.nameFilter(primaryNodeName(index)));
- }
-
- ensureRed(indices);
- internalCluster().startDataOnlyNodes(3);
-
- assertAcked(client().admin().indices().prepareClose(indices));
- client().admin()
- .cluster()
- .restoreRemoteStore(new RestoreRemoteStoreRequest().indices(INDEX_NAMES_WILDCARD.split(",")), PlainActionFuture.newFuture());
- ensureGreen(indices);
- for (String index : indices) {
- assertEquals(shardCount, getNumShards(index).totalNumShards);
- verifyRestoredData(indicesStats.get(index), true, index);
- }
- }
-
- /**
- * Simulates all data restored using Remote Translog Store.
- * @throws IOException IO Exception.
- */
- @AwaitsFix(bugUrl = "https://github.com/opensearch-project/OpenSearch/issues/6188")
- public void testRemoteTranslogRestoreWithNoDataPostCommit() throws IOException {
- testRestoreFlow(1, true, randomIntBetween(1, 5));
- }
-
- /**
- * Simulates all data restored using Remote Translog Store.
- * @throws IOException IO Exception.
- */
- public void testRemoteTranslogRestoreWithNoDataPostRefresh() throws IOException {
- testRestoreFlow(1, false, randomIntBetween(1, 5));
- }
-
- /**
- * Simulates refreshed data restored using Remote Segment Store
- * and unrefreshed data restored using Remote Translog Store.
- * @throws IOException IO Exception.
- */
- public void testRemoteTranslogRestoreWithRefreshedData() throws IOException {
- testRestoreFlow(randomIntBetween(2, 5), false, randomIntBetween(1, 5));
- }
-
- /**
- * Simulates refreshed data restored using Remote Segment Store
- * and unrefreshed data restored using Remote Translog Store.
- * @throws IOException IO Exception.
- */
- public void testRemoteTranslogRestoreWithCommittedData() throws IOException {
- testRestoreFlow(randomIntBetween(2, 5), true, randomIntBetween(1, 5));
- }
-
- /**
- * Simulates all data restored using Remote Translog Store.
- * @throws IOException IO Exception.
- */
- // @AwaitsFix(bugUrl = "https://github.com/opensearch-project/OpenSearch/issues/6188")
- @AwaitsFix(bugUrl = "https://github.com/opensearch-project/OpenSearch/issues/8479")
- public void testRTSRestoreWithNoDataPostCommitPrimaryReplicaDown() throws IOException {
- testRestoreFlowBothPrimaryReplicasDown(true, 1, true, randomIntBetween(1, 5));
- }
-
- /**
- * Simulates all data restored using Remote Translog Store.
- * @throws IOException IO Exception.
- */
- @AwaitsFix(bugUrl = "https://github.com/opensearch-project/OpenSearch/issues/8479")
- public void testRTSRestoreWithNoDataPostRefreshPrimaryReplicaDown() throws IOException {
- testRestoreFlowBothPrimaryReplicasDown(true, 1, false, randomIntBetween(1, 5));
- }
-
- /**
- * Simulates refreshed data restored using Remote Segment Store
- * and unrefreshed data restored using Remote Translog Store.
- * @throws IOException IO Exception.
- */
- @AwaitsFix(bugUrl = "https://github.com/opensearch-project/OpenSearch/issues/8479")
- public void testRTSRestoreWithRefreshedDataPrimaryReplicaDown() throws IOException {
- testRestoreFlowBothPrimaryReplicasDown(true, randomIntBetween(2, 5), false, randomIntBetween(1, 5));
- }
-
- /**
- * Simulates refreshed data restored using Remote Segment Store
- * and unrefreshed data restored using Remote Translog Store.
- * @throws IOException IO Exception.
- */
- @AwaitsFix(bugUrl = "https://github.com/opensearch-project/OpenSearch/issues/8479")
- public void testRTSRestoreWithCommittedDataPrimaryReplicaDown() throws IOException {
- testRestoreFlowBothPrimaryReplicasDown(true, randomIntBetween(2, 5), true, randomIntBetween(1, 5));
- }
-
- /**
- * Simulates refreshed data restored using Remote Segment Store
- * and unrefreshed data restored using Remote Translog Store
- * for multiple indices matching a wildcard name pattern.
- * @throws IOException IO Exception.
- */
- @AwaitsFix(bugUrl = "https://github.com/opensearch-project/OpenSearch/issues/8480")
- public void testRTSRestoreWithCommittedDataMultipleIndicesPatterns() throws IOException {
- testRestoreFlowMultipleIndices(2, true, randomIntBetween(1, 5));
- }
-
- /**
- * Simulates refreshed data restored using Remote Segment Store
- * and unrefreshed data restored using Remote Translog Store,
- * with all remote-enabled red indices considered for the restore by default.
- * @throws IOException IO Exception.
- */
- @AwaitsFix(bugUrl = "https://github.com/opensearch-project/OpenSearch/issues/8480")
- public void testRTSRestoreWithCommittedDataDefaultAllIndices() throws IOException {
- int shardCount = randomIntBetween(1, 5);
- prepareCluster(1, 3, INDEX_NAMES, 1, shardCount);
- String[] indices = INDEX_NAMES.split(",");
-        Map<String, Map<String, Long>> indicesStats = new HashMap<>();
- for (String index : indices) {
- Map indexStats = indexData(2, true, index);
- indicesStats.put(index, indexStats);
- assertEquals(shardCount, getNumShards(index).totalNumShards);
- }
-
- for (String index : indices) {
- if (ClusterHealthStatus.RED.equals(ensureRed(index))) {
- continue;
- }
-
- if (ClusterHealthStatus.GREEN.equals(ensureRed(index))) {
- internalCluster().stopRandomNode(InternalTestCluster.nameFilter(replicaNodeName(index)));
- }
-
- internalCluster().stopRandomNode(InternalTestCluster.nameFilter(primaryNodeName(index)));
- }
-
- ensureRed(indices);
- internalCluster().startDataOnlyNodes(3);
-
- assertAcked(client().admin().indices().prepareClose(indices));
- client().admin()
- .cluster()
- .restoreRemoteStore(new RestoreRemoteStoreRequest().indices(new String[] {}), PlainActionFuture.newFuture());
- ensureGreen(indices);
-
- for (String index : indices) {
- assertEquals(shardCount, getNumShards(index).totalNumShards);
- verifyRestoredData(indicesStats.get(index), true, index);
- }
- }
-
- /**
- * Simulates refreshed data restored using Remote Segment Store
- * and unrefreshed data restored using Remote Translog Store,
- * with only some of the remote-enabled red indices requested for the restore.
- * @throws IOException IO Exception.
- */
- public void testRTSRestoreWithCommittedDataNotAllRedRemoteIndices() throws IOException {
- int shardCount = randomIntBetween(1, 5);
- prepareCluster(1, 3, INDEX_NAMES, 0, shardCount);
- String[] indices = INDEX_NAMES.split(",");
-        Map<String, Map<String, Long>> indicesStats = new HashMap<>();
- for (String index : indices) {
- Map indexStats = indexData(2, true, index);
- indicesStats.put(index, indexStats);
- assertEquals(shardCount, getNumShards(index).totalNumShards);
- }
-
- for (String index : indices) {
- if (ClusterHealthStatus.RED.equals(ensureRed(index))) {
- continue;
- }
-
- internalCluster().stopRandomNode(InternalTestCluster.nameFilter(primaryNodeName(index)));
- }
-
- ensureRed(indices);
- internalCluster().startDataOnlyNodes(3);
-
- assertAcked(client().admin().indices().prepareClose(indices[0], indices[1]));
- client().admin()
- .cluster()
- .restoreRemoteStore(new RestoreRemoteStoreRequest().indices(indices[0], indices[1]), PlainActionFuture.newFuture());
- ensureGreen(indices[0], indices[1]);
- assertEquals(shardCount, getNumShards(indices[0]).totalNumShards);
- verifyRestoredData(indicesStats.get(indices[0]), true, indices[0]);
- assertEquals(shardCount, getNumShards(indices[1]).totalNumShards);
- verifyRestoredData(indicesStats.get(indices[1]), true, indices[1]);
- ensureRed(indices[2], indices[3]);
- }
-
- /**
- * Simulates refreshed data restored using Remote Segment Store
- * and unrefreshed data restored using Remote Translog Store,
- * with all remote-enabled red indices being considered for the restore
- * except those matching the specified exclusion pattern.
- * @throws IOException IO Exception.
- */
- @AwaitsFix(bugUrl = "https://github.com/opensearch-project/OpenSearch/issues/8480")
- public void testRTSRestoreWithCommittedDataExcludeIndicesPatterns() throws IOException {
- int shardCount = randomIntBetween(1, 5);
- prepareCluster(1, 3, INDEX_NAMES, 1, shardCount);
- String[] indices = INDEX_NAMES.split(",");
-        Map<String, Map<String, Long>> indicesStats = new HashMap<>();
- for (String index : indices) {
- Map indexStats = indexData(2, true, index);
- indicesStats.put(index, indexStats);
- assertEquals(shardCount, getNumShards(index).totalNumShards);
- }
-
- for (String index : indices) {
- if (ClusterHealthStatus.RED.equals(ensureRed(index))) {
- continue;
- }
-
- if (ClusterHealthStatus.GREEN.equals(ensureRed(index))) {
- internalCluster().stopRandomNode(InternalTestCluster.nameFilter(replicaNodeName(index)));
- }
-
- internalCluster().stopRandomNode(InternalTestCluster.nameFilter(primaryNodeName(index)));
- }
-
- ensureRed(indices);
- internalCluster().startDataOnlyNodes(3);
-
- assertAcked(client().admin().indices().prepareClose(indices[0], indices[1]));
- client().admin()
- .cluster()
- .restoreRemoteStore(new RestoreRemoteStoreRequest().indices("*", "-remote-store-test-index-*"), PlainActionFuture.newFuture());
- ensureGreen(indices[0], indices[1]);
- assertEquals(shardCount, getNumShards(indices[0]).totalNumShards);
- verifyRestoredData(indicesStats.get(indices[0]), true, indices[0]);
- assertEquals(shardCount, getNumShards(indices[1]).totalNumShards);
- verifyRestoredData(indicesStats.get(indices[1]), true, indices[1]);
- ensureRed(indices[2], indices[3]);
- }
-
- /**
- * Simulates no-op restore from remote store,
- * when the index has no data.
- * @throws IOException IO Exception.
- */
- @AwaitsFix(bugUrl = "https://github.com/opensearch-project/OpenSearch/issues/6188")
- public void testRTSRestoreNoData() throws IOException {
- testRestoreFlow(0, true, randomIntBetween(1, 5));
- }
-
- // TODO: Restore flow - index aliases
-
private void testPeerRecovery(int numberOfIterations, boolean invokeFlush) throws Exception {
internalCluster().startDataOnlyNodes(3);
createIndex(INDEX_NAME, remoteStoreIndexSettings(0));
@@ -490,7 +97,7 @@ private void testPeerRecovery(int numberOfIterations, boolean invokeFlush) throw
assertEquals(0, recoverySource.get().getIndex().recoveredFileCount());
}
- IndexResponse response = indexSingleDoc();
+ IndexResponse response = indexSingleDoc(INDEX_NAME);
assertEquals(indexStats.get(MAX_SEQ_NO_TOTAL) + 1, response.getSeqNo());
refresh(INDEX_NAME);
assertBusy(
diff --git a/server/src/internalClusterTest/java/org/opensearch/remotestore/RemoteStoreRestoreIT.java b/server/src/internalClusterTest/java/org/opensearch/remotestore/RemoteStoreRestoreIT.java
new file mode 100644
index 0000000000000..fb65e98c13b3d
--- /dev/null
+++ b/server/src/internalClusterTest/java/org/opensearch/remotestore/RemoteStoreRestoreIT.java
@@ -0,0 +1,456 @@
+/*
+ * SPDX-License-Identifier: Apache-2.0
+ *
+ * The OpenSearch Contributors require contributions made to
+ * this file be licensed under the Apache-2.0 license or a
+ * compatible open source license.
+ */
+
+package org.opensearch.remotestore;
+
+import org.junit.Before;
+import org.opensearch.action.admin.cluster.remotestore.restore.RestoreRemoteStoreRequest;
+import org.opensearch.action.admin.cluster.remotestore.restore.RestoreRemoteStoreResponse;
+import org.opensearch.action.index.IndexResponse;
+import org.opensearch.action.support.PlainActionFuture;
+import org.opensearch.cluster.health.ClusterHealthStatus;
+import org.opensearch.common.settings.Settings;
+import org.opensearch.plugins.Plugin;
+import org.opensearch.test.InternalTestCluster;
+import org.opensearch.test.OpenSearchIntegTestCase;
+import org.opensearch.test.transport.MockTransportService;
+
+import java.io.IOException;
+import java.util.Arrays;
+import java.util.Collection;
+import java.util.HashMap;
+import java.util.Map;
+import java.util.concurrent.ExecutionException;
+
+import static org.opensearch.test.hamcrest.OpenSearchAssertions.assertAcked;
+import static org.opensearch.test.hamcrest.OpenSearchAssertions.assertHitCount;
+
+@OpenSearchIntegTestCase.ClusterScope(scope = OpenSearchIntegTestCase.Scope.SUITE, numDataNodes = 0)
+public class RemoteStoreRestoreIT extends RemoteStoreBaseIntegTestCase {
+ private static final String INDEX_NAME = "remote-store-test-idx-1";
+ private static final String INDEX_NAMES = "test-remote-store-1,test-remote-store-2,remote-store-test-index-1,remote-store-test-index-2";
+ private static final String INDEX_NAMES_WILDCARD = "test-remote-store-*,remote-store-test-index-*";
+ private static final String TOTAL_OPERATIONS = "total-operations";
+ private static final String REFRESHED_OR_FLUSHED_OPERATIONS = "refreshed-or-flushed-operations";
+ private static final String MAX_SEQ_NO_TOTAL = "max-seq-no-total";
+ private static final String MAX_SEQ_NO_REFRESHED_OR_FLUSHED = "max-seq-no-refreshed-or-flushed";
+
+ @Override
+ public Settings indexSettings() {
+ return remoteStoreIndexSettings(0);
+ }
+
+ @Override
+ protected Collection> nodePlugins() {
+ return Arrays.asList(MockTransportService.TestPlugin.class);
+ }
+
+ @Before
+ public void setup() {
+ setupRepo();
+ }
+
+ private void restore(String... indices) {
+ boolean restoreAllShards = randomBoolean();
+ if (restoreAllShards) {
+ assertAcked(client().admin().indices().prepareClose(indices));
+ }
+ client().admin()
+ .cluster()
+ .restoreRemoteStore(
+ new RestoreRemoteStoreRequest().indices(indices).restoreAllShards(restoreAllShards),
+ PlainActionFuture.newFuture()
+ );
+ }
+
+ private void verifyRestoredData(Map indexStats, boolean checkTotal, String indexName) {
+ // Refresh is required to get the updated doc count from already-active shards that were not restored
+ refresh(indexName);
+ String statsGranularity = checkTotal ? TOTAL_OPERATIONS : REFRESHED_OR_FLUSHED_OPERATIONS;
+ String maxSeqNoGranularity = checkTotal ? MAX_SEQ_NO_TOTAL : MAX_SEQ_NO_REFRESHED_OR_FLUSHED;
+ ensureYellowAndNoInitializingShards(indexName);
+ ensureGreen(indexName);
+ assertHitCount(client().prepareSearch(indexName).setSize(0).get(), indexStats.get(statsGranularity));
+ IndexResponse response = indexSingleDoc(indexName);
+ assertEquals(indexStats.get(maxSeqNoGranularity + "-shard-" + response.getShardId().id()) + 1, response.getSeqNo());
+ refresh(indexName);
+ assertHitCount(client().prepareSearch(indexName).setSize(0).get(), indexStats.get(statsGranularity) + 1);
+ }
+
+ private void prepareCluster(int numClusterManagerNodes, int numDataOnlyNodes, String indices, int replicaCount, int shardCount) {
+ internalCluster().startClusterManagerOnlyNodes(numClusterManagerNodes);
+ internalCluster().startDataOnlyNodes(numDataOnlyNodes);
+ for (String index : indices.split(",")) {
+ createIndex(index, remoteStoreIndexSettings(replicaCount, shardCount));
+ ensureYellowAndNoInitializingShards(index);
+ ensureGreen(index);
+ }
+ }
+
+ /**
+ * Simulates all data restored using Remote Translog Store.
+ * @throws IOException IO Exception.
+ */
+ @AwaitsFix(bugUrl = "https://github.com/opensearch-project/OpenSearch/issues/6188")
+ public void testRemoteTranslogRestoreWithNoDataPostCommit() throws IOException {
+ testRestoreFlow(1, true, randomIntBetween(1, 5));
+ }
+
+ /**
+ * Simulates all data restored using Remote Translog Store.
+ * @throws IOException IO Exception.
+ */
+ public void testRemoteTranslogRestoreWithNoDataPostRefresh() throws IOException {
+ testRestoreFlow(1, false, randomIntBetween(1, 5));
+ }
+
+ /**
+ * Simulates refreshed data restored using Remote Segment Store
+ * and unrefreshed data restored using Remote Translog Store.
+ * @throws IOException IO Exception.
+ */
+ public void testRemoteTranslogRestoreWithRefreshedData() throws IOException {
+ testRestoreFlow(randomIntBetween(2, 5), false, randomIntBetween(1, 5));
+ }
+
+ /**
+ * Simulates refreshed data restored using Remote Segment Store
+ * and unrefreshed data restored using Remote Translog Store.
+ * @throws IOException IO Exception.
+ */
+ public void testRemoteTranslogRestoreWithCommittedData() throws IOException {
+ testRestoreFlow(randomIntBetween(2, 5), true, randomIntBetween(1, 5));
+ }
+
+ /**
+ * Simulates all data restored using Remote Translog Store when both the primary and replica nodes go down.
+ * @throws IOException IO Exception.
+ */
+ // Previously muted for issue 6188; now tracked via issue 8479 below.
+ @AwaitsFix(bugUrl = "https://github.com/opensearch-project/OpenSearch/issues/8479")
+ public void testRTSRestoreWithNoDataPostCommitPrimaryReplicaDown() throws IOException {
+ testRestoreFlowBothPrimaryReplicasDown(1, true, randomIntBetween(1, 5));
+ }
+
+ /**
+ * Simulates all data restored using Remote Translog Store when both the primary and replica nodes go down.
+ * @throws IOException IO Exception.
+ */
+ @AwaitsFix(bugUrl = "https://github.com/opensearch-project/OpenSearch/issues/8479")
+ public void testRTSRestoreWithNoDataPostRefreshPrimaryReplicaDown() throws IOException {
+ testRestoreFlowBothPrimaryReplicasDown(1, false, randomIntBetween(1, 5));
+ }
+
+ /**
+ * Simulates refreshed data restored using Remote Segment Store
+ * and unrefreshed data restored using Remote Translog Store.
+ * @throws IOException IO Exception.
+ */
+ @AwaitsFix(bugUrl = "https://github.com/opensearch-project/OpenSearch/issues/8479")
+ public void testRTSRestoreWithRefreshedDataPrimaryReplicaDown() throws IOException {
+ testRestoreFlowBothPrimaryReplicasDown(randomIntBetween(2, 5), false, randomIntBetween(1, 5));
+ }
+
+ /**
+ * Simulates refreshed data restored using Remote Segment Store
+ * and unrefreshed data restored using Remote Translog Store.
+ * @throws IOException IO Exception.
+ */
+ @AwaitsFix(bugUrl = "https://github.com/opensearch-project/OpenSearch/issues/8479")
+ public void testRTSRestoreWithCommittedDataPrimaryReplicaDown() throws IOException {
+ testRestoreFlowBothPrimaryReplicasDown(randomIntBetween(2, 5), true, randomIntBetween(1, 5));
+ }
+
+ private void restoreAndVerify(int shardCount, int replicaCount, Map indexStats) {
+ restore(INDEX_NAME);
+ ensureGreen(INDEX_NAME);
+ // Verify the restored index has the expected shard and replica counts before checking data
+ assertEquals(shardCount * (1 + replicaCount), getNumShards(INDEX_NAME).totalNumShards);
+ assertEquals(replicaCount, getNumShards(INDEX_NAME).numReplicas);
+ verifyRestoredData(indexStats, true, INDEX_NAME);
+ }
+
+ /**
+ * Helper function to test restoring an index with no replication from remote store. Only primary node is dropped.
+ * @param numberOfIterations Number of times a refresh/flush should be invoked, followed by indexing some data.
+ * @param invokeFlush If true, a flush is invoked. Otherwise, a refresh is invoked.
+ * @throws IOException IO Exception.
+ */
+ private void testRestoreFlow(int numberOfIterations, boolean invokeFlush, int shardCount) throws IOException {
+ prepareCluster(0, 3, INDEX_NAME, 0, shardCount);
+ Map indexStats = indexData(numberOfIterations, invokeFlush, INDEX_NAME);
+ assertEquals(shardCount, getNumShards(INDEX_NAME).totalNumShards);
+
+ internalCluster().stopRandomNode(InternalTestCluster.nameFilter(primaryNodeName(INDEX_NAME)));
+ ensureRed(INDEX_NAME);
+
+ restoreAndVerify(shardCount, 0, indexStats);
+ }
+
+ /**
+ * Helper function to test restoring an index having replicas from remote store when all the nodes housing the primary/replica drop.
+ * @param numberOfIterations Number of times a refresh/flush should be invoked, followed by indexing some data.
+ * @param invokeFlush If true, a flush is invoked. Otherwise, a refresh is invoked.
+ * @throws IOException IO Exception.
+ */
+ private void testRestoreFlowBothPrimaryReplicasDown(int numberOfIterations, boolean invokeFlush, int shardCount) throws IOException {
+ prepareCluster(1, 2, INDEX_NAME, 1, shardCount);
+ Map indexStats = indexData(numberOfIterations, invokeFlush, INDEX_NAME);
+ assertEquals(shardCount, getNumShards(INDEX_NAME).totalNumShards);
+
+ internalCluster().stopRandomNode(InternalTestCluster.nameFilter(replicaNodeName(INDEX_NAME)));
+ internalCluster().stopRandomNode(InternalTestCluster.nameFilter(primaryNodeName(INDEX_NAME)));
+ ensureRed(INDEX_NAME);
+ internalCluster().startDataOnlyNodes(2);
+
+ restoreAndVerify(shardCount, 1, indexStats);
+ }
+
+ /**
+ * Helper function to test restoring multiple indices from remote store when all the nodes housing the primary/replica drop.
+ * @param numberOfIterations Number of times a refresh/flush should be invoked, followed by indexing some data.
+ * @param invokeFlush If true, a flush is invoked. Otherwise, a refresh is invoked.
+ * @throws IOException IO Exception.
+ */
+ private void testRestoreFlowMultipleIndices(int numberOfIterations, boolean invokeFlush, int shardCount) throws IOException {
+ prepareCluster(1, 3, INDEX_NAMES, 1, shardCount);
+ String[] indices = INDEX_NAMES.split(",");
+ Map> indicesStats = new HashMap<>();
+ for (String index : indices) {
+ Map indexStats = indexData(numberOfIterations, invokeFlush, index);
+ indicesStats.put(index, indexStats);
+ assertEquals(shardCount, getNumShards(index).totalNumShards);
+ }
+
+ for (String index : indices) {
+ ClusterHealthStatus indexHealth = ensureRed(index);
+ if (ClusterHealthStatus.RED.equals(indexHealth)) {
+ continue;
+ }
+
+ if (ClusterHealthStatus.GREEN.equals(indexHealth)) {
+ internalCluster().stopRandomNode(InternalTestCluster.nameFilter(replicaNodeName(index)));
+ }
+
+ internalCluster().stopRandomNode(InternalTestCluster.nameFilter(primaryNodeName(index)));
+ }
+
+ ensureRed(indices);
+ internalCluster().startDataOnlyNodes(3);
+
+ boolean restoreAllShards = randomBoolean();
+ if (restoreAllShards) {
+ assertAcked(client().admin().indices().prepareClose(indices));
+ }
+ client().admin()
+ .cluster()
+ .restoreRemoteStore(
+ new RestoreRemoteStoreRequest().indices(INDEX_NAMES_WILDCARD.split(",")).restoreAllShards(restoreAllShards),
+ PlainActionFuture.newFuture()
+ );
+ ensureGreen(indices);
+ for (String index : indices) {
+ assertEquals(shardCount, getNumShards(index).totalNumShards);
+ verifyRestoredData(indicesStats.get(index), true, index);
+ }
+ }
+
+ public void testRestoreFlowAllShardsNoRedIndex() throws InterruptedException {
+ int shardCount = randomIntBetween(1, 5);
+ prepareCluster(0, 3, INDEX_NAME, 0, shardCount);
+ indexData(randomIntBetween(2, 5), true, INDEX_NAME);
+ assertEquals(shardCount, getNumShards(INDEX_NAME).totalNumShards);
+
+ PlainActionFuture future = PlainActionFuture.newFuture();
+ client().admin().cluster().restoreRemoteStore(new RestoreRemoteStoreRequest().indices(INDEX_NAME).restoreAllShards(true), future);
+ try {
+ future.get();
+ } catch (ExecutionException e) {
+ // If the request goes to co-ordinator, e.getCause() can be RemoteTransportException
+ assertTrue(e.getCause() instanceof IllegalStateException || e.getCause().getCause() instanceof IllegalStateException);
+ }
+ }
+
+ public void testRestoreFlowNoRedIndex() {
+ int shardCount = randomIntBetween(1, 5);
+ prepareCluster(0, 3, INDEX_NAME, 0, shardCount);
+ Map indexStats = indexData(randomIntBetween(2, 5), true, INDEX_NAME);
+ assertEquals(shardCount, getNumShards(INDEX_NAME).totalNumShards);
+
+ client().admin()
+ .cluster()
+ .restoreRemoteStore(new RestoreRemoteStoreRequest().indices(INDEX_NAME).restoreAllShards(false), PlainActionFuture.newFuture());
+
+ ensureGreen(INDEX_NAME);
+ assertEquals(shardCount, getNumShards(INDEX_NAME).totalNumShards);
+ verifyRestoredData(indexStats, true, INDEX_NAME);
+ }
+
+ /**
+ * Simulates refreshed data restored using Remote Segment Store
+ * and unrefreshed data restored using Remote Translog Store
+ * for multiple indices matching a wildcard name pattern.
+ * @throws IOException IO Exception.
+ */
+ @AwaitsFix(bugUrl = "https://github.com/opensearch-project/OpenSearch/issues/8480")
+ public void testRTSRestoreWithCommittedDataMultipleIndicesPatterns() throws IOException {
+ testRestoreFlowMultipleIndices(2, true, randomIntBetween(1, 5));
+ }
+
+ /**
+ * Simulates refreshed data restored using Remote Segment Store
+ * and unrefreshed data restored using Remote Translog Store,
+ * with all remote-enabled red indices considered for the restore by default.
+ * @throws IOException IO Exception.
+ */
+ @AwaitsFix(bugUrl = "https://github.com/opensearch-project/OpenSearch/issues/8480")
+ public void testRTSRestoreWithCommittedDataDefaultAllIndices() throws IOException {
+ int shardCount = randomIntBetween(1, 5);
+ prepareCluster(1, 3, INDEX_NAMES, 1, shardCount);
+ String[] indices = INDEX_NAMES.split(",");
+ Map> indicesStats = new HashMap<>();
+ for (String index : indices) {
+ Map indexStats = indexData(2, true, index);
+ indicesStats.put(index, indexStats);
+ assertEquals(shardCount, getNumShards(index).totalNumShards);
+ }
+
+ for (String index : indices) {
+ if (ClusterHealthStatus.RED.equals(ensureRed(index))) {
+ continue;
+ }
+
+ if (ClusterHealthStatus.GREEN.equals(ensureRed(index))) {
+ internalCluster().stopRandomNode(InternalTestCluster.nameFilter(replicaNodeName(index)));
+ }
+
+ internalCluster().stopRandomNode(InternalTestCluster.nameFilter(primaryNodeName(index)));
+ }
+
+ ensureRed(indices);
+ internalCluster().startDataOnlyNodes(3);
+
+ restore(indices);
+ ensureGreen(indices);
+
+ for (String index : indices) {
+ assertEquals(shardCount, getNumShards(index).totalNumShards);
+ verifyRestoredData(indicesStats.get(index), true, index);
+ }
+ }
+
+ /**
+ * Simulates refreshed data restored using Remote Segment Store
+ * and unrefreshed data restored using Remote Translog Store,
+ * with only some of the remote-enabled red indices requested for the restore.
+ * @throws IOException IO Exception.
+ */
+ public void testRTSRestoreWithCommittedDataNotAllRedRemoteIndices() throws IOException {
+ int shardCount = randomIntBetween(1, 5);
+ prepareCluster(1, 3, INDEX_NAMES, 0, shardCount);
+ String[] indices = INDEX_NAMES.split(",");
+ Map> indicesStats = new HashMap<>();
+ for (String index : indices) {
+ Map indexStats = indexData(2, true, index);
+ indicesStats.put(index, indexStats);
+ assertEquals(shardCount, getNumShards(index).totalNumShards);
+ }
+
+ for (String index : indices) {
+ if (ClusterHealthStatus.RED.equals(ensureRed(index))) {
+ continue;
+ }
+
+ internalCluster().stopRandomNode(InternalTestCluster.nameFilter(primaryNodeName(index)));
+ }
+
+ ensureRed(indices);
+ internalCluster().startDataOnlyNodes(3);
+
+ boolean restoreAllShards = randomBoolean();
+ if (restoreAllShards) {
+ assertAcked(client().admin().indices().prepareClose(indices[0], indices[1]));
+ }
+ client().admin()
+ .cluster()
+ .restoreRemoteStore(
+ new RestoreRemoteStoreRequest().indices(indices[0], indices[1]).restoreAllShards(restoreAllShards),
+ PlainActionFuture.newFuture()
+ );
+ ensureGreen(indices[0], indices[1]);
+ assertEquals(shardCount, getNumShards(indices[0]).totalNumShards);
+ verifyRestoredData(indicesStats.get(indices[0]), true, indices[0]);
+ assertEquals(shardCount, getNumShards(indices[1]).totalNumShards);
+ verifyRestoredData(indicesStats.get(indices[1]), true, indices[1]);
+ ensureRed(indices[2], indices[3]);
+ }
+
+ /**
+ * Simulates refreshed data restored using Remote Segment Store
+ * and unrefreshed data restored using Remote Translog Store,
+ * with all remote-enabled red indices being considered for the restore
+ * except those matching the specified exclusion pattern.
+ * @throws IOException IO Exception.
+ */
+ @AwaitsFix(bugUrl = "https://github.com/opensearch-project/OpenSearch/issues/8480")
+ public void testRTSRestoreWithCommittedDataExcludeIndicesPatterns() throws IOException {
+ int shardCount = randomIntBetween(1, 5);
+ prepareCluster(1, 3, INDEX_NAMES, 1, shardCount);
+ String[] indices = INDEX_NAMES.split(",");
+ Map> indicesStats = new HashMap<>();
+ for (String index : indices) {
+ Map indexStats = indexData(2, true, index);
+ indicesStats.put(index, indexStats);
+ assertEquals(shardCount, getNumShards(index).totalNumShards);
+ }
+
+ for (String index : indices) {
+ if (ClusterHealthStatus.RED.equals(ensureRed(index))) {
+ continue;
+ }
+
+ if (ClusterHealthStatus.GREEN.equals(ensureRed(index))) {
+ internalCluster().stopRandomNode(InternalTestCluster.nameFilter(replicaNodeName(index)));
+ }
+
+ internalCluster().stopRandomNode(InternalTestCluster.nameFilter(primaryNodeName(index)));
+ }
+
+ ensureRed(indices);
+ internalCluster().startDataOnlyNodes(3);
+
+ boolean restoreAllShards = randomBoolean();
+ if (restoreAllShards) {
+ assertAcked(client().admin().indices().prepareClose(indices[0], indices[1]));
+ }
+ client().admin()
+ .cluster()
+ .restoreRemoteStore(
+ new RestoreRemoteStoreRequest().indices("*", "-remote-store-test-index-*").restoreAllShards(restoreAllShards),
+ PlainActionFuture.newFuture()
+ );
+ ensureGreen(indices[0], indices[1]);
+ assertEquals(shardCount, getNumShards(indices[0]).totalNumShards);
+ verifyRestoredData(indicesStats.get(indices[0]), true, indices[0]);
+ assertEquals(shardCount, getNumShards(indices[1]).totalNumShards);
+ verifyRestoredData(indicesStats.get(indices[1]), true, indices[1]);
+ ensureRed(indices[2], indices[3]);
+ }
+
+ /**
+ * Simulates no-op restore from remote store,
+ * when the index has no data.
+ * @throws IOException IO Exception.
+ */
+ @AwaitsFix(bugUrl = "https://github.com/opensearch-project/OpenSearch/issues/6188")
+ public void testRTSRestoreNoData() throws IOException {
+ testRestoreFlow(0, true, randomIntBetween(1, 5));
+ }
+
+ // TODO: Restore flow - index aliases
+}
diff --git a/server/src/internalClusterTest/java/org/opensearch/remotestore/RemoteStoreStatsIT.java b/server/src/internalClusterTest/java/org/opensearch/remotestore/RemoteStoreStatsIT.java
index 76ef153fab963..bd546a01b0b88 100644
--- a/server/src/internalClusterTest/java/org/opensearch/remotestore/RemoteStoreStatsIT.java
+++ b/server/src/internalClusterTest/java/org/opensearch/remotestore/RemoteStoreStatsIT.java
@@ -9,18 +9,34 @@
package org.opensearch.remotestore;
import org.junit.Before;
+import org.opensearch.action.admin.cluster.health.ClusterHealthResponse;
+import org.opensearch.action.admin.cluster.remotestore.restore.RestoreRemoteStoreRequest;
import org.opensearch.action.admin.cluster.remotestore.stats.RemoteStoreStats;
import org.opensearch.action.admin.cluster.remotestore.stats.RemoteStoreStatsRequestBuilder;
import org.opensearch.action.admin.cluster.remotestore.stats.RemoteStoreStatsResponse;
+import org.opensearch.action.support.PlainActionFuture;
import org.opensearch.cluster.ClusterState;
import org.opensearch.cluster.node.DiscoveryNode;
-import org.opensearch.index.remote.RemoteRefreshSegmentTracker;
+import org.opensearch.cluster.routing.ShardRouting;
+import org.opensearch.cluster.routing.ShardRoutingState;
+import org.opensearch.cluster.routing.allocation.command.MoveAllocationCommand;
+import org.opensearch.common.settings.Settings;
+import org.opensearch.index.IndexSettings;
+import org.opensearch.index.remote.RemoteSegmentTransferTracker;
+import org.opensearch.test.InternalTestCluster;
import org.opensearch.test.OpenSearchIntegTestCase;
+import java.io.IOException;
+import java.util.ArrayList;
import java.util.Arrays;
import java.util.List;
import java.util.Locale;
+import java.util.concurrent.TimeUnit;
import java.util.stream.Collectors;
+import java.util.stream.Stream;
+
+import static org.opensearch.cluster.metadata.IndexMetadata.SETTING_NUMBER_OF_REPLICAS;
+import static org.opensearch.test.hamcrest.OpenSearchAssertions.assertAcked;
@OpenSearchIntegTestCase.ClusterScope(scope = OpenSearchIntegTestCase.Scope.TEST, numDataNodes = 3)
public class RemoteStoreStatsIT extends RemoteStoreBaseIntegTestCase {
@@ -50,14 +66,41 @@ public void testStatsResponseFromAllNodes() {
for (String node : nodes) {
RemoteStoreStatsResponse response = client(node).admin().cluster().prepareRemoteStoreStats(INDEX_NAME, shardId).get();
assertTrue(response.getSuccessfulShards() > 0);
- assertTrue(response.getShards() != null && response.getShards().length != 0);
+ assertTrue(response.getRemoteStoreStats() != null && response.getRemoteStoreStats().length != 0);
final String indexShardId = String.format(Locale.ROOT, "[%s][%s]", INDEX_NAME, shardId);
- List matches = Arrays.stream(response.getShards())
+ List matches = Arrays.stream(response.getRemoteStoreStats())
.filter(stat -> indexShardId.equals(stat.getStats().shardId.toString()))
.collect(Collectors.toList());
assertEquals(1, matches.size());
- RemoteRefreshSegmentTracker.Stats stats = matches.get(0).getStats();
- assertResponseStats(stats);
+ RemoteSegmentTransferTracker.Stats stats = matches.get(0).getStats();
+ validateUploadStats(stats);
+ assertEquals(0, stats.directoryFileTransferTrackerStats.transferredBytesStarted);
+ }
+
+ // Step 3 - Enable replicas on the existing indices and ensure that download
+ // stats are being populated as well
+ changeReplicaCountAndEnsureGreen(1);
+ for (String node : nodes) {
+ RemoteStoreStatsResponse response = client(node).admin().cluster().prepareRemoteStoreStats(INDEX_NAME, shardId).get();
+ assertTrue(response.getSuccessfulShards() > 0);
+ assertTrue(response.getRemoteStoreStats() != null && response.getRemoteStoreStats().length != 0);
+ final String indexShardId = String.format(Locale.ROOT, "[%s][%s]", INDEX_NAME, shardId);
+ List matches = Arrays.stream(response.getRemoteStoreStats())
+ .filter(stat -> indexShardId.equals(stat.getStats().shardId.toString()))
+ .collect(Collectors.toList());
+ assertEquals(2, matches.size());
+ for (RemoteStoreStats stat : matches) {
+ ShardRouting routing = stat.getShardRouting();
+ validateShardRouting(routing);
+ RemoteSegmentTransferTracker.Stats stats = stat.getStats();
+ if (routing.primary()) {
+ validateUploadStats(stats);
+ assertEquals(0, stats.directoryFileTransferTrackerStats.transferredBytesStarted);
+ } else {
+ validateDownloadStats(stats);
+ assertEquals(0, stats.totalUploadsStarted);
+ }
+ }
}
}
@@ -79,10 +122,31 @@ public void testStatsResponseAllShards() {
.cluster()
.prepareRemoteStoreStats(INDEX_NAME, null);
RemoteStoreStatsResponse response = remoteStoreStatsRequestBuilder.get();
- assertTrue(response.getSuccessfulShards() == 3);
- assertTrue(response.getShards() != null && response.getShards().length == 3);
- RemoteRefreshSegmentTracker.Stats stats = response.getShards()[0].getStats();
- assertResponseStats(stats);
+ assertEquals(3, response.getSuccessfulShards());
+ assertTrue(response.getRemoteStoreStats() != null && response.getRemoteStoreStats().length == 3);
+ RemoteSegmentTransferTracker.Stats stats = response.getRemoteStoreStats()[0].getStats();
+ validateUploadStats(stats);
+ assertEquals(0, stats.directoryFileTransferTrackerStats.transferredBytesStarted);
+
+ // Step 3 - Enable replicas on the existing indices and ensure that download
+ // stats are being populated as well
+ changeReplicaCountAndEnsureGreen(1);
+ response = client(node).admin().cluster().prepareRemoteStoreStats(INDEX_NAME, null).get();
+ assertEquals(6, response.getSuccessfulShards());
+ assertTrue(response.getRemoteStoreStats() != null && response.getRemoteStoreStats().length == 6);
+ for (RemoteStoreStats stat : response.getRemoteStoreStats()) {
+ ShardRouting routing = stat.getShardRouting();
+ validateShardRouting(routing);
+ stats = stat.getStats();
+ if (routing.primary()) {
+ validateUploadStats(stats);
+ assertEquals(0, stats.directoryFileTransferTrackerStats.transferredBytesStarted);
+ } else {
+ validateDownloadStats(stats);
+ assertEquals(0, stats.totalUploadsStarted);
+ }
+ }
+
}
public void testStatsResponseFromLocalNode() {
@@ -105,29 +169,405 @@ public void testStatsResponseFromLocalNode() {
.prepareRemoteStoreStats(INDEX_NAME, null);
remoteStoreStatsRequestBuilder.setLocal(true);
RemoteStoreStatsResponse response = remoteStoreStatsRequestBuilder.get();
- assertTrue(response.getSuccessfulShards() == 1);
- assertTrue(response.getShards() != null && response.getShards().length == 1);
- RemoteRefreshSegmentTracker.Stats stats = response.getShards()[0].getStats();
- assertResponseStats(stats);
+ assertEquals(1, response.getSuccessfulShards());
+ assertTrue(response.getRemoteStoreStats() != null && response.getRemoteStoreStats().length == 1);
+ RemoteSegmentTransferTracker.Stats stats = response.getRemoteStoreStats()[0].getStats();
+ validateUploadStats(stats);
+ assertEquals(0, stats.directoryFileTransferTrackerStats.transferredBytesStarted);
+ }
+ changeReplicaCountAndEnsureGreen(1);
+ for (String node : nodes) {
+ RemoteStoreStatsRequestBuilder remoteStoreStatsRequestBuilder = client(node).admin()
+ .cluster()
+ .prepareRemoteStoreStats(INDEX_NAME, null);
+ remoteStoreStatsRequestBuilder.setLocal(true);
+ RemoteStoreStatsResponse response = remoteStoreStatsRequestBuilder.get();
+ assertTrue(response.getSuccessfulShards() > 0);
+ assertTrue(response.getRemoteStoreStats() != null && response.getRemoteStoreStats().length != 0);
+ for (RemoteStoreStats stat : response.getRemoteStoreStats()) {
+ ShardRouting routing = stat.getShardRouting();
+ validateShardRouting(routing);
+ RemoteSegmentTransferTracker.Stats stats = stat.getStats();
+ if (routing.primary()) {
+ validateUploadStats(stats);
+ assertEquals(0, stats.directoryFileTransferTrackerStats.transferredBytesStarted);
+ } else {
+ validateDownloadStats(stats);
+ assertEquals(0, stats.totalUploadsStarted);
+ }
+ }
+ }
+ }
+
+ public void testDownloadStatsCorrectnessSinglePrimarySingleReplica() throws Exception {
+ // Scenario:
+ // - Create index with single primary and single replica shard
+ // - Disable Refresh Interval for the index
+ // - Index documents
+ // - Trigger refresh and flush
+ // - Assert that download stats == upload stats
+ // - Repeat this step for random times (between 5 and 10)
+
+ // Create index with 1 pri and 1 replica and refresh interval disabled
+ createIndex(
+ INDEX_NAME,
+ Settings.builder().put(remoteStoreIndexSettings(1, 1)).put(IndexSettings.INDEX_REFRESH_INTERVAL_SETTING.getKey(), -1).build()
+ );
+ ensureGreen(INDEX_NAME);
+
+ // Manually invoke a refresh
+ refresh(INDEX_NAME);
+
+ // Get zero state values
+ // Extract and assert zero state primary stats
+ RemoteStoreStatsResponse zeroStateResponse = client().admin().cluster().prepareRemoteStoreStats(INDEX_NAME, "0").get();
+ RemoteSegmentTransferTracker.Stats zeroStatePrimaryStats = Arrays.stream(zeroStateResponse.getRemoteStoreStats())
+ .filter(remoteStoreStats -> remoteStoreStats.getShardRouting().primary())
+ .collect(Collectors.toList())
+ .get(0)
+ .getStats();
+ assertTrue(
+ zeroStatePrimaryStats.totalUploadsStarted == zeroStatePrimaryStats.totalUploadsSucceeded
+ && zeroStatePrimaryStats.totalUploadsSucceeded == 1
+ );
+ assertTrue(
+ zeroStatePrimaryStats.uploadBytesStarted == zeroStatePrimaryStats.uploadBytesSucceeded
+ && zeroStatePrimaryStats.uploadBytesSucceeded > 0
+ );
+ assertTrue(zeroStatePrimaryStats.totalUploadsFailed == 0 && zeroStatePrimaryStats.uploadBytesFailed == 0);
+
+ // Extract and assert zero state replica stats
+ RemoteSegmentTransferTracker.Stats zeroStateReplicaStats = Arrays.stream(zeroStateResponse.getRemoteStoreStats())
+ .filter(remoteStoreStats -> !remoteStoreStats.getShardRouting().primary())
+ .collect(Collectors.toList())
+ .get(0)
+ .getStats();
+ assertTrue(
+ zeroStateReplicaStats.directoryFileTransferTrackerStats.transferredBytesStarted == 0
+ && zeroStateReplicaStats.directoryFileTransferTrackerStats.transferredBytesSucceeded == 0
+ );
+
+ // Index documents
+ for (int i = 1; i <= randomIntBetween(5, 10); i++) {
+ indexSingleDoc(INDEX_NAME);
+ // Running Flush & Refresh manually
+ flushAndRefresh(INDEX_NAME);
+ ensureGreen(INDEX_NAME);
+
+ // Poll for RemoteStore Stats
+ assertBusy(() -> {
+ RemoteStoreStatsResponse response = client().admin().cluster().prepareRemoteStoreStats(INDEX_NAME, "0").get();
+ // Iterate through the response and extract the relevant segment upload and download stats
+ List primaryStatsList = Arrays.stream(response.getRemoteStoreStats())
+ .filter(remoteStoreStats -> remoteStoreStats.getShardRouting().primary())
+ .collect(Collectors.toList());
+ assertEquals(1, primaryStatsList.size());
+ List replicaStatsList = Arrays.stream(response.getRemoteStoreStats())
+ .filter(remoteStoreStats -> !remoteStoreStats.getShardRouting().primary())
+ .collect(Collectors.toList());
+ assertEquals(1, replicaStatsList.size());
+ RemoteSegmentTransferTracker.Stats primaryStats = primaryStatsList.get(0).getStats();
+ RemoteSegmentTransferTracker.Stats replicaStats = replicaStatsList.get(0).getStats();
+ // Assert Upload syncs - zero state uploads == download syncs
+ assertTrue(primaryStats.totalUploadsStarted > 0);
+ assertTrue(primaryStats.totalUploadsSucceeded > 0);
+ assertTrue(
+ replicaStats.directoryFileTransferTrackerStats.transferredBytesStarted > 0
+ && primaryStats.uploadBytesStarted
+ - zeroStatePrimaryStats.uploadBytesStarted >= replicaStats.directoryFileTransferTrackerStats.transferredBytesStarted
+ );
+ assertTrue(
+ replicaStats.directoryFileTransferTrackerStats.transferredBytesSucceeded > 0
+ && primaryStats.uploadBytesSucceeded
+ - zeroStatePrimaryStats.uploadBytesSucceeded >= replicaStats.directoryFileTransferTrackerStats.transferredBytesSucceeded
+ );
+ // Assert zero failures
+ assertEquals(0, primaryStats.uploadBytesFailed);
+ assertEquals(0, replicaStats.directoryFileTransferTrackerStats.transferredBytesFailed);
+ }, 60, TimeUnit.SECONDS);
+ }
+ }
+
+ public void testDownloadStatsCorrectnessSinglePrimaryMultipleReplicaShards() throws Exception {
+ // Scenario:
+ // - Create index with single primary and N-1 replica shards (N = no of data nodes)
+ // - Disable Refresh Interval for the index
+ // - Index documents
+ // - Trigger refresh and flush
+ // - Assert that download stats == upload stats
+ // - Repeat this step for random times (between 5 and 10)
+
+ // Create index
+ int dataNodeCount = client().admin().cluster().prepareHealth().get().getNumberOfDataNodes();
+ createIndex(
+ INDEX_NAME,
+ Settings.builder()
+ .put(remoteStoreIndexSettings(dataNodeCount - 1, 1))
+ .put(IndexSettings.INDEX_REFRESH_INTERVAL_SETTING.getKey(), -1)
+ .build()
+ );
+ ensureGreen(INDEX_NAME);
+
+ // Manually invoke a refresh
+ refresh(INDEX_NAME);
+
+ // Get zero state values
+ // Extract and assert zero state primary stats
+ RemoteStoreStatsResponse zeroStateResponse = client().admin().cluster().prepareRemoteStoreStats(INDEX_NAME, "0").get();
+ RemoteSegmentTransferTracker.Stats zeroStatePrimaryStats = Arrays.stream(zeroStateResponse.getRemoteStoreStats())
+ .filter(remoteStoreStats -> remoteStoreStats.getShardRouting().primary())
+ .collect(Collectors.toList())
+ .get(0)
+ .getStats();
+ assertTrue(
+ zeroStatePrimaryStats.totalUploadsStarted == zeroStatePrimaryStats.totalUploadsSucceeded
+ && zeroStatePrimaryStats.totalUploadsSucceeded == 1
+ );
+ assertTrue(
+ zeroStatePrimaryStats.uploadBytesStarted == zeroStatePrimaryStats.uploadBytesSucceeded
+ && zeroStatePrimaryStats.uploadBytesSucceeded > 0
+ );
+ assertTrue(zeroStatePrimaryStats.totalUploadsFailed == 0 && zeroStatePrimaryStats.uploadBytesFailed == 0);
+
+ // Extract and assert zero state replica stats
+ List zeroStateReplicaStats = Arrays.stream(zeroStateResponse.getRemoteStoreStats())
+ .filter(remoteStoreStats -> !remoteStoreStats.getShardRouting().primary())
+ .collect(Collectors.toList());
+ zeroStateReplicaStats.forEach(stats -> {
+ assertTrue(
+ stats.getStats().directoryFileTransferTrackerStats.transferredBytesStarted == 0
+ && stats.getStats().directoryFileTransferTrackerStats.transferredBytesSucceeded == 0
+ );
+ });
+
+ int currentNodesInCluster = client().admin().cluster().prepareHealth().get().getNumberOfDataNodes();
+ for (int i = 0; i < randomIntBetween(5, 10); i++) {
+ indexSingleDoc(INDEX_NAME);
+ // Running Flush & Refresh manually
+ flushAndRefresh(INDEX_NAME);
+
+ assertBusy(() -> {
+ RemoteStoreStatsResponse response = client().admin().cluster().prepareRemoteStoreStats(INDEX_NAME, "0").get();
+ assertEquals(currentNodesInCluster, response.getSuccessfulShards());
+ long uploadsStarted = 0, uploadsSucceeded = 0, uploadsFailed = 0;
+ long uploadBytesStarted = 0, uploadBytesSucceeded = 0, uploadBytesFailed = 0;
+ List downloadBytesStarted = new ArrayList<>(), downloadBytesSucceeded = new ArrayList<>(), downloadBytesFailed =
+ new ArrayList<>();
+
+ // Assert that stats for primary shard and replica shard set are equal
+ for (RemoteStoreStats eachStatsObject : response.getRemoteStoreStats()) {
+ RemoteSegmentTransferTracker.Stats stats = eachStatsObject.getStats();
+ if (eachStatsObject.getShardRouting().primary()) {
+ uploadBytesStarted = stats.uploadBytesStarted;
+ uploadBytesSucceeded = stats.uploadBytesSucceeded;
+ uploadBytesFailed = stats.uploadBytesFailed;
+ } else {
+ downloadBytesStarted.add(stats.directoryFileTransferTrackerStats.transferredBytesStarted);
+ downloadBytesSucceeded.add(stats.directoryFileTransferTrackerStats.transferredBytesSucceeded);
+ downloadBytesFailed.add(stats.directoryFileTransferTrackerStats.transferredBytesFailed);
+ }
+ }
+
+ assertEquals(0, uploadsFailed);
+ assertEquals(0, uploadBytesFailed);
+ for (int j = 0; j < response.getSuccessfulShards() - 1; j++) {
+ assertTrue(uploadBytesStarted - zeroStatePrimaryStats.uploadBytesStarted > downloadBytesStarted.get(j));
+ assertTrue(uploadBytesSucceeded - zeroStatePrimaryStats.uploadBytesSucceeded > downloadBytesSucceeded.get(j));
+ assertEquals(0, (long) downloadBytesFailed.get(j));
+ }
+ }, 60, TimeUnit.SECONDS);
}
}
+ public void testStatsOnShardRelocation() {
+ // Scenario:
+ // - Create index with single primary and single replica shard
+ // - Index documents
+ // - Reroute replica shard to one of the remaining nodes
+ // - Assert that remote store stats reflects the new node ID
+
+ // Create index
+ createIndex(INDEX_NAME, remoteStoreIndexSettings(1, 1));
+ ensureGreen(INDEX_NAME);
+ // Index docs
+ indexDocs();
+
+ // Fetch current set of nodes in the cluster
+ List currentNodesInCluster = getClusterState().nodes()
+ .getDataNodes()
+ .values()
+ .stream()
+ .map(DiscoveryNode::getId)
+ .collect(Collectors.toList());
+ DiscoveryNode[] discoveryNodesForIndex = client().admin().cluster().prepareSearchShards(INDEX_NAME).get().getNodes();
+
+ // Fetch nodes with shard copies of the created index
+ List nodeIdsWithShardCopies = new ArrayList<>();
+ Arrays.stream(discoveryNodesForIndex).forEach(eachNode -> nodeIdsWithShardCopies.add(eachNode.getId()));
+
+        // Fetch nodes which do not have any copy of the index
+ List nodeIdsWithoutShardCopy = currentNodesInCluster.stream()
+ .filter(eachNode -> !nodeIdsWithShardCopies.contains(eachNode))
+ .collect(Collectors.toList());
+ assertEquals(1, nodeIdsWithoutShardCopy.size());
+
+ // Manually reroute shard to a node which does not have any shard copy at present
+ ShardRouting replicaShardRouting = getClusterState().routingTable()
+ .index(INDEX_NAME)
+ .shard(0)
+ .assignedShards()
+ .stream()
+ .filter(shard -> !shard.primary())
+ .collect(Collectors.toList())
+ .get(0);
+ String sourceNode = replicaShardRouting.currentNodeId();
+ String destinationNode = nodeIdsWithoutShardCopy.get(0);
+ relocateShard(0, sourceNode, destinationNode);
+ RemoteStoreStats[] allShardsStats = client().admin().cluster().prepareRemoteStoreStats(INDEX_NAME, "0").get().getRemoteStoreStats();
+ RemoteStoreStats replicaShardStat = Arrays.stream(allShardsStats)
+ .filter(eachStat -> !eachStat.getShardRouting().primary())
+ .collect(Collectors.toList())
+ .get(0);
+
+ // Assert that remote store stats reflect the new shard state
+ assertEquals(ShardRoutingState.STARTED, replicaShardStat.getShardRouting().state());
+ assertEquals(destinationNode, replicaShardStat.getShardRouting().currentNodeId());
+ }
+
+ public void testStatsOnShardUnassigned() throws IOException {
+ // Scenario:
+        // - Create index with single primary and two replica shards
+ // - Index documents
+ // - Stop one data node
+ // - Assert:
+        // a. Total shard count in the response object is equal to the previous node count
+ // b. Successful shard count in the response object is equal to the new node count
+ createIndex(INDEX_NAME, remoteStoreIndexSettings(2, 1));
+ ensureGreen(INDEX_NAME);
+ indexDocs();
+ ClusterHealthResponse clusterHealthResponse = client().admin().cluster().prepareHealth().get();
+ int dataNodeCountBeforeStop = clusterHealthResponse.getNumberOfDataNodes();
+ int nodeCount = clusterHealthResponse.getNumberOfNodes();
+ String nodeToBeStopped = randomBoolean() ? primaryNodeName(INDEX_NAME) : replicaNodeName(INDEX_NAME);
+ internalCluster().stopRandomNode(InternalTestCluster.nameFilter(nodeToBeStopped));
+ ensureYellowAndNoInitializingShards(INDEX_NAME);
+ ensureStableCluster(nodeCount - 1);
+ RemoteStoreStatsResponse response = client().admin().cluster().prepareRemoteStoreStats(INDEX_NAME, "0").get();
+ int dataNodeCountAfterStop = client().admin().cluster().prepareHealth().get().getNumberOfDataNodes();
+ assertEquals(dataNodeCountBeforeStop, response.getTotalShards());
+ assertEquals(dataNodeCountAfterStop, response.getSuccessfulShards());
+ // Indexing docs to ensure that the primary has started
+ indexSingleDoc(INDEX_NAME);
+ }
+
+ public void testStatsOnRemoteStoreRestore() throws IOException {
+ // Creating an index with primary shard count == total nodes in cluster and 0 replicas
+ int dataNodeCount = client().admin().cluster().prepareHealth().get().getNumberOfDataNodes();
+ createIndex(INDEX_NAME, remoteStoreIndexSettings(0, dataNodeCount));
+ ensureGreen(INDEX_NAME);
+
+ // Index some docs to ensure segments being uploaded to remote store
+ indexDocs();
+ refresh(INDEX_NAME);
+
+ // Stop one data node to force the index into a red state
+ internalCluster().stopRandomDataNode();
+ ensureRed(INDEX_NAME);
+
+        // Start another data node to restore the cluster to its previous capacity
+ internalCluster().startDataOnlyNode();
+
+ // Restore index from remote store
+ assertAcked(client().admin().indices().prepareClose(INDEX_NAME));
+ client().admin()
+ .cluster()
+ .restoreRemoteStore(new RestoreRemoteStoreRequest().indices(INDEX_NAME).restoreAllShards(true), PlainActionFuture.newFuture());
+
+ // Ensure that the index is green
+ ensureGreen(INDEX_NAME);
+
+ // Index some more docs to force segment uploads to remote store
+ indexDocs();
+
+ RemoteStoreStatsResponse remoteStoreStatsResponse = client().admin().cluster().prepareRemoteStoreStats(INDEX_NAME, "0").get();
+ Arrays.stream(remoteStoreStatsResponse.getRemoteStoreStats()).forEach(statObject -> {
+ RemoteSegmentTransferTracker.Stats segmentTracker = statObject.getStats();
+ // Assert that we have both upload and download stats for the index
+ assertTrue(
+ segmentTracker.totalUploadsStarted > 0 && segmentTracker.totalUploadsSucceeded > 0 && segmentTracker.totalUploadsFailed == 0
+ );
+ assertTrue(
+ segmentTracker.directoryFileTransferTrackerStats.transferredBytesStarted > 0
+ && segmentTracker.directoryFileTransferTrackerStats.transferredBytesSucceeded > 0
+ );
+ });
+ }
+
+ public void testNonZeroPrimaryStatsOnNewlyCreatedIndexWithZeroDocs() throws Exception {
+ // Create an index with one primary and one replica shard
+ createIndex(INDEX_NAME, remoteStoreIndexSettings(1, 1));
+ ensureGreen(INDEX_NAME);
+ refresh(INDEX_NAME);
+
+ // Ensure that the index has 0 documents in it
+ assertEquals(0, client().admin().indices().prepareStats(INDEX_NAME).get().getTotal().docs.getCount());
+
+ // Assert that within 5 seconds the download and upload stats moves to a non-zero value
+ assertBusy(() -> {
+ RemoteStoreStats[] remoteStoreStats = client().admin()
+ .cluster()
+ .prepareRemoteStoreStats(INDEX_NAME, "0")
+ .get()
+ .getRemoteStoreStats();
+ Arrays.stream(remoteStoreStats).forEach(statObject -> {
+ RemoteSegmentTransferTracker.Stats segmentTracker = statObject.getStats();
+ if (statObject.getShardRouting().primary()) {
+ assertTrue(
+ segmentTracker.totalUploadsSucceeded == 1
+ && segmentTracker.totalUploadsStarted == segmentTracker.totalUploadsSucceeded
+ && segmentTracker.totalUploadsFailed == 0
+ );
+ } else {
+ assertTrue(
+ segmentTracker.directoryFileTransferTrackerStats.transferredBytesStarted == 0
+ && segmentTracker.directoryFileTransferTrackerStats.transferredBytesSucceeded == 0
+ );
+ }
+ });
+ }, 5, TimeUnit.SECONDS);
+ }
+
private void indexDocs() {
- // Indexing documents along with refreshes and flushes.
for (int i = 0; i < randomIntBetween(5, 10); i++) {
if (randomBoolean()) {
flush(INDEX_NAME);
} else {
refresh(INDEX_NAME);
}
- int numberOfOperations = randomIntBetween(20, 50);
+ int numberOfOperations = randomIntBetween(10, 30);
for (int j = 0; j < numberOfOperations; j++) {
indexSingleDoc(INDEX_NAME);
}
}
}
- private void assertResponseStats(RemoteRefreshSegmentTracker.Stats stats) {
+ private void changeReplicaCountAndEnsureGreen(int replicaCount) {
+ assertAcked(
+ client().admin()
+ .indices()
+ .prepareUpdateSettings(INDEX_NAME)
+ .setSettings(Settings.builder().put(SETTING_NUMBER_OF_REPLICAS, replicaCount))
+ );
+ ensureYellowAndNoInitializingShards(INDEX_NAME);
+ ensureGreen(INDEX_NAME);
+ }
+
+ private void relocateShard(int shardId, String sourceNode, String destNode) {
+ assertAcked(client().admin().cluster().prepareReroute().add(new MoveAllocationCommand(INDEX_NAME, shardId, sourceNode, destNode)));
+ ensureGreen(INDEX_NAME);
+ }
+
+ private void validateUploadStats(RemoteSegmentTransferTracker.Stats stats) {
assertEquals(0, stats.refreshTimeLagMs);
assertEquals(stats.localRefreshNumber, stats.remoteRefreshNumber);
assertTrue(stats.uploadBytesStarted > 0);
@@ -143,4 +583,32 @@ private void assertResponseStats(RemoteRefreshSegmentTracker.Stats stats) {
assertTrue(stats.uploadBytesPerSecMovingAverage > 0);
assertTrue(stats.uploadTimeMovingAverage > 0);
}
+
+ private void validateDownloadStats(RemoteSegmentTransferTracker.Stats stats) {
+ assertTrue(stats.directoryFileTransferTrackerStats.lastTransferTimestampMs > 0);
+ assertTrue(stats.directoryFileTransferTrackerStats.transferredBytesStarted > 0);
+ assertTrue(stats.directoryFileTransferTrackerStats.transferredBytesSucceeded > 0);
+ assertEquals(stats.directoryFileTransferTrackerStats.transferredBytesFailed, 0);
+ assertTrue(stats.directoryFileTransferTrackerStats.lastSuccessfulTransferInBytes > 0);
+ assertTrue(stats.directoryFileTransferTrackerStats.transferredBytesMovingAverage > 0);
+ assertTrue(stats.directoryFileTransferTrackerStats.transferredBytesPerSecMovingAverage > 0);
+ }
+
+    // Validate that the shard routing returned by the remote store stats API matches a
+    // routing entry (node id, state, primary flag) in the current cluster state routing table
+ private void validateShardRouting(ShardRouting routing) {
+ Stream currentRoutingTable = getClusterState().routingTable()
+ .getIndicesRouting()
+ .get(INDEX_NAME)
+ .shard(routing.id())
+ .assignedShards()
+ .stream();
+ assertTrue(
+ currentRoutingTable.anyMatch(
+ r -> (r.currentNodeId().equals(routing.currentNodeId())
+ && r.state().equals(routing.state())
+ && r.primary() == routing.primary())
+ )
+ );
+ }
}
diff --git a/server/src/internalClusterTest/java/org/opensearch/remotestore/ReplicaToPrimaryPromotionIT.java b/server/src/internalClusterTest/java/org/opensearch/remotestore/ReplicaToPrimaryPromotionIT.java
index 6764c50175e61..b68fd1f764a63 100644
--- a/server/src/internalClusterTest/java/org/opensearch/remotestore/ReplicaToPrimaryPromotionIT.java
+++ b/server/src/internalClusterTest/java/org/opensearch/remotestore/ReplicaToPrimaryPromotionIT.java
@@ -11,17 +11,22 @@
import com.carrotsearch.randomizedtesting.RandomizedTest;
import org.junit.Before;
import org.opensearch.action.admin.indices.close.CloseIndexResponse;
+import org.opensearch.action.index.IndexResponse;
import org.opensearch.cluster.ClusterState;
import org.opensearch.cluster.metadata.IndexMetadata;
import org.opensearch.cluster.node.DiscoveryNode;
import org.opensearch.cluster.routing.IndexShardRoutingTable;
import org.opensearch.cluster.routing.ShardRouting;
import org.opensearch.common.settings.Settings;
+import org.opensearch.core.rest.RestStatus;
import org.opensearch.test.BackgroundIndexer;
import org.opensearch.test.InternalTestCluster;
import org.opensearch.test.OpenSearchIntegTestCase;
import java.util.Locale;
+import java.util.concurrent.CountDownLatch;
+import java.util.concurrent.atomic.AtomicBoolean;
+import java.util.concurrent.atomic.AtomicInteger;
import static org.hamcrest.Matchers.equalTo;
import static org.hamcrest.Matchers.is;
@@ -39,11 +44,7 @@ public void setup() {
@Override
public Settings indexSettings() {
- return Settings.builder()
- .put(super.indexSettings())
- .put(IndexMetadata.SETTING_NUMBER_OF_SHARDS, shard_count)
- .put(IndexMetadata.SETTING_REMOTE_TRANSLOG_STORE_REPOSITORY, REPOSITORY_NAME)
- .build();
+ return Settings.builder().put(super.indexSettings()).put(IndexMetadata.SETTING_NUMBER_OF_SHARDS, shard_count).build();
}
public void testPromoteReplicaToPrimary() throws Exception {
@@ -120,4 +121,64 @@ public void testPromoteReplicaToPrimary() throws Exception {
refresh(indexName);
assertHitCount(client().prepareSearch(indexName).setSize(0).get(), numOfDocs);
}
+
+ @AwaitsFix(bugUrl = "https://github.com/opensearch-project/OpenSearch/issues/9130")
+ public void testFailoverWhileIndexing() throws Exception {
+ internalCluster().startNode();
+ internalCluster().startNode();
+ final String indexName = randomAlphaOfLength(5).toLowerCase(Locale.ROOT);
+ shard_count = scaledRandomIntBetween(1, 5);
+ createIndex(indexName);
+ ensureGreen(indexName);
+ int docCount = scaledRandomIntBetween(20, 50);
+ final int indexDocAfterFailover = scaledRandomIntBetween(20, 50);
+ AtomicInteger numAutoGenDocs = new AtomicInteger();
+ CountDownLatch latch = new CountDownLatch(1);
+ final AtomicBoolean finished = new AtomicBoolean(false);
+ Thread indexingThread = new Thread(() -> {
+ int docsAfterFailover = 0;
+ while (finished.get() == false && numAutoGenDocs.get() < docCount) {
+ IndexResponse indexResponse = internalCluster().clusterManagerClient()
+ .prepareIndex(indexName)
+ .setSource("field", numAutoGenDocs.get())
+ .get();
+
+ if (indexResponse.status() == RestStatus.CREATED || indexResponse.status() == RestStatus.ACCEPTED) {
+ numAutoGenDocs.incrementAndGet();
+ if (numAutoGenDocs.get() == docCount / 2) {
+ if (random().nextInt(3) == 0) {
+ refresh(indexName);
+ } else if (random().nextInt(2) == 0) {
+ flush(indexName);
+ }
+                        // The main thread stops a node once this latch is counted down
+ latch.countDown();
+ } else if (numAutoGenDocs.get() > docCount / 2) {
+ docsAfterFailover++;
+ if (docsAfterFailover == indexDocAfterFailover) {
+ finished.set(true);
+ }
+ }
+ }
+ }
+ logger.debug("Done indexing");
+ });
+ indexingThread.start();
+ latch.await();
+
+ ClusterState state = client(internalCluster().getClusterManagerName()).admin().cluster().prepareState().get().getState();
+ final int numShards = state.metadata().index(indexName).getNumberOfShards();
+ final ShardRouting primaryShard = state.routingTable().index(indexName).shard(randomIntBetween(0, numShards - 1)).primaryShard();
+ final DiscoveryNode randomNode = state.nodes().resolveNode(primaryShard.currentNodeId());
+
+ // stop the random data node, all remaining shards are promoted to primaries
+ internalCluster().stopRandomNode(InternalTestCluster.nameFilter(randomNode.getName()));
+ ensureYellowAndNoInitializingShards(indexName);
+ indexingThread.join();
+ refresh(indexName);
+ assertHitCount(
+ client(internalCluster().getClusterManagerName()).prepareSearch(indexName).setSize(0).setTrackTotalHits(true).get(),
+ numAutoGenDocs.get()
+ );
+ }
}
diff --git a/server/src/internalClusterTest/java/org/opensearch/remotestore/SegmentReplicationUsingRemoteStoreIT.java b/server/src/internalClusterTest/java/org/opensearch/remotestore/SegmentReplicationUsingRemoteStoreIT.java
index f298fac7c894e..6f76c21cc0411 100644
--- a/server/src/internalClusterTest/java/org/opensearch/remotestore/SegmentReplicationUsingRemoteStoreIT.java
+++ b/server/src/internalClusterTest/java/org/opensearch/remotestore/SegmentReplicationUsingRemoteStoreIT.java
@@ -10,7 +10,6 @@
import org.junit.After;
import org.junit.Before;
-import org.opensearch.cluster.metadata.IndexMetadata;
import org.opensearch.common.settings.Settings;
import org.opensearch.common.util.FeatureFlags;
import org.opensearch.indices.replication.SegmentReplicationIT;
@@ -18,6 +17,7 @@
import java.nio.file.Path;
+import static org.opensearch.remotestore.RemoteStoreBaseIntegTestCase.remoteStoreClusterSettings;
import static org.opensearch.test.hamcrest.OpenSearchAssertions.assertAcked;
/**
@@ -32,13 +32,12 @@ public class SegmentReplicationUsingRemoteStoreIT extends SegmentReplicationIT {
private static final String REPOSITORY_NAME = "test-remote-store-repo";
@Override
- public Settings indexSettings() {
- return Settings.builder()
- .put(super.indexSettings())
- .put(IndexMetadata.SETTING_REMOTE_STORE_ENABLED, true)
- .put(IndexMetadata.SETTING_REMOTE_SEGMENT_STORE_REPOSITORY, REPOSITORY_NAME)
- .put(IndexMetadata.SETTING_REMOTE_TRANSLOG_STORE_REPOSITORY, REPOSITORY_NAME)
- .build();
+ protected Settings nodeSettings(int nodeOrdinal) {
+ return Settings.builder().put(super.nodeSettings(nodeOrdinal)).put(remoteStoreClusterSettings(REPOSITORY_NAME)).build();
+ }
+
+ protected boolean segmentReplicationWithRemoteEnabled() {
+ return true;
}
@Override
diff --git a/server/src/internalClusterTest/java/org/opensearch/remotestore/SegmentReplicationWithRemoteStorePressureIT.java b/server/src/internalClusterTest/java/org/opensearch/remotestore/SegmentReplicationWithRemoteStorePressureIT.java
index 0b64680033d84..38db7a7c7269e 100644
--- a/server/src/internalClusterTest/java/org/opensearch/remotestore/SegmentReplicationWithRemoteStorePressureIT.java
+++ b/server/src/internalClusterTest/java/org/opensearch/remotestore/SegmentReplicationWithRemoteStorePressureIT.java
@@ -10,15 +10,14 @@
import org.junit.After;
import org.junit.Before;
-import org.opensearch.cluster.metadata.IndexMetadata;
import org.opensearch.common.settings.Settings;
import org.opensearch.common.util.FeatureFlags;
import org.opensearch.index.SegmentReplicationPressureIT;
-import org.opensearch.indices.replication.common.ReplicationType;
import org.opensearch.test.OpenSearchIntegTestCase;
import java.nio.file.Path;
+import static org.opensearch.remotestore.RemoteStoreBaseIntegTestCase.remoteStoreClusterSettings;
import static org.opensearch.test.hamcrest.OpenSearchAssertions.assertAcked;
/**
@@ -32,14 +31,8 @@ public class SegmentReplicationWithRemoteStorePressureIT extends SegmentReplicat
private static final String REPOSITORY_NAME = "test-remote-store-repo";
@Override
- public Settings indexSettings() {
- return Settings.builder()
- .put(super.indexSettings())
- .put(IndexMetadata.SETTING_REMOTE_STORE_ENABLED, true)
- .put(IndexMetadata.SETTING_REMOTE_SEGMENT_STORE_REPOSITORY, REPOSITORY_NAME)
- .put(IndexMetadata.SETTING_REMOTE_TRANSLOG_STORE_REPOSITORY, REPOSITORY_NAME)
- .put(IndexMetadata.SETTING_REPLICATION_TYPE, ReplicationType.SEGMENT)
- .build();
+ protected boolean segmentReplicationWithRemoteEnabled() {
+ return true;
}
@Override
@@ -51,6 +44,11 @@ protected Settings featureFlagSettings() {
.build();
}
+ @Override
+ protected Settings nodeSettings(int nodeOrdinal) {
+ return Settings.builder().put(super.nodeSettings(nodeOrdinal)).put(remoteStoreClusterSettings(REPOSITORY_NAME)).build();
+ }
+
@Before
public void setup() {
internalCluster().startClusterManagerOnlyNode();
diff --git a/server/src/internalClusterTest/java/org/opensearch/remotestore/multipart/RemoteStoreMultipartFileCorruptionIT.java b/server/src/internalClusterTest/java/org/opensearch/remotestore/multipart/RemoteStoreMultipartFileCorruptionIT.java
index 529e84d281476..b801c28983890 100644
--- a/server/src/internalClusterTest/java/org/opensearch/remotestore/multipart/RemoteStoreMultipartFileCorruptionIT.java
+++ b/server/src/internalClusterTest/java/org/opensearch/remotestore/multipart/RemoteStoreMultipartFileCorruptionIT.java
@@ -8,31 +8,22 @@
package org.opensearch.remotestore.multipart;
-import org.junit.After;
import org.junit.Before;
-import org.opensearch.action.index.IndexResponse;
import org.opensearch.action.support.IndicesOptions;
import org.opensearch.cluster.metadata.IndexMetadata;
-import org.opensearch.common.UUIDs;
import org.opensearch.common.settings.Settings;
-import org.opensearch.common.util.FeatureFlags;
import org.opensearch.index.IndexModule;
import org.opensearch.indices.replication.common.ReplicationType;
import org.opensearch.plugins.Plugin;
-import org.opensearch.remotestore.multipart.mocks.MockFsRepository;
+import org.opensearch.remotestore.RemoteStoreBaseIntegTestCase;
import org.opensearch.remotestore.multipart.mocks.MockFsRepositoryPlugin;
-import org.opensearch.test.OpenSearchIntegTestCase;
-import java.nio.file.Path;
import java.util.Collection;
import java.util.stream.Collectors;
import java.util.stream.Stream;
-import static org.opensearch.test.hamcrest.OpenSearchAssertions.assertAcked;
+public class RemoteStoreMultipartFileCorruptionIT extends RemoteStoreBaseIntegTestCase {
-public class RemoteStoreMultipartFileCorruptionIT extends OpenSearchIntegTestCase {
-
- protected static final String REPOSITORY_NAME = "test-remore-store-repo";
private static final String INDEX_NAME = "remote-store-test-idx-1";
@Override
@@ -40,34 +31,9 @@ protected Collection> nodePlugins() {
return Stream.concat(super.nodePlugins().stream(), Stream.of(MockFsRepositoryPlugin.class)).collect(Collectors.toList());
}
- @Override
- protected Settings featureFlagSettings() {
- return Settings.builder().put(super.featureFlagSettings()).put(FeatureFlags.REMOTE_STORE, "true").build();
- }
-
@Before
public void setup() {
- internalCluster().startClusterManagerOnlyNode();
- Path absolutePath = randomRepoPath().toAbsolutePath();
- putRepository(absolutePath);
- }
-
- protected void putRepository(Path path) {
- assertAcked(
- clusterAdmin().preparePutRepository(REPOSITORY_NAME)
- .setType(MockFsRepositoryPlugin.TYPE)
- .setSettings(
- Settings.builder()
- .put("location", path)
- // custom setting for MockFsRepositoryPlugin
- .put(MockFsRepository.TRIGGER_DATA_INTEGRITY_FAILURE.getKey(), true)
- )
- );
- }
-
- @After
- public void teardown() {
- assertAcked(clusterAdmin().prepareDeleteRepository(REPOSITORY_NAME));
+ setupRepo();
}
protected Settings remoteStoreIndexSettings() {
@@ -78,26 +44,16 @@ protected Settings remoteStoreIndexSettings() {
.put(IndexMetadata.SETTING_NUMBER_OF_REPLICAS, 0)
.put(IndexModule.INDEX_QUERY_CACHE_ENABLED_SETTING.getKey(), false)
.put(IndexMetadata.SETTING_REPLICATION_TYPE, ReplicationType.SEGMENT)
- .put(IndexMetadata.SETTING_REMOTE_STORE_ENABLED, true)
- .put(IndexMetadata.SETTING_REMOTE_SEGMENT_STORE_REPOSITORY, REPOSITORY_NAME)
- .put(IndexMetadata.SETTING_REMOTE_TRANSLOG_STORE_REPOSITORY, REPOSITORY_NAME)
.build();
}
- private IndexResponse indexSingleDoc() {
- return client().prepareIndex(INDEX_NAME)
- .setId(UUIDs.randomBase64UUID())
- .setSource(randomAlphaOfLength(5), randomAlphaOfLength(5))
- .get();
- }
-
public void testLocalFileCorruptionDuringUpload() {
internalCluster().startDataOnlyNodes(1);
createIndex(INDEX_NAME, remoteStoreIndexSettings());
ensureYellowAndNoInitializingShards(INDEX_NAME);
ensureGreen(INDEX_NAME);
- indexSingleDoc();
+ indexSingleDoc(INDEX_NAME);
client().admin()
.indices()
diff --git a/server/src/internalClusterTest/java/org/opensearch/repositories/fs/FsBlobStoreRepositoryIT.java b/server/src/internalClusterTest/java/org/opensearch/repositories/fs/FsBlobStoreRepositoryIT.java
index 4e7f2ae486c93..d5c744a49c62d 100644
--- a/server/src/internalClusterTest/java/org/opensearch/repositories/fs/FsBlobStoreRepositoryIT.java
+++ b/server/src/internalClusterTest/java/org/opensearch/repositories/fs/FsBlobStoreRepositoryIT.java
@@ -37,8 +37,8 @@
import org.opensearch.common.blobstore.fs.FsBlobStore;
import org.opensearch.core.common.bytes.BytesArray;
import org.opensearch.common.settings.Settings;
-import org.opensearch.common.unit.ByteSizeUnit;
-import org.opensearch.common.unit.ByteSizeValue;
+import org.opensearch.core.common.unit.ByteSizeUnit;
+import org.opensearch.core.common.unit.ByteSizeValue;
import org.opensearch.common.util.io.IOUtils;
import org.opensearch.repositories.blobstore.OpenSearchBlobStoreRepositoryIntegTestCase;
diff --git a/server/src/internalClusterTest/java/org/opensearch/search/SearchCancellationIT.java b/server/src/internalClusterTest/java/org/opensearch/search/SearchCancellationIT.java
index 9db0ac4590efa..eedd9328826a5 100644
--- a/server/src/internalClusterTest/java/org/opensearch/search/SearchCancellationIT.java
+++ b/server/src/internalClusterTest/java/org/opensearch/search/SearchCancellationIT.java
@@ -48,10 +48,10 @@
import org.opensearch.action.search.SearchScrollAction;
import org.opensearch.action.search.ShardSearchFailure;
import org.opensearch.action.support.WriteRequest;
-import org.opensearch.common.Strings;
import org.opensearch.common.settings.Settings;
import org.opensearch.common.unit.TimeValue;
import org.opensearch.common.xcontent.XContentType;
+import org.opensearch.core.common.Strings;
import org.opensearch.plugins.Plugin;
import org.opensearch.plugins.PluginsService;
import org.opensearch.script.MockScriptPlugin;
diff --git a/server/src/internalClusterTest/java/org/opensearch/search/aggregations/FiltersAggsRewriteIT.java b/server/src/internalClusterTest/java/org/opensearch/search/aggregations/FiltersAggsRewriteIT.java
index e554a3be20528..fe62f5902cbba 100644
--- a/server/src/internalClusterTest/java/org/opensearch/search/aggregations/FiltersAggsRewriteIT.java
+++ b/server/src/internalClusterTest/java/org/opensearch/search/aggregations/FiltersAggsRewriteIT.java
@@ -35,8 +35,8 @@
import org.opensearch.action.search.SearchResponse;
import org.opensearch.core.common.bytes.BytesReference;
import org.opensearch.common.settings.Settings;
+import org.opensearch.core.xcontent.MediaTypeRegistry;
import org.opensearch.core.xcontent.XContentBuilder;
-import org.opensearch.common.xcontent.XContentFactory;
import org.opensearch.common.xcontent.XContentType;
import org.opensearch.index.query.WrapperQueryBuilder;
import org.opensearch.search.aggregations.bucket.filter.FiltersAggregationBuilder;
@@ -59,7 +59,7 @@ public void testWrapperQueryIsRewritten() throws IOException {
XContentType xContentType = randomFrom(XContentType.values());
BytesReference bytesReference;
- try (XContentBuilder builder = XContentFactory.contentBuilder(xContentType)) {
+ try (XContentBuilder builder = MediaTypeRegistry.contentBuilder(xContentType)) {
builder.startObject();
{
builder.startObject("terms");
diff --git a/server/src/internalClusterTest/java/org/opensearch/search/aggregations/bucket/DateHistogramIT.java b/server/src/internalClusterTest/java/org/opensearch/search/aggregations/bucket/DateHistogramIT.java
index 617c5745c9bba..271492e47d2ef 100644
--- a/server/src/internalClusterTest/java/org/opensearch/search/aggregations/bucket/DateHistogramIT.java
+++ b/server/src/internalClusterTest/java/org/opensearch/search/aggregations/bucket/DateHistogramIT.java
@@ -35,7 +35,6 @@
import org.opensearch.action.index.IndexRequestBuilder;
import org.opensearch.action.search.SearchPhaseExecutionException;
import org.opensearch.action.search.SearchResponse;
-import org.opensearch.common.Strings;
import org.opensearch.common.settings.Settings;
import org.opensearch.common.time.DateFormatter;
import org.opensearch.common.time.DateFormatters;
@@ -1309,16 +1308,15 @@ public void testSingleValueFieldWithExtendedBoundsOffset() throws Exception {
}
public void testSingleValueWithMultipleDateFormatsFromMapping() throws Exception {
- String mappingJson = Strings.toString(
- jsonBuilder().startObject()
- .startObject("properties")
- .startObject("date")
- .field("type", "date")
- .field("format", "strict_date_optional_time||dd-MM-yyyy")
- .endObject()
- .endObject()
- .endObject()
- );
+ String mappingJson = jsonBuilder().startObject()
+ .startObject("properties")
+ .startObject("date")
+ .field("type", "date")
+ .field("format", "strict_date_optional_time||dd-MM-yyyy")
+ .endObject()
+ .endObject()
+ .endObject()
+ .toString();
prepareCreate("idx2").setMapping(mappingJson).get();
IndexRequestBuilder[] reqs = new IndexRequestBuilder[5];
for (int i = 0; i < reqs.length; i++) {
diff --git a/server/src/internalClusterTest/java/org/opensearch/search/aggregations/bucket/SignificantTermsSignificanceScoreIT.java b/server/src/internalClusterTest/java/org/opensearch/search/aggregations/bucket/SignificantTermsSignificanceScoreIT.java
index 43d49dc0bfd60..0854faf6c515c 100644
--- a/server/src/internalClusterTest/java/org/opensearch/search/aggregations/bucket/SignificantTermsSignificanceScoreIT.java
+++ b/server/src/internalClusterTest/java/org/opensearch/search/aggregations/bucket/SignificantTermsSignificanceScoreIT.java
@@ -35,7 +35,6 @@
import org.opensearch.action.index.IndexRequestBuilder;
import org.opensearch.action.search.SearchRequestBuilder;
import org.opensearch.action.search.SearchResponse;
-import org.opensearch.common.Strings;
import org.opensearch.common.settings.Settings;
import org.opensearch.core.xcontent.ToXContent;
import org.opensearch.core.xcontent.XContentBuilder;
@@ -208,7 +207,7 @@ public void testXContentResponse() throws Exception {
+ "\"score\":0.75,"
+ "\"bg_count\":4"
+ "}]}}]}}";
- assertThat(Strings.toString(responseBuilder), equalTo(result));
+ assertThat(responseBuilder.toString(), equalTo(result));
}
diff --git a/server/src/internalClusterTest/java/org/opensearch/search/aggregations/bucket/terms/BaseStringTermsTestCase.java b/server/src/internalClusterTest/java/org/opensearch/search/aggregations/bucket/terms/BaseStringTermsTestCase.java
index 7775618ba5b13..f7195c7cb4d2c 100644
--- a/server/src/internalClusterTest/java/org/opensearch/search/aggregations/bucket/terms/BaseStringTermsTestCase.java
+++ b/server/src/internalClusterTest/java/org/opensearch/search/aggregations/bucket/terms/BaseStringTermsTestCase.java
@@ -11,7 +11,7 @@
import org.junit.After;
import org.junit.Before;
import org.opensearch.action.index.IndexRequestBuilder;
-import org.opensearch.common.Strings;
+import org.opensearch.core.common.Strings;
import org.opensearch.index.fielddata.ScriptDocValues;
import org.opensearch.plugins.Plugin;
import org.opensearch.search.aggregations.AggregationTestScriptsPlugin;
diff --git a/server/src/internalClusterTest/java/org/opensearch/search/aggregations/metrics/CardinalityWithRequestBreakerIT.java b/server/src/internalClusterTest/java/org/opensearch/search/aggregations/metrics/CardinalityWithRequestBreakerIT.java
index be69428453952..a54ca880c3dd0 100644
--- a/server/src/internalClusterTest/java/org/opensearch/search/aggregations/metrics/CardinalityWithRequestBreakerIT.java
+++ b/server/src/internalClusterTest/java/org/opensearch/search/aggregations/metrics/CardinalityWithRequestBreakerIT.java
@@ -35,7 +35,7 @@
import org.opensearch.OpenSearchException;
import org.opensearch.ExceptionsHelper;
import org.opensearch.action.index.IndexRequestBuilder;
-import org.opensearch.common.breaker.CircuitBreakingException;
+import org.opensearch.core.common.breaker.CircuitBreakingException;
import org.opensearch.common.settings.Settings;
import org.opensearch.indices.breaker.HierarchyCircuitBreakerService;
import org.opensearch.search.aggregations.Aggregator;
diff --git a/server/src/internalClusterTest/java/org/opensearch/search/aggregations/metrics/HDRPercentileRanksIT.java b/server/src/internalClusterTest/java/org/opensearch/search/aggregations/metrics/HDRPercentileRanksIT.java
index 20fc6aaee20c9..6a62dabd98bce 100644
--- a/server/src/internalClusterTest/java/org/opensearch/search/aggregations/metrics/HDRPercentileRanksIT.java
+++ b/server/src/internalClusterTest/java/org/opensearch/search/aggregations/metrics/HDRPercentileRanksIT.java
@@ -53,7 +53,7 @@
import java.util.Map;
import static java.util.Collections.emptyMap;
-import static org.opensearch.common.util.CollectionUtils.iterableAsArrayList;
+import static org.opensearch.core.common.util.CollectionUtils.iterableAsArrayList;
import static org.opensearch.index.query.QueryBuilders.matchAllQuery;
import static org.opensearch.index.query.QueryBuilders.termQuery;
import static org.opensearch.search.aggregations.AggregationBuilders.filter;
diff --git a/server/src/internalClusterTest/java/org/opensearch/search/aggregations/metrics/HDRPercentilesIT.java b/server/src/internalClusterTest/java/org/opensearch/search/aggregations/metrics/HDRPercentilesIT.java
index 2660dbe0a88ed..ad3fd6517d1b1 100644
--- a/server/src/internalClusterTest/java/org/opensearch/search/aggregations/metrics/HDRPercentilesIT.java
+++ b/server/src/internalClusterTest/java/org/opensearch/search/aggregations/metrics/HDRPercentilesIT.java
@@ -34,7 +34,7 @@
import org.apache.logging.log4j.LogManager;
import org.opensearch.action.search.SearchResponse;
import org.opensearch.common.settings.Settings;
-import org.opensearch.common.util.CollectionUtils;
+import org.opensearch.core.common.util.CollectionUtils;
import org.opensearch.plugins.Plugin;
import org.opensearch.script.Script;
import org.opensearch.script.ScriptType;
diff --git a/server/src/internalClusterTest/java/org/opensearch/search/aggregations/metrics/TDigestPercentileRanksIT.java b/server/src/internalClusterTest/java/org/opensearch/search/aggregations/metrics/TDigestPercentileRanksIT.java
index adf027222d7d9..602cfe344b496 100644
--- a/server/src/internalClusterTest/java/org/opensearch/search/aggregations/metrics/TDigestPercentileRanksIT.java
+++ b/server/src/internalClusterTest/java/org/opensearch/search/aggregations/metrics/TDigestPercentileRanksIT.java
@@ -34,7 +34,7 @@
import org.apache.logging.log4j.LogManager;
import org.opensearch.action.search.SearchResponse;
import org.opensearch.common.settings.Settings;
-import org.opensearch.common.util.CollectionUtils;
+import org.opensearch.core.common.util.CollectionUtils;
import org.opensearch.plugins.Plugin;
import org.opensearch.script.Script;
import org.opensearch.script.ScriptType;
diff --git a/server/src/internalClusterTest/java/org/opensearch/search/aggregations/metrics/TDigestPercentilesIT.java b/server/src/internalClusterTest/java/org/opensearch/search/aggregations/metrics/TDigestPercentilesIT.java
index fda15f9b90ea2..2c05ed0bac44a 100644
--- a/server/src/internalClusterTest/java/org/opensearch/search/aggregations/metrics/TDigestPercentilesIT.java
+++ b/server/src/internalClusterTest/java/org/opensearch/search/aggregations/metrics/TDigestPercentilesIT.java
@@ -34,7 +34,7 @@
import org.apache.logging.log4j.LogManager;
import org.opensearch.action.search.SearchResponse;
import org.opensearch.common.settings.Settings;
-import org.opensearch.common.util.CollectionUtils;
+import org.opensearch.core.common.util.CollectionUtils;
import org.opensearch.plugins.Plugin;
import org.opensearch.script.Script;
import org.opensearch.script.ScriptType;
diff --git a/server/src/internalClusterTest/java/org/opensearch/search/basic/SearchWithRandomExceptionsIT.java b/server/src/internalClusterTest/java/org/opensearch/search/basic/SearchWithRandomExceptionsIT.java
index 0b55ea9119d89..b33adea494d17 100644
--- a/server/src/internalClusterTest/java/org/opensearch/search/basic/SearchWithRandomExceptionsIT.java
+++ b/server/src/internalClusterTest/java/org/opensearch/search/basic/SearchWithRandomExceptionsIT.java
@@ -43,7 +43,6 @@
import org.opensearch.action.index.IndexResponse;
import org.opensearch.action.search.SearchPhaseExecutionException;
import org.opensearch.action.search.SearchResponse;
-import org.opensearch.common.Strings;
import org.opensearch.common.settings.Setting;
import org.opensearch.common.settings.Setting.Property;
import org.opensearch.common.settings.Settings;
@@ -81,16 +80,15 @@ protected boolean addMockInternalEngine() {
}
public void testRandomExceptions() throws IOException, InterruptedException, ExecutionException {
- String mapping = Strings.toString(
- XContentFactory.jsonBuilder()
- .startObject()
- .startObject("properties")
- .startObject("test")
- .field("type", "keyword")
- .endObject()
- .endObject()
- .endObject()
- );
+ String mapping = XContentFactory.jsonBuilder()
+ .startObject()
+ .startObject("properties")
+ .startObject("test")
+ .field("type", "keyword")
+ .endObject()
+ .endObject()
+ .endObject()
+ .toString();
final double lowLevelRate;
final double topLevelRate;
if (frequently()) {
diff --git a/server/src/internalClusterTest/java/org/opensearch/search/basic/SearchWithRandomIOExceptionsIT.java b/server/src/internalClusterTest/java/org/opensearch/search/basic/SearchWithRandomIOExceptionsIT.java
index b0adc00f37fee..7cd389e6274dc 100644
--- a/server/src/internalClusterTest/java/org/opensearch/search/basic/SearchWithRandomIOExceptionsIT.java
+++ b/server/src/internalClusterTest/java/org/opensearch/search/basic/SearchWithRandomIOExceptionsIT.java
@@ -42,7 +42,6 @@
import org.opensearch.action.search.SearchPhaseExecutionException;
import org.opensearch.action.search.SearchResponse;
import org.opensearch.client.Requests;
-import org.opensearch.common.Strings;
import org.opensearch.common.settings.Settings;
import org.opensearch.common.unit.TimeValue;
import org.opensearch.common.xcontent.XContentFactory;
@@ -69,16 +68,15 @@ protected Collection> nodePlugins() {
}
public void testRandomDirectoryIOExceptions() throws IOException, InterruptedException, ExecutionException {
- String mapping = Strings.toString(
- XContentFactory.jsonBuilder()
- .startObject()
- .startObject("properties")
- .startObject("test")
- .field("type", "keyword")
- .endObject()
- .endObject()
- .endObject()
- );
+ String mapping = XContentFactory.jsonBuilder()
+ .startObject()
+ .startObject("properties")
+ .startObject("test")
+ .field("type", "keyword")
+ .endObject()
+ .endObject()
+ .endObject()
+ .toString();
final double exceptionRate;
final double exceptionOnOpenRate;
if (frequently()) {
diff --git a/server/src/internalClusterTest/java/org/opensearch/search/fetch/subphase/highlight/HighlighterSearchIT.java b/server/src/internalClusterTest/java/org/opensearch/search/fetch/subphase/highlight/HighlighterSearchIT.java
index f2a22b99a86a3..bf1ca409eee92 100644
--- a/server/src/internalClusterTest/java/org/opensearch/search/fetch/subphase/highlight/HighlighterSearchIT.java
+++ b/server/src/internalClusterTest/java/org/opensearch/search/fetch/subphase/highlight/HighlighterSearchIT.java
@@ -44,7 +44,6 @@
import org.opensearch.action.search.SearchRequestBuilder;
import org.opensearch.action.search.SearchResponse;
import org.opensearch.action.support.WriteRequest;
-import org.opensearch.common.Strings;
import org.opensearch.common.geo.GeoPoint;
import org.opensearch.common.settings.Settings;
import org.opensearch.common.settings.Settings.Builder;
@@ -3231,26 +3230,25 @@ public void testCopyToFields() throws Exception {
}
public void testACopyFieldWithNestedQuery() throws Exception {
- String mapping = Strings.toString(
- jsonBuilder().startObject()
- .startObject("properties")
- .startObject("foo")
- .field("type", "nested")
- .startObject("properties")
- .startObject("text")
- .field("type", "text")
- .field("copy_to", "foo_text")
- .endObject()
- .endObject()
- .endObject()
- .startObject("foo_text")
- .field("type", "text")
- .field("term_vector", "with_positions_offsets")
- .field("store", true)
- .endObject()
- .endObject()
- .endObject()
- );
+ String mapping = jsonBuilder().startObject()
+ .startObject("properties")
+ .startObject("foo")
+ .field("type", "nested")
+ .startObject("properties")
+ .startObject("text")
+ .field("type", "text")
+ .field("copy_to", "foo_text")
+ .endObject()
+ .endObject()
+ .endObject()
+ .startObject("foo_text")
+ .field("type", "text")
+ .field("term_vector", "with_positions_offsets")
+ .field("store", true)
+ .endObject()
+ .endObject()
+ .endObject()
+ .toString();
prepareCreate("test").setMapping(mapping).get();
client().prepareIndex("test")
@@ -3361,25 +3359,24 @@ public void testHighlightQueryRewriteDatesWithNow() throws Exception {
}
public void testWithNestedQuery() throws Exception {
- String mapping = Strings.toString(
- jsonBuilder().startObject()
- .startObject("properties")
- .startObject("text")
- .field("type", "text")
- .field("index_options", "offsets")
- .field("term_vector", "with_positions_offsets")
- .endObject()
- .startObject("foo")
- .field("type", "nested")
- .startObject("properties")
- .startObject("text")
- .field("type", "text")
- .endObject()
- .endObject()
- .endObject()
- .endObject()
- .endObject()
- );
+ String mapping = jsonBuilder().startObject()
+ .startObject("properties")
+ .startObject("text")
+ .field("type", "text")
+ .field("index_options", "offsets")
+ .field("term_vector", "with_positions_offsets")
+ .endObject()
+ .startObject("foo")
+ .field("type", "nested")
+ .startObject("properties")
+ .startObject("text")
+ .field("type", "text")
+ .endObject()
+ .endObject()
+ .endObject()
+ .endObject()
+ .endObject()
+ .toString();
prepareCreate("test").setMapping(mapping).get();
client().prepareIndex("test")
diff --git a/server/src/internalClusterTest/java/org/opensearch/search/fields/SearchFieldsIT.java b/server/src/internalClusterTest/java/org/opensearch/search/fields/SearchFieldsIT.java
index 90d0a59f7b58d..53eb290e1edbf 100644
--- a/server/src/internalClusterTest/java/org/opensearch/search/fields/SearchFieldsIT.java
+++ b/server/src/internalClusterTest/java/org/opensearch/search/fields/SearchFieldsIT.java
@@ -36,7 +36,6 @@
import org.opensearch.action.search.SearchRequestBuilder;
import org.opensearch.action.search.SearchResponse;
import org.opensearch.common.Numbers;
-import org.opensearch.common.Strings;
import org.opensearch.core.common.bytes.BytesArray;
import org.opensearch.core.common.bytes.BytesReference;
import org.opensearch.common.collect.MapBuilder;
@@ -201,27 +200,26 @@ static Object docScript(Map vars, String fieldName) {
public void testStoredFields() throws Exception {
createIndex("test");
- String mapping = Strings.toString(
- XContentFactory.jsonBuilder()
- .startObject()
- .startObject(MapperService.SINGLE_MAPPING_NAME)
- .startObject("properties")
- .startObject("field1")
- .field("type", "text")
- .field("store", true)
- .endObject()
- .startObject("field2")
- .field("type", "text")
- .field("store", false)
- .endObject()
- .startObject("field3")
- .field("type", "text")
- .field("store", true)
- .endObject()
- .endObject()
- .endObject()
- .endObject()
- );
+ String mapping = XContentFactory.jsonBuilder()
+ .startObject()
+ .startObject(MapperService.SINGLE_MAPPING_NAME)
+ .startObject("properties")
+ .startObject("field1")
+ .field("type", "text")
+ .field("store", true)
+ .endObject()
+ .startObject("field2")
+ .field("type", "text")
+ .field("store", false)
+ .endObject()
+ .startObject("field3")
+ .field("type", "text")
+ .field("store", true)
+ .endObject()
+ .endObject()
+ .endObject()
+ .endObject()
+ .toString();
client().admin().indices().preparePutMapping().setSource(mapping, XContentType.JSON).get();
@@ -304,19 +302,18 @@ public void testStoredFields() throws Exception {
public void testScriptDocAndFields() throws Exception {
createIndex("test");
- String mapping = Strings.toString(
- XContentFactory.jsonBuilder()
- .startObject()
- .startObject(MapperService.SINGLE_MAPPING_NAME)
- .startObject("properties")
- .startObject("num1")
- .field("type", "double")
- .field("store", true)
- .endObject()
- .endObject()
- .endObject()
- .endObject()
- );
+ String mapping = XContentFactory.jsonBuilder()
+ .startObject()
+ .startObject(MapperService.SINGLE_MAPPING_NAME)
+ .startObject("properties")
+ .startObject("num1")
+ .field("type", "double")
+ .field("store", true)
+ .endObject()
+ .endObject()
+ .endObject()
+ .endObject()
+ .toString();
client().admin().indices().preparePutMapping().setSource(mapping, XContentType.JSON).get();
@@ -406,19 +403,18 @@ public void testScriptDocAndFields() throws Exception {
public void testScriptWithUnsignedLong() throws Exception {
createIndex("test");
- String mapping = Strings.toString(
- XContentFactory.jsonBuilder()
- .startObject()
- .startObject(MapperService.SINGLE_MAPPING_NAME)
- .startObject("properties")
- .startObject("unsigned_num1")
- .field("type", "unsigned_long")
- .field("store", true)
- .endObject()
- .endObject()
- .endObject()
- .endObject()
- );
+ String mapping = XContentFactory.jsonBuilder()
+ .startObject()
+ .startObject(MapperService.SINGLE_MAPPING_NAME)
+ .startObject("properties")
+ .startObject("unsigned_num1")
+ .field("type", "unsigned_long")
+ .field("store", true)
+ .endObject()
+ .endObject()
+ .endObject()
+ .endObject()
+ .toString();
client().admin().indices().preparePutMapping().setSource(mapping, XContentType.JSON).get();
@@ -509,18 +505,17 @@ public void testScriptWithUnsignedLong() throws Exception {
public void testScriptFieldWithNanos() throws Exception {
createIndex("test");
- String mapping = Strings.toString(
- XContentFactory.jsonBuilder()
- .startObject()
- .startObject(MapperService.SINGLE_MAPPING_NAME)
- .startObject("properties")
- .startObject("date")
- .field("type", "date_nanos")
- .endObject()
- .endObject()
- .endObject()
- .endObject()
- );
+ String mapping = XContentFactory.jsonBuilder()
+ .startObject()
+ .startObject(MapperService.SINGLE_MAPPING_NAME)
+ .startObject("properties")
+ .startObject("date")
+ .field("type", "date_nanos")
+ .endObject()
+ .endObject()
+ .endObject()
+ .endObject()
+ .toString();
client().admin().indices().preparePutMapping().setSource(mapping, XContentType.JSON).get();
String date = "2019-01-31T10:00:00.123456789Z";
@@ -706,58 +701,57 @@ public void testPartialFields() throws Exception {
public void testStoredFieldsWithoutSource() throws Exception {
createIndex("test");
- String mapping = Strings.toString(
- XContentFactory.jsonBuilder()
- .startObject()
- .startObject(MapperService.SINGLE_MAPPING_NAME)
- .startObject("_source")
- .field("enabled", false)
- .endObject()
- .startObject("properties")
- .startObject("byte_field")
- .field("type", "byte")
- .field("store", true)
- .endObject()
- .startObject("short_field")
- .field("type", "short")
- .field("store", true)
- .endObject()
- .startObject("integer_field")
- .field("type", "integer")
- .field("store", true)
- .endObject()
- .startObject("long_field")
- .field("type", "long")
- .field("store", true)
- .endObject()
- .startObject("float_field")
- .field("type", "float")
- .field("store", true)
- .endObject()
- .startObject("double_field")
- .field("type", "double")
- .field("store", true)
- .endObject()
- .startObject("date_field")
- .field("type", "date")
- .field("store", true)
- .endObject()
- .startObject("boolean_field")
- .field("type", "boolean")
- .field("store", true)
- .endObject()
- .startObject("binary_field")
- .field("type", "binary")
- .field("store", true)
- .endObject()
- .startObject("unsigned_long_field")
- .field("type", "unsigned_long")
- .field("store", true)
- .endObject()
- .endObject()
- .endObject()
- .endObject()
- );
+ String mapping = XContentFactory.jsonBuilder()
+ .startObject()
+ .startObject(MapperService.SINGLE_MAPPING_NAME)
+ .startObject("_source")
+ .field("enabled", false)
+ .endObject()
+ .startObject("properties")
+ .startObject("byte_field")
+ .field("type", "byte")
+ .field("store", true)
+ .endObject()
+ .startObject("short_field")
+ .field("type", "short")
+ .field("store", true)
+ .endObject()
+ .startObject("integer_field")
+ .field("type", "integer")
+ .field("store", true)
+ .endObject()
+ .startObject("long_field")
+ .field("type", "long")
+ .field("store", true)
+ .endObject()
+ .startObject("float_field")
+ .field("type", "float")
+ .field("store", true)
+ .endObject()
+ .startObject("double_field")
+ .field("type", "double")
+ .field("store", true)
+ .endObject()
+ .startObject("date_field")
+ .field("type", "date")
+ .field("store", true)
+ .endObject()
+ .startObject("boolean_field")
+ .field("type", "boolean")
+ .field("store", true)
+ .endObject()
+ .startObject("binary_field")
+ .field("type", "binary")
+ .field("store", true)
+ .endObject()
+ .startObject("unsigned_long_field")
+ .field("type", "unsigned_long")
+ .field("store", true)
+ .endObject()
+ .endObject()
+ .endObject()
+ .endObject()
+ .toString();
client().admin().indices().preparePutMapping().setSource(mapping, XContentType.JSON).get();
@@ -944,59 +938,58 @@ public void testSingleValueFieldDatatField() throws ExecutionException, Interrup
public void testDocValueFields() throws Exception {
createIndex("test");
- String mapping = Strings.toString(
- XContentFactory.jsonBuilder()
- .startObject()
- .startObject(MapperService.SINGLE_MAPPING_NAME)
- .startObject("_source")
- .field("enabled", false)
- .endObject()
- .startObject("properties")
- .startObject("text_field")
- .field("type", "text")
- .field("fielddata", true)
- .endObject()
- .startObject("keyword_field")
- .field("type", "keyword")
- .endObject()
- .startObject("byte_field")
- .field("type", "byte")
- .endObject()
- .startObject("short_field")
- .field("type", "short")
- .endObject()
- .startObject("integer_field")
- .field("type", "integer")
- .endObject()
- .startObject("long_field")
- .field("type", "long")
- .endObject()
- .startObject("float_field")
- .field("type", "float")
- .endObject()
- .startObject("double_field")
- .field("type", "double")
- .endObject()
- .startObject("date_field")
- .field("type", "date")
- .endObject()
- .startObject("boolean_field")
- .field("type", "boolean")
- .endObject()
- .startObject("binary_field")
- .field("type", "binary")
- .field("doc_values", true) // off by default on binary fields
- .endObject()
- .startObject("ip_field")
- .field("type", "ip")
- .endObject()
- .startObject("flat_object_field")
- .field("type", "flat_object")
- .endObject()
- .endObject()
- .endObject()
- .endObject()
- );
+ String mapping = XContentFactory.jsonBuilder()
+ .startObject()
+ .startObject(MapperService.SINGLE_MAPPING_NAME)
+ .startObject("_source")
+ .field("enabled", false)
+ .endObject()
+ .startObject("properties")
+ .startObject("text_field")
+ .field("type", "text")
+ .field("fielddata", true)
+ .endObject()
+ .startObject("keyword_field")
+ .field("type", "keyword")
+ .endObject()
+ .startObject("byte_field")
+ .field("type", "byte")
+ .endObject()
+ .startObject("short_field")
+ .field("type", "short")
+ .endObject()
+ .startObject("integer_field")
+ .field("type", "integer")
+ .endObject()
+ .startObject("long_field")
+ .field("type", "long")
+ .endObject()
+ .startObject("float_field")
+ .field("type", "float")
+ .endObject()
+ .startObject("double_field")
+ .field("type", "double")
+ .endObject()
+ .startObject("date_field")
+ .field("type", "date")
+ .endObject()
+ .startObject("boolean_field")
+ .field("type", "boolean")
+ .endObject()
+ .startObject("binary_field")
+ .field("type", "binary")
+ .field("doc_values", true) // off by default on binary fields
+ .endObject()
+ .startObject("ip_field")
+ .field("type", "ip")
+ .endObject()
+ .startObject("flat_object_field")
+ .field("type", "flat_object")
+ .endObject()
+ .endObject()
+ .endObject()
+ .endObject()
+ .toString();
client().admin().indices().preparePutMapping().setSource(mapping, XContentType.JSON).get();
@@ -1067,9 +1060,6 @@ public void testDocValueFields() throws Exception {
)
)
);
- String json = Strings.toString(
- XContentFactory.jsonBuilder().startObject().startObject("flat_object_field").field("foo", "bar").endObject().endObject()
- );
assertThat(searchResponse.getHits().getAt(0).getFields().get("byte_field").getValue().toString(), equalTo("1"));
assertThat(searchResponse.getHits().getAt(0).getFields().get("short_field").getValue().toString(), equalTo("2"));
assertThat(searchResponse.getHits().getAt(0).getFields().get("integer_field").getValue(), equalTo((Object) 3L));
diff --git a/server/src/internalClusterTest/java/org/opensearch/search/geo/GeoFilterIT.java b/server/src/internalClusterTest/java/org/opensearch/search/geo/GeoFilterIT.java
index bb14ed1ea5578..d4467b49d1c18 100644
--- a/server/src/internalClusterTest/java/org/opensearch/search/geo/GeoFilterIT.java
+++ b/server/src/internalClusterTest/java/org/opensearch/search/geo/GeoFilterIT.java
@@ -47,7 +47,6 @@
import org.opensearch.action.search.SearchResponse;
import org.opensearch.cluster.metadata.IndexMetadata;
import org.opensearch.common.Priority;
-import org.opensearch.common.Strings;
import org.opensearch.core.common.bytes.BytesReference;
import org.opensearch.common.geo.GeoPoint;
import org.opensearch.common.geo.GeoUtils;
@@ -211,17 +210,16 @@ public void testShapeRelations() throws Exception {
assertTrue("Disjoint relation is not supported", disjointSupport);
assertTrue("within relation is not supported", withinSupport);
- String mapping = Strings.toString(
- XContentFactory.jsonBuilder()
- .startObject()
- .startObject("properties")
- .startObject("area")
- .field("type", "geo_shape")
- .field("tree", "geohash")
- .endObject()
- .endObject()
- .endObject()
- );
+ String mapping = XContentFactory.jsonBuilder()
+ .startObject()
+ .startObject("properties")
+ .startObject("area")
+ .field("type", "geo_shape")
+ .field("tree", "geohash")
+ .endObject()
+ .endObject()
+ .endObject()
+ .toString();
CreateIndexRequestBuilder mappingRequest = client().admin().indices().prepareCreate("shapes").setMapping(mapping);
mappingRequest.get();
diff --git a/server/src/internalClusterTest/java/org/opensearch/search/geo/GeoShapeIntegrationIT.java b/server/src/internalClusterTest/java/org/opensearch/search/geo/GeoShapeIntegrationIT.java
index 89eb6038d8110..cf1a1f82d7200 100644
--- a/server/src/internalClusterTest/java/org/opensearch/search/geo/GeoShapeIntegrationIT.java
+++ b/server/src/internalClusterTest/java/org/opensearch/search/geo/GeoShapeIntegrationIT.java
@@ -36,7 +36,6 @@
import org.opensearch.action.search.SearchResponse;
import org.opensearch.cluster.ClusterState;
import org.opensearch.cluster.routing.IndexShardRoutingTable;
-import org.opensearch.common.Strings;
import org.opensearch.common.geo.builders.PointBuilder;
import org.opensearch.common.geo.builders.ShapeBuilder;
import org.opensearch.common.settings.Settings;
@@ -73,32 +72,30 @@ protected Settings nodeSettings(int nodeOrdinal) {
*/
public void testOrientationPersistence() throws Exception {
String idxName = "orientation";
- String mapping = Strings.toString(
- XContentFactory.jsonBuilder()
- .startObject()
- .startObject("properties")
- .startObject("location")
- .field("type", "geo_shape")
- .field("orientation", "left")
- .endObject()
- .endObject()
- .endObject()
- );
+ String mapping = XContentFactory.jsonBuilder()
+ .startObject()
+ .startObject("properties")
+ .startObject("location")
+ .field("type", "geo_shape")
+ .field("orientation", "left")
+ .endObject()
+ .endObject()
+ .endObject()
+ .toString();
// create index
assertAcked(prepareCreate(idxName).setMapping(mapping));
- mapping = Strings.toString(
- XContentFactory.jsonBuilder()
- .startObject()
- .startObject("properties")
- .startObject("location")
- .field("type", "geo_shape")
- .field("orientation", "right")
- .endObject()
- .endObject()
- .endObject()
- );
+ mapping = XContentFactory.jsonBuilder()
+ .startObject()
+ .startObject("properties")
+ .startObject("location")
+ .field("type", "geo_shape")
+ .field("orientation", "right")
+ .endObject()
+ .endObject()
+ .endObject()
+ .toString();
assertAcked(prepareCreate(idxName + "2").setMapping(mapping));
ensureGreen(idxName, idxName + "2");
@@ -140,44 +137,43 @@ public void testIgnoreMalformed() throws Exception {
ensureGreen();
// test self crossing ccw poly not crossing dateline
- String polygonGeoJson = Strings.toString(
- XContentFactory.jsonBuilder()
- .startObject()
- .field("type", "Polygon")
- .startArray("coordinates")
- .startArray()
- .startArray()
- .value(176.0)
- .value(15.0)
- .endArray()
- .startArray()
- .value(-177.0)
- .value(10.0)
- .endArray()
- .startArray()
- .value(-177.0)
- .value(-10.0)
- .endArray()
- .startArray()
- .value(176.0)
- .value(-15.0)
- .endArray()
- .startArray()
- .value(-177.0)
- .value(15.0)
- .endArray()
- .startArray()
- .value(172.0)
- .value(0.0)
- .endArray()
- .startArray()
- .value(176.0)
- .value(15.0)
- .endArray()
- .endArray()
- .endArray()
- .endObject()
- );
+ String polygonGeoJson = XContentFactory.jsonBuilder()
+ .startObject()
+ .field("type", "Polygon")
+ .startArray("coordinates")
+ .startArray()
+ .startArray()
+ .value(176.0)
+ .value(15.0)
+ .endArray()
+ .startArray()
+ .value(-177.0)
+ .value(10.0)
+ .endArray()
+ .startArray()
+ .value(-177.0)
+ .value(-10.0)
+ .endArray()
+ .startArray()
+ .value(176.0)
+ .value(-15.0)
+ .endArray()
+ .startArray()
+ .value(-177.0)
+ .value(15.0)
+ .endArray()
+ .startArray()
+ .value(172.0)
+ .value(0.0)
+ .endArray()
+ .startArray()
+ .value(176.0)
+ .value(15.0)
+ .endArray()
+ .endArray()
+ .endArray()
+ .endObject()
+ .toString();
indexRandom(true, client().prepareIndex("test").setId("0").setSource("shape", polygonGeoJson));
SearchResponse searchResponse = client().prepareSearch("test").setQuery(matchAllQuery()).get();
diff --git a/server/src/internalClusterTest/java/org/opensearch/search/geo/LegacyGeoShapeIntegrationIT.java b/server/src/internalClusterTest/java/org/opensearch/search/geo/LegacyGeoShapeIntegrationIT.java
index 11f2132bb29de..6332e2b94750d 100644
--- a/server/src/internalClusterTest/java/org/opensearch/search/geo/LegacyGeoShapeIntegrationIT.java
+++ b/server/src/internalClusterTest/java/org/opensearch/search/geo/LegacyGeoShapeIntegrationIT.java
@@ -37,7 +37,6 @@
import org.opensearch.action.search.SearchResponse;
import org.opensearch.cluster.ClusterState;
import org.opensearch.cluster.routing.IndexShardRoutingTable;
-import org.opensearch.common.Strings;
import org.opensearch.common.geo.builders.ShapeBuilder;
import org.opensearch.common.settings.Settings;
import org.opensearch.core.xcontent.ToXContent;
@@ -65,34 +64,32 @@ public class LegacyGeoShapeIntegrationIT extends OpenSearchIntegTestCase {
*/
public void testOrientationPersistence() throws Exception {
String idxName = "orientation";
- String mapping = Strings.toString(
- XContentFactory.jsonBuilder()
- .startObject()
- .startObject("properties")
- .startObject("location")
- .field("type", "geo_shape")
- .field("tree", "quadtree")
- .field("orientation", "left")
- .endObject()
- .endObject()
- .endObject()
- );
+ String mapping = XContentFactory.jsonBuilder()
+ .startObject()
+ .startObject("properties")
+ .startObject("location")
+ .field("type", "geo_shape")
+ .field("tree", "quadtree")
+ .field("orientation", "left")
+ .endObject()
+ .endObject()
+ .endObject()
+ .toString();
// create index
assertAcked(prepareCreate(idxName).setMapping(mapping));
- mapping = Strings.toString(
- XContentFactory.jsonBuilder()
- .startObject()
- .startObject("properties")
- .startObject("location")
- .field("type", "geo_shape")
- .field("tree", "quadtree")
- .field("orientation", "right")
- .endObject()
- .endObject()
- .endObject()
- );
+ mapping = XContentFactory.jsonBuilder()
+ .startObject()
+ .startObject("properties")
+ .startObject("location")
+ .field("type", "geo_shape")
+ .field("tree", "quadtree")
+ .field("orientation", "right")
+ .endObject()
+ .endObject()
+ .endObject()
+ .toString();
assertAcked(prepareCreate(idxName + "2").setMapping(mapping));
ensureGreen(idxName, idxName + "2");
@@ -136,44 +133,43 @@ public void testIgnoreMalformed() throws Exception {
ensureGreen();
// test self crossing ccw poly not crossing dateline
- String polygonGeoJson = Strings.toString(
- XContentFactory.jsonBuilder()
- .startObject()
- .field("type", "Polygon")
- .startArray("coordinates")
- .startArray()
- .startArray()
- .value(176.0)
- .value(15.0)
- .endArray()
- .startArray()
- .value(-177.0)
- .value(10.0)
- .endArray()
- .startArray()
- .value(-177.0)
- .value(-10.0)
- .endArray()
- .startArray()
- .value(176.0)
- .value(-15.0)
- .endArray()
- .startArray()
- .value(-177.0)
- .value(15.0)
- .endArray()
- .startArray()
- .value(172.0)
- .value(0.0)
- .endArray()
- .startArray()
- .value(176.0)
- .value(15.0)
- .endArray()
- .endArray()
- .endArray()
- .endObject()
- );
+ String polygonGeoJson = XContentFactory.jsonBuilder()
+ .startObject()
+ .field("type", "Polygon")
+ .startArray("coordinates")
+ .startArray()
+ .startArray()
+ .value(176.0)
+ .value(15.0)
+ .endArray()
+ .startArray()
+ .value(-177.0)
+ .value(10.0)
+ .endArray()
+ .startArray()
+ .value(-177.0)
+ .value(-10.0)
+ .endArray()
+ .startArray()
+ .value(176.0)
+ .value(-15.0)
+ .endArray()
+ .startArray()
+ .value(-177.0)
+ .value(15.0)
+ .endArray()
+ .startArray()
+ .value(172.0)
+ .value(0.0)
+ .endArray()
+ .startArray()
+ .value(176.0)
+ .value(15.0)
+ .endArray()
+ .endArray()
+ .endArray()
+ .endObject()
+ .toString();
indexRandom(true, client().prepareIndex("test").setId("0").setSource("shape", polygonGeoJson));
SearchResponse searchResponse = client().prepareSearch("test").setQuery(matchAllQuery()).get();
diff --git a/server/src/internalClusterTest/java/org/opensearch/search/morelikethis/MoreLikeThisIT.java b/server/src/internalClusterTest/java/org/opensearch/search/morelikethis/MoreLikeThisIT.java
index 04d193aaea71a..2bf3394762621 100644
--- a/server/src/internalClusterTest/java/org/opensearch/search/morelikethis/MoreLikeThisIT.java
+++ b/server/src/internalClusterTest/java/org/opensearch/search/morelikethis/MoreLikeThisIT.java
@@ -38,7 +38,6 @@
import org.opensearch.action.search.SearchPhaseExecutionException;
import org.opensearch.action.search.SearchResponse;
import org.opensearch.cluster.health.ClusterHealthStatus;
-import org.opensearch.common.Strings;
import org.opensearch.common.settings.Settings;
import org.opensearch.core.xcontent.XContentBuilder;
import org.opensearch.common.xcontent.XContentFactory;
@@ -270,7 +269,7 @@ public void testMoreLikeThisWithAliasesInLikeDocuments() throws Exception {
String indexName = "foo";
String aliasName = "foo_name";
- String mapping = Strings.toString(XContentFactory.jsonBuilder().startObject().startObject("properties").endObject().endObject());
+ String mapping = XContentFactory.jsonBuilder().startObject().startObject("properties").endObject().endObject().toString();
client().admin().indices().prepareCreate(indexName).setMapping(mapping).get();
client().admin().indices().prepareAliases().addAlias(indexName, aliasName).get();
@@ -292,7 +291,7 @@ public void testMoreLikeThisWithAliasesInLikeDocuments() throws Exception {
}
public void testMoreLikeThisIssue2197() throws Exception {
- String mapping = Strings.toString(XContentFactory.jsonBuilder().startObject().startObject("properties").endObject().endObject());
+ String mapping = XContentFactory.jsonBuilder().startObject().startObject("properties").endObject().endObject().toString();
client().admin().indices().prepareCreate("foo").setMapping(mapping).get();
client().prepareIndex("foo")
.setId("1")
@@ -313,7 +312,7 @@ public void testMoreLikeThisIssue2197() throws Exception {
// Issue #2489
public void testMoreLikeWithCustomRouting() throws Exception {
- String mapping = Strings.toString(XContentFactory.jsonBuilder().startObject().startObject("properties").endObject().endObject());
+ String mapping = XContentFactory.jsonBuilder().startObject().startObject("properties").endObject().endObject().toString();
client().admin().indices().prepareCreate("foo").setMapping(mapping).get();
ensureGreen();
@@ -333,7 +332,7 @@ public void testMoreLikeWithCustomRouting() throws Exception {
// Issue #3039
public void testMoreLikeThisIssueRoutingNotSerialized() throws Exception {
- String mapping = Strings.toString(XContentFactory.jsonBuilder().startObject().startObject("properties").endObject().endObject());
+ String mapping = XContentFactory.jsonBuilder().startObject().startObject("properties").endObject().endObject().toString();
assertAcked(
prepareCreate("foo", 2, Settings.builder().put(SETTING_NUMBER_OF_SHARDS, 2).put(SETTING_NUMBER_OF_REPLICAS, 0)).setMapping(
mapping
diff --git a/server/src/internalClusterTest/java/org/opensearch/search/profile/aggregation/AggregationProfilerIT.java b/server/src/internalClusterTest/java/org/opensearch/search/profile/aggregation/AggregationProfilerIT.java
index 0f08c537d74d8..9d0c30c5a488f 100644
--- a/server/src/internalClusterTest/java/org/opensearch/search/profile/aggregation/AggregationProfilerIT.java
+++ b/server/src/internalClusterTest/java/org/opensearch/search/profile/aggregation/AggregationProfilerIT.java
@@ -90,7 +90,52 @@ public class AggregationProfilerIT extends OpenSearchIntegTestCase {
COLLECT + "_count",
POST_COLLECTION + "_count",
BUILD_AGGREGATION + "_count",
- REDUCE + "_count"
+ REDUCE + "_count",
+ INITIALIZE + "_start_time",
+ BUILD_LEAF_COLLECTOR + "_start_time",
+ COLLECT + "_start_time",
+ POST_COLLECTION + "_start_time",
+ BUILD_AGGREGATION + "_start_time",
+ REDUCE + "_start_time"
+ );
+
+ private static final Set<String> CONCURRENT_SEARCH_BREAKDOWN_KEYS = Set.of(
+ INITIALIZE,
+ BUILD_LEAF_COLLECTOR,
+ COLLECT,
+ POST_COLLECTION,
+ BUILD_AGGREGATION,
+ REDUCE,
+ INITIALIZE + "_count",
+ BUILD_LEAF_COLLECTOR + "_count",
+ COLLECT + "_count",
+ POST_COLLECTION + "_count",
+ BUILD_AGGREGATION + "_count",
+ REDUCE + "_count",
+ "max_" + INITIALIZE,
+ "max_" + BUILD_LEAF_COLLECTOR,
+ "max_" + COLLECT,
+ "max_" + POST_COLLECTION,
+ "max_" + BUILD_AGGREGATION,
+ "max_" + REDUCE,
+ "min_" + INITIALIZE,
+ "min_" + BUILD_LEAF_COLLECTOR,
+ "min_" + COLLECT,
+ "min_" + POST_COLLECTION,
+ "min_" + BUILD_AGGREGATION,
+ "min_" + REDUCE,
+ "avg_" + INITIALIZE,
+ "avg_" + BUILD_LEAF_COLLECTOR,
+ "avg_" + COLLECT,
+ "avg_" + POST_COLLECTION,
+ "avg_" + BUILD_AGGREGATION,
+ "avg_" + REDUCE,
+ "max_" + BUILD_LEAF_COLLECTOR + "_count",
+ "max_" + COLLECT + "_count",
+ "min_" + BUILD_LEAF_COLLECTOR + "_count",
+ "min_" + COLLECT + "_count",
+ "avg_" + BUILD_LEAF_COLLECTOR + "_count",
+ "avg_" + COLLECT + "_count"
);
private static final String TOTAL_BUCKETS = "total_buckets";
@@ -169,7 +214,12 @@ public void testSimpleProfile() {
assertThat(histoAggResult.getTime(), greaterThan(0L));
Map<String, Long> breakdown = histoAggResult.getTimeBreakdown();
assertThat(breakdown, notNullValue());
- assertThat(breakdown.keySet(), equalTo(BREAKDOWN_KEYS));
+ if (histoAggResult.getMaxSliceTime() != null) {
+ // concurrent segment search enabled
+ assertThat(breakdown.keySet(), equalTo(CONCURRENT_SEARCH_BREAKDOWN_KEYS));
+ } else {
+ assertThat(breakdown.keySet(), equalTo(BREAKDOWN_KEYS));
+ }
assertThat(breakdown.get(INITIALIZE), greaterThan(0L));
assertThat(breakdown.get(COLLECT), greaterThan(0L));
assertThat(breakdown.get(BUILD_AGGREGATION).longValue(), greaterThan(0L));
@@ -212,7 +262,12 @@ public void testMultiLevelProfile() {
assertThat(histoAggResult.getTime(), greaterThan(0L));
Map<String, Long> histoBreakdown = histoAggResult.getTimeBreakdown();
assertThat(histoBreakdown, notNullValue());
- assertThat(histoBreakdown.keySet(), equalTo(BREAKDOWN_KEYS));
+ if (histoAggResult.getMaxSliceTime() != null) {
+ // concurrent segment search enabled
+ assertThat(histoBreakdown.keySet(), equalTo(CONCURRENT_SEARCH_BREAKDOWN_KEYS));
+ } else {
+ assertThat(histoBreakdown.keySet(), equalTo(BREAKDOWN_KEYS));
+ }
assertThat(histoBreakdown.get(INITIALIZE), greaterThan(0L));
assertThat(histoBreakdown.get(COLLECT), greaterThan(0L));
assertThat(histoBreakdown.get(BUILD_AGGREGATION), greaterThan(0L));
@@ -230,7 +285,12 @@ public void testMultiLevelProfile() {
assertThat(termsAggResult.getTime(), greaterThan(0L));
Map<String, Long> termsBreakdown = termsAggResult.getTimeBreakdown();
assertThat(termsBreakdown, notNullValue());
- assertThat(termsBreakdown.keySet(), equalTo(BREAKDOWN_KEYS));
+ if (termsAggResult.getMaxSliceTime() != null) {
+ // concurrent segment search enabled
+ assertThat(termsBreakdown.keySet(), equalTo(CONCURRENT_SEARCH_BREAKDOWN_KEYS));
+ } else {
+ assertThat(termsBreakdown.keySet(), equalTo(BREAKDOWN_KEYS));
+ }
assertThat(termsBreakdown.get(INITIALIZE), greaterThan(0L));
assertThat(termsBreakdown.get(COLLECT), greaterThan(0L));
assertThat(termsBreakdown.get(BUILD_AGGREGATION), greaterThan(0L));
@@ -245,7 +305,12 @@ public void testMultiLevelProfile() {
assertThat(avgAggResult.getTime(), greaterThan(0L));
Map<String, Long> avgBreakdown = termsAggResult.getTimeBreakdown();
assertThat(avgBreakdown, notNullValue());
- assertThat(avgBreakdown.keySet(), equalTo(BREAKDOWN_KEYS));
+ if (avgAggResult.getMaxSliceTime() != null) {
+ // concurrent segment search enabled
+ assertThat(avgBreakdown.keySet(), equalTo(CONCURRENT_SEARCH_BREAKDOWN_KEYS));
+ } else {
+ assertThat(avgBreakdown.keySet(), equalTo(BREAKDOWN_KEYS));
+ }
assertThat(avgBreakdown.get(INITIALIZE), greaterThan(0L));
assertThat(avgBreakdown.get(COLLECT), greaterThan(0L));
assertThat(avgBreakdown.get(BUILD_AGGREGATION), greaterThan(0L));
@@ -298,7 +363,12 @@ public void testMultiLevelProfileBreadthFirst() {
assertThat(histoAggResult.getTime(), greaterThan(0L));
Map<String, Long> histoBreakdown = histoAggResult.getTimeBreakdown();
assertThat(histoBreakdown, notNullValue());
- assertThat(histoBreakdown.keySet(), equalTo(BREAKDOWN_KEYS));
+ if (histoAggResult.getMaxSliceTime() != null) {
+ // concurrent segment search enabled
+ assertThat(histoBreakdown.keySet(), equalTo(CONCURRENT_SEARCH_BREAKDOWN_KEYS));
+ } else {
+ assertThat(histoBreakdown.keySet(), equalTo(BREAKDOWN_KEYS));
+ }
assertThat(histoBreakdown.get(INITIALIZE), greaterThan(0L));
assertThat(histoBreakdown.get(COLLECT), greaterThan(0L));
assertThat(histoBreakdown.get(BUILD_AGGREGATION), greaterThan(0L));
@@ -316,7 +386,12 @@ public void testMultiLevelProfileBreadthFirst() {
assertThat(termsAggResult.getTime(), greaterThan(0L));
Map<String, Long> termsBreakdown = termsAggResult.getTimeBreakdown();
assertThat(termsBreakdown, notNullValue());
- assertThat(termsBreakdown.keySet(), equalTo(BREAKDOWN_KEYS));
+ if (termsAggResult.getMaxSliceTime() != null) {
+ // concurrent segment search enabled
+ assertThat(termsBreakdown.keySet(), equalTo(CONCURRENT_SEARCH_BREAKDOWN_KEYS));
+ } else {
+ assertThat(termsBreakdown.keySet(), equalTo(BREAKDOWN_KEYS));
+ }
assertThat(termsBreakdown.get(INITIALIZE), greaterThan(0L));
assertThat(termsBreakdown.get(COLLECT), greaterThan(0L));
assertThat(termsBreakdown.get(BUILD_AGGREGATION), greaterThan(0L));
@@ -331,7 +406,12 @@ public void testMultiLevelProfileBreadthFirst() {
assertThat(avgAggResult.getTime(), greaterThan(0L));
Map<String, Long> avgBreakdown = avgAggResult.getTimeBreakdown();
assertThat(avgBreakdown, notNullValue());
- assertThat(avgBreakdown.keySet(), equalTo(BREAKDOWN_KEYS));
+ if (avgAggResult.getMaxSliceTime() != null) {
+ // concurrent segment search enabled
+ assertThat(avgBreakdown.keySet(), equalTo(CONCURRENT_SEARCH_BREAKDOWN_KEYS));
+ } else {
+ assertThat(avgBreakdown.keySet(), equalTo(BREAKDOWN_KEYS));
+ }
assertThat(avgBreakdown.get(INITIALIZE), greaterThan(0L));
assertThat(avgBreakdown.get(COLLECT), greaterThan(0L));
assertThat(avgBreakdown.get(BUILD_AGGREGATION), greaterThan(0L));
@@ -369,7 +449,12 @@ public void testDiversifiedAggProfile() {
assertThat(diversifyAggResult.getTime(), greaterThan(0L));
Map<String, Long> diversifyBreakdown = diversifyAggResult.getTimeBreakdown();
assertThat(diversifyBreakdown, notNullValue());
- assertThat(diversifyBreakdown.keySet(), equalTo(BREAKDOWN_KEYS));
+ if (diversifyAggResult.getMaxSliceTime() != null) {
+ // concurrent segment search enabled
+ assertThat(diversifyBreakdown.keySet(), equalTo(CONCURRENT_SEARCH_BREAKDOWN_KEYS));
+ } else {
+ assertThat(diversifyBreakdown.keySet(), equalTo(BREAKDOWN_KEYS));
+ }
assertThat(diversifyBreakdown.get(INITIALIZE), greaterThan(0L));
assertThat(diversifyBreakdown.get(BUILD_LEAF_COLLECTOR), greaterThan(0L));
assertThat(diversifyBreakdown.get(COLLECT), greaterThan(0L));
@@ -386,7 +471,12 @@ public void testDiversifiedAggProfile() {
assertThat(maxAggResult.getTime(), greaterThan(0L));
Map<String, Long> maxBreakdown = maxAggResult.getTimeBreakdown();
assertThat(maxBreakdown, notNullValue());
- assertThat(maxBreakdown.keySet(), equalTo(BREAKDOWN_KEYS));
+ if (maxAggResult.getMaxSliceTime() != null) {
+ // concurrent segment search enabled
+ assertThat(maxBreakdown.keySet(), equalTo(CONCURRENT_SEARCH_BREAKDOWN_KEYS));
+ } else {
+ assertThat(maxBreakdown.keySet(), equalTo(BREAKDOWN_KEYS));
+ }
assertThat(diversifyBreakdown.get(INITIALIZE), greaterThan(0L));
assertThat(diversifyBreakdown.get(BUILD_LEAF_COLLECTOR), greaterThan(0L));
assertThat(diversifyBreakdown.get(COLLECT), greaterThan(0L));
@@ -439,7 +529,12 @@ public void testComplexProfile() {
assertThat(histoAggResult.getTime(), greaterThan(0L));
Map<String, Long> histoBreakdown = histoAggResult.getTimeBreakdown();
assertThat(histoBreakdown, notNullValue());
- assertThat(histoBreakdown.keySet(), equalTo(BREAKDOWN_KEYS));
+ if (histoAggResult.getMaxSliceTime() != null) {
+ // concurrent segment search enabled
+ assertThat(histoBreakdown.keySet(), equalTo(CONCURRENT_SEARCH_BREAKDOWN_KEYS));
+ } else {
+ assertThat(histoBreakdown.keySet(), equalTo(BREAKDOWN_KEYS));
+ }
assertThat(histoBreakdown.get(INITIALIZE), greaterThan(0L));
assertThat(histoBreakdown.get(BUILD_LEAF_COLLECTOR), greaterThan(0L));
assertThat(histoBreakdown.get(COLLECT), greaterThan(0L));
@@ -462,7 +557,12 @@ public void testComplexProfile() {
assertThat(tagsAggResult.getTime(), greaterThan(0L));
Map<String, Long> tagsBreakdown = tagsAggResult.getTimeBreakdown();
assertThat(tagsBreakdown, notNullValue());
- assertThat(tagsBreakdown.keySet(), equalTo(BREAKDOWN_KEYS));
+ if (tagsAggResult.getMaxSliceTime() != null) {
+ // concurrent segment search enabled
+ assertThat(tagsBreakdown.keySet(), equalTo(CONCURRENT_SEARCH_BREAKDOWN_KEYS));
+ } else {
+ assertThat(tagsBreakdown.keySet(), equalTo(BREAKDOWN_KEYS));
+ }
assertThat(tagsBreakdown.get(INITIALIZE), greaterThan(0L));
assertThat(tagsBreakdown.get(BUILD_LEAF_COLLECTOR), greaterThan(0L));
assertThat(tagsBreakdown.get(COLLECT), greaterThan(0L));
@@ -482,7 +582,12 @@ public void testComplexProfile() {
assertThat(avgAggResult.getTime(), greaterThan(0L));
Map<String, Long> avgBreakdown = avgAggResult.getTimeBreakdown();
assertThat(avgBreakdown, notNullValue());
- assertThat(avgBreakdown.keySet(), equalTo(BREAKDOWN_KEYS));
+ if (avgAggResult.getMaxSliceTime() != null) {
+ // concurrent segment search enabled
+ assertThat(avgBreakdown.keySet(), equalTo(CONCURRENT_SEARCH_BREAKDOWN_KEYS));
+ } else {
+ assertThat(avgBreakdown.keySet(), equalTo(BREAKDOWN_KEYS));
+ }
assertThat(avgBreakdown.get(INITIALIZE), greaterThan(0L));
assertThat(avgBreakdown.get(BUILD_LEAF_COLLECTOR), greaterThan(0L));
assertThat(avgBreakdown.get(COLLECT), greaterThan(0L));
@@ -498,7 +603,12 @@ public void testComplexProfile() {
assertThat(maxAggResult.getTime(), greaterThan(0L));
Map<String, Long> maxBreakdown = maxAggResult.getTimeBreakdown();
assertThat(maxBreakdown, notNullValue());
- assertThat(maxBreakdown.keySet(), equalTo(BREAKDOWN_KEYS));
+ if (maxAggResult.getMaxSliceTime() != null) {
+ // concurrent segment search enabled
+ assertThat(maxBreakdown.keySet(), equalTo(CONCURRENT_SEARCH_BREAKDOWN_KEYS));
+ } else {
+ assertThat(maxBreakdown.keySet(), equalTo(BREAKDOWN_KEYS));
+ }
assertThat(maxBreakdown.get(INITIALIZE), greaterThan(0L));
assertThat(maxBreakdown.get(BUILD_LEAF_COLLECTOR), greaterThan(0L));
assertThat(maxBreakdown.get(COLLECT), greaterThan(0L));
@@ -514,7 +624,12 @@ public void testComplexProfile() {
assertThat(stringsAggResult.getTime(), greaterThan(0L));
Map<String, Long> stringsBreakdown = stringsAggResult.getTimeBreakdown();
assertThat(stringsBreakdown, notNullValue());
- assertThat(stringsBreakdown.keySet(), equalTo(BREAKDOWN_KEYS));
+ if (stringsAggResult.getMaxSliceTime() != null) {
+ // concurrent segment search enabled
+ assertThat(stringsBreakdown.keySet(), equalTo(CONCURRENT_SEARCH_BREAKDOWN_KEYS));
+ } else {
+ assertThat(stringsBreakdown.keySet(), equalTo(BREAKDOWN_KEYS));
+ }
assertThat(stringsBreakdown.get(INITIALIZE), greaterThan(0L));
assertThat(stringsBreakdown.get(BUILD_LEAF_COLLECTOR), greaterThan(0L));
assertThat(stringsBreakdown.get(COLLECT), greaterThan(0L));
@@ -534,7 +649,12 @@ public void testComplexProfile() {
assertThat(avgAggResult.getTime(), greaterThan(0L));
avgBreakdown = avgAggResult.getTimeBreakdown();
assertThat(avgBreakdown, notNullValue());
- assertThat(avgBreakdown.keySet(), equalTo(BREAKDOWN_KEYS));
+ if (avgAggResult.getMaxSliceTime() != null) {
+ // concurrent segment search enabled
+ assertThat(avgBreakdown.keySet(), equalTo(CONCURRENT_SEARCH_BREAKDOWN_KEYS));
+ } else {
+ assertThat(avgBreakdown.keySet(), equalTo(BREAKDOWN_KEYS));
+ }
assertThat(avgBreakdown.get(INITIALIZE), greaterThan(0L));
assertThat(avgBreakdown.get(BUILD_LEAF_COLLECTOR), greaterThan(0L));
assertThat(avgBreakdown.get(COLLECT), greaterThan(0L));
@@ -550,7 +670,12 @@ public void testComplexProfile() {
assertThat(maxAggResult.getTime(), greaterThan(0L));
maxBreakdown = maxAggResult.getTimeBreakdown();
assertThat(maxBreakdown, notNullValue());
- assertThat(maxBreakdown.keySet(), equalTo(BREAKDOWN_KEYS));
+ if (maxAggResult.getMaxSliceTime() != null) {
+ // concurrent segment search enabled
+ assertThat(maxBreakdown.keySet(), equalTo(CONCURRENT_SEARCH_BREAKDOWN_KEYS));
+ } else {
+ assertThat(maxBreakdown.keySet(), equalTo(BREAKDOWN_KEYS));
+ }
assertThat(maxBreakdown.get(INITIALIZE), greaterThan(0L));
assertThat(maxBreakdown.get(BUILD_LEAF_COLLECTOR), greaterThan(0L));
assertThat(maxBreakdown.get(COLLECT), greaterThan(0L));
@@ -567,7 +692,12 @@ public void testComplexProfile() {
assertThat(tagsAggResult.getTime(), greaterThan(0L));
tagsBreakdown = tagsAggResult.getTimeBreakdown();
assertThat(tagsBreakdown, notNullValue());
- assertThat(tagsBreakdown.keySet(), equalTo(BREAKDOWN_KEYS));
+ if (tagsAggResult.getMaxSliceTime() != null) {
+ // concurrent segment search enabled
+ assertThat(tagsBreakdown.keySet(), equalTo(CONCURRENT_SEARCH_BREAKDOWN_KEYS));
+ } else {
+ assertThat(tagsBreakdown.keySet(), equalTo(BREAKDOWN_KEYS));
+ }
assertThat(tagsBreakdown.get(INITIALIZE), greaterThan(0L));
assertThat(tagsBreakdown.get(BUILD_LEAF_COLLECTOR), greaterThan(0L));
assertThat(tagsBreakdown.get(COLLECT), greaterThan(0L));
@@ -587,7 +717,12 @@ public void testComplexProfile() {
assertThat(avgAggResult.getTime(), greaterThan(0L));
avgBreakdown = avgAggResult.getTimeBreakdown();
assertThat(avgBreakdown, notNullValue());
- assertThat(avgBreakdown.keySet(), equalTo(BREAKDOWN_KEYS));
+ if (avgAggResult.getMaxSliceTime() != null) {
+ // concurrent segment search enabled
+ assertThat(avgBreakdown.keySet(), equalTo(CONCURRENT_SEARCH_BREAKDOWN_KEYS));
+ } else {
+ assertThat(avgBreakdown.keySet(), equalTo(BREAKDOWN_KEYS));
+ }
assertThat(avgBreakdown.get(INITIALIZE), greaterThan(0L));
assertThat(avgBreakdown.get(BUILD_LEAF_COLLECTOR), greaterThan(0L));
assertThat(avgBreakdown.get(COLLECT), greaterThan(0L));
@@ -603,7 +738,12 @@ public void testComplexProfile() {
assertThat(maxAggResult.getTime(), greaterThan(0L));
maxBreakdown = maxAggResult.getTimeBreakdown();
assertThat(maxBreakdown, notNullValue());
- assertThat(maxBreakdown.keySet(), equalTo(BREAKDOWN_KEYS));
+ if (maxAggResult.getMaxSliceTime() != null) {
+ // concurrent segment search enabled
+ assertThat(maxBreakdown.keySet(), equalTo(CONCURRENT_SEARCH_BREAKDOWN_KEYS));
+ } else {
+ assertThat(maxBreakdown.keySet(), equalTo(BREAKDOWN_KEYS));
+ }
assertThat(maxBreakdown.get(INITIALIZE), greaterThan(0L));
assertThat(maxBreakdown.get(BUILD_LEAF_COLLECTOR), greaterThan(0L));
assertThat(maxBreakdown.get(COLLECT), greaterThan(0L));
@@ -700,7 +840,12 @@ public void testGlobalAggWithStatsSubAggregatorProfile() {
assertThat(globalAggResult.getTime(), greaterThan(0L));
Map<String, Long> breakdown = globalAggResult.getTimeBreakdown();
assertThat(breakdown, notNullValue());
- assertEquals(BREAKDOWN_KEYS, breakdown.keySet());
+ if (globalAggResult.getMaxSliceTime() != null) {
+ // concurrent segment search enabled
+ assertEquals(CONCURRENT_SEARCH_BREAKDOWN_KEYS, breakdown.keySet());
+ } else {
+ assertEquals(BREAKDOWN_KEYS, breakdown.keySet());
+ }
assertThat(breakdown.get(INITIALIZE), greaterThan(0L));
assertThat(breakdown.get(COLLECT), greaterThan(0L));
assertThat(breakdown.get(BUILD_AGGREGATION).longValue(), greaterThan(0L));
diff --git a/server/src/internalClusterTest/java/org/opensearch/search/query/ExistsIT.java b/server/src/internalClusterTest/java/org/opensearch/search/query/ExistsIT.java
index e1724d496fa91..a531fd0eeb5a9 100644
--- a/server/src/internalClusterTest/java/org/opensearch/search/query/ExistsIT.java
+++ b/server/src/internalClusterTest/java/org/opensearch/search/query/ExistsIT.java
@@ -35,7 +35,6 @@
import org.opensearch.action.explain.ExplainResponse;
import org.opensearch.action.index.IndexRequestBuilder;
import org.opensearch.action.search.SearchResponse;
-import org.opensearch.common.Strings;
import org.opensearch.core.xcontent.XContentBuilder;
import org.opensearch.common.xcontent.XContentFactory;
import org.opensearch.common.xcontent.json.JsonXContent;
@@ -140,14 +139,7 @@ public void testExists() throws Exception {
assertSearchResponse(resp);
try {
assertEquals(
- String.format(
- Locale.ROOT,
- "exists(%s, %d) mapping: %s response: %s",
- fieldName,
- count,
- Strings.toString(mapping),
- resp
- ),
+ String.format(Locale.ROOT, "exists(%s, %d) mapping: %s response: %s", fieldName, count, mapping.toString(), resp),
count,
resp.getHits().getTotalHits().value
);
diff --git a/server/src/internalClusterTest/java/org/opensearch/search/query/SimpleQueryStringIT.java b/server/src/internalClusterTest/java/org/opensearch/search/query/SimpleQueryStringIT.java
index 46b81ae2e750d..e2491600a9261 100644
--- a/server/src/internalClusterTest/java/org/opensearch/search/query/SimpleQueryStringIT.java
+++ b/server/src/internalClusterTest/java/org/opensearch/search/query/SimpleQueryStringIT.java
@@ -41,7 +41,6 @@
import org.opensearch.action.index.IndexRequestBuilder;
import org.opensearch.action.search.SearchPhaseExecutionException;
import org.opensearch.action.search.SearchResponse;
-import org.opensearch.common.Strings;
import org.opensearch.common.settings.Settings;
import org.opensearch.core.xcontent.XContentBuilder;
import org.opensearch.common.xcontent.XContentFactory;
@@ -373,17 +372,16 @@ public void testLenientFlagBeingTooLenient() throws Exception {
}
public void testSimpleQueryStringAnalyzeWildcard() throws ExecutionException, InterruptedException, IOException {
- String mapping = Strings.toString(
- XContentFactory.jsonBuilder()
- .startObject()
- .startObject("properties")
- .startObject("location")
- .field("type", "text")
- .field("analyzer", "standard")
- .endObject()
- .endObject()
- .endObject()
- );
+ String mapping = XContentFactory.jsonBuilder()
+ .startObject()
+ .startObject("properties")
+ .startObject("location")
+ .field("type", "text")
+ .field("analyzer", "standard")
+ .endObject()
+ .endObject()
+ .endObject()
+ .toString();
CreateIndexRequestBuilder mappingRequest = client().admin().indices().prepareCreate("test1").setMapping(mapping);
mappingRequest.get();
@@ -420,17 +418,16 @@ public void testSimpleQueryStringOnIndexMetaField() throws Exception {
public void testEmptySimpleQueryStringWithAnalysis() throws Exception {
// https://github.com/elastic/elasticsearch/issues/18202
- String mapping = Strings.toString(
- XContentFactory.jsonBuilder()
- .startObject()
- .startObject("properties")
- .startObject("body")
- .field("type", "text")
- .field("analyzer", "stop")
- .endObject()
- .endObject()
- .endObject()
- );
+ String mapping = XContentFactory.jsonBuilder()
+ .startObject()
+ .startObject("properties")
+ .startObject("body")
+ .field("type", "text")
+ .field("analyzer", "stop")
+ .endObject()
+ .endObject()
+ .endObject()
+ .toString();
CreateIndexRequestBuilder mappingRequest = client().admin().indices().prepareCreate("test1").setMapping(mapping);
mappingRequest.get();
diff --git a/server/src/internalClusterTest/java/org/opensearch/search/slice/SearchSliceIT.java b/server/src/internalClusterTest/java/org/opensearch/search/slice/SearchSliceIT.java
index cc7e620f33216..dcc1136a6d267 100644
--- a/server/src/internalClusterTest/java/org/opensearch/search/slice/SearchSliceIT.java
+++ b/server/src/internalClusterTest/java/org/opensearch/search/slice/SearchSliceIT.java
@@ -42,7 +42,6 @@
import org.opensearch.action.search.SearchPhaseExecutionException;
import org.opensearch.action.search.SearchRequestBuilder;
import org.opensearch.action.search.SearchResponse;
-import org.opensearch.common.Strings;
import org.opensearch.common.settings.Settings;
import org.opensearch.common.unit.TimeValue;
import org.opensearch.core.xcontent.XContentBuilder;
@@ -68,25 +67,24 @@
public class SearchSliceIT extends OpenSearchIntegTestCase {
private void setupIndex(int numDocs, int numberOfShards) throws IOException, ExecutionException, InterruptedException {
- String mapping = Strings.toString(
- XContentFactory.jsonBuilder()
- .startObject()
- .startObject("properties")
- .startObject("invalid_random_kw")
- .field("type", "keyword")
- .field("doc_values", "false")
- .endObject()
- .startObject("random_int")
- .field("type", "integer")
- .field("doc_values", "true")
- .endObject()
- .startObject("invalid_random_int")
- .field("type", "integer")
- .field("doc_values", "false")
- .endObject()
- .endObject()
- .endObject()
- );
+ String mapping = XContentFactory.jsonBuilder()
+ .startObject()
+ .startObject("properties")
+ .startObject("invalid_random_kw")
+ .field("type", "keyword")
+ .field("doc_values", "false")
+ .endObject()
+ .startObject("random_int")
+ .field("type", "integer")
+ .field("doc_values", "true")
+ .endObject()
+ .startObject("invalid_random_int")
+ .field("type", "integer")
+ .field("doc_values", "false")
+ .endObject()
+ .endObject()
+ .endObject()
+ .toString();
assertAcked(
client().admin()
.indices()
diff --git a/server/src/internalClusterTest/java/org/opensearch/search/sort/SimpleSortIT.java b/server/src/internalClusterTest/java/org/opensearch/search/sort/SimpleSortIT.java
index 8ff0790e7cb48..2905ef97d521e 100644
--- a/server/src/internalClusterTest/java/org/opensearch/search/sort/SimpleSortIT.java
+++ b/server/src/internalClusterTest/java/org/opensearch/search/sort/SimpleSortIT.java
@@ -35,7 +35,6 @@
import org.opensearch.action.index.IndexRequestBuilder;
import org.opensearch.action.search.SearchResponse;
import org.opensearch.action.search.ShardSearchFailure;
-import org.opensearch.common.Strings;
import org.opensearch.common.geo.GeoPoint;
import org.opensearch.common.geo.GeoUtils;
import org.opensearch.index.fielddata.ScriptDocValues;
@@ -237,24 +236,23 @@ public void testSimpleSorts() throws Exception {
}
public void testSortMinValueScript() throws IOException {
- String mapping = Strings.toString(
- jsonBuilder().startObject()
- .startObject("properties")
- .startObject("lvalue")
- .field("type", "long")
- .endObject()
- .startObject("dvalue")
- .field("type", "double")
- .endObject()
- .startObject("svalue")
- .field("type", "keyword")
- .endObject()
- .startObject("gvalue")
- .field("type", "geo_point")
- .endObject()
- .endObject()
- .endObject()
- );
+ String mapping = jsonBuilder().startObject()
+ .startObject("properties")
+ .startObject("lvalue")
+ .field("type", "long")
+ .endObject()
+ .startObject("dvalue")
+ .field("type", "double")
+ .endObject()
+ .startObject("svalue")
+ .field("type", "keyword")
+ .endObject()
+ .startObject("gvalue")
+ .field("type", "geo_point")
+ .endObject()
+ .endObject()
+ .endObject()
+ .toString();
assertAcked(prepareCreate("test").setMapping(mapping));
ensureGreen();
@@ -351,18 +349,17 @@ public void testDocumentsWithNullValue() throws Exception {
// TODO: sort shouldn't fail when sort field is mapped dynamically
// We have to specify mapping explicitly because by the time search is performed dynamic mapping might not
// be propagated to all nodes yet and sort operation fail when the sort field is not defined
- String mapping = Strings.toString(
- jsonBuilder().startObject()
- .startObject("properties")
- .startObject("id")
- .field("type", "keyword")
- .endObject()
- .startObject("svalue")
- .field("type", "keyword")
- .endObject()
- .endObject()
- .endObject()
- );
+ String mapping = jsonBuilder().startObject()
+ .startObject("properties")
+ .startObject("id")
+ .field("type", "keyword")
+ .endObject()
+ .startObject("svalue")
+ .field("type", "keyword")
+ .endObject()
+ .endObject()
+ .endObject()
+ .toString();
assertAcked(prepareCreate("test").setMapping(mapping));
ensureGreen();
diff --git a/server/src/internalClusterTest/java/org/opensearch/search/suggest/CompletionSuggestSearchIT.java b/server/src/internalClusterTest/java/org/opensearch/search/suggest/CompletionSuggestSearchIT.java
index 8eb957733944d..764d40eb7bc8e 100644
--- a/server/src/internalClusterTest/java/org/opensearch/search/suggest/CompletionSuggestSearchIT.java
+++ b/server/src/internalClusterTest/java/org/opensearch/search/suggest/CompletionSuggestSearchIT.java
@@ -78,7 +78,7 @@
import static org.opensearch.action.support.WriteRequest.RefreshPolicy.IMMEDIATE;
import static org.opensearch.cluster.metadata.IndexMetadata.SETTING_NUMBER_OF_REPLICAS;
import static org.opensearch.cluster.metadata.IndexMetadata.SETTING_NUMBER_OF_SHARDS;
-import static org.opensearch.common.util.CollectionUtils.iterableAsArrayList;
+import static org.opensearch.core.common.util.CollectionUtils.iterableAsArrayList;
import static org.opensearch.common.xcontent.XContentFactory.jsonBuilder;
import static org.opensearch.test.hamcrest.OpenSearchAssertions.assertAcked;
import static org.opensearch.test.hamcrest.OpenSearchAssertions.assertAllSuccessful;
diff --git a/server/src/internalClusterTest/java/org/opensearch/search/suggest/SuggestSearchIT.java b/server/src/internalClusterTest/java/org/opensearch/search/suggest/SuggestSearchIT.java
index 3ffd6ce66831e..4aaa5bf5af852 100644
--- a/server/src/internalClusterTest/java/org/opensearch/search/suggest/SuggestSearchIT.java
+++ b/server/src/internalClusterTest/java/org/opensearch/search/suggest/SuggestSearchIT.java
@@ -38,7 +38,6 @@
import org.opensearch.action.search.SearchPhaseExecutionException;
import org.opensearch.action.search.SearchRequestBuilder;
import org.opensearch.action.search.SearchResponse;
-import org.opensearch.common.Strings;
import org.opensearch.common.settings.Settings;
import org.opensearch.core.xcontent.XContentBuilder;
import org.opensearch.common.xcontent.XContentFactory;
@@ -1305,14 +1304,13 @@ public void testPhraseSuggesterCollate() throws InterruptedException, ExecutionE
assertSuggestionSize(searchSuggest, 0, 10, "title");
// suggest with collate
- String filterString = Strings.toString(
- XContentFactory.jsonBuilder()
- .startObject()
- .startObject("match_phrase")
- .field("{{field}}", "{{suggestion}}")
- .endObject()
- .endObject()
- );
+ String filterString = XContentFactory.jsonBuilder()
+ .startObject()
+ .startObject("match_phrase")
+ .field("{{field}}", "{{suggestion}}")
+ .endObject()
+ .endObject()
+ .toString();
PhraseSuggestionBuilder filteredQuerySuggest = suggest.collateQuery(filterString);
filteredQuerySuggest.collateParams(Collections.singletonMap("field", "title"));
searchSuggest = searchSuggest("united states house of representatives elections in washington 2006", "title", filteredQuerySuggest);
@@ -1325,9 +1323,13 @@ public void testPhraseSuggesterCollate() throws InterruptedException, ExecutionE
NumShards numShards = getNumShards("test");
// collate suggest with bad query
- String incorrectFilterString = Strings.toString(
- XContentFactory.jsonBuilder().startObject().startObject("test").field("title", "{{suggestion}}").endObject().endObject()
- );
+ String incorrectFilterString = XContentFactory.jsonBuilder()
+ .startObject()
+ .startObject("test")
+ .field("title", "{{suggestion}}")
+ .endObject()
+ .endObject()
+ .toString();
PhraseSuggestionBuilder incorrectFilteredSuggest = suggest.collateQuery(incorrectFilterString);
Map<String, SuggestionBuilder<?>> namedSuggestion = new HashMap<>();
namedSuggestion.put("my_title_suggestion", incorrectFilteredSuggest);
@@ -1339,9 +1341,13 @@ public void testPhraseSuggesterCollate() throws InterruptedException, ExecutionE
}
// suggest with collation
- String filterStringAsFilter = Strings.toString(
- XContentFactory.jsonBuilder().startObject().startObject("match_phrase").field("title", "{{suggestion}}").endObject().endObject()
- );
+ String filterStringAsFilter = XContentFactory.jsonBuilder()
+ .startObject()
+ .startObject("match_phrase")
+ .field("title", "{{suggestion}}")
+ .endObject()
+ .endObject()
+ .toString();
PhraseSuggestionBuilder filteredFilterSuggest = suggest.collateQuery(filterStringAsFilter);
searchSuggest = searchSuggest(
@@ -1352,9 +1358,13 @@ public void testPhraseSuggesterCollate() throws InterruptedException, ExecutionE
assertSuggestionSize(searchSuggest, 0, 2, "title");
// collate suggest with bad query
- String filterStr = Strings.toString(
- XContentFactory.jsonBuilder().startObject().startObject("pprefix").field("title", "{{suggestion}}").endObject().endObject()
- );
+ String filterStr = XContentFactory.jsonBuilder()
+ .startObject()
+ .startObject("pprefix")
+ .field("title", "{{suggestion}}")
+ .endObject()
+ .endObject()
+ .toString();
suggest.collateQuery(filterStr);
try {
@@ -1365,14 +1375,13 @@ public void testPhraseSuggesterCollate() throws InterruptedException, ExecutionE
}
// collate script failure due to no additional params
- String collateWithParams = Strings.toString(
- XContentFactory.jsonBuilder()
- .startObject()
- .startObject("{{query_type}}")
- .field("{{query_field}}", "{{suggestion}}")
- .endObject()
- .endObject()
- );
+ String collateWithParams = XContentFactory.jsonBuilder()
+ .startObject()
+ .startObject("{{query_type}}")
+ .field("{{query_field}}", "{{suggestion}}")
+ .endObject()
+ .endObject()
+ .toString();
try {
searchSuggest("united states house of representatives elections in washington 2006", numShards.numPrimaries, namedSuggestion);
diff --git a/server/src/internalClusterTest/java/org/opensearch/snapshots/CloneSnapshotIT.java b/server/src/internalClusterTest/java/org/opensearch/snapshots/CloneSnapshotIT.java
index 3de982f89ac80..d19cb513ed38d 100644
--- a/server/src/internalClusterTest/java/org/opensearch/snapshots/CloneSnapshotIT.java
+++ b/server/src/internalClusterTest/java/org/opensearch/snapshots/CloneSnapshotIT.java
@@ -54,6 +54,7 @@
import java.util.Collection;
import java.util.List;
+import static org.opensearch.remotestore.RemoteStoreBaseIntegTestCase.remoteStoreClusterSettings;
import static org.opensearch.test.hamcrest.OpenSearchAssertions.assertAcked;
import static org.hamcrest.Matchers.containsString;
@@ -162,7 +163,8 @@ public void testCloneSnapshotIndex() throws Exception {
public void testCloneShallowSnapshotIndex() throws Exception {
disableRepoConsistencyCheck("This test uses remote store repository");
FeatureFlagSetter.set(FeatureFlags.REMOTE_STORE);
- internalCluster().startClusterManagerOnlyNode();
+ final String remoteStoreRepoName = "remote-store-repo-name";
+ internalCluster().startClusterManagerOnlyNode(remoteStoreClusterSettings(remoteStoreRepoName));
internalCluster().startDataOnlyNode();
final String snapshotRepoName = "snapshot-repo-name";
@@ -174,14 +176,13 @@ public void testCloneShallowSnapshotIndex() throws Exception {
createRepository(shallowSnapshotRepoName, "fs", snapshotRepoSettingsForShallowCopy(shallowSnapshotRepoPath));
final Path remoteStoreRepoPath = randomRepoPath();
- final String remoteStoreRepoName = "remote-store-repo-name";
createRepository(remoteStoreRepoName, "fs", remoteStoreRepoPath);
final String indexName = "index-1";
createIndexWithRandomDocs(indexName, randomIntBetween(5, 10));
final String remoteStoreEnabledIndexName = "remote-index-1";
- final Settings remoteStoreEnabledIndexSettings = getRemoteStoreBackedIndexSettings(remoteStoreRepoName);
+ final Settings remoteStoreEnabledIndexSettings = getRemoteStoreBackedIndexSettings();
createIndex(remoteStoreEnabledIndexName, remoteStoreEnabledIndexSettings);
indexRandomDocs(remoteStoreEnabledIndexName, randomIntBetween(5, 10));
@@ -209,7 +210,10 @@ public void testCloneShallowSnapshotIndex() throws Exception {
public void testShallowCloneNameAvailability() throws Exception {
disableRepoConsistencyCheck("This test uses remote store repository");
FeatureFlagSetter.set(FeatureFlags.REMOTE_STORE);
- internalCluster().startClusterManagerOnlyNode(LARGE_SNAPSHOT_POOL_SETTINGS);
+ final String remoteStoreRepoName = "remote-store-repo-name";
+ internalCluster().startClusterManagerOnlyNode(
+ Settings.builder().put(LARGE_SNAPSHOT_POOL_SETTINGS).put(remoteStoreClusterSettings(remoteStoreRepoName)).build()
+ );
internalCluster().startDataOnlyNode();
final String shallowSnapshotRepoName = "shallow-snapshot-repo-name";
@@ -217,14 +221,13 @@ public void testShallowCloneNameAvailability() throws Exception {
createRepository(shallowSnapshotRepoName, "fs", snapshotRepoSettingsForShallowCopy(shallowSnapshotRepoPath));
final Path remoteStoreRepoPath = randomRepoPath();
- final String remoteStoreRepoName = "remote-store-repo-name";
createRepository(remoteStoreRepoName, "fs", remoteStoreRepoPath);
final String indexName = "index-1";
createIndexWithRandomDocs(indexName, randomIntBetween(5, 10));
final String remoteStoreEnabledIndexName = "remote-index-1";
- final Settings remoteStoreEnabledIndexSettings = getRemoteStoreBackedIndexSettings(remoteStoreRepoName);
+ final Settings remoteStoreEnabledIndexSettings = getRemoteStoreBackedIndexSettings();
createIndex(remoteStoreEnabledIndexName, remoteStoreEnabledIndexSettings);
indexRandomDocs(remoteStoreEnabledIndexName, randomIntBetween(5, 10));
@@ -244,7 +247,8 @@ public void testShallowCloneNameAvailability() throws Exception {
public void testCloneAfterRepoShallowSettingEnabled() throws Exception {
disableRepoConsistencyCheck("This test uses remote store repository");
FeatureFlagSetter.set(FeatureFlags.REMOTE_STORE);
- internalCluster().startClusterManagerOnlyNode();
+ final String remoteStoreRepoName = "remote-store-repo-name";
+ internalCluster().startClusterManagerOnlyNode(remoteStoreClusterSettings(remoteStoreRepoName));
internalCluster().startDataOnlyNode();
final String snapshotRepoName = "snapshot-repo-name";
@@ -252,14 +256,13 @@ public void testCloneAfterRepoShallowSettingEnabled() throws Exception {
createRepository(snapshotRepoName, "fs", snapshotRepoPath);
final Path remoteStoreRepoPath = randomRepoPath();
- final String remoteStoreRepoName = "remote-store-repo-name";
createRepository(remoteStoreRepoName, "fs", remoteStoreRepoPath);
final String indexName = "index-1";
createIndexWithRandomDocs(indexName, randomIntBetween(5, 10));
final String remoteStoreEnabledIndexName = "remote-index-1";
- final Settings remoteStoreEnabledIndexSettings = getRemoteStoreBackedIndexSettings(remoteStoreRepoName);
+ final Settings remoteStoreEnabledIndexSettings = getRemoteStoreBackedIndexSettings();
createIndex(remoteStoreEnabledIndexName, remoteStoreEnabledIndexSettings);
indexRandomDocs(remoteStoreEnabledIndexName, randomIntBetween(5, 10));
@@ -281,7 +284,8 @@ public void testCloneAfterRepoShallowSettingEnabled() throws Exception {
public void testCloneAfterRepoShallowSettingDisabled() throws Exception {
disableRepoConsistencyCheck("This test uses remote store repository");
FeatureFlagSetter.set(FeatureFlags.REMOTE_STORE);
- internalCluster().startClusterManagerOnlyNode();
+ final String remoteStoreRepoName = "remote-store-repo-name";
+ internalCluster().startClusterManagerOnlyNode(remoteStoreClusterSettings(remoteStoreRepoName));
internalCluster().startDataOnlyNode();
final String snapshotRepoName = "snapshot-repo-name";
@@ -289,14 +293,13 @@ public void testCloneAfterRepoShallowSettingDisabled() throws Exception {
createRepository(snapshotRepoName, "fs", snapshotRepoSettingsForShallowCopy(snapshotRepoPath));
final Path remoteStoreRepoPath = randomRepoPath();
- final String remoteStoreRepoName = "remote-store-repo-name";
createRepository(remoteStoreRepoName, "fs", remoteStoreRepoPath);
final String indexName = "index-1";
createIndexWithRandomDocs(indexName, randomIntBetween(5, 10));
final String remoteStoreEnabledIndexName = "remote-index-1";
- final Settings remoteStoreEnabledIndexSettings = getRemoteStoreBackedIndexSettings(remoteStoreRepoName);
+ final Settings remoteStoreEnabledIndexSettings = getRemoteStoreBackedIndexSettings();
createIndex(remoteStoreEnabledIndexName, remoteStoreEnabledIndexSettings);
indexRandomDocs(remoteStoreEnabledIndexName, randomIntBetween(5, 10));
diff --git a/server/src/internalClusterTest/java/org/opensearch/snapshots/CorruptedBlobStoreRepositoryIT.java b/server/src/internalClusterTest/java/org/opensearch/snapshots/CorruptedBlobStoreRepositoryIT.java
index 483d698f3c9a4..861279c4000d8 100644
--- a/server/src/internalClusterTest/java/org/opensearch/snapshots/CorruptedBlobStoreRepositoryIT.java
+++ b/server/src/internalClusterTest/java/org/opensearch/snapshots/CorruptedBlobStoreRepositoryIT.java
@@ -41,7 +41,7 @@
import org.opensearch.cluster.metadata.Metadata;
import org.opensearch.cluster.metadata.RepositoriesMetadata;
import org.opensearch.common.settings.Settings;
-import org.opensearch.common.unit.ByteSizeUnit;
+import org.opensearch.core.common.unit.ByteSizeUnit;
import org.opensearch.common.util.io.IOUtils;
import org.opensearch.repositories.IndexId;
import org.opensearch.repositories.RepositoriesService;
diff --git a/server/src/internalClusterTest/java/org/opensearch/snapshots/DeleteSnapshotIT.java b/server/src/internalClusterTest/java/org/opensearch/snapshots/DeleteSnapshotIT.java
index 2688449294f3d..d38620723a8f4 100644
--- a/server/src/internalClusterTest/java/org/opensearch/snapshots/DeleteSnapshotIT.java
+++ b/server/src/internalClusterTest/java/org/opensearch/snapshots/DeleteSnapshotIT.java
@@ -10,30 +10,39 @@
import org.opensearch.action.ActionFuture;
import org.opensearch.action.admin.cluster.snapshots.create.CreateSnapshotResponse;
+import org.opensearch.action.support.master.AcknowledgedResponse;
import org.opensearch.client.Client;
+import org.opensearch.cluster.metadata.IndexMetadata;
import org.opensearch.common.UUIDs;
import org.opensearch.common.settings.Settings;
import org.opensearch.common.unit.TimeValue;
import org.opensearch.common.util.FeatureFlags;
+import org.opensearch.remotestore.RemoteStoreBaseIntegTestCase;
import org.opensearch.test.FeatureFlagSetter;
import org.opensearch.test.OpenSearchIntegTestCase;
import java.nio.file.Path;
import java.util.ArrayList;
+import java.util.Arrays;
import java.util.List;
import java.util.Locale;
+import java.util.concurrent.TimeUnit;
import java.util.stream.Stream;
import static org.hamcrest.Matchers.is;
+import static org.opensearch.remotestore.RemoteStoreBaseIntegTestCase.remoteStoreClusterSettings;
+import static org.hamcrest.Matchers.comparesEqualTo;
import static org.opensearch.test.hamcrest.OpenSearchAssertions.assertAcked;
@OpenSearchIntegTestCase.ClusterScope(scope = OpenSearchIntegTestCase.Scope.TEST, numDataNodes = 0)
public class DeleteSnapshotIT extends AbstractSnapshotIntegTestCase {
+ private static final String REMOTE_REPO_NAME = "remote-store-repo-name";
+
public void testDeleteSnapshot() throws Exception {
disableRepoConsistencyCheck("Remote store repository is being used in the test");
FeatureFlagSetter.set(FeatureFlags.REMOTE_STORE);
- internalCluster().startClusterManagerOnlyNode();
+ internalCluster().startClusterManagerOnlyNode(remoteStoreClusterSettings(REMOTE_REPO_NAME));
internalCluster().startDataOnlyNode();
final String snapshotRepoName = "snapshot-repo-name";
@@ -41,20 +50,19 @@ public void testDeleteSnapshot() throws Exception {
createRepository(snapshotRepoName, "fs", snapshotRepoPath);
final Path remoteStoreRepoPath = randomRepoPath();
- final String remoteStoreRepoName = "remote-store-repo-name";
- createRepository(remoteStoreRepoName, "fs", remoteStoreRepoPath);
+ createRepository(REMOTE_REPO_NAME, "fs", remoteStoreRepoPath);
final String indexName = "index-1";
createIndexWithRandomDocs(indexName, randomIntBetween(5, 10));
final String remoteStoreEnabledIndexName = "remote-index-1";
- final Settings remoteStoreEnabledIndexSettings = getRemoteStoreBackedIndexSettings(remoteStoreRepoName);
+ final Settings remoteStoreEnabledIndexSettings = getRemoteStoreBackedIndexSettings();
createIndex(remoteStoreEnabledIndexName, remoteStoreEnabledIndexSettings);
indexRandomDocs(remoteStoreEnabledIndexName, randomIntBetween(5, 10));
final String snapshot = "snapshot";
createFullSnapshot(snapshotRepoName, snapshot);
- assert (getLockFilesInRemoteStore(remoteStoreEnabledIndexName, remoteStoreRepoName).length == 0);
+ assert (getLockFilesInRemoteStore(remoteStoreEnabledIndexName, REMOTE_REPO_NAME).length == 0);
assert (getRepositoryData(snapshotRepoName).getSnapshotIds().size() == 1);
assertAcked(startDeleteSnapshot(snapshotRepoName, snapshot).get());
@@ -64,32 +72,31 @@ public void testDeleteSnapshot() throws Exception {
public void testDeleteShallowCopySnapshot() throws Exception {
disableRepoConsistencyCheck("Remote store repository is being used in the test");
FeatureFlagSetter.set(FeatureFlags.REMOTE_STORE);
- internalCluster().startClusterManagerOnlyNode();
+ internalCluster().startClusterManagerOnlyNode(remoteStoreClusterSettings(REMOTE_REPO_NAME));
internalCluster().startDataOnlyNode();
final String snapshotRepoName = "snapshot-repo-name";
createRepository(snapshotRepoName, "fs", snapshotRepoSettingsForShallowCopy());
final Path remoteStoreRepoPath = randomRepoPath();
- final String remoteStoreRepoName = "remote-store-repo-name";
- createRepository(remoteStoreRepoName, "fs", remoteStoreRepoPath);
+ createRepository(REMOTE_REPO_NAME, "fs", remoteStoreRepoPath);
final String indexName = "index-1";
createIndexWithRandomDocs(indexName, randomIntBetween(5, 10));
final String remoteStoreEnabledIndexName = "remote-index-1";
- final Settings remoteStoreEnabledIndexSettings = getRemoteStoreBackedIndexSettings(remoteStoreRepoName);
+ final Settings remoteStoreEnabledIndexSettings = getRemoteStoreBackedIndexSettings();
createIndex(remoteStoreEnabledIndexName, remoteStoreEnabledIndexSettings);
indexRandomDocs(remoteStoreEnabledIndexName, randomIntBetween(5, 10));
final String shallowSnapshot = "shallow-snapshot";
createFullSnapshot(snapshotRepoName, shallowSnapshot);
- assert (getLockFilesInRemoteStore(remoteStoreEnabledIndexName, remoteStoreRepoName).length == 1);
+ assert (getLockFilesInRemoteStore(remoteStoreEnabledIndexName, REMOTE_REPO_NAME).length == 1);
assert (getRepositoryData(snapshotRepoName).getSnapshotIds().size() == 1);
assertAcked(startDeleteSnapshot(snapshotRepoName, shallowSnapshot).get());
assert (getRepositoryData(snapshotRepoName).getSnapshotIds().size() == 0);
- assert (getLockFilesInRemoteStore(remoteStoreEnabledIndexName, remoteStoreRepoName).length == 0);
+ assert (getLockFilesInRemoteStore(remoteStoreEnabledIndexName, REMOTE_REPO_NAME).length == 0);
}
// Deleting multiple shallow copy snapshots as part of single delete call with repo having only shallow copy snapshots.
@@ -97,23 +104,22 @@ public void testDeleteMultipleShallowCopySnapshotsCase1() throws Exception {
disableRepoConsistencyCheck("Remote store repository is being used in the test");
FeatureFlagSetter.set(FeatureFlags.REMOTE_STORE);
- internalCluster().startClusterManagerOnlyNode();
+ internalCluster().startClusterManagerOnlyNode(remoteStoreClusterSettings(REMOTE_REPO_NAME));
internalCluster().startDataOnlyNode();
final Client clusterManagerClient = internalCluster().clusterManagerClient();
ensureStableCluster(2);
+ final Path remoteStoreRepoPath = randomRepoPath();
+ createRepository(REMOTE_REPO_NAME, "fs", remoteStoreRepoPath);
+
final String snapshotRepoName = "snapshot-repo-name";
final Path snapshotRepoPath = randomRepoPath();
createRepository(snapshotRepoName, "mock", snapshotRepoSettingsForShallowCopy(snapshotRepoPath));
final String testIndex = "index-test";
createIndexWithContent(testIndex);
- final Path remoteStoreRepoPath = randomRepoPath();
- final String remoteStoreRepoName = "remote-store-repo-name";
- createRepository(remoteStoreRepoName, "fs", remoteStoreRepoPath);
-
final String remoteStoreEnabledIndexName = "remote-index-1";
- final Settings remoteStoreEnabledIndexSettings = getRemoteStoreBackedIndexSettings(remoteStoreRepoName);
+ final Settings remoteStoreEnabledIndexSettings = getRemoteStoreBackedIndexSettings();
createIndex(remoteStoreEnabledIndexName, remoteStoreEnabledIndexSettings);
indexRandomDocs(remoteStoreEnabledIndexName, randomIntBetween(5, 10));
@@ -122,7 +128,7 @@ public void testDeleteMultipleShallowCopySnapshotsCase1() throws Exception {
List shallowCopySnapshots = createNSnapshots(snapshotRepoName, totalShallowCopySnapshotsCount);
List snapshotsToBeDeleted = shallowCopySnapshots.subList(0, randomIntBetween(2, totalShallowCopySnapshotsCount));
int tobeDeletedSnapshotsCount = snapshotsToBeDeleted.size();
- assert (getLockFilesInRemoteStore(remoteStoreEnabledIndexName, remoteStoreRepoName).length == totalShallowCopySnapshotsCount);
+ assert (getLockFilesInRemoteStore(remoteStoreEnabledIndexName, REMOTE_REPO_NAME).length == totalShallowCopySnapshotsCount);
assert (getRepositoryData(snapshotRepoName).getSnapshotIds().size() == totalShallowCopySnapshotsCount);
// Deleting subset of shallow copy snapshots
assertAcked(
@@ -132,7 +138,7 @@ public void testDeleteMultipleShallowCopySnapshotsCase1() throws Exception {
.get()
);
assert (getRepositoryData(snapshotRepoName).getSnapshotIds().size() == totalShallowCopySnapshotsCount - tobeDeletedSnapshotsCount);
- assert (getLockFilesInRemoteStore(remoteStoreEnabledIndexName, remoteStoreRepoName).length == totalShallowCopySnapshotsCount
+ assert (getLockFilesInRemoteStore(remoteStoreEnabledIndexName, REMOTE_REPO_NAME).length == totalShallowCopySnapshotsCount
- tobeDeletedSnapshotsCount);
}
@@ -144,7 +150,7 @@ public void testDeleteMultipleShallowCopySnapshotsCase2() throws Exception {
disableRepoConsistencyCheck("Remote store repository is being used in the test");
FeatureFlagSetter.set(FeatureFlags.REMOTE_STORE);
- internalCluster().startClusterManagerOnlyNode();
+ internalCluster().startClusterManagerOnlyNode(remoteStoreClusterSettings(REMOTE_REPO_NAME));
final String dataNode = internalCluster().startDataOnlyNode();
ensureStableCluster(2);
final String clusterManagerNode = internalCluster().getClusterManagerName();
@@ -156,11 +162,10 @@ public void testDeleteMultipleShallowCopySnapshotsCase2() throws Exception {
createIndexWithContent(testIndex);
final Path remoteStoreRepoPath = randomRepoPath();
- final String remoteStoreRepoName = "remote-store-repo-name";
- createRepository(remoteStoreRepoName, "fs", remoteStoreRepoPath);
+ createRepository(REMOTE_REPO_NAME, "fs", remoteStoreRepoPath);
final String remoteStoreEnabledIndexName = "remote-index-1";
- final Settings remoteStoreEnabledIndexSettings = getRemoteStoreBackedIndexSettings(remoteStoreRepoName);
+ final Settings remoteStoreEnabledIndexSettings = getRemoteStoreBackedIndexSettings();
createIndex(remoteStoreEnabledIndexName, remoteStoreEnabledIndexSettings);
indexRandomDocs(remoteStoreEnabledIndexName, randomIntBetween(5, 10));
@@ -201,7 +206,7 @@ public void testDeleteMultipleShallowCopySnapshotsCase2() throws Exception {
int totalSnapshotsCount = totalFullCopySnapshotsCount + totalShallowCopySnapshotsCount;
- assert (getLockFilesInRemoteStore(remoteStoreEnabledIndexName, remoteStoreRepoName).length == totalShallowCopySnapshotsCount);
+ assert (getLockFilesInRemoteStore(remoteStoreEnabledIndexName, REMOTE_REPO_NAME).length == totalShallowCopySnapshotsCount);
assert (getRepositoryData(snapshotRepoName).getSnapshotIds().size() == totalSnapshotsCount);
// Deleting subset of shallow copy snapshots
assertAcked(
@@ -213,7 +218,7 @@ public void testDeleteMultipleShallowCopySnapshotsCase2() throws Exception {
totalSnapshotsCount -= tobeDeletedShallowCopySnapshotsCount;
totalShallowCopySnapshotsCount -= tobeDeletedShallowCopySnapshotsCount;
assert (getRepositoryData(snapshotRepoName).getSnapshotIds().size() == totalSnapshotsCount);
- assert (getLockFilesInRemoteStore(remoteStoreEnabledIndexName, remoteStoreRepoName).length == totalShallowCopySnapshotsCount);
+ assert (getLockFilesInRemoteStore(remoteStoreEnabledIndexName, REMOTE_REPO_NAME).length == totalShallowCopySnapshotsCount);
// Deleting subset of full copy snapshots
assertAcked(
@@ -224,7 +229,7 @@ public void testDeleteMultipleShallowCopySnapshotsCase2() throws Exception {
);
totalSnapshotsCount -= tobeDeletedFullCopySnapshotsCount;
assert (getRepositoryData(snapshotRepoName).getSnapshotIds().size() == totalSnapshotsCount);
- assert (getLockFilesInRemoteStore(remoteStoreEnabledIndexName, remoteStoreRepoName).length == totalShallowCopySnapshotsCount);
+ assert (getLockFilesInRemoteStore(remoteStoreEnabledIndexName, REMOTE_REPO_NAME).length == totalShallowCopySnapshotsCount);
}
// Deleting subset of shallow and full copy snapshots as part of single delete call and then deleting all snapshots in the repo.
@@ -233,7 +238,7 @@ public void testDeleteMultipleShallowCopySnapshotsCase3() throws Exception {
disableRepoConsistencyCheck("Remote store repository is being used in the test");
FeatureFlagSetter.set(FeatureFlags.REMOTE_STORE);
- internalCluster().startClusterManagerOnlyNode();
+ internalCluster().startClusterManagerOnlyNode(remoteStoreClusterSettings(REMOTE_REPO_NAME));
internalCluster().startDataOnlyNode();
final Client clusterManagerClient = internalCluster().clusterManagerClient();
ensureStableCluster(2);
@@ -241,15 +246,15 @@ public void testDeleteMultipleShallowCopySnapshotsCase3() throws Exception {
final String snapshotRepoName = "snapshot-repo-name";
final Path snapshotRepoPath = randomRepoPath();
createRepository(snapshotRepoName, "mock", snapshotRepoSettingsForShallowCopy(snapshotRepoPath));
- final String testIndex = "index-test";
- createIndexWithContent(testIndex);
final Path remoteStoreRepoPath = randomRepoPath();
- final String remoteStoreRepoName = "remote-store-repo-name";
- createRepository(remoteStoreRepoName, "fs", remoteStoreRepoPath);
+ createRepository(REMOTE_REPO_NAME, "fs", remoteStoreRepoPath);
+
+ final String testIndex = "index-test";
+ createIndexWithContent(testIndex);
final String remoteStoreEnabledIndexName = "remote-index-1";
- final Settings remoteStoreEnabledIndexSettings = getRemoteStoreBackedIndexSettings(remoteStoreRepoName);
+ final Settings remoteStoreEnabledIndexSettings = getRemoteStoreBackedIndexSettings();
createIndex(remoteStoreEnabledIndexName, remoteStoreEnabledIndexSettings);
indexRandomDocs(remoteStoreEnabledIndexName, randomIntBetween(5, 10));
@@ -268,7 +273,7 @@ public void testDeleteMultipleShallowCopySnapshotsCase3() throws Exception {
int totalSnapshotsCount = totalFullCopySnapshotsCount + totalShallowCopySnapshotsCount;
- assert (getLockFilesInRemoteStore(remoteStoreEnabledIndexName, remoteStoreRepoName).length == totalShallowCopySnapshotsCount);
+ assert (getLockFilesInRemoteStore(remoteStoreEnabledIndexName, REMOTE_REPO_NAME).length == totalShallowCopySnapshotsCount);
assert (getRepositoryData(snapshotRepoName).getSnapshotIds().size() == totalSnapshotsCount);
// Deleting subset of shallow copy snapshots and full copy snapshots
assertAcked(
@@ -283,12 +288,77 @@ public void testDeleteMultipleShallowCopySnapshotsCase3() throws Exception {
totalSnapshotsCount -= (tobeDeletedShallowCopySnapshotsCount + tobeDeletedFullCopySnapshotsCount);
totalShallowCopySnapshotsCount -= tobeDeletedShallowCopySnapshotsCount;
assert (getRepositoryData(snapshotRepoName).getSnapshotIds().size() == totalSnapshotsCount);
- assert (getLockFilesInRemoteStore(remoteStoreEnabledIndexName, remoteStoreRepoName).length == totalShallowCopySnapshotsCount);
+ assert (getLockFilesInRemoteStore(remoteStoreEnabledIndexName, REMOTE_REPO_NAME).length == totalShallowCopySnapshotsCount);
// Deleting all the remaining snapshots
assertAcked(clusterManagerClient.admin().cluster().prepareDeleteSnapshot(snapshotRepoName, "*").get());
assert (getRepositoryData(snapshotRepoName).getSnapshotIds().size() == 0);
- assert (getLockFilesInRemoteStore(remoteStoreEnabledIndexName, remoteStoreRepoName).length == 0);
+ assert (getLockFilesInRemoteStore(remoteStoreEnabledIndexName, REMOTE_REPO_NAME).length == 0);
+ }
+
+ public void testRemoteStoreCleanupForDeletedIndex() throws Exception {
+ disableRepoConsistencyCheck("Remote store repository is being used in the test");
+ FeatureFlagSetter.set(FeatureFlags.REMOTE_STORE);
+
+ internalCluster().startClusterManagerOnlyNode(remoteStoreClusterSettings(REMOTE_REPO_NAME));
+ internalCluster().startDataOnlyNode();
+ final Client clusterManagerClient = internalCluster().clusterManagerClient();
+ ensureStableCluster(2);
+
+ final String snapshotRepoName = "snapshot-repo-name";
+ final Path snapshotRepoPath = randomRepoPath();
+ createRepository(snapshotRepoName, "mock", snapshotRepoSettingsForShallowCopy(snapshotRepoPath));
+
+ final Path remoteStoreRepoPath = randomRepoPath();
+ createRepository(REMOTE_REPO_NAME, "fs", remoteStoreRepoPath);
+
+ final String testIndex = "index-test";
+ createIndexWithContent(testIndex);
+
+ final String remoteStoreEnabledIndexName = "remote-index-1";
+ final Settings remoteStoreEnabledIndexSettings = getRemoteStoreBackedIndexSettings();
+ createIndex(remoteStoreEnabledIndexName, remoteStoreEnabledIndexSettings);
+ indexRandomDocs(remoteStoreEnabledIndexName, randomIntBetween(5, 10));
+
+ String indexUUID = client().admin()
+ .indices()
+ .prepareGetSettings(remoteStoreEnabledIndexName)
+ .get()
+ .getSetting(remoteStoreEnabledIndexName, IndexMetadata.SETTING_INDEX_UUID);
+
+ logger.info("--> create two remote index shallow snapshots");
+ List shallowCopySnapshots = createNSnapshots(snapshotRepoName, 2);
+
+ String[] lockFiles = getLockFilesInRemoteStore(remoteStoreEnabledIndexName, REMOTE_REPO_NAME);
+ assert (lockFiles.length == 2) : "lock files are " + Arrays.toString(lockFiles);
+
+ // delete remote store index
+ assertAcked(client().admin().indices().prepareDelete(remoteStoreEnabledIndexName));
+
+ logger.info("--> delete snapshot 1");
+ AcknowledgedResponse deleteSnapshotResponse = clusterManagerClient.admin()
+ .cluster()
+ .prepareDeleteSnapshot(snapshotRepoName, shallowCopySnapshots.get(0))
+ .get();
+ assertAcked(deleteSnapshotResponse);
+
+ lockFiles = getLockFilesInRemoteStore(remoteStoreEnabledIndexName, REMOTE_REPO_NAME, indexUUID);
+ assert (lockFiles.length == 1) : "lock files are " + Arrays.toString(lockFiles);
+
+ logger.info("--> delete snapshot 2");
+ deleteSnapshotResponse = clusterManagerClient.admin()
+ .cluster()
+ .prepareDeleteSnapshot(snapshotRepoName, shallowCopySnapshots.get(1))
+ .get();
+ assertAcked(deleteSnapshotResponse);
+
+ Path indexPath = Path.of(String.valueOf(remoteStoreRepoPath), indexUUID);
+ // Delete is async. Give time for it
+ assertBusy(() -> {
+ try {
+ assertThat(RemoteStoreBaseIntegTestCase.getFileCount(indexPath), comparesEqualTo(0));
+ } catch (Exception e) {}
+ }, 30, TimeUnit.SECONDS);
}
private List createNSnapshots(String repoName, int count) {
diff --git a/server/src/internalClusterTest/java/org/opensearch/snapshots/RemoteIndexSnapshotStatusApiIT.java b/server/src/internalClusterTest/java/org/opensearch/snapshots/RemoteIndexSnapshotStatusApiIT.java
new file mode 100644
index 0000000000000..d17410d8921ed
--- /dev/null
+++ b/server/src/internalClusterTest/java/org/opensearch/snapshots/RemoteIndexSnapshotStatusApiIT.java
@@ -0,0 +1,210 @@
+/*
+ * SPDX-License-Identifier: Apache-2.0
+ *
+ * The OpenSearch Contributors require contributions made to
+ * this file be licensed under the Apache-2.0 license or a
+ * compatible open source license.
+ */
+
+/*
+ * Licensed to Elasticsearch under one or more contributor
+ * license agreements. See the NOTICE file distributed with
+ * this work for additional information regarding copyright
+ * ownership. Elasticsearch licenses this file to you under
+ * the Apache License, Version 2.0 (the "License"); you may
+ * not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied. See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+
+/*
+ * Modifications Copyright OpenSearch Contributors. See
+ * GitHub history for details.
+ */
+
+package org.opensearch.snapshots;
+
+import org.opensearch.action.ActionFuture;
+import org.opensearch.action.admin.cluster.snapshots.create.CreateSnapshotResponse;
+import org.opensearch.action.admin.cluster.snapshots.status.SnapshotIndexShardStage;
+import org.opensearch.action.admin.cluster.snapshots.status.SnapshotIndexShardStatus;
+import org.opensearch.action.admin.cluster.snapshots.status.SnapshotStatus;
+import org.opensearch.cluster.SnapshotsInProgress;
+import org.opensearch.common.settings.Settings;
+import org.opensearch.common.util.FeatureFlags;
+import org.opensearch.threadpool.ThreadPool;
+
+import java.nio.file.Path;
+
+import static org.hamcrest.Matchers.equalTo;
+import static org.hamcrest.Matchers.greaterThan;
+import static org.hamcrest.Matchers.is;
+import static org.opensearch.remotestore.RemoteStoreBaseIntegTestCase.remoteStoreClusterSettings;
+
+public class RemoteIndexSnapshotStatusApiIT extends AbstractSnapshotIntegTestCase {
+
+ @Override
+ protected Settings nodeSettings(int nodeOrdinal) {
+ return Settings.builder()
+ .put(super.nodeSettings(nodeOrdinal))
+ .put(ThreadPool.ESTIMATED_TIME_INTERVAL_SETTING.getKey(), 0) // We have tests that check by-timestamp order
+ .put(FeatureFlags.REMOTE_STORE, "true")
+ .put(FeatureFlags.SEGMENT_REPLICATION_EXPERIMENTAL, "true")
+ .put(remoteStoreClusterSettings("remote-store-repo-name"))
+ .build();
+ }
+
+ public void testStatusAPICallForShallowCopySnapshot() throws Exception {
+ disableRepoConsistencyCheck("Remote store repository is being used for the test");
+ internalCluster().startClusterManagerOnlyNode();
+ internalCluster().startDataOnlyNode();
+
+ final String snapshotRepoName = "snapshot-repo-name";
+ createRepository(snapshotRepoName, "fs", snapshotRepoSettingsForShallowCopy());
+
+ final Path remoteStoreRepoPath = randomRepoPath();
+ final String remoteStoreRepoName = "remote-store-repo-name";
+ createRepository(remoteStoreRepoName, "fs", remoteStoreRepoPath);
+
+ final String remoteStoreEnabledIndexName = "remote-index-1";
+ final Settings remoteStoreEnabledIndexSettings = getRemoteStoreBackedIndexSettings();
+ createIndex(remoteStoreEnabledIndexName, remoteStoreEnabledIndexSettings);
+ ensureGreen();
+
+ logger.info("--> indexing some data");
+ for (int i = 0; i < 100; i++) {
+ index(remoteStoreEnabledIndexName, "_doc", Integer.toString(i), "foo", "bar" + i);
+ }
+ refresh();
+
+ final String snapshot = "snapshot";
+ createFullSnapshot(snapshotRepoName, snapshot);
+ assert (getLockFilesInRemoteStore(remoteStoreEnabledIndexName, remoteStoreRepoName).length == 1);
+
+ final SnapshotStatus snapshotStatus = getSnapshotStatus(snapshotRepoName, snapshot);
+ assertThat(snapshotStatus.getState(), is(SnapshotsInProgress.State.SUCCESS));
+
+ // Validating that the incremental file count and incremental file size is zero for shallow copy
+ final SnapshotIndexShardStatus shallowSnapshotShardState = stateFirstShard(snapshotStatus, remoteStoreEnabledIndexName);
+ assertThat(shallowSnapshotShardState.getStage(), is(SnapshotIndexShardStage.DONE));
+ assertThat(shallowSnapshotShardState.getStats().getTotalFileCount(), greaterThan(0));
+ assertThat(shallowSnapshotShardState.getStats().getTotalSize(), greaterThan(0L));
+ assertThat(shallowSnapshotShardState.getStats().getIncrementalFileCount(), is(0));
+ assertThat(shallowSnapshotShardState.getStats().getIncrementalSize(), is(0L));
+ }
+
+ public void testStatusAPIStatsForBackToBackShallowSnapshot() throws Exception {
+ disableRepoConsistencyCheck("Remote store repository is being used for the test");
+ internalCluster().startClusterManagerOnlyNode();
+ internalCluster().startDataOnlyNode();
+
+ final String snapshotRepoName = "snapshot-repo-name";
+ createRepository(snapshotRepoName, "fs", snapshotRepoSettingsForShallowCopy());
+
+ final Path remoteStoreRepoPath = randomRepoPath();
+ final String remoteStoreRepoName = "remote-store-repo-name";
+ createRepository(remoteStoreRepoName, "fs", remoteStoreRepoPath);
+
+ final String remoteStoreEnabledIndexName = "remote-index-1";
+ final Settings remoteStoreEnabledIndexSettings = getRemoteStoreBackedIndexSettings();
+ createIndex(remoteStoreEnabledIndexName, remoteStoreEnabledIndexSettings);
+ ensureGreen();
+
+ logger.info("--> indexing some data");
+ for (int i = 0; i < 100; i++) {
+ index(remoteStoreEnabledIndexName, "_doc", Integer.toString(i), "foo", "bar" + i);
+ }
+ refresh();
+
+ createFullSnapshot(snapshotRepoName, "test-snap-1");
+ assert (getLockFilesInRemoteStore(remoteStoreEnabledIndexName, remoteStoreRepoName).length == 1);
+
+ SnapshotStatus snapshotStatus = getSnapshotStatus(snapshotRepoName, "test-snap-1");
+ assertThat(snapshotStatus.getState(), is(SnapshotsInProgress.State.SUCCESS));
+
+ SnapshotIndexShardStatus shallowSnapshotShardState = stateFirstShard(snapshotStatus, remoteStoreEnabledIndexName);
+ assertThat(shallowSnapshotShardState.getStage(), is(SnapshotIndexShardStage.DONE));
+ final int totalFileCount = shallowSnapshotShardState.getStats().getTotalFileCount();
+ final long totalSize = shallowSnapshotShardState.getStats().getTotalSize();
+ final int incrementalFileCount = shallowSnapshotShardState.getStats().getIncrementalFileCount();
+ final long incrementalSize = shallowSnapshotShardState.getStats().getIncrementalSize();
+
+ createFullSnapshot(snapshotRepoName, "test-snap-2");
+ assert (getLockFilesInRemoteStore(remoteStoreEnabledIndexName, remoteStoreRepoName).length == 2);
+
+ snapshotStatus = getSnapshotStatus(snapshotRepoName, "test-snap-2");
+ assertThat(snapshotStatus.getState(), is(SnapshotsInProgress.State.SUCCESS));
+ shallowSnapshotShardState = stateFirstShard(snapshotStatus, remoteStoreEnabledIndexName);
+ assertThat(shallowSnapshotShardState.getStats().getTotalFileCount(), equalTo(totalFileCount));
+ assertThat(shallowSnapshotShardState.getStats().getTotalSize(), equalTo(totalSize));
+ assertThat(shallowSnapshotShardState.getStats().getIncrementalFileCount(), equalTo(incrementalFileCount));
+ assertThat(shallowSnapshotShardState.getStats().getIncrementalSize(), equalTo(incrementalSize));
+ }
+
+ public void testStatusAPICallInProgressShallowSnapshot() throws Exception {
+ disableRepoConsistencyCheck("Remote store repository is being used for the test");
+ internalCluster().startClusterManagerOnlyNode();
+ internalCluster().startDataOnlyNode();
+
+ final String snapshotRepoName = "snapshot-repo-name";
+ createRepository(snapshotRepoName, "mock", snapshotRepoSettingsForShallowCopy().put("block_on_data", true));
+
+ final Path remoteStoreRepoPath = randomRepoPath();
+ final String remoteStoreRepoName = "remote-store-repo-name";
+ createRepository(remoteStoreRepoName, "mock", remoteStoreRepoPath);
+
+ final String remoteStoreEnabledIndexName = "remote-index-1";
+ final Settings remoteStoreEnabledIndexSettings = getRemoteStoreBackedIndexSettings();
+ createIndex(remoteStoreEnabledIndexName, remoteStoreEnabledIndexSettings);
+ ensureGreen();
+
+ logger.info("--> indexing some data");
+ for (int i = 0; i < 100; i++) {
+ index(remoteStoreEnabledIndexName, "_doc", Integer.toString(i), "foo", "bar" + i);
+ }
+ refresh();
+
+ logger.info("--> snapshot");
+ ActionFuture createSnapshotResponseActionFuture = startFullSnapshot(snapshotRepoName, "test-snap");
+
+ logger.info("--> wait for data nodes to get blocked");
+ awaitNumberOfSnapshotsInProgress(1);
+ assertEquals(
+ SnapshotsInProgress.State.STARTED,
+ client().admin()
+ .cluster()
+ .prepareSnapshotStatus(snapshotRepoName)
+ .setSnapshots("test-snap")
+ .get()
+ .getSnapshots()
+ .get(0)
+ .getState()
+ );
+
+ logger.info("--> unblock all data nodes");
+ unblockAllDataNodes(snapshotRepoName);
+
+ logger.info("--> wait for snapshot to finish");
+ createSnapshotResponseActionFuture.actionGet();
+ }
+
+ private static SnapshotIndexShardStatus stateFirstShard(SnapshotStatus snapshotStatus, String indexName) {
+ return snapshotStatus.getIndices().get(indexName).getShards().get(0);
+ }
+
+ private static SnapshotStatus getSnapshotStatus(String repoName, String snapshotName) {
+ try {
+ return client().admin().cluster().prepareSnapshotStatus(repoName).setSnapshots(snapshotName).get().getSnapshots().get(0);
+ } catch (SnapshotMissingException e) {
+ throw new AssertionError(e);
+ }
+ }
+}
diff --git a/server/src/internalClusterTest/java/org/opensearch/snapshots/RepositoriesIT.java b/server/src/internalClusterTest/java/org/opensearch/snapshots/RepositoriesIT.java
index f6df7cccf96f7..bbca3bdc417c7 100644
--- a/server/src/internalClusterTest/java/org/opensearch/snapshots/RepositoriesIT.java
+++ b/server/src/internalClusterTest/java/org/opensearch/snapshots/RepositoriesIT.java
@@ -44,7 +44,7 @@
import org.opensearch.cluster.metadata.RepositoryMetadata;
import org.opensearch.core.util.FileSystemUtils;
import org.opensearch.common.settings.Settings;
-import org.opensearch.common.unit.ByteSizeUnit;
+import org.opensearch.core.common.unit.ByteSizeUnit;
import org.opensearch.repositories.RepositoriesService;
import org.opensearch.repositories.RepositoryException;
import org.opensearch.repositories.RepositoryVerificationException;
diff --git a/server/src/internalClusterTest/java/org/opensearch/snapshots/RestoreSnapshotIT.java b/server/src/internalClusterTest/java/org/opensearch/snapshots/RestoreSnapshotIT.java
index dbd96a7fd109f..0274ca874c7b9 100644
--- a/server/src/internalClusterTest/java/org/opensearch/snapshots/RestoreSnapshotIT.java
+++ b/server/src/internalClusterTest/java/org/opensearch/snapshots/RestoreSnapshotIT.java
@@ -33,45 +33,30 @@
package org.opensearch.snapshots;
import org.opensearch.action.ActionFuture;
-import org.opensearch.action.DocWriteResponse;
-import org.opensearch.action.admin.cluster.remotestore.restore.RestoreRemoteStoreRequest;
import org.opensearch.action.admin.cluster.snapshots.create.CreateSnapshotResponse;
import org.opensearch.action.admin.cluster.snapshots.restore.RestoreSnapshotResponse;
-import org.opensearch.action.admin.indices.get.GetIndexRequest;
-import org.opensearch.action.admin.indices.get.GetIndexResponse;
import org.opensearch.action.admin.indices.settings.get.GetSettingsResponse;
import org.opensearch.action.admin.indices.template.delete.DeleteIndexTemplateRequestBuilder;
import org.opensearch.action.admin.indices.template.get.GetIndexTemplatesResponse;
-import org.opensearch.action.delete.DeleteResponse;
import org.opensearch.action.index.IndexRequestBuilder;
-import org.opensearch.action.support.PlainActionFuture;
import org.opensearch.client.Client;
-import org.opensearch.client.Requests;
import org.opensearch.cluster.block.ClusterBlocks;
import org.opensearch.cluster.metadata.IndexMetadata;
import org.opensearch.cluster.metadata.MappingMetadata;
-import org.opensearch.common.io.PathUtils;
import org.opensearch.common.settings.Settings;
-import org.opensearch.common.unit.ByteSizeUnit;
+import org.opensearch.core.common.unit.ByteSizeUnit;
import org.opensearch.common.unit.TimeValue;
import org.opensearch.common.util.FeatureFlags;
import org.opensearch.common.xcontent.XContentFactory;
import org.opensearch.core.rest.RestStatus;
-import org.opensearch.index.IndexSettings;
import org.opensearch.indices.InvalidIndexNameException;
-import org.opensearch.indices.replication.common.ReplicationType;
import org.opensearch.repositories.RepositoriesService;
-import org.opensearch.test.InternalTestCluster;
-
-import java.io.IOException;
import java.nio.file.Path;
import java.util.Collections;
import java.util.List;
import java.util.Locale;
import java.util.Map;
-import java.util.Arrays;
-import java.util.concurrent.ExecutionException;
import java.util.concurrent.TimeUnit;
import java.util.stream.Collectors;
import java.util.stream.IntStream;
@@ -86,8 +71,6 @@
import static org.hamcrest.Matchers.nullValue;
import static org.opensearch.cluster.metadata.IndexMetadata.SETTING_NUMBER_OF_REPLICAS;
import static org.opensearch.cluster.metadata.IndexMetadata.SETTING_NUMBER_OF_SHARDS;
-import static org.opensearch.cluster.metadata.IndexMetadata.SETTING_REMOTE_STORE_ENABLED;
-import static org.opensearch.cluster.metadata.IndexMetadata.SETTING_REMOTE_SEGMENT_STORE_REPOSITORY;
import static org.opensearch.index.IndexSettings.INDEX_REFRESH_INTERVAL_SETTING;
import static org.opensearch.index.IndexSettings.INDEX_SOFT_DELETES_SETTING;
import static org.opensearch.index.query.QueryBuilders.matchQuery;
@@ -174,486 +157,6 @@ public void testParallelRestoreOperations() {
assertThat(client.prepareGet(restoredIndexName2, docId2).get().isExists(), equalTo(true));
}
- public void testRestoreRemoteStoreIndicesWithRemoteTranslog() throws IOException, ExecutionException, InterruptedException {
- testRestoreOperationsShallowCopyEnabled();
- }
-
- public void testRestoreOperationsShallowCopyEnabled() throws IOException, ExecutionException, InterruptedException {
- String clusterManagerNode = internalCluster().startClusterManagerOnlyNode();
- String primary = internalCluster().startDataOnlyNode();
- String indexName1 = "testindex1";
- String indexName2 = "testindex2";
- String snapshotRepoName = "test-restore-snapshot-repo";
- String remoteStoreRepoName = "test-rs-repo" + TEST_REMOTE_STORE_REPO_SUFFIX;
- String snapshotName1 = "test-restore-snapshot1";
- String snapshotName2 = "test-restore-snapshot2";
- Path absolutePath1 = randomRepoPath().toAbsolutePath();
- Path absolutePath2 = randomRepoPath().toAbsolutePath();
- logger.info("Snapshot Path [{}]", absolutePath1);
- logger.info("Remote Store Repo Path [{}]", absolutePath2);
- String restoredIndexName1 = indexName1 + "-restored";
- String restoredIndexName1Seg = indexName1 + "-restored-seg";
- String restoredIndexName1Doc = indexName1 + "-restored-doc";
- String restoredIndexName2 = indexName2 + "-restored";
-
- createRepository(snapshotRepoName, "fs", getRepositorySettings(absolutePath1, true));
- createRepository(remoteStoreRepoName, "fs", absolutePath2);
-
- Client client = client();
- Settings indexSettings = getIndexSettings(true, remoteStoreRepoName, 1, 0).build();
- createIndex(indexName1, indexSettings);
-
- Settings indexSettings2 = getIndexSettings(false, null, 1, 0).build();
- createIndex(indexName2, indexSettings2);
-
- final int numDocsInIndex1 = 5;
- final int numDocsInIndex2 = 6;
- indexDocuments(client, indexName1, numDocsInIndex1);
- indexDocuments(client, indexName2, numDocsInIndex2);
- ensureGreen(indexName1, indexName2);
-
- internalCluster().startDataOnlyNode();
- logger.info("--> snapshot");
- CreateSnapshotResponse createSnapshotResponse = client.admin()
- .cluster()
- .prepareCreateSnapshot(snapshotRepoName, snapshotName1)
- .setWaitForCompletion(true)
- .setIndices(indexName1, indexName2)
- .get();
- assertThat(createSnapshotResponse.getSnapshotInfo().successfulShards(), greaterThan(0));
- assertThat(
- createSnapshotResponse.getSnapshotInfo().successfulShards(),
- equalTo(createSnapshotResponse.getSnapshotInfo().totalShards())
- );
- assertThat(createSnapshotResponse.getSnapshotInfo().state(), equalTo(SnapshotState.SUCCESS));
-
- updateRepository(snapshotRepoName, "fs", getRepositorySettings(absolutePath1, false));
- CreateSnapshotResponse createSnapshotResponse2 = client.admin()
- .cluster()
- .prepareCreateSnapshot(snapshotRepoName, snapshotName2)
- .setWaitForCompletion(true)
- .setIndices(indexName1, indexName2)
- .get();
- assertThat(createSnapshotResponse2.getSnapshotInfo().successfulShards(), greaterThan(0));
- assertThat(
- createSnapshotResponse2.getSnapshotInfo().successfulShards(),
- equalTo(createSnapshotResponse2.getSnapshotInfo().totalShards())
- );
- assertThat(createSnapshotResponse2.getSnapshotInfo().state(), equalTo(SnapshotState.SUCCESS));
-
- DeleteResponse deleteResponse = client().prepareDelete(indexName1, "0").execute().actionGet();
- assertEquals(deleteResponse.getResult(), DocWriteResponse.Result.DELETED);
- indexDocuments(client, indexName1, numDocsInIndex1, numDocsInIndex1 + randomIntBetween(2, 5));
- ensureGreen(indexName1);
-
- RestoreSnapshotResponse restoreSnapshotResponse1 = client.admin()
- .cluster()
- .prepareRestoreSnapshot(snapshotRepoName, snapshotName1)
- .setWaitForCompletion(false)
- .setIndices(indexName1)
- .setRenamePattern(indexName1)
- .setRenameReplacement(restoredIndexName1)
- .get();
- RestoreSnapshotResponse restoreSnapshotResponse2 = client.admin()
- .cluster()
- .prepareRestoreSnapshot(snapshotRepoName, snapshotName2)
- .setWaitForCompletion(false)
- .setIndices(indexName2)
- .setRenamePattern(indexName2)
- .setRenameReplacement(restoredIndexName2)
- .get();
- assertEquals(restoreSnapshotResponse1.status(), RestStatus.ACCEPTED);
- assertEquals(restoreSnapshotResponse2.status(), RestStatus.ACCEPTED);
- ensureGreen(restoredIndexName1, restoredIndexName2);
- assertDocsPresentInIndex(client, restoredIndexName1, numDocsInIndex1);
- assertDocsPresentInIndex(client, restoredIndexName2, numDocsInIndex2);
-
- // deleting data for restoredIndexName1 and restoring from remote store.
- internalCluster().stopRandomNode(InternalTestCluster.nameFilter(primary));
- ensureRed(restoredIndexName1);
- // Re-initialize client to make sure we are not using client from stopped node.
- client = client(clusterManagerNode);
- assertAcked(client.admin().indices().prepareClose(restoredIndexName1));
- client.admin()
- .cluster()
- .restoreRemoteStore(new RestoreRemoteStoreRequest().indices(restoredIndexName1), PlainActionFuture.newFuture());
- ensureYellowAndNoInitializingShards(restoredIndexName1);
- ensureGreen(restoredIndexName1);
- assertDocsPresentInIndex(client(), restoredIndexName1, numDocsInIndex1);
- // indexing some new docs and validating
- indexDocuments(client, restoredIndexName1, numDocsInIndex1, numDocsInIndex1 + 2);
- ensureGreen(restoredIndexName1);
- assertDocsPresentInIndex(client, restoredIndexName1, numDocsInIndex1 + 2);
-
- // restore index as seg rep enabled with remote store and remote translog disabled
- RestoreSnapshotResponse restoreSnapshotResponse3 = client.admin()
- .cluster()
- .prepareRestoreSnapshot(snapshotRepoName, snapshotName1)
- .setWaitForCompletion(false)
- .setIgnoreIndexSettings(IndexMetadata.SETTING_REMOTE_STORE_ENABLED)
- .setIndices(indexName1)
- .setRenamePattern(indexName1)
- .setRenameReplacement(restoredIndexName1Seg)
- .get();
- assertEquals(restoreSnapshotResponse3.status(), RestStatus.ACCEPTED);
- ensureGreen(restoredIndexName1Seg);
-
- GetIndexResponse getIndexResponse = client.admin()
- .indices()
- .getIndex(new GetIndexRequest().indices(restoredIndexName1Seg).includeDefaults(true))
- .get();
- indexSettings = getIndexResponse.settings().get(restoredIndexName1Seg);
- assertNull(indexSettings.get(SETTING_REMOTE_STORE_ENABLED));
- assertNull(indexSettings.get(SETTING_REMOTE_SEGMENT_STORE_REPOSITORY, null));
- assertEquals(ReplicationType.SEGMENT.toString(), indexSettings.get(IndexMetadata.SETTING_REPLICATION_TYPE));
- assertDocsPresentInIndex(client, restoredIndexName1Seg, numDocsInIndex1);
- // indexing some new docs and validating
- indexDocuments(client, restoredIndexName1Seg, numDocsInIndex1, numDocsInIndex1 + 2);
- ensureGreen(restoredIndexName1Seg);
- assertDocsPresentInIndex(client, restoredIndexName1Seg, numDocsInIndex1 + 2);
-
- // restore index as doc rep based from shallow copy snapshot
- RestoreSnapshotResponse restoreSnapshotResponse4 = client.admin()
- .cluster()
- .prepareRestoreSnapshot(snapshotRepoName, snapshotName1)
- .setWaitForCompletion(false)
- .setIgnoreIndexSettings(IndexMetadata.SETTING_REMOTE_STORE_ENABLED, IndexMetadata.SETTING_REPLICATION_TYPE)
- .setIndices(indexName1)
- .setRenamePattern(indexName1)
- .setRenameReplacement(restoredIndexName1Doc)
- .get();
- assertEquals(restoreSnapshotResponse4.status(), RestStatus.ACCEPTED);
- ensureGreen(restoredIndexName1Doc);
-
- getIndexResponse = client.admin()
- .indices()
- .getIndex(new GetIndexRequest().indices(restoredIndexName1Doc).includeDefaults(true))
- .get();
- indexSettings = getIndexResponse.settings().get(restoredIndexName1Doc);
- assertNull(indexSettings.get(SETTING_REMOTE_STORE_ENABLED));
- assertNull(indexSettings.get(SETTING_REMOTE_SEGMENT_STORE_REPOSITORY, null));
- assertNull(indexSettings.get(IndexMetadata.SETTING_REPLICATION_TYPE));
- assertDocsPresentInIndex(client, restoredIndexName1Doc, numDocsInIndex1);
- // indexing some new docs and validating
- indexDocuments(client, restoredIndexName1Doc, numDocsInIndex1, numDocsInIndex1 + 2);
- ensureGreen(restoredIndexName1Doc);
- assertDocsPresentInIndex(client, restoredIndexName1Doc, numDocsInIndex1 + 2);
- }
-
- public void testRestoreInSameRemoteStoreEnabledIndex() throws IOException {
- String clusterManagerNode = internalCluster().startClusterManagerOnlyNode();
- String primary = internalCluster().startDataOnlyNode();
- String indexName1 = "testindex1";
- String indexName2 = "testindex2";
- String snapshotRepoName = "test-restore-snapshot-repo";
- String remoteStoreRepoName = "test-rs-repo" + TEST_REMOTE_STORE_REPO_SUFFIX;
- String snapshotName1 = "test-restore-snapshot1";
- String snapshotName2 = "test-restore-snapshot2";
- Path absolutePath1 = randomRepoPath().toAbsolutePath();
- Path absolutePath2 = randomRepoPath().toAbsolutePath();
- logger.info("Snapshot Path [{}]", absolutePath1);
- logger.info("Remote Store Repo Path [{}]", absolutePath2);
- String restoredIndexName2 = indexName2 + "-restored";
-
- boolean enableShallowCopy = randomBoolean();
- createRepository(snapshotRepoName, "fs", getRepositorySettings(absolutePath1, enableShallowCopy));
- createRepository(remoteStoreRepoName, "fs", absolutePath2);
-
- Client client = client();
- Settings indexSettings = getIndexSettings(true, remoteStoreRepoName, 1, 0).build();
- createIndex(indexName1, indexSettings);
-
- Settings indexSettings2 = getIndexSettings(false, null, 1, 0).build();
- createIndex(indexName2, indexSettings2);
-
- final int numDocsInIndex1 = 5;
- final int numDocsInIndex2 = 6;
- indexDocuments(client, indexName1, numDocsInIndex1);
- indexDocuments(client, indexName2, numDocsInIndex2);
- ensureGreen(indexName1, indexName2);
-
- internalCluster().startDataOnlyNode();
- logger.info("--> snapshot");
- CreateSnapshotResponse createSnapshotResponse = client.admin()
- .cluster()
- .prepareCreateSnapshot(snapshotRepoName, snapshotName1)
- .setWaitForCompletion(true)
- .setIndices(indexName1, indexName2)
- .get();
- assertThat(createSnapshotResponse.getSnapshotInfo().successfulShards(), greaterThan(0));
- assertThat(
- createSnapshotResponse.getSnapshotInfo().successfulShards(),
- equalTo(createSnapshotResponse.getSnapshotInfo().totalShards())
- );
- assertThat(createSnapshotResponse.getSnapshotInfo().state(), equalTo(SnapshotState.SUCCESS));
-
- updateRepository(snapshotRepoName, "fs", getRepositorySettings(absolutePath1, false));
- CreateSnapshotResponse createSnapshotResponse2 = client.admin()
- .cluster()
- .prepareCreateSnapshot(snapshotRepoName, snapshotName2)
- .setWaitForCompletion(true)
- .setIndices(indexName1, indexName2)
- .get();
- assertThat(createSnapshotResponse2.getSnapshotInfo().successfulShards(), greaterThan(0));
- assertThat(
- createSnapshotResponse2.getSnapshotInfo().successfulShards(),
- equalTo(createSnapshotResponse2.getSnapshotInfo().totalShards())
- );
- assertThat(createSnapshotResponse2.getSnapshotInfo().state(), equalTo(SnapshotState.SUCCESS));
-
- DeleteResponse deleteResponse = client().prepareDelete(indexName1, "0").execute().actionGet();
- assertEquals(deleteResponse.getResult(), DocWriteResponse.Result.DELETED);
- indexDocuments(client, indexName1, numDocsInIndex1, numDocsInIndex1 + randomIntBetween(2, 5));
- ensureGreen(indexName1);
-
- assertAcked(client().admin().indices().prepareClose(indexName1));
-
- RestoreSnapshotResponse restoreSnapshotResponse1 = client.admin()
- .cluster()
- .prepareRestoreSnapshot(snapshotRepoName, snapshotName1)
- .setWaitForCompletion(false)
- .setIndices(indexName1)
- .get();
- RestoreSnapshotResponse restoreSnapshotResponse2 = client.admin()
- .cluster()
- .prepareRestoreSnapshot(snapshotRepoName, snapshotName2)
- .setWaitForCompletion(false)
- .setIndices(indexName2)
- .setRenamePattern(indexName2)
- .setRenameReplacement(restoredIndexName2)
- .get();
- assertEquals(restoreSnapshotResponse1.status(), RestStatus.ACCEPTED);
- assertEquals(restoreSnapshotResponse2.status(), RestStatus.ACCEPTED);
- ensureGreen(indexName1, restoredIndexName2);
- assertDocsPresentInIndex(client, indexName1, numDocsInIndex1);
- assertDocsPresentInIndex(client, restoredIndexName2, numDocsInIndex2);
-
- // deleting data for restoredIndexName1 and restoring from remote store.
- internalCluster().stopRandomNode(InternalTestCluster.nameFilter(primary));
- ensureRed(indexName1);
- // Re-initialize client to make sure we are not using client from stopped node.
- client = client(clusterManagerNode);
- assertAcked(client.admin().indices().prepareClose(indexName1));
- client.admin().cluster().restoreRemoteStore(new RestoreRemoteStoreRequest().indices(indexName1), PlainActionFuture.newFuture());
- ensureYellowAndNoInitializingShards(indexName1);
- ensureGreen(indexName1);
- assertDocsPresentInIndex(client(), indexName1, numDocsInIndex1);
- // indexing some new docs and validating
- indexDocuments(client, indexName1, numDocsInIndex1, numDocsInIndex1 + 2);
- ensureGreen(indexName1);
- assertDocsPresentInIndex(client, indexName1, numDocsInIndex1 + 2);
- }
-
- public void testRestoreShallowCopySnapshotWithDifferentRepo() throws IOException {
- String clusterManagerNode = internalCluster().startClusterManagerOnlyNode();
- String primary = internalCluster().startDataOnlyNode();
- String indexName1 = "testindex1";
- String indexName2 = "testindex2";
- String snapshotRepoName = "test-restore-snapshot-repo";
- String remoteStoreRepoName = "test-rs-repo" + TEST_REMOTE_STORE_REPO_SUFFIX;
- String remoteStoreRepo2Name = "test-rs-repo-2" + TEST_REMOTE_STORE_REPO_SUFFIX;
- String snapshotName1 = "test-restore-snapshot1";
- Path absolutePath1 = randomRepoPath().toAbsolutePath();
- Path absolutePath2 = randomRepoPath().toAbsolutePath();
- Path absolutePath3 = randomRepoPath().toAbsolutePath();
- String restoredIndexName1 = indexName1 + "-restored";
-
- createRepository(snapshotRepoName, "fs", getRepositorySettings(absolutePath1, false));
- createRepository(remoteStoreRepoName, "fs", absolutePath2);
- createRepository(remoteStoreRepo2Name, "fs", absolutePath3);
-
- Client client = client();
- Settings indexSettings = getIndexSettings(true, remoteStoreRepoName, 1, 0).build();
- createIndex(indexName1, indexSettings);
-
- Settings indexSettings2 = getIndexSettings(false, null, 1, 0).build();
- createIndex(indexName2, indexSettings2);
-
- final int numDocsInIndex1 = 5;
- final int numDocsInIndex2 = 6;
- indexDocuments(client, indexName1, numDocsInIndex1);
- indexDocuments(client, indexName2, numDocsInIndex2);
- ensureGreen(indexName1, indexName2);
-
- internalCluster().startDataOnlyNode();
-
- logger.info("--> snapshot");
- CreateSnapshotResponse createSnapshotResponse = client.admin()
- .cluster()
- .prepareCreateSnapshot(snapshotRepoName, snapshotName1)
- .setWaitForCompletion(true)
- .setIndices(indexName1, indexName2)
- .get();
- assertThat(createSnapshotResponse.getSnapshotInfo().successfulShards(), greaterThan(0));
- assertThat(
- createSnapshotResponse.getSnapshotInfo().successfulShards(),
- equalTo(createSnapshotResponse.getSnapshotInfo().totalShards())
- );
- assertThat(createSnapshotResponse.getSnapshotInfo().state(), equalTo(SnapshotState.SUCCESS));
-
- Settings remoteStoreIndexSettings = Settings.builder()
- .put(IndexMetadata.SETTING_REMOTE_SEGMENT_STORE_REPOSITORY, remoteStoreRepo2Name)
- .build();
- // restore index as a remote store index with different remote store repo
- RestoreSnapshotResponse restoreSnapshotResponse = client.admin()
- .cluster()
- .prepareRestoreSnapshot(snapshotRepoName, snapshotName1)
- .setWaitForCompletion(false)
- .setIndexSettings(remoteStoreIndexSettings)
- .setIndices(indexName1)
- .setRenamePattern(indexName1)
- .setRenameReplacement(restoredIndexName1)
- .get();
- assertEquals(restoreSnapshotResponse.status(), RestStatus.ACCEPTED);
- ensureGreen(restoredIndexName1);
- assertDocsPresentInIndex(client(), restoredIndexName1, numDocsInIndex1);
-
- // deleting data for restoredIndexName1 and restoring from remote store.
- internalCluster().stopRandomNode(InternalTestCluster.nameFilter(primary));
- // Re-initialize client to make sure we are not using client from stopped node.
- client = client(clusterManagerNode);
- assertAcked(client.admin().indices().prepareClose(restoredIndexName1));
- client.admin()
- .cluster()
- .restoreRemoteStore(new RestoreRemoteStoreRequest().indices(restoredIndexName1), PlainActionFuture.newFuture());
- ensureYellowAndNoInitializingShards(restoredIndexName1);
- ensureGreen(restoredIndexName1);
- // indexing some new docs and validating
- assertDocsPresentInIndex(client, restoredIndexName1, numDocsInIndex1);
- indexDocuments(client, restoredIndexName1, numDocsInIndex1, numDocsInIndex1 + 2);
- ensureGreen(restoredIndexName1);
- assertDocsPresentInIndex(client, restoredIndexName1, numDocsInIndex1 + 2);
- }
-
- private Settings.Builder getIndexSettings(boolean enableRemoteStore, String remoteStoreRepo, int numOfShards, int numOfReplicas) {
- Settings.Builder settingsBuilder = Settings.builder()
- .put(super.indexSettings())
- .put(IndexMetadata.SETTING_NUMBER_OF_SHARDS, numOfShards)
- .put(IndexMetadata.SETTING_NUMBER_OF_REPLICAS, numOfReplicas);
- if (enableRemoteStore) {
- settingsBuilder.put(IndexMetadata.SETTING_REMOTE_STORE_ENABLED, true)
- .put(IndexMetadata.SETTING_REMOTE_SEGMENT_STORE_REPOSITORY, remoteStoreRepo)
- .put(IndexMetadata.SETTING_REMOTE_TRANSLOG_STORE_REPOSITORY, remoteStoreRepo)
- .put(IndexSettings.INDEX_REFRESH_INTERVAL_SETTING.getKey(), "300s")
- .put(IndexMetadata.SETTING_REPLICATION_TYPE, ReplicationType.SEGMENT);
- }
- return settingsBuilder;
- }
-
- public void testRestoreShallowSnapshotRepositoryOverriden() throws ExecutionException, InterruptedException {
- String indexName1 = "testindex1";
- String snapshotRepoName = "test-restore-snapshot-repo";
- String remoteStoreRepoName = "test-rs-repo" + TEST_REMOTE_STORE_REPO_SUFFIX;
- String remoteStoreRepoNameUpdated = "test-rs-repo-updated" + TEST_REMOTE_STORE_REPO_SUFFIX;
- String snapshotName1 = "test-restore-snapshot1";
- Path absolutePath1 = randomRepoPath().toAbsolutePath();
- Path absolutePath2 = randomRepoPath().toAbsolutePath();
- Path absolutePath3 = randomRepoPath().toAbsolutePath();
- String[] pathTokens = absolutePath1.toString().split("/");
- String basePath = pathTokens[pathTokens.length - 1];
- Arrays.copyOf(pathTokens, pathTokens.length - 1);
- Path location = PathUtils.get(String.join("/", pathTokens));
- pathTokens = absolutePath2.toString().split("/");
- String basePath2 = pathTokens[pathTokens.length - 1];
- Arrays.copyOf(pathTokens, pathTokens.length - 1);
- Path location2 = PathUtils.get(String.join("/", pathTokens));
- logger.info("Path 1 [{}]", absolutePath1);
- logger.info("Path 2 [{}]", absolutePath2);
- logger.info("Path 3 [{}]", absolutePath3);
- String restoredIndexName1 = indexName1 + "-restored";
-
- createRepository(snapshotRepoName, "fs", getRepositorySettings(location, basePath, true));
- createRepository(remoteStoreRepoName, "fs", absolutePath3);
-
- Client client = client();
- Settings indexSettings = Settings.builder()
- .put(super.indexSettings())
- .put(IndexMetadata.SETTING_REMOTE_STORE_ENABLED, true)
- .put(IndexMetadata.SETTING_REMOTE_SEGMENT_STORE_REPOSITORY, remoteStoreRepoName)
- .put(IndexMetadata.SETTING_REMOTE_TRANSLOG_STORE_REPOSITORY, remoteStoreRepoName)
- .put(IndexSettings.INDEX_REFRESH_INTERVAL_SETTING.getKey(), "300s")
- .put(IndexMetadata.SETTING_REPLICATION_TYPE, ReplicationType.SEGMENT)
- .put(IndexMetadata.SETTING_NUMBER_OF_SHARDS, 1)
- .put(IndexMetadata.SETTING_NUMBER_OF_REPLICAS, 0)
- .build();
- createIndex(indexName1, indexSettings);
-
- int numDocsInIndex1 = randomIntBetween(2, 5);
- indexDocuments(client, indexName1, numDocsInIndex1);
-
- ensureGreen(indexName1);
-
- logger.info("--> snapshot");
- CreateSnapshotResponse createSnapshotResponse = client.admin()
- .cluster()
- .prepareCreateSnapshot(snapshotRepoName, snapshotName1)
- .setWaitForCompletion(true)
- .setIndices(indexName1)
- .get();
- assertThat(createSnapshotResponse.getSnapshotInfo().successfulShards(), greaterThan(0));
- assertThat(
- createSnapshotResponse.getSnapshotInfo().successfulShards(),
- equalTo(createSnapshotResponse.getSnapshotInfo().totalShards())
- );
- assertThat(createSnapshotResponse.getSnapshotInfo().state(), equalTo(SnapshotState.SUCCESS));
-
- createRepository(remoteStoreRepoName, "fs", absolutePath2);
-
- RestoreSnapshotResponse restoreSnapshotResponse = client.admin()
- .cluster()
- .prepareRestoreSnapshot(snapshotRepoName, snapshotName1)
- .setWaitForCompletion(true)
- .setIndices(indexName1)
- .setRenamePattern(indexName1)
- .setRenameReplacement(restoredIndexName1)
- .get();
-
- assertTrue(restoreSnapshotResponse.getRestoreInfo().failedShards() > 0);
-
- ensureRed(restoredIndexName1);
-
- client().admin().indices().close(Requests.closeIndexRequest(restoredIndexName1)).get();
- createRepository(remoteStoreRepoNameUpdated, "fs", absolutePath3);
- RestoreSnapshotResponse restoreSnapshotResponse2 = client.admin()
- .cluster()
- .prepareRestoreSnapshot(snapshotRepoName, snapshotName1)
- .setWaitForCompletion(true)
- .setIndices(indexName1)
- .setRenamePattern(indexName1)
- .setRenameReplacement(restoredIndexName1)
- .setSourceRemoteStoreRepository(remoteStoreRepoNameUpdated)
- .get();
-
- assertTrue(restoreSnapshotResponse2.getRestoreInfo().failedShards() == 0);
- ensureGreen(restoredIndexName1);
- assertDocsPresentInIndex(client, restoredIndexName1, numDocsInIndex1);
-
- // indexing some new docs and validating
- indexDocuments(client, restoredIndexName1, numDocsInIndex1, numDocsInIndex1 + 2);
- ensureGreen(restoredIndexName1);
- assertDocsPresentInIndex(client, restoredIndexName1, numDocsInIndex1 + 2);
- }
-
- private void indexDocuments(Client client, String indexName, int numOfDocs) {
- indexDocuments(client, indexName, 0, numOfDocs);
- }
-
- private void indexDocuments(Client client, String indexName, int fromId, int toId) {
- for (int i = fromId; i < toId; i++) {
- String id = Integer.toString(i);
- client.prepareIndex(indexName).setId(id).setSource("text", "sometext").get();
- }
- client.admin().indices().prepareFlush(indexName).get();
- }
-
- private void assertDocsPresentInIndex(Client client, String indexName, int numOfDocs) {
- for (int i = 0; i < numOfDocs; i++) {
- String id = Integer.toString(i);
- logger.info("checking for index " + indexName + " with docId" + id);
- assertTrue("doc with id" + id + " is not present for index " + indexName, client.prepareGet(indexName, id).get().isExists());
- }
- }
-
public void testParallelRestoreOperationsFromSingleSnapshot() throws Exception {
String indexName1 = "testindex1";
String indexName2 = "testindex2";
diff --git a/server/src/internalClusterTest/java/org/opensearch/snapshots/SearchableSnapshotIT.java b/server/src/internalClusterTest/java/org/opensearch/snapshots/SearchableSnapshotIT.java
index 300e1db09b4c5..2a9f8cf67a607 100644
--- a/server/src/internalClusterTest/java/org/opensearch/snapshots/SearchableSnapshotIT.java
+++ b/server/src/internalClusterTest/java/org/opensearch/snapshots/SearchableSnapshotIT.java
@@ -30,7 +30,7 @@
import org.opensearch.cluster.routing.ShardRouting;
import org.opensearch.common.io.PathUtils;
import org.opensearch.common.settings.Settings;
-import org.opensearch.common.unit.ByteSizeUnit;
+import org.opensearch.core.common.unit.ByteSizeUnit;
import org.opensearch.core.index.Index;
import org.opensearch.index.IndexModule;
import org.opensearch.index.IndexNotFoundException;
@@ -56,7 +56,7 @@
import static org.hamcrest.Matchers.is;
import static org.hamcrest.Matchers.notNullValue;
import static org.opensearch.action.admin.cluster.node.stats.NodesStatsRequest.Metric.FS;
-import static org.opensearch.common.util.CollectionUtils.iterableAsArrayList;
+import static org.opensearch.core.common.util.CollectionUtils.iterableAsArrayList;
@ThreadLeakFilters(filters = CleanerDaemonThreadLeakFilter.class)
public final class SearchableSnapshotIT extends AbstractSnapshotIntegTestCase {
diff --git a/server/src/internalClusterTest/java/org/opensearch/snapshots/SharedClusterSnapshotRestoreIT.java b/server/src/internalClusterTest/java/org/opensearch/snapshots/SharedClusterSnapshotRestoreIT.java
index 4bba25039d376..420df6b4c34c8 100644
--- a/server/src/internalClusterTest/java/org/opensearch/snapshots/SharedClusterSnapshotRestoreIT.java
+++ b/server/src/internalClusterTest/java/org/opensearch/snapshots/SharedClusterSnapshotRestoreIT.java
@@ -66,7 +66,7 @@
import org.opensearch.cluster.routing.UnassignedInfo;
import org.opensearch.cluster.service.ClusterService;
import org.opensearch.common.settings.Settings;
-import org.opensearch.common.unit.ByteSizeUnit;
+import org.opensearch.core.common.unit.ByteSizeUnit;
import org.opensearch.common.unit.TimeValue;
import org.opensearch.core.util.BytesRefUtils;
import org.opensearch.core.index.Index;
diff --git a/server/src/internalClusterTest/java/org/opensearch/snapshots/SnapshotStatusApisIT.java b/server/src/internalClusterTest/java/org/opensearch/snapshots/SnapshotStatusApisIT.java
index c22dd90cc930b..e02a5b95da400 100644
--- a/server/src/internalClusterTest/java/org/opensearch/snapshots/SnapshotStatusApisIT.java
+++ b/server/src/internalClusterTest/java/org/opensearch/snapshots/SnapshotStatusApisIT.java
@@ -46,7 +46,7 @@
import org.opensearch.client.Client;
import org.opensearch.cluster.SnapshotsInProgress;
import org.opensearch.common.settings.Settings;
-import org.opensearch.common.unit.ByteSizeUnit;
+import org.opensearch.core.common.unit.ByteSizeUnit;
import org.opensearch.common.unit.TimeValue;
import org.opensearch.common.util.FeatureFlags;
import org.opensearch.common.util.io.IOUtils;
@@ -112,7 +112,7 @@ public void testStatusApiConsistency() {
assertEquals(snStatus.getStats().getTime(), snapshotInfo.endTime() - snapshotInfo.startTime());
}
- public void testStatusAPICallForShallowCopySnapshot() throws Exception {
+ public void testStatusAPICallForShallowCopySnapshot() {
disableRepoConsistencyCheck("Remote store repository is being used for the test");
internalCluster().startClusterManagerOnlyNode();
internalCluster().startDataOnlyNode();
@@ -120,10 +120,6 @@ public void testStatusAPICallForShallowCopySnapshot() throws Exception {
final String snapshotRepoName = "snapshot-repo-name";
createRepository(snapshotRepoName, "fs", snapshotRepoSettingsForShallowCopy());
- final Path remoteStoreRepoPath = randomRepoPath();
- final String remoteStoreRepoName = "remote-store-repo-name";
- createRepository(remoteStoreRepoName, "fs", remoteStoreRepoPath);
-
final String indexName = "index-1";
createIndex(indexName);
ensureGreen();
@@ -133,20 +129,8 @@ public void testStatusAPICallForShallowCopySnapshot() throws Exception {
}
refresh();
- final String remoteStoreEnabledIndexName = "remote-index-1";
- final Settings remoteStoreEnabledIndexSettings = getRemoteStoreBackedIndexSettings(remoteStoreRepoName);
- createIndex(remoteStoreEnabledIndexName, remoteStoreEnabledIndexSettings);
- ensureGreen();
-
- logger.info("--> indexing some data");
- for (int i = 0; i < 100; i++) {
- index(remoteStoreEnabledIndexName, "_doc", Integer.toString(i), "foo", "bar" + i);
- }
- refresh();
-
final String snapshot = "snapshot";
createFullSnapshot(snapshotRepoName, snapshot);
- assert (getLockFilesInRemoteStore(remoteStoreEnabledIndexName, remoteStoreRepoName).length == 1);
final SnapshotStatus snapshotStatus = getSnapshotStatus(snapshotRepoName, snapshot);
assertThat(snapshotStatus.getState(), is(SnapshotsInProgress.State.SUCCESS));
@@ -157,14 +141,6 @@ public void testStatusAPICallForShallowCopySnapshot() throws Exception {
assertThat(snapshotShardState.getStats().getTotalSize(), greaterThan(0L));
assertThat(snapshotShardState.getStats().getIncrementalFileCount(), greaterThan(0));
assertThat(snapshotShardState.getStats().getIncrementalSize(), greaterThan(0L));
-
- // Validating that the incremental file count and incremental file size is zero for shallow copy
- final SnapshotIndexShardStatus shallowSnapshotShardState = stateFirstShard(snapshotStatus, remoteStoreEnabledIndexName);
- assertThat(shallowSnapshotShardState.getStage(), is(SnapshotIndexShardStage.DONE));
- assertThat(shallowSnapshotShardState.getStats().getTotalFileCount(), greaterThan(0));
- assertThat(shallowSnapshotShardState.getStats().getTotalSize(), greaterThan(0L));
- assertThat(shallowSnapshotShardState.getStats().getIncrementalFileCount(), is(0));
- assertThat(shallowSnapshotShardState.getStats().getIncrementalSize(), is(0L));
}
public void testStatusAPICallInProgressSnapshot() throws Exception {
@@ -245,63 +221,6 @@ public void testExceptionOnMissingShardLevelSnapBlob() throws IOException {
);
}
- public void testStatusAPIStatsForBackToBackShallowSnapshot() throws Exception {
- disableRepoConsistencyCheck("Remote store repository is being used for the test");
- internalCluster().startClusterManagerOnlyNode();
- internalCluster().startDataOnlyNode();
-
- final String snapshotRepoName = "snapshot-repo-name";
- createRepository(snapshotRepoName, "fs", snapshotRepoSettingsForShallowCopy());
-
- final Path remoteStoreRepoPath = randomRepoPath();
- final String remoteStoreRepoName = "remote-store-repo-name";
- createRepository(remoteStoreRepoName, "fs", remoteStoreRepoPath);
-
- final String indexName = "index-1";
- createIndex(indexName);
- ensureGreen();
- logger.info("--> indexing some data");
- for (int i = 0; i < 100; i++) {
- index(indexName, "_doc", Integer.toString(i), "foo", "bar" + i);
- }
- refresh();
-
- final String remoteStoreEnabledIndexName = "remote-index-1";
- final Settings remoteStoreEnabledIndexSettings = getRemoteStoreBackedIndexSettings(remoteStoreRepoName);
- createIndex(remoteStoreEnabledIndexName, remoteStoreEnabledIndexSettings);
- ensureGreen();
-
- logger.info("--> indexing some data");
- for (int i = 0; i < 100; i++) {
- index(remoteStoreEnabledIndexName, "_doc", Integer.toString(i), "foo", "bar" + i);
- }
- refresh();
-
- createFullSnapshot(snapshotRepoName, "test-snap-1");
- assert (getLockFilesInRemoteStore(remoteStoreEnabledIndexName, remoteStoreRepoName).length == 1);
-
- SnapshotStatus snapshotStatus = getSnapshotStatus(snapshotRepoName, "test-snap-1");
- assertThat(snapshotStatus.getState(), is(SnapshotsInProgress.State.SUCCESS));
-
- SnapshotIndexShardStatus shallowSnapshotShardState = stateFirstShard(snapshotStatus, remoteStoreEnabledIndexName);
- assertThat(shallowSnapshotShardState.getStage(), is(SnapshotIndexShardStage.DONE));
- final int totalFileCount = shallowSnapshotShardState.getStats().getTotalFileCount();
- final long totalSize = shallowSnapshotShardState.getStats().getTotalSize();
- final int incrementalFileCount = shallowSnapshotShardState.getStats().getIncrementalFileCount();
- final long incrementalSize = shallowSnapshotShardState.getStats().getIncrementalSize();
-
- createFullSnapshot(snapshotRepoName, "test-snap-2");
- assert (getLockFilesInRemoteStore(remoteStoreEnabledIndexName, remoteStoreRepoName).length == 2);
-
- snapshotStatus = getSnapshotStatus(snapshotRepoName, "test-snap-2");
- assertThat(snapshotStatus.getState(), is(SnapshotsInProgress.State.SUCCESS));
- shallowSnapshotShardState = stateFirstShard(snapshotStatus, remoteStoreEnabledIndexName);
- assertThat(shallowSnapshotShardState.getStats().getTotalFileCount(), equalTo(totalFileCount));
- assertThat(shallowSnapshotShardState.getStats().getTotalSize(), equalTo(totalSize));
- assertThat(shallowSnapshotShardState.getStats().getIncrementalFileCount(), equalTo(incrementalFileCount));
- assertThat(shallowSnapshotShardState.getStats().getIncrementalSize(), equalTo(incrementalSize));
- }
-
public void testGetSnapshotsWithoutIndices() throws Exception {
createRepository("test-repo", "fs");
@@ -441,17 +360,12 @@ public void testSnapshotStatusOnFailedSnapshot() throws Exception {
}
public void testStatusAPICallInProgressShallowSnapshot() throws Exception {
- disableRepoConsistencyCheck("Remote store repository is being used for the test");
internalCluster().startClusterManagerOnlyNode();
internalCluster().startDataOnlyNode();
final String snapshotRepoName = "snapshot-repo-name";
createRepository(snapshotRepoName, "mock", snapshotRepoSettingsForShallowCopy().put("block_on_data", true));
- final Path remoteStoreRepoPath = randomRepoPath();
- final String remoteStoreRepoName = "remote-store-repo-name";
- createRepository(remoteStoreRepoName, "mock", remoteStoreRepoPath);
-
final String indexName = "index-1";
createIndex(indexName);
ensureGreen();
@@ -461,17 +375,6 @@ public void testStatusAPICallInProgressShallowSnapshot() throws Exception {
}
refresh();
- final String remoteStoreEnabledIndexName = "remote-index-1";
- final Settings remoteStoreEnabledIndexSettings = getRemoteStoreBackedIndexSettings(remoteStoreRepoName);
- createIndex(remoteStoreEnabledIndexName, remoteStoreEnabledIndexSettings);
- ensureGreen();
-
- logger.info("--> indexing some data");
- for (int i = 0; i < 100; i++) {
- index(remoteStoreEnabledIndexName, "_doc", Integer.toString(i), "foo", "bar" + i);
- }
- refresh();
-
logger.info("--> snapshot");
ActionFuture createSnapshotResponseActionFuture = startFullSnapshot(snapshotRepoName, "test-snap");
diff --git a/server/src/main/java/org/apache/lucene/queries/BlendedTermQuery.java b/server/src/main/java/org/apache/lucene/queries/BlendedTermQuery.java
index 3a36a6ff103e0..9d276cfe283eb 100644
--- a/server/src/main/java/org/apache/lucene/queries/BlendedTermQuery.java
+++ b/server/src/main/java/org/apache/lucene/queries/BlendedTermQuery.java
@@ -42,6 +42,7 @@
import org.apache.lucene.search.BooleanQuery;
import org.apache.lucene.search.BoostQuery;
import org.apache.lucene.search.DisjunctionMaxQuery;
+import org.apache.lucene.search.IndexSearcher;
import org.apache.lucene.search.Query;
import org.apache.lucene.search.QueryVisitor;
import org.apache.lucene.search.TermQuery;
@@ -93,11 +94,12 @@ public BlendedTermQuery(Term[] terms, float[] boosts) {
}
@Override
- public Query rewrite(IndexReader reader) throws IOException {
- Query rewritten = super.rewrite(reader);
+ public Query rewrite(IndexSearcher searcher) throws IOException {
+ Query rewritten = super.rewrite(searcher);
if (rewritten != this) {
return rewritten;
}
+ IndexReader reader = searcher.getIndexReader();
IndexReaderContext context = reader.getContext();
TermStates[] ctx = new TermStates[terms.length];
int[] docFreqs = new int[ctx.length];
diff --git a/server/src/main/java/org/apache/lucene/search/grouping/CollapseTopFieldDocs.java b/server/src/main/java/org/apache/lucene/search/grouping/CollapseTopFieldDocs.java
index fe26c313d72b2..e93e5cdcc3f7b 100644
--- a/server/src/main/java/org/apache/lucene/search/grouping/CollapseTopFieldDocs.java
+++ b/server/src/main/java/org/apache/lucene/search/grouping/CollapseTopFieldDocs.java
@@ -39,7 +39,7 @@
import org.apache.lucene.search.TopFieldDocs;
import org.apache.lucene.search.TotalHits;
import org.apache.lucene.util.PriorityQueue;
-import org.opensearch.common.util.CollectionUtils;
+import org.opensearch.core.common.util.CollectionUtils;
import java.util.ArrayList;
import java.util.HashSet;
diff --git a/server/src/main/java/org/opensearch/OpenSearchServerException.java b/server/src/main/java/org/opensearch/OpenSearchServerException.java
index d53164427debf..b8c6d1e78d25a 100644
--- a/server/src/main/java/org/opensearch/OpenSearchServerException.java
+++ b/server/src/main/java/org/opensearch/OpenSearchServerException.java
@@ -288,14 +288,6 @@ public static void registerExceptions() {
UNKNOWN_VERSION_ADDED
)
);
- registerExceptionHandle(
- new OpenSearchExceptionHandle(
- org.opensearch.OpenSearchParseException.class,
- org.opensearch.OpenSearchParseException::new,
- 35,
- UNKNOWN_VERSION_ADDED
- )
- );
registerExceptionHandle(
new OpenSearchExceptionHandle(
org.opensearch.search.SearchException.class,
@@ -915,14 +907,6 @@ public static void registerExceptions() {
UNKNOWN_VERSION_ADDED
)
);
- registerExceptionHandle(
- new OpenSearchExceptionHandle(
- org.opensearch.common.breaker.CircuitBreakingException.class,
- org.opensearch.common.breaker.CircuitBreakingException::new,
- 133,
- UNKNOWN_VERSION_ADDED
- )
- );
registerExceptionHandle(
new OpenSearchExceptionHandle(
org.opensearch.transport.NodeNotConnectedException.class,
diff --git a/server/src/main/java/org/opensearch/action/ActionListenerResponseHandler.java b/server/src/main/java/org/opensearch/action/ActionListenerResponseHandler.java
index 7899324a3301e..2f376d81fa202 100644
--- a/server/src/main/java/org/opensearch/action/ActionListenerResponseHandler.java
+++ b/server/src/main/java/org/opensearch/action/ActionListenerResponseHandler.java
@@ -37,7 +37,7 @@
import org.opensearch.threadpool.ThreadPool;
import org.opensearch.transport.TransportResponseHandler;
import org.opensearch.transport.TransportException;
-import org.opensearch.transport.TransportResponse;
+import org.opensearch.core.transport.TransportResponse;
import java.io.IOException;
import java.util.Objects;
diff --git a/server/src/main/java/org/opensearch/action/ActionModule.java b/server/src/main/java/org/opensearch/action/ActionModule.java
index 2ce1d4bcd4b02..b775095861150 100644
--- a/server/src/main/java/org/opensearch/action/ActionModule.java
+++ b/server/src/main/java/org/opensearch/action/ActionModule.java
@@ -301,7 +301,7 @@
import org.opensearch.index.seqno.RetentionLeaseActions;
import org.opensearch.identity.IdentityService;
import org.opensearch.indices.SystemIndices;
-import org.opensearch.indices.breaker.CircuitBreakerService;
+import org.opensearch.core.indices.breaker.CircuitBreakerService;
import org.opensearch.persistent.CompletionPersistentTaskAction;
import org.opensearch.persistent.RemovePersistentTaskAction;
import org.opensearch.persistent.StartPersistentTaskAction;
diff --git a/server/src/main/java/org/opensearch/action/ActionResponse.java b/server/src/main/java/org/opensearch/action/ActionResponse.java
index fd13971433d8b..e1d4da760b35b 100644
--- a/server/src/main/java/org/opensearch/action/ActionResponse.java
+++ b/server/src/main/java/org/opensearch/action/ActionResponse.java
@@ -33,7 +33,7 @@
package org.opensearch.action;
import org.opensearch.core.common.io.stream.StreamInput;
-import org.opensearch.transport.TransportResponse;
+import org.opensearch.core.transport.TransportResponse;
import java.io.IOException;
diff --git a/server/src/main/java/org/opensearch/action/admin/cluster/health/ClusterHealthResponse.java b/server/src/main/java/org/opensearch/action/admin/cluster/health/ClusterHealthResponse.java
index 714abe86df71e..019a1e1417510 100644
--- a/server/src/main/java/org/opensearch/action/admin/cluster/health/ClusterHealthResponse.java
+++ b/server/src/main/java/org/opensearch/action/admin/cluster/health/ClusterHealthResponse.java
@@ -39,7 +39,7 @@
import org.opensearch.cluster.health.ClusterHealthStatus;
import org.opensearch.cluster.health.ClusterIndexHealth;
import org.opensearch.cluster.health.ClusterStateHealth;
-import org.opensearch.common.Strings;
+import org.opensearch.core.common.Strings;
import org.opensearch.core.common.io.stream.StreamInput;
import org.opensearch.core.common.io.stream.StreamOutput;
import org.opensearch.common.settings.ClusterSettings;
diff --git a/server/src/main/java/org/opensearch/action/admin/cluster/health/TransportClusterHealthAction.java b/server/src/main/java/org/opensearch/action/admin/cluster/health/TransportClusterHealthAction.java
index d84179fa6bc0a..09f6b7cd06cda 100644
--- a/server/src/main/java/org/opensearch/action/admin/cluster/health/TransportClusterHealthAction.java
+++ b/server/src/main/java/org/opensearch/action/admin/cluster/health/TransportClusterHealthAction.java
@@ -60,7 +60,7 @@
import org.opensearch.common.inject.Inject;
import org.opensearch.core.common.io.stream.StreamInput;
import org.opensearch.common.unit.TimeValue;
-import org.opensearch.common.util.CollectionUtils;
+import org.opensearch.core.common.util.CollectionUtils;
import org.opensearch.core.common.Strings;
import org.opensearch.discovery.ClusterManagerNotDiscoveredException;
import org.opensearch.discovery.Discovery;
diff --git a/server/src/main/java/org/opensearch/action/admin/cluster/node/info/NodeInfo.java b/server/src/main/java/org/opensearch/action/admin/cluster/node/info/NodeInfo.java
index acf40e3a9de3c..1d2dbf66920f8 100644
--- a/server/src/main/java/org/opensearch/action/admin/cluster/node/info/NodeInfo.java
+++ b/server/src/main/java/org/opensearch/action/admin/cluster/node/info/NodeInfo.java
@@ -40,7 +40,7 @@
import org.opensearch.core.common.io.stream.StreamInput;
import org.opensearch.core.common.io.stream.StreamOutput;
import org.opensearch.common.settings.Settings;
-import org.opensearch.common.unit.ByteSizeValue;
+import org.opensearch.core.common.unit.ByteSizeValue;
import org.opensearch.http.HttpInfo;
import org.opensearch.ingest.IngestInfo;
import org.opensearch.monitor.jvm.JvmInfo;
diff --git a/server/src/main/java/org/opensearch/action/admin/cluster/node/info/NodesInfoResponse.java b/server/src/main/java/org/opensearch/action/admin/cluster/node/info/NodesInfoResponse.java
index 3dcb30a091a94..566ae06649168 100644
--- a/server/src/main/java/org/opensearch/action/admin/cluster/node/info/NodesInfoResponse.java
+++ b/server/src/main/java/org/opensearch/action/admin/cluster/node/info/NodesInfoResponse.java
@@ -36,7 +36,6 @@
import org.opensearch.action.support.nodes.BaseNodesResponse;
import org.opensearch.cluster.ClusterName;
import org.opensearch.cluster.node.DiscoveryNodeRole;
-import org.opensearch.common.Strings;
import org.opensearch.core.common.io.stream.StreamInput;
import org.opensearch.core.common.io.stream.StreamOutput;
import org.opensearch.common.settings.Settings;
@@ -165,7 +164,7 @@ public String toString() {
builder.startObject();
toXContent(builder, EMPTY_PARAMS);
builder.endObject();
- return Strings.toString(builder);
+ return builder.toString();
} catch (IOException e) {
return "{ \"error\" : \"" + e.getMessage() + "\"}";
}
diff --git a/server/src/main/java/org/opensearch/action/admin/cluster/node/reload/NodesReloadSecureSettingsResponse.java b/server/src/main/java/org/opensearch/action/admin/cluster/node/reload/NodesReloadSecureSettingsResponse.java
index 22044f0c69c48..a4a69cd301b41 100644
--- a/server/src/main/java/org/opensearch/action/admin/cluster/node/reload/NodesReloadSecureSettingsResponse.java
+++ b/server/src/main/java/org/opensearch/action/admin/cluster/node/reload/NodesReloadSecureSettingsResponse.java
@@ -38,7 +38,6 @@
import org.opensearch.action.support.nodes.BaseNodesResponse;
import org.opensearch.cluster.ClusterName;
import org.opensearch.cluster.node.DiscoveryNode;
-import org.opensearch.common.Strings;
import org.opensearch.core.common.io.stream.StreamInput;
import org.opensearch.core.common.io.stream.StreamOutput;
import org.opensearch.core.xcontent.ToXContentFragment;
@@ -99,7 +98,7 @@ public String toString() {
builder.startObject();
toXContent(builder, EMPTY_PARAMS);
builder.endObject();
- return Strings.toString(builder);
+ return builder.toString();
} catch (final IOException e) {
return "{ \"error\" : \"" + e.getMessage() + "\"}";
}
diff --git a/server/src/main/java/org/opensearch/action/admin/cluster/node/stats/NodeStats.java b/server/src/main/java/org/opensearch/action/admin/cluster/node/stats/NodeStats.java
index 3b6c85ee6e091..82e03d1fd79ac 100644
--- a/server/src/main/java/org/opensearch/action/admin/cluster/node/stats/NodeStats.java
+++ b/server/src/main/java/org/opensearch/action/admin/cluster/node/stats/NodeStats.java
@@ -49,7 +49,7 @@
import org.opensearch.index.stats.ShardIndexingPressureStats;
import org.opensearch.index.store.remote.filecache.FileCacheStats;
import org.opensearch.indices.NodeIndicesStats;
-import org.opensearch.indices.breaker.AllCircuitBreakerStats;
+import org.opensearch.core.indices.breaker.AllCircuitBreakerStats;
import org.opensearch.ingest.IngestStats;
import org.opensearch.monitor.fs.FsInfo;
import org.opensearch.monitor.jvm.JvmStats;
diff --git a/server/src/main/java/org/opensearch/action/admin/cluster/node/stats/NodesStatsResponse.java b/server/src/main/java/org/opensearch/action/admin/cluster/node/stats/NodesStatsResponse.java
index 0037d1ea27873..539bae311ac98 100644
--- a/server/src/main/java/org/opensearch/action/admin/cluster/node/stats/NodesStatsResponse.java
+++ b/server/src/main/java/org/opensearch/action/admin/cluster/node/stats/NodesStatsResponse.java
@@ -35,7 +35,6 @@
import org.opensearch.action.FailedNodeException;
import org.opensearch.action.support.nodes.BaseNodesResponse;
import org.opensearch.cluster.ClusterName;
-import org.opensearch.common.Strings;
import org.opensearch.core.common.io.stream.StreamInput;
import org.opensearch.core.common.io.stream.StreamOutput;
import org.opensearch.core.xcontent.ToXContentFragment;
@@ -92,7 +91,7 @@ public String toString() {
builder.startObject();
toXContent(builder, EMPTY_PARAMS);
builder.endObject();
- return Strings.toString(builder);
+ return builder.toString();
} catch (IOException e) {
return "{ \"error\" : \"" + e.getMessage() + "\"}";
}
diff --git a/server/src/main/java/org/opensearch/action/admin/cluster/node/tasks/cancel/CancelTasksResponse.java b/server/src/main/java/org/opensearch/action/admin/cluster/node/tasks/cancel/CancelTasksResponse.java
index 4741504ddd035..f2ebde642d2be 100644
--- a/server/src/main/java/org/opensearch/action/admin/cluster/node/tasks/cancel/CancelTasksResponse.java
+++ b/server/src/main/java/org/opensearch/action/admin/cluster/node/tasks/cancel/CancelTasksResponse.java
@@ -35,7 +35,7 @@
import org.opensearch.OpenSearchException;
import org.opensearch.action.TaskOperationFailure;
import org.opensearch.action.admin.cluster.node.tasks.list.ListTasksResponse;
-import org.opensearch.common.Strings;
+import org.opensearch.core.common.Strings;
import org.opensearch.core.common.io.stream.StreamInput;
import org.opensearch.common.xcontent.XContentType;
import org.opensearch.core.xcontent.ConstructingObjectParser;
diff --git a/server/src/main/java/org/opensearch/action/admin/cluster/node/tasks/get/GetTaskResponse.java b/server/src/main/java/org/opensearch/action/admin/cluster/node/tasks/get/GetTaskResponse.java
index b32e59fc77794..36bec88109cf1 100644
--- a/server/src/main/java/org/opensearch/action/admin/cluster/node/tasks/get/GetTaskResponse.java
+++ b/server/src/main/java/org/opensearch/action/admin/cluster/node/tasks/get/GetTaskResponse.java
@@ -33,7 +33,7 @@
package org.opensearch.action.admin.cluster.node.tasks.get;
import org.opensearch.action.ActionResponse;
-import org.opensearch.common.Strings;
+import org.opensearch.core.common.Strings;
import org.opensearch.core.common.io.stream.StreamInput;
import org.opensearch.core.common.io.stream.StreamOutput;
import org.opensearch.common.xcontent.XContentType;
diff --git a/server/src/main/java/org/opensearch/action/admin/cluster/node/tasks/list/ListTasksResponse.java b/server/src/main/java/org/opensearch/action/admin/cluster/node/tasks/list/ListTasksResponse.java
index 85b165df68cfa..1e2e432882623 100644
--- a/server/src/main/java/org/opensearch/action/admin/cluster/node/tasks/list/ListTasksResponse.java
+++ b/server/src/main/java/org/opensearch/action/admin/cluster/node/tasks/list/ListTasksResponse.java
@@ -39,8 +39,8 @@
import org.opensearch.cluster.node.DiscoveryNodeRole;
import org.opensearch.cluster.node.DiscoveryNodes;
import org.opensearch.core.ParseField;
-import org.opensearch.common.Strings;
import org.opensearch.common.TriFunction;
+import org.opensearch.core.common.Strings;
import org.opensearch.core.common.io.stream.StreamInput;
import org.opensearch.core.common.io.stream.StreamOutput;
import org.opensearch.common.xcontent.XContentType;
diff --git a/server/src/main/java/org/opensearch/action/admin/cluster/node/usage/NodesUsageResponse.java b/server/src/main/java/org/opensearch/action/admin/cluster/node/usage/NodesUsageResponse.java
index 59b1f99429cfe..e0c8e6e8e269e 100644
--- a/server/src/main/java/org/opensearch/action/admin/cluster/node/usage/NodesUsageResponse.java
+++ b/server/src/main/java/org/opensearch/action/admin/cluster/node/usage/NodesUsageResponse.java
@@ -35,7 +35,6 @@
import org.opensearch.action.FailedNodeException;
import org.opensearch.action.support.nodes.BaseNodesResponse;
import org.opensearch.cluster.ClusterName;
-import org.opensearch.common.Strings;
import org.opensearch.core.common.io.stream.StreamInput;
import org.opensearch.core.common.io.stream.StreamOutput;
import org.opensearch.core.xcontent.ToXContentFragment;
@@ -93,7 +92,7 @@ public String toString() {
builder.startObject();
toXContent(builder, EMPTY_PARAMS);
builder.endObject();
- return Strings.toString(builder);
+ return builder.toString();
} catch (IOException e) {
return "{ \"error\" : \"" + e.getMessage() + "\"}";
}
diff --git a/server/src/main/java/org/opensearch/action/admin/cluster/remotestore/restore/RestoreRemoteStoreRequest.java b/server/src/main/java/org/opensearch/action/admin/cluster/remotestore/restore/RestoreRemoteStoreRequest.java
index 703b9575a88ad..fd29c324d51a1 100644
--- a/server/src/main/java/org/opensearch/action/admin/cluster/remotestore/restore/RestoreRemoteStoreRequest.java
+++ b/server/src/main/java/org/opensearch/action/admin/cluster/remotestore/restore/RestoreRemoteStoreRequest.java
@@ -35,7 +35,8 @@
public class RestoreRemoteStoreRequest extends ClusterManagerNodeRequest implements ToXContentObject {
private String[] indices = Strings.EMPTY_ARRAY;
- private Boolean waitForCompletion;
+ private Boolean waitForCompletion = false;
+ private Boolean restoreAllShards = false;
public RestoreRemoteStoreRequest() {}
@@ -43,6 +44,7 @@ public RestoreRemoteStoreRequest(StreamInput in) throws IOException {
super(in);
indices = in.readStringArray();
waitForCompletion = in.readOptionalBoolean();
+ restoreAllShards = in.readOptionalBoolean();
}
@Override
@@ -50,6 +52,7 @@ public void writeTo(StreamOutput out) throws IOException {
super.writeTo(out);
out.writeStringArray(indices);
out.writeOptionalBoolean(waitForCompletion);
+ out.writeOptionalBoolean(restoreAllShards);
}
@Override
@@ -118,6 +121,27 @@ public boolean waitForCompletion() {
return waitForCompletion;
}
+ /**
+ * Set the value for restoreAllShards, denoting whether to restore all shards or only unassigned shards
+ *
+ * @param restoreAllShards If true, the operation will restore all the shards of the given indices.
+ * If false, the operation will restore only the unassigned shards of the given indices.
+ * @return this request
+ */
+ public RestoreRemoteStoreRequest restoreAllShards(boolean restoreAllShards) {
+ this.restoreAllShards = restoreAllShards;
+ return this;
+ }
+
+ /**
+ * Returns restoreAllShards setting
+ *
+ * @return true if the operation will restore all the shards of the given indices
+ */
+ public boolean restoreAllShards() {
+ return restoreAllShards;
+ }
+
/**
* Parses restore definition
*
@@ -167,18 +191,21 @@ public boolean equals(Object o) {
if (this == o) return true;
if (o == null || getClass() != o.getClass()) return false;
RestoreRemoteStoreRequest that = (RestoreRemoteStoreRequest) o;
- return waitForCompletion == that.waitForCompletion && Arrays.equals(indices, that.indices);
+ return waitForCompletion == that.waitForCompletion
+ && restoreAllShards == that.restoreAllShards
+ && Arrays.equals(indices, that.indices);
}
@Override
public int hashCode() {
- int result = Objects.hash(waitForCompletion);
+ int result = Objects.hash(waitForCompletion, restoreAllShards);
result = 31 * result + Arrays.hashCode(indices);
return result;
}
@Override
public String toString() {
- return org.opensearch.common.Strings.toString(XContentType.JSON, this);
+ return Strings.toString(XContentType.JSON, this);
}
+
}
diff --git a/server/src/main/java/org/opensearch/action/admin/cluster/remotestore/stats/RemoteStoreStats.java b/server/src/main/java/org/opensearch/action/admin/cluster/remotestore/stats/RemoteStoreStats.java
index 5ac9c1cf5f74c..6b4c9a26ab19b 100644
--- a/server/src/main/java/org/opensearch/action/admin/cluster/remotestore/stats/RemoteStoreStats.java
+++ b/server/src/main/java/org/opensearch/action/admin/cluster/remotestore/stats/RemoteStoreStats.java
@@ -11,9 +11,10 @@
import org.opensearch.core.common.io.stream.StreamInput;
import org.opensearch.core.common.io.stream.StreamOutput;
import org.opensearch.core.common.io.stream.Writeable;
+import org.opensearch.cluster.routing.ShardRouting;
import org.opensearch.core.xcontent.ToXContentFragment;
import org.opensearch.core.xcontent.XContentBuilder;
-import org.opensearch.index.remote.RemoteRefreshSegmentTracker;
+import org.opensearch.index.remote.RemoteSegmentTransferTracker;
import java.io.IOException;
@@ -24,72 +25,128 @@
*/
public class RemoteStoreStats implements Writeable, ToXContentFragment {
- private final RemoteRefreshSegmentTracker.Stats remoteSegmentUploadShardStats;
+ private final RemoteSegmentTransferTracker.Stats remoteSegmentShardStats;
- public RemoteStoreStats(RemoteRefreshSegmentTracker.Stats remoteSegmentUploadShardStats) {
- this.remoteSegmentUploadShardStats = remoteSegmentUploadShardStats;
+ private final ShardRouting shardRouting;
+
+ public RemoteStoreStats(RemoteSegmentTransferTracker.Stats remoteSegmentUploadShardStats, ShardRouting shardRouting) {
+ this.remoteSegmentShardStats = remoteSegmentUploadShardStats;
+ this.shardRouting = shardRouting;
}
public RemoteStoreStats(StreamInput in) throws IOException {
- remoteSegmentUploadShardStats = in.readOptionalWriteable(RemoteRefreshSegmentTracker.Stats::new);
+ this.remoteSegmentShardStats = in.readOptionalWriteable(RemoteSegmentTransferTracker.Stats::new);
+ this.shardRouting = new ShardRouting(in);
+ }
+
+ public RemoteSegmentTransferTracker.Stats getStats() {
+ return remoteSegmentShardStats;
}
- public RemoteRefreshSegmentTracker.Stats getStats() {
- return remoteSegmentUploadShardStats;
+ public ShardRouting getShardRouting() {
+ return shardRouting;
}
@Override
public XContentBuilder toXContent(XContentBuilder builder, Params params) throws IOException {
- builder.startObject()
- .field(Fields.SHARD_ID, remoteSegmentUploadShardStats.shardId)
- .field(Fields.LOCAL_REFRESH_TIMESTAMP, remoteSegmentUploadShardStats.localRefreshClockTimeMs)
- .field(Fields.REMOTE_REFRESH_TIMESTAMP, remoteSegmentUploadShardStats.remoteRefreshClockTimeMs)
- .field(Fields.REFRESH_TIME_LAG_IN_MILLIS, remoteSegmentUploadShardStats.refreshTimeLagMs)
- .field(Fields.REFRESH_LAG, remoteSegmentUploadShardStats.localRefreshNumber - remoteSegmentUploadShardStats.remoteRefreshNumber)
- .field(Fields.BYTES_LAG, remoteSegmentUploadShardStats.bytesLag)
-
- .field(Fields.BACKPRESSURE_REJECTION_COUNT, remoteSegmentUploadShardStats.rejectionCount)
- .field(Fields.CONSECUTIVE_FAILURE_COUNT, remoteSegmentUploadShardStats.consecutiveFailuresCount);
-
- builder.startObject(Fields.TOTAL_REMOTE_REFRESH);
- builder.field(SubFields.STARTED, remoteSegmentUploadShardStats.totalUploadsStarted)
- .field(SubFields.SUCCEEDED, remoteSegmentUploadShardStats.totalUploadsSucceeded)
- .field(SubFields.FAILED, remoteSegmentUploadShardStats.totalUploadsFailed);
+ builder.startObject();
+ buildShardRouting(builder);
+ builder.startObject(Fields.SEGMENT);
+ builder.startObject(SubFields.DOWNLOAD);
+ // Ensuring that we are not showing 0 metrics to the user
+ if (remoteSegmentShardStats.directoryFileTransferTrackerStats.transferredBytesStarted != 0) {
+ buildDownloadStats(builder);
+ }
+ builder.endObject();
+ builder.startObject(SubFields.UPLOAD);
+ // Ensuring that we are not showing 0 metrics to the user
+ if (remoteSegmentShardStats.totalUploadsStarted != 0) {
+ buildUploadStats(builder);
+ }
builder.endObject();
-
- builder.startObject(Fields.TOTAL_UPLOADS_IN_BYTES);
- builder.field(SubFields.STARTED, remoteSegmentUploadShardStats.uploadBytesStarted)
- .field(SubFields.SUCCEEDED, remoteSegmentUploadShardStats.uploadBytesSucceeded)
- .field(SubFields.FAILED, remoteSegmentUploadShardStats.uploadBytesFailed);
builder.endObject();
+ return builder.endObject();
+ }
+
+ @Override
+ public void writeTo(StreamOutput out) throws IOException {
+ out.writeOptionalWriteable(remoteSegmentShardStats);
+ shardRouting.writeTo(out);
+ }
- builder.startObject(Fields.REMOTE_REFRESH_SIZE_IN_BYTES);
- builder.field(SubFields.LAST_SUCCESSFUL, remoteSegmentUploadShardStats.lastSuccessfulRemoteRefreshBytes);
- builder.field(SubFields.MOVING_AVG, remoteSegmentUploadShardStats.uploadBytesMovingAverage);
+ private void buildUploadStats(XContentBuilder builder) throws IOException {
+ builder.field(UploadStatsFields.LOCAL_REFRESH_TIMESTAMP, remoteSegmentShardStats.localRefreshClockTimeMs)
+ .field(UploadStatsFields.REMOTE_REFRESH_TIMESTAMP, remoteSegmentShardStats.remoteRefreshClockTimeMs)
+ .field(UploadStatsFields.REFRESH_TIME_LAG_IN_MILLIS, remoteSegmentShardStats.refreshTimeLagMs)
+ .field(UploadStatsFields.REFRESH_LAG, remoteSegmentShardStats.localRefreshNumber - remoteSegmentShardStats.remoteRefreshNumber)
+ .field(UploadStatsFields.BYTES_LAG, remoteSegmentShardStats.bytesLag)
+ .field(UploadStatsFields.BACKPRESSURE_REJECTION_COUNT, remoteSegmentShardStats.rejectionCount)
+ .field(UploadStatsFields.CONSECUTIVE_FAILURE_COUNT, remoteSegmentShardStats.consecutiveFailuresCount);
+ builder.startObject(UploadStatsFields.TOTAL_SYNCS_TO_REMOTE)
+ .field(SubFields.STARTED, remoteSegmentShardStats.totalUploadsStarted)
+ .field(SubFields.SUCCEEDED, remoteSegmentShardStats.totalUploadsSucceeded)
+ .field(SubFields.FAILED, remoteSegmentShardStats.totalUploadsFailed);
builder.endObject();
+ builder.startObject(UploadStatsFields.TOTAL_UPLOADS_IN_BYTES)
+ .field(SubFields.STARTED, remoteSegmentShardStats.uploadBytesStarted)
+ .field(SubFields.SUCCEEDED, remoteSegmentShardStats.uploadBytesSucceeded)
+ .field(SubFields.FAILED, remoteSegmentShardStats.uploadBytesFailed);
+ builder.endObject();
+ builder.startObject(UploadStatsFields.REMOTE_REFRESH_SIZE_IN_BYTES)
+ .field(SubFields.LAST_SUCCESSFUL, remoteSegmentShardStats.lastSuccessfulRemoteRefreshBytes)
+ .field(SubFields.MOVING_AVG, remoteSegmentShardStats.uploadBytesMovingAverage);
+ builder.endObject();
+ builder.startObject(UploadStatsFields.UPLOAD_LATENCY_IN_BYTES_PER_SEC)
+ .field(SubFields.MOVING_AVG, remoteSegmentShardStats.uploadBytesPerSecMovingAverage);
+ builder.endObject();
+ builder.startObject(UploadStatsFields.REMOTE_REFRESH_LATENCY_IN_MILLIS)
+ .field(SubFields.MOVING_AVG, remoteSegmentShardStats.uploadTimeMovingAverage);
+ builder.endObject();
+ }
- builder.startObject(Fields.UPLOAD_LATENCY_IN_BYTES_PER_SEC);
- builder.field(SubFields.MOVING_AVG, remoteSegmentUploadShardStats.uploadBytesPerSecMovingAverage);
+ private void buildDownloadStats(XContentBuilder builder) throws IOException {
+ builder.field(
+ DownloadStatsFields.LAST_SYNC_TIMESTAMP,
+ remoteSegmentShardStats.directoryFileTransferTrackerStats.lastTransferTimestampMs
+ );
+ builder.startObject(DownloadStatsFields.TOTAL_DOWNLOADS_IN_BYTES)
+ .field(SubFields.STARTED, remoteSegmentShardStats.directoryFileTransferTrackerStats.transferredBytesStarted)
+ .field(SubFields.SUCCEEDED, remoteSegmentShardStats.directoryFileTransferTrackerStats.transferredBytesSucceeded)
+ .field(SubFields.FAILED, remoteSegmentShardStats.directoryFileTransferTrackerStats.transferredBytesFailed);
builder.endObject();
- builder.startObject(Fields.REMOTE_REFRESH_LATENCY_IN_MILLIS);
- builder.field(SubFields.MOVING_AVG, remoteSegmentUploadShardStats.uploadTimeMovingAverage);
+ builder.startObject(DownloadStatsFields.DOWNLOAD_SIZE_IN_BYTES)
+ .field(SubFields.LAST_SUCCESSFUL, remoteSegmentShardStats.directoryFileTransferTrackerStats.lastSuccessfulTransferInBytes)
+ .field(SubFields.MOVING_AVG, remoteSegmentShardStats.directoryFileTransferTrackerStats.transferredBytesMovingAverage);
builder.endObject();
+ builder.startObject(DownloadStatsFields.DOWNLOAD_SPEED_IN_BYTES_PER_SEC)
+ .field(SubFields.MOVING_AVG, remoteSegmentShardStats.directoryFileTransferTrackerStats.transferredBytesPerSecMovingAverage);
builder.endObject();
+ }
- return builder;
+ private void buildShardRouting(XContentBuilder builder) throws IOException {
+ builder.startObject(Fields.ROUTING);
+ builder.field(RoutingFields.STATE, shardRouting.state());
+ builder.field(RoutingFields.PRIMARY, shardRouting.primary());
+ builder.field(RoutingFields.NODE_ID, shardRouting.currentNodeId());
+ builder.endObject();
}
- @Override
- public void writeTo(StreamOutput out) throws IOException {
- out.writeOptionalWriteable(remoteSegmentUploadShardStats);
+ static final class Fields {
+ static final String ROUTING = "routing";
+ static final String SEGMENT = "segment";
+ static final String TRANSLOG = "translog";
+ }
+
+ static final class RoutingFields {
+ static final String STATE = "state";
+ static final String PRIMARY = "primary";
+ static final String NODE_ID = "node";
}
/**
* Fields for remote store stats response
*/
- static final class Fields {
- static final String SHARD_ID = "shard_id";
-
+ static final class UploadStatsFields {
/**
* Lag in terms of bytes b/w local and remote store
*/
@@ -128,7 +185,7 @@ static final class Fields {
/**
* Represents the number of remote refreshes
*/
- static final String TOTAL_REMOTE_REFRESH = "total_remote_refresh";
+ static final String TOTAL_SYNCS_TO_REMOTE = "total_syncs_to_remote";
/**
* Represents the total uploads to remote store in bytes
@@ -151,21 +208,46 @@ static final class Fields {
static final String REMOTE_REFRESH_LATENCY_IN_MILLIS = "remote_refresh_latency_in_millis";
}
+ static final class DownloadStatsFields {
+ /**
+ * Last successful sync from remote in milliseconds
+ */
+ static final String LAST_SYNC_TIMESTAMP = "last_sync_timestamp";
+
+ /**
+ * Total bytes of segment files downloaded from the remote store for a specific shard
+ */
+ static final String TOTAL_DOWNLOADS_IN_BYTES = "total_downloads_in_bytes";
+
+ /**
+ * Size of each segment file downloaded from the remote store
+ */
+ static final String DOWNLOAD_SIZE_IN_BYTES = "download_size_in_bytes";
+
+ /**
+ * Speed (in bytes/sec) for segment file downloads
+ */
+ static final String DOWNLOAD_SPEED_IN_BYTES_PER_SEC = "download_speed_in_bytes_per_sec";
+ }
+
/**
- * Reusable sub fields for {@link Fields}
+ * Reusable sub fields for {@link UploadStatsFields} and {@link DownloadStatsFields}
*/
static final class SubFields {
static final String STARTED = "started";
static final String SUCCEEDED = "succeeded";
static final String FAILED = "failed";
+ static final String DOWNLOAD = "download";
+ static final String UPLOAD = "upload";
+
/**
- * Moving avg over last N values stat for a {@link Fields}
+ * Moving avg over last N values stat
*/
static final String MOVING_AVG = "moving_avg";
/**
- * Most recent successful attempt stat for a {@link Fields}
+ * Most recent successful attempt stat
*/
static final String LAST_SUCCESSFUL = "last_successful";
}
diff --git a/server/src/main/java/org/opensearch/action/admin/cluster/remotestore/stats/RemoteStoreStatsResponse.java b/server/src/main/java/org/opensearch/action/admin/cluster/remotestore/stats/RemoteStoreStatsResponse.java
index 20023e30a271e..4f0832816fd8a 100644
--- a/server/src/main/java/org/opensearch/action/admin/cluster/remotestore/stats/RemoteStoreStatsResponse.java
+++ b/server/src/main/java/org/opensearch/action/admin/cluster/remotestore/stats/RemoteStoreStatsResponse.java
@@ -10,14 +10,17 @@
import org.opensearch.core.action.support.DefaultShardOperationFailedException;
import org.opensearch.action.support.broadcast.BroadcastResponse;
-import org.opensearch.common.Strings;
+import org.opensearch.core.common.Strings;
import org.opensearch.core.common.io.stream.StreamInput;
import org.opensearch.core.common.io.stream.StreamOutput;
import org.opensearch.common.xcontent.XContentType;
import org.opensearch.core.xcontent.XContentBuilder;
import java.io.IOException;
+import java.util.ArrayList;
+import java.util.HashMap;
import java.util.List;
+import java.util.Map;
/**
* Remote Store stats response
@@ -26,49 +29,71 @@
*/
public class RemoteStoreStatsResponse extends BroadcastResponse {
- private final RemoteStoreStats[] shards;
+ private final RemoteStoreStats[] remoteStoreStats;
public RemoteStoreStatsResponse(StreamInput in) throws IOException {
super(in);
- shards = in.readArray(RemoteStoreStats::new, RemoteStoreStats[]::new);
+ remoteStoreStats = in.readArray(RemoteStoreStats::new, RemoteStoreStats[]::new);
}
public RemoteStoreStatsResponse(
- RemoteStoreStats[] shards,
+ RemoteStoreStats[] remoteStoreStats,
int totalShards,
int successfulShards,
int failedShards,
List shardFailures
) {
super(totalShards, successfulShards, failedShards, shardFailures);
- this.shards = shards;
+ this.remoteStoreStats = remoteStoreStats;
}
- public RemoteStoreStats[] getShards() {
- return this.shards;
+ public RemoteStoreStats[] getRemoteStoreStats() {
+ return this.remoteStoreStats;
}
- public RemoteStoreStats getAt(int position) {
- return shards[position];
+ public Map>> groupByIndexAndShards() {
+ Map>> indexWiseStats = new HashMap<>();
+ for (RemoteStoreStats shardStat : remoteStoreStats) {
+ indexWiseStats.computeIfAbsent(shardStat.getShardRouting().getIndexName(), k -> new HashMap<>())
+ .computeIfAbsent(shardStat.getShardRouting().getId(), k -> new ArrayList<>())
+ .add(shardStat);
+ }
+ return indexWiseStats;
}
@Override
public void writeTo(StreamOutput out) throws IOException {
super.writeTo(out);
- out.writeArray(shards);
+ out.writeArray(remoteStoreStats);
}
@Override
protected void addCustomXContentFields(XContentBuilder builder, Params params) throws IOException {
- builder.startArray("stats");
- for (RemoteStoreStats shard : shards) {
- shard.toXContent(builder, params);
+ Map>> indexWiseStats = groupByIndexAndShards();
+ builder.startObject(Fields.INDICES);
+ for (String indexName : indexWiseStats.keySet()) {
+ builder.startObject(indexName);
+ builder.startObject(Fields.SHARDS);
+ for (int shardId : indexWiseStats.get(indexName).keySet()) {
+ builder.startArray(Integer.toString(shardId));
+ for (RemoteStoreStats shardStat : indexWiseStats.get(indexName).get(shardId)) {
+ shardStat.toXContent(builder, params);
+ }
+ builder.endArray();
+ }
+ builder.endObject();
+ builder.endObject();
}
- builder.endArray();
+ builder.endObject();
}
@Override
public String toString() {
return Strings.toString(XContentType.JSON, this, true, false);
}
+
+ static final class Fields {
+ static final String SHARDS = "shards";
+ static final String INDICES = "indices";
+ }
}
diff --git a/server/src/main/java/org/opensearch/action/admin/cluster/remotestore/stats/TransportRemoteStoreStatsAction.java b/server/src/main/java/org/opensearch/action/admin/cluster/remotestore/stats/TransportRemoteStoreStatsAction.java
index 434abd1207f50..37835a5add3d6 100644
--- a/server/src/main/java/org/opensearch/action/admin/cluster/remotestore/stats/TransportRemoteStoreStatsAction.java
+++ b/server/src/main/java/org/opensearch/action/admin/cluster/remotestore/stats/TransportRemoteStoreStatsAction.java
@@ -24,7 +24,7 @@
import org.opensearch.core.common.io.stream.StreamInput;
import org.opensearch.index.IndexService;
import org.opensearch.index.remote.RemoteRefreshSegmentPressureService;
-import org.opensearch.index.remote.RemoteRefreshSegmentTracker;
+import org.opensearch.index.remote.RemoteSegmentTransferTracker;
import org.opensearch.index.shard.IndexShard;
import org.opensearch.index.shard.ShardNotFoundException;
import org.opensearch.indices.IndicesService;
@@ -49,6 +49,7 @@ public class TransportRemoteStoreStatsAction extends TransportBroadcastByNodeAct
RemoteStoreStats> {
private final IndicesService indicesService;
+
private final RemoteRefreshSegmentPressureService remoteRefreshSegmentPressureService;
@Inject
@@ -95,7 +96,6 @@ protected ShardsIterator shards(ClusterState clusterState, RemoteStoreStatsReque
|| (shardRouting.currentNodeId() == null
|| shardRouting.currentNodeId().equals(clusterState.getNodes().getLocalNodeId()))
)
- .filter(ShardRouting::primary)
.filter(
shardRouting -> Boolean.parseBoolean(
clusterState.getMetadata().index(shardRouting.index()).getSettings().get(IndexMetadata.SETTING_REMOTE_STORE_ENABLED)
@@ -153,11 +153,10 @@ protected RemoteStoreStats shardOperation(RemoteStoreStatsRequest request, Shard
throw new ShardNotFoundException(indexShard.shardId());
}
- RemoteRefreshSegmentTracker remoteRefreshSegmentTracker = remoteRefreshSegmentPressureService.getRemoteRefreshSegmentTracker(
+ RemoteSegmentTransferTracker remoteSegmentTransferTracker = remoteRefreshSegmentPressureService.getRemoteRefreshSegmentTracker(
indexShard.shardId()
);
- assert Objects.nonNull(remoteRefreshSegmentTracker);
-
- return new RemoteStoreStats(remoteRefreshSegmentTracker.stats());
+ assert Objects.nonNull(remoteSegmentTransferTracker);
+ return new RemoteStoreStats(remoteSegmentTransferTracker.stats(), indexShard.routingEntry());
}
}
diff --git a/server/src/main/java/org/opensearch/action/admin/cluster/repositories/verify/VerifyRepositoryResponse.java b/server/src/main/java/org/opensearch/action/admin/cluster/repositories/verify/VerifyRepositoryResponse.java
index 8b9142a39e9c9..9a97b67e1c2b7 100644
--- a/server/src/main/java/org/opensearch/action/admin/cluster/repositories/verify/VerifyRepositoryResponse.java
+++ b/server/src/main/java/org/opensearch/action/admin/cluster/repositories/verify/VerifyRepositoryResponse.java
@@ -35,7 +35,7 @@
import org.opensearch.action.ActionResponse;
import org.opensearch.cluster.node.DiscoveryNode;
import org.opensearch.core.ParseField;
-import org.opensearch.common.Strings;
+import org.opensearch.core.common.Strings;
import org.opensearch.core.common.io.stream.StreamInput;
import org.opensearch.core.common.io.stream.StreamOutput;
import org.opensearch.core.common.io.stream.Writeable;
diff --git a/server/src/main/java/org/opensearch/action/admin/cluster/settings/ClusterGetSettingsResponse.java b/server/src/main/java/org/opensearch/action/admin/cluster/settings/ClusterGetSettingsResponse.java
index 8d29baa82562a..9a8206a4cfdba 100644
--- a/server/src/main/java/org/opensearch/action/admin/cluster/settings/ClusterGetSettingsResponse.java
+++ b/server/src/main/java/org/opensearch/action/admin/cluster/settings/ClusterGetSettingsResponse.java
@@ -34,7 +34,7 @@
import org.opensearch.action.ActionResponse;
import org.opensearch.core.ParseField;
-import org.opensearch.common.Strings;
+import org.opensearch.core.common.Strings;
import org.opensearch.core.common.io.stream.StreamOutput;
import org.opensearch.common.settings.Settings;
import org.opensearch.core.xcontent.ConstructingObjectParser;
diff --git a/server/src/main/java/org/opensearch/action/admin/cluster/snapshots/clone/CloneSnapshotRequest.java b/server/src/main/java/org/opensearch/action/admin/cluster/snapshots/clone/CloneSnapshotRequest.java
index fedcfa1f5d9ff..5729386259df1 100644
--- a/server/src/main/java/org/opensearch/action/admin/cluster/snapshots/clone/CloneSnapshotRequest.java
+++ b/server/src/main/java/org/opensearch/action/admin/cluster/snapshots/clone/CloneSnapshotRequest.java
@@ -36,7 +36,7 @@
import org.opensearch.action.IndicesRequest;
import org.opensearch.action.support.IndicesOptions;
import org.opensearch.action.support.clustermanager.ClusterManagerNodeRequest;
-import org.opensearch.common.Strings;
+import org.opensearch.core.common.Strings;
import org.opensearch.core.common.io.stream.StreamInput;
import org.opensearch.core.common.io.stream.StreamOutput;
import org.opensearch.common.xcontent.XContentType;
diff --git a/server/src/main/java/org/opensearch/action/admin/cluster/snapshots/create/CreateSnapshotRequest.java b/server/src/main/java/org/opensearch/action/admin/cluster/snapshots/create/CreateSnapshotRequest.java
index 5da3f2eb01260..9736d99b9f886 100644
--- a/server/src/main/java/org/opensearch/action/admin/cluster/snapshots/create/CreateSnapshotRequest.java
+++ b/server/src/main/java/org/opensearch/action/admin/cluster/snapshots/create/CreateSnapshotRequest.java
@@ -38,12 +38,13 @@
import org.opensearch.action.IndicesRequest;
import org.opensearch.action.support.IndicesOptions;
import org.opensearch.action.support.clustermanager.ClusterManagerNodeRequest;
-import org.opensearch.common.Strings;
+import org.opensearch.core.common.Strings;
import org.opensearch.core.common.bytes.BytesReference;
import org.opensearch.core.common.io.stream.StreamInput;
import org.opensearch.core.common.io.stream.StreamOutput;
import org.opensearch.common.settings.Settings;
import org.opensearch.core.xcontent.MediaType;
+import org.opensearch.core.xcontent.MediaTypeRegistry;
import org.opensearch.core.xcontent.ToXContentObject;
import org.opensearch.core.xcontent.XContentBuilder;
import org.opensearch.common.xcontent.XContentFactory;
@@ -73,7 +74,7 @@
* must not contain hash sign ('#')
* must not start with underscore ('_')
* must be lowercase
- * must not contain invalid file name characters {@link org.opensearch.common.Strings#INVALID_FILENAME_CHARS}
+ * must not contain invalid file name characters {@link Strings#INVALID_FILENAME_CHARS}
*
*
* @opensearch.internal
@@ -387,9 +388,9 @@ public CreateSnapshotRequest settings(String source, MediaType mediaType) {
*/
public CreateSnapshotRequest settings(Map source) {
try {
- XContentBuilder builder = XContentFactory.contentBuilder(XContentType.JSON);
+ XContentBuilder builder = MediaTypeRegistry.contentBuilder(XContentType.JSON);
builder.map(source);
- settings(Strings.toString(builder), builder.contentType());
+ settings(builder.toString(), builder.contentType());
} catch (IOException e) {
throw new OpenSearchGenerationException("Failed to generate [" + source + "]", e);
}
@@ -446,7 +447,7 @@ public CreateSnapshotRequest source(Map source) {
String name = entry.getKey();
if (name.equals("indices")) {
if (entry.getValue() instanceof String) {
- indices(org.opensearch.core.common.Strings.splitStringByCommaToArray((String) entry.getValue()));
+ indices(Strings.splitStringByCommaToArray((String) entry.getValue()));
} else if (entry.getValue() instanceof List) {
indices((List) entry.getValue());
} else {
diff --git a/server/src/main/java/org/opensearch/action/admin/cluster/snapshots/get/GetSnapshotsResponse.java b/server/src/main/java/org/opensearch/action/admin/cluster/snapshots/get/GetSnapshotsResponse.java
index 3c96c67f78fd4..b8b4d972c95f7 100644
--- a/server/src/main/java/org/opensearch/action/admin/cluster/snapshots/get/GetSnapshotsResponse.java
+++ b/server/src/main/java/org/opensearch/action/admin/cluster/snapshots/get/GetSnapshotsResponse.java
@@ -33,7 +33,7 @@
package org.opensearch.action.admin.cluster.snapshots.get;
import org.opensearch.action.ActionResponse;
-import org.opensearch.common.Strings;
+import org.opensearch.core.common.Strings;
import org.opensearch.core.common.io.stream.StreamInput;
import org.opensearch.core.common.io.stream.StreamOutput;
import org.opensearch.common.xcontent.XContentType;
diff --git a/server/src/main/java/org/opensearch/action/admin/cluster/snapshots/get/TransportGetSnapshotsAction.java b/server/src/main/java/org/opensearch/action/admin/cluster/snapshots/get/TransportGetSnapshotsAction.java
index 11ea4112f6e67..dbf47f2b121e3 100644
--- a/server/src/main/java/org/opensearch/action/admin/cluster/snapshots/get/TransportGetSnapshotsAction.java
+++ b/server/src/main/java/org/opensearch/action/admin/cluster/snapshots/get/TransportGetSnapshotsAction.java
@@ -37,8 +37,8 @@
import org.apache.logging.log4j.message.ParameterizedMessage;
import org.apache.lucene.util.CollectionUtil;
import org.opensearch.action.ActionListener;
+import org.opensearch.action.StepListener;
import org.opensearch.action.support.ActionFilters;
-import org.opensearch.action.support.PlainActionFuture;
import org.opensearch.action.support.clustermanager.TransportClusterManagerNodeAction;
import org.opensearch.cluster.ClusterState;
import org.opensearch.cluster.SnapshotsInProgress;
@@ -138,57 +138,64 @@ protected void clusterManagerOperation(
currentSnapshots.add(snapshotInfo);
}
- final RepositoryData repositoryData;
+ final StepListener repositoryDataListener = new StepListener<>();
if (isCurrentSnapshotsOnly(request.snapshots()) == false) {
- repositoryData = PlainActionFuture.get(fut -> repositoriesService.getRepositoryData(repository, fut));
- for (SnapshotId snapshotId : repositoryData.getSnapshotIds()) {
- allSnapshotIds.put(snapshotId.getName(), snapshotId);
- }
+ repositoriesService.getRepositoryData(repository, repositoryDataListener);
} else {
- repositoryData = null;
+ // Setting repositoryDataListener response to be null if the request has only current snapshot
+ repositoryDataListener.onResponse(null);
}
+ repositoryDataListener.whenComplete(repositoryData -> {
+ if (repositoryData != null) {
+ for (SnapshotId snapshotId : repositoryData.getSnapshotIds()) {
+ allSnapshotIds.put(snapshotId.getName(), snapshotId);
+ }
+ }
- final Set toResolve = new HashSet<>();
- if (isAllSnapshots(request.snapshots())) {
- toResolve.addAll(allSnapshotIds.values());
- } else {
- for (String snapshotOrPattern : request.snapshots()) {
- if (GetSnapshotsRequest.CURRENT_SNAPSHOT.equalsIgnoreCase(snapshotOrPattern)) {
- toResolve.addAll(currentSnapshots.stream().map(SnapshotInfo::snapshotId).collect(Collectors.toList()));
- } else if (Regex.isSimpleMatchPattern(snapshotOrPattern) == false) {
- if (allSnapshotIds.containsKey(snapshotOrPattern)) {
- toResolve.add(allSnapshotIds.get(snapshotOrPattern));
- } else if (request.ignoreUnavailable() == false) {
- throw new SnapshotMissingException(repository, snapshotOrPattern);
- }
- } else {
- for (Map.Entry entry : allSnapshotIds.entrySet()) {
- if (Regex.simpleMatch(snapshotOrPattern, entry.getKey())) {
- toResolve.add(entry.getValue());
+ final Set toResolve = new HashSet<>();
+ if (isAllSnapshots(request.snapshots())) {
+ toResolve.addAll(allSnapshotIds.values());
+ } else {
+ for (String snapshotOrPattern : request.snapshots()) {
+ if (GetSnapshotsRequest.CURRENT_SNAPSHOT.equalsIgnoreCase(snapshotOrPattern)) {
+ toResolve.addAll(currentSnapshots.stream().map(SnapshotInfo::snapshotId).collect(Collectors.toList()));
+ } else if (Regex.isSimpleMatchPattern(snapshotOrPattern) == false) {
+ if (allSnapshotIds.containsKey(snapshotOrPattern)) {
+ toResolve.add(allSnapshotIds.get(snapshotOrPattern));
+ } else if (request.ignoreUnavailable() == false) {
+ throw new SnapshotMissingException(repository, snapshotOrPattern);
+ }
+ } else {
+ for (Map.Entry entry : allSnapshotIds.entrySet()) {
+ if (Regex.simpleMatch(snapshotOrPattern, entry.getKey())) {
+ toResolve.add(entry.getValue());
+ }
}
}
}
- }
- if (toResolve.isEmpty() && request.ignoreUnavailable() == false && isCurrentSnapshotsOnly(request.snapshots()) == false) {
- throw new SnapshotMissingException(repository, request.snapshots()[0]);
+ if (toResolve.isEmpty()
+ && request.ignoreUnavailable() == false
+ && isCurrentSnapshotsOnly(request.snapshots()) == false) {
+ throw new SnapshotMissingException(repository, request.snapshots()[0]);
+ }
}
- }
- final List snapshotInfos;
- if (request.verbose()) {
- snapshotInfos = snapshots(snapshotsInProgress, repository, new ArrayList<>(toResolve), request.ignoreUnavailable());
- } else {
- if (repositoryData != null) {
- // want non-current snapshots as well, which are found in the repository data
- snapshotInfos = buildSimpleSnapshotInfos(toResolve, repositoryData, currentSnapshots);
+ final List snapshotInfos;
+ if (request.verbose()) {
+ snapshotInfos = snapshots(snapshotsInProgress, repository, new ArrayList<>(toResolve), request.ignoreUnavailable());
} else {
- // only want current snapshots
- snapshotInfos = currentSnapshots.stream().map(SnapshotInfo::basic).collect(Collectors.toList());
- CollectionUtil.timSort(snapshotInfos);
+ if (repositoryData != null) {
+ // want non-current snapshots as well, which are found in the repository data
+ snapshotInfos = buildSimpleSnapshotInfos(toResolve, repositoryData, currentSnapshots);
+ } else {
+ // only want current snapshots
+ snapshotInfos = currentSnapshots.stream().map(SnapshotInfo::basic).collect(Collectors.toList());
+ CollectionUtil.timSort(snapshotInfos);
+ }
}
- }
- listener.onResponse(new GetSnapshotsResponse(snapshotInfos));
+ listener.onResponse(new GetSnapshotsResponse(snapshotInfos));
+ }, listener::onFailure);
} catch (Exception e) {
listener.onFailure(e);
}
diff --git a/server/src/main/java/org/opensearch/action/admin/cluster/snapshots/restore/RestoreSnapshotRequest.java b/server/src/main/java/org/opensearch/action/admin/cluster/snapshots/restore/RestoreSnapshotRequest.java
index 840564a4bd7a2..0f9aa65afe3c2 100644
--- a/server/src/main/java/org/opensearch/action/admin/cluster/snapshots/restore/RestoreSnapshotRequest.java
+++ b/server/src/main/java/org/opensearch/action/admin/cluster/snapshots/restore/RestoreSnapshotRequest.java
@@ -517,7 +517,7 @@ public String snapshotUuid() {
/**
* Sets the storage type for this request.
*/
- RestoreSnapshotRequest storageType(StorageType storageType) {
+ public RestoreSnapshotRequest storageType(StorageType storageType) {
this.storageType = storageType;
return this;
}
@@ -749,6 +749,6 @@ public int hashCode() {
@Override
public String toString() {
- return org.opensearch.common.Strings.toString(XContentType.JSON, this);
+ return Strings.toString(XContentType.JSON, this);
}
}
diff --git a/server/src/main/java/org/opensearch/action/admin/cluster/snapshots/status/SnapshotStats.java b/server/src/main/java/org/opensearch/action/admin/cluster/snapshots/status/SnapshotStats.java
index 282585a43183a..c7690ea0d7817 100644
--- a/server/src/main/java/org/opensearch/action/admin/cluster/snapshots/status/SnapshotStats.java
+++ b/server/src/main/java/org/opensearch/action/admin/cluster/snapshots/status/SnapshotStats.java
@@ -32,11 +32,11 @@
package org.opensearch.action.admin.cluster.snapshots.status;
-import org.opensearch.common.Strings;
+import org.opensearch.core.common.Strings;
import org.opensearch.core.common.io.stream.StreamInput;
import org.opensearch.core.common.io.stream.StreamOutput;
import org.opensearch.core.common.io.stream.Writeable;
-import org.opensearch.common.unit.ByteSizeValue;
+import org.opensearch.core.common.unit.ByteSizeValue;
import org.opensearch.common.unit.TimeValue;
import org.opensearch.core.xcontent.ToXContent;
import org.opensearch.core.xcontent.ToXContentObject;
diff --git a/server/src/main/java/org/opensearch/action/admin/cluster/snapshots/status/SnapshotStatus.java b/server/src/main/java/org/opensearch/action/admin/cluster/snapshots/status/SnapshotStatus.java
index a3b401980b109..d1e25c1f1bdc4 100644
--- a/server/src/main/java/org/opensearch/action/admin/cluster/snapshots/status/SnapshotStatus.java
+++ b/server/src/main/java/org/opensearch/action/admin/cluster/snapshots/status/SnapshotStatus.java
@@ -35,7 +35,7 @@
import org.opensearch.cluster.SnapshotsInProgress;
import org.opensearch.cluster.SnapshotsInProgress.State;
import org.opensearch.common.Nullable;
-import org.opensearch.common.Strings;
+import org.opensearch.core.common.Strings;
import org.opensearch.core.common.io.stream.StreamInput;
import org.opensearch.core.common.io.stream.StreamOutput;
import org.opensearch.core.common.io.stream.Writeable;
diff --git a/server/src/main/java/org/opensearch/action/admin/cluster/snapshots/status/TransportSnapshotsStatusAction.java b/server/src/main/java/org/opensearch/action/admin/cluster/snapshots/status/TransportSnapshotsStatusAction.java
index 78952805e5b49..b49c18082d0dc 100644
--- a/server/src/main/java/org/opensearch/action/admin/cluster/snapshots/status/TransportSnapshotsStatusAction.java
+++ b/server/src/main/java/org/opensearch/action/admin/cluster/snapshots/status/TransportSnapshotsStatusAction.java
@@ -48,7 +48,7 @@
import org.opensearch.cluster.service.ClusterService;
import org.opensearch.common.inject.Inject;
import org.opensearch.core.common.io.stream.StreamInput;
-import org.opensearch.common.util.CollectionUtils;
+import org.opensearch.core.common.util.CollectionUtils;
import org.opensearch.common.util.set.Sets;
import org.opensearch.core.common.Strings;
import org.opensearch.core.index.shard.ShardId;
diff --git a/server/src/main/java/org/opensearch/action/admin/cluster/stats/AnalysisStats.java b/server/src/main/java/org/opensearch/action/admin/cluster/stats/AnalysisStats.java
index 9cdd5bf244ecb..84b093f9bb238 100644
--- a/server/src/main/java/org/opensearch/action/admin/cluster/stats/AnalysisStats.java
+++ b/server/src/main/java/org/opensearch/action/admin/cluster/stats/AnalysisStats.java
@@ -35,7 +35,7 @@
import org.opensearch.cluster.ClusterState;
import org.opensearch.cluster.metadata.IndexMetadata;
import org.opensearch.cluster.metadata.MappingMetadata;
-import org.opensearch.common.Strings;
+import org.opensearch.core.common.Strings;
import org.opensearch.core.common.io.stream.StreamInput;
import org.opensearch.core.common.io.stream.StreamOutput;
import org.opensearch.core.common.io.stream.Writeable;
diff --git a/server/src/main/java/org/opensearch/action/admin/cluster/stats/ClusterStatsNodes.java b/server/src/main/java/org/opensearch/action/admin/cluster/stats/ClusterStatsNodes.java
index 699884ca0eab3..6dd7e09aeae0d 100644
--- a/server/src/main/java/org/opensearch/action/admin/cluster/stats/ClusterStatsNodes.java
+++ b/server/src/main/java/org/opensearch/action/admin/cluster/stats/ClusterStatsNodes.java
@@ -41,8 +41,8 @@
import org.opensearch.common.metrics.OperationStats;
import org.opensearch.common.network.NetworkModule;
import org.opensearch.common.settings.Settings;
-import org.opensearch.common.transport.TransportAddress;
-import org.opensearch.common.unit.ByteSizeValue;
+import org.opensearch.core.common.transport.TransportAddress;
+import org.opensearch.core.common.unit.ByteSizeValue;
import org.opensearch.common.unit.TimeValue;
import org.opensearch.core.common.Strings;
import org.opensearch.core.xcontent.ToXContentFragment;
diff --git a/server/src/main/java/org/opensearch/action/admin/cluster/stats/ClusterStatsResponse.java b/server/src/main/java/org/opensearch/action/admin/cluster/stats/ClusterStatsResponse.java
index 1dda39a17babc..8926f41777809 100644
--- a/server/src/main/java/org/opensearch/action/admin/cluster/stats/ClusterStatsResponse.java
+++ b/server/src/main/java/org/opensearch/action/admin/cluster/stats/ClusterStatsResponse.java
@@ -37,7 +37,6 @@
import org.opensearch.cluster.ClusterName;
import org.opensearch.cluster.ClusterState;
import org.opensearch.cluster.health.ClusterHealthStatus;
-import org.opensearch.common.Strings;
import org.opensearch.core.common.io.stream.StreamInput;
import org.opensearch.core.common.io.stream.StreamOutput;
import org.opensearch.core.xcontent.ToXContentFragment;
@@ -168,7 +167,7 @@ public String toString() {
builder.startObject();
toXContent(builder, EMPTY_PARAMS);
builder.endObject();
- return Strings.toString(builder);
+ return builder.toString();
} catch (IOException e) {
return "{ \"error\" : \"" + e.getMessage() + "\"}";
}
diff --git a/server/src/main/java/org/opensearch/action/admin/cluster/stats/MappingStats.java b/server/src/main/java/org/opensearch/action/admin/cluster/stats/MappingStats.java
index 90c78f30ea78d..66d1fc6a52295 100644
--- a/server/src/main/java/org/opensearch/action/admin/cluster/stats/MappingStats.java
+++ b/server/src/main/java/org/opensearch/action/admin/cluster/stats/MappingStats.java
@@ -35,7 +35,7 @@
import org.opensearch.cluster.ClusterState;
import org.opensearch.cluster.metadata.IndexMetadata;
import org.opensearch.cluster.metadata.MappingMetadata;
-import org.opensearch.common.Strings;
+import org.opensearch.core.common.Strings;
import org.opensearch.core.common.io.stream.StreamInput;
import org.opensearch.core.common.io.stream.StreamOutput;
import org.opensearch.core.common.io.stream.Writeable;
diff --git a/server/src/main/java/org/opensearch/action/admin/cluster/storedscripts/PutStoredScriptRequest.java b/server/src/main/java/org/opensearch/action/admin/cluster/storedscripts/PutStoredScriptRequest.java
index 761373a001ffe..8b328bc3879dd 100644
--- a/server/src/main/java/org/opensearch/action/admin/cluster/storedscripts/PutStoredScriptRequest.java
+++ b/server/src/main/java/org/opensearch/action/admin/cluster/storedscripts/PutStoredScriptRequest.java
@@ -68,7 +68,7 @@ public PutStoredScriptRequest(StreamInput in) throws IOException {
super(in);
id = in.readOptionalString();
content = in.readBytesReference();
- if (in.getVersion().onOrAfter(Version.V_3_0_0)) {
+ if (in.getVersion().onOrAfter(Version.V_2_10_0)) {
mediaType = in.readMediaType();
} else {
mediaType = in.readEnum(XContentType.class);
@@ -152,7 +152,7 @@ public void writeTo(StreamOutput out) throws IOException {
super.writeTo(out);
out.writeOptionalString(id);
out.writeBytesReference(content);
- if (out.getVersion().onOrAfter(Version.V_3_0_0)) {
+ if (out.getVersion().onOrAfter(Version.V_2_10_0)) {
mediaType.writeTo(out);
} else {
out.writeEnum((XContentType) mediaType);
diff --git a/server/src/main/java/org/opensearch/action/admin/indices/alias/Alias.java b/server/src/main/java/org/opensearch/action/admin/indices/alias/Alias.java
index 9118cdd56babd..94dbb5ff46a02 100644
--- a/server/src/main/java/org/opensearch/action/admin/indices/alias/Alias.java
+++ b/server/src/main/java/org/opensearch/action/admin/indices/alias/Alias.java
@@ -35,17 +35,16 @@
import org.opensearch.OpenSearchGenerationException;
import org.opensearch.common.Nullable;
import org.opensearch.core.ParseField;
-import org.opensearch.common.Strings;
+import org.opensearch.core.common.Strings;
import org.opensearch.core.common.bytes.BytesArray;
import org.opensearch.core.common.io.stream.StreamInput;
import org.opensearch.core.common.io.stream.StreamOutput;
import org.opensearch.core.common.io.stream.Writeable;
+import org.opensearch.core.xcontent.MediaTypeRegistry;
import org.opensearch.core.xcontent.ToXContent;
import org.opensearch.core.xcontent.ToXContentFragment;
import org.opensearch.core.xcontent.XContentBuilder;
-import org.opensearch.common.xcontent.XContentFactory;
import org.opensearch.core.xcontent.XContentParser;
-import org.opensearch.common.xcontent.XContentType;
import org.opensearch.index.query.QueryBuilder;
import java.io.IOException;
@@ -127,9 +126,9 @@ public Alias filter(Map filter) {
return this;
}
try {
- XContentBuilder builder = XContentFactory.contentBuilder(XContentType.JSON);
+ XContentBuilder builder = MediaTypeRegistry.contentBuilder(MediaTypeRegistry.JSON);
builder.map(filter);
- this.filter = Strings.toString(builder);
+ this.filter = builder.toString();
return this;
} catch (IOException e) {
throw new OpenSearchGenerationException("Failed to generate [" + filter + "]", e);
@@ -145,10 +144,10 @@ public Alias filter(QueryBuilder filterBuilder) {
return this;
}
try {
- XContentBuilder builder = XContentFactory.jsonBuilder();
+ XContentBuilder builder = MediaTypeRegistry.JSON.contentBuilder();
filterBuilder.toXContent(builder, ToXContent.EMPTY_PARAMS);
builder.close();
- this.filter = Strings.toString(builder);
+ this.filter = builder.toString();
return this;
} catch (IOException e) {
throw new OpenSearchGenerationException("Failed to build json for alias request", e);
@@ -278,7 +277,7 @@ public XContentBuilder toXContent(XContentBuilder builder, Params params) throws
if (filter != null) {
try (InputStream stream = new BytesArray(filter).streamInput()) {
- builder.rawField(FILTER.getPreferredName(), stream, XContentType.JSON);
+ builder.rawField(FILTER.getPreferredName(), stream, MediaTypeRegistry.JSON);
}
}
@@ -305,7 +304,7 @@ public XContentBuilder toXContent(XContentBuilder builder, Params params) throws
@Override
public String toString() {
- return Strings.toString(XContentType.JSON, this);
+ return Strings.toString(MediaTypeRegistry.JSON, this);
}
@Override
diff --git a/server/src/main/java/org/opensearch/action/admin/indices/alias/IndicesAliasesRequest.java b/server/src/main/java/org/opensearch/action/admin/indices/alias/IndicesAliasesRequest.java
index 142cbe6a0ab0b..cd99a1067a8a4 100644
--- a/server/src/main/java/org/opensearch/action/admin/indices/alias/IndicesAliasesRequest.java
+++ b/server/src/main/java/org/opensearch/action/admin/indices/alias/IndicesAliasesRequest.java
@@ -48,6 +48,7 @@
import org.opensearch.common.xcontent.XContentType;
import org.opensearch.core.common.Strings;
import org.opensearch.core.xcontent.ConstructingObjectParser;
+import org.opensearch.core.xcontent.MediaTypeRegistry;
import org.opensearch.core.xcontent.ObjectParser;
import org.opensearch.core.xcontent.ObjectParser.ValueType;
import org.opensearch.core.xcontent.ToXContent;
@@ -428,9 +429,9 @@ public AliasActions filter(Map filter) {
return this;
}
try {
- XContentBuilder builder = XContentFactory.contentBuilder(XContentType.JSON);
+ XContentBuilder builder = MediaTypeRegistry.contentBuilder(XContentType.JSON);
builder.map(filter);
- this.filter = org.opensearch.common.Strings.toString(builder);
+ this.filter = builder.toString();
return this;
} catch (IOException e) {
throw new OpenSearchGenerationException("Failed to generate [" + filter + "]", e);
@@ -446,7 +447,7 @@ public AliasActions filter(QueryBuilder filter) {
XContentBuilder builder = XContentFactory.jsonBuilder();
filter.toXContent(builder, ToXContent.EMPTY_PARAMS);
builder.close();
- this.filter = org.opensearch.common.Strings.toString(builder);
+ this.filter = builder.toString();
return this;
} catch (IOException e) {
throw new OpenSearchGenerationException("Failed to build json for alias request", e);
diff --git a/server/src/main/java/org/opensearch/action/admin/indices/analyze/AnalyzeAction.java b/server/src/main/java/org/opensearch/action/admin/indices/analyze/AnalyzeAction.java
index 37e05151c8179..00144eedc438f 100644
--- a/server/src/main/java/org/opensearch/action/admin/indices/analyze/AnalyzeAction.java
+++ b/server/src/main/java/org/opensearch/action/admin/indices/analyze/AnalyzeAction.java
@@ -36,7 +36,7 @@
import org.opensearch.action.ActionResponse;
import org.opensearch.action.ActionType;
import org.opensearch.action.support.single.shard.SingleShardRequest;
-import org.opensearch.common.Strings;
+import org.opensearch.core.common.Strings;
import org.opensearch.core.common.io.stream.StreamInput;
import org.opensearch.core.common.io.stream.StreamOutput;
import org.opensearch.core.common.io.stream.Writeable;
diff --git a/server/src/main/java/org/opensearch/action/admin/indices/close/CloseIndexRequest.java b/server/src/main/java/org/opensearch/action/admin/indices/close/CloseIndexRequest.java
index 8c6dde80d8d97..0f98550343a13 100644
--- a/server/src/main/java/org/opensearch/action/admin/indices/close/CloseIndexRequest.java
+++ b/server/src/main/java/org/opensearch/action/admin/indices/close/CloseIndexRequest.java
@@ -39,7 +39,7 @@
import org.opensearch.action.support.master.AcknowledgedRequest;
import org.opensearch.core.common.io.stream.StreamInput;
import org.opensearch.core.common.io.stream.StreamOutput;
-import org.opensearch.common.util.CollectionUtils;
+import org.opensearch.core.common.util.CollectionUtils;
import java.io.IOException;
diff --git a/server/src/main/java/org/opensearch/action/admin/indices/close/CloseIndexResponse.java b/server/src/main/java/org/opensearch/action/admin/indices/close/CloseIndexResponse.java
index 8e58ee92db80a..c069cd17b8c51 100644
--- a/server/src/main/java/org/opensearch/action/admin/indices/close/CloseIndexResponse.java
+++ b/server/src/main/java/org/opensearch/action/admin/indices/close/CloseIndexResponse.java
@@ -35,11 +35,11 @@
import org.opensearch.core.action.support.DefaultShardOperationFailedException;
import org.opensearch.action.support.master.ShardsAcknowledgedResponse;
import org.opensearch.common.Nullable;
-import org.opensearch.common.Strings;
+import org.opensearch.core.common.Strings;
import org.opensearch.core.common.io.stream.StreamInput;
import org.opensearch.core.common.io.stream.StreamOutput;
import org.opensearch.core.common.io.stream.Writeable;
-import org.opensearch.common.util.CollectionUtils;
+import org.opensearch.core.common.util.CollectionUtils;
import org.opensearch.common.xcontent.XContentType;
import org.opensearch.core.xcontent.ToXContentFragment;
import org.opensearch.core.xcontent.XContentBuilder;
diff --git a/server/src/main/java/org/opensearch/action/admin/indices/create/CreateIndexRequest.java b/server/src/main/java/org/opensearch/action/admin/indices/create/CreateIndexRequest.java
index 53840b7697e45..001b466fc47e5 100644
--- a/server/src/main/java/org/opensearch/action/admin/indices/create/CreateIndexRequest.java
+++ b/server/src/main/java/org/opensearch/action/admin/indices/create/CreateIndexRequest.java
@@ -42,7 +42,6 @@
import org.opensearch.action.support.ActiveShardCount;
import org.opensearch.action.support.IndicesOptions;
import org.opensearch.action.support.master.AcknowledgedRequest;
-import org.opensearch.common.Strings;
import org.opensearch.core.common.bytes.BytesArray;
import org.opensearch.core.common.bytes.BytesReference;
import org.opensearch.core.common.io.stream.StreamInput;
@@ -230,7 +229,7 @@ private CreateIndexRequest settings(String source, MediaType mediaType) {
* Allows to set the settings using a json builder.
*/
public CreateIndexRequest settings(XContentBuilder builder) {
- settings(Strings.toString(builder), builder.contentType());
+ settings(builder.toString(), builder.contentType());
return this;
}
@@ -346,7 +345,7 @@ private CreateIndexRequest mapping(String type, Map source) {
try {
XContentBuilder builder = XContentFactory.jsonBuilder();
builder.map(source);
- return mapping(Strings.toString(builder));
+ return mapping(builder.toString());
} catch (IOException e) {
throw new OpenSearchGenerationException("Failed to generate [" + source + "]", e);
}
diff --git a/server/src/main/java/org/opensearch/action/admin/indices/create/CreateIndexRequestBuilder.java b/server/src/main/java/org/opensearch/action/admin/indices/create/CreateIndexRequestBuilder.java
index a716959614065..27f20f028ea74 100644
--- a/server/src/main/java/org/opensearch/action/admin/indices/create/CreateIndexRequestBuilder.java
+++ b/server/src/main/java/org/opensearch/action/admin/indices/create/CreateIndexRequestBuilder.java
@@ -39,6 +39,7 @@
import org.opensearch.core.common.bytes.BytesReference;
import org.opensearch.common.settings.Settings;
import org.opensearch.common.xcontent.LoggingDeprecationHandler;
+import org.opensearch.core.xcontent.MediaType;
import org.opensearch.core.xcontent.XContentBuilder;
import org.opensearch.common.xcontent.XContentType;
@@ -200,8 +201,8 @@ public CreateIndexRequestBuilder addAlias(Alias alias) {
/**
* Sets the settings and mappings as a single source.
*/
- public CreateIndexRequestBuilder setSource(String source, XContentType xContentType) {
- request.source(source, xContentType);
+ public CreateIndexRequestBuilder setSource(String source, MediaType mediaType) {
+ request.source(source, mediaType);
return this;
}
diff --git a/server/src/main/java/org/opensearch/action/admin/indices/datastream/DataStreamsStatsAction.java b/server/src/main/java/org/opensearch/action/admin/indices/datastream/DataStreamsStatsAction.java
index 0b2375850f1fc..2b536d24c946f 100644
--- a/server/src/main/java/org/opensearch/action/admin/indices/datastream/DataStreamsStatsAction.java
+++ b/server/src/main/java/org/opensearch/action/admin/indices/datastream/DataStreamsStatsAction.java
@@ -55,7 +55,7 @@
import org.opensearch.core.common.io.stream.StreamInput;
import org.opensearch.core.common.io.stream.StreamOutput;
import org.opensearch.core.common.io.stream.Writeable;
-import org.opensearch.common.unit.ByteSizeValue;
+import org.opensearch.core.common.unit.ByteSizeValue;
import org.opensearch.core.xcontent.ToXContentObject;
import org.opensearch.core.xcontent.XContentBuilder;
import org.opensearch.index.IndexService;
diff --git a/server/src/main/java/org/opensearch/action/admin/indices/datastream/DeleteDataStreamAction.java b/server/src/main/java/org/opensearch/action/admin/indices/datastream/DeleteDataStreamAction.java
index e5ae7cd582481..5f5be3c166dbc 100644
--- a/server/src/main/java/org/opensearch/action/admin/indices/datastream/DeleteDataStreamAction.java
+++ b/server/src/main/java/org/opensearch/action/admin/indices/datastream/DeleteDataStreamAction.java
@@ -59,7 +59,7 @@
import org.opensearch.core.common.io.stream.StreamOutput;
import org.opensearch.common.regex.Regex;
import org.opensearch.common.unit.TimeValue;
-import org.opensearch.common.util.CollectionUtils;
+import org.opensearch.core.common.util.CollectionUtils;
import org.opensearch.core.common.Strings;
import org.opensearch.core.index.Index;
import org.opensearch.snapshots.SnapshotInProgressException;
diff --git a/server/src/main/java/org/opensearch/action/admin/indices/delete/DeleteIndexRequest.java b/server/src/main/java/org/opensearch/action/admin/indices/delete/DeleteIndexRequest.java
index 35fb6a8748941..006da6b3cbb09 100644
--- a/server/src/main/java/org/opensearch/action/admin/indices/delete/DeleteIndexRequest.java
+++ b/server/src/main/java/org/opensearch/action/admin/indices/delete/DeleteIndexRequest.java
@@ -38,7 +38,7 @@
import org.opensearch.action.support.master.AcknowledgedRequest;
import org.opensearch.core.common.io.stream.StreamInput;
import org.opensearch.core.common.io.stream.StreamOutput;
-import org.opensearch.common.util.CollectionUtils;
+import org.opensearch.core.common.util.CollectionUtils;
import java.io.IOException;
diff --git a/server/src/main/java/org/opensearch/action/admin/indices/get/GetIndexResponse.java b/server/src/main/java/org/opensearch/action/admin/indices/get/GetIndexResponse.java
index df05c3dd665d2..b0fc6856eb43c 100644
--- a/server/src/main/java/org/opensearch/action/admin/indices/get/GetIndexResponse.java
+++ b/server/src/main/java/org/opensearch/action/admin/indices/get/GetIndexResponse.java
@@ -36,7 +36,7 @@
import org.opensearch.action.ActionResponse;
import org.opensearch.cluster.metadata.AliasMetadata;
import org.opensearch.cluster.metadata.MappingMetadata;
-import org.opensearch.common.Strings;
+import org.opensearch.core.common.Strings;
import org.opensearch.core.common.io.stream.StreamInput;
import org.opensearch.core.common.io.stream.StreamOutput;
import org.opensearch.common.settings.Settings;
diff --git a/server/src/main/java/org/opensearch/action/admin/indices/mapping/get/GetMappingsResponse.java b/server/src/main/java/org/opensearch/action/admin/indices/mapping/get/GetMappingsResponse.java
index c4c9094e276d6..6b3fff19d532f 100644
--- a/server/src/main/java/org/opensearch/action/admin/indices/mapping/get/GetMappingsResponse.java
+++ b/server/src/main/java/org/opensearch/action/admin/indices/mapping/get/GetMappingsResponse.java
@@ -35,7 +35,7 @@
import org.opensearch.Version;
import org.opensearch.action.ActionResponse;
import org.opensearch.cluster.metadata.MappingMetadata;
-import org.opensearch.common.Strings;
+import org.opensearch.core.common.Strings;
import org.opensearch.core.common.io.stream.StreamInput;
import org.opensearch.core.common.io.stream.StreamOutput;
import org.opensearch.common.xcontent.XContentType;
diff --git a/server/src/main/java/org/opensearch/action/admin/indices/mapping/put/PutMappingRequest.java b/server/src/main/java/org/opensearch/action/admin/indices/mapping/put/PutMappingRequest.java
index 465a44556c081..373331eb1554b 100644
--- a/server/src/main/java/org/opensearch/action/admin/indices/mapping/put/PutMappingRequest.java
+++ b/server/src/main/java/org/opensearch/action/admin/indices/mapping/put/PutMappingRequest.java
@@ -43,12 +43,13 @@
import org.opensearch.core.common.bytes.BytesReference;
import org.opensearch.core.common.io.stream.StreamInput;
import org.opensearch.core.common.io.stream.StreamOutput;
-import org.opensearch.common.util.CollectionUtils;
+import org.opensearch.core.common.util.CollectionUtils;
import org.opensearch.common.xcontent.XContentFactory;
import org.opensearch.common.xcontent.XContentHelper;
import org.opensearch.common.xcontent.XContentType;
import org.opensearch.core.common.Strings;
import org.opensearch.core.xcontent.MediaType;
+import org.opensearch.core.xcontent.MediaTypeRegistry;
import org.opensearch.core.xcontent.ToXContentObject;
import org.opensearch.core.xcontent.XContentBuilder;
import org.opensearch.core.index.Index;
@@ -250,7 +251,7 @@ public static XContentBuilder simpleMapping(String... source) {
builder.startObject(fieldName);
String[] s1 = Strings.splitStringByCommaToArray(source[i]);
for (String s : s1) {
- String[] s2 = org.opensearch.common.Strings.split(s, "=");
+ String[] s2 = Strings.split(s, "=");
if (s2.length != 2) {
throw new IllegalArgumentException("malformed " + s);
}
@@ -270,7 +271,7 @@ public static XContentBuilder simpleMapping(String... source) {
builder.startObject(fieldName);
String[] s1 = Strings.splitStringByCommaToArray(source[i]);
for (String s : s1) {
- String[] s2 = org.opensearch.common.Strings.split(s, "=");
+ String[] s2 = Strings.split(s, "=");
if (s2.length != 2) {
throw new IllegalArgumentException("malformed " + s);
}
@@ -298,7 +299,7 @@ public PutMappingRequest source(XContentBuilder mappingBuilder) {
*/
public PutMappingRequest source(Map mappingSource) {
try {
- XContentBuilder builder = XContentFactory.contentBuilder(XContentType.JSON);
+ XContentBuilder builder = MediaTypeRegistry.contentBuilder(XContentType.JSON);
builder.map(mappingSource);
return source(BytesReference.bytes(builder), builder.contentType());
} catch (IOException e) {
diff --git a/server/src/main/java/org/opensearch/action/admin/indices/open/OpenIndexRequest.java b/server/src/main/java/org/opensearch/action/admin/indices/open/OpenIndexRequest.java
index d42f3699765e7..16451e311e7d3 100644
--- a/server/src/main/java/org/opensearch/action/admin/indices/open/OpenIndexRequest.java
+++ b/server/src/main/java/org/opensearch/action/admin/indices/open/OpenIndexRequest.java
@@ -39,7 +39,7 @@
import org.opensearch.action.support.master.AcknowledgedRequest;
import org.opensearch.core.common.io.stream.StreamInput;
import org.opensearch.core.common.io.stream.StreamOutput;
-import org.opensearch.common.util.CollectionUtils;
+import org.opensearch.core.common.util.CollectionUtils;
import java.io.IOException;
import java.util.Arrays;
diff --git a/server/src/main/java/org/opensearch/action/admin/indices/readonly/AddIndexBlockRequest.java b/server/src/main/java/org/opensearch/action/admin/indices/readonly/AddIndexBlockRequest.java
index ca5a17f0a1520..b5097f96fe52b 100644
--- a/server/src/main/java/org/opensearch/action/admin/indices/readonly/AddIndexBlockRequest.java
+++ b/server/src/main/java/org/opensearch/action/admin/indices/readonly/AddIndexBlockRequest.java
@@ -39,7 +39,7 @@
import org.opensearch.cluster.metadata.IndexMetadata.APIBlock;
import org.opensearch.core.common.io.stream.StreamInput;
import org.opensearch.core.common.io.stream.StreamOutput;
-import org.opensearch.common.util.CollectionUtils;
+import org.opensearch.core.common.util.CollectionUtils;
import java.io.IOException;
import java.util.Objects;
diff --git a/server/src/main/java/org/opensearch/action/admin/indices/readonly/AddIndexBlockResponse.java b/server/src/main/java/org/opensearch/action/admin/indices/readonly/AddIndexBlockResponse.java
index 39268d752c8c5..42dacfdb3ca2d 100644
--- a/server/src/main/java/org/opensearch/action/admin/indices/readonly/AddIndexBlockResponse.java
+++ b/server/src/main/java/org/opensearch/action/admin/indices/readonly/AddIndexBlockResponse.java
@@ -35,11 +35,11 @@
import org.opensearch.core.action.support.DefaultShardOperationFailedException;
import org.opensearch.action.support.master.ShardsAcknowledgedResponse;
import org.opensearch.common.Nullable;
-import org.opensearch.common.Strings;
+import org.opensearch.core.common.Strings;
import org.opensearch.core.common.io.stream.StreamInput;
import org.opensearch.core.common.io.stream.StreamOutput;
import org.opensearch.core.common.io.stream.Writeable;
-import org.opensearch.common.util.CollectionUtils;
+import org.opensearch.core.common.util.CollectionUtils;
import org.opensearch.common.xcontent.XContentType;
import org.opensearch.core.xcontent.ToXContentFragment;
import org.opensearch.core.xcontent.XContentBuilder;
diff --git a/server/src/main/java/org/opensearch/action/admin/indices/recovery/RecoveryResponse.java b/server/src/main/java/org/opensearch/action/admin/indices/recovery/RecoveryResponse.java
index 7664a73c27fc8..a7015a9d580df 100644
--- a/server/src/main/java/org/opensearch/action/admin/indices/recovery/RecoveryResponse.java
+++ b/server/src/main/java/org/opensearch/action/admin/indices/recovery/RecoveryResponse.java
@@ -34,7 +34,7 @@
import org.opensearch.core.action.support.DefaultShardOperationFailedException;
import org.opensearch.action.support.broadcast.BroadcastResponse;
-import org.opensearch.common.Strings;
+import org.opensearch.core.common.Strings;
import org.opensearch.core.common.io.stream.StreamInput;
import org.opensearch.core.common.io.stream.StreamOutput;
import org.opensearch.common.xcontent.XContentType;
diff --git a/server/src/main/java/org/opensearch/action/admin/indices/replication/SegmentReplicationStatsResponse.java b/server/src/main/java/org/opensearch/action/admin/indices/replication/SegmentReplicationStatsResponse.java
index 63899668badca..99ff501c1eed8 100644
--- a/server/src/main/java/org/opensearch/action/admin/indices/replication/SegmentReplicationStatsResponse.java
+++ b/server/src/main/java/org/opensearch/action/admin/indices/replication/SegmentReplicationStatsResponse.java
@@ -10,7 +10,7 @@
import org.opensearch.core.action.support.DefaultShardOperationFailedException;
import org.opensearch.action.support.broadcast.BroadcastResponse;
-import org.opensearch.common.Strings;
+import org.opensearch.core.common.Strings;
import org.opensearch.core.common.io.stream.StreamInput;
import org.opensearch.core.common.io.stream.StreamOutput;
import org.opensearch.common.xcontent.XContentType;
diff --git a/server/src/main/java/org/opensearch/action/admin/indices/rollover/Condition.java b/server/src/main/java/org/opensearch/action/admin/indices/rollover/Condition.java
index 280dc307447b7..e014d6d703500 100644
--- a/server/src/main/java/org/opensearch/action/admin/indices/rollover/Condition.java
+++ b/server/src/main/java/org/opensearch/action/admin/indices/rollover/Condition.java
@@ -34,7 +34,7 @@
import org.opensearch.Version;
import org.opensearch.core.common.io.stream.NamedWriteable;
-import org.opensearch.common.unit.ByteSizeValue;
+import org.opensearch.core.common.unit.ByteSizeValue;
import org.opensearch.core.xcontent.ToXContentFragment;
import java.util.Objects;
diff --git a/server/src/main/java/org/opensearch/action/admin/indices/rollover/MaxSizeCondition.java b/server/src/main/java/org/opensearch/action/admin/indices/rollover/MaxSizeCondition.java
index f0ab571ea9f75..faa3558420a5c 100644
--- a/server/src/main/java/org/opensearch/action/admin/indices/rollover/MaxSizeCondition.java
+++ b/server/src/main/java/org/opensearch/action/admin/indices/rollover/MaxSizeCondition.java
@@ -34,8 +34,8 @@
import org.opensearch.core.common.io.stream.StreamInput;
import org.opensearch.core.common.io.stream.StreamOutput;
-import org.opensearch.common.unit.ByteSizeUnit;
-import org.opensearch.common.unit.ByteSizeValue;
+import org.opensearch.core.common.unit.ByteSizeUnit;
+import org.opensearch.core.common.unit.ByteSizeValue;
import org.opensearch.core.xcontent.XContentBuilder;
import org.opensearch.core.xcontent.XContentParser;
diff --git a/server/src/main/java/org/opensearch/action/admin/indices/rollover/RolloverInfo.java b/server/src/main/java/org/opensearch/action/admin/indices/rollover/RolloverInfo.java
index f58c842be374b..8503c9b882c93 100644
--- a/server/src/main/java/org/opensearch/action/admin/indices/rollover/RolloverInfo.java
+++ b/server/src/main/java/org/opensearch/action/admin/indices/rollover/RolloverInfo.java
@@ -34,7 +34,7 @@
import org.opensearch.cluster.AbstractDiffable;
import org.opensearch.cluster.Diff;
-import org.opensearch.common.Strings;
+import org.opensearch.core.common.Strings;
import org.opensearch.core.common.io.stream.StreamInput;
import org.opensearch.core.common.io.stream.StreamOutput;
import org.opensearch.core.common.io.stream.Writeable;
diff --git a/server/src/main/java/org/opensearch/action/admin/indices/rollover/RolloverRequest.java b/server/src/main/java/org/opensearch/action/admin/indices/rollover/RolloverRequest.java
index 95a4b6573611d..11a7555d15d3f 100644
--- a/server/src/main/java/org/opensearch/action/admin/indices/rollover/RolloverRequest.java
+++ b/server/src/main/java/org/opensearch/action/admin/indices/rollover/RolloverRequest.java
@@ -40,7 +40,7 @@
import org.opensearch.core.ParseField;
import org.opensearch.core.common.io.stream.StreamInput;
import org.opensearch.core.common.io.stream.StreamOutput;
-import org.opensearch.common.unit.ByteSizeValue;
+import org.opensearch.core.common.unit.ByteSizeValue;
import org.opensearch.common.unit.TimeValue;
import org.opensearch.core.xcontent.ObjectParser;
import org.opensearch.core.xcontent.XContentParser;
diff --git a/server/src/main/java/org/opensearch/action/admin/indices/rollover/RolloverRequestBuilder.java b/server/src/main/java/org/opensearch/action/admin/indices/rollover/RolloverRequestBuilder.java
index ed598c14acec3..1603f95f4d512 100644
--- a/server/src/main/java/org/opensearch/action/admin/indices/rollover/RolloverRequestBuilder.java
+++ b/server/src/main/java/org/opensearch/action/admin/indices/rollover/RolloverRequestBuilder.java
@@ -36,7 +36,7 @@
import org.opensearch.action.support.clustermanager.ClusterManagerNodeOperationRequestBuilder;
import org.opensearch.client.OpenSearchClient;
import org.opensearch.common.settings.Settings;
-import org.opensearch.common.unit.ByteSizeValue;
+import org.opensearch.core.common.unit.ByteSizeValue;
import org.opensearch.common.unit.TimeValue;
/**
diff --git a/server/src/main/java/org/opensearch/action/admin/indices/rollover/TransportRolloverAction.java b/server/src/main/java/org/opensearch/action/admin/indices/rollover/TransportRolloverAction.java
index 4ddff1563885a..ca2921ceb70c8 100644
--- a/server/src/main/java/org/opensearch/action/admin/indices/rollover/TransportRolloverAction.java
+++ b/server/src/main/java/org/opensearch/action/admin/indices/rollover/TransportRolloverAction.java
@@ -55,7 +55,7 @@
import org.opensearch.common.Nullable;
import org.opensearch.common.inject.Inject;
import org.opensearch.core.common.io.stream.StreamInput;
-import org.opensearch.common.unit.ByteSizeValue;
+import org.opensearch.core.common.unit.ByteSizeValue;
import org.opensearch.index.shard.DocsStats;
import org.opensearch.tasks.Task;
import org.opensearch.threadpool.ThreadPool;
diff --git a/server/src/main/java/org/opensearch/action/admin/indices/settings/get/GetSettingsResponse.java b/server/src/main/java/org/opensearch/action/admin/indices/settings/get/GetSettingsResponse.java
index 05b06fc2b62c7..a1178fbe21f3c 100644
--- a/server/src/main/java/org/opensearch/action/admin/indices/settings/get/GetSettingsResponse.java
+++ b/server/src/main/java/org/opensearch/action/admin/indices/settings/get/GetSettingsResponse.java
@@ -33,7 +33,6 @@
package org.opensearch.action.admin.indices.settings.get;
import org.opensearch.action.ActionResponse;
-import org.opensearch.common.Strings;
import org.opensearch.core.common.io.stream.StreamInput;
import org.opensearch.core.common.io.stream.StreamOutput;
import org.opensearch.common.settings.Settings;
@@ -193,7 +192,7 @@ public String toString() {
ByteArrayOutputStream baos = new ByteArrayOutputStream();
XContentBuilder builder = new XContentBuilder(JsonXContent.jsonXContent, baos);
toXContent(builder, ToXContent.EMPTY_PARAMS, false);
- return Strings.toString(builder);
+ return builder.toString();
} catch (IOException e) {
throw new IllegalStateException(e); // should not be possible here
}
diff --git a/server/src/main/java/org/opensearch/action/admin/indices/settings/get/TransportGetSettingsAction.java b/server/src/main/java/org/opensearch/action/admin/indices/settings/get/TransportGetSettingsAction.java
index 22cbcf804f9d2..d3bf275823278 100644
--- a/server/src/main/java/org/opensearch/action/admin/indices/settings/get/TransportGetSettingsAction.java
+++ b/server/src/main/java/org/opensearch/action/admin/indices/settings/get/TransportGetSettingsAction.java
@@ -47,7 +47,7 @@
import org.opensearch.common.settings.IndexScopedSettings;
import org.opensearch.common.settings.Settings;
import org.opensearch.common.settings.SettingsFilter;
-import org.opensearch.common.util.CollectionUtils;
+import org.opensearch.core.common.util.CollectionUtils;
import org.opensearch.core.index.Index;
import org.opensearch.threadpool.ThreadPool;
import org.opensearch.transport.TransportService;
diff --git a/server/src/main/java/org/opensearch/action/admin/indices/settings/put/UpdateSettingsRequest.java b/server/src/main/java/org/opensearch/action/admin/indices/settings/put/UpdateSettingsRequest.java
index a7b7e005bce90..3c12f3eb8b728 100644
--- a/server/src/main/java/org/opensearch/action/admin/indices/settings/put/UpdateSettingsRequest.java
+++ b/server/src/main/java/org/opensearch/action/admin/indices/settings/put/UpdateSettingsRequest.java
@@ -36,7 +36,7 @@
import org.opensearch.action.IndicesRequest;
import org.opensearch.action.support.IndicesOptions;
import org.opensearch.action.support.master.AcknowledgedRequest;
-import org.opensearch.common.Strings;
+import org.opensearch.core.common.Strings;
import org.opensearch.core.common.io.stream.StreamInput;
import org.opensearch.core.common.io.stream.StreamOutput;
import org.opensearch.common.settings.Settings;
diff --git a/server/src/main/java/org/opensearch/action/admin/indices/shrink/ResizeRequest.java b/server/src/main/java/org/opensearch/action/admin/indices/shrink/ResizeRequest.java
index b2e7ed92e608a..aef2dc8f2c7c8 100644
--- a/server/src/main/java/org/opensearch/action/admin/indices/shrink/ResizeRequest.java
+++ b/server/src/main/java/org/opensearch/action/admin/indices/shrink/ResizeRequest.java
@@ -42,7 +42,7 @@
import org.opensearch.cluster.metadata.IndexMetadata;
import org.opensearch.core.common.io.stream.StreamInput;
import org.opensearch.core.common.io.stream.StreamOutput;
-import org.opensearch.common.unit.ByteSizeValue;
+import org.opensearch.core.common.unit.ByteSizeValue;
import org.opensearch.core.ParseField;
import org.opensearch.core.xcontent.ObjectParser;
import org.opensearch.core.xcontent.ToXContentObject;
diff --git a/server/src/main/java/org/opensearch/action/admin/indices/shrink/ResizeRequestBuilder.java b/server/src/main/java/org/opensearch/action/admin/indices/shrink/ResizeRequestBuilder.java
index eb05c0a69b78b..855e678c77b9b 100644
--- a/server/src/main/java/org/opensearch/action/admin/indices/shrink/ResizeRequestBuilder.java
+++ b/server/src/main/java/org/opensearch/action/admin/indices/shrink/ResizeRequestBuilder.java
@@ -37,7 +37,7 @@
import org.opensearch.action.support.master.AcknowledgedRequestBuilder;
import org.opensearch.client.OpenSearchClient;
import org.opensearch.common.settings.Settings;
-import org.opensearch.common.unit.ByteSizeValue;
+import org.opensearch.core.common.unit.ByteSizeValue;
/**
* Transport request builder for resizing an index
diff --git a/server/src/main/java/org/opensearch/action/admin/indices/shrink/TransportResizeAction.java b/server/src/main/java/org/opensearch/action/admin/indices/shrink/TransportResizeAction.java
index 0d31c90a98f56..328768bc9ae0e 100644
--- a/server/src/main/java/org/opensearch/action/admin/indices/shrink/TransportResizeAction.java
+++ b/server/src/main/java/org/opensearch/action/admin/indices/shrink/TransportResizeAction.java
@@ -57,7 +57,7 @@
import org.opensearch.core.index.shard.ShardId;
import org.opensearch.threadpool.ThreadPool;
import org.opensearch.transport.TransportService;
-import org.opensearch.common.unit.ByteSizeValue;
+import org.opensearch.core.common.unit.ByteSizeValue;
import org.opensearch.index.store.StoreStats;
import java.io.IOException;
diff --git a/server/src/main/java/org/opensearch/action/admin/indices/stats/CommonStats.java b/server/src/main/java/org/opensearch/action/admin/indices/stats/CommonStats.java
index 5a3a34e9a2ebe..e4abaef4ddfa8 100644
--- a/server/src/main/java/org/opensearch/action/admin/indices/stats/CommonStats.java
+++ b/server/src/main/java/org/opensearch/action/admin/indices/stats/CommonStats.java
@@ -37,7 +37,7 @@
import org.opensearch.core.common.io.stream.StreamInput;
import org.opensearch.core.common.io.stream.StreamOutput;
import org.opensearch.core.common.io.stream.Writeable;
-import org.opensearch.common.unit.ByteSizeValue;
+import org.opensearch.core.common.unit.ByteSizeValue;
import org.opensearch.core.xcontent.ToXContent;
import org.opensearch.core.xcontent.ToXContentFragment;
import org.opensearch.core.xcontent.XContentBuilder;
diff --git a/server/src/main/java/org/opensearch/action/admin/indices/stats/IndicesStatsResponse.java b/server/src/main/java/org/opensearch/action/admin/indices/stats/IndicesStatsResponse.java
index 4014bad06ff9a..b262835dc2f2a 100644
--- a/server/src/main/java/org/opensearch/action/admin/indices/stats/IndicesStatsResponse.java
+++ b/server/src/main/java/org/opensearch/action/admin/indices/stats/IndicesStatsResponse.java
@@ -36,7 +36,7 @@
import org.opensearch.core.action.support.DefaultShardOperationFailedException;
import org.opensearch.action.support.broadcast.BroadcastResponse;
import org.opensearch.cluster.routing.ShardRouting;
-import org.opensearch.common.Strings;
+import org.opensearch.core.common.Strings;
import org.opensearch.core.common.io.stream.StreamInput;
import org.opensearch.core.common.io.stream.StreamOutput;
import org.opensearch.common.xcontent.XContentType;
diff --git a/server/src/main/java/org/opensearch/action/admin/indices/template/put/PutIndexTemplateRequest.java b/server/src/main/java/org/opensearch/action/admin/indices/template/put/PutIndexTemplateRequest.java
index 099f7c34ff818..011f10bfaf6d6 100644
--- a/server/src/main/java/org/opensearch/action/admin/indices/template/put/PutIndexTemplateRequest.java
+++ b/server/src/main/java/org/opensearch/action/admin/indices/template/put/PutIndexTemplateRequest.java
@@ -41,7 +41,6 @@
import org.opensearch.action.support.IndicesOptions;
import org.opensearch.action.support.clustermanager.ClusterManagerNodeRequest;
import org.opensearch.common.Nullable;
-import org.opensearch.common.Strings;
import org.opensearch.core.common.bytes.BytesArray;
import org.opensearch.core.common.bytes.BytesReference;
import org.opensearch.core.common.io.stream.StreamInput;
@@ -298,7 +297,7 @@ public PutIndexTemplateRequest mapping(Map source) {
try {
XContentBuilder builder = XContentFactory.jsonBuilder();
builder.map(source);
- mappings = Strings.toString(builder);
+ mappings = builder.toString();
return this;
} catch (IOException e) {
throw new OpenSearchGenerationException("Failed to generate [" + source + "]", e);
diff --git a/server/src/main/java/org/opensearch/action/admin/indices/upgrade/get/UpgradeStatusResponse.java b/server/src/main/java/org/opensearch/action/admin/indices/upgrade/get/UpgradeStatusResponse.java
index 7fe663a347ee3..28215475416ba 100644
--- a/server/src/main/java/org/opensearch/action/admin/indices/upgrade/get/UpgradeStatusResponse.java
+++ b/server/src/main/java/org/opensearch/action/admin/indices/upgrade/get/UpgradeStatusResponse.java
@@ -36,7 +36,7 @@
import org.opensearch.action.support.broadcast.BroadcastResponse;
import org.opensearch.core.common.io.stream.StreamInput;
import org.opensearch.core.common.io.stream.StreamOutput;
-import org.opensearch.common.unit.ByteSizeValue;
+import org.opensearch.core.common.unit.ByteSizeValue;
import org.opensearch.core.xcontent.XContentBuilder;
import java.io.IOException;
diff --git a/server/src/main/java/org/opensearch/action/bulk/BulkItemRequest.java b/server/src/main/java/org/opensearch/action/bulk/BulkItemRequest.java
index 2a85b7abb741a..bbf887b71cbb2 100644
--- a/server/src/main/java/org/opensearch/action/bulk/BulkItemRequest.java
+++ b/server/src/main/java/org/opensearch/action/bulk/BulkItemRequest.java
@@ -36,7 +36,7 @@
import org.apache.lucene.util.RamUsageEstimator;
import org.opensearch.action.DocWriteRequest;
import org.opensearch.common.Nullable;
-import org.opensearch.common.Strings;
+import org.opensearch.core.common.Strings;
import org.opensearch.core.common.io.stream.StreamInput;
import org.opensearch.core.common.io.stream.StreamOutput;
import org.opensearch.core.common.io.stream.Writeable;
diff --git a/server/src/main/java/org/opensearch/action/bulk/BulkItemResponse.java b/server/src/main/java/org/opensearch/action/bulk/BulkItemResponse.java
index 26cd318e7a280..08a8e7b6d7865 100644
--- a/server/src/main/java/org/opensearch/action/bulk/BulkItemResponse.java
+++ b/server/src/main/java/org/opensearch/action/bulk/BulkItemResponse.java
@@ -41,7 +41,7 @@
import org.opensearch.action.index.IndexResponse;
import org.opensearch.action.update.UpdateResponse;
import org.opensearch.common.CheckedConsumer;
-import org.opensearch.common.Strings;
+import org.opensearch.core.common.Strings;
import org.opensearch.core.common.io.stream.StreamInput;
import org.opensearch.core.common.io.stream.StreamOutput;
import org.opensearch.core.common.io.stream.Writeable;
diff --git a/server/src/main/java/org/opensearch/action/bulk/BulkProcessor.java b/server/src/main/java/org/opensearch/action/bulk/BulkProcessor.java
index 4695b44c4986b..a01fc82fffd01 100644
--- a/server/src/main/java/org/opensearch/action/bulk/BulkProcessor.java
+++ b/server/src/main/java/org/opensearch/action/bulk/BulkProcessor.java
@@ -41,8 +41,8 @@
import org.opensearch.core.common.bytes.BytesReference;
import org.opensearch.common.collect.Tuple;
import org.opensearch.common.settings.Settings;
-import org.opensearch.common.unit.ByteSizeUnit;
-import org.opensearch.common.unit.ByteSizeValue;
+import org.opensearch.core.common.unit.ByteSizeUnit;
+import org.opensearch.core.common.unit.ByteSizeValue;
import org.opensearch.common.unit.TimeValue;
import org.opensearch.common.xcontent.XContentType;
import org.opensearch.threadpool.Scheduler;
diff --git a/server/src/main/java/org/opensearch/action/fieldcaps/FieldCapabilities.java b/server/src/main/java/org/opensearch/action/fieldcaps/FieldCapabilities.java
index 8053e1f8521cf..c4298e75f8302 100644
--- a/server/src/main/java/org/opensearch/action/fieldcaps/FieldCapabilities.java
+++ b/server/src/main/java/org/opensearch/action/fieldcaps/FieldCapabilities.java
@@ -32,7 +32,7 @@
package org.opensearch.action.fieldcaps;
-import org.opensearch.common.Strings;
+import org.opensearch.core.common.Strings;
import org.opensearch.core.common.io.stream.StreamInput;
import org.opensearch.core.common.io.stream.StreamOutput;
import org.opensearch.core.common.io.stream.Writeable;
diff --git a/server/src/main/java/org/opensearch/action/fieldcaps/FieldCapabilitiesResponse.java b/server/src/main/java/org/opensearch/action/fieldcaps/FieldCapabilitiesResponse.java
index 550ef2d412ca4..5b14a0d5a40b4 100644
--- a/server/src/main/java/org/opensearch/action/fieldcaps/FieldCapabilitiesResponse.java
+++ b/server/src/main/java/org/opensearch/action/fieldcaps/FieldCapabilitiesResponse.java
@@ -34,7 +34,7 @@
import org.opensearch.action.ActionResponse;
import org.opensearch.core.ParseField;
-import org.opensearch.common.Strings;
+import org.opensearch.core.common.Strings;
import org.opensearch.common.collect.Tuple;
import org.opensearch.core.common.io.stream.StreamInput;
import org.opensearch.core.common.io.stream.StreamOutput;
diff --git a/server/src/main/java/org/opensearch/action/get/GetResponse.java b/server/src/main/java/org/opensearch/action/get/GetResponse.java
index b713dc8a507d1..abb1ddfe041c9 100644
--- a/server/src/main/java/org/opensearch/action/get/GetResponse.java
+++ b/server/src/main/java/org/opensearch/action/get/GetResponse.java
@@ -35,7 +35,7 @@
import org.opensearch.OpenSearchParseException;
import org.opensearch.action.ActionResponse;
import org.opensearch.core.common.ParsingException;
-import org.opensearch.common.Strings;
+import org.opensearch.core.common.Strings;
import org.opensearch.core.common.bytes.BytesReference;
import org.opensearch.common.document.DocumentField;
import org.opensearch.core.common.io.stream.StreamInput;
diff --git a/server/src/main/java/org/opensearch/action/get/MultiGetRequest.java b/server/src/main/java/org/opensearch/action/get/MultiGetRequest.java
index e8d0c1b9d320f..3a28b123b6539 100644
--- a/server/src/main/java/org/opensearch/action/get/MultiGetRequest.java
+++ b/server/src/main/java/org/opensearch/action/get/MultiGetRequest.java
@@ -43,7 +43,7 @@
import org.opensearch.action.support.IndicesOptions;
import org.opensearch.common.Nullable;
import org.opensearch.core.common.ParsingException;
-import org.opensearch.common.Strings;
+import org.opensearch.core.common.Strings;
import org.opensearch.core.common.io.stream.StreamInput;
import org.opensearch.core.common.io.stream.StreamOutput;
import org.opensearch.core.common.io.stream.Writeable;
diff --git a/server/src/main/java/org/opensearch/action/index/IndexRequest.java b/server/src/main/java/org/opensearch/action/index/IndexRequest.java
index d686f0b460634..584bee0caaf2e 100644
--- a/server/src/main/java/org/opensearch/action/index/IndexRequest.java
+++ b/server/src/main/java/org/opensearch/action/index/IndexRequest.java
@@ -53,12 +53,12 @@
import org.opensearch.core.common.io.stream.StreamInput;
import org.opensearch.core.common.io.stream.StreamOutput;
import org.opensearch.common.lucene.uid.Versions;
-import org.opensearch.common.unit.ByteSizeValue;
-import org.opensearch.common.xcontent.XContentFactory;
+import org.opensearch.core.common.unit.ByteSizeValue;
import org.opensearch.common.xcontent.XContentHelper;
import org.opensearch.common.xcontent.XContentType;
import org.opensearch.core.common.Strings;
import org.opensearch.core.xcontent.MediaType;
+import org.opensearch.core.xcontent.MediaTypeRegistry;
import org.opensearch.core.xcontent.XContentBuilder;
import org.opensearch.index.VersionType;
import org.opensearch.index.mapper.MapperService;
@@ -159,7 +159,7 @@ public IndexRequest(@Nullable ShardId shardId, StreamInput in) throws IOExceptio
isRetry = in.readBoolean();
autoGeneratedTimestamp = in.readLong();
if (in.readBoolean()) {
- if (in.getVersion().onOrAfter(Version.V_3_0_0)) {
+ if (in.getVersion().onOrAfter(Version.V_2_10_0)) {
contentType = in.readMediaType();
} else {
contentType = in.readEnum(XContentType.class);
@@ -378,7 +378,7 @@ public IndexRequest source(Map source) throws OpenSearchGenerationExc
*/
public IndexRequest source(Map source, MediaType contentType) throws OpenSearchGenerationException {
try {
- XContentBuilder builder = XContentFactory.contentBuilder(contentType);
+ XContentBuilder builder = MediaTypeRegistry.contentBuilder(contentType);
builder.map(source);
return source(builder);
} catch (IOException e) {
@@ -434,7 +434,7 @@ public IndexRequest source(MediaType mediaType, Object... source) {
);
}
try {
- XContentBuilder builder = XContentFactory.contentBuilder(mediaType);
+ XContentBuilder builder = MediaTypeRegistry.contentBuilder(mediaType);
builder.startObject();
for (int i = 0; i < source.length; i++) {
builder.field(source[i++].toString(), source[i]);
@@ -670,7 +670,7 @@ private void writeBody(StreamOutput out) throws IOException {
out.writeLong(autoGeneratedTimestamp);
if (contentType != null) {
out.writeBoolean(true);
- if (out.getVersion().onOrAfter(Version.V_3_0_0)) {
+ if (out.getVersion().onOrAfter(Version.V_2_10_0)) {
contentType.writeTo(out);
} else {
out.writeEnum((XContentType) contentType);
diff --git a/server/src/main/java/org/opensearch/action/index/IndexResponse.java b/server/src/main/java/org/opensearch/action/index/IndexResponse.java
index 12d788323b497..c7c2138a63b4e 100644
--- a/server/src/main/java/org/opensearch/action/index/IndexResponse.java
+++ b/server/src/main/java/org/opensearch/action/index/IndexResponse.java
@@ -33,7 +33,7 @@
package org.opensearch.action.index;
import org.opensearch.action.DocWriteResponse;
-import org.opensearch.common.Strings;
+import org.opensearch.core.common.Strings;
import org.opensearch.core.common.io.stream.StreamInput;
import org.opensearch.common.xcontent.XContentType;
import org.opensearch.core.xcontent.XContentParser;
diff --git a/server/src/main/java/org/opensearch/action/ingest/GetPipelineResponse.java b/server/src/main/java/org/opensearch/action/ingest/GetPipelineResponse.java
index a26fa413b2f5b..a22f499c4add4 100644
--- a/server/src/main/java/org/opensearch/action/ingest/GetPipelineResponse.java
+++ b/server/src/main/java/org/opensearch/action/ingest/GetPipelineResponse.java
@@ -33,7 +33,7 @@
package org.opensearch.action.ingest;
import org.opensearch.action.ActionResponse;
-import org.opensearch.common.Strings;
+import org.opensearch.core.common.Strings;
import org.opensearch.core.common.bytes.BytesReference;
import org.opensearch.core.common.io.stream.StreamInput;
import org.opensearch.core.common.io.stream.StreamOutput;
diff --git a/server/src/main/java/org/opensearch/action/ingest/PutPipelineRequest.java b/server/src/main/java/org/opensearch/action/ingest/PutPipelineRequest.java
index 7a88f817c70bf..f764e4b23860a 100644
--- a/server/src/main/java/org/opensearch/action/ingest/PutPipelineRequest.java
+++ b/server/src/main/java/org/opensearch/action/ingest/PutPipelineRequest.java
@@ -70,7 +70,7 @@ public PutPipelineRequest(StreamInput in) throws IOException {
super(in);
id = in.readString();
source = in.readBytesReference();
- if (in.getVersion().onOrAfter(Version.V_3_0_0)) {
+ if (in.getVersion().onOrAfter(Version.V_2_10_0)) {
mediaType = in.readMediaType();
} else {
mediaType = in.readEnum(XContentType.class);
@@ -101,7 +101,7 @@ public void writeTo(StreamOutput out) throws IOException {
super.writeTo(out);
out.writeString(id);
out.writeBytesReference(source);
- if (out.getVersion().onOrAfter(Version.V_3_0_0)) {
+ if (out.getVersion().onOrAfter(Version.V_2_10_0)) {
mediaType.writeTo(out);
} else {
out.writeEnum((XContentType) mediaType);
diff --git a/server/src/main/java/org/opensearch/action/ingest/SimulatePipelineRequest.java b/server/src/main/java/org/opensearch/action/ingest/SimulatePipelineRequest.java
index 1ac441a1afe64..4837cfdd492b4 100644
--- a/server/src/main/java/org/opensearch/action/ingest/SimulatePipelineRequest.java
+++ b/server/src/main/java/org/opensearch/action/ingest/SimulatePipelineRequest.java
@@ -85,7 +85,7 @@ public SimulatePipelineRequest(BytesReference source, MediaType mediaType) {
id = in.readOptionalString();
verbose = in.readBoolean();
source = in.readBytesReference();
- if (in.getVersion().onOrAfter(Version.V_3_0_0)) {
+ if (in.getVersion().onOrAfter(Version.V_2_10_0)) {
mediaType = in.readMediaType();
} else {
mediaType = in.readEnum(XContentType.class);
@@ -127,7 +127,7 @@ public void writeTo(StreamOutput out) throws IOException {
out.writeOptionalString(id);
out.writeBoolean(verbose);
out.writeBytesReference(source);
- if (out.getVersion().onOrAfter(Version.V_3_0_0)) {
+ if (out.getVersion().onOrAfter(Version.V_2_10_0)) {
mediaType.writeTo(out);
} else {
out.writeEnum((XContentType) mediaType);
diff --git a/server/src/main/java/org/opensearch/action/search/ClearScrollController.java b/server/src/main/java/org/opensearch/action/search/ClearScrollController.java
index eb0fa49a94050..c258b111fa1c6 100644
--- a/server/src/main/java/org/opensearch/action/search/ClearScrollController.java
+++ b/server/src/main/java/org/opensearch/action/search/ClearScrollController.java
@@ -41,7 +41,7 @@
import org.opensearch.common.util.concurrent.CountDown;
import org.opensearch.core.common.Strings;
import org.opensearch.transport.Transport;
-import org.opensearch.transport.TransportResponse;
+import org.opensearch.core.transport.TransportResponse;
import java.util.ArrayList;
import java.util.Collection;
diff --git a/server/src/main/java/org/opensearch/action/search/DeletePitInfo.java b/server/src/main/java/org/opensearch/action/search/DeletePitInfo.java
index 1e616ab5ca16e..b33f8a46c8f7a 100644
--- a/server/src/main/java/org/opensearch/action/search/DeletePitInfo.java
+++ b/server/src/main/java/org/opensearch/action/search/DeletePitInfo.java
@@ -15,7 +15,7 @@
import org.opensearch.core.xcontent.ConstructingObjectParser;
import org.opensearch.core.xcontent.ToXContent;
import org.opensearch.core.xcontent.XContentBuilder;
-import org.opensearch.transport.TransportResponse;
+import org.opensearch.core.transport.TransportResponse;
import java.io.IOException;
diff --git a/server/src/main/java/org/opensearch/action/search/GetSearchPipelineResponse.java b/server/src/main/java/org/opensearch/action/search/GetSearchPipelineResponse.java
index 4211839ce6569..cf8d9cec779c8 100644
--- a/server/src/main/java/org/opensearch/action/search/GetSearchPipelineResponse.java
+++ b/server/src/main/java/org/opensearch/action/search/GetSearchPipelineResponse.java
@@ -9,7 +9,7 @@
package org.opensearch.action.search;
import org.opensearch.action.ActionResponse;
-import org.opensearch.common.Strings;
+import org.opensearch.core.common.Strings;
import org.opensearch.core.common.bytes.BytesReference;
import org.opensearch.core.common.io.stream.StreamInput;
import org.opensearch.core.common.io.stream.StreamOutput;
diff --git a/server/src/main/java/org/opensearch/action/search/MultiSearchResponse.java b/server/src/main/java/org/opensearch/action/search/MultiSearchResponse.java
index c576d87d85b0f..7facd62f90bad 100644
--- a/server/src/main/java/org/opensearch/action/search/MultiSearchResponse.java
+++ b/server/src/main/java/org/opensearch/action/search/MultiSearchResponse.java
@@ -36,7 +36,7 @@
import org.opensearch.OpenSearchException;
import org.opensearch.action.ActionResponse;
import org.opensearch.common.Nullable;
-import org.opensearch.common.Strings;
+import org.opensearch.core.common.Strings;
import org.opensearch.core.common.io.stream.StreamInput;
import org.opensearch.core.common.io.stream.StreamOutput;
import org.opensearch.core.common.io.stream.Writeable;
diff --git a/server/src/main/java/org/opensearch/action/search/PutSearchPipelineRequest.java b/server/src/main/java/org/opensearch/action/search/PutSearchPipelineRequest.java
index d32aab0c8a561..d0484b3a69a1e 100644
--- a/server/src/main/java/org/opensearch/action/search/PutSearchPipelineRequest.java
+++ b/server/src/main/java/org/opensearch/action/search/PutSearchPipelineRequest.java
@@ -47,7 +47,7 @@ public PutSearchPipelineRequest(StreamInput in) throws IOException {
super(in);
id = in.readString();
source = in.readBytesReference();
- if (in.getVersion().onOrAfter(Version.V_3_0_0)) {
+ if (in.getVersion().onOrAfter(Version.V_2_10_0)) {
mediaType = in.readMediaType();
} else {
mediaType = in.readEnum(XContentType.class);
@@ -76,7 +76,7 @@ public void writeTo(StreamOutput out) throws IOException {
super.writeTo(out);
out.writeString(id);
out.writeBytesReference(source);
- if (out.getVersion().onOrAfter(Version.V_3_0_0)) {
+ if (out.getVersion().onOrAfter(Version.V_2_10_0)) {
mediaType.writeTo(out);
} else {
out.writeEnum((XContentType) mediaType);
diff --git a/server/src/main/java/org/opensearch/action/search/QueryPhaseResultConsumer.java b/server/src/main/java/org/opensearch/action/search/QueryPhaseResultConsumer.java
index 45c2dc4f29403..43c95133f12d6 100644
--- a/server/src/main/java/org/opensearch/action/search/QueryPhaseResultConsumer.java
+++ b/server/src/main/java/org/opensearch/action/search/QueryPhaseResultConsumer.java
@@ -35,8 +35,8 @@
import org.apache.logging.log4j.LogManager;
import org.apache.logging.log4j.Logger;
import org.apache.lucene.search.TopDocs;
-import org.opensearch.common.breaker.CircuitBreaker;
-import org.opensearch.common.breaker.CircuitBreakingException;
+import org.opensearch.core.common.breaker.CircuitBreaker;
+import org.opensearch.core.common.breaker.CircuitBreakingException;
import org.opensearch.core.common.io.stream.NamedWriteableRegistry;
import org.opensearch.common.lucene.search.TopDocsAndMaxScore;
import org.opensearch.common.util.concurrent.AbstractRunnable;
diff --git a/server/src/main/java/org/opensearch/action/search/SearchPhaseController.java b/server/src/main/java/org/opensearch/action/search/SearchPhaseController.java
index 512d3295c4cfc..2f002d21d9b68 100644
--- a/server/src/main/java/org/opensearch/action/search/SearchPhaseController.java
+++ b/server/src/main/java/org/opensearch/action/search/SearchPhaseController.java
@@ -45,7 +45,7 @@
import org.apache.lucene.search.TotalHits;
import org.apache.lucene.search.TotalHits.Relation;
import org.apache.lucene.search.grouping.CollapseTopFieldDocs;
-import org.opensearch.common.breaker.CircuitBreaker;
+import org.opensearch.core.common.breaker.CircuitBreaker;
import org.opensearch.core.common.io.stream.NamedWriteableRegistry;
import org.opensearch.common.lucene.search.TopDocsAndMaxScore;
import org.opensearch.search.DocValueFormat;
diff --git a/server/src/main/java/org/opensearch/action/search/SearchPhaseExecutionException.java b/server/src/main/java/org/opensearch/action/search/SearchPhaseExecutionException.java
index b3ed42824e91a..998ba720c4523 100644
--- a/server/src/main/java/org/opensearch/action/search/SearchPhaseExecutionException.java
+++ b/server/src/main/java/org/opensearch/action/search/SearchPhaseExecutionException.java
@@ -37,7 +37,7 @@
import org.opensearch.core.action.ShardOperationFailedException;
import org.opensearch.core.common.io.stream.StreamInput;
import org.opensearch.core.common.io.stream.StreamOutput;
-import org.opensearch.common.util.CollectionUtils;
+import org.opensearch.core.common.util.CollectionUtils;
import org.opensearch.core.xcontent.XContentBuilder;
import org.opensearch.core.rest.RestStatus;
diff --git a/server/src/main/java/org/opensearch/action/search/SearchResponse.java b/server/src/main/java/org/opensearch/action/search/SearchResponse.java
index c7ab8f0858e7b..d4ebc0a2363af 100644
--- a/server/src/main/java/org/opensearch/action/search/SearchResponse.java
+++ b/server/src/main/java/org/opensearch/action/search/SearchResponse.java
@@ -35,7 +35,7 @@
import org.apache.lucene.search.TotalHits;
import org.opensearch.action.ActionResponse;
import org.opensearch.common.Nullable;
-import org.opensearch.common.Strings;
+import org.opensearch.core.common.Strings;
import org.opensearch.core.common.io.stream.StreamInput;
import org.opensearch.core.common.io.stream.StreamOutput;
import org.opensearch.core.common.io.stream.Writeable;
diff --git a/server/src/main/java/org/opensearch/action/search/SearchTransportService.java b/server/src/main/java/org/opensearch/action/search/SearchTransportService.java
index 5a280818640ed..37ffca6cac5f2 100644
--- a/server/src/main/java/org/opensearch/action/search/SearchTransportService.java
+++ b/server/src/main/java/org/opensearch/action/search/SearchTransportService.java
@@ -65,7 +65,7 @@
import org.opensearch.transport.TransportException;
import org.opensearch.transport.TransportRequest;
import org.opensearch.transport.TransportRequestOptions;
-import org.opensearch.transport.TransportResponse;
+import org.opensearch.core.transport.TransportResponse;
import org.opensearch.transport.TransportService;
import java.io.IOException;
diff --git a/server/src/main/java/org/opensearch/action/search/TransportSearchAction.java b/server/src/main/java/org/opensearch/action/search/TransportSearchAction.java
index c4af9ffa20194..1011f17c98dd6 100644
--- a/server/src/main/java/org/opensearch/action/search/TransportSearchAction.java
+++ b/server/src/main/java/org/opensearch/action/search/TransportSearchAction.java
@@ -57,7 +57,7 @@
import org.opensearch.cluster.routing.ShardIterator;
import org.opensearch.cluster.service.ClusterService;
import org.opensearch.common.Nullable;
-import org.opensearch.common.breaker.CircuitBreaker;
+import org.opensearch.core.common.breaker.CircuitBreaker;
import org.opensearch.common.inject.Inject;
import org.opensearch.core.common.io.stream.NamedWriteableRegistry;
import org.opensearch.core.common.io.stream.Writeable;
@@ -70,7 +70,7 @@
import org.opensearch.core.index.Index;
import org.opensearch.index.query.Rewriteable;
import org.opensearch.core.index.shard.ShardId;
-import org.opensearch.indices.breaker.CircuitBreakerService;
+import org.opensearch.core.indices.breaker.CircuitBreakerService;
import org.opensearch.search.SearchPhaseResult;
import org.opensearch.search.SearchService;
import org.opensearch.search.SearchShardTarget;
diff --git a/server/src/main/java/org/opensearch/action/search/UpdatePitContextResponse.java b/server/src/main/java/org/opensearch/action/search/UpdatePitContextResponse.java
index da39aed20ef8e..1db8fc48c28bc 100644
--- a/server/src/main/java/org/opensearch/action/search/UpdatePitContextResponse.java
+++ b/server/src/main/java/org/opensearch/action/search/UpdatePitContextResponse.java
@@ -10,7 +10,7 @@
import org.opensearch.core.common.io.stream.StreamInput;
import org.opensearch.core.common.io.stream.StreamOutput;
-import org.opensearch.transport.TransportResponse;
+import org.opensearch.core.transport.TransportResponse;
import java.io.IOException;
diff --git a/server/src/main/java/org/opensearch/action/support/ChannelActionListener.java b/server/src/main/java/org/opensearch/action/support/ChannelActionListener.java
index 5b0475093d3c2..07851345241bd 100644
--- a/server/src/main/java/org/opensearch/action/support/ChannelActionListener.java
+++ b/server/src/main/java/org/opensearch/action/support/ChannelActionListener.java
@@ -35,7 +35,7 @@
import org.opensearch.action.ActionListener;
import org.opensearch.transport.TransportChannel;
import org.opensearch.transport.TransportRequest;
-import org.opensearch.transport.TransportResponse;
+import org.opensearch.core.transport.TransportResponse;
/**
* Listener for transport channel actions
diff --git a/server/src/main/java/org/opensearch/action/support/broadcast/BroadcastShardResponse.java b/server/src/main/java/org/opensearch/action/support/broadcast/BroadcastShardResponse.java
index 39c524448bc5d..9603f886366f2 100644
--- a/server/src/main/java/org/opensearch/action/support/broadcast/BroadcastShardResponse.java
+++ b/server/src/main/java/org/opensearch/action/support/broadcast/BroadcastShardResponse.java
@@ -35,7 +35,7 @@
import org.opensearch.core.common.io.stream.StreamInput;
import org.opensearch.core.common.io.stream.StreamOutput;
import org.opensearch.core.index.shard.ShardId;
-import org.opensearch.transport.TransportResponse;
+import org.opensearch.core.transport.TransportResponse;
import java.io.IOException;
diff --git a/server/src/main/java/org/opensearch/action/support/broadcast/node/TransportBroadcastByNodeAction.java b/server/src/main/java/org/opensearch/action/support/broadcast/node/TransportBroadcastByNodeAction.java
index bf71134ab7b88..1b7822ee5a440 100644
--- a/server/src/main/java/org/opensearch/action/support/broadcast/node/TransportBroadcastByNodeAction.java
+++ b/server/src/main/java/org/opensearch/action/support/broadcast/node/TransportBroadcastByNodeAction.java
@@ -63,7 +63,7 @@
import org.opensearch.transport.TransportException;
import org.opensearch.transport.TransportRequest;
import org.opensearch.transport.TransportRequestHandler;
-import org.opensearch.transport.TransportResponse;
+import org.opensearch.core.transport.TransportResponse;
import org.opensearch.transport.TransportResponseHandler;
import org.opensearch.transport.TransportService;
import org.opensearch.transport.TransportRequestOptions;
diff --git a/server/src/main/java/org/opensearch/action/support/nodes/BaseNodeResponse.java b/server/src/main/java/org/opensearch/action/support/nodes/BaseNodeResponse.java
index 4a94f790c3443..8a4e12567b515 100644
--- a/server/src/main/java/org/opensearch/action/support/nodes/BaseNodeResponse.java
+++ b/server/src/main/java/org/opensearch/action/support/nodes/BaseNodeResponse.java
@@ -35,7 +35,7 @@
import org.opensearch.cluster.node.DiscoveryNode;
import org.opensearch.core.common.io.stream.StreamInput;
import org.opensearch.core.common.io.stream.StreamOutput;
-import org.opensearch.transport.TransportResponse;
+import org.opensearch.core.transport.TransportResponse;
import java.io.IOException;
diff --git a/server/src/main/java/org/opensearch/action/support/replication/ReplicationOperation.java b/server/src/main/java/org/opensearch/action/support/replication/ReplicationOperation.java
index 1affc9202c32b..d3a617853b019 100644
--- a/server/src/main/java/org/opensearch/action/support/replication/ReplicationOperation.java
+++ b/server/src/main/java/org/opensearch/action/support/replication/ReplicationOperation.java
@@ -47,7 +47,7 @@
import org.opensearch.cluster.routing.IndexShardRoutingTable;
import org.opensearch.cluster.routing.ShardRouting;
import org.opensearch.common.Nullable;
-import org.opensearch.common.breaker.CircuitBreakingException;
+import org.opensearch.core.common.breaker.CircuitBreakingException;
import org.opensearch.core.common.io.stream.StreamInput;
import org.opensearch.common.unit.TimeValue;
import org.opensearch.core.concurrency.OpenSearchRejectedExecutionException;
diff --git a/server/src/main/java/org/opensearch/action/support/replication/ReplicationTask.java b/server/src/main/java/org/opensearch/action/support/replication/ReplicationTask.java
index 5aa37ec65ff0d..da00183515ae5 100644
--- a/server/src/main/java/org/opensearch/action/support/replication/ReplicationTask.java
+++ b/server/src/main/java/org/opensearch/action/support/replication/ReplicationTask.java
@@ -32,7 +32,7 @@
package org.opensearch.action.support.replication;
-import org.opensearch.common.Strings;
+import org.opensearch.core.common.Strings;
import org.opensearch.core.common.io.stream.StreamInput;
import org.opensearch.core.common.io.stream.StreamOutput;
import org.opensearch.common.xcontent.XContentType;
diff --git a/server/src/main/java/org/opensearch/action/support/tasks/BaseTasksRequest.java b/server/src/main/java/org/opensearch/action/support/tasks/BaseTasksRequest.java
index 43ece159247bc..f8d96d57b341b 100644
--- a/server/src/main/java/org/opensearch/action/support/tasks/BaseTasksRequest.java
+++ b/server/src/main/java/org/opensearch/action/support/tasks/BaseTasksRequest.java
@@ -38,7 +38,7 @@
import org.opensearch.core.common.io.stream.StreamOutput;
import org.opensearch.common.regex.Regex;
import org.opensearch.common.unit.TimeValue;
-import org.opensearch.common.util.CollectionUtils;
+import org.opensearch.core.common.util.CollectionUtils;
import org.opensearch.core.common.Strings;
import org.opensearch.tasks.Task;
import org.opensearch.tasks.TaskId;
diff --git a/server/src/main/java/org/opensearch/action/support/tasks/TransportTasksAction.java b/server/src/main/java/org/opensearch/action/support/tasks/TransportTasksAction.java
index bfd207e6f969f..e06858ab1a201 100644
--- a/server/src/main/java/org/opensearch/action/support/tasks/TransportTasksAction.java
+++ b/server/src/main/java/org/opensearch/action/support/tasks/TransportTasksAction.java
@@ -57,7 +57,7 @@
import org.opensearch.transport.TransportRequest;
import org.opensearch.transport.TransportRequestHandler;
import org.opensearch.transport.TransportRequestOptions;
-import org.opensearch.transport.TransportResponse;
+import org.opensearch.core.transport.TransportResponse;
import org.opensearch.transport.TransportResponseHandler;
import org.opensearch.transport.TransportService;
diff --git a/server/src/main/java/org/opensearch/action/termvectors/TermVectorsRequest.java b/server/src/main/java/org/opensearch/action/termvectors/TermVectorsRequest.java
index 71200b05d70ad..f15a039fd9305 100644
--- a/server/src/main/java/org/opensearch/action/termvectors/TermVectorsRequest.java
+++ b/server/src/main/java/org/opensearch/action/termvectors/TermVectorsRequest.java
@@ -49,8 +49,8 @@
import org.opensearch.common.lucene.uid.Versions;
import org.opensearch.common.util.set.Sets;
import org.opensearch.core.xcontent.MediaType;
+import org.opensearch.core.xcontent.MediaTypeRegistry;
import org.opensearch.core.xcontent.XContentBuilder;
-import org.opensearch.common.xcontent.XContentHelper;
import org.opensearch.core.xcontent.XContentParser;
import org.opensearch.index.VersionType;
import org.opensearch.index.mapper.MapperService;
@@ -186,7 +186,7 @@ public TermVectorsRequest() {}
if (in.readBoolean()) {
doc = in.readBytesReference();
- if (in.getVersion().onOrAfter(Version.V_3_0_0)) {
+ if (in.getVersion().onOrAfter(Version.V_2_10_0)) {
mediaType = in.readMediaType();
} else {
mediaType = in.readEnum(XContentType.class);
@@ -306,7 +306,7 @@ public TermVectorsRequest doc(XContentBuilder documentBuilder) {
*/
@Deprecated
public TermVectorsRequest doc(BytesReference doc, boolean generateRandomId) {
- return this.doc(doc, generateRandomId, XContentHelper.xContentType(doc));
+ return this.doc(doc, generateRandomId, MediaTypeRegistry.xContentType(doc));
}
/**
@@ -538,7 +538,7 @@ public void writeTo(StreamOutput out) throws IOException {
out.writeBoolean(doc != null);
if (doc != null) {
out.writeBytesReference(doc);
- if (out.getVersion().onOrAfter(Version.V_3_0_0)) {
+ if (out.getVersion().onOrAfter(Version.V_2_10_0)) {
mediaType.writeTo(out);
} else {
out.writeEnum((XContentType) mediaType);
diff --git a/server/src/main/java/org/opensearch/action/update/UpdateRequest.java b/server/src/main/java/org/opensearch/action/update/UpdateRequest.java
index 44454630ff24d..86ebc0d9b69d6 100644
--- a/server/src/main/java/org/opensearch/action/update/UpdateRequest.java
+++ b/server/src/main/java/org/opensearch/action/update/UpdateRequest.java
@@ -49,11 +49,11 @@
import org.opensearch.core.ParseField;
import org.opensearch.core.common.Strings;
import org.opensearch.core.xcontent.MediaType;
+import org.opensearch.core.xcontent.MediaTypeRegistry;
import org.opensearch.core.xcontent.NamedXContentRegistry;
import org.opensearch.core.xcontent.ObjectParser;
import org.opensearch.core.xcontent.ToXContentObject;
import org.opensearch.core.xcontent.XContentBuilder;
-import org.opensearch.common.xcontent.XContentFactory;
import org.opensearch.common.xcontent.XContentHelper;
import org.opensearch.core.xcontent.XContentParser;
import org.opensearch.common.xcontent.XContentType;
@@ -107,12 +107,12 @@ public class UpdateRequest extends InstanceShardOperationRequest
);
PARSER.declareBoolean(UpdateRequest::scriptedUpsert, SCRIPTED_UPSERT_FIELD);
PARSER.declareObject((request, builder) -> request.safeUpsertRequest().source(builder), (parser, context) -> {
- XContentBuilder builder = XContentFactory.contentBuilder(parser.contentType());
+ XContentBuilder builder = MediaTypeRegistry.contentBuilder(parser.contentType());
builder.copyCurrentStructure(parser);
return builder;
}, UPSERT_FIELD);
PARSER.declareObject((request, builder) -> request.safeDoc().source(builder), (parser, context) -> {
- XContentBuilder docBuilder = XContentFactory.contentBuilder(parser.contentType());
+ XContentBuilder docBuilder = MediaTypeRegistry.contentBuilder(parser.contentType());
docBuilder.copyCurrentStructure(parser);
return docBuilder;
}, DOC_FIELD);
diff --git a/server/src/main/java/org/opensearch/bootstrap/Bootstrap.java b/server/src/main/java/org/opensearch/bootstrap/Bootstrap.java
index 2a23b501a8a0e..9383260d1bd73 100644
--- a/server/src/main/java/org/opensearch/bootstrap/Bootstrap.java
+++ b/server/src/main/java/org/opensearch/bootstrap/Bootstrap.java
@@ -55,7 +55,7 @@
import org.opensearch.common.settings.SecureSettings;
import org.opensearch.core.common.settings.SecureString;
import org.opensearch.common.settings.Settings;
-import org.opensearch.common.transport.BoundTransportAddress;
+import org.opensearch.core.common.transport.BoundTransportAddress;
import org.opensearch.common.util.io.IOUtils;
import org.opensearch.env.Environment;
import org.opensearch.monitor.jvm.JvmInfo;
diff --git a/server/src/main/java/org/opensearch/bootstrap/BootstrapChecks.java b/server/src/main/java/org/opensearch/bootstrap/BootstrapChecks.java
index c27c149947444..f9661e71d60e6 100644
--- a/server/src/main/java/org/opensearch/bootstrap/BootstrapChecks.java
+++ b/server/src/main/java/org/opensearch/bootstrap/BootstrapChecks.java
@@ -42,8 +42,8 @@
import org.opensearch.common.SuppressForbidden;
import org.opensearch.common.io.PathUtils;
import org.opensearch.common.settings.Setting;
-import org.opensearch.common.transport.BoundTransportAddress;
-import org.opensearch.common.transport.TransportAddress;
+import org.opensearch.core.common.transport.BoundTransportAddress;
+import org.opensearch.core.common.transport.TransportAddress;
import org.opensearch.discovery.DiscoveryModule;
import org.opensearch.env.Environment;
import org.opensearch.index.IndexModule;
diff --git a/server/src/main/java/org/opensearch/cluster/ClusterInfo.java b/server/src/main/java/org/opensearch/cluster/ClusterInfo.java
index 3793b5094a4cb..1513dc4e5acf7 100644
--- a/server/src/main/java/org/opensearch/cluster/ClusterInfo.java
+++ b/server/src/main/java/org/opensearch/cluster/ClusterInfo.java
@@ -34,7 +34,7 @@
import org.opensearch.Version;
import org.opensearch.cluster.routing.ShardRouting;
-import org.opensearch.common.unit.ByteSizeValue;
+import org.opensearch.core.common.unit.ByteSizeValue;
import org.opensearch.core.common.io.stream.StreamInput;
import org.opensearch.core.common.io.stream.StreamOutput;
import org.opensearch.core.common.io.stream.Writeable;
@@ -110,7 +110,7 @@ public ClusterInfo(StreamInput in) throws IOException {
this.shardSizes = Collections.unmodifiableMap(sizeMap);
this.routingToDataPath = Collections.unmodifiableMap(routingMap);
this.reservedSpace = Collections.unmodifiableMap(reservedSpaceMap);
- if (in.getVersion().onOrAfter(Version.V_3_0_0)) {
+ if (in.getVersion().onOrAfter(Version.V_2_10_0)) {
this.nodeFileCacheStats = in.readMap(StreamInput::readString, FileCacheStats::new);
} else {
this.nodeFileCacheStats = Map.of();
@@ -124,7 +124,7 @@ public void writeTo(StreamOutput out) throws IOException {
out.writeMap(this.shardSizes, StreamOutput::writeString, (o, v) -> out.writeLong(v == null ? -1 : v));
out.writeMap(this.routingToDataPath, (o, k) -> k.writeTo(o), StreamOutput::writeString);
out.writeMap(this.reservedSpace, (o, v) -> v.writeTo(o), (o, v) -> v.writeTo(o));
- if (out.getVersion().onOrAfter(Version.V_3_0_0)) {
+ if (out.getVersion().onOrAfter(Version.V_2_10_0)) {
out.writeMap(this.nodeFileCacheStats, StreamOutput::writeString, (o, v) -> v.writeTo(o));
}
}
diff --git a/server/src/main/java/org/opensearch/cluster/ClusterModule.java b/server/src/main/java/org/opensearch/cluster/ClusterModule.java
index b80fd1d746831..e797a08aba3cd 100644
--- a/server/src/main/java/org/opensearch/cluster/ClusterModule.java
+++ b/server/src/main/java/org/opensearch/cluster/ClusterModule.java
@@ -359,7 +359,7 @@ public static Collection createAllocationDeciders(
addAllocationDecider(deciders, new ConcurrentRebalanceAllocationDecider(settings, clusterSettings));
addAllocationDecider(deciders, new ConcurrentRecoveriesAllocationDecider(settings, clusterSettings));
addAllocationDecider(deciders, new EnableAllocationDecider(settings, clusterSettings));
- addAllocationDecider(deciders, new NodeVersionAllocationDecider());
+ addAllocationDecider(deciders, new NodeVersionAllocationDecider(settings));
addAllocationDecider(deciders, new SnapshotInProgressAllocationDecider());
addAllocationDecider(deciders, new RestoreInProgressAllocationDecider());
addAllocationDecider(deciders, new FilterAllocationDecider(settings, clusterSettings));
diff --git a/server/src/main/java/org/opensearch/cluster/ClusterSettingsResponse.java b/server/src/main/java/org/opensearch/cluster/ClusterSettingsResponse.java
index ee5c8c00dfaf4..408344f476fe2 100644
--- a/server/src/main/java/org/opensearch/cluster/ClusterSettingsResponse.java
+++ b/server/src/main/java/org/opensearch/cluster/ClusterSettingsResponse.java
@@ -12,7 +12,7 @@
import org.opensearch.core.common.io.stream.StreamInput;
import org.opensearch.core.common.io.stream.StreamOutput;
import org.opensearch.common.settings.Settings;
-import org.opensearch.transport.TransportResponse;
+import org.opensearch.core.transport.TransportResponse;
import java.io.IOException;
import java.util.Objects;
diff --git a/server/src/main/java/org/opensearch/cluster/DiskUsage.java b/server/src/main/java/org/opensearch/cluster/DiskUsage.java
index 961bfce053243..c472522baee51 100644
--- a/server/src/main/java/org/opensearch/cluster/DiskUsage.java
+++ b/server/src/main/java/org/opensearch/cluster/DiskUsage.java
@@ -32,11 +32,11 @@
package org.opensearch.cluster;
-import org.opensearch.common.Strings;
+import org.opensearch.core.common.Strings;
import org.opensearch.core.common.io.stream.StreamInput;
import org.opensearch.core.common.io.stream.StreamOutput;
import org.opensearch.core.common.io.stream.Writeable;
-import org.opensearch.common.unit.ByteSizeValue;
+import org.opensearch.core.common.unit.ByteSizeValue;
import org.opensearch.core.xcontent.ToXContentFragment;
import org.opensearch.core.xcontent.XContentBuilder;
diff --git a/server/src/main/java/org/opensearch/cluster/NodeConnectionsService.java b/server/src/main/java/org/opensearch/cluster/NodeConnectionsService.java
index 0014d5c61fb2d..b5a9e4e7b30a0 100644
--- a/server/src/main/java/org/opensearch/cluster/NodeConnectionsService.java
+++ b/server/src/main/java/org/opensearch/cluster/NodeConnectionsService.java
@@ -44,7 +44,7 @@
import org.opensearch.cluster.node.DiscoveryNodes;
import org.opensearch.cluster.service.ClusterApplier;
import org.opensearch.common.Nullable;
-import org.opensearch.common.component.AbstractLifecycleComponent;
+import org.opensearch.common.lifecycle.AbstractLifecycleComponent;
import org.opensearch.common.inject.Inject;
import org.opensearch.common.settings.Setting;
import org.opensearch.common.settings.Settings;
diff --git a/server/src/main/java/org/opensearch/cluster/RepositoryCleanupInProgress.java b/server/src/main/java/org/opensearch/cluster/RepositoryCleanupInProgress.java
index c399bd59dbbe1..be471ab6a68ec 100644
--- a/server/src/main/java/org/opensearch/cluster/RepositoryCleanupInProgress.java
+++ b/server/src/main/java/org/opensearch/cluster/RepositoryCleanupInProgress.java
@@ -33,7 +33,7 @@
import org.opensearch.LegacyESVersion;
import org.opensearch.Version;
-import org.opensearch.common.Strings;
+import org.opensearch.core.common.Strings;
import org.opensearch.core.common.io.stream.StreamInput;
import org.opensearch.core.common.io.stream.StreamOutput;
import org.opensearch.core.common.io.stream.Writeable;
diff --git a/server/src/main/java/org/opensearch/cluster/SnapshotsInProgress.java b/server/src/main/java/org/opensearch/cluster/SnapshotsInProgress.java
index 855f7755419d8..5f9c93ff254c9 100644
--- a/server/src/main/java/org/opensearch/cluster/SnapshotsInProgress.java
+++ b/server/src/main/java/org/opensearch/cluster/SnapshotsInProgress.java
@@ -35,7 +35,7 @@
import org.opensearch.Version;
import org.opensearch.cluster.ClusterState.Custom;
import org.opensearch.common.Nullable;
-import org.opensearch.common.Strings;
+import org.opensearch.core.common.Strings;
import org.opensearch.core.common.io.stream.StreamInput;
import org.opensearch.core.common.io.stream.StreamOutput;
import org.opensearch.core.common.io.stream.Writeable;
diff --git a/server/src/main/java/org/opensearch/cluster/action/index/NodeMappingRefreshAction.java b/server/src/main/java/org/opensearch/cluster/action/index/NodeMappingRefreshAction.java
index e6781fc22e1a7..47e4e59dadd3f 100644
--- a/server/src/main/java/org/opensearch/cluster/action/index/NodeMappingRefreshAction.java
+++ b/server/src/main/java/org/opensearch/cluster/action/index/NodeMappingRefreshAction.java
@@ -48,7 +48,7 @@
import org.opensearch.transport.TransportChannel;
import org.opensearch.transport.TransportRequest;
import org.opensearch.transport.TransportRequestHandler;
-import org.opensearch.transport.TransportResponse;
+import org.opensearch.core.transport.TransportResponse;
import org.opensearch.transport.TransportService;
import java.io.IOException;
diff --git a/server/src/main/java/org/opensearch/cluster/action/shard/ShardStateAction.java b/server/src/main/java/org/opensearch/cluster/action/shard/ShardStateAction.java
index 9d9b6c52f6b25..30dcb5fd08954 100644
--- a/server/src/main/java/org/opensearch/cluster/action/shard/ShardStateAction.java
+++ b/server/src/main/java/org/opensearch/cluster/action/shard/ShardStateAction.java
@@ -74,7 +74,7 @@
import org.opensearch.transport.TransportRequest;
import org.opensearch.transport.TransportRequestDeduplicator;
import org.opensearch.transport.TransportRequestHandler;
-import org.opensearch.transport.TransportResponse;
+import org.opensearch.core.transport.TransportResponse;
import org.opensearch.transport.TransportService;
import java.io.IOException;
diff --git a/server/src/main/java/org/opensearch/cluster/awarenesshealth/ClusterAwarenessAttributeValueHealth.java b/server/src/main/java/org/opensearch/cluster/awarenesshealth/ClusterAwarenessAttributeValueHealth.java
index 75b73be8fa12e..1520a293d2741 100644
--- a/server/src/main/java/org/opensearch/cluster/awarenesshealth/ClusterAwarenessAttributeValueHealth.java
+++ b/server/src/main/java/org/opensearch/cluster/awarenesshealth/ClusterAwarenessAttributeValueHealth.java
@@ -14,7 +14,7 @@
import org.opensearch.cluster.routing.RoutingNode;
import org.opensearch.cluster.routing.ShardRoutingState;
import org.opensearch.cluster.routing.WeightedRouting;
-import org.opensearch.common.Strings;
+import org.opensearch.core.common.Strings;
import org.opensearch.core.common.io.stream.StreamInput;
import org.opensearch.core.common.io.stream.StreamOutput;
import org.opensearch.core.common.io.stream.Writeable;
diff --git a/server/src/main/java/org/opensearch/cluster/awarenesshealth/ClusterAwarenessAttributesHealth.java b/server/src/main/java/org/opensearch/cluster/awarenesshealth/ClusterAwarenessAttributesHealth.java
index 340fcfe0d0d31..08832cb1e8807 100644
--- a/server/src/main/java/org/opensearch/cluster/awarenesshealth/ClusterAwarenessAttributesHealth.java
+++ b/server/src/main/java/org/opensearch/cluster/awarenesshealth/ClusterAwarenessAttributesHealth.java
@@ -11,7 +11,7 @@
import org.opensearch.OpenSearchParseException;
import org.opensearch.cluster.ClusterState;
import org.opensearch.cluster.node.DiscoveryNode;
-import org.opensearch.common.Strings;
+import org.opensearch.core.common.Strings;
import org.opensearch.core.common.io.stream.StreamInput;
import org.opensearch.core.common.io.stream.StreamOutput;
import org.opensearch.core.common.io.stream.Writeable;
diff --git a/server/src/main/java/org/opensearch/cluster/coordination/ClusterFormationFailureHelper.java b/server/src/main/java/org/opensearch/cluster/coordination/ClusterFormationFailureHelper.java
index 9b51e56dce966..05a5ac862a5b1 100644
--- a/server/src/main/java/org/opensearch/cluster/coordination/ClusterFormationFailureHelper.java
+++ b/server/src/main/java/org/opensearch/cluster/coordination/ClusterFormationFailureHelper.java
@@ -40,7 +40,7 @@
import org.opensearch.common.Nullable;
import org.opensearch.common.settings.Setting;
import org.opensearch.common.settings.Settings;
-import org.opensearch.common.transport.TransportAddress;
+import org.opensearch.core.common.transport.TransportAddress;
import org.opensearch.common.unit.TimeValue;
import org.opensearch.common.util.concurrent.AbstractRunnable;
import org.opensearch.gateway.GatewayMetaState;
diff --git a/server/src/main/java/org/opensearch/cluster/coordination/Coordinator.java b/server/src/main/java/org/opensearch/cluster/coordination/Coordinator.java
index de751d881bc0e..4429136525534 100644
--- a/server/src/main/java/org/opensearch/cluster/coordination/Coordinator.java
+++ b/server/src/main/java/org/opensearch/cluster/coordination/Coordinator.java
@@ -61,13 +61,13 @@
import org.opensearch.common.Nullable;
import org.opensearch.common.Priority;
import org.opensearch.common.SetOnce;
-import org.opensearch.common.Strings;
-import org.opensearch.common.component.AbstractLifecycleComponent;
+import org.opensearch.common.lifecycle.AbstractLifecycleComponent;
+import org.opensearch.core.common.Strings;
import org.opensearch.core.common.io.stream.NamedWriteableRegistry;
import org.opensearch.common.settings.ClusterSettings;
import org.opensearch.common.settings.Setting;
import org.opensearch.common.settings.Settings;
-import org.opensearch.common.transport.TransportAddress;
+import org.opensearch.core.common.transport.TransportAddress;
import org.opensearch.common.unit.TimeValue;
import org.opensearch.common.util.concurrent.OpenSearchExecutors;
import org.opensearch.common.util.concurrent.ListenableFuture;
@@ -86,7 +86,7 @@
import org.opensearch.monitor.StatusInfo;
import org.opensearch.threadpool.Scheduler;
import org.opensearch.threadpool.ThreadPool.Names;
-import org.opensearch.transport.TransportResponse.Empty;
+import org.opensearch.core.transport.TransportResponse.Empty;
import org.opensearch.transport.TransportService;
import java.io.IOException;
diff --git a/server/src/main/java/org/opensearch/cluster/coordination/FollowersChecker.java b/server/src/main/java/org/opensearch/cluster/coordination/FollowersChecker.java
index 08008152cfcd6..94d2e11ab591e 100644
--- a/server/src/main/java/org/opensearch/cluster/coordination/FollowersChecker.java
+++ b/server/src/main/java/org/opensearch/cluster/coordination/FollowersChecker.java
@@ -55,7 +55,7 @@
import org.opensearch.transport.TransportRequest;
import org.opensearch.transport.TransportRequestOptions;
import org.opensearch.transport.TransportRequestOptions.Type;
-import org.opensearch.transport.TransportResponse.Empty;
+import org.opensearch.core.transport.TransportResponse.Empty;
import org.opensearch.transport.TransportResponseHandler;
import org.opensearch.transport.TransportService;
diff --git a/server/src/main/java/org/opensearch/cluster/coordination/JoinHelper.java b/server/src/main/java/org/opensearch/cluster/coordination/JoinHelper.java
index 42f09f95a7f56..f923176efa5e5 100644
--- a/server/src/main/java/org/opensearch/cluster/coordination/JoinHelper.java
+++ b/server/src/main/java/org/opensearch/cluster/coordination/JoinHelper.java
@@ -67,8 +67,8 @@
import org.opensearch.transport.TransportException;
import org.opensearch.transport.TransportRequest;
import org.opensearch.transport.TransportRequestOptions;
-import org.opensearch.transport.TransportResponse;
-import org.opensearch.transport.TransportResponse.Empty;
+import org.opensearch.core.transport.TransportResponse;
+import org.opensearch.core.transport.TransportResponse.Empty;
import org.opensearch.transport.TransportResponseHandler;
import org.opensearch.transport.TransportService;
diff --git a/server/src/main/java/org/opensearch/cluster/coordination/LeaderChecker.java b/server/src/main/java/org/opensearch/cluster/coordination/LeaderChecker.java
index f43abf0080575..009a2121a5886 100644
--- a/server/src/main/java/org/opensearch/cluster/coordination/LeaderChecker.java
+++ b/server/src/main/java/org/opensearch/cluster/coordination/LeaderChecker.java
@@ -56,8 +56,8 @@
import org.opensearch.transport.TransportRequest;
import org.opensearch.transport.TransportRequestOptions;
import org.opensearch.transport.TransportRequestOptions.Type;
-import org.opensearch.transport.TransportResponse;
-import org.opensearch.transport.TransportResponse.Empty;
+import org.opensearch.core.transport.TransportResponse;
+import org.opensearch.core.transport.TransportResponse.Empty;
import org.opensearch.transport.TransportResponseHandler;
import org.opensearch.transport.TransportService;
diff --git a/server/src/main/java/org/opensearch/cluster/coordination/PeersResponse.java b/server/src/main/java/org/opensearch/cluster/coordination/PeersResponse.java
index b68f689ef63fd..8a70c71d53fdd 100644
--- a/server/src/main/java/org/opensearch/cluster/coordination/PeersResponse.java
+++ b/server/src/main/java/org/opensearch/cluster/coordination/PeersResponse.java
@@ -35,7 +35,7 @@
import org.opensearch.cluster.node.DiscoveryNode;
import org.opensearch.core.common.io.stream.StreamInput;
import org.opensearch.core.common.io.stream.StreamOutput;
-import org.opensearch.transport.TransportResponse;
+import org.opensearch.core.transport.TransportResponse;
import java.io.IOException;
import java.util.List;
diff --git a/server/src/main/java/org/opensearch/cluster/coordination/PreVoteResponse.java b/server/src/main/java/org/opensearch/cluster/coordination/PreVoteResponse.java
index c8186441db449..9c683f7de0878 100644
--- a/server/src/main/java/org/opensearch/cluster/coordination/PreVoteResponse.java
+++ b/server/src/main/java/org/opensearch/cluster/coordination/PreVoteResponse.java
@@ -34,7 +34,7 @@
import org.opensearch.core.common.io.stream.StreamInput;
import org.opensearch.core.common.io.stream.StreamOutput;
-import org.opensearch.transport.TransportResponse;
+import org.opensearch.core.transport.TransportResponse;
import java.io.IOException;
import java.util.Objects;
diff --git a/server/src/main/java/org/opensearch/cluster/coordination/Publication.java b/server/src/main/java/org/opensearch/cluster/coordination/Publication.java
index 429890e7420de..6ffca828ecb06 100644
--- a/server/src/main/java/org/opensearch/cluster/coordination/Publication.java
+++ b/server/src/main/java/org/opensearch/cluster/coordination/Publication.java
@@ -43,7 +43,7 @@
import org.opensearch.cluster.node.DiscoveryNode;
import org.opensearch.common.unit.TimeValue;
import org.opensearch.transport.TransportException;
-import org.opensearch.transport.TransportResponse;
+import org.opensearch.core.transport.TransportResponse;
import java.util.ArrayList;
import java.util.List;
diff --git a/server/src/main/java/org/opensearch/cluster/coordination/PublicationTransportHandler.java b/server/src/main/java/org/opensearch/cluster/coordination/PublicationTransportHandler.java
index 60c931a601561..64c3b93e0e0be 100644
--- a/server/src/main/java/org/opensearch/cluster/coordination/PublicationTransportHandler.java
+++ b/server/src/main/java/org/opensearch/cluster/coordination/PublicationTransportHandler.java
@@ -51,7 +51,7 @@
import org.opensearch.transport.TransportChannel;
import org.opensearch.transport.TransportException;
import org.opensearch.transport.TransportRequestOptions;
-import org.opensearch.transport.TransportResponse;
+import org.opensearch.core.transport.TransportResponse;
import org.opensearch.transport.TransportResponseHandler;
import org.opensearch.transport.TransportService;
diff --git a/server/src/main/java/org/opensearch/cluster/coordination/PublishWithJoinResponse.java b/server/src/main/java/org/opensearch/cluster/coordination/PublishWithJoinResponse.java
index f99ba82be5514..f6350c5558a82 100644
--- a/server/src/main/java/org/opensearch/cluster/coordination/PublishWithJoinResponse.java
+++ b/server/src/main/java/org/opensearch/cluster/coordination/PublishWithJoinResponse.java
@@ -33,7 +33,7 @@
import org.opensearch.core.common.io.stream.StreamInput;
import org.opensearch.core.common.io.stream.StreamOutput;
-import org.opensearch.transport.TransportResponse;
+import org.opensearch.core.transport.TransportResponse;
import java.io.IOException;
import java.util.Optional;
diff --git a/server/src/main/java/org/opensearch/cluster/decommission/DecommissionAttributeMetadata.java b/server/src/main/java/org/opensearch/cluster/decommission/DecommissionAttributeMetadata.java
index 8af783bbdc52e..5a9c82a3849e9 100644
--- a/server/src/main/java/org/opensearch/cluster/decommission/DecommissionAttributeMetadata.java
+++ b/server/src/main/java/org/opensearch/cluster/decommission/DecommissionAttributeMetadata.java
@@ -14,7 +14,7 @@
import org.opensearch.cluster.NamedDiff;
import org.opensearch.cluster.metadata.Metadata;
import org.opensearch.cluster.metadata.Metadata.Custom;
-import org.opensearch.common.Strings;
+import org.opensearch.core.common.Strings;
import org.opensearch.core.common.io.stream.StreamInput;
import org.opensearch.core.common.io.stream.StreamOutput;
import org.opensearch.common.xcontent.XContentType;
diff --git a/server/src/main/java/org/opensearch/cluster/health/ClusterShardHealth.java b/server/src/main/java/org/opensearch/cluster/health/ClusterShardHealth.java
index 00a83c85c17be..32b54ac947ebd 100644
--- a/server/src/main/java/org/opensearch/cluster/health/ClusterShardHealth.java
+++ b/server/src/main/java/org/opensearch/cluster/health/ClusterShardHealth.java
@@ -37,7 +37,7 @@
import org.opensearch.cluster.routing.ShardRouting;
import org.opensearch.cluster.routing.UnassignedInfo;
import org.opensearch.cluster.routing.UnassignedInfo.AllocationStatus;
-import org.opensearch.common.Strings;
+import org.opensearch.core.common.Strings;
import org.opensearch.core.common.io.stream.StreamInput;
import org.opensearch.core.common.io.stream.StreamOutput;
import org.opensearch.core.common.io.stream.Writeable;
diff --git a/server/src/main/java/org/opensearch/cluster/metadata/AliasMetadata.java b/server/src/main/java/org/opensearch/cluster/metadata/AliasMetadata.java
index 44f6f2d7313a3..5e4de1be71214 100644
--- a/server/src/main/java/org/opensearch/cluster/metadata/AliasMetadata.java
+++ b/server/src/main/java/org/opensearch/cluster/metadata/AliasMetadata.java
@@ -45,6 +45,7 @@
import org.opensearch.common.xcontent.XContentHelper;
import org.opensearch.common.xcontent.XContentType;
import org.opensearch.core.common.Strings;
+import org.opensearch.core.xcontent.MediaTypeRegistry;
import org.opensearch.core.xcontent.ToXContent;
import org.opensearch.core.xcontent.ToXContentFragment;
import org.opensearch.core.xcontent.XContentBuilder;
@@ -259,7 +260,7 @@ public static Diff readDiffFrom(StreamInput in) throws IOExceptio
@Override
public String toString() {
- return org.opensearch.common.Strings.toString(XContentType.JSON, this, true, true);
+ return Strings.toString(XContentType.JSON, this, true, true);
}
@Override
@@ -307,7 +308,7 @@ public Builder filter(String filter) {
this.filter = null;
return this;
}
- return filter(XContentHelper.convertToMap(XContentFactory.xContent(filter), filter, true));
+ return filter(XContentHelper.convertToMap(MediaTypeRegistry.xContent(filter).xContent(), filter, true));
}
public Builder filter(Map filter) {
diff --git a/server/src/main/java/org/opensearch/cluster/metadata/AliasValidator.java b/server/src/main/java/org/opensearch/cluster/metadata/AliasValidator.java
index 5a019804f5eac..9b9b91a07a5cf 100644
--- a/server/src/main/java/org/opensearch/cluster/metadata/AliasValidator.java
+++ b/server/src/main/java/org/opensearch/cluster/metadata/AliasValidator.java
@@ -36,10 +36,10 @@
import org.opensearch.common.Nullable;
import org.opensearch.core.common.bytes.BytesReference;
import org.opensearch.common.xcontent.LoggingDeprecationHandler;
+import org.opensearch.common.xcontent.XContentHelper;
import org.opensearch.core.common.Strings;
+import org.opensearch.core.xcontent.MediaTypeRegistry;
import org.opensearch.core.xcontent.NamedXContentRegistry;
-import org.opensearch.common.xcontent.XContentFactory;
-import org.opensearch.common.xcontent.XContentHelper;
import org.opensearch.core.xcontent.XContentParser;
import org.opensearch.index.query.QueryBuilder;
import org.opensearch.index.query.QueryShardContext;
@@ -88,7 +88,7 @@ public void validateAliasStandalone(Alias alias) {
validateAliasStandalone(alias.name(), alias.indexRouting());
if (Strings.hasLength(alias.filter())) {
try {
- XContentHelper.convertToMap(XContentFactory.xContent(alias.filter()), alias.filter(), false);
+ XContentHelper.convertToMap(MediaTypeRegistry.xContent(alias.filter()).xContent(), alias.filter(), false);
} catch (Exception e) {
throw new IllegalArgumentException("failed to parse filter for alias [" + alias.name() + "]", e);
}
@@ -134,7 +134,8 @@ public void validateAliasFilter(
) {
assert queryShardContext != null;
try (
- XContentParser parser = XContentFactory.xContent(filter)
+ XContentParser parser = MediaTypeRegistry.xContent(filter)
+ .xContent()
.createParser(xContentRegistry, LoggingDeprecationHandler.INSTANCE, filter)
) {
validateAliasFilter(parser, queryShardContext);
@@ -158,7 +159,7 @@ public void validateAliasFilter(
try (
InputStream inputStream = filter.streamInput();
- XContentParser parser = XContentFactory.xContentType(inputStream)
+ XContentParser parser = MediaTypeRegistry.xContentType(inputStream)
.xContent()
.createParser(xContentRegistry, LoggingDeprecationHandler.INSTANCE, filter.streamInput())
) {
diff --git a/server/src/main/java/org/opensearch/cluster/metadata/ComponentTemplate.java b/server/src/main/java/org/opensearch/cluster/metadata/ComponentTemplate.java
index 52096422248a5..7c5e0f664df4e 100644
--- a/server/src/main/java/org/opensearch/cluster/metadata/ComponentTemplate.java
+++ b/server/src/main/java/org/opensearch/cluster/metadata/ComponentTemplate.java
@@ -35,7 +35,7 @@
import org.opensearch.cluster.AbstractDiffable;
import org.opensearch.cluster.Diff;
import org.opensearch.common.Nullable;
-import org.opensearch.common.Strings;
+import org.opensearch.core.common.Strings;
import org.opensearch.core.common.io.stream.StreamInput;
import org.opensearch.core.common.io.stream.StreamOutput;
import org.opensearch.common.xcontent.XContentType;
diff --git a/server/src/main/java/org/opensearch/cluster/metadata/ComponentTemplateMetadata.java b/server/src/main/java/org/opensearch/cluster/metadata/ComponentTemplateMetadata.java
index ce806b2aa1f12..2e700389e4fc9 100644
--- a/server/src/main/java/org/opensearch/cluster/metadata/ComponentTemplateMetadata.java
+++ b/server/src/main/java/org/opensearch/cluster/metadata/ComponentTemplateMetadata.java
@@ -36,7 +36,7 @@
import org.opensearch.cluster.Diff;
import org.opensearch.cluster.DiffableUtils;
import org.opensearch.cluster.NamedDiff;
-import org.opensearch.common.Strings;
+import org.opensearch.core.common.Strings;
import org.opensearch.core.common.io.stream.StreamInput;
import org.opensearch.core.common.io.stream.StreamOutput;
import org.opensearch.common.xcontent.XContentType;
diff --git a/server/src/main/java/org/opensearch/cluster/metadata/ComposableIndexTemplate.java b/server/src/main/java/org/opensearch/cluster/metadata/ComposableIndexTemplate.java
index 15e5cb5873719..b9b7c132ba2cf 100644
--- a/server/src/main/java/org/opensearch/cluster/metadata/ComposableIndexTemplate.java
+++ b/server/src/main/java/org/opensearch/cluster/metadata/ComposableIndexTemplate.java
@@ -36,7 +36,7 @@
import org.opensearch.cluster.Diff;
import org.opensearch.cluster.metadata.DataStream.TimestampField;
import org.opensearch.common.Nullable;
-import org.opensearch.common.Strings;
+import org.opensearch.core.common.Strings;
import org.opensearch.core.common.io.stream.StreamInput;
import org.opensearch.core.common.io.stream.StreamOutput;
import org.opensearch.core.common.io.stream.Writeable;
diff --git a/server/src/main/java/org/opensearch/cluster/metadata/ComposableIndexTemplateMetadata.java b/server/src/main/java/org/opensearch/cluster/metadata/ComposableIndexTemplateMetadata.java
index d34416c70dc16..b72c0fdf81e41 100644
--- a/server/src/main/java/org/opensearch/cluster/metadata/ComposableIndexTemplateMetadata.java
+++ b/server/src/main/java/org/opensearch/cluster/metadata/ComposableIndexTemplateMetadata.java
@@ -36,7 +36,7 @@
import org.opensearch.cluster.Diff;
import org.opensearch.cluster.DiffableUtils;
import org.opensearch.cluster.NamedDiff;
-import org.opensearch.common.Strings;
+import org.opensearch.core.common.Strings;
import org.opensearch.core.common.io.stream.StreamInput;
import org.opensearch.core.common.io.stream.StreamOutput;
import org.opensearch.core.ParseField;
diff --git a/server/src/main/java/org/opensearch/cluster/metadata/DataStreamMetadata.java b/server/src/main/java/org/opensearch/cluster/metadata/DataStreamMetadata.java
index 89fe6e9be2320..c7854355ea5cc 100644
--- a/server/src/main/java/org/opensearch/cluster/metadata/DataStreamMetadata.java
+++ b/server/src/main/java/org/opensearch/cluster/metadata/DataStreamMetadata.java
@@ -36,7 +36,7 @@
import org.opensearch.cluster.Diff;
import org.opensearch.cluster.DiffableUtils;
import org.opensearch.cluster.NamedDiff;
-import org.opensearch.common.Strings;
+import org.opensearch.core.common.Strings;
import org.opensearch.core.common.io.stream.StreamInput;
import org.opensearch.core.common.io.stream.StreamOutput;
import org.opensearch.common.xcontent.XContentType;
diff --git a/server/src/main/java/org/opensearch/cluster/metadata/IndexMetadata.java b/server/src/main/java/org/opensearch/cluster/metadata/IndexMetadata.java
index 1ba38daa40566..267abbbd6b6fe 100644
--- a/server/src/main/java/org/opensearch/cluster/metadata/IndexMetadata.java
+++ b/server/src/main/java/org/opensearch/cluster/metadata/IndexMetadata.java
@@ -53,9 +53,9 @@
import org.opensearch.common.settings.Setting;
import org.opensearch.common.settings.Setting.Property;
import org.opensearch.common.settings.Settings;
-import org.opensearch.common.xcontent.XContentFactory;
import org.opensearch.common.xcontent.XContentHelper;
import org.opensearch.core.common.Strings;
+import org.opensearch.core.xcontent.MediaTypeRegistry;
import org.opensearch.core.xcontent.ToXContent;
import org.opensearch.core.xcontent.ToXContentFragment;
import org.opensearch.core.xcontent.XContentBuilder;
@@ -82,6 +82,7 @@
import java.util.List;
import java.util.Locale;
import java.util.Map;
+import java.util.Objects;
import java.util.Set;
import java.util.function.Function;
@@ -285,6 +286,32 @@ public Iterator> settings() {
SETTING_REPLICATION_TYPE,
ReplicationType.DOCUMENT.toString(),
ReplicationType::parseString,
+ new Setting.Validator<>() {
+
+ @Override
+ public void validate(final ReplicationType value) {}
+
+ @Override
+ public void validate(final ReplicationType value, final Map, Object> settings) {
+ final Object remoteStoreEnabled = settings.get(INDEX_REMOTE_STORE_ENABLED_SETTING);
+ if (ReplicationType.SEGMENT.equals(value) == false && Objects.equals(remoteStoreEnabled, true)) {
+ throw new IllegalArgumentException(
+ "To enable "
+ + INDEX_REMOTE_STORE_ENABLED_SETTING.getKey()
+ + ", "
+ + INDEX_REPLICATION_TYPE_SETTING.getKey()
+ + " should be set to "
+ + ReplicationType.SEGMENT
+ );
+ }
+ }
+
+ @Override
+ public Iterator> settings() {
+ final List> settings = List.of(INDEX_REMOTE_STORE_ENABLED_SETTING);
+ return settings.iterator();
+ }
+ },
Property.IndexScope,
Property.Final
);
@@ -328,13 +355,14 @@ public Iterator> settings() {
}
},
Property.IndexScope,
- Property.Final
+ Property.PrivateIndex,
+ Property.Dynamic
);
/**
* Used to specify remote store repository to use for this index.
*/
- public static final Setting INDEX_REMOTE_STORE_REPOSITORY_SETTING = Setting.simpleString(
+ public static final Setting INDEX_REMOTE_SEGMENT_STORE_REPOSITORY_SETTING = Setting.simpleString(
SETTING_REMOTE_SEGMENT_STORE_REPOSITORY,
new Setting.Validator<>() {
@@ -345,10 +373,12 @@ public void validate(final String value) {}
public void validate(final String value, final Map, Object> settings) {
if (value == null || value.isEmpty()) {
throw new IllegalArgumentException(
- "Setting " + INDEX_REMOTE_STORE_REPOSITORY_SETTING.getKey() + " should be provided with non-empty repository ID"
+ "Setting "
+ + INDEX_REMOTE_SEGMENT_STORE_REPOSITORY_SETTING.getKey()
+ + " should be provided with non-empty repository ID"
);
} else {
- validateRemoteStoreSettingEnabled(settings, INDEX_REMOTE_STORE_REPOSITORY_SETTING);
+ validateRemoteStoreSettingEnabled(settings, INDEX_REMOTE_SEGMENT_STORE_REPOSITORY_SETTING);
}
}
@@ -359,7 +389,8 @@ public Iterator> settings() {
}
},
Property.IndexScope,
- Property.Final
+ Property.PrivateIndex,
+ Property.Dynamic
);
private static void validateRemoteStoreSettingEnabled(final Map